From 48b128d239b0e975b9f12e1f3cc5aab2a6963e74 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Tue, 10 Jul 2012 18:05:26 +0200 Subject: SI-6052 - fix groupBy on parallel collections --- src/library/scala/collection/parallel/immutable/ParHashMap.scala | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala index ad882390c8..c9876c4d74 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala @@ -202,7 +202,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has def groupByKey[Repr](cbf: () => Combiner[V, Repr]): ParHashMap[K, Repr] = { val bucks = buckets.filter(_ != null).map(_.headPtr) val root = new Array[HashMap[K, AnyRef]](bucks.length) - + combinerTaskSupport.executeAndWaitResult(new CreateGroupedTrie(cbf, bucks, root, 0, bucks.length)) var bitmap = 0 @@ -306,8 +306,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has unrolled = unrolled.next } - evaluateCombiners(trie) - trie.asInstanceOf[HashMap[K, Repr]] + evaluateCombiners(trie).asInstanceOf[HashMap[K, Repr]] } private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): HashMap[K, Repr] = trie match { case hm1: HashMap.HashMap1[_, _] => -- cgit v1.2.3 From ab0e09bb44567a19690529c03cb388295ce5d338 Mon Sep 17 00:00:00 2001 From: Alexander Clare Date: Thu, 12 Jul 2012 07:59:42 -0500 Subject: SI-5906 Search for sorted sequences Augments sequence classes with search functionality, using binary search (comparable to that found in java.util.Collections) for indexed sequences and linear search for others. --- src/library/scala/collection/Searching.scala | 100 +++++++++++++++++++++ .../scala/collection/generic/IsSeqLike.scala | 38 ++++++++ test/files/run/search.check | 6 ++ test/files/run/search.scala | 14 +++ 4 files changed, 158 insertions(+) create mode 100644 src/library/scala/collection/Searching.scala create mode 100644 src/library/scala/collection/generic/IsSeqLike.scala create mode 100644 test/files/run/search.check create mode 100644 test/files/run/search.scala (limited to 'src') diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala new file mode 100644 index 0000000000..d62421b486 --- /dev/null +++ b/src/library/scala/collection/Searching.scala @@ -0,0 +1,100 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection + +import scala.annotation.tailrec +import scala.collection.generic.IsSeqLike +import scala.math.Ordering + +/** A collection of wrappers that provide sequence classes with search functionality. + * + * Example usage: + * {{{ + * import scala.collection.Searching._ + * val l = List(1, 2, 3, 4, 5) + * l.search(3) + * // == 2 + * }}} + */ +object Searching { + class SearchImpl[A, Repr](val coll: SeqLike[A, Repr]) { + /** Search the sorted sequence for a specific element. + * + * The sequence should be sorted with the same `Ordering` before calling; otherwise, + * the results are undefined. + * + * @see [[scala.math.Ordering]] + * @see [[scala.collection.SeqLike]], method `sorted` + * + * @param elem the element to find. 
+ * @param ord the ordering to be used to compare elements. + * @return a `Right` value containing the index corresponding to the element in the + * $coll, or a `Left` value containing the index where the element would be + * inserted if the element is not in the $coll. + */ + final def search[B >: A](elem: B)(implicit ord: Ordering[B]): Either[Int, Int] = + coll match { + case _: IndexedSeqLike[A, Repr] => binarySearch(elem, -1, coll.length)(ord) + case _ => linearSearch(coll.view, elem, 0)(ord) + } + + /** Search within an interval in the sorted sequence for a specific element. + * + * The sequence should be sorted with the same `Ordering` before calling; otherwise, + * the results are undefined. + * + * @see [[scala.math.Ordering]] + * @see [[scala.collection.SeqLike]], method `sorted` + * + * @param elem the element to find. + * @param from the index where the search starts. + * @param to the index following where the search ends. + * @param ord the ordering to be used to compare elements. + * @return a `Right` value containing the index corresponding to the element in the + * $coll, or a `Left` value containing the index where the element would be + * inserted if the element is not in the $coll. + */ + final def search[B >: A](elem: B, from: Int, to: Int) + (implicit ord: Ordering[B]): Either[Int, Int] = + coll match { + case _: IndexedSeqLike[A, Repr] => binarySearch(elem, from-1, to)(ord) + case _ => linearSearch(coll.view(from, to), elem, from)(ord) + } + + @tailrec + private def binarySearch[B >: A](elem: B, from: Int, to: Int) + (implicit ord: Ordering[B]): Either[Int, Int] = { + if ((to-from) == 1) Left(from) else { + val idx = (to+from)/2 + math.signum(ord.compare(elem, coll(idx))) match { + case -1 => binarySearch(elem, from, idx)(ord) + case 1 => binarySearch(elem, idx, to)(ord) + case _ => Right(idx) + } + } + } + + private def linearSearch[B >: A](c: SeqView[A, Repr], elem: B, offset: Int) + (implicit ord: Ordering[B]): Either[Int, Int] = { + var idx = offset + val it = c.iterator + while (it.hasNext) { + val cur = it.next() + if (ord.equiv(elem, cur)) return Right(idx) + else if (ord.lt(elem, cur)) return Left(idx-1) + idx += 1 + } + Left(idx) + } + + } + + implicit def search[Repr, A](coll: Repr) + (implicit fr: IsSeqLike[Repr]): SearchImpl[fr.A, Repr] = new SearchImpl(fr.conversion(coll)) +} diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala new file mode 100644 index 0000000000..47e2924d34 --- /dev/null +++ b/src/library/scala/collection/generic/IsSeqLike.scala @@ -0,0 +1,38 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection +package generic + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `SeqLike[A, Repr]`. + * + * @see [[scala.collection.generic.IsTraversableLike]] + */ +trait IsSeqLike[Repr] { + /** The type of elements we can traverse over. */ + type A + /** A conversion from the representation type `Repr` to a `SeqLike[A,Repr]`. 
*/ + val conversion: Repr => SeqLike[A, Repr] +} + +object IsSeqLike { + import language.higherKinds + + implicit val stringRepr: IsSeqLike[String] { type A = Char } = + new IsSeqLike[String] { + type A = Char + val conversion = implicitly[String => SeqLike[Char, String]] + } + + implicit def seqLikeRepr[C[_], A0](implicit conv: C[A0] => SeqLike[A0,C[A0]]): IsSeqLike[C[A0]] { type A = A0 } = + new IsSeqLike[C[A0]] { + type A = A0 + val conversion = conv + } +} diff --git a/test/files/run/search.check b/test/files/run/search.check new file mode 100644 index 0000000000..3dc3c9d369 --- /dev/null +++ b/test/files/run/search.check @@ -0,0 +1,6 @@ +Right(2) +Right(4) +Left(9) +Right(2) +Right(4) +Left(9) diff --git a/test/files/run/search.scala b/test/files/run/search.scala new file mode 100644 index 0000000000..1e57fa2bf1 --- /dev/null +++ b/test/files/run/search.scala @@ -0,0 +1,14 @@ +object Test extends App { + import scala.collection.{LinearSeq, IndexedSeq} + import scala.collection.Searching._ + + val ls = LinearSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13) + println(ls.search(3)) + println(ls.search(5, 3, 8)) + println(ls.search(12)) + + val is = IndexedSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13) + println(is.search(3)) + println(is.search(5, 3, 8)) + println(is.search(12)) +} -- cgit v1.2.3 From d9c9e58ebdaf674b525df0264d3cf7b35beb5902 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 13 Jul 2012 09:55:05 -0700 Subject: Removed AestheticSettings. Review by @odersky. --- src/compiler/scala/reflect/reify/Phases.scala | 2 +- src/compiler/scala/reflect/reify/Reifier.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 115 ++++++++------------- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 8 +- .../scala/tools/nsc/ast/parser/Scanners.scala | 6 +- .../scala/tools/nsc/backend/icode/ICodes.scala | 2 +- .../scala/tools/nsc/backend/jvm/GenJVM.scala | 2 +- .../scala/tools/nsc/backend/msil/GenMSIL.scala | 8 +- .../scala/tools/nsc/backend/opt/Inliners.scala | 2 +- .../scala/tools/nsc/interpreter/Power.scala | 2 +- .../scala/tools/nsc/interpreter/TypeStrings.scala | 2 +- .../tools/nsc/settings/AestheticSettings.scala | 39 ------- .../scala/tools/nsc/transform/ExplicitOuter.scala | 4 +- .../scala/tools/nsc/transform/UnCurry.scala | 2 +- .../scala/tools/nsc/typechecker/Infer.scala | 4 +- .../tools/nsc/typechecker/PatternMatching.scala | 2 +- .../tools/nsc/typechecker/SyntheticMethods.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 10 +- 18 files changed, 74 insertions(+), 140 deletions(-) delete mode 100644 src/compiler/scala/tools/nsc/settings/AestheticSettings.scala (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/Phases.scala b/src/compiler/scala/reflect/reify/Phases.scala index 1710cae2a5..5a10ad9282 100644 --- a/src/compiler/scala/reflect/reify/Phases.scala +++ b/src/compiler/scala/reflect/reify/Phases.scala @@ -26,7 +26,7 @@ trait Phases extends Reshape if (reifyDebug) println("[reshape phase]") tree = reshape.transform(tree) if (reifyDebug) println("[interlude]") - if (reifyDebug) println("reifee = " + (if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)) + if (reifyDebug) println("reifee = " + (if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)) if (reifyDebug) println("[calculate phase]") calculate.traverse(tree) diff --git a/src/compiler/scala/reflect/reify/Reifier.scala 
b/src/compiler/scala/reflect/reify/Reifier.scala index 8fba7274be..98cd1b0f43 100644 --- a/src/compiler/scala/reflect/reify/Reifier.scala +++ b/src/compiler/scala/reflect/reify/Reifier.scala @@ -58,7 +58,7 @@ abstract class Reifier extends States val result = reifee match { case tree: Tree => - reifyTrace("reifying = ")(if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString) + reifyTrace("reifying = ")(if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString) reifyTrace("reifee is located at: ")(tree.pos) reifyTrace("universe = ")(universe) reifyTrace("mirror = ")(mirror) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index e378d71944..e9452fed0d 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -15,7 +15,6 @@ import reporters.{ Reporter, ConsoleReporter } import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning } import scala.reflect.internal.util.{ NoPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile } import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat } -import settings.{ AestheticSettings } import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers } import symtab.classfile.Pickler import dependencies.DependencyAnalysis @@ -224,7 +223,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def globalError(msg: String) = reporter.error(NoPosition, msg) def inform(msg: String) = reporter.echo(msg) def warning(msg: String) = - if (opt.fatalWarnings) globalError(msg) + if (settings.fatalWarnings.value) globalError(msg) else reporter.warning(NoPosition, msg) // Getting in front of Predef's asserts to supplement with more info. 
@@ -265,7 +264,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) msg + " in " + (currentTime - start) + "ms" def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg)) - def informProgress(msg: String) = if (opt.verbose) inform("[" + msg + "]") + def informProgress(msg: String) = if (settings.verbose.value) inform("[" + msg + "]") def inform[T](msg: String, value: T): T = returning(value)(x => inform(msg + x)) def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start)) @@ -310,7 +309,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) None } - val charset = opt.encoding flatMap loadCharset getOrElse { + val charset = ( if (settings.encoding.isSetByUser) Some(settings.encoding.value) else None ) flatMap loadCharset getOrElse { settings.encoding.value = defaultEncoding // A mandatory charset Charset.forName(defaultEncoding) } @@ -325,7 +324,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - opt.sourceReader flatMap loadReader getOrElse { + ( if (settings.sourceReader.isSetByUser) Some(settings.sourceReader.value) else None ) flatMap loadReader getOrElse { new SourceReader(charset.newDecoder(), reporter) } } @@ -333,54 +332,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if (!dependencyAnalysis.off) dependencyAnalysis.loadDependencyAnalysis() - if (opt.verbose || opt.logClasspath) { + if (settings.verbose.value || settings.Ylogcp.value) { // Uses the "do not truncate" inform informComplete("[search path for source files: " + classPath.sourcepaths.mkString(",") + "]") informComplete("[search path for class files: " + classPath.asClasspathString + "]") } - object opt extends AestheticSettings { - def settings = Global.this.settings - - // protected implicit lazy val globalPhaseOrdering: Ordering[Phase] = Ordering[Int] on (_.id) - def isActive(ph: Settings#PhasesSetting) = ph containsPhase globalPhase - def wasActive(ph: Settings#PhasesSetting) = ph containsPhase globalPhase.prev - - // Allows for syntax like scalac -Xshow-class Random@erasure,typer - private def splitClassAndPhase(str: String, term: Boolean): Name = { - def mkName(s: String) = if (term) newTermName(s) else newTypeName(s) - (str indexOf '@') match { - case -1 => mkName(str) - case idx => - val phasePart = str drop (idx + 1) - settings.Yshow.tryToSetColon(phasePart split ',' toList) - mkName(str take idx) - } - } - - // behavior - - // debugging - def checkPhase = wasActive(settings.check) - def logPhase = isActive(settings.log) - - // Write *.icode files right after GenICode when -Xprint-icode was given. 
- def writeICodeAtICode = settings.writeICode.isSetByUser && isActive(settings.writeICode) - - // showing/printing things - def browsePhase = isActive(settings.browse) - def echoFilenames = opt.debug && (opt.verbose || currentRun.size < 5) - def noShow = settings.Yshow.isDefault - def printLate = settings.printLate.value - def printPhase = isActive(settings.Xprint) - def showNames = List(showClass, showObject).flatten - def showPhase = isActive(settings.Yshow) - def showSymbols = settings.Yshowsyms.value - def showTrees = settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value - val showClass = optSetting[String](settings.Xshowcls) map (x => splitClassAndPhase(x, false)) - val showObject = optSetting[String](settings.Xshowobj) map (x => splitClassAndPhase(x, true)) - } - // The current division between scala.reflect.* and scala.tools.nsc.* is pretty // clunky. It is often difficult to have a setting influence something without having // to create it on that side. For this one my strategy is a constant def at the file @@ -389,11 +346,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // Here comes another one... override protected val enableTypeVarExperimentals = settings.Xexperimental.value - // True if -Xscript has been set, indicating a script run. - def isScriptRun = opt.script.isDefined - def getSourceFile(f: AbstractFile): BatchSourceFile = - if (isScriptRun) ScriptSourceFile(f, reader read f) + if (settings.script.isSetByUser) ScriptSourceFile(f, reader read f) else new BatchSourceFile(f, reader read f) def getSourceFile(name: String): SourceFile = { @@ -448,7 +402,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if ((unit ne null) && unit.exists) lastSeenSourceFile = unit.source - if (opt.echoFilenames) + if (settings.debug.value && (settings.verbose.value || currentRun.size < 5)) inform("[running phase " + name + " on " + unit + "]") val unit0 = currentUnit @@ -1171,7 +1125,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val info3: List[String] = ( ( List("== Enclosing template or block ==", nodePrinters.nodeToString(enclosing).trim) ) ++ ( if (tpe eq null) Nil else List("== Expanded type of tree ==", typeDeconstruct.show(tpe)) ) - ++ ( if (!opt.debug) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) ) + ++ ( if (!settings.debug.value) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) ) ++ ( List(errorMessage) ) ) @@ -1185,7 +1139,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def echoPhaseSummary(ph: Phase) = { /** Only output a summary message under debug if we aren't echoing each file. 
*/ - if (opt.debug && !opt.echoFilenames) + if (settings.debug.value && !(settings.verbose.value || currentRun.size < 5)) inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]") } @@ -1490,8 +1444,24 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - private def showMembers() = - opt.showNames foreach (x => showDef(x, opt.declsOnly, globalPhase)) + private def showMembers() = { + // Allows for syntax like scalac -Xshow-class Random@erasure,typer + def splitClassAndPhase(str: String, term: Boolean): Name = { + def mkName(s: String) = if (term) newTermName(s) else newTypeName(s) + (str indexOf '@') match { + case -1 => mkName(str) + case idx => + val phasePart = str drop (idx + 1) + settings.Yshow.tryToSetColon(phasePart split ',' toList) + mkName(str take idx) + } + } + if (settings.Xshowcls.isSetByUser) + showDef(splitClassAndPhase(settings.Xshowcls.value, false), false, globalPhase) + + if (settings.Xshowobj.isSetByUser) + showDef(splitClassAndPhase(settings.Xshowobj.value, true), false, globalPhase) + } // Similarly, this will only be created under -Yshow-syms. object trackerFactory extends SymbolTrackers { @@ -1576,37 +1546,40 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // progress update informTime(globalPhase.description, startTime) phaseTimings(globalPhase) = currentTime - startTime - - if (opt.writeICodeAtICode || (opt.printPhase && runIsAtOptimiz)) { + val shouldWriteIcode = ( + (settings.writeICode.isSetByUser && (settings.writeICode containsPhase globalPhase)) + || (!settings.Xprint.doAllPhases && (settings.Xprint containsPhase globalPhase) && runIsAtOptimiz) + ) + if (shouldWriteIcode) { // Write *.icode files when -Xprint-icode or -Xprint: was given. writeICode() - } else if (opt.printPhase || opt.printLate && runIsAt(cleanupPhase)) { + } else if ((settings.Xprint containsPhase globalPhase) || settings.printLate.value && runIsAt(cleanupPhase)) { // print trees - if (opt.showTrees) nodePrinters.printAll() + if (settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value) nodePrinters.printAll() else printAllUnits() } // print the symbols presently attached to AST nodes - if (opt.showSymbols) + if (settings.Yshowsyms.value) trackerFactory.snapshot() // print members - if (opt.showPhase) + if (settings.Yshow containsPhase globalPhase) showMembers() // browse trees with swing tree viewer - if (opt.browsePhase) + if (settings.browse containsPhase globalPhase) treeBrowser browse (phase.name, units) // move the pointer globalPhase = globalPhase.next // run tree/icode checkers - if (opt.checkPhase) + if (settings.check containsPhase globalPhase.prev) runCheckers() // output collected statistics - if (opt.printStats) + if (settings.Ystatistics.value) statistics.print(phase) advancePhase @@ -1616,7 +1589,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) units map (_.body) foreach (traceSymbols recordSymbolsInTree _) // In case no phase was specified for -Xshow-class/object, show it now for sure. 
- if (opt.noShow) + if (settings.Yshow.isDefault) showMembers() reportCompileErrors() @@ -1648,7 +1621,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def compile(filenames: List[String]) { try { val sources: List[SourceFile] = - if (isScriptRun && filenames.size > 1) returning(Nil)(_ => globalError("can only compile one script at a time")) + if (settings.script.isSetByUser && filenames.size > 1) returning(Nil)(_ => globalError("can only compile one script at a time")) else filenames map getSourceFile compileSources(sources) @@ -1795,7 +1768,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) informProgress("wrote " + file) } catch { case ex: IOException => - if (opt.debug) ex.printStackTrace() + if (settings.debug.value) ex.printStackTrace() globalError("could not write file " + file) } }) @@ -1806,8 +1779,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // and forScaladoc default to onlyPresentation, which is the same as defaulting // to false except in old code. The downside is that this leaves us calling a // deprecated method: but I see no simple way out, so I leave it for now. - def forJVM = opt.jvm - override def forMSIL = opt.msil + def forJVM = settings.target.value startsWith "jvm" + override def forMSIL = settings.target.value startsWith "msil" def forInteractive = onlyPresentation def forScaladoc = onlyPresentation def createJavadoc = false diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index be5909a67f..75852d4f2f 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -91,16 +91,16 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL { case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr ) caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m)) // virtpatmat - case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if opt.virtPatmat => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr ) + case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if !settings.XoldPatmat.value => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr ) caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher) // optimized version of virtpatmat - case Block(stats, matchEndDef) if opt.virtPatmat && (stats forall hasSynthCaseSymbol) => + case Block(stats, matchEndDef) if !settings.XoldPatmat.value && (stats forall hasSynthCaseSymbol) => // the assumption is once we encounter a case, the remainder of the block will consist of cases // the prologue may be empty, usually it is the valdef that stores the scrut val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity) // optimized version of virtpatmat - case Block(outerStats, orig@Block(stats, matchEndDef)) if opt.virtPatmat && (stats forall hasSynthCaseSymbol) => + case Block(outerStats, orig@Block(stats, matchEndDef)) if !settings.XoldPatmat.value && (stats forall hasSynthCaseSymbol) => val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m)) case other => @@ -111,7 +111,7 @@ abstract class TreeGen extends 
reflect.internal.TreeGen with TreeDSL { def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr) def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] = - if (!opt.virtPatmat) cases + if (settings.XoldPatmat.value) cases else cases filter { case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false case CaseDef(pat, guard, body) => true diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 6ba273b8ea..f1baeca8d0 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -398,7 +398,7 @@ trait Scanners extends ScannersCommon { * there a realistic situation where one would need it? */ if (isDigit(ch)) { - if (opt.future) syntaxError("Non-zero numbers may not have a leading zero.") + if (settings.future.value) syntaxError("Non-zero numbers may not have a leading zero.") else deprecationWarning("Treating numbers with a leading zero as octal is deprecated.") } base = 8 @@ -975,9 +975,9 @@ trait Scanners extends ScannersCommon { val c = lookahead.getc() /** As of scala 2.11, it isn't a number unless c here is a digit, so - * opt.future excludes the rest of the logic. + * settings.future.value excludes the rest of the logic. */ - if (opt.future && !isDigit(c)) + if (settings.future.value && !isDigit(c)) return setStrVal() val isDefinitelyNumber = (c: @switch) match { diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala index 631b71d83a..ebc62e2212 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala @@ -42,7 +42,7 @@ abstract class ICodes extends AnyRef /** Debugging flag */ def shouldCheckIcode = settings.check contains global.genicode.phaseName - def checkerDebug(msg: String) = if (shouldCheckIcode && global.opt.debug) println(msg) + def checkerDebug(msg: String) = if (shouldCheckIcode && global.settings.debug.value) println(msg) /** The ICode linearizer. 
*/ val linearizer: Linearizer = settings.Xlinearizer.value match { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index 9661ae6b3e..72985d58af 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -755,7 +755,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with } } val index = jmember.getConstantPool.addUtf8(sig).toShort - if (opt.verboseDebug) + if (settings.verbose.value && settings.debug.value) beforeErasure(println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index)) val buf = ByteBuffer.allocate(2) diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala index d23571b517..599f0dd640 100644 --- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala +++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala @@ -45,8 +45,8 @@ abstract class GenMSIL extends SubComponent { //classes is ICodes.classes, a HashMap[Symbol, IClass] classes.values foreach codeGenerator.findEntryPoint - if( opt.showClass.isDefined && (codeGenerator.entryPoint == null) ) { // TODO introduce dedicated setting instead - val entryclass = opt.showClass.get.toString + if( settings.Xshowcls.isSetByUser && (codeGenerator.entryPoint == null) ) { // TODO introduce dedicated setting instead + val entryclass = settings.Xshowcls.value.toString warning("Couldn't find entry class " + entryclass) } @@ -1731,8 +1731,8 @@ abstract class GenMSIL extends SubComponent { false } - if((entryPoint == null) && opt.showClass.isDefined) { // TODO introduce dedicated setting instead - val entryclass = opt.showClass.get.toString + if((entryPoint == null) && settings.Xshowcls.isSetByUser) { // TODO introduce dedicated setting instead + val entryclass = settings.Xshowcls.value.toString val cfn = cls.symbol.fullName if(cfn == entryclass) { for (m <- cls.methods; if isEntryPoint(m.symbol)) { entryPoint = m.symbol } diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index 44acfed411..1603b286db 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -240,7 +240,7 @@ abstract class Inliners extends SubComponent { } val tfa = new analysis.MTFAGrowable() - tfa.stat = global.opt.printStats + tfa.stat = global.settings.Ystatistics.value val staleOut = new mutable.ListBuffer[BasicBlock] val splicedBlocks = mutable.Set.empty[BasicBlock] val staleIn = mutable.Set.empty[BasicBlock] diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala index 57d7cef726..b9af648630 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Power.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala @@ -73,7 +73,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re pass += 1 val (repeats, unseen) = todo partition seen unseenHistory += unseen.size - if (opt.verbose) { + if (settings.verbose.value) { println("%3d %s accumulated, %s discarded. 
This pass: %s unseen, %s repeats".format( pass, keep.size, discarded, unseen.size, repeats.size)) } diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala index 56b9c7011c..d8af6b4b4f 100644 --- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala @@ -57,7 +57,7 @@ trait StructuredTypeStrings extends DestructureTypes { else block(level, grouping)(name, nodes) } private def shortClass(x: Any) = { - if (opt.debug) { + if (settings.debug.value) { val name = (x.getClass.getName split '.').last val isAnon = name.reverse takeWhile (_ != '$') forall (_.isDigit) val str = if (isAnon) name else (name split '$').last diff --git a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala b/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala deleted file mode 100644 index 2baff0bb1c..0000000000 --- a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2011 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package settings - -/** Taking flag checking to a somewhat higher level. */ -trait AestheticSettings { - def settings: Settings - - // Some(value) if setting has been set by user, None otherwise. - def optSetting[T](s: Settings#Setting): Option[T] = - if (s.isDefault) None else Some(s.value.asInstanceOf[T]) - - def script = optSetting[String](settings.script) - def encoding = optSetting[String](settings.encoding) - def sourceReader = optSetting[String](settings.sourceReader) - - def debug = settings.debug.value - def declsOnly = false - def deprecation = settings.deprecation.value - def experimental = settings.Xexperimental.value - def fatalWarnings = settings.fatalWarnings.value - def feature = settings.feature.value - def future = settings.future.value - def logClasspath = settings.Ylogcp.value - def printStats = settings.Ystatistics.value - def target = settings.target.value - def unchecked = settings.unchecked.value - def verbose = settings.verbose.value - def virtPatmat = !settings.XoldPatmat.value - - /** Derived values */ - def jvm = target startsWith "jvm" - def msil = target == "msil" - def verboseDebug = debug && verbose -} diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index ab7bbc591b..d2688e9cc5 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -516,11 +516,11 @@ abstract class ExplicitOuter extends InfoTransform super.transform(treeCopy.Apply(tree, sel, outerVal :: args)) // entry point for pattern matcher translation - case mch: Match if (!opt.virtPatmat) => // don't use old pattern matcher as fallback when the user wants the virtualizing one + case mch: Match if settings.XoldPatmat.value => // don't use old pattern matcher as fallback when the user wants the virtualizing one matchTranslation(mch) case _ => - if (opt.virtPatmat) { // this turned out to be expensive, hence the hacky `if` and `return` + if (!settings.XoldPatmat.value) { // this turned out to be expensive, hence the hacky `if` and `return` tree match { // for patmatvirtualiser // base..eq(o) --> base.$outer().eq(o) if there's an accessor, else the whole tree becomes TRUE diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala 
b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 663b3dd2e9..ff38227294 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -713,7 +713,7 @@ abstract class UnCurry extends InfoTransform addJavaVarargsForwarders(dd, flatdd) case Try(body, catches, finalizer) => - if (opt.virtPatmat) { if(catches exists (cd => !treeInfo.isCatchCase(cd))) debugwarn("VPM BUG! illegal try/catch "+ catches); tree } + if (!settings.XoldPatmat.value) { if(catches exists (cd => !treeInfo.isCatchCase(cd))) debugwarn("VPM BUG! illegal try/catch "+ catches); tree } else if (catches forall treeInfo.isCatchCase) tree else { val exname = unit.freshTermName("ex$") diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 960c210649..2f9bb24079 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -505,14 +505,14 @@ trait Infer { && (restpe.isWildcard || (varianceInType(restpe)(tparam) & COVARIANT) == 0) // don't retract covariant occurrences ) - // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat + // checks !settings.XoldPatmat.value directly so one need not run under -Xexperimental to use virtpatmat buf += ((tparam, if (retract) None else Some( if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass) else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass) // this infers Foo.type instead of "object Foo" (see also widenIfNecessary) - else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden)) targ + else if (targ.typeSymbol.isModuleClass || ((settings.Xexperimental.value || !settings.XoldPatmat.value) && tvar.constr.avoidWiden)) targ else targ.widen ) )) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index b1e68e2757..0dd4f9fbf9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -52,7 +52,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // @inline final def patmatDebug(s: => String) = if (printPatmat) println(s) def newTransformer(unit: CompilationUnit): Transformer = - if (opt.virtPatmat) new MatchTransformer(unit) + if (!settings.XoldPatmat.value) new MatchTransformer(unit) else noopTransformer // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...) diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 5465a3b47f..8ca98a39c3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -77,7 +77,7 @@ trait SyntheticMethods extends ast.TreeDSL { // like Tags and Arrays which are not robust and infer things // which they shouldn't. 
val accessorLub = ( - if (opt.experimental) { + if (settings.Xexperimental.value) { global.weakLub(accessors map (_.tpe.finalResultType))._1 match { case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents) case tp => tp diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 5241974793..831f24c999 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -93,7 +93,7 @@ trait Typers extends Modes with Adaptations with Tags { // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope) // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction // this is disabled by: -Xoldpatmat, scaladoc or interactive compilation - @inline private def newPatternMatching = opt.virtPatmat && !forScaladoc && !forInteractive // && (phase.id < currentRun.uncurryPhase.id) + @inline private def newPatternMatching = !settings.XoldPatmat.value && !forScaladoc && !forInteractive // && (phase.id < currentRun.uncurryPhase.id) abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors { import context0.unit @@ -2299,7 +2299,7 @@ trait Typers extends Modes with Adaptations with Tags { val casesTyped = typedCases(cases, selectorTp, pt) val (resTp, needAdapt) = - if (opt.virtPatmat) ptOrLubPacked(casesTyped, pt) + if (!settings.XoldPatmat.value) ptOrLubPacked(casesTyped, pt) else ptOrLub(casesTyped map (_.tpe), pt) val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp)) @@ -2315,7 +2315,7 @@ trait Typers extends Modes with Adaptations with Tags { // TODO: add fallback __match sentinel to predef val matchStrategy: Tree = - if (!(newPatternMatching && opt.experimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen + if (!(newPatternMatching && settings.Xexperimental.value && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { case SilentResultValue(ms) => ms case _ => null @@ -3202,7 +3202,7 @@ trait Typers extends Modes with Adaptations with Tags { // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test // return the corresponding extractor (an instance of ClassTag[`pt`]) - def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (!opt.virtPatmat || isPastTyper) None else { + def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (settings.XoldPatmat.value || isPastTyper) None else { // only look at top-level type, can't (reliably) do anything about unchecked type args (in general) pt.normalize.typeConstructor match { // if at least one of the types in an intersection is checkable, use the checkable ones @@ -3980,7 +3980,7 @@ trait Typers extends Modes with Adaptations with Tags { // in the special (though common) case where the types are equal, it pays to pack before comparing // especially virtpatmat needs more aggressive unification of skolemized types // this breaks src/library/scala/collection/immutable/TrieIterator.scala - if ( opt.virtPatmat && !isPastTyper + if ( 
!settings.XoldPatmat.value && !isPastTyper && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continations break if you by pass them like this) && thenTp =:= elseTp ) (thenp1.tpe, false) // use unpacked type -- cgit v1.2.3 From 9e101a3de85dcd8b54985176e6a0fea11bd1bf78 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 12 Jul 2012 22:50:53 -0700 Subject: Simplify raw types logic. It was spread out much further than was warranted. Review by @moors. --- src/reflect/scala/reflect/internal/Symbols.scala | 61 +++++------------------- src/reflect/scala/reflect/internal/Types.scala | 14 ++++++ 2 files changed, 27 insertions(+), 48 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 04fa01c6f3..e5d7f67e4c 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1441,9 +1441,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** Set initial info. */ - def setInfo(info: Type): this.type = { info_=(info); this } + def setInfo(info: Type): this.type = { info_=(info); this } /** Modifies this symbol's info in place. */ - def modifyInfo(f: Type => Type): this.type = setInfo(f(info)) + def modifyInfo(f: Type => Type): this.type = setInfo(f(info)) /** Substitute second list of symbols for first in current info. */ def substInfo(syms0: List[Symbol], syms1: List[Symbol]): this.type = if (syms0.isEmpty) this @@ -1572,13 +1572,18 @@ trait Symbols extends api.Symbols { self: SymbolTable => * This is done in checkAccessible and overriding checks in refchecks * We can't do this on class loading because it would result in infinite cycles. */ - final def cookJavaRawInfo() { - if (hasFlag(TRIEDCOOKING)) return else setFlag(TRIEDCOOKING) // only try once... - val oldInfo = info - doCookJavaRawInfo() - } + def cookJavaRawInfo(): Unit = { + // only try once... 
+ if (this hasFlag TRIEDCOOKING) + return - protected def doCookJavaRawInfo(): Unit + this setFlag TRIEDCOOKING + info // force the current info + if (isJavaDefined || isType && owner.isJavaDefined) + this modifyInfo rawToExistential + else if (isOverloaded) + alternatives withFilter (_.isJavaDefined) foreach (_ modifyInfo rawToExistential) + } /** The type constructor of a symbol is: * For a type symbol, the type corresponding to the symbol itself, @@ -2666,36 +2671,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => name = nme.expandedName(name.toTermName, base) } } - - protected def doCookJavaRawInfo() { - def cook(sym: Symbol) { - require(sym.isJavaDefined, sym) - // @M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible - // object rawToExistentialInJava extends TypeMap { - // def apply(tp: Type): Type = tp match { - // // any symbol that occurs in a java sig, not just java symbols - // // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14 - // case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty => - // val eparams = typeParamsToExistentials(sym, sym.typeParams) - // existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe))) - // case _ => - // mapOver(tp) - // } - // } - val tpe1 = rawToExistential(sym.tpe) - // println("cooking: "+ sym +": "+ sym.tpe +" to "+ tpe1) - if (tpe1 ne sym.tpe) { - sym.setInfo(tpe1) - } - } - - if (isJavaDefined) - cook(this) - else if (isOverloaded) - for (sym2 <- alternatives) - if (sym2.isJavaDefined) - cook(sym2) - } } implicit val TermSymbolTag = ClassTag[TermSymbol](classOf[TermSymbol]) @@ -2924,15 +2899,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => * public class Test1 {} * info for T in Test1 should be >: Nothing <: Test3[_] */ - protected def doCookJavaRawInfo() { - if (isJavaDefined || owner.isJavaDefined) { - val tpe1 = rawToExistential(info) - // println("cooking type: "+ this +": "+ info +" to "+ tpe1) - if (tpe1 ne info) { - setInfo(tpe1) - } - } - } Statistics.incCounter(typeSymbolCount) } @@ -3305,7 +3271,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def info: Type = NoType override def existentialBound: Type = NoType override def rawInfo: Type = NoType - protected def doCookJavaRawInfo() {} override def accessBoundary(base: Symbol): Symbol = enclosingRootClass def cloneSymbolImpl(owner: Symbol, newFlags: Long) = abort("NoSymbol.clone()") override def originalEnclosingMethod = this diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index f3dd1f03ad..2c705b0b5b 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4199,6 +4199,20 @@ trait Types extends api.Types { self: SymbolTable => mapOver(tp) } } + /*** + *@M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible + object rawToExistentialInJava extends TypeMap { + def apply(tp: Type): Type = tp match { + // any symbol that occurs in a java sig, not just java symbols + // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14 + case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty => + val eparams = typeParamsToExistentials(sym, sym.typeParams) + existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe))) + case _ => + mapOver(tp) + } + } + */ /** Used by existentialAbstraction. 
*/ -- cgit v1.2.3 From 6559722330786dd26cc86b554296d5cb23eeb912 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 16 Jul 2012 06:39:55 -0700 Subject: Closes SI-6072, crasher with overloaded eq. You don't want to do name-based selections in later phases if you can help it, because there is nobody left to resolve your overloads. If as in this example you're calling a known method, use the symbol. Review by @hubertp. --- src/compiler/scala/tools/nsc/transform/Mixin.scala | 2 +- test/files/pos/t6072.scala | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t6072.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index fe5bef5009..930a7b34ce 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -868,7 +868,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { rhs match { case Block(List(assign), returnTree) => val Assign(moduleVarRef, _) = assign - val cond = Apply(Select(moduleVarRef, nme.eq), List(NULL)) + val cond = Apply(Select(moduleVarRef, Object_eq), List(NULL)) mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args) case _ => assert(false, "Invalid getter " + rhs + " for module in class " + clazz) diff --git a/test/files/pos/t6072.scala b/test/files/pos/t6072.scala new file mode 100644 index 0000000000..e25ebbffc5 --- /dev/null +++ b/test/files/pos/t6072.scala @@ -0,0 +1,3 @@ +class A { + object B { def eq(lvl: Int) = ??? } +} -- cgit v1.2.3 From 41869c39ba68ef574595533777d2a99fcabdbdc3 Mon Sep 17 00:00:00 2001 From: Alexander Clare Date: Mon, 16 Jul 2012 13:35:43 -0500 Subject: Changes suggested by @retronym and @jsuereth Change return type to case classes, branch between functions depending on IndexedSeq instead of IndexedSeqLike, and alter tests accordingly. Clean up doc comments and reflect changes in them. --- src/library/scala/collection/Searching.scala | 58 ++++++++++++++-------- .../scala/collection/generic/IsSeqLike.scala | 21 +++++++- test/files/run/search.check | 12 ++--- test/files/run/search.scala | 2 +- 4 files changed, 64 insertions(+), 29 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala index d62421b486..c1f7f4cae6 100644 --- a/src/library/scala/collection/Searching.scala +++ b/src/library/scala/collection/Searching.scala @@ -19,36 +19,51 @@ import scala.math.Ordering * import scala.collection.Searching._ * val l = List(1, 2, 3, 4, 5) * l.search(3) - * // == 2 + * // == Found(2) * }}} */ object Searching { + sealed abstract class SearchResult { + def insertionPoint: Int + } + + case class Found(foundIndex: Int) extends SearchResult { + override def insertionPoint = foundIndex + } + case class InsertionPoint(insertionPoint: Int) extends SearchResult + class SearchImpl[A, Repr](val coll: SeqLike[A, Repr]) { - /** Search the sorted sequence for a specific element. + /** Search the sorted sequence for a specific element. If the sequence is an + * `IndexedSeq`, a binary search is used. Otherwise, a linear search is used. * * The sequence should be sorted with the same `Ordering` before calling; otherwise, * the results are undefined. * + * @see [[scala.math.IndexedSeq]] * @see [[scala.math.Ordering]] * @see [[scala.collection.SeqLike]], method `sorted` * * @param elem the element to find. 
* @param ord the ordering to be used to compare elements. - * @return a `Right` value containing the index corresponding to the element in the - * $coll, or a `Left` value containing the index where the element would be - * inserted if the element is not in the $coll. + * + * @return a `Found` value containing the index corresponding to the element in the + * sequence, or the `InsertionPoint` where the element would be inserted if + * the element is not in the sequence. */ - final def search[B >: A](elem: B)(implicit ord: Ordering[B]): Either[Int, Int] = + final def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult = coll match { - case _: IndexedSeqLike[A, Repr] => binarySearch(elem, -1, coll.length)(ord) + case _: IndexedSeq[A] => binarySearch(elem, -1, coll.length)(ord) case _ => linearSearch(coll.view, elem, 0)(ord) } - /** Search within an interval in the sorted sequence for a specific element. + /** Search within an interval in the sorted sequence for a specific element. If the + * sequence is an IndexedSeq, a binary search is used. Otherwise, a linear search + * is used. * * The sequence should be sorted with the same `Ordering` before calling; otherwise, * the results are undefined. * + * @see [[scala.math.IndexedSeq]] * @see [[scala.math.Ordering]] * @see [[scala.collection.SeqLike]], method `sorted` * @@ -56,41 +71,42 @@ object Searching { * @param from the index where the search starts. * @param to the index following where the search ends. * @param ord the ordering to be used to compare elements. - * @return a `Right` value containing the index corresponding to the element in the - * $coll, or a `Left` value containing the index where the element would be - * inserted if the element is not in the $coll. + * + * @return a `Found` value containing the index corresponding to the element in the + * sequence, or the `InsertionPoint` where the element would be inserted if + * the element is not in the sequence. 
*/ final def search[B >: A](elem: B, from: Int, to: Int) - (implicit ord: Ordering[B]): Either[Int, Int] = + (implicit ord: Ordering[B]): SearchResult = coll match { - case _: IndexedSeqLike[A, Repr] => binarySearch(elem, from-1, to)(ord) + case _: IndexedSeq[A] => binarySearch(elem, from-1, to)(ord) case _ => linearSearch(coll.view(from, to), elem, from)(ord) } @tailrec private def binarySearch[B >: A](elem: B, from: Int, to: Int) - (implicit ord: Ordering[B]): Either[Int, Int] = { - if ((to-from) == 1) Left(from) else { - val idx = (to+from)/2 + (implicit ord: Ordering[B]): SearchResult = { + if ((to-from) == 1) InsertionPoint(from) else { + val idx = from+(to-from)/2 math.signum(ord.compare(elem, coll(idx))) match { case -1 => binarySearch(elem, from, idx)(ord) case 1 => binarySearch(elem, idx, to)(ord) - case _ => Right(idx) + case _ => Found(idx) } } } private def linearSearch[B >: A](c: SeqView[A, Repr], elem: B, offset: Int) - (implicit ord: Ordering[B]): Either[Int, Int] = { + (implicit ord: Ordering[B]): SearchResult = { var idx = offset val it = c.iterator while (it.hasNext) { val cur = it.next() - if (ord.equiv(elem, cur)) return Right(idx) - else if (ord.lt(elem, cur)) return Left(idx-1) + if (ord.equiv(elem, cur)) return Found(idx) + else if (ord.lt(elem, cur)) return InsertionPoint(idx-1) idx += 1 } - Left(idx) + InsertionPoint(idx) } } diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala index 47e2924d34..8eac025ed6 100644 --- a/src/library/scala/collection/generic/IsSeqLike.scala +++ b/src/library/scala/collection/generic/IsSeqLike.scala @@ -10,8 +10,27 @@ package scala.collection package generic /** Type class witnessing that a collection representation type `Repr` has - * elements of type `A` and has a conversion to `SeqLike[A, Repr]`. + * elements of type `A` and has a conversion to `SeqLike[A, Repr]`. * + * This type enables simple enrichment of `Seq`s with extension methods which + * can make full use of the mechanics of the Scala collections framework in + * their implementation. 
+ * + * Example usage: + * {{{ + * class FilterMapImpl[A, Repr](val r: SeqLike[A, Repr]) { + * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = + * r.flatMap(f(_)) + * } + * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsSeqLike[Repr]): FilterMapImpl[fr.A,Repr] = + * new FilterMapImpl(fr.conversion(r)) + * + * val l = List(1, 2, 3, 4, 5) + * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) + * // == List(2, 4) + * }}} + * + * @see [[scala.collection.generic.Seq]] * @see [[scala.collection.generic.IsTraversableLike]] */ trait IsSeqLike[Repr] { diff --git a/test/files/run/search.check b/test/files/run/search.check index 3dc3c9d369..a885696509 100644 --- a/test/files/run/search.check +++ b/test/files/run/search.check @@ -1,6 +1,6 @@ -Right(2) -Right(4) -Left(9) -Right(2) -Right(4) -Left(9) +Found(2) +Found(4) +InsertionPoint(9) +Found(2) +Found(4) +InsertionPoint(9) diff --git a/test/files/run/search.scala b/test/files/run/search.scala index 1e57fa2bf1..ed7fed54a7 100644 --- a/test/files/run/search.scala +++ b/test/files/run/search.scala @@ -1,6 +1,6 @@ object Test extends App { import scala.collection.{LinearSeq, IndexedSeq} - import scala.collection.Searching._ + import scala.collection.Searching.search val ls = LinearSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13) println(ls.search(3)) -- cgit v1.2.3 From b68d57210abe536ee43a8a1c4ec4b4629145ccc2 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 19 Jul 2012 08:04:43 -0700 Subject: Expanded use of HIDDEN flag. Like the comment says: /** Symbols which are marked HIDDEN. (Expand this list?) * * - $outer fields and accessors * - super accessors * - protected accessors * - lazy local accessors * - bridge methods * - default argument getters * - evaluation-order preserving locals for right-associative and out-of-order named arguments * - catch-expression storing vals * - anything else which feels a setFlag(HIDDEN) */ I also changed a few safe-appearing locations to check isHidden rather than isSynthetic. Review by @dragos, @odersky. 
--- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 6 +++--- .../scala/tools/nsc/backend/jvm/GenJVM.scala | 2 +- .../tools/nsc/symtab/classfile/ClassfileParser.scala | 2 +- src/compiler/scala/tools/nsc/transform/Erasure.scala | 2 +- .../scala/tools/nsc/transform/OverridingPairs.scala | 4 ++-- .../scala/tools/nsc/transform/SpecializeTypes.scala | 2 +- .../scala/tools/nsc/typechecker/Namers.scala | 4 ++-- .../scala/tools/nsc/typechecker/NamesDefaults.scala | 4 ++-- .../scala/tools/nsc/typechecker/RefChecks.scala | 2 +- .../scala/tools/nsc/typechecker/SuperAccessors.scala | 12 ++++++------ .../scala/reflect/internal/ClassfileConstants.scala | 4 ++-- src/reflect/scala/reflect/internal/Definitions.scala | 4 ++-- src/reflect/scala/reflect/internal/Flags.scala | 19 ++++++++++++++++--- test/files/run/t6028.check | 20 ++++++++++---------- 14 files changed, 50 insertions(+), 37 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 90f9d538c1..ce4ef9ca54 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -191,7 +191,7 @@ abstract class TreeBuilder { } else { val x = freshTermName() Block( - List(ValDef(Modifiers(SYNTHETIC), x, TypeTree(), stripParens(left))), + List(ValDef(Modifiers(SYNTHETIC | HIDDEN), x, TypeTree(), stripParens(left))), Apply(atPos(opPos union right.pos) { Select(stripParens(right), op.encode) }, List(Ident(x)))) } } else { @@ -488,7 +488,7 @@ abstract class TreeBuilder { def makeCatchFromExpr(catchExpr: Tree): CaseDef = { val binder = freshTermName("x") val pat = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable))) - val catchDef = ValDef(NoMods, freshTermName("catchExpr"), TypeTree(), catchExpr) + val catchDef = ValDef(Modifiers(HIDDEN), freshTermName("catchExpr"), TypeTree(), catchExpr) val catchFn = Ident(catchDef.name) val body = atPos(catchExpr.pos.makeTransparent)(Block( List(catchDef), @@ -562,7 +562,7 @@ abstract class TreeBuilder { val tmp = freshTermName() val firstDef = atPos(matchExpr.pos) { - ValDef(Modifiers(PrivateLocal | SYNTHETIC | (mods.flags & LAZY)), + ValDef(Modifiers(PrivateLocal | SYNTHETIC | HIDDEN | (mods.flags & LAZY)), tmp, TypeTree(), matchExpr) } var cnt = 0 diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index 763a567828..07b215202d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -1811,7 +1811,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with * Synthetic locals are skipped. All variables are method-scoped. 
*/ private def genLocalVariableTable(m: IMethod, jcode: JCode) { - val vars = m.locals filterNot (_.sym.isSynthetic) + val vars = m.locals filterNot (_.sym.isHidden) if (vars.isEmpty) return val pool = jclass.getConstantPool diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index a035a346e6..9edab4d310 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -872,7 +872,7 @@ abstract class ClassfileParser { sym.setFlag(SYNTHETIC | HIDDEN) in.skip(attrLen) case tpnme.BridgeATTR => - sym.setFlag(BRIDGE) + sym.setFlag(BRIDGE | HIDDEN) in.skip(attrLen) case tpnme.DeprecatedATTR => val arg = Literal(Constant("see corresponding Javadoc for more information.")) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 5115c49c87..5dd63c938f 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -413,7 +413,7 @@ abstract class Erasure extends AddInterfaces if (!bridgeNeeded) return - val newFlags = (member.flags | BRIDGE) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED) + val newFlags = (member.flags | BRIDGE | HIDDEN) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED) val bridge = other.cloneSymbolImpl(owner, newFlags) setPos owner.pos debuglog("generating bridge from %s (%s): %s to %s: %s".format( diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index d8c18c2d50..5105e9ee8a 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -31,11 +31,11 @@ abstract class OverridingPairs { private val self = base.thisType /** Symbols to exclude: Here these are constructors, private locals, - * and bridges. But it may be refined in subclasses. + * and hidden symbols, including bridges. But it may be refined in subclasses. * */ protected def exclude(sym: Symbol): Boolean = - sym.isConstructor || sym.isPrivateLocal || sym.hasFlag(BRIDGE) + sym.isConstructor || sym.isPrivateLocal || sym.isHidden /** The parents of base (may also be refined). 
*/ diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index ffcb682cf7..40b6ac644e 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -798,7 +798,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { var specializingOn = specializedParams(sym) val unusedStvars = specializingOn filterNot specializedTypeVars(sym.info) - if (unusedStvars.nonEmpty && currentRun.compiles(sym) && !sym.isSynthetic) { + if (unusedStvars.nonEmpty && currentRun.compiles(sym) && !sym.isHidden) { reporter.warning(sym.pos, "%s %s unused or used in non-specializable positions.".format( unusedStvars.mkString("", ", ", ""), diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 48fd6ba928..c2d520f8ec 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -556,7 +556,7 @@ trait Namers extends MethodSynthesis { // via "x$lzy" as can be seen in test #3927. val sym = ( if (owner.isClass) createFieldSymbol(tree) - else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, tree.mods.flags & ~IMPLICIT) + else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, (tree.mods.flags | HIDDEN) & ~IMPLICIT) ) enterValSymbol(tree, sym setFlag MUTABLE setLazyAccessor lazyAccessor) } @@ -577,7 +577,7 @@ trait Namers extends MethodSynthesis { case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => assignAndEnterFinishedSymbol(tree) case DefDef(mods, name, tparams, _, _, _) => - val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE else 0 + val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE | HIDDEN else 0 val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag if (name == nme.copy && sym.isSynthetic) diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index a0c1342026..f7f2c3c902 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -164,7 +164,7 @@ trait NamesDefaults { self: Analyzer => // never used for constructor calls, they always have a stable qualifier def blockWithQualifier(qual: Tree, selected: Name) = { - val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos) setInfo qual.tpe + val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos, newFlags = HIDDEN) setInfo qual.tpe blockTyper.context.scope enter sym val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType) // it stays in Vegas: SI-5720, SI-5727 @@ -281,7 +281,7 @@ trait NamesDefaults { self: Analyzer => } else arg.tpe ).widen // have to widen or types inferred from literal defaults will be singletons - val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo ( + val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = HIDDEN) setInfo ( if (byName) functionType(Nil, argTpe) else argTpe ) (context.scope.enter(s), byName, repeated) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 7318538de7..c35bbb4046 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -172,7 
+172,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R def varargBridge(member: Symbol, bridgetpe: Type): Tree = { log("Generating varargs bridge for " + member.fullLocationString + " of type " + bridgetpe) - val bridge = member.cloneSymbolImpl(clazz, member.flags | VBRIDGE) setPos clazz.pos + val bridge = member.cloneSymbolImpl(clazz, member.flags | VBRIDGE | HIDDEN) setPos clazz.pos bridge.setInfo(bridgetpe.cloneInfo(bridge)) clazz.info.decls enter bridge diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index b544407286..d9cf71d9af 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -57,8 +57,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT val clazz = qual.symbol val supername = nme.superName(name) val superAcc = clazz.info.decl(supername).suchThat(_.alias == sym) orElse { - debuglog("add super acc " + sym + sym.locationString + " to `" + clazz);//debug - val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE) setAlias sym + debuglog(s"add super acc ${sym.fullLocationString} to $clazz") + val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE | HIDDEN) setAlias sym val tpe = clazz.thisType memberType sym match { case t if sym.isModule && !sym.isMethod => NullaryMethodType(t) case t => t @@ -370,7 +370,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT } val protAcc = clazz.info.decl(accName).suchThat(s => s == NoSymbol || s.tpe =:= accType(s)) orElse { - val newAcc = clazz.newMethod(nme.protName(sym.originalName), tree.pos) + val newAcc = clazz.newMethod(nme.protName(sym.originalName), tree.pos, newFlags = HIDDEN) newAcc setInfoAndEnter accType(newAcc) val code = DefDef(newAcc, { @@ -381,7 +381,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT args.foldLeft(base)(Apply(_, _)) }) - debuglog("" + code) + debuglog("created protected accessor: " + code) storeAccessorDefinition(clazz, code) newAcc } @@ -393,7 +393,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT case _ => mkApply(TypeApply(selection, targs)) } } - debuglog("Replaced " + tree + " with " + res) + debuglog(s"Replaced $tree with $res") if (hasArgs) localTyper.typedOperator(res) else localTyper.typed(res) } @@ -432,7 +432,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT val accName = nme.protSetterName(field.originalName) val protectedAccessor = clazz.info decl accName orElse { - val protAcc = clazz.newMethod(accName, field.pos) + val protAcc = clazz.newMethod(accName, field.pos, newFlags = HIDDEN) val paramTypes = List(clazz.typeOfThis, field.tpe) val params = protAcc newSyntheticValueParams paramTypes val accessorType = MethodType(params, UnitClass.tpe) diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala index 3346e9cccb..76a2056bfe 100644 --- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala +++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala @@ -342,7 +342,7 @@ object ClassfileConstants { case JAVA_ACC_PRIVATE => PRIVATE case JAVA_ACC_PROTECTED => PROTECTED case JAVA_ACC_FINAL => FINAL - case JAVA_ACC_SYNTHETIC => SYNTHETIC + case JAVA_ACC_SYNTHETIC => SYNTHETIC | HIDDEN // maybe should be just 
hidden? case JAVA_ACC_STATIC => STATIC case JAVA_ACC_ABSTRACT => if (isAnnotation) 0L else if (isClass) ABSTRACT else DEFERRED case JAVA_ACC_INTERFACE => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT @@ -372,7 +372,7 @@ object ClassfileConstants { } def methodFlags(jflags: Int): Long = { initFields(jflags) - translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE else 0) + translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | HIDDEN else 0) } } object FlagTranslation extends FlagTranslation { } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index cd243b9df0..18a51e7539 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -844,8 +844,8 @@ trait Definitions extends api.StandardDefinitions { lazy val Object_!= = enterNewMethod(ObjectClass, nme.NE, anyrefparam, booltype, FINAL) lazy val Object_eq = enterNewMethod(ObjectClass, nme.eq, anyrefparam, booltype, FINAL) lazy val Object_ne = enterNewMethod(ObjectClass, nme.ne, anyrefparam, booltype, FINAL) - lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC)(_ => booltype) - lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC)(_.typeConstructor) + lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC | HIDDEN)(_ => booltype) + lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC | HIDDEN)(_.typeConstructor) lazy val Object_synchronized = newPolyMethod(1, ObjectClass, nme.synchronized_, FINAL)(tps => (Some(List(tps.head.typeConstructor)), tps.head.typeConstructor) ) diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index 55fa00dd4d..a9a65a838b 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -116,6 +116,20 @@ class ModifierFlags { final val LAZY = 1L << 31 // symbol is a lazy val. can't have MUTABLE unless transformed by typer final val PRESUPER = 1L << 37 // value is evaluated before super call final val DEFAULTINIT = 1L << 41 // symbol is initialized to the default value: used by -Xcheckinit + final val HIDDEN = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode + + /** Symbols which are marked HIDDEN. (Expand this list?) + * + * - $outer fields and accessors + * - super accessors + * - protected accessors + * - lazy local accessors + * - bridge methods + * - default argument getters + * - evaluation-order preserving locals for right-associative and out-of-order named arguments + * - catch-expression storing vals + * - anything else which feels a setFlag(HIDDEN) + */ // Overridden. def flagToString(flag: Long): String = "" @@ -165,7 +179,6 @@ class Flags extends ModifierFlags { // A Java method's type is ``cooked'' by transforming raw types to existentials final val SYNCHRONIZED = 1L << 45 // symbol is a method which should be marked ACC_SYNCHRONIZED - final val HIDDEN = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode // ------- shift definitions ------------------------------------------------------- @@ -248,7 +261,7 @@ class Flags extends ModifierFlags { /** These modifiers appear in TreePrinter output. 
*/ final val PrintableFlags = ExplicitFlags | BridgeFlags | LOCAL | SYNTHETIC | STABLE | CASEACCESSOR | MACRO | - ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED + ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED | HIDDEN /** When a symbol for a field is created, only these flags survive * from Modifiers. Others which may be applied at creation time are: @@ -414,7 +427,7 @@ class Flags extends ModifierFlags { case VARARGS => "" // (1L << 43) case TRIEDCOOKING => "" // (1L << 44) case SYNCHRONIZED => "" // (1L << 45) - case 0x400000000000L => "" // (1L << 46) + case HIDDEN => "" // (1L << 46) case 0x800000000000L => "" // (1L << 47) case 0x1000000000000L => "" // (1L << 48) case 0x2000000000000L => "" // (1L << 49) diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check index dca61115ad..9dffcbadd4 100644 --- a/test/files/run/t6028.check +++ b/test/files/run/t6028.check @@ -31,14 +31,14 @@ package { }; final def apply(): Int = $anonfun$foo$1.this.apply$mcI$sp(); def apply$mcI$sp(): Int = $anonfun$foo$1.this.$outer.T$$classParam.+($anonfun$foo$1.this.$outer.field()).+($anonfun$foo$1.this.methodParam$1).+($anonfun$foo$1.this.methodLocal$1); - private[this] val $outer: T = _; - def T$$anonfun$$$outer(): T = $anonfun$foo$1.this.$outer; - final def apply(): Object = scala.Int.box($anonfun$foo$1.this.apply()); + private[this] val $outer: T = _; + def T$$anonfun$$$outer(): T = $anonfun$foo$1.this.$outer; + final def apply(): Object = scala.Int.box($anonfun$foo$1.this.apply()); private[this] val methodParam$1: Int = _; private[this] val methodLocal$1: Int = _ }; abstract trait MethodLocalTrait$1 extends Object { - def T$MethodLocalTrait$$$outer(): T + def T$MethodLocalTrait$$$outer(): T }; object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 { def ($outer: T, barParam$1: Int): ... = { @@ -46,9 +46,9 @@ package { MethodLocalObject$2.this.$asInstanceOf[T#MethodLocalTrait$1$class]()./*MethodLocalTrait$1$class*/$init$(barParam$1); () }; - private[this] val $outer: T = _; - def T$MethodLocalObject$$$outer(): T = MethodLocalObject$2.this.$outer; - def T$MethodLocalTrait$$$outer(): T = MethodLocalObject$2.this.$outer + private[this] val $outer: T = _; + def T$MethodLocalObject$$$outer(): T = MethodLocalObject$2.this.$outer; + def T$MethodLocalTrait$$$outer(): T = MethodLocalObject$2.this.$outer }; final private[this] def MethodLocalObject$1(barParam$1: Int, MethodLocalObject$module$1: scala.runtime.VolatileObjectRef): ... = { MethodLocalObject$module$1.elem = new ...(T.this, barParam$1); @@ -69,9 +69,9 @@ package { def apply$mcV$sp(): Unit = try { $anonfun$tryy$1.this.tryyLocal$1.elem = $anonfun$tryy$1.this.tryyParam$1 } finally (); - private[this] val $outer: T = _; - def T$$anonfun$$$outer(): T = $anonfun$tryy$1.this.$outer; - final def apply(): Object = { + private[this] val $outer: T = _; + def T$$anonfun$$$outer(): T = $anonfun$tryy$1.this.$outer; + final def apply(): Object = { $anonfun$tryy$1.this.apply(); scala.runtime.BoxedUnit.UNIT }; -- cgit v1.2.3 From 2e69ae41d9b4af6dd87e14e5f6f980c51e164e4c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 19 Jul 2012 10:30:23 -0700 Subject: Renaming phase time-travel methods. enteringPhase and exitingPhase are our unambiguously named phase time travel methods. atPhase is deprecated. Other methods and uses have all been brought into line with that. Review by @lrytz. 
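
A schematic before/after of the renaming (hypothetical call sites, assuming the enclosing Global instance so that currentRun, sym and the phase helpers are in scope; not compilable on its own, and the value names are invented for illustration):

// Semantics are unchanged; only the method names differ.
val preErasureInfo  = enteringPhase(currentRun.erasurePhase)(sym.info) // was: atPhase(...) / beforePhase(...)
val postErasureInfo = exitingPhase(currentRun.erasurePhase)(sym.info)  // was: afterPhase(...)

// The per-phase convenience wrappers are renamed the same way, as the diff below shows:
enteringTyper(sym.tpe)    // was: beforeTyper(sym.tpe)
exitingErasure(sym.tpe)   // was: afterErasure(sym.tpe)
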
--- src/compiler/scala/tools/nsc/Global.scala | 60 +++++++++++----------- src/compiler/scala/tools/nsc/SubComponent.scala | 4 +- .../scala/tools/nsc/backend/ScalaPrimitives.scala | 2 +- .../scala/tools/nsc/backend/icode/TypeKinds.scala | 2 +- .../tools/nsc/backend/jvm/BytecodeWriters.scala | 2 +- .../scala/tools/nsc/backend/jvm/GenASM.scala | 16 +++--- .../scala/tools/nsc/backend/jvm/GenJVM.scala | 18 +++---- .../scala/tools/nsc/backend/msil/GenMSIL.scala | 4 +- .../scala/tools/nsc/backend/opt/Inliners.scala | 2 +- .../nsc/dependencies/DependencyAnalysis.scala | 8 +-- .../scala/tools/nsc/interactive/Global.scala | 2 +- .../nsc/interactive/RefinedBuildManager.scala | 6 +-- .../scala/tools/nsc/interpreter/ExprTyper.scala | 2 +- .../scala/tools/nsc/interpreter/ILoop.scala | 4 +- .../scala/tools/nsc/interpreter/IMain.scala | 12 ++--- .../scala/tools/nsc/interpreter/Imports.scala | 2 +- .../tools/nsc/interpreter/JLineCompletion.scala | 6 +-- .../tools/nsc/interpreter/MemberHandlers.scala | 4 +- .../scala/tools/nsc/interpreter/Phased.scala | 4 +- .../nsc/symtab/classfile/ClassfileParser.scala | 6 +-- .../tools/nsc/symtab/classfile/ICodeReader.scala | 2 +- .../scala/tools/nsc/transform/AddInterfaces.scala | 6 +-- .../scala/tools/nsc/transform/Erasure.scala | 46 ++++++++--------- .../scala/tools/nsc/transform/ExplicitOuter.scala | 4 +- .../tools/nsc/transform/ExtensionMethods.scala | 2 +- .../scala/tools/nsc/transform/Flatten.scala | 10 ++-- .../scala/tools/nsc/transform/InfoTransform.scala | 4 +- .../scala/tools/nsc/transform/LazyVals.scala | 2 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 32 ++++++------ .../scala/tools/nsc/transform/PostErasure.scala | 6 +-- .../tools/nsc/transform/SpecializeTypes.scala | 26 +++++----- .../scala/tools/nsc/transform/UnCurry.scala | 8 +-- .../scala/tools/nsc/typechecker/RefChecks.scala | 10 ++-- .../scala/tools/nsc/typechecker/Tags.scala | 2 +- .../scala/reflect/internal/SymbolTable.scala | 23 ++++----- src/reflect/scala/reflect/internal/Symbols.scala | 6 +-- .../reflect/internal/pickling/UnPickler.scala | 4 +- .../internal/util/TraceSymbolActivity.scala | 2 +- .../scala/reflect/runtime/SymbolLoaders.scala | 2 +- 39 files changed, 181 insertions(+), 182 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index e9452fed0d..6a61beb55d 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -168,7 +168,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if (lastPrintedSource == source) println(": tree is unchanged since " + lastPrintedPhase) else { - lastPrintedPhase = phase.prev // since we're running inside "afterPhase" + lastPrintedPhase = phase.prev // since we're running inside "exitingPhase" lastPrintedSource = source println("") println(source) @@ -782,7 +782,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) */ def afterEachPhase[T](op: => T): List[(Phase, T)] = { phaseDescriptors.map(_.ownPhase).filterNot(_ eq NoPhase).foldLeft(List[(Phase, T)]()) { (res, ph) => - val value = afterPhase(ph)(op) + val value = exitingPhase(ph)(op) if (res.nonEmpty && res.head._2 == value) res else ((ph, value)) :: res } reverse @@ -1051,26 +1051,26 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile // TODO - trim these to the absolute minimum. 
- @inline final def afterErasure[T](op: => T): T = afterPhase(currentRun.erasurePhase)(op) - @inline final def afterExplicitOuter[T](op: => T): T = afterPhase(currentRun.explicitouterPhase)(op) - @inline final def afterFlatten[T](op: => T): T = afterPhase(currentRun.flattenPhase)(op) - @inline final def afterIcode[T](op: => T): T = afterPhase(currentRun.icodePhase)(op) - @inline final def afterMixin[T](op: => T): T = afterPhase(currentRun.mixinPhase)(op) - @inline final def afterPickler[T](op: => T): T = afterPhase(currentRun.picklerPhase)(op) - @inline final def afterRefchecks[T](op: => T): T = afterPhase(currentRun.refchecksPhase)(op) - @inline final def afterSpecialize[T](op: => T): T = afterPhase(currentRun.specializePhase)(op) - @inline final def afterTyper[T](op: => T): T = afterPhase(currentRun.typerPhase)(op) - @inline final def afterUncurry[T](op: => T): T = afterPhase(currentRun.uncurryPhase)(op) - @inline final def beforeErasure[T](op: => T): T = beforePhase(currentRun.erasurePhase)(op) - @inline final def beforeExplicitOuter[T](op: => T): T = beforePhase(currentRun.explicitouterPhase)(op) - @inline final def beforeFlatten[T](op: => T): T = beforePhase(currentRun.flattenPhase)(op) - @inline final def beforeIcode[T](op: => T): T = beforePhase(currentRun.icodePhase)(op) - @inline final def beforeMixin[T](op: => T): T = beforePhase(currentRun.mixinPhase)(op) - @inline final def beforePickler[T](op: => T): T = beforePhase(currentRun.picklerPhase)(op) - @inline final def beforeRefchecks[T](op: => T): T = beforePhase(currentRun.refchecksPhase)(op) - @inline final def beforeSpecialize[T](op: => T): T = beforePhase(currentRun.specializePhase)(op) - @inline final def beforeTyper[T](op: => T): T = beforePhase(currentRun.typerPhase)(op) - @inline final def beforeUncurry[T](op: => T): T = beforePhase(currentRun.uncurryPhase)(op) + @inline final def exitingErasure[T](op: => T): T = exitingPhase(currentRun.erasurePhase)(op) + @inline final def exitingExplicitOuter[T](op: => T): T = exitingPhase(currentRun.explicitouterPhase)(op) + @inline final def exitingFlatten[T](op: => T): T = exitingPhase(currentRun.flattenPhase)(op) + @inline final def exitingIcode[T](op: => T): T = exitingPhase(currentRun.icodePhase)(op) + @inline final def exitingMixin[T](op: => T): T = exitingPhase(currentRun.mixinPhase)(op) + @inline final def exitingPickler[T](op: => T): T = exitingPhase(currentRun.picklerPhase)(op) + @inline final def exitingRefchecks[T](op: => T): T = exitingPhase(currentRun.refchecksPhase)(op) + @inline final def exitingSpecialize[T](op: => T): T = exitingPhase(currentRun.specializePhase)(op) + @inline final def exitingTyper[T](op: => T): T = exitingPhase(currentRun.typerPhase)(op) + @inline final def exitingUncurry[T](op: => T): T = exitingPhase(currentRun.uncurryPhase)(op) + @inline final def enteringErasure[T](op: => T): T = enteringPhase(currentRun.erasurePhase)(op) + @inline final def enteringExplicitOuter[T](op: => T): T = enteringPhase(currentRun.explicitouterPhase)(op) + @inline final def enteringFlatten[T](op: => T): T = enteringPhase(currentRun.flattenPhase)(op) + @inline final def enteringIcode[T](op: => T): T = enteringPhase(currentRun.icodePhase)(op) + @inline final def enteringMixin[T](op: => T): T = enteringPhase(currentRun.mixinPhase)(op) + @inline final def enteringPickler[T](op: => T): T = enteringPhase(currentRun.picklerPhase)(op) + @inline final def enteringRefchecks[T](op: => T): T = enteringPhase(currentRun.refchecksPhase)(op) + @inline final def enteringSpecialize[T](op: 
=> T): T = enteringPhase(currentRun.specializePhase)(op) + @inline final def enteringTyper[T](op: => T): T = enteringPhase(currentRun.typerPhase)(op) + @inline final def enteringUncurry[T](op: => T): T = enteringPhase(currentRun.uncurryPhase)(op) def explainContext(c: analyzer.Context): String = ( if (c == null) "" else ( @@ -1109,7 +1109,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val info1 = formatExplain( "while compiling" -> currentSource.path, - "during phase" -> ( if (globalPhase eq phase) phase else "global=%s, atPhase=%s".format(globalPhase, phase) ), + "during phase" -> ( if (globalPhase eq phase) phase else "global=%s, enteringPhase=%s".format(globalPhase, phase) ), "library version" -> scala.util.Properties.versionString, "compiler version" -> Properties.versionString, "reconstructed args" -> settings.recreateArgs.mkString(" ") @@ -1469,7 +1469,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) lazy val trackers = currentRun.units.toList map (x => SymbolTracker(x)) def snapshot() = { inform("\n[[symbol layout at end of " + phase + "]]") - afterPhase(phase) { + exitingPhase(phase) { trackers foreach { t => t.snapshot() inform(t.show("Heading from " + phase.prev.name + " to " + phase.name)) @@ -1605,7 +1605,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // Reset project if (!stopPhase("namer")) { - atPhase(namerPhase) { + enteringPhase(namerPhase) { resetProjectClasses(RootClass) } } @@ -1645,7 +1645,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if (firstPhase ne null) { // we might get here during initialization, is a source is newer than the binary val maxId = math.max(globalPhase.id, typerPhase.id) firstPhase.iterator takeWhile (_.id < maxId) foreach (ph => - atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit)) + enteringPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit)) refreshProgress } } @@ -1654,8 +1654,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) * is needed for?) 
*/ private def resetPackageClass(pclazz: Symbol) { - atPhase(firstPhase) { - pclazz.setInfo(atPhase(typerPhase)(pclazz.info)) + enteringPhase(firstPhase) { + pclazz.setInfo(enteringPhase(typerPhase)(pclazz.info)) } if (!pclazz.isRoot) resetPackageClass(pclazz.owner) } @@ -1703,7 +1703,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def printAllUnits() { print("[[syntax trees at end of %25s]]".format(phase)) - afterPhase(phase)(currentRun.units foreach { unit => + exitingPhase(phase)(currentRun.units foreach { unit => nodePrinters showUnit unit }) } @@ -1712,7 +1712,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) */ def showDef(fullName: Name, declsOnly: Boolean, ph: Phase) = { val boringOwners = Set[Symbol](definitions.AnyClass, definitions.AnyRefClass, definitions.ObjectClass) - def phased[T](body: => T): T = afterPhase(ph)(body) + def phased[T](body: => T): T = exitingPhase(ph)(body) def boringMember(sym: Symbol) = boringOwners(sym.owner) def symString(sym: Symbol) = if (sym.isTerm) sym.defString else sym.toString diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala index a3e451f32f..6115fb1947 100644 --- a/src/compiler/scala/tools/nsc/SubComponent.scala +++ b/src/compiler/scala/tools/nsc/SubComponent.scala @@ -47,8 +47,8 @@ abstract class SubComponent { private var ownPhaseCache: WeakReference[Phase] = new WeakReference(null) private var ownPhaseRunId = global.NoRunId - @inline final def beforeOwnPhase[T](op: => T) = global.beforePhase(ownPhase)(op) - @inline final def afterOwnPhase[T](op: => T) = global.afterPhase(ownPhase)(op) + @inline final def beforeOwnPhase[T](op: => T) = global.enteringPhase(ownPhase)(op) + @inline final def afterOwnPhase[T](op: => T) = global.exitingPhase(ownPhase)(op) /** The phase corresponding to this subcomponent in the current compiler run */ def ownPhase: Phase = { diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala index aab944f65a..59d2da3572 100644 --- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala +++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala @@ -565,7 +565,7 @@ abstract class ScalaPrimitives { import definitions._ val code = getPrimitive(fun) - def elementType = beforeTyper { + def elementType = enteringTyper { val arrayParent = tpe :: tpe.parents collectFirst { case TypeRef(_, ArrayClass, elem :: Nil) => elem } diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index 1ec2cf017a..d0fb08954a 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -139,7 +139,7 @@ trait TypeKinds { self: ICodes => * Here we make the adjustment by rewinding to a pre-erasure state and * sifting through the parents for a class type. 
*/ - def lub0(tk1: TypeKind, tk2: TypeKind): Type = beforeUncurry { + def lub0(tk1: TypeKind, tk2: TypeKind): Type = enteringUncurry { import definitions._ val tp = global.lub(List(tk1.toType, tk2.toType)) val (front, rest) = tp.parents span (_.typeSymbol.isTrait) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala index ff68aba845..446a0c4392 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala @@ -23,7 +23,7 @@ trait BytecodeWriters { import global._ private def outputDirectory(sym: Symbol): AbstractFile = ( - settings.outputDirs.outputDirFor(beforeFlatten(sym.sourceFile)) + settings.outputDirs.outputDirFor(enteringFlatten(sym.sourceFile)) ) private def getFile(base: AbstractFile, /*cls.getName()*/ clsName: String, suffix: String): AbstractFile = { var dir = base diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 756d90bc53..eb9b4b7dd3 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -33,7 +33,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { override def newPhase(p: Phase): Phase = new AsmPhase(p) private def outputDirectory(sym: Symbol): AbstractFile = - settings.outputDirs outputDirFor beforeFlatten(sym.sourceFile) + settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile) private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { var dir = base @@ -81,7 +81,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { // At this point it's a module with a main-looking method, so either succeed or warn that it isn't. hasApproximate && { // Before erasure so we can identify generic mains. - beforeErasure { + enteringErasure { val companion = sym.linkedClassOfClass val companionMain = companion.tpe.member(nme.main) @@ -311,7 +311,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { } def isTopLevelModule(sym: Symbol): Boolean = - afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass } + exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass } def isStaticModule(sym: Symbol): Boolean = { sym.isModuleClass && !sym.isImplClass && !sym.isLifted @@ -569,7 +569,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { * of inner class all until root class. */ def collectInnerClass(s: Symbol): Unit = { - // TODO: some beforeFlatten { ... } which accounts for + // TODO: some enteringFlatten { ... } which accounts for // being nested in parameterized classes (if we're going to selectively flatten.) 
val x = innerClassSymbolFor(s) if(x ne NoSymbol) { @@ -671,7 +671,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { innerSym.rawname + innerSym.moduleSuffix // add inner classes which might not have been referenced yet - afterErasure { + exitingErasure { for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass) innerClassBuffer += m } @@ -867,7 +867,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { if (!needsGenericSignature(sym)) { return null } - val memberTpe = beforeErasure(owner.thisType.memberInfo(sym)) + val memberTpe = enteringErasure(owner.thisType.memberInfo(sym)) val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe) if (jsOpt.isEmpty) { return null } @@ -901,7 +901,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { } if ((settings.check containsName phaseName)) { - val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe)) + val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe)) val bytecodeTpe = owner.thisType.memberInfo(sym) if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) { getCurrentCUnit().warning(sym.pos, @@ -1427,7 +1427,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { if (lmoc != NoSymbol) { // it must be a top level class (name contains no $s) val isCandidateForForwarders = { - afterPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass } + exitingPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass } } if (isCandidateForForwarders) { log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc)) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index 763a567828..280afb441f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -38,7 +38,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with override def newPhase(p: Phase): Phase = new JvmPhase(p) private def outputDirectory(sym: Symbol): AbstractFile = - settings.outputDirs outputDirFor beforeFlatten(sym.sourceFile) + settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile) private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { var dir = base @@ -85,7 +85,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with // succeed or warn that it isn't. hasApproximate && { // Before erasure so we can identify generic mains. - beforeErasure { + enteringErasure { val companion = sym.linkedClassOfClass val companionMain = companion.tpe.member(nme.main) @@ -316,7 +316,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with * of inner class all until root class. */ def collectInnerClass(s: Symbol): Unit = { - // TODO: some beforeFlatten { ... } which accounts for + // TODO: some enteringFlatten { ... } which accounts for // being nested in parameterized classes (if we're going to selectively flatten.) 
val x = innerClassSymbolFor(s) if(x ne NoSymbol) { @@ -446,7 +446,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with // it must be a top level class (name contains no $s) def isCandidateForForwarders(sym: Symbol): Boolean = - afterPickler { + exitingPickler { !(sym.name.toString contains '$') && sym.hasModuleFlag && !sym.isImplClass && !sym.isNestedClass } @@ -735,13 +735,13 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with ) def addGenericSignature(jmember: JMember, sym: Symbol, owner: Symbol) { if (needsGenericSignature(sym)) { - val memberTpe = beforeErasure(owner.thisType.memberInfo(sym)) + val memberTpe = enteringErasure(owner.thisType.memberInfo(sym)) erasure.javaSig(sym, memberTpe) foreach { sig => // This seems useful enough in the general case. log(sig) if (checkSignatures) { - val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe)) + val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe)) val bytecodeTpe = owner.thisType.memberInfo(sym) if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) { clasz.cunit.warning(sym.pos, @@ -757,7 +757,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with } val index = jmember.getConstantPool.addUtf8(sig).toShort if (settings.verbose.value && settings.debug.value) - beforeErasure(println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index)) + enteringErasure(println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index)) val buf = ByteBuffer.allocate(2) buf putShort index @@ -833,7 +833,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with innerSym.rawname + innerSym.moduleSuffix // add inner classes which might not have been referenced yet - afterErasure { + exitingErasure { for (sym <- List(clasz.symbol, clasz.symbol.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass) innerClassBuffer += m } @@ -1988,7 +1988,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with ) def isTopLevelModule(sym: Symbol): Boolean = - afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass } + exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass } def isStaticModule(sym: Symbol): Boolean = { sym.isModuleClass && !sym.isImplClass && !sym.isLifted diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala index 599f0dd640..9f65020928 100644 --- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala +++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala @@ -1126,7 +1126,7 @@ abstract class GenMSIL extends SubComponent { } // method: implicit view(FunctionX[PType0, PType1, ...,PTypeN, ResType]):DelegateType - val (isDelegateView, paramType, resType) = beforeTyper { + val (isDelegateView, paramType, resType) = enteringTyper { msym.tpe match { case MethodType(params, resultType) if (params.length == 1 && msym.name == nme.view_) => @@ -1955,7 +1955,7 @@ abstract class GenMSIL extends SubComponent { } // createClassMembers0 private def isTopLevelModule(sym: Symbol): Boolean = - beforeRefchecks { + enteringRefchecks { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass } diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index 1603b286db..5ccd7054fd 100644 --- 
a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -547,7 +547,7 @@ abstract class Inliners extends SubComponent { private def isHigherOrderMethod(sym: Symbol) = ( sym.isMethod - && beforeExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev" + && enteringExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev" ) /** Should method 'sym' being called in 'receiver' be loaded from disk? */ diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala index 317cc28298..e35191ca20 100644 --- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala +++ b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala @@ -145,7 +145,7 @@ trait DependencyAnalysis extends SubComponent with Files { val name = d.toString d.symbol match { case s : ModuleClassSymbol => - val isTopLevelModule = afterPickler { !s.isImplClass && !s.isNestedClass } + val isTopLevelModule = exitingPickler { !s.isImplClass && !s.isNestedClass } if (isTopLevelModule && (s.companionModule != NoSymbol)) { dependencies.emits(source, nameToFile(unit.source.file, name)) @@ -183,7 +183,7 @@ trait DependencyAnalysis extends SubComponent with Files { // was "at uncurryPhase.prev", which is actually non-deterministic // because the continuations plugin may or may not supply uncurry's // immediately preceding phase. - beforeRefchecks(checkType(tree.symbol.tpe)) + enteringRefchecks(checkType(tree.symbol.tpe)) } tree match { @@ -191,7 +191,7 @@ trait DependencyAnalysis extends SubComponent with Files { !cdef.symbol.isAnonymousFunction => if (cdef.symbol != NoSymbol) buf += cdef.symbol // was "at erasurePhase.prev" - beforeExplicitOuter { + enteringExplicitOuter { for (s <- cdef.symbol.info.decls) s match { case ts: TypeSymbol if !ts.isClass => @@ -203,7 +203,7 @@ trait DependencyAnalysis extends SubComponent with Files { case ddef: DefDef => // was "at typer.prev" - beforeTyper { checkType(ddef.symbol.tpe) } + enteringTyper { checkType(ddef.symbol.tpe) } super.traverse(tree) case a @ Select(q, n) if ((a.symbol != NoSymbol) && (q.symbol != null)) => // #2556 if (!a.symbol.isConstructor && diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index 8f287a5c7a..76f02ed141 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -1066,7 +1066,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") * @return true iff typechecked correctly */ private def applyPhase(phase: Phase, unit: CompilationUnit) { - atPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit } + enteringPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit } } } diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala index 57f0835edd..d01a82c0e1 100644 --- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala +++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala @@ -49,7 +49,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana protected def newCompiler(settings: Settings) = new BuilderGlobal(settings) val compiler = newCompiler(settings) - import compiler.{ Symbol, Type, beforeErasure } + import 
compiler.{ Symbol, Type, enteringErasure } import compiler.dependencyAnalysis.Inherited private case class SymWithHistory(sym: Symbol, befErasure: Type) @@ -161,7 +161,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana isCorrespondingSym(s.sym, sym)) match { case Some(SymWithHistory(oldSym, info)) => val changes = changeSet(oldSym.info, sym) - val changesErasure = beforeErasure(changeSet(info, sym)) + val changesErasure = enteringErasure(changeSet(info, sym)) changesOf(oldSym) = (changes ++ changesErasure).distinct case _ => @@ -332,7 +332,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana for (src <- files; localDefs = compiler.dependencyAnalysis.definitions(src)) { definitions(src) = (localDefs map (s => { this.classes += s.fullName -> src - SymWithHistory(s.cloneSymbol, beforeErasure(s.info.cloneInfo(s))) + SymWithHistory(s.cloneSymbol, enteringErasure(s.info.cloneInfo(s))) })) } this.references = compiler.dependencyAnalysis.references diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala index f2438dcc20..a6f411c44d 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala @@ -63,7 +63,7 @@ trait ExprTyper { case IR.Success => val sym0 = symbolOfTerm(name) // drop NullaryMethodType - val sym = sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType) + val sym = sym0.cloneSymbol setInfo exitingTyper(sym0.info.finalResultType) if (sym.info.typeSymbol eq UnitClass) NoSymbol else sym case _ => NoSymbol diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index e5e7d7081d..2e0cc1a7c3 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -65,7 +65,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) import global._ def printAfterTyper(msg: => String) = - intp.reporter printUntruncatedMessage afterTyper(msg) + intp.reporter printUntruncatedMessage exitingTyper(msg) /** Strip NullaryMethodType artifacts. 
*/ private def replInfo(sym: Symbol) = { @@ -342,7 +342,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) // This groups the members by where the symbol is defined val byOwner = syms groupBy (_.owner) - val sortedOwners = byOwner.toList sortBy { case (owner, _) => afterTyper(source.info.baseClasses indexOf owner) } + val sortedOwners = byOwner.toList sortBy { case (owner, _) => exitingTyper(source.info.baseClasses indexOf owner) } sortedOwners foreach { case (owner, members) => diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index b385787cce..5a8bbfff15 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -770,7 +770,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends val readRoot = getRequiredModule(readPath) // the outermost wrapper (accessPath split '.').foldLeft(readRoot: Symbol) { case (sym, "") => sym - case (sym, name) => afterTyper(termMember(sym, name)) + case (sym, name) => exitingTyper(termMember(sym, name)) } } /** We get a bunch of repeated warnings for reasons I haven't @@ -962,7 +962,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath) - def applyToResultMember[T](name: Name, f: Symbol => T) = afterTyper(f(resultSymbol.info.nonPrivateDecl(name))) + def applyToResultMember[T](name: Name, f: Symbol => T) = exitingTyper(f(resultSymbol.info.nonPrivateDecl(name))) /* typeOf lookup with encoding */ def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString))) @@ -974,10 +974,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /** Types of variables defined by this request. */ lazy val compilerTypeOf = typeMap[Type](x => x) withDefaultValue NoType /** String representations of same. */ - lazy val typeOf = typeMap[String](tp => afterTyper(tp.toString)) + lazy val typeOf = typeMap[String](tp => exitingTyper(tp.toString)) // lazy val definedTypes: Map[Name, Type] = { - // typeNames map (x => x -> afterTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap + // typeNames map (x => x -> exitingTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap // } lazy val definedSymbols = ( termNames.map(x => x -> applyToResultMember(x, x => x)) ++ @@ -1074,7 +1074,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends else NoType } } - def cleanMemberDecl(owner: Symbol, member: Name): Type = afterTyper { + def cleanMemberDecl(owner: Symbol, member: Name): Type = exitingTyper { normalizeNonPublic { owner.info.nonPrivateDecl(member).tpe match { case NullaryMethodType(tp) => tp @@ -1163,7 +1163,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def symbolDefString(sym: Symbol) = { TypeStrings.quieter( - afterTyper(sym.defString), + exitingTyper(sym.defString), sym.owner.name + ".this.", sym.owner.fullName + "." 
) diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala index d579e0369e..3441970894 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Imports.scala @@ -191,5 +191,5 @@ trait Imports { prevRequestList flatMap (req => req.handlers map (req -> _)) private def membersAtPickler(sym: Symbol): List[Symbol] = - beforePickler(sym.info.nonPrivateMembers) + enteringPickler(sym.info.nonPrivateMembers) } \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala index c429e3b196..ab7a47bc8d 100644 --- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala +++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala @@ -47,12 +47,12 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString") def tos(sym: Symbol): String = sym.decodedName - def memberNamed(s: String) = afterTyper(effectiveTp member newTermName(s)) + def memberNamed(s: String) = exitingTyper(effectiveTp member newTermName(s)) def hasMethod(s: String) = memberNamed(s).isMethod // XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the // compiler to crash for reasons not yet known. - def members = afterTyper((effectiveTp.nonPrivateMembers ++ anyMembers) filter (_.isPublic)) + def members = exitingTyper((effectiveTp.nonPrivateMembers ++ anyMembers) filter (_.isPublic)) def methods = members filter (_.isMethod) def packages = members filter (_.isPackage) def aliases = members filter (_.isAliasType) @@ -111,7 +111,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput def excludeNames: List[String] = (anyref.methodNames filterNot anyRefMethodsToShow) :+ "_root_" def methodSignatureString(sym: Symbol) = { - IMain stripString afterTyper(new MethodSymbolOutput(sym).methodString()) + IMain stripString exitingTyper(new MethodSymbolOutput(sym).methodString()) } def exclude(name: String): Boolean = ( diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala index 236f3f23c5..9b988c0921 100644 --- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -208,10 +208,10 @@ trait MemberHandlers { def importedSymbols = individualSymbols ++ wildcardSymbols lazy val individualSymbols: List[Symbol] = - beforePickler(individualNames map (targetType nonPrivateMember _)) + enteringPickler(individualNames map (targetType nonPrivateMember _)) lazy val wildcardSymbols: List[Symbol] = - if (importsWildcard) beforePickler(targetType.nonPrivateMembers) + if (importsWildcard) enteringPickler(targetType.nonPrivateMembers) else Nil /** Complete list of names imported by a wildcard */ diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/compiler/scala/tools/nsc/interpreter/Phased.scala index f39c025a86..1f33ed57b7 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Phased.scala @@ -68,7 +68,7 @@ trait Phased { def apply[T](body: => T) = immutable.SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*) - def atCurrent[T](body: => T): T = atPhase(get)(body) + def atCurrent[T](body: => T): T = enteringPhase(get)(body) def multi[T](body: 
=> T): Seq[T] = multi map (ph => at(ph)(body)) def all[T](body: => T): Seq[T] = atMulti(PhaseName.all)(body) def show[T](body: => T): Seq[T] = { @@ -121,7 +121,7 @@ trait Phased { def isEmpty = this eq NoPhaseName // Execute some code during this phase. - def apply[T](body: => T): T = atPhase(phase)(body) + def apply[T](body: => T): T = enteringPhase(phase)(body) } case object Parser extends PhaseName diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index a035a346e6..5007169569 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -420,9 +420,9 @@ abstract class ClassfileParser { var sym: Symbol = rootMirror.RootClass // was "at flatten.prev" - beforeFlatten { + enteringFlatten { for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) { - val sym1 = beforeIcode { + val sym1 = enteringIcode { sym.linkedClassOfClass.info sym.info.decl(part.encode) }//.suchThat(module == _.isModule) @@ -1203,7 +1203,7 @@ abstract class ClassfileParser { // if loading during initialization of `definitions` typerPhase is not yet set. // in that case we simply load the member at the current phase if (currentRun.typerPhase != null) - beforeTyper(getMember(sym, innerName.toTypeName)) + enteringTyper(getMember(sym, innerName.toTypeName)) else getMember(sym, innerName.toTypeName) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index bb9f9bde98..ccd1acf9c1 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -179,7 +179,7 @@ abstract class ICodeReader extends ClassfileParser { } else { forceMangledName(name, false) - afterFlatten(rootMirror.getClassByName(name.toTypeName)) + exitingFlatten(rootMirror.getClassByName(name.toTypeName)) } if (sym.isModule) sym.moduleClass diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 822c819e7d..ae57b526fb 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -112,7 +112,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => def implClass(iface: Symbol): Symbol = { iface.info - implClassMap.getOrElse(iface, atPhase(implClassPhase) { + implClassMap.getOrElse(iface, enteringPhase(implClassPhase) { log("Creating implClass for " + iface) if (iface.implClass ne NoSymbol) log("%s.implClass already exists: %s".format(iface, iface.implClass)) @@ -196,7 +196,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => case PolyType(_, restpe) => implType(restpe) } - implSym setInfo implType(beforeErasure(iface.info)) + implSym setInfo implType(enteringErasure(iface.info)) } override def load(clazz: Symbol) { complete(clazz) } @@ -353,7 +353,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => val mix1 = mix if (mix == tpnme.EMPTY) mix else { - val ps = beforeErasure { + val ps = enteringErasure { sym.info.parents dropWhile (p => p.symbol.name != mix) } assert(!ps.isEmpty, tree); diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 5115c49c87..c121027f9b 100644 --- 
a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -170,7 +170,7 @@ abstract class Erasure extends AddInterfaces /** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return * type for constructors. */ - def javaSig(sym0: Symbol, info: Type): Option[String] = beforeErasure { + def javaSig(sym0: Symbol, info: Type): Option[String] = enteringErasure { val isTraitSignature = sym0.enclClass.isTrait def superSig(parents: List[Type]) = { @@ -204,7 +204,7 @@ abstract class Erasure extends AddInterfaces // Anything which could conceivably be a module (i.e. isn't known to be // a type parameter or similar) must go through here or the signature is // likely to end up with Foo.Empty where it needs Foo.Empty$. - def fullNameInSig(sym: Symbol) = "L" + beforeIcode(sym.javaBinaryName) + def fullNameInSig(sym: Symbol) = "L" + enteringIcode(sym.javaBinaryName) def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, primitiveOK: Boolean = true): String = { val tp = tp0.dealias @@ -379,7 +379,7 @@ abstract class Erasure extends AddInterfaces val bridgeTarget = mutable.HashMap[Symbol, Symbol]() var bridges = List[Tree]() - val opc = beforeExplicitOuter { + val opc = enteringExplicitOuter { new overridingPairs.Cursor(owner) { override def parents = List(owner.info.firstParent) override def exclude(sym: Symbol) = !sym.isMethod || sym.isPrivate || super.exclude(sym) @@ -391,7 +391,7 @@ abstract class Erasure extends AddInterfaces val member = opc.overriding val other = opc.overridden //println("bridge? " + member + ":" + member.tpe + member.locationString + " to " + other + ":" + other.tpe + other.locationString)//DEBUG - if (beforeExplicitOuter(!member.isDeferred)) + if (enteringExplicitOuter(!member.isDeferred)) checkPair(member, other) opc.next @@ -401,7 +401,7 @@ abstract class Erasure extends AddInterfaces def checkPair(member: Symbol, other: Symbol) { val otpe = erasure(owner)(other.tpe) - val bridgeNeeded = afterErasure ( + val bridgeNeeded = exitingErasure ( !(other.tpe =:= member.tpe) && !(deconstMap(other.tpe) =:= deconstMap(member.tpe)) && { var e = bridgesScope.lookupEntry(member.name) @@ -425,16 +425,16 @@ abstract class Erasure extends AddInterfaces // the parameter symbols need to have the new owner bridge setInfo (otpe cloneInfo bridge) bridgeTarget(bridge) = member - afterErasure(owner.info.decls enter bridge) + exitingErasure(owner.info.decls enter bridge) if (other.owner == owner) { - afterErasure(owner.info.decls.unlink(other)) + exitingErasure(owner.info.decls.unlink(other)) toBeRemoved += other } bridgesScope enter bridge bridges ::= makeBridgeDefDef(bridge, member, other) } - def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = afterErasure { + def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = exitingErasure { // type checking ensures we can safely call `other`, but unless `member.tpe <:< other.tpe`, // calling `member` is not guaranteed to succeed in general, there's // nothing we can do about this, except for an unapply: when this subtype test fails, @@ -445,7 +445,7 @@ abstract class Erasure extends AddInterfaces def maybeWrap(bridgingCall: Tree): Tree = { val guardExtractor = ( // can't statically know which member is going to be selected, so don't let this depend on member.isSynthetic (member.name == nme.unapply || member.name == nme.unapplySeq) - && !afterErasure((member.tpe <:< other.tpe))) // no static guarantees 
(TODO: is the subtype test ever true?) + && !exitingErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?) import CODE._ val _false = FALSE_typed @@ -656,7 +656,7 @@ abstract class Erasure extends AddInterfaces if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) { val noNullCheckNeeded = targ.tpe match { case ErasedValueType(tref) => - atPhase(currentRun.erasurePhase) { + enteringPhase(currentRun.erasurePhase) { isPrimitiveValueClass(erasedValueClassArg(tref).typeSymbol) } case _ => @@ -737,7 +737,7 @@ abstract class Erasure extends AddInterfaces (tree.attachments.get[TypeRefAttachment]: @unchecked) match { case Some(itype) => val tref = itype.tpe - val argPt = atPhase(currentRun.erasurePhase)(erasedValueClassArg(tref)) + val argPt = enteringPhase(currentRun.erasurePhase)(erasedValueClassArg(tref)) log(s"transforming inject $arg -> $tref/$argPt") val result = typed(arg, mode, argPt) log(s"transformed inject $arg -> $tref/$argPt = $result:${result.tpe}") @@ -803,23 +803,23 @@ abstract class Erasure extends AddInterfaces * but their erased types are the same. */ private def checkNoDoubleDefs(root: Symbol) { - def afterErasure[T](op: => T): T = atPhase(phase.next.next)(op) + def exitingErasure[T](op: => T): T = enteringPhase(phase.next.next)(op) def doubleDefError(sym1: Symbol, sym2: Symbol) { // the .toString must also be computed at the earlier phase - val tpe1 = afterRefchecks(root.thisType.memberType(sym1)) - val tpe2 = afterRefchecks(root.thisType.memberType(sym2)) + val tpe1 = exitingRefchecks(root.thisType.memberType(sym1)) + val tpe2 = exitingRefchecks(root.thisType.memberType(sym2)) if (!tpe1.isErroneous && !tpe2.isErroneous) unit.error( if (sym1.owner == root) sym1.pos else root.pos, (if (sym1.owner == sym2.owner) "double definition:\n" else if (sym1.owner == root) "name clash between defined and inherited member:\n" else "name clash between inherited members:\n") + - sym1 + ":" + afterRefchecks(tpe1.toString) + + sym1 + ":" + exitingRefchecks(tpe1.toString) + (if (sym1.owner == root) "" else sym1.locationString) + " and\n" + - sym2 + ":" + afterRefchecks(tpe2.toString) + + sym2 + ":" + exitingRefchecks(tpe2.toString) + (if (sym2.owner == root) " at line " + (sym2.pos).line else sym2.locationString) + "\nhave same type" + - (if (afterRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + afterErasure(sym1.tpe))) + (if (exitingRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + exitingErasure(sym1.tpe))) sym1.setInfo(ErrorType) } @@ -829,7 +829,7 @@ abstract class Erasure extends AddInterfaces if (e.sym.isTerm) { var e1 = decls.lookupNextEntry(e) while (e1 ne null) { - if (afterErasure(e1.sym.info =:= e.sym.info)) doubleDefError(e.sym, e1.sym) + if (exitingErasure(e1.sym.info =:= e.sym.info)) doubleDefError(e.sym, e1.sym) e1 = decls.lookupNextEntry(e1) } } @@ -843,10 +843,10 @@ abstract class Erasure extends AddInterfaces || !sym.hasTypeAt(currentRun.refchecksPhase.id)) override def matches(sym1: Symbol, sym2: Symbol): Boolean = - afterErasure(sym1.tpe =:= sym2.tpe) + exitingErasure(sym1.tpe =:= sym2.tpe) } while (opc.hasNext) { - if (!afterRefchecks( + if (!exitingRefchecks( root.thisType.memberType(opc.overriding) matches root.thisType.memberType(opc.overridden))) { debuglog("" + opc.overriding.locationString + " " + @@ -865,8 +865,8 @@ abstract class Erasure extends AddInterfaces for (member <- root.info.nonPrivateMember(other.name).alternatives) { if (member != other && !(member hasFlag BRIDGE) && - 
afterErasure(member.tpe =:= other.tpe) && - !afterRefchecks( + exitingErasure(member.tpe =:= other.tpe) && + !exitingRefchecks( root.thisType.memberType(member) matches root.thisType.memberType(other))) { debuglog("" + member.locationString + " " + member.infosString + other.locationString + " " + other.infosString); doubleDefError(member, other) @@ -1149,7 +1149,7 @@ abstract class Erasure extends AddInterfaces override def transform(tree: Tree): Tree = { val tree1 = preTransformer.transform(tree) // log("tree after pretransform: "+tree1) - afterErasure { + exitingErasure { val tree2 = mixinTransformer.transform(tree1) // debuglog("tree after addinterfaces: \n" + tree2) diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index d2688e9cc5..b692482972 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -170,7 +170,7 @@ abstract class ExplicitOuter extends InfoTransform } if (!clazz.isTrait && !parents.isEmpty) { for (mc <- clazz.mixinClasses) { - val mixinOuterAcc: Symbol = afterExplicitOuter(outerAccessor(mc)) + val mixinOuterAcc: Symbol = exitingExplicitOuter(outerAccessor(mc)) if (mixinOuterAcc != NoSymbol) { if (decls1 eq decls) decls1 = decls.cloneScope val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED) @@ -558,7 +558,7 @@ abstract class ExplicitOuter extends InfoTransform /** The transformation method for whole compilation units */ override def transformUnit(unit: CompilationUnit) { - afterExplicitOuter(super.transformUnit(unit)) + exitingExplicitOuter(super.transformUnit(unit)) } } diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 5f66cadbc9..b4d396d9a4 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -69,7 +69,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { /** Return the extension method that corresponds to given instance method `meth`. */ - def extensionMethod(imeth: Symbol): Symbol = atPhase(currentRun.refchecksPhase) { + def extensionMethod(imeth: Symbol): Symbol = enteringPhase(currentRun.refchecksPhase) { val companionInfo = imeth.owner.companionModule.info val candidates = extensionNames(imeth) map (companionInfo.decl(_)) val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe) diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index c8de25b2ea..2a3eb9959c 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -20,7 +20,7 @@ abstract class Flatten extends InfoTransform { /** Updates the owning scope with the given symbol; returns the old symbol. 
*/ - private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = afterFlatten { + private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = exitingFlatten { val scope = sym.owner.info.decls val old = scope lookup sym.name if (old ne NoSymbol) @@ -53,7 +53,7 @@ abstract class Flatten extends InfoTransform { clazz.isClass && !clazz.isPackageClass && { // Cannot flatten here: class A[T] { object B } // was "at erasurePhase.prev" - beforeErasure(clazz.typeParams.isEmpty) + enteringErasure(clazz.typeParams.isEmpty) } } @@ -67,11 +67,11 @@ abstract class Flatten extends InfoTransform { val decls1 = scopeTransform(clazz) { val decls1 = newScope if (clazz.isPackageClass) { - afterFlatten { decls foreach (decls1 enter _) } + exitingFlatten { decls foreach (decls1 enter _) } } else { val oldowner = clazz.owner - afterFlatten { oldowner.info } + exitingFlatten { oldowner.info } parents1 = parents mapConserve (this) for (sym <- decls) { @@ -123,7 +123,7 @@ abstract class Flatten extends InfoTransform { liftedDefs(sym.enclosingTopLevelClass.owner) += tree EmptyTree case Select(qual, name) if (sym.isStaticModule && !sym.owner.isPackageClass) => - afterFlatten(atPos(tree.pos)(gen.mkAttributedRef(sym))) + exitingFlatten(atPos(tree.pos)(gen.mkAttributedRef(sym))) case _ => tree } diff --git a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala index 880f0f0157..5080f92e43 100644 --- a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala +++ b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala @@ -10,11 +10,11 @@ package transform * An InfoTransform contains a compiler phase that transforms trees and symbol infos -- making sure they stay consistent. * The symbol info is transformed assuming it is consistent right before this phase. * The info transformation is triggered by Symbol::rawInfo, which caches the results in the symbol's type history. - * This way sym.info (during an atPhase(p)) can look up what the symbol's info should look like at the beginning of phase p. + * This way sym.info (during an enteringPhase(p)) can look up what the symbol's info should look like at the beginning of phase p. * (If the transformed info had not been stored yet, rawInfo will compute the info by composing the info-transformers * of the most recent phase before p, up to the transformer of the phase right before p.) * - * Concretely, atPhase(p) { sym.info } yields the info *before* phase p has transformed it. Imagine you're a phase and it all makes sense. + * Concretely, enteringPhase(p) { sym.info } yields the info *before* phase p has transformed it. Imagine you're a phase and it all makes sense. 
*/ trait InfoTransform extends Transform { import global.{Symbol, Type, InfoTransformer, infoTransformers} diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala index e8387c80f5..4c555f4740 100644 --- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala +++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala @@ -275,7 +275,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD bmps(n) else { val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteClass.tpe) - beforeTyper { + enteringTyper { sym addAnnotation VolatileAttr } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 930a7b34ce..e21a52a9fb 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -68,7 +68,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * maps all other types to themselves. */ private def toInterface(tp: Type): Type = - beforeMixin(tp.typeSymbol.toInterface).tpe + enteringMixin(tp.typeSymbol.toInterface).tpe private def isFieldWithBitmap(field: Symbol) = { field.info // ensure that nested objects are transformed @@ -102,7 +102,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { private val toInterfaceMap = new TypeMap { def apply(tp: Type): Type = mapOver( tp match { case TypeRef(pre, sym, args) if sym.isImplClass => - typeRef(pre, beforeMixin(sym.toInterface), args) + typeRef(pre, enteringMixin(sym.toInterface), args) case _ => tp }) } @@ -119,7 +119,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * @param mixinClass The mixin class that produced the superaccessor */ private def rebindSuper(base: Symbol, member: Symbol, mixinClass: Symbol): Symbol = - afterPickler { + exitingPickler { var bcs = base.info.baseClasses.dropWhile(mixinClass != _).tail var sym: Symbol = NoSymbol debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe + @@ -165,7 +165,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { addMember(clazz, cloneBeforeErasure(mixinClass, mixinMember, clazz)) def cloneBeforeErasure(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol = { - val newSym = beforeErasure { + val newSym = enteringErasure { // since we used `mixinMember` from the interface that represents the trait that's // being mixed in, have to instantiate the interface type params (that may occur in mixinMember's // info) as they are seen from the class. We can't use the member that we get from the @@ -311,12 +311,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { // mixinMember is a value of type unit. 
No field needed ; case _ => // otherwise mixin a field as well - // atPhase: the private field is moved to the implementation class by erasure, + // enteringPhase: the private field is moved to the implementation class by erasure, // so it can no longer be found in the mixinMember's owner (the trait) - val accessed = beforePickler(mixinMember.accessed) + val accessed = enteringPickler(mixinMember.accessed) // #3857, need to retain info before erasure when cloning (since cloning only // carries over the current entry in the type history) - val sym = beforeErasure { + val sym = enteringErasure { // so we have a type history entry before erasure clazz.newValue(nme.getterToLocal(mixinMember.name), mixinMember.pos).setInfo(mixinMember.tpe.resultType) } @@ -380,7 +380,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { var parents1 = parents var decls1 = decls if (!clazz.isPackageClass) { - afterMixin(clazz.owner.info) + exitingMixin(clazz.owner.info) if (clazz.isImplClass) { clazz setFlag lateMODULE var sourceModule = clazz.owner.info.decls.lookup(sym.name.toTermName) @@ -408,7 +408,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { parents1 = parents.head :: (parents.tail map toInterface) } } - //decls1 = atPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug + //decls1 = enteringPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug if ((parents1 eq parents) && (decls1 eq decls)) tp else ClassInfoType(parents1, decls1, clazz) @@ -525,7 +525,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { tree match { case Template(parents, self, body) => localTyper = erasure.newTyper(rootContext.make(tree, currentOwner)) - afterMixin(currentOwner.owner.info)//todo: needed? + exitingMixin(currentOwner.owner.info)//todo: needed? 
if (!currentOwner.isTrait && !isPrimitiveValueClass(currentOwner)) addMixedinMembers(currentOwner, unit) @@ -544,7 +544,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { else EmptyTree } else { - if (currentOwner.isTrait && sym.isSetter && !beforePickler(sym.isDeferred)) { + if (currentOwner.isTrait && sym.isSetter && !enteringPickler(sym.isDeferred)) { sym.addAnnotation(TraitSetterAnnotationClass) } tree @@ -703,7 +703,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val rhs0 = (Super(clazz, tpnme.EMPTY) DOT stat.symbol.alias)(vparams map (v => Ident(v.symbol)): _*) val rhs1 = localTyped(stat.pos, rhs0, stat.symbol.tpe.resultType) - deriveDefDef(stat)(_ => beforeMixin(transform(rhs1))) + deriveDefDef(stat)(_ => enteringMixin(transform(rhs1))) case _ => stat } @@ -722,7 +722,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { def createBitmap: Symbol = { val bitmapKind = bitmapKindForCategory(category) val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo bitmapKind.tpe - beforeTyper(sym addAnnotation VolatileAttr) + enteringTyper(sym addAnnotation VolatileAttr) category match { case nme.BITMAP_TRANSIENT | nme.BITMAP_CHECKINIT_TRANSIENT => sym addAnnotation TransientAttr @@ -1166,7 +1166,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { def implSym = implClass(sym.owner).info.member(sym.name) assert(target ne NoSymbol, List(sym + ":", sym.tpe, sym.owner, implClass(sym.owner), implSym, - beforePrevPhase(implSym.tpe), phase) mkString " " + enteringPrevPhase(implSym.tpe), phase) mkString " " ) typedPos(tree.pos)(Apply(staticRef(target), transformSuper(qual) :: args)) } @@ -1195,7 +1195,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { typedPos(tree.pos)((transformSuper(qual) DOT sym1)()) } else { - staticCall(beforePrevPhase(sym.overridingSymbol(implClass(sym.owner)))) + staticCall(enteringPrevPhase(sym.overridingSymbol(implClass(sym.owner)))) } } else { @@ -1245,7 +1245,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val tree1 = super.transform(preTransform(tree)) // localTyper needed when not flattening inner classes. 
parts after an // inner class will otherwise be typechecked with a wrong scope - try afterMixin(postTransform(tree1)) + try exitingMixin(postTransform(tree1)) finally localTyper = saved } } diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala index 151bc66a79..0a5ceaa081 100644 --- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala +++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala @@ -24,7 +24,7 @@ trait PostErasure extends InfoTransform with TypingTransformers { case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp))) case ErasedValueType(tref) => - atPhase(currentRun.erasurePhase)(erasure.erasedValueClassArg(tref)) + enteringPhase(currentRun.erasurePhase)(erasure.erasedValueClassArg(tref)) case _ => mapOver(tp) } } @@ -39,7 +39,7 @@ trait PostErasure extends InfoTransform with TypingTransformers { Apply(sel @ Select( Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)), acc), List()) - if atPhase(currentRun.erasurePhase) { + if enteringPhase(currentRun.erasurePhase) { tpt.tpe.typeSymbol.isDerivedValueClass && sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox } => @@ -50,7 +50,7 @@ trait PostErasure extends InfoTransform with TypingTransformers { Apply(Select(New(tpt1), nme.CONSTRUCTOR), List(arg1)), cmp), List(Apply(Select(New(tpt2), nme.CONSTRUCTOR), List(arg2)))) - if atPhase(currentRun.erasurePhase) { + if enteringPhase(currentRun.erasurePhase) { tpt1.tpe.typeSymbol.isDerivedValueClass && (cmp == nme.EQ || cmp == nme.NE) && tpt2.tpe.typeSymbol == tpt1.tpe.typeSymbol diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index ffcb682cf7..c91877dc34 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -397,7 +397,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { tpes foreach (tp => buf ++= specializedTypeVars(tp)) buf.result } - def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = beforeTyper(specializedTypeVars(sym.info)) + def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = enteringTyper(specializedTypeVars(sym.info)) /** Return the set of @specialized type variables mentioned by the given type. * It only counts type variables that appear: @@ -533,7 +533,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { var newClassTParams: List[Symbol] = Nil // unspecialized type parameters of 'specializedClass' (cloned) // has to be a val in order to be computed early. 
It is later called - // within 'atPhase(next)', which would lead to an infinite cycle otherwise + // within 'enteringPhase(next)', which would lead to an infinite cycle otherwise val specializedInfoType: Type = { oldClassTParams = survivingParams(clazz.info.typeParams, env) newClassTParams = produceTypeParameters(oldClassTParams, sClass, env) map subst(env) @@ -553,7 +553,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { var res: List[Type] = Nil // log(specializedClass + ": seeking specialized parents of class with parents: " + parents.map(_.typeSymbol)) for (p <- parents) { - val stp = afterSpecialize(specializedType(p)) + val stp = exitingSpecialize(specializedType(p)) if (stp != p) if (p.typeSymbol.isTrait) res ::= stp else if (currentRun.compiles(clazz)) @@ -563,7 +563,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { res } - var parents = List(applyContext(beforeTyper(clazz.tpe))) + var parents = List(applyContext(enteringTyper(clazz.tpe))) // log("!!! Parents: " + parents + ", sym: " + parents.map(_.typeSymbol)) if (parents.head.typeSymbol.isTrait) parents = parents.head.parents.head :: parents @@ -585,7 +585,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { GenPolyType(newClassTParams, ClassInfoType(parents ::: extraSpecializedMixins, decls1, sClass)) } - afterSpecialize(sClass setInfo specializedInfoType) + exitingSpecialize(sClass setInfo specializedInfoType) val fullEnv = outerEnv ++ env /** Enter 'sym' in the scope of the current specialized class. It's type is @@ -774,7 +774,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (existing != NoSymbol) clazz.owner.info.decls.unlink(existing) - afterSpecialize(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes + exitingSpecialize(clazz.owner.info.decls enter spc) //!!! 
assumes fully specialized classes } if (subclasses.nonEmpty) clazz.resetFlag(FINAL) cleanAnyRefSpecCache(clazz, decls1) @@ -792,7 +792,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { */ private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = { sym :: ( - if (!sym.isMethod || beforeTyper(sym.typeParams.isEmpty)) Nil + if (!sym.isMethod || enteringTyper(sym.typeParams.isEmpty)) Nil else { // debuglog("normalizeMember: " + sym.fullNameAsName('.').decode) var specializingOn = specializedParams(sym) @@ -941,7 +941,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { checkOverriddenTParams(overridden) val env = unify(overridden.info, overriding.info, emptyEnv, false, true) - def atNext = afterSpecialize(overridden.owner.info.decl(specializedName(overridden, env))) + def atNext = exitingSpecialize(overridden.owner.info.decl(specializedName(overridden, env))) if (TypeEnv.restrict(env, stvars).nonEmpty && TypeEnv.isValid(env, overridden) && atNext != NoSymbol) { debuglog(" " + pp(env) + " found " + atNext) @@ -982,7 +982,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } ) overloads(overriding) ::= Overload(om, env) - ifDebug(afterSpecialize(assert( + ifDebug(exitingSpecialize(assert( overridden.owner.info.decl(om.name) != NoSymbol, "Could not find " + om.name + " in " + overridden.owner.info.decls)) ) @@ -1140,7 +1140,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case cinfo @ ClassInfoType(parents, decls, clazz) if !unspecializableClass(cinfo) => val tparams = tpe.typeParams if (tparams.isEmpty) - afterSpecialize(parents map (_.typeSymbol.info)) + exitingSpecialize(parents map (_.typeSymbol.info)) val parents1 = parents mapConserve specializedType if (parents ne parents1) { @@ -1611,10 +1611,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { deriveValDef(newValDef)(transform) case Apply(sel @ Select(sup @ Super(qual, name), name1), args) - if (sup.symbol.info.parents != beforePrevPhase(sup.symbol.info.parents)) => + if (sup.symbol.info.parents != enteringPrevPhase(sup.symbol.info.parents)) => def parents = sup.symbol.info.parents - debuglog(tree + " parents changed from: " + beforePrevPhase(parents) + " to: " + parents) + debuglog(tree + " parents changed from: " + enteringPrevPhase(parents) + " to: " + parents) val res = localTyper.typed( Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos) @@ -1843,7 +1843,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { informProgress("specializing " + unit) override def transform(tree: Tree) = { val resultTree = if (settings.nospecialization.value) tree - else afterSpecialize(specializeCalls(unit).transform(tree)) + else exitingSpecialize(specializeCalls(unit).transform(tree)) // Remove the final modifier and @inline annotation from anything in the // original class (since it's being overridden in at least onesubclass). diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 0e1a341da7..a5bfac0e03 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -402,7 +402,7 @@ abstract class UnCurry extends InfoTransform // when calling into scala varargs, make sure it's a sequence. 
def arrayToSequence(tree: Tree, elemtp: Type) = { - afterUncurry { + exitingUncurry { localTyper.typedPos(pos) { val pt = arrayType(elemtp) val adaptedTree = // might need to cast to Array[elemtp], as arrays are not covariant @@ -432,7 +432,7 @@ abstract class UnCurry extends InfoTransform case _ => EmptyTree } } - afterUncurry { + exitingUncurry { localTyper.typedPos(pos) { gen.mkMethodCall(tree, toArraySym, Nil, List(traversableClassTag(tree.tpe))) } @@ -456,7 +456,7 @@ abstract class UnCurry extends InfoTransform else arrayToSequence(mkArray, varargsElemType) } - afterUncurry { + exitingUncurry { if (isJava && !isReferenceArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) { // The array isn't statically known to be a reference array, so call ScalaRuntime.toObjectArray. suffix = localTyper.typedPos(pos) { @@ -664,7 +664,7 @@ abstract class UnCurry extends InfoTransform result setType uncurryTreeType(result.tpe) } - def postTransform(tree: Tree): Tree = afterUncurry { + def postTransform(tree: Tree): Tree = exitingUncurry { def applyUnary(): Tree = { // TODO_NMT: verify that the inner tree of a type-apply also gets parens if the // whole tree is a polymorphic nullary method application diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 7318538de7..063ebe5614 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -543,13 +543,13 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R def uncurryAndErase(tp: Type) = erasure.erasure(sym)(uncurry.transformInfo(sym, tp)) val tp1 = uncurryAndErase(clazz.thisType.memberType(sym)) val tp2 = uncurryAndErase(clazz.thisType.memberType(other)) - afterErasure(tp1 matches tp2) + exitingErasure(tp1 matches tp2) }) def ignoreDeferred(member: Symbol) = ( (member.isAbstractType && !member.isFBounded) || ( member.isJavaDefined && - // the test requires afterErasure so shouldn't be + // the test requires exitingErasure so shouldn't be // done if the compiler has no erasure phase available (currentRun.erasurePhase == NoPhase || javaErasedOverridingSym(member) != NoSymbol) ) @@ -1239,7 +1239,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R case vsym => ValDef(vsym) } } - def createStaticModuleAccessor() = afterRefchecks { + def createStaticModuleAccessor() = exitingRefchecks { val method = ( sym.owner.newMethod(sym.name.toTermName, sym.pos, (sym.flags | STABLE) & ~MODULE) setInfoAndEnter NullaryMethodType(sym.moduleClass.tpe) @@ -1250,7 +1250,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R vdef, localTyper.typedPos(tree.pos) { val vsym = vdef.symbol - afterRefchecks { + exitingRefchecks { val rhs = gen.newModule(sym, vsym.tpe) val body = if (sym.owner.isTrait) rhs else gen.mkAssignAndReturn(vsym, rhs) DefDef(sym, body.changeOwner(vsym -> sym)) @@ -1291,7 +1291,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R if (hasUnitType) List(typed(lazyDef)) else List( typed(ValDef(vsym)), - afterRefchecks(typed(lazyDef)) + exitingRefchecks(typed(lazyDef)) ) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala index 052484e8e1..b929ecdb41 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala @@ -10,7 +10,7 @@ trait 
Tags { trait Tag { self: Typer => - private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = beforeTyper { + private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = enteringTyper { def wrapper (tree: => Tree): Tree = if (allowMaterialization) (context.withMacrosEnabled[Tree](tree)) else (context.withMacrosDisabled[Tree](tree)) wrapper(inferImplicit( EmptyTree, diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 5ae8f22c64..112e4e8e88 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -127,7 +127,7 @@ abstract class SymbolTable extends makro.Universe type RunId = Int final val NoRunId = 0 - // sigh, this has to be public or atPhase doesn't inline. + // sigh, this has to be public or enteringPhase doesn't inline. var phStack: List[Phase] = Nil private[this] var ph: Phase = NoPhase private[this] var per = NoPeriod @@ -190,23 +190,17 @@ abstract class SymbolTable extends makro.Universe p != NoPhase && phase.id > p.id /** Perform given operation at given phase. */ - @inline final def atPhase[T](ph: Phase)(op: => T): T = { + @inline final def enteringPhase[T](ph: Phase)(op: => T): T = { val saved = pushPhase(ph) try op finally popPhase(saved) } + @inline final def exitingPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph.next)(op) + @inline final def enteringPrevPhase[T](op: => T): T = enteringPhase(phase.prev)(op) - /** Since when it is to be "at" a phase is inherently ambiguous, - * a couple unambiguously named methods. - */ - @inline final def beforePhase[T](ph: Phase)(op: => T): T = atPhase(ph)(op) - @inline final def afterPhase[T](ph: Phase)(op: => T): T = atPhase(ph.next)(op) - @inline final def afterCurrentPhase[T](op: => T): T = atPhase(phase.next)(op) - @inline final def beforePrevPhase[T](op: => T): T = atPhase(phase.prev)(op) - - @inline final def atPhaseNotLaterThan[T](target: Phase)(op: => T): T = - if (isAtPhaseAfter(target)) atPhase(target)(op) else op + @inline final def enteringPhaseNotLaterThan[T](target: Phase)(op: => T): T = + if (isAtPhaseAfter(target)) enteringPhase(target)(op) else op final def isValid(period: Period): Boolean = period != 0 && runId(period) == currentRunId && { @@ -333,6 +327,11 @@ abstract class SymbolTable extends makro.Universe /** Is this symbol table a part of a compiler universe? */ def isCompilerUniverse = false + + @deprecated("Use enteringPhase", "2.10.0") + @inline final def atPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph)(op) + @deprecated("Use enteringPhaseNotLaterThan", "2.10.0") + @inline final def atPhaseNotLaterThan[T](target: Phase)(op: => T): T = enteringPhaseNotLaterThan(target)(op) } object SymbolTableStats { diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index dced794771..759c77f717 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1615,7 +1615,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def unsafeTypeParams: List[Symbol] = if (isMonomorphicType) Nil - else atPhase(unsafeTypeParamPhase)(rawInfo.typeParams) + else enteringPhase(unsafeTypeParamPhase)(rawInfo.typeParams) /** The type parameters of this symbol. 
* assumption: if a type starts out as monomorphic, it will not acquire @@ -1627,9 +1627,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => // analogously to the "info" getter, here we allow for two completions: // one: sourceCompleter to LazyType, two: LazyType to completed type if (validTo == NoPeriod) - atPhase(phaseOf(infos.validFrom))(rawInfo load this) + enteringPhase(phaseOf(infos.validFrom))(rawInfo load this) if (validTo == NoPeriod) - atPhase(phaseOf(infos.validFrom))(rawInfo load this) + enteringPhase(phaseOf(infos.validFrom))(rawInfo load this) rawInfo.typeParams } diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 4411b79b97..924d141669 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -838,7 +838,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ { private val p = phase override def complete(sym: Symbol) : Unit = try { val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType` - atPhase(p) (sym setInfo tp) + enteringPhase(p) (sym setInfo tp) if (currentRunId != definedAtRunId) sym.setInfo(adaptToNewRunMap(tp)) } @@ -856,7 +856,7 @@ abstract class UnPickler /*extends reflect.generic.UnPickler*/ { super.complete(sym) var alias = at(j, readSymbol) if (alias.isOverloaded) - alias = atPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt)))) + alias = enteringPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt)))) sym.asInstanceOf[TermSymbol].setAlias(alias) } diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala index 5fbeb5f576..814fa23e29 100644 --- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala +++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala @@ -125,7 +125,7 @@ trait TraceSymbolActivity { } ph } - private def runBeforeErasure[T](body: => T): T = atPhase(findErasurePhase)(body) + private def runBeforeErasure[T](body: => T): T = enteringPhase(findErasurePhase)(body) def showAllSymbols() { if (!traceSymbolActivity) return diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala index c1cd5d2911..17e3d7581a 100644 --- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala +++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala @@ -28,7 +28,7 @@ trait SymbolLoaders { self: SymbolTable => debugInfo("completing "+sym+"/"+clazz.fullName) assert(sym == clazz || sym == module || sym == module.moduleClass) // try { - atPhaseNotLaterThan(picklerPhase) { + enteringPhaseNotLaterThan(picklerPhase) { val loadingMirror = mirrorThatLoaded(sym) val javaClass = loadingMirror.javaClass(clazz.javaClassName) loadingMirror.unpickleClass(clazz, module, javaClass) -- cgit v1.2.3 From fa63170098a239ada206e74117194ae8c4dd2600 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 15 Jul 2012 12:16:36 -0700 Subject: Removed "core classes first" logic. I see no evidence this is necessary, and it should be wildly uncontroversial that if we can lose this, we should. Hardcoded filenames which don't even exist in the repository ("StandardEmbeddings.scala", "EmbeddedControls.scala") have no place in compiler source. 
If some portion of this is believed necessary then better evidence of that should be offered. Review by @lrytz. --- src/compiler/scala/tools/nsc/Global.scala | 43 +------------------------------ 1 file changed, 1 insertion(+), 42 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 6a61beb55d..1c68b783bf 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1498,8 +1498,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Compile list of source files */ def compileSources(_sources: List[SourceFile]) { - val depSources = dependencyAnalysis calculateFiles _sources.distinct - val sources = coreClassesFirst(depSources) + val sources = dependencyAnalysis calculateFiles _sources.distinct // there is a problem already, e.g. a plugin was passed a bad option if (reporter.hasErrors) return @@ -1659,46 +1658,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } if (!pclazz.isRoot) resetPackageClass(pclazz.owner) } - - /** - * Re-orders the source files to - * 1. This Space Intentionally Left Blank - * 2. LowPriorityImplicits / EmbeddedControls (i.e. parents of Predef) - * 3. the rest - * - * 1 is to avoid cyclic reference errors. - * 2 is due to the following. When completing "Predef" (*), typedIdent is called - * for its parents (e.g. "LowPriorityImplicits"). typedIdent checks whether - * the symbol reallyExists, which tests if the type of the symbol after running - * its completer is != NoType. - * If the "namer" phase has not yet run for "LowPriorityImplicits", the symbol - * has a SourcefileLoader as type. Calling "doComplete" on it does nothing at - * all, because the source file is part of the files to be compiled anyway. - * So the "reallyExists" test will return "false". - * Only after the namer, the symbol has a lazy type which actually computes - * the info, and "reallyExists" behaves as expected. - * So we need to make sure that the "namer" phase is run on predef's parents - * before running it on predef. - * - * (*) Predef is completed early when calling "mkAttributedRef" during the - * addition of "import Predef._" to sourcefiles. So this situation can't - * happen for user classes. - * - */ - private def coreClassesFirst(files: List[SourceFile]) = { - val goLast = 4 - def rank(f: SourceFile) = { - if (f.file.container.name != "scala") goLast - else f.file.name match { - case "LowPriorityImplicits.scala" => 2 - case "StandardEmbeddings.scala" => 2 - case "EmbeddedControls.scala" => 2 - case "Predef.scala" => 3 /* Predef.scala before Any.scala, etc. */ - case _ => goLast - } - } - files sortBy rank - } } // class Run def printAllUnits() { -- cgit v1.2.3 From 186f57ab4b1611820ad6d532eaafc7b12c6994cf Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 22 Jul 2012 22:16:03 -0700 Subject: Improve unchecked warnings. Spurious test was not good. Better test avoids suppressing some legitimate warnings. Review by @moors. 
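To illustrate the effect (a minimal sketch, not part of the patch; UncheckedDemo and check are invented names), a type test whose argument is erased should now be flagged under -unchecked even when that argument itself carries type arguments:

    object UncheckedDemo {
      def check(x: Any): Boolean =
        // Option[List[String]] is erased to Option at runtime, so this test
        // cannot actually be performed; with this change it is reported
        // as an unchecked warning instead of being silently accepted.
        x.isInstanceOf[Option[List[String]]]
    }

The unchecked2 test added below exercises the same pattern.
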
--- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 12 +++++++----- test/files/neg/unchecked2.check | 19 +++++++++++++++++++ test/files/neg/unchecked2.flags | 1 + test/files/neg/unchecked2.scala | 8 ++++++++ test/files/pos/t1439.scala | 2 +- 5 files changed, 36 insertions(+), 6 deletions(-) create mode 100644 test/files/neg/unchecked2.check create mode 100644 test/files/neg/unchecked2.flags create mode 100644 test/files/neg/unchecked2.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 2f9bb24079..94dfcfa7dd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1278,8 +1278,10 @@ trait Infer { } else { for (arg <- args) { if (sym == ArrayClass) check(arg, bound) - else if (arg.typeArgs.nonEmpty) () // avoid spurious warnings with higher-kinded types - else if (sym == NonLocalReturnControlClass) () // no way to suppress unchecked warnings on try/catch + // avoid spurious warnings with higher-kinded types + else if (arg.typeArgs exists (_.typeSymbol.isTypeParameterOrSkolem)) () + // no way to suppress unchecked warnings on try/catch + else if (sym == NonLocalReturnControlClass) () else arg match { case TypeRef(_, sym, _) if isLocalBinding(sym) => ; @@ -1423,7 +1425,7 @@ trait Infer { ) // Intentionally *not* using `Type#typeSymbol` here, which would normalize `tp` - // and collect symbols from the result type of any resulting `PolyType`s, which + // and collect symbols from the result type of any resulting `PolyType`s, which // are not free type parameters of `tp`. // // Contrast with `isFreeTypeParamNoSkolem`. @@ -1456,7 +1458,7 @@ trait Infer { def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match { case OverloadedType(pre, alts) => tryTwice { isSecondTry => val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt)) - val noAlternatives = alts0.isEmpty + val noAlternatives = alts0.isEmpty val alts1 = if (noAlternatives) alts else alts0 //println("trying "+alts1+(alts1 map (_.tpe))+(alts1 map (_.locationString))+" for "+pt) @@ -1614,7 +1616,7 @@ trait Infer { val saved = context.state var fallback = false context.setBufferErrors() - // We cache the current buffer because it is impossible to + // We cache the current buffer because it is impossible to // distinguish errors that occurred before entering tryTwice // and our first attempt in 'withImplicitsDisabled'. 
If the // first attempt fails we try with implicits on *and* clean diff --git a/test/files/neg/unchecked2.check b/test/files/neg/unchecked2.check new file mode 100644 index 0000000000..2c0be9ce00 --- /dev/null +++ b/test/files/neg/unchecked2.check @@ -0,0 +1,19 @@ +unchecked2.scala:2: error: non variable type-argument Int in type Option[Int] is unchecked since it is eliminated by erasure + Some(123).isInstanceOf[Option[Int]] + ^ +unchecked2.scala:3: error: non variable type-argument String in type Option[String] is unchecked since it is eliminated by erasure + Some(123).isInstanceOf[Option[String]] + ^ +unchecked2.scala:4: error: non variable type-argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure + Some(123).isInstanceOf[Option[List[String]]] + ^ +unchecked2.scala:5: error: non variable type-argument List[Int => String] in type Option[List[Int => String]] is unchecked since it is eliminated by erasure + Some(123).isInstanceOf[Option[List[Int => String]]] + ^ +unchecked2.scala:6: error: non variable type-argument (String, Double) in type Option[(String, Double)] is unchecked since it is eliminated by erasure + Some(123).isInstanceOf[Option[(String, Double)]] + ^ +unchecked2.scala:7: error: non variable type-argument String => Double in type Option[String => Double] is unchecked since it is eliminated by erasure + Some(123).isInstanceOf[Option[String => Double]] + ^ +6 errors found diff --git a/test/files/neg/unchecked2.flags b/test/files/neg/unchecked2.flags new file mode 100644 index 0000000000..144ddac9d3 --- /dev/null +++ b/test/files/neg/unchecked2.flags @@ -0,0 +1 @@ +-unchecked -Xfatal-warnings diff --git a/test/files/neg/unchecked2.scala b/test/files/neg/unchecked2.scala new file mode 100644 index 0000000000..a2e757e1dc --- /dev/null +++ b/test/files/neg/unchecked2.scala @@ -0,0 +1,8 @@ +object Test { + Some(123).isInstanceOf[Option[Int]] + Some(123).isInstanceOf[Option[String]] + Some(123).isInstanceOf[Option[List[String]]] + Some(123).isInstanceOf[Option[List[Int => String]]] + Some(123).isInstanceOf[Option[(String, Double)]] + Some(123).isInstanceOf[Option[String => Double]] +} diff --git a/test/files/pos/t1439.scala b/test/files/pos/t1439.scala index 68a7332b2a..0efcc74b65 100644 --- a/test/files/pos/t1439.scala +++ b/test/files/pos/t1439.scala @@ -2,7 +2,7 @@ class View[C[A]] { } object Test { - null match { + (null: Any) match { case v: View[_] => } } -- cgit v1.2.3 From 188083efcdd4c0b70c3449f40e3c9e9365e53650 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 25 Jul 2012 12:48:34 -0700 Subject: Optimization in List. Mark all the closure-accepting methods which have implementations within List as @inline final so the closures can be eliminated. (All the methods are effectively final anyway, as Nil and :: are the only possible subclasses.) And compromise with respect to the inlining of inherited methods by overriding foreach (responsible for over 300 closures) with an inlinable implementation. Review by @gkossakowski. 
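A rough sketch of the pattern being applied (MyList is an invented stand-in for List; the body mirrors the foreach override in the diff below): a closure-accepting method that is effectively final can be marked @inline final so the optimiser can eliminate both the call and the closure allocated for its argument.

    sealed abstract class MyList[+A] {
      def isEmpty: Boolean
      def head: A
      def tail: MyList[A]

      // With only final subclasses possible, this method is effectively final;
      // @inline final lets the optimiser inline it and elide the closure for f.
      @inline final def foreach[U](f: A => U) {
        var these = this
        while (!these.isEmpty) {
          f(these.head)
          these = these.tail
        }
      }
    }
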
--- src/library/scala/collection/immutable/List.scala | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 74dc385f99..19df64558e 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -152,7 +152,7 @@ sealed abstract class List[+A] extends AbstractSeq[A] * @usecase def mapConserve(f: A => A): List[A] * @inheritdoc */ - def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = { + @inline final def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = { @tailrec def loop(mapped: ListBuffer[B], unchanged: List[A], pending: List[A]): List[B] = if (pending.isEmpty) { @@ -257,7 +257,7 @@ sealed abstract class List[+A] extends AbstractSeq[A] (b.toList, these) } - override def takeWhile(p: A => Boolean): List[A] = { + @inline final override def takeWhile(p: A => Boolean): List[A] = { val b = new ListBuffer[A] var these = this while (!these.isEmpty && p(these.head)) { @@ -267,7 +267,7 @@ sealed abstract class List[+A] extends AbstractSeq[A] b.toList } - override def dropWhile(p: A => Boolean): List[A] = { + @inline final override def dropWhile(p: A => Boolean): List[A] = { @tailrec def loop(xs: List[A]): List[A] = if (xs.isEmpty || !p(xs.head)) xs @@ -276,7 +276,7 @@ sealed abstract class List[+A] extends AbstractSeq[A] loop(this) } - override def span(p: A => Boolean): (List[A], List[A]) = { + @inline final override def span(p: A => Boolean): (List[A], List[A]) = { val b = new ListBuffer[A] var these = this while (!these.isEmpty && p(these.head)) { @@ -286,6 +286,16 @@ sealed abstract class List[+A] extends AbstractSeq[A] (b.toList, these) } + // Overridden with an implementation identical to the inherited one (at this time) + // solely so it can be finalized and thus inlinable. + @inline final override def foreach[U](f: A => U) { + var these = this + while (!these.isEmpty) { + f(these.head) + these = these.tail + } + } + override def reverse: List[A] = { var result: List[A] = Nil var these = this -- cgit v1.2.3 From adeffda25e94ed0206d35bdb9b42523227a89f8c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 25 Jul 2012 15:21:16 -0700 Subject: Refined isEffectivelyFinal logic for sealedness. If the enclosing class of a method is sealed and has only final subclasses, then the method is effectively final in the sealed class if none of the subclasses overrides it. This makes it possible to inline more methods without explicitly marking them final. Note that the test doesn't fail before this patch due to SI-6142, a bug in the optimizer, but here's a bytecode diff to prove it: @@ -16,8 +16,10 @@ public final class Test$ { Code: : getstatic // Field Foo$.MODULE$:LFoo$; : invokevirtual // Method Foo$.mkFoo:()LFoo; +: pop : bipush -: invokevirtual // Method Foo.bar:(I)I +: iconst_1 +: iadd : ireturn And the test in neg, which is manually made to fail due to the absence of inline warnings, correctly refuses to inline the methods. Review by @dragos. 
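The shape of code this enables, echoing the pos test added in this commit: Foo is sealed, its only subclasses are final (objects are implicitly final), and neither subclass overrides bar, so bar is effectively final and calls through the sealed type become candidates for inlining.

    sealed abstract class Foo {
      @inline def bar(x: Int) = x + 1   // not overridden in any subclass
    }
    object Baz1 extends Foo
    final class Baz2 extends Foo
    // A call such as Foo.mkFoo() bar 10 (see the pos test below) can now be
    // inlined even though bar itself is not marked final.
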
--- src/reflect/scala/reflect/internal/Symbols.scala | 16 ++++++--- test/files/neg/sealed-final-neg.check | 4 +++ test/files/neg/sealed-final-neg.flags | 1 + test/files/neg/sealed-final-neg.scala | 41 ++++++++++++++++++++++++ test/files/pos/sealed-final.flags | 1 + test/files/pos/sealed-final.scala | 14 ++++++++ test/files/run/t2886.check | 10 +++--- 7 files changed, 78 insertions(+), 9 deletions(-) create mode 100644 test/files/neg/sealed-final-neg.check create mode 100644 test/files/neg/sealed-final-neg.flags create mode 100644 test/files/neg/sealed-final-neg.scala create mode 100644 test/files/pos/sealed-final.flags create mode 100644 test/files/pos/sealed-final.scala (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 636f1e2f01..7e6de14295 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -191,7 +191,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } else None } - + // Begin Reflection Helpers // Replaces a repeated parameter type at the end of the parameter list @@ -354,7 +354,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => val selection = select(alts, defaultFilteringOps) val knownApplicable = applicable(selection) - + if (knownApplicable.size == 1) knownApplicable.head else NoSymbol } @@ -1016,6 +1016,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => def isTopLevelModule = hasFlag(MODULE) && owner.isPackageClass + /** A helper function for isEffectivelyFinal. */ + private def isNotOverridden = ( + owner.isClass && ( + owner.isEffectivelyFinal + || owner.isSealed && owner.children.forall(c => c.isEffectivelyFinal && (overridingSymbol(c) == NoSymbol)) + ) + ) + /** Is this symbol effectively final? I.e, it cannot be overridden */ final def isEffectivelyFinal: Boolean = ( (this hasFlag FINAL | PACKAGE) @@ -1023,8 +1031,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => || isTerm && ( isPrivate || isLocal - || owner.isClass && owner.isEffectivelyFinal - ) + || isNotOverridden + ) ) /** Is this symbol locally defined? I.e. not accessed from outside `this` instance */ diff --git a/test/files/neg/sealed-final-neg.check b/test/files/neg/sealed-final-neg.check new file mode 100644 index 0000000000..500d23f49a --- /dev/null +++ b/test/files/neg/sealed-final-neg.check @@ -0,0 +1,4 @@ +sealed-final-neg.scala:41: error: expected class or object definition +"Due to SI-6142 this emits no warnings, so we'll just break it until that's fixed." 
+^ +one error found diff --git a/test/files/neg/sealed-final-neg.flags b/test/files/neg/sealed-final-neg.flags new file mode 100644 index 0000000000..cfabf7a5b4 --- /dev/null +++ b/test/files/neg/sealed-final-neg.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Yinline-warnings -optimise \ No newline at end of file diff --git a/test/files/neg/sealed-final-neg.scala b/test/files/neg/sealed-final-neg.scala new file mode 100644 index 0000000000..bc25330e13 --- /dev/null +++ b/test/files/neg/sealed-final-neg.scala @@ -0,0 +1,41 @@ +package neg1 { + sealed abstract class Foo { + @inline def bar(x: Int) = x + 1 + } + object Foo { + def mkFoo(): Foo = new Baz2 + } + + object Baz1 extends Foo + final class Baz2 extends Foo + final class Baz3 extends Foo { + override def bar(x: Int) = x - 1 + } + + object Test { + // bar can't be inlined - it is overridden in Baz3 + def f = Foo.mkFoo() bar 10 + } +} + +package neg2 { + sealed abstract class Foo { + @inline def bar(x: Int) = x + 1 + } + object Foo { + def mkFoo(): Foo = new Baz2 + } + + object Baz1 extends Foo + final class Baz2 extends Foo + class Baz3 extends Foo { + override def bar(x: Int) = x - 1 + } + + object Test { + // bar can't be inlined - Baz3 is not final + def f = Foo.mkFoo() bar 10 + } +} + +"Due to SI-6142 this emits no warnings, so we'll just break it until that's fixed." diff --git a/test/files/pos/sealed-final.flags b/test/files/pos/sealed-final.flags new file mode 100644 index 0000000000..cfabf7a5b4 --- /dev/null +++ b/test/files/pos/sealed-final.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Yinline-warnings -optimise \ No newline at end of file diff --git a/test/files/pos/sealed-final.scala b/test/files/pos/sealed-final.scala new file mode 100644 index 0000000000..bdedb5c1f6 --- /dev/null +++ b/test/files/pos/sealed-final.scala @@ -0,0 +1,14 @@ +sealed abstract class Foo { + @inline def bar(x: Int) = x + 1 +} +object Foo { + def mkFoo(): Foo = new Baz2 +} + +object Baz1 extends Foo +final class Baz2 extends Foo + +object Test { + // bar should be inlined now + def f = Foo.mkFoo() bar 10 +} diff --git a/test/files/run/t2886.check b/test/files/run/t2886.check index 8d97a82799..b093815562 100644 --- a/test/files/run/t2886.check +++ b/test/files/run/t2886.check @@ -1,5 +1,5 @@ -((x: String) => { - val x$1 = x; - val x$2 = x; - Test.this.test(x$2, x$1) -}) +((x: String) => { + val x$1 = x; + val x$2 = x; + Test.this.test(x$2, x$1) +}) -- cgit v1.2.3 From b79c7600544db9964c228b94a2f70f3ed854f89b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 26 Jul 2012 10:16:29 -0700 Subject: Removed restriction on final vars, SI-2418. The original fix for SI-2418 excluded final vars entirely, but the problem was not final vars per se, but the emission of ACC_FINAL in combination with ACC_VOLATILE. Since vars never get ACC_FINAL now, this is no longer an issue. 
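In code, the combination that is accepted again (mirroring the t2418 run test below; previously it was rejected with "final vars cannot be volatile"):

    class Foo {
      // final no longer conflicts with @volatile, because vars are never
      // emitted with ACC_FINAL in bytecode in the first place.
      @volatile final var x = 10
    }
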
--- src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 3 --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 9 +++------ test/files/run/t2418.check | 1 + test/files/run/t2418.scala | 10 ++++++++++ 4 files changed, 14 insertions(+), 9 deletions(-) create mode 100644 test/files/run/t2418.check create mode 100644 test/files/run/t2418.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index ba6c43f9d3..d480fef1c6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -239,9 +239,6 @@ trait ContextErrors { def VolatileValueError(vdef: Tree) = issueNormalTypeError(vdef, "values cannot be volatile") - def FinalVolatileVarError(vdef: Tree) = - issueNormalTypeError(vdef, "final vars cannot be volatile") - def LocalVarUninitializedError(vdef: Tree) = issueNormalTypeError(vdef, "local variables must be initialized") diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 568a3a9c14..c3d93a749e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1791,12 +1791,9 @@ trait Typers extends Modes with Adaptations with Tags { var tpt1 = checkNoEscaping.privates(sym, typer1.typedType(vdef.tpt)) checkNonCyclic(vdef, tpt1) - if (sym.hasAnnotation(definitions.VolatileAttr)) { - if (!sym.isMutable) - VolatileValueError(vdef) - else if (sym.isFinal) - FinalVolatileVarError(vdef) - } + if (sym.hasAnnotation(definitions.VolatileAttr) && !sym.isMutable) + VolatileValueError(vdef) + val rhs1 = if (vdef.rhs.isEmpty) { if (sym.isVariable && sym.owner.isTerm && !isPastTyper) diff --git a/test/files/run/t2418.check b/test/files/run/t2418.check new file mode 100644 index 0000000000..f599e28b8a --- /dev/null +++ b/test/files/run/t2418.check @@ -0,0 +1 @@ +10 diff --git a/test/files/run/t2418.scala b/test/files/run/t2418.scala new file mode 100644 index 0000000000..f330bef60a --- /dev/null +++ b/test/files/run/t2418.scala @@ -0,0 +1,10 @@ +class Foo { + @volatile final var x=10 + override def toString = "" + x +} + +object Test { + def main(args: Array[String]): Unit = { + println((new Foo)) + } +} -- cgit v1.2.3 From 2dbeff0035060f5cf909443315733d7d7911928d Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 21 Jul 2012 08:57:59 -0700 Subject: Renaming files to please ant. "If the names don't fit, do not commit!" 
--- .../scala/tools/nsc/doc/model/LinkTo.scala | 24 +++ src/compiler/scala/tools/nsc/doc/model/Links.scala | 24 --- src/library/scala/reflect/ClassManifest.scala | 240 --------------------- .../reflect/ClassManifestDeprecatedApis.scala | 240 +++++++++++++++++++++ 4 files changed, 264 insertions(+), 264 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/doc/model/LinkTo.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/Links.scala delete mode 100644 src/library/scala/reflect/ClassManifest.scala create mode 100644 src/library/scala/reflect/ClassManifestDeprecatedApis.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/LinkTo.scala b/src/compiler/scala/tools/nsc/doc/model/LinkTo.scala new file mode 100644 index 0000000000..b76dee0f14 --- /dev/null +++ b/src/compiler/scala/tools/nsc/doc/model/LinkTo.scala @@ -0,0 +1,24 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2011 LAMP/EPFL + */ + +package scala.tools.nsc +package doc +package model + +import scala.collection._ + +abstract sealed class LinkTo +case class LinkToTpl(tpl: DocTemplateEntity) extends LinkTo +case class LinkToMember(mbr: MemberEntity, inTpl: DocTemplateEntity) extends LinkTo +case class Tooltip(name: String) extends LinkTo { def this(tpl: TemplateEntity) = this(tpl.qualifiedName) } +// case class LinkToExternal(name: String, url: String) extends LinkTo // for SI-191, whenever Manohar will have time +case object NoLink extends LinkTo // you should use Tooltip if you have a name from the user, this is only in case all fails + +object LinkToTpl { + // this makes it easier to create links + def apply(tpl: TemplateEntity) = tpl match { + case dtpl: DocTemplateEntity => new LinkToTpl(dtpl) + case ntpl: TemplateEntity => new Tooltip(ntpl.qualifiedName) + } +} diff --git a/src/compiler/scala/tools/nsc/doc/model/Links.scala b/src/compiler/scala/tools/nsc/doc/model/Links.scala deleted file mode 100644 index b76dee0f14..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/Links.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools.nsc -package doc -package model - -import scala.collection._ - -abstract sealed class LinkTo -case class LinkToTpl(tpl: DocTemplateEntity) extends LinkTo -case class LinkToMember(mbr: MemberEntity, inTpl: DocTemplateEntity) extends LinkTo -case class Tooltip(name: String) extends LinkTo { def this(tpl: TemplateEntity) = this(tpl.qualifiedName) } -// case class LinkToExternal(name: String, url: String) extends LinkTo // for SI-191, whenever Manohar will have time -case object NoLink extends LinkTo // you should use Tooltip if you have a name from the user, this is only in case all fails - -object LinkToTpl { - // this makes it easier to create links - def apply(tpl: TemplateEntity) = tpl match { - case dtpl: DocTemplateEntity => new LinkToTpl(dtpl) - case ntpl: TemplateEntity => new Tooltip(ntpl.qualifiedName) - } -} diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala deleted file mode 100644 index d226e43e77..0000000000 --- a/src/library/scala/reflect/ClassManifest.scala +++ /dev/null @@ -1,240 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.reflect - -import scala.collection.mutable.{ WrappedArray, ArrayBuilder } -import java.lang.{ 
Class => jClass } - -@deprecated("Use scala.reflect.ClassTag instead", "2.10.0") -trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { - self: ClassManifest[T] => - - @deprecated("Use runtimeClass instead", "2.10.0") - def erasure: jClass[_] = runtimeClass - - private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = { - def loop(left: Set[jClass[_]], seen: Set[jClass[_]]): Boolean = { - left.nonEmpty && { - val next = left.head - val supers = next.getInterfaces.toSet ++ Option(next.getSuperclass) - supers(sup) || { - val xs = left ++ supers filterNot seen - loop(xs - next, seen + next) - } - } - } - loop(Set(sub), Set()) - } - - private def subargs(args1: List[OptManifest[_]], args2: List[OptManifest[_]]) = (args1 corresponds args2) { - // !!! [Martin] this is wrong, need to take variance into account - case (x: ClassManifest[_], y: ClassManifest[_]) => x <:< y - case (x, y) => (x eq NoManifest) && (y eq NoManifest) - } - - /** Tests whether the type represented by this manifest is a subtype - * of the type represented by `that` manifest, subject to the limitations - * described in the header. - */ - @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0") - def <:<(that: ClassManifest[_]): Boolean = { - // All types which could conform to these types will override <:<. - def cannotMatch = { - import Manifest._ - that.isInstanceOf[AnyValManifest[_]] || (that eq AnyVal) || (that eq Nothing) || (that eq Null) - } - - // This is wrong, and I don't know how it can be made right - // without more development of Manifests, due to arity-defying - // relationships like: - // - // List[String] <: AnyRef - // Map[Int, Int] <: Iterable[(Int, Int)] - // - // Given the manifest for Map[A, B] how do I determine that a - // supertype has single type argument (A, B) ? I don't see how we - // can say whether X <:< Y when type arguments are involved except - // when the erasure is the same, even before considering variance. - !cannotMatch && { - // this part is wrong for not considering variance - if (this.erasure == that.erasure) - subargs(this.typeArguments, that.typeArguments) - // this part is wrong for punting unless the rhs has no type - // arguments, but it's better than a blindfolded pinata swing. - else - that.typeArguments.isEmpty && subtype(this.erasure, that.erasure) - } - } - - /** Tests whether the type represented by this manifest is a supertype - * of the type represented by `that` manifest, subject to the limitations - * described in the header. 
- */ - @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0") - def >:>(that: ClassManifest[_]): Boolean = - that <:< this - - override def canEqual(other: Any) = other match { - case _: ClassManifest[_] => true - case _ => false - } - - protected def arrayClass[T](tp: jClass[_]): jClass[Array[T]] = - java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]] - - @deprecated("Use wrap instead", "2.10.0") - def arrayManifest: ClassManifest[Array[T]] = - ClassManifest.classType[Array[T]](arrayClass[T](erasure), this) - - override def newArray(len: Int): Array[T] = - java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]] - - @deprecated("Use wrap.newArray instead", "2.10.0") - def newArray2(len: Int): Array[Array[T]] = - java.lang.reflect.Array.newInstance(arrayClass[T](erasure), len) - .asInstanceOf[Array[Array[T]]] - - @deprecated("Use wrap.wrap.newArray instead", "2.10.0") - def newArray3(len: Int): Array[Array[Array[T]]] = - java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](erasure)), len) - .asInstanceOf[Array[Array[Array[T]]]] - - @deprecated("Use wrap.wrap.wrap.newArray instead", "2.10.0") - def newArray4(len: Int): Array[Array[Array[Array[T]]]] = - java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure))), len) - .asInstanceOf[Array[Array[Array[Array[T]]]]] - - @deprecated("Use wrap.wrap.wrap.wrap.newArray instead", "2.10.0") - def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] = - java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure)))), len) - .asInstanceOf[Array[Array[Array[Array[Array[T]]]]]] - - @deprecated("Create WrappedArray directly instead", "2.10.0") - def newWrappedArray(len: Int): WrappedArray[T] = - // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests - new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]] - - @deprecated("Use ArrayBuilder.make(this) instead", "2.10.0") - def newArrayBuilder(): ArrayBuilder[T] = - // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests - new ArrayBuilder.ofRef[T with AnyRef]()(this.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] - - @deprecated("Use scala.reflect.runtime.universe.TypeTag to capture type structure instead", "2.10.0") - def typeArguments: List[OptManifest[_]] = List() - - protected def argString = - if (typeArguments.nonEmpty) typeArguments.mkString("[", ", ", "]") - else if (erasure.isArray) "["+ClassManifest.fromClass(erasure.getComponentType)+"]" - else "" -} - -/** `ClassManifestFactory` defines factory methods for manifests. - * It is intended for use by the compiler and should not be used in client code. - * - * Unlike `ClassManifest`, this factory isn't annotated with a deprecation warning. - * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests. - * - * In a perfect world, we would just remove the @deprecated annotation from `ClassManifest` the object - * and then delete it in 2.11. After all, that object is explicitly marked as internal, so noone should use it. 
- * However a lot of existing libraries disregarded the scaladoc that comes with `ClassManifest`, - * so we need to somehow nudge them into migrating prior to removing stuff out of the blue. - * Hence we've introduced this design decision as the lesser of two evils. - */ -object ClassManifestFactory { - val Byte = ManifestFactory.Byte - val Short = ManifestFactory.Short - val Char = ManifestFactory.Char - val Int = ManifestFactory.Int - val Long = ManifestFactory.Long - val Float = ManifestFactory.Float - val Double = ManifestFactory.Double - val Boolean = ManifestFactory.Boolean - val Unit = ManifestFactory.Unit - val Any = ManifestFactory.Any - val Object = ManifestFactory.Object - val AnyVal = ManifestFactory.AnyVal - val Nothing = ManifestFactory.Nothing - val Null = ManifestFactory.Null - - def fromClass[T](clazz: jClass[T]): ClassManifest[T] = clazz match { - case java.lang.Byte.TYPE => Byte.asInstanceOf[ClassManifest[T]] - case java.lang.Short.TYPE => Short.asInstanceOf[ClassManifest[T]] - case java.lang.Character.TYPE => Char.asInstanceOf[ClassManifest[T]] - case java.lang.Integer.TYPE => Int.asInstanceOf[ClassManifest[T]] - case java.lang.Long.TYPE => Long.asInstanceOf[ClassManifest[T]] - case java.lang.Float.TYPE => Float.asInstanceOf[ClassManifest[T]] - case java.lang.Double.TYPE => Double.asInstanceOf[ClassManifest[T]] - case java.lang.Boolean.TYPE => Boolean.asInstanceOf[ClassManifest[T]] - case java.lang.Void.TYPE => Unit.asInstanceOf[ClassManifest[T]] - case _ => classType[T with AnyRef](clazz).asInstanceOf[ClassManifest[T]] - } - - def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = Manifest.singleType(value) - - /** ClassManifest for the class type `clazz`, where `clazz` is - * a top-level or static class. - * @note This no-prefix, no-arguments case is separate because we - * it's called from ScalaRunTime.boxArray itself. If we - * pass varargs as arrays into this, we get an infinitely recursive call - * to boxArray. (Besides, having a separate case is more efficient) - */ - def classType[T](clazz: jClass[_]): ClassManifest[T] = - new ClassTypeManifest[T](None, clazz, Nil) - - /** ClassManifest for the class type `clazz[args]`, where `clazz` is - * a top-level or static class and `args` are its type arguments */ - def classType[T](clazz: jClass[_], arg1: OptManifest[_], args: OptManifest[_]*): ClassManifest[T] = - new ClassTypeManifest[T](None, clazz, arg1 :: args.toList) - - /** ClassManifest for the class type `clazz[args]`, where `clazz` is - * a class with non-package prefix type `prefix` and type arguments `args`. - */ - def classType[T](prefix: OptManifest[_], clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] = - new ClassTypeManifest[T](Some(prefix), clazz, args.toList) - - def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = arg match { - case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]] - case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest - } - - /** ClassManifest for the abstract type `prefix # name`. `upperBound` is not - * strictly necessary as it could be obtained by reflection. It was - * added so that erasure can be calculated without reflection. */ - def abstractType[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] = - new ClassManifest[T] { - override def runtimeClass = clazz - override val typeArguments = args.toList - override def toString = prefix.toString+"#"+name+argString - } - - /** ClassManifest for the abstract type `prefix # name`. 
`upperBound` is not - * strictly necessary as it could be obtained by reflection. It was - * added so that erasure can be calculated without reflection. - * todo: remove after next boostrap - */ - def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] = - new ClassManifest[T] { - override def runtimeClass = upperbound.erasure - override val typeArguments = args.toList - override def toString = prefix.toString+"#"+name+argString - } -} - -/** Manifest for the class type `clazz[args]`, where `clazz` is - * a top-level or static class */ -private class ClassTypeManifest[T]( - prefix: Option[OptManifest[_]], - val runtimeClass: jClass[_], - override val typeArguments: List[OptManifest[_]]) extends ClassManifest[T] -{ - override def toString = - (if (prefix.isEmpty) "" else prefix.get.toString+"#") + - (if (erasure.isArray) "Array" else erasure.getName) + - argString -} \ No newline at end of file diff --git a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala new file mode 100644 index 0000000000..d226e43e77 --- /dev/null +++ b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala @@ -0,0 +1,240 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.reflect + +import scala.collection.mutable.{ WrappedArray, ArrayBuilder } +import java.lang.{ Class => jClass } + +@deprecated("Use scala.reflect.ClassTag instead", "2.10.0") +trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { + self: ClassManifest[T] => + + @deprecated("Use runtimeClass instead", "2.10.0") + def erasure: jClass[_] = runtimeClass + + private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = { + def loop(left: Set[jClass[_]], seen: Set[jClass[_]]): Boolean = { + left.nonEmpty && { + val next = left.head + val supers = next.getInterfaces.toSet ++ Option(next.getSuperclass) + supers(sup) || { + val xs = left ++ supers filterNot seen + loop(xs - next, seen + next) + } + } + } + loop(Set(sub), Set()) + } + + private def subargs(args1: List[OptManifest[_]], args2: List[OptManifest[_]]) = (args1 corresponds args2) { + // !!! [Martin] this is wrong, need to take variance into account + case (x: ClassManifest[_], y: ClassManifest[_]) => x <:< y + case (x, y) => (x eq NoManifest) && (y eq NoManifest) + } + + /** Tests whether the type represented by this manifest is a subtype + * of the type represented by `that` manifest, subject to the limitations + * described in the header. + */ + @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0") + def <:<(that: ClassManifest[_]): Boolean = { + // All types which could conform to these types will override <:<. + def cannotMatch = { + import Manifest._ + that.isInstanceOf[AnyValManifest[_]] || (that eq AnyVal) || (that eq Nothing) || (that eq Null) + } + + // This is wrong, and I don't know how it can be made right + // without more development of Manifests, due to arity-defying + // relationships like: + // + // List[String] <: AnyRef + // Map[Int, Int] <: Iterable[(Int, Int)] + // + // Given the manifest for Map[A, B] how do I determine that a + // supertype has single type argument (A, B) ? 
I don't see how we + // can say whether X <:< Y when type arguments are involved except + // when the erasure is the same, even before considering variance. + !cannotMatch && { + // this part is wrong for not considering variance + if (this.erasure == that.erasure) + subargs(this.typeArguments, that.typeArguments) + // this part is wrong for punting unless the rhs has no type + // arguments, but it's better than a blindfolded pinata swing. + else + that.typeArguments.isEmpty && subtype(this.erasure, that.erasure) + } + } + + /** Tests whether the type represented by this manifest is a supertype + * of the type represented by `that` manifest, subject to the limitations + * described in the header. + */ + @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0") + def >:>(that: ClassManifest[_]): Boolean = + that <:< this + + override def canEqual(other: Any) = other match { + case _: ClassManifest[_] => true + case _ => false + } + + protected def arrayClass[T](tp: jClass[_]): jClass[Array[T]] = + java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]] + + @deprecated("Use wrap instead", "2.10.0") + def arrayManifest: ClassManifest[Array[T]] = + ClassManifest.classType[Array[T]](arrayClass[T](erasure), this) + + override def newArray(len: Int): Array[T] = + java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]] + + @deprecated("Use wrap.newArray instead", "2.10.0") + def newArray2(len: Int): Array[Array[T]] = + java.lang.reflect.Array.newInstance(arrayClass[T](erasure), len) + .asInstanceOf[Array[Array[T]]] + + @deprecated("Use wrap.wrap.newArray instead", "2.10.0") + def newArray3(len: Int): Array[Array[Array[T]]] = + java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](erasure)), len) + .asInstanceOf[Array[Array[Array[T]]]] + + @deprecated("Use wrap.wrap.wrap.newArray instead", "2.10.0") + def newArray4(len: Int): Array[Array[Array[Array[T]]]] = + java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure))), len) + .asInstanceOf[Array[Array[Array[Array[T]]]]] + + @deprecated("Use wrap.wrap.wrap.wrap.newArray instead", "2.10.0") + def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] = + java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure)))), len) + .asInstanceOf[Array[Array[Array[Array[Array[T]]]]]] + + @deprecated("Create WrappedArray directly instead", "2.10.0") + def newWrappedArray(len: Int): WrappedArray[T] = + // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests + new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]] + + @deprecated("Use ArrayBuilder.make(this) instead", "2.10.0") + def newArrayBuilder(): ArrayBuilder[T] = + // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests + new ArrayBuilder.ofRef[T with AnyRef]()(this.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] + + @deprecated("Use scala.reflect.runtime.universe.TypeTag to capture type structure instead", "2.10.0") + def typeArguments: List[OptManifest[_]] = List() + + protected def argString = + if (typeArguments.nonEmpty) typeArguments.mkString("[", ", ", "]") + else if (erasure.isArray) "["+ClassManifest.fromClass(erasure.getComponentType)+"]" + else "" +} + +/** `ClassManifestFactory` 
defines factory methods for manifests. + * It is intended for use by the compiler and should not be used in client code. + * + * Unlike `ClassManifest`, this factory isn't annotated with a deprecation warning. + * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests. + * + * In a perfect world, we would just remove the @deprecated annotation from `ClassManifest` the object + * and then delete it in 2.11. After all, that object is explicitly marked as internal, so noone should use it. + * However a lot of existing libraries disregarded the scaladoc that comes with `ClassManifest`, + * so we need to somehow nudge them into migrating prior to removing stuff out of the blue. + * Hence we've introduced this design decision as the lesser of two evils. + */ +object ClassManifestFactory { + val Byte = ManifestFactory.Byte + val Short = ManifestFactory.Short + val Char = ManifestFactory.Char + val Int = ManifestFactory.Int + val Long = ManifestFactory.Long + val Float = ManifestFactory.Float + val Double = ManifestFactory.Double + val Boolean = ManifestFactory.Boolean + val Unit = ManifestFactory.Unit + val Any = ManifestFactory.Any + val Object = ManifestFactory.Object + val AnyVal = ManifestFactory.AnyVal + val Nothing = ManifestFactory.Nothing + val Null = ManifestFactory.Null + + def fromClass[T](clazz: jClass[T]): ClassManifest[T] = clazz match { + case java.lang.Byte.TYPE => Byte.asInstanceOf[ClassManifest[T]] + case java.lang.Short.TYPE => Short.asInstanceOf[ClassManifest[T]] + case java.lang.Character.TYPE => Char.asInstanceOf[ClassManifest[T]] + case java.lang.Integer.TYPE => Int.asInstanceOf[ClassManifest[T]] + case java.lang.Long.TYPE => Long.asInstanceOf[ClassManifest[T]] + case java.lang.Float.TYPE => Float.asInstanceOf[ClassManifest[T]] + case java.lang.Double.TYPE => Double.asInstanceOf[ClassManifest[T]] + case java.lang.Boolean.TYPE => Boolean.asInstanceOf[ClassManifest[T]] + case java.lang.Void.TYPE => Unit.asInstanceOf[ClassManifest[T]] + case _ => classType[T with AnyRef](clazz).asInstanceOf[ClassManifest[T]] + } + + def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = Manifest.singleType(value) + + /** ClassManifest for the class type `clazz`, where `clazz` is + * a top-level or static class. + * @note This no-prefix, no-arguments case is separate because we + * it's called from ScalaRunTime.boxArray itself. If we + * pass varargs as arrays into this, we get an infinitely recursive call + * to boxArray. (Besides, having a separate case is more efficient) + */ + def classType[T](clazz: jClass[_]): ClassManifest[T] = + new ClassTypeManifest[T](None, clazz, Nil) + + /** ClassManifest for the class type `clazz[args]`, where `clazz` is + * a top-level or static class and `args` are its type arguments */ + def classType[T](clazz: jClass[_], arg1: OptManifest[_], args: OptManifest[_]*): ClassManifest[T] = + new ClassTypeManifest[T](None, clazz, arg1 :: args.toList) + + /** ClassManifest for the class type `clazz[args]`, where `clazz` is + * a class with non-package prefix type `prefix` and type arguments `args`. 
+ */ + def classType[T](prefix: OptManifest[_], clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] = + new ClassTypeManifest[T](Some(prefix), clazz, args.toList) + + def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = arg match { + case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]] + case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest + } + + /** ClassManifest for the abstract type `prefix # name`. `upperBound` is not + * strictly necessary as it could be obtained by reflection. It was + * added so that erasure can be calculated without reflection. */ + def abstractType[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] = + new ClassManifest[T] { + override def runtimeClass = clazz + override val typeArguments = args.toList + override def toString = prefix.toString+"#"+name+argString + } + + /** ClassManifest for the abstract type `prefix # name`. `upperBound` is not + * strictly necessary as it could be obtained by reflection. It was + * added so that erasure can be calculated without reflection. + * todo: remove after next boostrap + */ + def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] = + new ClassManifest[T] { + override def runtimeClass = upperbound.erasure + override val typeArguments = args.toList + override def toString = prefix.toString+"#"+name+argString + } +} + +/** Manifest for the class type `clazz[args]`, where `clazz` is + * a top-level or static class */ +private class ClassTypeManifest[T]( + prefix: Option[OptManifest[_]], + val runtimeClass: jClass[_], + override val typeArguments: List[OptManifest[_]]) extends ClassManifest[T] +{ + override def toString = + (if (prefix.isEmpty) "" else prefix.get.toString+"#") + + (if (erasure.isArray) "Array" else erasure.getName) + + argString +} \ No newline at end of file -- cgit v1.2.3 From 855f01b30b37ee8f07612d8e568eda5d408fd2df Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Wed, 11 Jul 2012 22:13:05 +0200 Subject: SI-6064 Add method contains to Option. The Option API more or less mirrors the Collection API, but it seems that somehow this method has been forgotten. Review: @axel22 --- src/library/scala/Option.scala | 9 +++++++++ test/files/run/t6064.scala | 9 +++++++++ 2 files changed, 18 insertions(+) create mode 100644 test/files/run/t6064.scala (limited to 'src') diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index c461b413d6..5953a51c78 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -209,6 +209,15 @@ sealed abstract class Option[+A] extends Product with Serializable { def withFilter(q: A => Boolean): WithFilter = new WithFilter(x => p(x) && q(x)) } + /** Tests whether the option contains a given value as an element. + * + * @param elem the element to test. + * @return `true` if the option has an element that is equal (as + * determined by `==`) to `elem`, `false` otherwise. + */ + final def contains[A1 >: A](elem: A1): Boolean = + !isEmpty && this.get == elem + /** Returns true if this option is nonempty '''and''' the predicate * $p returns true when applied to this $option's value. * Otherwise, returns false. 
diff --git a/test/files/run/t6064.scala b/test/files/run/t6064.scala new file mode 100644 index 0000000000..fc184dd92d --- /dev/null +++ b/test/files/run/t6064.scala @@ -0,0 +1,9 @@ +object Test extends App { + assert(Option(42) contains 42) + assert(Some(42) contains 42) + assert(Option(BigInt(42)) contains 42) + assert(Option(42) contains BigInt(42)) + assert(!(None contains 42)) + assert(Some(null) contains null) + assert(!(Option(null) contains null)) +} \ No newline at end of file -- cgit v1.2.3 From 5f31daa147a08df41ce4c69abbc2abadfcb9b5ee Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Sun, 29 Jul 2012 19:11:01 +0200 Subject: Fixes typo in Throwable compiler warning --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- test/files/neg/catch-all.check | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c74da7f2a9..43697f3b1b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4935,7 +4935,7 @@ trait Typers extends Modes with Adaptations with Tags { var catches1 = typedCases(catches, ThrowableClass.tpe, pt) for (cdef <- catches1 if cdef.guard.isEmpty) { - def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.") + def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded}: Throwable` to clear this warning.") def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol cdef.pat match { case Bind(name, i@Ident(_)) if unbound(i) => warn(name) diff --git a/test/files/neg/catch-all.check b/test/files/neg/catch-all.check index 62f895cc7e..a9cd0ba927 100644 --- a/test/files/neg/catch-all.check +++ b/test/files/neg/catch-all.check @@ -1,10 +1,10 @@ -catch-all.scala:2: error: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning. +catch-all.scala:2: error: This catches all Throwables. If this is really intended, use `case _: Throwable` to clear this warning. try { "warn" } catch { case _ => } ^ -catch-all.scala:4: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning. +catch-all.scala:4: error: This catches all Throwables. If this is really intended, use `case x: Throwable` to clear this warning. try { "warn" } catch { case x => } ^ -catch-all.scala:6: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning. +catch-all.scala:6: error: This catches all Throwables. If this is really intended, use `case x: Throwable` to clear this warning. try { "warn" } catch { case _: RuntimeException => ; case x => } ^ three errors found -- cgit v1.2.3 From 48f8235822a2a100d6c4e8d3d7349df565ac6d40 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 30 Jul 2012 07:56:12 -0700 Subject: Fix for SI-6154, VerifyError originating in uncurry. Lhs still might be an Ident. Miguel did all the work, I just wrote it down in code form. 
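A sketch of the shape that used to slip through, distilled from the test
below rather than a definitive reduction: an assignment whose left-hand side
is a bare local variable (an Ident rather than a Select) and whose right-hand
side is a try-catch evaluated where the stack may not be empty.

    var bar: Int = 0
    // since an exception empties the evaluation stack, this try-catch must
    // be lifted into a local method; the old Assign(Select(_, _), _) pattern
    // never marked Ident assignments for lifting
    bar = try { 0 } catch { case _: Throwable => 0 }

Matching on Assign(_: RefTree, _) covers the Ident case as well.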
--- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 8 ++++---- test/files/run/t6154.check | 1 + test/files/run/t6154.scala | 10 ++++++++++ 3 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 test/files/run/t6154.check create mode 100644 test/files/run/t6154.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 2d6017a014..4be76a128a 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -35,8 +35,8 @@ import language.postfixOps * - convert non-local returns to throws with enclosing try statements. * - convert try-catch expressions in contexts where there might be values on the stack to * a local method and a call to it (since an exception empties the evaluation stack): - * - * meth(x_1,..., try { x_i } catch { ..}, .. x_b0) ==> + * + * meth(x_1,..., try { x_i } catch { ..}, .. x_b0) ==> * { * def liftedTry$1 = try { x_i } catch { .. } * meth(x_1, .., liftedTry$1(), .. ) @@ -632,7 +632,7 @@ abstract class UnCurry extends InfoTransform treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals))) } - case Assign(Select(_, _), _) => + case Assign(_: RefTree, _) => withNeedLift(true) { super.transform(tree) } case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) => @@ -641,7 +641,7 @@ abstract class UnCurry extends InfoTransform case ret @ Return(_) if (isNonLocalReturn(ret)) => withNeedLift(true) { super.transform(ret) } - case Try(_, Nil, _) => + case Try(_, Nil, _) => // try-finally does not need lifting: lifting is needed only for try-catch // expressions that are evaluated in a context where the stack might not be empty. // `finally` does not attempt to continue evaluation after an exception, so the fact diff --git a/test/files/run/t6154.check b/test/files/run/t6154.check new file mode 100644 index 0000000000..9766475a41 --- /dev/null +++ b/test/files/run/t6154.check @@ -0,0 +1 @@ +ok diff --git a/test/files/run/t6154.scala b/test/files/run/t6154.scala new file mode 100644 index 0000000000..02ef62905f --- /dev/null +++ b/test/files/run/t6154.scala @@ -0,0 +1,10 @@ +object Test { + def foo(a: Int) { + var bar: Int = 0 + bar = try { 0 } catch { case ex: Throwable => 0 } + new { foo(bar) } + } + + def main(args: Array[String]): Unit = + try foo(0) catch { case _: java.lang.StackOverflowError => println("ok") } +} -- cgit v1.2.3 From 4b0f6d9ccc9023635ce6297839138fa68a5ebc33 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 2 Aug 2012 19:56:15 -0700 Subject: Don't know how git merge loses a whole brace. --- src/compiler/scala/tools/nsc/interactive/Global.scala | 1 + 1 file changed, 1 insertion(+) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index a530425dc7..5263a0fd2a 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -1050,6 +1050,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } finally { interruptsEnabled = true } + } // ---------------- Helper classes --------------------------- -- cgit v1.2.3 From 434adb151083a658b498ffb9c3284f6bb471597a Mon Sep 17 00:00:00 2001 From: Vlad Ureche Date: Wed, 1 Aug 2012 12:30:11 +0200 Subject: Want a 25% partest speedup?* ... well then, don't compile twice! 
Explanation: The compilation process in partest happens in 3 stages: - scala + java files, all fed to scalac (so the java signatures are loaded and the scala bytecode is generated) - java files, fed to javac (so the java bytecode is generated) - scala files, fed to scalac (so the scala bytecode correctly links to the javac-generated bytecode) While this mechanism is great to have, for simple 1-file scala tests it's overkill by compiling scala files twice. So I adjusted the compile procedure to only run the first step if java files are empty, leading to a 25% partest speedup.* Also included Seth Tisue's comment about the tests that require the three-step compilation. * as measured on test.scaladoc --- .../scala/tools/partest/nest/RunnerManager.scala | 30 +++++++++++++++++++--- 1 file changed, 27 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala index dc15d4475b..d7d0b5649a 100644 --- a/src/partest/scala/tools/partest/nest/RunnerManager.scala +++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala @@ -323,10 +323,34 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP val (scalaFiles, javaFiles) = g partition isScala val allFiles = javaFiles ++ scalaFiles + /* The test can contain both java and scala files, each of which should be compiled with the corresponding + * compiler. Since the source files can reference each other both ways (java referencing scala classes and + * vice versa, the partest compilation routine attempts to reach a "bytecode fixpoint" between the two + * compilers -- that's when bytecode generated by each compiler implements the signatures expected by the other. + * + * In theory this property can't be guaranteed, as neither compiler can know what signatures the other + * compiler expects and how to implement them. (see SI-1240 for the full story) + * + * In practice, this happens in 3 steps: + * STEP1: feed all the files to scalac + * it will parse java files and obtain their expected signatures and generate bytecode for scala files + * STEP2: feed the java files to javac + * it will generate the bytecode for the java files and link to the scalac-generated bytecode for scala + * STEP3: only if there are both scala and java files, recompile the scala sources so they link to the correct + * java signatures, in case the signatures deduced by scalac from the source files were wrong. Since the + * bytecode for java is already in place, we only feed the scala files to scalac so it will take the + * java signatures from the existing javac-generated bytecode + */ List(1, 2, 3).foldLeft(CompileSuccess: CompilationOutcome) { - case (CompileSuccess, 1) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), allFiles, kind, logFile) // java + scala - case (CompileSuccess, 2) if javaFiles.nonEmpty => javac(outDir, javaFiles, logFile) // java - case (CompileSuccess, 3) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), scalaFiles, kind, logFile) // scala + case (CompileSuccess, 1) if scalaFiles.nonEmpty => + compileMgr.attemptCompile(Some(outDir), allFiles, kind, logFile) + case (CompileSuccess, 2) if javaFiles.nonEmpty => + javac(outDir, javaFiles, logFile) + case (CompileSuccess, 3) if scalaFiles.nonEmpty && javaFiles.nonEmpty => + // TODO: Do we actually need this? 
SI-1240 is known to require this, but we don't know if other tests + // require it: https://groups.google.com/forum/?fromgroups#!topic/scala-internals/rFDKAcOKciU + compileMgr.attemptCompile(Some(outDir), scalaFiles, kind, logFile) + case (outcome, _) => outcome } } -- cgit v1.2.3 From c7e733ebf456fcd5659998a684a1ef8de8133802 Mon Sep 17 00:00:00 2001 From: "Daniel C. Sobral" Date: Tue, 7 Aug 2012 13:45:26 -0300 Subject: SI-6119 Fix mispelling on take documentation. --- src/library/scala/collection/GenTraversableLike.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala index 0d51230623..903de4a247 100644 --- a/src/library/scala/collection/GenTraversableLike.scala +++ b/src/library/scala/collection/GenTraversableLike.scala @@ -333,7 +333,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with /** Selects first ''n'' elements. * $orderDependent - * @param n Tt number of elements to take from this $coll. + * @param n the number of elements to take from this $coll. * @return a $coll consisting only of the first `n` elements of this $coll, * or else the whole $coll, if it has less than `n` elements. */ -- cgit v1.2.3 From eb2375cc5327293c708226e78f80a97cc780a12f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 9 Aug 2012 14:10:22 -0700 Subject: Warn when Any or AnyVal is inferred. For the very small price of annotating types as Any/AnyVal in those cases where we wish to use them, we can obtain useful warnings. I made trunk clean against this warning and found several bugs or at least suboptimalities in the process. I put the warning behind -Xlint for the moment, but I think this belongs on by default, even for this alone: scala> List(1, 2, 3) contains "a" :8: warning: a type was inferred to be `Any`; this may indicate a programming error. List(1, 2, 3) contains "a" ^ res0: Boolean = false Or this punishment meted out by SI-4042: scala> 1l to 5l contains 5 :8: warning: a type was inferred to be `AnyVal`; this may indicate a programming error. 1l to 5l contains 5 ^ res0: Boolean = false A different situation where this arises, which I have seen variations of many times: scala> class A[T](default: T) { def get(x: => Option[T]) = x getOrElse Some(default) } :7: warning: a type was inferred to be `Any`; this may indicate a programming error. class A[T](default: T) { def get(x: => Option[T]) = x getOrElse Some(default) } ^ // Oops, this was what I meant scala> class A[T](default: T) { def get(x: => Option[T]) = x getOrElse default } defined class A Harder to avoid spurious warnings when "Object" is inferred. 
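When Any or AnyVal is genuinely intended, writing it out is enough to keep
the new check quiet: the warning is suppressed whenever either type already
appears explicitly in the formal, argument, result or expected type. A
minimal, purely illustrative example (not one of the patch's test cases):

    List(1, 2, 3) contains ("a": Any)   // Any is explicit, so -Ywarn-infer-any stays silent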
--- .../tools/nsc/backend/icode/ICodeCheckers.scala | 7 +-- .../scala/tools/nsc/interpreter/ISettings.scala | 2 +- .../scala/tools/nsc/settings/Warnings.scala | 4 +- .../scala/tools/nsc/typechecker/Infer.scala | 50 ++++++++++++----- .../scala/tools/nsc/typechecker/TreeCheckers.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 7 +-- .../library/scala/util/continuations/package.scala | 2 +- src/library/scala/collection/SeqLike.scala | 2 +- src/library/scala/collection/SeqProxyLike.scala | 2 +- .../scala/collection/generic/SeqForwarder.scala | 2 +- .../scala/collection/immutable/NumericRange.scala | 2 +- .../collection/parallel/ParIterableLike.scala | 64 ++++++---------------- .../parsing/combinator/lexical/StdLexical.scala | 2 +- .../scala/tools/scalap/scalax/rules/Rule.scala | 2 +- .../scalap/scalax/rules/scalasig/ScalaSig.scala | 4 +- src/swing/scala/swing/ComboBox.scala | 2 +- src/swing/scala/swing/ListView.scala | 2 +- test/files/neg/warn-inferred-any.check | 10 ++++ test/files/neg/warn-inferred-any.flags | 1 + test/files/neg/warn-inferred-any.scala | 19 +++++++ 20 files changed, 105 insertions(+), 83 deletions(-) create mode 100644 test/files/neg/warn-inferred-any.check create mode 100644 test/files/neg/warn-inferred-any.flags create mode 100644 test/files/neg/warn-inferred-any.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index 0d688d51f2..aa3f4dcb7e 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -381,10 +381,9 @@ abstract class ICodeCheckers { for (instr <- b) { this.instruction = instr - def checkLocal(local: Local): Unit = { - method lookupLocal local.sym.name getOrElse { - icodeError(" " + local + " is not defined in method " + method) - } + def checkLocal(local: Local) { + if ((method lookupLocal local.sym.name).isEmpty) + icodeError(s" $local is not defined in method $method") } def checkField(obj: TypeKind, field: Symbol): Unit = obj match { case REFERENCE(sym) => diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala index b65a1ac889..762092c08a 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala @@ -44,7 +44,7 @@ class ISettings(intp: IMain) { } def deprecation: Boolean = intp.settings.deprecation.value - def allSettings = Map( + def allSettings = Map[String, Any]( "maxPrintString" -> maxPrintString, "maxAutoprintCompletion" -> maxAutoprintCompletion, "unwrapStrings" -> unwrapStrings, diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index 16f8685a87..bfa1714894 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -29,7 +29,8 @@ trait Warnings { warnInaccessible, warnNullaryOverride, warnNullaryUnit, - warnAdaptedArgs + warnAdaptedArgs, + warnInferAny ) // Warning groups. @@ -52,6 +53,7 @@ trait Warnings { val warnInaccessible = BooleanSetting ("-Ywarn-inaccessible", "Warn about inaccessible types in method signatures.") val warnNullaryOverride = BooleanSetting ("-Ywarn-nullary-override", "Warn when non-nullary overrides nullary, e.g. 
`def foo()` over `def foo`.") + val warnInferAny = BooleanSetting ("-Ywarn-infer-any", "Warn when a type argument is inferred to be `Any`.") // Backward compatibility. def Xwarnfatal = fatalWarnings diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index de04b1cb68..032212e3c0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -149,14 +149,13 @@ trait Infer { case tv @ TypeVar(origin, constr) if !tv.untouchable => if (constr.inst == NoType) { throw new DeferredNoInstance(() => - "no unique instantiation of type variable " + origin + " could be found") + s"no unique instantiation of type variable $origin could be found") } else if (excludedVars(tv)) { throw new NoInstance("cyclic instantiation") } else { excludedVars += tv - val res = apply(constr.inst) - excludedVars -= tv - res + try apply(constr.inst) + finally excludedVars -= tv } case _ => mapOver(tp) @@ -643,6 +642,25 @@ trait Infer { tvars, tparams, tparams map inferVariance(formals, restpe), false, lubDepth(formals) max lubDepth(argtpes) ) + // Can warn about inferring Any/AnyVal as long as they don't appear + // explicitly anywhere amongst the formal, argument, result, or expected type. + def canWarnAboutAny = !(pt :: restpe :: formals ::: argtpes exists (t => (t contains AnyClass) || (t contains AnyValClass))) + def argumentPosition(idx: Int): Position = context.tree match { + case x: ValOrDefDef => x.rhs match { + case Apply(fn, args) if idx < args.size => args(idx).pos + case _ => context.tree.pos + } + case _ => context.tree.pos + } + if (settings.warnInferAny.value && context.reportErrors && canWarnAboutAny) { + foreachWithIndex(targs) ((targ, idx) => + targ.typeSymbol match { + case sym @ (AnyClass | AnyValClass) => + context.unit.warning(argumentPosition(idx), s"a type was inferred to be `${sym.name}`; this may indicate a programming error.") + case _ => + } + ) + } adjustTypeArgs(tparams, tvars, targs, restpe) } @@ -1088,12 +1106,12 @@ trait Infer { * @param targs ... * @param pt ... 
*/ - private def substExpr(tree: Tree, undetparams: List[Symbol], - targs: List[Type], pt: Type) { + private def substExpr(tree: Tree, undetparams: List[Symbol], targs: List[Type], pt: Type) { if (targs eq null) { if (!tree.tpe.isErroneous && !pt.isErroneous) PolymorphicExpressionInstantiationError(tree, undetparams, pt) - } else { + } + else { new TreeTypeSubstituter(undetparams, targs).traverse(tree) notifyUndetparamsInferred(undetparams, targs) } @@ -1221,17 +1239,19 @@ trait Infer { } } else None - (inferFor(pt) orElse inferForApproxPt) map { targs => - new TreeTypeSubstituter(undetparams, targs).traverse(tree) - notifyUndetparamsInferred(undetparams, targs) - } getOrElse { - debugwarn("failed inferConstructorInstance for "+ tree +" : "+ tree.tpe +" under "+ undetparams +" pt = "+ pt +(if(isFullyDefined(pt)) " (fully defined)" else " (not fully defined)")) - // if (settings.explaintypes.value) explainTypes(resTp.instantiateTypeParams(undetparams, tvars), pt) - ConstrInstantiationError(tree, resTp, pt) + val inferred = inferFor(pt) orElse inferForApproxPt + + inferred match { + case Some(targs) => + new TreeTypeSubstituter(undetparams, targs).traverse(tree) + notifyUndetparamsInferred(undetparams, targs) + case _ => + debugwarn("failed inferConstructorInstance for "+ tree +" : "+ tree.tpe +" under "+ undetparams +" pt = "+ pt +(if(isFullyDefined(pt)) " (fully defined)" else " (not fully defined)")) + // if (settings.explaintypes.value) explainTypes(resTp.instantiateTypeParams(undetparams, tvars), pt) + ConstrInstantiationError(tree, resTp, pt) } } - def instBounds(tvar: TypeVar): (Type, Type) = { val tparam = tvar.origin.typeSymbol val instType = toOrigin(tvar.constr.inst) diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index 07d457b17b..9d5b52808d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -278,7 +278,7 @@ abstract class TreeCheckers extends Analyzer { def cond(s: Symbol) = !s.isTerm || s.isMethod || s == sym.owner if (sym.owner != currentOwner) { - val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse fail("DefTree can't find owner: ") + val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse { fail("DefTree can't find owner: ") ; NoSymbol } if (sym.owner != expected) fail("""| | currentOwner chain: %s diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index e3bcff7d84..7eb53ca7de 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3111,9 +3111,8 @@ trait Typers extends Modes with Adaptations with Tags { // define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun" // returns those undetparams which have not been instantiated. 
val undetparams = inferMethodInstance(fun, tparams, args1, pt) - val result = doTypedApply(tree, fun, args1, mode, pt) - context.undetparams = undetparams - result + try doTypedApply(tree, fun, args1, mode, pt) + finally context.undetparams = undetparams } } } @@ -4555,7 +4554,7 @@ trait Typers extends Modes with Adaptations with Tags { assert(errorContainer == null, "Cannot set ambiguous error twice for identifier") errorContainer = tree } - + val fingerPrint: Long = name.fingerPrint var defSym: Symbol = tree.symbol // the directly found symbol diff --git a/src/continuations/library/scala/util/continuations/package.scala b/src/continuations/library/scala/util/continuations/package.scala index 641f4594e4..93238d50e1 100644 --- a/src/continuations/library/scala/util/continuations/package.scala +++ b/src/continuations/library/scala/util/continuations/package.scala @@ -167,7 +167,7 @@ package object continuations { } def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = { - new ControlContext(null, x) + new ControlContext[A, B, B](null, x) } /** diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index d7418de9c3..416aa916b4 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -383,7 +383,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ * @return `true` if this $coll has an element that is equal (as * determined by `==`) to `elem`, `false` otherwise. */ - def contains(elem: Any): Boolean = exists (_ == elem) + def contains[A1 >: A](elem: A1): Boolean = exists (_ == elem) /** Produces a new sequence which contains all elements of this $coll and also all elements of * a given sequence. `xs union ys` is equivalent to `xs ++ ys`. 
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala index 3783ef771f..7e77418996 100644 --- a/src/library/scala/collection/SeqProxyLike.scala +++ b/src/library/scala/collection/SeqProxyLike.scala @@ -50,7 +50,7 @@ trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A, override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = self.lastIndexOfSlice(that) override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = self.lastIndexOfSlice(that, end) override def containsSlice[B](that: GenSeq[B]): Boolean = self.indexOfSlice(that) != -1 - override def contains(elem: Any): Boolean = self.contains(elem) + override def contains[A1 >: A](elem: A1): Boolean = self.contains(elem) override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf) override def diff[B >: A](that: GenSeq[B]): Repr = self.diff(that) override def intersect[B >: A](that: GenSeq[B]): Repr = self.intersect(that) diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala index 10e8c37cbf..bdec165314 100644 --- a/src/library/scala/collection/generic/SeqForwarder.scala +++ b/src/library/scala/collection/generic/SeqForwarder.scala @@ -50,7 +50,7 @@ trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] { override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = underlying lastIndexOfSlice that override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end) override def containsSlice[B](that: GenSeq[B]): Boolean = underlying containsSlice that - override def contains(elem: Any): Boolean = underlying contains elem + override def contains[A1 >: A](elem: A1): Boolean = underlying contains elem override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p) override def indices: Range = underlying.indices } diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index 5662a11f93..ce04ef09af 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -182,7 +182,7 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { def containsTyped(x: T): Boolean = isWithinBoundaries(x) && (((x - start) % step) == zero) - override def contains(x: Any): Boolean = + override def contains[A1 >: T](x: A1): Boolean = try containsTyped(x.asInstanceOf[T]) catch { case _: ClassCastException => false } diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 85758b29bc..4feff34751 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -171,9 +171,9 @@ self: ParIterableLike[T, Repr, Sequential] => /** The task support object which is responsible for scheduling and * load-balancing tasks to processors. - * + * * @see [[scala.collection.parallel.TaskSupport]] - */ + */ def tasksupport = { val ts = _tasksupport if (ts eq null) { @@ -188,18 +188,18 @@ self: ParIterableLike[T, Repr, Sequential] => * A task support object can be changed in a parallel collection after it * has been created, but only during a quiescent period, i.e. while there * are no concurrent invocations to parallel collection methods. 
- * - * Here is a way to change the task support of a parallel collection: - * - * {{{ - * import scala.collection.parallel._ - * val pc = mutable.ParArray(1, 2, 3) - * pc.tasksupport = new ForkJoinTaskSupport( - * new scala.concurrent.forkjoin.ForkJoinPool(2)) - * }}} + * + * Here is a way to change the task support of a parallel collection: + * + * {{{ + * import scala.collection.parallel._ + * val pc = mutable.ParArray(1, 2, 3) + * pc.tasksupport = new ForkJoinTaskSupport( + * new scala.concurrent.forkjoin.ForkJoinPool(2)) + * }}} * * @see [[scala.collection.parallel.TaskSupport]] - */ + */ def tasksupport_=(ts: TaskSupport) = _tasksupport = ts def seq: Sequential @@ -877,13 +877,13 @@ self: ParIterableLike[T, Repr, Sequential] => override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U]) override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V]) - + override def toVector: Vector[T] = to[Vector] override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) { toParCollection[T, Col[T]](() => cbf().asCombiner) } else seq.to(cbf) - + /* tasks */ protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] { @@ -935,8 +935,8 @@ self: ParIterableLike[T, Repr, Sequential] => (f: First, s: Second) extends Composite[FR, SR, R, First, Second](f, s) { def leaf(prevr: Option[R]) = { - tasksupport.executeAndWaitResult(ft) - tasksupport.executeAndWaitResult(st) + tasksupport.executeAndWaitResult(ft) : Any + tasksupport.executeAndWaitResult(st) : Any mergeSubtasks } } @@ -946,8 +946,8 @@ self: ParIterableLike[T, Repr, Sequential] => (f: First, s: Second) extends Composite[FR, SR, R, First, Second](f, s) { def leaf(prevr: Option[R]) = { - val ftfuture = tasksupport.execute(ft) - tasksupport.executeAndWaitResult(st) + val ftfuture: () => Any = tasksupport.execute(ft) + tasksupport.executeAndWaitResult(st) : Any ftfuture() mergeSubtasks } @@ -1504,31 +1504,3 @@ self: ParIterableLike[T, Repr, Sequential] => }) } - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala index 5d7386b5c1..3d04a3a00c 100644 --- a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala +++ b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala @@ -50,7 +50,7 @@ class StdLexical extends Lexical with StdTokens { def identChar = letter | elem('_') // see `whitespace in `Scanners` - def whitespace: Parser[Any] = rep( + def whitespace: Parser[Any] = rep[Any]( whitespaceChar | '/' ~ '*' ~ comment | '/' ~ '/' ~ rep( chrExcept(EofCh, '\n') ) diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala index 1500b81050..489a05ecd0 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala @@ -50,7 +50,7 @@ trait Rule[-In, +Out, +A, +X] extends (In => Result[Out, A, X]) { lazy val choices = Rule.this :: other :: Nil } - def orError[In2 <: In] = this orElse(error[In2]) + def orError[In2 <: In] = this orElse error[Any] def |[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) = orElse(other) diff --git 
a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala index e88efa1bfd..7d06a7169b 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala @@ -167,7 +167,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules { val symbolInfo = nameRef ~ symbolRef ~ nat ~ (symbolRef?) ~ ref ~ get ^~~~~~^ SymbolInfo - def symHeader(key: Int) = (key -~ none | (key + 64) -~ nat) + def symHeader(key: Int): EntryParser[Any] = (key -~ none | (key + 64) -~ nat) def symbolEntry(key : Int) = symHeader(key) -~ symbolInfo @@ -263,7 +263,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules { 47 -~ typeLevel ~ typeIndex ^~^ DeBruijnIndexType, 48 -~ typeRef ~ (symbolRef*) ^~^ ExistentialType) as "type" - lazy val literal = oneOf( + lazy val literal: EntryParser[Any] = oneOf( 24 -^ (()), 25 -~ longValue ^^ (_ != 0L), 26 -~ longValue ^^ (_.toByte), diff --git a/src/swing/scala/swing/ComboBox.scala b/src/swing/scala/swing/ComboBox.scala index c7a457d082..67e39cfe3b 100644 --- a/src/swing/scala/swing/ComboBox.scala +++ b/src/swing/scala/swing/ComboBox.scala @@ -182,7 +182,7 @@ class ComboBox[A](items: Seq[A]) extends Component with Publisher { * of the component to its own defaults _after_ the renderer has been * configured. That's Swing's principle of most suprise. */ - def renderer: ListView.Renderer[A] = ListView.Renderer.wrap(peer.getRenderer) + def renderer: ListView.Renderer[A] = ListView.Renderer.wrap[A](peer.getRenderer) def renderer_=(r: ListView.Renderer[A]) { peer.setRenderer(r.peer) } /* XXX: currently not safe to expose: diff --git a/src/swing/scala/swing/ListView.scala b/src/swing/scala/swing/ListView.scala index 282d24696e..22850bac42 100644 --- a/src/swing/scala/swing/ListView.scala +++ b/src/swing/scala/swing/ListView.scala @@ -216,7 +216,7 @@ class ListView[A] extends Component { def adjusting = peer.getSelectionModel.getValueIsAdjusting } - def renderer: ListView.Renderer[A] = ListView.Renderer.wrap(peer.getCellRenderer) + def renderer: ListView.Renderer[A] = ListView.Renderer.wrap[A](peer.getCellRenderer) def renderer_=(r: ListView.Renderer[A]) { peer.setCellRenderer(r.peer) } def fixedCellWidth = peer.getFixedCellWidth diff --git a/test/files/neg/warn-inferred-any.check b/test/files/neg/warn-inferred-any.check new file mode 100644 index 0000000000..8c18616b6f --- /dev/null +++ b/test/files/neg/warn-inferred-any.check @@ -0,0 +1,10 @@ +warn-inferred-any.scala:8: error: a type was inferred to be `Any`; this may indicate a programming error. + { List(1, 2, 3) contains "a" } // only this warns + ^ +warn-inferred-any.scala:16: error: a type was inferred to be `AnyVal`; this may indicate a programming error. + { 1l to 5l contains 5 } + ^ +warn-inferred-any.scala:17: error: a type was inferred to be `AnyVal`; this may indicate a programming error. 
+ { 1l to 5l contains 5d } + ^ +three errors found diff --git a/test/files/neg/warn-inferred-any.flags b/test/files/neg/warn-inferred-any.flags new file mode 100644 index 0000000000..a3127d392a --- /dev/null +++ b/test/files/neg/warn-inferred-any.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Ywarn-infer-any diff --git a/test/files/neg/warn-inferred-any.scala b/test/files/neg/warn-inferred-any.scala new file mode 100644 index 0000000000..b853e6e5a8 --- /dev/null +++ b/test/files/neg/warn-inferred-any.scala @@ -0,0 +1,19 @@ +trait Foo[-A <: AnyRef, +B <: AnyRef] { + def run[U](x: A)(action: B => U): Boolean = ??? + + { run(_: A)(_: B => String) } +} + +trait Xs[+A] { + { List(1, 2, 3) contains "a" } // only this warns + { List(1, 2, 3) contains 1 } + { identity(List(1, 2, 3) contains 1) } + { List("a") foreach println } +} + +trait Ys[+A] { + { 1 to 5 contains 5l } + { 1l to 5l contains 5 } + { 1l to 5l contains 5d } + { 1l to 5l contains 5l } +} -- cgit v1.2.3 From 0b7aaa5251622b5e1192ef7da27823f150cb1918 Mon Sep 17 00:00:00 2001 From: Julien Richard-Foy Date: Fri, 10 Aug 2012 00:34:39 +0200 Subject: Fix raw string interpolator: string parts which were after the first argument were still escaped --- src/library/scala/StringContext.scala | 2 +- test/files/run/rawstrings.check | 2 +- test/files/run/rawstrings.scala | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala index 723d95a499..855f63907e 100644 --- a/src/library/scala/StringContext.scala +++ b/src/library/scala/StringContext.scala @@ -63,7 +63,7 @@ case class StringContext(parts: String*) { val bldr = new java.lang.StringBuilder(process(pi.next())) while (ai.hasNext) { bldr append ai.next - bldr append treatEscapes(pi.next()) + bldr append process(pi.next()) } bldr.toString } diff --git a/test/files/run/rawstrings.check b/test/files/run/rawstrings.check index 36e63594df..2b6c40725a 100644 --- a/test/files/run/rawstrings.check +++ b/test/files/run/rawstrings.check @@ -1 +1 @@ -[\n\t'"$] +[\n\t'"$\n] diff --git a/test/files/run/rawstrings.scala b/test/files/run/rawstrings.scala index 9df64f6625..b4d6e0c40a 100644 --- a/test/files/run/rawstrings.scala +++ b/test/files/run/rawstrings.scala @@ -1,3 +1,3 @@ object Test extends App { - println(raw"[\n\t'${'"'}$$]") + println(raw"[\n\t'${'"'}$$\n]") } -- cgit v1.2.3 From fbbbb2294680c0f57506f885971b148cae53c92d Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 10 Aug 2012 07:29:31 -0700 Subject: Made -Xfatal-warnings less immediately fatal. Instead of changing warnings to errors mid-stream, at the end of a run I check for condition "no errors, some warnings, and fatal warnings" and then generate an error at that point. This is necessary to test for some warnings which come from later stages. 
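The observable effect, excerpted from the updated check files in this change,
is that diagnostics are reported as warnings and the run then fails once at
the end:

    abstract-inaccessible.scala:5: warning: method implementMe in trait YourTrait references private[foo] trait Bippy.
    ...
    error: No warnings can be incurred under -Xfatal-warnings.
    three warnings found
    one error found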
--- src/compiler/scala/tools/nsc/Global.scala | 7 +- .../tools/nsc/reporters/AbstractReporter.scala | 6 +- test/files/neg/abstract-inaccessible.check | 10 +- test/files/neg/ambiguous-float-dots.check | 23 ++- test/files/neg/array-not-seq.check | 12 +- test/files/neg/catch-all.check | 10 +- test/files/neg/check-dead.check | 12 +- test/files/neg/checksensible.check | 202 +++++++++++---------- .../neg/classmanifests_new_deprecations.check | 124 ++++++------- test/files/neg/exhausting.check | 16 +- test/files/neg/macro-deprecate-idents.check | 106 +++++------ test/files/neg/main1.check | 14 +- test/files/neg/migration28.check | 4 +- test/files/neg/names-defaults-neg-warn.check | 8 +- test/files/neg/nullary-override.check | 4 +- test/files/neg/overloaded-implicit.check | 8 +- test/files/neg/package-ob-case.check | 4 +- test/files/neg/patmatexhaust.check | 24 +-- test/files/neg/permanent-blindness.check | 10 +- test/files/neg/sealed-java-enums.check | 4 +- test/files/neg/stmt-expr-discard.check | 8 +- test/files/neg/switch.check | 8 +- test/files/neg/t2442.check | 8 +- test/files/neg/t2796.check | 4 +- test/files/neg/t3098.check | 4 +- test/files/neg/t3234.check | 6 +- test/files/neg/t3234.flags | 2 +- test/files/neg/t3683a.check | 4 +- test/files/neg/t4302.check | 4 +- test/files/neg/t4440.check | 12 +- test/files/neg/t4691_exhaust_extractor.check | 10 +- test/files/neg/t4749.check | 16 +- test/files/neg/t4762.check | 8 +- test/files/neg/t4851.check | 18 +- test/files/neg/t5426.check | 12 +- test/files/neg/t5663-badwarneq.check | 18 +- test/files/neg/t5830.check | 8 +- test/files/neg/t6011.check | 10 +- test/files/neg/t6048.check | 10 +- test/files/neg/unchecked-suppress.check | 10 +- test/files/neg/unchecked.check | 16 +- test/files/neg/unchecked2.check | 16 +- test/files/neg/unit-returns-value.check | 8 +- test/files/neg/virtpatmat_reach_null.check | 4 +- .../neg/virtpatmat_reach_sealed_unsealed.check | 12 +- test/files/neg/virtpatmat_unreach_select.check | 4 +- test/files/neg/warn-inferred-any.check | 10 +- 47 files changed, 477 insertions(+), 381 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 056ae2f809..3f4c51748c 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -222,9 +222,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def error(msg: String) = globalError(msg) def globalError(msg: String) = reporter.error(NoPosition, msg) def inform(msg: String) = reporter.echo(msg) - def warning(msg: String) = - if (settings.fatalWarnings.value) globalError(msg) - else reporter.warning(NoPosition, msg) + def warning(msg: String) = reporter.warning(NoPosition, msg) // Getting in front of Predef's asserts to supplement with more info. 
// This has the happy side effect of masking the one argument forms @@ -1481,6 +1479,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } def reportCompileErrors() { + if (!reporter.hasErrors && reporter.hasWarnings && settings.fatalWarnings.value) + globalError("No warnings can be incurred under -Xfatal-warnings.") + if (reporter.hasErrors) { for ((sym, file) <- symSource.iterator) { sym.reset(new loaders.SourcefileLoader(file)) diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala index 491718bc0d..fdf82ece71 100644 --- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala @@ -29,11 +29,7 @@ abstract class AbstractReporter extends Reporter { private def noWarnings = settings.nowarnings.value private def isPromptSet = settings.prompt.value - protected def info0(pos: Position, msg: String, _severity: Severity, force: Boolean) { - val severity = - if (settings.fatalWarnings.value && _severity == WARNING) ERROR - else _severity - + protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) { if (severity == INFO) { if (isVerbose || force) { severity.count += 1 diff --git a/test/files/neg/abstract-inaccessible.check b/test/files/neg/abstract-inaccessible.check index 42b98ac026..d56f5691be 100644 --- a/test/files/neg/abstract-inaccessible.check +++ b/test/files/neg/abstract-inaccessible.check @@ -1,13 +1,15 @@ -abstract-inaccessible.scala:5: error: method implementMe in trait YourTrait references private[foo] trait Bippy. +abstract-inaccessible.scala:5: warning: method implementMe in trait YourTrait references private[foo] trait Bippy. Classes which cannot access Bippy may be unable to provide a concrete implementation of implementMe. def implementMe(f: Int => (String, Bippy)): Unit ^ -abstract-inaccessible.scala:6: error: method overrideMe in trait YourTrait references private[foo] trait Bippy. +abstract-inaccessible.scala:6: warning: method overrideMe in trait YourTrait references private[foo] trait Bippy. Classes which cannot access Bippy may be unable to override overrideMe. def overrideMe[T <: Bippy](x: T): T = x ^ -abstract-inaccessible.scala:7: error: method overrideMeAlso in trait YourTrait references private[foo] trait Bippy. +abstract-inaccessible.scala:7: warning: method overrideMeAlso in trait YourTrait references private[foo] trait Bippy. Classes which cannot access Bippy may be unable to override overrideMeAlso. def overrideMeAlso(x: Map[Int, Set[Bippy]]) = 5 ^ -three errors found +error: No warnings can be incurred under -Xfatal-warnings. +three warnings found +one error found diff --git a/test/files/neg/ambiguous-float-dots.check b/test/files/neg/ambiguous-float-dots.check index 6c21056d7a..cdd2d6fa2a 100644 --- a/test/files/neg/ambiguous-float-dots.check +++ b/test/files/neg/ambiguous-float-dots.check @@ -1,16 +1,27 @@ -ambiguous-float-dots.scala:2: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. +ambiguous-float-dots.scala:2: warning: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. val x0 = 5. ^ -ambiguous-float-dots.scala:6: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. 
+ambiguous-float-dots.scala:6: warning: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. val x1 = 5.f ^ -ambiguous-float-dots.scala:7: error: Treating numbers with a leading zero as octal is deprecated. +ambiguous-float-dots.scala:7: warning: Treating numbers with a leading zero as octal is deprecated. val y0 = 055 ^ -ambiguous-float-dots.scala:11: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. +ambiguous-float-dots.scala:11: warning: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. 1.+(2) ^ -ambiguous-float-dots.scala:12: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. +ambiguous-float-dots.scala:12: warning: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit. 1. + 2 ^ -5 errors found +ambiguous-float-dots.scala:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 1.+(2) + ^ +ambiguous-float-dots.scala:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 1. + 2 + ^ +ambiguous-float-dots.scala:13: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 1 + 2 + ^ +error: No warnings can be incurred under -Xfatal-warnings. +8 warnings found +one error found diff --git a/test/files/neg/array-not-seq.check b/test/files/neg/array-not-seq.check index a3a639e772..6cfaa06efb 100644 --- a/test/files/neg/array-not-seq.check +++ b/test/files/neg/array-not-seq.check @@ -1,13 +1,15 @@ -array-not-seq.scala:2: error: An Array will no longer match as Seq[_]. +array-not-seq.scala:2: warning: An Array will no longer match as Seq[_]. def f1(x: Any) = x.isInstanceOf[Seq[_]] ^ -array-not-seq.scala:4: error: An Array will no longer match as Seq[_]. +array-not-seq.scala:4: warning: An Array will no longer match as Seq[_]. case _: Seq[_] => true ^ -array-not-seq.scala:16: error: An Array will no longer match as Seq[_]. +array-not-seq.scala:16: warning: An Array will no longer match as Seq[_]. case (Some(_: Seq[_]), Nil, _) => 1 ^ -array-not-seq.scala:17: error: An Array will no longer match as Seq[_]. +array-not-seq.scala:17: warning: An Array will no longer match as Seq[_]. case (None, List(_: List[_], _), _) => 2 ^ -four errors found +error: No warnings can be incurred under -Xfatal-warnings. +four warnings found +one error found diff --git a/test/files/neg/catch-all.check b/test/files/neg/catch-all.check index a9cd0ba927..d59e826f03 100644 --- a/test/files/neg/catch-all.check +++ b/test/files/neg/catch-all.check @@ -1,10 +1,12 @@ -catch-all.scala:2: error: This catches all Throwables. If this is really intended, use `case _: Throwable` to clear this warning. +catch-all.scala:2: warning: This catches all Throwables. If this is really intended, use `case _: Throwable` to clear this warning. try { "warn" } catch { case _ => } ^ -catch-all.scala:4: error: This catches all Throwables. If this is really intended, use `case x: Throwable` to clear this warning. +catch-all.scala:4: warning: This catches all Throwables. 
If this is really intended, use `case x: Throwable` to clear this warning. try { "warn" } catch { case x => } ^ -catch-all.scala:6: error: This catches all Throwables. If this is really intended, use `case x: Throwable` to clear this warning. +catch-all.scala:6: warning: This catches all Throwables. If this is really intended, use `case x: Throwable` to clear this warning. try { "warn" } catch { case _: RuntimeException => ; case x => } ^ -three errors found +error: No warnings can be incurred under -Xfatal-warnings. +three warnings found +one error found diff --git a/test/files/neg/check-dead.check b/test/files/neg/check-dead.check index 29601c1d4a..2150a942bf 100644 --- a/test/files/neg/check-dead.check +++ b/test/files/neg/check-dead.check @@ -1,13 +1,15 @@ -check-dead.scala:7: error: dead code following this construct +check-dead.scala:7: warning: dead code following this construct def z1 = y1(throw new Exception) // should warn ^ -check-dead.scala:10: error: dead code following this construct +check-dead.scala:10: warning: dead code following this construct def z2 = y2(throw new Exception) // should warn ^ -check-dead.scala:29: error: dead code following this construct +check-dead.scala:29: warning: dead code following this construct throw new Exception // should warn ^ -check-dead.scala:33: error: dead code following this construct +check-dead.scala:33: warning: dead code following this construct throw new Exception // should warn ^ -four errors found +error: No warnings can be incurred under -Xfatal-warnings. +four warnings found +one error found diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check index 23af94180a..e5f1a38d96 100644 --- a/test/files/neg/checksensible.check +++ b/test/files/neg/checksensible.check @@ -1,100 +1,102 @@ -checksensible.scala:13: error: comparing a fresh object using `eq' will always yield false - (new AnyRef) eq (new AnyRef) - ^ -checksensible.scala:14: error: comparing a fresh object using `ne' will always yield true - (new AnyRef) ne (new AnyRef) - ^ -checksensible.scala:15: error: comparing a fresh object using `eq' will always yield false - Shmoopie eq (new AnyRef) - ^ -checksensible.scala:16: error: comparing a fresh object using `eq' will always yield false - (Shmoopie: AnyRef) eq (new AnyRef) - ^ -checksensible.scala:17: error: comparing a fresh object using `eq' will always yield false - (new AnyRef) eq Shmoopie - ^ -checksensible.scala:18: error: comparing a fresh object using `eq' will always yield false - (new AnyRef) eq null - ^ -checksensible.scala:19: error: comparing a fresh object using `eq' will always yield false - null eq new AnyRef - ^ -checksensible.scala:26: error: comparing values of types Unit and Int using `==' will always yield false - (c = 1) == 0 - ^ -checksensible.scala:27: error: comparing values of types Int and Unit using `==' will always yield false - 0 == (c = 1) - ^ -checksensible.scala:29: error: comparing values of types Int and String using `==' will always yield false - 1 == "abc" - ^ -checksensible.scala:33: error: comparing values of types Some[Int] and Int using `==' will always yield false - Some(1) == 1 // as above - ^ -checksensible.scala:38: error: comparing a fresh object using `==' will always yield false - new AnyRef == 1 - ^ -checksensible.scala:41: error: comparing values of types Int and Boolean using `==' will always yield false - 1 == (new java.lang.Boolean(true)) - ^ -checksensible.scala:43: error: comparing values of types Int and Boolean using `!=' will always yield 
true - 1 != true - ^ -checksensible.scala:44: error: comparing values of types Unit and Boolean using `==' will always yield false - () == true - ^ -checksensible.scala:45: error: comparing values of types Unit and Unit using `==' will always yield true - () == () - ^ -checksensible.scala:46: error: comparing values of types Unit and Unit using `==' will always yield true - () == println - ^ -checksensible.scala:47: error: comparing values of types Unit and scala.runtime.BoxedUnit using `==' will always yield true - () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false - ^ -checksensible.scala:48: error: comparing values of types scala.runtime.BoxedUnit and Unit using `!=' will always yield false - scala.runtime.BoxedUnit.UNIT != () - ^ -checksensible.scala:51: error: comparing values of types Int and Unit using `!=' will always yield true - (1 != println) - ^ -checksensible.scala:52: error: comparing values of types Int and Symbol using `!=' will always yield true - (1 != 'sym) - ^ -checksensible.scala:58: error: comparing a fresh object using `==' will always yield false - ((x: Int) => x + 1) == null - ^ -checksensible.scala:59: error: comparing a fresh object using `==' will always yield false - Bep == ((_: Int) + 1) - ^ -checksensible.scala:61: error: comparing a fresh object using `==' will always yield false - new Object == new Object - ^ -checksensible.scala:62: error: comparing a fresh object using `==' will always yield false - new Object == "abc" - ^ -checksensible.scala:63: error: comparing a fresh object using `!=' will always yield true - new Exception() != new Exception() - ^ -checksensible.scala:66: error: comparing values of types Int and Null using `==' will always yield false - if (foo.length == null) "plante" else "plante pas" - ^ -checksensible.scala:71: error: comparing values of types Bip and Bop using `==' will always yield false - (x1 == x2) - ^ -checksensible.scala:81: error: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==' will always yield false - c3 == z1 - ^ -checksensible.scala:82: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==' will always yield false - z1 == c3 - ^ -checksensible.scala:83: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=' will always yield true - z1 != c3 - ^ -checksensible.scala:84: error: comparing values of types EqEqRefTest.this.C3 and String using `!=' will always yield true - c3 != "abc" - ^ -checksensible.scala:95: error: comparing values of types Unit and Int using `!=' will always yield true - while ((c = in.read) != -1) - ^ -33 errors found +checksensible.scala:13: warning: comparing a fresh object using `eq' will always yield false + (new AnyRef) eq (new AnyRef) + ^ +checksensible.scala:14: warning: comparing a fresh object using `ne' will always yield true + (new AnyRef) ne (new AnyRef) + ^ +checksensible.scala:15: warning: comparing a fresh object using `eq' will always yield false + Shmoopie eq (new AnyRef) + ^ +checksensible.scala:16: warning: comparing a fresh object using `eq' will always yield false + (Shmoopie: AnyRef) eq (new AnyRef) + ^ +checksensible.scala:17: warning: comparing a fresh object using `eq' will always yield false + (new AnyRef) eq Shmoopie + ^ +checksensible.scala:18: warning: comparing a fresh object using `eq' will always yield false + (new AnyRef) eq null + ^ +checksensible.scala:19: warning: comparing a fresh object using `eq' will always yield false + null eq new 
AnyRef + ^ +checksensible.scala:26: warning: comparing values of types Unit and Int using `==' will always yield false + (c = 1) == 0 + ^ +checksensible.scala:27: warning: comparing values of types Int and Unit using `==' will always yield false + 0 == (c = 1) + ^ +checksensible.scala:29: warning: comparing values of types Int and String using `==' will always yield false + 1 == "abc" + ^ +checksensible.scala:33: warning: comparing values of types Some[Int] and Int using `==' will always yield false + Some(1) == 1 // as above + ^ +checksensible.scala:38: warning: comparing a fresh object using `==' will always yield false + new AnyRef == 1 + ^ +checksensible.scala:41: warning: comparing values of types Int and Boolean using `==' will always yield false + 1 == (new java.lang.Boolean(true)) + ^ +checksensible.scala:43: warning: comparing values of types Int and Boolean using `!=' will always yield true + 1 != true + ^ +checksensible.scala:44: warning: comparing values of types Unit and Boolean using `==' will always yield false + () == true + ^ +checksensible.scala:45: warning: comparing values of types Unit and Unit using `==' will always yield true + () == () + ^ +checksensible.scala:46: warning: comparing values of types Unit and Unit using `==' will always yield true + () == println + ^ +checksensible.scala:47: warning: comparing values of types Unit and scala.runtime.BoxedUnit using `==' will always yield true + () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false + ^ +checksensible.scala:48: warning: comparing values of types scala.runtime.BoxedUnit and Unit using `!=' will always yield false + scala.runtime.BoxedUnit.UNIT != () + ^ +checksensible.scala:51: warning: comparing values of types Int and Unit using `!=' will always yield true + (1 != println) + ^ +checksensible.scala:52: warning: comparing values of types Int and Symbol using `!=' will always yield true + (1 != 'sym) + ^ +checksensible.scala:58: warning: comparing a fresh object using `==' will always yield false + ((x: Int) => x + 1) == null + ^ +checksensible.scala:59: warning: comparing a fresh object using `==' will always yield false + Bep == ((_: Int) + 1) + ^ +checksensible.scala:61: warning: comparing a fresh object using `==' will always yield false + new Object == new Object + ^ +checksensible.scala:62: warning: comparing a fresh object using `==' will always yield false + new Object == "abc" + ^ +checksensible.scala:63: warning: comparing a fresh object using `!=' will always yield true + new Exception() != new Exception() + ^ +checksensible.scala:66: warning: comparing values of types Int and Null using `==' will always yield false + if (foo.length == null) "plante" else "plante pas" + ^ +checksensible.scala:71: warning: comparing values of types Bip and Bop using `==' will always yield false + (x1 == x2) + ^ +checksensible.scala:81: warning: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==' will always yield false + c3 == z1 + ^ +checksensible.scala:82: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==' will always yield false + z1 == c3 + ^ +checksensible.scala:83: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=' will always yield true + z1 != c3 + ^ +checksensible.scala:84: warning: comparing values of types EqEqRefTest.this.C3 and String using `!=' will always yield true + c3 != "abc" + ^ +checksensible.scala:95: warning: comparing values of types Unit and Int using 
`!=' will always yield true + while ((c = in.read) != -1) + ^ +error: No warnings can be incurred under -Xfatal-warnings. +33 warnings found +one error found diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check index 841e893249..2301947b04 100644 --- a/test/files/neg/classmanifests_new_deprecations.check +++ b/test/files/neg/classmanifests_new_deprecations.check @@ -1,61 +1,63 @@ -classmanifests_new_deprecations.scala:2: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead - def cm1[T: ClassManifest] = ??? - ^ -classmanifests_new_deprecations.scala:3: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead - def cm2[T](implicit evidence$1: ClassManifest[T]) = ??? - ^ -classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead - val cm3: ClassManifest[Int] = null - ^ -classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead - val cm3: ClassManifest[Int] = null - ^ -classmanifests_new_deprecations.scala:6: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead - def rcm1[T: scala.reflect.ClassManifest] = ??? - ^ -classmanifests_new_deprecations.scala:7: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead - def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ??? - ^ -classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead - val rcm3: scala.reflect.ClassManifest[Int] = null - ^ -classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead - val rcm3: scala.reflect.ClassManifest[Int] = null - ^ -classmanifests_new_deprecations.scala:10: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead - type CM[T] = ClassManifest[T] - ^ -classmanifests_new_deprecations.scala:15: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead - type RCM[T] = scala.reflect.ClassManifest[T] - ^ -classmanifests_new_deprecations.scala:20: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead - def m1[T: Manifest] = ??? - ^ -classmanifests_new_deprecations.scala:21: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead - def m2[T](implicit evidence$1: Manifest[T]) = ??? 
- ^ -classmanifests_new_deprecations.scala:22: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead - val m3: Manifest[Int] = null - ^ -classmanifests_new_deprecations.scala:22: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead - val m3: Manifest[Int] = null - ^ -classmanifests_new_deprecations.scala:24: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead - def rm1[T: scala.reflect.Manifest] = ??? - ^ -classmanifests_new_deprecations.scala:25: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead - def rm2[T](implicit evidence$1: scala.reflect.Manifest[T]) = ??? - ^ -classmanifests_new_deprecations.scala:26: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead - val rm3: scala.reflect.Manifest[Int] = null - ^ -classmanifests_new_deprecations.scala:26: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead - val rm3: scala.reflect.Manifest[Int] = null - ^ -classmanifests_new_deprecations.scala:28: error: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead - type M[T] = Manifest[T] - ^ -classmanifests_new_deprecations.scala:33: error: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead - type RM[T] = scala.reflect.Manifest[T] - ^ -20 errors found +classmanifests_new_deprecations.scala:2: warning: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead + def cm1[T: ClassManifest] = ??? + ^ +classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead + def cm2[T](implicit evidence$1: ClassManifest[T]) = ??? + ^ +classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead + val cm3: ClassManifest[Int] = null + ^ +classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead + val cm3: ClassManifest[Int] = null + ^ +classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead + def rcm1[T: scala.reflect.ClassManifest] = ??? + ^ +classmanifests_new_deprecations.scala:7: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead + def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ??? 
+ ^ +classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead + val rcm3: scala.reflect.ClassManifest[Int] = null + ^ +classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead + val rcm3: scala.reflect.ClassManifest[Int] = null + ^ +classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead + type CM[T] = ClassManifest[T] + ^ +classmanifests_new_deprecations.scala:15: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead + type RCM[T] = scala.reflect.ClassManifest[T] + ^ +classmanifests_new_deprecations.scala:20: warning: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead + def m1[T: Manifest] = ??? + ^ +classmanifests_new_deprecations.scala:21: warning: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead + def m2[T](implicit evidence$1: Manifest[T]) = ??? + ^ +classmanifests_new_deprecations.scala:22: warning: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead + val m3: Manifest[Int] = null + ^ +classmanifests_new_deprecations.scala:22: warning: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead + val m3: Manifest[Int] = null + ^ +classmanifests_new_deprecations.scala:24: warning: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead + def rm1[T: scala.reflect.Manifest] = ??? + ^ +classmanifests_new_deprecations.scala:25: warning: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead + def rm2[T](implicit evidence$1: scala.reflect.Manifest[T]) = ??? + ^ +classmanifests_new_deprecations.scala:26: warning: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead + val rm3: scala.reflect.Manifest[Int] = null + ^ +classmanifests_new_deprecations.scala:26: warning: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead + val rm3: scala.reflect.Manifest[Int] = null + ^ +classmanifests_new_deprecations.scala:28: warning: type Manifest in object Predef is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead + type M[T] = Manifest[T] + ^ +classmanifests_new_deprecations.scala:33: warning: trait Manifest in package reflect is deprecated: Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead + type RM[T] = scala.reflect.Manifest[T] + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+20 warnings found +one error found diff --git a/test/files/neg/exhausting.check b/test/files/neg/exhausting.check index 0f0d13cb33..c573eb3e15 100644 --- a/test/files/neg/exhausting.check +++ b/test/files/neg/exhausting.check @@ -1,25 +1,27 @@ -exhausting.scala:21: error: match may not be exhaustive. +exhausting.scala:21: warning: match may not be exhaustive. It would fail on the following input: List(_, _, _) def fail1[T](xs: List[T]) = xs match { ^ -exhausting.scala:27: error: match may not be exhaustive. +exhausting.scala:27: warning: match may not be exhaustive. It would fail on the following input: Nil def fail2[T](xs: List[T]) = xs match { ^ -exhausting.scala:32: error: match may not be exhaustive. +exhausting.scala:32: warning: match may not be exhaustive. It would fail on the following input: List((x: Int forSome x not in (1, 2))) def fail3a(xs: List[Int]) = xs match { ^ -exhausting.scala:39: error: match may not be exhaustive. +exhausting.scala:39: warning: match may not be exhaustive. It would fail on the following input: Bar3 def fail3[T](x: Foo[T]) = x match { ^ -exhausting.scala:47: error: match may not be exhaustive. +exhausting.scala:47: warning: match may not be exhaustive. It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2) def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match { ^ -exhausting.scala:56: error: match may not be exhaustive. +exhausting.scala:56: warning: match may not be exhaustive. It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2) def fail5[T](xx: (Foo[T], Foo[T])) = xx match { ^ -6 errors found +error: No warnings can be incurred under -Xfatal-warnings. +6 warnings found +one error found diff --git a/test/files/neg/macro-deprecate-idents.check b/test/files/neg/macro-deprecate-idents.check index f8a7e519df..c653eabaef 100644 --- a/test/files/neg/macro-deprecate-idents.check +++ b/test/files/neg/macro-deprecate-idents.check @@ -1,52 +1,54 @@ -macro-deprecate-idents.scala:2: error: macro is now a reserved word; usage as an identifier is deprecated - val macro = ??? - ^ -macro-deprecate-idents.scala:6: error: macro is now a reserved word; usage as an identifier is deprecated - var macro = ??? 
- ^ -macro-deprecate-idents.scala:10: error: macro is now a reserved word; usage as an identifier is deprecated - type macro = Int - ^ -macro-deprecate-idents.scala:14: error: macro is now a reserved word; usage as an identifier is deprecated - class macro - ^ -macro-deprecate-idents.scala:18: error: macro is now a reserved word; usage as an identifier is deprecated - class macro - ^ -macro-deprecate-idents.scala:22: error: macro is now a reserved word; usage as an identifier is deprecated - object macro - ^ -macro-deprecate-idents.scala:26: error: macro is now a reserved word; usage as an identifier is deprecated - object macro - ^ -macro-deprecate-idents.scala:30: error: macro is now a reserved word; usage as an identifier is deprecated - trait macro - ^ -macro-deprecate-idents.scala:34: error: macro is now a reserved word; usage as an identifier is deprecated - trait macro - ^ -macro-deprecate-idents.scala:37: error: macro is now a reserved word; usage as an identifier is deprecated -package macro { - ^ -macro-deprecate-idents.scala:38: error: macro is now a reserved word; usage as an identifier is deprecated - package macro.bar { - ^ -macro-deprecate-idents.scala:43: error: macro is now a reserved word; usage as an identifier is deprecated - package macro.foo { - ^ -macro-deprecate-idents.scala:48: error: macro is now a reserved word; usage as an identifier is deprecated - val Some(macro) = Some(42) - ^ -macro-deprecate-idents.scala:49: error: macro is now a reserved word; usage as an identifier is deprecated - macro match { - ^ -macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated - case macro => println(macro) - ^ -macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated - case macro => println(macro) - ^ -macro-deprecate-idents.scala:55: error: macro is now a reserved word; usage as an identifier is deprecated - def macro = 2 - ^ -17 errors found +macro-deprecate-idents.scala:2: warning: macro is now a reserved word; usage as an identifier is deprecated + val macro = ??? + ^ +macro-deprecate-idents.scala:6: warning: macro is now a reserved word; usage as an identifier is deprecated + var macro = ??? 
+ ^ +macro-deprecate-idents.scala:10: warning: macro is now a reserved word; usage as an identifier is deprecated + type macro = Int + ^ +macro-deprecate-idents.scala:14: warning: macro is now a reserved word; usage as an identifier is deprecated + class macro + ^ +macro-deprecate-idents.scala:18: warning: macro is now a reserved word; usage as an identifier is deprecated + class macro + ^ +macro-deprecate-idents.scala:22: warning: macro is now a reserved word; usage as an identifier is deprecated + object macro + ^ +macro-deprecate-idents.scala:26: warning: macro is now a reserved word; usage as an identifier is deprecated + object macro + ^ +macro-deprecate-idents.scala:30: warning: macro is now a reserved word; usage as an identifier is deprecated + trait macro + ^ +macro-deprecate-idents.scala:34: warning: macro is now a reserved word; usage as an identifier is deprecated + trait macro + ^ +macro-deprecate-idents.scala:37: warning: macro is now a reserved word; usage as an identifier is deprecated +package macro { + ^ +macro-deprecate-idents.scala:38: warning: macro is now a reserved word; usage as an identifier is deprecated + package macro.bar { + ^ +macro-deprecate-idents.scala:43: warning: macro is now a reserved word; usage as an identifier is deprecated + package macro.foo { + ^ +macro-deprecate-idents.scala:48: warning: macro is now a reserved word; usage as an identifier is deprecated + val Some(macro) = Some(42) + ^ +macro-deprecate-idents.scala:49: warning: macro is now a reserved word; usage as an identifier is deprecated + macro match { + ^ +macro-deprecate-idents.scala:50: warning: macro is now a reserved word; usage as an identifier is deprecated + case macro => println(macro) + ^ +macro-deprecate-idents.scala:50: warning: macro is now a reserved word; usage as an identifier is deprecated + case macro => println(macro) + ^ +macro-deprecate-idents.scala:55: warning: macro is now a reserved word; usage as an identifier is deprecated + def macro = 2 + ^ +error: No warnings can be incurred under -Xfatal-warnings. +17 warnings found +one error found diff --git a/test/files/neg/main1.check b/test/files/neg/main1.check index 1a7a13e1e9..b745105818 100644 --- a/test/files/neg/main1.check +++ b/test/files/neg/main1.check @@ -1,26 +1,28 @@ -main1.scala:3: error: Foo has a main method with parameter type Array[String], but foo1.Foo will not be a runnable program. +main1.scala:3: warning: Foo has a main method with parameter type Array[String], but foo1.Foo will not be a runnable program. Reason: companion is a trait, which means no static forwarder can be generated. object Foo { // companion is trait ^ -main1.scala:10: error: Foo has a main method with parameter type Array[String], but foo2.Foo will not be a runnable program. +main1.scala:10: warning: Foo has a main method with parameter type Array[String], but foo2.Foo will not be a runnable program. Reason: companion contains its own main method, which means no static forwarder can be generated. object Foo { // companion has its own main ^ -main1.scala:22: error: Foo has a main method with parameter type Array[String], but foo3.Foo will not be a runnable program. +main1.scala:22: warning: Foo has a main method with parameter type Array[String], but foo3.Foo will not be a runnable program. Reason: companion contains its own main method (implementation restriction: no main is allowed, regardless of signature), which means no static forwarder can be generated. object Foo { // Companion contains main, but not an interfering main. 
^ -main1.scala:31: error: Foo has a main method with parameter type Array[String], but foo4.Foo will not be a runnable program. +main1.scala:31: warning: Foo has a main method with parameter type Array[String], but foo4.Foo will not be a runnable program. Reason: companion contains its own main method, which means no static forwarder can be generated. object Foo extends Foo { // Inherits main from the class ^ -main1.scala:39: error: Foo has a main method with parameter type Array[String], but foo5.Foo will not be a runnable program. +main1.scala:39: warning: Foo has a main method with parameter type Array[String], but foo5.Foo will not be a runnable program. Reason: companion contains its own main method, which means no static forwarder can be generated. object Foo extends Foo { // Overrides main from the class ^ -5 errors found +error: No warnings can be incurred under -Xfatal-warnings. +5 warnings found +one error found diff --git a/test/files/neg/migration28.check b/test/files/neg/migration28.check index d7dfacf3db..afb4db62e2 100644 --- a/test/files/neg/migration28.check +++ b/test/files/neg/migration28.check @@ -1,5 +1,7 @@ -migration28.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0: +migration28.scala:4: warning: method scanRight in trait TraversableLike has changed semantics in version 2.9.0: The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse. List(1,2,3,4,5).scanRight(0)(_+_) ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found one error found diff --git a/test/files/neg/names-defaults-neg-warn.check b/test/files/neg/names-defaults-neg-warn.check index e1085acf76..0f4edef84e 100644 --- a/test/files/neg/names-defaults-neg-warn.check +++ b/test/files/neg/names-defaults-neg-warn.check @@ -1,7 +1,9 @@ -names-defaults-neg-warn.scala:11: error: the parameter name s has been deprecated. Use x instead. +names-defaults-neg-warn.scala:11: warning: the parameter name s has been deprecated. Use x instead. deprNam2.f(s = "dlfkj") ^ -names-defaults-neg-warn.scala:12: error: the parameter name x has been deprecated. Use s instead. +names-defaults-neg-warn.scala:12: warning: the parameter name x has been deprecated. Use s instead. deprNam2.g(x = "dlkjf") ^ -two errors found +error: No warnings can be incurred under -Xfatal-warnings. +two warnings found +one error found diff --git a/test/files/neg/nullary-override.check b/test/files/neg/nullary-override.check index 6b2ded2d4a..f032f4a6c2 100644 --- a/test/files/neg/nullary-override.check +++ b/test/files/neg/nullary-override.check @@ -1,4 +1,6 @@ -nullary-override.scala:2: error: non-nullary method overrides nullary method +nullary-override.scala:2: warning: non-nullary method overrides nullary method class B extends A { override def x(): Int = 4 } ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+one warning found one error found diff --git a/test/files/neg/overloaded-implicit.check b/test/files/neg/overloaded-implicit.check index bdbe6a89d5..ca0870705d 100644 --- a/test/files/neg/overloaded-implicit.check +++ b/test/files/neg/overloaded-implicit.check @@ -1,7 +1,9 @@ -overloaded-implicit.scala:2: error: parameterized overloaded implicit methods are not visible as view bounds +overloaded-implicit.scala:2: warning: parameterized overloaded implicit methods are not visible as view bounds implicit def imp1[T](x: List[T]): Map[T, T] = Map() ^ -overloaded-implicit.scala:3: error: parameterized overloaded implicit methods are not visible as view bounds +overloaded-implicit.scala:3: warning: parameterized overloaded implicit methods are not visible as view bounds implicit def imp1[T](x: Set[T]): Map[T, T] = Map() ^ -two errors found +error: No warnings can be incurred under -Xfatal-warnings. +two warnings found +one error found diff --git a/test/files/neg/package-ob-case.check b/test/files/neg/package-ob-case.check index e6b2f858ef..063a120db1 100644 --- a/test/files/neg/package-ob-case.check +++ b/test/files/neg/package-ob-case.check @@ -1,5 +1,7 @@ -package-ob-case.scala:3: error: it is not recommended to define classes/objects inside of package objects. +package-ob-case.scala:3: warning: it is not recommended to define classes/objects inside of package objects. If possible, define class X in package foo instead. case class X(z: Int) { } ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found one error found diff --git a/test/files/neg/patmatexhaust.check b/test/files/neg/patmatexhaust.check index 4556e6622f..6069dfdaab 100644 --- a/test/files/neg/patmatexhaust.check +++ b/test/files/neg/patmatexhaust.check @@ -1,40 +1,42 @@ -patmatexhaust.scala:7: error: match may not be exhaustive. +patmatexhaust.scala:7: warning: match may not be exhaustive. It would fail on the following input: Baz def ma1(x:Foo) = x match { ^ -patmatexhaust.scala:11: error: match may not be exhaustive. +patmatexhaust.scala:11: warning: match may not be exhaustive. It would fail on the following input: Bar(_) def ma2(x:Foo) = x match { ^ -patmatexhaust.scala:23: error: match may not be exhaustive. +patmatexhaust.scala:23: warning: match may not be exhaustive. It would fail on the following inputs: (Kult(_), Kult(_)), (Qult(), Qult()) def ma3(x:Mult) = (x,x) match { // not exhaustive ^ -patmatexhaust.scala:49: error: match may not be exhaustive. +patmatexhaust.scala:49: warning: match may not be exhaustive. It would fail on the following inputs: Gp(), Gu def ma4(x:Deep) = x match { // missing cases: Gu, Gp ^ -patmatexhaust.scala:55: error: unreachable code +patmatexhaust.scala:55: warning: unreachable code case _ if 1 == 0 => ^ -patmatexhaust.scala:53: error: match may not be exhaustive. +patmatexhaust.scala:53: warning: match may not be exhaustive. It would fail on the following input: Gp() def ma5(x:Deep) = x match { ^ -patmatexhaust.scala:75: error: match may not be exhaustive. +patmatexhaust.scala:75: warning: match may not be exhaustive. It would fail on the following input: B() def ma9(x: B) = x match { ^ -patmatexhaust.scala:100: error: match may not be exhaustive. +patmatexhaust.scala:100: warning: match may not be exhaustive. It would fail on the following input: C1() def ma10(x: C) = x match { // not exhaustive: C1 is not sealed. ^ -patmatexhaust.scala:114: error: match may not be exhaustive. +patmatexhaust.scala:114: warning: match may not be exhaustive. 
It would fail on the following inputs: D1, D2() def ma10(x: C) = x match { // not exhaustive: C1 has subclasses. ^ -patmatexhaust.scala:126: error: match may not be exhaustive. +patmatexhaust.scala:126: warning: match may not be exhaustive. It would fail on the following input: C1() def ma10(x: C) = x match { // not exhaustive: C1 is not abstract. ^ -10 errors found +error: No warnings can be incurred under -Xfatal-warnings. +10 warnings found +one error found diff --git a/test/files/neg/permanent-blindness.check b/test/files/neg/permanent-blindness.check index 18b4543707..cdde201ef6 100644 --- a/test/files/neg/permanent-blindness.check +++ b/test/files/neg/permanent-blindness.check @@ -1,10 +1,12 @@ -permanent-blindness.scala:10: error: imported `Bippy' is permanently hidden by definition of class Bippy in package bar +permanent-blindness.scala:10: warning: imported `Bippy' is permanently hidden by definition of class Bippy in package bar import foo.{ Bippy, Bop, Dingus } ^ -permanent-blindness.scala:10: error: imported `Bop' is permanently hidden by definition of object Bop in package bar +permanent-blindness.scala:10: warning: imported `Bop' is permanently hidden by definition of object Bop in package bar import foo.{ Bippy, Bop, Dingus } ^ -permanent-blindness.scala:10: error: imported `Dingus' is permanently hidden by definition of object Dingus in package bar +permanent-blindness.scala:10: warning: imported `Dingus' is permanently hidden by definition of object Dingus in package bar import foo.{ Bippy, Bop, Dingus } ^ -three errors found +error: No warnings can be incurred under -Xfatal-warnings. +three warnings found +one error found diff --git a/test/files/neg/sealed-java-enums.check b/test/files/neg/sealed-java-enums.check index 20d00c8e91..a3c39ec5cd 100644 --- a/test/files/neg/sealed-java-enums.check +++ b/test/files/neg/sealed-java-enums.check @@ -1,5 +1,7 @@ -sealed-java-enums.scala:5: error: match may not be exhaustive. +sealed-java-enums.scala:5: warning: match may not be exhaustive. It would fail on the following inputs: BLOCKED, TERMINATED, TIMED_WAITING def f(state: State) = state match { ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found one error found diff --git a/test/files/neg/stmt-expr-discard.check b/test/files/neg/stmt-expr-discard.check index 2d6420a61d..1207e6da50 100644 --- a/test/files/neg/stmt-expr-discard.check +++ b/test/files/neg/stmt-expr-discard.check @@ -1,7 +1,9 @@ -stmt-expr-discard.scala:3: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses +stmt-expr-discard.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 2 ^ -stmt-expr-discard.scala:4: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses +stmt-expr-discard.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 4 ^ -two errors found +error: No warnings can be incurred under -Xfatal-warnings. 
+two warnings found +one error found diff --git a/test/files/neg/switch.check b/test/files/neg/switch.check index e4730b6459..f968d3a448 100644 --- a/test/files/neg/switch.check +++ b/test/files/neg/switch.check @@ -1,7 +1,9 @@ -switch.scala:38: error: could not emit switch for @switch annotated match +switch.scala:38: warning: could not emit switch for @switch annotated match def fail2(c: Char) = (c: @switch @unchecked) match { ^ -switch.scala:45: error: could not emit switch for @switch annotated match +switch.scala:45: warning: could not emit switch for @switch annotated match def fail3(c: Char) = (c: @unchecked @switch) match { ^ -two errors found +error: No warnings can be incurred under -Xfatal-warnings. +two warnings found +one error found diff --git a/test/files/neg/t2442.check b/test/files/neg/t2442.check index 714816fd62..9ff0b44661 100644 --- a/test/files/neg/t2442.check +++ b/test/files/neg/t2442.check @@ -1,9 +1,11 @@ -t2442.scala:4: error: match may not be exhaustive. +t2442.scala:4: warning: match may not be exhaustive. It would fail on the following input: THREE def f(e: MyEnum) = e match { ^ -t2442.scala:11: error: match may not be exhaustive. +t2442.scala:11: warning: match may not be exhaustive. It would fail on the following input: BLUE def g(e: MySecondEnum) = e match { ^ -two errors found +error: No warnings can be incurred under -Xfatal-warnings. +two warnings found +one error found diff --git a/test/files/neg/t2796.check b/test/files/neg/t2796.check index aeb18497ed..4456a7fc19 100644 --- a/test/files/neg/t2796.check +++ b/test/files/neg/t2796.check @@ -1,4 +1,6 @@ -t2796.scala:7: error: Implementation restriction: early definitions in traits are not initialized before the super class is initialized. +t2796.scala:7: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized. val abstractVal = "T1.abstractVal" // warn ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found one error found diff --git a/test/files/neg/t3098.check b/test/files/neg/t3098.check index 85829747b9..5343b128f0 100644 --- a/test/files/neg/t3098.check +++ b/test/files/neg/t3098.check @@ -1,5 +1,7 @@ -b.scala:3: error: match may not be exhaustive. +b.scala:3: warning: match may not be exhaustive. It would fail on the following input: (_ : C) def f = (null: T) match { ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found one error found diff --git a/test/files/neg/t3234.check b/test/files/neg/t3234.check index 477b021e5e..8f0d624ed9 100644 --- a/test/files/neg/t3234.check +++ b/test/files/neg/t3234.check @@ -1,2 +1,6 @@ -error: there were 1 inliner warnings; re-run with -Yinline-warnings for details +t3234.scala:17: warning: At the end of the day, could not inline @inline-marked method foo3 + println(foo(42) + foo2(11) + foo3(2)) + ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found one error found diff --git a/test/files/neg/t3234.flags b/test/files/neg/t3234.flags index c9cefdc4b9..cc3d9fb6f0 100644 --- a/test/files/neg/t3234.flags +++ b/test/files/neg/t3234.flags @@ -1 +1 @@ --Yinline -Xfatal-warnings \ No newline at end of file +-Yinline -Yinline-warnings -Xfatal-warnings diff --git a/test/files/neg/t3683a.check b/test/files/neg/t3683a.check index 3de3ad784e..6386265ebc 100644 --- a/test/files/neg/t3683a.check +++ b/test/files/neg/t3683a.check @@ -1,5 +1,7 @@ -t3683a.scala:14: error: match may not be exhaustive. 
+t3683a.scala:14: warning: match may not be exhaustive. It would fail on the following input: XX() w match { ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found one error found diff --git a/test/files/neg/t4302.check b/test/files/neg/t4302.check index 327425acb0..1a59b79a3b 100644 --- a/test/files/neg/t4302.check +++ b/test/files/neg/t4302.check @@ -1,4 +1,6 @@ -t4302.scala:2: error: abstract type T in type T is unchecked since it is eliminated by erasure +t4302.scala:2: warning: abstract type T in type T is unchecked since it is eliminated by erasure def hasMatch[T](x: AnyRef) = x.isInstanceOf[T] ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found one error found diff --git a/test/files/neg/t4440.check b/test/files/neg/t4440.check index 2861dc3040..10e7188e32 100644 --- a/test/files/neg/t4440.check +++ b/test/files/neg/t4440.check @@ -1,13 +1,15 @@ -t4440.scala:12: error: The outer reference in this type test cannot be checked at run time. +t4440.scala:12: warning: The outer reference in this type test cannot be checked at run time. case _: b.Inner => println("b") ^ -t4440.scala:13: error: The outer reference in this type test cannot be checked at run time. +t4440.scala:13: warning: The outer reference in this type test cannot be checked at run time. case _: a.Inner => println("a") // this is the case we want ^ -t4440.scala:16: error: The outer reference in this type test cannot be checked at run time. +t4440.scala:16: warning: The outer reference in this type test cannot be checked at run time. case _: a.Inner => println("a") ^ -t4440.scala:17: error: The outer reference in this type test cannot be checked at run time. +t4440.scala:17: warning: The outer reference in this type test cannot be checked at run time. case _: b.Inner => println("b") // this is the case we want ^ -four errors found +error: No warnings can be incurred under -Xfatal-warnings. +four warnings found +one error found diff --git a/test/files/neg/t4691_exhaust_extractor.check b/test/files/neg/t4691_exhaust_extractor.check index cd12e56f86..6396944145 100644 --- a/test/files/neg/t4691_exhaust_extractor.check +++ b/test/files/neg/t4691_exhaust_extractor.check @@ -1,13 +1,15 @@ -t4691_exhaust_extractor.scala:17: error: match may not be exhaustive. +t4691_exhaust_extractor.scala:17: warning: match may not be exhaustive. It would fail on the following input: Bar3() def f1(x: Foo) = x match { ^ -t4691_exhaust_extractor.scala:23: error: match may not be exhaustive. +t4691_exhaust_extractor.scala:23: warning: match may not be exhaustive. It would fail on the following input: Bar3() def f2(x: Foo) = x match { ^ -t4691_exhaust_extractor.scala:29: error: match may not be exhaustive. +t4691_exhaust_extractor.scala:29: warning: match may not be exhaustive. It would fail on the following input: Bar3() def f3(x: Foo) = x match { ^ -three errors found +error: No warnings can be incurred under -Xfatal-warnings. +three warnings found +one error found diff --git a/test/files/neg/t4749.check b/test/files/neg/t4749.check index 93ad3935fa..34eed6e433 100644 --- a/test/files/neg/t4749.check +++ b/test/files/neg/t4749.check @@ -1,28 +1,30 @@ -t4749.scala:2: error: Fail1 has a main method with parameter type Array[String], but bippy.Fail1 will not be a runnable program. +t4749.scala:2: warning: Fail1 has a main method with parameter type Array[String], but bippy.Fail1 will not be a runnable program. 
Reason: main method must have exact signature (Array[String])Unit object Fail1 { ^ -t4749.scala:6: error: Fail2 has a main method with parameter type Array[String], but bippy.Fail2 will not be a runnable program. +t4749.scala:6: warning: Fail2 has a main method with parameter type Array[String], but bippy.Fail2 will not be a runnable program. Reason: main methods cannot be generic. object Fail2 { ^ -t4749.scala:13: error: Fail3 has a main method with parameter type Array[String], but bippy.Fail3 will not be a runnable program. +t4749.scala:13: warning: Fail3 has a main method with parameter type Array[String], but bippy.Fail3 will not be a runnable program. Reason: main methods cannot refer to type parameters or abstract types. object Fail3 extends Bippy[Unit] { } ^ -t4749.scala:16: error: Fail4 has a main method with parameter type Array[String], but bippy.Fail4 will not be a runnable program. +t4749.scala:16: warning: Fail4 has a main method with parameter type Array[String], but bippy.Fail4 will not be a runnable program. Reason: companion is a trait, which means no static forwarder can be generated. object Fail4 { ^ -t4749.scala:21: error: Fail5 has a main method with parameter type Array[String], but bippy.Fail5 will not be a runnable program. +t4749.scala:21: warning: Fail5 has a main method with parameter type Array[String], but bippy.Fail5 will not be a runnable program. Reason: companion contains its own main method, which means no static forwarder can be generated. object Fail5 extends Fail5 { } ^ -t4749.scala:26: error: Fail6 has a main method with parameter type Array[String], but bippy.Fail6 will not be a runnable program. +t4749.scala:26: warning: Fail6 has a main method with parameter type Array[String], but bippy.Fail6 will not be a runnable program. Reason: companion contains its own main method (implementation restriction: no main is allowed, regardless of signature), which means no static forwarder can be generated. object Fail6 { ^ -6 errors found +error: No warnings can be incurred under -Xfatal-warnings. +6 warnings found +one error found diff --git a/test/files/neg/t4762.check b/test/files/neg/t4762.check index 5e67f2022a..a0525f6226 100644 --- a/test/files/neg/t4762.check +++ b/test/files/neg/t4762.check @@ -1,7 +1,9 @@ -t4762.scala:15: error: private[this] value x in class B shadows mutable x inherited from class A. Changes to x will not be visible within class B - you may want to give them distinct names. +t4762.scala:15: warning: private[this] value x in class B shadows mutable x inherited from class A. Changes to x will not be visible within class B - you may want to give them distinct names. /* (99,99) */ (this.x, this.y), ^ -t4762.scala:48: error: private[this] value x in class Derived shadows mutable x inherited from class Base. Changes to x will not be visible within class Derived - you may want to give them distinct names. +t4762.scala:48: warning: private[this] value x in class Derived shadows mutable x inherited from class Base. Changes to x will not be visible within class Derived - you may want to give them distinct names. class Derived( x : Int ) extends Base( x ) { override def toString = x.toString } ^ -two errors found +error: No warnings can be incurred under -Xfatal-warnings. 
+two warnings found +one error found diff --git a/test/files/neg/t4851.check b/test/files/neg/t4851.check index 8011350f23..0fd66b9efe 100644 --- a/test/files/neg/t4851.check +++ b/test/files/neg/t4851.check @@ -1,43 +1,45 @@ -S.scala:2: error: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous. +S.scala:2: warning: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous. signature: J(x: Any): J given arguments: after adaptation: new J((): Unit) val x1 = new J ^ -S.scala:3: error: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous. +S.scala:3: warning: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous. signature: J(x: Any): J given arguments: after adaptation: new J((): Unit) val x2 = new J() ^ -S.scala:4: error: Adapting argument list by creating a 5-tuple: this may not be what you want. +S.scala:4: warning: Adapting argument list by creating a 5-tuple: this may not be what you want. signature: J(x: Any): J given arguments: 1, 2, 3, 4, 5 after adaptation: new J((1, 2, 3, 4, 5): (Int, Int, Int, Int, Int)) val x3 = new J(1, 2, 3, 4, 5) ^ -S.scala:6: error: Adapting argument list by creating a 3-tuple: this may not be what you want. +S.scala:6: warning: Adapting argument list by creating a 3-tuple: this may not be what you want. signature: Some.apply[A](x: A): Some[A] given arguments: 1, 2, 3 after adaptation: Some((1, 2, 3): (Int, Int, Int)) val y1 = Some(1, 2, 3) ^ -S.scala:7: error: Adapting argument list by creating a 3-tuple: this may not be what you want. +S.scala:7: warning: Adapting argument list by creating a 3-tuple: this may not be what you want. signature: Some(x: A): Some[A] given arguments: 1, 2, 3 after adaptation: new Some((1, 2, 3): (Int, Int, Int)) val y2 = new Some(1, 2, 3) ^ -S.scala:9: error: Adapting argument list by inserting (): this is unlikely to be what you want. +S.scala:9: warning: Adapting argument list by inserting (): this is unlikely to be what you want. signature: J2[T](x: T): J2[T] given arguments: after adaptation: new J2((): Unit) val z1 = new J2 ^ -S.scala:10: error: Adapting argument list by inserting (): this is unlikely to be what you want. +S.scala:10: warning: Adapting argument list by inserting (): this is unlikely to be what you want. signature: J2[T](x: T): J2[T] given arguments: after adaptation: new J2((): Unit) val z2 = new J2() ^ -7 errors found +error: No warnings can be incurred under -Xfatal-warnings. 
+7 warnings found +one error found diff --git a/test/files/neg/t5426.check b/test/files/neg/t5426.check index d9e192d3f0..98f3ddaaae 100644 --- a/test/files/neg/t5426.check +++ b/test/files/neg/t5426.check @@ -1,13 +1,15 @@ -t5426.scala:2: error: comparing values of types Some[Int] and Int using `==' will always yield false +t5426.scala:2: warning: comparing values of types Some[Int] and Int using `==' will always yield false def f1 = Some(5) == 5 ^ -t5426.scala:3: error: comparing values of types Int and Some[Int] using `==' will always yield false +t5426.scala:3: warning: comparing values of types Int and Some[Int] using `==' will always yield false def f2 = 5 == Some(5) ^ -t5426.scala:8: error: comparing values of types Int and Some[Int] using `==' will always yield false +t5426.scala:8: warning: comparing values of types Int and Some[Int] using `==' will always yield false (x1 == x2) ^ -t5426.scala:9: error: comparing values of types Some[Int] and Int using `==' will always yield false +t5426.scala:9: warning: comparing values of types Some[Int] and Int using `==' will always yield false (x2 == x1) ^ -four errors found +error: No warnings can be incurred under -Xfatal-warnings. +four warnings found +one error found diff --git a/test/files/neg/t5663-badwarneq.check b/test/files/neg/t5663-badwarneq.check index 00c2234e9d..12e93ff373 100644 --- a/test/files/neg/t5663-badwarneq.check +++ b/test/files/neg/t5663-badwarneq.check @@ -1,22 +1,24 @@ -t5663-badwarneq.scala:42: error: comparing case class values of types Some[Int] and None.type using `==' will always yield false +t5663-badwarneq.scala:42: warning: comparing case class values of types Some[Int] and None.type using `==' will always yield false println(new Some(1) == None) // Should complain on type, was: spuriously complains on fresh object ^ -t5663-badwarneq.scala:43: error: comparing case class values of types Some[Int] and Thing using `==' will always yield false +t5663-badwarneq.scala:43: warning: comparing case class values of types Some[Int] and Thing using `==' will always yield false println(Some(1) == new Thing(1)) // Should complain on type, was: spuriously complains on fresh object ^ -t5663-badwarneq.scala:51: error: ThingOne and Thingy are unrelated: they will most likely never compare equal +t5663-badwarneq.scala:51: warning: ThingOne and Thingy are unrelated: they will most likely never compare equal println(t1 == t2) // true, but apparently unrelated, a compromise warning ^ -t5663-badwarneq.scala:52: error: ThingThree and Thingy are unrelated: they will most likely never compare equal +t5663-badwarneq.scala:52: warning: ThingThree and Thingy are unrelated: they will most likely never compare equal println(t4 == t2) // true, complains because ThingThree is final and Thingy not a subclass, stronger claim than unrelated ^ -t5663-badwarneq.scala:55: error: comparing case class values of types ThingTwo and Some[Int] using `==' will always yield false +t5663-badwarneq.scala:55: warning: comparing case class values of types ThingTwo and Some[Int] using `==' will always yield false println(t3 == Some(1)) // false, warn on different cases ^ -t5663-badwarneq.scala:56: error: comparing values of types ThingOne and Cousin using `==' will always yield false +t5663-badwarneq.scala:56: warning: comparing values of types ThingOne and Cousin using `==' will always yield false println(t1 == c) // should warn ^ -t5663-badwarneq.scala:64: error: comparing case class values of types Simple and SimpleSibling.type using `==' will always 
yield false +t5663-badwarneq.scala:64: warning: comparing case class values of types Simple and SimpleSibling.type using `==' will always yield false println(new Simple() == SimpleSibling) // like Some(1) == None, but needn't be final case ^ -7 errors found +error: No warnings can be incurred under -Xfatal-warnings. +7 warnings found +one error found diff --git a/test/files/neg/t5830.check b/test/files/neg/t5830.check index 726fac2a1e..58c3a1be38 100644 --- a/test/files/neg/t5830.check +++ b/test/files/neg/t5830.check @@ -1,7 +1,9 @@ -t5830.scala:6: error: unreachable code +t5830.scala:6: warning: unreachable code case 'a' => println("b") // unreachable ^ -t5830.scala:4: error: could not emit switch for @switch annotated match +t5830.scala:4: warning: could not emit switch for @switch annotated match def unreachable(ch: Char) = (ch: @switch) match { ^ -two errors found +error: No warnings can be incurred under -Xfatal-warnings. +two warnings found +one error found diff --git a/test/files/neg/t6011.check b/test/files/neg/t6011.check index 5b5a861e5b..cb7f189031 100644 --- a/test/files/neg/t6011.check +++ b/test/files/neg/t6011.check @@ -1,10 +1,12 @@ -t6011.scala:4: error: unreachable code +t6011.scala:4: warning: unreachable code case 'a' | 'c' => 1 // unreachable ^ -t6011.scala:10: error: unreachable code +t6011.scala:10: warning: unreachable code case 'b' | 'a' => 1 // unreachable ^ -t6011.scala:8: error: could not emit switch for @switch annotated match +t6011.scala:8: warning: could not emit switch for @switch annotated match def f2(ch: Char): Any = (ch: @annotation.switch) match { ^ -three errors found +error: No warnings can be incurred under -Xfatal-warnings. +three warnings found +one error found diff --git a/test/files/neg/t6048.check b/test/files/neg/t6048.check index 051f41877e..319e3fa620 100644 --- a/test/files/neg/t6048.check +++ b/test/files/neg/t6048.check @@ -1,10 +1,12 @@ -t6048.scala:3: error: unreachable code +t6048.scala:3: warning: unreachable code case _ if false => x // unreachable ^ -t6048.scala:8: error: unreachable code +t6048.scala:8: warning: unreachable code case _ if false => x // unreachable ^ -t6048.scala:14: error: unreachable code +t6048.scala:14: warning: unreachable code case 5 if true => x // unreachable ^ -three errors found +error: No warnings can be incurred under -Xfatal-warnings. 
+three warnings found +one error found diff --git a/test/files/neg/unchecked-suppress.check b/test/files/neg/unchecked-suppress.check index 2e23d21386..038105918e 100644 --- a/test/files/neg/unchecked-suppress.check +++ b/test/files/neg/unchecked-suppress.check @@ -1,10 +1,12 @@ -unchecked-suppress.scala:4: error: non-variable type argument Int in type pattern Set[Int] is unchecked since it is eliminated by erasure +unchecked-suppress.scala:4: warning: non-variable type argument Int in type pattern Set[Int] is unchecked since it is eliminated by erasure case xs: Set[Int] => xs.head // unchecked ^ -unchecked-suppress.scala:5: error: non-variable type argument String in type pattern Map[String @unchecked,String] is unchecked since it is eliminated by erasure +unchecked-suppress.scala:5: warning: non-variable type argument String in type pattern Map[String @unchecked,String] is unchecked since it is eliminated by erasure case xs: Map[String @unchecked, String] => xs.head // one unchecked, one okay ^ -unchecked-suppress.scala:7: error: non-variable type argument Int in type pattern (Int, Int) => Int is unchecked since it is eliminated by erasure +unchecked-suppress.scala:7: warning: non-variable type argument Int in type pattern (Int, Int) => Int is unchecked since it is eliminated by erasure case f: ((Int, Int) => Int) => // unchecked ^ -three errors found +error: No warnings can be incurred under -Xfatal-warnings. +three warnings found +one error found diff --git a/test/files/neg/unchecked.check b/test/files/neg/unchecked.check index 34a11db1a0..19b8c908da 100644 --- a/test/files/neg/unchecked.check +++ b/test/files/neg/unchecked.check @@ -1,19 +1,21 @@ -unchecked.scala:18: error: non-variable type argument String in type pattern Iterable[String] is unchecked since it is eliminated by erasure +unchecked.scala:18: warning: non-variable type argument String in type pattern Iterable[String] is unchecked since it is eliminated by erasure case xs: Iterable[String] => xs.head // unchecked ^ -unchecked.scala:22: error: non-variable type argument Any in type pattern Set[Any] is unchecked since it is eliminated by erasure +unchecked.scala:22: warning: non-variable type argument Any in type pattern Set[Any] is unchecked since it is eliminated by erasure case xs: Set[Any] => xs.head // unchecked ^ -unchecked.scala:26: error: non-variable type argument Any in type pattern Map[Any,Any] is unchecked since it is eliminated by erasure +unchecked.scala:26: warning: non-variable type argument Any in type pattern Map[Any,Any] is unchecked since it is eliminated by erasure case xs: Map[Any, Any] => xs.head // unchecked ^ -unchecked.scala:35: error: non-variable type argument List[Nothing] in type pattern Test.Contra[List[Nothing]] is unchecked since it is eliminated by erasure +unchecked.scala:35: warning: non-variable type argument List[Nothing] in type pattern Test.Contra[List[Nothing]] is unchecked since it is eliminated by erasure case xs: Contra[List[Nothing]] => xs.head // unchecked ^ -unchecked.scala:50: error: non-variable type argument String in type pattern Test.Exp[String] is unchecked since it is eliminated by erasure +unchecked.scala:50: warning: non-variable type argument String in type pattern Test.Exp[String] is unchecked since it is eliminated by erasure case ArrayApply(x: Exp[Array[T]], _, j: Exp[String]) => x // unchecked ^ -unchecked.scala:55: error: non-variable type argument Array[T] in type pattern Test.Exp[Array[T]] is unchecked since it is eliminated by erasure +unchecked.scala:55: 
warning: non-variable type argument Array[T] in type pattern Test.Exp[Array[T]] is unchecked since it is eliminated by erasure case ArrayApply(x: Exp[Array[T]], _, _) => x // unchecked ^ -6 errors found +error: No warnings can be incurred under -Xfatal-warnings. +6 warnings found +one error found diff --git a/test/files/neg/unchecked2.check b/test/files/neg/unchecked2.check index e37865928e..599d11c43a 100644 --- a/test/files/neg/unchecked2.check +++ b/test/files/neg/unchecked2.check @@ -1,19 +1,21 @@ -unchecked2.scala:2: error: non-variable type argument Int in type Option[Int] is unchecked since it is eliminated by erasure +unchecked2.scala:2: warning: non-variable type argument Int in type Option[Int] is unchecked since it is eliminated by erasure Some(123).isInstanceOf[Option[Int]] ^ -unchecked2.scala:3: error: non-variable type argument String in type Option[String] is unchecked since it is eliminated by erasure +unchecked2.scala:3: warning: non-variable type argument String in type Option[String] is unchecked since it is eliminated by erasure Some(123).isInstanceOf[Option[String]] ^ -unchecked2.scala:4: error: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure +unchecked2.scala:4: warning: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure Some(123).isInstanceOf[Option[List[String]]] ^ -unchecked2.scala:5: error: non-variable type argument List[Int => String] in type Option[List[Int => String]] is unchecked since it is eliminated by erasure +unchecked2.scala:5: warning: non-variable type argument List[Int => String] in type Option[List[Int => String]] is unchecked since it is eliminated by erasure Some(123).isInstanceOf[Option[List[Int => String]]] ^ -unchecked2.scala:6: error: non-variable type argument (String, Double) in type Option[(String, Double)] is unchecked since it is eliminated by erasure +unchecked2.scala:6: warning: non-variable type argument (String, Double) in type Option[(String, Double)] is unchecked since it is eliminated by erasure Some(123).isInstanceOf[Option[(String, Double)]] ^ -unchecked2.scala:7: error: non-variable type argument String => Double in type Option[String => Double] is unchecked since it is eliminated by erasure +unchecked2.scala:7: warning: non-variable type argument String => Double in type Option[String => Double] is unchecked since it is eliminated by erasure Some(123).isInstanceOf[Option[String => Double]] ^ -6 errors found +error: No warnings can be incurred under -Xfatal-warnings. +6 warnings found +one error found diff --git a/test/files/neg/unit-returns-value.check b/test/files/neg/unit-returns-value.check index ab458a350b..363946f94d 100644 --- a/test/files/neg/unit-returns-value.check +++ b/test/files/neg/unit-returns-value.check @@ -1,7 +1,9 @@ -unit-returns-value.scala:4: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses +unit-returns-value.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses if (b) return 5 ^ -unit-returns-value.scala:4: error: enclosing method f has result type Unit: return value discarded +unit-returns-value.scala:4: warning: enclosing method f has result type Unit: return value discarded if (b) return 5 ^ -two errors found +error: No warnings can be incurred under -Xfatal-warnings. 
+two warnings found +one error found diff --git a/test/files/neg/virtpatmat_reach_null.check b/test/files/neg/virtpatmat_reach_null.check index 595c8ec889..e0c36c8c5b 100644 --- a/test/files/neg/virtpatmat_reach_null.check +++ b/test/files/neg/virtpatmat_reach_null.check @@ -1,4 +1,6 @@ -virtpatmat_reach_null.scala:13: error: unreachable code +virtpatmat_reach_null.scala:13: warning: unreachable code case _ => // unreachable ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found one error found diff --git a/test/files/neg/virtpatmat_reach_sealed_unsealed.check b/test/files/neg/virtpatmat_reach_sealed_unsealed.check index 10638eff52..064a12bcaa 100644 --- a/test/files/neg/virtpatmat_reach_sealed_unsealed.check +++ b/test/files/neg/virtpatmat_reach_sealed_unsealed.check @@ -1,14 +1,16 @@ -virtpatmat_reach_sealed_unsealed.scala:16: error: match may not be exhaustive. +virtpatmat_reach_sealed_unsealed.scala:16: warning: match may not be exhaustive. It would fail on the following input: false (true: Boolean) match { case true => } // not exhaustive, but reachable ^ -virtpatmat_reach_sealed_unsealed.scala:18: error: unreachable code +virtpatmat_reach_sealed_unsealed.scala:18: warning: unreachable code (true: Boolean) match { case true => case false => case _ => } // exhaustive, last case is unreachable ^ -virtpatmat_reach_sealed_unsealed.scala:19: error: unreachable code +virtpatmat_reach_sealed_unsealed.scala:19: warning: unreachable code (true: Boolean) match { case true => case false => case _: Boolean => } // exhaustive, last case is unreachable ^ -virtpatmat_reach_sealed_unsealed.scala:20: error: unreachable code +virtpatmat_reach_sealed_unsealed.scala:20: warning: unreachable code (true: Boolean) match { case true => case false => case _: Any => } // exhaustive, last case is unreachable ^ -four errors found +error: No warnings can be incurred under -Xfatal-warnings. +four warnings found +one error found diff --git a/test/files/neg/virtpatmat_unreach_select.check b/test/files/neg/virtpatmat_unreach_select.check index 3771971020..4fc78cd412 100644 --- a/test/files/neg/virtpatmat_unreach_select.check +++ b/test/files/neg/virtpatmat_unreach_select.check @@ -1,4 +1,6 @@ -virtpatmat_unreach_select.scala:10: error: unreachable code +virtpatmat_unreach_select.scala:10: warning: unreachable code case WARNING.id => // unreachable ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found one error found diff --git a/test/files/neg/warn-inferred-any.check b/test/files/neg/warn-inferred-any.check index 8c18616b6f..4628033e55 100644 --- a/test/files/neg/warn-inferred-any.check +++ b/test/files/neg/warn-inferred-any.check @@ -1,10 +1,12 @@ -warn-inferred-any.scala:8: error: a type was inferred to be `Any`; this may indicate a programming error. +warn-inferred-any.scala:8: warning: a type was inferred to be `Any`; this may indicate a programming error. { List(1, 2, 3) contains "a" } // only this warns ^ -warn-inferred-any.scala:16: error: a type was inferred to be `AnyVal`; this may indicate a programming error. +warn-inferred-any.scala:16: warning: a type was inferred to be `AnyVal`; this may indicate a programming error. { 1l to 5l contains 5 } ^ -warn-inferred-any.scala:17: error: a type was inferred to be `AnyVal`; this may indicate a programming error. +warn-inferred-any.scala:17: warning: a type was inferred to be `AnyVal`; this may indicate a programming error. 
{ 1l to 5l contains 5d } ^ -three errors found +error: No warnings can be incurred under -Xfatal-warnings. +three warnings found +one error found -- cgit v1.2.3 From 0aa77ffa7cf2a95d9d84d4bc5e635163a84ca931 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 10 Aug 2012 07:28:56 -0700 Subject: Warn about catching non-local returns. Almost every time someone is shooting themself in the foot by catching a non-local return, it is apparent from the structure of the AST that they are doing so. Warn them. --- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 6 ++++++ src/reflect/scala/reflect/internal/TreeInfo.scala | 14 +++++++++----- test/files/neg/nonlocal-warning.check | 9 +++++++++ test/files/neg/nonlocal-warning.flags | 1 + test/files/neg/nonlocal-warning.scala | 7 +++++++ 5 files changed, 32 insertions(+), 5 deletions(-) create mode 100644 test/files/neg/nonlocal-warning.check create mode 100644 test/files/neg/nonlocal-warning.flags create mode 100644 test/files/neg/nonlocal-warning.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 72dd8acad7..fc61997cd5 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -204,6 +204,12 @@ abstract class UnCurry extends InfoTransform val keyDef = ValDef(key, New(ObjectClass.tpe)) val tryCatch = Try(body, pat -> rhs) + body foreach { + case Try(t, catches, _) if catches exists treeInfo.catchesThrowable => + unit.warning(body.pos, "catch block may intercept non-local return from " + meth) + case _ => + } + Block(List(keyDef), tryCatch) } } diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 1b4c1b2877..e92cfba1c5 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -402,11 +402,15 @@ abstract class TreeInfo { def catchesThrowable(cdef: CaseDef) = catchesAllOf(cdef, ThrowableClass.tpe) /** Does this CaseDef catch everything of a certain Type? */ - def catchesAllOf(cdef: CaseDef, threshold: Type) = - isDefaultCase(cdef) || (cdef.guard.isEmpty && (unbind(cdef.pat) match { - case Typed(Ident(nme.WILDCARD), tpt) => (tpt.tpe != null) && (threshold <:< tpt.tpe) - case _ => false - })) + def catchesAllOf(cdef: CaseDef, threshold: Type) = { + def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol + cdef.guard.isEmpty && (unbind(cdef.pat) match { + case Ident(nme.WILDCARD) => true + case i@Ident(name) => unbound(i) + case Typed(_, tpt) => (tpt.tpe != null) && (threshold <:< tpt.tpe) + case _ => false + }) + } /** Is this pattern node a catch-all or type-test pattern? */ def isCatchCase(cdef: CaseDef) = cdef match { diff --git a/test/files/neg/nonlocal-warning.check b/test/files/neg/nonlocal-warning.check new file mode 100644 index 0000000000..efb3efaaa2 --- /dev/null +++ b/test/files/neg/nonlocal-warning.check @@ -0,0 +1,9 @@ +nonlocal-warning.scala:4: warning: This catches all Throwables. If this is really intended, use `case x: Throwable` to clear this warning. + catch { case x => 11 } + ^ +nonlocal-warning.scala:2: warning: catch block may intercept non-local return from method foo + def foo(l: List[Int]): Int = { + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+two warnings found +one error found diff --git a/test/files/neg/nonlocal-warning.flags b/test/files/neg/nonlocal-warning.flags new file mode 100644 index 0000000000..e8fb65d50c --- /dev/null +++ b/test/files/neg/nonlocal-warning.flags @@ -0,0 +1 @@ +-Xfatal-warnings \ No newline at end of file diff --git a/test/files/neg/nonlocal-warning.scala b/test/files/neg/nonlocal-warning.scala new file mode 100644 index 0000000000..cc98bd631a --- /dev/null +++ b/test/files/neg/nonlocal-warning.scala @@ -0,0 +1,7 @@ +class Foo { + def foo(l: List[Int]): Int = { + try l foreach { _ => return 5 } + catch { case x => 11 } + 22 + } +} -- cgit v1.2.3 From db46c71e8830639bc79e6363332a06642fd3d8cc Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 10 Aug 2012 16:19:19 -0700 Subject: Improvement for SI-2251, failure to lub f-bounds. After a great struggle, I realized that the major reason that code like this still doesn't compile: List(Stream(), List()) is that we were poisoning the computed lub in mergePrefixAndArgs by throwing in Any when the max recursion depth was reached. I modified it to return NoType instead, which allowed me to teach lublist to recognize what has happened and fall back to a weaker type, one which does not contain recursive bounds. This enables the lubbing process to complete. The most elusive lub, defeated. Notice also that the refinement members are correctly parameterized on Nothing, rather than on Any as has often been the case. scala> List(Stream(), List()) res0: List[scala.collection.immutable.LinearSeq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[Nothing] with scala.collection.AbstractSeq[Nothing]{def reverse: scala.collection.immutable.LinearSeq[Nothing] with scala.collection.AbstractSeq[Nothing]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[Nothing] with scala.collection.AbstractSeq[Nothing]; def takeRight(n: ... --- src/reflect/scala/reflect/internal/Types.scala | 127 +++++++++++++++---------- test/files/neg/lubs.check | 9 +- test/files/pos/ticket2251.scala | 14 +++ test/files/run/lub-visibility.check | 2 +- test/files/run/t2251.check | 1 + test/files/run/t2251.scala | 19 ++++ test/files/run/t2251b.check | 11 +++ test/files/run/t2251b.scala | 48 ++++++++++ 8 files changed, 178 insertions(+), 53 deletions(-) create mode 100644 test/files/run/t2251.check create mode 100644 test/files/run/t2251.scala create mode 100644 test/files/run/t2251b.check create mode 100644 test/files/run/t2251b.scala (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 7df34a14e2..f519a2d4a6 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -6219,25 +6219,26 @@ trait Types extends api.Types { self: SymbolTable => * @See baseTypeSeq for a definition of sorted and upwards closed. */ private def lubList(ts: List[Type], depth: Int): List[Type] = { - // Matching the type params of one of the initial types means dummies. 
- val initialTypeParams = ts map (_.typeParams) - def isHotForTs(xs: List[Type]) = initialTypeParams contains xs.map(_.typeSymbol) - + var lubListDepth = 0 + // This catches some recursive situations which would otherwise + // befuddle us, e.g. pos/hklub0.scala + def isHotForTs(xs: List[Type]) = ts exists (_.typeParams == xs.map(_.typeSymbol)) def elimHigherOrderTypeParam(tp: Type) = tp match { - case TypeRef(pre, sym, args) if args.nonEmpty && isHotForTs(args) => tp.typeConstructor - case _ => tp + case TypeRef(_, _, args) if args.nonEmpty && isHotForTs(args) => + logResult("Retracting dummies from " + tp + " in lublist")(tp.typeConstructor) + case _ => tp } - var lubListDepth = 0 - def loop(tsBts: List[List[Type]]): List[Type] = { + // pretypes is a tail-recursion-preserving accumulator. + @annotation.tailrec def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = { lubListDepth += 1 - if (tsBts.isEmpty || tsBts.exists(_.isEmpty)) Nil - else if (tsBts.tail.isEmpty) tsBts.head + if (tsBts.isEmpty || tsBts.exists(_.isEmpty)) pretypes.reverse + else if (tsBts.tail.isEmpty) pretypes.reverse ++ tsBts.head else { // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts. // Invariant: all symbols "under" (closer to the first row) the frontier // are smaller (according to _.isLess) than the ones "on and beyond" the frontier - val ts0 = tsBts map (_.head) + val ts0 = tsBts map (_.head) // Is the frontier made up of types with the same symbol? val isUniformFrontier = (ts0: @unchecked) match { @@ -6250,23 +6251,23 @@ trait Types extends api.Types { self: SymbolTable => // merging, strip targs that refer to bound tparams (when we're computing the lub of type // constructors.) Also filter out all types that are a subtype of some other type. 
if (isUniformFrontier) { - if (settings.debug.value || printLubs) { - val fbounds = findRecursiveBounds(ts0) - if (fbounds.nonEmpty) { - println("Encountered " + fbounds.size + " recursive bounds while lubbing " + ts0.size + " types.") - for ((p0, p1) <- fbounds) { - val desc = if (p0 == p1) "its own bounds" else "the bounds of " + p1 - - println(" " + p0.fullLocationString + " appears in " + desc) - println(" " + p1 + " " + p1.info.bounds) + val fbounds = findRecursiveBounds(ts0) map (_._2) + val tcLubList = typeConstructorLubList(ts0) + def isRecursive(tp: Type) = tp.typeSymbol.typeParams exists fbounds.contains + + val ts1 = ts0 map { t => + if (isRecursive(t)) { + tcLubList map (t baseType _.typeSymbol) find (t => !isRecursive(t)) match { + case Some(tp) => logResult(s"Breaking recursion in lublist, substituting weaker type.\n Was: $t\n Now")(tp) + case _ => t } - println("") } + else t } val tails = tsBts map (_.tail) - mergePrefixAndArgs(elimSub(ts0 map elimHigherOrderTypeParam, depth), 1, depth) match { - case Some(tp) => tp :: loop(tails) - case _ => loop(tails) + mergePrefixAndArgs(elimSub(ts1, depth) map elimHigherOrderTypeParam, 1, depth) match { + case Some(tp) => loop(tp :: pretypes, tails) + case _ => loop(pretypes, tails) } } else { @@ -6283,7 +6284,7 @@ trait Types extends api.Types { self: SymbolTable => printLubMatrix((ts zip tsBts).toMap, lubListDepth) } - loop(newtps) + loop(pretypes, newtps) } } } @@ -6292,7 +6293,7 @@ trait Types extends api.Types { self: SymbolTable => if (printLubs) printLubMatrix((ts zip initialBTSes).toMap, depth) - loop(initialBTSes) + loop(Nil, initialBTSes) } /** The minimal symbol (wrt Symbol.isLess) of a list of types */ @@ -6427,6 +6428,23 @@ trait Types extends api.Types { self: SymbolTable => private val lubResults = new mutable.HashMap[(Int, List[Type]), Type] private val glbResults = new mutable.HashMap[(Int, List[Type]), Type] + /** Given a list of types, finds all the base classes they have in + * common, then returns a list of type constructors derived directly + * from the symbols (so any more specific type information is ignored.) + * The list is filtered such that every type constructor in the list + * expects the same number of type arguments, which is chosen based + * on the deepest class among the common baseclasses. + */ + def typeConstructorLubList(ts: List[Type]): List[Type] = { + val bcs = ts.flatMap(_.baseClasses).distinct sortWith (_ isLess _) + val tcons = bcs filter (clazz => ts forall (_.typeSymbol isSubClass clazz)) + + tcons map (_.typeConstructor) match { + case Nil => Nil + case t :: ts => t :: ts.filter(_.typeParams.size == t.typeParams.size) + } + } + def lub(ts: List[Type]): Type = ts match { case List() => NothingClass.tpe case List(t) => t @@ -6434,8 +6452,20 @@ trait Types extends api.Types { self: SymbolTable => Statistics.incCounter(lubCount) val start = Statistics.pushTimer(typeOpsStack, lubNanos) try { - lub(ts, lubDepth(ts)) - } finally { + val res = lub(ts, lubDepth(ts)) + // If the number of unapplied type parameters in all incoming + // types is consistent, and the lub does not match that, return + // the type constructor of the calculated lub instead. This + // is because lubbing type constructors tends to result in types + // which have been applied to dummies or Nothing. 
+ ts.map(_.typeParams.size).distinct match { + case x :: Nil if res.typeParams.size != x => + logResult(s"Stripping type args from lub because $res is not consistent with $ts")(res.typeConstructor) + case _ => + res + } + } + finally { lubResults.clear() glbResults.clear() Statistics.popTimer(typeOpsStack, start) @@ -6469,13 +6499,13 @@ trait Types extends api.Types { self: SymbolTable => } } def lub1(ts0: List[Type]): Type = { - val (ts, tparams) = stripExistentialsAndTypeVars(ts0) + val (ts, tparams) = stripExistentialsAndTypeVars(ts0) val lubBaseTypes: List[Type] = lubList(ts, depth) - val lubParents = spanningTypes(lubBaseTypes) - val lubOwner = commonOwner(ts) - val lubBase = intersectionType(lubParents, lubOwner) + val lubParents = spanningTypes(lubBaseTypes) + val lubOwner = commonOwner(ts) + val lubBase = intersectionType(lubParents, lubOwner) val lubType = - if (phase.erasedTypes || depth == 0) lubBase + if (phase.erasedTypes || depth == 0 ) lubBase else { val lubRefined = refinedType(lubParents, lubOwner) val lubThisType = lubRefined.typeSymbol.thisType @@ -6492,6 +6522,7 @@ trait Types extends api.Types { self: SymbolTable => val syms = narrowts map (t => t.nonPrivateMember(proto.name).suchThat(sym => sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t))) + if (syms contains NoSymbol) NoSymbol else { val symtypes = @@ -6518,10 +6549,8 @@ trait Types extends api.Types { self: SymbolTable => // add a refinement symbol for all non-class members of lubBase // which are refined by every type in ts. for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) { - try { - val lsym = lubsym(sym) - if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lsym, depth) - } catch { + try lubsym(sym) andAlso (addMember(lubThisType, lubRefined, _, depth)) + catch { case ex: NoCommonType => } } @@ -6765,18 +6794,16 @@ trait Types extends api.Types { self: SymbolTable => debuglog("transposed irregular matrix!?" +(tps, argss)) None case Some(argsst) => - val args = map2(sym.typeParams, argsst) { (tparam, as) => - if (depth == 0) { - if (tparam.variance == variance) { - // Take the intersection of the upper bounds of the type parameters - // rather than falling all the way back to "Any", otherwise we end up not - // conforming to bounds. - val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass) - if (bounds0.isEmpty) AnyClass.tpe - else intersectionType(bounds0 map (b => b.asSeenFrom(tps.head, sym))) - } - else if (tparam.variance == -variance) NothingClass.tpe - else NoType + val args = map2(sym.typeParams, argsst) { (tparam, as0) => + val as = as0.distinct + if (as.size == 1) as.head + else if (depth == 0) { + log("Giving up merging args: can't unify %s under %s".format(as.mkString(", "), tparam.fullLocationString)) + // Don't return "Any" (or "Nothing") when we have to give up due to + // recursion depth. Return NoType, which prevents us from poisoning + // lublist's results. It can recognize the recursion and deal with it, but + // only if we aren't returning invalid types. + NoType } else { if (tparam.variance == variance) lub(as, decr(depth)) @@ -6785,7 +6812,7 @@ trait Types extends api.Types { self: SymbolTable => val l = lub(as, decr(depth)) val g = glb(as, decr(depth)) if (l <:< g) l - else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we + else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. 
For the moment we // just err on the conservative side, i.e. with a bound that is too high. // if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251 diff --git a/test/files/neg/lubs.check b/test/files/neg/lubs.check index 77ab20102c..affbd4983c 100644 --- a/test/files/neg/lubs.check +++ b/test/files/neg/lubs.check @@ -1,5 +1,10 @@ +lubs.scala:10: error: type mismatch; + found : test1.A[test1.A[Object]] + required: test1.A[test1.A[test1.A[Any]]] + val x3: A[A[A[Any]]] = f + ^ lubs.scala:11: error: type mismatch; - found : test1.A[test1.A[test1.A[Any]]] + found : test1.A[test1.A[Object]] required: test1.A[test1.A[test1.A[test1.A[Any]]]] val x4: A[A[A[A[Any]]]] = f ^ @@ -13,4 +18,4 @@ lubs.scala:25: error: type mismatch; required: test2.A{type T >: Null <: test2.A{type T >: Null <: test2.A{type T >: Null <: test2.A}}} val x4: A { type T >: Null <: A { type T >: Null <: A { type T >: Null <: A } } } = f ^ -three errors found +four errors found diff --git a/test/files/pos/ticket2251.scala b/test/files/pos/ticket2251.scala index b3afee4ea9..c220e85350 100644 --- a/test/files/pos/ticket2251.scala +++ b/test/files/pos/ticket2251.scala @@ -22,4 +22,18 @@ lub of List(D, C) is B[_2] forSome { type _2 >: D with C{} <: B[_1] forSome { ty // should be: B[X] forSome {type X <: B[X]} -- can this be done automatically? for now, just detect f-bounded polymorphism and fall back to more coarse approximation val data: List[A] = List(new C, new D) + + val data2 = List(new C, new D) + + val data3: List[B[X] forSome { type X <: B[_ <: A] }] = List(new C, new D) + + // Not yet -- + // val data4: List[B[X] forSome { type X <: B[X] }] = List(new C, new D) + // :7: error: type mismatch; + // found : List[B[_ >: D with C <: B[_ >: D with C <: A]]] + // required: List[B[X] forSome { type X <: B[X] }] + // val data4: List[B[X] forSome { type X <: B[X] }] = List(new C, new D) + + // works + val data5 = List[B[X] forSome { type X <: B[X] }](new C, new D) } diff --git a/test/files/run/lub-visibility.check b/test/files/run/lub-visibility.check index 3461d1bf6b..f3a6bef215 100644 --- a/test/files/run/lub-visibility.check +++ b/test/files/run/lub-visibility.check @@ -8,7 +8,7 @@ scala> // should infer List[scala.collection.immutable.Seq[Nothing]] scala> // but reverted that for SI-5534. scala> val x = List(List(), Vector()) -x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq{def dropRight(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def takeRight(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def drop(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def take(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]}]; def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.Ab... 
+x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def dropRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]; def takeRight(n: Int): scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]; def drop(n: Int): scala.collecti... scala> scala> diff --git a/test/files/run/t2251.check b/test/files/run/t2251.check new file mode 100644 index 0000000000..55ad2a5857 --- /dev/null +++ b/test/files/run/t2251.check @@ -0,0 +1 @@ +Set(List(List(C), Stream(D, ?))) diff --git a/test/files/run/t2251.scala b/test/files/run/t2251.scala new file mode 100644 index 0000000000..00c5619b49 --- /dev/null +++ b/test/files/run/t2251.scala @@ -0,0 +1,19 @@ +class A +trait B[T <: B[T]] extends A +class C extends B[C] { override def toString = "C" } +class D extends B[D] { override def toString = "D" } + +class E { + val ys = List(List(new C), Stream(new D)) +} + +object Test { + def trav = List(List(), Stream()) + + def main(args: Array[String]): Unit = { + val f = (new E).ys _ + var xs: Set[List[_ <: Seq[B[_]]]] = Set() + xs += f() + println(xs) + } +} diff --git a/test/files/run/t2251b.check b/test/files/run/t2251b.check new file mode 100644 index 0000000000..42b0be457a --- /dev/null +++ b/test/files/run/t2251b.check @@ -0,0 +1,11 @@ +TypeTag[List[scala.collection.immutable.LinearSeq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def dropRight(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): 
scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def takeRight(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def drop(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def take(n: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.LinearSeq with scala.collection.AbstractSeq]; def reverse: scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def dropRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def 
take(n: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A]}; def splitAt(n: Int): (scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A], scala.collection.immutable.LinearSeq[A] with scala.collection.AbstractSeq[A])}]] +TypeTag[List[scala.collection.immutable.Iterable[B[_ >: F with E with D with C <: B[_ >: F with E with D with C <: A]]] with F with Int => Any]] +TypeTag[List[scala.collection.immutable.Seq[B[_ >: D with C <: B[_ >: D with C <: A]]] with scala.collection.AbstractSeq[B[_ >: D with C <: B[_ >: D with C <: A]]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def takeRight(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def drop(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def take(n: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): 
scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def slice(from: Int,until: Int): scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}; def splitAt(n: Int): (scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A], scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]); def init: scala.collection.immutable.Seq[B[_ >: D with C <: A]] with scala.collection.AbstractSeq[B[_ >: D with C <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq]; def dropRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def takeRight(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def drop(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def take(n: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def slice(from: Int,until: Int): scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]; def init: scala.collection.immutable.Seq[A] with scala.collection.AbstractSeq[A]}}]] +TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]] +TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]] +TypeTag[List[scala.collection.Set[_ >: G with F <: B[_ >: G with F <: B[_ >: G with F <: A]]]]] +TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]] +TypeTag[List[scala.collection.Map[_ >: F with C <: B[_ >: F with C <: B[_ >: F with C <: A]], B[_ >: G with D <: B[_ >: G with D <: A]]]]] +TypeTag[List[scala.collection.AbstractSeq[B[_ >: G with F <: B[_ >: G with F <: A]]] with scala.collection.LinearSeq[B[_ >: G with F <: B[_ >: G with F <: A]]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): 
scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}; def drop(n: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}; def take(n: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}; def slice(from: Int,until: Int): scala.collection.AbstractSeq[B[_ >: G with F <: A]] with scala.collection.LinearSeq[B[_ >: G with F <: A]]{def companion: scala.collection.generic.GenericCompanion[scala.collection.AbstractSeq with scala.collection.LinearSeq]; def dropRight(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def drop(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def take(n: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]; def slice(from: Int,until: Int): scala.collection.AbstractSeq[A] with scala.collection.LinearSeq[A]}}]] +TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]] +TypeTag[List[Seq[B[_ >: G with F <: B[_ >: G with F <: A]]]]] diff --git a/test/files/run/t2251b.scala b/test/files/run/t2251b.scala new file mode 100644 index 0000000000..b67b3aec1e --- /dev/null +++ b/test/files/run/t2251b.scala @@ -0,0 +1,48 @@ +class A +trait B[T <: B[T]] extends A +class B1[T <: B1[T]] extends B[T] +class C extends B[C] { override def toString = "C" } +class D extends B[D] { override def toString = "D" } +class E extends B[E] { override def toString = "E" } +class F extends B[F] { override def toString = "F" } +class G extends B1[G] { override def toString = "G" } + +object Test { + import scala.collection.{ mutable, immutable } + import scala.collection.immutable.{ Vector } + import scala.reflect.runtime.universe._ + def what[T: TypeTag](x: T) = println(typeTag[T]) + + def main(args: Array[String]): Unit = { + what(List(List(new C), Stream(new D))) + what(List(List(new C), Stream(new D), Vector(new E), Set(new F))) + what(List(immutable.Vector(new C), Stream(new D))) + what(List(collection.Set(new F), mutable.Set(new G))) + what(List(collection.Set(new F), immutable.Set(new G))) + what(List(mutable.Set(new F), immutable.Set(new G))) + what(List(mutable.Seq(new F), immutable.Seq(new G))) + what(List(mutable.Map(new C -> new D), immutable.Map(new F -> new G))) + what(List(mutable.MutableList(new F), immutable.List(new G))) + what(List(mutable.Seq(new F), collection.Seq(new G))) + what(List(mutable.LinearSeq(new F), 
collection.IndexedSeq(new G))) + } +} + + +// class D extends B[D] { override def toString = "D" } + + +// class E { +// val ys = List(List(new C), Stream(new D)) +// } + +// object Test { +// def trav = List(List(), Stream()) + +// def main(args: Array[String]): Unit = { +// val f = (new E).ys _ +// var xs: Set[List[_ <: Seq[B[_]]]] = Set() +// xs += f() +// println(xs) +// } +// } -- cgit v1.2.3 From 5be6e644dccde9298413ede3c8d20528fba12643 Mon Sep 17 00:00:00 2001 From: Ruediger Klaehn Date: Sun, 12 Aug 2012 22:55:07 +0200 Subject: Improve efficiency of updated Added utility method to create a HashTrieSet with two leaf HashSets with different hash Used said utility method instead of creating a temorary HashTrieSet with an empty elems array Added assertions to HashTrieSet to validate tree --- .../scala/collection/immutable/HashSet.scala | 39 ++++++++++++++++------ 1 file changed, 29 insertions(+), 10 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index c60fdc3bf1..43e776d5ae 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -102,6 +102,30 @@ object HashSet extends ImmutableSetFactory[HashSet] { private object EmptyHashSet extends HashSet[Any] { } + // utility method to create a HashTrieSet from two leaf HashSets (HashSet1 or HashSetCollision1) with non-colliding hash code) + private def makeHashTrieSet[A](hash0:Int, elem0:HashSet[A], hash1:Int, elem1:HashSet[A], level:Int) : HashTrieSet[A] = { + val index0 = (hash0 >>> level) & 0x1f + val index1 = (hash1 >>> level) & 0x1f + if(index0 != index1) { + val bitmap = (1 << index0) | (1 << index1) + val elems = new Array[HashSet[A]](2) + if(index0 < index1) { + elems(0) = elem0 + elems(1) = elem1 + } else { + elems(0) = elem1 + elems(1) = elem0 + } + new HashTrieSet[A](bitmap, elems, elem0.size + elem1.size) + } else { + val elems = new Array[HashSet[A]](1) + val bitmap = (1 << index0) + val child = makeHashTrieSet(hash0, elem0, hash1, elem1, level + 5) + elems(0) = child + new HashTrieSet[A](bitmap, elems, child.size) + } + } + // TODO: add HashSet2, HashSet3, ... 
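// Aside, not part of this diff: a worked example (hypothetical hash values) of the
// slot computation that makeHashTrieSet above relies on. Two distinct hashes must
// differ in some 5-bit slice, so the recursion bottoms out at the first level where
// the slices diverge and a two-element HashTrieSet node is built there.
//   def slot(hash: Int, level: Int) = (hash >>> level) & 0x1f
//   slot(0x21, 0) == slot(0x01, 0)   // true: both hashes land in slot 1 at level 0
//   slot(0x21, 5) == slot(0x01, 5)   // false: slots 1 and 0, so level 5 gets a
//                                    // two-element HashTrieSet (bitmap 0x3)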
class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends HashSet[A] { @@ -114,9 +138,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { if (hash == this.hash && key == this.key) this else { if (hash != this.hash) { - //new HashTrieSet[A](level+5, this, new HashSet1(key, hash)) - val m = new HashTrieSet[A](0,new Array[HashSet[A]](0),0) // TODO: could save array alloc - m.updated0(this.key, this.hash, level).updated0(key, hash, level) + makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level) } else { // 32-bit hash collision (rare, but not impossible) new HashSetCollision1(hash, ListSet.empty + this.key + key) @@ -140,13 +162,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { override def updated0(key: A, hash: Int, level: Int): HashSet[A] = if (hash == this.hash) new HashSetCollision1(hash, ks + key) - else { - var m: HashSet[A] = new HashTrieSet[A](0,new Array[HashSet[A]](0),0) - // might be able to save some ops here, but it doesn't seem to be worth it - for (k <- ks) - m = m.updated0(k, this.hash, level) - m.updated0(key, hash, level) - } + else makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level) override def removed0(key: A, hash: Int, level: Int): HashSet[A] = if (hash == this.hash) { @@ -179,6 +195,9 @@ object HashSet extends ImmutableSetFactory[HashSet] { class HashTrieSet[A](private val bitmap: Int, private[collection] val elems: Array[HashSet[A]], private val size0: Int) extends HashSet[A] { + assert(Integer.bitCount(bitmap) == elems.length) + // assertion has to remain disabled until SI-6197 is solved + // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieSet[_]])) override def size = size0 -- cgit v1.2.3 From 0308ae88026a4a8d427d1a9156c31c0ff8dd2561 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 15 Aug 2012 15:50:03 +0200 Subject: Fixes SI-6150. Removes the `VectorReusableCBF` and pattern matching on it in optimized `Vector` methods. Instead, we now have a new `ReusableCBF` instance in `IndexedSeq` and check for equality when trying to optimize `:+`, `+:` and `updated`. This overridden `ReusableCBF` is used by `IndexedSeq`, `immutable.IndexedSeq` and `immutable.Vector`. The net effect is that calling `:+` and similar methods on a `Vector` instance with a `CBF` that came from `IndexedSeq` or somewhere lower in the hierarchy will always create a `Vector` using the optimized method. 
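For illustration, a minimal usage sketch of the optimization described above, modeled on the new test t6150.scala added later in this patch (the object name and the literal values are assumptions chosen for the example, not part of the change):

    object ReusableCBFDemo extends App {
      // An immutable.Vector viewed through the general collection.IndexedSeq interface.
      val iv: collection.IndexedSeq[Int] = Vector(1, 2, 3)
      // The CanBuildFrom resolved implicitly here is IndexedSeq's ReusableCBF, so the
      // overridden `:+` in Vector recognizes it and takes the optimized appendBack path.
      val appended = iv :+ 4
      assert(appended == Vector(1, 2, 3, 4))
      assert(appended.isInstanceOf[Vector[_]]) // still a Vector, no generic rebuild
    }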
--- src/library/scala/collection/IndexedSeq.scala | 4 ++- .../collection/generic/GenTraversableFactory.scala | 2 +- .../scala/collection/immutable/IndexedSeq.scala | 2 +- .../scala/collection/immutable/Vector.scala | 29 +++++++----------- test/files/run/t6150.scala | 34 ++++++++++++++++++++++ 5 files changed, 49 insertions(+), 22 deletions(-) create mode 100644 test/files/run/t6150.scala (limited to 'src') diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala index 56dd0bffff..4d1758fdd3 100644 --- a/src/library/scala/collection/IndexedSeq.scala +++ b/src/library/scala/collection/IndexedSeq.scala @@ -29,7 +29,9 @@ trait IndexedSeq[+A] extends Seq[A] * @define Coll `IndexedSeq` */ object IndexedSeq extends SeqFactory[IndexedSeq] { - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] + override lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new ReusableCBF + + implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] } diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala index 2aaf93de05..3d5306621a 100644 --- a/src/library/scala/collection/generic/GenTraversableFactory.scala +++ b/src/library/scala/collection/generic/GenTraversableFactory.scala @@ -40,7 +40,7 @@ abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTrav // A default implementation of GenericCanBuildFrom which can be cast // to whatever is desired. - private class ReusableCBF extends GenericCanBuildFrom[Nothing] { + private[collection] class ReusableCBF extends GenericCanBuildFrom[Nothing] { override def apply() = newBuilder[Nothing] } // Working around SI-4789 by using a lazy val instead of an object. 
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala index b37edc4254..3abac932e6 100644 --- a/src/library/scala/collection/immutable/IndexedSeq.scala +++ b/src/library/scala/collection/immutable/IndexedSeq.scala @@ -36,6 +36,6 @@ object IndexedSeq extends SeqFactory[IndexedSeq] { def length = buf.length def apply(idx: Int) = buf.apply(idx) } - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] + implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A] } diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index 4dfe147a65..d0098e8420 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -18,14 +18,8 @@ import scala.collection.parallel.immutable.ParVector /** Companion object to the Vector class */ object Vector extends SeqFactory[Vector] { - private[collection] class VectorReusableCBF extends GenericCanBuildFrom[Nothing] { - override def apply() = newBuilder[Nothing] - } - - private val VectorReusableCBF: GenericCanBuildFrom[Nothing] = new VectorReusableCBF - @inline implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] = - VectorReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, Vector[A]]] + IndexedSeq.ReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, Vector[A]]] def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A] private[immutable] val NIL = new Vector[Nothing](0, 0, 0) @inline override def empty[A]: Vector[A] = NIL @@ -146,20 +140,17 @@ override def companion: GenericCompanion[Vector] = Vector // SeqLike api - @inline override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = bf match { - case _: Vector.VectorReusableCBF => updateAt(index, elem).asInstanceOf[That] // just ignore bf - case _ => super.updated(index, elem)(bf) - } + @inline override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = + if (bf eq IndexedSeq.ReusableCBF) updateAt(index, elem).asInstanceOf[That] // just ignore bf + else super.updated(index, elem)(bf) - @inline override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = bf match { - case _: Vector.VectorReusableCBF => appendFront(elem).asInstanceOf[That] // just ignore bf - case _ => super.+:(elem)(bf) - } + @inline override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = + if (bf eq IndexedSeq.ReusableCBF) appendFront(elem).asInstanceOf[That] // just ignore bf + else super.+:(elem)(bf) - @inline override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = bf match { - case _: Vector.VectorReusableCBF => appendBack(elem).asInstanceOf[That] // just ignore bf - case _ => super.:+(elem)(bf) - } + @inline override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = + if (bf eq IndexedSeq.ReusableCBF) appendBack(elem).asInstanceOf[That] // just ignore bf + else super.:+(elem)(bf) override def take(n: Int): Vector[A] = { if (n <= 0) diff --git a/test/files/run/t6150.scala b/test/files/run/t6150.scala new file mode 100644 index 0000000000..1b3de0c50a --- /dev/null +++ b/test/files/run/t6150.scala @@ -0,0 
+1,34 @@ + + + +import collection._ + + + +object Test extends App { + + val cbf1 = implicitly[generic.CanBuildFrom[Vector[Int], Int, IndexedSeq[Int]]] + val cbf2 = implicitly[generic.CanBuildFrom[immutable.IndexedSeq[Int], Int, IndexedSeq[Int]]] + val cbf3 = implicitly[generic.CanBuildFrom[IndexedSeq[Int], Int, IndexedSeq[Int]]] + + def check[C](v: C) = { + assert(v == Vector(1, 2, 3, 4)) + assert(v.isInstanceOf[Vector[_]]) + } + + val v = immutable.Vector(1, 2, 3) + + check(v.:+(4)(cbf1)) + check(v.:+(4)(cbf2)) + check(v.:+(4)(cbf3)) + + val iiv: immutable.IndexedSeq[Int] = immutable.Vector(1, 2, 3) + + check(iiv.:+(4)(cbf2)) + check(iiv.:+(4)(cbf3)) + + val iv: IndexedSeq[Int] = immutable.Vector(1, 2, 3) + + check(iv.:+(4)(cbf3)) + +} -- cgit v1.2.3 From 0fc0038e33b629efcaa0aa314b0e69419c116777 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 16 Aug 2012 16:46:15 -0700 Subject: Modified SI-6150 fix to use intended ReusableCBF. I also realized it didn't have to be lazy, and made it so. --- src/library/scala/collection/IndexedSeq.scala | 9 ++-- .../collection/generic/GenTraversableFactory.scala | 4 +- .../scala/collection/immutable/IndexedSeq.scala | 3 +- .../scala/collection/immutable/Vector.scala | 6 +-- test/files/run/t6150.scala | 48 +++++++++++----------- 5 files changed, 37 insertions(+), 33 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala index 4d1758fdd3..39be1f7a9e 100644 --- a/src/library/scala/collection/IndexedSeq.scala +++ b/src/library/scala/collection/IndexedSeq.scala @@ -29,9 +29,12 @@ trait IndexedSeq[+A] extends Seq[A] * @define Coll `IndexedSeq` */ object IndexedSeq extends SeqFactory[IndexedSeq] { - override lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new ReusableCBF - - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] + override val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] { + override def apply() = newBuilder[Nothing] + } + implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = + ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] + def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] } diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala index 3d5306621a..076f555506 100644 --- a/src/library/scala/collection/generic/GenTraversableFactory.scala +++ b/src/library/scala/collection/generic/GenTraversableFactory.scala @@ -40,11 +40,9 @@ abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTrav // A default implementation of GenericCanBuildFrom which can be cast // to whatever is desired. - private[collection] class ReusableCBF extends GenericCanBuildFrom[Nothing] { + val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] { override def apply() = newBuilder[Nothing] } - // Working around SI-4789 by using a lazy val instead of an object. 
- lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new ReusableCBF /** A generic implementation of the `CanBuildFrom` trait, which forwards * all calls to `apply(from)` to the `genericBuilder` method of diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala index 3abac932e6..68f642b558 100644 --- a/src/library/scala/collection/immutable/IndexedSeq.scala +++ b/src/library/scala/collection/immutable/IndexedSeq.scala @@ -36,6 +36,7 @@ object IndexedSeq extends SeqFactory[IndexedSeq] { def length = buf.length def apply(idx: Int) = buf.apply(idx) } - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] + implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = + scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A] } diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index d0098e8420..f912285143 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -19,7 +19,7 @@ import scala.collection.parallel.immutable.ParVector */ object Vector extends SeqFactory[Vector] { @inline implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] = - IndexedSeq.ReusableCBF.asInstanceOf[CanBuildFrom[Coll, A, Vector[A]]] + scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A] private[immutable] val NIL = new Vector[Nothing](0, 0, 0) @inline override def empty[A]: Vector[A] = NIL @@ -144,11 +144,11 @@ override def companion: GenericCompanion[Vector] = Vector if (bf eq IndexedSeq.ReusableCBF) updateAt(index, elem).asInstanceOf[That] // just ignore bf else super.updated(index, elem)(bf) - @inline override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = + @inline override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = if (bf eq IndexedSeq.ReusableCBF) appendFront(elem).asInstanceOf[That] // just ignore bf else super.+:(elem)(bf) - @inline override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = + @inline override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = if (bf eq IndexedSeq.ReusableCBF) appendBack(elem).asInstanceOf[That] // just ignore bf else super.:+(elem)(bf) diff --git a/test/files/run/t6150.scala b/test/files/run/t6150.scala index 1b3de0c50a..f3e83e1549 100644 --- a/test/files/run/t6150.scala +++ b/test/files/run/t6150.scala @@ -1,34 +1,36 @@ +object Test { + import collection.{ immutable, mutable, generic } + def TheOneTrueCBF = collection.IndexedSeq.ReusableCBF + val cbf1 = implicitly[generic.CanBuildFrom[immutable.Vector[Int], Int, collection.IndexedSeq[Int]]] + val cbf2 = implicitly[generic.CanBuildFrom[immutable.IndexedSeq[Int], Int, collection.IndexedSeq[Int]]] + val cbf3 = implicitly[generic.CanBuildFrom[collection.IndexedSeq[Int], Int, collection.IndexedSeq[Int]]] + val cbf4 = implicitly[generic.CanBuildFrom[immutable.Vector[Int], Int, immutable.IndexedSeq[Int]]] + val cbf5 = implicitly[generic.CanBuildFrom[immutable.Vector[Int], Int, immutable.Vector[Int]]] + val cbf6 = implicitly[generic.CanBuildFrom[immutable.IndexedSeq[Int], Int, immutable.IndexedSeq[Int]]] -import 
collection._ - - - -object Test extends App { - - val cbf1 = implicitly[generic.CanBuildFrom[Vector[Int], Int, IndexedSeq[Int]]] - val cbf2 = implicitly[generic.CanBuildFrom[immutable.IndexedSeq[Int], Int, IndexedSeq[Int]]] - val cbf3 = implicitly[generic.CanBuildFrom[IndexedSeq[Int], Int, IndexedSeq[Int]]] - def check[C](v: C) = { assert(v == Vector(1, 2, 3, 4)) assert(v.isInstanceOf[Vector[_]]) } - + def checkRealMccoy(x: AnyRef) = { + assert(x eq TheOneTrueCBF, cbf1) + } + val v = immutable.Vector(1, 2, 3) - - check(v.:+(4)(cbf1)) - check(v.:+(4)(cbf2)) - check(v.:+(4)(cbf3)) - val iiv: immutable.IndexedSeq[Int] = immutable.Vector(1, 2, 3) - - check(iiv.:+(4)(cbf2)) - check(iiv.:+(4)(cbf3)) - val iv: IndexedSeq[Int] = immutable.Vector(1, 2, 3) - - check(iv.:+(4)(cbf3)) - + + def main(args: Array[String]): Unit = { + List(cbf1, cbf2, cbf3, cbf4, cbf5, cbf6) foreach checkRealMccoy + check(v.:+(4)(cbf1)) + check(v.:+(4)(cbf2)) + check(v.:+(4)(cbf3)) + + check(iiv.:+(4)(cbf2)) + check(iiv.:+(4)(cbf3)) + + check(iv.:+(4)(cbf3)) + } } -- cgit v1.2.3 From 823239f347dd516214a64d755f0d09e9e0321d9c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 17 Aug 2012 17:58:20 -0700 Subject: Modified SI-6150 fix again. Have to keep a sharp eye on those ReusableCBFs. Now all the indexed sequences should be using the same instance. --- src/library/scala/collection/IndexedSeq.scala | 10 ++++------ .../collection/generic/GenTraversableFactory.scala | 6 ++---- .../collection/generic/IndexedSeqFactory.scala | 21 +++++++++++++++++++++ .../scala/collection/immutable/IndexedSeq.scala | 7 ++++--- src/library/scala/collection/immutable/Vector.scala | 6 +++--- 5 files changed, 34 insertions(+), 16 deletions(-) create mode 100644 src/library/scala/collection/generic/IndexedSeqFactory.scala (limited to 'src') diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala index 39be1f7a9e..8918fbb6c8 100644 --- a/src/library/scala/collection/IndexedSeq.scala +++ b/src/library/scala/collection/IndexedSeq.scala @@ -6,8 +6,6 @@ ** |/ ** \* */ - - package scala.collection import generic._ @@ -28,13 +26,13 @@ trait IndexedSeq[+A] extends Seq[A] * @define coll indexed sequence * @define Coll `IndexedSeq` */ -object IndexedSeq extends SeqFactory[IndexedSeq] { +object IndexedSeq extends IndexedSeqFactory[IndexedSeq] { + // A single CBF which can be checked against to identify + // an indexed collection type. override val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] { override def apply() = newBuilder[Nothing] } + def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] - - def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] } - diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala index 076f555506..6614dbdc62 100644 --- a/src/library/scala/collection/generic/GenTraversableFactory.scala +++ b/src/library/scala/collection/generic/GenTraversableFactory.scala @@ -38,11 +38,10 @@ import language.higherKinds abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]] extends GenericCompanion[CC] { - // A default implementation of GenericCanBuildFrom which can be cast - // to whatever is desired. 
- val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] { + private[this] val ReusableCBFInstance: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] { override def apply() = newBuilder[Nothing] } + def ReusableCBF: GenericCanBuildFrom[Nothing] = ReusableCBFInstance /** A generic implementation of the `CanBuildFrom` trait, which forwards * all calls to `apply(from)` to the `genericBuilder` method of @@ -250,4 +249,3 @@ abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTrav b.result } } - diff --git a/src/library/scala/collection/generic/IndexedSeqFactory.scala b/src/library/scala/collection/generic/IndexedSeqFactory.scala new file mode 100644 index 0000000000..e5162c640b --- /dev/null +++ b/src/library/scala/collection/generic/IndexedSeqFactory.scala @@ -0,0 +1,21 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection +package generic + +import language.higherKinds + +/** A template for companion objects of IndexedSeq and subclasses thereof. + * + * @since 2.8 + */ +abstract class IndexedSeqFactory[CC[X] <: IndexedSeq[X] with GenericTraversableTemplate[X, CC]] extends SeqFactory[CC] { + override def ReusableCBF: GenericCanBuildFrom[Nothing] = + scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]] +} diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala index 68f642b558..a5d5728191 100644 --- a/src/library/scala/collection/immutable/IndexedSeq.scala +++ b/src/library/scala/collection/immutable/IndexedSeq.scala @@ -31,12 +31,13 @@ trait IndexedSeq[+A] extends Seq[A] * @define coll indexed sequence * @define Coll `IndexedSeq` */ -object IndexedSeq extends SeqFactory[IndexedSeq] { +object IndexedSeq extends IndexedSeqFactory[IndexedSeq] { class Impl[A](buf: ArrayBuffer[A]) extends AbstractSeq[A] with IndexedSeq[A] with Serializable { def length = buf.length def apply(idx: Int) = buf.apply(idx) } - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = - scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A] + + implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = + ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] } diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index f912285143..dc65253a55 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -17,10 +17,10 @@ import scala.collection.parallel.immutable.ParVector /** Companion object to the Vector class */ -object Vector extends SeqFactory[Vector] { - @inline implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] = - scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] +object Vector extends IndexedSeqFactory[Vector] { def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A] + implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] = + ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] private[immutable] val NIL = new Vector[Nothing](0, 0, 0) @inline override def empty[A]: Vector[A] = NIL } -- cgit v1.2.3 From 9733f56c876c6097383bd4103bd0a7be4373742e Mon Sep 17 00:00:00 2001 From: 
Aleksandar Prokopec Date: Mon, 20 Aug 2012 22:10:57 +0200 Subject: Fixes SI-4996. This bug is a result of a subtle interplay of the stackable modifications mechanism and specialization. Prior to this commit, using `abstract override` with specialization was broken in the sense that specialization did not create a specialized version of the super accessor. Observe the following code: trait A[@specialized(Int) T] { def foo(t: T) } trait B extends A[Int] { def foo(t: Int) { println("B.foo") } } trait M extends B { abstract override def foo(t: Int) { super.foo(t) println("M.foo") } } object C extends B with M During the `superaccessors` phase, the following stub is generated in `M`: private def super$foo(t: Int) Note that `foo` is a method that will later need to be specialized. During the `specialize` phase, `A.foo` gets a *special overload* `A.foo$mcI$sp`, which is a bridge to `A.foo`. `B` extends `A$mcI$sp` (previously `A[Int]`), so `B.foo` gets a *special override* `B.foo$mcI$sp`, which contains the implementation. `B.foo` is overridden to become a bridge to `B.foo$mcI$sp`. `M` extends `B`, so `M.foo` gets a special override `M.foo$mcI$sp`, and `M.foo` itself is turned into a bridge to `M.foo$mcI$sp`, just as was the case with `B`. This is where the first problem arises - `M.foo$mcI$sp` does not get an `ABSOVERRIDE` flag after being created. This commit fixes that. As mentioned earlier, `M` has a super accessor `super$foo`. A super accessor (naturally) does not override anything, thus, according to the standing specialization criteria it does not need a special override (see `needsSpecialOverride`). Its type does not contain any specialized type parameters, thus, it is not eligible to obtain a special overload. So, our `super$foo` stays the way it is, subsequently being renamed to `M$$super$foo`. Later, during `mixin`, it is implemented in `C` as a forwarder to `B$class.foo` (Not `B.foo`, because the implementation classes are generated in `mixin`). Lets see what we have now (I omit the `$this` parameter for methods in implementation classes `B$class` and `M$class`): class B$class def foo(t: Int) = ------------\ <-\ def foo$mcI$sp(t: Int) = | | | | trait M$class | | def foo(t: Int) = -------\ | | | | | def foo$mcI$sp(t: Int) = <-/<-\ | | { | | | /---- M$$super$foo(t) | | | | ... | | | | } | | | | | | | | object C | | | | def foo$mcI$sp(t: Int) = -----/ <-/ | \-> def M$$super$foo(t: Int) = | { | ------------------------------------/ Now, call `C.foo`. This call is rewritten to `C.foo$mcI$sp`, which is a bridge to `M$class.foo$mcI$sp`. Follow the lines on the diagram to enter an endless loop. Boom! Stack overflow. The culprit is the super accessor `C.M$$super$foo`, which should have forwarded to the special override `B$class.foo$mcI$sp`, instead of to `B$class.foo`. So, we have 2 options: 1) Make `C.M$$super$foo` forward to the proper special override, where the implementation actually is. 2) During specialization, create a specialized version of `M$$super$foo` called `M$$super$foo$mcI$sp`, and set its alias to the special overload `foo$mcI$sp`. Later, during `mixin`, this super accessor will be appropriately resolved in concrete classes. Option 1) involves cluttering `mixin` with specialization logic. Furthermore, the specialization already does create specialized versions of super accessors when the super accessor type contains specialized type parameters (in other words, it generates a special overload). Thus, 2) seems an ideal solution. 
We cannot deduct if a super accessor should get a special override directly, but we can see if its alias needs a special override. If it does, then we generate a special override for the super accessor. This commit introduces the following changes: 1) The `ABSOVERRIDE` flag is now retained, as mentioned earlier. 2) A super accessor gets a special override if its alias needs a special override. 3) The super calls in the methods bodies are rewritten to their specialized variants if they exist. 4) Newly generated special overrides and special overloads are now entered into the declaration list of the owner during specialization. Strangely, this was not done before, but it is necessary for `mixin` to detect the generated special overload/override which is an alias for the super accessor. --- .../tools/nsc/transform/SpecializeTypes.scala | 41 ++++++++++++++++--- test/files/run/t4996.check | 4 ++ test/files/run/t4996.scala | 47 ++++++++++++++++++++++ 3 files changed, 86 insertions(+), 6 deletions(-) create mode 100644 test/files/run/t4996.check create mode 100644 test/files/run/t4996.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 3366244bc6..d2c3744a1c 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -218,6 +218,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def target = t } + /** Symbol is a special overload of the super accessor. */ + case class SpecialSuperAccessor(t: Symbol) extends SpecializedInfo { + def target = t + } + /** Symbol is a specialized accessor for the `target` field. */ case class SpecializedAccessor(target: Symbol) extends SpecializedInfo { override def isAccessor = true @@ -865,6 +870,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec)) + owner.info.decls.enter(specMember) typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s } @@ -894,10 +900,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } /** Return the specialized overload of `m`, in the given environment. 
*/ - private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = { + private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv, nameSymbol: Symbol = NoSymbol): Symbol = { val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR) // this method properly duplicates the symbol's info - ( sym.cloneSymbol(owner, newFlags, specializedName(sym, env)) + val specname = specializedName(nameSymbol orElse sym, env) + ( sym.cloneSymbol(owner, newFlags, specname) modifyInfo (info => subst(env, info.asSeenFrom(owner.thisType, sym.owner))) ) } @@ -957,14 +964,32 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } (clazz.info.decls flatMap { overriding => needsSpecialOverride(overriding) match { - case (NoSymbol, _) => None + case (NoSymbol, _) => + if (overriding.isSuperAccessor) { + val alias = overriding.alias + debuglog("checking special overload for super accessor: %s, alias for %s".format(overriding.fullName, alias.fullName)) + needsSpecialOverride(alias) match { + case nope @ (NoSymbol, _) => None + case (overridden, env) => + val om = specializedOverload(clazz, overriding, env, overridden) + om.setName(nme.superName(om.name)) + om.asInstanceOf[TermSymbol].setAlias(info(alias).target) + om.owner.info.decls.enter(om) + info(om) = SpecialSuperAccessor(om) + om.makeNotPrivate(om.owner) + overloads(overriding) ::= Overload(om, env) + Some(om) + } + } else None case (overridden, env) => val om = specializedOverload(clazz, overridden, env) + clazz.info.decls.enter(om) debuglog("specialized overload %s for %s in %s: %s".format(om, overriding.name.decode, pp(env), om.info)) + if (overriding.isAbstractOverride) om.setFlag(ABSOVERRIDE) typeEnv(om) = env addConcreteSpecMethod(overriding) info(om) = ( - if (overriding.isDeferred) { // abstract override + if (overriding.isDeferred) { // abstract override debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName) Forward(overriding) } @@ -1287,7 +1312,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (sym.isPrivate) debuglog( "seeing private member %s, currentClass: %s, owner: %s, isAccessible: %b, isLocalName: %b".format( sym, currentClass, sym.owner.enclClass, isAccessible(sym), nme.isLocalName(sym.name)) - ) + ) if (shouldMakePublic(sym) && !isAccessible(sym)) { debuglog("changing private flag of " + sym) sym.makeNotPrivate(sym.owner) @@ -1548,7 +1573,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case SpecialOverride(target) => assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName) //debuglog("moving implementation, body of target " + target + ": " + body(target)) - debuglog("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor)) + log("%s is param accessor? 
%b".format(ddef.symbol, ddef.symbol.isParamAccessor)) // we have an rhs, specialize it val tree1 = addBody(ddef, target) (new ChangeOwnerTraverser(target, tree1.symbol))(tree1.rhs) @@ -1596,6 +1621,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case Abstract(targ) => debuglog("abstract: " + targ) localTyper.typed(deriveDefDef(tree)(rhs => rhs)) + + case SpecialSuperAccessor(targ) => + debuglog("special super accessor: " + targ + " for " + tree) + localTyper.typed(deriveDefDef(tree)(rhs => rhs)) } case ValDef(_, _, _, _) if symbol.hasFlag(SPECIALIZED) && !symbol.isParamAccessor => diff --git a/test/files/run/t4996.check b/test/files/run/t4996.check new file mode 100644 index 0000000000..8d45b413c9 --- /dev/null +++ b/test/files/run/t4996.check @@ -0,0 +1,4 @@ +B.foo +M.foo +B.foo +M.foo \ No newline at end of file diff --git a/test/files/run/t4996.scala b/test/files/run/t4996.scala new file mode 100644 index 0000000000..8e7636aaac --- /dev/null +++ b/test/files/run/t4996.scala @@ -0,0 +1,47 @@ + + + + + + +trait A[@specialized(Int) T] { + def foo(t: T) +} + + +trait B extends A[Int] { + def foo(t: Int) { + println("B.foo") + } +} + + +trait M extends B { + abstract override def foo(t: Int) { + super.foo(t) + println("M.foo") + } +} + + +object C extends B with M + + +object D extends B { + override def foo(t: Int) { + super.foo(t) + println("M.foo") + } +} + + +object Test { + + def main(args: Array[String]) { + D.foo(42) // OK, prints B.foo M.foo + C.foo(42) // was StackOverflowError + } + +} + + -- cgit v1.2.3 From 21105654c40ed0c462142bcbb6c8eced77f8b07a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 24 Aug 2012 00:54:59 +0200 Subject: SI-3577 Make varianceInType aware of BoundedWildcardType. --- .../scala/tools/nsc/typechecker/Variances.scala | 2 ++ test/files/pos/t3577.scala | 29 ++++++++++++++++++++++ 2 files changed, 31 insertions(+) create mode 100644 test/files/pos/t3577.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala index b9f2b9abd8..279096bddd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Variances.scala @@ -67,6 +67,8 @@ trait Variances { def varianceInType(tp: Type)(tparam: Symbol): Int = tp match { case ErrorType | WildcardType | NoType | NoPrefix | ThisType(_) | ConstantType(_) => VARIANCES + case BoundedWildcardType(bounds) => + varianceInType(bounds)(tparam) case SingleType(pre, sym) => varianceInType(pre)(tparam) case TypeRef(pre, sym, args) => diff --git a/test/files/pos/t3577.scala b/test/files/pos/t3577.scala new file mode 100644 index 0000000000..80a280f67a --- /dev/null +++ b/test/files/pos/t3577.scala @@ -0,0 +1,29 @@ +case class Check[A](val value: A) + +case class C2(checks: Check[_]*); + +object C { + def m(x : C2): Any = (null: Any) match { + case C2(_, rest @ _*) => { + rest.map(_.value) + } + } +} + +/////////////////// + +object Container { + trait Exp[+T] + abstract class FuncExp[-S, +T] + + sealed abstract class FoundNode[T, Repr] { + def optimize[TupleT, U, That](parentNode: FlatMap[T, Repr, U, That]): Any + def optimize2[TupleT, U, That](parentNode: Any): Any + } + + class FlatMap[T, Repr, U, That] + + val Seq(fn: FoundNode[t, repr]) = Seq[FoundNode[_, _]]() + fn.optimize(null) // was: scala.MatchError: ? 
(of class BoundedWildcardType) @ Variances#varianceInType + fn.optimize2(null) // was: fatal error: bad type: ?(class scala.reflect.internal.Types$BoundedWildcardType) @ Pickle.putType +} -- cgit v1.2.3 From 2b4e7183fd24113cca5e868456668fd05c848168 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 24 Aug 2012 01:05:07 +0200 Subject: Make RefChecks#validateVariance aware of BoundedWildcardType. The only test case that I know for this will be neutered by the imminent fix for SI-6258; so I haven't been able to test this. But trying this manually, you can see that this patch defers the the SI-6258 to the erasure phase. Original: scala.MatchError: ? (of class scala.reflect.internal.Types$BoundedWildcardType) at scala.tools.nsc.typechecker.RefChecks$RefCheckTransformer$$anon$3.scala$tools$nsc$typechecker$RefChecks$RefCheckTransformer$$anon$$validateVariance$1(RefChecks.scala:894) at scala.tools.nsc.typechecker.RefChecks$RefCheckTransformer$$anon$3.validateVariance(RefChecks.scala:965) Modified: java.lang.ClassCastException: scala.reflect.internal.Types$TypeRef$$anon$6 cannot be cast to scala.reflect.internal.Types$TypeBounds at scala.reflect.internal.Types$TypeMap.mapOver(Types.scala:4160) at scala.reflect.internal.transform.Erasure$ErasureMap.apply(Erasure.scala:156) --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 91435e2214..a2afce8a49 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -896,13 +896,15 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R * the type occurs itself at variance position given by `variance` */ def validateVariance(tp: Type, variance: Int): Unit = tp match { - case ErrorType => ; - case WildcardType => ; - case NoType => ; - case NoPrefix => ; - case ThisType(_) => ; - case ConstantType(_) => ; - // case DeBruijnIndex(_, _) => ; + case ErrorType => + case WildcardType => + case BoundedWildcardType(bounds) => + validateVariance(bounds, variance) + case NoType => + case NoPrefix => + case ThisType(_) => + case ConstantType(_) => + // case DeBruijnIndex(_, _) => case SingleType(pre, sym) => validateVariance(pre, variance) case TypeRef(pre, sym, args) => -- cgit v1.2.3 From 00e46b3dbcea2b72fd3941b7ffc2efba382871e9 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 24 Aug 2012 01:16:47 +0200 Subject: Mention BoundedWildcardType in "a standard type pattern match". --- src/reflect/scala/reflect/internal/Types.scala | 2 ++ 1 file changed, 2 insertions(+) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 9ca480155e..aa9ab9f572 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -22,6 +22,8 @@ import util.ThreeValues._ // internal: error case WildcardType => // internal: unknown + case BoundedWildcardType(bounds) => + // internal: unknown case NoType => case NoPrefix => case ThisType(sym) => -- cgit v1.2.3 From 2527a5a688b14d7936e8bb1d2116aa06ad8170c4 Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Fri, 31 Aug 2012 12:47:16 +0400 Subject: Use proper Option methods instead of .get in scaladoc ModelFactory. Review by @ureche. 
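For illustration, a hedged sketch of the rewrite pattern applied here, using a simplified stand-in `Comment` type (the real scaladoc types carry more structure than this):

    case class Comment(group: Option[String])   // simplified stand-in for the scaladoc Comment

    // Before: branch on isDefined and reach in with .get
    def groupOld(comment: Option[Comment]): String =
      if (comment.isDefined) comment.get.group.getOrElse("No Group") else "No Group"

    // After: the equivalent expressed with Option combinators, as in this patch
    def group(comment: Option[Comment]): String =
      comment flatMap (_.group) getOrElse "No Group"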
--- .../scala/tools/nsc/doc/model/ModelFactory.scala | 36 +++++++++------------- 1 file changed, 15 insertions(+), 21 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index ed8541f692..fd672613e4 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -121,7 +121,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } if (inTpl != null) thisFactory.comment(sym, thisTpl, inTpl) else None } - def group = if (comment.isDefined) comment.get.group.getOrElse("No Group") else "No Group" + def group = comment flatMap (_.group) getOrElse "No Group" override def inTemplate = inTpl override def toRoot: List[MemberImpl] = this :: inTpl.toRoot def inDefinitionTemplates = this match { @@ -143,8 +143,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { else None if (sym.isPrivate) PrivateInTemplate(inTpl) else if (sym.isProtected) ProtectedInTemplate(qual getOrElse inTpl) - else if (qual.isDefined) PrivateInTemplate(qual.get) - else Public() + else qual match { + case Some(q) => PrivateInTemplate(q) + case None => Public() + } } } def flags = { @@ -487,8 +489,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def groupSearch[T](extractor: Comment => T, default: T): T = { // query this template - if (comment.isDefined) { - val entity = extractor(comment.get) + for (c <- comment) { + val entity = extractor(c) if (entity != default) return entity } // query linearization @@ -544,8 +546,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { // also remove property("package object") from test/scaladoc/scalacheck/HtmlFactoryTest.scala so you don't break // the test suite... 
val packageObject = if (inPackageObject) ".package" else "" - if (!conversion.isDefined) optimize(inDefinitionTemplates.head.qualifiedName + packageObject + "#" + name) - else optimize(conversion.get.conversionQualifiedName + packageObject + "#" + name) + val qualifiedName = conversion map (_.conversionQualifiedName) getOrElse inDefinitionTemplates.head.qualifiedName + optimize(qualifiedName + packageObject + "#" + name) } def isBridge = sym.isBridge def isUseCase = useCaseOf.isDefined @@ -876,20 +878,12 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def makeTemplate(aSym: Symbol, inTpl: Option[TemplateImpl]): TemplateImpl = { assert(modelFinished) - def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = { - val bSym = normalizeTemplate(aSym) - noDocTemplatesCache.get(bSym) match { - case Some(noDocTpl) => noDocTpl - case None => new NoDocTemplateImpl(bSym, inTpl) - } - } + def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = + noDocTemplatesCache get aSym getOrElse new NoDocTemplateImpl(aSym, inTpl) - findTemplateMaybe(aSym) match { - case Some(dtpl) => - dtpl - case None => - val bSym = normalizeTemplate(aSym) - makeNoDocTemplate(bSym, if (inTpl.isDefined) inTpl.get else makeTemplate(bSym.owner)) + findTemplateMaybe(aSym) getOrElse { + val bSym = normalizeTemplate(aSym) + makeNoDocTemplate(bSym, inTpl getOrElse makeTemplate(bSym.owner)) } } @@ -1016,7 +1010,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } def makeQualifiedName(sym: Symbol, relativeTo: Option[Symbol] = None): String = { - val stop = if (relativeTo.isDefined) relativeTo.get.ownerChain.toSet else Set[Symbol]() + val stop = relativeTo map (_.ownerChain.toSet) getOrElse Set[Symbol]() var sym1 = sym var path = new StringBuilder() // var path = List[Symbol]() -- cgit v1.2.3 From 656a1c47c4fb2648a10296263133aa73e91061c9 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 30 Aug 2012 15:17:58 -0700 Subject: On --grep, partest must dir.list to descend into subdirs (fixes SI-6296) In addition, always use UTF-8 when reading test files (and not default), since some tests exercise UTF-8. Also, quote "$JAVACMD" and convert it (and $JAVA_HOME) for cygwin, where $JAVACMD is likely to include a space. --- src/partest/scala/tools/partest/nest/ConsoleRunner.scala | 7 ++++--- src/partest/scala/tools/partest/package.scala | 2 +- test/partest | 8 +++++++- 3 files changed, 12 insertions(+), 5 deletions(-) (limited to 'src') diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala index ccc756c158..84d9832f97 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala @@ -70,10 +70,11 @@ class ConsoleRunner extends DirectRunner { // true if a test path matches the --grep expression. 
private def pathMatchesExpr(path: Path, expr: String) = { def pred(p: Path) = file2String(p.toFile) contains expr - def srcs = path.toDirectory.deepList() filter (_.hasExtension("scala", "java")) + def greppable(f: Path) = f.isFile && (f hasExtension ("scala", "java")) + def any(d: Path) = d.toDirectory.deepList() exists (f => greppable(f) && pred(f)) (path.isFile && pred(path)) || - (path.isDirectory && srcs.exists(pred)) || + (path.isDirectory && any(path)) || (pred(path changeExtension "check")) } @@ -121,7 +122,7 @@ class ConsoleRunner extends DirectRunner { val grepOption = parsed get "--grep" val grepPaths = grepOption.toList flatMap { expr => val subjectDirs = testSetKinds map (srcDir / _ toDirectory) - val testPaths = subjectDirs flatMap (_.files filter stdFilter) + val testPaths = subjectDirs flatMap (_.list filter stdFilter) val paths = testPaths filter (p => pathMatchesExpr(p, expr)) if (paths.isEmpty) diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala index 08934ef143..09eb4b66f1 100644 --- a/src/partest/scala/tools/partest/package.scala +++ b/src/partest/scala/tools/partest/package.scala @@ -45,7 +45,7 @@ package object partest { def path2String(path: String) = file2String(new JFile(path)) def file2String(f: JFile) = - try SFile(f).slurp() + try SFile(f).slurp(scala.io.Codec.UTF8) catch { case _: FileNotFoundException => "" } def basename(name: String): String = Path(name).stripExtension diff --git a/test/partest b/test/partest index 8352f8a946..ae60c49929 100755 --- a/test/partest +++ b/test/partest @@ -70,6 +70,12 @@ if $cygwin; then else format=windows fi + if [ -n "${JAVA_HOME}" ] ; then + JAVA_HOME=`cygpath --$format "$JAVA_HOME"` + fi + if [ -n "${JAVACMD}" ] ; then + JAVACMD=`cygpath --$format "$JAVACMD"` + fi SCALA_HOME=`cygpath --$format "$SCALA_HOME"` EXT_CLASSPATH=`cygpath --path --$format "$EXT_CLASSPATH"` fi @@ -84,7 +90,7 @@ if [ ! -z "${PARTEST_DEBUG}" ] ; then partestDebugStr="-Dpartest.debug=${PARTEST_DEBUG}" fi -${JAVACMD:=java} \ +"${JAVACMD:=java}" \ $JAVA_OPTS -cp "$EXT_CLASSPATH" \ ${partestDebugStr} \ -Dscala.home="${SCALA_HOME}" \ -- cgit v1.2.3 From 1a32068e08c54249a1a2317ca321e7984edd2b96 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 31 Aug 2012 08:41:55 -0700 Subject: Warn when overloaded types are seen after typer. Since it almost invariably means a crash is coming. --- .../scala/tools/nsc/typechecker/Typers.scala | 25 ++++++++++++++++++++++ 1 file changed, 25 insertions(+) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1770f2419a..f8826dc27f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4503,6 +4503,31 @@ trait Typers extends Modes with Adaptations with Tags { * @return ... */ def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = { + val t = typedSelectInternal(tree, qual, name) + if (isPastTyper) t.tpe match { + case OverloadedType(pre, alts) => + if (alts forall (s => (s.owner == ObjectClass) || (s.owner == AnyClass) || isPrimitiveValueClass(s.owner))) () + else { + val msg = + s"""|Select received overloaded type during $phase, but typer is over. + |We are likely doomed to crash in the backend. 
+ |$t has these overloads: + |${alts map (s => " " + s.defStringSeenAs(pre memberType s)) mkString "\n"} + |""".stripMargin + + if (context.reportErrors) + unit.warning(t.pos, msg) + else + Console.err.println(msg) + + if (settings.debug.value) + (new Throwable).printStackTrace + } + case _ => + } + t + } + def typedSelectInternal(tree: Tree, qual: Tree, name: Name): Tree = { def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map (typed1(_, mode, pt)) val sym = tree.symbol orElse member(qual, name) orElse { -- cgit v1.2.3 From 74842f72a0af485e5def796f777f7003f969d75b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 31 Aug 2012 08:45:34 -0700 Subject: Workaround for SI-6301, @specialize crasher. SpecializeTypes is generating symbols with overloaded types which then proceed to crash in CleanUp or GenICode. Until I or someone works out why that is, take a look in case the overload is easily disambiguated by the argument list arity, in which case warn and proceed. --- .../scala/tools/nsc/transform/CleanUp.scala | 36 ++++++++++++++-------- 1 file changed, 24 insertions(+), 12 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 570704f049..1aa170a07e 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -441,19 +441,31 @@ abstract class CleanUp extends Transform with ast.TreeDSL { * is a value type (int et al.) in which case it must cast to the boxed version * because invoke only returns object and erasure made sure the result is * expected to be an AnyRef. */ - val t: Tree = ad.symbol.tpe match { - case MethodType(mparams, resType) => - assert(params.length == mparams.length, mparams) - - typedPos { - val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe - qual = REF(sym) + val t: Tree = { + val (mparams, resType) = ad.symbol.tpe match { + case MethodType(mparams, resType) => + assert(params.length == mparams.length, ((params, mparams))) + (mparams, resType) + case tpe @ OverloadedType(pre, alts) => + unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe)) + alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match { + case mt @ MethodType(mparams, resType) :: Nil => + unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt) + (mparams, resType) + case _ => + unit.error(ad.pos, "Cannot resolve overload.") + (Nil, NoType) + } + } + typedPos { + val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe + qual = REF(sym) - BLOCK( - VAL(sym) === qual0, - callAsReflective(mparams map (_.tpe), resType) - ) - } + BLOCK( + VAL(sym) === qual0, + callAsReflective(mparams map (_.tpe), resType) + ) + } } /* For testing purposes, the dynamic application's condition -- cgit v1.2.3 From f4c45ae204ce3ff3c16b19cab266d0b6515b6e0f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 31 Aug 2012 10:49:24 -0700 Subject: Rewrite of GenICode adapt. Started for debuggability, stayed for clarify/performance. 
--- .../scala/tools/nsc/backend/icode/GenICode.scala | 102 +++++++++------------ 1 file changed, 43 insertions(+), 59 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 431802d185..59741e95f8 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -125,7 +125,7 @@ abstract class GenICode extends SubComponent { // in companion object accessors to @static fields, we access the static field directly val hostClass = m.symbol.owner.companionClass val staticfield = hostClass.info.findMember(m.symbol.accessed.name, NoFlags, NoFlags, false) - + if (m.symbol.isGetter) { ctx1.bb.emit(LOAD_FIELD(staticfield, true) setHostClass hostClass, tree.pos) ctx1.bb.closeWith(RETURN(m.returnType)) @@ -659,16 +659,16 @@ abstract class GenICode extends SubComponent { } else { val sym = tree.symbol val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false)) - + if (rhs == EmptyTree) { debuglog("Uninitialized variable " + tree + " at: " + (tree.pos)); ctx.bb.emit(getZeroOf(local.kind)) } - + var ctx1 = ctx if (rhs != EmptyTree) ctx1 = genLoad(rhs, ctx, local.kind); - + ctx1.bb.emit(STORE_LOCAL(local), tree.pos) ctx1.scope.add(local) ctx1.bb.emit(SCOPE_ENTER(local)) @@ -727,10 +727,10 @@ abstract class GenICode extends SubComponent { ctx1.bb.enterIgnoreMode generatedType = expectedType ctx1 - } + } genLoadReturn - case t @ Try(_, _, _) => + case t @ Try(_, _, _) => genLoadTry(t, ctx, generatedType = _) case Throw(expr) => @@ -750,7 +750,7 @@ abstract class GenICode extends SubComponent { case Object_asInstanceOf => true case _ => abort("Unexpected type application " + fun + "[sym: " + sym.fullName + "]" + " in: " + tree) } - + val Select(obj, _) = fun val l = toTypeKind(obj.tpe) val r = toTypeKind(targs.head.tpe) @@ -794,7 +794,7 @@ abstract class GenICode extends SubComponent { ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos) val ctx1 = genLoadArguments(args, fun.symbol.info.paramTypes, ctx) - + ctx1.bb.emit(CALL_METHOD(fun.symbol, invokeStyle), tree.pos) generatedType = if (fun.symbol.isConstructor) UNIT @@ -812,7 +812,7 @@ abstract class GenICode extends SubComponent { val ctor = fun.symbol debugassert(ctor.isClassConstructor, "'new' call to non-constructor: " + ctor.name) - + generatedType = toTypeKind(tpt.tpe) debugassert(generatedType.isReferenceType || generatedType.isArrayType, "Non reference type cannot be instantiated: " + generatedType) @@ -858,7 +858,7 @@ abstract class GenICode extends SubComponent { ctx1 } ctx2 - + case _ => abort("Cannot instantiate " + tpt + " of kind: " + generatedType) } @@ -899,7 +899,7 @@ abstract class GenICode extends SubComponent { ctx1 case app @ Apply(fun @ Select(qual, _), args) - if !ctx.method.symbol.isStaticConstructor + if !ctx.method.symbol.isStaticConstructor && fun.symbol.isAccessor && fun.symbol.accessed.hasStaticAnnotation && qual.tpe.typeSymbol.orElse(fun.symbol.owner).companionClass != NoSymbol => // bypass the accessor to the companion object and load the static field directly @@ -938,11 +938,11 @@ abstract class GenICode extends SubComponent { } } genLoadApply5 - + case app @ Apply(fun, args) => def genLoadApply6 = { val sym = fun.symbol - + if (sym.isLabel) { // jump to a label val label = ctx.labels.getOrElse(sym, { // it is a forward jump, scan for labels @@ -979,7 +979,7 @@ abstract class GenICode extends SubComponent { Static(true) else Dynamic - 
+ var ctx1 = if (invokeStyle.hasInstance) { if (forMSIL && !(invokeStyle.isInstanceOf[SuperCall]) && msil_IsValuetypeInstMethod(sym)) @@ -987,20 +987,20 @@ abstract class GenICode extends SubComponent { else genLoadQualifier(fun, ctx) } else ctx - + ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1) val cm = CALL_METHOD(sym, invokeStyle) - + /** In a couple cases, squirrel away a little extra information in the * CALL_METHOD for use by GenJVM. */ fun match { case Select(qual, _) => val qualSym = findHostClass(qual.tpe, sym) - + if (qualSym == ArrayClass) cm setTargetTypeKind toTypeKind(qual.tpe) else cm setHostClass qualSym - + log( if (qualSym == ArrayClass) "Stored target type kind " + toTypeKind(qual.tpe) + " for " + sym.fullName else s"Set more precise host class for ${sym.fullName} hostClass: $qualSym" @@ -1140,7 +1140,7 @@ abstract class GenICode extends SubComponent { val elmKind = toTypeKind(tpt.tpe) generatedType = ARRAY(elmKind) val elems = _elems.toIndexedSeq - + ctx1.bb.emit(CONSTANT(new Constant(elems.length)), tree.pos) ctx1.bb.emit(CREATE_ARRAY(elmKind, 1)) // inline array literals @@ -1163,7 +1163,7 @@ abstract class GenICode extends SubComponent { val afterCtx = ctx1.newBlock var caseCtx: Context = null generatedType = toTypeKind(tree.tpe) - + var targets: List[BasicBlock] = Nil var tags: List[Int] = Nil var default: BasicBlock = afterCtx.bb @@ -1190,7 +1190,7 @@ abstract class GenICode extends SubComponent { abort("Invalid case statement in switch-like pattern match: " + tree + " at: " + (tree.pos)) } - + caseCtx = genLoad(body, tmpCtx, generatedType) // close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body) caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos) @@ -1218,34 +1218,28 @@ abstract class GenICode extends SubComponent { resCtx } - private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position): Unit = { - if (!(from <:< to) && !(from == NullReference && to == NothingReference)) { - to match { - case UNIT => - ctx.bb.emit(DROP(from), pos) - debuglog("Dropped an " + from); - - case _ => - debugassert(from != UNIT, "Can't convert from UNIT to " + to + " at: " + pos) - assert(!from.isReferenceType && !to.isReferenceType, - "type error: can't convert from " + from + " to " + to +" in unit " + unit.source + " at " + pos) - - ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos) - } - } else if (from == NothingReference) { - ctx.bb.emit(THROW(ThrowableClass)) - ctx.bb.enterIgnoreMode - } else if (from == NullReference) { - ctx.bb.emit(DROP(from)) - ctx.bb.emit(CONSTANT(Constant(null))) + private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) { + // An awful lot of bugs explode here - let's leave ourselves more clues. + // A typical example is an overloaded type assigned after typer. 
+ log(s"GenICode#adapt($from, $to, $ctx, $pos)") + + val conforms = (from <:< to) || (from == NullReference && to == NothingReference) + def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos) + def checkAssertions() { + def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos" + debugassert(from != UNIT, msg) + assert(!from.isReferenceType && !to.isReferenceType, msg) } - else if (from == ThrowableReference && !(ThrowableClass.tpe <:< to.toType)) { - log("Inserted check-cast on throwable to " + to + " at " + pos) - ctx.bb.emit(CHECK_CAST(to)) + if (conforms) from match { + case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode + case NullReference => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null)))) + case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables + case BYTE | SHORT | CHAR | INT if to == LONG => coerce(INT, LONG) // widen subrange types + case _ => () } - else (from, to) match { - case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, LONG))) - case _ => () + else to match { + case UNIT => ctx.bb.emit(DROP(from), pos) // value discarding + case _ => checkAssertions() ; coerce(from, to) // other primitive coercions } } @@ -1965,18 +1959,8 @@ abstract class GenICode extends SubComponent { var handlerCount = 0 - override def toString(): String = { - val buf = new StringBuilder() - buf.append("\tpackage: ").append(packg).append('\n') - buf.append("\tclazz: ").append(clazz).append('\n') - buf.append("\tmethod: ").append(method).append('\n') - buf.append("\tbb: ").append(bb).append('\n') - buf.append("\tlabels: ").append(labels).append('\n') - buf.append("\texception handlers: ").append(handlers).append('\n') - buf.append("\tcleanups: ").append(cleanups).append('\n') - buf.append("\tscope: ").append(scope).append('\n') - buf.toString() - } + override def toString = + s"package $packg { class $clazz { def $method { bb=$bb } } }" def loadException(ctx: Context, exh: ExceptionHandler, pos: Position) = { debuglog("Emitting LOAD_EXCEPTION for class: " + exh.loadExceptionClass) -- cgit v1.2.3 From a3680be29ccd5314c5d027d473b37940eaecd530 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 31 Aug 2012 10:20:16 -0700 Subject: Actual fix for SI-6301, specialized crasher. This means the workaround in the previous commit is no longer reached, but it should remain where it is as a much needed layer of robustness/useful error reporting. 
--- .../tools/nsc/transform/SpecializeTypes.scala | 96 ++++++++++------------ .../scala/tools/nsc/typechecker/Duplicators.scala | 34 ++++++-- 2 files changed, 70 insertions(+), 60 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 1e4c1d454b..8eaf12fac0 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -178,6 +178,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case class Overload(sym: Symbol, env: TypeEnv) { override def toString = "specialized overload " + sym + " in " + env + def matchesSym(other: Symbol) = sym.tpe =:= other.tpe + def matchesEnv(env1: TypeEnv) = TypeEnv.includes(env, env1) + } + private def newOverload(method: Symbol, specializedMethod: Symbol, env: TypeEnv) = { + assert(!specializedMethod.isOverloaded, specializedMethod.defString) + val om = Overload(specializedMethod, env) + overloads(method) ::= om + om } /** Just to mark uncheckable */ @@ -294,10 +302,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - /** Return the specialized overload of sym in the given env, if any. */ - def overload(sym: Symbol, env: TypeEnv) = - overloads(sym).find(ov => TypeEnv.includes(ov.env, env)) - /** Return the specialized name of 'sym' in the given environment. It * guarantees the same result regardless of the map order by sorting * type variables alphabetically. @@ -633,7 +637,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { info(om) = if (original.isDeferred) Forward(original) else Implementation(original) typeEnv(om) = env ++ typeEnv(m) // add the environment for any method tparams - overloads(specMember) ::= Overload(om, typeEnv(om)) + newOverload(specMember, om, typeEnv(om)) enterMember(om) } @@ -835,7 +839,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { debuglog("%s expands to %s in %s".format(sym, specMember.name.decode, pp(env))) info(specMember) = NormalizedMember(sym) - overloads(sym) ::= Overload(specMember, env) + newOverload(sym, specMember, env) owner.info.decls.enter(specMember) specMember } @@ -878,9 +882,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (wasSpec.nonEmpty) debuglog("specialized overload for %s in %s".format(specMember, pp(typeEnv(specMember)))) - overloads(sym) ::= Overload(specMember, spec) + newOverload(sym, specMember, spec) info(specMember) = SpecialOverload(sym, typeEnv(specMember)) - specMember } @@ -977,7 +980,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { om.owner.info.decls.enter(om) info(om) = SpecialSuperAccessor(om) om.makeNotPrivate(om.owner) - overloads(overriding) ::= Overload(om, env) + newOverload(overriding, om, env) Some(om) } } else None @@ -1009,7 +1012,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { SpecialOverride(impl) } ) - overloads(overriding) ::= Overload(om, env) + newOverload(overriding, om, env) ifDebug(exitingSpecialize(assert( overridden.owner.info.decl(om.name) != NoSymbol, "Could not find " + om.name + " in " + overridden.owner.info.decls)) @@ -1491,54 +1494,41 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } transformTypeApply - case Select(qual, name) => - def transformSelect = { - qual match { - case _: Super if 
illegalSpecializedInheritance(currentClass) => - val pos = tree.pos - debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.") - debuglog(pos.lineContent) - tree - case _ => - - debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe)) + case Select(Super(_, _), _) if illegalSpecializedInheritance(currentClass) => + val pos = tree.pos + debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.\n" + pos.lineContent) + tree - //log("!!! select " + tree + " -> " + symbol.info + " specTypeVars: " + specializedTypeVars(symbol.info)) - if (specializedTypeVars(symbol.info).nonEmpty && name != nme.CONSTRUCTOR) { - // log("!!! unifying " + (symbol, symbol.tpe) + " and " + (tree, tree.tpe)) - val env = unify(symbol.tpe, tree.tpe, emptyEnv, false) - // log("!!! found env: " + env + "; overloads: " + overloads(symbol)) - if (!env.isEmpty) { - // debuglog("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env) - val specMember = overload(symbol, env) - if (specMember.isDefined) { - localTyper.typedOperator(atPos(tree.pos)(Select(transform(qual), specMember.get.sym.name))) - } - else { - val qual1 = transform(qual) + case Select(qual, name) if name != nme.CONSTRUCTOR && specializedTypeVars(symbol.info).nonEmpty => + debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe)) + val env = unify(symbol.tpe, tree.tpe, emptyEnv, false) + if (env.isEmpty) super.transform(tree) + else { + val qual1 = transform(qual) + def reselect(member: Symbol) = { + val newSelect = atPos(tree.pos)(Select(qual1, member)) + if (member.isMethod) localTyper typedOperator newSelect + else localTyper typed newSelect + } + overloads(symbol) find (_ matchesEnv env) match { + case Some(Overload(member, _)) => reselect(member) + case _ => val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe)) - if (specMember ne NoSymbol) { - val tree1 = atPos(tree.pos)(Select(qual1, specMember)) - if (specMember.isMethod) - localTyper.typedOperator(tree1) - else - localTyper.typed(tree1) - } else + if (specMember ne NoSymbol) + reselect(specMember) + else treeCopy.Select(tree, qual1, name) - } - } else - super.transform(tree) - } else overloads(symbol).find(_.sym.info =:= symbol.info) match { - case Some(specMember) => - val qual1 = transform(qual) - debuglog("** routing " + tree + " to " + specMember.sym.fullName + " tree: " + Select(qual1, specMember.sym)) - localTyper.typedOperator(atPos(tree.pos)(Select(qual1, specMember.sym))) - case None => - super.transform(tree) - } + } } + case Select(qual, _) => + overloads(symbol) find (_ matchesSym symbol) match { + case Some(Overload(member, _)) => + val newTree = Select(transform(qual), member) + debuglog(s"** routing $tree to ${member.fullName} tree: $newTree") + localTyper.typedOperator(atPos(tree.pos)(newTree)) + case None => + super.transform(tree) } - transformSelect case PackageDef(pid, stats) => tree.symbol.info // make sure specializations have been performed diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 070f083a89..41fbaa168e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -317,13 +317,33 @@ abstract class Duplicators 
extends Analyzer { super.typed(tree, mode, pt) case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) => - // log("selection on this, no type ascription required") - // we use the symbol name instead of the tree name because the symbol may have been - // name mangled, rendering the tree name obsolete - // log(tree) - val t = super.typed(atPos(tree.pos)(Select(This(newClassOwner), tree.symbol.name)), mode, pt) - // log("typed to: " + t + "; tpe = " + t.tpe + "; " + inspectTpe(t.tpe)) - t + // We use the symbol name instead of the tree name because the symbol + // may have been name mangled, rendering the tree name obsolete. + // ...but you can't just do a Select on a name because if the symbol is + // overloaded, you will crash in the backend. + val memberByName = newClassOwner.thisType.member(tree.symbol.name) + def nameSelection = Select(This(newClassOwner), tree.symbol.name) + val newTree = ( + if (memberByName.isOverloaded) { + // Find the types of the overload alternatives as seen in the new class, + // and filter the list down to those which match the old type (after + // fixing the old type so it is seen as if from the new class.) + val typeInNewClass = fixType(oldClassOwner.info memberType tree.symbol) + val alts = memberByName.alternatives + val memberTypes = alts map (newClassOwner.info memberType _) + val memberString = memberByName.defString + alts zip memberTypes filter (_._2 =:= typeInNewClass) match { + case ((alt, tpe)) :: Nil => + log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n Overload was: $memberString") + Select(This(newClassOwner), alt) + case _ => + log(s"Could not disambiguate $memberString in Duplicators. Attempting name-based selection, but this may not end well...") + nameSelection + } + } + else nameSelection + ) + super.typed(atPos(tree.pos)(newTree), mode, pt) case This(_) if (oldClassOwner ne null) && (tree.symbol == oldClassOwner) => // val tree1 = Typed(This(newClassOwner), TypeTree(fixType(tree.tpe.widen))) -- cgit v1.2.3 From 7206df0374add1bcf73e15f61a024852463f6fc9 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 31 Aug 2012 08:43:33 -0700 Subject: Don't synthesize trees with overloaded calls. Finding call sites where we would generate (and get away with) an overloaded constructor call after overloading resolution is already done. 
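The pattern, condensed from the TreeGen.mkSoftRef change in the diff below: rather than a name-based constructor Select (which, for a class with several constructors, produces an overloaded call after overloading resolution is already over), the intended constructor symbol is resolved up front and attached to the Select via the new NewFromConstructor helper. An annotated view of that change (not additional code beyond what the diff introduces):

    // before: New(SoftReferenceClass.tpe, expr)
    //         -- the nme.CONSTRUCTOR selection on SoftReference stays overloaded
    // after:  pin down the single one-argument constructor and build the call from its symbol
    val constructor = SoftReferenceClass.info
      .nonPrivateMember(nme.CONSTRUCTOR)
      .suchThat(_.paramss.flatten.size == 1)
    NewFromConstructor(constructor, List(expr))  // Apply(Select(New(tpt), CONSTRUCTOR) setSymbol constructor, args)
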
--- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 5 ++++- src/compiler/scala/tools/nsc/transform/Constructors.scala | 2 +- src/reflect/scala/reflect/internal/Definitions.scala | 1 + src/reflect/scala/reflect/internal/Trees.scala | 8 ++++++++ 4 files changed, 14 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 3ccd8ec4ae..0e65bfca6d 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -52,7 +52,10 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL { } // wrap the given expression in a SoftReference so it can be gc-ed - def mkSoftRef(expr: Tree): Tree = atPos(expr.pos)(New(SoftReferenceClass.tpe, expr)) + def mkSoftRef(expr: Tree): Tree = atPos(expr.pos) { + val constructor = SoftReferenceClass.info.nonPrivateMember(nme.CONSTRUCTOR).suchThat(_.paramss.flatten.size == 1) + NewFromConstructor(constructor, List(expr)) + } // annotate the expression with @unchecked def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) { diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index afc109c47a..283b923bc5 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -129,7 +129,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { if (from.name != nme.OUTER) result else localTyper.typedPos(to.pos) { - IF (from OBJ_EQ NULL) THEN Throw(NullPointerExceptionClass.tpe) ELSE result + IF (from OBJ_EQ NULL) THEN Throw(NewFromConstructor(NPEConstructor, Nil)) ELSE result } } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index a8e9fd3586..d14a9e50a6 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -282,6 +282,7 @@ trait Definitions extends api.StandardDefinitions { lazy val MatchErrorClass = requiredClass[MatchError] lazy val NonLocalReturnControlClass = requiredClass[scala.runtime.NonLocalReturnControl[_]] lazy val NullPointerExceptionClass = getClassByName(sn.NPException) + lazy val NPEConstructor = getMemberMethod(NullPointerExceptionClass, nme.CONSTRUCTOR) suchThat (_.paramss.flatten.isEmpty) lazy val ThrowableClass = getClassByName(sn.Throwable) lazy val UninitializedErrorClass = requiredClass[UninitializedFieldError] diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 0180ed4c4f..2ba6c187b7 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -400,6 +400,14 @@ trait Trees extends api.Trees { self: SymbolTable => def ApplyConstructor(tpt: Tree, args: List[Tree]) = Apply(Select(New(tpt), nme.CONSTRUCTOR), args) + def NewFromConstructor(constructor: Symbol, args: List[Tree]) = { + assert(constructor.isConstructor, constructor) + val instance = New(TypeTree(constructor.owner.tpe)) + val init = Select(instance, nme.CONSTRUCTOR) setSymbol constructor + + Apply(init, args) + } + case class ApplyDynamic(qual: Tree, args: List[Tree]) extends SymTree with TermTree with ApplyDynamicApi object ApplyDynamic extends ApplyDynamicExtractor -- cgit v1.2.3 From d48021cc33ef150a822e375fc0e2acf706852beb Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 31 Aug 2012 11:46:48 -0700 
Subject: Eliminated more overloaded types after typer. Synthetic calls to .## and the UninitializedField constructor carried overloaded types. --- .../scala/tools/nsc/transform/Erasure.scala | 14 +++++--- src/compiler/scala/tools/nsc/transform/Mixin.scala | 40 +++++++++++----------- .../scala/reflect/internal/Definitions.scala | 1 + 3 files changed, 31 insertions(+), 24 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 892da7987f..c9de497dea 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -959,7 +959,7 @@ abstract class Erasure extends AddInterfaces case TypeApply(sel @ Select(qual, name), List(targ)) => if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefClass.tpe) unit.error(sel.pos, "isInstanceOf cannot test if value types are references.") - + def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree = Apply( TypeApply( @@ -1055,7 +1055,8 @@ abstract class Erasure extends AddInterfaces SelectFromArray(qual, name, erasure(tree.symbol)(qual.tpe)).copyAttrs(fn), args) } - } else if (args.isEmpty && interceptedMethods(fn.symbol)) { + } + else if (args.isEmpty && interceptedMethods(fn.symbol)) { if (fn.symbol == Any_## || fn.symbol == Object_##) { // This is unattractive, but without it we crash here on ().## because after // erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int. @@ -1067,9 +1068,14 @@ abstract class Erasure extends AddInterfaces case s @ (ShortClass | ByteClass | CharClass) => numericConversion(qual, s) case BooleanClass => If(qual, LIT(true.##), LIT(false.##)) case _ => - global.typer.typed(gen.mkRuntimeCall(nme.hash_, List(qual))) + val alts = ScalaRunTimeModule.info.member(nme.hash_).alternatives + def alt1 = alts find (_.info.paramTypes.head =:= qual.tpe) + def alt2 = ScalaRunTimeModule.info.member(nme.hash_) suchThat (_.info.paramTypes.head.typeSymbol == AnyClass) + val newTree = gen.mkRuntimeCall(nme.hash_, qual :: Nil) setSymbol (alt1 getOrElse alt2) + + global.typer.typed(newTree) } - } else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) { + } else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) { // Rewrite 5.getClass to ScalaRunTime.anyValClass(5) global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen)))) } else { diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 717d9209ae..af954bc343 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -492,19 +492,19 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * fields count as fields defined by the class itself. */ private val fieldOffset = perRunCaches.newMap[Symbol, Int]() - + private val bitmapKindForCategory = perRunCaches.newMap[Name, ClassSymbol]() - + // ByteClass, IntClass, LongClass private def bitmapKind(field: Symbol): ClassSymbol = bitmapKindForCategory(bitmapCategory(field)) - + private def flagsPerBitmap(field: Symbol): Int = bitmapKind(field) match { case BooleanClass => 1 case ByteClass => 8 case IntClass => 32 case LongClass => 64 } - + /** The first transform; called in a pre-order traversal at phase mixin * (that is, every node is processed before its children). 
@@ -718,7 +718,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val sym = clazz0.info.decl(bitmapName) assert(!sym.isOverloaded, sym) - + def createBitmap: Symbol = { val bitmapKind = bitmapKindForCategory(category) val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo bitmapKind.tpe @@ -732,7 +732,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { case BooleanClass => VAL(sym) === FALSE case _ => VAL(sym) === ZERO } - + sym setFlag PrivateLocal clazz0.info.decls.enter(sym) addDef(clazz0.pos, init) @@ -744,7 +744,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { else createBitmap } - + def maskForOffset(offset: Int, sym: Symbol, kind: ClassSymbol): Tree = { def realOffset = offset % flagsPerBitmap(sym) if (kind == LongClass ) LIT(1L << realOffset) else LIT(1 << realOffset) @@ -755,9 +755,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val bmp = bitmapFor(clazz, offset, valSym) def mask = maskForOffset(offset, valSym, kind) def x = This(clazz) DOT bmp - def newValue = if (kind == BooleanClass) TRUE else (x GEN_| (mask, kind)) + def newValue = if (kind == BooleanClass) TRUE else (x GEN_| (mask, kind)) - x === newValue + x === newValue } /** Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the @@ -775,7 +775,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { else lhs GEN_!= (ZERO, kind) } } - + def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Symbol = { val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, PRIVATE) @@ -791,14 +791,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { stats: List[Tree], retVal: Tree): Tree = { mkFastPathBody(clazz, lzyVal, cond, syncBody, stats, retVal, gen.mkAttributedThis(clazz), List()) } - + def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Tree = { val slowPathSym: Symbol = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal, attrThis, args) If(cond, fn (This(clazz), slowPathSym, args.map(arg => Ident(arg.symbol)): _*), retVal) } - - + + /** Always copy the tree if we are going to perform sym substitution, * otherwise we will side-effect on the tree that is used in the fast path */ @@ -807,7 +807,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { if (tree.hasSymbol && from.contains(tree.symbol)) super.transform(tree.duplicate) else super.transform(tree.duplicate) - + override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree) } @@ -827,8 +827,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * The result will be a tree of the form * { if ((bitmap&n & MASK) == 0) this.l$compute() * else l$ - * - * ... + * + * ... * def l$compute() = { synchronized(this) { * if ((bitmap$n & MASK) == 0) { * init // l$ = @@ -836,7 +836,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * }} * l$ * } - * + * * ... * this.f1 = null * ... this.fn = null @@ -846,7 +846,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * For Int bitmap it is 32 and then 'n' in the above code is: (offset / 32), * the MASK is (1 << (offset % 32)). * If the class contains only a single lazy val then the bitmap is represented - * as a Boolean and the condition checking is a simple bool test. 
+ * as a Boolean and the condition checking is a simple bool test. */ def mkLazyDef(clazz: Symbol, lzyVal: Symbol, init: List[Tree], retVal: Tree, offset: Int): Tree = { def nullify(sym: Symbol) = Select(This(clazz), sym.accessedOrSelf) === LIT(null) @@ -878,13 +878,13 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position, fieldSym: Symbol): Tree = { val sym = fieldSym.getter(fieldSym.owner) val bitmapSym = bitmapFor(clazz, offset, sym) - val kind = bitmapKind(sym) + val kind = bitmapKind(sym) val mask = maskForOffset(offset, sym, kind) val msg = "Uninitialized field: " + unit.source + ": " + pos.line val result = IF (mkTest(clazz, mask, bitmapSym, false, kind)) . THEN (retVal) . - ELSE (THROW(UninitializedErrorClass, LIT(msg))) + ELSE (Throw(NewFromConstructor(UninitializedConstructor, List(LIT(msg))))) typedPos(pos)(BLOCK(result, retVal)) } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index d14a9e50a6..d0299701bb 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -283,6 +283,7 @@ trait Definitions extends api.StandardDefinitions { lazy val NonLocalReturnControlClass = requiredClass[scala.runtime.NonLocalReturnControl[_]] lazy val NullPointerExceptionClass = getClassByName(sn.NPException) lazy val NPEConstructor = getMemberMethod(NullPointerExceptionClass, nme.CONSTRUCTOR) suchThat (_.paramss.flatten.isEmpty) + lazy val UninitializedConstructor = getMemberMethod(UninitializedErrorClass, nme.CONSTRUCTOR) suchThat (_.paramss.flatten.size == 1) lazy val ThrowableClass = getClassByName(sn.Throwable) lazy val UninitializedErrorClass = requiredClass[UninitializedFieldError] -- cgit v1.2.3 From 7e4d8a42ff87224a1063449f93f2975bda0d7c01 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 31 Aug 2012 16:18:15 -0700 Subject: Minor library changes to help overloading issues. --- src/library/scala/UninitializedFieldError.scala | 6 ++---- src/library/scala/runtime/ScalaRunTime.scala | 3 +-- src/reflect/scala/reflect/internal/SymbolTable.scala | 2 +- 3 files changed, 4 insertions(+), 7 deletions(-) (limited to 'src') diff --git a/src/library/scala/UninitializedFieldError.scala b/src/library/scala/UninitializedFieldError.scala index a6e510a849..9485019aa0 100644 --- a/src/library/scala/UninitializedFieldError.scala +++ b/src/library/scala/UninitializedFieldError.scala @@ -18,8 +18,6 @@ package scala * * @since 2.7 */ -final case class UninitializedFieldError(msg: String) - extends RuntimeException(msg) { - def this(obj: Any) = - this(if (null != obj) obj.toString() else "null") +final case class UninitializedFieldError(msg: String) extends RuntimeException(msg) { + def this(obj: Any) = this("" + obj) } diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index e5f5e9dc5d..d32ece144a 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -24,8 +24,7 @@ import java.lang.reflect.{ Modifier, Method => JMethod } * outside the API and subject to change or removal without notice. 
*/ object ScalaRunTime { - def isArray(x: AnyRef): Boolean = isArray(x, 1) - def isArray(x: Any, atLevel: Int): Boolean = + def isArray(x: Any, atLevel: Int = 1): Boolean = x != null && isArrayClass(x.getClass, atLevel) private def isArrayClass(clazz: Class[_], atLevel: Int): Boolean = diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 625dc8b7a0..c0b933698b 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -63,7 +63,7 @@ abstract class SymbolTable extends macros.Universe private[scala] def printCaller[T](msg: String)(result: T) = { Console.err.println("%s: %s\nCalled from: %s".format(msg, result, - (new Throwable).getStackTrace.drop(2).take(15).mkString("\n"))) + (new Throwable).getStackTrace.drop(2).take(50).mkString("\n"))) result } -- cgit v1.2.3 From 1d70cacca1864ae536e2e1d31d117d43dbc92c24 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 31 Aug 2012 16:18:42 -0700 Subject: Hardening specialization and others. Cleaned up some logic which has become unreasonably circuitous over time. Gave "mkSuperSelect" an accurate name (it's now "mkSuperInitCall".) Put in better logging for spotting OverloadedTypes which should not be. --- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 8 +- src/compiler/scala/tools/nsc/ast/Trees.scala | 2 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 4 +- .../scala/tools/nsc/transform/AddInterfaces.scala | 2 +- .../scala/tools/nsc/transform/Constructors.scala | 2 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 2 +- .../tools/nsc/transform/SpecializeTypes.scala | 155 +++++++++++---------- .../scala/tools/nsc/typechecker/Duplicators.scala | 12 +- .../scala/tools/nsc/typechecker/Typers.scala | 25 ++-- .../scala/reflect/internal/Definitions.scala | 5 +- src/reflect/scala/reflect/internal/Trees.scala | 6 +- 11 files changed, 113 insertions(+), 110 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 0e65bfca6d..3177ceefcd 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -54,7 +54,7 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL { // wrap the given expression in a SoftReference so it can be gc-ed def mkSoftRef(expr: Tree): Tree = atPos(expr.pos) { val constructor = SoftReferenceClass.info.nonPrivateMember(nme.CONSTRUCTOR).suchThat(_.paramss.flatten.size == 1) - NewFromConstructor(constructor, List(expr)) + NewFromConstructor(constructor, expr) } // annotate the expression with @unchecked @@ -209,7 +209,7 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL { else AppliedTypeTree(Ident(clazz), targs map TypeTree) )) } - def mkSuperSelect = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR) + def mkSuperInitCall: Select = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR) def wildcardStar(tree: Tree) = atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) } @@ -360,8 +360,8 @@ abstract class TreeGen extends reflect.internal.TreeGen with TreeDSL { */ def mkSynchronizedCheck(clazz: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree = mkSynchronizedCheck(mkAttributedThis(clazz), cond, syncBody, stats) - - def mkSynchronizedCheck(attrThis: Tree, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree = + + def mkSynchronizedCheck(attrThis: Tree, cond: Tree, 
syncBody: List[Tree], stats: List[Tree]): Tree = Block(mkSynchronized( attrThis, If(cond, Block(syncBody: _*), EmptyTree)) :: diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 085ce82025..30fc1778b1 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -116,7 +116,7 @@ trait Trees extends reflect.internal.Trees { self: Global => // convert (implicit ... ) to ()(implicit ... ) if its the only parameter section if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit) vparamss1 = List() :: vparamss1; - val superRef: Tree = atPos(superPos)(gen.mkSuperSelect) + val superRef: Tree = atPos(superPos)(gen.mkSuperInitCall) val superCall = (superRef /: argss) (Apply.apply) List( atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) ( diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 17bea7f796..1d101b2e7d 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -397,7 +397,7 @@ self => Nil, ListOfNil, TypeTree(), - Block(List(Apply(gen.mkSuperSelect, Nil)), Literal(Constant(()))) + Block(List(Apply(gen.mkSuperInitCall, Nil)), Literal(Constant(()))) ) // def main @@ -1302,7 +1302,7 @@ self => placeholderParams = placeholderParams ::: savedPlaceholderParams res } - + def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match { case IF => diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index e4b5e92ae2..182a9505f8 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -318,7 +318,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => // body until now, because the typer knows that Any has no // constructor and won't accept a call to super.init. assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz) - Block(List(Apply(gen.mkSuperSelect, Nil)), expr) + Block(List(Apply(gen.mkSuperInitCall, Nil)), expr) case Block(stats, expr) => // needs `hasSymbol` check because `supercall` could be a block (named / default args) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 283b923bc5..ff1225d291 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -129,7 +129,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { if (from.name != nme.OUTER) result else localTyper.typedPos(to.pos) { - IF (from OBJ_EQ NULL) THEN Throw(NewFromConstructor(NPEConstructor, Nil)) ELSE result + IF (from OBJ_EQ NULL) THEN Throw(NewFromConstructor(NPEConstructor)) ELSE result } } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index af954bc343..e5d365f1d2 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -884,7 +884,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val result = IF (mkTest(clazz, mask, bitmapSym, false, kind)) . THEN (retVal) . 
- ELSE (Throw(NewFromConstructor(UninitializedConstructor, List(LIT(msg))))) + ELSE (Throw(NewFromConstructor(UninitializedFieldConstructor, LIT(msg)))) typedPos(pos)(BLOCK(result, retVal)) } diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 8eaf12fac0..9c17be23cc 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -119,6 +119,22 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } + @annotation.tailrec private def findSymbol[T](candidates: List[T], f: T => Symbol): Symbol = { + if (candidates.isEmpty) NoSymbol + else f(candidates.head) match { + case NoSymbol => findSymbol(candidates.tail, f) + case sym => sym + } + } + private def hasNewParents(tree: Tree) = { + val parents = tree.symbol.info.parents + val prev = enteringPrevPhase(tree.symbol.info.parents) + (parents != prev) && { + debuglog(s"$tree parents changed from: $prev to: $parents") + true + } + } + // If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe), // then pos/spec-List.scala fails - why? Does this kind of check fail // for similar reasons? Does `sym.isAbstractType` make a difference? @@ -178,7 +194,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case class Overload(sym: Symbol, env: TypeEnv) { override def toString = "specialized overload " + sym + " in " + env - def matchesSym(other: Symbol) = sym.tpe =:= other.tpe + def matchesSym(sym1: Symbol) = sym.info =:= sym1.info def matchesEnv(env1: TypeEnv) = TypeEnv.includes(env, env1) } private def newOverload(method: Symbol, specializedMethod: Symbol, env: TypeEnv) = { @@ -1424,35 +1440,61 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } else None } + def matchingSymbolInPrefix(pre: Type, member: Symbol, env: TypeEnv): Symbol = { + pre member specializedName(member, env) suchThat (_.tpe matches subst(env, member.tpe)) + } + + def transformSelect(sel: Select) = { + val Select(qual, name) = sel + debuglog(s"specializing Select(sym=${symbol.defString}, tree.tpe=${tree.tpe})") + + val qual1 = transform(qual) + def copySelect = treeCopy.Select(tree, qual1, name) + def newSelect(member: Symbol) = atPos(tree.pos)(Select(qual1, member)) + def typedOp(member: Symbol) = localTyper typedOperator newSelect(member) + def typedTree(member: Symbol) = localTyper typed newSelect(member) + + val ignoreEnv = specializedTypeVars(symbol.info).isEmpty || name == nme.CONSTRUCTOR + if (ignoreEnv) overloads(symbol) find (_ matchesSym symbol) match { + case Some(Overload(member, _)) => typedOp(member) + case _ => copySelect + } + else { + val env = unify(symbol.tpe, tree.tpe, emptyEnv, false) + overloads(symbol) find (_ matchesEnv env) match { + case Some(Overload(member, _)) => typedOp(member) + case _ => + matchingSymbolInPrefix(qual1.tpe, symbol, env) match { + case NoSymbol => copySelect + case member if member.isMethod => typedOp(member) + case member => typedTree(member) + } + } + } + } + curTree = tree tree match { case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => def transformNew = { - debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", "))) - val found = findSpec(tpt.tpe) - if (found.typeSymbol ne tpt.tpe.typeSymbol) { - // the ctor can be specialized - debuglog("** instantiated specialized type: " + found) - reportError { - localTyper.typedPos(tree.pos)(New(found, 
transformTrees(args): _*)) - } { - _ => super.transform(tree) + debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", "))) + val found = specializedType(tpt.tpe) + if (found.typeSymbol ne tpt.tpe.typeSymbol) { // the ctor can be specialized + val inst = New(found, transformTrees(args): _*) + reportError(localTyper.typedPos(tree.pos)(inst))(_ => super.transform(tree)) } - } else super.transform(tree) + else + super.transform(tree) } transformNew - case Apply(sel @ Select(sup @ Super(qual, name), name1), args) - if (sup.symbol.info.parents != enteringPrevPhase(sup.symbol.info.parents)) => + case Apply(sel @ Select(sup @ Super(qual, name), name1), args) if hasNewParents(sup) => def transformSuperApply = { - - def parents = sup.symbol.info.parents - debuglog(tree + " parents changed from: " + enteringPrevPhase(parents) + " to: " + parents) - - val res = localTyper.typed( - Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos) - debuglog("retyping call to super, from: " + symbol + " to " + res.symbol) - res + val sup1 = Super(qual, name) setPos sup.pos + val tree1 = Apply(Select(sup1, name1) setPos sel.pos, transformTrees(args)) + val res = localTyper.typedPos(tree.pos)(tree1) + debuglog(s"retyping call to super, from: $symbol to ${res.symbol}") + res } transformSuperApply @@ -1499,36 +1541,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.\n" + pos.lineContent) tree - case Select(qual, name) if name != nme.CONSTRUCTOR && specializedTypeVars(symbol.info).nonEmpty => - debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe)) - val env = unify(symbol.tpe, tree.tpe, emptyEnv, false) - if (env.isEmpty) super.transform(tree) - else { - val qual1 = transform(qual) - def reselect(member: Symbol) = { - val newSelect = atPos(tree.pos)(Select(qual1, member)) - if (member.isMethod) localTyper typedOperator newSelect - else localTyper typed newSelect - } - overloads(symbol) find (_ matchesEnv env) match { - case Some(Overload(member, _)) => reselect(member) - case _ => - val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe)) - if (specMember ne NoSymbol) - reselect(specMember) - else - treeCopy.Select(tree, qual1, name) - } - } - case Select(qual, _) => - overloads(symbol) find (_ matchesSym symbol) match { - case Some(Overload(member, _)) => - val newTree = Select(transform(qual), member) - debuglog(s"** routing $tree to ${member.fullName} tree: $newTree") - localTyper.typedOperator(atPos(tree.pos)(newTree)) - case None => - super.transform(tree) - } + case sel @ Select(_, _) => + transformSelect(sel) case PackageDef(pid, stats) => tree.symbol.info // make sure specializations have been performed @@ -1553,41 +1567,31 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { transformTemplate case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) => - def transformDefDef = { - // log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol)) - def reportTypeError(body: =>Tree) = reportError(body)(_ => ddef) - + def transformDefDef = { if (symbol.isConstructor) { - - val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner)) - + val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperInitCall, 
vparamss, symbol.owner)) if (symbol.isPrimaryConstructor) localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant())))) else // duplicate the original constructor - reportTypeError(duplicateBody(ddef, info(symbol).target)) + reportError(duplicateBody(ddef, info(symbol).target))(_ => ddef) } else info(symbol) match { case Implementation(target) => assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName) // we have an rhs, specialize it - val tree1 = reportTypeError { - duplicateBody(ddef, target) - } + val tree1 = reportError(duplicateBody(ddef, target))(_ => ddef) debuglog("implementation: " + tree1) deriveDefDef(tree1)(transform) case NormalizedMember(target) => - val constraints = satisfiabilityConstraints(typeEnv(symbol)) - log("constraints: " + constraints) - if (target.isDeferred || constraints == None) { - deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called.")) - } else { - // we have an rhs, specialize it - val tree1 = reportTypeError { - duplicateBody(ddef, target, constraints.get) - } - debuglog("implementation: " + tree1) - deriveDefDef(tree1)(transform) + logResult("constraints")(satisfiabilityConstraints(typeEnv(symbol))) match { + case Some(constraint) if !target.isDeferred => + // we have an rhs, specialize it + val tree1 = reportError(duplicateBody(ddef, target, constraint))(_ => ddef) + debuglog("implementation: " + tree1) + deriveDefDef(tree1)(transform) + case _ => + deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called.")) } case SpecialOverride(target) => @@ -1837,6 +1841,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * }} */ private def forwardCtorCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = { + log(s"forwardCtorCall($pos, $receiver, $paramss, $clazz)") /** A constructor parameter `f` initializes a specialized field * iff: @@ -1873,10 +1878,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { //! TODO: make sure the param types are seen from the right prefix map2(fun.info.paramTypes, vparams)((tp, arg) => gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe)) ) - private def findSpec(tp: Type): Type = tp match { - case TypeRef(pre, sym, _ :: _) => specializedType(tp) - case _ => tp - } class SpecializationTransformer(unit: CompilationUnit) extends Transformer { informProgress("specializing " + unit) diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 41fbaa168e..745250e167 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -336,9 +336,15 @@ abstract class Duplicators extends Analyzer { case ((alt, tpe)) :: Nil => log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n Overload was: $memberString") Select(This(newClassOwner), alt) - case _ => - log(s"Could not disambiguate $memberString in Duplicators. 
Attempting name-based selection, but this may not end well...") - nameSelection + case xs => + alts filter (alt => (alt.paramss corresponds tree.symbol.paramss)(_.size == _.size)) match { + case alt :: Nil => + log(s"Resorted to parameter list arity to disambiguate to $alt\n Overload was: $memberString") + Select(This(newClassOwner), alt) + case _ => + log(s"Could not disambiguate $memberTypes. Attempting name-based selection, but we may crash later.") + nameSelection + } } } else nameSelection diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index f8826dc27f..d2e6616f38 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4504,25 +4504,18 @@ trait Typers extends Modes with Adaptations with Tags { */ def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = { val t = typedSelectInternal(tree, qual, name) + // Checking for OverloadedTypes being handed out after overloading + // resolution has already happened. if (isPastTyper) t.tpe match { case OverloadedType(pre, alts) => if (alts forall (s => (s.owner == ObjectClass) || (s.owner == AnyClass) || isPrimitiveValueClass(s.owner))) () - else { - val msg = - s"""|Select received overloaded type during $phase, but typer is over. - |We are likely doomed to crash in the backend. - |$t has these overloads: - |${alts map (s => " " + s.defStringSeenAs(pre memberType s)) mkString "\n"} - |""".stripMargin - - if (context.reportErrors) - unit.warning(t.pos, msg) - else - Console.err.println(msg) - - if (settings.debug.value) - (new Throwable).printStackTrace - } + else if (settings.debug.value) printCaller( + s"""|Select received overloaded type during $phase, but typer is over. + |If this type reaches the backend, we are likely doomed to crash. 
+ |$t has these overloads: + |${alts map (s => " " + s.defStringSeenAs(pre memberType s)) mkString "\n"} + |""".stripMargin + )("") case _ => } t diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index d0299701bb..3da7997f6d 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -282,11 +282,12 @@ trait Definitions extends api.StandardDefinitions { lazy val MatchErrorClass = requiredClass[MatchError] lazy val NonLocalReturnControlClass = requiredClass[scala.runtime.NonLocalReturnControl[_]] lazy val NullPointerExceptionClass = getClassByName(sn.NPException) - lazy val NPEConstructor = getMemberMethod(NullPointerExceptionClass, nme.CONSTRUCTOR) suchThat (_.paramss.flatten.isEmpty) - lazy val UninitializedConstructor = getMemberMethod(UninitializedErrorClass, nme.CONSTRUCTOR) suchThat (_.paramss.flatten.size == 1) lazy val ThrowableClass = getClassByName(sn.Throwable) lazy val UninitializedErrorClass = requiredClass[UninitializedFieldError] + lazy val NPEConstructor = getMemberMethod(NullPointerExceptionClass, nme.CONSTRUCTOR) suchThat (_.paramss.flatten.isEmpty) + lazy val UninitializedFieldConstructor = UninitializedErrorClass.primaryConstructor + // fundamental reference classes lazy val PartialFunctionClass = requiredClass[PartialFunction[_,_]] lazy val AbstractPartialFunctionClass = requiredClass[scala.runtime.AbstractPartialFunction[_,_]] diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 2ba6c187b7..4455341faa 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -400,12 +400,14 @@ trait Trees extends api.Trees { self: SymbolTable => def ApplyConstructor(tpt: Tree, args: List[Tree]) = Apply(Select(New(tpt), nme.CONSTRUCTOR), args) - def NewFromConstructor(constructor: Symbol, args: List[Tree]) = { + // Creates a constructor call from the constructor symbol. This is + // to avoid winding up with an OverloadedType for the constructor call. + def NewFromConstructor(constructor: Symbol, args: Tree*) = { assert(constructor.isConstructor, constructor) val instance = New(TypeTree(constructor.owner.tpe)) val init = Select(instance, nme.CONSTRUCTOR) setSymbol constructor - Apply(init, args) + Apply(init, args.toList) } case class ApplyDynamic(qual: Tree, args: List[Tree]) -- cgit v1.2.3 From 8706ad0402dce21da4c364ed416c37e6c0d74c23 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 31 Aug 2012 16:30:48 -0700 Subject: Culling debugging code. I think one could actually choke and die on the quantity of debugging code we have some places. Look at this diff. I think you are a lot better off being able to see the logic. Let's figure out a way to keep ephemeral tracing code from burying us (both at source level and in log output.) 
--- .../tools/nsc/transform/SpecializeTypes.scala | 37 +++++++--------------- 1 file changed, 11 insertions(+), 26 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 9c17be23cc..6a6d0cdea1 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1416,28 +1416,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def transform1(tree: Tree) = { val symbol = tree.symbol - /** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */ - def specSym(qual: Tree): Option[Symbol] = { + def specSym(qual: Tree): Symbol = { val env = unify(symbol.tpe, tree.tpe, emptyEnv, false) - debuglog("[specSym] checking for rerouting: %s with \n\tsym.tpe: %s, \n\ttree.tpe: %s \n\tenv: %s \n\tname: %s" - .format(tree, symbol.tpe, tree.tpe, env, specializedName(symbol, env))) - if (!env.isEmpty) { // a method? - val specCandidates = qual.tpe.member(specializedName(symbol, env)) - val specMember = specCandidates suchThat { s => - doesConform(symbol, tree.tpe, qual.tpe.memberType(s), env) - } - - debuglog("[specSym] found: " + specCandidates.tpe + ", instantiated as: " + tree.tpe) - debuglog("[specSym] found specMember: " + specMember) - if (specMember ne NoSymbol) - if (TypeEnv.includes(typeEnv(specMember), env)) Some(specMember) - else { - debuglog("wrong environments for specialized member: \n\ttypeEnv(%s) = %s\n\tenv = %s".format(specMember, typeEnv(specMember), env)) - None - } - else None - } else None + def isMatch(member: Symbol) = ( + doesConform(symbol, tree.tpe, qual.tpe memberType member, env) + && TypeEnv.includes(typeEnv(member), env) + ) + if (env.isEmpty) NoSymbol + else qual.tpe member specializedName(symbol, env) suchThat isMatch } def matchingSymbolInPrefix(pre: Type, member: Symbol, env: TypeEnv): Symbol = { @@ -1505,7 +1492,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val qual1 = transform(qual) // log(">>> TypeApply: " + tree + ", qual1: " + qual1) specSym(qual1) match { - case Some(specMember) => + case NoSymbol => + // See pos/exponential-spec.scala - can't call transform on the whole tree again. + treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), transformTrees(targs)) + case specMember => debuglog("found " + specMember.fullName) ifDebug(assert(symbol.info.typeParams.length == targs.length, symbol.info.typeParams + " / " + targs)) @@ -1527,11 +1517,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { debuglog("rewrote " + tree + " to " + tree1) localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method } - - case None => - treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), super.transformTrees(targs)) - // See pos/exponential-spec.scala - can't call transform on the whole tree again. - // super.transform(tree) } } transformTypeApply -- cgit v1.2.3 From 6917599b9bb5a316e0ce9e63927dae8c0f09c861 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 23 Aug 2012 11:25:01 -0700 Subject: SI-6278 fixed: synthetic implicit def must sort with its associated implicit class Add a case to the ad-hoc (or add-hack) addSynthetics to keep the trees close. This relies on naming convention, so changes in naming of the implicit def would require an update here. 
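The naming convention being relied on is the standard desugaring of an implicit class: the compiler synthesizes an implicit def with the same name as the class, which is why the new case can pair a ClassDef and a DefDef purely by `cmods.isImplicit && dmods.isImplicit && cname.toTermName == dname`. In user-level terms (a rough sketch of the expansion; the regression test added below, t6278-synth-def.scala, exercises exactly this shape):

    implicit class Foo(val i: Int) {           // written by the user
      def foo[A](body: => A): A = body
    }
    // is expanded by the compiler into approximately:
    //   class Foo(val i: Int) { def foo[A](body: => A): A = body }
    //   implicit def Foo(i: Int): Foo = new Foo(i)   // synthetic, same name as the class
    // addSynthetics now keeps the synthetic def adjacent to its class, so blocks like
    //   val k = 1
    //   k foo println("k?")
    // in the test still see the conversion where it is needed and compile.
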
--- .../scala/tools/nsc/typechecker/Typers.scala | 6 +++++ test/files/pos/t6278-synth-def.scala | 30 ++++++++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 test/files/pos/t6278-synth-def.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1770f2419a..043658dab5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2739,12 +2739,18 @@ trait Typers extends Modes with Adaptations with Tags { // this code by associating defaults and companion objects // with the original tree instead of the new symbol. def matches(stat: Tree, synt: Tree) = (stat, synt) match { + // synt is default arg for stat case (DefDef(_, statName, _, _, _, _), DefDef(mods, syntName, _, _, _, _)) => mods.hasDefaultFlag && syntName.toString.startsWith(statName.toString) + // synt is companion module case (ClassDef(_, className, _, _), ModuleDef(_, moduleName, _)) => className.toTermName == moduleName + // synt is implicit def for implicit class (#6278) + case (ClassDef(cmods, cname, _, _), DefDef(dmods, dname, _, _, _, _)) => + cmods.isImplicit && dmods.isImplicit && cname.toTermName == dname + case _ => false } diff --git a/test/files/pos/t6278-synth-def.scala b/test/files/pos/t6278-synth-def.scala new file mode 100644 index 0000000000..b8b660fbe3 --- /dev/null +++ b/test/files/pos/t6278-synth-def.scala @@ -0,0 +1,30 @@ + +package t6278 + +import language.implicitConversions + +object test { + def ok() { + class Foo(val i: Int) { + def foo[A](body: =>A): A = body + } + implicit def toFoo(i: Int): Foo = new Foo(i) + + val k = 1 + k foo println("k?") + val j = 2 + } + def nope() { + implicit class Foo(val i: Int) { + def foo[A](body: =>A): A = body + } + + val k = 1 + k foo println("k?") + //lazy + val j = 2 + } + def main(args: Array[String]) { + ok(); nope() + } +} -- cgit v1.2.3 From 4692ce2cb14751d3f0f1bf78397586c2a465add6 Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Sun, 2 Sep 2012 23:59:48 +0400 Subject: Used methods according to @paulp suggestions. --- src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index fd672613e4..2b9638076c 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -546,7 +546,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { // also remove property("package object") from test/scaladoc/scalacheck/HtmlFactoryTest.scala so you don't break // the test suite... 
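For callers the net effect is that the common no-external-id case becomes a one-liner. A small usage sketch (the XML.save call is only illustrative of where a DocType is typically consumed):

    import scala.xml.dtd.{ DocType, NoExternalID }

    val html5 = DocType("html")   // equivalent to DocType("html", NoExternalID, Nil)
    scala.xml.XML.save("page.html", <html/>, "UTF-8", false, html5)
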
val packageObject = if (inPackageObject) ".package" else "" - val qualifiedName = conversion map (_.conversionQualifiedName) getOrElse inDefinitionTemplates.head.qualifiedName + val qualifiedName = conversion.fold(inDefinitionTemplates.head.qualifiedName)(_.conversionQualifiedName) optimize(qualifiedName + packageObject + "#" + name) } def isBridge = sym.isBridge @@ -879,7 +879,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { assert(modelFinished) def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = - noDocTemplatesCache get aSym getOrElse new NoDocTemplateImpl(aSym, inTpl) + noDocTemplatesCache getOrElse (aSym, new NoDocTemplateImpl(aSym, inTpl)) findTemplateMaybe(aSym) getOrElse { val bSym = normalizeTemplate(aSym) -- cgit v1.2.3 From 72315f51116ee8cc97b966a33a63c9056e5258b0 Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Mon, 3 Sep 2012 20:55:44 +0400 Subject: Clear undo log after each unit has been type-checked to prevent 300M memory lost in scaladoc. Review by @adriaanm. --- src/compiler/scala/tools/nsc/typechecker/Analyzer.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index ab8836f339..8218289f93 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -87,10 +87,10 @@ trait Analyzer extends AnyRef override def run() { val start = if (Statistics.canEnable) Statistics.startTimer(typerNanos) else null global.echoPhaseSummary(this) - currentRun.units foreach applyPhase - undoLog.clear() - // need to clear it after as well or 10K+ accumulated entries are - // uncollectable the rest of the way. + for (unit <- currentRun.units) { + applyPhase(unit) + undoLog.clear() + } if (Statistics.canEnable) Statistics.stopTimer(typerNanos, start) } def apply(unit: CompilationUnit) { -- cgit v1.2.3 From c49e23572883d427e9471015cd8554c875a9a492 Mon Sep 17 00:00:00 2001 From: Michael Bayne Date: Sat, 1 Sep 2012 13:00:46 -0700 Subject: SI-6295: Introduced NoExternalID, fixed DocType's documentation. DocType claimed to take Option[ExternalID], but instead took ExternalID, which provided no means to construct a DocType that contains no external id. This introduces a NoExternalID marker object which means what it says. Also added a convenience apply method that assumes no external id, nor internal subset declarations. This allows one to construct DocType("html"), which suffices if one intends only to support modern web browsers. 
--- src/library/scala/xml/dtd/DocType.scala | 8 +++++++- src/library/scala/xml/dtd/ExternalID.scala | 11 +++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala index 64aa7e2f74..78aacb2cea 100644 --- a/src/library/scala/xml/dtd/DocType.scala +++ b/src/library/scala/xml/dtd/DocType.scala @@ -15,7 +15,7 @@ package dtd * @author Burak Emir * * @param name name of this DOCTYPE - * @param extID None, or Some(external ID of this doctype) + * @param extID NoExternalID or the external ID of this doctype * @param intSubset sequence of internal subset declarations */ case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl]) @@ -32,3 +32,9 @@ case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl]) """""".format(name, extID.toString, intString) } } + +object DocType +{ + /** Creates a doctype with no external id, nor internal subset declarations. */ + def apply(name: String): DocType = apply(name, NoExternalID, Nil) +} diff --git a/src/library/scala/xml/dtd/ExternalID.scala b/src/library/scala/xml/dtd/ExternalID.scala index a0a5818d07..ccee5dbe5a 100644 --- a/src/library/scala/xml/dtd/ExternalID.scala +++ b/src/library/scala/xml/dtd/ExternalID.scala @@ -73,3 +73,14 @@ case class PublicID(publicId: String, systemId: String) extends ExternalID { /** always empty */ def child = Nil } + +/** A marker used when a `DocType` contains no external id. + * + * @author Michael Bayne + */ +object NoExternalID extends ExternalID { + val publicId = null + val systemId = null + + override def toString = "" +} -- cgit v1.2.3 From 4a0472561db2e96a6698c49b3ec6a1bd588678e5 Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Tue, 4 Sep 2012 20:38:23 +0400 Subject: Cleanup makeShadowingTable, save some memory. Review by @VladUreche or @heathermiller. 
--- .../doc/model/ModelFactoryImplicitSupport.scala | 66 +++++++++------------- 1 file changed, 26 insertions(+), 40 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index 89195020c4..4089e96f51 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -424,66 +424,52 @@ trait ModelFactoryImplicitSupport { /* ========================= HELPER METHODS ========================== */ /** * Computes the shadowing table for all the members in the implicit conversions - * @param mbrs All template's members, including usecases and full signature members + * @param members All template's members, including usecases and full signature members * @param convs All the conversions the template takes part in - * @param inTpl the ususal :) + * @param inTpl the usual :) */ - def makeShadowingTable(mbrs: List[MemberImpl], + def makeShadowingTable(members: List[MemberImpl], convs: List[ImplicitConversionImpl], inTpl: DocTemplateImpl): Map[MemberEntity, ImplicitMemberShadowing] = { assert(modelFinished) - var shadowingTable = Map[MemberEntity, ImplicitMemberShadowing]() + val shadowingTable = mutable.Map[MemberEntity, ImplicitMemberShadowing]() + val membersByName: Map[Name, List[MemberImpl]] = members.groupBy(_.sym.name) + val convsByMember = (Map.empty[MemberImpl, ImplicitConversionImpl] /: convs) { + case (map, conv) => map ++ conv.memberImpls.map (_ -> conv) + } for (conv <- convs) { - val otherConvs = convs.filterNot(_ == conv) + val otherConvMembers: Map[Name, List[MemberImpl]] = convs filterNot (_ == conv) flatMap (_.memberImpls) groupBy (_.sym.name) for (member <- conv.memberImpls) { - // for each member in our list val sym1 = member.sym val tpe1 = conv.toType.memberInfo(sym1) - // check if it's shadowed by a member in the original class - var shadowedBySyms: List[Symbol] = List() - for (mbr <- mbrs) { - val sym2 = mbr.sym - if (sym1.name == sym2.name) { - val shadowed = !settings.docImplicitsSoundShadowing.value || { - val tpe2 = inTpl.sym.info.memberInfo(sym2) - !isDistinguishableFrom(tpe1, tpe2) - } - if (shadowed) - shadowedBySyms ::= sym2 - } + // check if it's shadowed by a member in the original class. + val shadowed = membersByName.get(sym1.name).toList.flatten filter { other => + !settings.docImplicitsSoundShadowing.value || !isDistinguishableFrom(tpe1, inTpl.sym.info.memberInfo(other.sym)) } - val shadowedByMembers = mbrs.filter((mb: MemberImpl) => shadowedBySyms.contains(mb.sym)) - - // check if it's shadowed by another member - var ambiguousByMembers: List[MemberEntity] = List() - for (conv <- otherConvs) - for (member2 <- conv.memberImpls) { - val sym2 = member2.sym - if (sym1.name == sym2.name) { - val tpe2 = conv.toType.memberInfo(sym2) - // Ambiguity should be an equivalence relation - val ambiguated = !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1) - if (ambiguated) - ambiguousByMembers ::= member2 - } - } + // check if it's shadowed by another conversion. 
+ val ambiguous = otherConvMembers.get(sym1.name).toList.flatten filter { other => + val tpe2 = convsByMember(other).toType.memberInfo(other.sym) + !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1) + } // we finally have the shadowing info - val shadowing = new ImplicitMemberShadowing { - def shadowingMembers: List[MemberEntity] = shadowedByMembers - def ambiguatingMembers: List[MemberEntity] = ambiguousByMembers - } + if (!shadowed.isEmpty || !ambiguous.isEmpty) { + val shadowing = new ImplicitMemberShadowing { + def shadowingMembers: List[MemberEntity] = shadowed + def ambiguatingMembers: List[MemberEntity] = ambiguous + } - shadowingTable += (member -> shadowing) + shadowingTable += (member -> shadowing) + } } } - shadowingTable + shadowingTable.toMap } @@ -608,4 +594,4 @@ trait ModelFactoryImplicitSupport { false } else true // the member structure is different foo(3, 5) vs foo(3)(5) } -} \ No newline at end of file +} -- cgit v1.2.3 From ccbc51d3a07cccccb996254a68ed870d6831c511 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 4 Sep 2012 09:32:32 -0700 Subject: Test case for SI-6301. And misc changes/comments based on pull request feedback from @retronym. --- src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 6 ++++-- src/compiler/scala/tools/nsc/transform/Erasure.scala | 7 +++++++ test/files/pos/t6301.scala | 9 +++++++++ 3 files changed, 20 insertions(+), 2 deletions(-) create mode 100644 test/files/pos/t6301.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 59741e95f8..49f3781372 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1234,8 +1234,10 @@ abstract class GenICode extends SubComponent { case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode case NullReference => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null)))) case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables - case BYTE | SHORT | CHAR | INT if to == LONG => coerce(INT, LONG) // widen subrange types - case _ => () + case _ => + // widen subrange types + if (from.isIntSizedType && to == LONG) + coerce(INT, LONG) } else to match { case UNIT => ctx.bb.emit(DROP(from), pos) // value discarding diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index c9de497dea..6c8417eb56 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -999,6 +999,7 @@ abstract class Erasure extends AddInterfaces } else if (fn.symbol == Any_isInstanceOf) { preEraseIsInstanceOf } else if (fn.symbol.owner.isRefinementClass && !fn.symbol.isOverridingSymbol) { + // !!! Another spot where we produce overloaded types (see test run/t6301) ApplyDynamic(qualifier, args) setSymbol fn.symbol setPos tree.pos } else if (fn.symbol.isMethodWithExtension) { Apply(gen.mkAttributedRef(extensionMethods.extensionMethod(fn.symbol)), qualifier :: args) @@ -1068,6 +1069,12 @@ abstract class Erasure extends AddInterfaces case s @ (ShortClass | ByteClass | CharClass) => numericConversion(qual, s) case BooleanClass => If(qual, LIT(true.##), LIT(false.##)) case _ => + // Since we are past typer, we need to avoid creating trees carrying + // overloaded types. 
This logic is custom (and technically incomplete, + // although serviceable) for def hash. What is really needed is for + // the overloading logic presently hidden away in a few different + // places to be properly exposed so we can just call "resolveOverload" + // after typer. Until then: val alts = ScalaRunTimeModule.info.member(nme.hash_).alternatives def alt1 = alts find (_.info.paramTypes.head =:= qual.tpe) def alt2 = ScalaRunTimeModule.info.member(nme.hash_) suchThat (_.info.paramTypes.head.typeSymbol == AnyClass) diff --git a/test/files/pos/t6301.scala b/test/files/pos/t6301.scala new file mode 100644 index 0000000000..fa81bbfa77 --- /dev/null +++ b/test/files/pos/t6301.scala @@ -0,0 +1,9 @@ +trait LoadedOver[@specialized(Int) A] { + def foo(x: Any): A + def foo(xs: String): A +} + +object Test { + def loaded: AnyRef with LoadedOver[Int] = sys.error("") + loaded.foo("") +} -- cgit v1.2.3 From da29b3f4d4a8dd30fa08f398bbb9f12b5e2a7e16 Mon Sep 17 00:00:00 2001 From: Michael Thorpe Date: Sun, 26 Aug 2012 00:34:02 +0100 Subject: Remove extraneous null check in RedBlackTree This changes the RedBlackTree foreach method to be be a simple wrapper around a slightly shorter function, without an unnecessary nullity check. --- .../scala/collection/immutable/RedBlackTree.scala | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 4b573511d1..332f0c09cd 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -73,17 +73,23 @@ object RedBlackTree { result } - def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) { - if (tree.left ne null) foreach(tree.left, f) + + def foreach[A,B,U](tree:Tree[A,B], f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f) + + private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U) { + if (tree.left ne null) _foreach(tree.left, f) f((tree.key, tree.value)) - if (tree.right ne null) foreach(tree.right, f) - } - def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) { - if (tree.left ne null) foreachKey(tree.left, f) - f(tree.key) - if (tree.right ne null) foreachKey(tree.right, f) + if (tree.right ne null) _foreach(tree.right, f) } + + def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f) + private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U) { + if (tree.left ne null) _foreachKey(tree.left, f) + f((tree.key)) + if (tree.right ne null) _foreachKey(tree.right, f) + } + def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree) def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree) def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree) -- cgit v1.2.3 From cc561873185d25e71091a11f5cb1b3003b9ebca3 Mon Sep 17 00:00:00 2001 From: Vojin Jovanovic Date: Wed, 12 Sep 2012 14:48:06 +0200 Subject: SI-6315 fixed. 
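The diff below moves the actors migration classes (MigrationSystem, Pattern, Props, StashingActor, Timeout) out of scala.actors into a new scala.actors.migration package, which is why each updated test gains an extra import, roughly:

    // What callers of the migration kit import after this change,
    // mirroring the updated test files in the diff below.
    import scala.actors._
    import scala.actors.migration._
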
--- src/actors-migration/scala/actors/MigrationSystem.scala | 5 +++-- src/actors-migration/scala/actors/Pattern.scala | 3 ++- src/actors-migration/scala/actors/Props.scala | 4 +++- src/actors-migration/scala/actors/StashingActor.scala | 4 +++- src/actors-migration/scala/actors/Timeout.scala | 2 +- test/files/jvm/actmig-PinS_1.scala | 1 + test/files/jvm/actmig-PinS_2.scala | 3 ++- test/files/jvm/actmig-PinS_3.scala | 3 ++- test/files/jvm/actmig-loop-react.scala | 5 +++-- test/files/jvm/actmig-public-methods_1.scala | 3 ++- test/files/jvm/actmig-react-receive.scala | 5 +++-- test/files/jvm/actmig-react-within.scala | 5 +++-- test/files/jvm/actmig-receive.scala | 5 +++-- 13 files changed, 31 insertions(+), 17 deletions(-) (limited to 'src') diff --git a/src/actors-migration/scala/actors/MigrationSystem.scala b/src/actors-migration/scala/actors/MigrationSystem.scala index ffc93d9c6f..3dcb38e634 100644 --- a/src/actors-migration/scala/actors/MigrationSystem.scala +++ b/src/actors-migration/scala/actors/MigrationSystem.scala @@ -1,10 +1,11 @@ -package scala.actors +package scala.actors.migration +import scala.actors._ import scala.collection._ object MigrationSystem { - private[actors] val contextStack = new ThreadLocal[immutable.Stack[Boolean]] { + private[migration] val contextStack = new ThreadLocal[immutable.Stack[Boolean]] { override def initialValue() = immutable.Stack[Boolean]() } diff --git a/src/actors-migration/scala/actors/Pattern.scala b/src/actors-migration/scala/actors/Pattern.scala index 26e9d1bb64..28fd128141 100644 --- a/src/actors-migration/scala/actors/Pattern.scala +++ b/src/actors-migration/scala/actors/Pattern.scala @@ -1,5 +1,6 @@ -package scala.actors +package scala.actors.migration +import scala.actors._ import scala.concurrent.util.Duration import language.implicitConversions diff --git a/src/actors-migration/scala/actors/Props.scala b/src/actors-migration/scala/actors/Props.scala index 891e23213a..c12384ea55 100644 --- a/src/actors-migration/scala/actors/Props.scala +++ b/src/actors-migration/scala/actors/Props.scala @@ -1,4 +1,6 @@ -package scala.actors +package scala.actors.migration + +import scala.actors._ /** * ActorRef configuration object. It represents the minimal subset of Akka Props class. 
diff --git a/src/actors-migration/scala/actors/StashingActor.scala b/src/actors-migration/scala/actors/StashingActor.scala index 8f96e1b002..47f73c252a 100644 --- a/src/actors-migration/scala/actors/StashingActor.scala +++ b/src/actors-migration/scala/actors/StashingActor.scala @@ -1,5 +1,7 @@ -package scala.actors +package scala.actors.migration +import scala.actors._ +import scala.actors.Actor._ import scala.collection._ import scala.concurrent.util.Duration import java.util.concurrent.TimeUnit diff --git a/src/actors-migration/scala/actors/Timeout.scala b/src/actors-migration/scala/actors/Timeout.scala index 7e400ab140..78d15e5704 100644 --- a/src/actors-migration/scala/actors/Timeout.scala +++ b/src/actors-migration/scala/actors/Timeout.scala @@ -6,7 +6,7 @@ ** |/ ** \* */ -package scala.actors +package scala.actors.migration import scala.concurrent.util.Duration import java.util.concurrent.TimeUnit diff --git a/test/files/jvm/actmig-PinS_1.scala b/test/files/jvm/actmig-PinS_1.scala index 1fb50567b9..7ffff2d889 100644 --- a/test/files/jvm/actmig-PinS_1.scala +++ b/test/files/jvm/actmig-PinS_1.scala @@ -1,4 +1,5 @@ import scala.actors._ +import scala.actors.migration._ import scala.concurrent.util.duration._ import scala.concurrent.{ Promise, Await } diff --git a/test/files/jvm/actmig-PinS_2.scala b/test/files/jvm/actmig-PinS_2.scala index 46277efd43..dd0e6e5f0e 100644 --- a/test/files/jvm/actmig-PinS_2.scala +++ b/test/files/jvm/actmig-PinS_2.scala @@ -1,4 +1,5 @@ -import scala.actors.{ MigrationSystem, StashingActor, ActorRef, Props, Exit } +import scala.actors._ +import scala.actors.migration._ import scala.concurrent.util.duration._ import scala.concurrent.{ Promise, Await } diff --git a/test/files/jvm/actmig-PinS_3.scala b/test/files/jvm/actmig-PinS_3.scala index 321e99b1c2..9261046770 100644 --- a/test/files/jvm/actmig-PinS_3.scala +++ b/test/files/jvm/actmig-PinS_3.scala @@ -1,4 +1,5 @@ -import scala.actors.{ MigrationSystem, StashingActor, ActorRef, Terminated, Props } +import scala.actors._ +import scala.actors.migration._ import scala.concurrent.util.duration._ import scala.concurrent.{ Promise, Await } diff --git a/test/files/jvm/actmig-loop-react.scala b/test/files/jvm/actmig-loop-react.scala index d0cba656f8..828ebf6546 100644 --- a/test/files/jvm/actmig-loop-react.scala +++ b/test/files/jvm/actmig-loop-react.scala @@ -1,6 +1,7 @@ -import scala.actors.MigrationSystem._ +import scala.actors.migration.MigrationSystem._ import scala.actors.Actor._ -import scala.actors.{ Actor, StashingActor, ActorRef, Props, MigrationSystem, PoisonPill } +import scala.actors._ +import scala.actors.migration._ import java.util.concurrent.{ TimeUnit, CountDownLatch } import scala.collection.mutable.ArrayBuffer import scala.concurrent.util.duration._ diff --git a/test/files/jvm/actmig-public-methods_1.scala b/test/files/jvm/actmig-public-methods_1.scala index 7e5bc24210..59bdb500a5 100644 --- a/test/files/jvm/actmig-public-methods_1.scala +++ b/test/files/jvm/actmig-public-methods_1.scala @@ -1,10 +1,11 @@ import scala.collection.mutable.ArrayBuffer import scala.actors.Actor._ import scala.actors._ +import scala.actors.migration._ import scala.util._ import java.util.concurrent.{ TimeUnit, CountDownLatch } import scala.concurrent.util.Duration -import scala.actors.pattern._ +import scala.actors.migration.pattern._ object Test { val NUMBER_OF_TESTS = 6 diff --git a/test/files/jvm/actmig-react-receive.scala b/test/files/jvm/actmig-react-receive.scala index 8464a2af79..4ffdc722fd 100644 --- 
a/test/files/jvm/actmig-react-receive.scala +++ b/test/files/jvm/actmig-react-receive.scala @@ -1,6 +1,7 @@ -import scala.actors.MigrationSystem._ +import scala.actors.migration.MigrationSystem._ import scala.actors.Actor._ -import scala.actors.{ Actor, StashingActor, ActorRef, Props, MigrationSystem, PoisonPill } +import scala.actors._ +import scala.actors.migration._ import java.util.concurrent.{ TimeUnit, CountDownLatch } import scala.collection.mutable.ArrayBuffer import scala.concurrent.util.duration._ diff --git a/test/files/jvm/actmig-react-within.scala b/test/files/jvm/actmig-react-within.scala index f0326f885d..5c51508e5d 100644 --- a/test/files/jvm/actmig-react-within.scala +++ b/test/files/jvm/actmig-react-within.scala @@ -1,6 +1,7 @@ -import scala.actors.MigrationSystem._ +import scala.actors.migration.MigrationSystem._ import scala.actors.Actor._ -import scala.actors.{ Actor, StashingActor, ActorRef, Props, MigrationSystem, PoisonPill } +import scala.actors._ +import scala.actors.migration._ import java.util.concurrent.{ TimeUnit, CountDownLatch } import scala.collection.mutable.ArrayBuffer import scala.concurrent.util.duration._ diff --git a/test/files/jvm/actmig-receive.scala b/test/files/jvm/actmig-receive.scala index 30730e9d87..bd45d6e4ca 100644 --- a/test/files/jvm/actmig-receive.scala +++ b/test/files/jvm/actmig-receive.scala @@ -1,6 +1,7 @@ -import scala.actors.MigrationSystem._ +import scala.actors.migration.MigrationSystem._ import scala.actors.Actor._ -import scala.actors.{ Actor, StashingActor, ActorRef, Props, MigrationSystem, PoisonPill } +import scala.actors._ +import scala.actors.migration._ import java.util.concurrent.{ TimeUnit, CountDownLatch } import scala.collection.mutable.ArrayBuffer import scala.concurrent.util.duration._ -- cgit v1.2.3 From aa90f538dbab5036867b80f22490c1841006fa08 Mon Sep 17 00:00:00 2001 From: Stuart Golodetz Date: Thu, 13 Sep 2012 22:10:38 +0000 Subject: Make the scalac Ant task recognise -Yrangepos The scalac Ant task was not setting up the Scala compiler to produce range positions when specifying the -Yrangepos flag in its addparams property. --- src/compiler/scala/tools/ant/Scalac.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala index c6809fb48e..bcb7d494b6 100644 --- a/src/compiler/scala/tools/ant/Scalac.scala +++ b/src/compiler/scala/tools/ant/Scalac.scala @@ -19,6 +19,7 @@ import org.apache.tools.ant.util.facade.{FacadeTaskHelper, ImplementationSpecificArgument} import scala.tools.nsc.{Global, Settings, CompilerCommand} +import scala.tools.nsc.interactive.RangePositions import scala.tools.nsc.io.{Path => SPath} import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} @@ -518,7 +519,10 @@ class Scalac extends ScalaMatchingTask with ScalacShared { new Settings(error) protected def newGlobal(settings: Settings, reporter: Reporter) = - new Global(settings, reporter) + if (settings.Yrangepos.value) + new Global(settings, reporter) with RangePositions + else + new Global(settings, reporter) /*============================================================================*\ ** The big execute method ** -- cgit v1.2.3 From 87f1f997fdaea4890e5890f64eddc7b5690f1018 Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Wed, 19 Sep 2012 13:31:07 +0400 Subject: Scaladoc cleanup. Review by @VladUreche. 
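The cleanup below folds several nested matches and null checks into Option combinators. A tiny self-contained illustration of the fold/filterNot/orElse chain it relies on, with invented names standing in for the documentation templates:

    // Prefer the conversion target, then the owner (skipping the root package
    // for both), and finally fall back to the current template.
    def commentTemplate(conversionTarget: Option[String],
                        owner: Option[String],
                        current: String,
                        root: String): Option[String] =
      conversionTarget.fold(Option(current)) { target =>
        Option(target).filterNot(_ == root) orElse
          owner.filterNot(_ == root) orElse
          Option(current)
      }
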
--- .../scala/tools/nsc/doc/model/ModelFactory.scala | 47 +++++++++------------- test/scaladoc/scalacheck/HtmlFactoryTest.scala | 2 +- 2 files changed, 20 insertions(+), 29 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index 2b9638076c..16c33e6d96 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -124,15 +124,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def group = comment flatMap (_.group) getOrElse "No Group" override def inTemplate = inTpl override def toRoot: List[MemberImpl] = this :: inTpl.toRoot - def inDefinitionTemplates = this match { - case mb: NonTemplateMemberEntity if (mb.useCaseOf.isDefined) => - mb.useCaseOf.get.inDefinitionTemplates - case _ => - if (inTpl == null) - List(makeRootPackage) - else - makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) }) - } + def inDefinitionTemplates = + if (inTpl == null) + List(makeRootPackage) + else + makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) }) def visibility = { if (sym.isPrivateLocal) PrivateInInstance() else if (sym.isProtectedLocal) ProtectedInInstance() @@ -525,29 +521,25 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity { override lazy val comment = { val inRealTpl = - /* Variable precendence order for implicitly added members: Take the variable defifinitions from ... - * 1. the target of the implicit conversion - * 2. the definition template (owner) - * 3. the current template - */ - if (conversion.isDefined) findTemplateMaybe(conversion.get.toType.typeSymbol) match { - case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package - case _ => findTemplateMaybe(sym.owner) match { - case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package - case _ => inTpl - } - } else inTpl - if (inRealTpl != null) thisFactory.comment(sym, None, inRealTpl) else None + conversion.fold(Option(inTpl)) { conv => + /* Variable precendence order for implicitly added members: Take the variable defifinitions from ... + * 1. the target of the implicit conversion + * 2. the definition template (owner) + * 3. the current template + */ + findTemplateMaybe(conv.toType.typeSymbol) filterNot (_ == makeRootPackage) orElse ( + findTemplateMaybe(sym.owner) filterNot (_ == makeRootPackage) orElse Option(inTpl) + ) + } + inRealTpl flatMap (thisFactory.comment(sym, None, _)) } + override def inDefinitionTemplates = useCaseOf.fold(super.inDefinitionTemplates)(_.inDefinitionTemplates) + override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name) lazy val definitionName = { - // this contrived name is here just to satisfy some older tests -- if you decide to remove it, be my guest, and - // also remove property("package object") from test/scaladoc/scalacheck/HtmlFactoryTest.scala so you don't break - // the test suite... 
- val packageObject = if (inPackageObject) ".package" else "" val qualifiedName = conversion.fold(inDefinitionTemplates.head.qualifiedName)(_.conversionQualifiedName) - optimize(qualifiedName + packageObject + "#" + name) + optimize(qualifiedName + "#" + name) } def isBridge = sym.isBridge def isUseCase = useCaseOf.isDefined @@ -781,7 +773,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } } - /** Get the root package */ def makeRootPackage: PackageImpl = docTemplatesCache(RootPackage).asInstanceOf[PackageImpl] // TODO: Should be able to override the type diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala index 13eacf79a5..d7b5e48288 100644 --- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala +++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala @@ -680,7 +680,7 @@ object Test extends Properties("HtmlFactory") { property("package object") = files("com/example/p1/package.html") match { case node: scala.xml.Node => - node.toString contains "com.example.p1.package#packageObjectMethod" + node.toString contains "com.example.p1#packageObjectMethod" case _ => false } -- cgit v1.2.3 From 676d895b7827f988b95a23c5bf7d40719fa438fe Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 20 Sep 2012 22:02:18 +0200 Subject: SI-6381 Honour -Yrangepos in the REPL --- src/compiler/scala/tools/nsc/interpreter/IMain.scala | 5 ++++- test/files/run/t6381.check | 17 +++++++++++++++++ test/files/run/t6381.scala | 13 +++++++++++++ 3 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t6381.check create mode 100644 test/files/run/t6381.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 790a1ac8d4..9a22c15a12 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -262,7 +262,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = { settings.outputDirs setSingleOutput virtualDirectory settings.exposeEmptyPackage.value = true - new Global(settings, reporter) with ReplGlobal + if (settings.Yrangepos.value) + new Global(settings, reporter) with ReplGlobal with interactive.RangePositions + else + new Global(settings, reporter) with ReplGlobal } /** Parent classloader. Overridable. */ diff --git a/test/files/run/t6381.check b/test/files/run/t6381.check new file mode 100644 index 0000000000..b51cfd0398 --- /dev/null +++ b/test/files/run/t6381.check @@ -0,0 +1,17 @@ +Type in expressions to have them evaluated. +Type :help for more information. 
+ +scala> import language.experimental.macros +import language.experimental.macros + +scala> def pos_impl(c: reflect.macros.Context): c.Expr[String] = + c.literal(c.enclosingPosition.getClass.toString) +pos_impl: (c: scala.reflect.macros.Context)c.Expr[String] + +scala> def pos = macro pos_impl +pos: String + +scala> pos +res0: String = class scala.reflect.internal.util.RangePosition + +scala> diff --git a/test/files/run/t6381.scala b/test/files/run/t6381.scala new file mode 100644 index 0000000000..859ec3cb30 --- /dev/null +++ b/test/files/run/t6381.scala @@ -0,0 +1,13 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = """ + |import language.experimental.macros + |def pos_impl(c: reflect.macros.Context): c.Expr[String] = + | c.literal(c.enclosingPosition.getClass.toString) + |def pos = macro pos_impl + |pos + |""".stripMargin.trim + + override def extraSettings: String = "-Yrangepos" +} -- cgit v1.2.3 From ce1bbfe5c6a06e7de69210fbedd5e4cae270510a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 19 Sep 2012 01:01:15 -0700 Subject: Regex.unapplySeq should not take Any (Fixes SI-6406) This deprecates unapplySeq(Any) and adds overloaded unapplySeq(CharSequence) and unapplySeq(Match), with the putative advantage that you can't try to extract the unextractable. Regex is massaged so that the underlying Pattern is primary, rather than the String-valued expression. Regex and its unanchored companion (I almost wrote unmoored) share a Pattern object, so that unapplySeq(Match) can easily test whether the Match was generated by this Regex; in that case, the match result is used immediately, instead of reapplying the regex to the matched string. The documentation is massaged to reflect unanchored and also to align with the underlying terminology, e.g., "subgroup" really just means "group." --- src/library/scala/util/matching/Regex.scala | 98 +++++++++++++++++++---------- test/files/neg/t6406-regextract.check | 6 ++ test/files/neg/t6406-regextract.flags | 1 + test/files/neg/t6406-regextract.scala | 5 ++ test/files/run/t6406-regextract.check | 4 ++ test/files/run/t6406-regextract.scala | 30 +++++++++ 6 files changed, 110 insertions(+), 34 deletions(-) create mode 100644 test/files/neg/t6406-regextract.check create mode 100644 test/files/neg/t6406-regextract.flags create mode 100644 test/files/neg/t6406-regextract.scala create mode 100644 test/files/run/t6406-regextract.check create mode 100644 test/files/run/t6406-regextract.scala (limited to 'src') diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 3655a0a019..63d049208a 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -131,7 +131,7 @@ import java.util.regex.{ Pattern, Matcher } * @author Martin Odersky * @version 1.1, 29/01/2008 * - * @param regex A string representing a regular expression + * @param pattern The compiled pattern * @param groupNames A mapping from names to indices in capture groups * * @define replacementString @@ -144,41 +144,67 @@ import java.util.regex.{ Pattern, Matcher } * to automatically escape these characters. 
*/ @SerialVersionUID(-2094783597747625537L) -class Regex(regex: String, groupNames: String*) extends Serializable { +class Regex private[matching](val pattern: Pattern, groupNames: String*) extends Serializable { outer => import Regex._ - /** The compiled pattern */ - val pattern = Pattern.compile(regex) + /** + * @param regex A string representing a regular expression + * @param groupNames A mapping from names to indices in capture groups + */ + def this(regex: String, groupNames: String*) = this(Pattern.compile(regex), groupNames: _*) - /** Tries to match target (whole match) and returns the matching subgroups. - * if the pattern has no subgroups, then it returns an empty list on a - * successful match. - * - * Note, however, that if some subgroup has not been matched, a `null` will - * be returned for that subgroup. + /** Tries to match a [[java.lang.CharSequence]]. + * If the match succeeds, the result is a list of the matching + * groups (or a `null` element if a group did not match any input). + * If the pattern specifies no groups, then the result will be an empty list + * on a successful match. * + * This method attempts to match the entire input by default; to find the next + * matching subsequence, use an unanchored Regex. + * For example: * * {{{ * val p1 = "ab*c".r - * val p2 = "a(b*)c".r - * * val p1Matches = "abbbc" match { * case p1() => true * case _ => false * } - * + * val p2 = "a(b*)c".r * val numberOfB = "abbbc" match { * case p2(b) => Some(b.length) * case _ => None * } + * val p3 = "b*".r.unanchored + * val p3Matches = "abbbc" match { + * case p3() => true + * case _ => false + * } * }}} * - * @param target The string to match + * @param s The string to match * @return The matches */ + def unapplySeq(s: CharSequence): Option[Seq[String]] = { + val m = pattern matcher s + if (runMatcher(m)) Some(1 to m.groupCount map m.group) + else None + } + + /** Tries to match on a [[scala.util.matching.Regex.Match]]. + * A previously failed match results in None. + * If a successful match was made against the current pattern, then that result is used. + * Otherwise, this Regex is applied to the previously matched input, + * and the result of that match is used. + */ + def unapplySeq(m: Match): Option[Seq[String]] = + if (m.matched == null) None + else if (m.matcher.pattern == this.pattern) Some(1 to m.groupCount map m.group) + else unapplySeq(m.matched) + + @deprecated("Extracting a match result from anything but a CharSequence or Match is deprecated", "2.10.0") def unapplySeq(target: Any): Option[List[String]] = target match { case s: CharSequence => val m = pattern matcher s @@ -187,6 +213,8 @@ class Regex(regex: String, groupNames: String*) extends Serializable { case m: Match => unapplySeq(m.matched) case _ => None } + + // @see UnanchoredRegex protected def runMatcher(m: Matcher) = m.matches() /** Return all matches of this regexp in given character sequence as a [[scala.util.matching.Regex.MatchIterator]], @@ -200,7 +228,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * @return A [[scala.util.matching.Regex.MatchIterator]] of all matches. 
* @example {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}} */ - def findAllIn(source: java.lang.CharSequence) = new Regex.MatchIterator(source, this, groupNames) + def findAllIn(source: CharSequence) = new Regex.MatchIterator(source, this, groupNames) /** Return all matches of this regexp in given character sequence as a @@ -210,7 +238,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * @return A [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]] for all matches. * @example {{{for (words <- """\w+""".r findAllMatchIn "A simple example.") yield words.start}}} */ - def findAllMatchIn(source: java.lang.CharSequence): Iterator[Match] = { + def findAllMatchIn(source: CharSequence): Iterator[Match] = { val matchIterator = findAllIn(source) new Iterator[Match] { def hasNext = matchIterator.hasNext @@ -228,7 +256,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * @return An [[scala.Option]] of the first matching string in the text. * @example {{{"""\w+""".r findFirstIn "A simple example." foreach println // prints "A"}}} */ - def findFirstIn(source: java.lang.CharSequence): Option[String] = { + def findFirstIn(source: CharSequence): Option[String] = { val m = pattern.matcher(source) if (m.find) Some(m.group) else None } @@ -245,7 +273,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * @return A [[scala.Option]] of [[scala.util.matching.Regex.Match]] of the first matching string in the text. * @example {{{("""[a-z]""".r findFirstMatchIn "A simple example.") map (_.start) // returns Some(2), the index of the first match in the text}}} */ - def findFirstMatchIn(source: java.lang.CharSequence): Option[Match] = { + def findFirstMatchIn(source: CharSequence): Option[Match] = { val m = pattern.matcher(source) if (m.find) Some(new Match(source, m, groupNames)) else None } @@ -262,7 +290,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * @return A [[scala.Option]] of the matched prefix. * @example {{{"""[a-z]""".r findPrefixOf "A simple example." // returns None, since the text does not begin with a lowercase letter}}} */ - def findPrefixOf(source: java.lang.CharSequence): Option[String] = { + def findPrefixOf(source: CharSequence): Option[String] = { val m = pattern.matcher(source) if (m.lookingAt) Some(m.group) else None } @@ -279,7 +307,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * @return A [[scala.Option]] of the [[scala.util.matching.Regex.Match]] of the matched string. * @example {{{"""\w+""".r findPrefixMatchOf "A simple example." map (_.after) // returns Some(" simple example.")}}} */ - def findPrefixMatchOf(source: java.lang.CharSequence): Option[Match] = { + def findPrefixMatchOf(source: CharSequence): Option[Match] = { val m = pattern.matcher(source) if (m.lookingAt) Some(new Match(source, m, groupNames)) else None } @@ -293,7 +321,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * @return The resulting string * @example {{{"""\d+""".r replaceAllIn ("July 15", "") // returns "July "}}} */ - def replaceAllIn(target: java.lang.CharSequence, replacement: String): String = { + def replaceAllIn(target: CharSequence, replacement: String): String = { val m = pattern.matcher(target) m.replaceAll(replacement) } @@ -316,7 +344,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * @param replacer The function which maps a match to another string. 
* @return The target string after replacements. */ - def replaceAllIn(target: java.lang.CharSequence, replacer: Match => String): String = { + def replaceAllIn(target: CharSequence, replacer: Match => String): String = { val it = new Regex.MatchIterator(target, this, groupNames).replacementData it foreach (md => it replace replacer(md)) it.replaced @@ -343,7 +371,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * @param replacer The function which optionally maps a match to another string. * @return The target string after replacements. */ - def replaceSomeIn(target: java.lang.CharSequence, replacer: Match => Option[String]): String = { + def replaceSomeIn(target: CharSequence, replacer: Match => Option[String]): String = { val it = new Regex.MatchIterator(target, this, groupNames).replacementData for (matchdata <- it ; replacement <- replacer(matchdata)) it replace replacement @@ -359,7 +387,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * @param replacement The string that will replace the match * @return The resulting string */ - def replaceFirstIn(target: java.lang.CharSequence, replacement: String): String = { + def replaceFirstIn(target: CharSequence, replacement: String): String = { val m = pattern.matcher(target) m.replaceFirst(replacement) } @@ -370,7 +398,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * @return The array of strings computed by splitting the * input around matches of this regexp */ - def split(toSplit: java.lang.CharSequence): Array[String] = + def split(toSplit: CharSequence): Array[String] = pattern.split(toSplit) /** Create a new Regex with the same pattern, but no requirement that @@ -390,9 +418,11 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * * @return The new unanchored regex */ - def unanchored: UnanchoredRegex = new Regex(regex, groupNames: _*) with UnanchoredRegex { override def anchored = outer } + def unanchored: UnanchoredRegex = new Regex(pattern, groupNames: _*) with UnanchoredRegex { override def anchored = outer } def anchored: Regex = this + def regex: String = pattern.pattern + /** The string defining the regular expression */ override def toString = regex } @@ -421,7 +451,7 @@ object Regex { trait MatchData { /** The source from where the match originated */ - val source: java.lang.CharSequence + val source: CharSequence /** The names of the groups, or some empty sequence if one defined */ val groupNames: Seq[String] @@ -459,25 +489,25 @@ object Regex { /** The char sequence before first character of match, * or `null` if nothing was matched */ - def before: java.lang.CharSequence = + def before: CharSequence = if (start >= 0) source.subSequence(0, start) else null /** The char sequence before first character of match in group `i`, * or `null` if nothing was matched for that group */ - def before(i: Int): java.lang.CharSequence = + def before(i: Int): CharSequence = if (start(i) >= 0) source.subSequence(0, start(i)) else null /** Returns char sequence after last character of match, * or `null` if nothing was matched */ - def after: java.lang.CharSequence = + def after: CharSequence = if (end >= 0) source.subSequence(end, source.length) else null /** The char sequence after last character of match in group `i`, * or `null` if nothing was matched for that group */ - def after(i: Int): java.lang.CharSequence = + def after(i: Int): CharSequence = if (end(i) >= 0) source.subSequence(end(i), source.length) else null @@ -501,8 +531,8 @@ 
object Regex { /** Provides information about a succesful match. */ - class Match(val source: java.lang.CharSequence, - matcher: Matcher, + class Match(val source: CharSequence, + private[matching] val matcher: Matcher, val groupNames: Seq[String]) extends MatchData { /** The index of the first matched character */ @@ -563,7 +593,7 @@ object Regex { /** A class to step through a sequence of regex matches */ - class MatchIterator(val source: java.lang.CharSequence, val regex: Regex, val groupNames: Seq[String]) + class MatchIterator(val source: CharSequence, val regex: Regex, val groupNames: Seq[String]) extends AbstractIterator[String] with Iterator[String] with MatchData { self => protected[Regex] val matcher = regex.pattern.matcher(source) diff --git a/test/files/neg/t6406-regextract.check b/test/files/neg/t6406-regextract.check new file mode 100644 index 0000000000..19425a68b0 --- /dev/null +++ b/test/files/neg/t6406-regextract.check @@ -0,0 +1,6 @@ +t6406-regextract.scala:4: warning: method unapplySeq in class Regex is deprecated: Extracting a match result from anything but a CharSequence or Match is deprecated + List(1) collect { case r(i) => i } + ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found +one error found diff --git a/test/files/neg/t6406-regextract.flags b/test/files/neg/t6406-regextract.flags new file mode 100644 index 0000000000..85d8eb2ba2 --- /dev/null +++ b/test/files/neg/t6406-regextract.flags @@ -0,0 +1 @@ +-Xfatal-warnings diff --git a/test/files/neg/t6406-regextract.scala b/test/files/neg/t6406-regextract.scala new file mode 100644 index 0000000000..0f5dad908d --- /dev/null +++ b/test/files/neg/t6406-regextract.scala @@ -0,0 +1,5 @@ + +object Test extends App { + val r = "(\\d+)".r + List(1) collect { case r(i) => i } +} diff --git a/test/files/run/t6406-regextract.check b/test/files/run/t6406-regextract.check new file mode 100644 index 0000000000..88c5a52eb3 --- /dev/null +++ b/test/files/run/t6406-regextract.check @@ -0,0 +1,4 @@ +List(1, 3) +List(1, 3) +List(1, 3) +Some(2011) Some(2011) diff --git a/test/files/run/t6406-regextract.scala b/test/files/run/t6406-regextract.scala new file mode 100644 index 0000000000..83679a5167 --- /dev/null +++ b/test/files/run/t6406-regextract.scala @@ -0,0 +1,30 @@ + +object Test extends App { + import util.matching._ + import Regex._ + + val r = "(\\d+)".r + val q = """(\d)""".r + val ns = List("1,2","x","3,4") + val u = r.unanchored + + val is = ns collect { case u(x) => x } map { case r(x) => x } + println(is) + // Match from same pattern + val js = (ns map { u findFirstMatchIn _ }).flatten map { case r(x) => x } + println(js) + // Match not from same pattern + val ks = (ns map { q findFirstMatchIn _ }).flatten map { case r(x) => x } + println(ks) + + val t = "Last modified 2011-07-15" + val p1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r + val y1: Option[String] = for { + p1(year, month, day) <- p1 findFirstIn t + } yield year + val y2: Option[String] = for { + p1(year, month, day) <- p1 findFirstMatchIn t + } yield year + println(s"$y1 $y2") + +} -- cgit v1.2.3 From b45a91fe228be063b9f9192cc135459f32d82ae0 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 26 Sep 2012 12:11:48 -0700 Subject: Expanded an error message from the backend. 
--- src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index c3fca13374..18e9ae620e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -604,11 +604,18 @@ abstract class GenASM extends SubComponent with BytecodeWriters { val internalName = cachedJN.toString() val trackedSym = jsymbol(sym) reverseJavaName.get(internalName) match { - case None => + case Some(oldsym) if oldsym.exists && trackedSym.exists => + assert( + // In contrast, neither NothingClass nor NullClass show up bytecode-level. + (oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass), + s"""|Different class symbols have the same bytecode-level internal name: + | name: $internalName + | oldsym: ${oldsym.fullNameString} + | tracked: ${trackedSym.fullNameString} + """.stripMargin + ) + case _ => reverseJavaName.put(internalName, trackedSym) - case Some(oldsym) => - assert((oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass), // In contrast, neither NothingClass nor NullClass show up bytecode-level. - "how can getCommonSuperclass() do its job if different class symbols get the same bytecode-level internal name: " + internalName) } } -- cgit v1.2.3 From 83b5d4c0c9af462fc562c571f17dfcd00f47255d Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 26 Sep 2012 12:13:03 -0700 Subject: Comments explaining some brokenness in Namers. --- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index df8eb9c6b9..55ec8bead4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -161,6 +161,9 @@ trait Namers extends MethodSynthesis { else innerNamer } + // FIXME - this logic needs to be thoroughly explained + // and justified. I know it's wrong with repect to package + // objects, but I think it's also wrong in other ways. protected def conflict(newS: Symbol, oldS: Symbol) = ( ( !oldS.isSourceMethod || nme.isSetterName(newS.name) @@ -188,6 +191,19 @@ trait Namers extends MethodSynthesis { /** Enter symbol into given scope and return symbol itself */ def enterInScope(sym: Symbol, scope: Scope): Symbol = { + // FIXME - this is broken in a number of ways. + // + // 1) If "sym" allows overloading, that is not itself sufficient to skip + // the check, because "prev.sym" also must allow overloading. + // + // 2) There is nothing which reconciles a package's scope with + // the package object's scope. This is the source of many bugs + // with e.g. defining a case class in a package object. When + // compiling against classes, the class symbol is created in the + // package and in the package object, and the conflict is undetected. + // There is also a non-deterministic outcome for situations like + // an object with the same name as a method in the package object. 
+ // allow for overloaded methods if (!allowsOverload(sym)) { val prev = scope.lookupEntry(sym.name) -- cgit v1.2.3 From e6f10b07d44f0ddde26246b4a41527a84eede81c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 26 Sep 2012 12:14:13 -0700 Subject: Fixed SI-5604, selections on package objects. mkAttributedSelect, which creates a Select tree based on a symbol, has been a major source of package object bugs, because it has not been accurately identifying selections on package objects. When selecting foo.bar, if foo turns out to be a package object, the created Select tree must be foo.`package`.bar However mkAttributedSelect was only examining the owner of the symbol, which means it would work if the package object defined bar directly, but not if it inherited it. --- src/reflect/scala/reflect/internal/TreeGen.scala | 25 ++++++++++-- test/files/pos/t5604b/T_1.scala | 6 +++ test/files/pos/t5604b/T_2.scala | 6 +++ test/files/pos/t5604b/Test_1.scala | 7 ++++ test/files/pos/t5604b/Test_2.scala | 7 ++++ test/files/pos/t5604b/pack_1.scala | 5 +++ test/files/run/t5604.check | 8 ++++ test/files/run/t5604.scala | 50 ++++++++++++++++++++++++ 8 files changed, 111 insertions(+), 3 deletions(-) create mode 100644 test/files/pos/t5604b/T_1.scala create mode 100644 test/files/pos/t5604b/T_2.scala create mode 100644 test/files/pos/t5604b/Test_1.scala create mode 100644 test/files/pos/t5604b/Test_2.scala create mode 100644 test/files/pos/t5604b/pack_1.scala create mode 100644 test/files/run/t5604.check create mode 100644 test/files/run/t5604.scala (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index ebf0998573..c1753fc5a1 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -172,10 +172,29 @@ abstract class TreeGen extends macros.TreeBuilder { if (qual.symbol != null && (qual.symbol.isEffectiveRoot || qual.symbol.isEmptyPackage)) mkAttributedIdent(sym) else { + // Have to recognize anytime a selection is made on a package + // so it can be rewritten to foo.bar.`package`.name rather than + // foo.bar.name if name is in the package object. + // TODO - factor out the common logic between this and + // the Typers method "isInPackageObject", used in typedIdent. + val qualsym = ( + if (qual.tpe ne null) qual.tpe.typeSymbol + else if (qual.symbol ne null) qual.symbol + else NoSymbol + ) + val needsPackageQualifier = ( + (sym ne null) + && qualsym.isPackage + && !sym.isDefinedInPackage + ) val pkgQualifier = - if (sym != null && sym.owner.isPackageObjectClass && sym.effectiveOwner == qual.tpe.typeSymbol) { - val obj = sym.owner.sourceModule - Select(qual, nme.PACKAGE) setSymbol obj setType singleType(qual.tpe, obj) + if (needsPackageQualifier) { + // The owner of a symbol which requires package qualification may be the + // package object iself, but it also could be any superclass of the package + // object. In the latter case, we must go through the qualifier's info + // to obtain the right symbol. 
+ val packageObject = if (sym.owner.isModuleClass) sym.owner.sourceModule else qual.tpe member nme.PACKAGE + Select(qual, nme.PACKAGE) setSymbol packageObject setType singleType(qual.tpe, packageObject) } else qual diff --git a/test/files/pos/t5604b/T_1.scala b/test/files/pos/t5604b/T_1.scala new file mode 100644 index 0000000000..179dcb10c6 --- /dev/null +++ b/test/files/pos/t5604b/T_1.scala @@ -0,0 +1,6 @@ +// sandbox/t5604/T.scala +package t6504 + +trait T { + def foo: Boolean = false +} diff --git a/test/files/pos/t5604b/T_2.scala b/test/files/pos/t5604b/T_2.scala new file mode 100644 index 0000000000..179dcb10c6 --- /dev/null +++ b/test/files/pos/t5604b/T_2.scala @@ -0,0 +1,6 @@ +// sandbox/t5604/T.scala +package t6504 + +trait T { + def foo: Boolean = false +} diff --git a/test/files/pos/t5604b/Test_1.scala b/test/files/pos/t5604b/Test_1.scala new file mode 100644 index 0000000000..f7c58ebe83 --- /dev/null +++ b/test/files/pos/t5604b/Test_1.scala @@ -0,0 +1,7 @@ +// sandbox/t5604/Test.scala +package t6504 + +object Test { + def blerg1(a: Any): Any = if (foo) blerg1(0) + def blerg2(a: Any): Any = if (t6504.foo) blerg2(0) +} diff --git a/test/files/pos/t5604b/Test_2.scala b/test/files/pos/t5604b/Test_2.scala new file mode 100644 index 0000000000..f7c58ebe83 --- /dev/null +++ b/test/files/pos/t5604b/Test_2.scala @@ -0,0 +1,7 @@ +// sandbox/t5604/Test.scala +package t6504 + +object Test { + def blerg1(a: Any): Any = if (foo) blerg1(0) + def blerg2(a: Any): Any = if (t6504.foo) blerg2(0) +} diff --git a/test/files/pos/t5604b/pack_1.scala b/test/files/pos/t5604b/pack_1.scala new file mode 100644 index 0000000000..f50d568bfa --- /dev/null +++ b/test/files/pos/t5604b/pack_1.scala @@ -0,0 +1,5 @@ +// sandbox/t5604/pack.scala +package t6504 + +object `package` extends T { +} diff --git a/test/files/run/t5604.check b/test/files/run/t5604.check new file mode 100644 index 0000000000..53a2fc8894 --- /dev/null +++ b/test/files/run/t5604.check @@ -0,0 +1,8 @@ +long +double +long +double +long +double +long +double diff --git a/test/files/run/t5604.scala b/test/files/run/t5604.scala new file mode 100644 index 0000000000..a06c8aab3e --- /dev/null +++ b/test/files/run/t5604.scala @@ -0,0 +1,50 @@ +// a.scala +// Fri Jan 13 11:31:47 PST 2012 + +package foo { + object regular extends Duh { + def buh(n: Long) = println("long") + def buh(n: Double) = println("double") + } + class regular { + import regular._ + + duh(33L) + duh(3.0d) + foo.regular.duh(33L) + foo.regular.duh(3.0d) + buh(66L) + buh(6.0d) + foo.regular.buh(66L) + foo.regular.buh(6.0d) + } + + trait Duh { + def duh(n: Long) = println("long") + def duh(n: Double) = println("double") + } + package object bar extends Duh { + def buh(n: Long) = println("long") + def buh(n: Double) = println("double") + } + package bar { + object Main { + def main(args:Array[String]) { + duh(33L) + duh(3.0d) + foo.bar.duh(33L) + foo.bar.duh(3.0d) + buh(66L) + buh(6.0d) + foo.bar.buh(66L) + foo.bar.buh(6.0d) + } + } + } +} + +object Test { + def main(args: Array[String]): Unit = { + foo.bar.Main.main(null) + } +} -- cgit v1.2.3 From 97ede5a64a60c739f03619f9c0d7e2bf88a97207 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 26 Sep 2012 12:19:50 -0700 Subject: Simplifications in typedIdent. These accompany the changes to mkAttributedSelect in the prior commit, and document additional brokenness with package objects which stillr emains. 
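Both this commit and the previous one deal with members reached through a package object. A condensed sketch of the shape involved, a stripped-down variant of the run/t5604.scala test added above:

    // Overloads living in a package object: an unqualified call such as
    // duh(3.0d) made from inside the package must see both alternatives.
    // This is the kind of case the lookup changes below are concerned with.
    package object bar {
      def duh(n: Long)   = println("long")
      def duh(n: Double) = println("double")
    }

    package bar {
      object Use {
        def run(): Unit = {
          duh(33L)   // expected: "long"
          duh(3.0d)  // expected: "double"
        }
      }
    }
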
--- .../scala/tools/nsc/typechecker/Typers.scala | 93 ++++++++++++---------- 1 file changed, 51 insertions(+), 42 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 2344e71883..6e222459c9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -605,23 +605,31 @@ trait Typers extends Modes with Adaptations with Tags { } /** Is `sym` defined in package object of package `pkg`? + * Since sym may be defined in some parent of the package object, + * we cannot inspect its owner only; we have to go through the + * info of the package object. However to avoid cycles we'll check + * what other ways we can before pushing that way. */ private def isInPackageObject(sym: Symbol, pkg: Symbol) = { - def isInPkgObj(sym: Symbol) = - !sym.owner.isPackage && { - sym.owner.isPackageObjectClass && - sym.owner.owner == pkg || - pkg.isInitialized && { - // need to be careful here to not get a cyclic reference during bootstrap - val pkgobj = pkg.info.member(nme.PACKAGEkw) - pkgobj.isInitialized && - (pkgobj.info.member(sym.name).alternatives contains sym) - } + val pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg + def matchesInfo = ( + pkg.isInitialized && { + // need to be careful here to not get a cyclic reference during bootstrap + val module = pkg.info member nme.PACKAGEkw + module.isInitialized && (module.info.member(sym.name).alternatives contains sym) } - pkg.isPackageClass && { + ) + def isInPkgObj(sym: Symbol) = ( + !sym.isPackage + && !sym.owner.isPackageClass + && (sym.owner ne NoSymbol) + && (sym.owner.owner == pkgClass || matchesInfo) + ) + + pkgClass.isPackageClass && ( if (sym.isOverloaded) sym.alternatives forall isInPkgObj else isInPkgObj(sym) - } + ) } /** Post-process an identifier or selection node, performing the following: @@ -4768,6 +4776,22 @@ trait Typers extends Modes with Adaptations with Tags { defSym = rootMirror.EmptyPackageClass.tpe.nonPrivateMember(name) defSym != NoSymbol } + def correctForPackageObject(sym: Symbol): Symbol = { + if (sym.isTerm && isInPackageObject(sym, pre.typeSymbol)) { + val sym1 = pre member sym.name + if ((sym1 eq NoSymbol) || (sym eq sym1)) sym else { + qual = gen.mkAttributedQualifier(pre) + log(s""" + | !!! Overloaded package object member resolved incorrectly. + | prefix: $pre + | Discarded: ${sym.defString} + | Using: ${sym1.defString} + """.stripMargin) + sym1 + } + } + else sym + } def startingIdentContext = ( // ignore current variable scope in patterns to enforce linearity if ((mode & (PATTERNmode | TYPEPATmode)) == 0) context @@ -4779,11 +4803,11 @@ trait Typers extends Modes with Adaptations with Tags { // which are methods (note: if we don't do that // case x :: xs in class List would return the :: method) // unless they are stable or are accessors (the latter exception is for better error messages). 
- def qualifies(sym: Symbol): Boolean = { - sym.hasRawInfo && // this condition avoids crashing on self-referential pattern variables - reallyExists(sym) && - ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR)) - } + def qualifies(sym: Symbol): Boolean = ( + sym.hasRawInfo // this condition avoids crashing on self-referential pattern variables + && reallyExists(sym) + && ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR)) + ) if (defSym == NoSymbol) { var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope @@ -4791,32 +4815,17 @@ trait Typers extends Modes with Adaptations with Tags { var cx = startingIdentContext while (defSym == NoSymbol && cx != NoContext && (cx.scope ne null)) { // cx.scope eq null arises during FixInvalidSyms in Duplicators pre = cx.enclClass.prefix + // !!! FIXME. This call to lookupEntry is at the root of all the + // bad behavior with overloading in package objects. lookupEntry + // just takes the first symbol it finds in scope, ignoring the rest. + // When a selection on a package object arrives here, the first + // overload is always chosen. "correctForPackageObject" exists to + // undo that decision. Obviously it would be better not to do it in + // the first place; however other things seem to be tied to obtaining + // that ScopeEntry, specifically calculating the nesting depth. defEntry = cx.scope.lookupEntry(name) - if ((defEntry ne null) && qualifies(defEntry.sym)) { - // Right here is where SI-1987, overloading in package objects, can be - // seen to go wrong. There is an overloaded symbol, but when referring - // to the unqualified identifier from elsewhere in the package, only - // the last definition is visible. So overloading mis-resolves and is - // definition-order dependent, bad things. See run/t1987.scala. - // - // I assume the actual problem involves how/where these symbols are entered - // into the scope. But since I didn't figure out how to fix it that way, I - // catch it here by looking up package-object-defined symbols in the prefix. - if (isInPackageObject(defEntry.sym, pre.typeSymbol)) { - defSym = pre.member(defEntry.sym.name) - if (defSym ne defEntry.sym) { - qual = gen.mkAttributedQualifier(pre) - log(s""" - | !!! Overloaded package object member resolved incorrectly. - | prefix: $pre - | Discarded: ${defEntry.sym.defString} - | Using: ${defSym.defString} - """.stripMargin) - } - } - else - defSym = defEntry.sym - } + if ((defEntry ne null) && qualifies(defEntry.sym)) + defSym = correctForPackageObject(defEntry.sym) else { cx = cx.enclClass val foundSym = pre.member(name) filter qualifies -- cgit v1.2.3 From ddcf5cea60245ee8d41e248feabe901df609bcf4 Mon Sep 17 00:00:00 2001 From: Nada Amin Date: Fri, 28 Sep 2012 01:32:44 +0200 Subject: Fix class loader issues in instrumentation tests. The ASM ClassWriter uses a wimpy class loader when computing common superclasses. This could cause a ClassNotFoundException in the transform method (at reader.accept). This exception gets swallowed, resulting in a class that should be instrumented to silently not be. The fix is to override getCommonSuperClass to use the correct class loader. Trivia: This bug was discovered while 'stress-testing' this instrumentation scheme on the Coursera students, to check that they implement one method in terms of another in the assignment. 
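For context on where the classLoader argument comes from: a transformer like the one patched below is installed through java.lang.instrument, and the loader handed to transform is what the fix reuses inside getCommonSuperClass. A rough sketch of that wiring in Scala; the real partest agent is written in Java, and the names here are invented:

    import java.lang.instrument.{ ClassFileTransformer, Instrumentation }
    import java.security.ProtectionDomain

    object AgentSketch {
      def premain(args: String, inst: Instrumentation): Unit =
        inst.addTransformer(new ClassFileTransformer {
          def transform(loader: ClassLoader, className: String, redefined: Class[_],
                        domain: ProtectionDomain, bytes: Array[Byte]): Array[Byte] = {
            // `loader` knows about the classes under test; the patch below passes
            // it to Class.forName in getCommonSuperClass instead of relying on
            // ClassWriter's default lookup.
            bytes // this sketch rewrites nothing
          }
        })
    }
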
--- .../tools/partest/javaagent/ASMTransformer.java | 30 ++++++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java index 09cd485d6b..a9a56d124d 100644 --- a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java +++ b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java @@ -26,9 +26,35 @@ public class ASMTransformer implements ClassFileTransformer { className.startsWith("instrumented/")); } - public byte[] transform(ClassLoader loader, String className, Class classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) { + public byte[] transform(final ClassLoader classLoader, String className, Class classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) { if (shouldTransform(className)) { - ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS); + ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS) { + // this is copied verbatim from the superclass, + // except that we use the outer class loader + @Override protected String getCommonSuperClass(final String type1, final String type2) { + Class c, d; + try { + c = Class.forName(type1.replace('/', '.'), false, classLoader); + d = Class.forName(type2.replace('/', '.'), false, classLoader); + } catch (Exception e) { + throw new RuntimeException(e.toString()); + } + if (c.isAssignableFrom(d)) { + return type1; + } + if (d.isAssignableFrom(c)) { + return type2; + } + if (c.isInterface() || d.isInterface()) { + return "java/lang/Object"; + } else { + do { + c = c.getSuperclass(); + } while (!c.isAssignableFrom(d)); + return c.getName().replace('.', '/'); + } + } + }; ProfilerVisitor visitor = new ProfilerVisitor(writer); ClassReader reader = new ClassReader(classfileBuffer); reader.accept(visitor, 0); -- cgit v1.2.3 From 6476eb374019713e3fd55b9affa8ed6d7362a884 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 28 Sep 2012 07:51:35 -0700 Subject: Purged DebruijnIndex. Apparently everyone agrees it's not used anymore. 
--- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 6 +- .../tools/nsc/typechecker/DestructureTypes.scala | 1 - .../scala/tools/nsc/typechecker/Implicits.scala | 2 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 1 - .../scala/tools/nsc/util/ShowPickled.scala | 1 - src/reflect/scala/reflect/internal/Types.scala | 67 ---------------------- .../reflect/internal/pickling/PickleFormat.scala | 4 +- .../scalap/scalax/rules/scalasig/ScalaSig.scala | 48 ---------------- .../scalax/rules/scalasig/ScalaSigPrinter.scala | 1 - .../tools/scalap/scalax/rules/scalasig/Type.scala | 1 - 10 files changed, 4 insertions(+), 128 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 29b238c4cb..c5e7fa7989 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -177,7 +177,7 @@ abstract class Pickler extends SubComponent { */ private def putType(tp: Type): Unit = if (putEntry(tp)) { tp match { - case NoType | NoPrefix /*| DeBruijnIndex(_, _) */ => + case NoType | NoPrefix => ; case ThisType(sym) => putSymbol(sym) @@ -609,8 +609,6 @@ abstract class Pickler extends SubComponent { writeRef(restpe); writeRefs(tparams); POLYtpe case ExistentialType(tparams, restpe) => writeRef(restpe); writeRefs(tparams); EXISTENTIALtpe - // case DeBruijnIndex(l, i) => - // writeNat(l); writeNat(i); DEBRUIJNINDEXtpe case c @ Constant(_) => if (c.tag == BooleanTag) writeLong(if (c.booleanValue) 1 else 0) else if (ByteTag <= c.tag && c.tag <= LongTag) writeLong(c.longValue) @@ -1057,8 +1055,6 @@ abstract class Pickler extends SubComponent { case ExistentialType(tparams, restpe) => print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams); print("||| "+entry) - // case DeBruijnIndex(l, i) => - // print("DEBRUIJNINDEXtpe "); print(l+" "+i) case c @ Constant(_) => print("LITERAL ") if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0)) diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala index e8865964b0..2555d199d5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala +++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala @@ -188,7 +188,6 @@ trait DestructureTypes { case AntiPolyType(pre, targs) => product(tp, prefix(pre), typeArgs(targs)) case ClassInfoType(parents, decls, clazz) => product(tp, parentList(parents), scope(decls), wrapAtom(clazz)) case ConstantType(const) => product(tp, constant("value", const)) - case DeBruijnIndex(level, index, args) => product(tp, const("level" -> level), const("index" -> index), typeArgs(args)) case OverloadedType(pre, alts) => product(tp, prefix(pre), node("alts", typeList(alts map pre.memberType))) case RefinedType(parents, decls) => product(tp, parentList(parents), scope(decls)) case SingleType(pre, sym) => product(tp, prefix(pre), wrapAtom(sym)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 7852ff49e1..8bf2768e0a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -132,7 +132,7 @@ trait Implicits { } /* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards. 
- * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate DebruijnIndex types + * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate debruijn index types * when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`, * so we have to approximate (otherwise it is excluded a priori). */ diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 5e1f52830c..f9648e6e21 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -913,7 +913,6 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case NoPrefix => case ThisType(_) => case ConstantType(_) => - // case DeBruijnIndex(_, _) => case SingleType(pre, sym) => validateVariance(pre, variance) case TypeRef(pre, sym, args) => diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala index d1f3183f68..787291b13c 100644 --- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala +++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala @@ -94,7 +94,6 @@ object ShowPickled extends Names { case ANNOTATEDtpe => "ANNOTATEDtpe" case ANNOTINFO => "ANNOTINFO" case ANNOTARGARRAY => "ANNOTARGARRAY" - // case DEBRUIJNINDEXtpe => "DEBRUIJNINDEXtpe" case EXISTENTIALtpe => "EXISTENTIALtpe" case TREE => "TREE" case MODIFIERS => "MODIFIERS" diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index f6af515fa3..927bc46808 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -68,8 +68,6 @@ import util.ThreeValues._ // a type variable // Replace occurrences of type parameters with type vars, where // inst is the instantiation and constr is a list of bounds. - case DeBruijnIndex(level, index) - // for dependent method types: a type referring to a method parameter. case ErasedValueType(clazz, underlying) // only used during erasure of derived value classes. */ @@ -3403,23 +3401,6 @@ trait Types extends api.Types { self: SymbolTable => override def safeToString: String = name.toString +": "+ tp } - /** A De Bruijn index referring to a previous type argument. Only used - * as a serialization format. - */ - case class DeBruijnIndex(level: Int, idx: Int, args: List[Type]) extends Type { - override def safeToString: String = "De Bruijn index("+level+","+idx+")" - } - - /** A binder defining data associated with De Bruijn indices. Only used - * as a serialization format. - */ - case class DeBruijnBinder(pnames: List[Name], ptypes: List[Type], restpe: Type) extends Type { - override def safeToString = { - val kind = if (pnames.head.isTypeName) "poly" else "method" - "De Bruijn "+kind+"("+(pnames mkString ",")+";"+(ptypes mkString ",")+";"+restpe+")" - } - } - /** A temporary type representing the erasure of a user-defined value type. * Created during phase erasure, eliminated again in posterasure. 
* @@ -3813,50 +3794,6 @@ trait Types extends api.Types { self: SymbolTable => } } - object toDeBruijn extends TypeMap { - private var paramStack: List[List[Symbol]] = Nil - def mkDebruijnBinder(params: List[Symbol], restpe: Type) = { - paramStack = params :: paramStack - try { - DeBruijnBinder(params map (_.name), params map (p => this(p.info)), this(restpe)) - } finally paramStack = paramStack.tail - } - def apply(tp: Type): Type = tp match { - case PolyType(tparams, restpe) => - mkDebruijnBinder(tparams, restpe) - case MethodType(params, restpe) => - mkDebruijnBinder(params, restpe) - case TypeRef(NoPrefix, sym, args) => - val level = paramStack indexWhere (_ contains sym) - if (level < 0) mapOver(tp) - else DeBruijnIndex(level, paramStack(level) indexOf sym, args mapConserve this) - case _ => - mapOver(tp) - } - } - - def fromDeBruijn(owner: Symbol) = new TypeMap { - private var paramStack: List[List[Symbol]] = Nil - def apply(tp: Type): Type = tp match { - case DeBruijnBinder(pnames, ptypes, restpe) => - val isType = pnames.head.isTypeName - val newParams = for (name <- pnames) yield - if (isType) owner.newTypeParameter(name.toTypeName) - else owner.newValueParameter(name.toTermName) - paramStack = newParams :: paramStack - try { - foreach2(newParams, ptypes)((p, t) => p setInfo this(t)) - val restpe1 = this(restpe) - if (isType) PolyType(newParams, restpe1) - else MethodType(newParams, restpe1) - } finally paramStack = paramStack.tail - case DeBruijnIndex(level, idx, args) => - TypeRef(NoPrefix, paramStack(level)(idx), args map this) - case _ => - mapOver(tp) - } - } - // Hash consing -------------------------------------------------------------- private val initialUniquesCapacity = 4096 @@ -4163,10 +4100,6 @@ trait Types extends api.Types { self: SymbolTable => if ((annots1 eq annots) && (atp1 eq atp)) tp else if (annots1.isEmpty) atp1 else AnnotatedType(annots1, atp1, selfsym) - case DeBruijnIndex(shift, idx, args) => - val args1 = args mapConserve this - if (args1 eq args) tp - else DeBruijnIndex(shift, idx, args1) /* case ErrorType => tp case WildcardType => tp diff --git a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala index 16747af08a..94b2f77ff9 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala @@ -56,7 +56,7 @@ object PickleFormat { * | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref} * | 43 ANNOTINFO len_Nat AnnotInfoBody * | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref} - * | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat + * | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat /* no longer needed */ * | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref} * | 49 TREE len_Nat 1 EMPTYtree * | 49 TREE len_Nat 2 PACKAGEtree type_Ref sym_Ref mods_Ref name_Ref {tree_Ref} @@ -161,7 +161,7 @@ object PickleFormat { final val ANNOTARGARRAY = 44 final val SUPERtpe = 46 - final val DEBRUIJNINDEXtpe = 47 + final val DEBRUIJNINDEXtpe = 47 // no longer generated final val EXISTENTIALtpe = 48 final val TREE = 49 // prefix code that means a tree is coming diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala index 7d06a7169b..76dc0eaf1e 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala @@ -171,53 
+171,6 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules { def symbolEntry(key : Int) = symHeader(key) -~ symbolInfo - /*************************************************** - * Symbol table attribute format: - * Symtab = nentries_Nat {Entry} - * Entry = 1 TERMNAME len_Nat NameInfo - * | 2 TYPENAME len_Nat NameInfo - * | 3 NONEsym len_Nat - * | 4 TYPEsym len_Nat SymbolInfo - * | 5 ALIASsym len_Nat SymbolInfo - * | 6 CLASSsym len_Nat SymbolInfo [thistype_Ref] - * | 7 MODULEsym len_Nat SymbolInfo - * | 8 VALsym len_Nat [defaultGetter_Ref /* no longer needed*/] SymbolInfo [alias_Ref] - * | 9 EXTref len_Nat name_Ref [owner_Ref] - * | 10 EXTMODCLASSref len_Nat name_Ref [owner_Ref] - * | 11 NOtpe len_Nat - * | 12 NOPREFIXtpe len_Nat - * | 13 THIStpe len_Nat sym_Ref - * | 14 SINGLEtpe len_Nat type_Ref sym_Ref - * | 15 CONSTANTtpe len_Nat constant_Ref - * | 16 TYPEREFtpe len_Nat type_Ref sym_Ref {targ_Ref} - * | 17 TYPEBOUNDStpe len_Nat tpe_Ref tpe_Ref - * | 18 REFINEDtpe len_Nat classsym_Ref {tpe_Ref} - * | 19 CLASSINFOtpe len_Nat classsym_Ref {tpe_Ref} - * | 20 METHODtpe len_Nat tpe_Ref {sym_Ref} - * | 21 POLYTtpe len_Nat tpe_Ref {sym_Ref} - * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref} /* no longer needed */ - * | 52 SUPERtpe len_Nat tpe_Ref tpe_Ref - * | 24 LITERALunit len_Nat - * | 25 LITERALboolean len_Nat value_Long - * | 26 LITERALbyte len_Nat value_Long - * | 27 LITERALshort len_Nat value_Long - * | 28 LITERALchar len_Nat value_Long - * | 29 LITERALint len_Nat value_Long - * | 30 LITERALlong len_Nat value_Long - * | 31 LITERALfloat len_Nat value_Long - * | 32 LITERALdouble len_Nat value_Long - * | 33 LITERALstring len_Nat name_Ref - * | 34 LITERALnull len_Nat - * | 35 LITERALclass len_Nat tpe_Ref - * | 36 LITERALenum len_Nat sym_Ref - * | 40 SYMANNOT len_Nat sym_Ref AnnotInfoBody - * | 41 CHILDREN len_Nat sym_Ref {sym_Ref} - * | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref} - * | 43 ANNOTINFO len_Nat AnnotInfoBody - * | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref} - * | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat - * | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref} - */ val noSymbol = 3 -^ NoSymbol val typeSymbol = symbolEntry(4) ^^ TypeSymbol as "typeSymbol" val aliasSymbol = symbolEntry(5) ^^ AliasSymbol as "alias" @@ -260,7 +213,6 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules { 22 -~ typeRef ~ (symbolRef*) ^~^ MethodType, 42 -~ typeRef ~ (attribTreeRef*) ^~^ AnnotatedType, 51 -~ typeRef ~ symbolRef ~ (attribTreeRef*) ^~~^ AnnotatedWithSelfType, - 47 -~ typeLevel ~ typeIndex ^~^ DeBruijnIndexType, 48 -~ typeRef ~ (symbolRef*) ^~^ ExistentialType) as "type" lazy val literal: EntryParser[Any] = oneOf( diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala index 411a87e4bb..f3d449b87f 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala @@ -381,7 +381,6 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { toString(typeRef, sep) } case AnnotatedWithSelfType(typeRef, symbol, attribTreeRefs) => toString(typeRef, sep) - //case DeBruijnIndexType(typeLevel, typeIndex) => case ExistentialType(typeRef, symbols) => { val refs = symbols.map(toString _).filter(!_.startsWith("_")).map("type " + _) toString(typeRef, sep) + (if (refs.size > 0) refs.mkString(" forSome {", 
"; ", "}") else "") diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala index 543ddbe186..0444e701f2 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala @@ -22,5 +22,4 @@ case class PolyType(typeRef : Type, symbols : Seq[TypeSymbol]) extends Type case class PolyTypeWithCons(typeRef : Type, symbols : Seq[TypeSymbol], cons: String) extends Type case class AnnotatedType(typeRef : Type, attribTreeRefs : List[Int]) extends Type case class AnnotatedWithSelfType(typeRef : Type, symbol : Symbol, attribTreeRefs : List[Int]) extends Type -case class DeBruijnIndexType(typeLevel : Int, typeIndex : Int) extends Type case class ExistentialType(typeRef : Type, symbols : Seq[Symbol]) extends Type -- cgit v1.2.3 From 9ad98963d092d91ca3da6dc7fcc935c386f49a74 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 28 Sep 2012 20:39:53 -0700 Subject: Fix for SI-6447, macro dependent type propagation. It really pays not to write new TypeMaps unless it is absolutely necessary, because there are about 1000 ways to get them wrong. I'm 98% sure this one can be dropped. Review by @xeno-by. --- src/compiler/scala/tools/nsc/typechecker/Macros.scala | 2 +- test/files/pos/t6447.scala | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t6447.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index bcc37e8b37..db3c133ee1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -315,7 +315,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { case _ => pre } - TypeRef(pre1, sym, args map mapOver) + typeRef(apply(pre1), sym, mapOverArgs(args, sym.typeParams)) case _ => mapOver(tp) } diff --git a/test/files/pos/t6447.scala b/test/files/pos/t6447.scala new file mode 100644 index 0000000000..1c0c0f2a31 --- /dev/null +++ b/test/files/pos/t6447.scala @@ -0,0 +1,18 @@ +import scala.language.experimental.macros +import scala.reflect.macros.Context + +class X { type T } + +object X { + // this works + def foo(x: X): x.T = macro fooImpl + def fooImpl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ??? + + // this doesn't + def bar(x: X, y: X): (x.T, y.T) = macro barImpl + def barImpl(c: Context)(x: c.Expr[X], y: c.Expr[X]): c.Expr[(x.value.T, y.value.T)] = ??? + + // neither does this + def baz(x: X)(xs: List[x.T]): Unit = macro bazImpl + def bazImpl(c: Context)(x: c.Expr[X])(xs: c.Expr[List[x.value.T]]): c.Expr[Unit] = ??? +} -- cgit v1.2.3 From f2f4f558980a8c344db01903579b98e7aa5d6185 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 28 Sep 2012 20:54:35 -0700 Subject: Some cleanups in Macros. Since I was in the neigborhood for SI-6447. 
--- .../scala/tools/nsc/typechecker/Macros.scala | 78 +++++++++++----------- 1 file changed, 38 insertions(+), 40 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index db3c133ee1..2b78b37439 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -300,53 +300,51 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { private def macroImplSig(macroDef: Symbol, tparams: List[TypeDef], vparamss: List[List[ValDef]], retTpe: Type): (List[List[Symbol]], Type) = { // had to move method's body to an object because of the recursive dependencies between sigma and param object SigGenerator { - def sigma(tpe: Type): Type = { - class SigmaTypeMap extends TypeMap { - def apply(tp: Type): Type = tp match { - case TypeRef(pre, sym, args) => - val pre1 = pre match { - case ThisType(sym) if sym == macroDef.owner => - SingleType(SingleType(SingleType(NoPrefix, ctxParam), MacroContextPrefix), ExprValue) - case SingleType(NoPrefix, sym) => - mfind(vparamss)(_.symbol == sym) match { - case Some(macroDefParam) => SingleType(SingleType(NoPrefix, param(macroDefParam)), ExprValue) - case _ => pre - } - case _ => - pre - } - typeRef(apply(pre1), sym, mapOverArgs(args, sym.typeParams)) - case _ => - mapOver(tp) - } + def WeakTagClass = getMember(MacroContextClass, tpnme.WeakTypeTag) + def ExprClass = getMember(MacroContextClass, tpnme.Expr) + val cache = scala.collection.mutable.Map[Symbol, Symbol]() + val ctxParam = makeParam(nme.macroContext, macroDef.pos, MacroContextClass.tpe, SYNTHETIC) + val paramss = List(ctxParam) :: mmap(vparamss)(param) + val implReturnType = typeRef(singleType(NoPrefix, ctxParam), ExprClass, List(sigma(retTpe))) + + object SigmaTypeMap extends TypeMap { + def mapPrefix(pre: Type) = pre match { + case ThisType(sym) if sym == macroDef.owner => + singleType(singleType(singleType(NoPrefix, ctxParam), MacroContextPrefix), ExprValue) + case SingleType(NoPrefix, sym) => + mfind(vparamss)(_.symbol == sym).fold(pre)(p => singleType(singleType(NoPrefix, param(p)), ExprValue)) + case _ => + mapOver(pre) + } + def apply(tp: Type): Type = tp match { + case TypeRef(pre, sym, args) => + val pre1 = mapPrefix(pre) + val args1 = mapOverArgs(args, sym.typeParams) + if ((pre eq pre1) && (args eq args1)) tp + else typeRef(pre1, sym, args1) + case _ => + mapOver(tp) } - - new SigmaTypeMap() apply tpe } + def sigma(tpe: Type): Type = SigmaTypeMap(tpe) - def makeParam(name: Name, pos: Position, tpe: Type, flags: Long = 0L) = + def makeParam(name: Name, pos: Position, tpe: Type, flags: Long) = macroDef.newValueParameter(name, pos, flags) setInfo tpe - val ctxParam = makeParam(nme.macroContext, macroDef.pos, MacroContextClass.tpe, SYNTHETIC) - def implType(isType: Boolean, origTpe: Type): Type = + def implType(isType: Boolean, origTpe: Type): Type = { + def tsym = if (isType) WeakTagClass else ExprClass + def targ = origTpe.typeArgs.headOption getOrElse NoType + if (isRepeatedParamType(origTpe)) - appliedType( - RepeatedParamClass.typeConstructor, - List(implType(isType, sigma(origTpe.typeArgs.head)))) - else { - val tsym = getMember(MacroContextClass, if (isType) tpnme.WeakTypeTag else tpnme.Expr) + scalaRepeatedType(implType(isType, sigma(targ))) + else typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(origTpe))) - } - val paramCache = scala.collection.mutable.Map[Symbol, Symbol]() - def param(tree: Tree): Symbol = - 
paramCache.getOrElseUpdate(tree.symbol, { + } + def param(tree: Tree): Symbol = ( + cache.getOrElseUpdate(tree.symbol, { val sym = tree.symbol - val sigParam = makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe)) - if (sym.isSynthetic) sigParam.flags |= SYNTHETIC - sigParam + makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe), sym getFlag SYNTHETIC) }) - - val paramss = List(ctxParam) :: mmap(vparamss)(param) - val implRetTpe = typeRef(singleType(NoPrefix, ctxParam), getMember(MacroContextClass, tpnme.Expr), List(sigma(retTpe))) + ) } import SigGenerator._ @@ -354,7 +352,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { macroTraceVerbose("tparams are: ")(tparams) macroTraceVerbose("vparamss are: ")(vparamss) macroTraceVerbose("retTpe is: ")(retTpe) - macroTraceVerbose("macroImplSig is: ")((paramss, implRetTpe)) + macroTraceVerbose("macroImplSig is: ")((paramss, implReturnType)) } /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method, -- cgit v1.2.3 From d892e8b3b215d39f00fbbcdb202baf5329c39815 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 28 Sep 2012 22:03:39 -0700 Subject: Fix for SI-5130, precision disappearing from refinement. Remove some code, win a prize. --- src/reflect/scala/reflect/internal/Types.scala | 6 ---- test/files/pos/t5130.scala | 46 ++++++++++++++++++++++++++ 2 files changed, 46 insertions(+), 6 deletions(-) create mode 100644 test/files/pos/t5130.scala (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 2c036b3308..3e55617cbf 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3553,12 +3553,6 @@ trait Types extends api.Types { self: SymbolTable => val pre1 = pre match { case x: SuperType if sym1.isEffectivelyFinal || sym1.isDeferred => x.thistpe - case _: CompoundType if sym1.isClass => - // sharpen prefix so that it is maximal and still contains the class. - pre.parents.reverse dropWhile (_.member(sym1.name) != sym1) match { - case Nil => pre - case parent :: _ => parent - } case _ => pre } if (pre eq pre1) TypeRef(pre, sym1, args) diff --git a/test/files/pos/t5130.scala b/test/files/pos/t5130.scala new file mode 100644 index 0000000000..676d3c7050 --- /dev/null +++ b/test/files/pos/t5130.scala @@ -0,0 +1,46 @@ +import scala.language.reflectiveCalls + +class A { + this_a => + + def b = new B + class B { def a: this_a.type = this_a } +} +trait A2 { def c = () } + +object Test { + val v1 = new A { def c = () } + val v2 = new A with A2 { } + val v3: A { def c: Unit } = null + def d1 = new A { def c = () } + def d2 = new A with A2 { } + def d3: A { def c: Unit } = null + var x1 = new A { def c = () } + var x2 = new A with A2 { } + var x3: A { def c: Unit } = null + + def main(args: Array[String]): Unit = { + val mv1 = new A { def c = () } + val mv2 = new A with A2 { } + val mv3: A { def c: Unit } = null + def md1 = new A { def c = () } + def md2 = new A with A2 { } + def md3: A { def c: Unit } = null + + v1.b.a.c + v2.b.a.c + v3.b.a.c + d1.b.a.c + d2.b.a.c + d3.b.a.c + x1.b.a.c + x2.b.a.c + x3.b.a.c + mv1.b.a.c + mv2.b.a.c + mv3.b.a.c + md1.b.a.c + md2.b.a.c + md3.b.a.c + } +} -- cgit v1.2.3 From 29a59700b4cf1ade91abb6020ba4814be5ef88e7 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 29 Sep 2012 14:20:21 -0700 Subject: Added utility function shortClass. 
Pretty sick of names like scala> typeOf[List[Int]].getClass.getName res0: String = scala.reflect.internal.Types$TypeRef$$anon$1 I wrote this so I can see what the class of some arbitrary thing is in a way which my little brain can understand. For the example above we get scala> shortClassOfInstance(typeOf[List[Int]]) res0: String = ArgsTypeRef with AliasTypeRef Let's pimp a "shortClassName" onto AnyRef and be happy. --- .../tools/nsc/interpreter/JLineCompletion.scala | 13 ++------- src/reflect/scala/reflect/internal/Symbols.scala | 4 +-- .../scala/reflect/internal/util/StringOps.scala | 8 +++++ .../scala/reflect/internal/util/package.scala | 34 ++++++++++++++++++++++ test/files/run/shortClass.check | 10 +++++++ test/files/run/shortClass.scala | 24 +++++++++++++++ 6 files changed, 80 insertions(+), 13 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/util/package.scala create mode 100644 test/files/run/shortClass.check create mode 100644 test/files/run/shortClass.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala index b390ad5417..9a4be27c76 100644 --- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala +++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala @@ -10,6 +10,7 @@ import scala.tools.jline._ import scala.tools.jline.console.completer._ import Completion._ import scala.collection.mutable.ListBuffer +import scala.reflect.internal.util.StringOps.longestCommonPrefix // REPL completor - queries supplied interpreter for valid // completions based on current contents of buffer. @@ -301,16 +302,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput def isConsecutiveTabs(buf: String, cursor: Int) = cursor == lastCursor && buf == lastBuf - // Longest common prefix - def commonPrefix(xs: List[String]): String = { - if (xs.isEmpty || xs.contains("")) "" - else xs.head.head match { - case ch => - if (xs.tail forall (_.head == ch)) "" + ch + commonPrefix(xs map (_.tail)) - else "" - } - } - // This is jline's entry point for completion. 
override def complete(buf: String, cursor: Int): Candidates = { verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0 @@ -324,7 +315,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput val newCursor = if (winners contains "") p.cursor else { - val advance = commonPrefix(winners) + val advance = longestCommonPrefix(winners) lastCursor = p.position + advance.length lastBuf = (buf take p.position) + advance repldbg("tryCompletion(%s, _) lastBuf = %s, lastCursor = %s, p.position = %s".format( diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 3adcd86c73..c0d15450e9 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -8,7 +8,7 @@ package internal import scala.collection.{ mutable, immutable } import scala.collection.mutable.ListBuffer -import util.Statistics +import util.{ Statistics, shortClassOfInstance } import Flags._ import scala.annotation.tailrec import scala.reflect.io.AbstractFile @@ -182,7 +182,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (isGADTSkolem) " (this is a GADT skolem)" else "" - def shortSymbolClass = getClass.getName.split('.').last.stripPrefix("Symbols$") + def shortSymbolClass = shortClassOfInstance(this) def symbolCreationString: String = ( "%s%25s | %-40s | %s".format( if (settings.uniqid.value) "%06d | ".format(id) else "", diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala index 281ade8134..e4ad8559e2 100644 --- a/src/reflect/scala/reflect/internal/util/StringOps.scala +++ b/src/reflect/scala/reflect/internal/util/StringOps.scala @@ -34,6 +34,14 @@ trait StringOps { s.substring(0, idx + 1) } } + def longestCommonPrefix(xs: List[String]): String = { + if (xs.isEmpty || xs.contains("")) "" + else xs.head.head match { + case ch => + if (xs.tail forall (_.head == ch)) "" + ch + longestCommonPrefix(xs map (_.tail)) + else "" + } + } def decompose(str: String, sep: Char): List[String] = { def ws(start: Int): List[String] = diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala new file mode 100644 index 0000000000..83c8bf67ba --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/package.scala @@ -0,0 +1,34 @@ +package scala +package reflect +package internal + +package object util { + import StringOps.longestCommonPrefix + + // Shorten a name like Symbols$FooSymbol to FooSymbol. 
+ private def shortenName(name: String): String = { + if (name == "") return "" + val segments = (name split '$').toList + val last = segments.last + + if (last.length == 0) + segments takeRight 2 mkString "$" + else + last + } + + def shortClassOfInstance(x: AnyRef): String = shortClass(x.getClass) + def shortClass(clazz: Class[_]): String = { + val name: String = (clazz.getName split '.').last + def isModule = name endsWith "$" // object + def isAnon = (name split '$').last forall (_.isDigit) // anonymous class + + if (isModule) + (name split '$' filterNot (_ == "")).last + "$" + else if (isAnon) { + val parents = clazz.getSuperclass :: clazz.getInterfaces.toList + parents map (c => shortClass(c)) mkString " with " + } + else shortenName(name) + } +} diff --git a/test/files/run/shortClass.check b/test/files/run/shortClass.check new file mode 100644 index 0000000000..fbdb725cca --- /dev/null +++ b/test/files/run/shortClass.check @@ -0,0 +1,10 @@ +bippity.bop.Foo +bippity.bop.Foo$Bar +bippity.bop.Foo$Bar$ +Test$$anon$1 +Test$$anon$2 +Foo +Bar +Bar$ +Foo with DingDongBippy +Bar with DingDongBippy diff --git a/test/files/run/shortClass.scala b/test/files/run/shortClass.scala new file mode 100644 index 0000000000..b7bb016896 --- /dev/null +++ b/test/files/run/shortClass.scala @@ -0,0 +1,24 @@ +import scala.reflect.internal.util._ + +package bippity { + trait DingDongBippy + + package bop { + class Foo { + class Bar + object Bar + } + } +} + +object Test { + import bippity._ + import bop._ + + def main(args: Array[String]): Unit = { + val f = new Foo + val instances = List(f, new f.Bar, f.Bar, new Foo with DingDongBippy, new f.Bar with DingDongBippy) + instances map (_.getClass.getName) foreach println + instances map shortClassOfInstance foreach println + } +} -- cgit v1.2.3 From 8886d22cd64e2bf861079873751455aeef9ee7a1 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 29 Sep 2012 18:00:57 -0700 Subject: Fix for SI-5859, inapplicable varargs. And other polishing related to varargs handling. --- .../scala/tools/nsc/typechecker/EtaExpansion.scala | 2 +- .../scala/tools/nsc/typechecker/Infer.scala | 27 ++++++++++++---------- .../scala/tools/nsc/typechecker/Namers.scala | 2 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 26 ++++++--------------- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 14 +++++++---- test/files/pos/t5859.scala | 15 ++++++++++++ 7 files changed, 50 insertions(+), 38 deletions(-) create mode 100644 test/files/pos/t5859.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala index b04a736fd3..059d32fcb5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala +++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala @@ -118,7 +118,7 @@ trait EtaExpansion { self: Analyzer => val origTpe = sym.tpe val isRepeated = definitions.isRepeatedParamType(origTpe) // SI-4176 Don't leak A* in eta-expanded function types. 
See t4176b.scala - val droppedStarTpe = if (settings.etaExpandKeepsStar.value) origTpe else dropRepeatedParamType(origTpe) + val droppedStarTpe = if (settings.etaExpandKeepsStar.value) origTpe else dropIllegalStarTypes(origTpe) val valDef = ValDef(Modifiers(SYNTHETIC | PARAM), sym.name.toTermName, TypeTree(droppedStarTpe), EmptyTree) (valDef, isRepeated) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 48abfd7a2c..6558379c51 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1593,10 +1593,10 @@ trait Infer extends Checkable { } // Drop those that use a default; keep those that use vararg/tupling conversion. mtypes exists (t => - !t.typeSymbol.hasDefaultFlag && { - compareLengths(t.params, argtpes) < 0 || // tupling (*) - hasExactlyNumParams(t, argtpes.length) // same nb or vararg - } + !t.typeSymbol.hasDefaultFlag && ( + compareLengths(t.params, argtpes) < 0 // tupling (*) + || hasExactlyNumParams(t, argtpes.length) // same nb or vararg + ) ) // (*) more arguments than parameters, but still applicable: tupling conversion works. // todo: should not return "false" when paramTypes = (Unit) no argument is given @@ -1623,15 +1623,18 @@ trait Infer extends Checkable { case OverloadedType(pre, alts) => val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 tryTwice { isSecondTry => - debuglog("infer method alt "+ tree.symbol +" with alternatives "+ - (alts map pre.memberType) +", argtpes = "+ argtpes +", pt = "+ pt) + debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt") - val applicable = resolveOverloadedMethod(argtpes, { - alts filter { alt => - inSilentMode(context)(isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) && - (!varArgsOnly || isVarArgsList(alt.tpe.params)) - } - }) + def varargsApplicableCheck(alt: Symbol) = !varArgsOnly || ( + isVarArgsList(alt.tpe.params) + && (argtpes.size >= alt.tpe.params.size) // must be checked now due to SI-5859 + ) + val applicable = resolveOverloadedMethod(argtpes, + alts filter (alt => + varargsApplicableCheck(alt) + && inSilentMode(context)(isApplicable(undetparams, followApply(pre memberType alt), argtpes, pt)) + ) + ) def improves(sym1: Symbol, sym2: Symbol) = { // util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index abd433b929..f562a251e3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -800,7 +800,7 @@ trait Namers extends MethodSynthesis { false } - val tpe1 = dropRepeatedParamType(tpe.deconst) + val tpe1 = dropIllegalStarTypes(tpe.deconst) val tpe2 = tpe1.widen // This infers Foo.type instead of "object Foo" diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 5e1f52830c..4b08f3ee80 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -60,23 +60,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans super.transformInfo(sym, tp) } - val toJavaRepeatedParam = new TypeMap { - def apply(tp: Type) = tp match { - case TypeRef(pre, RepeatedParamClass, args) => - typeRef(pre, 
JavaRepeatedParamClass, args) - case _ => - mapOver(tp) - } - } - - val toScalaRepeatedParam = new TypeMap { - def apply(tp: Type): Type = tp match { - case TypeRef(pre, JavaRepeatedParamClass, args) => - typeRef(pre, RepeatedParamClass, args) - case _ => - mapOver(tp) - } - } + val toJavaRepeatedParam = new SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass) + val toScalaRepeatedParam = new SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass) def accessFlagsToString(sym: Symbol) = flagsToString( sym getFlag (PRIVATE | PROTECTED), @@ -1483,8 +1468,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } private def isRepeatedParamArg(tree: Tree) = currentApplication match { case Apply(fn, args) => - !args.isEmpty && (args.last eq tree) && - fn.tpe.params.length == args.length && isRepeatedParamType(fn.tpe.params.last.tpe) + ( args.nonEmpty + && (args.last eq tree) + && (fn.tpe.params.length == args.length) + && isRepeatedParamType(fn.tpe.params.last.tpe) + ) case _ => false } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 3a443ea2c0..dc6beaf5c6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4988,7 +4988,7 @@ trait Typers extends Modes with Adaptations with Tags { val tree3 = stabilize(tree2, pre2, mode, pt) // SI-5967 Important to replace param type A* with Seq[A] when seen from from a reference, to avoid // inference errors in pattern matching. - tree3 setType dropRepeatedParamType(tree3.tpe) + tree3 setType dropIllegalStarTypes(tree3.tpe) } } } diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 2c036b3308..97ad02c8a9 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3800,12 +3800,16 @@ trait Types extends api.Types { self: SymbolTable => // This is the specified behavior. protected def etaExpandKeepsStar = false - object dropRepeatedParamType extends TypeMap { + /** Turn any T* types into Seq[T] except when + * in method parameter position. + */ + object dropIllegalStarTypes extends TypeMap { def apply(tp: Type): Type = tp match { case MethodType(params, restpe) => - MethodType(params, apply(restpe)) - case PolyType(tparams, restpe) => - PolyType(tparams, apply(restpe)) + // Not mapping over params + val restpe1 = apply(restpe) + if (restpe eq restpe1) tp + else MethodType(params, restpe1) case TypeRef(_, RepeatedParamClass, arg :: Nil) => seqType(arg) case _ => @@ -4618,6 +4622,8 @@ trait Types extends api.Types { self: SymbolTable => /** A map to implement the `substSym` method. 
*/ class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) { + def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2)) + protected def toType(fromtp: Type, sym: Symbol) = fromtp match { case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args) case SingleType(pre, _) => singleType(pre, sym) diff --git a/test/files/pos/t5859.scala b/test/files/pos/t5859.scala new file mode 100644 index 0000000000..2a31e68ee5 --- /dev/null +++ b/test/files/pos/t5859.scala @@ -0,0 +1,15 @@ + +class A { + def f(xs: List[Int], ys: AnyRef*) = () + def f(xs: AnyRef*) = () + + f() + f(List[AnyRef](): _*) + f(List(): _*) + f(Nil: _*) + f(Array(): _*) + f(Array[AnyRef](): _*) + f(List(1)) + f(List(1), Nil: _*) + f(List(1), Array(): _*) +} -- cgit v1.2.3 From 75a075b507b1c3f4463ab0eb42fecde66978e903 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 29 Sep 2012 16:24:43 -0700 Subject: Fix for SI-5353, imperfect error message. The fix of course is a perfect error message. --- src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 4 ++-- test/files/neg/t5353.check | 4 ++++ test/files/neg/t5353.scala | 3 +++ test/files/neg/t5692a.check | 2 +- test/files/neg/t5692b.check | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) create mode 100644 test/files/neg/t5353.check create mode 100644 test/files/neg/t5353.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 29b238c4cb..439e824aff 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -71,8 +71,8 @@ abstract class Pickler extends SubComponent { if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) { unit.error(t.pos, t.symbol.typeParams.length match { case 0 => "macro has not been expanded" - case 1 => "type parameter not specified" - case _ => "type parameters not specified" + case 1 => "this type parameter must be specified" + case _ => "these type parameters must be specified" }) return } diff --git a/test/files/neg/t5353.check b/test/files/neg/t5353.check new file mode 100644 index 0000000000..75e2435600 --- /dev/null +++ b/test/files/neg/t5353.check @@ -0,0 +1,4 @@ +t5353.scala:2: error: this type parameter must be specified + def f(x: Boolean) = if (x) Array("abc") else Array() + ^ +one error found diff --git a/test/files/neg/t5353.scala b/test/files/neg/t5353.scala new file mode 100644 index 0000000000..1ee869aac1 --- /dev/null +++ b/test/files/neg/t5353.scala @@ -0,0 +1,3 @@ +class A { + def f(x: Boolean) = if (x) Array("abc") else Array() +} diff --git a/test/files/neg/t5692a.check b/test/files/neg/t5692a.check index ded95a8820..7fbfb5dba7 100644 --- a/test/files/neg/t5692a.check +++ b/test/files/neg/t5692a.check @@ -1,4 +1,4 @@ -Test_2.scala:2: error: type parameter not specified +Test_2.scala:2: error: this type parameter must be specified def x = Macros.foo ^ one error found diff --git a/test/files/neg/t5692b.check b/test/files/neg/t5692b.check index e453870ec8..16796826b4 100644 --- a/test/files/neg/t5692b.check +++ b/test/files/neg/t5692b.check @@ -1,4 +1,4 @@ -Test_2.scala:2: error: type parameters not specified +Test_2.scala:2: error: these type parameters must be specified def x = Macros.foo ^ one error found -- cgit v1.2.3 From 32e70a01da1fda7bdc91d7301ee3b8707fd2bcd4 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 29 Sep 2012 18:07:09 -0700 Subject: Fix for 
SI-4729, overriding java varargs in scala. This was a bad interaction between anonymous subclasses and bridge methods. new Foo { override def bar = 5 } Scala figures it can mark "bar" private since hey, what's the difference. The problem is that if it was overriding a java-defined varargs method in scala, the bridge method logic says "Oh, it's private? Then you don't need a varargs bridge." Hey scalac, you're the one that made me private! You made me like this! You! --- .../scala/tools/nsc/typechecker/RefChecks.scala | 32 ++++++++++++---------- src/reflect/scala/reflect/internal/Types.scala | 14 ++++++++++ test/files/run/t4729.check | 4 +++ test/files/run/t4729/J_1.java | 4 +++ test/files/run/t4729/S_2.scala | 29 ++++++++++++++++++++ 5 files changed, 69 insertions(+), 14 deletions(-) create mode 100644 test/files/run/t4729.check create mode 100644 test/files/run/t4729/J_1.java create mode 100644 test/files/run/t4729/S_2.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 4b08f3ee80..f4ec1666b3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -141,27 +141,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // Override checking ------------------------------------------------------------ - def isJavaVarargsAncestor(clazz: Symbol) = ( - clazz.isClass - && clazz.isJavaDefined - && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod) - ) - /** Add bridges for vararg methods that extend Java vararg methods */ def addVarargBridges(clazz: Symbol): List[Tree] = { // This is quite expensive, so attempt to skip it completely. // Insist there at least be a java-defined ancestor which // defines a varargs method. TODO: Find a cheaper way to exclude. - if (clazz.thisType.baseClasses exists isJavaVarargsAncestor) { + if (inheritsJavaVarArgsMethod(clazz)) { log("Found java varargs ancestor in " + clazz.fullLocationString + ".") val self = clazz.thisType val bridges = new ListBuffer[Tree] def varargBridge(member: Symbol, bridgetpe: Type): Tree = { - log("Generating varargs bridge for " + member.fullLocationString + " of type " + bridgetpe) + log(s"Generating varargs bridge for ${member.fullLocationString} of type $bridgetpe") - val bridge = member.cloneSymbolImpl(clazz, member.flags | VBRIDGE | ARTIFACT) setPos clazz.pos + val newFlags = (member.flags | VBRIDGE | ARTIFACT) & ~PRIVATE + val bridge = member.cloneSymbolImpl(clazz, newFlags) setPos clazz.pos bridge.setInfo(bridgetpe.cloneInfo(bridge)) clazz.info.decls enter bridge @@ -174,26 +169,35 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans localTyper typed DefDef(bridge, body) } - // For all concrete non-private members that have a (Scala) repeated parameter: - // compute the corresponding method type `jtpe` with a Java repeated parameter + // For all concrete non-private members (but: see below) that have a (Scala) repeated + // parameter: compute the corresponding method type `jtpe` with a Java repeated parameter // if a method with type `jtpe` exists and that method is not a varargs bridge // then create a varargs bridge of type `jtpe` that forwards to the // member method with the Scala vararg type. - for (member <- clazz.info.nonPrivateMembers) { + // + // @PP: Can't call nonPrivateMembers because we will miss refinement members, + // which have been marked private. See SI-4729. 
+ for (member <- nonTrivialMembers(clazz)) { + log(s"Considering $member for java varargs bridge in $clazz") if (!member.isDeferred && member.isMethod && hasRepeatedParam(member.info)) { val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE) + // Delaying calling memberType as long as possible if (inherited ne NoSymbol) { - val jtpe = toJavaRepeatedParam(self.memberType(member)) + val jtpe = toJavaRepeatedParam(self memberType member) // this is a bit tortuous: we look for non-private members or bridges // if we find a bridge everything is OK. If we find another member, // we need to create a bridge - if (inherited filter (sym => (self.memberType(sym) matches jtpe) && !(sym hasFlag VBRIDGE)) exists) + val inherited1 = inherited filter (sym => !(sym hasFlag VBRIDGE) && (self memberType sym matches jtpe)) + if (inherited1.exists) bridges += varargBridge(member, jtpe) } } } + if (bridges.size > 0) + log(s"Adding ${bridges.size} bridges for methods extending java varargs.") + bridges.toList } else Nil diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 97ad02c8a9..b3b302121d 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -6989,6 +6989,14 @@ trait Types extends api.Types { self: SymbolTable => } } + def isJavaVarargsAncestor(clazz: Symbol) = ( + clazz.isClass + && clazz.isJavaDefined + && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod) + ) + def inheritsJavaVarArgsMethod(clazz: Symbol) = + clazz.thisType.baseClasses exists isJavaVarargsAncestor + /** All types in list must be polytypes with type parameter lists of * same length as tparams. * Returns list of list of bounds infos, where corresponding type @@ -7101,6 +7109,12 @@ trait Types extends api.Types { self: SymbolTable => else (ps :+ SerializableClass.tpe).toList ) + /** Members of the given class, other than those inherited + * from Any or AnyRef. + */ + def nonTrivialMembers(clazz: Symbol): Iterable[Symbol] = + clazz.info.members filterNot (sym => sym.owner == ObjectClass || sym.owner == AnyClass) + def objToAny(tp: Type): Type = if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe else tp diff --git a/test/files/run/t4729.check b/test/files/run/t4729.check new file mode 100644 index 0000000000..9a2aa56d99 --- /dev/null +++ b/test/files/run/t4729.check @@ -0,0 +1,4 @@ +WrappedArray(1, 2) +WrappedArray(1, 2) +WrappedArray(1, 2) +WrappedArray(1, 2) diff --git a/test/files/run/t4729/J_1.java b/test/files/run/t4729/J_1.java new file mode 100644 index 0000000000..2ffb5a88d1 --- /dev/null +++ b/test/files/run/t4729/J_1.java @@ -0,0 +1,4 @@ +// Java Interface: +public interface J_1 { + public void method(String... 
s); +} diff --git a/test/files/run/t4729/S_2.scala b/test/files/run/t4729/S_2.scala new file mode 100644 index 0000000000..e34e3d34d4 --- /dev/null +++ b/test/files/run/t4729/S_2.scala @@ -0,0 +1,29 @@ + // Scala class: +class ScalaVarArgs extends J_1 { + // -- no problem on overriding it using ordinary class + def method(s: String*) { println(s) } +} + +object Test { + def main(args: Array[String]) { + //[1] Ok - no problem using inferred type + val varArgs = new J_1 { + def method(s: String*) { println(s) } + } + varArgs.method("1", "2") + + //[2] Ok -- no problem when explicit set its type after construction + val b: J_1 = varArgs + b.method("1", "2") + + //[3] Ok -- no problem on calling its method + (new ScalaVarArgs).method("1", "2") + (new ScalaVarArgs: J_1).method("1", "2") + + //[4] Not Ok -- error when assigning anonymous class to a explictly typed val + // Compiler error: object creation impossible, since method method in trait VarArgs of type (s: [java.lang.String])Unit is not defined + val tagged: J_1 = new J_1 { + def method(s: String*) { println(s) } + } + } +} -- cgit v1.2.3 From d16326a7b679ec7877ff6b5223d2176f8a651b70 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 30 Sep 2012 09:43:51 -0700 Subject: Fix for SI-6452, leak in ListBuffer. The private var which holds a pointer to the end of the list was not cleared even when the length of the buffer was reduced to 0. --- .../scala/collection/mutable/ListBuffer.scala | 44 +++++++++++++++------- 1 file changed, 31 insertions(+), 13 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index cd743999bc..bced92e663 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -56,12 +56,18 @@ final class ListBuffer[A] import scala.collection.Traversable import scala.collection.immutable.ListSerializeEnd + /** Expected invariants: + * If start.isEmpty, last0 == null + * If start.nonEmpty, last0 != null + * If len == 0, start.isEmpty + * If len > 0, start.nonEmpty + */ private var start: List[A] = Nil private var last0: ::[A] = _ private var exported: Boolean = false private var len = 0 - protected def underlying: immutable.Seq[A] = start + protected def underlying: List[A] = start private def writeObject(out: ObjectOutputStream) { // write start @@ -133,7 +139,7 @@ final class ListBuffer[A] if (exported) copy() if (n == 0) { val newElem = new :: (x, start.tail); - if (last0 eq start) { + if ((last0 eq null) || (last0 eq start)) { last0 = newElem } start = newElem @@ -162,7 +168,7 @@ final class ListBuffer[A] */ def += (x: A): this.type = { if (exported) copy() - if (start.isEmpty) { + if (isEmpty) { last0 = new :: (x, Nil) start = last0 } else { @@ -184,6 +190,7 @@ final class ListBuffer[A] */ def clear() { start = Nil + last0 = null exported = false len = 0 } @@ -197,7 +204,7 @@ final class ListBuffer[A] def +=: (x: A): this.type = { if (exported) copy() val newElem = new :: (x, start) - if (start.isEmpty) last0 = newElem + if (isEmpty) last0 = newElem start = newElem len += 1 this @@ -219,7 +226,7 @@ final class ListBuffer[A] if (n == 0) { while (!elems.isEmpty) { val newElem = new :: (elems.head, start) - if (start.isEmpty) last0 = newElem + if (isEmpty) last0 = newElem start = newElem elems = elems.tail } @@ -243,6 +250,15 @@ final class ListBuffer[A] } } + /** Reduce the length of the buffer, and null out last0 + * if this reduces the length to 0. 
+ */ + private def reduceLengthBy(num: Int) { + len -= num + if (len <= 0) // obviously shouldn't be < 0, but still better not to leak + last0 = null + } + /** Removes a given number of elements on a given index position. May take * time linear in the buffer size. * @@ -274,7 +290,7 @@ final class ListBuffer[A] c -= 1 } } - len -= count1 + reduceLengthBy(count1) } // Implementation of abstract method in Builder @@ -285,7 +301,7 @@ final class ListBuffer[A] * copied lazily, the first time it is mutated. */ override def toList: List[A] = { - exported = !start.isEmpty + exported = !isEmpty start } @@ -296,7 +312,7 @@ final class ListBuffer[A] * @param xs the list to which elements are prepended */ def prependToList(xs: List[A]): List[A] = { - if (start.isEmpty) xs + if (isEmpty) xs else { if (exported) copy() last0.tl = xs @@ -331,7 +347,7 @@ final class ListBuffer[A] if (last0 eq cursor.tail) last0 = cursor.asInstanceOf[::[A]] cursor.asInstanceOf[::[A]].tl = cursor.tail.tail } - len -= 1 + reduceLengthBy(1) old } @@ -343,11 +359,12 @@ final class ListBuffer[A] */ override def -= (elem: A): this.type = { if (exported) copy() - if (start.isEmpty) {} + if (isEmpty) {} else if (start.head == elem) { start = start.tail - len -= 1 - } else { + reduceLengthBy(1) + } + else { var cursor = start while (!cursor.tail.isEmpty && cursor.tail.head != elem) { cursor = cursor.tail @@ -357,7 +374,7 @@ final class ListBuffer[A] if (z.tl == last0) last0 = z z.tl = cursor.tail.tail - len -= 1 + reduceLengthBy(1) } } this @@ -397,6 +414,7 @@ final class ListBuffer[A] /** Copy contents of this buffer */ private def copy() { + if (isEmpty) return var cursor = start val limit = last0.tail clear() -- cgit v1.2.3 From d2074796a8b822c4c82faecc8eb0eef4837508e3 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 1 Oct 2012 05:44:50 -0700 Subject: Fix for TypeVar instantiation. In an effort to reduce the enormous amount of duplication which now exists among methods which attempt to deduce something about the relationship between two types, a sampling (and only a sampling - this might not even be half of them) given here: def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean def isCompatibleByName(tp: Type, pt: Type): Boolean def isConservativelyCompatible(tp: Type, pt: Type): Boolean def isConsistent(tp1: Type, tp2: Type): Boolean def isDifferentType(tp1: Type, tp2: Type): Boolean def isDifferentTypeConstructor(tp1: Type, tp2: Type): Boolean def isDistinguishableFrom(t1: Type, t2: Type): Boolean def isNeverSubType(tp1: Type, tp2: Type): Boolean def isNumericSubType(tp1: Type, tp2: Type): Boolean def isPlausiblyCompatible(tp: Type, pt: Type): Boolean def isPopulated(tp1: Type, tp2: Type): Boolean def isSameType(tp1: Type, tp2: Type): Boolean def isSameType2(tp1: Type, tp2: Type): Boolean def isSubType(tp1: Type, tp2: Type): Boolean def isWeakSubType(tp1: Type, tp2: Type): Boolean def isWeaklyCompatible(tp: Type, pt: Type): Boolean def matches(tpe1: Type, tpe2: Type): Boolean def overlaps(tp1: Type, tp2: Type): Boolean def typesConform(tp: Type, pt: Type): Boolean I began pulling a thread left by moors in isPopulated: need to investgate why this can't be made symmetric -- neg/gadts1 fails, and run/existials also. Followed that to this code in TypeVar: val newInst = wildcardToTypeVarMap(tp) (constr isWithinBounds newInst) && { setInst(tp); true } -------^ That was the obstacle to symmetry, because it creates a cycle in e.g. run/existentials. Kept pulling the string, came back to my own comment of long ago: !!! 
Is it somehow guaranteed that this will not break under nesting? In general one has to save and restore the contents of the field... Decided that uncertainty could no longer be tolerated. Unless it can be proven somehow that there will never be crosstalk among the save/suspension points, we should do it this way even if nothing demands it yet. What's in this commit: - Made isPopulated symmetric. - Made setInst resistant to TypeVar cycles. - Fixed above mentioned bug in registerTypeEquality. - Added some rigor to the suspension/unsuspension of TypeVars so it will not break under nesting. - Recovered pos/t0851.scala from its deletion. --- src/reflect/scala/reflect/internal/Types.scala | 409 ++++++++++++------------- test/files/neg/gadts1.check | 7 +- test/files/neg/patmat-type-check.check | 17 +- test/files/neg/t3015.check | 5 +- test/files/neg/unchecked-impossible.check | 6 +- 5 files changed, 230 insertions(+), 214 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 2c036b3308..1e143f10f6 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1039,69 +1039,66 @@ trait Types extends api.Types { self: SymbolTable => } def findMembers(excludedFlags: Long, requiredFlags: Long): Scope = { - // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by - // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements - // without this, the matchesType call would lead to type variables on both sides - // of a subtyping/equality judgement, which can lead to recursive types being constructed. - // See (t0851) for a situation where this happens. - val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this) - - if (Statistics.canEnable) Statistics.incCounter(findMembersCount) - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMembersNanos) else null - - //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG - var members: Scope = null - var required = requiredFlags - var excluded = excludedFlags | DEFERRED - var continue = true - var self: Type = null - while (continue) { - continue = false - val bcs0 = baseClasses - var bcs = bcs0 - while (!bcs.isEmpty) { - val decls = bcs.head.info.decls - var entry = decls.elems - while (entry ne null) { - val sym = entry.sym - val flags = sym.flags - if ((flags & required) == required) { - val excl = flags & excluded - if (excl == 0L && - (// omit PRIVATE LOCALS unless selector class is contained in class owning the def. 
- (bcs eq bcs0) || - (flags & PrivateLocal) != PrivateLocal || - (bcs0.head.hasTransOwner(bcs.head)))) { - if (members eq null) members = newFindMemberScope - var others: ScopeEntry = members.lookupEntry(sym.name) - var symtpe: Type = null - while ((others ne null) && { - val other = others.sym - (other ne sym) && - ((other.owner eq sym.owner) || - (flags & PRIVATE) != 0 || { - if (self eq null) self = this.narrow - if (symtpe eq null) symtpe = self.memberType(sym) - !(self.memberType(other) matches symtpe) - })}) { - others = members lookupNextEntry others + def findMembersInternal: Scope = { + var members: Scope = null + if (Statistics.canEnable) Statistics.incCounter(findMembersCount) + val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMembersNanos) else null + + //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG + var required = requiredFlags + var excluded = excludedFlags | DEFERRED + var continue = true + var self: Type = null + while (continue) { + continue = false + val bcs0 = baseClasses + var bcs = bcs0 + while (!bcs.isEmpty) { + val decls = bcs.head.info.decls + var entry = decls.elems + while (entry ne null) { + val sym = entry.sym + val flags = sym.flags + if ((flags & required) == required) { + val excl = flags & excluded + if (excl == 0L && + (// omit PRIVATE LOCALS unless selector class is contained in class owning the def. + (bcs eq bcs0) || + (flags & PrivateLocal) != PrivateLocal || + (bcs0.head.hasTransOwner(bcs.head)))) { + if (members eq null) members = newFindMemberScope + var others: ScopeEntry = members.lookupEntry(sym.name) + var symtpe: Type = null + while ((others ne null) && { + val other = others.sym + (other ne sym) && + ((other.owner eq sym.owner) || + (flags & PRIVATE) != 0 || { + if (self eq null) self = this.narrow + if (symtpe eq null) symtpe = self.memberType(sym) + !(self.memberType(other) matches symtpe) + })}) { + others = members lookupNextEntry others + } + if (others eq null) members enter sym + } else if (excl == DEFERRED) { + continue = true } - if (others eq null) members enter sym - } else if (excl == DEFERRED) { - continue = true } - } - entry = entry.next - } // while (entry ne null) - // excluded = excluded | LOCAL - bcs = bcs.tail - } // while (!bcs.isEmpty) - required |= DEFERRED - excluded &= ~(DEFERRED.toLong) - } // while (continue) - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) - if (suspension ne null) suspension foreach (_.suspended = false) - if (members eq null) EmptyScope else members + entry = entry.next + } // while (entry ne null) + // excluded = excluded | LOCAL + bcs = bcs.tail + } // while (!bcs.isEmpty) + required |= DEFERRED + excluded &= ~(DEFERRED.toLong) + } // while (continue) + if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + if (members eq null) EmptyScope else members + } + + if (this.isGround) findMembersInternal + else suspendingTypeVars(typeVarsInType(this))(findMembersInternal) } /** @@ -1115,102 +1112,98 @@ trait Types extends api.Types { self: SymbolTable => */ //TODO: use narrow only for modules? (correct? efficiency gain?) def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = { - // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by - // replacing them by the corresponding type parameter, as that messes up (e.g.) 
type variables in type refinements - // without this, the matchesType call would lead to type variables on both sides - // of a subtyping/equality judgement, which can lead to recursive types being constructed. - // See (t0851) for a situation where this happens. - val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this) - - if (Statistics.canEnable) Statistics.incCounter(findMemberCount) - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMemberNanos) else null - - //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG - var member: Symbol = NoSymbol - var members: List[Symbol] = null - var lastM: ::[Symbol] = null - var membertpe: Type = null - var required = requiredFlags - var excluded = excludedFlags | DEFERRED - var continue = true - var self: Type = null - - while (continue) { - continue = false - val bcs0 = baseClasses - var bcs = bcs0 - while (!bcs.isEmpty) { - val decls = bcs.head.info.decls - var entry = decls.lookupEntry(name) - while (entry ne null) { - val sym = entry.sym - val flags = sym.flags - if ((flags & required) == required) { - val excl = flags & excluded - if (excl == 0L && - (// omit PRIVATE LOCALS unless selector class is contained in class owning the def. - (bcs eq bcs0) || - (flags & PrivateLocal) != PrivateLocal || - (bcs0.head.hasTransOwner(bcs.head)))) { - if (name.isTypeName || stableOnly && sym.isStable) { - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) - if (suspension ne null) suspension foreach (_.suspended = false) - return sym - } else if (member eq NoSymbol) { - member = sym - } else if (members eq null) { - if ((member ne sym) && - ((member.owner eq sym.owner) || - (flags & PRIVATE) != 0 || { - if (self eq null) self = this.narrow - if (membertpe eq null) membertpe = self.memberType(member) - !(membertpe matches self.memberType(sym)) - })) { - lastM = new ::(sym, null) - members = member :: lastM - } - } else { - var others: List[Symbol] = members - var symtpe: Type = null - while ((others ne null) && { - val other = others.head - (other ne sym) && - ((other.owner eq sym.owner) || + def findMemberInternal: Symbol = { + var member: Symbol = NoSymbol + var members: List[Symbol] = null + var lastM: ::[Symbol] = null + if (Statistics.canEnable) Statistics.incCounter(findMemberCount) + val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMemberNanos) else null + + //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG + var membertpe: Type = null + var required = requiredFlags + var excluded = excludedFlags | DEFERRED + var continue = true + var self: Type = null + + while (continue) { + continue = false + val bcs0 = baseClasses + var bcs = bcs0 + while (!bcs.isEmpty) { + val decls = bcs.head.info.decls + var entry = decls.lookupEntry(name) + while (entry ne null) { + val sym = entry.sym + val flags = sym.flags + if ((flags & required) == required) { + val excl = flags & excluded + if (excl == 0L && + (// omit PRIVATE LOCALS unless selector class is contained in class owning the def. 
+ (bcs eq bcs0) || + (flags & PrivateLocal) != PrivateLocal || + (bcs0.head.hasTransOwner(bcs.head)))) { + if (name.isTypeName || stableOnly && sym.isStable) { + if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + return sym + } else if (member eq NoSymbol) { + member = sym + } else if (members eq null) { + if ((member ne sym) && + ((member.owner eq sym.owner) || (flags & PRIVATE) != 0 || { if (self eq null) self = this.narrow - if (symtpe eq null) symtpe = self.memberType(sym) - !(self.memberType(other) matches symtpe) - })}) { - others = others.tail - } - if (others eq null) { - val lastM1 = new ::(sym, null) - lastM.tl = lastM1 - lastM = lastM1 + if (membertpe eq null) membertpe = self.memberType(member) + !(membertpe matches self.memberType(sym)) + })) { + lastM = new ::(sym, null) + members = member :: lastM + } + } else { + var others: List[Symbol] = members + var symtpe: Type = null + while ((others ne null) && { + val other = others.head + (other ne sym) && + ((other.owner eq sym.owner) || + (flags & PRIVATE) != 0 || { + if (self eq null) self = this.narrow + if (symtpe eq null) symtpe = self.memberType(sym) + !(self.memberType(other) matches symtpe) + })}) { + others = others.tail + } + if (others eq null) { + val lastM1 = new ::(sym, null) + lastM.tl = lastM1 + lastM = lastM1 + } } + } else if (excl == DEFERRED) { + continue = true } - } else if (excl == DEFERRED) { - continue = true } - } - entry = decls lookupNextEntry entry - } // while (entry ne null) - // excluded = excluded | LOCAL - bcs = if (name == nme.CONSTRUCTOR) Nil else bcs.tail - } // while (!bcs.isEmpty) - required |= DEFERRED - excluded &= ~(DEFERRED.toLong) - } // while (continue) - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) - if (suspension ne null) suspension foreach (_.suspended = false) - if (members eq null) { - if (member == NoSymbol) if (Statistics.canEnable) Statistics.incCounter(noMemberCount) - member - } else { - if (Statistics.canEnable) Statistics.incCounter(multMemberCount) - lastM.tl = Nil - baseClasses.head.newOverloaded(this, members) + entry = decls lookupNextEntry entry + } // while (entry ne null) + // excluded = excluded | LOCAL + bcs = if (name == nme.CONSTRUCTOR) Nil else bcs.tail + } // while (!bcs.isEmpty) + required |= DEFERRED + excluded &= ~(DEFERRED.toLong) + } // while (continue) + if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + if (members eq null) { + if (member == NoSymbol) if (Statistics.canEnable) Statistics.incCounter(noMemberCount) + member + } else { + if (Statistics.canEnable) Statistics.incCounter(multMemberCount) + lastM.tl = Nil + baseClasses.head.newOverloaded(this, members) + } } + + if (this.isGround) findMemberInternal + else suspendingTypeVars(typeVarsInType(this))(findMemberInternal) } /** The (existential or otherwise) skolems and existentially quantified variables which are free in this type */ @@ -3074,7 +3067,10 @@ trait Types extends api.Types { self: SymbolTable => // invariant: before mutating constr, save old state in undoLog // (undoLog is used to reset constraints to avoid piling up unrelated ones) def setInst(tp: Type) { -// assert(!(tp containsTp this), this) + if (tp eq this) { + log(s"TypeVar cycle: called setInst passing $this to itself.") + return + } undoLog record this // if we were compared against later typeskolems, repack the existential, // because skolems are only compatible if they were created at the same level @@ -3219,16 +3215,19 @@ trait Types extends api.Types { self: 
SymbolTable => def registerTypeEquality(tp: Type, typeVarLHS: Boolean): Boolean = { // println("regTypeEq: "+(safeToString, debugString(tp), tp.getClass, if (typeVarLHS) "in LHS" else "in RHS", if (suspended) "ZZ" else if (constr.instValid) "IV" else "")) //@MDEBUG -// println("constr: "+ constr) - def checkIsSameType(tp: Type) = - if(typeVarLHS) constr.inst =:= tp - else tp =:= constr.inst + def checkIsSameType(tp: Type) = ( + if (typeVarLHS) constr.inst =:= tp + else tp =:= constr.inst + ) if (suspended) tp =:= origin else if (constr.instValid) checkIsSameType(tp) else isRelatable(tp) && { val newInst = wildcardToTypeVarMap(tp) - (constr isWithinBounds newInst) && { setInst(tp); true } + (constr isWithinBounds newInst) && { + setInst(newInst) + true + } } } @@ -5101,28 +5100,18 @@ trait Types extends api.Types { self: SymbolTable => class SubTypePair(val tp1: Type, val tp2: Type) { override def hashCode = tp1.hashCode * 41 + tp2.hashCode - override def equals(other: Any) = other match { + override def equals(other: Any) = (this eq other.asInstanceOf[AnyRef]) || (other match { + // suspend TypeVars in types compared by =:=, + // since we don't want to mutate them simply to check whether a subtype test is pending + // in addition to making subtyping "more correct" for type vars, + // it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion) + // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold) case stp: SubTypePair => - // suspend TypeVars in types compared by =:=, - // since we don't want to mutate them simply to check whether a subtype test is pending - // in addition to making subtyping "more correct" for type vars, - // it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion) - // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold) - def suspend(tp: Type) = - if (tp.isGround) null else suspendTypeVarsInType(tp) - def revive(suspension: List[TypeVar]) = - if (suspension ne null) suspension foreach (_.suspended = false) - - val suspensions = Array(tp1, stp.tp1, tp2, stp.tp2) map suspend - - val sameTypes = (tp1 =:= stp.tp1) && (tp2 =:= stp.tp2) - - suspensions foreach revive - - sameTypes + val tvars = List(tp1, stp.tp1, tp2, stp.tp2) flatMap (t => if (t.isGround) Nil else typeVarsInType(t)) + suspendingTypeVars(tvars)(tp1 =:= stp.tp1 && tp2 =:= stp.tp2) case _ => false - } + }) override def toString = tp1+" <: def isPopulated(tp1: Type, tp2: Type): Boolean = { def isConsistent(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match { case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) => - assert(sym1 == sym2) + assert(sym1 == sym2, (sym1, sym2)) pre1 =:= pre2 && - forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) => - //if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG + forall3(args1, args2, sym1.typeParams)((arg1, arg2, tparam) => if (tparam.variance == 0) arg1 =:= arg2 - else if (arg1.isInstanceOf[TypeVar]) - // if left-hand argument is a typevar, make it compatible with variance - // this is for more precise pattern matching - // todo: work this in the spec of this method - // also: think what happens if there are embedded typevars? 
- if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1 - else true - } + // if left-hand argument is a typevar, make it compatible with variance + // this is for more precise pattern matching + // todo: work this in the spec of this method + // also: think what happens if there are embedded typevars? + else arg1 match { + case _: TypeVar => if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1 + case _ => true + } + ) case (et: ExistentialType, _) => et.withTypeVars(isConsistent(_, tp2)) case (_, et: ExistentialType) => et.withTypeVars(isConsistent(tp1, _)) } - def check(tp1: Type, tp2: Type) = + def check(tp1: Type, tp2: Type) = ( if (tp1.typeSymbol.isClass && tp1.typeSymbol.hasFlag(FINAL)) tp1 <:< tp2 || isNumericValueClass(tp1.typeSymbol) && isNumericValueClass(tp2.typeSymbol) else tp1.baseClasses forall (bc => tp2.baseTypeIndex(bc) < 0 || isConsistent(tp1.baseType(bc), tp2.baseType(bc))) + ) - check(tp1, tp2)/* && check(tp2, tp1)*/ // need to investgate why this can't be made symmetric -- neg/gadts1 fails, and run/existials also. + check(tp1, tp2) && check(tp2, tp1) } /** Does a pattern of type `patType` need an outer test when executed against @@ -5302,13 +5292,15 @@ trait Types extends api.Types { self: SymbolTable => try { val before = undoLog.log var result = false - - try result = { - isSameType1(tp1, tp2) - } finally if (!result) undoLog.undoTo(before) + try { + result = isSameType1(tp1, tp2) + } + finally if (!result) undoLog.undoTo(before) result - } finally undoLog.unlock() - } finally { + } + finally undoLog.unlock() + } + finally { subsametypeRecursions -= 1 // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866) // it doesn't help to keep separate recursion counts for the three methods that now share it @@ -5590,12 +5582,12 @@ trait Types extends api.Types { self: SymbolTable => } tp1 match { case tv @ TypeVar(_,_) => - return tv.registerTypeEquality(tp2, true) + return tv.registerTypeEquality(tp2, typeVarLHS = true) case _ => } tp2 match { case tv @ TypeVar(_,_) => - return tv.registerTypeEquality(tp1, false) + return tv.registerTypeEquality(tp1, typeVarLHS = false) case _ => } tp1 match { @@ -6870,15 +6862,22 @@ trait Types extends api.Types { self: SymbolTable => } tvs.reverse } - /** Make each type var in this type use its original type for comparisons instead - * of collecting constraints. - */ - def suspendTypeVarsInType(tp: Type): List[TypeVar] = { - val tvs = typeVarsInType(tp) - // !!! Is it somehow guaranteed that this will not break under nesting? - // In general one has to save and restore the contents of the field... + + // If this type contains type variables, put them to sleep for a while. + // Don't just wipe them out by replacing them by the corresponding type + // parameter, as that messes up (e.g.) type variables in type refinements. + // Without this, the matchesType call would lead to type variables on both + // sides of a subtyping/equality judgement, which can lead to recursive types + // being constructed. See pos/t0851 for a situation where this happens. 
+ def suspendingTypeVarsInType[T](tp: Type)(op: => T): T = + suspendingTypeVars(typeVarsInType(tp))(op) + + @inline final def suspendingTypeVars[T](tvs: List[TypeVar])(op: => T): T = { + val saved = tvs map (_.suspended) tvs foreach (_.suspended = true) - tvs + + try op + finally foreach2(tvs, saved)(_.suspended = _) } /** Compute lub (if `variance == 1`) or glb (if `variance == -1`) of given list diff --git a/test/files/neg/gadts1.check b/test/files/neg/gadts1.check index 44d2b114d6..a61231a27a 100644 --- a/test/files/neg/gadts1.check +++ b/test/files/neg/gadts1.check @@ -1,8 +1,3 @@ -gadts1.scala:15: error: type mismatch; - found : Test.Double - required: a - case NumTerm(n) => c.x = Double(1.0) - ^ gadts1.scala:20: error: class Cell of type Test.Cell does not take type parameters. case Cell[a](x: Int) => c.x = 5 ^ @@ -11,4 +6,4 @@ gadts1.scala:20: error: type mismatch; required: a case Cell[a](x: Int) => c.x = 5 ^ -three errors found +two errors found diff --git a/test/files/neg/patmat-type-check.check b/test/files/neg/patmat-type-check.check index 721217c314..fedac3b746 100644 --- a/test/files/neg/patmat-type-check.check +++ b/test/files/neg/patmat-type-check.check @@ -1,12 +1,27 @@ patmat-type-check.scala:11: warning: fruitless type test: a value of type Test.Bop4[T] cannot also be a Seq[A] def s3[T](x: Bop4[T]) = x match { case Seq('b', 'o', 'b') => true } ^ +patmat-type-check.scala:11: error: pattern type is incompatible with expected type; + found : Seq[A] + required: Test.Bop4[T] + def s3[T](x: Bop4[T]) = x match { case Seq('b', 'o', 'b') => true } + ^ patmat-type-check.scala:15: warning: fruitless type test: a value of type Test.Bop5[_$1,T1,T2] cannot also be a Seq[A] def s4[T1, T2](x: Bop5[_, T1, T2]) = x match { case Seq('b', 'o', 'b') => true } ^ +patmat-type-check.scala:15: error: pattern type is incompatible with expected type; + found : Seq[A] + required: Test.Bop5[_$1,T1,T2] where type _$1 + def s4[T1, T2](x: Bop5[_, T1, T2]) = x match { case Seq('b', 'o', 'b') => true } + ^ patmat-type-check.scala:19: warning: fruitless type test: a value of type Test.Bop3[T] cannot also be a Seq[A] def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true } ^ +patmat-type-check.scala:19: error: pattern type is incompatible with expected type; + found : Seq[A] + required: Test.Bop3[T] + def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true } + ^ patmat-type-check.scala:22: error: scrutinee is incompatible with pattern type; found : Seq[A] required: String @@ -28,4 +43,4 @@ patmat-type-check.scala:30: error: scrutinee is incompatible with pattern type; def f4[T](x: Bop3[Char]) = x match { case Seq('b', 'o', 'b') => true } // fail ^ three warnings found -four errors found +7 errors found diff --git a/test/files/neg/t3015.check b/test/files/neg/t3015.check index 6948392bb0..4a03c940f4 100644 --- a/test/files/neg/t3015.check +++ b/test/files/neg/t3015.check @@ -3,4 +3,7 @@ t3015.scala:7: error: scrutinee is incompatible with pattern type; required: String val b(foo) = "foo" ^ -one error found +error: type mismatch; + found : _$1 + required: String +two errors found diff --git a/test/files/neg/unchecked-impossible.check b/test/files/neg/unchecked-impossible.check index 75fc390fa8..d150a5a853 100644 --- a/test/files/neg/unchecked-impossible.check +++ b/test/files/neg/unchecked-impossible.check @@ -1,6 +1,10 @@ unchecked-impossible.scala:5: warning: fruitless type test: a value of type T2[Int,Int] cannot also be a Seq[A] case Seq(x) => ^ -error: No warnings can be incurred 
under -Xfatal-warnings. +unchecked-impossible.scala:5: error: pattern type is incompatible with expected type; + found : Seq[A] + required: T2[Int,Int] + case Seq(x) => + ^ one warning found one error found -- cgit v1.2.3 From bc6815b2b8e87041fb10a53653af68bcb91672fc Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Mon, 1 Oct 2012 21:52:35 +0400 Subject: More retrofit of scaladoc model factory. --- .../scala/tools/nsc/doc/model/ModelFactory.scala | 37 ++++++++-------------- 1 file changed, 14 insertions(+), 23 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index 9f3e7db7d0..1013204816 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -191,7 +191,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { case NullaryMethodType(res) => resultTpe(res) case _ => tpe } - val tpe = if (!isImplicitlyInherited) sym.tpe else byConversion.get.toType memberInfo sym + val tpe = byConversion.fold(sym.tpe) (_.toType memberInfo sym) makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym) } def isDef = false @@ -429,12 +429,12 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { conversions flatMap (conv => if (!implicitExcluded(conv.conversionQualifiedName)) conv.targetTypeComponents map { - case pair@(template, tpe) => + case (template, tpe) => template match { case d: DocTemplateImpl if (d != this) => d.registerImplicitlyConvertibleClass(this, conv) case _ => // nothing } - (pair._1, pair._2, conv) + (template, tpe, conv) } else List() ) @@ -483,23 +483,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def inheritanceDiagram = makeInheritanceDiagram(this) def contentDiagram = makeContentDiagram(this) - def groupSearch[T](extractor: Comment => T, default: T): T = { - // query this template - for (c <- comment) { - val entity = extractor(c) - if (entity != default) return entity - } - // query linearization - for (tpl <- linearizationTemplates.collect{ case dtpl: DocTemplateImpl if dtpl!=this => dtpl}) { - val entity = tpl.groupSearch(extractor, default) - if (entity != default) return entity - } - default + def groupSearch[T](extractor: Comment => Option[T]): Option[T] = { + val comments = comment +: linearizationTemplates.collect { case dtpl: DocTemplateImpl => dtpl.comment } + comments.flatten.map(extractor).flatten.headOption } - def groupDescription(group: String): Option[Body] = groupSearch(_.groupDesc.get(group), None) - def groupPriority(group: String): Int = groupSearch(_.groupPrio.get(group) match { case Some(prio) => prio; case _ => 0 }, 0) - def groupName(group: String): String = groupSearch(_.groupNames.get(group) match { case Some(name) => name; case _ => group }, group) + def groupDescription(group: String): Option[Body] = groupSearch(_.groupDesc.get(group)) + def groupPriority(group: String): Int = groupSearch(_.groupPrio.get(group)) getOrElse 0 + def groupName(group: String): String = groupSearch(_.groupNames.get(group)) getOrElse group } abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package { @@ -554,7 +545,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl) extends NonTemplateMemberImpl(sym, conversion, useCaseOf, inTpl) { def valueParams = { - val info = if (!isImplicitlyInherited) 
sym.info else conversion.get.toType memberInfo sym + val info = conversion.fold(sym.info)(_.toType memberInfo sym) info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) => if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl) }} @@ -967,10 +958,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { val ignoreParents = Set[Symbol](AnyClass, AnyRefClass, ObjectClass) val filtParents = // we don't want to expose too many links to AnyRef, that will just be redundant information - if (tpl.isDefined && { val sym = tpl.get.sym; (!sym.isModule && parents.length < 2) || (sym == AnyValClass) || (sym == AnyRefClass) || (sym == AnyClass) }) - parents - else - parents.filterNot((p: Type) => ignoreParents(p.typeSymbol)) + tpl match { + case Some(tpl) if (!tpl.sym.isModule && parents.length < 2) || (tpl.sym == AnyValClass) || (tpl.sym == AnyRefClass) || (tpl.sym == AnyClass) => parents + case _ => parents.filterNot((p: Type) => ignoreParents(p.typeSymbol)) + } /** Returns: * - a DocTemplate if the type's symbol is documented -- cgit v1.2.3 From 516fe526f40e900e13319298f472db3e63525204 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 2 Oct 2012 07:53:07 -0700 Subject: Removed obsolete migration test. Arrays are not Seqs: a fact known by all and sundry. --- .../scala/tools/nsc/transform/ExplicitOuter.scala | 13 ----------- test/files/neg/array-not-seq.check | 15 ------------- test/files/neg/array-not-seq.flags | 1 - test/files/neg/array-not-seq.scala | 26 ---------------------- 4 files changed, 55 deletions(-) delete mode 100644 test/files/neg/array-not-seq.check delete mode 100644 test/files/neg/array-not-seq.flags delete mode 100644 test/files/neg/array-not-seq.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index c5494b5b1f..3f7fff3954 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -83,12 +83,6 @@ abstract class ExplicitOuter extends InfoTransform } } - /** Issue a migration warning for instance checks which might be on an Array and - * for which the type parameter conforms to Seq, because these answers changed in 2.8. - */ - def isArraySeqTest(lhs: Type, rhs: Type) = - (ArrayClass.tpe <:< lhs.widen) && (rhs.widen matchesPattern SeqClass.tpe) - def outerAccessor(clazz: Symbol): Symbol = { val firstTry = clazz.info.decl(nme.expandedName(nme.OUTER, clazz)) if (firstTry != NoSymbol && firstTry.outerSource == clazz) firstTry @@ -550,13 +544,6 @@ abstract class ExplicitOuter extends InfoTransform } case _ => - if (settings.Xmigration28.value) tree match { - case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf => - if (isArraySeqTest(qual.tpe, args.head.tpe)) - unit.warning(tree.pos, "An Array will no longer match as Seq[_].") - case _ => () - } - val x = super.transform(tree) if (x.tpe eq null) x else x setType transformInfo(currentOwner, x.tpe) diff --git a/test/files/neg/array-not-seq.check b/test/files/neg/array-not-seq.check deleted file mode 100644 index 6cfaa06efb..0000000000 --- a/test/files/neg/array-not-seq.check +++ /dev/null @@ -1,15 +0,0 @@ -array-not-seq.scala:2: warning: An Array will no longer match as Seq[_]. - def f1(x: Any) = x.isInstanceOf[Seq[_]] - ^ -array-not-seq.scala:4: warning: An Array will no longer match as Seq[_]. 
- case _: Seq[_] => true - ^ -array-not-seq.scala:16: warning: An Array will no longer match as Seq[_]. - case (Some(_: Seq[_]), Nil, _) => 1 - ^ -array-not-seq.scala:17: warning: An Array will no longer match as Seq[_]. - case (None, List(_: List[_], _), _) => 2 - ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found diff --git a/test/files/neg/array-not-seq.flags b/test/files/neg/array-not-seq.flags deleted file mode 100644 index 4e9f7e4a56..0000000000 --- a/test/files/neg/array-not-seq.flags +++ /dev/null @@ -1 +0,0 @@ --Xmigration -Xfatal-warnings \ No newline at end of file diff --git a/test/files/neg/array-not-seq.scala b/test/files/neg/array-not-seq.scala deleted file mode 100644 index 5f367bdd85..0000000000 --- a/test/files/neg/array-not-seq.scala +++ /dev/null @@ -1,26 +0,0 @@ -object Test { - def f1(x: Any) = x.isInstanceOf[Seq[_]] - def f2(x: Any) = x match { - case _: Seq[_] => true - case _ => false - } - - def f3(x: Any) = x match { - case _: Array[_] => true - case _ => false - } - - def f4(x: Any) = x.isInstanceOf[Traversable[_]] - - def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match { - case (Some(_: Seq[_]), Nil, _) => 1 - case (None, List(_: List[_], _), _) => 2 - case _ => 3 - } - - def main(args: Array[String]): Unit = { - // println(f1(Array(1))) - // println(f2(Array(1))) - // println(f3(Array(1)) - } -} -- cgit v1.2.3 From 68292195ee3f4c810fdedb78058a05ded983e422 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 2 Oct 2012 07:59:17 -0700 Subject: Defanged dummy type arguments. Spending a few hours on yet another tpe vs. tpeHK bug drove me to finally go after the dragon in his cave. I created a separate method for when you want to receive an invalid type which will cause mysterious crashes later if it is out of your sight for five seconds. def tpeHK : Type // unapplied type params stay unapplied def tpe_* : Type // unapplied type params applied as dummy args def tpe : Type // if there are unapplied type params, ABORT Actually it doesn't really abort, but I had it print a stack trace during development so I could track down every site which blindly calls tpe and see why they were doing it. Now it only admonishes you under -Ydebug. This way is a big step forward: you have to make a choice, which is good, because if you choose not to decide you still have made a choice, only mostly what you have chosen is bugs. --- src/reflect/scala/reflect/internal/Symbols.scala | 52 +++++++++++++++++------- 1 file changed, 38 insertions(+), 14 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 3adcd86c73..db2cb8d225 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1186,17 +1186,34 @@ trait Symbols extends api.Symbols { self: SymbolTable => } } - /** Get type. The type of a symbol is: - * for a type symbol, the type corresponding to the symbol itself, - * @M you should use tpeHK for a type symbol with type parameters if - * the kind of the type need not be *, as tpe introduces dummy arguments - * to generate a type of kind * - * for a term symbol, its usual type. + /** Get type. The type of a term symbol is its usual type. + * For a type symbol with no type parameters, it is the type corresponding to the symbol + * itself. For a type symbol with type parameters, you + * must determine whether you want tpeHK or tpe_*. 
If you + * call .tpe on such a symbol, you may trigger an assertion. * See the tpe/tpeHK overrides in TypeSymbol for more. */ def tpe: Type = info + + /** The type of this symbol, with any unapplied type parameters + * remaining unapplied (which means the type constructor.) + * It may be of any kind. + */ def tpeHK: Type = tpe + /** The type of this symbol, guaranteed to be of kind *. + * If there are unapplied type parameters, they will be + * substituted with dummy type arguments derived from the + * type parameters. Such types are not valid in a general + * sense and will cause difficult-to-find bugs if allowed + * to roam free. + * + * You must call tpe_* explicitly to obtain these types, + * at which point you are responsible for them as if it they + * were your own minor children. + */ + def tpe_* : Type = tpe + /** Get type info associated with symbol at current phase, after * ensuring that symbol is initialized (i.e. type is completed). */ @@ -2688,20 +2705,27 @@ trait Symbols extends api.Symbols { self: SymbolTable => * tsym.tpe = TypeRef(NoPrefix, T, List()) * }}} */ - override def tpe: Type = { + override def tpe: Type = tpeOfKind(kindStar = false) + override def tpe_* : Type = tpeOfKind(kindStar = true) + + private def tpeOfKind(kindStar: Boolean): Type = { if (tpeCache eq NoType) throw CyclicReference(this, typeConstructor) if (tpePeriod != currentPeriod) { if (isValid(tpePeriod)) { tpePeriod = currentPeriod - } else { + } + else { if (isInitialized) tpePeriod = currentPeriod tpeCache = NoType - val targs = - if (phase.erasedTypes && this != ArrayClass) List() - else unsafeTypeParams map (_.typeConstructor) - //@M! use typeConstructor to generate dummy type arguments, - // sym.tpe should not be called on a symbol that's supposed to be a higher-kinded type - // memberType should be used instead, that's why it uses tpeHK and not tpe + val targs = ( + if (phase.erasedTypes && this != ArrayClass) Nil + else unsafeTypeParams map (_.typeConstructor) match { + case dummies if dummies.nonEmpty && settings.debug.value && !kindStar => + printCaller(s"""Call to ${this.tpe} with unapplied ${dummies mkString ", "}: call tpe_* or tpeHK""")(dummies) + case dummies => + dummies + } + ) tpeCache = newTypeRef(targs) } } -- cgit v1.2.3 From 68a3a1d73f7b82203edc9eb6314a9923010c2b7e Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 2 Oct 2012 08:01:41 -0700 Subject: Cleanup in old Typers code. Using established facilities for doing established things. 
--- .../scala/tools/nsc/typechecker/Typers.scala | 59 ++++++++-------------- 1 file changed, 22 insertions(+), 37 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index dc6beaf5c6..f9a7435095 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2625,16 +2625,13 @@ trait Typers extends Modes with Adaptations with Tags { if (context.retyping) context.scope enter vparam.symbol vparam.symbol } - val vparams = fun.vparams mapConserve (typedValDef) - // for (vparam <- vparams) { - // checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); () - // } + val vparams = fun.vparams mapConserve typedValDef val formals = vparamSyms map (_.tpe) - val body1 = typed(fun.body, respt) - val restpe = packedType(body1, fun.symbol).deconst.resultType - val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe) - // body = checkNoEscaping.locals(context.scope, restpe, body) - treeCopy.Function(fun, vparams, body1).setType(funtpe) + val body1 = typed(fun.body, respt) + val restpe = packedType(body1, fun.symbol).deconst.resultType + val funtpe = appliedType(clazz, formals :+ restpe: _*) + + treeCopy.Function(fun, vparams, body1) setType funtpe } } } @@ -3480,35 +3477,23 @@ trait Typers extends Modes with Adaptations with Tags { // local dummy fixes SI-5544 val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos))) localTyper.typed(ann, mode, annClass.tpe) - } else { - // Since a selfsym is supplied, the annotation should have - // an extra "self" identifier in scope for type checking. - // This is implemented by wrapping the rhs - // in a function like "self => rhs" during type checking, - // and then stripping the "self =>" and substituting - // in the supplied selfsym. + } + else { + // Since a selfsym is supplied, the annotation should have an extra + // "self" identifier in scope for type checking. This is implemented + // by wrapping the rhs in a function like "self => rhs" during type + // checking, and then stripping the "self =>" and substituting in + // the supplied selfsym. val funcparm = ValDef(NoMods, nme.self, TypeTree(selfsym.info), EmptyTree) - val func = Function(List(funcparm), ann.duplicate) - // The .duplicate of annot.constr - // deals with problems that - // accur if this annotation is - // later typed again, which - // the compiler sometimes does. - // The problem is that "self" - // ident's within annot.constr - // will retain the old symbol - // from the previous typing. - val fun1clazz = FunctionClass(1) - val funcType = typeRef(fun1clazz.tpe.prefix, - fun1clazz, - List(selfsym.info, annClass.tpe)) - - (typed(func, mode, funcType): @unchecked) match { - case t @ Function(List(arg), rhs) => - val subs = - new TreeSymSubstituter(List(arg.symbol),List(selfsym)) - subs(rhs) - } + // The .duplicate of annot.constr deals with problems that accur + // if this annotation is later typed again, which the compiler + // sometimes does. The problem is that "self" ident's within + // annot.constr will retain the old symbol from the previous typing. 
+ val func = Function(funcparm :: Nil, ann.duplicate) + val funcType = appliedType(FunctionClass(1), selfsym.info, annClass.tpe) + val Function(arg :: Nil, rhs) = typed(func, mode, funcType) + + rhs.substituteSymbols(arg.symbol :: Nil, selfsym :: Nil) } def annInfo(t: Tree): AnnotationInfo = t match { -- cgit v1.2.3 From 609b3b8b740447b212f0ee04c1e52d8c72b8dfc8 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 2 Oct 2012 08:03:17 -0700 Subject: Hardening in appliedType and Subst*Map. All driven by the knowledge gained from logging all the calls to tpe and appliedType and seeing what was coming through there. Some needless type creation is avoided in the maps, and appliedType will squawk to those who listen when it sees something out of order. --- src/reflect/scala/reflect/internal/Types.scala | 61 +++++++++++++++----------- 1 file changed, 35 insertions(+), 26 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 876ae53da0..e80cf6fde6 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3603,16 +3603,16 @@ trait Types extends api.Types { self: SymbolTable => if (args.isEmpty) tycon //@M! `if (args.isEmpty) tycon' is crucial (otherwise we create new types in phases after typer and then they don't get adapted (??)) else tycon match { case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => copyTypeRef(tycon, pre, sym, Nil) //@M drop type args to Any/Nothing - case TypeRef(pre, sym, _) => copyTypeRef(tycon, pre, sym, args) + case TypeRef(pre, sym, Nil) => copyTypeRef(tycon, pre, sym, args) + case TypeRef(pre, sym, bogons) => debugwarn(s"Dropping $bogons from $tycon in appliedType.") ; copyTypeRef(tycon, pre, sym, args) case PolyType(tparams, restpe) => restpe.instantiateTypeParams(tparams, args) case ExistentialType(tparams, restpe) => newExistentialType(tparams, appliedType(restpe, args)) case st: SingletonType => appliedType(st.widen, args) // @M TODO: what to do? see bug1 - case RefinedType(parents, decls) => RefinedType(parents map (appliedType(_, args)), decls) // MO to AM: please check - case TypeBounds(lo, hi) => TypeBounds(appliedType(lo, args), appliedType(hi, args)) + case RefinedType(parents, decls) => RefinedType(parents map (appliedType(_, args)), decls) // @PP: Can this be right? + case TypeBounds(lo, hi) => TypeBounds(appliedType(lo, args), appliedType(hi, args)) // @PP: Can this be right? 
case tv@TypeVar(_, _) => tv.applyArgs(args) case AnnotatedType(annots, underlying, self) => AnnotatedType(annots, appliedType(underlying, args), self) - case ErrorType => tycon - case WildcardType => tycon // needed for neg/t0226 + case ErrorType | WildcardType => tycon case _ => abort(debugString(tycon)) } @@ -4513,16 +4513,18 @@ trait Types extends api.Types { self: SymbolTable => tp } - def apply(tp0: Type): Type = if (from.isEmpty) tp0 else { - @tailrec def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type = - if (from.isEmpty) tp - // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from)) - else if (matches(from.head, sym)) toType(tp, to.head) - else subst(tp, sym, from.tail, to.tail) + @tailrec private def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type = ( + if (from.isEmpty) tp + // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from)) + else if (matches(from.head, sym)) toType(tp, to.head) + else subst(tp, sym, from.tail, to.tail) + ) - val boundSyms = tp0.boundSyms - val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0 - val tp = mapOver(tp1) + def apply(tp0: Type): Type = if (from.isEmpty) tp0 else { + val boundSyms = tp0.boundSyms + val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0 + val tp = mapOver(tp1) + def substFor(sym: Symbol) = subst(tp, sym, from, to) tp match { // @M @@ -4537,9 +4539,11 @@ trait Types extends api.Types { self: SymbolTable => // (must not recurse --> loops) // 3) replacing m by List in m[Int] should yield List[Int], not just List case TypeRef(NoPrefix, sym, args) => - appliedType(subst(tp, sym, from, to), args) // if args.isEmpty, appliedType is the identity + val tcon = substFor(sym) + if ((tp eq tcon) || args.isEmpty) tcon + else appliedType(tcon.typeConstructor, args) case SingleType(NoPrefix, sym) => - subst(tp, sym, from, to) + substFor(sym) case _ => tp } @@ -4554,25 +4558,29 @@ trait Types extends api.Types { self: SymbolTable => case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args) case SingleType(pre, _) => singleType(pre, sym) } - override def apply(tp: Type): Type = if (from.isEmpty) tp else { - @tailrec def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol = - if (from.isEmpty) sym - // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from)) - else if (matches(from.head, sym)) to.head - else subst(sym, from.tail, to.tail) - tp match { + @tailrec private def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol = ( + if (from.isEmpty) sym + // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from)) + else if (matches(from.head, sym)) to.head + else subst(sym, from.tail, to.tail) + ) + private def substFor(sym: Symbol) = subst(sym, from, to) + + override def apply(tp: Type): Type = ( + if (from.isEmpty) tp + else tp match { case TypeRef(pre, sym, args) if pre ne NoPrefix => - val newSym = subst(sym, from, to) + val newSym = substFor(sym) // mapOver takes care of subst'ing in args mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) ) // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams)) case SingleType(pre, sym) if pre ne NoPrefix => - val newSym = subst(sym, from, to) + 
val newSym = substFor(sym) mapOver( if (sym eq newSym) tp else singleType(pre, newSym) ) case _ => super.apply(tp) } - } + ) override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = { object trans extends TypeMapTransformer { @@ -5955,6 +5963,7 @@ trait Types extends api.Types { self: SymbolTable => * than member `sym2` of `tp2`? */ private def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = { + require((sym1 ne NoSymbol) && (sym2 ne NoSymbol), ((tp1, sym1, tp2, sym2, depth))) val info1 = tp1.memberInfo(sym1) val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1) //System.out.println("specializes "+tp1+"."+sym1+":"+info1+sym1.locationString+" AND "+tp2+"."+sym2+":"+info2)//DEBUG -- cgit v1.2.3 From b1307ff156c4b3ba736659ac3b5aee50bb38844f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 2 Oct 2012 08:05:28 -0700 Subject: All the actual changes of tpe to tpe_* or tpeHK. These are the call sites which formerly could be seen to call .tpe on a symbol with unapplied type parameters. Now each such call site makes an explicit choice about what is intended for the result type. --- src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala | 2 +- src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 4 ++-- .../tools/nsc/doc/model/ModelFactoryImplicitSupport.scala | 4 ++-- src/compiler/scala/tools/nsc/interpreter/IMain.scala | 2 +- .../scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 10 +++++----- src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 2 +- src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 2 +- src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 4 ++-- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 12 ++++++------ .../scala/tools/selectivecps/CPSAnnotationChecker.scala | 4 ++-- .../plugin/scala/tools/selectivecps/CPSUtils.scala | 2 +- .../scala/tools/selectivecps/SelectiveANFTransform.scala | 8 +++----- .../scala/tools/selectivecps/SelectiveCPSTransform.scala | 6 +++--- src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 2 +- src/reflect/scala/reflect/internal/Definitions.scala | 9 +++++---- src/reflect/scala/reflect/internal/Symbols.scala | 4 ++-- src/reflect/scala/reflect/internal/TreeGen.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 7 ++++--- src/reflect/scala/reflect/internal/transform/Erasure.scala | 2 +- src/reflect/scala/reflect/runtime/JavaMirrors.scala | 2 +- 23 files changed, 49 insertions(+), 49 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index 8387d3d6e5..dc948a3d08 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -55,7 +55,7 @@ trait TypeKinds { self: ICodes => def toType: Type = reversePrimitiveMap get this map (_.tpe) getOrElse { this match { - case REFERENCE(cls) => cls.tpe + case REFERENCE(cls) => cls.tpe_* case ARRAY(elem) => arrayType(elem.toType) case _ => abort("Unknown type kind.") } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 85ad5a6884..d2ba8546b2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ 
b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -83,7 +83,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { // Before erasure so we can identify generic mains. enteringErasure { val companion = sym.linkedClassOfClass - val companionMain = companion.tpe.member(nme.main) + val companionMain = companion.tpe_*.member(nme.main) if (hasJavaMainMethod(companion)) failNoForwarder("companion contains its own main method") @@ -2890,7 +2890,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { (kind: @unchecked) match { case FLOAT => emit(Opcodes.FCMPG) case DOUBLE => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? http://docs.oracle.com/javase/specs/jvms/se5.0/html/Instructions2.doc3.html - + } } genCompare diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index 4089e96f51..af89978be1 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -98,7 +98,7 @@ trait ModelFactoryImplicitSupport { else { var context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit) - val results = global.analyzer.allViewsFrom(sym.tpe, context, sym.typeParams) + val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams) var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl)) // also keep empty conversions, so they appear in diagrams // conversions = conversions.filter(!_.members.isEmpty) @@ -109,7 +109,7 @@ trait ModelFactoryImplicitSupport { hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName)) // Filter out non-sensical conversions from value types - if (isPrimitiveValueType(sym.tpe)) + if (isPrimitiveValueType(sym.tpe_*)) conversions = conversions.filter((ic: ImplicitConversionImpl) => hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName)) diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 9a22c15a12..6d51dc1a39 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -1064,7 +1064,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } def cleanMemberDecl(owner: Symbol, member: Name): Type = exitingTyper { normalizeNonPublic { - owner.info.nonPrivateDecl(member).tpe match { + owner.info.nonPrivateDecl(member).tpe_* match { case NullaryMethodType(tp) => tp case tp => tp } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index e4a17f3f41..7f1a6c7a03 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -358,7 +358,7 @@ abstract class ClassfileParser { } value match { case ct: Constant => ct - case cls: Symbol => Constant(cls.tpe) + case cls: Symbol => Constant(cls.tpe_*) case arr: Type => Constant(arr) } } @@ -512,9 +512,9 @@ abstract class ClassfileParser { } else raiseLoaderLevel { val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe } - else pool.getSuperClass(in.nextChar).tpe + else pool.getSuperClass(in.nextChar).tpe_* val ifaceCount = in.nextChar - var ifaces = for (i <- List.range(0, ifaceCount)) yield 
pool.getSuperClass(in.nextChar).tpe + var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe_* if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces superType :: ifaces } @@ -751,7 +751,7 @@ abstract class ClassfileParser { val classSym = classNameToSymbol(subName(c => c == ';' || c == '<')) assert(!classSym.isOverloaded, classSym.alternatives) - var tpe = processClassType(processInner(classSym.tpe)) + var tpe = processClassType(processInner(classSym.tpe_*)) while (sig.charAt(index) == '.') { accept('.') val name = subName(c => c == ';' || c == '<' || c == '.').toTypeName @@ -784,7 +784,7 @@ abstract class ClassfileParser { index += 1 val restype = if (sym != null && sym.isClassConstructor) { accept('V') - clazz.tpe + clazz.tpe_* } else sig2type(tparams, skiptvs) JavaMethodType(sym.newSyntheticValueParams(paramtypes.toList), restype) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 5dbd22f23b..3aaf38aab8 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -568,7 +568,7 @@ abstract class Pickler extends SubComponent { tag case sym: ClassSymbol => writeSymInfo(sym) - if (sym.thisSym.tpe != sym.tpe) writeRef(sym.typeOfThis) + if (sym.thisSym.tpe_* != sym.tpe_*) writeRef(sym.typeOfThis) CLASSsym case sym: TypeSymbol => writeSymInfo(sym) diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 3f7fff3954..4a0d25fd09 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -91,7 +91,7 @@ abstract class ExplicitOuter extends InfoTransform def newOuterAccessor(clazz: Symbol) = { val accFlags = SYNTHETIC | ARTIFACT | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 ) val sym = clazz.newMethod(nme.OUTER, clazz.pos, accFlags) - val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType + val restpe = if (clazz.isTrait) clazz.outerClass.tpe_* else clazz.outerClass.thisType sym expandName clazz sym.referenced = clazz diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 998cf17716..88c6f8d823 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -588,7 +588,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { res } - var parents = List(applyContext(enteringTyper(clazz.tpe))) + var parents = List(applyContext(enteringTyper(clazz.tpe_*))) // log("!!! 
Parents: " + parents + ", sym: " + parents.map(_.typeSymbol)) if (parents.head.typeSymbol.isTrait) parents = parents.head.parents.head :: parents @@ -1150,7 +1150,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { private def subst(env: TypeEnv)(decl: Symbol): Symbol = decl modifyInfo (info => - if (decl.isConstructor) MethodType(subst(env, info).params, decl.owner.tpe) + if (decl.isConstructor) MethodType(subst(env, info).params, decl.owner.tpe_*) else subst(env, info) ) diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index f68cbfc141..403045952e 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -134,7 +134,7 @@ abstract class UnCurry extends InfoTransform def isByNameRef(tree: Tree) = ( tree.isTerm && !byNameArgs(tree) - && tree.hasSymbolWhich(s => isByNameParamType(s.tpe)) + && tree.hasSymbolWhich(isByName) ) /** Uncurry a type of a tree node. diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 8bf2768e0a..5f12da8354 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -997,7 +997,7 @@ trait Implicits { case Some(imap) => imap case None => val result = new InfoMap - getClassParts(sym.tpe)(result, new mutable.HashSet(), pending + sym) + getClassParts(sym.tpeHK)(result, new mutable.HashSet(), pending + sym) infoMapCache(sym) = result result } diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index f562a251e3..9e1a9d6d17 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -848,7 +848,7 @@ trait Namers extends MethodSynthesis { val sym = ( if (hasType || hasName) { - owner.typeOfThis = if (hasType) selfTypeCompleter(tpt) else owner.tpe + owner.typeOfThis = if (hasType) selfTypeCompleter(tpt) else owner.tpe_* val selfSym = owner.thisSym setPos self.pos if (hasName) selfSym setName name else selfSym } @@ -934,7 +934,7 @@ trait Namers extends MethodSynthesis { // DEPMETTODO: do we need to skolemize value parameter symbols? 
if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) { - tpt defineType context.enclClass.owner.tpe + tpt defineType context.enclClass.owner.tpe_* tpt setPos meth.pos.focus } var resultPt = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index f9a7435095..0c21f739cc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -327,7 +327,7 @@ trait Typers extends Modes with Adaptations with Tags { } def checkNonCyclic(sym: Symbol) { - if (!checkNonCyclic(sym.pos, sym.tpe)) sym.setInfo(ErrorType) + if (!checkNonCyclic(sym.pos, sym.tpe_*)) sym.setInfo(ErrorType) } def checkNonCyclic(defn: Tree, tpt: Tree) { @@ -1460,7 +1460,7 @@ trait Typers extends Modes with Adaptations with Tags { val supertparams = if (supertpt.hasSymbol) supertpt.symbol.typeParams else List() var supertpe = supertpt.tpe if (!supertparams.isEmpty) - supertpe = PolyType(supertparams, appliedType(supertpe, supertparams map (_.tpeHK))) + supertpe = PolyType(supertparams, appliedType(supertpe.typeConstructor, supertparams map (_.tpeHK))) // A method to replace a super reference by a New in a supercall def transformSuperCall(scall: Tree): Tree = (scall: @unchecked) match { @@ -1820,7 +1820,7 @@ trait Typers extends Modes with Adaptations with Tags { } } - treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe + treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe_* } /** Remove definition annotations from modifiers (they have been saved @@ -3490,7 +3490,7 @@ trait Typers extends Modes with Adaptations with Tags { // sometimes does. The problem is that "self" ident's within // annot.constr will retain the old symbol from the previous typing. 
val func = Function(funcparm :: Nil, ann.duplicate) - val funcType = appliedType(FunctionClass(1), selfsym.info, annClass.tpe) + val funcType = appliedType(FunctionClass(1), selfsym.info, annClass.tpe_*) val Function(arg :: Nil, rhs) = typed(func, mode, funcType) rhs.substituteSymbols(arg.symbol :: Nil, selfsym :: Nil) @@ -4016,7 +4016,7 @@ trait Typers extends Modes with Adaptations with Tags { if (name != tpnme.WILDCARD) namer.enterInScope(sym) else context.scope.enter(sym) - tree setSymbol sym setType sym.tpe + tree setSymbol sym setType sym.tpeHK case name: TermName => val sym = @@ -4215,7 +4215,7 @@ trait Typers extends Modes with Adaptations with Tags { NotAMemberError(tpt, TypeTree(tp), nme.CONSTRUCTOR) setError(tpt) } - else if (!( tp == sym.thisSym.tpe // when there's no explicit self type -- with (#3612) or without self variable + else if (!( tp == sym.thisSym.tpe_* // when there's no explicit self type -- with (#3612) or without self variable // sym.thisSym.tpe == tp.typeOfThis (except for objects) || narrowRhs(tp) <:< tp.typeOfThis || phase.erasedTypes diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala index b373b3d0de..be3138c373 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala @@ -96,7 +96,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { if (!cpsEnabled) return bounds val anyAtCPS = newCpsParamsMarker(NothingClass.tpe, AnyClass.tpe) - if (isFunctionType(tparams.head.owner.tpe) || isPartialFunctionType(tparams.head.owner.tpe)) { + if (isFunctionType(tparams.head.owner.tpe_*) || isPartialFunctionType(tparams.head.owner.tpe_*)) { vprintln("function bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs) if (hasCpsParamTypes(targs.last)) bounds.reverse match { @@ -356,7 +356,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { global.globalError("not a single cps annotation: " + xs) xs(0) } - + def emptyOrSingleList(xs: List[AnnotationInfo]) = if (xs.isEmpty) Nil else List(single(xs)) def transChildrenInOrder(tree: Tree, tpe: Type, childTrees: List[Tree], byName: List[Tree]) = { diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala index 46c644bcd6..eab442aaef 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala @@ -57,7 +57,7 @@ trait CPSUtils { protected def newMarker(sym: Symbol): AnnotationInfo = AnnotationInfo marker sym.tpe protected def newCpsParamsMarker(tp1: Type, tp2: Type) = - newMarker(appliedType(MarkerCPSTypes.tpe, List(tp1, tp2))) + newMarker(appliedType(MarkerCPSTypes, tp1, tp2)) // annotation checker diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala index ba87cadfeb..7229ea41f4 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala @@ -172,7 +172,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with debuglog("transforming valdef " + vd.symbol) if (getExternalAnswerTypeAnn(tpt.tpe).isEmpty) { - + atOwner(vd.symbol) { val rhs1 = 
transExpr(rhs, None, None) @@ -468,7 +468,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with val sym: Symbol = ( currentOwner.newValue(newTermName(unit.fresh.newName("tmp")), tree.pos, Flags.SYNTHETIC) setInfo valueTpe - setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil)) + setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe_*, Nil, Nil)) ) expr.changeOwner(currentOwner -> sym) @@ -500,9 +500,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with // TODO: better yet: do without annotations on symbols val spcVal = getAnswerTypeAnn(anfRhs.tpe) - if (spcVal.isDefined) { - tree.symbol.setAnnotations(List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil))) - } + spcVal foreach (_ => tree.symbol setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe_*, Nil, Nil))) (stms:::List(treeCopy.ValDef(tree, mods, name, tpt, anfRhs)), linearize(spc, spcVal)(unit, tree.pos)) diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala index 54a0079f40..eb23e1276c 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala @@ -56,7 +56,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with case _ => getExternalAnswerTypeAnn(tp) match { case Some((res, outer)) => - appliedType(Context.tpe, List(removeAllCPSAnnotations(tp), res, outer)) + appliedType(Context.tpeHK, List(removeAllCPSAnnotations(tp), res, outer)) case _ => removeAllCPSAnnotations(tp) } @@ -107,7 +107,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with TypeApply(funR, List(targs(0), targs(1))).setType(appliedType(funR.tpe, List(targs(0).tpe, targs(1).tpe))), args.map(transform(_)) - ).setType(appliedType(Context.tpe, List(targs(0).tpe,targs(1).tpe,targs(1).tpe))) + ).setType(appliedType(Context.tpeHK, List(targs(0).tpe,targs(1).tpe,targs(1).tpe))) } case Apply(TypeApply(fun, targs), args) @@ -192,7 +192,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with val targettp = transformCPSType(tree.tpe) val pos = catches.head.pos - val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp))) + val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass, ThrowableClass.tpe, targettp)) val funDef = localTyper.typed(atPos(pos) { ValDef(funSym, Match(EmptyTree, catches1)) }) diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index 539984c67f..86ea2c099b 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -158,7 +158,7 @@ trait BaseTypeSeqs { val parents = tp.parents // Console.println("computing baseTypeSeq of " + tsym.tpe + " " + parents)//DEBUG val buf = new mutable.ListBuffer[Type] - buf += tsym.tpe + buf += tsym.tpe_* var btsSize = 1 if (parents.nonEmpty) { val nparents = parents.length diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index abf11020fa..517df37c6b 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -223,7 +223,7 @@ trait Definitions extends api.StandardDefinitions { def 
fullyInitializeSymbol(sym: Symbol): Symbol = { sym.initialize fullyInitializeType(sym.info) - fullyInitializeType(sym.tpe) + fullyInitializeType(sym.tpe_*) sym } def fullyInitializeType(tp: Type): Type = { @@ -410,7 +410,8 @@ trait Definitions extends api.StandardDefinitions { def isScalaRepeatedParamType(tp: Type) = tp.typeSymbol == RepeatedParamClass def isJavaRepeatedParamType(tp: Type) = tp.typeSymbol == JavaRepeatedParamClass def isRepeatedParamType(tp: Type) = isScalaRepeatedParamType(tp) || isJavaRepeatedParamType(tp) - def isRepeated(param: Symbol) = isRepeatedParamType(param.tpe) + def isRepeated(param: Symbol) = isRepeatedParamType(param.tpe_*) + def isByName(param: Symbol) = isByNameParamType(param.tpe_*) def isCastSymbol(sym: Symbol) = sym == Any_asInstanceOf || sym == Object_asInstanceOf def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params) @@ -549,7 +550,7 @@ trait Definitions extends api.StandardDefinitions { // The given symbol represents either String.+ or StringAdd.+ def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+ - def isArrowAssoc(sym: Symbol) = ArrowAssocClass.tpe.decls.toList contains sym + def isArrowAssoc(sym: Symbol) = sym.owner == ArrowAssocClass // The given symbol is a method with the right name and signature to be a runnable java program. def isJavaMainMethod(sym: Symbol) = (sym.name == nme.main) && (sym.info match { @@ -733,7 +734,7 @@ trait Definitions extends api.StandardDefinitions { * C[E1, ..., En] forSome { E1 >: LB1 <: UB1 ... en >: LBn <: UBn }. */ def classExistentialType(clazz: Symbol): Type = - newExistentialType(clazz.typeParams, clazz.tpe) + newExistentialType(clazz.typeParams, clazz.tpe_*) /** Given type U, creates a Type representing Class[_ <: U]. */ diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index db2cb8d225..765a29946d 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -642,7 +642,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isStaticModule = isModule && isStatic && !isMethod final def isThisSym = isTerm && owner.thisSym == this final def isError = hasFlag(IS_ERROR) - final def isErroneous = isError || isInitialized && tpe.isErroneous + final def isErroneous = isError || isInitialized && tpe_*.isErroneous def isHigherOrderTypeParameter = owner.isTypeParameterOrSkolem @@ -1739,7 +1739,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def thisSym: Symbol = this /** The type of `this` in a class, or else the type of the symbol itself. */ - def typeOfThis = thisSym.tpe + def typeOfThis = thisSym.tpe_* /** If symbol is a class, the type this.type in this class, * otherwise NoPrefix. diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index c1753fc5a1..6ce93d93b2 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -165,7 +165,7 @@ abstract class TreeGen extends macros.TreeBuilder { This(sym.name.toTypeName) setSymbol sym setType sym.thisType def mkAttributedIdent(sym: Symbol): Tree = - Ident(sym.name) setSymbol sym setType sym.tpe + Ident(sym.name) setSymbol sym setType sym.tpeHK def mkAttributedSelect(qual: Tree, sym: Symbol): Tree = { // Tests involving the repl fail without the .isEmptyPackage condition. 
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index e80cf6fde6..aa3c99eb1d 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -735,6 +735,7 @@ trait Types extends api.Types { self: SymbolTable => * }}} */ def memberInfo(sym: Symbol): Type = { + require(sym ne NoSymbol, this) sym.info.asSeenFrom(this, sym.owner) } @@ -1403,7 +1404,7 @@ trait Types extends api.Types { self: SymbolTable => object ThisType extends ThisTypeExtractor { def apply(sym: Symbol): Type = - if (phase.erasedTypes) sym.tpe + if (phase.erasedTypes) sym.tpe_* else unique(new UniqueThisType(sym)) } @@ -1633,7 +1634,7 @@ trait Types extends api.Types { self: SymbolTable => val paramToVarMap = varToParamMap map (_.swap) val varToParam = new TypeMap { def apply(tp: Type) = varToParamMap get tp match { - case Some(sym) => sym.tpe + case Some(sym) => sym.tpe_* case _ => mapOver(tp) } } @@ -1652,7 +1653,7 @@ trait Types extends api.Types { self: SymbolTable => tpe.baseTypeSeqCache = undetBaseTypeSeq tpe.baseTypeSeqCache = if (tpe.typeSymbol.isRefinementClass) - tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe) + tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe_*) else compoundBaseTypeSeq(tpe) } finally { diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index 977398909f..fd3934b3d6 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -69,7 +69,7 @@ trait Erasure { // // This requires that cls.isClass. protected def rebindInnerClass(pre: Type, cls: Symbol): Type = { - if (cls.owner.isClass) cls.owner.tpe else pre // why not cls.isNestedClass? + if (cls.owner.isClass) cls.owner.tpe_* else pre // why not cls.isNestedClass? } def unboxDerivedValueClassMethod(clazz: Symbol): Symbol = diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index b6b2537dc4..76fa5cd002 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -1116,7 +1116,7 @@ trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUnive constructorCache enter (jconstr, constr) val tparams = jconstr.getTypeParameters.toList map createTypeParameter val paramtpes = jconstr.getGenericParameterTypes.toList map typeToScala - setMethType(constr, tparams, paramtpes, clazz.tpe) + setMethType(constr, tparams, paramtpes, clazz.tpe_*) constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe)) copyAnnotations(constr, jconstr) constr -- cgit v1.2.3 From 1b4251288c1305d6cd5174d21785fddb26f2053d Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 2 Oct 2012 09:32:55 -0700 Subject: Overhauled documentation and structure of tpe/tpeHK/etc. Trying to make the code structure as hotspot friendly as I can, which is no mean feat. 
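To illustrate the distinction being redocumented here -- this is only a sketch, not part of the patch, and assumes some compiler universe `global` is in scope:

    import global._
    val listSym = definitions.ListClass
    listSym.tpeHK        // List      -- no type args, unapplied type params, kind * => *
    listSym.tpe_*        // List[A]   -- dummy type arg wrapped around the type param, kind *
    listSym.tpe          // same as tpe_*, since tpe now forwards to tpe_*
    // Dummy args are only safe when substituted away immediately, e.g.:
    appliedType(listSym.tpeHK, List(definitions.IntClass.tpe))   // List[Int]
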
--- src/reflect/scala/reflect/internal/Symbols.scala | 179 +++++++++++---------- .../reflect/runtime/SynchronizedSymbols.scala | 3 +- 2 files changed, 99 insertions(+), 83 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 765a29946d..a58cc99f86 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1186,20 +1186,35 @@ trait Symbols extends api.Symbols { self: SymbolTable => } } - /** Get type. The type of a term symbol is its usual type. - * For a type symbol with no type parameters, it is the type corresponding to the symbol - * itself. For a type symbol with type parameters, you - * must determine whether you want tpeHK or tpe_*. If you - * call .tpe on such a symbol, you may trigger an assertion. - * See the tpe/tpeHK overrides in TypeSymbol for more. - */ - def tpe: Type = info - - /** The type of this symbol, with any unapplied type parameters - * remaining unapplied (which means the type constructor.) - * It may be of any kind. - */ - def tpeHK: Type = tpe + /** The "type" of this symbol. The type of a term symbol is its usual + * type. A TypeSymbol is more complicated; see that class for elaboration. + * Since tpe forwards to tpe_*, if you call it on a type symbol with unapplied + * type parameters, the type returned will contain dummies types. These will + * hide legitimate errors or create spurious ones if used as normal types. + */ + final def tpe: Type = tpe_* + + /** typeConstructor throws an exception when called on term + * symbols; this is a more forgiving alternative. Calls + * typeConstructor on TypeSymbols, returns info otherwise. + */ + def tpeHK: Type = info + + /** Only applicable to TypeSymbols, it is the type corresponding + * to the symbol itself. For instance, the type of a List might + * be List[Int] - the same symbol's typeConstructor is simply List. + * One might be tempted to write that as List[_], and in some + * contexts this is possible, but it is discouraged because it is + * syntactically indistinguishable from and easily confused with the + * type List[T] forSome { type T; }, which can also be written List[_]. + */ + def typeConstructor: Type = ( + // Avoiding a third override in NoSymbol to preserve bimorphism + if (this eq NoSymbol) + abort("no-symbol does not have a type constructor (this may indicate scalac cannot find fundamental classes)") + else + abort("typeConstructor inapplicable for " + this) + ) /** The type of this symbol, guaranteed to be of kind *. * If there are unapplied type parameters, they will be @@ -1208,11 +1223,20 @@ trait Symbols extends api.Symbols { self: SymbolTable => * sense and will cause difficult-to-find bugs if allowed * to roam free. * - * You must call tpe_* explicitly to obtain these types, - * at which point you are responsible for them as if it they - * were your own minor children. + * If you call tpe_* explicitly to obtain these types, + * you are responsible for them as if it they were your own + * minor children. 
*/ - def tpe_* : Type = tpe + def tpe_* : Type = info + + // Alternate implementation of def tpe for warning about misuse, + // disabled to keep the method maximally hotspot-friendly: + // def tpe: Type = { + // val result = tpe_* + // if (settings.debug.value && result.typeArgs.nonEmpty) + // printCaller(s"""Call to ${this.tpe} created $result: call tpe_* or tpeHK""")("") + // result + // } /** Get type info associated with symbol at current phase, after * ensuring that symbol is initialized (i.e. type is completed). @@ -1449,14 +1473,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => alternatives withFilter (_.isJavaDefined) foreach (_ modifyInfo rawToExistential) } - /** The type constructor of a symbol is: - * For a type symbol, the type corresponding to the symbol itself, - * excluding parameters. - * Not applicable for term symbols. - */ - def typeConstructor: Type = - abort("typeConstructor inapplicable for " + this) - /** The logic approximately boils down to finding the most recent phase * which immediately follows any of parser, namer, typer, or erasure. * In effect that means this will return one of: @@ -2624,6 +2640,20 @@ trait Symbols extends api.Symbols { self: SymbolTable => owner.newNonClassSymbol(name, pos, newFlags) } + /** Let's say you have a type definition + * + * {{{ + * type T <: Number + * }}} + * + * and tsym is the symbol corresponding to T. Then + * + * {{{ + * tsym is an instance of AbstractTypeSymbol + * tsym.info = TypeBounds(Nothing, Number) + * tsym.tpe = TypeRef(NoPrefix, T, List()) + * }}} + */ class AbstractTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName) extends TypeSymbol(initOwner, initPos, initName) { type TypeOfClonedSymbol = TypeSymbol @@ -2692,70 +2722,57 @@ trait Symbols extends api.Symbols { self: SymbolTable => private def newPrefix = if (this hasFlag EXISTENTIAL | PARAM) NoPrefix else owner.thisType private def newTypeRef(targs: List[Type]) = typeRef(newPrefix, this, targs) - /** Let's say you have a type definition - * - * {{{ - * type T <: Number - * }}} + /** A polymorphic type symbol has two distinct "types": * - * and tsym is the symbol corresponding to T. Then + * tpe_* a TypeRef with: dummy type args, no unapplied type parameters, and kind * + * tpeHK a TypeRef with: no type args, unapplied type parameters, and + * kind (*,*,...,*) => * depending on the number of tparams. * - * {{{ - * tsym.info = TypeBounds(Nothing, Number) - * tsym.tpe = TypeRef(NoPrefix, T, List()) - * }}} - */ - override def tpe: Type = tpeOfKind(kindStar = false) - override def tpe_* : Type = tpeOfKind(kindStar = true) - - private def tpeOfKind(kindStar: Boolean): Type = { - if (tpeCache eq NoType) throw CyclicReference(this, typeConstructor) - if (tpePeriod != currentPeriod) { - if (isValid(tpePeriod)) { - tpePeriod = currentPeriod - } - else { - if (isInitialized) tpePeriod = currentPeriod - tpeCache = NoType - val targs = ( - if (phase.erasedTypes && this != ArrayClass) Nil - else unsafeTypeParams map (_.typeConstructor) match { - case dummies if dummies.nonEmpty && settings.debug.value && !kindStar => - printCaller(s"""Call to ${this.tpe} with unapplied ${dummies mkString ", "}: call tpe_* or tpeHK""")(dummies) - case dummies => - dummies - } - ) - tpeCache = newTypeRef(targs) - } - } - assert(tpeCache ne null/*, "" + this + " " + phase*/)//debug + * The dummy type args in tpe_* are created by wrapping a TypeRef + * around the type parameter symbols. 
Types containing dummies will + * hide errors or introduce spurious ones if they are passed around + * as if normal types. They should only be used in local operations + * where they will either be discarded immediately after, or will + * undergo substitution in which the dummies are replaced by actual + * type arguments. + */ + override def tpe_* : Type = { + maybeUpdateTypeCache() tpeCache } - - /** @M -- tpe vs tpeHK: - * - * tpe: creates a TypeRef with dummy type arguments and kind * - * tpeHK: creates a TypeRef with no type arguments but with type parameters - * - * If typeParams is nonEmpty, calling tpe may hide errors or - * introduce spurious ones. (For example, when deriving a type from - * the symbol of a type argument that may be higher-kinded.) As far - * as I can tell, it only makes sense to call tpe in conjunction - * with a substitution that replaces the generated dummy type - * arguments by their actual types. - * - * TODO: the above conditions desperately need to be enforced by code. - */ - override def tpeHK = typeConstructor // @M! used in memberType - override def typeConstructor: Type = { + maybeUpdateTyconCache() + tyconCache + } + override def tpeHK: Type = typeConstructor + + private def maybeUpdateTyconCache() { if ((tyconCache eq null) || tyconRunId != currentRunId) { tyconCache = newTypeRef(Nil) tyconRunId = currentRunId } assert(tyconCache ne null) - tyconCache + } + private def maybeUpdateTypeCache() { + if (tpePeriod != currentPeriod) { + if (isValid(tpePeriod)) + tpePeriod = currentPeriod + else + updateTypeCache() // perform the actual update + } + } + private def updateTypeCache() { + if (tpeCache eq NoType) + throw CyclicReference(this, typeConstructor) + + if (isInitialized) + tpePeriod = currentPeriod + + tpeCache = NoType // cycle marker + tpeCache = newTypeRef( + if (phase.erasedTypes && this != ArrayClass || unsafeTypeParams.isEmpty) Nil + else unsafeTypeParams map (_.typeConstructor) + ) } override def info_=(tp: Type) { @@ -3190,8 +3207,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def owner: Symbol = abort("no-symbol does not have an owner") - override def typeConstructor: Type = - abort("no-symbol does not have a type constructor (this may indicate scalac cannot find fundamental classes)") } protected def makeNoSymbol: NoSymbol = new NoSymbol diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index 3c2885a9f4..dc8717b28e 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -118,7 +118,8 @@ trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable => override def name_=(x: Name) = synchronized { super.name_=(x) } override def rawname = synchronized { super.rawname } override def typeConstructor: Type = synchronized { super.typeConstructor } - override def tpe: Type = synchronized { super.tpe } + override def tpe_* : Type = synchronized { super.tpe_* } + override def tpeHK : Type = synchronized { super.tpeHK } } trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol { -- cgit v1.2.3 From 968f492aa1225f0a7786396a97749ef967ad898f Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Wed, 3 Oct 2012 18:44:24 +0400 Subject: Fix scaladoc links in a couple of places. 
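
The links in question pointed at the right type names under the wrong packages; for instance (illustrative excerpt only, the actual changes are in the diff below):

    /** @see [[scala.math.IndexedSeq]]        // broken: no such documented entity
      * @see [[scala.collection.IndexedSeq]]  // fixed: the class actually meant
      */
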
--- src/library/scala/collection/Searching.scala | 4 ++-- src/library/scala/collection/generic/IsSeqLike.scala | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala index c1f7f4cae6..33e50365ee 100644 --- a/src/library/scala/collection/Searching.scala +++ b/src/library/scala/collection/Searching.scala @@ -39,7 +39,7 @@ object Searching { * The sequence should be sorted with the same `Ordering` before calling; otherwise, * the results are undefined. * - * @see [[scala.math.IndexedSeq]] + * @see [[scala.collection.IndexedSeq]] * @see [[scala.math.Ordering]] * @see [[scala.collection.SeqLike]], method `sorted` * @@ -63,7 +63,7 @@ object Searching { * The sequence should be sorted with the same `Ordering` before calling; otherwise, * the results are undefined. * - * @see [[scala.math.IndexedSeq]] + * @see [[scala.collection.IndexedSeq]] * @see [[scala.math.Ordering]] * @see [[scala.collection.SeqLike]], method `sorted` * diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala index 8eac025ed6..9467510a2c 100644 --- a/src/library/scala/collection/generic/IsSeqLike.scala +++ b/src/library/scala/collection/generic/IsSeqLike.scala @@ -30,7 +30,7 @@ package generic * // == List(2, 4) * }}} * - * @see [[scala.collection.generic.Seq]] + * @see [[scala.collection.Seq]] * @see [[scala.collection.generic.IsTraversableLike]] */ trait IsSeqLike[Repr] { -- cgit v1.2.3 From dee6a347335e9a4b42342664aa50b0cb217c00a9 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 3 Oct 2012 13:54:30 -0700 Subject: Renamed hasSymbol to hasSymbolField. Suggestion by retronym that the obvious implementation of "hasSymbol" be called "hasSymbol" reminded me we have a method called "hasSymbol" which does not have that implementation, and which has burned us already with subtle bugginess. I think that "hasSymbolField" is self-documenting. 
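
The old name is kept as a deprecated forwarder so existing callers keep compiling; roughly, this is the pattern the Trees.scala hunk below applies:

    abstract class Tree {
      def hasSymbolField = false                    // overridden to true in SymTree
      @deprecated("Use hasSymbolField", "2.11.0")
      def hasSymbol = hasSymbolField                // old name forwards to the new one
    }

Call sites then migrate mechanically, e.g. from `tree.hasSymbol && tree.symbol != NoSymbol` to `tree.hasSymbolField && tree.symbol != NoSymbol`.
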
--- .../scala/reflect/reify/codegen/GenTrees.scala | 6 +++--- .../scala/reflect/reify/phases/Metalevels.scala | 2 +- .../scala/tools/nsc/ast/TreeBrowsers.scala | 2 +- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 2 +- src/compiler/scala/tools/nsc/ast/Trees.scala | 2 +- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 4 ++-- .../scala/tools/nsc/transform/AddInterfaces.scala | 2 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 4 ++-- .../scala/tools/nsc/typechecker/Duplicators.scala | 4 ++-- .../scala/tools/nsc/typechecker/Infer.scala | 2 +- .../tools/nsc/typechecker/PatternMatching.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 22 ++++++++++---------- .../scala/tools/reflect/ToolBoxFactory.scala | 2 +- src/detach/plugin/scala/tools/detach/Detach.scala | 24 +++++++++++----------- src/reflect/scala/reflect/api/Trees.scala | 2 +- src/reflect/scala/reflect/internal/Importers.scala | 6 +++--- src/reflect/scala/reflect/internal/Printers.scala | 6 +++--- src/reflect/scala/reflect/internal/Trees.scala | 9 ++++---- 18 files changed, 52 insertions(+), 51 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala index bdcc7383b0..86ad23cd15 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala @@ -64,7 +64,7 @@ trait GenTrees { // usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation // however, reification of AnnotatedTypes is special. see ``reifyType'' to find out why. - if (reifyTreeSymbols && tree.hasSymbol) { + if (reifyTreeSymbols && tree.hasSymbolField) { if (reifyDebug) println("reifying symbol %s for tree %s".format(tree.symbol, tree)) rtree = mirrorBuildCall(nme.setSymbol, rtree, reify(tree.symbol)) } @@ -86,8 +86,8 @@ trait GenTrees { // see ``Metalevels'' for more info about metalevel breaches // and about how we deal with splices that contain them - val isMetalevelBreach = splicee exists (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0) - val isRuntimeEval = splicee exists (sub => sub.hasSymbol && sub.symbol == ExprSplice) + val isMetalevelBreach = splicee exists (sub => sub.hasSymbolField && sub.symbol != NoSymbol && sub.symbol.metalevel > 0) + val isRuntimeEval = splicee exists (sub => sub.hasSymbolField && sub.symbol == ExprSplice) if (isMetalevelBreach || isRuntimeEval) { // we used to convert dynamic splices into runtime evals transparently, but we no longer do that // why? see comments in ``Metalevels'' diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala index fbbd12a42f..4c6ebbb288 100644 --- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala +++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala @@ -124,7 +124,7 @@ trait Metalevels { withinSplice { super.transform(TreeSplice(ReifiedTree(universe, mirror, symtab1, rtree, tpe, rtpe, concrete))) } case TreeSplice(splicee) => if (reifyDebug) println("entering splice: " + splicee) - val breaches = splicee filter (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0) + val breaches = splicee filter (sub => sub.hasSymbolField && sub.symbol != NoSymbol && sub.symbol.metalevel > 0) if (!insideSplice && breaches.nonEmpty) { // we used to convert dynamic splices into runtime evals transparently, but we no longer do that // why? 
see comments above diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index 267a5dcefd..be7a6295b4 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -509,7 +509,7 @@ abstract class TreeBrowsers { /** Return a textual representation of this t's symbol */ def symbolText(t: Tree): String = { val prefix = - if (t.hasSymbol) "[has] " + if (t.hasSymbolField) "[has] " else if (t.isDef) "[defines] " else "" diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 20da5f0087..a74b62bf8d 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -22,7 +22,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { def mkCheckInit(tree: Tree): Tree = { val tpe = - if (tree.tpe != null || !tree.hasSymbol) tree.tpe + if (tree.tpe != null || !tree.hasSymbolField) tree.tpe else tree.symbol.tpe if (!global.phase.erasedTypes && settings.warnSelectNullable.value && diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 3ccc595fb2..f6073cf185 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -341,7 +341,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => tree case _ => val dupl = tree.duplicate - if (tree.hasSymbol && (!localOnly || (locals contains tree.symbol)) && !(keepLabels && tree.symbol.isLabel)) + if (tree.hasSymbolField && (!localOnly || (locals contains tree.symbol)) && !(keepLabels && tree.symbol.isLabel)) dupl.symbol = NoSymbol dupl.tpe = null dupl diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 5dbd22f23b..182338a0a1 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -68,7 +68,7 @@ abstract class Pickler extends SubComponent { return } - if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) { + if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro) { unit.error(t.pos, t.symbol.typeParams.length match { case 0 => "macro has not been expanded" case 1 => "this type parameter must be specified" @@ -235,7 +235,7 @@ abstract class Pickler extends SubComponent { private def putTree(tree: Tree): Unit = if (putEntry(tree)) { if (tree != EmptyTree) putType(tree.tpe) - if (tree.hasSymbol) + if (tree.hasSymbolField) putSymbol(tree.symbol) tree match { diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 3e0e40e525..32c2d63b2a 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -321,7 +321,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => Block(List(Apply(gen.mkSuperInitCall, Nil)), expr) case Block(stats, expr) => - // needs `hasSymbol` check because `supercall` could be a block (named / default args) + // needs `hasSymbolField` check because `supercall` could be a block (named / default args) val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER)) treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr) } diff --git 
a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 27ceb66af4..b54f128961 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -438,7 +438,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { tree match { case Assign(lhs, rhs) => traverse(rhs) // assignments don't count case _ => - if (tree.hasSymbol && tree.symbol != NoSymbol) { + if (tree.hasSymbolField && tree.symbol != NoSymbol) { val sym = tree.symbol if ((sym.hasAccessorFlag || (sym.isTerm && !sym.isMethod)) && sym.isPrivate @@ -804,7 +804,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { */ class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) { override def transform(tree: Tree): Tree = - if (tree.hasSymbol && from.contains(tree.symbol)) + if (tree.hasSymbolField && from.contains(tree.symbol)) super.transform(tree.duplicate) else super.transform(tree.duplicate) diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 2dd0836322..1bd2bbcd5c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -233,7 +233,7 @@ abstract class Duplicators extends Analyzer { override def typed(tree: Tree, mode: Int, pt: Type): Tree = { debuglog("typing " + tree + ": " + tree.tpe + ", " + tree.getClass) val origtreesym = tree.symbol - if (tree.hasSymbol && tree.symbol != NoSymbol + if (tree.hasSymbolField && tree.symbol != NoSymbol && !tree.symbol.isLabel // labels cannot be retyped by the type checker as LabelDef has no ValDef/return type trees && invalidSyms.isDefinedAt(tree.symbol)) { debuglog("removed symbol " + tree.symbol) @@ -403,7 +403,7 @@ abstract class Duplicators extends Analyzer { case _ => debuglog("Duplicators default case: " + tree.summaryString) debuglog(" ---> " + tree) - if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) { + if (tree.hasSymbolField && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) { tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==) } val ntree = castType(tree, pt) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index d9f0c150ce..472cfed894 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -269,7 +269,7 @@ trait Infer extends Checkable { def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue def errorSym = if (tree.isType) errorClass else errorValue - if (tree.hasSymbol) + if (tree.hasSymbolField) tree setSymbol errorSym tree setType ErrorType diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 52880609e9..96e1ed9a1c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -2547,7 +2547,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL } val toString = - if (p.hasSymbol && p.symbol.isStable) p.symbol.name.toString // tp.toString + if (p.hasSymbolField && p.symbol.isStable) p.symbol.name.toString // 
tp.toString else p.toString //+"#"+ id Const.unique(narrowTp, new ValueConst(narrowTp, checkableType(wideTp), toString)) // must make wide type checkable so that it is comparable to types from TypeConst diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index dc6beaf5c6..12e26a812d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -53,7 +53,7 @@ trait Typers extends Modes with Adaptations with Tags { object UnTyper extends Traverser { override def traverse(tree: Tree) = { if (tree != EmptyTree) tree.tpe = null - if (tree.hasSymbol) tree.symbol = NoSymbol + if (tree.hasSymbolField) tree.symbol = NoSymbol super.traverse(tree) } } @@ -242,7 +242,7 @@ trait Typers extends Modes with Adaptations with Tags { * of its symbol was not volatile? */ protected def isStableExceptVolatile(tree: Tree) = { - tree.hasSymbol && tree.symbol != NoSymbol && tree.tpe.isVolatile && + tree.hasSymbolField && tree.symbol != NoSymbol && tree.tpe.isVolatile && { val savedTpe = tree.symbol.info val savedSTABLE = tree.symbol getFlag STABLE tree.symbol setInfo AnyRefClass.tpe @@ -915,7 +915,7 @@ trait Typers extends Modes with Adaptations with Tags { // but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else // tree setType tree.tpe.normalize tree - } else if (tree.hasSymbol && !tree.symbol.typeParams.isEmpty && !inHKMode(mode) && + } else if (tree.hasSymbolField && !tree.symbol.typeParams.isEmpty && !inHKMode(mode) && !(tree.symbol.isJavaDefined && context.unit.isJava)) { // (7) // @M When not typing a higher-kinded type ((mode & HKmode) == 0) // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *, @@ -923,7 +923,7 @@ trait Typers extends Modes with Adaptations with Tags { // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't? MissingTypeParametersError(tree) } else if ( // (7.1) @M: check kind-arity - // @M: removed check for tree.hasSymbol and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol) + // @M: removed check for tree.hasSymbolField and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol) (inHKMode(mode)) && // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!! // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!) @@ -1457,7 +1457,7 @@ trait Typers extends Modes with Adaptations with Tags { // Determine // - supertparams: Missing type parameters from supertype // - supertpe: Given supertype, polymorphic in supertparams - val supertparams = if (supertpt.hasSymbol) supertpt.symbol.typeParams else List() + val supertparams = if (supertpt.hasSymbolField) supertpt.symbol.typeParams else List() var supertpe = supertpt.tpe if (!supertparams.isEmpty) supertpe = PolyType(supertparams, appliedType(supertpe, supertparams map (_.tpeHK))) @@ -2896,7 +2896,7 @@ trait Typers extends Modes with Adaptations with Tags { def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree } def preSelectOverloaded(fun: Tree): Tree = { - if (fun.hasSymbol && fun.symbol.isOverloaded) { + if (fun.hasSymbolField && fun.symbol.isOverloaded) { // remove alternatives with wrong number of parameters without looking at types. // less expensive than including them in inferMethodAlternatvie (see below). 
def shapeType(arg: Tree): Type = arg match { @@ -4200,7 +4200,7 @@ trait Typers extends Modes with Adaptations with Tags { val tpt1 = { val tpt0 = typedTypeConstructor(tpt) if (checkStablePrefixClassType(tpt0)) - if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) { + if (tpt0.hasSymbolField && !tpt0.symbol.typeParams.isEmpty) { context.undetparams = cloneSymbols(tpt0.symbol.typeParams) notifyUndetparamsAdded(context.undetparams) TypeTree().setOriginal(tpt0) @@ -4412,12 +4412,12 @@ trait Typers extends Modes with Adaptations with Tags { else doTypedApply(tree, fun2, args, mode, pt) /* - if (fun2.hasSymbol && fun2.symbol.isConstructor && (mode & EXPRmode) != 0) { + if (fun2.hasSymbolField && fun2.symbol.isConstructor && (mode & EXPRmode) != 0) { res.tpe = res.tpe.notNull } */ // TODO: In theory we should be able to call: - //if (fun2.hasSymbol && fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass) { + //if (fun2.hasSymbolField && fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass) { // But this causes cyclic reference for Array class in Cleanup. It is easy to overcome this // by calling ArrayClass.info here (or some other place before specialize). if (fun2.symbol == Array_apply && !res.isErrorTyped) { @@ -5023,7 +5023,7 @@ trait Typers extends Modes with Adaptations with Tags { val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType) if (tpt1.isErrorTyped) { tpt1 - } else if (!tpt1.hasSymbol) { + } else if (!tpt1.hasSymbolField) { AppliedTypeNoParametersError(tree, tpt1.tpe) } else { val tparams = tpt1.symbol.typeParams @@ -5395,7 +5395,7 @@ trait Typers extends Modes with Adaptations with Tags { if (context.retyping && (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) { tree.tpe = null - if (tree.hasSymbol) tree.symbol = NoSymbol + if (tree.hasSymbolField) tree.symbol = NoSymbol } alreadyTyped = tree.tpe ne null diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 95135b84e0..bc8ded62d8 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -102,7 +102,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => }) var expr = new Transformer { override def transform(tree: Tree): Tree = - if (tree.hasSymbol && tree.symbol.isFreeTerm) { + if (tree.hasSymbolField && tree.symbol.isFreeTerm) { tree match { case Ident(_) => val freeTermRef = Ident(freeTermNames(tree.symbol.asFreeTerm)) diff --git a/src/detach/plugin/scala/tools/detach/Detach.scala b/src/detach/plugin/scala/tools/detach/Detach.scala index 376a56beed..d56a7f0fbe 100644 --- a/src/detach/plugin/scala/tools/detach/Detach.scala +++ b/src/detach/plugin/scala/tools/detach/Detach.scala @@ -206,7 +206,7 @@ abstract class Detach extends PluginComponent symSet(capturedObjects, owner) += qsym case Select(qual, name) - if (qual.hasSymbol && + if (qual.hasSymbolField && (sym.owner != owner) && !(sym.ownerChain contains ScalaPackageClass) && !(sym.owner hasFlag JAVA)) => @@ -284,7 +284,7 @@ abstract class Detach extends PluginComponent def isOuter(sym: Symbol): Boolean = sym.isOuterAccessor || sym.name.endsWith(nme.OUTER/*, nme.OUTER.length*/) - if (tree.hasSymbol && isOuter(tree.symbol)) subst(from, to) + if (tree.hasSymbolField && isOuter(tree.symbol)) subst(from, to) super.traverse(tree) } } @@ -293,7 +293,7 @@ abstract class Detach extends PluginComponent private class TreeTypeRefSubstituter(clazz: Symbol) extends 
Traverser { override def traverse(tree: Tree) { val sym = tree.symbol - if (tree.hasSymbol && isRefClass(sym.tpe) && + if (tree.hasSymbolField && isRefClass(sym.tpe) && (sym.owner.enclClass == clazz) && (sym.isValueParameter || sym.hasFlag(PARAMACCESSOR))) { sym setInfo mkRemoteRefClass(sym.tpe) @@ -329,7 +329,7 @@ abstract class Detach extends PluginComponent } val map = new mutable.HashMap[Symbol, Symbol] override def traverse(tree: Tree) { - if (tree.hasSymbol && tree.symbol != NoSymbol) { + if (tree.hasSymbolField && tree.symbol != NoSymbol) { val sym = tree.symbol if (sym.owner == from) { val sym1 = map get sym match { @@ -369,7 +369,7 @@ abstract class Detach extends PluginComponent def removeAccessors(tree: Tree): Tree = tree match { case Apply(fun, _) => removeAccessors(fun) - case Select(qual, _) if tree.hasSymbol && tree.symbol.isOuterAccessor => + case Select(qual, _) if tree.hasSymbolField && tree.symbol.isOuterAccessor => removeAccessors(qual) case _ => tree @@ -382,7 +382,7 @@ abstract class Detach extends PluginComponent // transforms field assignment $outer.i$1.elem=.. // into setter $outer.i$1_=(..) case Assign(lhs @ Select(qual1 @ Select(qual, name), name1), rhs) - if qual1.hasSymbol && !qual1.symbol.isPrivateLocal && + if qual1.hasSymbolField && !qual1.symbol.isPrivateLocal && isRemoteRefClass(qual1.tpe) => if (DEBUG) println("\nTreeAccessorSubstituter: Assign1\n\tqual1="+qual1+", sel.tpe="+lhs.tpe+ @@ -398,7 +398,7 @@ abstract class Detach extends PluginComponent // transforms local assignment this.x$1.elem=.. // into setter method this.x$1_=(..) case Assign(lhs @ Select(qual, name), rhs) - if qual.hasSymbol && qual.symbol.isPrivateLocal && + if qual.hasSymbolField && qual.symbol.isPrivateLocal && isRemoteRefClass(qual.tpe) => if (DEBUG) println("\nTreeAccessorSubstituter: Assign2"+ @@ -412,7 +412,7 @@ abstract class Detach extends PluginComponent Apply(fun, List(super.transform(rhs))) setType lhs.tpe case Assign(Select(qual, name), rhs) - if qual.hasSymbol && (objs contains qual.symbol) => + if qual.hasSymbolField && (objs contains qual.symbol) => val sym = qual.symbol val proxy = proxySyms(objs indexOf sym) if (DEBUG) @@ -461,7 +461,7 @@ abstract class Detach extends PluginComponent // transforms field $outer.name$1 into getter method $outer.name$1() case Select(qual @ Select(_, name1), name) - if qual.hasSymbol && name1.endsWith(nme.OUTER/*, nme.OUTER.length*/) && + if qual.hasSymbolField && name1.endsWith(nme.OUTER/*, nme.OUTER.length*/) && !tree.symbol.isMethod => if (DEBUG) println("\nTreeAccessorSubstituter: Select0\n\tqual="+qual+ @@ -500,7 +500,7 @@ abstract class Detach extends PluginComponent // transforms field access $outer.i$1.elem // into invocation of getter method $outer.i$1() case Select(qual @ Select(qual1, name1), name) - if qual.hasSymbol && !qual.symbol.isPrivateLocal && + if qual.hasSymbolField && !qual.symbol.isPrivateLocal && isRemoteRefClass(qual.tpe) => if (DEBUG) println("\nTreeAccessorSubstituter: Select2\n\tqual="+qual+ @@ -513,7 +513,7 @@ abstract class Detach extends PluginComponent // transforms local access this.i$1.elem // into invocation of getter method this.i$1() case Select(qual, name) - if qual.hasSymbol && qual.symbol.isPrivateLocal && + if qual.hasSymbolField && qual.symbol.isPrivateLocal && isRemoteRefClass(qual.tpe) => if (DEBUG) println("\nTreeAccessorSubstituter: Select3\n\tqual="+qual+ @@ -523,7 +523,7 @@ abstract class Detach extends PluginComponent Apply(fun, List()) setType tree.tpe case Select(qual, name) - if 
qual.hasSymbol && (objs contains qual.symbol) => + if qual.hasSymbolField && (objs contains qual.symbol) => if (DEBUG) println("\nTreeAccessorSubstituter: Select4\n\tqual="+qual+ ", qual.tpe="+qual.tpe+", name="+name)//debug diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index 1f15ee6070..bbd5d00be3 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -100,7 +100,7 @@ trait Trees { self: Universe => def symbol: Symbol /** ... */ - def hasSymbol: Boolean + def hasSymbolField: Boolean /** Provides an alternate if tree is empty * @param alt The alternate tree diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index 43902c1930..ea8d6078ff 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ b/src/reflect/scala/reflect/internal/Importers.scala @@ -427,17 +427,17 @@ trait Importers extends api.Importers { self: SymbolTable => } addFixup({ if (mytree != null) { - val mysym = if (tree.hasSymbol) importSymbol(tree.symbol) else NoSymbol + val mysym = if (tree.hasSymbolField) importSymbol(tree.symbol) else NoSymbol val mytpe = importType(tree.tpe) mytree match { case mytt: TypeTree => val tt = tree.asInstanceOf[from.TypeTree] - if (mytree.hasSymbol) mytt.symbol = mysym + if (mytree.hasSymbolField) mytt.symbol = mysym if (tt.wasEmpty) mytt.defineType(mytpe) else mytt.setType(mytpe) if (tt.original != null) mytt.setOriginal(importTree(tt.original)) case _ => - if (mytree.hasSymbol) mytree.symbol = importSymbol(tree.symbol) + if (mytree.hasSymbolField) mytree.symbol = importSymbol(tree.symbol) mytree.tpe = importType(tree.tpe) } } diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index fb165ab50f..b4f03cfdd1 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -546,8 +546,8 @@ trait Printers extends api.Printers { self: SymbolTable => case _ => print(value.toString) } case tree: Tree => - val hasSymbol = tree.hasSymbol && tree.symbol != NoSymbol - val isError = hasSymbol && tree.symbol.name.toString == nme.ERROR.toString + val hasSymbolField = tree.hasSymbolField && tree.symbol != NoSymbol + val isError = hasSymbolField && tree.symbol.name.toString == nme.ERROR.toString printProduct( tree, preamble = _ => { @@ -560,7 +560,7 @@ trait Printers extends api.Printers { self: SymbolTable => if (isError) print("<") print(name) if (isError) print(": error>") - } else if (hasSymbol) { + } else if (hasSymbolField) { tree match { case _: Ident | _: Select | _: SelectFromTypeTree => print(tree.symbol) case _ => print(tree.symbol.name) diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 0f133c2306..d74a78ebda 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -31,7 +31,8 @@ trait Trees extends api.Trees { self: SymbolTable => def symbol: Symbol = null //!!!OPT!!! symbol is about 3% of hot compile times -- megamorphic dispatch? 
def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) } def setSymbol(sym: Symbol): this.type = { symbol = sym; this } - def hasSymbol = false + def hasSymbolField = false + @deprecated("Use hasSymbolField", "2.11.0") def hasSymbol = hasSymbolField def isDef = false @@ -62,7 +63,7 @@ trait Trees extends api.Trees { self: SymbolTable => private[scala] def copyAttrs(tree: Tree): this.type = { rawatt = tree.rawatt tpe = tree.tpe - if (hasSymbol) symbol = tree.symbol + if (hasSymbolField) symbol = tree.symbol this } @@ -210,7 +211,7 @@ trait Trees extends api.Trees { self: SymbolTable => trait TypTree extends Tree with TypTreeApi abstract class SymTree extends Tree with SymTreeContextApi { - override def hasSymbol = true + override def hasSymbolField = true override var symbol: Symbol = NoSymbol } @@ -1412,7 +1413,7 @@ trait Trees extends api.Trees { self: SymbolTable => } if (tree.tpe ne null) tree.tpe = symSubst(tree.tpe) - if (tree.hasSymbol) { + if (tree.hasSymbolField) { subst(from, to) tree match { case Ident(name0) if tree.symbol != NoSymbol => -- cgit v1.2.3 From ca89fb9221b4b96fca9d72292dbe46b4bb0ce326 Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Thu, 4 Oct 2012 12:09:36 +0400 Subject: Fix broken links with names that need to be encoded when querying the owner. --- src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala | 2 +- test/scaladoc/resources/links.scala | 4 +++- test/scaladoc/run/links.scala | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala index 7ab73cceff..7f260bb90a 100644 --- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala +++ b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala @@ -173,7 +173,7 @@ trait MemberLookup { // and removing NoType classes def cleanupBogusClasses(syms: List[Symbol]) = { syms.filter(_.info != NoType) } - def syms(name: Name) = container.info.nonPrivateMember(name).alternatives + def syms(name: Name) = container.info.nonPrivateMember(name.encodedName).alternatives def termSyms = cleanupBogusClasses(syms(newTermName(name))) def typeSyms = cleanupBogusClasses(syms(newTypeName(name))) diff --git a/test/scaladoc/resources/links.scala b/test/scaladoc/resources/links.scala index 09a52a4334..12f03a10d8 100644 --- a/test/scaladoc/resources/links.scala +++ b/test/scaladoc/resources/links.scala @@ -22,6 +22,7 @@ package scala.test.scaladoc.links { object Target { type T = Int => Int type S = Int + type ::[X] = scala.collection.immutable.::[X] class C def foo(i: Int) = 2 def foo(z: String) = 3 @@ -43,6 +44,7 @@ package scala.test.scaladoc.links { * - [[[[Target!.foo[A[_[_]]]* trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens) * - [[Target$.T object Target -> type T]] * - [[Target$.S object Target -> type S]] + * - [[Target$.:: object Target -> type ::]] * - [[Target$.foo(z:Str* object Target -> def foo]] * - [[Target$.bar object Target -> def bar]] * - [[[[Target$.foo[A[_[_]]]* trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens) @@ -59,4 +61,4 @@ package scala.test.scaladoc.links { def localMethod = 3 } } -class ImOutside \ No newline at end of file +class ImOutside diff --git a/test/scaladoc/run/links.scala b/test/scaladoc/run/links.scala index de359539cf..ca8f3e2723 100644 --- a/test/scaladoc/run/links.scala +++ b/test/scaladoc/run/links.scala @@ -22,7 +22,7 @@ 
object Test extends ScaladocModelTest { val memberLinks = countLinks(TEST.comment.get, _.link.isInstanceOf[LinkToMember]) val templateLinks = countLinks(TEST.comment.get, _.link.isInstanceOf[LinkToTpl]) - assert(memberLinks == 15, memberLinks + " == 15 (the member links in object TEST)") + assert(memberLinks == 16, memberLinks + " == 16 (the member links in object TEST)") assert(templateLinks == 5, templateLinks + " == 5 (the template links in object TEST)") } -} \ No newline at end of file +} -- cgit v1.2.3 From b405a2969477f9e9a76274eac1ebda3c0f2942ad Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 3 Oct 2012 18:06:39 +0200 Subject: SI-6467: Zero element in aggregate now by-name --- .../scala/collection/GenTraversableOnce.scala | 5 +++-- src/library/scala/collection/TraversableOnce.scala | 2 +- .../scala/collection/parallel/ParIterableLike.scala | 11 ++++++----- .../scala/collection/parallel/mutable/ParArray.scala | 2 +- test/files/presentation/ide-bug-1000531.check | 2 +- test/files/run/t6467.scala | 20 ++++++++++++++++++++ 6 files changed, 32 insertions(+), 10 deletions(-) create mode 100644 test/files/run/t6467.scala (limited to 'src') diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala index a872bc0948..9167280910 100644 --- a/src/library/scala/collection/GenTraversableOnce.scala +++ b/src/library/scala/collection/GenTraversableOnce.scala @@ -261,11 +261,12 @@ trait GenTraversableOnce[+A] extends Any { * @tparam B the type of accumulated results * @param z the initial value for the accumulated result of the partition - this * will typically be the neutral element for the `seqop` operator (e.g. - * `Nil` for list concatenation or `0` for summation) + * `Nil` for list concatenation or `0` for summation) and may be evaluated + * more than once * @param seqop an operator used to accumulate results within a partition * @param combop an associative operator used to combine results from different partitions */ - def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B + def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B /** Applies a binary operator to all elements of this $coll, going right to left. * $willNotTerminateInf diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index f912304680..a61d1354dc 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -184,7 +184,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) - def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop) + def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop) def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus) diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index b9a9e35574..0c0ff2b027 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -433,12 +433,13 @@ self: ParIterableLike[T, Repr, Sequential] => * @tparam S the type of accumulated results * @param z the initial value for the accumulated result of the partition - this * will typically be the neutral element for the `seqop` operator (e.g. 
- * `Nil` for list concatenation or `0` for summation) + * `Nil` for list concatenation or `0` for summation) and may be evaluated + * more than once * @param seqop an operator used to accumulate results within a partition * @param combop an associative operator used to combine results from different partitions */ - def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = { - tasksupport.executeAndWaitResult(new Aggregate(z, seqop, combop, splitter)) + def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = { + tasksupport.executeAndWaitResult(new Aggregate(() => z, seqop, combop, splitter)) } def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op) @@ -1006,10 +1007,10 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: Fold[U]) = result = op(result, that.result) } - protected[this] class Aggregate[S](z: S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T]) + protected[this] class Aggregate[S](z: () => S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T]) extends Accessor[S, Aggregate[S]] { @volatile var result: S = null.asInstanceOf[S] - def leaf(prevr: Option[S]) = result = pit.foldLeft(z)(seqop) + def leaf(prevr: Option[S]) = result = pit.foldLeft(z())(seqop) protected[this] def newSubtask(p: IterableSplitter[T]) = new Aggregate(z, seqop, combop, p) override def merge(that: Aggregate[S]) = result = combop(result, that.result) } diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index 56cc06f99e..deff9eda3b 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -181,7 +181,7 @@ self => override def fold[U >: T](z: U)(op: (U, U) => U): U = foldLeft[U](z)(op) - override def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop) + override def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop) override def sum[U >: T](implicit num: Numeric[U]): U = { var s = sum_quick(num, arr, until, i, num.zero) diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check index 4be98a6b21..6c3892d272 100644 --- a/test/files/presentation/ide-bug-1000531.check +++ b/test/files/presentation/ide-bug-1000531.check @@ -19,7 +19,7 @@ retrieved 126 members [accessible: true] `method addString(b: StringBuilder)StringBuilder` [accessible: true] `method addString(b: StringBuilder, sep: String)StringBuilder` [accessible: true] `method addString(b: StringBuilder, start: String, sep: String, end: String)StringBuilder` -[accessible: true] `method aggregate[B](z: B)(seqop: (B, B) => B, combop: (B, B) => B)B` +[accessible: true] `method aggregate[B](z: => B)(seqop: (B, B) => B, combop: (B, B) => B)B` [accessible: true] `method asInstanceOf[T0]=> T0` [accessible: true] `method buffered=> scala.collection.BufferedIterator[B]` [accessible: true] `method collectFirst[B](pf: PartialFunction[B,B])Option[B]` diff --git a/test/files/run/t6467.scala b/test/files/run/t6467.scala new file mode 100644 index 0000000000..dc93b69fdc --- /dev/null +++ b/test/files/run/t6467.scala @@ -0,0 +1,20 @@ + + + + +import collection._ + + + +object Test extends App { + + def compare(s1: String, s2: String) { + assert(s1 == s2, s1 + "\nvs.\n" + s2) + } + + compare(List(1, 2, 3, 4).aggregate(new java.lang.StringBuffer)(_ append _, _ 
append _).toString, "1234") + compare(List(1, 2, 3, 4).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, "1234") + compare(Seq(0 until 100: _*).aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, (0 until 100).mkString) + compare(Seq(0 until 100: _*).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, (0 until 100).mkString) + +} \ No newline at end of file -- cgit v1.2.3 From 53e8009192a9473101bf20998dd6111bd0160d6f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 1 Oct 2012 09:40:05 -0700 Subject: Fix for gluttunous raw type creation. ClassfileParser had a bug which led to it thinking pretty much anything might be a raw type, and thus creating extra symbols for no good reason. When compiling scala.collection, before this change it thought 1094 raw types had passed by; afterward it thought 237. --- .../tools/nsc/symtab/classfile/ClassfileParser.scala | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index e4a17f3f41..53893c2eda 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -732,17 +732,19 @@ abstract class ClassfileParser { } accept('>') assert(xs.length > 0, tp) - newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)) - } else if (classSym.isMonomorphicType) { + logResult("new existential")(newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList))) + } + // isMonomorphicType is false if the info is incomplete, as it usually is here + // so have to check unsafeTypeParams.isEmpty before worrying about raw type case below, + // or we'll create a boatload of needless existentials. + else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) { tp - } else { + } + else { // raw type - existentially quantify all type parameters val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams) - val t = typeRef(pre, classSym, eparams.map(_.tpeHK)) - val res = newExistentialType(eparams, t) - if (settings.debug.value && settings.verbose.value) - println("raw type " + classSym + " -> " + res) - res + val t = typeRef(pre, classSym, eparams.map(_.tpeHK)) + logResult(s"raw type from $classSym")(newExistentialType(eparams, t)) } case tp => assert(sig.charAt(index) != '<', tp) -- cgit v1.2.3 From 88fa89c238c5410a52d10171b727b4b41ab3cbca Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 4 Oct 2012 14:23:48 -0700 Subject: Replaced some comments. With the far more readable not-html comments of modern times. --- .../scala/tools/nsc/typechecker/Infer.scala | 40 +++++++++++----------- 1 file changed, 20 insertions(+), 20 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index d9f0c150ce..8f3995baf0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -30,8 +30,8 @@ trait Infer extends Checkable { private def assertNonCyclic(tvar: TypeVar) = assert(tvar.constr.inst != tvar, tvar.origin) - /** The formal parameter types corresponding to formals. - * If formals has a repeated last parameter, a list of + /** The formal parameter types corresponding to `formals`. 
+ * If `formals` has a repeated last parameter, a list of * (nargs - params.length + 1) copies of its type is returned. * By-name types are replaced with their underlying type. * @@ -295,8 +295,8 @@ trait Infer extends Checkable { /* -- Tests & Checks---------------------------------------------------- */ - /** Check that sym is defined and accessible as a member of - * tree site with type pre in current context. + /** Check that `sym` is defined and accessible as a member of + * tree `site` with type `pre` in current context. * * Note: pre is not refchecked -- moreover, refchecking the resulting tree may not refcheck pre, * since pre may not occur in its type (callers should wrap the result in a TypeTreeWithDeferredRefCheck) @@ -438,9 +438,9 @@ trait Infer extends Checkable { } /** Return inferred type arguments of polymorphic expression, given - * its type parameters and result type and a prototype pt. + * its type parameters and result type and a prototype `pt`. * If no minimal type variables exist that make the - * instantiated type a subtype of pt, return null. + * instantiated type a subtype of `pt`, return null. * * @param tparams ... * @param restpe ... @@ -472,7 +472,7 @@ trait Infer extends Checkable { /** Return inferred proto-type arguments of function, given * its type and value parameters and result type, and a - * prototype pt for the function result. + * prototype `pt` for the function result. * Type arguments need to be either determined precisely by * the prototype, or they are maximized, if they occur only covariantly * in the value parameter list. @@ -565,7 +565,7 @@ trait Infer extends Checkable { * * Rewrite for repeated param types: Map T* entries to Seq[T]. * @return map from tparams to inferred arg, if inference was successful, tparams that map to None are considered left undetermined - * type parameters that are inferred as `scala.Nothing` and that are not covariant in restpe are taken to be undetermined + * type parameters that are inferred as `scala.Nothing` and that are not covariant in `restpe` are taken to be undetermined */ def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = { val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]] @@ -593,7 +593,7 @@ trait Infer extends Checkable { /** Return inferred type arguments, given type parameters, formal parameters, * argument types, result type and expected result type. - * If this is not possible, throw a NoInstance exception. + * If this is not possible, throw a `NoInstance` exception. * Undetermined type arguments are represented by `definitions.NothingClass.tpe`. * No check that inferred parameters conform to their bounds is made here. * @@ -858,7 +858,7 @@ trait Infer extends Checkable { } else res1 } - /** Is type ftpe1 strictly more specific than type ftpe2 + /** Is type `ftpe1` strictly more specific than type `ftpe2` * when both are alternatives in an overloaded function? * @see SLS (sec:overloading-resolution) * @@ -1124,8 +1124,8 @@ trait Infer extends Checkable { } } - /** Substitute free type variables undetparams of application - * fn(args), given prototype pt. + /** Substitute free type variables `undetparams` of application + * `fn(args)`, given prototype `pt`. * * @param fn fn: the function that needs to be instantiated. 
* @param undetparams the parameters that need to be determined @@ -1178,8 +1178,8 @@ trait Infer extends Checkable { def widen(tp: Type): Type = abstractTypesToBounds(tp) - /** Substitute free type variables undetparams of type constructor - * tree in pattern, given prototype pt. + /** Substitute free type variables `undetparams` of type constructor + * `tree` in pattern, given prototype `pt`. * * @param tree the constuctor that needs to be instantiated * @param undetparams the undetermined type parameters @@ -1489,8 +1489,8 @@ trait Infer extends Checkable { } */ - /** Assign tree the symbol and type of the alternative which - * matches prototype pt, if it exists. + /** Assign `tree` the symbol and type of the alternative which + * matches prototype `pt`, if it exists. * If several alternatives match `pt`, take parameterless one. * If no alternative matches `pt`, take the parameterless one anyway. */ @@ -1593,8 +1593,8 @@ trait Infer extends Checkable { } } - /** Assign tree the type of an alternative which is applicable - * to argtpes, and whose result type is compatible with `pt`. + /** Assign `tree` the type of an alternative which is applicable + * to `argtpes`, and whose result type is compatible with `pt`. * If several applicable alternatives exist, drop the alternatives which use * default arguments, then select the most specialized one. * If no applicable alternative exists, and pt != WildcardType, try again @@ -1686,8 +1686,8 @@ trait Infer extends Checkable { else infer(true) } - /** Assign tree the type of all polymorphic alternatives - * with nparams as the number of type parameters, if it exists. + /** Assign `tree` the type of all polymorphic alternatives + * with `nparams` as the number of type parameters, if it exists. * If no such polymorphic alternative exist, error. * * @param tree ... -- cgit v1.2.3 From 6c7e6eb5d7bc29975d12b87ca65e3059ede422db Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 4 Oct 2012 14:24:04 -0700 Subject: Worked over inferMethodAlternative. We shouldn't let these core methods become so complicated. They are way harder to follow than is necessary. It is why nobody ever fixes the bugs in them. This is only the beginning really. So many things in the compiler would border on trivial to fix if one didn't have to navigate so much cruft and indirection. Here are some methods which no longer exist: - hasExactlyNumParams. A dubious name for a method containing the expression "len <= n + 1". - resolveOverloadedMethod. A method which returns a list of symbols can't be resolving all that much. - isUnitForVarArgs. Take a guess as to what that method does. Ha ha, you were not even close. Still on my hit list: - "very similar logic to doTypedApply in typechecker" I find a good rule of thumb is never to write a comment which paraphrases to "this is very similar to that." This entire patch is the fault of a. moors for trying to cherry-pick a comment of mine from github into trunk. This patch hopefully makes the comment unnecessary. 
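For readers who have not met them, the tupling and varargs adaptations these helpers dealt with are roughly as follows: a call passing zero or several arguments to a method expecting a single parameter may have its arguments wrapped into a tuple, while an empty argument list passed to a varargs method must be left alone rather than turned into (()) (SI-3224). The snippet below is an illustrative sketch only, not code from this patch, and its object and method names are made up.

    object TuplingSketch {
      // One tuple parameter: a call with two arguments may be auto-tupled.
      def one(p: (Int, Int)): (Int, Int) = p

      // Varargs: an empty argument list stays empty, it is not adapted to (()).
      def many(xs: Any*): Int = xs.length

      def demo(): Unit = {
        println(one(1, 2))     // adapted to one((1, 2))
        println(many())        // 0 -- no () to (()) conversion (SI-3224)
        println(many(1, 2, 3)) // 3
      }
    }
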
--- .../scala/tools/nsc/typechecker/Infer.scala | 340 +++++++++++---------- .../scala/tools/nsc/typechecker/Typers.scala | 43 ++- src/reflect/scala/reflect/internal/Types.scala | 17 +- 3 files changed, 214 insertions(+), 186 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 8f3995baf0..5c5966834b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -49,6 +49,24 @@ trait Infer extends Checkable { } else formals1 } + /** Sorts the alternatives according to the given comparison function. + * Returns a list containing the best alternative as well as any which + * the best fails to improve upon. + */ + private def bestAlternatives(alternatives: List[Symbol])(isBetter: (Symbol, Symbol) => Boolean): List[Symbol] = { + def improves(sym1: Symbol, sym2: Symbol) = ( + sym2 == NoSymbol + || sym2.isError + || sym2.hasAnnotation(BridgeClass) + || isBetter(sym1, sym2) + ) + + alternatives sortWith improves match { + case best :: rest if rest.nonEmpty => best :: rest.filterNot(alt => improves(best, alt)) + case bests => bests + } + } + /** Returns `(formals, formalsExpanded)` where `formalsExpanded` are the expected types * for the `nbSubPats` sub-patterns of an extractor pattern, of which the corresponding * unapply[Seq] call is assumed to have result type `resTp`. @@ -112,17 +130,6 @@ trait Infer extends Checkable { else (formals, formalsExpanded) } - def actualTypes(actuals: List[Type], nformals: Int): List[Type] = - if (nformals == 1 && !hasLength(actuals, 1)) - List(if (actuals.isEmpty) UnitClass.tpe else tupleType(actuals)) - else actuals - - def actualArgs(pos: Position, actuals: List[Tree], nformals: Int): List[Tree] = { - val inRange = nformals == 1 && !hasLength(actuals, 1) && actuals.lengthCompare(MaxTupleArity) <= 0 - if (inRange && !phase.erasedTypes) List(atPos(pos)(gen.mkTuple(actuals))) - else actuals - } - /** A fresh type variable with given type parameter as origin. * * @param tparam ... @@ -672,6 +679,35 @@ trait Infer extends Checkable { adjustTypeArgs(tparams, tvars, targs, restpe) } + /** One must step carefully when assessing applicability due to + * complications from varargs, tuple-conversion, named arguments. + * This method is used to filter out inapplicable methods, + * its behavior slightly configurable based on what stage of + * overloading resolution we're at. + * + * This method has boolean parameters, which is usually suboptimal + * but I didn't work out a better way. They don't have defaults, + * and the method's scope is limited. + */ + private[typechecker] def isApplicableBasedOnArity(tpe: Type, argsCount: Int, varargsStar: Boolean, tuplingAllowed: Boolean): Boolean = followApply(tpe) match { + case OverloadedType(pre, alts) => + alts exists (alt => isApplicableBasedOnArity(pre memberType alt, argsCount, varargsStar, tuplingAllowed)) + case _ => + val paramsCount = tpe.params.length + val simpleMatch = paramsCount == argsCount + def varargsTarget = isVarArgsList(tpe.params) + def varargsMatch = varargsTarget && (paramsCount - 1) <= argsCount + def tuplingMatch = tuplingAllowed && (argsCount != 1) && (paramsCount == 1 || paramsCount == 2 && varargsTarget) + + // A varargs star call, e.g. (x, y:_*) can only match a varargs method + // with the same number of parameters. See SI-5859 for an example of what + // would fail were this not enforced before we arrived at isApplicable. 
+ if (varargsStar) + varargsTarget && (paramsCount == argsCount) + else + simpleMatch || varargsMatch || tuplingMatch + } + private[typechecker] def followApply(tp: Type): Type = tp match { case NullaryMethodType(restp) => val restp1 = followApply(restp) @@ -688,14 +724,6 @@ trait Infer extends Checkable { else OverloadedType(tp, appmeth.alternatives) } - def hasExactlyNumParams(tp: Type, n: Int): Boolean = tp match { - case OverloadedType(pre, alts) => - alts exists (alt => hasExactlyNumParams(pre.memberType(alt), n)) - case _ => - val len = tp.params.length - len == n || isVarArgsList(tp.params) && len <= n + 1 - } - /** * Verifies whether the named application is valid. The logic is very * similar to the one in NamesDefaults.removeNames. @@ -741,17 +769,48 @@ trait Infer extends Checkable { (argtpes1, argPos, namesOK) } - /** don't do a () to (()) conversion for methods whose second parameter - * is a varargs. This is a fairly kludgey way to address #3224. - * We'll probably find a better way to do this by identifying - * tupled and n-ary methods, but thiws is something for a future major revision. + /** True if the given parameter list can accept a tupled argument list, + * and the argument list can be tupled (based on its length.) */ - def isUnitForVarArgs(args: List[AnyRef], params: List[Symbol]): Boolean = - args.isEmpty && hasLength(params, 2) && isVarArgsList(params) + def eligibleForTupleConversion(formals: List[Type], args: List[_]): Boolean = { + // Can't have exactly one argument; can't have more than MaxTupleArity. + def argumentsOk = args match { + case _ :: Nil => false + case _ => (args lengthCompare MaxTupleArity) <= 0 + } + // Must have either one parameter, or two with the second being varargs. + def paramsOk = formals match { + case _ :: Nil => true + case _ :: last :: Nil => args.nonEmpty && isScalaRepeatedParamType(last) // avoid () to (()) conversion on varargs; see SI-3224 + case _ => false + } + + argumentsOk && paramsOk + } + + /** If the given argument types are eligible for tuple conversion, the type + * of the tuple. Otherwise, NoType. + */ + def typeAfterTupleConversion(formals: List[Type], argtpes: List[Type]): Type = ( + if (eligibleForTupleConversion(formals, argtpes)) { + if (argtpes.isEmpty) UnitClass.tpe // empty argument list is 0-tuple + else tupleType(argtpes map { // already ruled out 1-element list + case NamedType(name, tp) => UnitClass.tpe // not a named arg - only assignments here + case RepeatedType(tp) => tp + case tp => tp + }) + } + else NoType + ) - /** Is there an instantiation of free type variables undetparams - * such that function type ftpe is applicable to - * argtpes and its result conform to pt? + def tupleIfNecessary(formals: List[Type], argtpes: List[Type]): List[Type] = typeAfterTupleConversion(formals, argtpes) match { + case NoType => argtpes + case tpe => tpe :: Nil + } + + /** Is there an instantiation of free type variables `undetparams` + * such that function type `ftpe` is applicable to + * `argtpes` and its result conform to `pt`? * * @param undetparams ... 
* @param ftpe the type of the function (often a MethodType) @@ -766,23 +825,15 @@ trait Infer extends Checkable { argtpes0: List[Type], pt: Type): Boolean = ftpe match { case OverloadedType(pre, alts) => - alts exists (alt => isApplicable(undetparams, pre.memberType(alt), argtpes0, pt)) + alts exists (alt => isApplicable(undetparams, pre memberType alt, argtpes0, pt)) case ExistentialType(tparams, qtpe) => isApplicable(undetparams, qtpe, argtpes0, pt) case mt @ MethodType(params, _) => val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false) - def tryTupleApply: Boolean = { - // if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0 - val tupleArgTpes = actualTypes(argtpes0 map { - // no assignment is treated as named argument here - case NamedType(name, tp) => UnitClass.tpe - case tp => tp - }, formals.length) - - !sameLength(argtpes0, tupleArgTpes) && - !isUnitForVarArgs(argtpes0, params) && - isApplicable(undetparams, ftpe, tupleArgTpes, pt) + def tryTupleApply = typeAfterTupleConversion(formals, argtpes0) match { + case NoType => false + case tupledType => isApplicable(undetparams, ftpe, tupledType :: Nil, pt) } def typesCompatible(argtpes: List[Type]) = { val restpe = ftpe.resultType(argtpes) @@ -804,17 +855,16 @@ trait Infer extends Checkable { val lencmp = compareLengths(argtpes0, formals) if (lencmp > 0) tryTupleApply else if (lencmp == 0) { - if (!argtpes0.exists(_.isInstanceOf[NamedType])) { - // fast track if no named arguments are used + // fast track if no named arguments are used + if (!containsNamedType(argtpes0)) typesCompatible(argtpes0) - } else { // named arguments are used val (argtpes1, argPos, namesOK) = checkNames(argtpes0, params) // when using named application, the vararg param has to be specified exactly once - ( namesOK && (isIdentity(argPos) || sameLength(formals, params)) && - // nb. arguments and names are OK, check if types are compatible - typesCompatible(reorderArgs(argtpes1, argPos)) + ( namesOK + && (isIdentity(argPos) || sameLength(formals, params)) + && typesCompatible(reorderArgs(argtpes1, argPos)) // nb. 
arguments and names are OK, check if types are compatible ) } } @@ -868,7 +918,7 @@ trait Infer extends Checkable { */ def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = ftpe1 match { case OverloadedType(pre, alts) => - alts exists (alt => isAsSpecific(pre.memberType(alt), ftpe2)) + alts exists (alt => isAsSpecific(pre memberType alt, ftpe2)) case et: ExistentialType => isAsSpecific(ftpe1.skolemizeExistential, ftpe2) //et.withTypeVars(isAsSpecific(_, ftpe2)) @@ -895,7 +945,7 @@ trait Infer extends Checkable { case _ => ftpe2 match { case OverloadedType(pre, alts) => - alts forall (alt => isAsSpecific(ftpe1, pre.memberType(alt))) + alts forall (alt => isAsSpecific(ftpe1, pre memberType alt)) case et: ExistentialType => et.withTypeVars(isAsSpecific(ftpe1, _)) case mt: MethodType => @@ -1138,10 +1188,10 @@ trait Infer extends Checkable { args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match { case mt @ MethodType(params0, _) => try { - val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 - val formals = formalTypes(mt.paramTypes, args.length) - val argtpes = actualTypes(args map (x => elimAnonymousClass(x.tpe.deconst)), formals.length) - val restpe = fn.tpe.resultType(argtpes) + val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 + val formals = formalTypes(mt.paramTypes, args.length) + val argtpes = tupleIfNecessary(formals, args map (x => elimAnonymousClass(x.tpe.deconst))) + val restpe = fn.tpe.resultType(argtpes) val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt) @@ -1496,45 +1546,22 @@ trait Infer extends Checkable { */ def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match { case OverloadedType(pre, alts) => tryTwice { isSecondTry => - val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt)) - val noAlternatives = alts0.isEmpty - val alts1 = if (noAlternatives) alts else alts0 + val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt)) + val alts1 = if (alts0.isEmpty) alts else alts0 - //println("trying "+alts1+(alts1 map (_.tpe))+(alts1 map (_.locationString))+" for "+pt) - def improves(sym1: Symbol, sym2: Symbol): Boolean = - sym2 == NoSymbol || sym2.hasAnnotation(BridgeClass) || - { val tp1 = pre.memberType(sym1) - val tp2 = pre.memberType(sym2) - (tp2 == ErrorType || - !global.typer.infer.isWeaklyCompatible(tp2, pt) && global.typer.infer.isWeaklyCompatible(tp1, pt) || - isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)) } + val bests = bestAlternatives(alts1) { (sym1, sym2) => + val tp1 = pre.memberType(sym1) + val tp2 = pre.memberType(sym2) - val best = ((NoSymbol: Symbol) /: alts1) ((best, alt) => - if (improves(alt, best)) alt else best) - - val competing = alts1 dropWhile (alt => best == alt || improves(best, alt)) - - if (best == NoSymbol) { - if (settings.debug.value) { - tree match { - case Select(qual, _) => - Console.println("qual: " + qual + ":" + qual.tpe + - " with decls " + qual.tpe.decls + - " with members " + qual.tpe.members + - " with members " + qual.tpe.member(newTermName("$minus"))) - case _ => - } - } - // todo: missing test case - NoBestExprAlternativeError(tree, pt, isSecondTry) - } else if (!competing.isEmpty) { - if (noAlternatives) NoBestExprAlternativeError(tree, pt, isSecondTry) - else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt, isSecondTry) - } else { -// val applicable = alts1 filter (alt => -// global.typer.infer.isWeaklyCompatible(pre.memberType(alt), 
pt)) -// checkNotShadowed(tree.pos, pre, best, applicable) - tree.setSymbol(best).setType(pre.memberType(best)) + ( tp2 == ErrorType + || (!isWeaklyCompatible(tp2, pt) && isWeaklyCompatible(tp1, pt)) + || isStrictlyMoreSpecific(tp1, tp2, sym1, sym2) + ) + } + bests match { + case best :: Nil => tree setSymbol best setType (pre memberType best) + case best :: competing :: _ if alts0.nonEmpty => if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry) + case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry) // todo: missing test case } } } @@ -1553,44 +1580,43 @@ trait Infer extends Checkable { private def paramMatchesName(param: Symbol, name: Name) = param.name == name || param.deprecatedParamName.exists(_ == name) - // Check the first parameter list the same way. - private def methodMatchesName(method: Symbol, name: Name) = method.paramss match { - case ps :: _ => ps exists (p => paramMatchesName(p, name)) - case _ => false + private def containsNamedType(argtpes: List[Type]): Boolean = argtpes match { + case Nil => false + case NamedType(_, _) :: _ => true + case _ :: rest => containsNamedType(rest) } - - private def resolveOverloadedMethod(argtpes: List[Type], eligible: List[Symbol]) = { + private def namesOfNamedArguments(argtpes: List[Type]) = + argtpes collect { case NamedType(name, _) => name } + + /** Given a list of argument types and eligible method overloads, whittle the + * list down to the methods which should be considered for specificity + * testing, taking into account here: + * - named arguments at the call site (keep only methods with name-matching parameters) + * - if multiple methods are eligible, drop any methods which take default arguments + * - drop any where arity cannot match under any conditions (allowing for + * overloaded applies, varargs, and tupling conversions) + * This method is conservative; it can tolerate some varieties of false positive, + * but no false negatives. + * + * @param eligible the overloaded method symbols + * @param argtpes the argument types at the call site + * @param varargsStar true if the call site has a `: _*` attached to the last argument + */ + private def overloadsToConsiderBySpecificity(eligible: List[Symbol], argtpes: List[Type], varargsStar: Boolean): List[Symbol] = { // If there are any foo=bar style arguments, and any of the overloaded // methods has a parameter named `foo`, then only those methods are considered. - val namesOfArgs = argtpes collect { case NamedType(name, _) => name } - val namesMatch = ( - if (namesOfArgs.isEmpty) Nil - else eligible filter { m => - namesOfArgs forall { name => - methodMatchesName(m, name) - } - } - ) - - if (namesMatch.nonEmpty) namesMatch - else if (eligible.isEmpty || eligible.tail.isEmpty) eligible - else eligible filter { alt => - // for functional values, the `apply` method might be overloaded - val mtypes = followApply(alt.tpe) match { - case OverloadedType(_, alts) => alts map (_.tpe) - case t => t :: Nil - } - // Drop those that use a default; keep those that use vararg/tupling conversion. - mtypes exists (t => - !t.typeSymbol.hasDefaultFlag && ( - compareLengths(t.params, argtpes) < 0 // tupling (*) - || hasExactlyNumParams(t, argtpes.length) // same nb or vararg - ) - ) - // (*) more arguments than parameters, but still applicable: tupling conversion works. 
- // todo: should not return "false" when paramTypes = (Unit) no argument is given - // (tupling would work) + val namesMatch = namesOfNamedArguments(argtpes) match { + case Nil => Nil + case names => eligible filter (m => names forall (name => m.info.params exists (p => paramMatchesName(p, name)))) } + if (namesMatch.nonEmpty) + namesMatch + else if (eligible.isEmpty || eligible.tail.isEmpty) + eligible + else + eligible filter (alt => + !alt.hasDefault && isApplicableBasedOnArity(alt.tpe, argtpes.length, varargsStar, tuplingAllowed = true) + ) } /** Assign `tree` the type of an alternative which is applicable @@ -1606,46 +1632,38 @@ trait Infer extends Checkable { * of some NamedType does not exist in an alternative's parameter names, * the type is replaces by `Unit`, i.e. the argument is treated as an * assignment expression. + * + * @pre tree.tpe is an OverloadedType. */ - def inferMethodAlternative(tree: Tree, undetparams: List[Symbol], - argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false, lastInferAttempt: Boolean = true): Unit = tree.tpe match { - case OverloadedType(pre, alts) => - val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 - tryTwice { isSecondTry => - debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt") - - def varargsApplicableCheck(alt: Symbol) = !varArgsOnly || ( - isVarArgsList(alt.tpe.params) - && (argtpes.size >= alt.tpe.params.size) // must be checked now due to SI-5859 - ) - val applicable = resolveOverloadedMethod(argtpes, - alts filter (alt => - varargsApplicableCheck(alt) - && inSilentMode(context)(isApplicable(undetparams, followApply(pre memberType alt), argtpes, pt)) - ) - ) - - def improves(sym1: Symbol, sym2: Symbol) = { - // util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString) - sym2 == NoSymbol || sym2.isError || sym2.hasAnnotation(BridgeClass) || - isStrictlyMoreSpecific(followApply(pre.memberType(sym1)), - followApply(pre.memberType(sym2)), sym1, sym2) - } - - val best = ((NoSymbol: Symbol) /: applicable) ((best, alt) => - if (improves(alt, best)) alt else best) - val competing = applicable.dropWhile(alt => best == alt || improves(best, alt)) - if (best == NoSymbol) { - if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt, isSecondTry && lastInferAttempt) - else inferMethodAlternative(tree, undetparams, argtpes, WildcardType, lastInferAttempt = isSecondTry) - } else if (!competing.isEmpty) { - AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt, isSecondTry && lastInferAttempt) - } else { -// checkNotShadowed(tree.pos, pre, best, applicable) - tree.setSymbol(best).setType(pre.memberType(best)) - } + def inferMethodAlternative(tree: Tree, undetparams: List[Symbol], argtpes0: List[Type], pt0: Type): Unit = { + val OverloadedType(pre, alts) = tree.tpe + var varargsStar = false + val argtpes = argtpes0 mapConserve { + case RepeatedType(tp) => varargsStar = true ; tp + case tp => tp + } + def followType(sym: Symbol) = followApply(pre memberType sym) + def bestForExpectedType(pt: Type, isLastTry: Boolean): Unit = { + val applicable0 = alts filter (alt => inSilentMode(context)(isApplicable(undetparams, followType(alt), argtpes, pt))) + val applicable = overloadsToConsiderBySpecificity(applicable0, argtpes, varargsStar) + val ranked = bestAlternatives(applicable)((sym1, sym2) => + isStrictlyMoreSpecific(followType(sym1), followType(sym2), sym1, sym2) + ) + ranked match { + case best :: competing :: _ => 
AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous + case best :: Nil => tree setSymbol best setType (pre memberType best) // success + case Nil if pt eq WildcardType => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed + case Nil => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType } - case _ => + } + // This potentially makes up to four attempts: tryTwice may execute + // with and without views enabled, and bestForExpectedType will try again + // with pt = WildcardType if it fails with pt != WildcardType. + tryTwice { isLastTry => + val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 + debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt") + bestForExpectedType(pt, isLastTry) + } } /** Try inference twice, once without views and once with views, diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index dc6beaf5c6..48d8c6186c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2898,19 +2898,18 @@ trait Typers extends Modes with Adaptations with Tags { def preSelectOverloaded(fun: Tree): Tree = { if (fun.hasSymbol && fun.symbol.isOverloaded) { // remove alternatives with wrong number of parameters without looking at types. - // less expensive than including them in inferMethodAlternatvie (see below). + // less expensive than including them in inferMethodAlternative (see below). def shapeType(arg: Tree): Type = arg match { case Function(vparams, body) => - functionType(vparams map (vparam => AnyClass.tpe), shapeType(body)) + functionType(vparams map (_ => AnyClass.tpe), shapeType(body)) case AssignOrNamedArg(Ident(name), rhs) => NamedType(name, shapeType(rhs)) case _ => NothingClass.tpe } val argtypes = args map shapeType - val pre = fun.symbol.tpe.prefix - - var sym = fun.symbol filter { alt => + val pre = fun.symbol.tpe.prefix + var sym = fun.symbol filter { alt => // must use pt as expected type, not WildcardType (a tempting quick fix to #2665) // now fixed by using isWeaklyCompatible in exprTypeArgs // TODO: understand why exactly -- some types were not inferred anymore (`ant clean quick.bin` failed) @@ -2921,16 +2920,15 @@ trait Typers extends Modes with Adaptations with Tags { // Types: "refs = Array(Map(), Map())". I determined that inference fails if there are at // least two invariant type parameters. See the test case I checked in to help backstop: // pos/isApplicableSafe.scala. 
- isApplicableSafe(context.undetparams, followApply(pre.memberType(alt)), argtypes, pt) + isApplicableSafe(context.undetparams, followApply(pre memberType alt), argtypes, pt) } if (sym.isOverloaded) { - val sym1 = sym filter (alt => { - // eliminate functions that would result from tupling transforms - // keeps alternatives with repeated params - hasExactlyNumParams(followApply(alt.tpe), argtypes.length) || - // also keep alts which define at least one default - alt.tpe.paramss.exists(_.exists(_.hasDefault)) - }) + // eliminate functions that would result from tupling transforms + // keeps alternatives with repeated params + val sym1 = sym filter (alt => + isApplicableBasedOnArity(pre memberType alt, argtypes.length, varargsStar = false, tuplingAllowed = false) + || alt.tpe.params.exists(_.hasDefault) + ) if (sym1 != NoSymbol) sym = sym1 } if (sym == NoSymbol) fun @@ -2944,16 +2942,19 @@ trait Typers extends Modes with Adaptations with Tags { case OverloadedType(pre, alts) => def handleOverloaded = { val undetparams = context.extractUndetparams() - - val argtpes = new ListBuffer[Type] - val amode = forArgMode(fun, mode) + val argtpes = new ListBuffer[Type] + val amode = forArgMode(fun, mode) val args1 = args map { case arg @ AssignOrNamedArg(Ident(name), rhs) => // named args: only type the righthand sides ("unknown identifier" errors otherwise) val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType) argtpes += NamedType(name, rhs1.tpe.deconst) // the assign is untyped; that's ok because we call doTypedApply - atPos(arg.pos) { new AssignOrNamedArg(arg.lhs, rhs1) } + treeCopy.AssignOrNamedArg(arg, arg.lhs, rhs1) + case arg @ Typed(repeated, Ident(tpnme.WILDCARD_STAR)) => + val arg1 = typedArg(arg, amode, BYVALmode, WildcardType) + argtpes += RepeatedType(arg1.tpe.deconst) + arg1 case arg => val arg1 = typedArg(arg, amode, BYVALmode, WildcardType) argtpes += arg1.tpe.deconst @@ -2963,7 +2964,7 @@ trait Typers extends Modes with Adaptations with Tags { if (context.hasErrors) setError(tree) else { - inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args)) + inferMethodAlternative(fun, undetparams, argtpes.toList, pt) doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt) } } @@ -2979,10 +2980,8 @@ trait Typers extends Modes with Adaptations with Tags { * default arguments) */ def tryTupleApply: Option[Tree] = { - // if 1 formal, 1 arg (a tuple), otherwise unmodified args - val tupleArgs = actualArgs(tree.pos.makeTransparent, args, formals.length) - - if (!sameLength(tupleArgs, args) && !isUnitForVarArgs(args, params)) { + if (eligibleForTupleConversion(formals, args) && !phase.erasedTypes) { + val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args))) // expected one argument, but got 0 or >1 ==> try applying to tuple // the inner "doTypedApply" does "extractUndetparams" => restore when it fails val savedUndetparams = context.undetparams diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 52efab3e48..a23ca9b311 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2806,9 +2806,13 @@ trait Types extends api.Types { self: SymbolTable => override def kind = "OverloadedType" } - def overloadedType(pre: Type, alternatives: List[Symbol]): Type = - if (alternatives.tail.isEmpty) pre memberType alternatives.head - else OverloadedType(pre, alternatives) + /** The canonical creator for 
OverloadedTypes. + */ + def overloadedType(pre: Type, alternatives: List[Symbol]): Type = alternatives match { + case Nil => NoType + case alt :: Nil => pre memberType alt + case _ => OverloadedType(pre, alternatives) + } /** A class remembering a type instantiation for some a set of overloaded * polymorphic symbols. @@ -3399,6 +3403,13 @@ trait Types extends api.Types { self: SymbolTable => case class NamedType(name: Name, tp: Type) extends Type { override def safeToString: String = name.toString +": "+ tp } + /** As with NamedType, used only when calling isApplicable. + * Records that the application has a wildcard star (aka _*) + * at the end of it. + */ + case class RepeatedType(tp: Type) extends Type { + override def safeToString: String = tp + ": _*" + } /** A temporary type representing the erasure of a user-defined value type. * Created during phase erasure, eliminated again in posterasure. -- cgit v1.2.3 From 120e14fadf30b4c39f953832108d19b736dc6f2d Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 2 Oct 2012 11:21:16 -0700 Subject: Fix for rangepos crasher. wrapClassTagUnapply was generating an unpositioned tree which would crash under -Yrangepos. See SI-6338. --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 5 +++-- test/files/neg/t3015.check | 5 +---- test/files/pos/classtag-pos.flags | 1 + test/files/pos/classtag-pos.scala | 5 +++++ 4 files changed, 10 insertions(+), 6 deletions(-) create mode 100644 test/files/pos/classtag-pos.flags create mode 100644 test/files/pos/classtag-pos.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 12e26a812d..cf9a07a7e4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3298,12 +3298,13 @@ trait Typers extends Modes with Adaptations with Tags { // println(util.Position.formatMessage(uncheckedPattern.pos, "made unchecked type test into a checked one", true)) val args = List(uncheckedPattern) + val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args)) // must call doTypedUnapply directly, as otherwise we get undesirable rewrites // and re-typechecks of the target of the unapply call in PATTERNmode, // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object, // but an arbitrary tree as is the case here - doTypedUnapply(Apply(classTagExtractor, args), classTagExtractor, classTagExtractor, args, PATTERNmode, pt) - } + doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt) + } // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test // return the corresponding extractor (an instance of ClassTag[`pt`]) diff --git a/test/files/neg/t3015.check b/test/files/neg/t3015.check index 4a03c940f4..6948392bb0 100644 --- a/test/files/neg/t3015.check +++ b/test/files/neg/t3015.check @@ -3,7 +3,4 @@ t3015.scala:7: error: scrutinee is incompatible with pattern type; required: String val b(foo) = "foo" ^ -error: type mismatch; - found : _$1 - required: String -two errors found +one error found diff --git a/test/files/pos/classtag-pos.flags b/test/files/pos/classtag-pos.flags new file mode 100644 index 0000000000..281f0a10cd --- /dev/null +++ b/test/files/pos/classtag-pos.flags @@ -0,0 +1 @@ +-Yrangepos diff --git a/test/files/pos/classtag-pos.scala b/test/files/pos/classtag-pos.scala new file mode 100644 index 
0000000000..768d2e27f4 --- /dev/null +++ b/test/files/pos/classtag-pos.scala @@ -0,0 +1,5 @@ +import scala.reflect.runtime.universe._ + +class A { + def f[T: TypeTag] = typeOf[T] match { case TypeRef(_, _, args) => args } +} -- cgit v1.2.3 From 1f99df2c66cb1933dd4db74aa872497a4e26975b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 2 Oct 2012 11:24:24 -0700 Subject: Eliminated pattern matcher warning. Implicit extractor must be available to pattern match on abstract type. Requires prior commit not to crash under -Yrangepos. --- src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala index fe1c4c0ca8..0bf4999fd6 100644 --- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala @@ -212,6 +212,7 @@ trait TypeStrings { } private def tparamString[T: ru.TypeTag] : String = { + import ru._ def typeArguments: List[ru.Type] = ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil } // [Eugene to Paul] need to use not the `rootMirror`, but a mirror with the REPL's classloader // how do I get to it? acquiring context classloader seems unreliable because of multithreading @@ -256,4 +257,4 @@ trait TypeStrings { ) } -object TypeStrings extends TypeStrings { } \ No newline at end of file +object TypeStrings extends TypeStrings { } -- cgit v1.2.3 From d7354838948be58b8045e1218a9c757d9b90df76 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 2 Oct 2012 10:37:13 -0700 Subject: Fix for spurious warning. Eliminates spurious "catch block may intercept non-local return" seen in recent builds of master. Unified some catch logic in TreeInfo, and removed some which never worked. 
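The warning concerns non-local returns: a `return` inside a closure is implemented by throwing a control exception, which a catch-all handler in the enclosing method can accidentally swallow. A minimal sketch of the hazardous shape (an illustration mirroring the test added in this patch, not code taken from it):

    object NonLocalReturnSketch {
      def foo(l: List[Int]): Int = {
        // For a non-empty list, `return 5` in the closure is compiled into
        // throwing a NonLocalReturnControl. The catch-all below intercepts it,
        // so foo answers 22 instead of 5 -- the situation the warning flags.
        try l foreach { _ => return 5 }
        catch { case _: Throwable => () }
        22
      }

      def main(args: Array[String]): Unit = println(foo(List(1, 2, 3))) // prints 22
    }
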
--- .../scala/tools/nsc/transform/UnCurry.scala | 19 +++++++-------- .../scala/tools/nsc/typechecker/Typers.scala | 14 +++++------ src/reflect/scala/reflect/internal/TreeInfo.scala | 28 +++++++++++++++------- test/files/neg/catch-all.check | 9 ++++--- test/files/neg/nonlocal-warning.check | 3 ++- test/files/neg/nonlocal-warning.scala | 11 +++++++++ 6 files changed, 53 insertions(+), 31 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index f68cbfc141..ea93ad1bd4 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -205,11 +205,8 @@ abstract class UnCurry extends InfoTransform val keyDef = ValDef(key, New(ObjectClass.tpe)) val tryCatch = Try(body, pat -> rhs) - body foreach { - case Try(t, catches, _) if catches exists treeInfo.catchesThrowable => - unit.warning(body.pos, "catch block may intercept non-local return from " + meth) - case _ => - } + for (Try(t, catches, _) <- body ; cdef <- catches ; if treeInfo catchesThrowable cdef) + unit.warning(body.pos, "catch block may intercept non-local return from " + meth) Block(List(keyDef), tryCatch) } @@ -691,16 +688,16 @@ abstract class UnCurry extends InfoTransform else tree } - + def isThrowable(pat: Tree): Boolean = pat match { - case Typed(Ident(nme.WILDCARD), tpt) => + case Typed(Ident(nme.WILDCARD), tpt) => tpt.tpe =:= ThrowableClass.tpe - case Bind(_, pat) => + case Bind(_, pat) => isThrowable(pat) case _ => false } - + def isDefaultCatch(cdef: CaseDef) = isThrowable(cdef.pat) && cdef.guard.isEmpty def postTransformTry(tree: Try) = { @@ -764,10 +761,10 @@ abstract class UnCurry extends InfoTransform case tree: Try => postTransformTry(tree) - + case Apply(Apply(fn, args), args1) => treeCopy.Apply(tree, fn, args ::: args1) - + case Ident(name) => assert(name != tpnme.WILDCARD_STAR, tree) applyUnary() diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index cf9a07a7e4..9c6f6a0f99 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5133,14 +5133,12 @@ trait Typers extends Modes with Adaptations with Tags { var block1 = typed(tree.block, pt) var catches1 = typedCases(tree.catches, ThrowableClass.tpe, pt) - for (cdef <- catches1 if cdef.guard.isEmpty) { - def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.") - def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol - cdef.pat match { - case Bind(name, i @ Ident(_)) if unbound(i) => warn(name) - case i @ Ident(name) if unbound(i) => warn(name) - case _ => - } + for (cdef <- catches1; if treeInfo catchesThrowable cdef) { + val name = (treeInfo assignedNameOfPattern cdef).decoded + context.warning(cdef.pat.pos, + s"""|This catches all Throwables, which often has undesirable consequences. 
+ |If intentional, use `case $name : Throwable` to clear this warning.""".stripMargin + ) } val finalizer1 = diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index db062d138f..d4a22886dd 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -397,19 +397,31 @@ abstract class TreeInfo { case _ => false } - /** Does this CaseDef catch Throwable? */ - def catchesThrowable(cdef: CaseDef) = catchesAllOf(cdef, ThrowableClass.tpe) + private def hasNoSymbol(t: Tree) = t.symbol == null || t.symbol == NoSymbol - /** Does this CaseDef catch everything of a certain Type? */ - def catchesAllOf(cdef: CaseDef, threshold: Type) = { - def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol + /** If this CaseDef assigns a name to its top-level pattern, + * in the form 'expr @ pattern' or 'expr: pattern', returns + * the name. Otherwise, nme.NO_NAME. + * + * Note: in the case of Constant patterns such as 'case x @ "" =>', + * the pattern matcher eliminates the binding and inlines the constant, + * so as far as this method is likely to be able to determine, + * the name is NO_NAME. + */ + def assignedNameOfPattern(cdef: CaseDef): Name = cdef.pat match { + case Bind(name, _) => name + case Ident(name) => name + case _ => nme.NO_NAME + } + + /** Does this CaseDef catch Throwable? */ + def catchesThrowable(cdef: CaseDef) = ( cdef.guard.isEmpty && (unbind(cdef.pat) match { case Ident(nme.WILDCARD) => true - case i@Ident(name) => unbound(i) - case Typed(_, tpt) => (tpt.tpe != null) && (threshold <:< tpt.tpe) + case i@Ident(name) => hasNoSymbol(i) case _ => false }) - } + ) /** Is this pattern node a catch-all or type-test pattern? */ def isCatchCase(cdef: CaseDef) = cdef match { diff --git a/test/files/neg/catch-all.check b/test/files/neg/catch-all.check index aaf51480c3..2d58dd99a8 100644 --- a/test/files/neg/catch-all.check +++ b/test/files/neg/catch-all.check @@ -1,10 +1,13 @@ -catch-all.scala:2: warning: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning. +catch-all.scala:2: warning: This catches all Throwables, which often has undesirable consequences. +If intentional, use `case _ : Throwable` to clear this warning. try { "warn" } catch { case _ => } ^ -catch-all.scala:4: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning. +catch-all.scala:4: warning: This catches all Throwables, which often has undesirable consequences. +If intentional, use `case x : Throwable` to clear this warning. try { "warn" } catch { case x => } ^ -catch-all.scala:6: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning. +catch-all.scala:6: warning: This catches all Throwables, which often has undesirable consequences. +If intentional, use `case x : Throwable` to clear this warning. try { "warn" } catch { case _: RuntimeException => ; case x => } ^ error: No warnings can be incurred under -Xfatal-warnings. diff --git a/test/files/neg/nonlocal-warning.check b/test/files/neg/nonlocal-warning.check index 5202df655a..67b3b10095 100644 --- a/test/files/neg/nonlocal-warning.check +++ b/test/files/neg/nonlocal-warning.check @@ -1,4 +1,5 @@ -nonlocal-warning.scala:4: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning. 
+nonlocal-warning.scala:4: warning: This catches all Throwables, which often has undesirable consequences. +If intentional, use `case x : Throwable` to clear this warning. catch { case x => 11 } ^ nonlocal-warning.scala:2: warning: catch block may intercept non-local return from method foo diff --git a/test/files/neg/nonlocal-warning.scala b/test/files/neg/nonlocal-warning.scala index cc98bd631a..f908a86302 100644 --- a/test/files/neg/nonlocal-warning.scala +++ b/test/files/neg/nonlocal-warning.scala @@ -4,4 +4,15 @@ class Foo { catch { case x => 11 } 22 } + + val pf: PartialFunction[Throwable, Unit] = { + case x if false => () + } + + def bar(l: List[Int]): Int = { + try l foreach { _ => return 5 } + catch pf + finally println() + 22 + } } -- cgit v1.2.3 From 256934160007079f473131469af2df4d023c2cfc Mon Sep 17 00:00:00 2001 From: James Roper Date: Fri, 5 Oct 2012 12:22:24 +1000 Subject: SI-6478 Fixing JavaTokenParser ident --- .../util/parsing/combinator/JavaTokenParsers.scala | 7 +++--- test/files/run/parserJavaIdent.check | 26 ++++++++++++++++++++++ test/files/run/parserJavaIdent.scala | 26 ++++++++++++++++++++++ 3 files changed, 56 insertions(+), 3 deletions(-) create mode 100644 test/files/run/parserJavaIdent.check create mode 100644 test/files/run/parserJavaIdent.scala (limited to 'src') diff --git a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala index 520ac8cc2c..4e8504d346 100644 --- a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala +++ b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala @@ -21,11 +21,12 @@ import scala.annotation.migration * - `floatingPointNumber` */ trait JavaTokenParsers extends RegexParsers { - /** Anything starting with an ASCII alphabetic character or underscore, - * followed by zero or more repetitions of regex's `\w`. + /** Anything that is a valid Java identifier, according to + * The Java Language Spec. + * Generally, this means a letter, followed by zero or more letters or numbers. */ def ident: Parser[String] = - """[a-zA-Z_]\w*""".r + """\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}*""".r /** An integer, without sign or with a negative sign. 
*/ def wholeNumber: Parser[String] = """-?\d+""".r diff --git a/test/files/run/parserJavaIdent.check b/test/files/run/parserJavaIdent.check new file mode 100644 index 0000000000..597ddbee47 --- /dev/null +++ b/test/files/run/parserJavaIdent.check @@ -0,0 +1,26 @@ +[1.7] parsed: simple +[1.8] parsed: with123 +[1.6] parsed: with$ +[1.10] parsed: withøßöèæ +[1.6] parsed: with_ +[1.6] parsed: _with +[1.1] failure: java identifier expected + +3start +^ +[1.1] failure: java identifier expected + +-start +^ +[1.5] failure: java identifier expected + +with-s + ^ +[1.3] failure: java identifier expected + +we♥scala + ^ +[1.6] failure: java identifier expected + +with space + ^ diff --git a/test/files/run/parserJavaIdent.scala b/test/files/run/parserJavaIdent.scala new file mode 100644 index 0000000000..c068075e4e --- /dev/null +++ b/test/files/run/parserJavaIdent.scala @@ -0,0 +1,26 @@ +object Test extends scala.util.parsing.combinator.JavaTokenParsers { + + def test[A](s: String) { + val res = parseAll(ident, s) match { + case Failure(_, in) => Failure("java identifier expected", in) + case o => o + } + println(res) + } + + def main(args: Array[String]) { + // Happy tests + test("simple") + test("with123") + test("with$") + test("withøßöèæ") + test("with_") + test("_with") + // Sad tests + test("3start") + test("-start") + test("with-s") + test("we♥scala") + test("with space") + } +} -- cgit v1.2.3 From 781788c9e23b7c5e1406e2fbc9dc2aaa8764381a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 5 Oct 2012 08:05:08 -0700 Subject: Incorporated pull request feedback. And fixed the test I broke at the last minute. Reworked tupling logic to make it harder to break. Expanded test coverage. --- .../scala/tools/nsc/typechecker/Infer.scala | 78 ++++++++++++---------- .../scala/tools/nsc/typechecker/Typers.scala | 5 +- test/files/neg/t3224.check | 28 ++++++-- test/files/neg/t3224.scala | 48 +++++++++---- 4 files changed, 104 insertions(+), 55 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 2fd685866f..16e864bd41 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -695,15 +695,15 @@ trait Infer extends Checkable { case _ => val paramsCount = tpe.params.length val simpleMatch = paramsCount == argsCount - def varargsTarget = isVarArgsList(tpe.params) + val varargsTarget = isVarArgsList(tpe.params) def varargsMatch = varargsTarget && (paramsCount - 1) <= argsCount - def tuplingMatch = tuplingAllowed && (argsCount != 1) && (paramsCount == 1 || paramsCount == 2 && varargsTarget) + def tuplingMatch = tuplingAllowed && eligibleForTupleConversion(paramsCount, argsCount, varargsTarget) // A varargs star call, e.g. (x, y:_*) can only match a varargs method // with the same number of parameters. See SI-5859 for an example of what // would fail were this not enforced before we arrived at isApplicable. if (varargsStar) - varargsTarget && (paramsCount == argsCount) + varargsTarget && simpleMatch else simpleMatch || varargsMatch || tuplingMatch } @@ -772,40 +772,46 @@ trait Infer extends Checkable { /** True if the given parameter list can accept a tupled argument list, * and the argument list can be tupled (based on its length.) */ - def eligibleForTupleConversion(formals: List[Type], args: List[_]): Boolean = { - // Can't have exactly one argument; can't have more than MaxTupleArity. 
- def argumentsOk = args match { - case _ :: Nil => false - case _ => (args lengthCompare MaxTupleArity) <= 0 + def eligibleForTupleConversion(paramsCount: Int, argsCount: Int, varargsTarget: Boolean): Boolean = { + def canSendTuple = argsCount match { + case 0 => !varargsTarget // avoid () to (()) conversion - SI-3224 + case 1 => false // can't tuple a single argument + case n => n <= MaxTupleArity // <= 22 arguments } - // Must have either one parameter, or two with the second being varargs. - def paramsOk = formals match { - case _ :: Nil => true - case _ :: last :: Nil => args.nonEmpty && isScalaRepeatedParamType(last) // avoid () to (()) conversion on varargs; see SI-3224 - case _ => false + def canReceiveTuple = paramsCount match { + case 1 => true + case 2 => varargsTarget + case _ => false } - - argumentsOk && paramsOk + canSendTuple && canReceiveTuple + } + def eligibleForTupleConversion(formals: List[Type], argsCount: Int): Boolean = formals match { + case p :: Nil => eligibleForTupleConversion(1, argsCount, varargsTarget = isScalaRepeatedParamType(p)) + case _ :: p :: Nil if isScalaRepeatedParamType(p) => eligibleForTupleConversion(2, argsCount, varargsTarget = true) + case _ => false } - /** If the given argument types are eligible for tuple conversion, the type - * of the tuple. Otherwise, NoType. + /** The type of an argument list after being coerced to a tuple. + * @pre: the argument list is eligible for tuple conversion. */ - def typeAfterTupleConversion(formals: List[Type], argtpes: List[Type]): Type = ( - if (eligibleForTupleConversion(formals, argtpes)) { - if (argtpes.isEmpty) UnitClass.tpe // empty argument list is 0-tuple - else tupleType(argtpes map { // already ruled out 1-element list - case NamedType(name, tp) => UnitClass.tpe // not a named arg - only assignments here - case RepeatedType(tp) => tp - case tp => tp - }) - } - else NoType + private def typeAfterTupleConversion(argtpes: List[Type]): Type = ( + if (argtpes.isEmpty) UnitClass.tpe // aka "Tuple0" + else tupleType(argtpes map { + case NamedType(name, tp) => UnitClass.tpe // not a named arg - only assignments here + case RepeatedType(tp) => tp // but probably shouldn't be tupling a call containing :_* + case tp => tp + }) ) - def tupleIfNecessary(formals: List[Type], argtpes: List[Type]): List[Type] = typeAfterTupleConversion(formals, argtpes) match { - case NoType => argtpes - case tpe => tpe :: Nil + /** If the argument list needs to be tupled for the parameter list, + * a list containing the type of the tuple. Otherwise, the original + * argument list. 
+ */ + def tupleIfNecessary(formals: List[Type], argtpes: List[Type]): List[Type] = { + if (eligibleForTupleConversion(formals, argtpes.size)) + typeAfterTupleConversion(argtpes) :: Nil + else + argtpes } /** Is there an instantiation of free type variables `undetparams` @@ -829,11 +835,12 @@ trait Infer extends Checkable { case ExistentialType(tparams, qtpe) => isApplicable(undetparams, qtpe, argtpes0, pt) case mt @ MethodType(params, _) => - val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false) + val argslen = argtpes0.length + val formals = formalTypes(mt.paramTypes, argslen, removeByName = false) - def tryTupleApply = typeAfterTupleConversion(formals, argtpes0) match { - case NoType => false - case tupledType => isApplicable(undetparams, ftpe, tupledType :: Nil, pt) + def tryTupleApply = { + val tupled = tupleIfNecessary(mt.paramTypes, argtpes0) + (tupled ne argtpes0) && isApplicable(undetparams, ftpe, tupled, pt) } def typesCompatible(argtpes: List[Type]) = { val restpe = ftpe.resultType(argtpes) @@ -1558,10 +1565,11 @@ trait Infer extends Checkable { || isStrictlyMoreSpecific(tp1, tp2, sym1, sym2) ) } + // todo: missing test case for bests.isEmpty bests match { case best :: Nil => tree setSymbol best setType (pre memberType best) case best :: competing :: _ if alts0.nonEmpty => if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry) - case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry) // todo: missing test case + case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry) } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 3054993c20..87f165a54c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2973,14 +2973,15 @@ trait Typers extends Modes with Adaptations with Tags { case mt @ MethodType(params, _) => val paramTypes = mt.paramTypes // repeat vararg as often as needed, remove by-name - val formals = formalTypes(paramTypes, args.length) + val argslen = args.length + val formals = formalTypes(paramTypes, argslen) /** Try packing all arguments into a Tuple and apply `fun` * to that. This is the last thing which is tried (after * default arguments) */ def tryTupleApply: Option[Tree] = { - if (eligibleForTupleConversion(formals, args) && !phase.erasedTypes) { + if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) { val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args))) // expected one argument, but got 0 or >1 ==> try applying to tuple // the inner "doTypedApply" does "extractUndetparams" => restore when it fails diff --git a/test/files/neg/t3224.check b/test/files/neg/t3224.check index 29304c567a..69b02c8862 100644 --- a/test/files/neg/t3224.check +++ b/test/files/neg/t3224.check @@ -1,6 +1,26 @@ -t3224.scala:29: error: polymorphic expression cannot be instantiated to expected type; +t3224.scala:30: error: polymorphic expression cannot be instantiated to expected type; found : [T]Array[T] required: List[?] - println(Texts textL Array()); println(Texts textL Array(1)); println(Texts textL Array(1, 1)) - ^ -one error found + println(Texts textL Array()) + ^ +t3224.scala:34: error: type mismatch; + found : List[Nothing] + required: Array[?] 
+ println(Texts textA List()) + ^ +t3224.scala:35: error: type mismatch; + found : List[Int] + required: Array[?] + println(Texts textA List(1)) + ^ +t3224.scala:36: error: type mismatch; + found : List[Int] + required: Array[?] + println(Texts textA List(1, 1)); + ^ +t3224.scala:48: error: polymorphic expression cannot be instantiated to expected type; + found : [T]Array[T] + required: List[?] + assert(size(Array()) == 0) + ^ +5 errors found diff --git a/test/files/neg/t3224.scala b/test/files/neg/t3224.scala index 774de3335a..b7af8a67b5 100755 --- a/test/files/neg/t3224.scala +++ b/test/files/neg/t3224.scala @@ -1,30 +1,50 @@ object Texts{ - def textL[T](list: List[T]) = { - list match{ - case List() => "Empty" - case List(_) => "One" + def textL[T](list: List[T]) = { + list match{ + case List() => "Empty" + case List(_) => "One" case List(_*) => "Many" } } - def textA[T](array: Array[T]) = { - array match{ - case Array() => "Empty" - case Array(_) => "One" + def textA[T](array: Array[T]) = { + array match{ + case Array() => "Empty" + case Array(_) => "One" case Array(_*) => "Many" } } } object Test extends App { + { + implicit def array2list[T](array: Array[T]) = { + println(array.toList.size) + array.toList + } + + println(Texts textL List()) + println(Texts textL List(1)) + println(Texts textL List(1, 1)); + + println(Texts textL Array()) + println(Texts textL Array(1)) + println(Texts textL Array(1, 1)) - implicit def array2list[T](array: Array[T]) = { - println(array.toList.size) - array.toList + println(Texts textA List()) + println(Texts textA List(1)) + println(Texts textA List(1, 1)); + + println(Texts textA Array()) + println(Texts textA Array(1)) + println(Texts textA Array(1, 1)) } - - println(Texts textL List()); println(Texts textL List(1)); println(Texts textL List(1, 1)); + { + implicit def array2list[T](array: Array[T]) = array.toList + def size[T](list: List[T]) = list.size - println(Texts textL Array()); println(Texts textL Array(1)); println(Texts textL Array(1, 1)) + assert(size(array2list(Array())) == 0) + assert(size(Array()) == 0) + } } -- cgit v1.2.3 From 883f1ac88dd7cec5882d42d6b48d7f267d1f6e00 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 5 Oct 2012 22:19:52 -0700 Subject: Responded to comment about how many isCoercibles there are. I make the case that there is only one. --- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 11 +++++++++++ 1 file changed, 11 insertions(+) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 16e864bd41..64b0da15aa 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -410,8 +410,19 @@ trait Infer extends Checkable { /** Like weakly compatible but don't apply any implicit conversions yet. * Used when comparing the result type of a method with its prototype. + * * [Martin] I think Infer is also created by Erasure, with the default * implementation of isCoercible + * [Paulp] (Assuming the above must refer to my comment on isCoercible) + * Nope, I examined every occurrence of Inferencer in trunk. It + * appears twice as a self-type, once at its definition, and once + * where it is instantiated in Typers. There are no others. 
+ * + % ack -A0 -B0 --no-filename '\bInferencer\b' src + self: Inferencer => + self: Inferencer => + class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable { + val infer = new Inferencer(context0) { */ def isConservativelyCompatible(tp: Type, pt: Type): Boolean = context.withImplicitsDisabled(isWeaklyCompatible(tp, pt)) -- cgit v1.2.3 From ff9f60f420c090b6716c927ab0359b082f2299de Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 6 Oct 2012 10:20:45 -0700 Subject: Fix for SI-6482, lost bounds in extension methods. That was a good one. How to create a new method with type parameters from multiple sources, herein. --- .../tools/nsc/transform/ExtensionMethods.scala | 57 +++++++++++++++++----- test/files/pos/t6482.scala | 11 +++++ 2 files changed, 55 insertions(+), 13 deletions(-) create mode 100644 test/files/pos/t6482.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 90d5a7ac75..8ffe9c2948 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -76,13 +76,13 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { } /** This method removes the `$this` argument from the parameter list a method. - * + * * A method may be a `PolyType`, in which case we tear out the `$this` and the class * type params from its nested `MethodType`. * It may be a `MethodType`, either with a curried parameter list in which the first argument * is a `$this` - we just return the rest of the list. * This means that the corresponding symbol was generated during `extmethods`. - * + * * It may also be a `MethodType` in which the `$this` does not appear in a curried parameter list. * The curried lists disappear during `uncurry`, and the methods may be duplicated afterwards, * for instance, during `specialize`. @@ -111,20 +111,51 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed) } + /** We will need to clone the info of the original method (which obtains clones + * of the method type parameters), clone the type parameters of the value class, + * and create a new polymethod with the union of all those type parameters, with + * their infos adjusted to be consistent with their new home. Example: + * + * class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal { + * def baz[B >: A](x: B): List[B] = x :: xs + * // baz has to be transformed into this extension method, where + * // A is cloned from class Foo and B is cloned from method baz: + * // def extension$baz[B >: A <: Any, A >: Nothing <: AnyRef]($this: Foo[A])(x: B): List[B] + * } + * + * TODO: factor out the logic for consolidating type parameters from a class + * and a method for re-use elsewhere, because nobody will get this right without + * some higher level facilities. + */ def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = { - // No variance for method type parameters - var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) - val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK)) + val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth + // Start with the class type parameters - clones will be method type parameters + // so must drop their variance. 
+ var tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) + def fix(tp: Type) = tp.substSym(clazz.typeParams, tparamsFromClass) + + val thisParamType = appliedType(clazz.typeConstructor, tparamsFromClass map (_.tpeHK)) val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType - def transform(clonedType: Type): Type = clonedType match { - case MethodType(params, restpe) => - // I assume it was a bug that this was dropping params... [Martin]: No, it wasn't; it's curried. - MethodType(List(thisParam), clonedType) - case NullaryMethodType(restpe) => - MethodType(List(thisParam), restpe) + val resultType = methodResult match { + case MethodType(_, _) => MethodType(List(thisParam), methodResult) + case NullaryMethodType(restpe) => MethodType(List(thisParam), restpe) } - val GenPolyType(tparams, restpe) = origInfo cloneInfo extensionMeth - GenPolyType(tparams ::: newTypeParams, transform(restpe) substSym (clazz.typeParams, newTypeParams)) + // We can't substitute symbols on the entire polytype because we + // need to modify the bounds of the cloned type parameters, but we + // don't want to substitute for the cloned type parameters themselves. + val tparams = tparamsFromMethod ::: tparamsFromClass + GenPolyType(tparams map (_ modifyInfo fix), fix(resultType)) + + // For reference, calling fix on the GenPolyType plays out like this: + // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966] + // do not conform to method extension$baz#16148's type parameter bounds + // + // And the difference is visible here. See how B is bounded from below by A#16149 + // in both cases, but in the failing case, the other type parameter has turned into + // a different A. + // + // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154] + // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151] } private def allParams(tpe: Type): List[Symbol] = tpe match { diff --git a/test/files/pos/t6482.scala b/test/files/pos/t6482.scala new file mode 100644 index 0000000000..24ea38e519 --- /dev/null +++ b/test/files/pos/t6482.scala @@ -0,0 +1,11 @@ +final class TraversableOnceOps[+A](val collection: TraversableOnce[A]) extends AnyVal { + def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = + if (collection.isEmpty) None else Some(collection.reduceLeft[B](op)) +} +// error: type arguments [B] do not conform to method reduceLeft's type parameter bounds [B >: A] +// if (collection.isEmpty) None else Some(collection.reduceLeft[B](op)) +// ^ + +class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal { + def baz[B >: A](x: B): List[B] = x :: xs +} -- cgit v1.2.3 From 153ccb4757718cceb219988f30381f73362e6075 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 6 Oct 2012 14:22:19 -0700 Subject: Incorporated pull request feedback. 
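
The feedback folded the two-way match on the cloned method result into a
single helper, dropNullaryMethod, added to Definitions, and passes the
cloned type parameters to appliedType as varargs when building the $this
parameter type. For orientation only, a toy sketch of what the helper
does (illustrative types, not the compiler's Type hierarchy):

    sealed trait Tpe
    case class NullaryMethodTpe(result: Tpe) extends Tpe
    case object IntTpe extends Tpe

    // "=> T" unwraps to T; any other type is returned unchanged
    def dropNullaryMethod(tp: Tpe): Tpe = tp match {
      case NullaryMethodTpe(restpe) => restpe
      case _                        => tp
    }

    println(dropNullaryMethod(NullaryMethodTpe(IntTpe))) // IntTpe
    println(dropNullaryMethod(IntTpe))                   // IntTpe (unchanged)
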
--- .../scala/tools/nsc/transform/ExtensionMethods.scala | 13 ++++++------- src/reflect/scala/reflect/internal/Definitions.scala | 5 +++++ 2 files changed, 11 insertions(+), 7 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 8ffe9c2948..a5d396cf45 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -131,15 +131,13 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth // Start with the class type parameters - clones will be method type parameters // so must drop their variance. - var tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) + val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) def fix(tp: Type) = tp.substSym(clazz.typeParams, tparamsFromClass) - val thisParamType = appliedType(clazz.typeConstructor, tparamsFromClass map (_.tpeHK)) + val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*) val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType - val resultType = methodResult match { - case MethodType(_, _) => MethodType(List(thisParam), methodResult) - case NullaryMethodType(restpe) => MethodType(List(thisParam), restpe) - } + val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult)) + // We can't substitute symbols on the entire polytype because we // need to modify the bounds of the cloned type parameters, but we // don't want to substitute for the cloned type parameters themselves. @@ -152,7 +150,8 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { // // And the difference is visible here. See how B is bounded from below by A#16149 // in both cases, but in the failing case, the other type parameter has turned into - // a different A. + // a different A. (What is that A? It is a clone of the original A created in + // SubstMap during the call to substSym, but I am not clear on all the particulars.) // // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154] // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151] diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 8865c762c4..3efa2c2bb0 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -669,6 +669,11 @@ trait Definitions extends api.StandardDefinitions { case _ => Nil } + def dropNullaryMethod(tp: Type) = tp match { + case NullaryMethodType(restpe) => restpe + case _ => tp + } + def unapplyUnwrap(tpe:Type) = tpe.finalResultType.normalize match { case RefinedType(p :: _, _) => p.normalize case tp => tp -- cgit v1.2.3 From 61f12faacaaccf366f9211ba6493fb042a91f1d2 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 6 Oct 2012 15:42:50 -0700 Subject: A little more housecleaning in ExtensionMethods. The only real contribution is readability. 
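
The main readability device below is a small extractor object,
ExtensionMethodType, which recognizes a method type whose first parameter
is the synthetic $this, so normalize can pattern match on a name instead
of spelling the shapes out inline. A toy sketch of the extractor-object
idea (an illustrative model, not the compiler code):

    // Strip a leading "$this" parameter, the way ExtensionMethodType
    // strips it from a MethodType.
    case class Method(params: List[String], result: String)

    object ExtensionShape {
      def unapply(m: Method): Option[Method] = m.params match {
        case "$this" :: rest => Some(Method(rest, m.result))
        case _               => None
      }
    }

    Method(List("$this", "x"), "Int") match {
      case ExtensionShape(inner) => println(inner) // Method(List(x),Int)
      case other                 => println(other)
    }
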
--- .../tools/nsc/transform/ExtensionMethods.scala | 126 ++++++++++++--------- .../scala/reflect/internal/Definitions.scala | 6 + 2 files changed, 80 insertions(+), 52 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index a5d396cf45..8a4ce6a53e 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -75,32 +75,41 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { matching.head } + /** Recognize a MethodType which represents an extension method. + * + * It may have a curried parameter list with the `$this` alone in the first + * parameter list, in which case that parameter list is dropped. Or, since + * the curried lists disappear during uncurry, it may have a single parameter + * list with `$this` as the first parameter, in which case that parameter is + * removed from the list. + */ + object ExtensionMethodType { + def unapply(tp: Type) = tp match { + case MethodType(thiz :: rest, restpe) if thiz.name == nme.SELF => + Some( if (rest.isEmpty) restpe else MethodType(rest, restpe) ) + case _ => + None + } + } + /** This method removes the `$this` argument from the parameter list a method. * * A method may be a `PolyType`, in which case we tear out the `$this` and the class - * type params from its nested `MethodType`. - * It may be a `MethodType`, either with a curried parameter list in which the first argument - * is a `$this` - we just return the rest of the list. - * This means that the corresponding symbol was generated during `extmethods`. - * - * It may also be a `MethodType` in which the `$this` does not appear in a curried parameter list. - * The curried lists disappear during `uncurry`, and the methods may be duplicated afterwards, - * for instance, during `specialize`. - * In this case, the first argument is `$this` and we just get rid of it. + * type params from its nested `MethodType`. Or it may be a MethodType, as + * described at the ExtensionMethodType extractor. 
*/ private def normalize(stpe: Type, clazz: Symbol): Type = stpe match { case PolyType(tparams, restpe) => - GenPolyType(tparams dropRight clazz.typeParams.length, normalize(restpe.substSym(tparams takeRight clazz.typeParams.length, clazz.typeParams), clazz)) - case MethodType(List(thiz), restpe) if thiz.name == nme.SELF => - restpe - case MethodType(tparams, restpe) => - MethodType(tparams.drop(1), restpe) + // method type parameters, class type parameters + val (mtparams, ctparams) = tparams splitAt (tparams.length - clazz.typeParams.length) + GenPolyType(mtparams, normalize(restpe.substSym(ctparams, clazz.typeParams), clazz)) + case ExtensionMethodType(etpe) => + etpe case _ => stpe } class Extender(unit: CompilationUnit) extends TypingTransformer(unit) { - private val extensionDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]() def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit = @@ -156,12 +165,6 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154] // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151] } - - private def allParams(tpe: Type): List[Symbol] = tpe match { - case MethodType(params, res) => params ::: allParams(res) - case _ => List() - } - override def transform(tree: Tree): Tree = { tree match { case Template(_, _, _) => @@ -176,39 +179,58 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { super.transform(tree) } else tree case DefDef(_, _, tparams, vparamss, _, rhs) if tree.symbol.isMethodWithExtension => - val companion = currentOwner.companionModule - val origMeth = tree.symbol - val extensionName = extensionNames(origMeth).head - val extensionMeth = companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL) - .setAnnotations(origMeth.annotations) - companion.info.decls.enter(extensionMeth) - val newInfo = extensionMethInfo(extensionMeth, origMeth.info, currentOwner) + val origMeth = tree.symbol + val origThis = currentOwner + val origTpeParams = tparams.map(_.symbol) ::: origThis.typeParams // method type params ++ class type params + val origParams = vparamss.flatten map (_.symbol) + val companion = origThis.companionModule + + def makeExtensionMethodSymbol = { + val extensionName = extensionNames(origMeth).head + val extensionMeth = ( + companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL) + setAnnotations origMeth.annotations + ) + companion.info.decls.enter(extensionMeth) + } + + val extensionMeth = makeExtensionMethodSymbol + val newInfo = extensionMethInfo(extensionMeth, origMeth.info, origThis) extensionMeth setInfo newInfo - log("Value class %s spawns extension method.\n Old: %s\n New: %s".format( - currentOwner, - origMeth.defString, - extensionMeth.defString)) // extensionMeth.defStringSeenAs(origInfo - - def thisParamRef = gen.mkAttributedIdent(extensionMeth.info.params.head setPos extensionMeth.pos) - val GenPolyType(extensionTpeParams, extensionMono) = extensionMeth.info - val origTpeParams = (tparams map (_.symbol)) ::: currentOwner.typeParams - val extensionBody = rhs + + log(s"Value class $origThis spawns extension method.\n Old: ${origMeth.defString}\n New: ${extensionMeth.defString}") + + val GenPolyType(extensionTpeParams, MethodType(thiz :: Nil, extensionMono)) = newInfo + val 
extensionParams = allParameters(extensionMono) + val extensionThis = gen.mkAttributedIdent(thiz setPos extensionMeth.pos) + + val extensionBody = ( + rhs .substituteSymbols(origTpeParams, extensionTpeParams) - .substituteSymbols(vparamss.flatten map (_.symbol), allParams(extensionMono).tail) - .substituteThis(currentOwner, thisParamRef) - .changeOwner((origMeth, extensionMeth)) - extensionDefs(companion) += atPos(tree.pos) { DefDef(extensionMeth, extensionBody) } - val extensionCallPrefix = Apply( - gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpeHK)), - List(This(currentOwner))) - val extensionCall = atOwner(origMeth) { - localTyper.typedPos(rhs.pos) { - (extensionCallPrefix /: vparamss) { - case (fn, params) => Apply(fn, params map (param => Ident(param.symbol))) - } - } - } - deriveDefDef(tree)(_ => extensionCall) + .substituteSymbols(origParams, extensionParams) + .substituteThis(origThis, extensionThis) + .changeOwner(origMeth -> extensionMeth) + ) + + // Record the extension method ( FIXME: because... ? ) + extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, extensionBody)) + + // These three lines are assembling Foo.bar$extension[T1, T2, ...]($this) + // which leaves the actual argument application for extensionCall. + val sel = Select(gen.mkAttributedRef(companion), extensionMeth) + val targs = origTpeParams map (_.tpeHK) + val callPrefix = gen.mkMethodCall(sel, targs, This(origThis) :: Nil) + + // Apply all the argument lists. + deriveDefDef(tree)(_ => + atOwner(origMeth)( + localTyper.typedPos(rhs.pos)( + (callPrefix /: vparamss) { + case (fn, params) => Apply(fn, params map (param => Ident(param.symbol))) + } + ) + ) + ) case _ => super.transform(tree) } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 3efa2c2bb0..1eed833ecc 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -867,6 +867,12 @@ trait Definitions extends api.StandardDefinitions { removeRedundantObjects(parents) } + /** Flatten curried parameter lists of a method type. */ + def allParameters(tpe: Type): List[Symbol] = tpe match { + case MethodType(params, res) => params ::: allParameters(res) + case _ => Nil + } + def typeStringNoPackage(tp: Type) = "" + tp stripPrefix tp.typeSymbol.enclosingPackage.fullName + "." -- cgit v1.2.3 From d562ef92e491828e82712ad50dbe82c034ed5f2c Mon Sep 17 00:00:00 2001 From: Ruslan Shevchenko Date: Sun, 7 Oct 2012 10:35:39 +0300 Subject: fixed trivial error with printing of exception stack trace in verbose mode. 
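
The verbose branch wrote the stack trace into a PrintWriter wrapped
around an anonymous StringWriter and then returned pw.toString, which is
only the default Object.toString of the writer, not the buffered trace.
A minimal sketch of the corrected idiom (the helper name here is
illustrative):

    import java.io.{ PrintWriter, StringWriter }

    def stackTraceToString(ex: Throwable): String = {
      val sw = new StringWriter()
      val pw = new PrintWriter(sw)
      ex.printStackTrace(pw)
      pw.flush()
      sw.toString // read the StringWriter's buffer, not the PrintWriter
    }
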
--- src/compiler/scala/tools/nsc/Global.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index cb5e2ad555..68ba217b48 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1520,9 +1520,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter) try compileUnitsInternal(units, fromPhase) catch { case ex: Throwable => val shown = if (settings.verbose.value) { - val pw = new java.io.PrintWriter(new java.io.StringWriter) + val sw = new java.io.StringWriter() + val pw = new java.io.PrintWriter(sw) ex.printStackTrace(pw) - pw.toString + pw.flush() + sw.toString } else ex.getClass.getName // ex.printStackTrace(Console.out) // DEBUG for fsc, note that error stacktraces do not print in fsc globalError(supplementErrorMessage("uncaught exception during compilation: " + shown)) -- cgit v1.2.3 From 1abc9013b825942ef0373a4e383efb627b379617 Mon Sep 17 00:00:00 2001 From: Ruslan Shevchenko Date: Sun, 7 Oct 2012 13:11:24 +0300 Subject: using existing scala.tools.nsc.util.stackTraceString --- src/compiler/scala/tools/nsc/Global.scala | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 68ba217b48..7a960c4d57 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -12,7 +12,7 @@ import scala.tools.util.PathResolver import scala.collection.{ mutable, immutable } import io.{ SourceReader, AbstractFile, Path } import reporters.{ Reporter, ConsoleReporter } -import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning } +import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning, stackTraceString } import scala.reflect.internal.util.{ NoPosition, OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile } import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat } import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers } @@ -1519,13 +1519,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def compileUnits(units: List[CompilationUnit], fromPhase: Phase) { try compileUnitsInternal(units, fromPhase) catch { case ex: Throwable => - val shown = if (settings.verbose.value) { - val sw = new java.io.StringWriter() - val pw = new java.io.PrintWriter(sw) - ex.printStackTrace(pw) - pw.flush() - sw.toString - } else ex.getClass.getName + val shown = if (settings.verbose.value) + stackTraceString(ex) + else + ex.getClass.getName // ex.printStackTrace(Console.out) // DEBUG for fsc, note that error stacktraces do not print in fsc globalError(supplementErrorMessage("uncaught exception during compilation: " + shown)) throw ex -- cgit v1.2.3 From b755617973dac7d63c9427b5887074a64cc78b93 Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Sun, 7 Oct 2012 18:14:09 +0400 Subject: Make reads in CommentFactory slice from the underlying buffer rather than create from a StringBuilder. Make the code easier to reason about. 
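
The reader used to append every consumed character to a shared
StringBuilder and drain it with getRead(); after this change it simply
remembers the offset where a read started and slices the consumed text
out of the underlying buffer. A simplified standalone sketch of the
pattern (not the actual CommentFactory reader, which also tracks an
end-of-text sentinel):

    class CharReader(buffer: String) {
      var offset = 0
      def atEnd: Boolean = offset >= buffer.length
      def char: Char = buffer charAt offset
      def nextChar(): Unit = offset += 1

      // run a scanning action, then return exactly what it consumed
      private def withRead(read: => Unit): String = {
        val start = offset
        read
        buffer.substring(start, offset)
      }

      def readUntil(stop: Char): String =
        withRead { while (!atEnd && char != stop) nextChar() }
    }

    // new CharReader("abc,def").readUntil(',') == "abc"
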
--- .../nsc/doc/model/comment/CommentFactory.scala | 134 ++++++--------------- 1 file changed, 40 insertions(+), 94 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala index 1baa7f9831..b294b347f6 100644 --- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala @@ -477,7 +477,6 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member var summaryParsed = false def document(): Body = { - nextChar() val blocks = new mutable.ListBuffer[Block] while (char != endOfText) blocks += block() @@ -559,21 +558,21 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member def code(): Block = { jumpWhitespace() jump("{{{") - readUntil("}}}") + val str = readUntil("}}}") if (char == endOfText) reportError(pos, "unclosed code block") else jump("}}}") blockEnded("code block") - Code(normalizeIndentation(getRead)) + Code(normalizeIndentation(str)) } /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */ def title(): Block = { jumpWhitespace() - val inLevel = repeatJump("=") + val inLevel = repeatJump('=') val text = inline(check("=" * inLevel)) - val outLevel = repeatJump("=", inLevel) + val outLevel = repeatJump('=', inLevel) if (inLevel != outLevel) reportError(pos, "unbalanced or unclosed heading") blockEnded("heading") @@ -583,7 +582,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member /** {{{ hrule ::= "----" { '-' } '\n' }}} */ def hrule(): Block = { jumpWhitespace() - repeatJump("-") + repeatJump('-') blockEnded("horizontal rule") HorizontalRule() } @@ -621,8 +620,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member } do { - readUntil { char == safeTagMarker || char == endOfText } - val str = getRead() + val str = readUntil { char == safeTagMarker || char == endOfText } nextChar() list += str @@ -660,8 +658,8 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member else if (check(",,")) subscript() else if (check("[[")) link() else { - readUntil { char == safeTagMarker || check("''") || char == '`' || check("__") || char == '^' || check(",,") || check("[[") || isInlineEnd || checkParaEnded || char == endOfLine } - Text(getRead()) + val str = readUntil { char == safeTagMarker || check("''") || char == '`' || check("__") || char == '^' || check(",,") || check("[[") || isInlineEnd || checkParaEnded || char == endOfLine } + Text(str) } } @@ -698,9 +696,8 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member def htmlTag(): HtmlTag = { jump(safeTagMarker) - readUntil(safeTagMarker) + val read = readUntil(safeTagMarker) if (char != endOfText) jump(safeTagMarker) - var read = getRead HtmlTag(read) } @@ -762,14 +759,11 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member def link(): Inline = { val SchemeUri = """([a-z]+:.*)""".r jump("[[") - var parens = 1 - readUntil { parens += 1; !check("[") } - getRead // clear the buffer + var parens = 2 + repeatJump('[') val start = "[" * parens val stop = "]" * parens //println("link with " + parens + " matching parens") - readUntil { check(stop) || check(" ") } - val target = getRead() + val target = readUntil { check(stop) || check(" ") } val title = if (!check(stop)) Some({ jump(" ") @@ -860,7 +854,6 @@ trait CommentFactory { 
thisFactory: ModelFactory with CommentFactory with Member (char == endOfText) || ((char == endOfLine) && { val poff = offset - val pc = char nextChar() // read EOL val ok = { checkSkipInitWhitespace(endOfLine) || @@ -870,7 +863,6 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member checkSkipInitWhitespace('\u003D') } offset = poff - char = pc ok }) } @@ -882,40 +874,31 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member protected sealed class CharReader(buffer: String) { reader => - var char: Char = _ var offset: Int = 0 + def char: Char = + if (offset >= buffer.length) endOfText else buffer charAt offset final def nextChar() { - if (offset >= buffer.length) - char = endOfText - else { - char = buffer charAt offset - offset += 1 - } + offset += 1 } final def check(chars: String): Boolean = { val poff = offset - val pc = char val ok = jump(chars) offset = poff - char = pc ok } def checkSkipInitWhitespace(c: Char): Boolean = { val poff = offset - val pc = char jumpWhitespace() val ok = jump(c) offset = poff - char = pc ok } def checkSkipInitWhitespace(chars: String): Boolean = { val poff = offset - val pc = char jumpWhitespace() val (ok0, chars0) = if (chars.charAt(0) == ' ') @@ -924,20 +907,17 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member (true, chars) val ok = ok0 && jump(chars0) offset = poff - char = pc ok } def countWhitespace: Int = { var count = 0 val poff = offset - val pc = char while (isWhitespace(char) && char != endOfText) { nextChar() count += 1 } offset = poff - char = pc count } @@ -964,38 +944,10 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member index == chars.length } - final def checkedJump(chars: String): Boolean = { - val poff = offset - val pc = char - val ok = jump(chars) - if (!ok) { - offset = poff - char = pc - } - ok - } - - final def repeatJump(chars: String, max: Int): Int = { + final def repeatJump(c: Char, max: Int = Int.MaxValue): Int = { var count = 0 - var more = true - while (more && count < max) { - if (!checkedJump(chars)) - more = false - else - count += 1 - } - count - } - - final def repeatJump(chars: String): Int = { - var count = 0 - var more = true - while (more) { - if (!checkedJump(chars)) - more = false - else - count += 1 - } + while (jump(c) && count < max) + count += 1 count } @@ -1035,47 +987,41 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member /* READERS */ - private val readBuilder = new mutable.StringBuilder - - final def getRead(): String = { - val bld = readBuilder.toString - readBuilder.clear() - if (bld.length < 6) bld.intern else bld - } - - final def readUntil(ch: Char): Int = { - var count = 0 - while (char != ch && char != endOfText) { - readBuilder += char - nextChar() + final def readUntil(c: Char): String = { + withRead { + while (char != c && char != endOfText) { + nextChar() + } } - count } - final def readUntil(chars: String): Int = { + final def readUntil(chars: String): String = { assert(chars.length > 0) - var count = 0 - val c = chars.charAt(0) - while (!check(chars) && char != endOfText) { - readBuilder += char - nextChar() - while (char != c && char != endOfText) { - readBuilder += char + withRead { + val c = chars.charAt(0) + while (!check(chars) && char != endOfText) { nextChar() + while (char != c && char != endOfText) + nextChar() } } - count } - final def readUntil(pred: => Boolean): Int = { - var count = 0 - while (!pred && char != endOfText) { - 
readBuilder += char - nextChar() + final def readUntil(pred: => Boolean): String = { + withRead { + while (char != endOfText && !pred) { + nextChar() + } } - count } + private def withRead(read: => Unit): String = { + val start = offset + read + buffer.substring(start, offset) + } + + /* CHARS CLASSES */ def isWhitespace(c: Char) = c == ' ' || c == '\t' -- cgit v1.2.3 From 17fd905e868db7de4c7f9046bb9c4937e281c1fe Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Fri, 21 Sep 2012 21:14:19 +0200 Subject: SI-6388 Remove deprecated items in the compiler jar Deprecations in the following files were not removed to prevent SBT from breaking: - src/compiler/scala/tools/nsc/Interpreter.scala - src/compiler/scala/tools/nsc/InterpreterLoop.scala - src/compiler/scala/tools/nsc/interpreter/ILoop.scala - src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala --- src/compiler/scala/tools/nsc/GenericRunnerSettings.scala | 3 --- src/compiler/scala/tools/nsc/Global.scala | 7 ++----- src/compiler/scala/tools/nsc/doc/model/Entity.scala | 3 --- src/compiler/scala/tools/nsc/interpreter/IMain.scala | 2 -- src/compiler/scala/tools/nsc/util/FreshNameCreator.scala | 5 ----- test/files/run/t1500.scala | 2 +- test/files/run/t1501.scala | 2 +- 7 files changed, 4 insertions(+), 20 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index b2f27d1925..00e374db62 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -39,7 +39,4 @@ class GenericRunnerSettings(error: String => Unit) extends Settings(error) { val nc = BooleanSetting( "-nc", "do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon" - - @deprecated("Use `nc` instead", "2.9.0") def nocompdaemon = nc - @deprecated("Use `save` instead", "2.9.0") def savecompiled = save } diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index cb5e2ad555..f46ed54441 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1743,12 +1743,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // deprecated method: but I see no simple way out, so I leave it for now. def forJVM = settings.target.value startsWith "jvm" override def forMSIL = settings.target.value startsWith "msil" - def forInteractive = onlyPresentation - def forScaladoc = onlyPresentation + def forInteractive = false + def forScaladoc = false def createJavadoc = false - - @deprecated("Use forInteractive or forScaladoc, depending on what you're after", "2.9.0") - def onlyPresentation = false } object Global { diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala index a63849e3f6..fb3b7a7dc6 100644 --- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala +++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala @@ -149,9 +149,6 @@ trait MemberEntity extends Entity { /** Some migration warning if this member has a migration annotation, or none otherwise. */ def migration: Option[Body] - @deprecated("Use `inDefinitionTemplates` instead", "2.9.0") - def inheritedFrom: List[TemplateEntity] - /** For members representing values: the type of the value returned by this member; for members * representing types: the type itself. 
*/ def resultType: TypeEntity diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 6d51dc1a39..92c2fc9768 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -187,8 +187,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends else null } } - @deprecated("Use `global` for access to the compiler instance.", "2.9.0") - lazy val compiler: global.type = global import global._ import definitions.{ScalaPackage, JavaLangPackage, termMember, typeMember} diff --git a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala index 554a96d627..75774e3498 100644 --- a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala +++ b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala @@ -14,11 +14,6 @@ trait FreshNameCreator { */ def newName(): String def newName(prefix: String): String - - @deprecated("use newName(prefix)", "2.9.0") - def newName(pos: scala.reflect.internal.util.Position, prefix: String): String = newName(prefix) - @deprecated("use newName()", "2.9.0") - def newName(pos: scala.reflect.internal.util.Position): String = newName() } object FreshNameCreator { diff --git a/test/files/run/t1500.scala b/test/files/run/t1500.scala index ab132b724f..6d2e7ee05f 100644 --- a/test/files/run/t1500.scala +++ b/test/files/run/t1500.scala @@ -21,7 +21,7 @@ object Test { val settings = new Settings() settings.classpath.value = System.getProperty("java.class.path") val tool = new interpreter.IMain(settings) - val global = tool.compiler + val global = tool.global import global._ import definitions._ diff --git a/test/files/run/t1501.scala b/test/files/run/t1501.scala index aba206bc7a..a2f7bb3a65 100644 --- a/test/files/run/t1501.scala +++ b/test/files/run/t1501.scala @@ -31,7 +31,7 @@ object Test { val settings = new Settings() settings.classpath.value = System.getProperty("java.class.path") val tool = new interpreter.IMain(settings) - val global = tool.compiler + val global = tool.global import global._ import definitions._ -- cgit v1.2.3 From d43a3efe813ade912d34b48bd11b56e8762c3e01 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Tue, 18 Sep 2012 16:09:45 +0200 Subject: SI-6388 Remove deprecated items in scala/collection --- src/library/scala/collection/JavaConversions.scala | 36 --- src/library/scala/collection/JavaConverters.scala | 33 --- src/library/scala/collection/immutable/List.scala | 245 --------------------- .../mutable/SynchronizedPriorityQueue.scala | 8 - 4 files changed, 322 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala index 8e4fdf537d..173ce2d71d 100644 --- a/src/library/scala/collection/JavaConversions.scala +++ b/src/library/scala/collection/JavaConversions.scala @@ -91,42 +91,6 @@ object JavaConversions extends WrapAsScala with WrapAsJava { @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSeqWrapper = Wrappers.MutableSeqWrapper @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSetWrapper = Wrappers.MutableSetWrapper @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val SeqWrapper = Wrappers.SeqWrapper - - // Note to implementors: the cavalcade of deprecated methods herein should - // serve as a warning to any who follow: don't overload 
implicit methods. - - @deprecated("use bufferAsJavaList instead", "2.9.0") - def asJavaList[A](b : mutable.Buffer[A]): ju.List[A] = bufferAsJavaList[A](b) - - @deprecated("use mutableSeqAsJavaList instead", "2.9.0") - def asJavaList[A](b : mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList[A](b) - - @deprecated("use seqAsJavaList instead", "2.9.0") - def asJavaList[A](b : Seq[A]): ju.List[A] = seqAsJavaList[A](b) - - @deprecated("use mutableSetAsJavaSet instead", "2.9.0") - def asJavaSet[A](s : mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet[A](s) - - @deprecated("use setAsJavaSet instead", "2.9.0") - def asJavaSet[A](s: Set[A]): ju.Set[A] = setAsJavaSet[A](s) - - @deprecated("use mutableMapAsJavaMap instead", "2.9.0") - def asJavaMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap[A, B](m) - - @deprecated("use mapAsJavaMap instead", "2.9.0") - def asJavaMap[A, B](m : Map[A, B]): ju.Map[A, B] = mapAsJavaMap[A, B](m) - - @deprecated("use iterableAsScalaIterable instead", "2.9.0") - def asScalaIterable[A](i : jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable[A](i) - - @deprecated("use collectionAsScalaIterable instead", "2.9.0") - def asScalaIterable[A](i : ju.Collection[A]): Iterable[A] = collectionAsScalaIterable[A](i) - - @deprecated("use mapAsScalaMap instead", "2.9.0") - def asScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m) - - @deprecated("use propertiesAsScalaMap instead", "2.9.0") - def asScalaMap(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p) } diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala index f8a9466caf..98afffe3b4 100755 --- a/src/library/scala/collection/JavaConverters.scala +++ b/src/library/scala/collection/JavaConverters.scala @@ -67,37 +67,4 @@ object JavaConverters extends DecorateAsJava with DecorateAsScala { type AsJavaEnumeration[A] = Decorators.AsJavaEnumeration[A] @deprecated("Don't access these decorators directly.", "2.10.0") type AsJavaDictionary[A, B] = Decorators.AsJavaDictionary[A, B] - - @deprecated("Use bufferAsJavaListConverter instead", "2.9.0") - def asJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = bufferAsJavaListConverter(b) - - @deprecated("Use mutableSeqAsJavaListConverter instead", "2.9.0") - def asJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = mutableSeqAsJavaListConverter(b) - - @deprecated("Use seqAsJavaListConverter instead", "2.9.0") - def asJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = seqAsJavaListConverter(b) - - @deprecated("Use mutableSetAsJavaSetConverter instead", "2.9.0") - def asJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = mutableSetAsJavaSetConverter(s) - - @deprecated("Use setAsJavaSetConverter instead", "2.9.0") - def asJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = setAsJavaSetConverter(s) - - @deprecated("use mutableMapAsJavaMapConverter instead", "2.9.0") - def asJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = mutableMapAsJavaMapConverter(m) - - @deprecated("Use mapAsJavaMapConverter instead", "2.9.0") - def asJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = mapAsJavaMapConverter(m) - - @deprecated("Use iterableAsScalaIterableConverter instead", "2.9.0") - def asScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = iterableAsScalaIterableConverter(i) - - @deprecated("Use collectionAsScalaIterableConverter instead", "2.9.0") - def asScalaIterableConverter[A](i : 
ju.Collection[A]): AsScala[Iterable[A]] = collectionAsScalaIterableConverter(i) - - @deprecated("Use mapAsScalaMapConverter instead", "2.9.0") - def asScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = mapAsScalaMapConverter(m) - - @deprecated("Use propertiesAsScalaMapConverter instead", "2.9.0") - def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = propertiesAsScalaMapConverter(p) } diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 83da68eb68..47cac9a1d5 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -311,9 +311,6 @@ sealed abstract class List[+A] extends AbstractSeq[A] override def toStream : Stream[A] = if (isEmpty) Stream.Empty else new Stream.Cons(head, tail.toStream) - - @deprecated("use `distinct` instead", "2.8.0") - def removeDuplicates: List[A] = distinct } /** The empty list. @@ -407,248 +404,6 @@ object List extends SeqFactory[List] { override def empty[A]: List[A] = Nil override def apply[A](xs: A*): List[A] = xs.toList - - /** Create a sorted list with element values `v,,>n+1,, = step(v,,n,,)` - * where `v,,0,, = start` and elements are in the range between `start` - * (inclusive) and `end` (exclusive). - * - * @param start the start value of the list - * @param end the end value of the list - * @param step the increment function of the list, which given `v,,n,,`, - * computes `v,,n+1,,`. Must be monotonically increasing - * or decreasing. - * @return the sorted list of all integers in range `[start;end)`. - */ - @deprecated("use `iterate` instead", "2.8.0") - def range(start: Int, end: Int, step: Int => Int): List[Int] = { - val up = step(start) > start - val down = step(start) < start - val b = new ListBuffer[Int] - var i = start - while ((!up || i < end) && (!down || i > end)) { - b += i - val next = step(i) - if (i == next) - throw new IllegalArgumentException("the step function did not make any progress on "+ i) - i = next - } - b.toList - } - - /** Create a list containing several copies of an element. - * - * @param n the length of the resulting list - * @param elem the element composing the resulting list - * @return a list composed of `n` elements all equal to `elem` - */ - @deprecated("use `fill` instead", "2.8.0") - def make[A](n: Int, elem: A): List[A] = { - val b = new ListBuffer[A] - var i = 0 - while (i < n) { - b += elem - i += 1 - } - b.toList - } - - /** Concatenate all the elements of a given list of lists. - * - * @param xss the list of lists that are to be concatenated - * @return the concatenation of all the lists - */ - @deprecated("use `xss.flatten` instead of `List.flatten(xss)`", "2.8.0") - def flatten[A](xss: List[List[A]]): List[A] = { - val b = new ListBuffer[A] - for (xs <- xss) { - var xc = xs - while (!xc.isEmpty) { - b += xc.head - xc = xc.tail - } - } - b.toList - } - - /** Transforms a list of pairs into a pair of lists. - * - * @param xs the list of pairs to unzip - * @return a pair of lists. - */ - @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0") - def unzip[A,B](xs: List[(A,B)]): (List[A], List[B]) = { - val b1 = new ListBuffer[A] - val b2 = new ListBuffer[B] - var xc = xs - while (!xc.isEmpty) { - b1 += xc.head._1 - b2 += xc.head._2 - xc = xc.tail - } - (b1.toList, b2.toList) - } - - /** Transforms an iterable of pairs into a pair of lists. - * - * @param xs the iterable of pairs to unzip - * @return a pair of lists. 
- */ - @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0") - def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) = - xs.foldRight[(List[A], List[B])]((Nil, Nil)) { - case ((x, y), (xs, ys)) => (x :: xs, y :: ys) - } - - /** - * Returns the `Left` values in the given `Iterable` of `Either`s. - */ - @deprecated("use `xs collect { case Left(x: A) => x }` instead of `List.lefts(xs)`", "2.8.0") - def lefts[A, B](es: Iterable[Either[A, B]]) = - es.foldRight[List[A]](Nil)((e, as) => e match { - case Left(a) => a :: as - case Right(_) => as - }) - - /** - * Returns the `Right` values in the given `Iterable` of `Either`s. - */ - @deprecated("use `xs collect { case Right(x: B) => x }` instead of `List.rights(xs)`", "2.8.0") - def rights[A, B](es: Iterable[Either[A, B]]) = - es.foldRight[List[B]](Nil)((e, bs) => e match { - case Left(_) => bs - case Right(b) => b :: bs - }) - - /** Transforms an Iterable of Eithers into a pair of lists. - * - * @param es the iterable of Eithers to separate - * @return a pair of lists. - */ - @deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead", "2.8.0") - def separate[A,B](es: Iterable[Either[A, B]]): (List[A], List[B]) = - es.foldRight[(List[A], List[B])]((Nil, Nil)) { - case (Left(a), (lefts, rights)) => (a :: lefts, rights) - case (Right(b), (lefts, rights)) => (lefts, b :: rights) - } - - /** Converts an iterator to a list. - * - * @param it the iterator to convert - * @return a list that contains the elements returned by successive - * calls to `it.next` - */ - @deprecated("use `it.toList` instead of `List.toList(it)`", "2.8.0") - def fromIterator[A](it: Iterator[A]): List[A] = it.toList - - /** Converts an array into a list. - * - * @param arr the array to convert - * @return a list that contains the same elements than `arr` - * in the same order - */ - @deprecated("use `array.toList` instead of `List.fromArray(array)`", "2.8.0") - def fromArray[A](arr: Array[A]): List[A] = fromArray(arr, 0, arr.length) - - /** Converts a range of an array into a list. - * - * @param arr the array to convert - * @param start the first index to consider - * @param len the length of the range to convert - * @return a list that contains the same elements than `arr` - * in the same order - */ - @deprecated("use `array.view(start, end).toList` instead of `List.fromArray(array, start, end)`", "2.8.0") - def fromArray[A](arr: Array[A], start: Int, len: Int): List[A] = { - var res: List[A] = Nil - var i = start + len - while (i > start) { - i -= 1 - res = arr(i) :: res - } - res - } - - /** Returns the list resulting from applying the given function `f` - * to corresponding elements of the argument lists. - * - * @param f function to apply to each pair of elements. - * @return `[f(a,,0,,,b,,0,,), ..., f(a,,n,,,b,,n,,)]` if the lists are - * `[a,,0,,, ..., a,,k,,]`, `[b,,0,,, ..., b,,l,,]` and - * `n = min(k,l)` - */ - @deprecated("use `(xs, ys).zipped.map(f)` instead of `List.map2(xs, ys)(f)`", "2.8.0") - def map2[A,B,C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = { - val b = new ListBuffer[C] - var xc = xs - var yc = ys - while (!xc.isEmpty && !yc.isEmpty) { - b += f(xc.head, yc.head) - xc = xc.tail - yc = yc.tail - } - b.toList - } - - /** Tests whether the given predicate `p` holds - * for all corresponding elements of the argument lists. - * - * @param f function to apply to each pair of elements. - * @return `(p(a0,b0) && - * ... 
&& p(an,bn))]` - * if the lists are `[a0, ..., ak]`; - * `[b0, ..., bl]` - * and `n = min(k,l)` - */ - @deprecated("use `(xs, ys).zipped.forall(f)` instead of `List.forall2(xs, ys)(f)`", "2.8.0") - def forall2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = { - var xc = xs - var yc = ys - while (!xc.isEmpty && !yc.isEmpty) { - if (!f(xc.head, yc.head)) return false - xc = xc.tail - yc = yc.tail - } - true - } - - /** Tests whether the given predicate `p` holds - * for some corresponding elements of the argument lists. - * - * @param f function to apply to each pair of elements. - * @return `n != 0 && (p(a0,b0) || - * ... || p(an,bn))]` if the lists are - * `[a0, ..., ak]`, - * `[b0, ..., bl]` and - * `n = min(k,l)` - */ - @deprecated("use `(xs, ys).zipped.exists(f)` instead of `List.exists2(xs, ys)(f)`", "2.8.0") - def exists2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = { - var xc = xs - var yc = ys - while (!xc.isEmpty && !yc.isEmpty) { - if (f(xc.head, yc.head)) return true - xc = xc.tail - yc = yc.tail - } - false - } - - /** Transposes a list of lists. - * pre: All element lists have the same length. - * - * @param xss the list of lists - * @return the transposed list of lists - */ - @deprecated("use `xss.transpose` instead of `List.transpose(xss)`", "2.8.0") - def transpose[A](xss: List[List[A]]): List[List[A]] = { - val buf = new ListBuffer[List[A]] - var yss = xss - while (!yss.head.isEmpty) { - buf += (yss map (_.head)) - yss = (yss map (_.tail)) - } - buf.toList - } } /** Only used for list serialization */ diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala index bc32537798..120b3d66a0 100644 --- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala +++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala @@ -73,14 +73,6 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu */ override def head: A = synchronized { super.head } - /** Returns the element with the highest priority in the queue, - * or throws an error if there is no element contained in the queue. - * - * @return the element with the highest priority. - */ - @deprecated("Use `head` instead.", "2.9.0") - override def max: A = synchronized { super.max } - /** Removes all elements from the queue. After this operation is completed, * the queue will be empty. */ -- cgit v1.2.3 From c52f91ca0db573fdfc879ae199a237db256d7523 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Fri, 21 Sep 2012 20:04:05 +0200 Subject: SI-6388 Remove deprecated items in scala/math --- src/library/scala/math/BigDecimal.scala | 6 ------ src/library/scala/math/BigInt.scala | 6 ------ 2 files changed, 12 deletions(-) (limited to 'src') diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala index eb73d58d1c..a721fd647c 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -25,12 +25,6 @@ object BigDecimal { private val maxCached = 512 val defaultMathContext = MathContext.DECIMAL128 - @deprecated("Use Long.MinValue", "2.9.0") - val MinLong = new BigDecimal(BigDec valueOf Long.MinValue, defaultMathContext) - - @deprecated("Use Long.MaxValue", "2.9.0") - val MaxLong = new BigDecimal(BigDec valueOf Long.MaxValue, defaultMathContext) - /** Cache ony for defaultMathContext using BigDecimals in a small range. 
*/ private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1) diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index 3eb41053f7..9218e41ceb 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -23,12 +23,6 @@ object BigInt { private val cache = new Array[BigInt](maxCached - minCached + 1) private val minusOne = BigInteger.valueOf(-1) - @deprecated("Use Long.MinValue", "2.9.0") - val MinLong = BigInt(Long.MinValue) - - @deprecated("Use Long.MaxValue", "2.9.0") - val MaxLong = BigInt(Long.MaxValue) - /** Constructs a `BigInt` whose value is equal to that of the * specified integer value. * -- cgit v1.2.3 From e3cec78518a0529152fe6beda3cc6c9a14ea0f9b Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Tue, 18 Sep 2012 03:44:59 +0200 Subject: SI-6388 Remove first parts of deprecated @serializable annotation --- .../scala/tools/nsc/backend/jvm/GenASM.scala | 1 - .../scala/tools/nsc/backend/jvm/GenJVM.scala | 1 - .../scala/tools/nsc/backend/msil/GenMSIL.scala | 13 -------- .../scala/tools/nsc/doc/html/SyntaxHigh.scala | 2 +- src/library/scala/package.scala | 6 +--- src/reflect/scala/reflect/internal/Symbols.scala | 5 +--- test/files/jvm/t1143-2/t1143-2.scala | 26 +++++++--------- test/files/jvm/t1143.scala | 12 +++----- test/files/jvm/t1600.scala | 3 +- test/files/jvm/typerep.scala | 26 ---------------- test/files/pos/annotations.scala | 2 +- test/files/pos/attributes.scala | 2 ++ test/files/pos/spec-annotations.scala | 2 +- test/files/pos/t1385.scala | 4 +-- test/files/pos/t640.scala | 4 +-- test/files/run/t3038d.scala | 4 +-- test/files/run/t3667.check | 3 -- test/files/run/t3667.scala | 35 ---------------------- 18 files changed, 28 insertions(+), 123 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index eb38a80d60..34d46e27fe 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -1342,7 +1342,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { // Additional interface parents based on annotations and other cues def newParentForAttr(attr: Symbol): Option[Symbol] = attr match { - case SerializableAttr => Some(SerializableClass) case CloneableAttr => Some(CloneableClass) case RemoteAttr => Some(RemoteInterfaceClass) case _ => None diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index e11704d694..617c641fa9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -218,7 +218,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with // Additional interface parents based on annotations and other cues def newParentForAttr(attr: Symbol): Option[Symbol] = attr match { - case SerializableAttr => Some(SerializableClass) case CloneableAttr => Some(JavaCloneableClass) case RemoteAttr => Some(RemoteInterfaceClass) case _ => None diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala index 242b60c769..39ea074dc0 100644 --- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala +++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala @@ -124,7 +124,6 @@ abstract class GenMSIL extends SubComponent { // Scala attributes // symtab.Definitions -> object (singleton..) 
- val SerializableAttr = definitions.SerializableAttr.tpe val CloneableAttr = definitions.CloneableAttr.tpe val TransientAtt = definitions.TransientAttr.tpe // remoting: the architectures are too different, no mapping (no portable code @@ -1633,18 +1632,6 @@ abstract class GenMSIL extends SubComponent { mf = mf | (if (sym hasFlag Flags.ABSTRACT) TypeAttributes.Abstract else 0) mf = mf | (if (sym.isTrait && !sym.isImplClass) TypeAttributes.Interface else TypeAttributes.Class) mf = mf | (if (sym isFinal) TypeAttributes.Sealed else 0) - - sym.annotations foreach { a => a match { - case AnnotationInfo(SerializableAttr, _, _) => - // TODO: add the Serializable TypeAttribute also if the annotation - // System.SerializableAttribute is present (.net annotation, not scala) - // Best way to do it: compare with - // definitions.getClass("System.SerializableAttribute").tpe - // when frontend available - mf = mf | TypeAttributes.Serializable - case _ => () - }} - mf // static: not possible (or?) } diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala index e21ee07963..01c0b78efe 100644 --- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala +++ b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala @@ -40,7 +40,7 @@ private[html] object SyntaxHigh { /** Standard library classes/objects, sorted alphabetically */ val standards = Array ( - "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Application", "Array", + "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Array", "Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest", "Console", "Double", "Enumeration", "Float", "Function", "Int", "List", "Long", "Manifest", "Map", diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala index a41cdedfa9..9b7ca64b7e 100644 --- a/src/library/scala/package.scala +++ b/src/library/scala/package.scala @@ -34,9 +34,6 @@ package object scala { override def toString = "object AnyRef" } - @deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0") - type serializable = annotation.serializable - @deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0") type cloneable = annotation.cloneable @@ -126,9 +123,8 @@ package object scala { type deprecatedName = annotation.deprecatedName type inline = annotation.inline type native = annotation.native - type noinline = noannotation.inline + type noinline = annotation.noinline type remote = annotation.remote - type serializable = annotation.serializable type specialized = annotation.specialized type transient = annotation.transient type throws = annotation.throws diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 13436f4251..a252437bcf 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -709,10 +709,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr) - def isSerializable = ( - info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass) - || hasAnnotation(SerializableAttr) // last part can be removed, @serializable annotation is deprecated - ) + def isSerializable = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass) def hasBridgeAnnotation = hasAnnotation(BridgeClass) def isDeprecated = hasAnnotation(DeprecatedAttr) 
def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0) diff --git a/test/files/jvm/t1143-2/t1143-2.scala b/test/files/jvm/t1143-2/t1143-2.scala index 44b1febd8b..13ab13b48c 100644 --- a/test/files/jvm/t1143-2/t1143-2.scala +++ b/test/files/jvm/t1143-2/t1143-2.scala @@ -16,43 +16,39 @@ object Serialize { } } -@serializable @SerialVersionUID(1L) -class VarModel[T]( getter: => T, setter: T => Unit ) -{ +class VarModel[T](getter: => T, setter: T => Unit) extends Serializable { Serialize.write(getter) Serialize.write(setter) - def this( getter: => T ) = this( getter, null ) + def this(getter: => T) = this(getter, null) def getObject: AnyRef = getter.asInstanceOf[AnyRef] - def setObject( v: AnyRef ) = { - if( setter==null ) - throw new RuntimeException( "Tried to set readonly model!") - setter( v.asInstanceOf[T] ) + def setObject(v: AnyRef) = { + if(setter==null) + throw new RuntimeException("Tried to set readonly model!") + setter(v.asInstanceOf[T]) } def detach = () } -@serializable @SerialVersionUID(1L) -class Printer( p: VarModel[String] ) { - def print = println( p.getObject ); +class Printer(p: VarModel[String]) extends Serializable { + def print = println(p.getObject) } class Component extends Marker { } class Form extends Component { } -@serializable @SerialVersionUID(1L) -class Main { +class Main extends Serializable { var pass = "pass" - def main(args : Array[String]) : Unit = { + def main(args: Array[String]): Unit = { val f = new Form { - val p = new Printer( new VarModel( pass, s => pass = s ) ); + val p = new Printer(new VarModel(pass, s => pass = s)) p.print } () diff --git a/test/files/jvm/t1143.scala b/test/files/jvm/t1143.scala index 7dd374f432..eb03c7224e 100644 --- a/test/files/jvm/t1143.scala +++ b/test/files/jvm/t1143.scala @@ -16,9 +16,8 @@ object Serialize { } } -@serializable @SerialVersionUID(1L) -class VarModel[T](getter: => T, setter: T => Unit) { +class VarModel[T](getter: => T, setter: T => Unit) extends Serializable { Serialize.write(getter) Serialize.write(setter) @@ -35,23 +34,20 @@ class VarModel[T](getter: => T, setter: T => Unit) { def detach = () } -@serializable @SerialVersionUID(1L) -class Printer(p: VarModel[String]) { +class Printer(p: VarModel[String]) extends Serializable { def print = println(p.getObject) } -@serializable @SerialVersionUID(1L) -class Component { +class Component extends Serializable { } class Form extends Component { } -@serializable @SerialVersionUID(1L) -class Main { +class Main extends Serializable { var pass = "pass" def main(args: Array[String]) { val f = new Form { diff --git a/test/files/jvm/t1600.scala b/test/files/jvm/t1600.scala index 7e23687425..69179c1ba4 100644 --- a/test/files/jvm/t1600.scala +++ b/test/files/jvm/t1600.scala @@ -69,8 +69,7 @@ object Test { var hashCodeModifier = 0 } - @serializable - class Foo { + class Foo extends Serializable { override def hashCode = System.identityHashCode(this) + Foo.hashCodeModifier } } diff --git a/test/files/jvm/typerep.scala b/test/files/jvm/typerep.scala index 3befc7ff3f..47bd16a467 100644 --- a/test/files/jvm/typerep.scala +++ b/test/files/jvm/typerep.scala @@ -280,100 +280,74 @@ object TypeRep { override def toString = "Nothing" } - @serializable case class ClassRep[A](elemRep: TypeRep[A]) extends TypeRep[Class[A]] { override def toString = "Class[" + elemRep + "]" } - @serializable case class SomeRep[A](elemRep: TypeRep[A]) extends TypeRep[Some[A]] { override def toString = "Some[" + elemRep + "]" } - @serializable case class NoneRep[A](elemRep: 
TypeRep[A]) extends TypeRep[Option[A]] { override def toString = "None[" + elemRep + "]" } - - @serializable case class ListRep[A](elemRep: TypeRep[A]) extends TypeRep[List[A]] { override def toString = "List[" + elemRep + "]" } - - @serializable case class ArrayRep[A](elemRep: TypeRep[A]) extends TypeRep[Array[A]] { override def toString = "Array[" + elemRep + "]" } - - @serializable case class Tuple2Rep[A1, A2](_1: TypeRep[A1], _2: TypeRep[A2]) extends TypeRep[(A1, A2)] { override def toString = "Tuple2[" + _1 + ", " + _2 + "]" } - @serializable case class Tuple3Rep[A1, A2, A3](_1: TypeRep[A1], _2: TypeRep[A2], _3: TypeRep[A3]) extends TypeRep[Tuple3[A1, A2, A3]] { override def toString = "Tuple3[" + _1 + ", " + _2 + ", " + _3 + "]" } - @serializable case class Tuple4Rep[A1, A2, A3, A4](_1: TypeRep[A1], _2: TypeRep[A2], _3: TypeRep[A3], _4: TypeRep[A4]) extends TypeRep[Tuple4[A1, A2, A3, A4]] { override def toString = "Tuple4[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + "]" } - @serializable case class Tuple5Rep[A1, A2, A3, A4, A5](_1: TypeRep[A1], _2: TypeRep[A2], _3: TypeRep[A3], _4: TypeRep[A4], _5: TypeRep[A5]) extends TypeRep[Tuple5[A1, A2, A3, A4, A5]] { override def toString = "Tuple5[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + "]" } - @serializable case class Tuple6Rep[A1, A2, A3, A4, A5, A6](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6]) extends TypeRep[Tuple6[A1, A2, A3, A4, A5, A6]] { override def toString = "Tuple6[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + "]" } - @serializable case class Tuple7Rep[A1, A2, A3, A4, A5, A6, A7](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6], val _7: TypeRep[A7]) extends TypeRep[Tuple7[A1, A2, A3, A4, A5, A6, A7]] { override def toString = "Tuple7[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + ", " + _7 + "]" } - @serializable case class Tuple8Rep[A1, A2, A3, A4, A5, A6, A7, A8](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6], val _7: TypeRep[A7], val _8: TypeRep[A8]) extends TypeRep[Tuple8[A1, A2, A3, A4, A5, A6, A7, A8]] { override def toString = "Tuple8[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + ", " + _7 + ", " + _8 + "]" } - @serializable case class Tuple9Rep[A1, A2, A3, A4, A5, A6, A7, A8, A9](val _1: TypeRep[A1], val _2: TypeRep[A2], val _3: TypeRep[A3], val _4: TypeRep[A4], val _5: TypeRep[A5], val _6: TypeRep[A6], val _7: TypeRep[A7], val _8: TypeRep[A8], val _9: TypeRep[A9]) extends TypeRep[Tuple9[A1, A2, A3, A4, A5, A6, A7, A8, A9]] { override def toString = "Tuple9[" + _1 + ", " + _2 + ", " + _3 + ", " + _4 + ", " + _5 + ", " + _6 + ", " + _7 + ", " + _8 + ", " + _9 + "]" } - @serializable case class Function1Rep[A1, B](a1: TypeRep[A1], b: TypeRep[B]) extends TypeRep[Function1[A1, B]] { override def toString = "Function1[" + a1 + ", " + b + "]" } - @serializable case class Function2Rep[A1, A2, B](a1: TypeRep[A1], a2: TypeRep[A2], b: TypeRep[B]) extends TypeRep[Function2[A1, A2, B]] { override def toString = "Function2[" + a1 + ", " + a2 + ", " + b + "]" } - @serializable case class Function3Rep[A1, A2, A3, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], b: TypeRep[B]) extends TypeRep[Function3[A1, A2, A3, B]] { override def toString = "Function3[" + a1 + ", " + a2 + ", " + a3 + ", " + b + "]" } - @serializable 
case class Function4Rep[A1, A2, A3, A4, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], b: TypeRep[B]) extends TypeRep[Function4[A1, A2, A3, A4, B]] { override def toString = "Function4[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + b + "]" } - @serializable case class Function5Rep[A1, A2, A3, A4, A5, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], b: TypeRep[B]) extends TypeRep[Function5[A1, A2, A3, A4, A5, B]] { override def toString = "Function5[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + b + "]" } - @serializable case class Function6Rep[A1, A2, A3, A4, A5, A6, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], b: TypeRep[B]) extends TypeRep[Function6[A1, A2, A3, A4, A5, A6, B]] { override def toString = "Function6[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + b + "]" } - @serializable case class Function7Rep[A1, A2, A3, A4, A5, A6, A7, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], a7: TypeRep[A7], b: TypeRep[B]) extends TypeRep[Function7[A1, A2, A3, A4, A5, A6, A7, B]] { override def toString = "Function7[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + a7 + ", " + b + "]" } - @serializable case class Function8Rep[A1, A2, A3, A4, A5, A6, A7, A8, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], a7: TypeRep[A7], a8: TypeRep[A8], b: TypeRep[B]) extends TypeRep[Function8[A1, A2, A3, A4, A5, A6, A7, A8, B]] { override def toString = "Function8[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + a7 + ", " + a8 + b + "]" } - @serializable case class Function9Rep[A1, A2, A3, A4, A5, A6, A7, A8, A9, B](a1: TypeRep[A1], a2: TypeRep[A2], a3: TypeRep[A3], a4: TypeRep[A4], a5: TypeRep[A5], a6: TypeRep[A6], a7: TypeRep[A7], a8: TypeRep[A8], a9: TypeRep[A9], b: TypeRep[B]) extends TypeRep[Function9[A1, A2, A3, A4, A5, A6, A7, A8, A9, B]] { override def toString = "Function9[" + a1 + ", " + a2 + ", " + a3 + ", " + a4 + ", " + a5 + ", " + a6 + ", " + a7 + ", " + a8 + ", " + b + "]" } /* - @serializable case class ObjectRep[A](c: Class) extends TypeRep[A] { override def toString = c.getName } diff --git a/test/files/pos/annotations.scala b/test/files/pos/annotations.scala index 706a715bad..501e2a6bd3 100644 --- a/test/files/pos/annotations.scala +++ b/test/files/pos/annotations.scala @@ -2,7 +2,7 @@ class ann(i: Int) extends scala.annotation.Annotation class cfann(x: String) extends annotation.ClassfileAnnotation // annotations on abstract types -abstract class C1[@serializable @cloneable +T, U, V[_]] +abstract class C1[@cloneable +T, U, V[_]] abstract class C2[@deprecated @ann(1) T <: Number, V] diff --git a/test/files/pos/attributes.scala b/test/files/pos/attributes.scala index ec735d0aae..60e00bff7d 100644 --- a/test/files/pos/attributes.scala +++ b/test/files/pos/attributes.scala @@ -1,3 +1,5 @@ +class serializable extends annotation.StaticAnnotation + @serializable class C1; @serializable @volatile class C2; @serializable @volatile class C3; diff --git a/test/files/pos/spec-annotations.scala b/test/files/pos/spec-annotations.scala index 48281e5df5..6c1f737470 100644 --- a/test/files/pos/spec-annotations.scala +++ b/test/files/pos/spec-annotations.scala @@ -1,7 +1,7 @@ class ann(i: Int) extends scala.annotation.Annotation // annotations on abstract types -abstract 
class C1[@serializable @cloneable +T, U, V[_]] +abstract class C1[@cloneable +T, U, V[_]] abstract class C2[@deprecated @ann(1) T <: Number, V] diff --git a/test/files/pos/t1385.scala b/test/files/pos/t1385.scala index 59953bcc39..6fe7308281 100644 --- a/test/files/pos/t1385.scala +++ b/test/files/pos/t1385.scala @@ -1,3 +1,3 @@ -@serializable object Test { - private def readResolve:AnyRef = this +object Test extends Serializable { + private def readResolve: AnyRef = this } diff --git a/test/files/pos/t640.scala b/test/files/pos/t640.scala index 55f61df8af..45608bc3d4 100644 --- a/test/files/pos/t640.scala +++ b/test/files/pos/t640.scala @@ -1,2 +1,2 @@ -@serializable class A -@serializable class B extends A +class A extends Serializable +class B extends A with Serializable diff --git a/test/files/run/t3038d.scala b/test/files/run/t3038d.scala index 6cd2d83776..9550165235 100644 --- a/test/files/run/t3038d.scala +++ b/test/files/run/t3038d.scala @@ -16,9 +16,7 @@ trait Foo { } } - -@serializable -class Bar extends Foo { +class Bar extends Foo with Serializable { @transient protected var first: Any = null def size = a @transient var second: Any = null diff --git a/test/files/run/t3667.check b/test/files/run/t3667.check index bbe5d1bc48..6375c88997 100644 --- a/test/files/run/t3667.check +++ b/test/files/run/t3667.check @@ -1,6 +1,3 @@ -1 -2 -3 4 2 3 diff --git a/test/files/run/t3667.scala b/test/files/run/t3667.scala index f30d57ce3a..ada09d5886 100644 --- a/test/files/run/t3667.scala +++ b/test/files/run/t3667.scala @@ -1,27 +1,9 @@ object Test { def main(args: Array[String]) { - val o1 = new Outer1 - val o2 = new Outer2 - val o3 = new Outer3 val o4 = new Outer4 val o5 = new Outer5 val o6 = new Outer6 - println(1) - ser(new o1.Inner(1)) - o1.Inner // make sure the Inner$module field of the Outer1 instance is initialized! 
- ser(new o1.Inner(1)) - - println(2) - ser(new o2.Inner(1)) - o2.Inner - ser(new o2.Inner(1)) - - println(3) - ser(new o3.Inner(1)) - o3.Inner - ser(new o3.Inner(1)) - println(4) ser(new o4.Inner(1)) o4.Inner @@ -54,23 +36,6 @@ object Test { } -@serializable -class Outer1 { - @serializable - class Inner(x: Int = 1) -} - -@serializable -class Outer2 { - case class Inner(x: Int = 1) -} - -@serializable -class Outer3 { - case class Inner(x: Int) -} - - class Outer4 extends Serializable { class Inner(x: Int = 1) extends Serializable } -- cgit v1.2.3 From 025e1aeb9b9e558a261930164e2c837494ddd25d Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Fri, 21 Sep 2012 22:28:42 +0200 Subject: SI-6388 Remove deprecated item in scala/swing --- src/swing/scala/swing/TabbedPane.scala | 3 --- 1 file changed, 3 deletions(-) (limited to 'src') diff --git a/src/swing/scala/swing/TabbedPane.scala b/src/swing/scala/swing/TabbedPane.scala index ca1eb2b64c..03e8c12c9c 100644 --- a/src/swing/scala/swing/TabbedPane.scala +++ b/src/swing/scala/swing/TabbedPane.scala @@ -112,9 +112,6 @@ class TabbedPane extends Component with Publisher { */ def tabPlacement_=(b: Alignment.Value) { peer.setTabPlacement(b.id) } - @deprecated("Use tabPlacement_=() instead.", "2.9.1") - def tabPlacement(b: Alignment.Value) { peer.setTabPlacement(b.id) } - /** * The current page selection */ -- cgit v1.2.3 From 3b73e0dd101aade6478517016df80975e6b996bd Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Fri, 21 Sep 2012 21:46:52 +0200 Subject: SI-6388 Remove some remaining, minor deprecations --- src/library/scala/io/Position.scala | 8 -------- src/reflect/scala/reflect/internal/util/Set.scala | 2 -- 2 files changed, 10 deletions(-) (limited to 'src') diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala index dae478f31a..8c2d62f5b1 100644 --- a/src/library/scala/io/Position.scala +++ b/src/library/scala/io/Position.scala @@ -68,14 +68,6 @@ abstract class Position { } object Position extends Position { - /** The undefined position */ - @deprecated("This will be removed", "2.9.0") - final val NOPOS = 0 - - /** The first position in a source file */ - @deprecated("This will be removed", "2.9.0") - final val FIRSTPOS = encode(1, 1) - def checkInput(line: Int, column: Int) { if (line < 0) throw new IllegalArgumentException(line + " < 0") diff --git a/src/reflect/scala/reflect/internal/util/Set.scala b/src/reflect/scala/reflect/internal/util/Set.scala index d708a09de7..94743f2069 100644 --- a/src/reflect/scala/reflect/internal/util/Set.scala +++ b/src/reflect/scala/reflect/internal/util/Set.scala @@ -18,8 +18,6 @@ abstract class Set[T <: AnyRef] { def apply(x: T): Boolean = contains(x) - @deprecated("use `iterator` instead", "2.9.0") def elements = iterator - def contains(x: T): Boolean = findEntry(x) ne null -- cgit v1.2.3 From d1a35ccf001301881dac2baca972234d8e4d8d25 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 8 Oct 2012 16:23:36 -0700 Subject: Possible fix for continuations bug. It comes looking for an implicit from (A @foo) => B and gives up, despite the fact that there is an implicit from A => B. Maybe there is some good reason for this, and/or I would fully believe there is a better way to fix it, but I'll propose this and wait to hear about the good reason and/or better way. 
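For reference, the shape of code this change is meant to accept — condensed from the test added below (implicit-infer-annotations.scala), so it only compiles with the continuations plugin enabled (-P:continuations:enable); the object name is illustrative:

    import scala.util.continuations._

    object Demo {
      // `shift` produces an Int whose type carries a cps annotation
      // (roughly Int @cpsParam[Range, Range]); calling `to` still requires the
      // ordinary Int => RichInt view, which inference previously failed to
      // find once the annotation was attached to the source type.
      def m1 = reset { shift { f: (Int => Range) => f(5) }.to(10) }
    }
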
--- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- .../implicit-infer-annotations.check | 5 ++ .../implicit-infer-annotations.scala | 59 ++++++++++++++++++++++ 3 files changed, 65 insertions(+), 1 deletion(-) create mode 100644 test/files/continuations-run/implicit-infer-annotations.check create mode 100644 test/files/continuations-run/implicit-infer-annotations.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c832987de5..688b4b5160 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -194,7 +194,7 @@ trait Typers extends Modes with Adaptations with Tags { case PolyType(_, _) => EmptyTree case _ => def wrapImplicit(from: Type): Tree = { - val result = inferImplicit(tree, functionType(from :: Nil, to), reportAmbiguous, true, context, saveErrors) + val result = inferImplicit(tree, functionType(from.withoutAnnotations :: Nil, to), reportAmbiguous, true, context, saveErrors) if (result.subst != EmptyTreeTypeSubstituter) { result.subst traverse tree notifyUndetparamsInferred(result.subst.from, result.subst.to) diff --git a/test/files/continuations-run/implicit-infer-annotations.check b/test/files/continuations-run/implicit-infer-annotations.check new file mode 100644 index 0000000000..e8206c4319 --- /dev/null +++ b/test/files/continuations-run/implicit-infer-annotations.check @@ -0,0 +1,5 @@ +Range(5, 6, 7, 8, 9, 10) +Range(5, 6, 7, 8, 9, 10) +15 +List(10, 1, 2, 3) +Range(5, 6, 7, 8, 9, 10) diff --git a/test/files/continuations-run/implicit-infer-annotations.scala b/test/files/continuations-run/implicit-infer-annotations.scala new file mode 100644 index 0000000000..3f0e959f60 --- /dev/null +++ b/test/files/continuations-run/implicit-infer-annotations.scala @@ -0,0 +1,59 @@ +import annotation._ + +object A { + class foo[-B,+C] extends StaticAnnotation with TypeConstraint + + def shift[A, B, C](fun: (A => B) => C): A @foo[B, C] = ??? + def reset[A, C](ctx: => (A @foo[A, C])): C = ??? + + def m1 = reset { shift { f: (Int => Range) => f(5) }.to(10) } +} + +object B { + import scala.util.continuations._ + + def m1 = reset { shift { f: (Int => Range) => f(5) }.to(10) } + def m2 = reset { val a = shift { f: (Int => Range) => f(5) } ; a.to(10) } + + val x1 = reset{ + shift{ cont: (Int => Range) => + cont(5) + }.to(10) + } + + val x2 = reset{ + val a = shift{ cont: (Int => Range) => + cont(5) + } + a.to(10) + } // x is now Range(5, 6, 7, 8, 9, 10) + + val x3 = reset{ + shift{ cont: (Int => Int) => + cont(5) + } + 10 + } // x is now 15 + + val x4 = reset{ + 10 :: shift{ cont: (List[Int] => List[Int]) => + cont(List(1, 2, 3)) + } + } // x is List(10, 1, 2, 3) + + val x5 = reset{ + new scala.runtime.RichInt(shift{ cont: (Int => Range) => + cont(5) + }) to 10 + } +} + +object Test { + def main(args: Array[String]): Unit = { + import B._ + println(x1) + println(x2) + println(x3) + println(x4) + println(x5) + } +} -- cgit v1.2.3 From 981424b376e8e66253c2ec863ca1222e41d8b374 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Fri, 21 Sep 2012 14:10:33 +0200 Subject: Closes SI-6358. Move accessor generation for lazy vals to typers. Until now lazy accessors were handled somehow special because their symbol was created in typers but the corresponding tree was only added in Refchecks. This irregularity caused serious problems for value classes. 
Also it now looks just better when lazy value is treated in a similar way as other fields. I needed to adapt reifier so that it handles the new implementation correctly. Previously it had to recreate lazy val only by removing defdef and renaming. Now we basically need to recreate lazy val from scratch. There is one minor change to cps plugin but that is still fine because lazy vals were never really part of the transformation. Some range positions needed to be fixed manually. We could do it at the creation time but that would require a lot more "if (symbol.isLazy)" conditions for MethodSyntheis and Symbol/Tree creation and would just unnecessary complicate api. If someone has a better idea, please speak up. Range positions changes were necessary because previously accessors were created at refchecks and they weren't checked by validator (even though they were wrong). This commit removes lazy val implementation restriction introduced for 2.10.0. --- .../scala/reflect/reify/phases/Reshape.scala | 61 +++++++++++++++++---- .../scala/tools/nsc/transform/LazyVals.scala | 3 ++ .../tools/nsc/typechecker/MethodSynthesis.scala | 63 ++++++++++++++++++---- .../scala/tools/nsc/typechecker/Namers.scala | 6 +-- .../tools/nsc/typechecker/PatternMatching.scala | 4 -- .../scala/tools/nsc/typechecker/RefChecks.scala | 46 ++++------------ .../scala/tools/nsc/typechecker/Typers.scala | 15 +++--- .../tools/selectivecps/CPSAnnotationChecker.scala | 6 ++- .../tools/selectivecps/SelectiveANFTransform.scala | 9 ++-- src/reflect/scala/reflect/internal/Trees.scala | 18 ++++--- test/files/continuations-neg/lazy.check | 8 ++- .../files/neg/valueclasses-impl-restrictions.check | 12 ++--- .../files/neg/valueclasses-impl-restrictions.scala | 1 - test/files/pos/t6358.scala | 6 +++ test/files/run/reify_lazyunit.check | 3 ++ test/files/run/reify_lazyunit.scala | 13 +++++ 16 files changed, 179 insertions(+), 95 deletions(-) create mode 100644 test/files/pos/t6358.scala create mode 100644 test/files/run/reify_lazyunit.check create mode 100644 test/files/run/reify_lazyunit.scala (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index b5894e8eb6..ef099f9f1b 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -46,13 +46,13 @@ trait Reshape { if (discard) hk else ta case classDef @ ClassDef(mods, name, params, impl) => val Template(parents, self, body) = impl - var body1 = trimAccessors(classDef, body) + var body1 = trimAccessors(classDef, reshapeLazyVals(body)) body1 = trimSyntheticCaseClassMembers(classDef, body1) var impl1 = Template(parents, self, body1).copyAttrs(impl) ClassDef(mods, name, params, impl1).copyAttrs(classDef) case moduledef @ ModuleDef(mods, name, impl) => val Template(parents, self, body) = impl - var body1 = trimAccessors(moduledef, body) + var body1 = trimAccessors(moduledef, reshapeLazyVals(body)) body1 = trimSyntheticCaseClassMembers(moduledef, body1) var impl1 = Template(parents, self, body1).copyAttrs(impl) ModuleDef(mods, name, impl1).copyAttrs(moduledef) @@ -60,15 +60,11 @@ trait Reshape { val discardedParents = parents collect { case tt: TypeTree => tt } filter isDiscarded if (reifyDebug && discardedParents.length > 0) println("discarding parents in Template: " + discardedParents.mkString(", ")) val parents1 = parents diff discardedParents - val body1 = trimSyntheticCaseClassCompanions(body) + val body1 = 
reshapeLazyVals(trimSyntheticCaseClassCompanions(body)) Template(parents1, self, body1).copyAttrs(template) case block @ Block(stats, expr) => - val stats1 = trimSyntheticCaseClassCompanions(stats) + val stats1 = reshapeLazyVals(trimSyntheticCaseClassCompanions(stats)) Block(stats1, expr).copyAttrs(block) - case valdef @ ValDef(mods, name, tpt, rhs) if valdef.symbol.isLazy => - if (reifyDebug) println("dropping $lzy in lazy val's name: " + tree) - val name1 = if (name endsWith nme.LAZY_LOCAL) name dropRight nme.LAZY_LOCAL.length else name - ValDef(mods, name1, tpt, rhs).copyAttrs(valdef) case unapply @ UnApply(fun, args) => def extractExtractor(tree: Tree): Tree = { val Apply(fun, args) = tree @@ -248,6 +244,20 @@ trait Reshape { New(TypeTree(ann.atp) setOriginal extractOriginal(ann.original), List(args)) } + private def toPreTyperLazyVal(ddef: DefDef): ValDef = { + def extractRhs(rhs: Tree) = rhs match { + case Block(Assign(lhs, rhs)::Nil, _) if lhs.symbol.isLazy => rhs + case _ => rhs // unit or trait case + } + val DefDef(mods0, name0, _, _, tpt0, rhs0) = ddef + val name1 = nme.dropLocalSuffix(name0) + val Modifiers(flags0, privateWithin0, annotations0) = mods0 + var flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD) + val mods1 = Modifiers(flags1, privateWithin0, annotations0) setPositions mods0.positions + val mods2 = toPreTyperModifiers(mods1, ddef.symbol) + ValDef(mods2, name1, tpt0, extractRhs(rhs0)) + } + private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = { val symdefs = (stats collect { case vodef: ValOrDefDef => vodef } map (vodeff => vodeff.symbol -> vodeff)).toMap val accessors = scala.collection.mutable.Map[ValDef, List[DefDef]]() @@ -270,7 +280,7 @@ trait Reshape { }); var stats1 = stats flatMap { - case vdef @ ValDef(mods, name, tpt, rhs) => + case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isLazy => val mods1 = if (accessors.contains(vdef)) { val ddef = accessors(vdef)(0) // any accessor will do val Modifiers(flags, privateWithin, annotations) = mods @@ -287,7 +297,9 @@ trait Reshape { val vdef1 = ValDef(mods2, name1, tpt, rhs) if (reifyDebug) println("resetting visibility of field: %s => %s".format(vdef, vdef1)) Some(vdef1) // no copyAttrs here, because new ValDef and old symbols are now out of sync - case ddef @ DefDef(mods, name, tparams, vparamss, tpt, rhs) => + case ddef @ DefDef(mods, name, tparams, vparamss, tpt, rhs) if !ddef.mods.isLazy => + // lazy val accessors are removed in reshapeLazyVals + // as they are needed to recreate lazy vals if (accessors.values.exists(_.contains(ddef))) { if (reifyDebug) println("discarding accessor method: " + ddef) None @@ -301,6 +313,35 @@ trait Reshape { stats1 } + private def reshapeLazyVals(stats: List[Tree]): List[Tree] = { + val lazyvaldefs:Map[Symbol, DefDef] = stats.collect({ case ddef: DefDef if ddef.mods.isLazy => ddef }). + map((ddef: DefDef) => ddef.symbol -> ddef).toMap + // lazy valdef and defdef are in the same block. 
+ // only that valdef needs to have its rhs rebuilt from defdef + stats flatMap (stat => stat match { + case vdef @ ValDef(mods0, name0, tpt0, rhs0) if vdef.symbol.isLazy => + if (reifyDebug) println(s"reconstructing original lazy value for $vdef") + val ddefSym = vdef.symbol.lazyAccessor + val vdef1 = lazyvaldefs.get(ddefSym) match { + case Some(ddef) => + toPreTyperLazyVal(ddef) + case None => + if (reifyDebug) println("couldn't find corresponding lazy val accessor") + vdef + } + if (reifyDebug) println(s"reconstructed lazy val is $vdef1") + vdef1::Nil + case ddef @ DefDef(mods0, name0, _, _, tpt0, rhs0) if ddef.symbol.isLazy => + def hasUnitType(sym: Symbol) = (sym.tpe.typeSymbol == UnitClass) && sym.tpe.annotations.isEmpty + if (hasUnitType(ddef.symbol)) { + // since lazy values of type Unit don't have val's + // we need to create them from scratch + toPreTyperLazyVal(ddef) :: Nil + } else Nil + case _ => stat::Nil + }) + } + private def trimSyntheticCaseClassMembers(deff: Tree, stats: List[Tree]): List[Tree] = stats filterNot (memberDef => memberDef.isDef && { val isSynthetic = memberDef.symbol.isSynthetic diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala index c0cc560a17..481228fb3d 100644 --- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala +++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala @@ -94,6 +94,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD } else sym.owner } + debuglog(s"determined enclosing class/dummy/method for lazy val as $enclosingClassOrDummyOrMethod given symbol $sym") val idx = lazyVals(enclosingClassOrDummyOrMethod) lazyVals(enclosingClassOrDummyOrMethod) = idx + 1 val (rhs1, sDef) = mkLazyDef(enclosingClassOrDummyOrMethod, transform(rhs), idx, sym) @@ -194,6 +195,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, STABLE | PRIVATE) defSym setInfo MethodType(List(), lzyVal.tpe.resultType) defSym.owner = lzyVal.owner + debuglog(s"crete slow compute path $defSym with owner ${defSym.owner} for lazy val $lzyVal") if (bitmaps.contains(lzyVal)) bitmaps(lzyVal).map(_.owner = defSym) val rhs: Tree = (gen.mkSynchronizedCheck(clazz, cond, syncBody, stats)).changeOwner(currentOwner -> defSym) @@ -248,6 +250,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD def mkBlock(stmt: Tree) = BLOCK(stmt, mkSetFlag(bitmapSym, mask, bitmapRef), UNIT) + debuglog(s"create complete lazy def in $methOrClass for $lazyVal") val (block, res) = tree match { case Block(List(assignment), res) if !lazyUnit(lazyVal) => (mkBlock(assignment), res) diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 91dcd90962..8e803a9a9e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -198,13 +198,14 @@ trait MethodSynthesis { if (nme.isSetterName(name)) ValOrValWithSetterSuffixError(tree) - val getter = Getter(tree).createAndEnterSymbol() - tree.symbol = ( - if (mods.isLazy) enterLazyVal(tree, getter) - else { + if (mods.isLazy) { + val lazyValGetter = LazyValGetter(tree).createAndEnterSymbol() + enterLazyVal(tree, lazyValGetter) + } else { if (mods.isPrivateLocal) PrivateThisCaseClassParameterError(tree) + val getter = 
Getter(tree).createAndEnterSymbol() // Create the setter if necessary. if (mods.isMutable) Setter(tree).createAndEnterSymbol() @@ -219,7 +220,7 @@ trait MethodSynthesis { } def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match { - case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) && !vd.symbol.isLazy => + case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) => // If we don't save the annotations, they seem to wander off. val annotations = stat.symbol.initialize.annotations ( allValDefDerived(vd) @@ -247,6 +248,7 @@ trait MethodSynthesis { def standardAccessors(vd: ValDef): List[DerivedFromValDef] = ( if (vd.mods.isMutable && !vd.mods.isLazy) List(Getter(vd), Setter(vd)) + else if (vd.mods.isLazy) List(LazyValGetter(vd)) else List(Getter(vd)) ) def beanAccessors(vd: ValDef): List[DerivedFromValDef] = { @@ -259,10 +261,15 @@ trait MethodSynthesis { else Nil } def allValDefDerived(vd: ValDef) = { - val field = if (vd.mods.isDeferred) Nil else List(Field(vd)) + val field = if (vd.mods.isDeferred || (vd.mods.isLazy && hasUnitType(vd.symbol))) Nil + else List(Field(vd)) field ::: standardAccessors(vd) ::: beanAccessors(vd) } + // Take into account annotations so that we keep annotated unit lazy val + // to get better error message already from the cps plugin itself + def hasUnitType(sym: Symbol) = (sym.tpe.typeSymbol == UnitClass) && sym.tpe.annotations.isEmpty + /** This trait assembles what's needed for synthesizing derived methods. * Important: Typically, instances of this trait are created TWICE for each derived * symbol; once form Namers in an enter method, and once from Typers in addDerivedTrees. @@ -388,16 +395,12 @@ trait MethodSynthesis { def name: TermName = tree.name.toTermName } - case class Getter(tree: ValDef) extends DerivedGetter { + abstract class BaseGetter(tree: ValDef) extends DerivedGetter { def name = tree.name def category = GetterTargetClass def flagsMask = GetterFlags def flagsExtra = ACCESSOR | ( if (tree.mods.isMutable) 0 else STABLE ) - override def derivedSym = ( - if (mods.isDeferred) basisSym - else basisSym.getter(enclClass) - ) override def validate() { assert(derivedSym != NoSymbol, tree) if (derivedSym.isOverloaded) @@ -405,6 +408,13 @@ trait MethodSynthesis { super.validate() } + } + case class Getter(tree: ValDef) extends BaseGetter(tree) { + override def derivedSym = ( + if (mods.isDeferred) basisSym + else basisSym.getter(enclClass) + ) + override def derivedTree: DefDef = { // For existentials, don't specify a type for the getter, even one derived // from the symbol! This leads to incompatible existentials for the field and @@ -437,6 +447,36 @@ trait MethodSynthesis { } } } + /** Implements lazy value accessors: + * - for lazy values of type Unit and all lazy fields inside traits, + * the rhs is the initializer itself + * - for all other lazy values z the accessor is a block of this form: + * { z = ; z } where z can be an identifier or a field. 
+ */ + case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) { + // todo: in future this should be enabled but now other phases still depend on the flag for various reasons + //override def flagsMask = (super.flagsMask & ~LAZY) + override def derivedSym = basisSym.lazyAccessor + override def derivedTree: DefDef = { + val ValDef(_, _, tpt0, rhs0) = tree + val rhs1 = transformed.get(rhs0) match { + case Some(rhs) => rhs + case None => rhs0 + } + val body = ( + if (tree.symbol.owner.isTrait || hasUnitType(basisSym)) rhs1 + else gen.mkAssignAndReturn(basisSym, rhs1) + ) + derivedSym.setPos(tree.pos) // cannot set it at createAndEnterSymbol because basisSym can possible stil have NoPosition + val ddefRes = atPos(tree.pos)(DefDef(derivedSym, body.changeOwner(followModuleClass = true, basisSym -> derivedSym))) + // ValDef will have its position focused whereas DefDef will have original correct rangepos + // ideally positions would be correct at the creation time but lazy vals are really a special case + // here so for the sake of keeping api clean we fix positions manually in LazyValGetter + ddefRes.tpt.setPos(tpt0.pos) + tpt0.setPos(tpt0.pos.focus) + ddefRes + } + } case class Setter(tree: ValDef) extends DerivedSetter { def name = nme.getterToSetter(tree.name) def category = SetterTargetClass @@ -455,6 +495,7 @@ trait MethodSynthesis { override def keepClean = !mods.isParamAccessor override def derivedTree = ( if (mods.isDeferred) EmptyTree + else if (mods.isLazy) copyValDef(tree)(mods = mods | flagsExtra, name = this.name, rhs = EmptyTree).setPos(tree.pos.focus) else copyValDef(tree)(mods = mods | flagsExtra, name = this.name) ) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 9e1a9d6d17..f456856b3e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -113,10 +113,8 @@ trait Namers extends MethodSynthesis { || (context.unit.isJava) ) def noFinishGetterSetter(vd: ValDef) = ( - vd.mods.isPrivateLocal - || vd.symbol.isModuleVar - || vd.symbol.isLazy - ) + (vd.mods.isPrivateLocal && !vd.mods.isLazy) // all lazy vals need accessors, even private[this] + || vd.symbol.isModuleVar) def setPrivateWithin[T <: Symbol](tree: Tree, sym: T, mods: Modifiers): T = if (sym.isPrivateLocal || !mods.hasAccessBoundary) sym diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 96e1ed9a1c..b6a56515ca 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -1376,10 +1376,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL t.symbol.owner = currentOwner case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2) patmatDebug("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain)) - if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree?? 
- assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner, d.symbol.lazyAccessor) - d.symbol.lazyAccessor.owner = currentOwner - } if(d.symbol.moduleClass ne NoSymbol) d.symbol.moduleClass.owner = currentOwner diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index ccb9478aee..dacb68ea86 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1014,15 +1014,18 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans private def enterSyms(stats: List[Tree]) { var index = -1 for (stat <- stats) { - index = index + 1; + index = index + 1 + def enterSym(sym: Symbol) = if (sym.isLocal) { + currentLevel.scope.enter(sym) + symIndex(sym) = index + } + stat match { + case DefDef(_, _, _, _, _, _) if stat.symbol.isLazy => + enterSym(stat.symbol) case ClassDef(_, _, _, _) | DefDef(_, _, _, _, _, _) | ModuleDef(_, _, _) | ValDef(_, _, _, _) => //assert(stat.symbol != NoSymbol, stat);//debug - val sym = stat.symbol.lazyAccessorOrSelf - if (sym.isLocal) { - currentLevel.scope.enter(sym) - symIndex(sym) = index; - } + enterSym(stat.symbol.lazyAccessorOrSelf) case _ => } } @@ -1287,34 +1290,6 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans }) } - /** Implements lazy value accessors: - * - for lazy values of type Unit and all lazy fields inside traits, - * the rhs is the initializer itself - * - for all other lazy values z the accessor is a block of this form: - * { z = ; z } where z can be an identifier or a field. - */ - private def makeLazyAccessor(tree: Tree, rhs: Tree): List[Tree] = { - val vsym = tree.symbol - assert(vsym.isTerm, vsym) - val hasUnitType = vsym.tpe.typeSymbol == UnitClass - val lazySym = vsym.lazyAccessor - assert(lazySym != NoSymbol, vsym) - - // for traits, this is further transformed in mixins - val body = ( - if (tree.symbol.owner.isTrait || hasUnitType) rhs - else gen.mkAssignAndReturn(vsym, rhs) - ) - val lazyDef = atPos(tree.pos)(DefDef(lazySym, body.changeOwner(vsym -> lazySym))) - debuglog("Created lazy accessor: " + lazyDef) - - if (hasUnitType) List(typed(lazyDef)) - else List( - typed(ValDef(vsym)), - exitingRefchecks(typed(lazyDef)) - ) - } - def transformStat(tree: Tree, index: Int): List[Tree] = tree match { case t if treeInfo.isSelfConstrCall(t) => assert(index == 0, index) @@ -1327,8 +1302,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case ModuleDef(_, _, _) => eliminateModuleDefs(tree) case ValDef(_, _, _, _) => val tree1 @ ValDef(_, _, _, rhs) = transform(tree) // important to do before forward reference check - if (tree.symbol.isLazy) - makeLazyAccessor(tree, rhs) + if (tree1.symbol.isLazy) tree1 :: Nil else { val lazySym = tree.symbol.lazyAccessorOrSelf if (lazySym.isLocal && index <= currentLevel.maxindex) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 688b4b5160..53ff15fef2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1425,9 +1425,6 @@ trait Typers extends Modes with Adaptations with Tags { //see https://issues.scala-lang.org/browse/SI-6463 case _: ClassDef => implRestriction(tree, "nested class") - case x: ValDef if x.mods.isLazy => - //see https://issues.scala-lang.org/browse/SI-6358 - 
implRestriction(tree, "lazy val") case _ => } super.traverse(tree) @@ -1907,7 +1904,7 @@ trait Typers extends Modes with Adaptations with Tags { val rhs1 = if (vdef.rhs.isEmpty) { - if (sym.isVariable && sym.owner.isTerm && !isPastTyper) + if (sym.isVariable && sym.owner.isTerm && !sym.isLazy && !isPastTyper) LocalVarUninitializedError(vdef) vdef.rhs } else { @@ -2333,9 +2330,15 @@ trait Typers extends Modes with Adaptations with Tags { case _ => } } - val stats1 = typedStats(block.stats, context.owner) + val stats1 = if (isPastTyper) block.stats else + block.stats.flatMap(stat => stat match { + case vd@ValDef(_, _, _, _) if vd.symbol.isLazy => + namer.addDerivedTrees(Typer.this, vd) + case _ => stat::Nil + }) + val stats2 = typedStats(stats1, context.owner) val expr1 = typed(block.expr, mode & ~(FUNmode | QUALmode), pt) - treeCopy.Block(block, stats1, expr1) + treeCopy.Block(block, stats2, expr1) .setType(if (treeInfo.isExprSafeToInline(block)) expr1.tpe else expr1.tpe.deconst) } finally { // enable escaping privates checking from the outside and recycle diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala index be3138c373..c147dc483d 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala @@ -496,7 +496,11 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { case ValDef(mods, name, tpt, rhs) => vprintln("[checker] checking valdef " + name + "/"+tpe+"/"+tpt+"/"+tree.symbol.tpe) // ValDef symbols must *not* have annotations! - if (hasAnswerTypeAnn(tree.symbol.info)) { // is it okay to modify sym here? + // lazy vals are currently not supported + // but if we erase here all annotations, compiler will complain only + // when generating bytecode. + // This way lazy vals will be reported as unsupported feature later rather than weird type error. + if (hasAnswerTypeAnn(tree.symbol.info) && !mods.isLazy) { // is it okay to modify sym here? 
vprintln("removing annotation from sym " + tree.symbol + "/" + tree.symbol.tpe + "/" + tpt) tpt modifyType removeAllCPSAnnotations tree.symbol modifyInfo removeAllCPSAnnotations diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala index 7229ea41f4..ef13f8b1d8 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala @@ -195,9 +195,12 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with case _ => if (hasAnswerTypeAnn(tree.tpe)) { - if (!cpsAllowed) - unit.error(tree.pos, "cps code not allowed here / " + tree.getClass + " / " + tree) - + if (!cpsAllowed) { + if (tree.symbol.isLazy) + unit.error(tree.pos, "implementation restriction: cps annotations not allowed on lazy value definitions") + else + unit.error(tree.pos, "cps code not allowed here / " + tree.getClass + " / " + tree) + } log(tree) } diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index d74a78ebda..ea7336340f 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -179,9 +179,12 @@ trait Trees extends api.Trees { self: SymbolTable => new ForeachPartialTreeTraverser(pf).traverse(this) } - def changeOwner(pairs: (Symbol, Symbol)*): Tree = { + def changeOwner(pairs: (Symbol, Symbol)*): Tree = + changeOwner(false, pairs: _*) + + def changeOwner(followModuleClass: Boolean, pairs: (Symbol, Symbol)*): Tree = { pairs.foldLeft(this) { case (t, (oldOwner, newOwner)) => - new ChangeOwnerTraverser(oldOwner, newOwner) apply t + new ChangeOwnerTraverser(oldOwner, newOwner, followModuleClass) apply t } } @@ -1310,7 +1313,11 @@ trait Trees extends api.Trees { self: SymbolTable => } } - class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol) extends Traverser { + class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol, followModuleClass: Boolean = false) extends Traverser { + def changeSymboOwnerIfCorrect(sym: Symbol) = { + if (sym != NoSymbol && sym.owner == oldowner) + sym.owner = newowner + } def changeOwner(tree: Tree) = tree match { case Return(expr) => if (tree.symbol == oldowner) { @@ -1323,9 +1330,8 @@ trait Trees extends api.Trees { self: SymbolTable => } } case _: DefTree | _: Function => - if (tree.symbol != NoSymbol && tree.symbol.owner == oldowner) { - tree.symbol.owner = newowner - } + changeSymboOwnerIfCorrect(tree.symbol) + if (followModuleClass) changeSymboOwnerIfCorrect(tree.symbol.moduleClass) case _ => } override def traverse(tree: Tree) { diff --git a/test/files/continuations-neg/lazy.check b/test/files/continuations-neg/lazy.check index b8c6887409..3c460546be 100644 --- a/test/files/continuations-neg/lazy.check +++ b/test/files/continuations-neg/lazy.check @@ -1,6 +1,4 @@ -lazy.scala:5: error: type mismatch; - found : Unit @scala.util.continuations.cpsParam[Unit,Unit] - required: Unit - def foo() = { - ^ +lazy.scala:6: error: implementation restriction: cps annotations not allowed on lazy value definitions + lazy val x = shift((k:Unit=>Unit)=>k()) + ^ one error found diff --git a/test/files/neg/valueclasses-impl-restrictions.check b/test/files/neg/valueclasses-impl-restrictions.check index 17d07ba960..63924493aa 100644 --- a/test/files/neg/valueclasses-impl-restrictions.check +++ 
b/test/files/neg/valueclasses-impl-restrictions.check @@ -2,20 +2,16 @@ valueclasses-impl-restrictions.scala:3: error: implementation restriction: neste This restriction is planned to be removed in subsequent releases. object X ^ -valueclasses-impl-restrictions.scala:4: error: implementation restriction: lazy val is not allowed in value class -This restriction is planned to be removed in subsequent releases. - lazy val y = 1 - ^ -valueclasses-impl-restrictions.scala:10: error: implementation restriction: nested trait is not allowed in value class +valueclasses-impl-restrictions.scala:9: error: implementation restriction: nested trait is not allowed in value class This restriction is planned to be removed in subsequent releases. trait I2 { ^ -valueclasses-impl-restrictions.scala:16: error: implementation restriction: nested class is not allowed in value class +valueclasses-impl-restrictions.scala:15: error: implementation restriction: nested class is not allowed in value class This restriction is planned to be removed in subsequent releases. val i2 = new I2 { val q = x.s } ^ -valueclasses-impl-restrictions.scala:22: error: implementation restriction: nested class is not allowed in value class +valueclasses-impl-restrictions.scala:21: error: implementation restriction: nested class is not allowed in value class This restriction is planned to be removed in subsequent releases. private[this] class I2(val q: String) ^ -5 errors found +four errors found diff --git a/test/files/neg/valueclasses-impl-restrictions.scala b/test/files/neg/valueclasses-impl-restrictions.scala index 53396db958..137f3f854c 100644 --- a/test/files/neg/valueclasses-impl-restrictions.scala +++ b/test/files/neg/valueclasses-impl-restrictions.scala @@ -1,7 +1,6 @@ class M(val t: Int) extends AnyVal { def lazyString = { object X - lazy val y = 1 () => X } } diff --git a/test/files/pos/t6358.scala b/test/files/pos/t6358.scala new file mode 100644 index 0000000000..25539c885e --- /dev/null +++ b/test/files/pos/t6358.scala @@ -0,0 +1,6 @@ +class L(val t: Int) extends AnyVal { + def lazyString = { + lazy val x = t.toString + () => x + } +} diff --git a/test/files/run/reify_lazyunit.check b/test/files/run/reify_lazyunit.check new file mode 100644 index 0000000000..1b46c909be --- /dev/null +++ b/test/files/run/reify_lazyunit.check @@ -0,0 +1,3 @@ +12 +one +two diff --git a/test/files/run/reify_lazyunit.scala b/test/files/run/reify_lazyunit.scala new file mode 100644 index 0000000000..78b00cde28 --- /dev/null +++ b/test/files/run/reify_lazyunit.scala @@ -0,0 +1,13 @@ +import scala.reflect.runtime.universe._ +import scala.tools.reflect.Eval + +object Test extends App { + reify { + lazy val x = { 0; println("12")} + x + println("one") + x + println("two") + }.eval +} + -- cgit v1.2.3 From a15969a6963a896fa3a4aa43effaf625833e363d Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Thu, 27 Sep 2012 11:33:32 +0200 Subject: Incorporated changes suggested in code review --- .../scala/reflect/reify/phases/Reshape.scala | 6 +-- .../scala/tools/nsc/transform/AddInterfaces.scala | 5 +-- .../tools/nsc/typechecker/MethodSynthesis.scala | 19 +++++++--- src/reflect/scala/reflect/internal/Trees.scala | 43 ++++++++++------------ 4 files changed, 38 insertions(+), 35 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index ef099f9f1b..9a1732a872 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ 
b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -297,7 +297,7 @@ trait Reshape { val vdef1 = ValDef(mods2, name1, tpt, rhs) if (reifyDebug) println("resetting visibility of field: %s => %s".format(vdef, vdef1)) Some(vdef1) // no copyAttrs here, because new ValDef and old symbols are now out of sync - case ddef @ DefDef(mods, name, tparams, vparamss, tpt, rhs) if !ddef.mods.isLazy => + case ddef: DefDef if !ddef.mods.isLazy => // lazy val accessors are removed in reshapeLazyVals // as they are needed to recreate lazy vals if (accessors.values.exists(_.contains(ddef))) { @@ -319,7 +319,7 @@ trait Reshape { // lazy valdef and defdef are in the same block. // only that valdef needs to have its rhs rebuilt from defdef stats flatMap (stat => stat match { - case vdef @ ValDef(mods0, name0, tpt0, rhs0) if vdef.symbol.isLazy => + case vdef: ValDef if vdef.symbol.isLazy => if (reifyDebug) println(s"reconstructing original lazy value for $vdef") val ddefSym = vdef.symbol.lazyAccessor val vdef1 = lazyvaldefs.get(ddefSym) match { @@ -331,7 +331,7 @@ trait Reshape { } if (reifyDebug) println(s"reconstructed lazy val is $vdef1") vdef1::Nil - case ddef @ DefDef(mods0, name0, _, _, tpt0, rhs0) if ddef.symbol.isLazy => + case ddef: DefDef if ddef.symbol.isLazy => def hasUnitType(sym: Symbol) = (sym.tpe.typeSymbol == UnitClass) && sym.tpe.annotations.isEmpty if (hasUnitType(ddef.symbol)) { // since lazy values of type Unit don't have val's diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 32c2d63b2a..328e4ce71f 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -231,9 +231,8 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => extends ChangeOwnerTraverser(oldowner, newowner) { override def traverse(tree: Tree) { tree match { - case Return(expr) => - if (tree.symbol == oldowner) tree.symbol = newowner - case _ => + case _: Return => change(tree.symbol) + case _ => } super.traverse(tree) } diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 8e803a9a9e..c95951e608 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -454,21 +454,30 @@ trait MethodSynthesis { * { z = ; z } where z can be an identifier or a field. 
*/ case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) { + class ChangeOwnerAndModuleClassTraverser(oldowner: Symbol, newowner: Symbol) + extends ChangeOwnerTraverser(oldowner, newowner) { + + override def traverse(tree: Tree) { + tree match { + case _: DefTree => change(tree.symbol.moduleClass) + case _ => + } + super.traverse(tree) + } + } + // todo: in future this should be enabled but now other phases still depend on the flag for various reasons //override def flagsMask = (super.flagsMask & ~LAZY) override def derivedSym = basisSym.lazyAccessor override def derivedTree: DefDef = { val ValDef(_, _, tpt0, rhs0) = tree - val rhs1 = transformed.get(rhs0) match { - case Some(rhs) => rhs - case None => rhs0 - } + val rhs1 = transformed.getOrElse(rhs0, rhs0) val body = ( if (tree.symbol.owner.isTrait || hasUnitType(basisSym)) rhs1 else gen.mkAssignAndReturn(basisSym, rhs1) ) derivedSym.setPos(tree.pos) // cannot set it at createAndEnterSymbol because basisSym can possible stil have NoPosition - val ddefRes = atPos(tree.pos)(DefDef(derivedSym, body.changeOwner(followModuleClass = true, basisSym -> derivedSym))) + val ddefRes = atPos(tree.pos)(DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body))) // ValDef will have its position focused whereas DefDef will have original correct rangepos // ideally positions would be correct at the creation time but lazy vals are really a special case // here so for the sake of keeping api clean we fix positions manually in LazyValGetter diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index ea7336340f..4bb88145b3 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -179,12 +179,9 @@ trait Trees extends api.Trees { self: SymbolTable => new ForeachPartialTreeTraverser(pf).traverse(this) } - def changeOwner(pairs: (Symbol, Symbol)*): Tree = - changeOwner(false, pairs: _*) - - def changeOwner(followModuleClass: Boolean, pairs: (Symbol, Symbol)*): Tree = { + def changeOwner(pairs: (Symbol, Symbol)*): Tree = { pairs.foldLeft(this) { case (t, (oldOwner, newOwner)) => - new ChangeOwnerTraverser(oldOwner, newOwner, followModuleClass) apply t + new ChangeOwnerTraverser(oldOwner, newOwner) apply t } } @@ -1313,29 +1310,27 @@ trait Trees extends api.Trees { self: SymbolTable => } } - class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol, followModuleClass: Boolean = false) extends Traverser { - def changeSymboOwnerIfCorrect(sym: Symbol) = { + class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol) extends Traverser { + final def change(sym: Symbol) = { if (sym != NoSymbol && sym.owner == oldowner) sym.owner = newowner } - def changeOwner(tree: Tree) = tree match { - case Return(expr) => - if (tree.symbol == oldowner) { - // SI-5612 - if (newowner hasTransOwner oldowner) - log("NOT changing owner of %s because %s is nested in %s".format(tree, newowner, oldowner)) - else { - log("changing owner of %s: %s => %s".format(tree, oldowner, newowner)) - tree.symbol = newowner - } - } - case _: DefTree | _: Function => - changeSymboOwnerIfCorrect(tree.symbol) - if (followModuleClass) changeSymboOwnerIfCorrect(tree.symbol.moduleClass) - case _ => - } override def traverse(tree: Tree) { - changeOwner(tree) + tree match { + case _: Return => + if (tree.symbol == oldowner) { + // SI-5612 + if (newowner hasTransOwner oldowner) + log("NOT changing owner of %s because %s is nested in 
%s".format(tree, newowner, oldowner)) + else { + log("changing owner of %s: %s => %s".format(tree, oldowner, newowner)) + tree.symbol = newowner + } + } + case _: DefTree | _: Function => + change(tree.symbol) + case _ => + } super.traverse(tree) } } -- cgit v1.2.3 From b7a378685ad97fbc1426bb15d5681cea6be0bb3d Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Tue, 9 Oct 2012 10:13:17 +0200 Subject: Crash on missing accessor (internal bug in the lazy vals implementation) instead of trying to recover from the bug --- src/compiler/scala/reflect/reify/Errors.scala | 5 +++++ src/compiler/scala/reflect/reify/phases/Reshape.scala | 3 +-- 2 files changed, 6 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala index 73c13901b6..fb6765d9ed 100644 --- a/src/compiler/scala/reflect/reify/Errors.scala +++ b/src/compiler/scala/reflect/reify/Errors.scala @@ -71,4 +71,9 @@ trait Errors { val msg = "internal error: erroneous reifees are not supported, make sure that your reifee has typechecked successfully before passing it to the reifier" throw new UnexpectedReificationError(defaultErrorPosition, msg) } + + def CannotReifyInvalidLazyVal(tree: ValDef) = { + val msg = "internal error: could not reconstruct original lazy val due to missing accessor" + throw new UnexpectedReificationError(tree.pos, msg) + } } diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index 9a1732a872..1b7509fdbe 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -326,8 +326,7 @@ trait Reshape { case Some(ddef) => toPreTyperLazyVal(ddef) case None => - if (reifyDebug) println("couldn't find corresponding lazy val accessor") - vdef + CannotReifyInvalidLazyVal(vdef) } if (reifyDebug) println(s"reconstructed lazy val is $vdef1") vdef1::Nil -- cgit v1.2.3 From 432f9368011e0fd9e89ca0e18082bfec180baf32 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 10 Sep 2012 09:40:03 -0700 Subject: Experimental option -Ybreak-cycles. Overcomes cycles encountered during classfile parsing in possibly sketchy fashion. "illegal cyclic reference involving class Foo" is the watchword. See SI-3809. 
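Two things worth noting before the diff. First, the cycle-breaking itself is opt-in: it only kicks in under the new -Ybreak-cycles flag added to ScalaSettings. Second, the Namers change below replaces an eager sym.initialize on lower bounds with a guarded maybeInitialize; the comment it adds names the kind of definition that must keep compiling, sketched here (Bar is a stand-in class, and the new pos/cycle-bounds.scala test presumably exercises something along these lines):

    // An F-bounded-looking bound that refers back to the type parameter.
    // Eagerly initializing the symbol of the lower bound while checking for
    // F-bounds could report a spurious "illegal cyclic reference" here.
    class Bar[T]
    class Foo[T <: Bar[_ >: T]]
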
--- src/compiler/scala/tools/nsc/Global.scala | 6 ++ .../scala/tools/nsc/settings/ScalaSettings.scala | 1 + .../tools/nsc/typechecker/ContextErrors.scala | 4 +- .../scala/tools/nsc/typechecker/Namers.scala | 14 +-- .../scala/tools/nsc/typechecker/Typers.scala | 5 +- .../scala/reflect/internal/SymbolTable.scala | 1 + src/reflect/scala/reflect/internal/Symbols.scala | 4 + src/reflect/scala/reflect/internal/Types.scala | 107 +++++++++++++++------ .../internal/settings/MutableSettings.scala | 1 + src/reflect/scala/reflect/runtime/Settings.scala | 1 + test/files/lib/jsoup-1.3.1.jar.desired.sha1 | 1 + test/files/pos/cycle-bounds.scala | 1 + test/files/pos/cycle-jsoup.flags | 1 + test/files/pos/cycle-jsoup.scala | 5 + test/files/pos/cycle.flags | 1 + test/files/pos/cycle/J_1.java | 16 +++ test/files/pos/cycle/X_2.scala | 3 + test/pending/pos/t4612.scala | 15 +++ test/pending/pos/t4744/Bar.scala | 1 + test/pending/pos/t4744/Foo.java | 1 + test/pending/pos/t5082.scala | 8 ++ 21 files changed, 152 insertions(+), 45 deletions(-) create mode 100644 test/files/lib/jsoup-1.3.1.jar.desired.sha1 create mode 100644 test/files/pos/cycle-bounds.scala create mode 100644 test/files/pos/cycle-jsoup.flags create mode 100644 test/files/pos/cycle-jsoup.scala create mode 100644 test/files/pos/cycle.flags create mode 100644 test/files/pos/cycle/J_1.java create mode 100644 test/files/pos/cycle/X_2.scala create mode 100644 test/pending/pos/t4612.scala create mode 100644 test/pending/pos/t4744/Bar.scala create mode 100644 test/pending/pos/t4744/Foo.java create mode 100644 test/pending/pos/t5082.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index cb5e2ad555..fb9539e2b2 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1046,6 +1046,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile + override def isPastTyper = ( + (curRun ne null) + && (currentRun.typerPhase ne null) + && (globalPhase.id > currentRun.typerPhase.id) + ) + // TODO - trim these to the absolute minimum. 
@inline final def exitingErasure[T](op: => T): T = exitingPhase(currentRun.erasurePhase)(op) @inline final def exitingPostErasure[T](op: => T): T = exitingPhase(currentRun.posterasurePhase)(op) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 3ff7af791b..4829fb81b5 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -127,6 +127,7 @@ trait ScalaSettings extends AbsScalaSettings val overrideObjects = BooleanSetting ("-Yoverride-objects", "Allow member objects to be overridden.") val overrideVars = BooleanSetting ("-Yoverride-vars", "Allow vars to be overridden.") val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options.") + val breakCycles = BooleanSetting ("-Ybreak-cycles", "Attempt to break cycles encountered during classfile parsing") val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after") val check = PhasesSetting ("-Ycheck", "Check the tree at the end of") val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after") diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index f12f08030b..7c02f094ed 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -657,8 +657,8 @@ trait ContextErrors { def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) = issueTypeError(PosAndMsgTypeError(errPos, "cyclic aliasing or subtyping involving "+sym0)) - def CyclicReferenceError(errPos: Position, lockedSym: Symbol) = - issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym)) + def CyclicReferenceError(errPos: Position, tp: Type, lockedSym: Symbol) = + issueTypeError(PosAndMsgTypeError(errPos, s"illegal cyclic reference involving $tp and $lockedSym")) // macro-related errors (also see MacroErrors below) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 9e1a9d6d17..4b60fd8b27 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -711,8 +711,9 @@ trait Namers extends MethodSynthesis { tp match { case TypeBounds(lo, _) => // check that lower bound is not an F-bound + // but carefully: class Foo[T <: Bar[_ >: T]] should be allowed for (TypeRef(_, sym, _) <- lo) - sym.initialize + sym.maybeInitialize case _ => } tp @@ -731,17 +732,6 @@ trait Namers extends MethodSynthesis { if (needsCycleCheck && !typer.checkNonCyclic(tree.pos, tp)) sym setInfo ErrorType } - // tree match { - // case ClassDef(_, _, _, impl) => - // val parentsOK = ( - // treeInfo.isInterface(sym, impl.body) - // || (sym eq ArrayClass) - // || (sym isSubClass AnyValClass) - // ) - // if (!parentsOK) - // ensureParent(sym, AnyRefClass) - // case _ => () - // } } def moduleClassTypeCompleter(tree: ModuleDef) = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 688b4b5160..7a2c64142d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -292,8 +292,7 @@ trait Typers extends Modes with Adaptations with Tags { */ def checkNonCyclic(pos: Position, tp: Type): Boolean = { def 
checkNotLocked(sym: Symbol) = { - sym.initialize - sym.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false } + sym.initialize.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false } } tp match { case TypeRef(pre, sym, args) => @@ -320,7 +319,7 @@ trait Typers extends Modes with Adaptations with Tags { } def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try { - if (!lockedSym.lock(CyclicReferenceError(pos, lockedSym))) false + if (!lockedSym.lock(CyclicReferenceError(pos, tp, lockedSym))) false else checkNonCyclic(pos, tp) } finally { lockedSym.unlock() diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 90e49f2043..38e33c001c 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -48,6 +48,7 @@ abstract class SymbolTable extends macros.Universe def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg)) def shouldLogAtThisPhase = false + def isPastTyper = false @deprecated("Give us a reason", "2.10.0") def abort(): Nothing = abort("unknown error") diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index f9f1ea3936..d506a43829 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1398,6 +1398,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (!isInitialized) info this } + def maybeInitialize: this.type = { + try initialize + catch { case _: CyclicReference => debuglog("Encountering cycle in maybe-initialization of $this") ; this } + } /** Called when the programmer requests information that might require initialization of the underlying symbol. * diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 3f0b620ee2..9a43ad441f 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -91,6 +91,7 @@ trait Types extends api.Types { self: SymbolTable => private final val printLubs = sys.props contains "scalac.debug.lub" private final val traceTypeVars = sys.props contains "scalac.debug.tvar" + private final val breakCycles = settings.breakCycles.value /** In case anyone wants to turn off lub verification without reverting anything. 
*/ private final val verifyLubs = true /** In case anyone wants to turn off type parameter bounds being used @@ -1615,6 +1616,37 @@ trait Types extends api.Types { self: SymbolTable => ) } + protected def computeBaseClasses(tpe: Type): List[Symbol] = { + def csym = tpe.typeSymbol + csym :: { + if (tpe.parents.isEmpty || csym.hasFlag(PACKAGE)) Nil + else { + //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG + // optimized, since this seems to be performance critical + val superclazz = tpe.firstParent + var mixins = tpe.parents.tail + val sbcs = superclazz.baseClasses + var bcs = sbcs + def isNew(clazz: Symbol): Boolean = ( + superclazz.baseTypeIndex(clazz) < 0 && + { var p = bcs; + while ((p ne sbcs) && (p.head != clazz)) p = p.tail; + p eq sbcs + } + ) + while (!mixins.isEmpty) { + def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] = + if (mbcs.isEmpty) bcs + else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail) + else addMixinBaseClasses(mbcs.tail) + bcs = addMixinBaseClasses(mixins.head.baseClasses) + mixins = mixins.tail + } + bcs + } + } + } + protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = { val period = tpe.baseTypeSeqPeriod if (period != currentPeriod) { @@ -1675,41 +1707,60 @@ trait Types extends api.Types { self: SymbolTable => throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol) } - protected def defineBaseClassesOfCompoundType(tpe: CompoundType) = { - def computeBaseClasses: List[Symbol] = - if (tpe.parents.isEmpty) List(tpe.typeSymbol) - else { - //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG - // optimized, since this seems to be performance critical - val superclazz = tpe.firstParent - var mixins = tpe.parents.tail - val sbcs = superclazz.baseClasses - var bcs = sbcs - def isNew(clazz: Symbol): Boolean = - superclazz.baseTypeIndex(clazz) < 0 && - { var p = bcs; - while ((p ne sbcs) && (p.head != clazz)) p = p.tail; - p eq sbcs - } - while (!mixins.isEmpty) { - def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] = - if (mbcs.isEmpty) bcs - else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail) - else addMixinBaseClasses(mbcs.tail) - bcs = addMixinBaseClasses(mixins.head.baseClasses) - mixins = mixins.tail + object baseClassesCycleMonitor { + private var open: List[Symbol] = Nil + @inline private def cycleLog(msg: => String) { + Console.err.println(msg) + } + def size = open.size + def push(clazz: Symbol) { + cycleLog("+ " + (" " * size) + clazz.fullNameString) + open ::= clazz + } + def pop(clazz: Symbol) { + assert(open.head eq clazz, (clazz, open)) + open = open.tail + } + def isOpen(clazz: Symbol) = open contains clazz + } + + protected def defineBaseClassesOfCompoundType(tpe: CompoundType) { + def define = defineBaseClassesOfCompoundType(tpe, force = false) + if (isPastTyper || !breakCycles) define + else tpe match { + // non-empty parents helpfully excludes all package classes + case tpe @ ClassInfoType(_ :: _, _, clazz) if !clazz.isAnonOrRefinementClass => + // Cycle: force update + if (baseClassesCycleMonitor isOpen clazz) + defineBaseClassesOfCompoundType(tpe, force = true) + else { + baseClassesCycleMonitor push clazz + try define + finally baseClassesCycleMonitor pop clazz } - tpe.typeSymbol :: bcs - } + case _ => + define + } + } + private def defineBaseClassesOfCompoundType(tpe: CompoundType, force: Boolean) { val period = tpe.baseClassesPeriod - if (period != currentPeriod) { + if (period 
== currentPeriod) { + if (force && breakCycles) { + def what = tpe.typeSymbol + " in " + tpe.typeSymbol.owner.fullNameString + val bcs = computeBaseClasses(tpe) + tpe.baseClassesCache = bcs + warning(s"Breaking cycle in base class computation of $what ($bcs)") + } + } + else { tpe.baseClassesPeriod = currentPeriod if (!isValidForBaseClasses(period)) { val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseClassesNanos) else null try { tpe.baseClassesCache = null - tpe.baseClassesCache = tpe.memo(computeBaseClasses)(tpe.typeSymbol :: _.baseClasses.tail) - } finally { + tpe.baseClassesCache = tpe.memo(computeBaseClasses(tpe))(tpe.typeSymbol :: _.baseClasses.tail) + } + finally { if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) } } diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index 459326e96f..844ecf908a 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -47,4 +47,5 @@ abstract class MutableSettings extends AbsSettings { def XoldPatmat: BooleanSetting def XnoPatmatAnalysis: BooleanSetting def XfullLubs: BooleanSetting + def breakCycles: BooleanSetting } diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index da4f4fbda1..08d58aaadc 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -43,6 +43,7 @@ class Settings extends MutableSettings { val printtypes = new BooleanSetting(false) val uniqid = new BooleanSetting(false) val verbose = new BooleanSetting(false) + val breakCycles = new BooleanSetting(false) val Yrecursion = new IntSetting(0) val maxClassfileName = new IntSetting(255) diff --git a/test/files/lib/jsoup-1.3.1.jar.desired.sha1 b/test/files/lib/jsoup-1.3.1.jar.desired.sha1 new file mode 100644 index 0000000000..46fa3dae9d --- /dev/null +++ b/test/files/lib/jsoup-1.3.1.jar.desired.sha1 @@ -0,0 +1 @@ +346d3dff4088839d6b4d163efa2892124039d216 ?jsoup-1.3.1.jar diff --git a/test/files/pos/cycle-bounds.scala b/test/files/pos/cycle-bounds.scala new file mode 100644 index 0000000000..0aa7aa552b --- /dev/null +++ b/test/files/pos/cycle-bounds.scala @@ -0,0 +1 @@ +class Foo[T <: Comparable[_ >: T]] diff --git a/test/files/pos/cycle-jsoup.flags b/test/files/pos/cycle-jsoup.flags new file mode 100644 index 0000000000..ca20f55172 --- /dev/null +++ b/test/files/pos/cycle-jsoup.flags @@ -0,0 +1 @@ +-Ybreak-cycles diff --git a/test/files/pos/cycle-jsoup.scala b/test/files/pos/cycle-jsoup.scala new file mode 100644 index 0000000000..879e693537 --- /dev/null +++ b/test/files/pos/cycle-jsoup.scala @@ -0,0 +1,5 @@ +object Test { + def main(args : Array[String]) { + org.jsoup.Jsoup.parse(null: java.net.URL, 3000) + } +} diff --git a/test/files/pos/cycle.flags b/test/files/pos/cycle.flags new file mode 100644 index 0000000000..ca20f55172 --- /dev/null +++ b/test/files/pos/cycle.flags @@ -0,0 +1 @@ +-Ybreak-cycles diff --git a/test/files/pos/cycle/J_1.java b/test/files/pos/cycle/J_1.java new file mode 100644 index 0000000000..0cc218eebe --- /dev/null +++ b/test/files/pos/cycle/J_1.java @@ -0,0 +1,16 @@ +package bar; + +public class J_1 { + public void f(C.D arg) { + } +} + +class B extends J_1 { + public void g(C.D arg) { + } +} + +class C extends B { + public class D { + } +} diff --git a/test/files/pos/cycle/X_2.scala 
b/test/files/pos/cycle/X_2.scala new file mode 100644 index 0000000000..c1840f3b99 --- /dev/null +++ b/test/files/pos/cycle/X_2.scala @@ -0,0 +1,3 @@ +import bar.J_1._ //<--- illegal cyclic reference involving + +class X diff --git a/test/pending/pos/t4612.scala b/test/pending/pos/t4612.scala new file mode 100644 index 0000000000..a93c12ef01 --- /dev/null +++ b/test/pending/pos/t4612.scala @@ -0,0 +1,15 @@ +class CyclicReferenceCompilerBug { + trait Trait[A] { + def foo: A + } + + class Class extends Trait[Class] { + def foo = new Class + + trait OtherTrait extends Trait[OtherTrait] { + self: Class => + + def foo = new Class + } + } +} diff --git a/test/pending/pos/t4744/Bar.scala b/test/pending/pos/t4744/Bar.scala new file mode 100644 index 0000000000..1fb6d78973 --- /dev/null +++ b/test/pending/pos/t4744/Bar.scala @@ -0,0 +1 @@ +class Bar { val quux = new Foo[java.lang.Integer]() } diff --git a/test/pending/pos/t4744/Foo.java b/test/pending/pos/t4744/Foo.java new file mode 100644 index 0000000000..6c764d0470 --- /dev/null +++ b/test/pending/pos/t4744/Foo.java @@ -0,0 +1 @@ +public class Foo> {} diff --git a/test/pending/pos/t5082.scala b/test/pending/pos/t5082.scala new file mode 100644 index 0000000000..20a6cfc55f --- /dev/null +++ b/test/pending/pos/t5082.scala @@ -0,0 +1,8 @@ +object Test { + sealed trait A + case object A1 extends A +} + +trait Something[T] + +case class Test() extends Something[Test.A] -- cgit v1.2.3 From ba36c44c31d1a1e0b5c0cf3d4775edd0ae0d5a13 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 28 Sep 2012 14:36:42 -0700 Subject: Fix for SI-4744, another variety of cycle. I threw this in with the previous commit behind -Ybreak-cycles, but this one is much less sketchy. Explanation: have to handle f-bounds more deftly. Namers forces lower bounds to prevent recursion in that direction, but a light touch is required to handle these two situations differently: // This is a cyclic type parameter - an error is correct class A[T <: Comparable[_ <: T]] // This is not cyclic - it flips the arrow class B[T <: Comparable[_ >: T]] Long have I been haunted by the knowledge that you can write class B in java, but not in scala: public class B> {} It's over! We've achieved parity with java. 
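The Java declaration quoted above lost its angle-bracketed type parameters in this rendering; presumably it reads:

    public class B<T extends Comparable<? super T>> {}

Its Scala counterpart, which this commit makes legal, and the genuinely cyclic variant, which remains an error (both taken from the cycle-bounds tests below):

    class Ok[T <: Comparable[_ >: T]]     // now accepted: the arrow is flipped, not cyclic
    class NotOk[T <: Comparable[_ <: T]]  // still "illegal cyclic reference involving type T"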
--- .../scala/tools/nsc/settings/ScalaSettings.scala | 2 +- .../scala/tools/nsc/typechecker/Namers.scala | 40 ++++++++++++++++------ src/reflect/scala/reflect/internal/Symbols.scala | 6 ++-- src/reflect/scala/reflect/internal/Types.scala | 3 +- test/files/neg/cycle-bounds.check | 4 +++ test/files/neg/cycle-bounds.flags | 1 + test/files/neg/cycle-bounds.scala | 5 +++ test/files/neg/t1224.check | 2 +- test/files/neg/t1224.flags | 1 + test/files/pos/cycle-bounds.scala | 1 - test/files/pos/t4744.flags | 1 + test/files/pos/t4744/Bar.scala | 1 + test/files/pos/t4744/Foo.java | 1 + test/pending/pos/t4744/Bar.scala | 1 - test/pending/pos/t4744/Foo.java | 1 - wip.scala | 2 ++ 16 files changed, 53 insertions(+), 19 deletions(-) create mode 100644 test/files/neg/cycle-bounds.check create mode 100644 test/files/neg/cycle-bounds.flags create mode 100644 test/files/neg/cycle-bounds.scala create mode 100644 test/files/neg/t1224.flags delete mode 100644 test/files/pos/cycle-bounds.scala create mode 100644 test/files/pos/t4744.flags create mode 100644 test/files/pos/t4744/Bar.scala create mode 100644 test/files/pos/t4744/Foo.java delete mode 100644 test/pending/pos/t4744/Bar.scala delete mode 100644 test/pending/pos/t4744/Foo.java create mode 100644 wip.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 4829fb81b5..404f5e6b6e 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -127,7 +127,7 @@ trait ScalaSettings extends AbsScalaSettings val overrideObjects = BooleanSetting ("-Yoverride-objects", "Allow member objects to be overridden.") val overrideVars = BooleanSetting ("-Yoverride-vars", "Allow vars to be overridden.") val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options.") - val breakCycles = BooleanSetting ("-Ybreak-cycles", "Attempt to break cycles encountered during classfile parsing") + val breakCycles = BooleanSetting ("-Ybreak-cycles", "Attempt to break cycles encountered during typing") val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after") val check = PhasesSetting ("-Ycheck", "Check the tree at the end of") val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after") diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 4b60fd8b27..710b3e4e54 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -707,30 +707,50 @@ trait Namers extends MethodSynthesis { // --- Lazy Type Assignment -------------------------------------------------- - def initializeLowerBounds(tp: Type): Type = { + def findCyclicalLowerBound(tp: Type): Symbol = { tp match { case TypeBounds(lo, _) => // check that lower bound is not an F-bound // but carefully: class Foo[T <: Bar[_ >: T]] should be allowed - for (TypeRef(_, sym, _) <- lo) - sym.maybeInitialize + for (tp1 @ TypeRef(_, sym, _) <- lo) { + if (settings.breakCycles.value) { + if (!sym.maybeInitialize) { + log(s"Cycle inspecting $lo for possible f-bounds: ${sym.fullLocationString}") + return sym + } + } + else sym.initialize + } case _ => } - tp + NoSymbol } def monoTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym => + // this early test is there to avoid infinite baseTypes when + // adding setters and getters --> bug798 + def needsCycleCheck = 
sym.isNonClassType && !sym.isParameter && !sym.isExistential logAndValidate(sym) { - val tp = initializeLowerBounds(typeSig(tree)) + val tp = typeSig(tree) + + findCyclicalLowerBound(tp) andAlso { sym => + if (needsCycleCheck) { + // neg/t1224: trait C[T] ; trait A { type T >: C[T] <: C[C[T]] } + // To avoid an infinite loop on the above, we cannot break all cycles + log(s"Reinitializing info of $sym to catch any genuine cycles") + sym reset sym.info + sym.initialize + } + } sym setInfo { if (sym.isJavaDefined) RestrictJavaArraysMap(tp) else tp } - // this early test is there to avoid infinite baseTypes when - // adding setters and getters --> bug798 - val needsCycleCheck = (sym.isAliasType || sym.isAbstractType) && !sym.isParameter - if (needsCycleCheck && !typer.checkNonCyclic(tree.pos, tp)) - sym setInfo ErrorType + if (needsCycleCheck) { + log(s"Needs cycle check: ${sym.debugLocationString}") + if (!typer.checkNonCyclic(tree.pos, tp)) + sym setInfo ErrorType + } } } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index d506a43829..7d0c05bc81 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1398,9 +1398,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (!isInitialized) info this } - def maybeInitialize: this.type = { - try initialize - catch { case _: CyclicReference => debuglog("Encountering cycle in maybe-initialization of $this") ; this } + def maybeInitialize = { + try { initialize ; true } + catch { case _: CyclicReference => debuglog("Hit cycle in maybeInitialize of $this") ; false } } /** Called when the programmer requests information that might require initialization of the underlying symbol. 
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 9a43ad441f..f8b5d089e8 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1710,7 +1710,8 @@ trait Types extends api.Types { self: SymbolTable => object baseClassesCycleMonitor { private var open: List[Symbol] = Nil @inline private def cycleLog(msg: => String) { - Console.err.println(msg) + if (settings.debug.value) + Console.err.println(msg) } def size = open.size def push(clazz: Symbol) { diff --git a/test/files/neg/cycle-bounds.check b/test/files/neg/cycle-bounds.check new file mode 100644 index 0000000000..d924838aec --- /dev/null +++ b/test/files/neg/cycle-bounds.check @@ -0,0 +1,4 @@ +cycle-bounds.scala:5: error: illegal cyclic reference involving type T +class NotOk[T <: Comparable[_ <: T]] + ^ +one error found diff --git a/test/files/neg/cycle-bounds.flags b/test/files/neg/cycle-bounds.flags new file mode 100644 index 0000000000..ca20f55172 --- /dev/null +++ b/test/files/neg/cycle-bounds.flags @@ -0,0 +1 @@ +-Ybreak-cycles diff --git a/test/files/neg/cycle-bounds.scala b/test/files/neg/cycle-bounds.scala new file mode 100644 index 0000000000..0b43bc703e --- /dev/null +++ b/test/files/neg/cycle-bounds.scala @@ -0,0 +1,5 @@ +// This should be allowed +class Ok[T <: Comparable[_ >: T]] + +// This is (il)legitimately a cyclic reference +class NotOk[T <: Comparable[_ <: T]] diff --git a/test/files/neg/t1224.check b/test/files/neg/t1224.check index fb61275911..ab8a6f1130 100644 --- a/test/files/neg/t1224.check +++ b/test/files/neg/t1224.check @@ -1,4 +1,4 @@ -t1224.scala:4: error: illegal cyclic reference involving type T +t1224.scala:4: error: lower bound C[A.this.T] does not conform to upper bound C[C[A.this.T]] type T >: C[T] <: C[C[T]] ^ one error found diff --git a/test/files/neg/t1224.flags b/test/files/neg/t1224.flags new file mode 100644 index 0000000000..ca20f55172 --- /dev/null +++ b/test/files/neg/t1224.flags @@ -0,0 +1 @@ +-Ybreak-cycles diff --git a/test/files/pos/cycle-bounds.scala b/test/files/pos/cycle-bounds.scala deleted file mode 100644 index 0aa7aa552b..0000000000 --- a/test/files/pos/cycle-bounds.scala +++ /dev/null @@ -1 +0,0 @@ -class Foo[T <: Comparable[_ >: T]] diff --git a/test/files/pos/t4744.flags b/test/files/pos/t4744.flags new file mode 100644 index 0000000000..ca20f55172 --- /dev/null +++ b/test/files/pos/t4744.flags @@ -0,0 +1 @@ +-Ybreak-cycles diff --git a/test/files/pos/t4744/Bar.scala b/test/files/pos/t4744/Bar.scala new file mode 100644 index 0000000000..1fb6d78973 --- /dev/null +++ b/test/files/pos/t4744/Bar.scala @@ -0,0 +1 @@ +class Bar { val quux = new Foo[java.lang.Integer]() } diff --git a/test/files/pos/t4744/Foo.java b/test/files/pos/t4744/Foo.java new file mode 100644 index 0000000000..6c764d0470 --- /dev/null +++ b/test/files/pos/t4744/Foo.java @@ -0,0 +1 @@ +public class Foo> {} diff --git a/test/pending/pos/t4744/Bar.scala b/test/pending/pos/t4744/Bar.scala deleted file mode 100644 index 1fb6d78973..0000000000 --- a/test/pending/pos/t4744/Bar.scala +++ /dev/null @@ -1 +0,0 @@ -class Bar { val quux = new Foo[java.lang.Integer]() } diff --git a/test/pending/pos/t4744/Foo.java b/test/pending/pos/t4744/Foo.java deleted file mode 100644 index 6c764d0470..0000000000 --- a/test/pending/pos/t4744/Foo.java +++ /dev/null @@ -1 +0,0 @@ -public class Foo> {} diff --git a/wip.scala b/wip.scala new file mode 100644 index 0000000000..ed9ba97640 --- /dev/null +++ b/wip.scala @@ 
-0,0 +1,2 @@ +object Foo { } +case class Foo(x: Int) -- cgit v1.2.3 From eed61bed1ff3ff70c32b579a5995da4ce59e2d67 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 28 Sep 2012 23:09:55 -0700 Subject: Incorporated pull request feedback. --- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 4 ++++ src/reflect/scala/reflect/internal/Types.scala | 13 +++++++------ wip.scala | 2 -- 3 files changed, 11 insertions(+), 8 deletions(-) delete mode 100644 wip.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 710b3e4e54..25888fc054 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -729,6 +729,10 @@ trait Namers extends MethodSynthesis { def monoTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym => // this early test is there to avoid infinite baseTypes when // adding setters and getters --> bug798 + // It is a def in an attempt to provide some insulation against + // uninitialized symbols misleading us. It is not a certainty + // this accomplishes anything, but performance is a non-consideration + // on these flag checks so it can't hurt. def needsCycleCheck = sym.isNonClassType && !sym.isParameter && !sym.isExistential logAndValidate(sym) { val tp = typeSig(tree) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index f8b5d089e8..e15340543e 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1617,14 +1617,14 @@ trait Types extends api.Types { self: SymbolTable => } protected def computeBaseClasses(tpe: Type): List[Symbol] = { - def csym = tpe.typeSymbol - csym :: { - if (tpe.parents.isEmpty || csym.hasFlag(PACKAGE)) Nil + val parents = tpe.parents // adriaan says tpe.parents does work sometimes, so call it only once + val baseTail = ( + if (parents.isEmpty || parents.head.isInstanceOf[PackageTypeRef]) Nil else { //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG // optimized, since this seems to be performance critical - val superclazz = tpe.firstParent - var mixins = tpe.parents.tail + val superclazz = parents.head // parents.isEmpty was already excluded + var mixins = parents.tail val sbcs = superclazz.baseClasses var bcs = sbcs def isNew(clazz: Symbol): Boolean = ( @@ -1644,7 +1644,8 @@ trait Types extends api.Types { self: SymbolTable => } bcs } - } + ) + tpe.typeSymbol :: baseTail } protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = { diff --git a/wip.scala b/wip.scala deleted file mode 100644 index ed9ba97640..0000000000 --- a/wip.scala +++ /dev/null @@ -1,2 +0,0 @@ -object Foo { } -case class Foo(x: Int) -- cgit v1.2.3 From f61cd63442089c79efd67fbe1b0a5c7de953d35a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 11 Oct 2012 15:05:10 -0700 Subject: Removed redundant containsUncheckable. Like the comment said: "TODO: at the very least, reduce duplication wrt checkCheckable" I went with the very most, eliminating it. 
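The `isCheckable`/`isUncheckable` helpers added to Checkable.scala in the diff below classify whether a pattern type still means anything after erasure. A rough illustration of the distinction they draw (not part of the patch):

    // uncheckable: String is eliminated by erasure, so this test draws an unchecked warning
    def g(x: Any) = x.isInstanceOf[List[String]]

    // checkable: the existential form relies only on the erased class
    def h(x: Any) = x.isInstanceOf[List[_]]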
--- .../scala/tools/nsc/typechecker/Checkable.scala | 12 ++++++ .../scala/tools/nsc/typechecker/Infer.scala | 45 ---------------------- .../tools/nsc/typechecker/PatternMatching.scala | 1 - .../scala/tools/nsc/typechecker/Typers.scala | 4 +- 4 files changed, 14 insertions(+), 48 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index 7e15cf91a7..508a55e33a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -154,6 +154,7 @@ trait Checkable { def neverSubClass = isNeverSubClass(Xsym, Psym) def neverMatches = result == StaticallyFalse def isUncheckable = result == Uncheckable + def isCheckable = !isUncheckable def uncheckableMessage = uncheckableType match { case NoType => "something" case tp @ RefinedType(_, _) => "refinement " + tp @@ -233,6 +234,17 @@ trait Checkable { trait InferCheckable { self: Inferencer => + def isUncheckable(P0: Type) = !isCheckable(P0) + + def isCheckable(P0: Type): Boolean = ( + uncheckedOk(P0) || (P0.widen match { + case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false + case RefinedType(_, decls) if !decls.isEmpty => false + case p => + new CheckabilityChecker(AnyClass.tpe, p) isCheckable + }) + ) + /** TODO: much better error positions. * Kind of stuck right now because they just pass us the one tree. * TODO: Eliminate inPattern, canRemedy, which have no place here. diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 16e864bd41..e18310cf84 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1363,51 +1363,6 @@ trait Infer extends Checkable { } } - /** Does `tp` contain any types that cannot be checked at run-time (i.e., after erasure, will isInstanceOf[erased(tp)] imply conceptualIsInstanceOf[tp]?) - * we should find a way to ask erasure: hey, is `tp` going to make it through you with all of its isInstanceOf resolving powers intact? - * TODO: at the very least, reduce duplication wrt checkCheckable - */ - def containsUnchecked(tp: Type): Boolean = { - def check(tp: Type, bound: List[Symbol]): Boolean = { - def isSurroundingTypeParam(sym: Symbol) = { - val e = context.scope.lookupEntry(sym.name) - ( (e ne null) - && (e.sym == sym ) - && !e.sym.isTypeParameterOrSkolem - && (e.owner == context.scope) - ) - } - def isLocalBinding(sym: Symbol) = ( - sym.isAbstractType && ( - (bound contains sym) - || (sym.name == tpnme.WILDCARD) - || isSurroundingTypeParam(sym) - ) - ) - tp.normalize match { - case SingleType(pre, _) => - check(pre, bound) - case TypeRef(_, ArrayClass, arg :: _) => - check(arg, bound) - case tp @ TypeRef(pre, sym, args) => - ( (sym.isAbstractType && !isLocalBinding(sym)) - || (args exists (x => !isLocalBinding(x.typeSymbol))) - || check(pre, bound) - ) - // case RefinedType(_, decls) if decls.nonEmpty => - // patternWarning(tp, "refinement ") - case RefinedType(parents, _) => - parents exists (p => check(p, bound)) - case ExistentialType(quantified, tp1) => - check(tp1, bound ::: quantified) - case _ => - false - } - } - check(tp, Nil) - } - - /** Type intersection of simple type tp1 with general type tp2. * The result eliminates some redundancies. 
*/ diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index b6a56515ca..21e2b7ceec 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -192,7 +192,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore => import typer.{typed, context, silent, reallyExists} - // import typer.infer.containsUnchecked // Why is it so difficult to say "here's a name and a context, give me any // matching symbol in scope" ? I am sure this code is wrong, but attempts to diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 85005c23c3..a262438ae0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3362,10 +3362,10 @@ trait Typers extends Modes with Adaptations with Tags { // if at least one of the types in an intersection is checkable, use the checkable ones // this avoids problems as in run/matchonseq.scala, where the expected type is `Coll with scala.collection.SeqLike` // Coll is an abstract type, but SeqLike of course is not - case RefinedType(parents, _) if (parents.length >= 2) && (parents.exists(tp => !infer.containsUnchecked(tp))) => + case RefinedType(ps, _) if ps.length > 1 && (ps exists infer.isCheckable) => None - case ptCheckable if infer.containsUnchecked(ptCheckable) => + case ptCheckable if infer isUncheckable ptCheckable => val classTagExtractor = resolveClassTag(pos, ptCheckable) if (classTagExtractor != EmptyTree && unapplyMember(classTagExtractor.tpe) != NoSymbol) -- cgit v1.2.3 From 18c6d58a5dc994ce19b0419c7c2dce460acecbdd Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Mon, 8 Oct 2012 03:45:26 +0200 Subject: SI-6388 Remove Application --- src/library/scala/Application.scala | 79 ------------------------------ test/disabled/run/t4146.scala | 7 +++ test/files/jvm/t1342/SI.scala | 2 +- test/files/jvm/t2163/t2163.java | 9 ++++ test/files/jvm/t2163/t2163.scala | 5 ++ test/files/jvm/t2570/Test.scala | 2 +- test/files/jvm/t3415/HelloWorld.scala | 2 +- test/files/jvm/t4283/AbstractFoo.java | 5 ++ test/files/jvm/t4283/ScalaBipp.scala | 5 ++ test/files/jvm/t4283/Test.scala | 4 ++ test/files/jvm/ticket2163/ticket2163.java | 9 ---- test/files/jvm/ticket2163/ticket2163.scala | 5 -- test/files/jvm/ticket4283/AbstractFoo.java | 5 -- test/files/jvm/ticket4283/ScalaBipp.scala | 5 -- test/files/jvm/ticket4283/Test.scala | 4 -- test/files/pos/chang/Test.scala | 2 +- test/files/pos/liftcode_polymorphic.scala | 2 +- test/files/pos/t1230/S.scala | 2 +- test/files/pos/t1231/S.scala | 2 +- test/files/pos/t715/meredith_1.scala | 58 +++++++++++----------- test/files/pos/t715/runner_2.scala | 2 +- test/files/run/collection-stacks.scala | 2 +- test/files/run/t4047.scala | 2 +- test/files/run/t4146.scala | 7 --- 24 files changed, 74 insertions(+), 153 deletions(-) delete mode 100644 src/library/scala/Application.scala create mode 100644 test/disabled/run/t4146.scala create mode 100644 test/files/jvm/t2163/t2163.java create mode 100644 test/files/jvm/t2163/t2163.scala create mode 100644 test/files/jvm/t4283/AbstractFoo.java create mode 100644 test/files/jvm/t4283/ScalaBipp.scala create mode 100644 test/files/jvm/t4283/Test.scala delete mode 100644 
test/files/jvm/ticket2163/ticket2163.java delete mode 100644 test/files/jvm/ticket2163/ticket2163.scala delete mode 100644 test/files/jvm/ticket4283/AbstractFoo.java delete mode 100644 test/files/jvm/ticket4283/ScalaBipp.scala delete mode 100644 test/files/jvm/ticket4283/Test.scala delete mode 100644 test/files/run/t4146.scala (limited to 'src') diff --git a/src/library/scala/Application.scala b/src/library/scala/Application.scala deleted file mode 100644 index 5b1098bd72..0000000000 --- a/src/library/scala/Application.scala +++ /dev/null @@ -1,79 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -import scala.compat.Platform.currentTime - -/** The `Application` trait can be used to quickly turn objects - * into executable programs, but is ''not recommended''. - * Here is an example: - * {{{ - * object Main extends Application { - * Console.println("Hello World!") - * } - * }}} - * Here, object `Main` inherits the `main` method of `Application`. - * The body of the `Main` object defines the main program. This technique - * does not work if the main program depends on command-line arguments - * (which are not accessible with the technique presented here). - * - * It is possible to time the execution of objects that inherit from class - * `Application` by setting the global `scala.time` - * property. Here is an example for benchmarking object `Main`: - * {{{ - * java -Dscala.time Main - * }}} - * In practice the `Application` trait has a number of serious pitfalls: - * - * - Threaded code that references the object will block until static - * initialization is complete. However, because the entire execution - * of an `object` extending `Application` takes place during - * static initialization, concurrent code will ''always'' deadlock if - * it must synchronize with the enclosing object. - * - As described above, there is no way to obtain the - * command-line arguments because all code in body of an `object` - * extending `Application` is run as part of the static initialization - * which occurs before `Application`'s `main` method - * even begins execution. - * - Static initializers are run only once during program execution, and - * JVM authors usually assume their execution to be relatively short. - * Therefore, certain JVM configurations may become confused, or simply - * fail to optimize or JIT the code in the body of an `object` extending - * `Application`. This can lead to a significant performance degradation. - * - * It is recommended to use the [[scala.App]] trait instead. - * {{{ - * object Main { - * def main(args: Array[String]) { - * //.. - * } - * } - * }}} - * - * @author Matthias Zenger - * @version 1.0, 10/09/2003 - */ -@deprecated("use App instead", "2.9.0") -trait Application { - - /** The time when the execution of this program started, - * in milliseconds since 1 January 1970 UTC. */ - val executionStart: Long = currentTime - - /** The default main method. 
- * - * @param args the arguments passed to the main method - */ - def main(args: Array[String]) { - if (util.Properties propIsSet "scala.time") { - val total = currentTime - executionStart - Console.println("[total " + total + "ms]") - } - } -} diff --git a/test/disabled/run/t4146.scala b/test/disabled/run/t4146.scala new file mode 100644 index 0000000000..a17de50ee1 --- /dev/null +++ b/test/disabled/run/t4146.scala @@ -0,0 +1,7 @@ +object bob extends App { + var name = "Bob" +} + +object Test extends App { + assert(bob.name == "Bob") +} diff --git a/test/files/jvm/t1342/SI.scala b/test/files/jvm/t1342/SI.scala index 8e3b753210..7c37d4bcd7 100644 --- a/test/files/jvm/t1342/SI.scala +++ b/test/files/jvm/t1342/SI.scala @@ -4,7 +4,7 @@ class SI extends JI { } } -object Test extends Application { +object Test extends App { val x: JI = new SI x.varArgsMethod("one", "two") } diff --git a/test/files/jvm/t2163/t2163.java b/test/files/jvm/t2163/t2163.java new file mode 100644 index 0000000000..83bd37d212 --- /dev/null +++ b/test/files/jvm/t2163/t2163.java @@ -0,0 +1,9 @@ +import java.util.*; + +public class t2163 { + public void test() { + List array = new ArrayList(); + T2163Scala foo = new T2163Scala(array); + foo.bar(array); + } +} diff --git a/test/files/jvm/t2163/t2163.scala b/test/files/jvm/t2163/t2163.scala new file mode 100644 index 0000000000..f73b520cbe --- /dev/null +++ b/test/files/jvm/t2163/t2163.scala @@ -0,0 +1,5 @@ +class T2163Scala[CC[X]](x: CC[Int]) { + def bar[DD[X]](meh: DD[Int]): CC[Int] = x +} + +object Test extends App {} diff --git a/test/files/jvm/t2570/Test.scala b/test/files/jvm/t2570/Test.scala index 7944aedae6..f1cba53546 100644 --- a/test/files/jvm/t2570/Test.scala +++ b/test/files/jvm/t2570/Test.scala @@ -1,3 +1,3 @@ class Test2 extends Test1[Test3[Test4]] class Test4 -object Test extends Application {} \ No newline at end of file +object Test extends App {} \ No newline at end of file diff --git a/test/files/jvm/t3415/HelloWorld.scala b/test/files/jvm/t3415/HelloWorld.scala index 53bf55e444..5ef012390e 100644 --- a/test/files/jvm/t3415/HelloWorld.scala +++ b/test/files/jvm/t3415/HelloWorld.scala @@ -1,4 +1,4 @@ -object Test extends Application { +object Test extends App { @Hello def foo() { } } diff --git a/test/files/jvm/t4283/AbstractFoo.java b/test/files/jvm/t4283/AbstractFoo.java new file mode 100644 index 0000000000..74f3827fe3 --- /dev/null +++ b/test/files/jvm/t4283/AbstractFoo.java @@ -0,0 +1,5 @@ +package test; + +/* package private */ class AbstractFoo { + public int t; +} diff --git a/test/files/jvm/t4283/ScalaBipp.scala b/test/files/jvm/t4283/ScalaBipp.scala new file mode 100644 index 0000000000..36dea9f4de --- /dev/null +++ b/test/files/jvm/t4283/ScalaBipp.scala @@ -0,0 +1,5 @@ +package test + +class ScalaBipp extends AbstractFoo { + def make: Option[ScalaBipp] = Option(this) +} diff --git a/test/files/jvm/t4283/Test.scala b/test/files/jvm/t4283/Test.scala new file mode 100644 index 0000000000..9bbfaab928 --- /dev/null +++ b/test/files/jvm/t4283/Test.scala @@ -0,0 +1,4 @@ + +object Test extends App { + val x = (new test.ScalaBipp).make.get.t // java.lang.IllegalAccessError: tried to access class test.AbstractFoo from class other.IllegalAccess$ +} diff --git a/test/files/jvm/ticket2163/ticket2163.java b/test/files/jvm/ticket2163/ticket2163.java deleted file mode 100644 index b6511d241c..0000000000 --- a/test/files/jvm/ticket2163/ticket2163.java +++ /dev/null @@ -1,9 +0,0 @@ -import java.util.*; - -public class ticket2163 { - public void test() { - List 
array = new ArrayList(); - Ticket2163Scala foo = new Ticket2163Scala(array); - foo.bar(array); - } -} diff --git a/test/files/jvm/ticket2163/ticket2163.scala b/test/files/jvm/ticket2163/ticket2163.scala deleted file mode 100644 index d30bfe251b..0000000000 --- a/test/files/jvm/ticket2163/ticket2163.scala +++ /dev/null @@ -1,5 +0,0 @@ -class Ticket2163Scala[CC[X]](x: CC[Int]) { - def bar[DD[X]](meh: DD[Int]): CC[Int] = x -} - -object Test extends Application {} diff --git a/test/files/jvm/ticket4283/AbstractFoo.java b/test/files/jvm/ticket4283/AbstractFoo.java deleted file mode 100644 index 74f3827fe3..0000000000 --- a/test/files/jvm/ticket4283/AbstractFoo.java +++ /dev/null @@ -1,5 +0,0 @@ -package test; - -/* package private */ class AbstractFoo { - public int t; -} diff --git a/test/files/jvm/ticket4283/ScalaBipp.scala b/test/files/jvm/ticket4283/ScalaBipp.scala deleted file mode 100644 index 36dea9f4de..0000000000 --- a/test/files/jvm/ticket4283/ScalaBipp.scala +++ /dev/null @@ -1,5 +0,0 @@ -package test - -class ScalaBipp extends AbstractFoo { - def make: Option[ScalaBipp] = Option(this) -} diff --git a/test/files/jvm/ticket4283/Test.scala b/test/files/jvm/ticket4283/Test.scala deleted file mode 100644 index 9bbfaab928..0000000000 --- a/test/files/jvm/ticket4283/Test.scala +++ /dev/null @@ -1,4 +0,0 @@ - -object Test extends App { - val x = (new test.ScalaBipp).make.get.t // java.lang.IllegalAccessError: tried to access class test.AbstractFoo from class other.IllegalAccess$ -} diff --git a/test/files/pos/chang/Test.scala b/test/files/pos/chang/Test.scala index 9bb745e377..f74c6355b5 100644 --- a/test/files/pos/chang/Test.scala +++ b/test/files/pos/chang/Test.scala @@ -1,3 +1,3 @@ -object Test extends Application { +object Test extends App { new com.netgents.hello.Outer[String] } diff --git a/test/files/pos/liftcode_polymorphic.scala b/test/files/pos/liftcode_polymorphic.scala index 8f537d278a..249f5a0569 100644 --- a/test/files/pos/liftcode_polymorphic.scala +++ b/test/files/pos/liftcode_polymorphic.scala @@ -1,6 +1,6 @@ import scala.reflect.runtime.universe._ -object Append extends Application { +object Append extends App { def append[A](l1: List[A], l2: List[A]):List[A] = l1 match { diff --git a/test/files/pos/t1230/S.scala b/test/files/pos/t1230/S.scala index f8a691b6de..530dd4b853 100644 --- a/test/files/pos/t1230/S.scala +++ b/test/files/pos/t1230/S.scala @@ -1 +1 @@ -object S extends Application { (new J).foo = 5 } +object S extends App { (new J).foo = 5 } diff --git a/test/files/pos/t1231/S.scala b/test/files/pos/t1231/S.scala index ee08866e04..f14aa2561b 100644 --- a/test/files/pos/t1231/S.scala +++ b/test/files/pos/t1231/S.scala @@ -1 +1 @@ -object S extends Application { println(J.j1) } +object S extends App { println(J.j1) } diff --git a/test/files/pos/t715/meredith_1.scala b/test/files/pos/t715/meredith_1.scala index 8261b9881a..c28afb4a9b 100644 --- a/test/files/pos/t715/meredith_1.scala +++ b/test/files/pos/t715/meredith_1.scala @@ -3,7 +3,7 @@ package com.sap.dspace.model.othello; import scala.xml._ trait XMLRenderer { - type T <: Any {def getClass() : java.lang.Class[_]} + type T <: Any {def getClass(): java.lang.Class[_]} val valueTypes = List( classOf[java.lang.Boolean], @@ -14,21 +14,21 @@ trait XMLRenderer { ) def value2XML( - value : Object, - field : java.lang.reflect.Field, - pojo : T - ) : Node = { + value: Object, + field: java.lang.reflect.Field, + pojo: T + ): Node = { value match { - case null => Text( "null" ) + case null => Text("null") case vUnmatched => 
if (value.isInstanceOf[java.lang.Boolean]) - Text( value.asInstanceOf[java.lang.Boolean].toString ) + Text(value.asInstanceOf[java.lang.Boolean].toString) else if (value.isInstanceOf[java.lang.Integer]) - Text( value.asInstanceOf[java.lang.Integer].toString ) + Text(value.asInstanceOf[java.lang.Integer].toString) else if (value.isInstanceOf[java.lang.Float]) - Text( value.asInstanceOf[java.lang.Float].toString ) + Text(value.asInstanceOf[java.lang.Float].toString) // else if (value.isInstanceOf[T]) - // pojo2XML( value.asInstanceOf[T] ) + // pojo2XML(value.asInstanceOf[T]) else @@ -42,16 +42,16 @@ trait XMLRenderer { } def field2XML( - field : java.lang.reflect.Field, - pojo : T - ) : Elem = { + field: java.lang.reflect.Field, + pojo: T + ): Elem = { - val accessible = field.isAccessible; - field.setAccessible( true ); + val accessible = field.isAccessible + field.setAccessible(true) // BUGBUG lgm need to disambiguate on type and possibly make // recursive call to pojo2XML - val fldValXML = value2XML( field.get( pojo ), field, pojo ); - field.setAccessible( accessible ); + val fldValXML = value2XML(field.get( pojo ), field, pojo) + field.setAccessible( accessible ) Elem( null, @@ -62,37 +62,37 @@ trait XMLRenderer { ) } - def pojo2XML( pojo : T ) : Elem = { + def pojo2XML(pojo: T): Elem = { val progeny = for (field <- pojo.getClass.getDeclaredFields) - yield field2XML( field, pojo ); + yield field2XML(field, pojo) Elem( null, pojo.getClass.getName, null, TopScope, - progeny.asInstanceOf[Array[scala.xml.Node]] : _* + progeny.asInstanceOf[Array[scala.xml.Node]]: _* ) } } -case class POJO2XMLRenderer( recurse : Boolean ) +case class POJO2XMLRenderer(recurse: Boolean) extends XMLRenderer { type T = java.io.Serializable override def value2XML( - value : Object, - field : java.lang.reflect.Field, - pojo : java.io.Serializable - ) : Node = { - if (recurse) super.value2XML( value, field, pojo ) - else Text( value + "" ) + value: Object, + field: java.lang.reflect.Field, + pojo: java.io.Serializable + ): Node = { + if (recurse) super.value2XML(value, field, pojo) + else Text(value + "") } } -object thePOJO2XMLRenderer extends POJO2XMLRenderer( true ) { +object thePOJO2XMLRenderer extends POJO2XMLRenderer(true) { } -object Test extends Application { +object Test extends App { println(com.sap.dspace.model.othello.thePOJO2XMLRenderer) } diff --git a/test/files/pos/t715/runner_2.scala b/test/files/pos/t715/runner_2.scala index 1e4f40d654..d54805629a 100644 --- a/test/files/pos/t715/runner_2.scala +++ b/test/files/pos/t715/runner_2.scala @@ -1,3 +1,3 @@ -object Test extends Application { +object Test extends App { println(com.sap.dspace.model.othello.thePOJO2XMLRenderer) } diff --git a/test/files/run/collection-stacks.scala b/test/files/run/collection-stacks.scala index fbee3f8594..be9fbbf1ae 100644 --- a/test/files/run/collection-stacks.scala +++ b/test/files/run/collection-stacks.scala @@ -1,6 +1,6 @@ import scala.collection.{ immutable, mutable } -object Test extends Application { +object Test extends App { def mutableStack[T](xs: T*): mutable.Stack[T] = { val s = new mutable.Stack[T] s.pushAll(xs) diff --git a/test/files/run/t4047.scala b/test/files/run/t4047.scala index cd42a8b4df..08989bd278 100644 --- a/test/files/run/t4047.scala +++ b/test/files/run/t4047.scala @@ -18,7 +18,7 @@ class D extends Bar[Unit]{ def foo = println("Unit: called D.foo") } -object Test extends Application { +object Test extends App { val a: Foo[Unit] = new A a.foo a.foo diff --git a/test/files/run/t4146.scala 
b/test/files/run/t4146.scala deleted file mode 100644 index 93ce22b519..0000000000 --- a/test/files/run/t4146.scala +++ /dev/null @@ -1,7 +0,0 @@ -object bob extends Application { - var name = "Bob" -} - -object Test extends App { - assert(bob.name == "Bob") -} -- cgit v1.2.3 From 63ba3d64a7002ef67f7f13083a18fe1042a3adba Mon Sep 17 00:00:00 2001 From: David Hall Date: Fri, 12 Oct 2012 11:45:46 -0700 Subject: Fixes SI-6521, overrides Range#head to be faster --- src/library/scala/collection/immutable/Range.scala | 1 + 1 file changed, 1 insertion(+) (limited to 'src') diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 92ea5d3f04..ab303dde56 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -78,6 +78,7 @@ extends scala.collection.AbstractSeq[Int] final val terminalElement = start + numRangeElements * step override def last = if (isEmpty) Nil.last else lastElement + override def head = if (isEmpty) Nil.head else start override def min[A1 >: Int](implicit ord: Ordering[A1]): Int = if (ord eq Ordering.Int) { -- cgit v1.2.3 From 02909f2be30db5c0f79f961cad17e2dc2f026ff4 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 12 Oct 2012 22:24:32 -0700 Subject: Warn about more misplaced expressions. An identifier being used in statement position is not likely what was meant when it is a non-lazy getter. --- .../scala/tools/nsc/typechecker/Typers.scala | 13 +--------- src/reflect/scala/reflect/internal/TreeInfo.scala | 28 ++++++++++++++++++++++ test/files/neg/unit-returns-value.check | 8 ++++++- test/files/neg/unit-returns-value.scala | 23 +++++++++++++++++- 4 files changed, 58 insertions(+), 14 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a262438ae0..546baba996 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2713,17 +2713,6 @@ trait Typers extends Modes with Adaptations with Tags { case Some(imp1: Import) => imp1 case _ => log("unhandled import: "+imp+" in "+unit); imp } - private def isWarnablePureExpression(tree: Tree) = tree match { - case EmptyTree | Literal(Constant(())) => false - case _ => - !tree.isErrorTyped && (treeInfo isExprSafeToInline tree) && { - val sym = tree.symbol - (sym == null) || !(sym.isModule || sym.isLazy) || { - debuglog("'Pure' but side-effecting expression in statement position: " + tree) - false - } - } - } def typedStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = { val inBlock = exprOwner == context.owner @@ -2760,7 +2749,7 @@ trait Typers extends Modes with Adaptations with Tags { ConstructorsOrderError(stat) } - if (isWarnablePureExpression(result)) context.warning(stat.pos, + if (treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos, "a pure expression does nothing in statement position; " + "you may be omitting necessary parentheses" ) diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index d4a22886dd..66326c90e9 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -104,6 +104,34 @@ abstract class TreeInfo { false } + /** As if the name of the method didn't give it away, + * this logic is designed around issuing helpful + * warnings and minimizing spurious ones. 
That means + * don't reuse it for important matters like inlining + * decisions. + */ + def isPureExprForWarningPurposes(tree: Tree) = tree match { + case EmptyTree | Literal(Constant(())) => false + case _ => + def isWarnableRefTree = tree match { + case t: RefTree => isExprSafeToInline(t.qualifier) && t.symbol != null && t.symbol.isAccessor + case _ => false + } + def isWarnableSymbol = { + val sym = tree.symbol + (sym == null) || !(sym.isModule || sym.isLazy) || { + debuglog("'Pure' but side-effecting expression in statement position: " + tree) + false + } + } + + ( !tree.isErrorTyped + && (isExprSafeToInline(tree) || isWarnableRefTree) + && isWarnableSymbol + ) + } + + @deprecated("Use isExprSafeToInline instead", "2.10.0") def isPureExpr(tree: Tree) = isExprSafeToInline(tree) diff --git a/test/files/neg/unit-returns-value.check b/test/files/neg/unit-returns-value.check index 363946f94d..f30a506ebe 100644 --- a/test/files/neg/unit-returns-value.check +++ b/test/files/neg/unit-returns-value.check @@ -4,6 +4,12 @@ unit-returns-value.scala:4: warning: a pure expression does nothing in statement unit-returns-value.scala:4: warning: enclosing method f has result type Unit: return value discarded if (b) return 5 ^ +unit-returns-value.scala:22: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + i1 // warn + ^ +unit-returns-value.scala:23: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + i2 // warn + ^ error: No warnings can be incurred under -Xfatal-warnings. -two warnings found +four warnings found one error found diff --git a/test/files/neg/unit-returns-value.scala b/test/files/neg/unit-returns-value.scala index ecc981f217..fc5a37069f 100644 --- a/test/files/neg/unit-returns-value.scala +++ b/test/files/neg/unit-returns-value.scala @@ -3,9 +3,30 @@ object Test { var b = false if (b) return 5 } - + // no warning def g { return println("hello") } } + +class UnusedValues { + var i1 = 2 + val i2 = 2 + lazy val i3 = 2 + object i4 { } + def i5 = 2 + final def i6 = 2 + + def x = { + i1 // warn + i2 // warn + i3 // no warn + i4 // no warn + i5 // no warn + i6 // could warn someday, if i6 returned 2.type instead of Int + + 5 + } +} + -- cgit v1.2.3 From 267650cf9c3b07e360a59f3c5b70b37fea9de453 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 1 Oct 2012 09:10:45 -0700 Subject: Fix for SI-6206, inconsistency with apply. The code part of this patch is 100% written by retronym, who apparently has higher standards than I do because I found it just lying around in his repository. I think I'll go pick through his trash and see if he's throwing away any perfectly good muffins. I made the test case more exciting so as to feel useful. 
--- .../scala/tools/nsc/typechecker/Typers.scala | 24 ++++++++------ test/files/run/t6206.check | 4 +++ test/files/run/t6206.scala | 37 ++++++++++++++++++++++ 3 files changed, 56 insertions(+), 9 deletions(-) create mode 100644 test/files/run/t6206.check create mode 100644 test/files/run/t6206.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 546baba996..a83787a43c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1044,15 +1044,21 @@ trait Typers extends Modes with Adaptations with Tags { def insertApply(): Tree = { assert(!inHKMode(mode), modeString(mode)) //@M - val qual = adaptToName(tree, nme.apply) match { - case id @ Ident(_) => - val pre = if (id.symbol.owner.isPackageClass) id.symbol.owner.thisType - else if (id.symbol.owner.isClass) - context.enclosingSubClassContext(id.symbol.owner).prefix - else NoPrefix - stabilize(id, pre, EXPRmode | QUALmode, WildcardType) - case sel @ Select(qualqual, _) => - stabilize(sel, qualqual.tpe, EXPRmode | QUALmode, WildcardType) + val adapted = adaptToName(tree, nme.apply) + def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType) + // TODO reconcile the overlap between Typers#stablize and TreeGen.stabilize + val qual = adapted match { + case This(_) => + gen.stabilize(adapted) + case Ident(_) => + val owner = adapted.symbol.owner + val pre = + if (owner.isPackageClass) owner.thisType + else if (owner.isClass) context.enclosingSubClassContext(owner).prefix + else NoPrefix + stabilize0(pre) + case Select(qualqual, _) => + stabilize0(qualqual.tpe) case other => other } diff --git a/test/files/run/t6206.check b/test/files/run/t6206.check new file mode 100644 index 0000000000..8064573667 --- /dev/null +++ b/test/files/run/t6206.check @@ -0,0 +1,4 @@ +outer +outer +inner +inner diff --git a/test/files/run/t6206.scala b/test/files/run/t6206.scala new file mode 100644 index 0000000000..07ff246d02 --- /dev/null +++ b/test/files/run/t6206.scala @@ -0,0 +1,37 @@ +class Outer { + def apply( position : Inner ) {} + class Inner + + this.apply(new Inner) + this (new Inner) // error, +} + + +class Outer1 { + + self => + + def apply( position : Inner ) : String = "outer" + + class Inner( ) { + + def apply(arg: Inner): String = "inner" + + def testMe = { + List( + self.apply( this ), // a) this works + self( this ), // b) this does not work! + this apply this, + this(this) + ) foreach println + } + } +} + +object Test { + def main(args: Array[String]): Unit = { + val o = new Outer1 + val i = new o.Inner + i.testMe + } +} -- cgit v1.2.3 From 4dd4bebadbd4985ea74772ac6a87cfa9ce1cbdd5 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 16 Oct 2012 07:56:56 -0700 Subject: Removed .disabled files. These things don't belong in trunk. They poison all my searches with bogus results. I'd say two years was about 1.999 years too long. 
--- .../collection/Sequentializable.scala.disabled | 10 -- .../immutable/GenIterable.scala.disabled | 37 ----- .../collection/immutable/GenMap.scala.disabled | 36 ----- .../collection/immutable/GenSeq.scala.disabled | 49 ------ .../collection/immutable/GenSet.scala.disabled | 43 ----- .../immutable/GenTraversable.scala.disabled | 41 ----- .../collection/mutable/GenIterable.scala.disabled | 37 ----- .../scala/collection/mutable/GenMap.scala.disabled | 40 ----- .../scala/collection/mutable/GenSeq.scala.disabled | 44 ----- .../scala/collection/mutable/GenSet.scala.disabled | 46 ------ .../mutable/GenTraversable.scala.disabled | 38 ----- .../immutable/ParNumericRange.scala.disabled | 128 --------------- src/library/scala/parallel/package.scala.disabled | 178 --------------------- src/swing/scala/swing/Font.scala.disabled | 70 -------- 14 files changed, 797 deletions(-) delete mode 100644 src/library/scala/collection/Sequentializable.scala.disabled delete mode 100644 src/library/scala/collection/immutable/GenIterable.scala.disabled delete mode 100644 src/library/scala/collection/immutable/GenMap.scala.disabled delete mode 100644 src/library/scala/collection/immutable/GenSeq.scala.disabled delete mode 100644 src/library/scala/collection/immutable/GenSet.scala.disabled delete mode 100644 src/library/scala/collection/immutable/GenTraversable.scala.disabled delete mode 100644 src/library/scala/collection/mutable/GenIterable.scala.disabled delete mode 100644 src/library/scala/collection/mutable/GenMap.scala.disabled delete mode 100644 src/library/scala/collection/mutable/GenSeq.scala.disabled delete mode 100644 src/library/scala/collection/mutable/GenSet.scala.disabled delete mode 100644 src/library/scala/collection/mutable/GenTraversable.scala.disabled delete mode 100644 src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled delete mode 100644 src/library/scala/parallel/package.scala.disabled delete mode 100644 src/swing/scala/swing/Font.scala.disabled (limited to 'src') diff --git a/src/library/scala/collection/Sequentializable.scala.disabled b/src/library/scala/collection/Sequentializable.scala.disabled deleted file mode 100644 index df457671a6..0000000000 --- a/src/library/scala/collection/Sequentializable.scala.disabled +++ /dev/null @@ -1,10 +0,0 @@ -package scala.collection - - - - -trait Sequentializable[+T, +Repr] { - - def seq: Repr - -} diff --git a/src/library/scala/collection/immutable/GenIterable.scala.disabled b/src/library/scala/collection/immutable/GenIterable.scala.disabled deleted file mode 100644 index 858abd27aa..0000000000 --- a/src/library/scala/collection/immutable/GenIterable.scala.disabled +++ /dev/null @@ -1,37 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.collection -package immutable - - -import generic._ -import mutable.Builder - - -/** A base trait for iterable collections that can be mutated. 
- * - * $possiblyparinfo - * - * $iterableInfo - */ -trait GenIterable[+A] extends GenTraversable[A] - with scala.collection.GenIterable[A] - with scala.collection.GenIterableLike[A, GenIterable[A]] -// with GenericTraversableTemplate[A, GenIterable] -{ - def seq: Iterable[A] - //override def companion: GenericCompanion[GenIterable] = GenIterable -} - - -// object GenIterable extends TraversableFactory[GenIterable] { -// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] -// def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder -// } - diff --git a/src/library/scala/collection/immutable/GenMap.scala.disabled b/src/library/scala/collection/immutable/GenMap.scala.disabled deleted file mode 100644 index eb7ef2951c..0000000000 --- a/src/library/scala/collection/immutable/GenMap.scala.disabled +++ /dev/null @@ -1,36 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.collection -package immutable - -import generic._ - - -/** A base trait for maps that can be mutated. - * $possiblyparinfo - * $mapNote - * $mapTags - * @since 1.0 - * @author Matthias Zenger - */ -trait GenMap[A, +B] -extends GenIterable[(A, B)] - with scala.collection.GenMap[A, B] - with scala.collection.GenMapLike[A, B, GenMap[A, B]] -{ - def seq: Map[A, B] -} - - -// object GenMap extends MapFactory[GenMap] { -// def empty[A, B]: Map[A, B] = Map.empty - -// /** $mapCanBuildFromInfo */ -// implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B] -// } diff --git a/src/library/scala/collection/immutable/GenSeq.scala.disabled b/src/library/scala/collection/immutable/GenSeq.scala.disabled deleted file mode 100644 index b8bc420ec3..0000000000 --- a/src/library/scala/collection/immutable/GenSeq.scala.disabled +++ /dev/null @@ -1,49 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.collection -package immutable - - -import generic._ -import mutable.Builder - - -/** A subtrait of `collection.GenSeq` which represents sequences - * that can be mutated. - * - * $possiblyparinfo - * - * $seqInfo - * - * The class adds an `update` method to `collection.Seq`. 
- * - * @define Coll `mutable.Seq` - * @define coll mutable sequence - */ -trait GenSeq[+A] extends GenIterable[A] - with scala.collection.GenSeq[A] - with scala.collection.GenSeqLike[A, GenSeq[A]] -// with GenericTraversableTemplate[A, GenSeq] -{ - def seq: Seq[A] - //override def companion: GenericCompanion[GenSeq] = GenSeq -} - - -// object GenSeq extends SeqFactory[GenSeq] { -// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] -// def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder -// } - - - - - diff --git a/src/library/scala/collection/immutable/GenSet.scala.disabled b/src/library/scala/collection/immutable/GenSet.scala.disabled deleted file mode 100644 index 828219580e..0000000000 --- a/src/library/scala/collection/immutable/GenSet.scala.disabled +++ /dev/null @@ -1,43 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.collection -package immutable - - -import generic._ -import mutable.Builder - - -/** A generic trait for mutable sets. - * - * $possiblyparinfo - * $setNote - * $setTags - * - * @since 1.0 - * @author Matthias Zenger - * @define Coll `mutable.Set` - * @define coll mutable set - */ -trait GenSet[A] extends GenIterable[A] - with scala.collection.GenSet[A] - with scala.collection.GenSetLike[A, GenSet[A]] -// with GenericSetTemplate[A, GenSet] -{ - //override def companion: GenericCompanion[GenSet] = GenSet - def seq: Set[A] -} - - -// object GenSet extends TraversableFactory[GenSet] { -// implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] -// def newBuilder[A] = Set.newBuilder -// } diff --git a/src/library/scala/collection/immutable/GenTraversable.scala.disabled b/src/library/scala/collection/immutable/GenTraversable.scala.disabled deleted file mode 100644 index 4a5cf12ebe..0000000000 --- a/src/library/scala/collection/immutable/GenTraversable.scala.disabled +++ /dev/null @@ -1,41 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.collection -package immutable - - -import generic._ -import mutable.Builder - - -/** A trait for traversable collections that can be mutated. 
- * - * $possiblyparinfo - * - * $traversableInfo - * @define mutability mutable - */ -trait GenTraversable[+A] extends scala.collection.GenTraversable[A] - with scala.collection.GenTraversableLike[A, GenTraversable[A]] -// with GenericTraversableTemplate[A, GenTraversable] - with Mutable -{ - def seq: Traversable[A] - //override def companion: GenericCompanion[GenTraversable] = GenTraversable -} - - -// object GenTraversable extends TraversableFactory[GenTraversable] { -// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenTraversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] -// def newBuilder[A]: Builder[A, GenTraversable[A]] = Traversable.newBuilder -// } - - diff --git a/src/library/scala/collection/mutable/GenIterable.scala.disabled b/src/library/scala/collection/mutable/GenIterable.scala.disabled deleted file mode 100644 index e09981bc9b..0000000000 --- a/src/library/scala/collection/mutable/GenIterable.scala.disabled +++ /dev/null @@ -1,37 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.collection -package mutable - - -import generic._ - - -/** A base trait for iterable collections that can be mutated. - * - * $possiblyparinfo - * - * $iterableInfo - */ -trait GenIterable[A] extends GenTraversable[A] - with scala.collection.GenIterable[A] - with scala.collection.GenIterableLike[A, GenIterable[A]] -// with GenericTraversableTemplate[A, GenIterable] -{ - def seq: Iterable[A] - //override def companion: GenericCompanion[GenIterable] = GenIterable -} - - -// object GenIterable extends TraversableFactory[GenIterable] { -// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] -// def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder -// } - - diff --git a/src/library/scala/collection/mutable/GenMap.scala.disabled b/src/library/scala/collection/mutable/GenMap.scala.disabled deleted file mode 100644 index eca63b43ce..0000000000 --- a/src/library/scala/collection/mutable/GenMap.scala.disabled +++ /dev/null @@ -1,40 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.collection -package mutable - - -import generic._ - - -/** A base trait for maps that can be mutated. 
- * $possiblyparinfo - * $mapNote - * $mapTags - * @since 1.0 - * @author Matthias Zenger - */ -trait GenMap[A, B] -extends GenIterable[(A, B)] - with scala.collection.GenMap[A, B] - with scala.collection.GenMapLike[A, B, GenMap[A, B]] -{ - def seq: Map[A, B] -} - - -// object GenMap extends MapFactory[GenMap] { -// def empty[A, B]: Map[A, B] = Map.empty - -// /** $mapCanBuildFromInfo */ -// implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B] -// } - diff --git a/src/library/scala/collection/mutable/GenSeq.scala.disabled b/src/library/scala/collection/mutable/GenSeq.scala.disabled deleted file mode 100644 index 53ec5acc34..0000000000 --- a/src/library/scala/collection/mutable/GenSeq.scala.disabled +++ /dev/null @@ -1,44 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.collection -package mutable - - -import generic._ - - -/** A subtrait of `collection.GenSeq` which represents sequences - * that can be mutated. - * - * $possiblyparinfo - * - * $seqInfo - * - * The class adds an `update` method to `collection.Seq`. - * - * @define Coll `mutable.Seq` - * @define coll mutable sequence - */ -trait GenSeq[A] extends GenIterable[A] - with scala.collection.GenSeq[A] - with scala.collection.GenSeqLike[A, GenSeq[A]] -// with GenericTraversableTemplate[A, GenSeq] -{ - //override def companion: GenericCompanion[GenSeq] = GenSeq - def seq: Seq[A] -} - - -// object GenSeq extends SeqFactory[GenSeq] { -// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] -// def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder -// } - diff --git a/src/library/scala/collection/mutable/GenSet.scala.disabled b/src/library/scala/collection/mutable/GenSet.scala.disabled deleted file mode 100644 index 9080abaf38..0000000000 --- a/src/library/scala/collection/mutable/GenSet.scala.disabled +++ /dev/null @@ -1,46 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.collection -package mutable - - - -import generic._ - - -/** A generic trait for mutable sets. 
- * - * $possiblyparinfo - * $setNote - * $setTags - * - * @since 1.0 - * @author Matthias Zenger - * @define Coll `mutable.Set` - * @define coll mutable set - */ -trait GenSet[A] extends GenIterable[A] - with Growable[A] - with scala.collection.GenSet[A] - with scala.collection.GenSetLike[A, GenSet[A]] -// with GenericSetTemplate[A, GenSet] -{ - //override def companion: GenericCompanion[GenSet] = GenSet - def seq: Set[A] -} - - -// object GenSet extends TraversableFactory[GenSet] { -// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSet[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] -// def newBuilder[A]: Builder[A, GenSet[A]] = Set.newBuilder -// } - - diff --git a/src/library/scala/collection/mutable/GenTraversable.scala.disabled b/src/library/scala/collection/mutable/GenTraversable.scala.disabled deleted file mode 100644 index e78e758c12..0000000000 --- a/src/library/scala/collection/mutable/GenTraversable.scala.disabled +++ /dev/null @@ -1,38 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.collection -package mutable - - -import generic._ - - -/** A trait for traversable collections that can be mutated. - * - * $possiblyparinfo - * - * $traversableInfo - * @define mutability mutable - */ -trait GenTraversable[A] extends scala.collection.GenTraversable[A] - with scala.collection.GenTraversableLike[A, GenTraversable[A]] -// with GenericTraversableTemplate[A, GenTraversable] - with Mutable -{ - def seq: Traversable[A] - //override def companion: GenericCompanion[GenTraversable] = GenTraversable -} - -// object GenTraversable extends TraversableFactory[GenTraversable] { -// implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] -// def newBuilder[A] = Traversable.newBuilder -// } - diff --git a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled b/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled deleted file mode 100644 index 04bc8b8d29..0000000000 --- a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled +++ /dev/null @@ -1,128 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.collection.parallel.immutable - - - -import scala.collection.immutable.NumericRange -import scala.collection.parallel.Combiner -import scala.collection.generic.CanCombineFrom -import scala.collection.parallel.ParIterableIterator - - - -/** Parallel ranges for numeric types. 
- * - * $paralleliterableinfo - * - * $sideeffects - * - * @param range the sequential range this parallel range was obtained from - * - * @author Aleksandar Prokopec - * @since 2.9 - * - * @define Coll `immutable.ParRange` - * @define coll immutable parallel range - */ -@SerialVersionUID(1L) -class ParNumericRange[T](val range: NumericRange[T])(implicit num: Integral[T]) -extends ParSeq[T] - with Serializable -{ -self => - - def seq = range - - @inline final def length = range.length - - @inline final def apply(idx: Int) = range.apply(idx); - - def parallelIterator = new ParNumericRangeIterator with SCPI - - type SCPI = SignalContextPassingIterator[ParNumericRangeIterator] - - class ParNumericRangeIterator(range: NumericRange[T] = self.range, num: Integral[T] = self.num) - extends ParIterator { - me: SignalContextPassingIterator[ParNumericRangeIterator] => - override def toString = "ParNumericRangeIterator(over: " + range + ")" - private var ind = 0 - private val len = range.length - - final def remaining = len - ind - - final def hasNext = ind < len - - final def next = if (hasNext) { - val r = range.apply(ind) - ind += 1 - r - } else Iterator.empty.next - - private def rangeleft: NumericRange[T] = range.drop(ind) - - def dup = new ParNumericRangeIterator(rangeleft) with SCPI - - def split = { - val rleft = rangeleft - val elemleft = rleft.length - if (elemleft < 2) Seq(new ParNumericRangeIterator(rleft) with SCPI) - else Seq( - new ParNumericRangeIterator(rleft.take(elemleft / 2)) with SCPI, - new ParNumericRangeIterator(rleft.drop(elemleft / 2)) with SCPI - ) - } - - def psplit(sizes: Int*) = { - var rleft = rangeleft - for (sz <- sizes) yield { - val fronttaken = rleft.take(sz) - rleft = rleft.drop(sz) - new ParNumericRangeIterator(fronttaken) with SCPI - } - } - - /* accessors */ - - override def foreach[U](f: T => U): Unit = { - rangeleft.foreach(f) - ind = len - } - - override def reduce[U >: T](op: (U, U) => U): U = { - val r = rangeleft.reduceLeft(op) - ind = len - r - } - - /* transformers */ - - override def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { - while (hasNext) { - cb += f(next) - } - cb - } - } - -} - - -object ParNumericRange { - def apply[T](start: T, end: T, step: T, inclusive: Boolean)(implicit num: Integral[T]) = new ParNumericRange[T]( - if (inclusive) NumericRange.inclusive(start, end, step)(num) - else NumericRange.apply(start, end, step)(num) - ) -} - - - - - diff --git a/src/library/scala/parallel/package.scala.disabled b/src/library/scala/parallel/package.scala.disabled deleted file mode 100644 index 45f5470d03..0000000000 --- a/src/library/scala/parallel/package.scala.disabled +++ /dev/null @@ -1,178 +0,0 @@ -package scala - - - -import scala.concurrent.forkjoin._ - - -/** This package object contains various parallel operations. - * - * @define invokingPar - * Invoking a parallel computation creates a future which will - * hold the result of the computation once it completes. Querying - * the result of a future before its parallel computation has completed - * will block the caller. For all practical concerns, the dependency - * chain obtained by querying results of unfinished futures can have - * arbitrary lengths. However, care must be taken not to create a - * circular dependency, as this will result in a deadlock. - * - * Additionally, if the parallel computation performs a blocking call - * (e.g. 
an I/O operation or waiting for a lock) other than waiting for a future, - * it should do so by invoking the `block` method. This is another - * form of waiting that could potentially create a circular dependency, - * an the user should take care not to do this. - * - * Users should be aware that invoking a parallel computation has a - * certain overhead. Parallel computations should not be invoked for - * small computations, as this can lead to bad performance. A rule of the - * thumb is having parallel computations equivalent to a loop - * with 50000 arithmetic operations (at least). If a parallel computation - * is invoked within another parallel computation, then it should be - * computationally equivalent to a loop with 10000 arithmetic operations. - */ -package object parallel { - - private[scala] val forkjoinpool = new ForkJoinPool() - - private class Task[T](body: =>T) extends RecursiveTask[T] with Future[T] { - def compute = body - def apply() = join() - } - - private final def newTask[T](body: =>T) = new Task[T](body) - - private final def executeTask[T](task: RecursiveTask[T]) { - if (Thread.currentThread().isInstanceOf[ForkJoinWorkerThread]) task.fork - else forkjoinpool.execute(task) - } - - /* public methods */ - - /** Performs a call which can potentially block execution. - * - * Example: - * {{{ - * val lock = new ReentrantLock - * - * // ... do something ... - * - * blocking { - * if (!lock.hasLock) lock.lock() - * } - * }}} - * - * '''Note:''' calling methods that wait arbitrary amounts of time - * (e.g. for I/O operations or locks) may severely decrease performance - * or even result in deadlocks. This does not include waiting for - * results of futures. - */ - def blocking[T](body: =>T): T = { - if (Thread.currentThread().isInstanceOf[ForkJoinWorkerThread]) { - val blocker = new ForkJoinPool.ManagedBlocker { - @volatile var done = false - @volatile var result: Any = _ - def block() = { - result = body - done = true - true - } - def isReleasable() = done - } - ForkJoinPool.managedBlock(blocker, true) - blocker.result.asInstanceOf[T] - } else body - } - - /** Starts a parallel computation and returns a future. - * - * $invokingPar - * - * @tparam T the type of the result of the parallel computation - * @param body the computation to be invoked in parallel - * @return a future with the result - */ - def par[T](body: =>T): Future[T] = { - val task = newTask(body) - executeTask(task) - task - } - - /** Starts 2 parallel computations and returns a future. - * - * $invokingPar - * - * @tparam T1 the type of the result of 1st the parallel computation - * @tparam T2 the type of the result of 2nd the parallel computation - * @param b1 the 1st computation to be invoked in parallel - * @param b2 the 2nd computation to be invoked in parallel - * @return a tuple of futures corresponding to parallel computations - */ - def par[T1, T2](b1: =>T1, b2: =>T2): (Future[T1], Future[T2]) = { - val t1 = newTask(b1) - executeTask(t1) - val t2 = newTask(b2) - executeTask(t2) - (t1, t2) - } - - /** Starts 3 parallel computations and returns a future. 
- * - * $invokingPar - * - * @tparam T1 the type of the result of 1st the parallel computation - * @tparam T2 the type of the result of 2nd the parallel computation - * @tparam T3 the type of the result of 3rd the parallel computation - * @param b1 the 1st computation to be invoked in parallel - * @param b2 the 2nd computation to be invoked in parallel - * @param b3 the 3rd computation to be invoked in parallel - * @return a tuple of futures corresponding to parallel computations - */ - def par[T1, T2, T3](b1: =>T1, b2: =>T2, b3: =>T3): (Future[T1], Future[T2], Future[T3]) = { - val t1 = newTask(b1) - executeTask(t1) - val t2 = newTask(b2) - executeTask(t2) - val t3 = newTask(b3) - executeTask(t3) - (t1, t2, t3) - } - - /** Starts 4 parallel computations and returns a future. - * - * $invokingPar - * - * @tparam T1 the type of the result of 1st the parallel computation - * @tparam T2 the type of the result of 2nd the parallel computation - * @tparam T3 the type of the result of 3rd the parallel computation - * @tparam T4 the type of the result of 4th the parallel computation - * @param b1 the 1st computation to be invoked in parallel - * @param b2 the 2nd computation to be invoked in parallel - * @param b3 the 3rd computation to be invoked in parallel - * @param b4 the 4th computation to be invoked in parallel - * @return a tuple of futures corresponding to parallel computations - */ - def par[T1, T2, T3, T4](b1: =>T1, b2: =>T2, b3: =>T3, b4: =>T4): (Future[T1], Future[T2], Future[T3], Future[T4]) = { - val t1 = newTask(b1) - executeTask(t1) - val t2 = newTask(b2) - executeTask(t2) - val t3 = newTask(b3) - executeTask(t3) - val t4 = newTask(b4) - executeTask(t4) - (t1, t2, t3, t4) - } - -} - - - - - - - - - - - - diff --git a/src/swing/scala/swing/Font.scala.disabled b/src/swing/scala/swing/Font.scala.disabled deleted file mode 100644 index 9e21eb859c..0000000000 --- a/src/swing/scala/swing/Font.scala.disabled +++ /dev/null @@ -1,70 +0,0 @@ -package scala.swing - -/*object Font { - def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile) - def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream) - def decode(str: String) = java.awt.Font.decode(str) - - /* TODO: finish implementation - /** - * See [java.awt.Font.getFont]. 
- */ - def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) = - java.awt.Font.getFont(ImmutableMapWrapper(attributes)) - - import java.{util => ju} - private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(t : ClassTag[A]) extends ju.AbstractMap[A, B] { - self => - override def size = underlying.size - - override def put(k : A, v : B) = - throw new UnsupportedOperationException("This is a wrapper that does not support mutation") - override def remove(k : AnyRef) = - throw new UnsupportedOperationException("This is a wrapper that does not support mutation") - - override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] { - def size = self.size - - def iterator = new ju.Iterator[ju.Map.Entry[A, B]] { - val ui = underlying.iterator - var prev : Option[A] = None - - def hasNext = ui.hasNext - - def next = { - val (k, v) = ui.next - prev = Some(k) - new ju.Map.Entry[A, B] { - def getKey = k - def getValue = v - def setValue(v1 : B) = self.put(k, v1) - override def equals(other : Any) = other match { - case e : ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue - case _ => false - } - } - } - - def remove = prev match { - case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v - case _ => throw new IllegalStateException("next must be called at least once before remove") - } - } - } - } - */ - - /** - * See [java.awt.Font.getFont]. - */ - def get(nm: String) = java.awt.Font.getFont(nm) - /** - * See [java.awt.Font.getFont]. - */ - def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font) - - def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height) - def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height) - def Point(x: Int, y: Int) = new Point(x, y) - def Dimension(x: Int, y: Int) = new Dimension(x, y) -}*/ \ No newline at end of file -- cgit v1.2.3 From cbad218dba47d49a39897b86d467c384538fdd53 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 21 Oct 2012 23:34:35 +0200 Subject: SI-2968 Fix brace healing for `^case (class|object) {` The scanner coalesces the pair of tokens into CASEOBJECT or CASECLASS, but fails to set `offset` back to the start of `case`. Brace healing is then unable to correctly guess the location of the missing brace. This commit resets `offset` and `lastOffset`, as though caseobject were a single keyword. Only the former was neccessary to fix this bug; I haven't found a test that shows the need for the latter. 
--- .../scala/tools/nsc/ast/parser/Scanners.scala | 6 +++++ test/files/neg/t2968.check | 10 +++++++++ test/files/neg/t2968.scala | 26 ++++++++++++++++++++++ test/files/neg/t2968b.check | 4 ++++ test/files/neg/t2968b.scala | 7 ++++++ 5 files changed, 53 insertions(+) create mode 100644 test/files/neg/t2968.check create mode 100644 test/files/neg/t2968.scala create mode 100644 test/files/neg/t2968b.check create mode 100644 test/files/neg/t2968b.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 5902209898..5b828ded79 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -283,10 +283,16 @@ trait Scanners extends ScannersCommon { prev copyFrom this val nextLastOffset = charOffset - 1 fetchToken() + def resetOffset() { + offset = prev.offset + lastOffset = prev.lastOffset + } if (token == CLASS) { token = CASECLASS + resetOffset() } else if (token == OBJECT) { token = CASEOBJECT + resetOffset() } else { lastOffset = nextLastOffset next copyFrom this diff --git a/test/files/neg/t2968.check b/test/files/neg/t2968.check new file mode 100644 index 0000000000..5d2387f98c --- /dev/null +++ b/test/files/neg/t2968.check @@ -0,0 +1,10 @@ +t2968.scala:8: error: Missing closing brace `}' assumed here +} // missing brace +^ +t2968.scala:17: error: Missing closing brace `}' assumed here +} // missing brace +^ +t2968.scala:26: error: Missing closing brace `}' assumed here +} // missing brace +^ +three errors found diff --git a/test/files/neg/t2968.scala b/test/files/neg/t2968.scala new file mode 100644 index 0000000000..41c3a798a5 --- /dev/null +++ b/test/files/neg/t2968.scala @@ -0,0 +1,26 @@ +object t1 { + case object Const { + } + + class Var + { + +} // missing brace + +object t2 { + case class Const() { + } + + class Var + { + +} // missing brace + +object t3 { + final case class Const() { + } + + class Var + { + +} // missing brace diff --git a/test/files/neg/t2968b.check b/test/files/neg/t2968b.check new file mode 100644 index 0000000000..36d25a2d12 --- /dev/null +++ b/test/files/neg/t2968b.check @@ -0,0 +1,4 @@ +t2968b.scala:7: error: '}' expected but eof found. +// missing brace + ^ +one error found diff --git a/test/files/neg/t2968b.scala b/test/files/neg/t2968b.scala new file mode 100644 index 0000000000..422b618aba --- /dev/null +++ b/test/files/neg/t2968b.scala @@ -0,0 +1,7 @@ +case class Const() +{ +} + +class Var +{ +// missing brace -- cgit v1.2.3 From 2dc5841638f8a48bace0084ac25baaef50e865f9 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 18 Oct 2012 13:00:19 -0700 Subject: Introduces some structure for name lookups. Too many unrelated things are intermingled. If you want to know what symbol corresponds to a particular name in a particular context, you shouldn't have to involve either a Tree or a Typer to find that out. I toiled line by line over typedIdent until it had shed its redundancies and freed itself from the bowels of typed1. The mechanism of name lookup is such that adding a qualifier when the occasion calls for it is inseperable without a lot more effort. So to preserve a sane interface I devised this small partitioning of outcomes. 
case class LookupSucceeded(qualifier, symbol) case class LookupAmbiguous(msg) case class LookupInaccessible(symbol, msg) case class LookupNotFound(msg) --- .../scala/tools/nsc/typechecker/Contexts.scala | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 211da044e6..0abd8c188e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -35,6 +35,22 @@ trait Contexts { self: Analyzer => val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil } + sealed abstract class NameLookup { def symbol: Symbol } + case class LookupSucceeded(qualifier: Tree, symbol: Symbol) extends NameLookup + case class LookupAmbiguous(msg: String) extends NameLookup { def symbol = NoSymbol } + case class LookupInaccessible(symbol: Symbol, msg: String) extends NameLookup + case class LookupNotFound() extends NameLookup { def symbol = NoSymbol } + // case object LookupNotFound extends NameLookup { def symbol = NoSymbol } + // + // !!! Bug - case object LookupNotFound does not match - we get an + // "impossible" MatchError. case class LookupNotFound() matches in + // the same spot. + + def ambiguousImports(imp1: ImportInfo, imp2: ImportInfo) = + LookupAmbiguous(s"it is imported twice in the same scope by\n$imp1\nand $imp2") + def ambiguousDefnAndImport(owner: Symbol, imp: ImportInfo) = + LookupAmbiguous(s"it is both defined in $owner and imported subsequently by \n$imp") + private val startContext = { NoContext.make( Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType, @@ -480,8 +496,7 @@ trait Contexts { self: Analyzer => c } - /** Is `sym` accessible as a member of tree `site` with type - * `pre` in current context? + /** Is `sym` accessible as a member of `pre` in current context? */ def isAccessible(sym: Symbol, pre: Type, superAccess: Boolean = false): Boolean = { lastAccessCheckDetails = "" -- cgit v1.2.3 From d477a0f7f90d224f8162abd9847ecf71482e179a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 23 Oct 2012 14:37:05 -0700 Subject: Adds the core symbol lookup logic to Typers. This unifies several disparate/ad-hoc mechanisms for excluding symbols from eligibility in a single predicate. This is the method on Context: def lookupSymbol(name: Name, qualifies: Symbol => Boolean) The logic is largely that which was buried in typedIdent, except that I fixed SI-3160 so that import foo._ does not inject foo's private members into your namespace. 
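
The intended call pattern is to pass the exclusion predicate in and dispatch on
the result; roughly (the predicate below is illustrative only -- real callers in
Typers supply their own notion of "qualifies"):

    context.lookupSymbol(name, sym => sym.exists) match {
      case LookupSucceeded(qual, sym)   => // typecheck a reference to sym, prefixed by qual if non-empty
      case LookupAmbiguous(msg)         => // report the ambiguity
      case LookupInaccessible(sym, msg) => // report the access violation
      case LookupNotFound()             => // report that the name is not in scope
    }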
--- .../scala/tools/nsc/typechecker/Contexts.scala | 238 ++++++++++++++++++++- 1 file changed, 235 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 0abd8c188e..ad79468b0c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -646,7 +646,7 @@ trait Contexts { self: Analyzer => case ImportSelector(from, _, to, _) :: sels1 => var impls = collect(sels1) filter (info => info.name != from) if (to != nme.WILDCARD) { - for (sym <- imp.importedSymbol(to).alternatives) + for (sym <- importedAccessibleSymbol(imp, to).alternatives) if (isQualifyingImplicit(to, sym, pre, imported = true)) impls = new ImplicitInfo(to, pre, sym) :: impls } @@ -692,6 +692,238 @@ trait Contexts { self: Analyzer => implicitsCache } + /** It's possible that seemingly conflicting identifiers are + * identifiably the same after type normalization. In such cases, + * allow compilation to proceed. A typical example is: + * package object foo { type InputStream = java.io.InputStream } + * import foo._, java.io._ + */ + def isAmbiguousImport(imp1: ImportInfo, imp2: ImportInfo, name: Name): Boolean = { + // The imported symbols from each import. + def imp1Symbol = importedAccessibleSymbol(imp1, name) + def imp2Symbol = importedAccessibleSymbol(imp2, name) + // The types of the qualifiers from which the ambiguous imports come. + // If the ambiguous name is a value, these must be the same. + def t1 = imp1.qual.tpe + def t2 = imp2.qual.tpe + // The types of the ambiguous symbols, seen as members of their qualifiers. + // If the ambiguous name is a monomorphic type, we can relax this far. + def mt1 = t1 memberType imp1Symbol + def mt2 = t2 memberType imp2Symbol + + def characterize = List( + s"types: $t1 =:= $t2 ${t1 =:= t2} members: ${mt1 =:= mt2}", + s"member type 1: $mt1", + s"member type 2: $mt2" + ).mkString("\n ") + + imp1Symbol.exists && imp2Symbol.exists && ( + // The symbol names are checked rather than the symbols themselves because + // each time an overloaded member is looked up it receives a new symbol. + // So foo.member("x") != foo.member("x") if x is overloaded. This seems + // likely to be the cause of other bugs too... + if (t1 =:= t2 && imp1Symbol.name == imp2Symbol.name) { + log(s"Suppressing ambiguous import: $t1 =:= $t2 && $imp1Symbol == $imp2Symbol") + false + } + // Monomorphism restriction on types is in part because type aliases could have the + // same target type but attach different variance to the parameters. Maybe it can be + // relaxed, but doesn't seem worth it at present. + else if (mt1 =:= mt2 && name.isTypeName && imp1Symbol.isMonomorphicType && imp2Symbol.isMonomorphicType) { + log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $imp1Symbol and $imp2Symbol are equivalent") + false + } + else { + log(s"Import is genuinely ambiguous:\n " + characterize) + true + } + ) + } + + def importedAccessibleSymbol(imp: ImportInfo, name: Name) = { + imp importedSymbol name filter (s => isAccessible(s, imp.qual.tpe, superAccess = false)) + } + + /** Is `sym` defined in package object of package `pkg`? + * Since sym may be defined in some parent of the package object, + * we cannot inspect its owner only; we have to go through the + * info of the package object. However to avoid cycles we'll check + * what other ways we can before pushing that way. 
+ */ + def isInPackageObject(sym: Symbol, pkg: Symbol) = { + val pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg + def matchesInfo = ( + pkg.isInitialized && { + // need to be careful here to not get a cyclic reference during bootstrap + val module = pkg.info member nme.PACKAGEkw + module.isInitialized && (module.info.member(sym.name).alternatives contains sym) + } + ) + def isInPkgObj(sym: Symbol) = ( + !sym.isPackage + && !sym.owner.isPackageClass + && (sym.owner ne NoSymbol) + && (sym.owner.owner == pkgClass || matchesInfo) + ) + + pkgClass.isPackageClass && ( + if (sym.isOverloaded) sym.alternatives forall isInPkgObj + else isInPkgObj(sym) + ) + } + + /** Find the symbol of a simple name starting from this context. + * All names are filtered through the "qualifies" predicate, + * the search continuing as long as no qualifying name is found. + */ + def lookupSymbol(name: Name, qualifies: Symbol => Boolean): NameLookup = { + var lookupError: NameLookup = null // set to non-null if a definite error is encountered + var inaccessible: NameLookup = null // records inaccessible symbol for error reporting in case none is found + var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope + var defSym: Symbol = NoSymbol // the directly found symbol + var pre: Type = NoPrefix // the prefix type of defSym, if a class member + var cx: Context = this + var needsQualifier = false // working around package object overloading bug + + def defEntrySymbol = if (defEntry eq null) NoSymbol else defEntry.sym + def localScopeDepth = if (defEntry eq null) 0 else cx.scope.nestingLevel - defEntry.owner.nestingLevel + + def finish(qual: Tree, sym: Symbol): NameLookup = ( + if (lookupError ne null) lookupError + else sym match { + case NoSymbol if inaccessible ne null => inaccessible + case NoSymbol => LookupNotFound() + case _ => LookupSucceeded(qual, sym) + } + ) + def isPackageOwnedInDifferentUnit(s: Symbol) = ( + s.isDefinedInPackage && ( + !currentRun.compiles(s) + || unit.exists && s.sourceFile != unit.source.file + ) + ) + def requiresQualifier(s: Symbol) = needsQualifier || ( + s.owner.isClass + && !s.owner.isPackageClass + && !s.isTypeParameterOrSkolem + ) + def lookupInPrefix(name: Name) = pre member name filter qualifies + def accessibleInPrefix(s: Symbol) = isAccessible(s, pre, superAccess = false) + + def correctForPackageObject(sym: Symbol): Symbol = { + if (sym.isTerm && isInPackageObject(sym, pre.typeSymbol)) { + val sym1 = lookupInPrefix(sym.name) + if ((sym1 eq NoSymbol) || (sym eq sym1)) sym else { + needsQualifier = true + log(s""" + | !!! Overloaded package object member resolved incorrectly. + | prefix: $pre + | Discarded: ${sym.defString} + | Using: ${sym1.defString} + """.stripMargin) + sym1 + } + } + else sym + } + + def searchPrefix = { + cx = cx.enclClass + val found0 = lookupInPrefix(name) + val found1 = found0 filter accessibleInPrefix + if (found0.exists && !found1.exists && inaccessible == null) + inaccessible = LookupInaccessible(found0, analyzer.lastAccessCheckDetails) + + found1 + } + // cx.scope eq null arises during FixInvalidSyms in Duplicators + while (defSym == NoSymbol && (cx ne NoContext) && (cx.scope ne null)) { + pre = cx.enclClass.prefix + // !!! FIXME. This call to lookupEntry is at the root of all the + // bad behavior with overloading in package objects. lookupEntry + // just takes the first symbol it finds in scope, ignoring the rest. + // When a selection on a package object arrives here, the first + // overload is always chosen. 
"correctForPackageObject" exists to + // undo that decision. Obviously it would be better not to do it in + // the first place; however other things seem to be tied to obtaining + // that ScopeEntry, specifically calculating the nesting depth. + defEntry = cx.scope lookupEntry name + defSym = defEntrySymbol filter qualifies map correctForPackageObject orElse searchPrefix + if (!defSym.exists) + cx = cx.outer + } + + val symbolDepth = cx.depth - localScopeDepth + var impSym: Symbol = NoSymbol + var imports = Context.this.imports // impSym != NoSymbol => it is imported from imports.head + def imp1 = imports.head + + while (!qualifies(impSym) && imports.nonEmpty && imp1.depth > symbolDepth) { + impSym = importedAccessibleSymbol(imp1, name) + if (!impSym.exists) + imports = imports.tail + } + if (defSym.exists && impSym.exists) { + // imported symbols take precedence over package-owned symbols in different compilation units. + if (isPackageOwnedInDifferentUnit(defSym)) + defSym = NoSymbol + // Defined symbols take precedence over erroneous imports. + else if (impSym.isError || impSym.name == nme.CONSTRUCTOR) + impSym = NoSymbol + // Otherwise they are irreconcilably ambiguous + else + return ambiguousDefnAndImport(defSym.owner, imp1) + } + + // At this point only one or the other of defSym and impSym might be set. + if (defSym.exists) { + if (requiresQualifier(defSym)) + finish(gen.mkAttributedQualifier(pre), defSym) + else + finish(EmptyTree, defSym) + } + else if (impSym.exists) { + // Imports against which we will test impSym for any ambiguities + var importsTail = imports.tail + val imp1Explicit = imp1 isExplicitImport name + def imp2 = importsTail.head + def sameDepth = imp1.depth == imp2.depth + def isDone = importsTail.isEmpty || imp1Explicit && !sameDepth + + while (lookupError == null && !isDone) { + val other = importedAccessibleSymbol(imp2, name) + // Ambiguity check between imports. + // The same name imported again is potentially ambiguous if the name is: + // - after explicit import, explicitly imported again at the same or lower depth + // - after explicit import, wildcard imported at lower depth + // - after wildcard import, wildcard imported at the same depth + // Under all such conditions isAmbiguousImport is called, which will + // examine the imports in case they are importing the same thing; if that + // can't be established conclusively, an error is issued. + if (qualifies(other)) { + val imp2Explicit = imp2 isExplicitImport name + val needsCheck = ( + if (sameDepth) imp1Explicit == imp2Explicit + else imp1Explicit || imp2Explicit + ) + log(s"Import ambiguity: imp1=$imp1, imp2=$imp2, sameDepth=$sameDepth, needsCheck=$needsCheck") + if (needsCheck && isAmbiguousImport(imp1, imp2, name)) + lookupError = ambiguousImports(imp1, imp2) + else if (imp2Explicit) { + // if we weren't ambiguous and imp2 is explicit, imp2 replaces imp1 + // as the current winner. + impSym = other + imports = importsTail + } + } + importsTail = importsTail.tail + } + // optimization: don't write out package prefixes + finish(resetPos(imp1.qual.duplicate), impSym) + } + else finish(EmptyTree, NoSymbol) + } + /** * Find a symbol in this context or one of its outers. 
* @@ -719,8 +951,8 @@ trait Contexts { self: Analyzer => /** The prefix expression */ def qual: Tree = tree.symbol.info match { case ImportType(expr) => expr - case ErrorType => tree setType NoType // fix for #2870 - case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug + case ErrorType => tree setType NoType // fix for #2870 + case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug } /** Is name imported explicitly, not via wildcard? */ -- cgit v1.2.3 From 56b23776f14499e96078be403a423f23ba50dbbb Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 23 Oct 2012 14:42:36 -0700 Subject: Made SilentResult more monadic. Given that it's just a reimplementation of Option, we may as well not also reimplement methods like map and getOrElse at every call site. --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 2c3ff0bfa4..d0c4d6d65f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -68,7 +68,20 @@ trait Typers extends Modes with Adaptations with Tags { } */ - sealed abstract class SilentResult[+T] + sealed abstract class SilentResult[+T] { + @inline final def map[U](f: T => U): SilentResult[U] = this match { + case SilentResultValue(value) => SilentResultValue(f(value)) + case x: SilentTypeError => x + } + @inline final def filter(p: T => Boolean): SilentResult[T] = this match { + case SilentResultValue(value) if !p(value) => SilentTypeError(TypeErrorWrapper(new TypeError(NoPosition, "!p"))) + case _ => this + } + @inline final def orElse[T1 >: T](f: AbsTypeError => T1): T1 = this match { + case SilentResultValue(value) => value + case SilentTypeError(err) => f(err) + } + } case class SilentTypeError(err: AbsTypeError) extends SilentResult[Nothing] { } case class SilentResultValue[+T](value: T) extends SilentResult[T] { } -- cgit v1.2.3 From cd6ad8929d49dc77ade17bd1a4cab9a7c7494366 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 23 Oct 2012 14:44:08 -0700 Subject: Added some symbol lookup convenience methods. Greasing the wheels for Typer's well-being. 
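
Together with the monadic SilentResult from the previous commit, these helpers
let the follow-up Typer cleanup replace explicit matches with combinator chains.
Schematically (a sketch, not any particular call site):

    // before
    silent(op) match {
      case SilentResultValue(v) => v
      case SilentTypeError(err) => fallback(err)
    }

    // after
    silent(op) orElse (err => fallback(err))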
--- .../tools/nsc/typechecker/ContextErrors.scala | 13 +++++++----- .../scala/tools/nsc/typechecker/Modes.scala | 1 + .../scala/tools/nsc/typechecker/Typers.scala | 23 ++++++++++++++++++---- 3 files changed, 28 insertions(+), 9 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 7c02f094ed..b6cb3626ec 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -802,7 +802,10 @@ trait ContextErrors { ) } - def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String) = { + def AccessError(tree: Tree, sym: Symbol, ctx: Context, explanation: String): AbsTypeError = + AccessError(tree, sym, ctx.enclClass.owner.thisType, ctx.enclClass.owner, explanation) + + def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String): AbsTypeError = { def errMsg = { val location = if (sym.isClassConstructor) owner0 else pre.widen.directObjectString @@ -835,7 +838,7 @@ trait ContextErrors { // side-effect on the tree, break the overloaded type cycle in infer private def setErrorOnLastTry(lastTry: Boolean, tree: Tree) = if (lastTry) setError(tree) - + def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type, lastTry: Boolean) = { issueNormalTypeError(tree, applyErrorMsg(tree, " cannot be applied to ", argtpes, pt)) @@ -848,7 +851,7 @@ trait ContextErrors { def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, argtpes: List[Type], pt: Type, lastTry: Boolean) = { - + if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous) { val msg0 = "argument types " + argtpes.mkString("(", ",", ")") + @@ -858,7 +861,7 @@ trait ContextErrors { setErrorOnLastTry(lastTry, tree) } else setError(tree) // do not even try further attempts because they should all fail // even if this is not the last attempt (because of the SO's possibility on the horizon) - + } def NoBestExprAlternativeError(tree: Tree, pt: Type, lastTry: Boolean) = { @@ -1191,7 +1194,7 @@ trait ContextErrors { setError(arg) } else arg } - + def WarnAfterNonSilentRecursiveInference(param: Symbol, arg: Tree)(implicit context: Context) = { val note = "type-checking the invocation of "+ param.owner +" checks if the named argument expression '"+ param.name + " = ...' is a valid assignment\n"+ "in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for "+ param.name +"." 
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala index d942d080cb..05c4d594ad 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Modes.scala @@ -109,6 +109,7 @@ trait Modes { final def inFunMode(mode: Int) = (mode & FUNmode) != 0 final def inPolyMode(mode: Int) = (mode & POLYmode) != 0 final def inPatternMode(mode: Int) = (mode & PATTERNmode) != 0 + final def inPatternNotFunMode(mode: Int) = inPatternMode(mode) && !inFunMode(mode) final def inExprModeOr(mode: Int, others: Int) = (mode & (EXPRmode | others)) != 0 final def inExprModeButNot(mode: Int, prohibited: Int) = (mode & (EXPRmode | prohibited)) == EXPRmode diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d0c4d6d65f..d7607ae383 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3971,15 +3971,30 @@ trait Typers extends Modes with Adaptations with Tags { } def typed1(tree: Tree, mode: Int, pt: Type): Tree = { - def isPatternMode = inPatternMode(mode) + def isPatternMode = inPatternMode(mode) + def inPatternConstructor = inAllModes(mode, PATTERNmode | FUNmode) + def isQualifierMode = (mode & QUALmode) != 0 - //Console.println("typed1("+tree.getClass()+","+Integer.toHexString(mode)+","+pt+")") //@M! get the type of the qualifier in a Select tree, otherwise: NoType def prefixType(fun: Tree): Type = fun match { case Select(qualifier, _) => qualifier.tpe -// case Ident(name) => ?? - case _ => NoType + case _ => NoType } + // Lookup in the given class using the root mirror. + def lookupInOwner(owner: Symbol, name: Name): Symbol = + if (isQualifierMode) rootMirror.missingHook(owner, name) else NoSymbol + + // Lookup in the given qualifier. Used in last-ditch efforts by typedIdent and typedSelect. + def lookupInRoot(name: Name): Symbol = lookupInOwner(rootMirror.RootClass, name) + def lookupInEmpty(name: Name): Symbol = lookupInOwner(rootMirror.EmptyPackageClass, name) + def lookupInQualifier(qual: Tree, name: Name): Symbol = ( + if (name == nme.ERROR || qual.tpe.widen.isErroneous) + NoSymbol + else lookupInOwner(qual.tpe.typeSymbol, name) orElse { + NotAMemberError(tree, qual, name) + NoSymbol + } + ) def typedAnnotated(atd: Annotated): Tree = { val ann = atd.annot -- cgit v1.2.3 From 6e4e851cc9d28a197ba6afc2fd4098f7eca9aee8 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 23 Oct 2012 14:46:02 -0700 Subject: Simplifying Typer. Apply convenience methods to strip away complications. --- .../scala/tools/nsc/typechecker/Typers.scala | 286 ++++++++------------- 1 file changed, 111 insertions(+), 175 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d7607ae383..64ee0b0e4b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -577,7 +577,7 @@ trait Typers extends Modes with Adaptations with Tags { * @return modified tree and new prefix type */ private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) = - if (isInPackageObject(sym, pre.typeSymbol)) { + if (context.isInPackageObject(sym, pre.typeSymbol)) { if (pre.typeSymbol == ScalaPackageClass && sym.isTerm) { // short cut some aliases. 
It seems pattern matching needs this // to notice exhaustiveness and to generate good code when @@ -610,34 +610,6 @@ trait Typers extends Modes with Adaptations with Tags { (checkAccessible(tree, sym, pre, site), pre) } - /** Is `sym` defined in package object of package `pkg`? - * Since sym may be defined in some parent of the package object, - * we cannot inspect its owner only; we have to go through the - * info of the package object. However to avoid cycles we'll check - * what other ways we can before pushing that way. - */ - private def isInPackageObject(sym: Symbol, pkg: Symbol) = { - val pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg - def matchesInfo = ( - pkg.isInitialized && { - // need to be careful here to not get a cyclic reference during bootstrap - val module = pkg.info member nme.PACKAGEkw - module.isInitialized && (module.info.member(sym.name).alternatives contains sym) - } - ) - def isInPkgObj(sym: Symbol) = ( - !sym.isPackage - && !sym.owner.isPackageClass - && (sym.owner ne NoSymbol) - && (sym.owner.owner == pkgClass || matchesInfo) - ) - - pkgClass.isPackageClass && ( - if (sym.isOverloaded) sym.alternatives forall isInPkgObj - else isInPkgObj(sym) - ) - } - /** Post-process an identifier or selection node, performing the following: * 1. Check that non-function pattern expressions are stable * 2. Check that packages and static modules are not used as values @@ -652,7 +624,7 @@ trait Typers extends Modes with Adaptations with Tags { def fail() = NotAValueError(tree, sym) if (tree.isErrorTyped) tree - else if ((mode & (PATTERNmode | FUNmode)) == PATTERNmode && tree.isTerm) { // (1) + else if (inPatternNotFunMode(mode) && tree.isTerm) { // (1) if (sym.isValue) { val tree1 = checkStable(tree) // A module reference in a pattern has type Foo.type, not "object Foo" @@ -867,26 +839,25 @@ trait Typers extends Modes with Adaptations with Tags { // avoid throwing spurious DivergentImplicit errors if (context.hasErrors) - return setError(tree) - - withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree)){ typer1 => - if (original != EmptyTree && pt != WildcardType) - typer1.silent(tpr => { - val withImplicitArgs = tpr.applyImplicitArgs(tree) - if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway - else tpr.typed(withImplicitArgs, mode, pt) - }) match { - case SilentResultValue(result) => - result - case _ => + setError(tree) + else + withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree))(typer1 => + if (original != EmptyTree && pt != WildcardType) ( + typer1 silent { tpr => + val withImplicitArgs = tpr.applyImplicitArgs(tree) + if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway + else tpr.typed(withImplicitArgs, mode, pt) + } + orElse { _ => debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original)) val tree1 = typed(resetAllAttrs(original), mode, WildcardType) tree1.tpe = addAnnotations(tree1, tree1.tpe) if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree) - } - else - typer1.typed(typer1.applyImplicitArgs(tree), mode, pt) - } + } + ) + else + typer1.typed(typer1.applyImplicitArgs(tree), mode, pt) + ) } def instantiateToMethodType(mt: MethodType): Tree = { @@ -1139,7 +1110,7 @@ trait Typers extends Modes with Adaptations with Tags { inExprModeButNot(mode, FUNmode) && !tree.isDef && // typechecking application tree.symbol != null && tree.symbol.isTermMacro) // of a macro macroExpand(this, tree, mode, pt) - else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode)) + else if 
(inAllModes(mode, PATTERNmode | FUNmode)) adaptConstrPattern() else if (inAllModes(mode, EXPRmode | FUNmode) && !tree.tpe.isInstanceOf[MethodType] && @@ -1286,12 +1257,10 @@ trait Typers extends Modes with Adaptations with Tags { */ def instantiateExpectingUnit(tree: Tree, mode: Int): Tree = { val savedUndetparams = context.undetparams - silent(_.instantiate(tree, mode, UnitClass.tpe)) match { - case SilentResultValue(t) => t - case _ => - context.undetparams = savedUndetparams - val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant()))) - typed(valueDiscard, mode, UnitClass.tpe) + silent(_.instantiate(tree, mode, UnitClass.tpe)) orElse { _ => + context.undetparams = savedUndetparams + val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant()))) + typed(valueDiscard, mode, UnitClass.tpe) } } @@ -1348,16 +1317,12 @@ trait Typers extends Modes with Adaptations with Tags { def doAdapt(restpe: Type) = //util.trace("adaptToArgs "+qual+", name = "+name+", argtpes = "+(args map (_.tpe))+", pt = "+pt+" = ") adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe), reportAmbiguous, saveErrors) - if (pt != WildcardType) { - silent(_ => doAdapt(pt)) match { - case SilentResultValue(result) if result != qual => - result - case _ => - debuglog("fallback on implicits in adaptToArguments: "+qual+" . "+name) - doAdapt(WildcardType) - } - } else + + if (pt == WildcardType) doAdapt(pt) + else silent(_ => doAdapt(pt)) filter (_ != qual) orElse (_ => + logResult(s"fallback on implicits in adaptToArguments: $qual.$name")(doAdapt(WildcardType)) + ) } /** Try to apply an implicit conversion to `qual` so that it contains @@ -1365,27 +1330,24 @@ trait Typers extends Modes with Adaptations with Tags { * account using `adaptToArguments`. 
*/ def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = { - def onError(reportError: => Tree): Tree = { - context.tree match { - case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty => - silent(_.typedArgs(args, mode)) match { - case SilentResultValue(xs) => - val args = xs.asInstanceOf[List[Tree]] - if (args exists (_.isErrorTyped)) - reportError - else - adaptToArguments(qual, name, args, WildcardType, reportAmbiguous, saveErrors) - case _ => - reportError - } - case _ => - reportError - } - } - silent(_.adaptToMember(qual, HasMember(name), false)) match { - case SilentResultValue(res) => res - case SilentTypeError(err) => onError({if (reportAmbiguous) { context.issue(err) }; setError(tree)}) + def onError(reportError: => Tree): Tree = context.tree match { + case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty => + ( silent (_.typedArgs(args, mode)) + map (_.asInstanceOf[List[Tree]]) + filter (xs => !(xs exists (_.isErrorTyped))) + map (xs => adaptToArguments(qual, name, xs, WildcardType, reportAmbiguous, saveErrors)) + orElse ( _ => reportError) + ) + case _ => + reportError } + + silent(_.adaptToMember(qual, HasMember(name), false)) orElse (err => + onError { + if (reportAmbiguous) context issue err + setError(tree) + } + ) } /** Try to apply an implicit conversion to `qual` to that it contains a @@ -2107,18 +2069,14 @@ trait Typers extends Modes with Adaptations with Tags { val enclClass = context.enclClass.owner def defineAlias(name: Name) = if (context.scope.lookup(name) == NoSymbol) { - lookupVariable(name.toString.substring(1), enclClass) match { - case Some(repl) => - silent(_.typedTypeConstructor(stringParser(repl).typ())) match { - case SilentResultValue(tpt) => - val alias = enclClass.newAliasType(name.toTypeName, useCase.pos) - val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias) - val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe))) - alias setInfo newInfo - context.scope.enter(alias) - case _ => - } - case _ => + lookupVariable(name.toString.substring(1), enclClass) foreach { repl => + silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt => + val alias = enclClass.newAliasType(name.toTypeName, useCase.pos) + val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias) + val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe))) + alias setInfo newInfo + context.scope.enter(alias) + } } } for (tree <- trees; t <- tree) @@ -2464,10 +2422,7 @@ trait Typers extends Modes with Adaptations with Tags { // TODO: add fallback __match sentinel to predef val matchStrategy: Tree = if (!(newPatternMatching && settings.Xexperimental.value && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen - else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { - case SilentResultValue(ms) => ms - case _ => null - } + else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) orElse (_ => null) if (matchStrategy ne null) // virtualize typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy)).translateMatch(match_), mode, pt) @@ -2664,15 +2619,13 @@ trait Typers extends Modes with Adaptations with Tags { 
else { fun match { case etaExpansion(vparams, fn, args) => - silent(_.typed(fn, forFunMode(mode), pt)) match { - case SilentResultValue(fn1) if context.undetparams.isEmpty => - // if context,undetparams is not empty, the function was polymorphic, - // so we need the missing arguments to infer its type. See #871 - //println("typing eta "+fun+":"+fn1.tpe+"/"+context.undetparams) - val ftpe = normalize(fn1.tpe) baseType FunctionClass(numVparams) - if (isFunctionType(ftpe) && isFullyDefined(ftpe)) - return typedFunction(fun, mode, ftpe) - case _ => + silent(_.typed(fn, forFunMode(mode), pt)) filter (_ => context.undetparams.isEmpty) map { fn1 => + // if context,undetparams is not empty, the function was polymorphic, + // so we need the missing arguments to infer its type. See #871 + //println("typing eta "+fun+":"+fn1.tpe+"/"+context.undetparams) + val ftpe = normalize(fn1.tpe) baseType FunctionClass(numVparams) + if (isFunctionType(ftpe) && isFullyDefined(ftpe)) + return typedFunction(fun, mode, ftpe) } case _ => } @@ -3039,14 +2992,13 @@ trait Typers extends Modes with Adaptations with Tags { * to that. This is the last thing which is tried (after * default arguments) */ - def tryTupleApply: Option[Tree] = { + def tryTupleApply: Option[Tree] = ( if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) { val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args))) // expected one argument, but got 0 or >1 ==> try applying to tuple // the inner "doTypedApply" does "extractUndetparams" => restore when it fails val savedUndetparams = context.undetparams - silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) match { - case SilentResultValue(t) => + silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) map { t => // Depending on user options, may warn or error here if // a Unit or tuple was inserted. Some(t) filter (tupledTree => @@ -3054,12 +3006,10 @@ trait Typers extends Modes with Adaptations with Tags { || tupledTree.symbol == null || checkValidAdaptation(tupledTree, args) ) - case _ => - context.undetparams = savedUndetparams - None - } - } else None - } + } orElse { _ => context.undetparams = savedUndetparams ; None } + } + else None + ) /** Treats an application which uses named or default arguments. 
* Also works if names + a vararg used: when names are used, the vararg @@ -3951,12 +3901,8 @@ trait Typers extends Modes with Adaptations with Tags { } } - def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = { - silent(typeTree) match { - case SilentResultValue(r) => r - case SilentTypeError(err) => DynamicRewriteError(tree, err) - } - } + def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = + silent(typeTree) orElse (err => DynamicRewriteError(tree, err)) } final def deindentTyping() = context.typingIndentLevel -= 2 @@ -4418,12 +4364,7 @@ trait Typers extends Modes with Adaptations with Tags { setError(treeCopy.Apply(tree, fun, args)) } - silent(_.doTypedApply(tree, fun, args, mode, pt)) match { - case SilentResultValue(t) => - t - case SilentTypeError(err) => - onError(err) - } + silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError } def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = { @@ -4673,45 +4614,43 @@ trait Typers extends Modes with Adaptations with Tags { if (!reallyExists(sym)) { def handleMissing: Tree = { - if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) { - val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) } - if (tree1 != EmptyTree) return typed1(tree1, mode, pt) - } - - // try to expand according to Dynamic rules. - asDynamicCall foreach (x => return x) - - debuglog( - "qual = " + qual + ":" + qual.tpe + - "\nSymbol=" + qual.tpe.termSymbol + "\nsymbol-info = " + qual.tpe.termSymbol.info + - "\nscope-id = " + qual.tpe.termSymbol.info.decls.hashCode() + "\nmembers = " + qual.tpe.members + - "\nname = " + name + "\nfound = " + sym + "\nowner = " + context.enclClass.owner) - - def makeInteractiveErrorTree = { - val tree1 = tree match { - case Select(_, _) => treeCopy.Select(tree, qual, name) - case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name) - } - setError(tree1) + def errorTree = tree match { + case _ if !forInteractive => tree + case Select(_, _) => treeCopy.Select(tree, qual, name) + case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name) } - - if (name == nme.ERROR && forInteractive) - return makeInteractiveErrorTree - - if (!qual.tpe.widen.isErroneous) { - if ((mode & QUALmode) != 0) { - val lastTry = rootMirror.missingHook(qual.tpe.typeSymbol, name) - if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt) + def asTypeSelection = ( + if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) { + atPos(tree.pos)(gen.convertToSelectFromType(qual, name)) match { + case EmptyTree => None + case tree1 => Some(typed1(tree1, mode, pt)) + } } - NotAMemberError(tree, qual, name) - } - - if (forInteractive) makeInteractiveErrorTree else setError(tree) + else None + ) + debuglog(s""" + |qual=$qual:${qual.tpe} + |symbol=${qual.tpe.termSymbol.defString} + |scope-id=${qual.tpe.termSymbol.info.decls.hashCode} + |members=${qual.tpe.members mkString ", "} + |name=$name + |found=$sym + |owner=${context.enclClass.owner} + """.stripMargin) + + // 1) Try converting a term selection on a java class into a type selection. + // 2) Try expanding according to Dynamic rules. + // 3) Try looking up the name in the qualifier. 
+ asTypeSelection orElse asDynamicCall getOrElse (lookupInQualifier(qual, name) match { + case NoSymbol => setError(errorTree) + case found => typed1(tree setSymbol found, mode, pt) + }) } handleMissing - } else { + } + else { val tree1 = tree match { - case Select(_, _) => treeCopy.Select(tree, qual, name) + case Select(_, _) => treeCopy.Select(tree, qual, name) case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name) } val (result, accessibleError) = silent(_.makeAccessible(tree1, sym, qual.tpe, qual)) match { @@ -4780,19 +4719,16 @@ trait Typers extends Modes with Adaptations with Tags { val tree1 = // temporarily use `filter` and an alternative for `withFilter` if (name == nme.withFilter) - silent(_ => typedSelect(tree, qual1, name)) match { - case SilentResultValue(result) => - result - case _ => - silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match { - case SilentResultValue(result2) => - unit.deprecationWarning( - tree.pos, "`withFilter' method does not yet exist on " + qual1.tpe.widen + - ", using `filter' method instead") - result2 - case SilentTypeError(err) => - WithFilterError(tree, err) - } + silent(_ => typedSelect(tree, qual1, name)) orElse { _ => + silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match { + case SilentResultValue(result2) => + unit.deprecationWarning( + tree.pos, "`withFilter' method does not yet exist on " + qual1.tpe.widen + + ", using `filter' method instead") + result2 + case SilentTypeError(err) => + WithFilterError(tree, err) + } } else typedSelect(tree, qual1, name) -- cgit v1.2.3 From f5c336d5660495f7083d7c7b91e48b9621cddbb2 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 23 Oct 2012 14:46:53 -0700 Subject: Switch typedIdent to use Context's lookupSymbol. This completes the transition. Typer's bevy of special cases to influence symbol lookup are encoded in its local "qualifies" method, which it passes to lookupSymbol. This allows access to be done correctly without infecting Typer with such pedestrian concerns. --- .../scala/tools/nsc/typechecker/Typers.scala | 281 ++++----------------- 1 file changed, 49 insertions(+), 232 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 64ee0b0e4b..e45df55ca7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4743,6 +4743,18 @@ trait Typers extends Modes with Adaptations with Tags { } } + /** A symbol qualifies if: + * - it exists + * - it is not stale (stale symbols are made to disappear here) + * - if we are in a pattern constructor, method definitions do not qualify + * unless they are stable. Otherwise, 'case x :: xs' would find the :: method. + */ + def qualifies(sym: Symbol) = ( + sym.hasRawInfo + && reallyExists(sym) + && !(inPatternConstructor && sym.isMethod && !sym.isStable) + ) + /** Attribute an identifier consisting of a simple name or an outer reference. * * @param tree The tree representing the identifier. 
@@ -4751,244 +4763,49 @@ trait Typers extends Modes with Adaptations with Tags { * (2) Change imported symbols to selections */ def typedIdent(tree: Tree, name: Name): Tree = { - var errorContainer: AbsTypeError = null - def ambiguousError(msg: String) = { - assert(errorContainer == null, "Cannot set ambiguous error twice for identifier") - errorContainer = AmbiguousIdentError(tree, name, msg) - } - def identError(tree: AbsTypeError) = { - assert(errorContainer == null, "Cannot set ambiguous error twice for identifier") - errorContainer = tree - } + def emptyPackageOk = settings.exposeEmptyPackage.value // setting to enable unqualified idents in empty package - var defSym: Symbol = tree.symbol // the directly found symbol - var pre: Type = NoPrefix // the prefix type of defSym, if a class member - var qual: Tree = EmptyTree // the qualifier tree if transformed tree is a select - var inaccessibleSym: Symbol = NoSymbol // the first symbol that was found but that was discarded - // for being inaccessible; used for error reporting - var inaccessibleExplanation: String = "" - - // If a special setting is given, the empty package will be checked as a - // last ditch effort before failing. This method sets defSym and returns - // true if a member of the given name exists. - def checkEmptyPackage(): Boolean = { - defSym = rootMirror.EmptyPackageClass.tpe.nonPrivateMember(name) - defSym != NoSymbol + def issue(err: AbsTypeError) = { + // Avoiding some spurious error messages: see SI-2388. + val suppress = reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME) + if (!suppress) + ErrorUtils.issueTypeError(err) + + setError(tree) } - def correctForPackageObject(sym: Symbol): Symbol = { - if (sym.isTerm && isInPackageObject(sym, pre.typeSymbol)) { - val sym1 = pre member sym.name - if ((sym1 eq NoSymbol) || (sym eq sym1)) sym else { - qual = gen.mkAttributedQualifier(pre) - log(s""" - | !!! Overloaded package object member resolved incorrectly. - | prefix: $pre - | Discarded: ${sym.defString} - | Using: ${sym1.defString} - """.stripMargin) - sym1 - } - } - else sym + // ignore current variable scope in patterns to enforce linearity + val startContext = if (inNoModes(mode, PATTERNmode | TYPEPATmode)) context else context.outer + val nameLookup = tree.symbol match { + case NoSymbol => startContext.lookupSymbol(name, qualifies) + case sym => LookupSucceeded(EmptyTree, sym) } - def startingIdentContext = ( - // ignore current variable scope in patterns to enforce linearity - if ((mode & (PATTERNmode | TYPEPATmode)) == 0) context - else context.outer - ) - // A symbol qualifies if it exists and is not stale. Stale symbols - // are made to disappear here. In addition, - // if we are in a constructor of a pattern, we ignore all definitions - // which are methods (note: if we don't do that - // case x :: xs in class List would return the :: method) - // unless they are stable or are accessors (the latter exception is for better error messages). 
- def qualifies(sym: Symbol): Boolean = ( - sym.hasRawInfo // this condition avoids crashing on self-referential pattern variables - && reallyExists(sym) - && ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR)) + val defSym = ( + nameLookup.symbol + orElse ( if (emptyPackageOk) lookupInEmpty(name) else NoSymbol ) + orElse (lookupInRoot(name) andAlso (sym => return typed1(tree setSymbol sym, mode, pt))) + orElse (context.owner newErrorSymbol name) ) - - if (defSym == NoSymbol) { - var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope - - var cx = startingIdentContext - while (defSym == NoSymbol && cx != NoContext && (cx.scope ne null)) { // cx.scope eq null arises during FixInvalidSyms in Duplicators - pre = cx.enclClass.prefix - // !!! FIXME. This call to lookupEntry is at the root of all the - // bad behavior with overloading in package objects. lookupEntry - // just takes the first symbol it finds in scope, ignoring the rest. - // When a selection on a package object arrives here, the first - // overload is always chosen. "correctForPackageObject" exists to - // undo that decision. Obviously it would be better not to do it in - // the first place; however other things seem to be tied to obtaining - // that ScopeEntry, specifically calculating the nesting depth. - defEntry = cx.scope.lookupEntry(name) - if ((defEntry ne null) && qualifies(defEntry.sym)) - defSym = correctForPackageObject(defEntry.sym) - else { - cx = cx.enclClass - val foundSym = pre.member(name) filter qualifies - defSym = foundSym filter (context.isAccessible(_, pre, false)) - if (defSym == NoSymbol) { - if ((foundSym ne NoSymbol) && (inaccessibleSym eq NoSymbol)) { - inaccessibleSym = foundSym - inaccessibleExplanation = analyzer.lastAccessCheckDetails - } - cx = cx.outer - } - } - } - - val symDepth = if (defEntry eq null) cx.depth - else cx.depth - (cx.scope.nestingLevel - defEntry.owner.nestingLevel) - var impSym: Symbol = NoSymbol // the imported symbol - var imports = context.imports // impSym != NoSymbol => it is imported from imports.head - while (!reallyExists(impSym) && !imports.isEmpty && imports.head.depth > symDepth) { - impSym = imports.head.importedSymbol(name) - if (!impSym.exists) imports = imports.tail - } - - // detect ambiguous definition/import, - // update `defSym` to be the final resolved symbol, - // update `pre` to be `sym`s prefix type in case it is an imported member, - // and compute value of: - - if (defSym.exists && impSym.exists) { - // imported symbols take precedence over package-owned symbols in different - // compilation units. Defined symbols take precedence over erroneous imports. - if (defSym.isDefinedInPackage && - (!currentRun.compiles(defSym) || - context.unit.exists && defSym.sourceFile != context.unit.source.file)) - defSym = NoSymbol - else if (impSym.isError || impSym.name == nme.CONSTRUCTOR) - impSym = NoSymbol - } - if (defSym.exists) { - if (impSym.exists) - ambiguousError( - "it is both defined in "+defSym.owner + - " and imported subsequently by \n"+imports.head) - else if (!defSym.owner.isClass || defSym.owner.isPackageClass || defSym.isTypeParameterOrSkolem) - pre = NoPrefix - else - qual = atPos(tree.pos.focusStart)(gen.mkAttributedQualifier(pre)) - } else { - if (impSym.exists) { - var impSym1: Symbol = NoSymbol - var imports1 = imports.tail - - /** It's possible that seemingly conflicting identifiers are - * identifiably the same after type normalization. 
In such cases, - * allow compilation to proceed. A typical example is: - * package object foo { type InputStream = java.io.InputStream } - * import foo._, java.io._ - */ - def ambiguousImport() = { - // The types of the qualifiers from which the ambiguous imports come. - // If the ambiguous name is a value, these must be the same. - def t1 = imports.head.qual.tpe - def t2 = imports1.head.qual.tpe - // The types of the ambiguous symbols, seen as members of their qualifiers. - // If the ambiguous name is a monomorphic type, we can relax this far. - def mt1 = t1 memberType impSym - def mt2 = t2 memberType impSym1 - def characterize = List( - s"types: $t1 =:= $t2 ${t1 =:= t2} members: ${mt1 =:= mt2}", - s"member type 1: $mt1", - s"member type 2: $mt2", - s"$impSym == $impSym1 ${impSym == impSym1}", - s"${impSym.debugLocationString} ${impSym.getClass}", - s"${impSym1.debugLocationString} ${impSym1.getClass}" - ).mkString("\n ") - - // The symbol names are checked rather than the symbols themselves because - // each time an overloaded member is looked up it receives a new symbol. - // So foo.member("x") != foo.member("x") if x is overloaded. This seems - // likely to be the cause of other bugs too... - if (t1 =:= t2 && impSym.name == impSym1.name) - log(s"Suppressing ambiguous import: $t1 =:= $t2 && $impSym == $impSym1") - // Monomorphism restriction on types is in part because type aliases could have the - // same target type but attach different variance to the parameters. Maybe it can be - // relaxed, but doesn't seem worth it at present. - else if (mt1 =:= mt2 && name.isTypeName && impSym.isMonomorphicType && impSym1.isMonomorphicType) - log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $impSym and $impSym1 are equivalent") - else { - log(s"Import is genuinely ambiguous:\n " + characterize) - ambiguousError(s"it is imported twice in the same scope by\n${imports.head}\nand ${imports1.head}") - } - } - while (errorContainer == null && !imports1.isEmpty && - (!imports.head.isExplicitImport(name) || - imports1.head.depth == imports.head.depth)) { - impSym1 = imports1.head.importedSymbol(name) - if (reallyExists(impSym1)) { - if (imports1.head.isExplicitImport(name)) { - if (imports.head.isExplicitImport(name) || - imports1.head.depth != imports.head.depth) ambiguousImport() - impSym = impSym1 - imports = imports1 - } else if (!imports.head.isExplicitImport(name) && - imports1.head.depth == imports.head.depth) ambiguousImport() - } - imports1 = imports1.tail - } - defSym = impSym - val qual0 = imports.head.qual - if (!(shortenImports && qual0.symbol.isPackage)) // optimization: don't write out package prefixes - qual = atPos(tree.pos.focusStart)(resetPos(qual0.duplicate)) - pre = qual.tpe - } - else if (settings.exposeEmptyPackage.value && checkEmptyPackage()) - log("Allowing empty package member " + name + " due to settings.") + import InferErrorGen._ + nameLookup match { + case LookupAmbiguous(msg) => issue(AmbiguousIdentError(tree, name, msg)) + case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg)) + case LookupNotFound() => issue(SymbolNotFoundError(tree, name, context.owner, startContext)) + case LookupSucceeded(qual, sym) => + // this -> Foo.this + if (sym.isThisSym) + typed1(This(sym.owner) setPos tree.pos, mode, pt) + // Inferring classOf type parameter from expected type. Otherwise an + // actual call to the stubbed classOf method is generated, returning null. 
+ else if (isPredefMemberNamed(sym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) + typedClassOf(tree, TypeTree(pt.typeArgs.head)) else { - if ((mode & QUALmode) != 0) { - val lastTry = rootMirror.missingHook(rootMirror.RootClass, name) - if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt) - } - if (settings.debug.value) { - log(context.imports)//debug - } - if (inaccessibleSym eq NoSymbol) { - // Avoiding some spurious error messages: see SI-2388. - if (reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)) () - else identError(SymbolNotFoundError(tree, name, context.owner, startingIdentContext)) - } else - identError(InferErrorGen.AccessError( - tree, inaccessibleSym, context.enclClass.owner.thisType, context.enclClass.owner, - inaccessibleExplanation - )) - defSym = context.owner.newErrorSymbol(name) + val pre1 = if (sym.owner.isPackageClass) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe + val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name)) + val (tree2, pre2) = makeAccessible(tree1, sym, pre1, qual) + // SI-5967 Important to replace param type A* with Seq[A] when seen from from a reference, to avoid + // inference errors in pattern matching. + stabilize(tree2, pre2, mode, pt) modifyType dropIllegalStarTypes } - } - } - if (errorContainer != null) { - ErrorUtils.issueTypeError(errorContainer) - setError(tree) - } else { - if (defSym.owner.isPackageClass) - pre = defSym.owner.thisType - - // Inferring classOf type parameter from expected type. - if (defSym.isThisSym) { - typed1(This(defSym.owner) setPos tree.pos, mode, pt) - } - // Inferring classOf type parameter from expected type. Otherwise an - // actual call to the stubbed classOf method is generated, returning null. - else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) - typedClassOf(tree, TypeTree(pt.typeArgs.head)) - else { - val tree1 = ( - if (qual == EmptyTree) tree - // atPos necessary because qualifier might come from startContext - else atPos(tree.pos)(Select(qual, name)) - ) - val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual) - // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right? - val tree3 = stabilize(tree2, pre2, mode, pt) - // SI-5967 Important to replace param type A* with Seq[A] when seen from from a reference, to avoid - // inference errors in pattern matching. - tree3 setType dropIllegalStarTypes(tree3.tpe) - } } } -- cgit v1.2.3 From 9cbbb1ccb50fc0a6c423321494ea360f25736e28 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 23 Oct 2012 15:02:51 -0700 Subject: Adding some comments and clearer naming. --- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index ad79468b0c..a79fec42bf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -740,6 +740,9 @@ trait Contexts { self: Analyzer => ) } + /** The symbol with name `name` imported via the import in `imp`, + * if any such symbol is accessible from this context. 
+ */ def importedAccessibleSymbol(imp: ImportInfo, name: Name) = { imp importedSymbol name filter (s => isAccessible(s, imp.qual.tpe, superAccess = false)) } @@ -759,7 +762,7 @@ trait Contexts { self: Analyzer => module.isInitialized && (module.info.member(sym.name).alternatives contains sym) } ) - def isInPkgObj(sym: Symbol) = ( + def inPackageObject(sym: Symbol) = ( !sym.isPackage && !sym.owner.isPackageClass && (sym.owner ne NoSymbol) @@ -767,8 +770,8 @@ trait Contexts { self: Analyzer => ) pkgClass.isPackageClass && ( - if (sym.isOverloaded) sym.alternatives forall isInPkgObj - else isInPkgObj(sym) + if (sym.isOverloaded) sym.alternatives forall inPackageObject + else inPackageObject(sym) ) } -- cgit v1.2.3 From 578c4c6c64250a12a0e625d1e54a74dbdad6d972 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 23 Oct 2012 16:43:25 -0700 Subject: Make LookupNotFound a case object. Turns out putting a group of case classes at what feels like the top level might not be top-level enough, like if your "top" is Analyzer and you wind up with different outer pointers in every instance of Typer. Moved the whole bundle to SymbolTable. --- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 13 +------------ src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- src/reflect/scala/reflect/internal/Scopes.scala | 8 ++++++++ 3 files changed, 10 insertions(+), 13 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a79fec42bf..482fb2231b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -35,17 +35,6 @@ trait Contexts { self: Analyzer => val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil } - sealed abstract class NameLookup { def symbol: Symbol } - case class LookupSucceeded(qualifier: Tree, symbol: Symbol) extends NameLookup - case class LookupAmbiguous(msg: String) extends NameLookup { def symbol = NoSymbol } - case class LookupInaccessible(symbol: Symbol, msg: String) extends NameLookup - case class LookupNotFound() extends NameLookup { def symbol = NoSymbol } - // case object LookupNotFound extends NameLookup { def symbol = NoSymbol } - // - // !!! Bug - case object LookupNotFound does not match - we get an - // "impossible" MatchError. case class LookupNotFound() matches in - // the same spot. 
- def ambiguousImports(imp1: ImportInfo, imp2: ImportInfo) = LookupAmbiguous(s"it is imported twice in the same scope by\n$imp1\nand $imp2") def ambiguousDefnAndImport(owner: Symbol, imp: ImportInfo) = @@ -795,7 +784,7 @@ trait Contexts { self: Analyzer => if (lookupError ne null) lookupError else sym match { case NoSymbol if inaccessible ne null => inaccessible - case NoSymbol => LookupNotFound() + case NoSymbol => LookupNotFound case _ => LookupSucceeded(qual, sym) } ) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index e45df55ca7..884e4b3d9c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4789,7 +4789,7 @@ trait Typers extends Modes with Adaptations with Tags { nameLookup match { case LookupAmbiguous(msg) => issue(AmbiguousIdentError(tree, name, msg)) case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg)) - case LookupNotFound() => issue(SymbolNotFoundError(tree, name, context.owner, startContext)) + case LookupNotFound => issue(SymbolNotFoundError(tree, name, context.owner, startContext)) case LookupSucceeded(qual, sym) => // this -> Foo.this if (sym.isThisSym) diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 89332d0ae5..a4b541e34d 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -8,6 +8,14 @@ package internal trait Scopes extends api.Scopes { self: SymbolTable => + /** An ADT to represent the results of symbol name lookups. + */ + sealed trait NameLookup { def symbol: Symbol } + case class LookupSucceeded(qualifier: Tree, symbol: Symbol) extends NameLookup + case class LookupAmbiguous(msg: String) extends NameLookup { def symbol = NoSymbol } + case class LookupInaccessible(symbol: Symbol, msg: String) extends NameLookup + case object LookupNotFound extends NameLookup { def symbol = NoSymbol } + class ScopeEntry(val sym: Symbol, val owner: Scope) { /** the next entry in the hash bucket */ -- cgit v1.2.3 From 1841114e6cbb1ab4e3ad6abcd17b0bc9ebef0481 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 29 Oct 2012 12:36:33 -0700 Subject: An option for real repl output. There have been many requests to expose the products of the repl in some way outside in-memory. This does that. 
scala # usual in-memory behavior scala -Yrepl-outdir "" # make up a temp dir scala -Yrepl-outdir /foo/bar # use /foo/bar --- .../scala/tools/nsc/interpreter/IMain.scala | 41 +++++++---------- .../scala/tools/nsc/interpreter/ReplDir.scala | 48 ++++++++++++++++++++ .../scala/tools/nsc/settings/ScalaSettings.scala | 1 + test/files/run/repl-out-dir.check | 53 ++++++++++++++++++++++ test/files/run/repl-out-dir.scala | 13 ++++++ test/files/run/t6223.check | 2 +- test/files/run/t6223.scala | 2 +- 7 files changed, 133 insertions(+), 27 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/interpreter/ReplDir.scala create mode 100644 test/files/run/repl-out-dir.check create mode 100644 test/files/run/repl-out-dir.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 92c2fc9768..4e702a09e6 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -30,18 +30,6 @@ import scala.reflect.runtime.{ universe => ru } import scala.reflect.{ ClassTag, classTag } import scala.tools.reflect.StdRuntimeTags._ -/** directory to save .class files to */ -private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory("(memory)", None) { - private def pp(root: AbstractFile, indentLevel: Int) { - val spaces = " " * indentLevel - out.println(spaces + root.name) - if (root.isDirectory) - root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1)) - } - // print the contents hierarchically - def show() = pp(this, 0) -} - /** An interpreter for Scala code. * * The main public entry points are compile(), interpret(), and bind(). @@ -77,16 +65,19 @@ private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory(" class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports { imain => - /** Leading with the eagerly evaluated. - */ - val virtualDirectory: VirtualDirectory = new ReplVirtualDirectory(out) // "directory" for classfiles - private var currentSettings: Settings = initialSettings - private[nsc] var printResults = true // whether to print result lines - private[nsc] var totalSilence = false // whether to print anything - private var _initializeComplete = false // compiler is initialized - private var _isInitialized: Future[Boolean] = null // set up initialization future - private var bindExceptions = true // whether to bind the lastException variable - private var _executionWrapper = "" // code to be wrapped around all lines + object replOutput extends ReplOutput(settings.Yreploutdir) { } + + @deprecated("Use replOutput.dir instead", "2.11.0") + def virtualDirectory = replOutput.dir + def showDirectory = replOutput.show(out) + + private var currentSettings: Settings = initialSettings + private[nsc] var printResults = true // whether to print result lines + private[nsc] var totalSilence = false // whether to print anything + private var _initializeComplete = false // compiler is initialized + private var _isInitialized: Future[Boolean] = null // set up initialization future + private var bindExceptions = true // whether to bind the lastException variable + private var _executionWrapper = "" // code to be wrapped around all lines /** We're going to go to some trouble to initialize the compiler asynchronously. 
* It's critical that nothing call into it until it's been initialized or we will @@ -258,7 +249,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /** Instantiate a compiler. Overridable. */ protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = { - settings.outputDirs setSingleOutput virtualDirectory + settings.outputDirs setSingleOutput replOutput.dir settings.exposeEmptyPackage.value = true if (settings.Yrangepos.value) new Global(settings, reporter) with ReplGlobal with interactive.RangePositions @@ -296,7 +287,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends ensureClassLoader() _classLoader } - private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) { + private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(replOutput.dir, parent) { /** Overridden here to try translating a simple name to the generated * class name if the original attempt fails. This method is used by * getResourceAsStream as well as findClass. @@ -670,7 +661,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends prevRequests.clear() referencedNameMap.clear() definedNameMap.clear() - virtualDirectory.clear() + replOutput.dir.clear() } /** This instance is no longer needed, so release any resources diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplDir.scala b/src/compiler/scala/tools/nsc/interpreter/ReplDir.scala new file mode 100644 index 0000000000..9fbf64acb5 --- /dev/null +++ b/src/compiler/scala/tools/nsc/interpreter/ReplDir.scala @@ -0,0 +1,48 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2012 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc +package interpreter + +import io.VirtualDirectory +import settings.MutableSettings +import scala.reflect.io.{ AbstractFile, PlainDirectory, Directory } +import scala.collection.generic.Clearable + +/** Directory to save .class files to. */ +trait ReplDir extends AbstractFile with Clearable { } + +private class ReplVirtualDir() extends VirtualDirectory("(memory)", None) with ReplDir { } +private class ReplRealDir(dir: Directory) extends PlainDirectory(dir) with ReplDir { + def clear() = { + dir.deleteRecursively() + dir.createDirectory() + } +} + +class ReplOutput(val dirSetting: MutableSettings#StringSetting) { + // outdir for generated classfiles - may be in-memory (the default), + // a generated temporary directory, or a specified outdir. 
+ val dir: ReplDir = ( + if (dirSetting.isDefault) + new ReplVirtualDir() + else if (dirSetting.value == "") + new ReplRealDir(Directory.makeTemp("repl")) + else + new ReplRealDir(Directory(dirSetting.value)) + ) + + // print the contents hierarchically + def show(out: JPrintWriter) = { + def pp(root: AbstractFile, indentLevel: Int) { + val label = root.name + val spaces = " " * indentLevel + out.println(spaces + label) + if (root.isDirectory) + root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1)) + } + pp(dir, 0) + } +} diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 404f5e6b6e..80336d9fa3 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -170,6 +170,7 @@ trait ScalaSettings extends AbsScalaSettings val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none") val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.") val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") + val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.") val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.") val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.") diff --git a/test/files/run/repl-out-dir.check b/test/files/run/repl-out-dir.check new file mode 100644 index 0000000000..a96f9ba9d9 --- /dev/null +++ b/test/files/run/repl-out-dir.check @@ -0,0 +1,53 @@ +Type in expressions to have them evaluated. +Type :help for more information. 
+ +scala> + +scala> case class Bippy(x: Int) +defined class Bippy + +scala> val x = Bippy(1) +x: Bippy = Bippy(1) + +scala> $intp.showDirectory +repl-out-dir-run.obj + $line1 + $eval$.class + $eval.class + $line2 + $eval$.class + $eval.class + $read$$iw$$iw$.class + $read$$iw$.class + $read$.class + $read.class + $line3 + $eval$.class + $eval.class + $read$$iw$$iw$.class + $read$$iw$$iw$Bippy$.class + $read$$iw$$iw$Bippy.class + $read$$iw$.class + $read$.class + $read.class + $line4 + $eval$.class + $eval.class + $read$$iw$$iw$.class + $read$$iw$.class + $read$.class + $read.class + $line5 + $eval$.class + $eval.class + $read$$iw$$iw$.class + $read$$iw$.class + $read$.class + $read.class + $repl_$init.class + Test$.class + Test.class + +scala> + +scala> diff --git a/test/files/run/repl-out-dir.scala b/test/files/run/repl-out-dir.scala new file mode 100644 index 0000000000..33c823aa2d --- /dev/null +++ b/test/files/run/repl-out-dir.scala @@ -0,0 +1,13 @@ +import scala.tools.partest.ReplTest +import scala.tools.nsc.Settings + +object Test extends ReplTest { + override def extraSettings = s"-Yrepl-outdir ${testOutput.path}" + + def code = s""" +case class Bippy(x: Int) +val x = Bippy(1) +$$intp.showDirectory + """ + +} diff --git a/test/files/run/t6223.check b/test/files/run/t6223.check index 90ec019407..4a09d1930f 100644 --- a/test/files/run/t6223.check +++ b/test/files/run/t6223.check @@ -1,4 +1,4 @@ bar -bar$mcI$sp bar$mIc$sp bar$mIcI$sp +bar$mcI$sp diff --git a/test/files/run/t6223.scala b/test/files/run/t6223.scala index 4ab7c832e6..fb176e32e6 100644 --- a/test/files/run/t6223.scala +++ b/test/files/run/t6223.scala @@ -5,7 +5,7 @@ class Foo[@specialized(Int) A](a:A) { object Test { def main(args:Array[String]) { val f = new Foo(333) - val ms = f.getClass().getDeclaredMethods() + val ms = f.getClass().getDeclaredMethods().sortBy(_.getName) ms.foreach(m => println(m.getName)) } } -- cgit v1.2.3 From 1e1199d8abbd81ab2fa3b9cbab0290d6793e0945 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 31 Oct 2012 08:26:18 -0700 Subject: Fix for -Xcheckinit failures. The GenASM phase had an eager val which required loading a bunch of symbols, which meant an earlier call to isPastTyper could lead to a cycle of the form: new Run new GenASM rootMirror.getRequiredClass findMember defineBaseClassesOfCompoundType isPastTyper currentRun.typerPhase and the opening "new Run" hasn't yet caught up to where currentRun.typerPhase is set. This was remedied by making the eager val lazy, and substantially hardened against recurrence via a method on global "isGlobalInitialized" which verifies that both the definitions object and the root mirror have completed their init methods. 
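For illustration only (not part of this patch): a minimal sketch of the eager-vs-lazy
distinction at play here, where "lookup" stands in for a symbol lookup such as
rootMirror.getRequiredClass, and EagerPhase/LazyPhase are made-up names. The eager val
runs while the phase is being constructed, which is what re-entered the half-initialized
Run; the lazy val defers the lookup until first use, by which point initialization has
completed.

  // hypothetical sketch; names and the lookup parameter are invented for illustration
  class EagerPhase(lookup: String => String) {
    val beanInfo = lookup("scala.beans.BeanInfo")      // evaluated in the constructor: cycle risk
  }
  class LazyPhase(lookup: String => String) {
    lazy val beanInfo = lookup("scala.beans.BeanInfo") // evaluated on first access, after init
  }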
--- src/compiler/scala/tools/nsc/Global.scala | 10 +++++++--- src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 4 +++- src/reflect/scala/reflect/internal/Mirrors.scala | 5 +++++ src/reflect/scala/reflect/internal/Types.scala | 2 +- 4 files changed, 16 insertions(+), 5 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 40a14aec6f..5b5cffa885 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1046,9 +1046,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile + def isGlobalInitialized = ( + definitions.isDefinitionsInitialized + && rootMirror.isMirrorInitialized + ) override def isPastTyper = ( (curRun ne null) - && (currentRun.typerPhase ne null) + && isGlobalInitialized // defense against init order issues && (globalPhase.id > currentRun.typerPhase.id) ) @@ -1525,9 +1529,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def compileUnits(units: List[CompilationUnit], fromPhase: Phase) { try compileUnitsInternal(units, fromPhase) catch { case ex: Throwable => - val shown = if (settings.verbose.value) + val shown = if (settings.verbose.value) stackTraceString(ex) - else + else ex.getClass.getName // ex.printStackTrace(Console.out) // DEBUG for fsc, note that error stacktraces do not print in fsc globalError(supplementErrorMessage("uncaught exception during compilation: " + shown)) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 34d46e27fe..f4921e79e5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -53,7 +53,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters { override def erasedTypes = true def apply(cls: IClass) = sys.error("no implementation") - val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo") + // Lazy val; can't have eager vals in Phase constructors which may + // cause cycles before Global has finished initialization. + lazy val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo") def isJavaEntryPoint(icls: IClass) = { val sym = icls.symbol diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index 019cf7f908..d16374476a 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -20,6 +20,8 @@ trait Mirrors extends api.Mirrors { trait RootSymbol extends Symbol { def mirror: Mirror } abstract class RootsBase(rootOwner: Symbol) extends scala.reflect.api.Mirror[Mirrors.this.type] { thisMirror => + private[this] var initialized = false + def isMirrorInitialized = initialized protected[scala] def rootLoader: LazyType @@ -229,6 +231,7 @@ trait Mirrors extends api.Mirrors { // } def init() { + if (initialized) return // Still fiddling with whether it's cleaner to do some of this setup here // or from constructors. The latter approach tends to invite init order issues. 
@@ -240,6 +243,8 @@ trait Mirrors extends api.Mirrors { RootClass.info.decls enter EmptyPackage RootClass.info.decls enter RootPackage + + initialized = true } } diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 92db92d5f3..b6c67ad63e 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1728,7 +1728,7 @@ trait Types extends api.Types { self: SymbolTable => protected def defineBaseClassesOfCompoundType(tpe: CompoundType) { def define = defineBaseClassesOfCompoundType(tpe, force = false) - if (isPastTyper || !breakCycles) define + if (!breakCycles || isPastTyper) define else tpe match { // non-empty parents helpfully excludes all package classes case tpe @ ClassInfoType(_ :: _, _, clazz) if !clazz.isAnonOrRefinementClass => -- cgit v1.2.3 From c15171dd280bede6e7c3814d055017965c490e76 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Thu, 25 Oct 2012 15:06:10 +0300 Subject: silences optional logs in reflection Some parts of the compiler call `SymbolTable.log` to dump information about the progress of compilation, type inference, etc. In Global.scala, `log` checks the -Ylog configuration setting to approve or reject incoming messages. However in runtime reflection (scala.reflect.runtime.JavaUniverse) `log` always prints its argument, which is annoying. Here's why this is happening. In runtime reflection -Ylog is inapplicable, because this is a phase-based setting, whereas reflection does not have a notion of phases. Moreover reflection doesn't expose `settings` to the programmers (the corresponding `def settings` definition is declared in scala.reflect.internal.Required). Therefore there's no obvious solution to conditional printing in `log` when invoked in reflective setting. The situation is tough and needs to be addressed in a principled manner. However we're in between RC1 and RC2 at the moment, so I don't fancy significant changes right now. Nevertheless we need to fix the annoyance right away, therefore I change reflective `log` to use -Ydebug. This is inconsistent w.r.t how Global works, and also there's no way to enable logging short of casting to `scala.reflect.runtime.SymbolTable`, but it looks like a decent temporary measure. --- src/reflect/scala/reflect/runtime/JavaUniverse.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index e18435d5b0..1b69ca4e89 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -17,7 +17,7 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S def forInteractive = false def forScaladoc = false - def log(msg: => AnyRef): Unit = println(" [] "+msg) + def log(msg: => AnyRef): Unit = if (settings.debug.value) println(" [] "+msg) type TreeCopier = InternalTreeCopierOps def newStrictTreeCopier: TreeCopier = new StrictTreeCopier -- cgit v1.2.3 From 98e3e4774a4e7ab1f65a1173e5c73caa8f427314 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 31 Oct 2012 13:15:43 -0700 Subject: Minor tweaks to logging approach. 
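As the diff below shows, the gate for reflection logging moves from the -Ydebug compiler
setting to a JVM system property, and output goes to standard error with a "[reflect]"
prefix. Only the presence of the scala.debug.reflect key is checked (its value is
ignored), and it is read once, when the runtime universe is constructed. A usage sketch,
assuming the standard scala launcher forwards -D options to the JVM as its scripts do:

  scala -Dscala.debug.reflect=true   # enable reflection logging for this run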
--- src/reflect/scala/reflect/runtime/JavaUniverse.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index 1b69ca4e89..0f70a676fa 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -13,11 +13,12 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S def picklerPhase = SomePhase - lazy val settings = new Settings def forInteractive = false def forScaladoc = false + lazy val settings = new Settings + private val isLogging = sys.props contains "scala.debug.reflect" - def log(msg: => AnyRef): Unit = if (settings.debug.value) println(" [] "+msg) + def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg) type TreeCopier = InternalTreeCopierOps def newStrictTreeCopier: TreeCopier = new StrictTreeCopier -- cgit v1.2.3 From 8d962ed4ddd310cc784121c426a2e3f56a112540 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 31 Oct 2012 10:58:45 -0700 Subject: Fix performance bug in GenASM. It was going through missingHook looking for android classes every time something was compiled, which means four failing missingHook calls for every line in the repl. --- .../scala/tools/nsc/backend/jvm/GenASM.scala | 20 ++++++++++---------- src/reflect/scala/reflect/internal/Mirrors.scala | 4 +++- 2 files changed, 13 insertions(+), 11 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index f4921e79e5..18222794a8 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -46,6 +46,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters { private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile = getFile(outputDirectory(sym), clsName, suffix) + /** From the reference documentation of the Android SDK: + * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`. + * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`, + * which is an object implementing the `Parcelable.Creator` interface. + */ + private val androidFieldName = newTermName("CREATOR") + + private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable") + private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator") + /** JVM code generation phase */ class AsmPhase(prev: Phase) extends ICodePhase(prev) { @@ -1202,16 +1212,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { trait JAndroidBuilder { self: JPlainBuilder => - /** From the reference documentation of the Android SDK: - * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`. - * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`, - * which is an object implementing the `Parcelable.Creator` interface. 
- */ - private val androidFieldName = newTermName("CREATOR") - - private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable") - private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator") - def isAndroidParcelableClass(sym: Symbol) = (AndroidParcelableInterface != NoSymbol) && (sym.parentSymbols contains AndroidParcelableInterface) diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index d16374476a..ff58a31d20 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -79,7 +79,9 @@ trait Mirrors extends api.Mirrors { protected def universeMissingHook(owner: Symbol, name: Name): Symbol = thisUniverse.missingHook(owner, name) - private[scala] def missingHook(owner: Symbol, name: Name): Symbol = mirrorMissingHook(owner, name) orElse universeMissingHook(owner, name) + private[scala] def missingHook(owner: Symbol, name: Name): Symbol = logResult(s"missingHook($owner, $name)")( + mirrorMissingHook(owner, name) orElse universeMissingHook(owner, name) + ) // todo: get rid of most the methods here and keep just staticClass/Module/Package -- cgit v1.2.3 From 187c61a0e49c8f880a0599d64955e47e167579dc Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 31 Oct 2012 14:08:29 -0700 Subject: Fix for SI-6597, implicit case class crasher. It seems to me like every call to scope.lookup in the compiler is a latent bug. If a symbol is overloaded, you get one at random. (See the FIXME comment in f5c336d5660 for more on this.) --- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 2 +- test/files/neg/t6597.check | 4 ++++ test/files/neg/t6597.scala | 5 +++++ 3 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/t6597.check create mode 100644 test/files/neg/t6597.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 2c4034db84..99b927af66 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -414,7 +414,7 @@ trait Namers extends MethodSynthesis { * a module definition or a class definition. */ def enterModuleSymbol(tree : ModuleDef): Symbol = { - var m: Symbol = context.scope.lookup(tree.name) + var m: Symbol = context.scope lookupAll tree.name find (_.isModule) getOrElse NoSymbol val moduleFlags = tree.mods.flags | MODULE if (m.isModule && !m.isPackage && inCurrentScope(m) && (currentRun.canRedefine(m) || m.isSynthetic)) { updatePosFlags(m, tree.pos, moduleFlags) diff --git a/test/files/neg/t6597.check b/test/files/neg/t6597.check new file mode 100644 index 0000000000..1d52519d1d --- /dev/null +++ b/test/files/neg/t6597.check @@ -0,0 +1,4 @@ +t6597.scala:3: error: illegal combination of modifiers: implicit and case for: class Quux + implicit case class Quux(value: Int) extends AnyVal with T + ^ +one error found diff --git a/test/files/neg/t6597.scala b/test/files/neg/t6597.scala new file mode 100644 index 0000000000..dde53bcc89 --- /dev/null +++ b/test/files/neg/t6597.scala @@ -0,0 +1,5 @@ +object Test { + trait T extends Any + implicit case class Quux(value: Int) extends AnyVal with T + object Quux +} -- cgit v1.2.3 From 8a537b7d7da03833946a6a2f4461da2080363c88 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 29 Oct 2012 17:17:43 -0700 Subject: Fix SI-6584, Stream#distinct uses too much memory. 
Nesting recursive calls in Stream is always a dicey business. --- src/library/scala/collection/immutable/Stream.scala | 13 ++++++++++--- test/files/run/t6584.check | 8 ++++++++ test/files/run/t6584.scala | 16 ++++++++++++++++ 3 files changed, 34 insertions(+), 3 deletions(-) create mode 100644 test/files/run/t6584.check create mode 100644 test/files/run/t6584.scala (limited to 'src') diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index 461a375317..78c4d76eda 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -841,9 +841,16 @@ self => * // produces: "1, 2, 3, 4, 5, 6" * }}} */ - override def distinct: Stream[A] = - if (isEmpty) this - else cons(head, tail.filter(head != _).distinct) + override def distinct: Stream[A] = { + // This should use max memory proportional to N, whereas + // recursively calling distinct on the tail is N^2. + def loop(seen: Set[A], rest: Stream[A]): Stream[A] = { + if (rest.isEmpty) rest + else if (seen(rest.head)) loop(seen, rest.tail) + else cons(rest.head, loop(seen + rest.head, rest.tail)) + } + loop(Set(), this) + } /** Returns a new sequence of given length containing the elements of this * sequence followed by zero or more occurrences of given elements. diff --git a/test/files/run/t6584.check b/test/files/run/t6584.check new file mode 100644 index 0000000000..35c8688751 --- /dev/null +++ b/test/files/run/t6584.check @@ -0,0 +1,8 @@ +Array: 102400 +Vector: 102400 +List: 102400 +Stream: 102400 +Array: 102400 +Vector: 102400 +List: 102400 +Stream: 102400 diff --git a/test/files/run/t6584.scala b/test/files/run/t6584.scala new file mode 100644 index 0000000000..24c236ef35 --- /dev/null +++ b/test/files/run/t6584.scala @@ -0,0 +1,16 @@ +object Test { + def main(args: Array[String]): Unit = { + val size = 100 * 1024 + val doubled = (1 to size) ++ (1 to size) + + println("Array: " + Array.tabulate(size)(x => x).distinct.size) + println("Vector: " + Vector.tabulate(size)(x => x).distinct.size) + println("List: " + List.tabulate(size)(x => x).distinct.size) + println("Stream: " + Stream.tabulate(size)(x => x).distinct.size) + + println("Array: " + doubled.toArray.distinct.size) + println("Vector: " + doubled.toVector.distinct.size) + println("List: " + doubled.toList.distinct.size) + println("Stream: " + doubled.toStream.distinct.size) + } +} -- cgit v1.2.3 From 4e4060f4faee791759417f1a598322e90623464d Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 29 Oct 2012 20:20:35 -0700 Subject: Implementation of Stream#dropRight. "There's nothing we can do about dropRight," you say? Oh but there is. --- .../scala/collection/immutable/Stream.scala | 32 ++++++++++++++++++---- test/files/run/streams.check | 1 + test/files/run/streams.scala | 5 +++- 3 files changed, 31 insertions(+), 7 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index 78c4d76eda..5566806c55 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -181,6 +181,7 @@ import scala.language.implicitConversions * @define coll stream * @define orderDependent * @define orderDependentFold + * @define willTerminateInf Note: lazily evaluated; will terminate for infinite-sized collections. 
*/ abstract class Stream[+A] extends AbstractSeq[A] with LinearSeq[A] @@ -286,9 +287,8 @@ self => len } - /** It's an imperfect world, but at least we can bottle up the - * imperfection in a capsule. - */ + // It's an imperfect world, but at least we can bottle up the + // imperfection in a capsule. @inline private def asThat[That](x: AnyRef): That = x.asInstanceOf[That] @inline private def asStream[B](x: AnyRef): Stream[B] = x.asInstanceOf[Stream[B]] @inline private def isStreamBuilder[B, That](bf: CanBuildFrom[Stream[A], B, That]) = @@ -725,10 +725,15 @@ self => * // produces: "5, 6, 7, 8, 9" * }}} */ - override def take(n: Int): Stream[A] = + override def take(n: Int): Stream[A] = ( + // Note that the n == 1 condition appears redundant but is not. + // It prevents "tail" from being referenced (and its head being evaluated) + // when obtaining the last element of the result. Such are the challenges + // of working with a lazy-but-not-really sequence. if (n <= 0 || isEmpty) Stream.empty else if (n == 1) cons(head, Stream.empty) else cons(head, tail take n-1) + ) @tailrec final override def drop(n: Int): Stream[A] = if (n <= 0 || isEmpty) this @@ -784,8 +789,23 @@ self => these } - // there's nothing we can do about dropRight, so we just keep the definition - // in LinearSeq + /** + * @inheritdoc + * $willTerminateInf + */ + override def dropRight(n: Int): Stream[A] = { + // We make dropRight work for possibly infinite streams by carrying + // a buffer of the dropped size. As long as the buffer is full and the + // rest is non-empty, we can feed elements off the buffer head. When + // the rest becomes empty, the full buffer is the dropped elements. + def advance(stub0: List[A], stub1: List[A], rest: Stream[A]): Stream[A] = { + if (rest.isEmpty) Stream.empty + else if (stub0.isEmpty) advance(stub1.reverse, Nil, rest) + else cons(stub0.head, advance(stub0.tail, rest.head :: stub1, rest.tail)) + } + if (n <= 0) this + else advance((this take n).toList, Nil, this drop n) + } /** Returns the longest prefix of this `Stream` whose elements satisfy the * predicate `p`. diff --git a/test/files/run/streams.check b/test/files/run/streams.check index 7f894052d9..032057d4a1 100644 --- a/test/files/run/streams.check +++ b/test/files/run/streams.check @@ -23,3 +23,4 @@ Stream(100001, ?) true true 705082704 +6 diff --git a/test/files/run/streams.scala b/test/files/run/streams.scala index 51b4e5d76c..dc5d0204ac 100644 --- a/test/files/run/streams.scala +++ b/test/files/run/streams.scala @@ -29,7 +29,7 @@ object Test extends App { def powers(x: Int) = if ((x&(x-1)) == 0) Some(x) else None println(s3.flatMap(powers).reverse.head) - // large enough to generate StackOverflows (on most systems) + // large enough to generate StackOverflows (on most systems) // unless the following methods are tail call optimized. val size = 100000 @@ -43,4 +43,7 @@ object Test extends App { println(Stream.from(1).take(size).foldLeft(0)(_ + _)) val arr = new Array[Int](size) Stream.from(1).take(size).copyToArray(arr, 0) + + // dropRight terminates + println(Stream from 1 dropRight 1000 take 3 sum) } -- cgit v1.2.3 From d0c4be6861109683d80513eda74e5c6ca88f1441 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 30 Oct 2012 14:29:14 -0700 Subject: Warn about unused private members. Warnings enabled via -Xlint. It's one of the most requested features. And it is hard to argue we don't need it: see the 99 methods removed in the next commit. This should close SI-440. 
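A quick illustration of what the new check reports (an illustrative sketch, not part of this patch; the bundled warn-unused-privates test below is the authoritative list of cases, and `Example` here is a made-up class):

    // compiled with -Xlint (the neg test below also adds -Xfatal-warnings)
    class Example(a: Int, b: Int) {
      private def helper(x: Int) = x + a + b   // never referenced -> "private method in class Example is never used"
      final private val Max = 100              // constant result type: exempt, might have been inlined
      def sum() = a + b                        // non-private members are not warned
    }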
--- .../scala/tools/nsc/typechecker/Analyzer.scala | 9 ++-- .../tools/nsc/typechecker/TypeDiagnostics.scala | 57 +++++++++++++++++++++ test/files/neg/warn-unused-privates.check | 30 +++++++++++ test/files/neg/warn-unused-privates.flags | 1 + test/files/neg/warn-unused-privates.scala | 58 ++++++++++++++++++++++ test/files/pos/t5809.scala | 3 +- 6 files changed, 154 insertions(+), 4 deletions(-) create mode 100644 test/files/neg/warn-unused-privates.check create mode 100644 test/files/neg/warn-unused-privates.flags create mode 100644 test/files/neg/warn-unused-privates.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index 3526d932d3..9a6b5c45c4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -95,14 +95,17 @@ trait Analyzer extends AnyRef } def apply(unit: CompilationUnit) { try { - unit.body = newTyper(rootContext(unit)).typed(unit.body) + val typer = newTyper(rootContext(unit)) + unit.body = typer.typed(unit.body) if (global.settings.Yrangepos.value && !global.reporter.hasErrors) global.validatePositions(unit.body) for (workItem <- unit.toCheck) workItem() - } finally { + if (settings.lint.value) + typer checkUnused unit + } + finally { unit.toCheck.clear() } } } } } - diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index e5c0f5767c..283d0fa440 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -426,6 +426,63 @@ trait TypeDiagnostics { def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) = contextWarning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString)) + object checkUnused { + val ignoreNames = Set[TermName]("readResolve", "readObject", "writeObject", "writeReplace") + + class UnusedPrivates extends Traverser { + val defnTrees = ListBuffer[MemberDef]() + val targets = mutable.Set[Symbol]() + def qualifies(sym: Symbol) = ( + (sym ne null) + && (sym.isMethod || sym.isPrivateLocal && !nme.isLocalName(sym.name)) + && !sym.isParameter + && !sym.isParamAccessor // could improve this, but it's a pain + ) + + override def traverse(t: Tree): Unit = { + t match { + case t: ValOrDefDef if qualifies(t.symbol) => defnTrees += t + case t: RefTree if t.symbol ne null => targets += t.symbol + case _ => + } + super.traverse(t) + } + def isUnused(m: Symbol): Boolean = ( + m.isPrivate + && !targets(m) + && !ignoreNames(m.name) // serialization methods + && !isConstantType(m.info.resultType) // subject to constant inlining + ) + def unused = defnTrees.toList filter (t => isUnused(t.symbol)) + } + + def apply(unit: CompilationUnit) = { + val p = new UnusedPrivates + p traverse unit.body + p.unused foreach { defn: DefTree => + val sym = defn.symbol + val isDefaultGetter = sym.name containsName nme.DEFAULT_GETTER_STRING + val pos = ( + if (defn.pos.isDefined) defn.pos + else if (sym.pos.isDefined) sym.pos + else sym match { + case sym: TermSymbol => sym.referenced.pos + case _ => NoPosition + } + ) + val what = ( + if (isDefaultGetter) "default argument" + else if (sym.isConstructor) "constructor" + else if (sym.isSetter) "setter" + else if (sym.isGetter) "getter" + else if (sym.isMethod) "method" + else "member" + ) + unit.warning(pos, s"private $what in ${sym.owner} 
is never used") + } + } + } + object checkDead { private var expr: Symbol = NoSymbol diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check new file mode 100644 index 0000000000..c37e01106c --- /dev/null +++ b/test/files/neg/warn-unused-privates.check @@ -0,0 +1,30 @@ +warn-unused-privates.scala:2: warning: private constructor in class Bippy is never used + private def this(c: Int) = this(c, c) // warn + ^ +warn-unused-privates.scala:4: warning: private method in class Bippy is never used + private def boop(x: Int) = x+a+b // warn + ^ +warn-unused-privates.scala:6: warning: private getter in class Bippy is never used + final private val MILLIS2: Int = 1000 // warn + ^ +warn-unused-privates.scala:13: warning: private getter in object Bippy is never used + private val HEY_INSTANCE: Int = 1000 // warn + ^ +warn-unused-privates.scala:41: warning: private getter in trait Accessors is never used + private var v1: Int = 0 // warn + ^ +warn-unused-privates.scala:42: warning: private setter in trait Accessors is never used + private var v2: Int = 0 // warn, never set + ^ +warn-unused-privates.scala:43: warning: private getter in trait Accessors is never used + private var v3: Int = 0 // warn, never got + ^ +warn-unused-privates.scala:55: warning: private default argument in trait DefaultArgs is never used + private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3 + ^ +warn-unused-privates.scala:55: warning: private default argument in trait DefaultArgs is never used + private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3 + ^ +error: No warnings can be incurred under -Xfatal-warnings. +9 warnings found +one error found diff --git a/test/files/neg/warn-unused-privates.flags b/test/files/neg/warn-unused-privates.flags new file mode 100644 index 0000000000..7949c2afa2 --- /dev/null +++ b/test/files/neg/warn-unused-privates.flags @@ -0,0 +1 @@ +-Xlint -Xfatal-warnings diff --git a/test/files/neg/warn-unused-privates.scala b/test/files/neg/warn-unused-privates.scala new file mode 100644 index 0000000000..1ac272357f --- /dev/null +++ b/test/files/neg/warn-unused-privates.scala @@ -0,0 +1,58 @@ +class Bippy(a: Int, b: Int) { + private def this(c: Int) = this(c, c) // warn + private def bippy(x: Int): Int = bippy(x) // TODO: could warn + private def boop(x: Int) = x+a+b // warn + final private val MILLIS1 = 2000 // no warn, might have been inlined + final private val MILLIS2: Int = 1000 // warn + final private val HI_COMPANION: Int = 500 // no warn, accessed from companion + def hi() = Bippy.HI_INSTANCE +} +object Bippy { + def hi(x: Bippy) = x.HI_COMPANION + private val HI_INSTANCE: Int = 500 // no warn, accessed from instance + private val HEY_INSTANCE: Int = 1000 // warn +} + +class A(val msg: String) +class B1(msg: String) extends A(msg) +class B2(msg0: String) extends A(msg0) +class B3(msg0: String) extends A("msg") + +/*** Early defs full of noise due to SI-6595. 
***/ +/*** +class Boppy extends { + private val hmm: String = "abc" // no warn, used in early defs + private val hom: String = "def" // no warn, used in body + private final val him = "ghi" // no warn, might have been (was) inlined + final val him2 = "ghi" // no warn, same + final val himinline = him + private val hum: String = "jkl" // warn + final val ding = hmm.length +} with Mutable { + val dinger = hom + private val hummer = "def" // warn + + private final val bum = "ghi" // no warn, might have been (was) inlined + final val bum2 = "ghi" // no warn, same +} +***/ + +trait Accessors { + private var v1: Int = 0 // warn + private var v2: Int = 0 // warn, never set + private var v3: Int = 0 // warn, never got + private var v4: Int = 0 // no warn + + def bippy(): Int = { + v3 = 5 + v4 = 6 + v2 + v4 + } +} + +trait DefaultArgs { + // warn about default getters for x2 and x3 + private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3 + + def boppy() = bippy(5, 100, 200) +} diff --git a/test/files/pos/t5809.scala b/test/files/pos/t5809.scala index 133e13c4ed..4bcd743faa 100644 --- a/test/files/pos/t5809.scala +++ b/test/files/pos/t5809.scala @@ -1,5 +1,6 @@ package object foo { implicit class PimpedInt(foo: Int) { def bar = ??? + def bippy = foo } -} \ No newline at end of file +} -- cgit v1.2.3 From d5ebd7e069d6a60936267e239f74ce89a3851453 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 30 Oct 2012 14:29:23 -0700 Subject: Remove unused private members. That's a lot of unused code. Most of this is pure cruft; a small amount is debugging code which somebody might want to keep around, but we should not be using trunk as a repository of our personal snippets of undocumented, unused, unintegrated debugging code. So let's make the easy decision to err in the removing direction. If it isn't built to last, it shouldn't be checked into master. 
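For context: the deletions below are largely the fallout of the -Xlint check added in the previous commit. The typical candidate looks like the following sketch (a hypothetical `Scripts` class, mirroring the pattern of the ScriptRunner and GenASM removals, not code from this patch):

    class Scripts {
      // helper lost its last caller in an earlier refactor; -Xlint now
      // reports "private method in class Scripts is never used"
      private def splitName(fullname: String) = fullname.split('.').toList
      def run(name: String): Unit = println("running " + name)
    }

A few debugging aids (the inliner timing helper and the random interactive tests) are fenced off in /**** ... ****/ blocks rather than deleted, for anyone who wants to resurrect them later.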
--- .../scala/actors/migration/StashingActor.scala | 2 +- src/compiler/scala/reflect/reify/package.scala | 2 +- .../scala/reflect/reify/utils/SymbolTables.scala | 7 +- src/compiler/scala/tools/cmd/Reference.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 3 - src/compiler/scala/tools/nsc/ScriptRunner.scala | 11 --- .../scala/tools/nsc/ast/parser/MarkupParsers.scala | 2 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 17 +--- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 7 -- .../tools/nsc/backend/icode/ICodeCheckers.scala | 7 -- .../backend/icode/analysis/TypeFlowAnalysis.scala | 22 ------ .../scala/tools/nsc/backend/jvm/GenASM.scala | 14 ---- .../scala/tools/nsc/backend/jvm/GenJVM.scala | 14 ---- .../scala/tools/nsc/backend/msil/GenMSIL.scala | 91 ---------------------- .../tools/nsc/backend/opt/ClosureElimination.scala | 4 - .../nsc/backend/opt/DeadCodeElimination.scala | 7 -- .../scala/tools/nsc/backend/opt/Inliners.scala | 3 +- .../scala/tools/nsc/dependencies/Changes.scala | 7 +- .../html/page/diagram/DotDiagramGenerator.scala | 4 - .../scala/tools/nsc/doc/model/ModelFactory.scala | 9 --- .../nsc/doc/model/ModelFactoryTypeSupport.scala | 2 - .../doc/model/diagram/DiagramDirectiveParser.scala | 3 +- .../nsc/interactive/RefinedBuildManager.scala | 2 +- .../nsc/interactive/tests/InteractiveTest.scala | 2 + .../interactive/tests/core/SourcesCollector.scala | 3 +- .../scala/tools/nsc/interpreter/ILoop.scala | 46 ----------- .../scala/tools/nsc/interpreter/IMain.scala | 35 ++------- .../tools/nsc/interpreter/JLineCompletion.scala | 3 +- .../tools/nsc/interpreter/MemberHandlers.scala | 2 - .../scala/tools/nsc/interpreter/TypeStrings.scala | 4 - src/compiler/scala/tools/nsc/matching/Matrix.scala | 2 +- .../scala/tools/nsc/matching/PatternBindings.scala | 2 - .../scala/tools/nsc/matching/Patterns.scala | 7 -- .../tools/nsc/reporters/ConsoleReporter.scala | 1 - .../scala/tools/nsc/symtab/SymbolTrackers.scala | 4 - .../tools/nsc/symtab/classfile/ICodeReader.scala | 2 +- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 2 - .../scala/tools/nsc/transform/Erasure.scala | 1 - src/compiler/scala/tools/nsc/transform/Mixin.scala | 9 --- .../tools/nsc/transform/SpecializeTypes.scala | 28 ------- .../scala/tools/nsc/transform/UnCurry.scala | 6 -- .../tools/nsc/typechecker/ContextErrors.scala | 2 +- .../scala/tools/nsc/typechecker/Contexts.scala | 14 ---- .../tools/nsc/typechecker/DestructureTypes.scala | 2 - .../scala/tools/nsc/typechecker/Duplicators.scala | 11 --- .../tools/nsc/typechecker/PatternMatching.scala | 2 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 3 - .../tools/nsc/typechecker/SuperAccessors.scala | 3 - .../tools/nsc/typechecker/SyntheticMethods.scala | 6 -- .../scala/tools/nsc/typechecker/TreeCheckers.scala | 4 - .../tools/nsc/typechecker/TypeDiagnostics.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 8 +- src/compiler/scala/tools/nsc/util/ClassPath.scala | 4 - .../scala/tools/nsc/util/ScalaClassLoader.scala | 1 - .../scala/tools/nsc/util/WorkScheduler.scala | 6 +- src/library/scala/collection/IndexedSeqLike.scala | 1 - src/library/scala/collection/Iterator.scala | 2 - .../scala/collection/concurrent/TrieMap.scala | 4 +- src/library/scala/collection/immutable/List.scala | 6 -- .../scala/collection/immutable/NumericRange.scala | 27 ++----- .../scala/collection/immutable/StringLike.scala | 9 +-- .../scala/collection/immutable/Vector.scala | 7 +- .../scala/collection/mutable/IndexedSeqView.scala | 2 - src/library/scala/collection/parallel/Tasks.scala | 9 
--- .../collection/parallel/mutable/ParArray.scala | 2 - .../parallel/mutable/ParFlatHashTable.scala | 10 --- .../collection/parallel/mutable/ParHashMap.scala | 21 +---- .../collection/parallel/mutable/ParHashSet.scala | 5 +- src/library/scala/concurrent/JavaConversions.scala | 4 - src/library/scala/sys/SystemProperties.scala | 1 - .../scala/util/automata/WordBerrySethi.scala | 1 - src/library/scala/util/matching/Regex.scala | 4 +- src/library/scala/xml/persistent/SetStorage.scala | 6 +- .../lamp/compiler/msil/emit/AssemblyBuilder.scala | 3 - .../epfl/lamp/compiler/msil/emit/ILGenerator.scala | 2 +- .../ch/epfl/lamp/compiler/msil/emit/Label.scala | 15 ++-- .../scala/tools/partest/PartestDefaults.scala | 2 - src/partest/scala/tools/partest/PartestTask.scala | 2 - .../scala/tools/partest/nest/ConsoleRunner.scala | 2 - .../scala/tools/partest/nest/RunnerManager.scala | 3 +- src/reflect/scala/reflect/api/TypeTags.scala | 2 +- .../scala/reflect/internal/Definitions.scala | 13 +--- src/reflect/scala/reflect/internal/Names.scala | 10 ++- src/reflect/scala/reflect/internal/Printers.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 7 +- .../internal/util/TraceSymbolActivity.scala | 35 +-------- src/scalacheck/org/scalacheck/Commands.scala | 5 -- 87 files changed, 80 insertions(+), 613 deletions(-) (limited to 'src') diff --git a/src/actors-migration/scala/actors/migration/StashingActor.scala b/src/actors-migration/scala/actors/migration/StashingActor.scala index 12bad2ed1c..75f95b78ca 100644 --- a/src/actors-migration/scala/actors/migration/StashingActor.scala +++ b/src/actors-migration/scala/actors/migration/StashingActor.scala @@ -65,7 +65,7 @@ trait StashingActor extends InternalActor { * Puts the behavior on top of the hotswap stack. * If "discardOld" is true, an unbecome will be issued prior to pushing the new behavior to the stack */ - private def become(behavior: Receive, discardOld: Boolean = true) { + private def become(behavior: Receive, discardOld: Boolean) { if (discardOld) unbecome() behaviorStack = behaviorStack.push(wrapWithSystemMessageHandling(behavior)) } diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala index 55f8684df2..1ae6df14be 100644 --- a/src/compiler/scala/reflect/reify/package.scala +++ b/src/compiler/scala/reflect/reify/package.scala @@ -5,7 +5,7 @@ import scala.reflect.macros.{Context, ReificationException, UnexpectedReificatio import scala.tools.nsc.Global package object reify { - private def mkReifier(global1: Global)(typer: global1.analyzer.Typer, universe: global1.Tree, mirror: global1.Tree, reifee: Any, concrete: Boolean = false): Reifier { val global: global1.type } = { + private def mkReifier(global1: Global)(typer: global1.analyzer.Typer, universe: global1.Tree, mirror: global1.Tree, reifee: Any, concrete: Boolean): Reifier { val global: global1.type } = { val typer1: typer.type = typer val universe1: universe.type = universe val mirror1: mirror.type = mirror diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala index dbb0836e0a..2607b8f9b7 100644 --- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala +++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala @@ -89,11 +89,6 @@ trait SymbolTables { add(ValDef(NoMods, freshName(name0), TypeTree(), reification) updateAttachment bindingAttachment) } - private def add(sym: Symbol, name: TermName): SymbolTable = { - if (!(syms contains sym)) error("cannot 
add an alias to a symbol not in the symbol table") - add(sym, name, EmptyTree) - } - private def remove(sym: Symbol): SymbolTable = { val newSymtab = symtab - sym val newAliases = aliases filter (_._1 != sym) @@ -214,4 +209,4 @@ trait SymbolTables { } } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala index b6c564e9fb..4323b21dfd 100644 --- a/src/compiler/scala/tools/cmd/Reference.scala +++ b/src/compiler/scala/tools/cmd/Reference.scala @@ -46,7 +46,7 @@ object Reference { val MaxLine = 80 class Accumulators() { - private var _help = new ListBuffer[() => String] + private val _help = new ListBuffer[() => String] private var _unary = List[String]() private var _binary = List[String]() private var _expand = Map[String, List[String]]() diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 5b5cffa885..3e77fc982d 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1194,9 +1194,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Has any macro expansion used a fallback during this run? */ var seenMacroExpansionsFallingBack = false - /** To be initialized from firstPhase. */ - private var terminalPhase: Phase = NoPhase - private val unitbuf = new mutable.ListBuffer[CompilationUnit] val compiledFiles = new mutable.HashSet[String] diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index d64396bec7..89d64a2d2a 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -57,17 +57,6 @@ class ScriptRunner extends HasCompileSocket { else scriptFile.stripSuffix(".scala") + ".jar" ) - /** Read the entire contents of a file as a String. */ - private def contentsOfFile(filename: String) = File(filename).slurp() - - /** Split a fully qualified object name into a - * package and an unqualified object name */ - private def splitObjectName(fullname: String): (Option[String], String) = - (fullname lastIndexOf '.') match { - case -1 => (None, fullname) - case idx => (Some(fullname take idx), fullname drop (idx + 1)) - } - /** Compile a script using the fsc compilation daemon. 
*/ private def compileWithDaemon(settings: GenericRunnerSettings, scriptFileIn: String) = { diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index 991ee39258..9c03b10157 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -89,7 +89,7 @@ trait MarkupParsers { var xEmbeddedBlock = false - private var debugLastStartElement = new mutable.Stack[(Int, String)] + private val debugLastStartElement = new mutable.Stack[(Int, String)] private def debugLastPos = debugLastStartElement.top._1 private def debugLastElem = debugLastStartElement.top._2 diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 072f4b9ef2..380fd1fcaa 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -299,11 +299,7 @@ self => inScalaPackage = false currentPackage = "" } - private lazy val primitiveNames: Set[Name] = tpnme.ScalaValueNames.toSet - - private def inScalaRootPackage = inScalaPackage && currentPackage == "scala" - private def isScalaArray(name: Name) = inScalaRootPackage && name == tpnme.Array - private def isPrimitiveType(name: Name) = inScalaRootPackage && primitiveNames(name) + private def inScalaRootPackage = inScalaPackage && currentPackage == "scala" def parseStartRule: () => Tree @@ -1138,16 +1134,7 @@ self => }) } - private def stringOp(t: Tree, op: TermName) = { - val str = in.strVal - in.nextToken() - if (str.length == 0) t - else atPos(t.pos.startOrPoint) { - Apply(Select(t, op), List(Literal(Constant(str)))) - } - } - - private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) { + private def interpolatedString(inPattern: Boolean): Tree = atPos(in.offset) { val start = in.offset val interpolator = in.name diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index c6a38f5be6..3ff52cc32b 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -379,13 +379,6 @@ abstract class TreeBuilder { def makeCombination(pos: Position, meth: TermName, qual: Tree, pat: Tree, body: Tree): Tree = Apply(Select(qual, meth) setPos qual.pos, List(makeClosure(pos, pat, body))) setPos pos - /** Optionally, if pattern is a `Bind`, the bound name, otherwise None. 
- */ - def patternVar(pat: Tree): Option[Name] = pat match { - case Bind(name, _) => Some(name) - case _ => None - } - /** If `pat` is not yet a `Bind` wrap it in one with a fresh name */ def makeBind(pat: Tree): Tree = pat match { diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index aa3f4dcb7e..5ccbbf997e 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -103,7 +103,6 @@ abstract class ICodeCheckers { private def posStr(p: Position) = if (p.isDefined) p.line.toString else "" - private def indent(s: String, spaces: Int): String = indent(s, " " * spaces) private def indent(s: String, prefix: String): String = { val lines = s split "\\n" lines map (prefix + _) mkString "\n" @@ -170,7 +169,6 @@ abstract class ICodeCheckers { val preds = bl.predecessors def hasNothingType(s: TypeStack) = s.nonEmpty && (s.head == NothingReference) - def hasNullType(s: TypeStack) = s.nonEmpty && (s.head == NullReference) /** XXX workaround #1: one stack empty, the other has BoxedUnit. * One example where this arises is: @@ -369,11 +367,6 @@ abstract class ICodeCheckers { } } - /** Return true if k1 is a subtype of any of the following types, - * according to the somewhat relaxed subtyping standards in effect here. - */ - def isOneOf(k1: TypeKind, kinds: TypeKind*) = kinds exists (k => isSubtype(k1, k)) - def subtypeTest(k1: TypeKind, k2: TypeKind): Unit = if (isSubtype(k1, k2)) () else typeError(k2, k1) diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 5d81109ac9..cdf2788284 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -546,9 +546,6 @@ abstract class TypeFlowAnalysis { relevantBBs ++= blocks } - /* the argument is also included in the result */ - private def transitivePreds(b: BasicBlock): Set[BasicBlock] = { transitivePreds(List(b)) } - /* those BBs in the argument are also included in the result */ private def transitivePreds(starters: Traversable[BasicBlock]): Set[BasicBlock] = { val result = mutable.Set.empty[BasicBlock] @@ -562,19 +559,6 @@ abstract class TypeFlowAnalysis { result.toSet } - /* those BBs in the argument are also included in the result */ - private def transitiveSuccs(starters: Traversable[BasicBlock]): Set[BasicBlock] = { - val result = mutable.Set.empty[BasicBlock] - var toVisit: List[BasicBlock] = starters.toList.distinct - while(toVisit.nonEmpty) { - val h = toVisit.head - toVisit = toVisit.tail - result += h - for(p <- h.successors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit } - } - result.toSet - } - /* A basic block B is "on the perimeter" of the current control-flow subgraph if none of its successors belongs to that subgraph. * In that case, for the purposes of inlining, we're interested in the typestack right before the last inline candidate in B, not in those afterwards. * In particular we can do without computing the outflow at B. 
*/ @@ -685,12 +669,6 @@ abstract class TypeFlowAnalysis { if(!worklist.contains(b)) { worklist += b } } - /* this is not a general purpose method to add to the worklist, - * because the assert is expected to hold only when called from MTFAGrowable.reinit() */ - private def enqueue(bs: Traversable[BasicBlock]) { - bs foreach enqueue - } - private def blankOut(blocks: scala.collection.Set[BasicBlock]) { blocks foreach { b => in(b) = typeFlowLattice.bottom diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 18222794a8..a6e4339d82 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -32,20 +32,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { /** Create a new phase */ override def newPhase(p: Phase): Phase = new AsmPhase(p) - private def outputDirectory(sym: Symbol): AbstractFile = - settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile) - - private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - var dir = base - val pathParts = clsName.split("[./]").toList - for (part <- pathParts.init) { - dir = dir.subdirectoryNamed(part) - } - dir.fileNamed(pathParts.last + suffix) - } - private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile = - getFile(outputDirectory(sym), clsName, suffix) - /** From the reference documentation of the Android SDK: * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`. * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`, diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index 617c641fa9..6797b15cc6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -37,20 +37,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with /** Create a new phase */ override def newPhase(p: Phase): Phase = new JvmPhase(p) - private def outputDirectory(sym: Symbol): AbstractFile = - settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile) - - private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - var dir = base - val pathParts = clsName.split("[./]").toList - for (part <- pathParts.init) { - dir = dir.subdirectoryNamed(part) - } - dir.fileNamed(pathParts.last + suffix) - } - private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile = - getFile(outputDirectory(sym), clsName, suffix) - /** JVM code generation phase */ class JvmPhase(prev: Phase) extends ICodePhase(prev) { diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala index 39ea074dc0..8197e564d1 100644 --- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala +++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala @@ -2244,97 +2244,6 @@ abstract class GenMSIL extends SubComponent { methods(sym) = mInfo } - /* - * add mapping between sym and method with newName, paramTypes of newClass - */ - private def mapMethod(sym: Symbol, newClass: MsilType, newName: String, paramTypes: Array[MsilType]) { - val methodInfo = newClass.GetMethod(newName, paramTypes) - assert(methodInfo != null, "Can't find mapping for " + sym + " -> " + - newName + "(" + paramTypes + ")") - mapMethod(sym, 
methodInfo) - if (methodInfo.IsStatic) - dynToStatMapped += sym - } - - /* - * add mapping between method with name and paramTypes of clazz to - * method with newName and newParamTypes of newClass (used for instance - * for "wait") - */ - private def mapMethod( - clazz: Symbol, name: Name, paramTypes: Array[Type], - newClass: MsilType, newName: String, newParamTypes: Array[MsilType]) { - val methodSym = lookupMethod(clazz, name, paramTypes) - assert(methodSym != null, "cannot find method " + name + "(" + - paramTypes + ")" + " in class " + clazz) - mapMethod(methodSym, newClass, newName, newParamTypes) - } - - /* - * add mapping for member with name and paramTypes to member - * newName of newClass (same parameters) - */ - private def mapMethod( - clazz: Symbol, name: Name, paramTypes: Array[Type], - newClass: MsilType, newName: String) { - mapMethod(clazz, name, paramTypes, newClass, newName, paramTypes map msilType) - } - - /* - * add mapping for all methods with name of clazz to the corresponding - * method (same parameters) with newName of newClass - */ - private def mapMethod( - clazz: Symbol, name: Name, - newClass: MsilType, newName: String) { - val memberSym: Symbol = clazz.tpe.member(name) - memberSym.tpe match { - // alternatives: List[Symbol] - case OverloadedType(_, alternatives) => - alternatives.foreach(s => mapMethod(s, newClass, newName, msilParamTypes(s))) - - // paramTypes: List[Type], resType: Type - case MethodType(params, resType) => - mapMethod(memberSym, newClass, newName, msilParamTypes(memberSym)) - - case _ => - abort("member not found: " + clazz + ", " + name) - } - } - - - /* - * find the method in clazz with name and paramTypes - */ - private def lookupMethod(clazz: Symbol, name: Name, paramTypes: Array[Type]): Symbol = { - val memberSym = clazz.tpe.member(name) - memberSym.tpe match { - case OverloadedType(_, alternatives) => - alternatives.find(s => { - var i: Int = 0 - var typesOK: Boolean = true - if (paramTypes.length == s.tpe.paramTypes.length) { - while(i < paramTypes.length) { - if (paramTypes(i) != s.tpe.paramTypes(i)) - typesOK = false - i += 1 - } - } else { - typesOK = false - } - typesOK - }) match { - case Some(sym) => sym - case None => abort("member of " + clazz + ", " + name + "(" + - paramTypes + ") not found") - } - - case MethodType(_, _) => memberSym - - case _ => abort("member not found: " + name + " of " + clazz) - } - } - private def showsym(sym: Symbol): String = (sym.toString + "\n symbol = " + Flags.flagsToString(sym.flags) + " " + sym + "\n owner = " + Flags.flagsToString(sym.owner.flags) + " " + sym.owner diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala index eb2da72401..bcdcbfd435 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala @@ -197,16 +197,12 @@ abstract class ClosureElimination extends SubComponent { /** Peephole optimization. 
*/ abstract class PeepholeOpt { - - private var method: IMethod = NoIMethod - /** Concrete implementations will perform their optimizations here */ def peep(bb: BasicBlock, i1: Instruction, i2: Instruction): Option[List[Instruction]] var liveness: global.icodes.liveness.LivenessAnalysis = null def apply(m: IMethod): Unit = if (m.hasCode) { - method = m liveness = new global.icodes.liveness.LivenessAnalysis liveness.init(m) liveness.run diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index 36a5d61cfb..c03f7999f4 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -280,13 +280,6 @@ abstract class DeadCodeElimination extends SubComponent { compensations } - private def withClosed[a](bb: BasicBlock)(f: => a): a = { - if (bb.nonEmpty) bb.close - val res = f - if (bb.nonEmpty) bb.open - res - } - private def findInstruction(bb: BasicBlock, i: Instruction): (BasicBlock, Int) = { for (b <- linearizer.linearizeAt(method, bb)) { val idx = b.toList indexWhere (_ eq i) diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index 5c2c2a37e6..ab5184dcbd 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -50,6 +50,7 @@ abstract class Inliners extends SubComponent { val phaseName = "inliner" /** Debug - for timing the inliner. */ + /**** private def timed[T](s: String, body: => T): T = { val t1 = System.currentTimeMillis() val res = body @@ -60,6 +61,7 @@ abstract class Inliners extends SubComponent { res } + ****/ /** Look up implementation of method 'sym in 'clazz'. */ @@ -1031,7 +1033,6 @@ abstract class Inliners extends SubComponent { case Public => true } private def sameSymbols = caller.sym == inc.sym - private def sameOwner = caller.owner == inc.owner /** Gives green light for inlining (which may still be vetoed later). 
Heuristics: * - it's bad to make the caller larger (> SMALL_METHOD_SIZE) if it was small diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala index 7f5f412a20..c8ff700208 100644 --- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala +++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala @@ -61,12 +61,7 @@ abstract class Changes { annotationsChecked.forall(a => (sym1.hasAnnotation(a) == sym2.hasAnnotation(a))) - private def sameType(tp1: Type, tp2: Type)(implicit strict: Boolean) = { - def typeOf(tp: Type): String = tp.toString + "[" + tp.getClass + "]" - val res = sameType0(tp1, tp2) - //if (!res) println("\t different types: " + typeOf(tp1) + " : " + typeOf(tp2)) - res - } + private def sameType(tp1: Type, tp2: Type)(implicit strict: Boolean) = sameType0(tp1, tp2) private def sameType0(tp1: Type, tp2: Type)(implicit strict: Boolean): Boolean = ((tp1, tp2) match { /*case (ErrorType, _) => false diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala index 304c534bdc..8c1e9b0fe0 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala @@ -22,8 +22,6 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { private var pathToLib: String = null // maps nodes to unique indices private var node2Index: Map[Node, Int] = null - // maps an index to its corresponding node - private var index2Node: Map[Int, Node] = null // true if the current diagram is a class diagram private var isInheritanceDiagram = false // incoming implicit nodes (needed for determining the CSS class of a node) @@ -42,7 +40,6 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { // clean things up a bit, so we don't leave garbage on the heap this.page = null node2Index = null - index2Node = null incomingImplicitNodes = List() result } @@ -116,7 +113,6 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { node2Index = d.nodes.zipWithIndex.toMap incomingImplicitNodes = List() } - index2Node = node2Index map {_.swap} val implicitsDot = { if (!isInheritanceDiagram) "" diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index 6690eee1ea..2ca80c9282 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -43,20 +43,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def modelFinished: Boolean = _modelFinished private var universe: Universe = null - private def dbg(msg: String) = if (sys.props contains "scala.scaladoc.debug") println(msg) protected def closestPackage(sym: Symbol) = { if (sym.isPackage || sym.isPackageClass) sym else sym.enclosingPackage } - private def printWithoutPrefix(memberSym: Symbol, templateSym: Symbol) = { - dbg( - "memberSym " + memberSym + " templateSym " + templateSym + " encls = " + - closestPackage(memberSym) + ", " + closestPackage(templateSym) - ) - memberSym.isOmittablePrefix || (closestPackage(memberSym) == closestPackage(templateSym)) - } - def makeModel: Option[Universe] = { val universe = new Universe { thisUniverse => thisFactory.universe = thisUniverse diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala 
b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala index c435930c7c..cd86dcb606 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala @@ -34,8 +34,6 @@ trait ModelFactoryTypeSupport { /** */ def makeType(aType: Type, inTpl: TemplateImpl): TypeEntity = { - def templatePackage = closestPackage(inTpl.sym) - def createTypeEntity = new TypeEntity { private var nameBuffer = new StringBuilder private var refBuffer = new immutable.TreeMap[Int, (LinkTo, Int)] diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala index 49cfaffc2e..7f8268c7c5 100644 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala +++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala @@ -154,7 +154,6 @@ trait DiagramDirectiveParser { private val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\"" private val NodeSpecPattern = Pattern.compile(NodeSpecRegex) private val EdgeSpecRegex = "\\(" + NodeSpecRegex + "\\s*\\->\\s*" + NodeSpecRegex + "\\)" - private val EdgeSpecPattern = Pattern.compile(NodeSpecRegex) // And the composed regexes: private val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$") private val HideEdgesRegex = new Regex("^hideEdges(\\s*" + EdgeSpecRegex + ")+$") @@ -259,4 +258,4 @@ trait DiagramDirectiveParser { result } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala index f3d454ad3e..f07a0a49ab 100644 --- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala +++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala @@ -69,7 +69,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana private var inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] = _ /** Reverse of definitions, used for caching */ - private var classes: mutable.Map[String, AbstractFile] = + private val classes: mutable.Map[String, AbstractFile] = new mutable.HashMap[String, AbstractFile] { override def default(key: String) = null } diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala index cb46c0fdca..88ea259e0e 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala @@ -110,6 +110,7 @@ abstract class InteractiveTest } /** Perform n random tests with random changes. */ + /**** private def randomTests(n: Int, files: Array[SourceFile]) { val tester = new Tester(n, files, settings) { override val compiler = self.compiler @@ -117,6 +118,7 @@ abstract class InteractiveTest } tester.run() } + ****/ /** shutdown the presentation compiler. 
*/ protected def shutdown() { diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala index e80b741a8d..471a05a44d 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala @@ -17,6 +17,5 @@ private[tests] object SourcesCollector { } private def source(file: Path): SourceFile = source(AbstractFile.getFile(file.toFile)) - private def source(filename: String): SourceFile = source(AbstractFile.getFile(filename)) private def source(file: AbstractFile): SourceFile = new BatchSourceFile(file) -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index 1d6ec77ef2..18d0567ff3 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -276,21 +276,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) cmd("phase", "", "set the implicit phase for power commands", phaseCommand) ) - private def dumpCommand(): Result = { - echo("" + power) - history.asStrings takeRight 30 foreach echo - in.redrawLine() - } - private def valsCommand(): Result = power.valsDescription - - private val typeTransforms = List( - "scala.collection.immutable." -> "immutable.", - "scala.collection.mutable." -> "mutable.", - "scala.collection.generic." -> "generic.", - "java.lang." -> "jl.", - "scala.runtime." -> "runtime." - ) - private def importsCommand(line: String): Result = { val tokens = words(line) val handlers = intp.languageWildcardHandlers ++ intp.importHandlers @@ -458,36 +443,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } } - private def wrapCommand(line: String): Result = { - def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T" - onIntp { intp => - import intp._ - import global._ - - words(line) match { - case Nil => - intp.executionWrapper match { - case "" => "No execution wrapper is set." - case s => "Current execution wrapper: " + s - } - case "clear" :: Nil => - intp.executionWrapper match { - case "" => "No execution wrapper is set." - case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper." 
- } - case wrapper :: Nil => - intp.typeOfExpression(wrapper) match { - case PolyType(List(targ), MethodType(List(arg), restpe)) => - intp setExecutionWrapper intp.pathToTerm(wrapper) - "Set wrapper to '" + wrapper + "'" - case tp => - failMsg + "\nFound: " - } - case _ => failMsg - } - } - } - private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent" private def phaseCommand(name: String): Result = { val phased: Phased = power.phased @@ -891,7 +846,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) object ILoop { implicit def loopToInterpreter(repl: ILoop): IMain = repl.intp - private def echo(msg: String) = Console println msg // Designed primarily for use by test code: take a String with a // bunch of code, and prints out a transcript of what it would look diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 4e702a09e6..a44f862dd7 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -437,18 +437,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends executingRequest } - // rewriting "5 // foo" to "val x = { 5 // foo }" creates broken code because - // the close brace is commented out. Strip single-line comments. - // ... but for error message output reasons this is not used, and rather than - // enclosing in braces it is constructed like "val x =\n5 // foo". - private def removeComments(line: String): String = { - showCodeIfDebugging(line) // as we're about to lose our // show - line.lines map (s => s indexOf "//" match { - case -1 => s - case idx => s take idx - }) mkString "\n" - } - private def safePos(t: Tree, alt: Int): Int = try t.pos.startOrPoint catch { case _: UnsupportedOperationException => alt } @@ -682,10 +670,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends class ReadEvalPrint(lineId: Int) { def this() = this(freshLineId()) - private var lastRun: Run = _ - private var evalCaught: Option[Throwable] = None - private var conditionalWarnings: List[ConditionalWarning] = Nil - val packageName = sessionNames.line + lineId val readName = sessionNames.read val evalName = sessionNames.eval @@ -742,10 +726,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } lazy val evalClass = load(evalPath) - lazy val evalValue = callEither(resultName) match { - case Left(ex) => evalCaught = Some(ex) ; None - case Right(result) => Some(result) - } + lazy val evalValue = callOpt(resultName) def compile(source: String): Boolean = compileAndSaveRun("", source) @@ -789,7 +770,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends showCodeIfDebugging(code) val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code))) updateRecentWarnings(run) - lastRun = run success } } @@ -1150,13 +1130,12 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /** Secret bookcase entrance for repl debuggers: end the line * with "// show" and see what's going on. 
*/ - def isShow = code.lines exists (_.trim endsWith "// show") - def isShowRaw = code.lines exists (_.trim endsWith "// raw") - - // old style - beSilentDuring(parse(code)) foreach { ts => - ts foreach { t => - withoutUnwrapping(repldbg(asCompactString(t))) + def isShow = code.lines exists (_.trim endsWith "// show") + if (isReplDebug || isShow) { + beSilentDuring(parse(code)) foreach { ts => + ts foreach { t => + withoutUnwrapping(echo(asCompactString(t))) + } } } } diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala index 9a4be27c76..9dcaf0e05e 100644 --- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala +++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala @@ -326,8 +326,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput Some(Candidates(newCursor, winners)) } - def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity - def mkUndelimited = Parsed.undelimited(buf, cursor) withVerbosity verbosity + def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity // a single dot is special cased to completion on the previous result def lastResultCompletion = diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala index df96a27291..7e35a0a98a 100644 --- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -21,8 +21,6 @@ trait MemberHandlers { private def codegenln(leadingPlus: Boolean, xs: String*): String = codegen(leadingPlus, (xs ++ Array("\n")): _*) private def codegenln(xs: String*): String = codegenln(true, xs: _*) - - private def codegen(xs: String*): String = codegen(true, xs: _*) private def codegen(leadingPlus: Boolean, xs: String*): String = { val front = if (leadingPlus) "+ " else "" front + (xs map string2codeQuoted mkString " + ") diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala index 0bf4999fd6..c4687841d5 100644 --- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala @@ -39,7 +39,6 @@ trait StructuredTypeStrings extends DestructureTypes { val ParamGrouping = Grouping("(", ", ", ")", true) val BlockGrouping = Grouping(" { ", "; ", "}", false) - private implicit def lowerName(n: Name): String = "" + n private def str(level: Int)(body: => String): String = " " * level + body private def block(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = { val l1 = str(level)(name + grouping.ldelim) @@ -214,9 +213,6 @@ trait TypeStrings { private def tparamString[T: ru.TypeTag] : String = { import ru._ def typeArguments: List[ru.Type] = ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil } - // [Eugene to Paul] need to use not the `rootMirror`, but a mirror with the REPL's classloader - // how do I get to it? 
acquiring context classloader seems unreliable because of multithreading - def typeVariables: List[java.lang.Class[_]] = typeArguments map (targ => ru.rootMirror.runtimeClass(targ)) brackets(typeArguments map (jc => tvarString(List(jc))): _*) } diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala index 93e936fe1f..7788343069 100644 --- a/src/compiler/scala/tools/nsc/matching/Matrix.scala +++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala @@ -247,7 +247,7 @@ trait Matrix extends MatrixAdditions { private def newVar( pos: Position, tpe: Type, - flags: List[Long] = Nil, + flags: List[Long], name: TermName = null): Symbol = { val n = if (name == null) cunit.freshTermName("temp") else name diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala index ee96f15f40..57d5128c02 100644 --- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala +++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala @@ -108,8 +108,6 @@ trait PatternBindings extends ast.TreeDSL case b @ Bind(_, pat) => b.symbol :: strip(pat) case _ => Nil } - private def deepstrip(t: Tree): List[Symbol] = - treeCollect(t, { case x: Bind => x.symbol }) } case class Binding(pvar: Symbol, tvar: Symbol) { diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala index 40e520076a..35d015d543 100644 --- a/src/compiler/scala/tools/nsc/matching/Patterns.scala +++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala @@ -189,13 +189,6 @@ trait Patterns extends ast.TreeDSL { private lazy val packedType = global.typer.computeType(tpt, tpt.tpe) private lazy val consRef = appliedType(ConsClass, packedType) private lazy val listRef = appliedType(ListClass, packedType) - private lazy val seqRef = appliedType(SeqClass, packedType) - - private def thisSeqRef = { - val tc = (tree.tpe baseType SeqClass).typeConstructor - if (tc.typeParams.size == 1) appliedType(tc, List(packedType)) - else seqRef - } // Fold a list into a well-typed x :: y :: etc :: tree. 
private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match { diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala index 6fae641487..e816d6d36e 100644 --- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala @@ -94,6 +94,5 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr } } - private def abort(msg: String) = throw new Error(msg) override def flush() { writer.flush() } } diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala index d9d25bf95a..249f6151ef 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala @@ -17,9 +17,6 @@ trait SymbolTrackers { val global: Global import global._ - private implicit lazy val TreeOrdering: Ordering[Tree] = - Ordering by (x => (x.shortClass, x.symbol)) - private implicit lazy val SymbolOrdering: Ordering[Symbol] = Ordering by (x => (x.kindString, x.name.toString)) @@ -76,7 +73,6 @@ trait SymbolTrackers { private def isFlagsChange(sym: Symbol) = changed.flags contains sym private implicit def NodeOrdering: Ordering[Node] = Ordering by (_.root) - private def ownersString(sym: Symbol, num: Int) = sym.ownerChain drop 1 take num mkString " -> " object Node { def nodes(syms: Set[Symbol]): List[Node] = { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index c02503902e..b286f52280 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -901,7 +901,7 @@ abstract class ICodeReader extends ClassfileParser { for (bb <- method.code.blocks ; (i, idx) <- bb.toList.zipWithIndex) i match { case cm @ CALL_METHOD(m, Static(true)) if m.isClassConstructor => - def loop(bb0: BasicBlock, idx0: Int, depth: Int = 0): Unit = { + def loop(bb0: BasicBlock, idx0: Int, depth: Int): Unit = { rdef.findDefs(bb0, idx0, 1, depth) match { case ((bb1, idx1)) :: _ => bb1(idx1) match { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 933c689378..7c82895677 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -26,8 +26,6 @@ import Flags._ abstract class Pickler extends SubComponent { import global._ - private final val showSig = false - val phaseName = "pickler" currentRun diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 45be0901c3..7d7e53b946 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -744,7 +744,6 @@ abstract class Erasure extends AddInterfaces } if (noNullCheckNeeded) unbox(qual1, targ.tpe) else { - def nullConst = Literal(Constant(null)) setType NullClass.tpe val untyped = // util.trace("new asinstanceof test") { gen.evalOnce(qual1, context.owner, context.unit) { qual => diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 6e0d2bb08a..93575d291e 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ 
b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -197,9 +197,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * - lazy fields don't get a setter. */ def addLateInterfaceMembers(clazz: Symbol) { - def makeConcrete(member: Symbol) = - member setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED) - if (treatedClassInfos(clazz) != clazz.info) { treatedClassInfos(clazz) = clazz.info assert(phase == currentRun.mixinPhase, phase) @@ -980,12 +977,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { def addInitBits(clazz: Symbol, rhs: Tree): Tree = new AddInitBitsTransformer(clazz) transform rhs - def isCheckInitField(field: Symbol) = - needsInitFlag(field) && !field.isDeferred - - def superClassesToCheck(clazz: Symbol) = - clazz.ancestors filterNot (_ hasFlag TRAIT | JAVA) - // begin addNewDefs /** Fill the map from fields to offset numbers. diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 88c6f8d823..1f815ff4c7 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -101,7 +101,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { /** Concrete methods that use a specialized type, or override such methods. */ private val concreteSpecMethods = perRunCaches.newWeakSet[Symbol]() - private def specializedTypes(tps: List[Symbol]) = tps filter (_.isSpecialized) private def specializedOn(sym: Symbol): List[Symbol] = { sym getAnnotation SpecializedClass match { case Some(AnnotationInfo(_, Nil, _)) => specializableTypes.map(_.typeSymbol) @@ -1120,10 +1119,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - /** Apply type bindings in the given environment `env` to all declarations. */ - private def subst(env: TypeEnv, decls: List[Symbol]): List[Symbol] = - decls map subst(env) - /** Apply the type environment 'env' to the given type. All type * bindings are supposed to be to primitive types. A type variable * that is annotated with 'uncheckedVariance' is mapped to the corresponding @@ -1154,29 +1149,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { else subst(env, info) ) - /** Checks if the type parameter symbol is not specialized - * and is used as type parameters when extending a class with a specialized - * type parameter. - * At some point we may remove this restriction. - * - * Example: - * - * class Base[@specialized T] - * class Derived[T] extends Base[T] // a non-specialized T is - * // used as a type param for Base - * // -> returning true - */ - private def notSpecializedIn(tsym: Symbol, supertpe: Type) = supertpe match { - case TypeRef(_, supersym, supertargs) => - val tspec = specializedOn(tsym).toSet - for (supt <- supersym.typeParams) { - val supspec = specializedOn(supt).toSet - if (tspec != supspec && tspec.subsetOf(supspec)) - reporter.error(tsym.pos, "Type parameter has to be specialized at least for the same types as in the superclass. Missing types: " + (supspec.diff(tspec)).mkString(", ")) - } - case _ => //log("nope") - } - private def unspecializableClass(tp: Type) = ( definitions.isRepeatedParamType(tp) // ??? 
|| tp.typeSymbol.isJavaDefined diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 5fc5d2127b..598aaffd4a 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -112,8 +112,6 @@ abstract class UnCurry extends InfoTransform private lazy val serialVersionUIDAnnotation = AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List()) - private var nprinted = 0 - // I don't have a clue why I'm catching TypeErrors here, but it's better // than spewing stack traces at end users for internal errors. Examples // which hit at this point should not be hard to come by, but the immediate @@ -802,10 +800,6 @@ abstract class UnCurry extends InfoTransform if (!dd.symbol.hasAnnotation(VarargsClass) || !repeatedParams.contains(dd.symbol)) return flatdd - def toSeqType(tp: Type): Type = { - val arg = elementType(ArrayClass, tp) - seqType(arg) - } def toArrayType(tp: Type): Type = { val arg = elementType(SeqClass, tp) // to prevent generation of an `Object` parameter from `Array[T]` parameter later diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index b6cb3626ec..bd1649dec5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -670,7 +670,7 @@ trait ContextErrors { // same reason as for MacroBodyTypecheckException case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable - private def macroExpansionError(expandee: Tree, msg: String = null, pos: Position = NoPosition) = { + private def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = { def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg macroLogLite("macro expansion has failed: %s".format(msgForLog)) val errorPos = if (pos != NoPosition) pos else (if (expandee.pos != NoPosition) expandee.pos else enclosingMacroPosition) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 44b584e237..92e2bc186e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -511,20 +511,6 @@ trait Contexts { self: Analyzer => } else (owner hasTransOwner ab) } -/* - var c = this - while (c != NoContext && c.owner != owner) { - if (c.outer eq null) assert(false, "accessWithin(" + owner + ") " + c);//debug - if (c.outer.enclClass eq null) assert(false, "accessWithin(" + owner + ") " + c);//debug - c = c.outer.enclClass - } - c != NoContext - } -*/ - /** Is `clazz` a subclass of an enclosing class? 
*/ - def isSubClassOfEnclosing(clazz: Symbol): Boolean = - enclosingSuperClassContext(clazz) != NoContext - def isSubThisType(pre: Type, clazz: Symbol): Boolean = pre match { case ThisType(pclazz) => pclazz isNonBottomSubClass clazz case _ => false diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala index 2555d199d5..ea406dac2d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala +++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala @@ -37,8 +37,6 @@ trait DestructureTypes { def wrapSequence(nodes: List[Node]): Node def wrapAtom[U](value: U): Node - private implicit def liftToTerm(name: String): TermName = newTermName(name) - private val openSymbols = scala.collection.mutable.Set[Symbol]() private def nodeList[T](elems: List[T], mkNode: T => Node): Node = diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 0a07a598d9..56ecf1fd00 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -200,17 +200,6 @@ abstract class Duplicators extends Analyzer { typed(ddef) } - private def inspectTpe(tpe: Type) = { - tpe match { - case MethodType(_, res) => - res + ", " + res.bounds.hi + ", " + (res.bounds.hi match { - case TypeRef(_, _, args) if (args.length > 0) => args(0) + ", " + args(0).bounds.hi - case _ => "non-tref: " + res.bounds.hi.getClass - }) - case _ => - } - } - /** Optionally cast this tree into some other type, if required. * Unless overridden, just returns the tree. */ diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 21e2b7ceec..3f0a4d1548 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -2054,7 +2054,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // throws an AnalysisBudget.Exception when the prop results in a CNF that's too big // TODO: be smarter/more efficient about this (http://lara.epfl.ch/w/sav09:tseitin_s_encoding) def eqFreePropToSolvable(p: Prop): Formula = { - def negationNormalFormNot(p: Prop, budget: Int = AnalysisBudget.max): Prop = + def negationNormalFormNot(p: Prop, budget: Int): Prop = if (budget <= 0) throw AnalysisBudget.exceeded else p match { case And(a, b) => Or(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1)) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index dacb68ea86..c04a8661b2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -834,7 +834,6 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // Variance Checking -------------------------------------------------------- - private val ContraVariance = -1 private val NoVariance = 0 private val CoVariance = 1 private val AnyVariance = 2 @@ -1108,8 +1107,6 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s) // used to short-circuit unrelatedTypes check if both sides are special def isSpecial(s: Symbol) = isMaybeAnyValue(s) || 
isAnyNumber(s) - // unused - def possibleNumericCount = onSyms(_ filter (x => isNumeric(x) || isMaybeValue(x)) size) val nullCount = onSyms(_ filter (_ == NullClass) size) def nonSensibleWarning(what: String, alwaysEqual: Boolean) = { diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 2306575d74..b8b34ce738 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -509,9 +509,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT def accessibleThroughSubclassing = validCurrentOwner && clazz.thisSym.isSubClass(sym.owner) && !clazz.isTrait - def packageAccessBoundry(sym: Symbol) = - sym.accessBoundary(sym.enclosingPackageClass) - val isCandidate = ( sym.isProtected && sym.isJavaDefined diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 2bfad223f6..4bcdb177ae 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -104,12 +104,6 @@ trait SyntheticMethods extends ast.TreeDSL { (m0 ne meth) && !m0.isDeferred && !m0.isSynthetic && (m0.owner != AnyValClass) && (typeInClazz(m0) matches typeInClazz(meth)) } } - def readConstantValue[T](name: String, default: T = null.asInstanceOf[T]): T = { - clazzMember(newTermName(name)).info match { - case NullaryMethodType(ConstantType(Constant(value))) => value.asInstanceOf[T] - case _ => default - } - } def productIteratorMethod = { createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ => gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(mkThis)) diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index 153fb76b3e..96f4ef3f55 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -186,10 +186,6 @@ abstract class TreeCheckers extends Analyzer { errorFn(t1.pos, "trees differ\n old: " + treestr(t1) + "\n new: " + treestr(t2)) private def typesDiffer(tree: Tree, tp1: Type, tp2: Type) = errorFn(tree.pos, "types differ\n old: " + tp1 + "\n new: " + tp2 + "\n tree: " + tree) - private def ownersDiffer(tree: Tree, shouldBe: Symbol) = { - val sym = tree.symbol - errorFn(tree.pos, sym + " has wrong owner: " + ownerstr(sym.owner) + ", should be: " + ownerstr(shouldBe)) - } /** XXX Disabled reporting of position errors until there is less noise. */ private def noPos(t: Tree) = diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 283d0fa440..34f736e047 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -58,7 +58,7 @@ trait TypeDiagnostics { /** A map of Positions to addendums - if an error involves a position in * the map, the addendum should also be printed. 
*/ - private var addendums = perRunCaches.newMap[Position, () => String]() + private val addendums = perRunCaches.newMap[Position, () => String]() private var isTyperInPattern = false /** Devising new ways of communicating error info out of diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a6c8a5d887..19c2c4042a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2454,7 +2454,6 @@ trait Typers extends Modes with Adaptations with Tags { assert(isPartial) private val anonClass = context.owner.newAnonymousFunctionClass(tree.pos) - private val funThis = This(anonClass) anonClass addAnnotation AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List()) @@ -3933,15 +3932,10 @@ trait Typers extends Modes with Adaptations with Tags { } def typed1(tree: Tree, mode: Int, pt: Type): Tree = { - def isPatternMode = inPatternMode(mode) + def isPatternMode = inPatternMode(mode) def inPatternConstructor = inAllModes(mode, PATTERNmode | FUNmode) def isQualifierMode = (mode & QUALmode) != 0 - //@M! get the type of the qualifier in a Select tree, otherwise: NoType - def prefixType(fun: Tree): Type = fun match { - case Select(qualifier, _) => qualifier.tpe - case _ => NoType - } // Lookup in the given class using the root mirror. def lookupInOwner(owner: Symbol, name: Name): Symbol = if (isQualifierMode) rootMirror.missingHook(owner, name) else NoSymbol diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index 65aba2b721..c732917835 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -32,10 +32,6 @@ object ClassPath { def lsDir(dir: Directory, filt: String => Boolean = _ => true) = dir.list filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList - def basedir(s: String) = - if (s contains File.separator) s.substring(0, s.lastIndexOf(File.separator)) - else "." 
- if (pattern == "*") lsDir(Directory(".")) else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2)) else if (pattern contains '*') { diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala index 9de3a2427f..3c97f9da7d 100644 --- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala +++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala @@ -142,7 +142,6 @@ object ScalaClassLoader { with HasClassPath { private var classloaderURLs: Seq[URL] = urls - private def classpathString = ClassPath.fromURLs(urls: _*) def classPathURLs: Seq[URL] = classloaderURLs def classPath: ClassPath[_] = JavaClassPath fromURLs classPathURLs diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala index b1f4696d3e..4f7a9ff878 100644 --- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala +++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala @@ -7,9 +7,9 @@ class WorkScheduler { type Action = () => Unit - private var todo = new mutable.Queue[Action] - private var throwables = new mutable.Queue[Throwable] - private var interruptReqs = new mutable.Queue[InterruptReq] + private val todo = new mutable.Queue[Action] + private val throwables = new mutable.Queue[Throwable] + private val interruptReqs = new mutable.Queue[InterruptReq] /** Called from server: block until one of todo list, throwables or interruptReqs is nonempty */ def waitForMoreWork() = synchronized { diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala index 7cac6154b9..22ad857119 100644 --- a/src/library/scala/collection/IndexedSeqLike.scala +++ b/src/library/scala/collection/IndexedSeqLike.scala @@ -53,7 +53,6 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] { // pre: start >= 0, end <= self.length @SerialVersionUID(1756321872811029277L) protected class Elements(start: Int, end: Int) extends AbstractIterator[A] with BufferedIterator[A] with Serializable { - private def initialSize = if (end <= start) 0 else end - start private var index = start private def available = (end - index) max 0 diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index e12b8d231c..fddd436dde 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -562,7 +562,6 @@ trait Iterator[+A] extends TraversableOnce[A] { * handling of structural calls. It's not what's intended here. 
*/ class Leading extends AbstractIterator[A] { - private var isDone = false val lookahead = new mutable.Queue[A] def advance() = { self.hasNext && p(self.head) && { @@ -572,7 +571,6 @@ trait Iterator[+A] extends TraversableOnce[A] { } def finish() = { while (advance()) () - isDone = true } def hasNext = lookahead.nonEmpty || advance() def next() = { diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 82f62f3c85..b0736ecace 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -920,8 +920,8 @@ object TrieMap extends MutableMapFactory[TrieMap] { private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] { - private var stack = new Array[Array[BasicNode]](7) - private var stackpos = new Array[Int](7) + private val stack = new Array[Array[BasicNode]](7) + private val stackpos = new Array[Int](7) private var depth = -1 private var subiter: Iterator[(K, V)] = null private var current: KVNode[K, V] = null diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 47cac9a1d5..d825f5fb20 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -379,12 +379,6 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend current = list } } - - private def oldWriteObject(out: ObjectOutputStream) { - var xs: List[B] = this - while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail } - out.writeObject(ListSerializeEnd) - } } /** $factoryInfo diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index ce04ef09af..2df7db4d22 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -81,17 +81,6 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { // to guard against any (most likely illusory) performance drop. They should // be eliminated one way or another. - // Counts how many elements from the start meet the given test. - private def skipCount(p: T => Boolean): Int = { - var current = start - var counted = 0 - - while (counted < length && p(current)) { - counted += 1 - current += step - } - counted - } // Tests whether a number is within the endpoints, without testing // whether it is a member of the sequence (i.e. when step > 1.) private def isWithinBoundaries(elem: T) = !isEmpty && ( @@ -124,21 +113,21 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString) else locationAfterN(idx) } - + import NumericRange.defaultOrdering - + override def min[T1 >: T](implicit ord: Ordering[T1]): T = if (ord eq defaultOrdering(num)) { if (num.signum(step) > 0) start else last } else super.min(ord) - - override def max[T1 >: T](implicit ord: Ordering[T1]): T = + + override def max[T1 >: T](implicit ord: Ordering[T1]): T = if (ord eq defaultOrdering(num)) { if (num.signum(step) > 0) last else start } else super.max(ord) - + // Motivated by the desire for Double ranges with BigDecimal precision, // we need some way to map a Range and get another Range. 
This can't be // done in any fully general way because Ranges are not arbitrary @@ -213,7 +202,7 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { /** A companion object for numeric ranges. */ object NumericRange { - + /** Calculates the number of elements in a range given start, end, step, and * whether or not it is inclusive. Throws an exception if step == 0 or * the number of elements exceeds the maximum Int. @@ -272,7 +261,7 @@ object NumericRange { new Exclusive(start, end, step) def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] = new Inclusive(start, end, step) - + private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]]( Numeric.BigIntIsIntegral -> Ordering.BigInt, Numeric.IntIsIntegral -> Ordering.Int, @@ -284,6 +273,6 @@ object NumericRange { Numeric.DoubleAsIfIntegral -> Ordering.Double, Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal ) - + } diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index 4d28bf9518..4020f1f5b3 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -19,12 +19,11 @@ import scala.reflect.ClassTag * @since 2.8 */ object StringLike { - // just statics for companion class. - private final val LF: Char = 0x0A - private final val FF: Char = 0x0C - private final val CR: Char = 0x0D - private final val SU: Char = 0x1A + private final val LF = 0x0A + private final val FF = 0x0C + private final val CR = 0x0D + private final val SU = 0x1A } import StringLike._ diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index 895d073869..1f90436636 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -630,14 +630,13 @@ override def companion: GenericCompanion[Vector] = Vector } -class VectorIterator[+A](_startIndex: Int, _endIndex: Int) +class VectorIterator[+A](_startIndex: Int, endIndex: Int) extends AbstractIterator[A] with Iterator[A] with VectorPointer[A @uncheckedVariance] { private var blockIndex: Int = _startIndex & ~31 private var lo: Int = _startIndex & 31 - private var endIndex: Int = _endIndex private var endLo = math.min(endIndex - blockIndex, 32) @@ -667,13 +666,13 @@ extends AbstractIterator[A] res } - private[collection] def remainingElementCount: Int = (_endIndex - (blockIndex + lo)) max 0 + private[collection] def remainingElementCount: Int = (endIndex - (blockIndex + lo)) max 0 /** Creates a new vector which consists of elements remaining in this iterator. * Such a vector can then be split into several vectors using methods like `take` and `drop`. 
*/ private[collection] def remainingVector: Vector[A] = { - val v = new Vector(blockIndex + lo, _endIndex, blockIndex + lo) + val v = new Vector(blockIndex + lo, endIndex, blockIndex + lo) v.initFrom(this) v } diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala index ab3d0ec312..17ad459e2c 100644 --- a/src/library/scala/collection/mutable/IndexedSeqView.scala +++ b/src/library/scala/collection/mutable/IndexedSeqView.scala @@ -82,8 +82,6 @@ self => protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile protected override def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed - private implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This] - override def filter(p: A => Boolean): This = newFiltered(p) override def init: This = newSliced(SliceInterval(0, self.length - 1)) override def drop(n: Int): This = newSliced(SliceInterval(n, self.length)) diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala index 2556cd3f68..d6b75202da 100644 --- a/src/library/scala/collection/parallel/Tasks.scala +++ b/src/library/scala/collection/parallel/Tasks.scala @@ -67,19 +67,10 @@ trait Task[R, +Tp] { private[parallel] def tryMerge(t: Tp @uncheckedVariance) { val that = t.asInstanceOf[Task[R, Tp]] val local = result // ensure that any effects of modifying `result` are detected - // checkMerge(that) if (this.throwable == null && that.throwable == null) merge(t) mergeThrowables(that) } - private def checkMerge(that: Task[R, Tp] @uncheckedVariance) { - if (this.throwable == null && that.throwable == null && (this.result == null || that.result == null)) { - println("This: " + this + ", thr=" + this.throwable + "; merged with " + that + ", thr=" + that.throwable) - } else if (this.throwable != null || that.throwable != null) { - println("merging this: " + this + " with thr: " + this.throwable + " with " + that + ", thr=" + that.throwable) - } - } - private[parallel] def mergeThrowables(that: Task[_, _]) { if (this.throwable != null && that.throwable != null) { // merge exceptions, since there were multiple exceptions diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index deff9eda3b..5ac2725f11 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -579,8 +579,6 @@ self => /* operations */ - private def asTask[R, Tp](t: Any) = t.asInstanceOf[Task[R, Tp]] - private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParArrayCombiner[_]] override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParArray[T], S, That]) = if (buildsArray(bf(repr))) { diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala index c7f025207c..0b81d2c90a 100644 --- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala +++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala @@ -38,10 +38,6 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] { } } - private def checkbounds() = if (idx >= itertable.length) { - throw new IndexOutOfBoundsException(idx.toString) - } - def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T] def 
remaining = totalsize - traversed @@ -102,11 +98,5 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] { } count } - - private def check() = if (table.slice(idx, until).count(_ != null) != remaining) { - println("Invariant broken: " + debugInformation) - assert(false) - } } - } diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala index fad7ddad59..3b2c66763e 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala @@ -166,9 +166,8 @@ private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFa extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks) with scala.collection.mutable.HashTable.HashUtils[K] { - private var mask = ParHashMapCombiner.discriminantmask - private var nonmasklen = ParHashMapCombiner.nonmasklength - private var seedvalue = 27 + private val nonmasklen = ParHashMapCombiner.nonmasklength + private val seedvalue = 27 def +=(elem: (K, V)) = { sz += 1 @@ -232,7 +231,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau def setSize(sz: Int) = tableSize = sz def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = { var h = index(elemHashCode(e.key)) - // assertCorrectBlock(h, block) var olde = table(h).asInstanceOf[DefaultEntry[K, V]] // check if key already exists @@ -252,13 +250,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau true } else false } - private def assertCorrectBlock(h: Int, block: Int) { - val blocksize = table.length / (1 << ParHashMapCombiner.discriminantbits) - if (!(h >= block * blocksize && h < (block + 1) * blocksize)) { - println("trying to put " + h + " into block no.: " + block + ", range: [" + block * blocksize + ", " + (block + 1) * blocksize + ">") - assert(h >= block * blocksize && h < (block + 1) * blocksize) - } - } protected def createNewEntry[X](key: K, x: X) = ??? 
} @@ -288,7 +279,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau val chunksz = unrolled.size while (i < chunksz) { val elem = chunkarr(i) - // assertCorrectBlock(block, elem.key) if (t.insertEntry(elem)) insertcount += 1 i += 1 } @@ -297,13 +287,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau } insertcount } - private def assertCorrectBlock(block: Int, k: K) { - val hc = improve(elemHashCode(k), seedvalue) - if ((hc >>> nonmasklen) != block) { - println(hc + " goes to " + (hc >>> nonmasklen) + ", while expected block is " + block) - assert((hc >>> nonmasklen) == block) - } - } def split = { val fp = howmany / 2 List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp)) diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala index aef9f6856b..22f22c8305 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala @@ -120,9 +120,8 @@ private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFacto extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks) with scala.collection.mutable.FlatHashTable.HashUtils[T] { //self: EnvironmentPassingCombiner[T, ParHashSet[T]] => - private var mask = ParHashSetCombiner.discriminantmask - private var nonmasklen = ParHashSetCombiner.nonmasklength - private var seedvalue = 27 + private val nonmasklen = ParHashSetCombiner.nonmasklength + private val seedvalue = 27 def +=(elem: T) = { sz += 1 diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala index f66d64bc3b..1b32781afa 100644 --- a/src/library/scala/concurrent/JavaConversions.scala +++ b/src/library/scala/concurrent/JavaConversions.scala @@ -41,10 +41,6 @@ object JavaConversions { exec.execute(task) } - def managedBlock(blocker: ManagedBlocker) { - blocker.block() - } - def shutdown() { // do nothing } diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala index 5777c255c3..fbf2d940dc 100644 --- a/src/library/scala/sys/SystemProperties.scala +++ b/src/library/scala/sys/SystemProperties.scala @@ -64,7 +64,6 @@ object SystemProperties { propertyHelp(p.key) = helpText p } - private def str(key: String, helpText: String) = addHelp(Prop[String](key), helpText) private def bool(key: String, helpText: String): BooleanProp = addHelp[BooleanProp]( if (key startsWith "java.") BooleanProp.valueIsTrue(key) else BooleanProp.keyExists(key), helpText diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala index 235a74dd7a..b648d179c6 100644 --- a/src/library/scala/util/automata/WordBerrySethi.scala +++ b/src/library/scala/util/automata/WordBerrySethi.scala @@ -152,7 +152,6 @@ abstract class WordBerrySethi extends BaseBerrySethi { new NondetWordAutom[_labelT] { val nstates = pos val labels = WordBerrySethi.this.labels.toList - val initials = initialsArr val finals = finalsArr val delta = deltaArr val default = defaultArr diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 63d049208a..9bd596a904 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -199,7 +199,7 @@ 
class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * Otherwise, this Regex is applied to the previously matched input, * and the result of that match is used. */ - def unapplySeq(m: Match): Option[Seq[String]] = + def unapplySeq(m: Match): Option[Seq[String]] = if (m.matched == null) None else if (m.matcher.pattern == this.pattern) Some(1 to m.groupCount map m.group) else unapplySeq(m.matched) @@ -650,7 +650,7 @@ object Regex { private[matching] trait Replacement { protected def matcher: Matcher - private var sb = new java.lang.StringBuffer + private val sb = new java.lang.StringBuffer def replaced = { val newsb = new java.lang.StringBuffer(sb) diff --git a/src/library/scala/xml/persistent/SetStorage.scala b/src/library/scala/xml/persistent/SetStorage.scala index 765d2a8393..56a0be6cf9 100644 --- a/src/library/scala/xml/persistent/SetStorage.scala +++ b/src/library/scala/xml/persistent/SetStorage.scala @@ -20,16 +20,14 @@ import java.io.File */ class SetStorage(file: File) extends CachedFileStorage(file) { - private var theSet: mutable.HashSet[Node] = new mutable.HashSet[Node] + private val theSet = mutable.HashSet[Node]() // initialize { val it = super.initialNodes dirty = it.hasNext - for(x <- it) { - theSet += x; - } + theSet ++= it } /* forwarding methods to hashset*/ diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala index 3110ccd1ce..6bf4c7d1da 100644 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala +++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala @@ -75,9 +75,6 @@ class AssemblyBuilder(name: AssemblyName) //########################################################################## // protected members - // the access properties - Save, Run, RunAndSave - private var access : Int = _ - // all extern assemblies used in this assembly builder protected var externAssemblies = scala.collection.mutable.Set.empty[Assembly] diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala index 2aa9a99054..63ecbfd353 100644 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala +++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala @@ -452,7 +452,7 @@ import ILGenerator._ private var locals: Int = 0 // stack of label for exception mechanism - private var excStack: ExceptionStack = new ExceptionStack() + private val excStack: ExceptionStack = new ExceptionStack() // the method info owner of this ILGenerator var owner: MethodBase = _owner diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala index 22c1b1150b..a80ea72323 100644 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala +++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala @@ -107,16 +107,15 @@ object Label { //######################################################################## // Special Labels - final class SpecialLabel(_kind: Label.Kind) extends Label { - private final var kind: Label.Kind = _kind + final class SpecialLabel(kind: Label.Kind) extends Label { def isInitialized() = true - def getAddress(): Int = { throw new RuntimeException("" + kind.toString()) } - def getStacksize(): Int = { throw new RuntimeException("" + kind.toString()) } - def setStacksize(stacksize: Int) { throw new RuntimeException(kind.toString()) } - def incStacksize() { throw new RuntimeException(kind.toString()) } + def getAddress(): 
Int = { throw new RuntimeException("" + kind) } + def getStacksize(): Int = { throw new RuntimeException("" + kind) } + def setStacksize(stacksize: Int) { throw new RuntimeException("" + kind) } + def incStacksize() { throw new RuntimeException("" + kind) } def getKind(): Kind = kind - def mergeWith(that: Label) { throw new RuntimeException(kind.toString()) } - override def toString(): String = "Label(" + kind.toString() + ")" + def mergeWith(that: Label) { throw new RuntimeException("" + kind) } + override def toString() = s"Label($kind)" } final val NewScope: Label = new SpecialLabel(Kind.NewScope) diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala index b27ce6ff75..e3f1cb8bd9 100644 --- a/src/partest/scala/tools/partest/PartestDefaults.scala +++ b/src/partest/scala/tools/partest/PartestDefaults.scala @@ -8,8 +8,6 @@ import java.lang.Runtime.getRuntime object PartestDefaults { import nsc.Properties._ - private def wrapAccessControl[T](body: => Option[T]): Option[T] = - try body catch { case _: java.security.AccessControlException => None } def testRootName = propOrNone("partest.root") def srcDirName = propOrElse("partest.srcdir", "files") diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala index 959d682872..51c77d386a 100644 --- a/src/partest/scala/tools/partest/PartestTask.scala +++ b/src/partest/scala/tools/partest/PartestTask.scala @@ -182,7 +182,6 @@ class PartestTask extends Task with CompilationPathProperty { private var javaccmd: Option[File] = None private var showDiff: Boolean = false private var showLog: Boolean = false - private var runFailed: Boolean = false private var posFiles: Option[FileSet] = None private var negFiles: Option[FileSet] = None private var runFiles: Option[FileSet] = None @@ -355,7 +354,6 @@ class PartestTask extends Task with CompilationPathProperty { antFileManager.showDiff = showDiff antFileManager.showLog = showLog - antFileManager.failed = runFailed antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*) antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath antFileManager.LATEST_REFLECT = scalaReflect.getAbsolutePath diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala index 84d9832f97..dddc10b251 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala @@ -95,8 +95,6 @@ class ConsoleRunner extends DirectRunner { else if (parsed isSet "--pack") new ConsoleFileManager("build/pack") else new ConsoleFileManager // auto detection, see ConsoleFileManager.findLatest - def argNarrowsTests(x: String) = denotesTestSet(x) || denotesTestPath(x) - NestUI._verbose = parsed isSet "--verbose" fileManager.showDiff = true // parsed isSet "--show-diff" diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala index d4b9feecce..cce717cddf 100644 --- a/src/partest/scala/tools/partest/nest/RunnerManager.scala +++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala @@ -260,13 +260,12 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP runCommand(cmd, logFile) } - private def getCheckFilePath(dir: File, suffix: String = "") = { + private def getCheckFilePath(dir: File, suffix: String) = { def chkFile(s: String) = (Directory(dir) / "%s%s.check".format(fileBase, s)).toFile if 
(chkFile("").isFile || suffix == "") chkFile("") else chkFile("-" + suffix) } - private def getCheckFile(dir: File) = Some(getCheckFilePath(dir, kind)) filter (_.canRead) private def compareOutput(dir: File, logFile: File): String = { val checkFile = getCheckFilePath(dir, kind) diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala index 812d5199fc..09a246c8a5 100644 --- a/src/reflect/scala/reflect/api/TypeTags.scala +++ b/src/reflect/scala/reflect/api/TypeTags.scala @@ -95,7 +95,7 @@ import scala.language.implicitConversions * scala> paramInfo(List(1, 2)) * type of List(1, 2) has type arguments List(Int) * }}} - * + * * === `WeakTypeTag`s === * *`WeakTypeTag[T]` generalizes `TypeTag[T]`. Unlike a regular `TypeTag`, components of diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 35acc2bcce..ac1722f069 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -31,7 +31,7 @@ trait Definitions extends api.StandardDefinitions { val clazz = owner.newClassSymbol(name, NoPosition, flags) clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz) } - private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol = { + private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long): MethodSymbol = { val msym = owner.newMethod(name.encode, NoPosition, flags) val params = msym.newSyntheticValueParams(formals) msym setInfo MethodType(params, restpe) @@ -1232,17 +1232,6 @@ trait Definitions extends api.StandardDefinitions { else flatNameString(etp.typeSymbol, '.') } - /** Surgery on the value classes. Without this, AnyVals defined in source - * files end up with an AnyRef parent. It is likely there is a better way - * to evade that AnyRef. - */ - private def setParents(sym: Symbol, parents: List[Type]): Symbol = sym.rawInfo match { - case ClassInfoType(_, scope, clazz) => - sym setInfo ClassInfoType(parents, scope, clazz) - case _ => - sym - } - def init() { if (isInitialized) return // force initialization of every symbol that is synthesized or hijacked by the compiler diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 0114fb037c..6a07502ace 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -206,8 +206,11 @@ trait Names extends api.Names with LowPriorityNames { /** @return the hash value of this name */ final override def hashCode(): Int = index - // Presently disabled. - // override def equals(other: Any) = paranoidEquals(other) + /**** + * This has been quite useful to find places where people are comparing + * a TermName and a TypeName, or a Name and a String. 
+ + override def equals(other: Any) = paranoidEquals(other) private def paranoidEquals(other: Any): Boolean = { val cmp = this eq other.asInstanceOf[AnyRef] if (cmp || !nameDebug) @@ -215,7 +218,7 @@ trait Names extends api.Names with LowPriorityNames { other match { case x: String => - Console.println("Compared " + debugString + " and String '" + x + "'") + Console.println(s"Compared $debugString and String '$x'") case x: Name => if (this.isTermName != x.isTermName) { val panic = this.toTermName == x.toTermName @@ -228,6 +231,7 @@ trait Names extends api.Names with LowPriorityNames { } false } + ****/ /** @return the i'th Char of this name */ final def charAt(i: Int): Char = chrs(index + i) diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index eaa05bc89d..02ec0b0e06 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -525,7 +525,7 @@ trait Printers extends api.Printers { self: SymbolTable => private var depth = 0 private var printTypesInFootnotes = true private var printingFootnotes = false - private var footnotes = footnoteIndex.mkFootnotes() + private val footnotes = footnoteIndex.mkFootnotes() def print(args: Any*): Unit = { // don't print type footnotes if the argument is a mere type diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 3516078ea1..0b065bb441 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -101,10 +101,6 @@ trait Types extends api.Types { self: SymbolTable => protected val enableTypeVarExperimentals = settings.Xexperimental.value - /** Empty immutable maps to avoid allocations. */ - private val emptySymMap = immutable.Map[Symbol, Symbol]() - private val emptySymCount = immutable.Map[Symbol, Int]() - /** The current skolemization level, needed for the algorithms * in isSameType, isSubType that do constraint solving under a prefix. */ @@ -5801,6 +5797,8 @@ trait Types extends api.Types { self: SymbolTable => * types which are used internally in type applications and * types which are not. */ + /**** Not used right now, but kept around to document which Types + * land in which bucket. private def isInternalTypeNotUsedAsTypeArg(tp: Type): Boolean = tp match { case AntiPolyType(pre, targs) => true case ClassInfoType(parents, defs, clazz) => true @@ -5811,6 +5809,7 @@ trait Types extends api.Types { self: SymbolTable => case TypeBounds(lo, hi) => true case _ => false } + ****/ private def isInternalTypeUsedAsTypeArg(tp: Type): Boolean = tp match { case WildcardType => true case BoundedWildcardType(_) => true diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala index 46d80e9680..7ea8a75417 100644 --- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala +++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala @@ -13,7 +13,6 @@ trait TraceSymbolActivity { scala.sys addShutdownHook showAllSymbols() private type Set[T] = scala.collection.immutable.Set[T] - private val Set = scala.collection.immutable.Set val allSymbols = mutable.Map[Int, Symbol]() val allChildren = mutable.Map[Int, List[Int]]() withDefaultValue Nil @@ -44,38 +43,6 @@ trait TraceSymbolActivity { } } - /** TODO. 
- */ - private def reachableDirectlyFromSymbol(sym: Symbol): List[Symbol] = ( - List(sym.owner, sym.alias, sym.thisSym) - ++ sym.children - ++ sym.info.parents.map(_.typeSymbol) - ++ sym.typeParams - ++ sym.paramss.flatten - ) - private def reachable[T](inputs: Traversable[T], mkSymbol: T => Symbol): Set[Symbol] = { - def loop(seen: Set[Symbol], remaining: List[Symbol]): Set[Symbol] = { - remaining match { - case Nil => seen - case head :: rest => - if ((head eq null) || (head eq NoSymbol) || seen(head)) loop(seen, rest) - else loop(seen + head, rest ++ reachableDirectlyFromSymbol(head).filterNot(seen)) - } - } - loop(immutable.Set(), inputs.toList map mkSymbol filterNot (_ eq null) distinct) - } - private def treeList(t: Tree) = { - val buf = mutable.ListBuffer[Tree]() - t foreach (buf += _) - buf.toList - } - - private def reachableFromSymbol(root: Symbol): Set[Symbol] = - reachable[Symbol](List(root, root.info.typeSymbol), x => x) - - private def reachableFromTree(tree: Tree): Set[Symbol] = - reachable[Tree](treeList(tree), _.symbol) - private def signature(id: Int) = runBeforeErasure(allSymbols(id).defString) private def dashes(s: Any): String = ("" + s) map (_ => '-') @@ -119,7 +86,7 @@ trait TraceSymbolActivity { } println("\n") } - private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String = (x: U) => "" + x) = { + private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String) = { showMapFreq(xs.toList groupBy groupFn)(showFn) } private lazy val findErasurePhase: Phase = { diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala index 88ef8ae2a1..2acc460b5e 100644 --- a/src/scalacheck/org/scalacheck/Commands.scala +++ b/src/scalacheck/org/scalacheck/Commands.scala @@ -87,11 +87,6 @@ trait Commands extends Prop { private val bindings = new scala.collection.mutable.ListBuffer[(State,Any)] - private def initState() = { - bindings.clear() - initialState() - } - private def genCmds: Gen[Cmds] = { def sizedCmds(s: State)(sz: Int): Gen[Cmds] = if(sz <= 0) value(Cmds(Nil, Nil)) else for { -- cgit v1.2.3 From 64258cf019fb7ebfd9a78451236dac9c676f120b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 1 Nov 2012 12:58:43 -0700 Subject: Fixed bug in Symbol filtering. If you called filter on an overloaded symbol, it tried to return itself if no alternatives were filtered out. The test being performed, however, would only ever be true if the list call was to the (non-existent) "filterConserve", which is to say that in general, xs ne xs.filter(_ => true) The upshot is that we were creating a new symbol on every filter call to an overloaded symbol. To make completely sure this would be a performance winner, I also eliminated the closure and perform the filtering locally. 
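For the record, here is a sketch of the "filterConserve" behaviour the message alludes to. It is not part of the patch; the helper name and the use of a plain List are only for illustration. It walks the list once, remembers whether anything was dropped, and hands back the original instance when the predicate kept every element. The actual fix inlines the same loop (a changed flag plus a reversed accumulator) into Symbol#filter for the overloaded alternatives.

    def filterConserve[A](xs: List[A])(p: A => Boolean): List[A] = {
      var changed = false
      var rest    = xs
      var kept    = List.empty[A]   // accumulated in reverse order
      while (rest.nonEmpty) {
        if (p(rest.head)) kept ::= rest.head
        else changed = true
        rest = rest.tail
      }
      if (!changed) xs              // nothing was dropped: conserve the receiver
      else kept.reverse             // otherwise build the filtered list once
    }

The library filter of this vintage always allocates a fresh list even when it drops nothing, which is why the old identity test "alts1 eq alts" could never succeed.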
--- src/reflect/scala/reflect/internal/Symbols.scala | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 8cebfabe6f..53a236fa3c 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1672,12 +1672,23 @@ trait Symbols extends api.Symbols { self: SymbolTable => def filter(cond: Symbol => Boolean): Symbol = if (isOverloaded) { - val alts = alternatives - val alts1 = alts filter cond - if (alts1 eq alts) this + var changed = false + var alts0: List[Symbol] = alternatives + var alts1: List[Symbol] = Nil + + while (alts0.nonEmpty) { + if (cond(alts0.head)) + alts1 ::= alts0.head + else + changed = true + + alts0 = alts0.tail + } + + if (!changed) this else if (alts1.isEmpty) NoSymbol else if (alts1.tail.isEmpty) alts1.head - else owner.newOverloaded(info.prefix, alts1) + else owner.newOverloaded(info.prefix, alts1.reverse) } else if (cond(this)) this else NoSymbol -- cgit v1.2.3 From d7ed53f26d6466109cd5828008a8ba2026d95238 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 1 Nov 2012 13:01:02 -0700 Subject: Hardening scope lookup against wrongness. Calls to scope.lookup have until this commit discarded any symbols after the first, even at the same nesting depth. Some call sites which did this were innocuous, because they were only checking if lookup(name) != NoSymbol. To make this clearer in the future, I added a method which communicates the intent: containsName. And I added an even more relevant one, which falls between "lookup" and "lookupAll". def lookupUnshadowedEntries(name: Name) That method will be put to work in symbol lookup, allowing us to put to bed methods like "correctForPackageObject". --- src/reflect/scala/reflect/internal/Scopes.scala | 60 ++++++++++++++++++++++--- 1 file changed, 53 insertions(+), 7 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index a4b541e34d..352bc8fdc9 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -25,8 +25,9 @@ trait Scopes extends api.Scopes { self: SymbolTable => */ var next: ScopeEntry = null + def depth = owner.nestingLevel override def hashCode(): Int = sym.name.start - override def toString(): String = sym.toString() + override def toString() = s"$sym (depth=$depth)" } /** @@ -216,14 +217,46 @@ trait Scopes extends api.Scopes { self: SymbolTable => } } - /** lookup a symbol - * - * @param name ... - * @return ... + /** Lookup a module or a class, filtering out matching names in scope + * which do not match that requirement. + */ + def lookupModule(name: Name): Symbol = lookupAll(name.toTermName) find (_.isModule) getOrElse NoSymbol + def lookupClass(name: Name): Symbol = lookupAll(name.toTypeName) find (_.isClass) getOrElse NoSymbol + + /** True if the name exists in this scope, false otherwise. */ + def containsName(name: Name) = lookupEntry(name) != null + + /** Lookup a symbol. 
*/ def lookup(name: Name): Symbol = { val e = lookupEntry(name) - if (e eq null) NoSymbol else e.sym + if (e eq null) NoSymbol + else if (lookupNextEntry(e) eq null) e.sym + else { + // We shouldn't get here: until now this method was picking a random + // symbol when there was more than one with the name, so this should + // only be called knowing that there are 0-1 symbols of interest. So, we + // can safely return an overloaded symbol rather than throwing away the + // rest of them. Most likely we still break, but at least we will break + // in an understandable fashion (unexpectedly overloaded symbol) rather + // than a non-deterministic bizarre one (see any bug involving overloads + // in package objects.) + val alts = lookupAll(name).toList + log("!!! scope lookup of $name found multiple symbols: $alts") + // FIXME - how is one supposed to create an overloaded symbol without + // knowing the correct owner? Using the symbol owner is not correct; + // say for instance this is List's scope and the symbols are its three + // mkString members. Those symbols are owned by TraversableLike, which + // is no more meaningful an owner than NoSymbol given that we're in + // List. Maybe it makes no difference who owns the overloaded symbol, in + // which case let's establish that and have a canonical creation method. + // + // FIXME - a similar question for prefix, although there are more + // clues from the symbols on that one, as implemented here. In general + // the distinct list is one type and lub becomes the identity. + val prefix = lub(alts map (_.info.prefix) distinct) + NoSymbol.newOverloaded(prefix, alts) + } } /** Returns an iterator yielding every symbol with given name in this scope. @@ -231,7 +264,20 @@ trait Scopes extends api.Scopes { self: SymbolTable => def lookupAll(name: Name): Iterator[Symbol] = new Iterator[Symbol] { var e = lookupEntry(name) def hasNext: Boolean = e ne null - def next(): Symbol = { val r = e.sym; e = lookupNextEntry(e); r } + def next(): Symbol = try e.sym finally e = lookupNextEntry(e) + } + + def lookupAllEntries(name: Name): Iterator[ScopeEntry] = new Iterator[ScopeEntry] { + var e = lookupEntry(name) + def hasNext: Boolean = e ne null + def next(): ScopeEntry = try e finally e = lookupNextEntry(e) + } + + def lookupUnshadowedEntries(name: Name): Iterator[ScopeEntry] = { + lookupEntry(name) match { + case null => Iterator.empty + case e => lookupAllEntries(name) filter (e1 => (e eq e1) || (e.depth == e1.depth && e.sym != e1.sym)) + } } /** lookup a symbol entry matching given name. -- cgit v1.2.3 From 14704da1b854e04d8e8de81eb7741757f33a2d13 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 1 Nov 2012 13:02:24 -0700 Subject: Convenience method commonSymbolOwner. --- src/reflect/scala/reflect/internal/Symbols.scala | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 53a236fa3c..9a4b272d6c 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -45,6 +45,24 @@ trait Symbols extends api.Symbols { self: SymbolTable => m } + /** The deepest symbol which appears in the owner chains of all + * the given symbols. 
+ */ + def commonSymbolOwner(syms: List[Symbol]): Symbol = { + def loop(owner: Symbol, rest: List[Symbol]): Symbol = rest match { + case Nil => owner + case x :: xs if x.ownerChain contains owner => loop(owner, xs) + case x :: xs if owner.ownerChain contains x => loop(x, xs) + case x :: xs => + x.ownerChain find (owner.ownerChain contains _) match { + case Some(common) => loop(common, xs) + case _ => NoSymbol + } + } + if (syms.isEmpty || (syms contains NoSymbol)) NoSymbol + else loop(syms.head.owner, syms.tail) + } + /** Create a new free term. Its owner is NoSymbol. */ def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String): FreeTermSymbol = -- cgit v1.2.3 From 77a45858777554c6e1fb7b9583359a6a492ec066 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 1 Nov 2012 13:01:59 -0700 Subject: The improvements made possible by the scope changes. --- .../nsc/symtab/classfile/ClassfileParser.scala | 2 +- .../scala/tools/nsc/transform/Flatten.scala | 14 +++--- .../scala/tools/nsc/typechecker/Contexts.scala | 52 +++++++--------------- .../scala/tools/nsc/typechecker/Namers.scala | 4 +- test/files/neg/dbldef.check | 4 +- 5 files changed, 26 insertions(+), 50 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 42589874fe..e69b4bee94 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -562,7 +562,7 @@ abstract class ClassfileParser { 0 until in.nextChar foreach (_ => parseMethod()) val needsConstructor = ( !sawPrivateConstructor - && instanceScope.lookup(nme.CONSTRUCTOR) == NoSymbol + && !(instanceScope containsName nme.CONSTRUCTOR) && (sflags & INTERFACE) == 0 ) if (needsConstructor) diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index e2913bea0d..672a11ec30 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -18,18 +18,14 @@ abstract class Flatten extends InfoTransform { /** the following two members override abstract members in Transform */ val phaseName: String = "flatten" - /** Updates the owning scope with the given symbol; returns the old symbol. + /** Updates the owning scope with the given symbol, unlinking any others. */ - private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = exitingFlatten { + private def replaceSymbolInCurrentScope(sym: Symbol): Unit = exitingFlatten { val scope = sym.owner.info.decls - val old = scope lookup sym.name andAlso scope.unlink + val old = (scope lookupUnshadowedEntries sym.name).toList + old foreach (scope unlink _) scope enter sym - - if (old eq NoSymbol) - log(s"lifted ${sym.fullLocationString}") - else - log(s"lifted ${sym.fullLocationString} after unlinking existing $old from scope.") - + log(s"lifted ${sym.fullLocationString}" + ( if (old.isEmpty) "" else " after unlinking $old from scope." 
)) old } diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 92e2bc186e..f2409ea482 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -758,15 +758,12 @@ trait Contexts { self: Analyzer => def lookupSymbol(name: Name, qualifies: Symbol => Boolean): NameLookup = { var lookupError: NameLookup = null // set to non-null if a definite error is encountered var inaccessible: NameLookup = null // records inaccessible symbol for error reporting in case none is found - var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope var defSym: Symbol = NoSymbol // the directly found symbol + var symbolDepth: Int = -1 // the depth of the directly found symbol var pre: Type = NoPrefix // the prefix type of defSym, if a class member var cx: Context = this var needsQualifier = false // working around package object overloading bug - def defEntrySymbol = if (defEntry eq null) NoSymbol else defEntry.sym - def localScopeDepth = if (defEntry eq null) 0 else cx.scope.nestingLevel - defEntry.owner.nestingLevel - def finish(qual: Tree, sym: Symbol): NameLookup = ( if (lookupError ne null) lookupError else sym match { @@ -789,23 +786,6 @@ trait Contexts { self: Analyzer => def lookupInPrefix(name: Name) = pre member name filter qualifies def accessibleInPrefix(s: Symbol) = isAccessible(s, pre, superAccess = false) - def correctForPackageObject(sym: Symbol): Symbol = { - if (sym.isTerm && isInPackageObject(sym, pre.typeSymbol)) { - val sym1 = lookupInPrefix(sym.name) - if ((sym1 eq NoSymbol) || (sym eq sym1)) sym else { - needsQualifier = true - log(s""" - | !!! Overloaded package object member resolved incorrectly. - | prefix: $pre - | Discarded: ${sym.defString} - | Using: ${sym1.defString} - """.stripMargin) - sym1 - } - } - else sym - } - def searchPrefix = { cx = cx.enclClass val found0 = lookupInPrefix(name) @@ -817,22 +797,24 @@ trait Contexts { self: Analyzer => } // cx.scope eq null arises during FixInvalidSyms in Duplicators while (defSym == NoSymbol && (cx ne NoContext) && (cx.scope ne null)) { - pre = cx.enclClass.prefix - // !!! FIXME. This call to lookupEntry is at the root of all the - // bad behavior with overloading in package objects. lookupEntry - // just takes the first symbol it finds in scope, ignoring the rest. - // When a selection on a package object arrives here, the first - // overload is always chosen. "correctForPackageObject" exists to - // undo that decision. Obviously it would be better not to do it in - // the first place; however other things seem to be tied to obtaining - // that ScopeEntry, specifically calculating the nesting depth. - defEntry = cx.scope lookupEntry name - defSym = defEntrySymbol filter qualifies map correctForPackageObject orElse searchPrefix - if (!defSym.exists) - cx = cx.outer + val entries = (cx.scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList + + pre = cx.enclClass.prefix + symbolDepth = if (entries.isEmpty) cx.depth else (cx.depth - cx.scope.nestingLevel) + entries.head.depth + defSym = entries match { + case Nil => searchPrefix + case hd :: Nil => hd.sym + case alts => logResult(s"!!! 
lookup overloaded")(cx.owner.newOverloaded(pre, entries map (_.sym))) + } + + if (defSym.exists) // we have a winner: record the symbol depth + symbolDepth = ( + if (entries.isEmpty) cx.depth + else (cx.depth - cx.scope.nestingLevel) + entries.head.depth + ) + else cx = cx.outer // push further outward } - val symbolDepth = cx.depth - localScopeDepth var impSym: Symbol = NoSymbol var imports = Context.this.imports // impSym != NoSymbol => it is imported from imports.head def imp1 = imports.head diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 99b927af66..d8d021f64d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -381,8 +381,8 @@ trait Namers extends MethodSynthesis { if (sym eq NoSymbol) return val ctx = if (context.owner.isPackageObjectClass) context.outer else context - val module = if (sym.isModule) sym else ctx.scope lookup tree.name.toTermName - val clazz = if (sym.isClass) sym else ctx.scope lookup tree.name.toTypeName + val module = if (sym.isModule) sym else ctx.scope lookupModule tree.name + val clazz = if (sym.isClass) sym else ctx.scope lookupClass tree.name val fails = ( module.isModule && clazz.isClass diff --git a/test/files/neg/dbldef.check b/test/files/neg/dbldef.check index 3ee63475e4..b896c4cdcf 100644 --- a/test/files/neg/dbldef.check +++ b/test/files/neg/dbldef.check @@ -6,9 +6,7 @@ dbldef.scala:1: error: type mismatch; required: Int case class test0(x: Int, x: Float) ^ -dbldef.scala:1: error: type mismatch; - found : Float - required: Int +dbldef.scala:1: error: in class test0, multiple overloaded alternatives of x define default arguments case class test0(x: Int, x: Float) ^ three errors found -- cgit v1.2.3 From 9809721f0bab937029984aa97496d56db08ff61f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 1 Nov 2012 23:22:05 -0700 Subject: Revamp import ambiguity logic. Code reviewer prodded me into figuring out where my earlier attempts to simplify the import logic broke down. Now it should be much easier to follow. 
--- .../scala/tools/nsc/typechecker/Contexts.scala | 90 ++++++++++------------ 1 file changed, 42 insertions(+), 48 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index f2409ea482..03d30a6029 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -759,10 +759,9 @@ trait Contexts { self: Analyzer => var lookupError: NameLookup = null // set to non-null if a definite error is encountered var inaccessible: NameLookup = null // records inaccessible symbol for error reporting in case none is found var defSym: Symbol = NoSymbol // the directly found symbol - var symbolDepth: Int = -1 // the depth of the directly found symbol var pre: Type = NoPrefix // the prefix type of defSym, if a class member - var cx: Context = this - var needsQualifier = false // working around package object overloading bug + var cx: Context = this // the context under consideration + var symbolDepth: Int = -1 // the depth of the directly found symbol def finish(qual: Tree, sym: Symbol): NameLookup = ( if (lookupError ne null) lookupError @@ -778,7 +777,7 @@ trait Contexts { self: Analyzer => || unit.exists && s.sourceFile != unit.source.file ) ) - def requiresQualifier(s: Symbol) = needsQualifier || ( + def requiresQualifier(s: Symbol) = ( s.owner.isClass && !s.owner.isPackageClass && !s.isTypeParameterOrSkolem @@ -797,33 +796,35 @@ trait Contexts { self: Analyzer => } // cx.scope eq null arises during FixInvalidSyms in Duplicators while (defSym == NoSymbol && (cx ne NoContext) && (cx.scope ne null)) { - val entries = (cx.scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList - pre = cx.enclClass.prefix - symbolDepth = if (entries.isEmpty) cx.depth else (cx.depth - cx.scope.nestingLevel) + entries.head.depth + val entries = (cx.scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList defSym = entries match { case Nil => searchPrefix - case hd :: Nil => hd.sym - case alts => logResult(s"!!! lookup overloaded")(cx.owner.newOverloaded(pre, entries map (_.sym))) + case hd :: tl => + // we have a winner: record the symbol depth + symbolDepth = (cx.depth - cx.scope.nestingLevel) + hd.depth + if (tl.isEmpty) hd.sym + else logResult(s"!!! lookup overloaded")(cx.owner.newOverloaded(pre, entries map (_.sym))) } - - if (defSym.exists) // we have a winner: record the symbol depth - symbolDepth = ( - if (entries.isEmpty) cx.depth - else (cx.depth - cx.scope.nestingLevel) + entries.head.depth - ) - else cx = cx.outer // push further outward + if (!defSym.exists) + cx = cx.outer // push further outward } + if (symbolDepth < 0) + symbolDepth = cx.depth var impSym: Symbol = NoSymbol - var imports = Context.this.imports // impSym != NoSymbol => it is imported from imports.head + var imports = Context.this.imports def imp1 = imports.head + def imp2 = imports.tail.head + def imp1Explicit = imp1 isExplicitImport name + def imp2Explicit = imp2 isExplicitImport name while (!qualifies(impSym) && imports.nonEmpty && imp1.depth > symbolDepth) { impSym = importedAccessibleSymbol(imp1, name) if (!impSym.exists) imports = imports.tail } + if (defSym.exists && impSym.exists) { // imported symbols take precedence over package-owned symbols in different compilation units. 
if (isPackageOwnedInDifferentUnit(defSym)) @@ -844,40 +845,33 @@ trait Contexts { self: Analyzer => finish(EmptyTree, defSym) } else if (impSym.exists) { - // Imports against which we will test impSym for any ambiguities - var importsTail = imports.tail - val imp1Explicit = imp1 isExplicitImport name - def imp2 = importsTail.head - def sameDepth = imp1.depth == imp2.depth - def isDone = importsTail.isEmpty || imp1Explicit && !sameDepth - + def sameDepth = imp1.depth == imp2.depth + def needsCheck = if (sameDepth) imp1Explicit == imp2Explicit else imp1Explicit || imp2Explicit + def isDone = imports.tail.isEmpty || (!sameDepth && imp1Explicit) + def ambiguous = needsCheck && isAmbiguousImport(imp1, imp2, name) && { + lookupError = ambiguousImports(imp1, imp2) + true + } + // Ambiguity check between imports. + // The same name imported again is potentially ambiguous if the name is: + // - after explicit import, explicitly imported again at the same or lower depth + // - after explicit import, wildcard imported at lower depth + // - after wildcard import, wildcard imported at the same depth + // Under all such conditions isAmbiguousImport is called, which will + // examine the imports in case they are importing the same thing; if that + // can't be established conclusively, an error is issued. while (lookupError == null && !isDone) { val other = importedAccessibleSymbol(imp2, name) - // Ambiguity check between imports. - // The same name imported again is potentially ambiguous if the name is: - // - after explicit import, explicitly imported again at the same or lower depth - // - after explicit import, wildcard imported at lower depth - // - after wildcard import, wildcard imported at the same depth - // Under all such conditions isAmbiguousImport is called, which will - // examine the imports in case they are importing the same thing; if that - // can't be established conclusively, an error is issued. - if (qualifies(other)) { - val imp2Explicit = imp2 isExplicitImport name - val needsCheck = ( - if (sameDepth) imp1Explicit == imp2Explicit - else imp1Explicit || imp2Explicit - ) - log(s"Import ambiguity: imp1=$imp1, imp2=$imp2, sameDepth=$sameDepth, needsCheck=$needsCheck") - if (needsCheck && isAmbiguousImport(imp1, imp2, name)) - lookupError = ambiguousImports(imp1, imp2) - else if (imp2Explicit) { - // if we weren't ambiguous and imp2 is explicit, imp2 replaces imp1 - // as the current winner. - impSym = other - imports = importsTail - } + // if the competing import is unambiguous and explicit, it is the new winner. + val isNewWinner = qualifies(other) && !ambiguous && imp2Explicit + // imports is imp1 :: imp2 :: rest. + // If there is a new winner, it is imp2, and imports drops imp1. + // If there is not, imp1 is still the winner, and it drops imp2. 
+ if (isNewWinner) { + impSym = other + imports = imports.tail } - importsTail = importsTail.tail + else imports = imp1 :: imports.tail.tail } // optimization: don't write out package prefixes finish(resetPos(imp1.qual.duplicate), impSym) -- cgit v1.2.3 From 8541ea31091ce6b066825b8bbae76a02a96a884a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 3 Nov 2012 13:34:20 +0100 Subject: Comment to link code to a relevant JIRA ticket --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 ++ 1 file changed, 2 insertions(+) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 19c2c4042a..c798e38e92 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -582,6 +582,8 @@ trait Typers extends Modes with Adaptations with Tags { // short cut some aliases. It seems pattern matching needs this // to notice exhaustiveness and to generate good code when // List extractors are mixed with :: patterns. See Test5 in lists.scala. + // + // TODO SI-6609 Eliminate this special case once the old pattern matcher is removed. def dealias(sym: Symbol) = (atPos(tree.pos.makeTransparent) {gen.mkAttributedRef(sym)} setPos tree.pos, sym.owner.thisType) sym.name match { -- cgit v1.2.3 From d3da3ef83293c0e174e07aba643b3a1f46c110c5 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 3 Nov 2012 06:23:56 -0700 Subject: Expanded unused warnings. Now warns on unused private and local terms and types. In addition it warns when a local var is read-only past the point of its creation - something I never would have guessed would be such a gold mine. Over 100 vars in trunk turn into vals. --- .../tools/nsc/typechecker/TypeDiagnostics.scala | 87 ++++++++++++++++++---- test/files/neg/warn-unused-privates.check | 49 ++++++++++-- test/files/neg/warn-unused-privates.scala | 53 ++++++++++++- 3 files changed, 165 insertions(+), 24 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 34f736e047..7f46cdfb37 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -432,34 +432,84 @@ trait TypeDiagnostics { class UnusedPrivates extends Traverser { val defnTrees = ListBuffer[MemberDef]() val targets = mutable.Set[Symbol]() + val setVars = mutable.Set[Symbol]() + val treeTypes = mutable.Set[Type]() + + def defnSymbols = defnTrees.toList map (_.symbol) + def localVars = defnSymbols filter (t => t.isLocal && t.isVar) + + def qualifiesTerm(sym: Symbol) = ( + (sym.isModule || sym.isMethod || sym.isPrivateLocal || sym.isLocal) + && !nme.isLocalName(sym.name) + && !sym.isParameter + && !sym.isParamAccessor // could improve this, but it's a pain + && !sym.isEarlyInitialized // lots of false positives in the way these are encoded + && !(sym.isGetter && sym.accessed.isEarlyInitialized) + ) + def qualifiesType(sym: Symbol) = !sym.isDefinedInPackage def qualifies(sym: Symbol) = ( (sym ne null) - && (sym.isMethod || sym.isPrivateLocal && !nme.isLocalName(sym.name)) - && !sym.isParameter - && !sym.isParamAccessor // could improve this, but it's a pain + && (sym.isTerm && qualifiesTerm(sym) || sym.isType && qualifiesType(sym)) ) override def traverse(t: Tree): Unit = { t match { - case t: ValOrDefDef if qualifies(t.symbol) => defnTrees += t + case t: MemberDef if 
qualifies(t.symbol) => defnTrees += t case t: RefTree if t.symbol ne null => targets += t.symbol + case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol case _ => } + // Only record type references which don't originate within the + // definition of the class being referenced. + if (t.tpe ne null) { + for (tp <- t.tpe ; if !treeTypes(tp) && !currentOwner.ownerChain.contains(tp.typeSymbol)) { + tp match { + case NoType | NoPrefix => + case NullaryMethodType(_) => + case MethodType(_, _) => + case _ => + log(s"$tp referenced from $currentOwner") + treeTypes += tp + } + } + // e.g. val a = new Foo ; new a.Bar ; don't let a be reported as unused. + t.tpe.prefix foreach { + case SingleType(_, sym) => targets += sym + case _ => + } + } super.traverse(t) } - def isUnused(m: Symbol): Boolean = ( - m.isPrivate + def isUnused(t: Tree): Boolean = ( + if (t.symbol.isTerm) isUnusedTerm(t.symbol) + else isUnusedType(t.symbol) + ) + def isUnusedType(m: Symbol): Boolean = ( + m.isType + && !m.isTypeParameterOrSkolem // would be nice to improve this + && (m.isPrivate || m.isLocal) + && !(treeTypes.exists(tp => tp exists (t => t.typeSymbolDirect == m))) + ) + def isUnusedTerm(m: Symbol): Boolean = ( + (m.isTerm) + && (m.isPrivate || m.isLocal) && !targets(m) - && !ignoreNames(m.name) // serialization methods - && !isConstantType(m.info.resultType) // subject to constant inlining + && !(m.name == nme.WILDCARD) // e.g. val _ = foo + && !ignoreNames(m.name) // serialization methods + && !isConstantType(m.info.resultType) // subject to constant inlining + && !treeTypes.exists(_ contains m) // e.g. val a = new Foo ; new a.Bar ) - def unused = defnTrees.toList filter (t => isUnused(t.symbol)) + def unusedTypes = defnTrees.toList filter (t => isUnusedType(t.symbol)) + def unusedTerms = defnTrees.toList filter (v => isUnusedTerm(v.symbol)) + // local vars which are never set, except those already returned in unused + def unsetVars = localVars filter (v => !setVars(v) && !isUnusedTerm(v)) } def apply(unit: CompilationUnit) = { val p = new UnusedPrivates p traverse unit.body - p.unused foreach { defn: DefTree => + val unused = p.unusedTerms + unused foreach { defn: DefTree => val sym = defn.symbol val isDefaultGetter = sym.name containsName nme.DEFAULT_GETTER_STRING val pos = ( @@ -470,15 +520,26 @@ trait TypeDiagnostics { case _ => NoPosition } ) + val why = if (sym.isPrivate) "private" else "local" val what = ( if (isDefaultGetter) "default argument" else if (sym.isConstructor) "constructor" + else if (sym.isVar || sym.isGetter && sym.accessed.isVar) "var" + else if (sym.isVal || sym.isGetter && sym.accessed.isVal) "val" else if (sym.isSetter) "setter" - else if (sym.isGetter) "getter" else if (sym.isMethod) "method" - else "member" + else if (sym.isModule) "object" + else "term" ) - unit.warning(pos, s"private $what in ${sym.owner} is never used") + unit.warning(pos, s"$why $what in ${sym.owner} is never used") + } + p.unsetVars foreach { v => + unit.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set - it could be a val") + } + p.unusedTypes foreach { t => + val sym = t.symbol + val why = if (sym.isPrivate) "private" else "local" + unit.warning(t.pos, s"$why ${sym.fullLocationString} is never used") } } } diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check index c37e01106c..9c41a33e8f 100644 --- a/test/files/neg/warn-unused-privates.check +++ b/test/files/neg/warn-unused-privates.check @@ -4,27 +4,60 @@ warn-unused-privates.scala:2: warning: 
private constructor in class Bippy is nev warn-unused-privates.scala:4: warning: private method in class Bippy is never used private def boop(x: Int) = x+a+b // warn ^ -warn-unused-privates.scala:6: warning: private getter in class Bippy is never used +warn-unused-privates.scala:6: warning: private val in class Bippy is never used final private val MILLIS2: Int = 1000 // warn ^ -warn-unused-privates.scala:13: warning: private getter in object Bippy is never used +warn-unused-privates.scala:13: warning: private val in object Bippy is never used private val HEY_INSTANCE: Int = 1000 // warn ^ -warn-unused-privates.scala:41: warning: private getter in trait Accessors is never used +warn-unused-privates.scala:35: warning: private val in class Boppy is never used + private val hummer = "def" // warn + ^ +warn-unused-privates.scala:42: warning: private var in trait Accessors is never used private var v1: Int = 0 // warn ^ -warn-unused-privates.scala:42: warning: private setter in trait Accessors is never used +warn-unused-privates.scala:43: warning: private setter in trait Accessors is never used private var v2: Int = 0 // warn, never set ^ -warn-unused-privates.scala:43: warning: private getter in trait Accessors is never used +warn-unused-privates.scala:44: warning: private var in trait Accessors is never used private var v3: Int = 0 // warn, never got ^ -warn-unused-privates.scala:55: warning: private default argument in trait DefaultArgs is never used +warn-unused-privates.scala:56: warning: private default argument in trait DefaultArgs is never used private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3 ^ -warn-unused-privates.scala:55: warning: private default argument in trait DefaultArgs is never used +warn-unused-privates.scala:56: warning: private default argument in trait DefaultArgs is never used private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3 ^ +warn-unused-privates.scala:67: warning: local var in method f0 is never used + var x = 1 // warn + ^ +warn-unused-privates.scala:74: warning: local val in method f1 is never used + val b = new Outer // warn + ^ +warn-unused-privates.scala:84: warning: private object in object Types is never used + private object Dongo { def f = this } // warn + ^ +warn-unused-privates.scala:94: warning: local object in method l1 is never used + object HiObject { def f = this } // warn + ^ +warn-unused-privates.scala:78: warning: local var x in method f2 is never set - it could be a val + var x = 100 // warn about it being a var + ^ +warn-unused-privates.scala:85: warning: private class Bar1 in object Types is never used + private class Bar1 // warn + ^ +warn-unused-privates.scala:87: warning: private type Alias1 in object Types is never used + private type Alias1 = String // warn + ^ +warn-unused-privates.scala:95: warning: local class Hi is never used + class Hi { // warn + ^ +warn-unused-privates.scala:99: warning: local class DingDongDoobie is never used + class DingDongDoobie // warn + ^ +warn-unused-privates.scala:102: warning: local type OtherThing is never used + type OtherThing = String // warn + ^ error: No warnings can be incurred under -Xfatal-warnings. 
-9 warnings found +20 warnings found one error found diff --git a/test/files/neg/warn-unused-privates.scala b/test/files/neg/warn-unused-privates.scala index 1ac272357f..cb6e946a34 100644 --- a/test/files/neg/warn-unused-privates.scala +++ b/test/files/neg/warn-unused-privates.scala @@ -18,8 +18,10 @@ class B1(msg: String) extends A(msg) class B2(msg0: String) extends A(msg0) class B3(msg0: String) extends A("msg") -/*** Early defs full of noise due to SI-6595. ***/ -/*** +/*** Early defs warnings disabled primarily due to SI-6595. + * The test case is here to assure we aren't issuing false positives; + * the ones labeled "warn" don't warn. + ***/ class Boppy extends { private val hmm: String = "abc" // no warn, used in early defs private val hom: String = "def" // no warn, used in body @@ -35,7 +37,6 @@ class Boppy extends { private final val bum = "ghi" // no warn, might have been (was) inlined final val bum2 = "ghi" // no warn, same } -***/ trait Accessors { private var v1: Int = 0 // warn @@ -56,3 +57,49 @@ trait DefaultArgs { def boppy() = bippy(5, 100, 200) } + +class Outer { + class Inner +} + +trait Locals { + def f0 = { + var x = 1 // warn + var y = 2 + y = 3 + y + y + } + def f1 = { + val a = new Outer // no warn + val b = new Outer // warn + new a.Inner + } + def f2 = { + var x = 100 // warn about it being a var + x + } +} + +object Types { + private object Dongo { def f = this } // warn + private class Bar1 // warn + private class Bar2 // no warn + private type Alias1 = String // warn + private type Alias2 = String // no warn + def bippo = (new Bar2).toString + + def f(x: Alias2) = x.length + + def l1() = { + object HiObject { def f = this } // warn + class Hi { // warn + def f1: Hi = new Hi + def f2(x: Hi) = x + } + class DingDongDoobie // warn + class Bippy // no warn + type Something = Bippy // no warn + type OtherThing = String // warn + (new Bippy): Something + } +} -- cgit v1.2.3 From 9c09c170998f74fba03990977b285e3121db32a6 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 3 Nov 2012 06:29:38 -0700 Subject: Removing unused locals and making vars into vals. According to "git diff" the difference from master to this commit includes: Minus: 112 vals, 135 vars Plus: 165 vals, 2 vars Assuming all the removed ones were vals, which is true from 10K feet, it suggests I removed 80 unused vals and turned 133 vars into vals. There are a few other -Xlint driven improvements bundled with this, like putting double-parentheses around Some((x, y)) so it doesn't trigger the "adapting argument list" warning. 
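To illustrate the last point (a small sketch, not part of the patch): a call that passes two arguments to a one-parameter method is adapted by the compiler into a single tuple argument, and -Xlint (which enables -Ywarn-adapted-args) reports that adaptation; writing the tuple explicitly produces the same value without the warning.

    val a = Some(1, "one")     // compiler adapts the two arguments into a 2-tuple; warns under -Xlint
    val b = Some((1, "one"))   // explicit tuple: same value, no adaptation, no warning

Both a and b have type Some[(Int, String)].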
--- src/actors/scala/actors/Future.scala | 2 +- .../scala/reflect/reify/codegen/GenTypes.scala | 1 - .../scala/reflect/reify/phases/Reshape.scala | 13 ++++--- .../scala/reflect/reify/utils/Extractors.scala | 4 +-- .../scala/reflect/reify/utils/NodePrinters.scala | 4 +-- .../scala/reflect/reify/utils/SymbolTables.scala | 4 +-- src/compiler/scala/tools/ant/Pack200Task.scala | 4 +-- src/compiler/scala/tools/nsc/Global.scala | 3 +- src/compiler/scala/tools/nsc/PhaseAssembly.scala | 12 +++---- .../scala/tools/nsc/ast/TreeBrowsers.scala | 1 - .../scala/tools/nsc/ast/parser/MarkupParsers.scala | 3 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 12 ++----- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 2 +- .../scala/tools/nsc/backend/icode/GenICode.scala | 40 ++++++++++----------- .../tools/nsc/backend/icode/ICodeCheckers.scala | 2 +- .../icode/analysis/ReachingDefinitions.scala | 4 +-- .../backend/icode/analysis/TypeFlowAnalysis.scala | 5 ++- .../tools/nsc/backend/jvm/BytecodeWriters.scala | 2 +- .../scala/tools/nsc/backend/jvm/GenASM.scala | 14 ++++---- .../scala/tools/nsc/backend/jvm/GenJVM.scala | 9 ++--- .../scala/tools/nsc/backend/msil/GenMSIL.scala | 17 +++++---- .../tools/nsc/backend/opt/ClosureElimination.scala | 2 +- .../scala/tools/nsc/backend/opt/Inliners.scala | 7 ++-- .../scala/tools/nsc/dependencies/Changes.scala | 1 - .../scala/tools/nsc/doc/html/page/Template.scala | 3 +- .../html/page/diagram/DotDiagramGenerator.scala | 2 +- .../nsc/doc/html/page/diagram/DotRunner.scala | 5 ++- .../scala/tools/nsc/doc/model/MemberLookup.scala | 2 +- .../scala/tools/nsc/doc/model/ModelFactory.scala | 5 ++- .../doc/model/ModelFactoryImplicitSupport.scala | 3 +- .../nsc/doc/model/ModelFactoryTypeSupport.scala | 1 - .../scala/tools/nsc/doc/model/TreeFactory.scala | 4 +-- .../nsc/doc/model/comment/CommentFactory.scala | 5 ++- .../nsc/doc/model/diagram/DiagramFactory.scala | 2 +- .../scala/tools/nsc/interactive/Global.scala | 2 +- .../nsc/interactive/tests/core/CoreTestDefs.scala | 5 +-- .../scala/tools/nsc/interpreter/ILoop.scala | 1 - .../scala/tools/nsc/interpreter/IMain.scala | 8 ++--- .../scala/tools/nsc/javac/JavaParsers.scala | 3 +- .../scala/tools/nsc/symtab/SymbolLoaders.scala | 1 - .../scala/tools/nsc/symtab/SymbolTrackers.scala | 3 +- .../nsc/symtab/classfile/ClassfileParser.scala | 10 +++--- .../tools/nsc/symtab/classfile/ICodeReader.scala | 11 ++---- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 3 +- .../scala/tools/nsc/symtab/clr/TypeParser.scala | 4 +-- .../scala/tools/nsc/transform/CleanUp.scala | 2 +- .../scala/tools/nsc/transform/Constructors.scala | 5 +-- .../scala/tools/nsc/transform/Erasure.scala | 8 ----- .../scala/tools/nsc/transform/ExplicitOuter.scala | 2 +- .../tools/nsc/typechecker/ContextErrors.scala | 8 ++--- .../scala/tools/nsc/typechecker/Implicits.scala | 8 ++--- .../scala/tools/nsc/typechecker/Infer.scala | 1 - .../tools/nsc/typechecker/MethodSynthesis.scala | 2 +- .../scala/tools/nsc/typechecker/Namers.scala | 7 ++-- .../tools/nsc/typechecker/NamesDefaults.scala | 4 +-- .../tools/nsc/typechecker/PatternMatching.scala | 23 +++++++----- .../scala/tools/nsc/typechecker/RefChecks.scala | 8 ++--- .../tools/nsc/typechecker/SuperAccessors.scala | 2 +- .../tools/nsc/typechecker/SyntheticMethods.scala | 11 +++--- .../scala/tools/nsc/typechecker/Typers.scala | 42 +++++++++++----------- .../scala/tools/reflect/ToolBoxFactory.scala | 41 ++++++++++----------- src/library/scala/collection/SeqLike.scala | 10 +++--- .../scala/collection/concurrent/TrieMap.scala | 2 +- 
.../scala/collection/immutable/HashMap.scala | 3 -- .../scala/collection/immutable/TrieIterator.scala | 1 - .../scala/collection/immutable/Vector.scala | 16 ++++----- .../scala/collection/mutable/FlatHashTable.scala | 2 +- .../scala/collection/mutable/ListBuffer.scala | 1 - src/library/scala/collection/parallel/Tasks.scala | 2 -- .../collection/parallel/mutable/ParArray.scala | 8 ++--- .../collection/parallel/mutable/ParHashMap.scala | 2 +- .../collection/parallel/mutable/ParHashSet.scala | 4 +-- .../collection/parallel/mutable/ParHashTable.scala | 2 +- .../mutable/ResizableParArrayCombiner.scala | 2 +- src/library/scala/collection/script/Message.scala | 2 +- .../scala/util/automata/WordBerrySethi.scala | 1 - .../scala/util/parsing/input/OffsetPosition.scala | 2 +- src/library/scala/xml/PrettyPrinter.scala | 1 - src/library/scala/xml/dtd/ElementValidator.scala | 2 +- .../scala/xml/include/sax/XIncludeFilter.scala | 2 +- src/library/scala/xml/parsing/MarkupParser.scala | 10 ++---- .../lamp/compiler/msil/emit/ILPrinterVisitor.scala | 19 +++++----- .../lamp/compiler/msil/emit/ModuleBuilder.scala | 2 +- .../msil/emit/MultipleFilesILPrinterVisitor.scala | 8 ++--- .../msil/emit/SingleFileILPrinterVisitor.scala | 6 ++-- .../epfl/lamp/compiler/msil/emit/TypeBuilder.scala | 2 +- .../scala/tools/partest/ScaladocModelTest.scala | 5 +-- .../scala/tools/partest/nest/CompileManager.scala | 1 - .../tools/partest/nest/ConsoleFileManager.scala | 5 --- .../scala/tools/partest/nest/ConsoleRunner.scala | 2 -- .../scala/tools/partest/nest/RunnerManager.scala | 5 ++- src/reflect/scala/reflect/api/Printers.scala | 12 +++---- .../scala/reflect/internal/BaseTypeSeqs.scala | 2 +- .../scala/reflect/internal/Definitions.scala | 3 +- src/reflect/scala/reflect/internal/Mirrors.scala | 2 +- src/reflect/scala/reflect/internal/Printers.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 18 +++++----- .../reflect/internal/pickling/UnPickler.scala | 4 +-- .../scala/reflect/internal/util/Statistics.scala | 1 - .../scala/reflect/runtime/JavaMirrors.scala | 4 +-- .../scala/reflect/runtime/SymbolLoaders.scala | 2 +- src/scalap/scala/tools/scalap/Arguments.scala | 2 +- .../scalax/rules/scalasig/ScalaSigPrinter.scala | 8 ++--- test/files/run/reify_newimpl_11.check | 6 ++-- test/files/run/reify_newimpl_13.check | 6 ++-- test/files/run/reify_newimpl_19.check | 6 ++-- 106 files changed, 278 insertions(+), 355 deletions(-) (limited to 'src') diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala index fb7bb488a2..3269174afe 100644 --- a/src/actors/scala/actors/Future.scala +++ b/src/actors/scala/actors/Future.scala @@ -174,7 +174,7 @@ object Futures { * or timeout + `System.currentTimeMillis()` is negative. 
*/ def awaitAll(timeout: Long, fts: Future[Any]*): List[Option[Any]] = { - var resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]] + val resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]] var cnt = 0 val mappedFts = fts.map(ft => diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala index 7aa87dc2f8..ca44938f50 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala @@ -74,7 +74,6 @@ trait GenTypes { if (reifyDebug) println("splicing " + tpe) val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.WeakTypeTag.toString - val key = (tagFlavor, tpe.typeSymbol) // if this fails, it might produce the dreaded "erroneous or inaccessible type" error // to find out the whereabouts of the error run scalac with -Ydebug if (reifyDebug) println("launching implicit search for %s.%s[%s]".format(universe, tagFlavor, tpe)) diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index 9a1732a872..f31c3d4755 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -48,13 +48,13 @@ trait Reshape { val Template(parents, self, body) = impl var body1 = trimAccessors(classDef, reshapeLazyVals(body)) body1 = trimSyntheticCaseClassMembers(classDef, body1) - var impl1 = Template(parents, self, body1).copyAttrs(impl) + val impl1 = Template(parents, self, body1).copyAttrs(impl) ClassDef(mods, name, params, impl1).copyAttrs(classDef) case moduledef @ ModuleDef(mods, name, impl) => val Template(parents, self, body) = impl var body1 = trimAccessors(moduledef, reshapeLazyVals(body)) body1 = trimSyntheticCaseClassMembers(moduledef, body1) - var impl1 = Template(parents, self, body1).copyAttrs(impl) + val impl1 = Template(parents, self, body1).copyAttrs(impl) ModuleDef(mods, name, impl1).copyAttrs(moduledef) case template @ Template(parents, self, body) => val discardedParents = parents collect { case tt: TypeTree => tt } filter isDiscarded @@ -116,7 +116,6 @@ trait Reshape { private def toPreTyperModifiers(mods: Modifiers, sym: Symbol) = { if (!sym.annotations.isEmpty) { - val Modifiers(flags, privateWithin, annotations) = mods val postTyper = sym.annotations filter (_.original != EmptyTree) if (reifyDebug && !postTyper.isEmpty) println("reify symbol annotations for: " + sym) if (reifyDebug && !postTyper.isEmpty) println("originals are: " + sym.annotations) @@ -252,7 +251,7 @@ trait Reshape { val DefDef(mods0, name0, _, _, tpt0, rhs0) = ddef val name1 = nme.dropLocalSuffix(name0) val Modifiers(flags0, privateWithin0, annotations0) = mods0 - var flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD) + val flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD) val mods1 = Modifiers(flags1, privateWithin0, annotations0) setPositions mods0.positions val mods2 = toPreTyperModifiers(mods1, ddef.symbol) ValDef(mods2, name1, tpt0, extractRhs(rhs0)) @@ -267,7 +266,7 @@ trait Reshape { def detectBeanAccessors(prefix: String): Unit = { if (defdef.name.startsWith(prefix)) { - var name = defdef.name.toString.substring(prefix.length) + val name = defdef.name.toString.substring(prefix.length) def uncapitalize(s: String) = if (s.length == 0) "" else { val chars = s.toCharArray; chars(0) = 
chars(0).toLower; new String(chars) } def findValDef(name: String) = (symdefs.values collect { case vdef: ValDef if nme.dropLocalSuffix(vdef.name).toString == name => vdef }).headOption val valdef = findValDef(name).orElse(findValDef(uncapitalize(name))).orNull @@ -279,11 +278,11 @@ trait Reshape { detectBeanAccessors("is") }); - var stats1 = stats flatMap { + val stats1 = stats flatMap { case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isLazy => val mods1 = if (accessors.contains(vdef)) { val ddef = accessors(vdef)(0) // any accessor will do - val Modifiers(flags, privateWithin, annotations) = mods + val Modifiers(flags, _, annotations) = mods var flags1 = flags & ~LOCAL if (!ddef.symbol.isPrivate) flags1 = flags1 & ~PRIVATE val privateWithin1 = ddef.mods.privateWithin diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala index b60d15c1d4..50bd309b52 100644 --- a/src/compiler/scala/reflect/reify/utils/Extractors.scala +++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala @@ -187,7 +187,7 @@ trait Extractors { Literal(Constant(origin: String))))) if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeTerm == nme.newFreeTerm && uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits => - Some(uref1, name, reifyBinding(tree), flags, origin) + Some((uref1, name, reifyBinding(tree), flags, origin)) case _ => None } @@ -204,7 +204,7 @@ trait Extractors { Literal(Constant(origin: String))))) if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeType == nme.newFreeType && uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits => - Some(uref1, name, reifyBinding(tree), flags, origin) + Some((uref1, name, reifyBinding(tree), flags, origin)) case _ => None } diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala index 000e500c69..9b7cc9f2ae 100644 --- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala +++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala @@ -25,8 +25,8 @@ trait NodePrinters { // Rolling a full-fledged, robust TreePrinter would be several times more code. // Also as of late we have tests that ensure that UX won't be broken by random changes to the reifier. 
val lines = (tree.toString.split(EOL) drop 1 dropRight 1).toList splitAt 2 - var (List(universe, mirror), reification) = lines - reification = (for (line <- reification) yield { + val (List(universe, mirror), reification0) = lines + val reification = (for (line <- reification0) yield { var s = line substring 2 s = s.replace(nme.UNIVERSE_PREFIX.toString, "") s = s.replace(".apply", "") diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala index 2607b8f9b7..babea450c1 100644 --- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala +++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala @@ -102,7 +102,7 @@ trait SymbolTables { newSymtab = newSymtab map { case ((sym, tree)) => val ValDef(mods, primaryName, tpt, rhs) = tree val tree1 = - if (!(newAliases contains (sym, primaryName))) { + if (!(newAliases contains ((sym, primaryName)))) { val primaryName1 = newAliases.find(_._1 == sym).get._2 ValDef(mods, primaryName1, tpt, rhs).copyAttrs(tree) } else tree @@ -138,7 +138,7 @@ trait SymbolTables { var result = new SymbolTable(original = Some(encoded)) encoded foreach (entry => (entry.attachments.get[ReifyBindingAttachment], entry.attachments.get[ReifyAliasAttachment]) match { case (Some(ReifyBindingAttachment(_)), _) => result += entry - case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ (sym, alias)) + case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ ((sym, alias))) case _ => // do nothing, this is boilerplate that can easily be recreated by subsequent `result.encode` }) result diff --git a/src/compiler/scala/tools/ant/Pack200Task.scala b/src/compiler/scala/tools/ant/Pack200Task.scala index ff18ddff91..117a1c9def 100644 --- a/src/compiler/scala/tools/ant/Pack200Task.scala +++ b/src/compiler/scala/tools/ant/Pack200Task.scala @@ -99,8 +99,8 @@ class Pack200Task extends ScalaMatchingTask { private def getFileList: List[File] = { var files: List[File] = Nil val fs = getImplicitFileSet - var ds = fs.getDirectoryScanner(getProject()) - var dir = fs.getDir(getProject()) + val ds = fs.getDirectoryScanner(getProject()) + val dir = fs.getDir(getProject()) for (filename <- ds.getIncludedFiles() if filename.toLowerCase.endsWith(".jar")) { val file = new File(dir, filename) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 3e77fc982d..69daa8ce6f 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -298,7 +298,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) private val reader: SourceReader = { val defaultEncoding = Properties.sourceEncoding - val defaultReader = Properties.sourceReader def loadCharset(name: String) = try Some(Charset.forName(name)) @@ -1726,7 +1725,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val printer = new icodes.TextPrinter(null, icodes.linearizer) icodes.classes.values.foreach((cls) => { val suffix = if (cls.symbol.hasModuleFlag) "$.icode" else ".icode" - var file = getFile(cls.symbol, suffix) + val file = getFile(cls.symbol, suffix) // if (file.exists()) // file = new File(file.getParentFile(), file.getName() + "1") try { diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala index 46cdc6a4a0..6c339fb5ae 100644 --- 
a/src/compiler/scala/tools/nsc/PhaseAssembly.scala +++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala @@ -55,7 +55,7 @@ trait PhaseAssembly { * node object does not exist, then create it. */ def getNodeByPhase(phs: SubComponent): Node = { - var node: Node = getNodeByPhase(phs.phaseName) + val node: Node = getNodeByPhase(phs.phaseName) node.phaseobj match { case None => node.phaseobj = Some(List[SubComponent](phs)) @@ -75,7 +75,7 @@ trait PhaseAssembly { * list of the nodes */ def softConnectNodes(frm: Node, to: Node) { - var e = new Edge(frm, to, false) + val e = new Edge(frm, to, false) this.edges += e frm.after += e @@ -87,7 +87,7 @@ trait PhaseAssembly { * list of the nodes */ def hardConnectNodes(frm: Node, to: Node) { - var e = new Edge(frm, to, true) + val e = new Edge(frm, to, true) this.edges += e frm.after += e @@ -164,7 +164,7 @@ trait PhaseAssembly { } else { - var promote = hl.to.before.filter(e => (!e.hard)) + val promote = hl.to.before.filter(e => (!e.hard)) hl.to.before.clear sanity foreach (edge => hl.to.before += edge) for (edge <- promote) { @@ -245,7 +245,7 @@ trait PhaseAssembly { for (phs <- phsSet) { - var fromnode = graph.getNodeByPhase(phs) + val fromnode = graph.getNodeByPhase(phs) phs.runsRightAfter match { case None => @@ -306,7 +306,7 @@ trait PhaseAssembly { sbuf.append("\"" + node.allPhaseNames + "(" + node.level + ")" + "\" [color=\"#0000ff\"]\n") } sbuf.append("}\n") - var out = new BufferedWriter(new FileWriter(filename)) + val out = new BufferedWriter(new FileWriter(filename)) out.write(sbuf.toString) out.flush() out.close() diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index be7a6295b4..3141227bad 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -529,7 +529,6 @@ abstract class TreeBrowsers { * attributes */ def symbolAttributes(t: Tree): String = { val s = t.symbol - var att = "" if ((s ne null) && (s != NoSymbol)) { var str = flagsToString(s.flags) diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index 9c03b10157..bb003ef0e1 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -124,7 +124,6 @@ trait MarkupParsers { val start = curOffset val key = xName xEQ - val delim = ch val mid = curOffset val value: Tree = ch match { case '"' | '\'' => @@ -410,7 +409,7 @@ trait MarkupParsers { * | Name [S] '/' '>' */ def xPattern: Tree = { - var start = curOffset + val start = curOffset val qname = xName debugLastStartElement.push((start, qname)) xSpaceOpt diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 380fd1fcaa..722e6d1e9a 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -919,7 +919,7 @@ self => ) def compoundTypeRest(t: Tree): Tree = { - var ts = new ListBuffer[Tree] += t + val ts = new ListBuffer[Tree] += t while (in.token == WITH) { in.nextToken() ts += annotType() @@ -1270,7 +1270,7 @@ self => def expr(): Tree = expr(Local) def expr(location: Int): Tree = { - var savedPlaceholderParams = placeholderParams + val savedPlaceholderParams = placeholderParams placeholderParams = List() var res = expr0(location) if (!placeholderParams.isEmpty && !isWildcard(res)) { @@ -1320,7 +1320,6 
@@ self => parseTry case WHILE => def parseWhile = { - val start = in.offset atPos(in.skipToken()) { val lname: Name = freshTermName(nme.WHILE_PREFIX) val cond = condExpr() @@ -1332,7 +1331,6 @@ self => parseWhile case DO => def parseDo = { - val start = in.offset atPos(in.skipToken()) { val lname: Name = freshTermName(nme.DO_WHILE_PREFIX) val body = expr() @@ -1796,7 +1794,6 @@ self => * }}} */ def pattern2(): Tree = { - val nameOffset = in.offset val p = pattern3() if (in.token != AT) p @@ -1909,7 +1906,7 @@ self => val start = in.offset in.token match { case IDENTIFIER | BACKQUOTED_IDENT | THIS => - var t = stableId() + val t = stableId() in.token match { case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT => t match { @@ -2616,7 +2613,6 @@ self => in.nextToken() newLinesOpt() atPos(start, in.offset) { - val nameOffset = in.offset val name = identForType() // @M! a type alias as well as an abstract type may declare type parameters val tparams = typeParamClauseOpt(name, null) @@ -2893,7 +2889,6 @@ self => * }}} */ def packaging(start: Int): Tree = { - val nameOffset = in.offset val pkg = pkgQualId() val stats = inBracesOrNil(topStatSeq()) makePackaging(start, pkg, stats) @@ -3103,7 +3098,6 @@ self => ts ++= topStatSeq() } } else { - val nameOffset = in.offset in.flushDoc val pkg = pkgQualId() diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 3ff52cc32b..bc7a679560 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -450,7 +450,7 @@ abstract class TreeBuilder { def combine(gs: List[ValFrom]): ValFrom = (gs: @unchecked) match { case g :: Nil => g case ValFrom(pos1, pat1, rhs1) :: gs2 => - val ValFrom(pos2, pat2, rhs2) = combine(gs2) + val ValFrom(_, pat2, rhs2) = combine(gs2) ValFrom(pos1, makeTuple(List(pat1, pat2), false), Apply(Select(rhs1, nme.zip), List(rhs2))) } makeForYield(List(combine(gs)), body) diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index d4126f2786..9a7aafd787 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -432,7 +432,7 @@ abstract class GenICode extends SubComponent { private def genPrimitiveOp(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = { val sym = tree.symbol - val Apply(fun @ Select(receiver, _), args) = tree + val Apply(fun @ Select(receiver, _), _) = tree val code = scalaPrimitives.getPrimitive(sym, receiver.tpe) if (scalaPrimitives.isArithmeticOp(code)) @@ -543,9 +543,8 @@ abstract class GenICode extends SubComponent { // emits CIL_LOAD_ARRAY_ITEM_ADDRESS case Apply(fun, args) => if (isPrimitive(fun.symbol)) { - val sym = tree.symbol - val Apply(fun @ Select(receiver, _), args) = tree + val Select(receiver, _) = fun val code = scalaPrimitives.getPrimitive(sym, receiver.tpe) if (isArrayOp(code)) { @@ -858,7 +857,7 @@ abstract class GenICode extends SubComponent { // we store this boxed value to a local, even if not really needed. 
// boxing optimization might use it, and dead code elimination will // take care of unnecessary stores - var loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed") + val loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed") ctx1.bb.emit(STORE_LOCAL(loc1)) ctx1.bb.emit(LOAD_LOCAL(loc1)) } @@ -1104,7 +1103,7 @@ abstract class GenICode extends SubComponent { case Match(selector, cases) => def genLoadMatch = { debuglog("Generating SWITCH statement."); - var ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue) + val ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue) val afterCtx = ctx1.newBlock var caseCtx: Context = null generatedType = toTypeKind(tree.tpe) @@ -2116,7 +2115,7 @@ abstract class GenICode extends SubComponent { } else ctx - val finalizerExh = if (finalizer != EmptyTree) Some({ + if (finalizer != EmptyTree) { val exh = outerCtx.newExceptionHandler(NoSymbol, toTypeKind(finalizer.tpe), finalizer.pos) // finalizer covers exception handlers this.addActiveHandler(exh) // .. and body aswell val ctx = finalizerCtx.enterExceptionHandler(exh) @@ -2129,21 +2128,20 @@ abstract class GenICode extends SubComponent { ctx1.bb.enterIgnoreMode; ctx1.bb.close finalizerCtx.endHandler() - exh - }) else None - - val exhs = handlers.map { case (sym, kind, handler) => // def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) = - val exh = this.newExceptionHandler(sym, kind, tree.pos) - var ctx1 = outerCtx.enterExceptionHandler(exh) - ctx1.addFinalizer(finalizer, finalizerCtx) - loadException(ctx1, exh, tree.pos) - ctx1 = handler(ctx1) - // emit finalizer - val ctx2 = emitFinalizer(ctx1) - ctx2.bb.closeWith(JUMP(afterCtx.bb)) - outerCtx.endHandler() - exh - } + } + + for ((sym, kind, handler) <- handlers) { + val exh = this.newExceptionHandler(sym, kind, tree.pos) + var ctx1 = outerCtx.enterExceptionHandler(exh) + ctx1.addFinalizer(finalizer, finalizerCtx) + loadException(ctx1, exh, tree.pos) + ctx1 = handler(ctx1) + // emit finalizer + val ctx2 = emitFinalizer(ctx1) + ctx2.bb.closeWith(JUMP(afterCtx.bb)) + outerCtx.endHandler() + } + val bodyCtx = this.newBlock if (finalizer != EmptyTree) bodyCtx.addFinalizer(finalizer, finalizerCtx) diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index 5ccbbf997e..bc42605246 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -294,7 +294,7 @@ abstract class ICodeCheckers { else prefix + " with initial stack " + initial.types.mkString("[", ", ", "]") }) - var stack = new TypeStack(initial) + val stack = new TypeStack(initial) def checkStack(len: Int) { if (stack.length < len) ICodeChecker.this.icodeError("Expected at least " + len + " elements on the stack", stack) diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala index 6f9302c97b..6cd349df01 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala @@ -155,7 +155,7 @@ abstract class ReachingDefinitions { import lattice.IState def updateReachingDefinition(b: BasicBlock, idx: Int, rd: 
ListSet[Definition]): ListSet[Definition] = { val STORE_LOCAL(local) = b(idx) - var tmp = local + val tmp = local (rd filter { case (l, _, _) => l != tmp }) + ((tmp, b, idx)) } @@ -197,7 +197,7 @@ abstract class ReachingDefinitions { def findDefs(bb: BasicBlock, idx: Int, m: Int, depth: Int): List[(BasicBlock, Int)] = if (idx > 0) { assert(bb.closed, bb) - var instrs = bb.getArray + val instrs = bb.getArray var res: List[(BasicBlock, Int)] = Nil var i = idx var n = m diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index cdf2788284..c4f4c60846 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -136,7 +136,7 @@ abstract class TypeFlowAnalysis { timer.start // icodes.lubs0 = 0 forwardAnalysis(blockTransfer) - val t = timer.stop + timer.stop if (settings.debug.value) { linearizer.linearize(method).foreach(b => if (b != method.startBlock) assert(visited.contains(b), @@ -326,7 +326,6 @@ abstract class TypeFlowAnalysis { class TransferFunction(consumed: Int, gens: List[Gen]) extends (lattice.Elem => lattice.Elem) { def apply(in: lattice.Elem): lattice.Elem = { val out = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack)) - val bindings = out.vars val stack = out.stack out.stack.pop(consumed) @@ -389,7 +388,7 @@ abstract class TypeFlowAnalysis { timer.start forwardAnalysis(blockTransfer) - val t = timer.stop + timer.stop /* Now that `forwardAnalysis(blockTransfer)` has finished, all inlining candidates can be found in `remainingCALLs`, whose keys are callsites and whose values are pieces of information about the typestack just before the callsite in question. diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala index 086327934b..fcd196eff7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala @@ -102,7 +102,7 @@ trait BytecodeWriters { super.writeClass(label, jclassName, jclassBytes, sym) val pathName = jclassName - var dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile; + val dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile; dumpFile.parent.createDirectory() val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path)) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index a6e4339d82..34f854a072 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -81,7 +81,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { // Before erasure so we can identify generic mains. 
enteringErasure { val companion = sym.linkedClassOfClass - val companionMain = companion.tpe_*.member(nme.main) if (hasJavaMainMethod(companion)) failNoForwarder("companion contains its own main method") @@ -592,7 +591,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { collectInnerClass(sym) - var hasInternalName = (sym.isClass || (sym.isModule && !sym.isMethod)) + val hasInternalName = (sym.isClass || (sym.isModule && !sym.isMethod)) val cachedJN = javaNameCache.getOrElseUpdate(sym, { if (hasInternalName) { sym.javaBinaryName } else { sym.javaSimpleName } @@ -1172,7 +1171,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { debuglog("Dumping mirror class for object: " + moduleClass) val linkedClass = moduleClass.companionClass - val linkedModule = linkedClass.companionSymbol lazy val conflictingNames: Set[Name] = { (linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name }).toSet } @@ -2212,7 +2210,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { def getMerged(): scala.collection.Map[Local, List[Interval]] = { // TODO should but isn't: unbalanced start(s) of scope(s) - val shouldBeEmpty = pending filter { p => val Pair(k, st) = p; st.nonEmpty }; + val shouldBeEmpty = pending filter { p => val Pair(_, st) = p; st.nonEmpty }; val merged = mutable.Map[Local, List[Interval]]() def addToMerged(lv: Local, start: Label, end: Label) { val intv = Interval(start, end) @@ -2275,7 +2273,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { } // quest for deterministic output that Map.toList doesn't provide (so that ant test.stability doesn't complain). val srtd = fltnd.sortBy { kr => - val Triple(name: String, local: Local, intrvl: Interval) = kr + val Triple(name: String, _, intrvl: Interval) = kr Triple(intrvl.start, intrvl.end - intrvl.start, name) // ie sort by (start, length, name) } @@ -2510,7 +2508,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { def genFldsInstr() = (instr: @unchecked) match { case lf @ LOAD_FIELD(field, isStatic) => - var owner = javaName(lf.hostClass) + val owner = javaName(lf.hostClass) debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags)) val fieldJName = javaName(field) val fieldDescr = descriptor(field) @@ -3343,8 +3341,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters { var wasReduced = false val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock)); - var elided = mutable.Set.empty[BasicBlock] // debug - var newTargets = mutable.Set.empty[BasicBlock] // debug + val elided = mutable.Set.empty[BasicBlock] // debug + val newTargets = mutable.Set.empty[BasicBlock] // debug for (ep <- entryPoints) { var reachable = directSuccStar(ep) // this list may contain blocks belonging to jump-chains that we'll skip over diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index 6797b15cc6..2043a34ef6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -73,7 +73,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with // Before erasure so we can identify generic mains. 
enteringErasure { val companion = sym.linkedClassOfClass - val companionMain = companion.tpe.member(nme.main) if (hasJavaMainMethod(companion)) failNoForwarder("companion contains its own main method") @@ -514,9 +513,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with * @author Ross Judson (ross.judson@soletta.com) */ def genBeanInfoClass(c: IClass) { - val description = c.symbol getAnnotation BeanDescriptionAttr - // informProgress(description.toString) - val beanInfoClass = fjbgContext.JClass(javaFlags(c.symbol), javaName(c.symbol) + "BeanInfo", "scala/beans/ScalaBeanInfo", @@ -1063,7 +1059,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with var i = 0 var index = 0 - var argTypes = mirrorMethod.getArgumentTypes() + val argTypes = mirrorMethod.getArgumentTypes() while (i < argTypes.length) { mirrorCode.emitLOAD(index, argTypes(i)) index += argTypes(i).getSize() @@ -1095,7 +1091,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with val className = jclass.getName val linkedClass = moduleClass.companionClass - val linkedModule = linkedClass.companionSymbol lazy val conflictingNames: Set[Name] = { linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name } toSet } @@ -1339,7 +1334,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with case LOAD_LOCAL(local) => jcode.emitLOAD(indexOf(local), javaType(local.kind)) case lf @ LOAD_FIELD(field, isStatic) => - var owner = javaName(lf.hostClass) + val owner = javaName(lf.hostClass) debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags)) val fieldJName = javaName(field) diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala index 8197e564d1..21b62b0e6f 100644 --- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala +++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala @@ -258,9 +258,9 @@ abstract class GenMSIL extends SubComponent { * and thus shouldn't be added by this method. 
*/ def addAttributes(member: ICustomAttributeSetter, annotations: List[AnnotationInfo]) { - val attributes = annotations.map(_.atp.typeSymbol).collect { - case definitions.TransientAttr => null // TODO this is just an example - } + // val attributes = annotations.map(_.atp.typeSymbol).collect { + // case definitions.TransientAttr => null // TODO this is just an example + // } return // TODO: implement at some point } @@ -823,7 +823,7 @@ abstract class GenMSIL extends SubComponent { def loadFieldOrAddress(field: Symbol, isStatic: Boolean, msg: String, loadAddr : Boolean) { debuglog(msg + " with owner: " + field.owner + " flags: " + Flags.flagsToString(field.owner.flags)) - var fieldInfo = fields.get(field) match { + val fieldInfo = fields.get(field) match { case Some(fInfo) => fInfo case None => val fInfo = getType(field.owner).GetField(msilName(field)) @@ -1254,7 +1254,7 @@ abstract class GenMSIL extends SubComponent { mcode.Emit(OpCodes.Stloc, switchLocal) var i = 0 for (l <- tags) { - var targetLabel = labels(branches(i)) + val targetLabel = labels(branches(i)) for (i <- l) { mcode.Emit(OpCodes.Ldloc, switchLocal) loadI4(i, mcode) @@ -1871,7 +1871,7 @@ abstract class GenMSIL extends SubComponent { val sym = ifield.symbol debuglog("Adding field: " + sym.fullName) - var attributes = msilFieldFlags(sym) + val attributes = msilFieldFlags(sym) val fieldTypeWithCustomMods = new PECustomMod(msilType(sym.tpe), customModifiers(sym.annotations)) @@ -1905,7 +1905,7 @@ abstract class GenMSIL extends SubComponent { val ownerType = getType(sym.enclClass).asInstanceOf[TypeBuilder] assert(mtype == ownerType, "mtype = " + mtype + "; ownerType = " + ownerType) - var paramTypes = msilParamTypes(sym) + val paramTypes = msilParamTypes(sym) val attr = msilMethodFlags(sym) if (m.symbol.isClassConstructor) { @@ -1917,7 +1917,7 @@ abstract class GenMSIL extends SubComponent { mapConstructor(sym, constr) addAttributes(constr, sym.annotations) } else { - var resType = msilType(m.returnType) + val resType = msilType(m.returnType) val method = ownerType.DefineMethod(msilName(sym), attr, resType, paramTypes) for (i <- 0.until(paramTypes.length)) { @@ -2037,7 +2037,6 @@ abstract class GenMSIL extends SubComponent { } private def generateMirrorClass(sym: Symbol) { - val tBuilder = getType(sym) assert(sym.isModuleClass, "Can't generate Mirror-Class for the Non-Module class " + sym) debuglog("Dumping mirror class for object: " + sym) val moduleName = msilName(sym) diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala index bcdcbfd435..1c57120762 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala @@ -120,7 +120,7 @@ abstract class ClosureElimination extends SubComponent { case LOAD_FIELD(f, false) /* if accessible(f, m.symbol) */ => def replaceFieldAccess(r: Record) { - val Record(cls, bindings) = r + val Record(cls, _) = r info.getFieldNonRecordValue(r, f) foreach { v => bb.replaceInstruction(i, DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil) debuglog(s"replaced $i with $v") diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index ab5184dcbd..595a40fdd3 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -322,8 +322,8 @@ abstract class Inliners extends SubComponent { if 
(settings.debug.value) inlineLog("caller", ownedName(m.symbol), "in " + m.symbol.owner.fullName) - var sizeBeforeInlining = m.code.blockCount - var instrBeforeInlining = m.code.instructionCount + val sizeBeforeInlining = m.code.blockCount + val instrBeforeInlining = m.code.instructionCount var retry = false var count = 0 @@ -479,7 +479,7 @@ abstract class Inliners extends SubComponent { * As a whole, both `preInline()` invocations amount to priming the inlining process, * so that the first TFA that is run afterwards is able to gain more information as compared to a cold-start. */ - val totalPreInlines = { + /*val totalPreInlines = */ { // Val name commented out to emphasize it is never used val firstRound = preInline(true) if(firstRound == 0) 0 else (firstRound + preInline(false)) } @@ -571,7 +571,6 @@ abstract class Inliners extends SubComponent { m.normalize if (sizeBeforeInlining > 0) { val instrAfterInlining = m.code.instructionCount - val prefix = if ((instrAfterInlining > 2 * instrBeforeInlining) && (instrAfterInlining > 200)) "!!" else "" val inlinings = caller.inlinedCalls if (inlinings > 0) { val s1 = s"instructions $instrBeforeInlining -> $instrAfterInlining" diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala index c8ff700208..b3cacee20a 100644 --- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala +++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala @@ -165,7 +165,6 @@ abstract class Changes { /** Return the list of changes between 'from' and 'toSym.info'. */ def changeSet(from: Type, toSym: Symbol): List[Change] = { - implicit val defaultReason = "types" implicit val defaultStrictTypeRefTest = true val to = toSym.info diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala index 919a45aefc..20c143cd17 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala @@ -527,7 +527,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp val sourceLink: Seq[scala.xml.Node] = mbr match { case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined && dtpl.inSource.isDefined && !isReduced) => - val (absFile, line) = dtpl.inSource.get + val (absFile, _) = dtpl.inSource.get
Source
{ { Text(absFile.file.getName) } }
case _ => NodeSeq.Empty @@ -651,7 +651,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp case dtpl: DocTemplateEntity if isSelf && !isReduced => val diagram = f(dtpl) if (diagram.isDefined) { - val s = universe.settings val diagramSvg = generator.generate(diagram.get, tpl, this) if (diagramSvg != NodeSeq.Empty) {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala index 8c1e9b0fe0..f4608bdb8e 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala @@ -211,7 +211,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;"); // assemble node attribues in a map - var attr = scala.collection.mutable.Map[String, String]() + val attr = scala.collection.mutable.Map[String, String]() // link node.doctpl match { diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala index 5cdd5c74a4..be7c27a4ae 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala @@ -183,7 +183,7 @@ class DotProcess(settings: doc.Settings) { private[this] def outputFn(stdOut: InputStream): Unit = { val reader = new BufferedReader(new InputStreamReader(stdOut)) - var buffer: StringBuilder = new StringBuilder() + val buffer: StringBuilder = new StringBuilder() try { var line = reader.readLine while (!error && line != null) { @@ -209,7 +209,6 @@ class DotProcess(settings: doc.Settings) { private[this] def errorFn(stdErr: InputStream): Unit = { val reader = new BufferedReader(new InputStreamReader(stdErr)) - var buffer: StringBuilder = new StringBuilder() try { var line = reader.readLine while (line != null) { @@ -225,4 +224,4 @@ class DotProcess(settings: doc.Settings) { errorBuffer.append(" Error thread in " + templateName + ": Exception: " + exc + "\n") } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala index 5257db1610..2a28d4c589 100644 --- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala +++ b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala @@ -19,7 +19,7 @@ trait MemberLookup { def memberLookup(pos: Position, query: String, inTplOpt: Option[DocTemplateImpl]): LinkTo = { assert(modelFinished) - var members = breakMembers(query) + val members = breakMembers(query) //println(query + " => " + members) // (1) First look in the root package, as most of the links are qualified diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index 2ca80c9282..010bb98549 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -853,7 +853,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } def findMember(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = { - val tplSym = normalizeTemplate(aSym.owner) inTpl.members.find(_.sym == aSym) } @@ -1007,7 +1006,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def makeQualifiedName(sym: Symbol, relativeTo: Option[Symbol] = None): String = { val stop = relativeTo map (_.ownerChain.toSet) getOrElse Set[Symbol]() var sym1 = sym - var path = new StringBuilder() + val path = new StringBuilder() // var path = List[Symbol]() while ((sym1 != NoSymbol) && (path.isEmpty || !stop(sym1))) { @@ -1076,7 +1075,7 @@ class ModelFactory(val global: Global, val
settings: doc.Settings) { def findExternalLink(sym: Symbol, name: String): Option[LinkTo] = { val sym1 = if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass - else if (sym.isPackage) + else if (sym.isPackage) /* Get package object which has associatedFile ne null */ sym.info.member(newTermName("package")) else sym diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index af89978be1..a76f90febb 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -96,7 +96,7 @@ trait ModelFactoryImplicitSupport { // But we don't want that, so we'll simply refuse to find implicit conversions on for Nothing and Null if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil else { - var context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit) + val context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit) val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams) var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl)) @@ -387,7 +387,6 @@ trait ModelFactoryImplicitSupport { lazy val memberImpls: List[MemberImpl] = { // Obtain the members inherited by the implicit conversion val memberSyms = toType.members.filter(implicitShouldDocument(_)).toList - val existingSyms = sym.info.members // Debugging part :) debug(sym.nameString + "\n" + "=" * sym.nameString.length()) diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala index cd86dcb606..8ba1560926 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala @@ -229,7 +229,6 @@ trait ModelFactoryTypeSupport { def appendClauses = { nameBuffer append " forSome {" var first = true - val qset = quantified.toSet for (sym <- quantified) { if (!first) { nameBuffer append ", " } else first = false if (sym.isSingletonExistential) { diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala index bd7534ded4..b972649194 100755 --- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala @@ -21,7 +21,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory => def makeTree(rhs: Tree): Option[TreeEntity] = { - var expr = new StringBuilder + val expr = new StringBuilder var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end) rhs.pos match { @@ -39,7 +39,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory => * stores it in tree.refs with its position */ def makeLink(rhs: Tree){ - var start = pos.startOrPoint - firstIndex + val start = pos.startOrPoint - firstIndex val end = pos.endOrPoint - firstIndex if(start != end) { var asym = rhs.symbol diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala index 822c11307c..20e2979615 100644 --- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala +++ 
b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala @@ -759,8 +759,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member def link(): Inline = { val SchemeUri = """([a-z]+:.*)""".r jump("[[") - var parens = 2 + repeatJump('[') - val start = "[" * parens + val parens = 2 + repeatJump('[') val stop = "]" * parens //println("link with " + parens + " matching parens") val target = readUntil { check(stop) || check(" ") } @@ -805,7 +804,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member */ def normalizeIndentation(_code: String): String = { - var code = _code.trim + val code = _code.trim var maxSkip = Integer.MAX_VALUE var crtSkip = 0 var wsArea = true diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala index db2d0c0175..78bff9d349 100644 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala @@ -48,7 +48,7 @@ trait DiagramFactory extends DiagramDirectiveParser { val thisNode = ThisNode(tpl.resultType, Some(tpl))(Some(tpl.qualifiedName + " (this " + tpl.kind + ")")) // superclasses - var superclasses: List[Node] = + val superclasses: List[Node] = tpl.parentTypes.collect { case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))() }.reverse diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index 2e2c772a38..dc66bb7fd7 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -355,7 +355,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } // don't forget to service interrupt requests - val iqs = scheduler.dequeueAllInterrupts(_.execute()) + scheduler.dequeueAllInterrupts(_.execute()) debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size)) debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)" diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala index c8e6b6ccce..704d014eb9 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala @@ -77,7 +77,8 @@ private[tests] trait CoreTestDefs // askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile! 
val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null - val sourceFile = sourceFiles.find(_.path == treePath) match { + + sourceFiles.find(_.path == treePath) match { case Some(source) => compiler.askLinkPos(tree.symbol, source, r) r.get match { @@ -97,4 +98,4 @@ private[tests] trait CoreTestDefs } } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index 18d0567ff3..d5b5d43baf 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -279,7 +279,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) private def importsCommand(line: String): Result = { val tokens = words(line) val handlers = intp.languageWildcardHandlers ++ intp.importHandlers - val isVerbose = tokens contains "-v" handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach { case (handler, idx) => diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index a44f862dd7..7e2dbef9ec 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -69,9 +69,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends @deprecated("Use replOutput.dir instead", "2.11.0") def virtualDirectory = replOutput.dir - def showDirectory = replOutput.show(out) + def showDirectory() = replOutput.show(out) - private var currentSettings: Settings = initialSettings private[nsc] var printResults = true // whether to print result lines private[nsc] var totalSilence = false // whether to print anything private var _initializeComplete = false // compiler is initialized @@ -98,7 +97,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends if (isInitializeComplete) global.classPath.asURLs else new PathResolver(settings).result.asURLs // the compiler's classpath ) - def settings = currentSettings + def settings = initialSettings def mostRecentLine = prevRequestList match { case Nil => "" case req :: _ => req.originalLine @@ -592,7 +591,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends */ def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = { val bindRep = new ReadEvalPrint() - val run = bindRep.compile(""" + bindRep.compile(""" |object %s { | var value: %s = _ | def set(x: Any) = value = x.asInstanceOf[%s] @@ -622,7 +621,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def rebind(p: NamedParam): IR.Result = { val name = p.name - val oldType = typeOfTerm(name) orElse { return IR.Error } val newType = p.tpe val tempName = freshInternalVarName() diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index a30ae1cb36..8d70ac7c4a 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -348,8 +348,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { /** Annotation ::= TypeName [`(` AnnotationArgument {`,` AnnotationArgument} `)`] */ def annotation() { - val pos = in.currentPos - var t = qualId() + qualId() if (in.token == LPAREN) { skipAhead(); accept(RPAREN) } else if (in.token 
== LBRACE) { skipAhead(); accept(RBRACE) } } diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 369b6aa77d..a5acf5734c 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -226,7 +226,6 @@ abstract class SymbolLoaders { assert(root.isPackageClass, root) root.setInfo(new PackageClassInfoType(newScope, root)) - val sourcepaths = classpath.sourcepaths if (!root.isRoot) { for (classRep <- classpath.classes if platform.doLoad(classRep)) { initializeFromClassPath(root, classRep) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala index 249f6151ef..d2d97ceacf 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala @@ -110,7 +110,6 @@ trait SymbolTrackers { case Some(oldFlags) => val added = masked & ~oldFlags val removed = oldFlags & ~masked - val steady = masked & ~(added | removed) val all = masked | oldFlags val strs = 0 to 63 map { bit => val flag = 1L << bit @@ -177,7 +176,7 @@ trait SymbolTrackers { } def show(label: String): String = { val hierarchy = Node(current) - val Change(added, removed, symMap, owners, flags) = history.head + val Change(_, removed, symMap, _, _) = history.head def detailString(sym: Symbol) = { val ownerString = sym.ownerChain splitAt 3 match { case (front, back) => diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 42589874fe..5922d67a94 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -497,8 +497,8 @@ abstract class ClassfileParser { def parseClass() { val jflags = in.nextChar val isAnnotation = hasAnnotation(jflags) - var sflags = toScalaClassFlags(jflags) - var nameIdx = in.nextChar + val sflags = toScalaClassFlags(jflags) + val nameIdx = in.nextChar currentClass = pool.getClassName(nameIdx) /** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled. 
@@ -596,7 +596,7 @@ abstract class ClassfileParser { def parseField() { val jflags = in.nextChar - var sflags = toScalaFieldFlags(jflags) + val sflags = toScalaFieldFlags(jflags) if ((sflags & PRIVATE) != 0L && !global.settings.optimise.value) { in.skip(4); skipAttributes() } else { @@ -626,7 +626,7 @@ abstract class ClassfileParser { def parseMethod() { val jflags = in.nextChar.toInt - var sflags = toScalaMethodFlags(jflags) + val sflags = toScalaMethodFlags(jflags) if (isPrivate(jflags) && !global.settings.optimise.value) { val name = pool.getName(in.nextChar) if (name == nme.CONSTRUCTOR) @@ -1078,7 +1078,7 @@ abstract class ClassfileParser { def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) { val completer = new global.loaders.ClassfileLoader(file) val name = entry.originalName - var sflags = toScalaClassFlags(jflags) + val sflags = toScalaClassFlags(jflags) val owner = getOwner(jflags) val scope = getScope(jflags) val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index b286f52280..5af6786002 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -33,7 +33,6 @@ abstract class ICodeReader extends ClassfileParser { * for non-static members. */ def readClass(cls: Symbol): (IClass, IClass) = { - var classFile: io.AbstractFile = null; cls.info // ensure accurate type information isScalaModule = cls.isModule && !cls.isJavaDefined @@ -58,11 +57,9 @@ abstract class ICodeReader extends ClassfileParser { override def parseClass() { this.instanceCode = new IClass(clazz) this.staticCode = new IClass(staticModule) - val jflags = in.nextChar - val isAttribute = (jflags & JAVA_ACC_ANNOTATION) != 0 - val sflags = toScalaClassFlags(jflags) // what, this is never used?? - val c = pool getClassSymbol in.nextChar + in.nextChar + pool getClassSymbol in.nextChar parseInnerClasses() in.skip(2) // super class @@ -125,7 +122,7 @@ abstract class ICodeReader extends ClassfileParser { override def parseMethod() { val (jflags, sym) = parseMember(false) - var beginning = in.bp + val beginning = in.bp try { if (sym != NoSymbol) { this.method = new IMethod(sym) @@ -669,7 +666,6 @@ abstract class ICodeReader extends ClassfileParser { val blocks = makeBasicBlocks var otherBlock: BasicBlock = NoBasicBlock - var disableJmpTarget = false for ((pc, instr) <- instrs.iterator) { // Console.println("> " + pc + ": " + instr); @@ -724,7 +720,6 @@ abstract class ICodeReader extends ClassfileParser { /** Abstract interpretation for one instruction. 
*/ override def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = { - val bindings = out.vars val stack = out.stack import stack.push i match { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 7c82895677..de12428c7c 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -425,7 +425,7 @@ abstract class Pickler extends SubComponent { * argument of some Annotation */ private def putMods(mods: Modifiers) = if (putEntry(mods)) { // annotations in Modifiers are removed by the typechecker - val Modifiers(flags, privateWithin, Nil) = mods + val Modifiers(_, privateWithin, Nil) = mods putEntry(privateWithin) } @@ -998,7 +998,6 @@ abstract class Pickler extends SubComponent { } def printRefs(refs: List[AnyRef]) { refs foreach printRef } def printSymInfo(sym: Symbol) { - var posOffset = 0 printRef(sym.name) printRef(localizedOwner(sym)) print(flagsToString(sym.flags & PickledFlags)+" ") diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala index 1d2ffd2a73..99dec8e3f7 100644 --- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala @@ -520,12 +520,12 @@ abstract class TypeParser { val delegateParamTypes: List[Type] = List(typClrType); // not ImplicitMethodType, this is for methods with implicit parameters (not implicit methods) val forwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(delegateParamTypes), funType) - val fmsym = createMethod(nme.view_, flags, forwardViewMethodType, null, true); + createMethod(nme.view_, flags, forwardViewMethodType, null, true); // create the backward view: function => delegate val functionParamTypes: List[Type] = List(funType); val backwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(functionParamTypes), typClrType) - val bmsym = createMethod(nme.view_, flags, backwardViewMethodType, null, true); + createMethod(nme.view_, flags, backwardViewMethodType, null, true); } private def createDelegateChainers(typ: MSILType) = { diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 1f353bb31c..2e504af47f 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -45,7 +45,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL { result } private def transformTemplate(tree: Tree) = { - val Template(parents, self, body) = tree + val Template(_, _, body) = tree clearStatics() val newBody = transformTrees(body) val templ = deriveTemplate(tree)(_ => transformTrees(newStaticMembers.toList) ::: newBody) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 1db3db9376..b8c14c2733 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -422,7 +422,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { def ensureAccessor(sym: Symbol)(acc: => Symbol) = if (sym.owner == clazz && !sym.isMethod && sym.isPrivate) { // there's an access to a naked field of the enclosing class - var getr = acc + val getr = acc getr makeNotPrivate clazz getr } else { @@ 
-529,7 +529,8 @@ abstract class Constructors extends Transform with ast.TreeDSL { (pre ::: supercalls, rest) } - var (uptoSuperStats, remainingConstrStats) = splitAtSuper(constrStatBuf.toList) + val (uptoSuperStats, remainingConstrStats0) = splitAtSuper(constrStatBuf.toList) + var remainingConstrStats = remainingConstrStats0 /** XXX This is not corect: remainingConstrStats.nonEmpty excludes too much, * but excluding it includes too much. The constructor sequence being mimicked diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 7d7e53b946..7c77d7e27e 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -724,15 +724,7 @@ abstract class Erasure extends AddInterfaces case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List()) if tree.symbol == Any_asInstanceOf => val qual1 = typedQualifier(qual, NOmode, ObjectClass.tpe) // need to have an expected type, see #3037 - val qualClass = qual1.tpe.typeSymbol -/* - val targClass = targ.tpe.typeSymbol - if (isNumericValueClass(qualClass) && isNumericValueClass(targClass)) - // convert numeric type casts - atPos(tree.pos)(Apply(Select(qual1, "to" + targClass.name), List())) - else -*/ if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) { val noNullCheckNeeded = targ.tpe match { case ErasedValueType(tref) => diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 4a0d25fd09..cfd1063f40 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -335,7 +335,7 @@ abstract class ExplicitOuter extends InfoTransform */ def outerAccessorDef: Tree = { val outerAcc = outerAccessor(currentClass) - var rhs: Tree = + val rhs: Tree = if (outerAcc.isDeferred) EmptyTree else This(currentClass) DOT outerField(currentClass) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index bd1649dec5..2a25cc37a0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -157,7 +157,6 @@ trait ContextErrors { case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass => val retyped = typed (tree.duplicate setType null) val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic) - if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) found else { // The members arrive marked private, presumably because there was no @@ -171,11 +170,11 @@ trait ContextErrors { case _ => found } - assert(!found.isErroneous && !req.isErroneous, (found, req)) + assert(!foundType.isErroneous && !req.isErroneous, (foundType, req)) - issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req))) ) + issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(foundType, req, infer.isPossiblyMissingArgs(foundType, req))) ) if (settings.explaintypes.value) - explainTypes(found, req) + explainTypes(foundType, req) } def WithFilterError(tree: Tree, ex: AbsTypeError) = { @@ -673,7 +672,6 @@ trait ContextErrors { private def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = { def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) 
msg.split(EOL).drop(1).headOption.getOrElse("?") else msg macroLogLite("macro expansion has failed: %s".format(msgForLog)) - val errorPos = if (pos != NoPosition) pos else (if (expandee.pos != NoPosition) expandee.pos else enclosingMacroPosition) if (msg != null) context.error(pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions setError(expandee) throw MacroExpansionException diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 5a9a4caea1..73efceb242 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -664,10 +664,6 @@ trait Implicits { // duplicating the code here, but this is probably a // hotspot (and you can't just call typed, need to force // re-typecheck) - // TODO: the return tree is ignored. This seems to make - // no difference, but it's bad practice regardless. - - val checked = itree2 match { case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args) case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c @@ -677,7 +673,7 @@ trait Implicits { if (context.hasErrors) fail("typing TypeApply reported errors for the implicit tree: " + context.errBuffer.head.errMsg) else { - val result = new SearchResult(itree2, subst) + val result = new SearchResult(checked, subst) if (Statistics.canEnable) Statistics.incCounter(foundImplicits) printInference("[success] found %s for pt %s".format(result, ptInstantiated)) result @@ -1205,7 +1201,7 @@ trait Implicits { } ) // todo. migrate hardcoded materialization in Implicits to corresponding implicit macros - var materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List())) + val materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List())) if (settings.XlogImplicits.value) println("materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer)) if (context.macrosEnabled) success(materializer) // don't call `failure` here. if macros are disabled, we just fail silently diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 61e4fb86a2..b96daa49e2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -312,7 +312,6 @@ trait Infer extends Checkable { if (sym.isError) { tree setSymbol sym setType ErrorType } else { - val topClass = context.owner.enclosingTopLevelClass if (context.unit.exists) context.unit.depends += sym.enclosingTopLevelClass diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index c95951e608..049348b0b8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -556,7 +556,7 @@ trait MethodSynthesis { // No Symbols available. 
private def beanAccessorsFromNames(tree: ValDef) = { - val ValDef(mods, name, tpt, _) = tree + val ValDef(mods, _, _, _) = tree val hasBP = mods hasAnnotationNamed tpnme.BeanPropertyAnnot val hasBoolBP = mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 99b927af66..5e537e3bb3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -337,7 +337,6 @@ trait Namers extends MethodSynthesis { } private def enterClassSymbol(tree: ClassDef, clazz: ClassSymbol): Symbol = { - val file = contextFile if (clazz.sourceFile != null && clazz.sourceFile != contextFile) debugwarn("!!! Source mismatch in " + clazz + ": " + clazz.sourceFile + " vs. " + contextFile) @@ -643,7 +642,7 @@ trait Namers extends MethodSynthesis { } def enterClassDef(tree: ClassDef) { - val ClassDef(mods, name, tparams, impl) = tree + val ClassDef(mods, _, _, impl) = tree val primaryConstructorArity = treeInfo.firstConstructorArgs(impl.body).size tree.symbol = enterClassSymbol(tree) tree.symbol setInfo completerOf(tree) @@ -1200,9 +1199,9 @@ trait Namers extends MethodSynthesis { // same local block several times (which can happen in interactive mode) we might // otherwise not find the default symbol, because the second time it the method // symbol will be re-entered in the scope but the default parameter will not. - val att = meth.attachments.get[DefaultsOfLocalMethodAttachment] match { + meth.attachments.get[DefaultsOfLocalMethodAttachment] match { case Some(att) => att.defaultGetters += default - case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default)) + case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default)) } } } else if (baseHasDefault) { diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 74acaba74a..f097aa6424 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -171,7 +171,7 @@ trait NamesDefaults { self: Analyzer => qual changeOwner (blockTyper.context.owner -> sym) val newQual = atPos(qual.pos.focus)(blockTyper.typedQualifier(Ident(sym.name))) - var baseFunTransformed = atPos(baseFun.pos.makeTransparent) { + val baseFunTransformed = atPos(baseFun.pos.makeTransparent) { // setSymbol below is important because the 'selected' function might be overloaded. by // assigning the correct method symbol, typedSelect will just assign the type. the reason // to still call 'typed' is to correctly infer singleton types, SI-5259. 
@@ -319,7 +319,7 @@ trait NamesDefaults { self: Analyzer => assert(isNamedApplyBlock(transformedFun), transformedFun) val NamedApplyInfo(qual, targs, vargss, blockTyper) = context.namedApplyBlockInfo.get._2 - val existingBlock @ Block(stats, funOnly) = transformedFun + val Block(stats, funOnly) = transformedFun // type the application without names; put the arguments in definition-site order val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 3f0a4d1548..60cd21cbf1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -1818,9 +1818,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def toString(x: AnyRef) = if (x eq null) "" else x.toString if (cols.isEmpty || cols.tails.isEmpty) cols map toString else { - val (colStrs, colLens) = cols map {c => val s = toString(c); (s, s.length)} unzip - val maxLen = max(colLens) - val avgLen = colLens.sum/colLens.length + val colLens = cols map (c => toString(c).length) + val maxLen = max(colLens) + val avgLen = colLens.sum/colLens.length val goalLen = maxLen min avgLen*2 def pad(s: String) = { val toAdd = ((goalLen - s.length) max 0) + 2 @@ -2263,9 +2263,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL private[this] val id: Int = Var.nextId // private[this] var canModify: Option[Array[StackTraceElement]] = None - private[this] def ensureCanModify = {} //if (canModify.nonEmpty) patmatDebug("BUG!"+ this +" modified after having been observed: "+ canModify.get.mkString("\n")) + private[this] def ensureCanModify() = {} //if (canModify.nonEmpty) patmatDebug("BUG!"+ this +" modified after having been observed: "+ canModify.get.mkString("\n")) - private[this] def observed = {} //canModify = Some(Thread.currentThread.getStackTrace) + private[this] def observed() = {} //canModify = Some(Thread.currentThread.getStackTrace) // don't access until all potential equalities have been registered using registerEquality private[this] val symForEqualsTo = new scala.collection.mutable.HashMap[Const, Sym] @@ -2418,7 +2418,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL private lazy val equalitySyms = {observed; symForEqualsTo.values.toList} // don't call until all equalities have been registered and registerNull has been called (if needed) - def describe = toString + ": " + staticTp + domain.map(_.mkString(" ::= ", " | ", "// "+ symForEqualsTo.keys)).getOrElse(symForEqualsTo.keys.mkString(" ::= ", " | ", " | ...")) + " // = " + path + def describe = { + def domain_s = domain match { + case Some(d) => d mkString (" ::= ", " | ", "// "+ symForEqualsTo.keys) + case _ => symForEqualsTo.keys mkString (" ::= ", " | ", " | ...") + } + s"$this: ${staticTp}${domain_s} // = $path" + } override def toString = "V"+ id } @@ -2504,7 +2510,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // corresponds to a type test that does not imply any value-equality (well, except for outer checks, which we don't model yet) sealed class TypeConst(val tp: Type) extends Const { assert(!(tp =:= NullTp)) - private[this] val id: Int = Const.nextTypeId + /*private[this] val id: Int = */ Const.nextTypeId val wideTp = widenToClass(tp) def isValue = false @@ -2552,7 +2558,7 @@ trait 
PatternMatching extends Transform with TypingTransformers with ast.TreeDSL sealed class ValueConst(val tp: Type, val wideTp: Type, override val toString: String) extends Const { // patmatDebug("VC"+(tp, wideTp, toString)) assert(!(tp =:= NullTp)) // TODO: assert(!tp.isStable) - private[this] val id: Int = Const.nextValueId + /*private[this] val id: Int = */Const.nextValueId def isValue = true } @@ -2778,7 +2784,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // when does the match fail? val matchFails = Not(\/(symbolicCases)) - val vars = gatherVariables(matchFails) // debug output: patmatDebug("analysing:") diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index c04a8661b2..5b2fbb4fd0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -237,7 +237,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case class MixinOverrideError(member: Symbol, msg: String) - var mixinOverrideErrors = new ListBuffer[MixinOverrideError]() + val mixinOverrideErrors = new ListBuffer[MixinOverrideError]() def printMixinOverrideErrors() { mixinOverrideErrors.toList match { @@ -1217,7 +1217,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans /* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */ def toConstructor(pos: Position, tpe: Type): Tree = { - var rtpe = tpe.finalResultType + val rtpe = tpe.finalResultType assert(rtpe.typeSymbol hasFlag CASE, tpe); localTyper.typedOperator { atPos(pos) { @@ -1298,7 +1298,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } case ModuleDef(_, _, _) => eliminateModuleDefs(tree) case ValDef(_, _, _, _) => - val tree1 @ ValDef(_, _, _, rhs) = transform(tree) // important to do before forward reference check + val tree1 = transform(tree) // important to do before forward reference check if (tree1.symbol.isLazy) tree1 :: Nil else { val lazySym = tree.symbol.lazyAccessorOrSelf @@ -1540,7 +1540,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans tree } private def transformSelect(tree: Select): Tree = { - val Select(qual, name) = tree + val Select(qual, _) = tree val sym = tree.symbol /** Note: if a symbol has both @deprecated and @migration annotations and both diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index b8b34ce738..90d265d7b3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -390,7 +390,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT * typed. 
*/ private def makeAccessor(tree: Select, targs: List[Tree]): Tree = { - val Select(qual, name) = tree + val Select(qual, _) = tree val sym = tree.symbol val clazz = hostForAccessorOf(sym, currentClass) diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 4bcdb177ae..f7cd89144a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -109,9 +109,6 @@ trait SyntheticMethods extends ast.TreeDSL { gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(mkThis)) ) } - def projectionMethod(accessor: Symbol, num: Int) = { - createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor)) - } /** Common code for productElement and (currently disabled) productElementName */ @@ -203,10 +200,15 @@ trait SyntheticMethods extends ast.TreeDSL { /** The _1, _2, etc. methods to implement ProductN, disabled * until we figure out how to introduce ProductN without cycles. */ - def productNMethods = { + /**** + def productNMethods = { val accs = accessors.toIndexedSeq 1 to arity map (num => productProj(arity, num) -> (() => projectionMethod(accs(num - 1), num))) } + def projectionMethod(accessor: Symbol, num: Int) = { + createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor)) + } + ****/ // methods for both classes and objects def productMethods = { @@ -327,7 +329,6 @@ trait SyntheticMethods extends ast.TreeDSL { def isRewrite(sym: Symbol) = sym.isCaseAccessorMethod && !sym.isPublic for (ddef @ DefDef(_, _, _, _, _, _) <- templ.body ; if isRewrite(ddef.symbol)) { - val original = ddef.symbol val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc => newAcc.makePublic newAcc resetFlag (ACCESSOR | PARAMACCESSOR) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c798e38e92..3d80df405d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1197,9 +1197,9 @@ trait Typers extends Modes with Adaptations with Tags { val found = tree.tpe if (!found.isErroneous && !pt.isErroneous) { if ((!context.reportErrors && isPastTyper) || tree.attachments.get[MacroExpansionAttachment].isDefined) { - val (bound, req) = pt match { - case ExistentialType(qs, tpe) => (qs, tpe) - case _ => (Nil, pt) + val bound = pt match { + case ExistentialType(qs, _) => qs + case _ => Nil } val boundOrSkolems = bound ++ pt.skolemsExceptMethodTypeParams if (boundOrSkolems.nonEmpty) { @@ -1519,7 +1519,6 @@ trait Typers extends Modes with Adaptations with Tags { val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x)) (stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate) } - val cstats1 = if (superCall == EmptyTree) preSuperStats else preSuperStats :+ superCall val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall match { case Apply(_, _) if supertparams.nonEmpty => transformSuperCall(superCall) case _ => cunit.duplicate @@ -1805,7 +1804,7 @@ trait Typers extends Modes with Adaptations with Tags { def typedTemplate(templ: Template, parents1: List[Tree]): Template = { val clazz = context.owner // complete lazy annotations - val annots = clazz.annotations + clazz.annotations if (templ.symbol == NoSymbol) templ setSymbol 
clazz.newLocalDummy(templ.pos) val self1 = templ.self match { @@ -1886,8 +1885,8 @@ trait Typers extends Modes with Adaptations with Tags { val typedMods = typedModifiers(vdef.mods) // complete lazy annotations - val annots = sym.annotations - var tpt1 = checkNoEscaping.privates(sym, typer1.typedType(vdef.tpt)) + sym.annotations + val tpt1 = checkNoEscaping.privates(sym, typer1.typedType(vdef.tpt)) checkNonCyclic(vdef, tpt1) if (sym.hasAnnotation(definitions.VolatileAttr) && !sym.isMutable) @@ -2123,13 +2122,13 @@ trait Typers extends Modes with Adaptations with Tags { val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef) // complete lazy annotations - val annots = meth.annotations + meth.annotations for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1) if (isRepeatedParamType(vparam1.symbol.tpe)) StarParamNotLastError(vparam1) - var tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt)) + val tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt)) checkNonCyclic(ddef, tpt1) ddef.tpt.setType(tpt1.tpe) val typedMods = typedModifiers(ddef.mods) @@ -2199,7 +2198,7 @@ trait Typers extends Modes with Adaptations with Tags { val tparams1 = tdef.tparams mapConserve typedTypeDef val typedMods = typedModifiers(tdef.mods) // complete lazy annotations - val annots = tdef.symbol.annotations + tdef.symbol.annotations // @specialized should not be pickled when compiling with -no-specialize if (settings.nospecialization.value && currentRun.compiles(tdef.symbol)) { @@ -3744,11 +3743,11 @@ trait Typers extends Modes with Adaptations with Tags { if (wc.symbol == NoSymbol) { namer.enterSym(wc); wc.symbol setFlag EXISTENTIAL } else context.scope enter wc.symbol val whereClauses1 = typedStats(tree.whereClauses, context.owner) - for (vd @ ValDef(_, _, _, _) <- tree.whereClauses) + for (vd @ ValDef(_, _, _, _) <- whereClauses1) if (vd.symbol.tpe.isVolatile) AbstractionFromVolatileTypeError(vd) val tpt1 = typedType(tree.tpt, mode) - existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) => + existentialTransform(whereClauses1 map (_.symbol), tpt1.tpe)((tparams, tp) => TypeTree(newExistentialType(tparams, tp)) setOriginal tree ) } @@ -4775,7 +4774,8 @@ trait Typers extends Modes with Adaptations with Tags { * (2) Change imported symbols to selections */ def typedIdent(tree: Tree, name: Name): Tree = { - def emptyPackageOk = settings.exposeEmptyPackage.value // setting to enable unqualified idents in empty package + // setting to enable unqualified idents in empty package + def inEmptyPackage = if (settings.exposeEmptyPackage.value) lookupInEmpty(name) else NoSymbol def issue(err: AbsTypeError) = { // Avoiding some spurious error messages: see SI-2388. 
@@ -4791,17 +4791,15 @@ trait Typers extends Modes with Adaptations with Tags { case NoSymbol => startContext.lookupSymbol(name, qualifies) case sym => LookupSucceeded(EmptyTree, sym) } - val defSym = ( - nameLookup.symbol - orElse ( if (emptyPackageOk) lookupInEmpty(name) else NoSymbol ) - orElse (lookupInRoot(name) andAlso (sym => return typed1(tree setSymbol sym, mode, pt))) - orElse (context.owner newErrorSymbol name) - ) import InferErrorGen._ nameLookup match { case LookupAmbiguous(msg) => issue(AmbiguousIdentError(tree, name, msg)) case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg)) - case LookupNotFound => issue(SymbolNotFoundError(tree, name, context.owner, startContext)) + case LookupNotFound => + inEmptyPackage orElse lookupInRoot(name) match { + case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext)) + case sym => typed1(tree setSymbol sym, mode, pt) + } case LookupSucceeded(qual, sym) => // this -> Foo.this if (sym.isThisSym) @@ -4905,7 +4903,7 @@ trait Typers extends Modes with Adaptations with Tags { val pid1 = typedQualifier(pdef.pid).asInstanceOf[RefTree] assert(sym.moduleClass ne NoSymbol, sym) // complete lazy annotations - val annots = sym.annotations + sym.annotations val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls)) .typedStats(pdef.stats, NoSymbol) treeCopy.PackageDef(tree, pid1, stats1) setType NoType @@ -5225,7 +5223,7 @@ trait Typers extends Modes with Adaptations with Tags { } alreadyTyped = tree.tpe ne null - var tree1: Tree = if (alreadyTyped) tree else { + val tree1: Tree = if (alreadyTyped) tree else { printTyping( ptLine("typing %s: pt = %s".format(ptTree(tree), pt), "undetparams" -> context.undetparams, diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index bc8ded62d8..996ff00d36 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -29,8 +29,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, factorySelf.mirror.classLoader) lazy val mirror: u.Mirror = u.runtimeMirror(classLoader) - class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: Reporter) - extends ReflectGlobal(settings, reporter, toolBoxSelf.classLoader) { + class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter0: Reporter) + extends ReflectGlobal(settings, reporter0, toolBoxSelf.classLoader) { import definitions._ private val trace = scala.tools.nsc.util.trace when settings.debug.value @@ -73,13 +73,14 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => val typed = expr filter (t => t.tpe != null && t.tpe != NoType && !t.isInstanceOf[TypeTree]) if (!typed.isEmpty) throw ToolBoxError("reflective toolbox has failed: cannot operate on trees that are already typed") - val freeTypes = expr.freeTypes - if (freeTypes.length > 0) { - var msg = "reflective toolbox has failed:" + EOL - msg += "unresolved free type variables (namely: " + (freeTypes map (ft => "%s %s".format(ft.name, ft.origin)) mkString ", ") + "). " - msg += "have you forgot to use TypeTag annotations for type parameters external to a reifee? 
" - msg += "if you have troubles tracking free type variables, consider using -Xlog-free-types" - throw ToolBoxError(msg) + if (expr.freeTypes.nonEmpty) { + val ft_s = expr.freeTypes map (ft => s" ${ft.name} ${ft.origin}") mkString "\n " + throw ToolBoxError(s""" + |reflective toolbox failed due to unresolved free type variables: + |$ft_s + |have you forgotten to use TypeTag annotations for type parameters external to a reifee? + |if you have troubles tracking free type variables, consider using -Xlog-free-types + """.stripMargin.trim) } } @@ -100,7 +101,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => if (namesakes.length > 0) name += ("$" + (namesakes.length + 1)) freeTermNames += (ft -> newTermName(name + nme.REIFY_FREE_VALUE_SUFFIX)) }) - var expr = new Transformer { + val expr = new Transformer { override def transform(tree: Tree): Tree = if (tree.hasSymbolField && tree.symbol.isFreeTerm) { tree match { @@ -132,7 +133,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => val ownerClass = rootMirror.EmptyPackageClass.newClassSymbol(newTypeName("")) build.setTypeSignature(ownerClass, ClassInfoType(List(ObjectClass.tpe), newScope, ownerClass)) val owner = ownerClass.newLocalDummy(expr.pos) - var currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner)) + val currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner)) val wrapper1 = if (!withImplicitViewsDisabled) (currentTyper.context.withImplicitsEnabled[Tree] _) else (currentTyper.context.withImplicitsDisabled[Tree] _) val wrapper2 = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _) def wrapper (tree: => Tree) = wrapper1(wrapper2(tree)) @@ -146,7 +147,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => case Block(dummies, unwrapped) => (dummies, unwrapped) case unwrapped => (Nil, unwrapped) } - var invertedIndex = freeTerms map (_.swap) + val invertedIndex = freeTerms map (_.swap) // todo. 
also fixup singleton types unwrapped = new Transformer { override def transform(tree: Tree): Tree = @@ -202,7 +203,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => def wrap(expr0: Tree): ModuleDef = { val (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = true) - val (obj, mclazz) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol( + val (obj, _) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol( nextWrapperModuleName()) val minfo = ClassInfoType(List(ObjectClass.tpe), newScope, obj.moduleClass) @@ -235,7 +236,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => NoPosition)) trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value)) - var cleanedUp = resetLocalAttrs(moduledef) + val cleanedUp = resetLocalAttrs(moduledef) trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value)) cleanedUp.asInstanceOf[ModuleDef] } @@ -353,8 +354,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => def typeCheck(tree: u.Tree, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = compiler.withCleanupCaches { if (compiler.settings.verbose.value) println("importing "+tree+", expectedType = "+expectedType) - var ctree: compiler.Tree = importer.importTree(tree) - var cexpectedType: compiler.Type = importer.importType(expectedType) + val ctree: compiler.Tree = importer.importTree(tree) + val cexpectedType: compiler.Type = importer.importType(expectedType) if (compiler.settings.verbose.value) println("typing "+ctree+", expectedType = "+expectedType) val ttree: compiler.Tree = compiler.typeCheck(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled) @@ -373,9 +374,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = compiler.withCleanupCaches { if (compiler.settings.verbose.value) println("importing "+pt, ", tree = "+tree+", pos = "+pos) - var ctree: compiler.Tree = importer.importTree(tree) - var cpt: compiler.Type = importer.importType(pt) - var cpos: compiler.Position = importer.importPosition(pos) + val ctree: compiler.Tree = importer.importTree(tree) + val cpt: compiler.Type = importer.importType(pt) + val cpos: compiler.Position = importer.importPosition(pos) if (compiler.settings.verbose.value) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled)) val itree: compiler.Tree = compiler.inferImplicit(ctree, cpt, isView = isView, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = cpos) @@ -409,7 +410,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => def compile(tree: u.Tree): () => Any = { if (compiler.settings.verbose.value) println("importing "+tree) - var ctree: compiler.Tree = importer.importTree(tree) + val ctree: compiler.Tree = importer.importTree(tree) if (compiler.settings.verbose.value) println("compiling "+ctree) compiler.compile(ctree) diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index a1749a480b..33b6ab4165 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -103,7 +103,7 @@ trait 
SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ def segmentLength(p: A => Boolean, from: Int): Int = { var i = 0 - var it = iterator.drop(from) + val it = iterator.drop(from) while (it.hasNext && p(it.next())) i += 1 i @@ -111,7 +111,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ def indexWhere(p: A => Boolean, from: Int): Int = { var i = from - var it = iterator.drop(from) + val it = iterator.drop(from) while (it.hasNext) { if (p(it.next())) return i else i += 1 @@ -177,10 +177,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ result } private def swap(i: Int, j: Int) { - var tmpI = idxs(i) + val tmpI = idxs(i) idxs(i) = idxs(j) idxs(j) = tmpI - var tmpE = elms(i) + val tmpE = elms(i) elms(i) = elms(j) elms(j) = tmpE } @@ -777,7 +777,7 @@ object SeqLike { val iter = S.iterator.drop(m0) val Wopt = kmpOptimizeWord(W, n0, n1, true) val T = kmpJumpTable(Wopt, n1-n0) - var cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind + val cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind var largest = 0 var i, m = 0 var answer = -1 diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index b0736ecace..231f8157e4 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -545,7 +545,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba // removed (those existing when the op began) // - if there are only null-i-nodes below, returns null def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = { - var bmp = bitmap + val bmp = bitmap var i = 0 val arr = array val tmparray = new Array[BasicNode](arr.length) diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index ee41e2aa3c..9b6183c0a4 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -471,9 +471,6 @@ time { mNew.iterator.foreach( p => ()) } // condition below is due to 2 things: // 1) no unsigned int compare on JVM // 2) 0 (no lsb) should always be greater in comparison - val a = thislsb - 1 - val b = thatlsb - 1 - if (unsignedCompare(thislsb - 1, thatlsb - 1)) { val m = thiselems(thisi) totalelems += m.size diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala index e8e904f1f9..3f1c5ea57a 100644 --- a/src/library/scala/collection/immutable/TrieIterator.scala +++ b/src/library/scala/collection/immutable/TrieIterator.scala @@ -177,7 +177,6 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e if (depth > 0) { // 2) topmost comes before (is not) arrayD // steal a portion of top to create a new iterator - val topmost = arrayStack(0) if (posStack(0) == arrayStack(0).length - 1) { // 2a) only a single entry left on top // this means we have to modify this iterator - pop topmost diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index 1f90436636..7e1f3eadd0 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -242,8 +242,8 @@ override def companion: GenericCompanion[Vector] = Vector private[immutable] def appendFront[B>:A](value: B): 
Vector[B] = { if (endIndex != startIndex) { - var blockIndex = (startIndex - 1) & ~31 - var lo = (startIndex - 1) & 31 + val blockIndex = (startIndex - 1) & ~31 + val lo = (startIndex - 1) & 31 if (startIndex != blockIndex + 32) { val s = new Vector(startIndex - 1, endIndex, blockIndex) @@ -339,8 +339,8 @@ override def companion: GenericCompanion[Vector] = Vector // //println("------- append " + value) // debug() if (endIndex != startIndex) { - var blockIndex = endIndex & ~31 - var lo = endIndex & 31 + val blockIndex = endIndex & ~31 + val lo = endIndex & 31 if (endIndex != blockIndex) { //println("will make writable block (from "+focus+") at: " + blockIndex) @@ -574,9 +574,7 @@ override def companion: GenericCompanion[Vector] = Vector } private def dropFront0(cutIndex: Int): Vector[A] = { - var blockIndex = cutIndex & ~31 - var lo = cutIndex & 31 - + val blockIndex = cutIndex & ~31 val xor = cutIndex ^ (endIndex - 1) val d = requiredDepth(xor) val shift = (cutIndex & ~((1 << (5*d))-1)) @@ -606,9 +604,7 @@ override def companion: GenericCompanion[Vector] = Vector } private def dropBack0(cutIndex: Int): Vector[A] = { - var blockIndex = (cutIndex - 1) & ~31 - var lo = ((cutIndex - 1) & 31) + 1 - + val blockIndex = (cutIndex - 1) & ~31 val xor = startIndex ^ (cutIndex - 1) val d = requiredDepth(xor) val shift = (startIndex & ~((1 << (5*d))-1)) diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala index 74f576b0f7..f1301d2011 100644 --- a/src/library/scala/collection/mutable/FlatHashTable.scala +++ b/src/library/scala/collection/mutable/FlatHashTable.scala @@ -266,7 +266,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { val totalbuckets = totalSizeMapBuckets var bucketidx = 0 var tableidx = 0 - var tbl = table + val tbl = table var tableuntil = sizeMapBucketSize min tbl.length while (bucketidx < totalbuckets) { var currbucketsz = 0 diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index bced92e663..0a61b537ce 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -269,7 +269,6 @@ final class ListBuffer[A] if (exported) copy() val n1 = n max 0 val count1 = count min (len - n1) - var old = start.head if (n1 == 0) { var c = count1 while (c > 0) { diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala index d6b75202da..af32faf0aa 100644 --- a/src/library/scala/collection/parallel/Tasks.scala +++ b/src/library/scala/collection/parallel/Tasks.scala @@ -66,7 +66,6 @@ trait Task[R, +Tp] { private[parallel] def tryMerge(t: Tp @uncheckedVariance) { val that = t.asInstanceOf[Task[R, Tp]] - val local = result // ensure that any effects of modifying `result` are detected if (this.throwable == null && that.throwable == null) merge(t) mergeThrowables(that) } @@ -167,7 +166,6 @@ trait AdaptiveWorkStealingTasks extends Tasks { while (last.next != null) { // val lastresult = Option(last.body.result) - val beforelast = last last = last.next if (last.tryCancel()) { // println("Done with " + beforelast.body + ", next direct is " + last.body) diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index 5ac2725f11..7527c9a71a 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ 
b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -184,7 +184,7 @@ self => override def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop) override def sum[U >: T](implicit num: Numeric[U]): U = { - var s = sum_quick(num, arr, until, i, num.zero) + val s = sum_quick(num, arr, until, i, num.zero) i = until s } @@ -200,7 +200,7 @@ self => } override def product[U >: T](implicit num: Numeric[U]): U = { - var p = product_quick(num, arr, until, i, num.one) + val p = product_quick(num, arr, until, i, num.one) i = until p } @@ -432,7 +432,7 @@ self => private def filter2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) { var j = i while(j < ntil) { - var curr = a(j).asInstanceOf[T] + val curr = a(j).asInstanceOf[T] if (pred(curr)) cb += curr j += 1 } @@ -447,7 +447,7 @@ self => private def filterNot2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) { var j = i while(j < ntil) { - var curr = a(j).asInstanceOf[T] + val curr = a(j).asInstanceOf[T] if (!pred(curr)) cb += curr j += 1 } diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala index 3b2c66763e..d8f846dd10 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala @@ -231,7 +231,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau def setSize(sz: Int) = tableSize = sz def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = { var h = index(elemHashCode(e.key)) - var olde = table(h).asInstanceOf[DefaultEntry[K, V]] + val olde = table(h).asInstanceOf[DefaultEntry[K, V]] // check if key already exists var ce = olde diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala index 22f22c8305..cbfb09bfdd 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala @@ -263,12 +263,12 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] { (elemsIn + leftoversIn, elemsLeft concat leftoversLeft) } private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[Any]): (Int, UnrolledBuffer[Any]) = { - var leftovers = new UnrolledBuffer[Any] + val leftovers = new UnrolledBuffer[Any] var inserted = 0 var unrolled = elems.headPtr var i = 0 - var t = table + val t = table while (unrolled ne null) { val chunkarr = unrolled.array val chunksz = unrolled.size diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala index bb9a7b7823..b203ef8e5d 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala @@ -110,7 +110,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec } else Seq(this.asInstanceOf[IterRepr]) private def convertToArrayBuffer(chainhead: Entry): mutable.ArrayBuffer[T] = { - var buff = mutable.ArrayBuffer[Entry]() + val buff = mutable.ArrayBuffer[Entry]() var curr = chainhead while (curr ne null) { buff += curr diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala index 
68f37137f8..f78de073d6 100644 --- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala +++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala @@ -26,7 +26,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz) // public method with private[mutable] type ExposedArrayBuffer in parameter type; cannot be overridden. - def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c) + final def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c) def allocateAndCopy = if (chain.size > 1) { val arrayseq = new ArraySeq[T](size) diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala index 7466f2ac3e..43cb8a6138 100644 --- a/src/library/scala/collection/script/Message.scala +++ b/src/library/scala/collection/script/Message.scala @@ -69,7 +69,7 @@ class Script[A] extends ArrayBuffer[Message[A]] with Message[A] { override def toString(): String = { var res = "Script(" - var it = this.iterator + val it = this.iterator var i = 1 while (it.hasNext) { if (i > 1) diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala index b648d179c6..07d5d2ff08 100644 --- a/src/library/scala/util/automata/WordBerrySethi.scala +++ b/src/library/scala/util/automata/WordBerrySethi.scala @@ -140,7 +140,6 @@ abstract class WordBerrySethi extends BaseBerrySethi { val delta1 = immutable.Map(deltaq.zipWithIndex map (_.swap): _*) val finalsArr = (0 until pos map (k => finals.getOrElse(k, 0))).toArray // 0 == not final - val initialsArr = initials.toArray val deltaArr: Array[mutable.Map[_labelT, immutable.BitSet]] = (0 until pos map { x => diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala index 3366584ab2..115741b9e9 100644 --- a/src/library/scala/util/parsing/input/OffsetPosition.scala +++ b/src/library/scala/util/parsing/input/OffsetPosition.scala @@ -22,7 +22,7 @@ case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends P /** An index that contains all line starts, including first line, and eof. 
*/ private lazy val index: Array[Int] = { - var lineStarts = new ArrayBuffer[Int] + val lineStarts = new ArrayBuffer[Int] lineStarts += 0 for (i <- 0 until source.length) if (source.charAt(i) == '\n') lineStarts += (i + 1) diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala index da82aca33a..8c0a101c2a 100755 --- a/src/library/scala/xml/PrettyPrinter.scala +++ b/src/library/scala/xml/PrettyPrinter.scala @@ -47,7 +47,6 @@ class PrettyPrinter(width: Int, step: Int) { val tmp = width - cur if (s.length <= tmp) return List(Box(ind, s)) - val sb = new StringBuilder() var i = s indexOf ' ' if (i > tmp || i == -1) throw new BrokenException() // cannot break diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala index f97da1c8a3..5260d87b04 100644 --- a/src/library/scala/xml/dtd/ElementValidator.scala +++ b/src/library/scala/xml/dtd/ElementValidator.scala @@ -61,7 +61,7 @@ class ElementValidator() extends Function1[Node,Boolean] { */ def check(md: MetaData): Boolean = { val len: Int = exc.length - var ok = new mutable.BitSet(adecls.length) + val ok = new mutable.BitSet(adecls.length) for (attr <- md) { def attrStr = attr.value.toString diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala index 52ddf6b476..ac5a8c8331 100644 --- a/src/library/scala/xml/include/sax/XIncludeFilter.scala +++ b/src/library/scala/xml/include/sax/XIncludeFilter.scala @@ -275,7 +275,7 @@ class XIncludeFilter extends XMLFilterImpl { try { val uc = source.openConnection() val in = new BufferedInputStream(uc.getInputStream()) - var encodingFromHeader = uc.getContentEncoding() + val encodingFromHeader = uc.getContentEncoding() var contentType = uc.getContentType() if (encodingFromHeader != null) encoding = encodingFromHeader diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala index d4dc6da14d..8f8c25805c 100755 --- a/src/library/scala/xml/parsing/MarkupParser.scala +++ b/src/library/scala/xml/parsing/MarkupParser.scala @@ -154,7 +154,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests var info_enc: Option[String] = None var info_stdl: Option[Boolean] = None - var m = xmlProcInstr() + val m = xmlProcInstr() var n = 0 if (isProlog) @@ -303,10 +303,8 @@ trait MarkupParser extends MarkupParserCommon with TokenTests var scope: NamespaceBinding = pscope var aMap: MetaData = Null while (isNameStart(ch)) { - val pos = this.pos - val qname = xName - val _ = xEQ + xEQ // side effect val value = xAttributeValue() Utility.prefix(qname) match { @@ -423,7 +421,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests * content1 ::= '<' content1 | '&' charref ... * }}} */ def content(pscope: NamespaceBinding): NodeSeq = { - var ts = new NodeBuffer + val ts = new NodeBuffer var exit = eof // todo: optimize seq repr. def done = new NodeSeq { val theSeq = ts.toList } @@ -582,7 +580,6 @@ trait MarkupParser extends MarkupParserCommon with TokenTests var exit = false while (! 
exit) { putChar(ch) - val opos = pos nextch exit = eof || ( ch == '<' ) || ( ch == '&' ) @@ -828,7 +825,6 @@ trait MarkupParser extends MarkupParserCommon with TokenTests * }}} */ def entityDecl() = { var isParameterEntity = false - var entdef: EntityDef = null xToken("NTITY") xSpace if ('%' == ch) { diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala index 0ed5e3f3bb..413b08ddd8 100644 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala +++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala @@ -96,7 +96,7 @@ abstract class ILPrinterVisitor extends Visitor { protected def println(s: String){ print(s); println() } protected def println(o: Object){ print(o); println() } protected def printName(name: String) { - var ch = name.charAt(0) + val ch = name.charAt(0) //if (Character.isLetter(ch) && Character.isLowerCase(ch)) { if ((ch != '.') && (ch != '!')) { print('\''); print(name); print('\'') @@ -174,7 +174,6 @@ abstract class ILPrinterVisitor extends Visitor { print(constraintFlags(tVar)) if(tVar.Constraints.length > 0) { print('(') - val lastCnstrtIdx = tVar.Constraints.length - 1 for (ic <- 0 until tVar.Constraints.length) { val cnstrt = tVar.Constraints(ic) printReference(cnstrt) @@ -211,7 +210,7 @@ abstract class ILPrinterVisitor extends Visitor { print(" extends ") printReference(`type`.BaseType()) } - var ifaces: Array[Type] = `type`.getInterfaces() + val ifaces: Array[Type] = `type`.getInterfaces() if (ifaces.length > 0) { println() print(" implements ") @@ -331,7 +330,7 @@ abstract class ILPrinterVisitor extends Visitor { def msilSyntaxDouble(valDou: java.lang.Double) : String = { // !!! check if encoding is correct - var bits = java.lang.Double.doubleToRawLongBits(valDou.doubleValue()) + val bits = java.lang.Double.doubleToRawLongBits(valDou.doubleValue()) /* see p. 170 in Lidin's book Expert .NET 2.0 IL Assembler */ /* Note: no value is equal to Nan, including NaN. Thus, x == Double.NaN always evaluates to false. */ val res = if (valDou.isNaN) "0xffffffffffffffff /* NaN */ " /* TODO this is 'quiet NaN, http://www.savrola.com/resources/NaN.html , what's the difference with a 'signaling NaN'?? 
*/ @@ -452,7 +451,7 @@ abstract class ILPrinterVisitor extends Visitor { */ @throws(classOf[IOException]) def caseOpCode(opCode: OpCode) { - var opString = opCode.toString() + val opString = opCode.toString() print(opString) pad(14 - opString.length()) @@ -661,7 +660,7 @@ abstract class ILPrinterVisitor extends Visitor { print(' '); printReference(method.DeclaringType) print("::"); printName(method.Name) - var params = method.GetParameters() + val params = method.GetParameters() print("(") for (i <- 0 until params.length) { if (i > 0) print(", ") @@ -744,7 +743,7 @@ abstract class ILPrinterVisitor extends Visitor { } def printAttributes(icap: ICustomAttributeProvider) { - var attrs = icap.GetCustomAttributes(false) + val attrs = icap.GetCustomAttributes(false) for (i <- 0 until attrs.length) { print(".custom ") printSignature((attrs(i).asInstanceOf[Attribute]).getConstructor()) @@ -767,7 +766,7 @@ object ILPrinterVisitor { def hasControlChars(str: String): Boolean = { for(i <- 0 until str.length()) { - var ch = str.charAt(i) + val ch = str.charAt(i) ch match { case '\b' => case '\t' => @@ -789,7 +788,7 @@ object ILPrinterVisitor { case e : java.io.UnsupportedEncodingException => throw new RuntimeException(e) } } - var str = new StringBuffer(s) + val str = new StringBuffer(s) var ss = EMPTY var i = 0 while(i < str.length()) { @@ -834,7 +833,7 @@ object ILPrinterVisitor { final var primitive = scala.collection.mutable.Map.empty[Type, String] def addPrimitive(name: String, sig: String) { - var `type` = + val `type` = Type.GetType(name) assert(`type` != null, "Cannot lookup primitive type " + `type`) primitive.put(`type`, sig) diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala index 981e855e0e..2319d5ca27 100644 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala +++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala @@ -73,7 +73,7 @@ class ModuleBuilder(name: String, fullname: String, scopeName: String, assembly: baseType: Type, interfaces: Array[Type]): TypeBuilder = { - var t: Type = GetType(typeName) // Module.GetType(String) + val t: Type = GetType(typeName) // Module.GetType(String) if (t != null) throw new RuntimeException ("Type [" + Assembly + "]" + typeName + "' already exists!") diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala index 55c52109b6..bbbbf40508 100644 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala +++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala @@ -41,7 +41,7 @@ final class MultipleFilesILPrinterVisitor(destPath: String, sourceFilesPath: Str scala.util.Sorting.quickSort(as)(assemblyNameComparator) // Arrays.sort(as, assemblyNameComparator) // print each module - var m: Array[Module] = assemblyBuilder.GetModules() + val m: Array[Module] = assemblyBuilder.GetModules() nomembers = true for(i <- 0 until m.length) { print(m(i).asInstanceOf[ModuleBuilder]) @@ -68,10 +68,10 @@ final class MultipleFilesILPrinterVisitor(destPath: String, sourceFilesPath: Str if (!module.globalsCreated) module.CreateGlobalFunctions() - var m: Array[MethodInfo] = module.GetMethods() + val m: Array[MethodInfo] = module.GetMethods() // "Types" contain all the classes - var t: Array[Type] = module.GetTypes() + val t: Array[Type] = module.GetTypes() for(i <- 0 until t.length) { val tBuilder = 
t(i).asInstanceOf[TypeBuilder] val sourceFilename = tBuilder.sourceFilename @@ -108,7 +108,7 @@ final class MultipleFilesILPrinterVisitor(destPath: String, sourceFilesPath: Str // now write the global methods (typically contains the "main" method) if(!nomembers) { - var globalMethods: File = new File(destPath, ILPrinterVisitor.currAssembly.GetName().Name + ".msil") + val globalMethods: File = new File(destPath, ILPrinterVisitor.currAssembly.GetName().Name + ".msil") val append = assemblyBuilder.generatedFiles.contains(globalMethods.getPath) out = new PrintWriter(new BufferedWriter(new FileWriter(globalMethods, append))) diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala index 5d59d4d25a..50e9f45373 100644 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala +++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala @@ -48,7 +48,7 @@ final class SingleFileILPrinterVisitor(_fileName: String) extends ILPrinterVisit printAssemblyBoilerplate() // print each module - var m: Array[Module] = assemblyBuilder.GetModules() + val m: Array[Module] = assemblyBuilder.GetModules() nomembers = true for(i <- 0 until m.length) { print(m(i).asInstanceOf[ModuleBuilder]) @@ -78,12 +78,12 @@ final class SingleFileILPrinterVisitor(_fileName: String) extends ILPrinterVisit if (!module.globalsCreated) module.CreateGlobalFunctions() - var m: Array[MethodInfo] = module.GetMethods() + val m: Array[MethodInfo] = module.GetMethods() for(i <- 0 until m.length) { print(m(i).asInstanceOf[MethodBuilder]) } - var t: Array[Type] = module.GetTypes() + val t: Array[Type] = module.GetTypes() for(i <- 0 until t.length) { print(t(i).asInstanceOf[TypeBuilder]) } diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala index 57dc883898..0b0b16da65 100644 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala +++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala @@ -221,7 +221,7 @@ class TypeBuilder (module: Module, attributes: Int, fullName: String, baseType: object TypeBuilder { def types2String(types: Array[Type]): String = { - var s = new StringBuffer("(") + val s = new StringBuffer("(") for(i <- 0 until types.length) { if (i > 0) s.append(", ") s.append(types(i)) diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala index b8a41aabe4..f399b86029 100644 --- a/src/partest/scala/tools/partest/ScaladocModelTest.scala +++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala @@ -86,10 +86,7 @@ abstract class ScaladocModelTest extends DirectTest { private[this] def newDocFactory: DocFactory = { settings = new Settings(_ => ()) settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! 
- val args = extraSettings + " " + scaladocSettings - val command = new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) - val docFact = new DocFactory(new ConsoleReporter(settings), settings) - docFact + new DocFactory(new ConsoleReporter(settings), settings) } // compile with scaladoc and output the result diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala index 0f2806214f..1b6fd410cb 100644 --- a/src/partest/scala/tools/partest/nest/CompileManager.scala +++ b/src/partest/scala/tools/partest/nest/CompileManager.scala @@ -71,7 +71,6 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler { } private def updatePluginPath(options: String): String = { - val dir = fileManager.testRootDir def absolutize(path: String) = Path(path) match { case x if x.isAbsolute => x.path case x => (fileManager.testRootDir / x).toAbsolute.path diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala index 32f14872ec..a517763e04 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala @@ -148,11 +148,6 @@ class ConsoleFileManager extends FileManager { latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar") } - val dists = testParent / "dists" - val build = testParent / "build" - // in case of an installed dist, testRootDir is one level deeper - val bin = testParent.parent / "bin" - def mostRecentOf(base: String, names: String*) = names map (x => prefixFile(base + "/" + x).lastModified) reduceLeft (_ max _) diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala index dddc10b251..a3ce45c86b 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala @@ -26,8 +26,6 @@ class ConsoleRunner extends DirectRunner { private def antFilter(p: Path) = p.isFile && (p endsWith "build.xml") val testSets = { - val pathFilter: Path => Boolean = x => x.isDirectory || (x hasExtension "scala") - List( TestSet("pos", stdFilter, "Testing compiler (on files whose compilation should succeed)"), TestSet("neg", stdFilter, "Testing compiler (on files whose compilation should fail)"), diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala index cce717cddf..2d06cad7e4 100644 --- a/src/partest/scala/tools/partest/nest/RunnerManager.scala +++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala @@ -855,9 +855,8 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP if (fileManager.failed && !runner.logFile.canRead) return TestState.Ok - // sys addShutdownHook cleanup() - val ((success, ctx), elapsed) = timed(runner.run()) - val state = if (success) TestState.Ok else TestState.Fail + val (success, ctx) = runner.run() + val state = if (success) TestState.Ok else TestState.Fail runner.reportResult(ctx.writers) state diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala index 1e8161aeef..2a10d89234 100644 --- a/src/reflect/scala/reflect/api/Printers.scala +++ b/src/reflect/scala/reflect/api/Printers.scala @@ -7,7 +7,7 @@ import java.io.{ PrintWriter, StringWriter } * * === Printing Trees === * The method `show` displays the "prettified" representation of 
reflection artifacts. - * This representation provides one with the desugared Java representation of Scala code. + * This representation provides one with the desugared Java representation of Scala code. * For example: * * {{{ @@ -30,7 +30,7 @@ import java.io.{ PrintWriter, StringWriter } * () * } * }}} - * + * * The method `showRaw` displays internal structure of a given reflection object * as a Scala abstract syntax tree (AST), the representation that the Scala typechecker * operates on. @@ -54,7 +54,7 @@ import java.io.{ PrintWriter, StringWriter } * Literal(Constant(2))))))), * Literal(Constant(()))) * }}} - * + * * The method `showRaw` can also print [[scala.reflect.api.Types]] next to the artifacts * being inspected * {{{ @@ -89,7 +89,7 @@ import java.io.{ PrintWriter, StringWriter } * * === Printing Types === * - * The method `show` + * The method `show` * {{{ * scala> import scala.reflect.runtime.universe._ * import scala.reflect.runtime.universe._ @@ -124,7 +124,7 @@ import java.io.{ PrintWriter, StringWriter } * newTermName("y")#2541#GET)) * }}} * - * For more details about `Printer`s and other aspects of Scala reflection, see the + * For more details about `Printer`s and other aspects of Scala reflection, see the * [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] * */ @@ -160,7 +160,7 @@ trait Printers { self: Universe => protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String = { val buffer = new StringWriter() val writer = new PrintWriter(buffer) - var printer = mkPrinter(writer) + val printer = mkPrinter(writer) printTypes.value.map(printTypes => if (printTypes) printer.withTypes else printer.withoutTypes) printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds) printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds) diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index 86ea2c099b..8ea86755c6 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -115,7 +115,7 @@ trait BaseTypeSeqs { def map(f: Type => Type): BaseTypeSeq = { // inlined `elems map f` for performance val len = length - var arr = new Array[Type](len) + val arr = new Array[Type](len) var i = 0 while (i < len) { arr(i) = f(elems(i)) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index ac1722f069..77564b717f 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1068,7 +1068,6 @@ trait Definitions extends api.StandardDefinitions { } } def getMemberClass(owner: Symbol, name: Name): ClassSymbol = { - val y = getMember(owner, name.toTypeName) getMember(owner, name.toTypeName) match { case x: ClassSymbol => x case _ => fatalMissingSymbol(owner, name, "member class") @@ -1235,7 +1234,7 @@ trait Definitions extends api.StandardDefinitions { def init() { if (isInitialized) return // force initialization of every symbol that is synthesized or hijacked by the compiler - val forced = symbolsNotPresentInBytecode + val _ = symbolsNotPresentInBytecode isInitialized = true } //init diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala 
b/src/reflect/scala/reflect/internal/Mirrors.scala index ff58a31d20..a75185899f 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -43,7 +43,7 @@ trait Mirrors extends api.Mirrors { if (point > 0) getModuleOrClass(path.toTermName, point) else RootClass val name = path subName (point + 1, len) - var sym = owner.info member name + val sym = owner.info member name val result = if (path.isTermName) sym.suchThat(_ hasFlag MODULE) else sym if (result != NoSymbol) result else { diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 02ec0b0e06..2bd7d1f856 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -168,7 +168,7 @@ trait Printers extends api.Printers { self: SymbolTable => ) def printFlags(flags: Long, privateWithin: String) { - var mask: Long = if (settings.debug.value) -1L else PrintableFlags + val mask: Long = if (settings.debug.value) -1L else PrintableFlags val s = flagsToString(flags & mask, privateWithin) if (s != "") print(s + " ") } diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 0b065bb441..a5fc861b01 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1725,8 +1725,8 @@ trait Types extends api.Types { self: SymbolTable => } protected def defineBaseClassesOfCompoundType(tpe: CompoundType) { - def define = defineBaseClassesOfCompoundType(tpe, force = false) - if (!breakCycles || isPastTyper) define + def define() = defineBaseClassesOfCompoundType(tpe, force = false) + if (!breakCycles || isPastTyper) define() else tpe match { // non-empty parents helpfully excludes all package classes case tpe @ ClassInfoType(_ :: _, _, clazz) if !clazz.isAnonOrRefinementClass => @@ -1735,11 +1735,11 @@ trait Types extends api.Types { self: SymbolTable => defineBaseClassesOfCompoundType(tpe, force = true) else { baseClassesCycleMonitor push clazz - try define + try define() finally baseClassesCycleMonitor pop clazz } case _ => - define + define() } } private def defineBaseClassesOfCompoundType(tpe: CompoundType, force: Boolean) { @@ -1999,7 +1999,7 @@ trait Types extends api.Types { self: SymbolTable => var change = false for ((from, targets) <- refs(NonExpansive).iterator) for (target <- targets) { - var thatInfo = classInfo(target) + val thatInfo = classInfo(target) if (thatInfo.state != Initialized) change = change | thatInfo.propagate() addRefs(NonExpansive, from, thatInfo.getRefs(NonExpansive, target)) @@ -2007,7 +2007,7 @@ trait Types extends api.Types { self: SymbolTable => } for ((from, targets) <- refs(Expansive).iterator) for (target <- targets) { - var thatInfo = classInfo(target) + val thatInfo = classInfo(target) if (thatInfo.state != Initialized) change = change | thatInfo.propagate() addRefs(Expansive, from, thatInfo.getRefs(NonExpansive, target)) @@ -4071,7 +4071,7 @@ trait Types extends api.Types { self: SymbolTable => variance = -variance val tparams1 = mapOver(tparams) variance = -variance - var result1 = this(result) + val result1 = this(result) if ((tparams1 eq tparams) && (result1 eq result)) tp else PolyType(tparams1, result1.substSym(tparams, tparams1)) case TypeBounds(lo, hi) => @@ -4133,7 +4133,7 @@ trait Types extends api.Types { self: SymbolTable => else copyMethodType(tp, params1, result1.substSym(params, params1)) case PolyType(tparams, 
result) => val tparams1 = mapOver(tparams) - var result1 = this(result) + val result1 = this(result) if ((tparams1 eq tparams) && (result1 eq result)) tp else PolyType(tparams1, result1.substSym(tparams, tparams1)) case NullaryMethodType(result) => @@ -4163,7 +4163,7 @@ trait Types extends api.Types { self: SymbolTable => copyRefinedType(rtp, parents1, decls1) case ExistentialType(tparams, result) => val tparams1 = mapOver(tparams) - var result1 = this(result) + val result1 = this(result) if ((tparams1 eq tparams) && (result1 eq result)) tp else newExistentialType(tparams1, result1.substSym(tparams, tparams1)) case OverloadedType(pre, alts) => diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index ca47ef7e26..2cb2c57e32 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -339,7 +339,7 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { case TYPEREFtpe => val pre = readTypeRef() val sym = readSymbolRef() - var args = until(end, readTypeRef) + val args = until(end, readTypeRef) TypeRef(pre, sym, args) case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef()) @@ -759,7 +759,7 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { val tag = readNat() if (tag != MODIFIERS) errorBadSignature("expected a modifiers tag (" + tag + ")") - val end = readNat() + readIndex + val _ = readNat() + readIndex val pflagsHi = readNat() val pflagsLo = readNat() val pflags = (pflagsHi.toLong << 32) + pflagsLo diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index 2c90d2d525..b078b7d4f9 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -257,7 +257,6 @@ quant) def enabled = _enabled def enabled_=(cond: Boolean) = { if (cond && !_enabled) { - val test = new Timer("", Nil) val start = System.nanoTime() var total = 0L for (i <- 1 to 10000) { diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index f517c30fe6..0cfb3fd623 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -379,7 +379,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni val varargMatch = args.length >= params.length - 1 && isVarArgsList(params) if (!perfectMatch && !varargMatch) { val n_arguments = if (isVarArgsList(params)) s"${params.length - 1} or more" else s"${params.length}" - var s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments" + val s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments" throw new ScalaReflectionException(s"${showMethodSig(symbol)} takes $n_arguments $s_arguments") } @@ -1042,7 +1042,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private def jclassAsScala(jclazz: jClass[_], owner: Symbol): ClassSymbol = { val name = scalaSimpleName(jclazz) val completer = (clazz: Symbol, module: Symbol) => new FromJavaClassCompleter(clazz, module, jclazz) - val (clazz, module) = createClassModule(owner, name, completer) + val (clazz, _) = createClassModule(owner, name, completer) classCache enter (jclazz, clazz) clazz } diff --git 
a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala index 2b192ce570..60b22afd18 100644 --- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala +++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala @@ -116,7 +116,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable => currentMirror.tryJavaClass(path) match { case Some(cls) => val loadingMirror = currentMirror.mirrorDefining(cls) - val (clazz, module) = + val (_, module) = if (loadingMirror eq currentMirror) { createClassModule(pkgClass, name.toTypeName, new TopClassCompleter(_, _)) } else { diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala index 53f722994d..f01f2ff749 100644 --- a/src/scalap/scala/tools/scalap/Arguments.scala +++ b/src/scalap/scala/tools/scalap/Arguments.scala @@ -87,7 +87,7 @@ object Arguments { i += 2 } } else { - var iter = prefixes.iterator + val iter = prefixes.iterator val j = i while ((i == j) && iter.hasNext) { val prefix = iter.next diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala index f3d449b87f..78044a9caf 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala @@ -70,7 +70,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { } def isCaseClassObject(o: ObjectSymbol): Boolean = { - val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType + val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType o.isFinal && (classSymbol.children.find(x => x.isCase && x.isInstanceOf[MethodSymbol]) match { case Some(_) => true case None => false @@ -167,7 +167,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { print("object ") val poName = o.symbolInfo.owner.name print(processName(poName)) - val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType + val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType printType(classSymbol) print(" {\n") printChildren(level, classSymbol) @@ -179,7 +179,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { printModifiers(o) print("object ") print(processName(o.name)) - val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType + val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType printType(classSymbol) print(" {\n") printChildren(level, classSymbol) @@ -191,7 +191,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { val j = str.indexOf("[") if (j > 0) str = str.substring(0, j) str = StringUtil.trimStart(str, "=> ") - var i = str.lastIndexOf(".") + val i = str.lastIndexOf(".") val res = if (i > 0) str.substring(i + 1) else str if (res.length > 1) StringUtil.decapitalize(res.substring(0, 1)) else res.toLowerCase }) diff --git a/test/files/run/reify_newimpl_11.check b/test/files/run/reify_newimpl_11.check index 2f5cb581e6..c019c6db2d 100644 --- a/test/files/run/reify_newimpl_11.check +++ b/test/files/run/reify_newimpl_11.check @@ -1,2 +1,4 @@ -scala.tools.reflect.ToolBoxError: reflective toolbox has failed: -unresolved free type variables (namely: T defined by C in reify_newimpl_11.scala:6:11). have you forgot to use TypeTag annotations for type parameters external to a reifee? 
if you have troubles tracking free type variables, consider using -Xlog-free-types +scala.tools.reflect.ToolBoxError: reflective toolbox failed due to unresolved free type variables: + T defined by C in reify_newimpl_11.scala:6:11 +have you forgotten to use TypeTag annotations for type parameters external to a reifee? +if you have troubles tracking free type variables, consider using -Xlog-free-types diff --git a/test/files/run/reify_newimpl_13.check b/test/files/run/reify_newimpl_13.check index d518cd7b84..13e3c9af1e 100644 --- a/test/files/run/reify_newimpl_13.check +++ b/test/files/run/reify_newimpl_13.check @@ -1,2 +1,4 @@ -scala.tools.reflect.ToolBoxError: reflective toolbox has failed: -unresolved free type variables (namely: T defined by C in reify_newimpl_13.scala:7:13). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types +scala.tools.reflect.ToolBoxError: reflective toolbox failed due to unresolved free type variables: + T defined by C in reify_newimpl_13.scala:7:13 +have you forgotten to use TypeTag annotations for type parameters external to a reifee? +if you have troubles tracking free type variables, consider using -Xlog-free-types diff --git a/test/files/run/reify_newimpl_19.check b/test/files/run/reify_newimpl_19.check index 8b8652f92c..c749d4f106 100644 --- a/test/files/run/reify_newimpl_19.check +++ b/test/files/run/reify_newimpl_19.check @@ -1,2 +1,4 @@ -scala.tools.reflect.ToolBoxError: reflective toolbox has failed: -unresolved free type variables (namely: T defined by C in reify_newimpl_19.scala:7:10). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types +scala.tools.reflect.ToolBoxError: reflective toolbox failed due to unresolved free type variables: + T defined by C in reify_newimpl_19.scala:7:10 +have you forgotten to use TypeTag annotations for type parameters external to a reifee? +if you have troubles tracking free type variables, consider using -Xlog-free-types -- cgit v1.2.3 From 0bcb9e9169146e3f589c6c9f65cc4a5523b78120 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 3 Nov 2012 16:19:46 +0100 Subject: SI-6567 Warning for Option(implicitView(foo)) I've seen the reported problem before in the wild. It seems worthy of a special warning, so long as we advocate Option.apply as an alternative to `if (x == null) Some(x) else None`. It is behind -Xlint at the moment, an option that could do with some promotion. 
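A minimal sketch of the pattern this warning targets (the classes and the implicit view mirror the new test t6567.scala further down; the object and value names are illustrative only). Option.apply(x) is ordinarily the null-safe shorthand for `if (x != null) Some(x) else None`, but once an implicit view is applied to the argument, it is the view's freshly built, non-null result that gets wrapped, and the null check no longer guards the original value:

    class A
    class B

    object Sketch {
      implicit def a2b(a: A): B = new B     // implicit view A => B

      val a: A = null
      // Reads like a null-safe wrap, but elaborates to Option(a2b(a));
      // a2b(null) returns a fresh B, so this is Some(...) although a is null.
      val suspicious: Option[B] = Option(a)

      // One likely intent, which keeps the null check on `a` itself
      // and does not trip the new -Xlint warning:
      val intended: Option[B] = Option(a).map(a2b)
    }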
--- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 11 ++++++++++- src/reflect/scala/reflect/internal/Definitions.scala | 10 ++++++---- test/files/neg/t6567.check | 9 +++++++++ test/files/neg/t6567.flags | 1 + test/files/neg/t6567.scala | 11 +++++++++++ 5 files changed, 37 insertions(+), 5 deletions(-) create mode 100644 test/files/neg/t6567.check create mode 100644 test/files/neg/t6567.flags create mode 100644 test/files/neg/t6567.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index c04a8661b2..60bed95b9e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1051,6 +1051,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans def apply(tp: Type) = mapOver(tp).normalize } + def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint.value) (fn, args) match { + case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == Option_apply => + unit.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567 + case _ => + } + def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match { case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 => def isReferenceOp = name == nme.eq || name == nme.ne @@ -1535,7 +1541,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case Apply(fn, args) => // sensicality should be subsumed by the unreachability/exhaustivity/irrefutability analyses in the pattern matcher - if (!inPattern) checkSensible(tree.pos, fn, args) + if (!inPattern) { + checkImplicitViewOptionApply(tree.pos, fn, args) + checkSensible(tree.pos, fn, args) + } currentApplication = tree tree } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index ac1722f069..188078146a 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -540,10 +540,12 @@ trait Definitions extends api.StandardDefinitions { lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature] // Option classes - lazy val OptionClass: ClassSymbol = requiredClass[Option[_]] - lazy val SomeClass: ClassSymbol = requiredClass[Some[_]] - lazy val NoneModule: ModuleSymbol = requiredModule[scala.None.type] - lazy val SomeModule: ModuleSymbol = requiredModule[scala.Some.type] + lazy val OptionClass: ClassSymbol = requiredClass[Option[_]] + lazy val OptionModule: ModuleSymbol = requiredModule[scala.Option.type] + lazy val Option_apply = getMemberMethod(OptionModule, nme.apply) + lazy val SomeClass: ClassSymbol = requiredClass[Some[_]] + lazy val NoneModule: ModuleSymbol = requiredModule[scala.None.type] + lazy val SomeModule: ModuleSymbol = requiredModule[scala.Some.type] def compilerTypeFromTag(tt: ApiUniverse # WeakTypeTag[_]): Type = tt.in(rootMirror).tpe def compilerSymbolFromTag(tt: ApiUniverse # WeakTypeTag[_]): Symbol = tt.in(rootMirror).tpe.typeSymbol diff --git a/test/files/neg/t6567.check b/test/files/neg/t6567.check new file mode 100644 index 0000000000..a733d75354 --- /dev/null +++ b/test/files/neg/t6567.check @@ -0,0 +1,9 @@ +t6567.scala:8: warning: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply. 
+ Option[B](a) + ^ +t6567.scala:10: warning: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply. + val b: Option[B] = Option(a) + ^ +error: No warnings can be incurred under -Xfatal-warnings. +two warnings found +one error found diff --git a/test/files/neg/t6567.flags b/test/files/neg/t6567.flags new file mode 100644 index 0000000000..e93641e931 --- /dev/null +++ b/test/files/neg/t6567.flags @@ -0,0 +1 @@ +-Xlint -Xfatal-warnings \ No newline at end of file diff --git a/test/files/neg/t6567.scala b/test/files/neg/t6567.scala new file mode 100644 index 0000000000..650e5e39ae --- /dev/null +++ b/test/files/neg/t6567.scala @@ -0,0 +1,11 @@ +class A +class B + +object Test { + val a: A = null + implicit def a2b(a: A) = new B + + Option[B](a) + + val b: Option[B] = Option(a) +} -- cgit v1.2.3 From 30954277448ea270ea5cf59af6c5760451a81005 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 3 Nov 2012 08:55:07 -0700 Subject: Restored a scaladoc val body. Sometimes unused vals have side effecting right hand sides, and it looks like one or maybe two slipped through my net. --- src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 1 + src/partest/scala/tools/partest/ScaladocModelTest.scala | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index 010bb98549..a1ba086ce1 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -853,6 +853,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } def findMember(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = { + normalizeTemplate(aSym.owner) inTpl.members.find(_.sym == aSym) } diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala index f399b86029..b8a41aabe4 100644 --- a/src/partest/scala/tools/partest/ScaladocModelTest.scala +++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala @@ -86,7 +86,10 @@ abstract class ScaladocModelTest extends DirectTest { private[this] def newDocFactory: DocFactory = { settings = new Settings(_ => ()) settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! - new DocFactory(new ConsoleReporter(settings), settings) + val args = extraSettings + " " + scaladocSettings + val command = new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) + val docFact = new DocFactory(new ConsoleReporter(settings), settings) + docFact } // compile with scaladoc and output the result -- cgit v1.2.3 From 357f45c1152728a5e461312f462aa7ab63e2adec Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 3 Nov 2012 10:07:48 -0700 Subject: Fix for SI-6426, importable _. Prohibit `_` as an identifier, it can only bring badness. 
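A sketch of the hole being closed (the class name is illustrative; the rejected call mirrors the new negative test t6426.scala below). In an import selector, renaming a member to `_` means "exclude it", yet before this change the scanner accepted a backquoted `_` as an ordinary identifier, through which the supposedly excluded member could still be reached:

    class Sketch {
      // `mutable` is renamed to _, i.e. excluded; the trailing wildcard imports the rest.
      import scala.collection.{mutable => _, _}

      val ok = Seq(1, 2, 3)    // fine: the remaining collection members are imported

      // Previously accepted (it resolved to collection.mutable.Buffer); now rejected
      // by the scanner with "wildcard invalid as backquoted identifier":
      // println(`_`.Buffer(0))
    }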
--- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 7 ++----- src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 5 ++++- test/files/neg/t6426.check | 7 +++++++ test/files/neg/t6426.scala | 5 +++++ 4 files changed, 18 insertions(+), 6 deletions(-) create mode 100644 test/files/neg/t6426.check create mode 100644 test/files/neg/t6426.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 380fd1fcaa..d7ee09c808 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -976,11 +976,8 @@ self => /** Assumed (provisionally) to be TermNames. */ def ident(skipIt: Boolean): Name = - if (isIdent) { - val name = in.name.encode - in.nextToken() - name - } else { + if (isIdent) rawIdent().encode + else { syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt) nme.ERROR } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 5b828ded79..270a7fc8bf 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -613,7 +613,10 @@ trait Scanners extends ScannersCommon { if (ch == '`') { nextChar() finishNamed(BACKQUOTED_IDENT) - if (name.length == 0) syntaxError("empty quoted identifier") + if (name.length == 0) + syntaxError("empty quoted identifier") + else if (name == nme.WILDCARD) + syntaxError("wildcard invalid as backquoted identifier") } else syntaxError("unclosed quoted identifier") } diff --git a/test/files/neg/t6426.check b/test/files/neg/t6426.check new file mode 100644 index 0000000000..149f74c4de --- /dev/null +++ b/test/files/neg/t6426.check @@ -0,0 +1,7 @@ +t6426.scala:4: error: wildcard invalid as backquoted identifier + println(`_`.Buffer(0)) + ^ +t6426.scala:5: error: ')' expected but '}' found. +} +^ +two errors found diff --git a/test/files/neg/t6426.scala b/test/files/neg/t6426.scala new file mode 100644 index 0000000000..a27d18eb58 --- /dev/null +++ b/test/files/neg/t6426.scala @@ -0,0 +1,5 @@ +class A { + import collection.{mutable => _, _} + + println(`_`.Buffer(0)) +} -- cgit v1.2.3 From dad886659faca4fba2d4937c9bc6780591b02c27 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 3 Nov 2012 13:34:20 +0100 Subject: SI-6611 Tighten up an unsafe array optimization The net was cast too wide and was unsafely optimizing away array copies. 
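In sketch form, the semantics the fix preserves (this is essentially the new test/files/run/t6611.scala added below; the object name is illustrative). `Array(xs: _*)` must yield a fresh array, so the Cleanup rewrite may only reuse its argument when that argument is an array literal built on the spot, which is what the new StripCast/ArrayValue guard checks:

    object CopySemantics extends App {
      val a  = Array("1")
      val a2 = Array(a: _*)     // must be a fresh array, not an alias of `a`
      a2(0) = "2"
      assert(a(0) == "1")       // failed before this fix: the copy was optimized away

      // Still safely optimized: a literal vararg call such as Array("x", "y"),
      // where the wrapped ArrayValue can stand in for the result directly.
    }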
--- src/compiler/scala/tools/nsc/transform/CleanUp.scala | 13 ++++++++----- src/reflect/scala/reflect/internal/TreeInfo.scala | 14 ++++++++++++++ test/files/instrumented/t6611.check | 1 + test/files/instrumented/t6611.scala | 13 +++++++++++++ test/files/run/t6611.scala | 6 ++++++ 5 files changed, 42 insertions(+), 5 deletions(-) create mode 100644 test/files/instrumented/t6611.check create mode 100644 test/files/instrumented/t6611.scala create mode 100644 test/files/run/t6611.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 1f353bb31c..bdcebf47b8 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -15,6 +15,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL { import global._ import definitions._ import CODE._ + import treeInfo.StripCast /** the following two members override abstract members in Transform */ val phaseName: String = "cleanup" @@ -618,14 +619,16 @@ abstract class CleanUp extends Transform with ast.TreeDSL { } transformApply - // This transform replaces Array(Predef.wrapArray(Array(...)), ) - // with just Array(...) - case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(array)), _)) + // Replaces `Array(Predef.wrapArray(ArrayValue(...).$asInstanceOf[...]), )` + // with just `ArrayValue(...).$asInstanceOf[...]` + // + // See SI-6611; we must *only* do this for literal vararg arrays. + case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(arg @ StripCast(ArrayValue(_, _)))), _)) if (wrapRefArrayMeth.symbol == Predef_wrapRefArray && appMeth.symbol == ArrayModule_overloadedApply.suchThat { - _.tpe.resultType.dealias.typeSymbol == ObjectClass + _.tpe.resultType.dealias.typeSymbol == ObjectClass // [T: ClassTag](xs: T*): Array[T] post erasure }) => - super.transform(array) + super.transform(arg) case _ => super.transform(tree) diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 66326c90e9..bee92b446b 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -265,6 +265,20 @@ abstract class TreeInfo { tree } + /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */ + def stripCast(tree: Tree): Tree = tree match { + case TypeApply(sel @ Select(inner, _), _) if isCastSymbol(sel.symbol) => + stripCast(inner) + case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCastSymbol(sel.symbol) => + stripCast(inner) + case t => + t + } + + object StripCast { + def unapply(tree: Tree): Some[Tree] = Some(stripCast(tree)) + } + /** Is tree a self or super constructor call? 
*/ def isSelfOrSuperConstrCall(tree: Tree) = { // stripNamedApply for SI-3584: adaptToImplicitMethod in Typers creates a special context diff --git a/test/files/instrumented/t6611.check b/test/files/instrumented/t6611.check new file mode 100644 index 0000000000..5cd691e93a --- /dev/null +++ b/test/files/instrumented/t6611.check @@ -0,0 +1 @@ +Method call statistics: diff --git a/test/files/instrumented/t6611.scala b/test/files/instrumented/t6611.scala new file mode 100644 index 0000000000..821d5f3fbf --- /dev/null +++ b/test/files/instrumented/t6611.scala @@ -0,0 +1,13 @@ +import scala.tools.partest.instrumented.Instrumentation._ + +object Test { + def main(args: Array[String]) { + startProfiling() + + // tests optimization in Cleanup for varargs reference arrays + val a = Array("") + + stopProfiling() + printStatistics() + } +} diff --git a/test/files/run/t6611.scala b/test/files/run/t6611.scala new file mode 100644 index 0000000000..c0297372f0 --- /dev/null +++ b/test/files/run/t6611.scala @@ -0,0 +1,6 @@ +object Test extends App { + val a = Array("1") + val a2 = Array(a: _*) + a2(0) = "2" + assert(a(0) == "1") +} -- cgit v1.2.3 From 8265175ecc42293997d59049f430396c77a2b891 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 4 Nov 2012 14:17:25 +0100 Subject: Expand optimization of Array(e1, ..., en) to primitive arrays. --- .../scala/tools/nsc/transform/CleanUp.scala | 7 +++ src/library/scala/Array.scala | 10 ++++ .../scala/reflect/internal/Definitions.scala | 13 ++--- test/files/instrumented/t6611.scala | 24 ++++++++- test/files/run/t6611.scala | 63 ++++++++++++++++++++-- 5 files changed, 106 insertions(+), 11 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index bdcebf47b8..4f145d3d7e 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -629,6 +629,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL { _.tpe.resultType.dealias.typeSymbol == ObjectClass // [T: ClassTag](xs: T*): Array[T] post erasure }) => super.transform(arg) + case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _))))) + if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && + appMeth.symbol == ArrayModule_overloadedApply.suchThat { + tp => tp.tpe.paramss.flatten.lift.apply(1).exists(_.tpe.typeSymbol == SeqClass) && + tp.tpe.resultType =:= arrayType(elemtpt.tpe) // (p1: AnyVal1, ps: AnyVal1*): Array[AnyVal1] post erasure + } => + super.transform(rest.copy(elems = elem0 :: rest.elems)) case _ => super.transform(tree) diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala index 0b8550be37..514844a5fa 100644 --- a/src/library/scala/Array.scala +++ b/src/library/scala/Array.scala @@ -115,6 +115,8 @@ object Array extends FallbackArrayBuilding { * @param xs the elements to put in the array * @return an array containing all elements from xs. */ + // Subject to a compiler optimization in Cleanup. + // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } def apply[T: ClassTag](xs: T*): Array[T] = { val array = new Array[T](xs.length) var i = 0 @@ -123,6 +125,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Boolean` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { val array = new Array[Boolean](xs.length + 1) array(0) = x @@ -132,6 +135,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Byte` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Byte, xs: Byte*): Array[Byte] = { val array = new Array[Byte](xs.length + 1) array(0) = x @@ -141,6 +145,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Short` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Short, xs: Short*): Array[Short] = { val array = new Array[Short](xs.length + 1) array(0) = x @@ -150,6 +155,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Char` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Char, xs: Char*): Array[Char] = { val array = new Array[Char](xs.length + 1) array(0) = x @@ -159,6 +165,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Int` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Int, xs: Int*): Array[Int] = { val array = new Array[Int](xs.length + 1) array(0) = x @@ -168,6 +175,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Long` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Long, xs: Long*): Array[Long] = { val array = new Array[Long](xs.length + 1) array(0) = x @@ -177,6 +185,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Float` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Float, xs: Float*): Array[Float] = { val array = new Array[Float](xs.length + 1) array(0) = x @@ -186,6 +195,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Double` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Double, xs: Double*): Array[Double] = { val array = new Array[Double](xs.length + 1) array(0) = x diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index ac1722f069..71559896ab 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -340,12 +340,13 @@ trait Definitions extends api.StandardDefinitions { lazy val PredefModule = requiredModule[scala.Predef.type] lazy val PredefModuleClass = PredefModule.moduleClass - def Predef_classOf = getMemberMethod(PredefModule, nme.classOf) - def Predef_identity = getMemberMethod(PredefModule, nme.identity) - def Predef_conforms = getMemberMethod(PredefModule, nme.conforms) - def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray) - def Predef_??? = getMemberMethod(PredefModule, nme.???) - def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly) + def Predef_classOf = getMemberMethod(PredefModule, nme.classOf) + def Predef_identity = getMemberMethod(PredefModule, nme.identity) + def Predef_conforms = getMemberMethod(PredefModule, nme.conforms) + def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray) + def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp)) + def Predef_??? = getMemberMethod(PredefModule, nme.???) + def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly) /** Is `sym` a member of Predef with the given name? 
* Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def` diff --git a/test/files/instrumented/t6611.scala b/test/files/instrumented/t6611.scala index 821d5f3fbf..4c52f8a5ef 100644 --- a/test/files/instrumented/t6611.scala +++ b/test/files/instrumented/t6611.scala @@ -5,7 +5,29 @@ object Test { startProfiling() // tests optimization in Cleanup for varargs reference arrays - val a = Array("") + Array("") + + + Array(true) + Array(true, false) + Array(1: Byte) + Array(1: Byte, 2: Byte) + Array(1: Short) + Array(1: Short, 2: Short) + Array(1) + Array(1, 2) + Array(1L) + Array(1L, 2L) + Array(1d) + Array(1d, 2d) + Array(1f) + Array(1f, 2f) + + /* Not currently optimized: + Array[Int](1, 2) etc + Array(()) + Array((), ()) + */ stopProfiling() printStatistics() diff --git a/test/files/run/t6611.scala b/test/files/run/t6611.scala index c0297372f0..c295368aea 100644 --- a/test/files/run/t6611.scala +++ b/test/files/run/t6611.scala @@ -1,6 +1,61 @@ object Test extends App { - val a = Array("1") - val a2 = Array(a: _*) - a2(0) = "2" - assert(a(0) == "1") + locally { + val a = Array("1") + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array("1": Object) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(true) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1: Short) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1: Byte) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1L) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1f) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1d) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(()) + val a2 = Array(a: _*) + assert(a ne a2) + } } -- cgit v1.2.3 From 477eee3acf0ec9e6f339efb97a213a2a0775495f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 4 Nov 2012 05:36:34 -0800 Subject: Pull request feedback. Factor out 8x duplicated bit of UnPicklers. 
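For illustration only, a minimal self-contained sketch of the duplicated pattern this commit factors out; the reader class and its data layout are made up, only the readNat/readIndex/readEnd names come from the diff below.

    // Toy reader: each entry starts with a length ("Nat") followed by that
    // many payload values, so "end of current entry" is readNat() + readIndex.
    class MiniEntryReader(data: Array[Int]) {
      var readIndex = 0
      def readNat(): Int = { val n = data(readIndex); readIndex += 1; n }
      // The expression duplicated 8x in UnPickler becomes a single helper:
      private def readEnd(): Int = readNat() + readIndex
      // Example use: jump past the current entry.
      def skipEntry(): Unit = { readIndex = readEnd() }
    }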
--- .../scala/reflect/internal/pickling/UnPickler.scala | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 2cb2c57e32..13e42102cc 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -188,11 +188,12 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { } protected def readTermName(): TermName = readName().toTermName protected def readTypeName(): TypeName = readName().toTypeName + private def readEnd() = readNat() + readIndex /** Read a symbol */ protected def readSymbol(): Symbol = { val tag = readByte() - val end = readNat() + readIndex + val end = readEnd() def atEnd = readIndex == end def readExtSymbol(): Symbol = { @@ -320,7 +321,7 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { */ protected def readType(forceProperType: Boolean = false): Type = { val tag = readByte() - val end = readNat() + readIndex + val end = readEnd() (tag: @switch) match { case NOtpe => NoType @@ -426,7 +427,7 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { protected def readChildren() { val tag = readByte() assert(tag == CHILDREN) - val end = readNat() + readIndex + val end = readEnd() val target = readSymbolRef() while (readIndex != end) target addChild readSymbolRef() } @@ -445,7 +446,7 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { */ private def readArrayAnnot() = { readByte() // skip the `annotargarray` tag - val end = readNat() + readIndex + val end = readEnd() until(end, () => readClassfileAnnotArg(readNat())).toArray(JavaArgumentTag) } protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match { @@ -481,7 +482,7 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { val tag = readByte() if (tag != SYMANNOT) errorBadSignature("symbol annotation expected ("+ tag +")") - val end = readNat() + readIndex + val end = readEnd() val target = readSymbolRef() target.addAnnotation(readAnnotationInfo(end)) } @@ -492,7 +493,7 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { val tag = readByte() if (tag != ANNOTINFO) errorBadSignature("annotation expected (" + tag + ")") - val end = readNat() + readIndex + val end = readEnd() readAnnotationInfo(end) } @@ -501,7 +502,7 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { val outerTag = readByte() if (outerTag != TREE) errorBadSignature("tree expected (" + outerTag + ")") - val end = readNat() + readIndex + val end = readEnd() val tag = readByte() val tpe = if (tag == EMPTYtree) NoType else readTypeRef() @@ -759,7 +760,8 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { val tag = readNat() if (tag != MODIFIERS) errorBadSignature("expected a modifiers tag (" + tag + ")") - val _ = readNat() + readIndex + + readEnd() val pflagsHi = readNat() val pflagsLo = readNat() val pflags = (pflagsHi.toLong << 32) + pflagsLo -- cgit v1.2.3 From 092345a24c22a821204fb358d33272ae8f7353be Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 4 Nov 2012 14:44:59 +0100 Subject: Refactor guards checking for a particular overload of Array.apply. 
--- src/compiler/scala/tools/nsc/transform/CleanUp.scala | 11 ++--------- src/reflect/scala/reflect/internal/Definitions.scala | 2 ++ 2 files changed, 4 insertions(+), 9 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 4f145d3d7e..61f2dd39d5 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -624,17 +624,10 @@ abstract class CleanUp extends Transform with ast.TreeDSL { // // See SI-6611; we must *only* do this for literal vararg arrays. case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(arg @ StripCast(ArrayValue(_, _)))), _)) - if (wrapRefArrayMeth.symbol == Predef_wrapRefArray && - appMeth.symbol == ArrayModule_overloadedApply.suchThat { - _.tpe.resultType.dealias.typeSymbol == ObjectClass // [T: ClassTag](xs: T*): Array[T] post erasure - }) => + if wrapRefArrayMeth.symbol == Predef_wrapRefArray && appMeth.symbol == ArrayModule_genericApply => super.transform(arg) case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _))))) - if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && - appMeth.symbol == ArrayModule_overloadedApply.suchThat { - tp => tp.tpe.paramss.flatten.lift.apply(1).exists(_.tpe.typeSymbol == SeqClass) && - tp.tpe.resultType =:= arrayType(elemtpt.tpe) // (p1: AnyVal1, ps: AnyVal1*): Array[AnyVal1] post erasure - } => + if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) => super.transform(rest.copy(elems = elem0 :: rest.elems)) case _ => diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 71559896ab..f839df1870 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -471,6 +471,8 @@ trait Definitions extends api.StandardDefinitions { // arrays and their members lazy val ArrayModule = requiredModule[scala.Array.type] lazy val ArrayModule_overloadedApply = getMemberMethod(ArrayModule, nme.apply) + def ArrayModule_genericApply = ArrayModule_overloadedApply.suchThat(_.paramss.flatten.last.tpe.typeSymbol == ClassTagClass) // [T: ClassTag](xs: T*): Array[T] + def ArrayModule_apply(tp: Type) = ArrayModule_overloadedApply.suchThat(_.tpe.resultType =:= arrayType(tp)) // (p1: AnyVal1, ps: AnyVal1*): Array[AnyVal1] lazy val ArrayClass = getRequiredClass("scala.Array") // requiredClass[scala.Array[_]] lazy val Array_apply = getMemberMethod(ArrayClass, nme.apply) lazy val Array_update = getMemberMethod(ArrayClass, nme.update) -- cgit v1.2.3 From bc3dda2b0222d3b7cf3db491728b98f9b6110856 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 29 Sep 2012 18:15:15 +0200 Subject: SI-6448 Collecting the spoils of PartialFun#runWith Avoids calling both `isDefinedAt` and `apply`. This pathological case that would benefit the most looks like: xs collect { case x if {expensive(); true} => x } The typical change looks like: - for (x <- this) if (pf.isDefinedAt(x)) b += pf(x) + foreach(pf.runWith(b += _)) Incorporates feedback provided by Pavel Pavlov: https://github.com/retronym/scala/commit/ef5430 A few more opportunities for optimization are noted in the `Pending` section of the enclosed test. `Iterator.collect` would be nice, but a solution eludes me. 
Calling the guard less frequently does change the behaviour of these functions in an observable way, but does not contravene the documented semantics. That said, there is an alternative opinion in the comments on the ticket: https://issues.scala-lang.org/browse/SI-6448 --- src/library/scala/Option.scala | 2 +- src/library/scala/collection/TraversableLike.scala | 2 +- src/library/scala/collection/TraversableOnce.scala | 6 +-- .../scala/collection/immutable/Stream.scala | 13 +++-- .../collection/parallel/RemainsIterator.scala | 3 +- .../collection/parallel/mutable/ParArray.scala | 3 +- test/files/run/t6448.check | 32 ++++++++++++ test/files/run/t6448.scala | 61 ++++++++++++++++++++++ 8 files changed, 110 insertions(+), 12 deletions(-) create mode 100644 test/files/run/t6448.check create mode 100644 test/files/run/t6448.scala (limited to 'src') diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index 755071a14f..95fddc43f4 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -256,7 +256,7 @@ sealed abstract class Option[+A] extends Product with Serializable { * value (if possible), or $none. */ @inline final def collect[B](pf: PartialFunction[A, B]): Option[B] = - if (!isEmpty && pf.isDefinedAt(this.get)) Some(pf(this.get)) else None + if (!isEmpty) pf.lift(this.get) else None /** Returns this $option if it is nonempty, * otherwise return the result of evaluating `alternative`. diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index 7849f1c544..ad96382d52 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -275,7 +275,7 @@ trait TraversableLike[+A, +Repr] extends Any def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { val b = bf(repr) - for (x <- this) if (pf.isDefinedAt(x)) b += pf(x) + foreach(pf.runWith(b += _)) b.result } diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index a61d1354dc..569412a441 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -128,10 +128,8 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { * @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)` */ def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = { - for (x <- self.toIterator) { // make sure to use an iterator or `seq` - if (pf isDefinedAt x) - return Some(pf(x)) - } + // make sure to use an iterator or `seq` + self.toIterator.foreach(pf.runWith(b => return Some(b))) None } diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index 5566806c55..e6b110131d 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -385,12 +385,17 @@ self => // 1) stackoverflows (could be achieved with tailrec, too) // 2) out of memory errors for big streams (`this` reference can be eliminated from the stack) var rest: Stream[A] = this - while (rest.nonEmpty && !pf.isDefinedAt(rest.head)) rest = rest.tail + + // Avoids calling both `pf.isDefined` and `pf.apply`.
+ var newHead: B = null.asInstanceOf[B] + val runWith = pf.runWith((b: B) => newHead = b) + + while (rest.nonEmpty && !runWith(rest.head)) rest = rest.tail // without the call to the companion object, a thunk is created for the tail of the new stream, // and the closure of the thunk will reference `this` if (rest.isEmpty) Stream.Empty.asInstanceOf[That] - else Stream.collectedTail(rest, pf, bf).asInstanceOf[That] + else Stream.collectedTail(newHead, rest, pf, bf).asInstanceOf[That] } } @@ -1170,8 +1175,8 @@ object Stream extends SeqFactory[Stream] { cons(stream.head, stream.tail filter p) } - private[immutable] def collectedTail[A, B, That](stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = { - cons(pf(stream.head), stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]]) + private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = { + cons(head, stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]]) } } diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala index 9bf287cc39..857d051ded 100644 --- a/src/library/scala/collection/parallel/RemainsIterator.scala +++ b/src/library/scala/collection/parallel/RemainsIterator.scala @@ -123,9 +123,10 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def collect2combiner[S, That](pf: PartialFunction[T, S], cb: Combiner[S, That]): Combiner[S, That] = { //val cb = pbf(repr) + val runWith = pf.runWith(cb += _) while (hasNext) { val curr = next - if (pf.isDefinedAt(curr)) cb += pf(curr) + runWith(curr) } cb } diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index 7527c9a71a..92f0b27568 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -405,9 +405,10 @@ self => private def collect2combiner_quick[S, That](pf: PartialFunction[T, S], a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) { var j = from + val runWith = pf.runWith(b => cb += b) while (j < ntil) { val curr = a(j).asInstanceOf[T] - if (pf.isDefinedAt(curr)) cb += pf(curr) + runWith(curr) j += 1 } } diff --git a/test/files/run/t6448.check b/test/files/run/t6448.check new file mode 100644 index 0000000000..9401568319 --- /dev/null +++ b/test/files/run/t6448.check @@ -0,0 +1,32 @@ + +=List.collect= +f(1) +f(2) +List(1) + +=List.collectFirst= +f(1) +Some(1) + +=Option.collect= +f(1) +Some(1) + +=Option.collect= +f(2) +None + +=Stream.collect= +f(1) +f(2) +List(1) + +=Stream.collectFirst= +f(1) +Some(1) + +=ParVector.collect= +(ParVector(1),2) + +=ParArray.collect= +(ParArray(1),2) diff --git a/test/files/run/t6448.scala b/test/files/run/t6448.scala new file mode 100644 index 0000000000..4d1528e500 --- /dev/null +++ b/test/files/run/t6448.scala @@ -0,0 +1,61 @@ +// Tests to show that various `collect` functions avoid calling +// both `PartialFunction#isDefinedAt` and `PartialFunction#apply`. 
+// +object Test { + def f(i: Int) = { println("f(" + i + ")"); true } + class Counter { + var count = 0 + def apply(i: Int) = synchronized {count += 1; true} + } + + def testing(label: String)(body: => Any) { + println(s"\n=$label=") + println(body) + } + + def main(args: Array[String]) { + testing("List.collect")(List(1, 2) collect { case x if f(x) && x < 2 => x}) + testing("List.collectFirst")(List(1, 2) collectFirst { case x if f(x) && x < 2 => x}) + testing("Option.collect")(Some(1) collect { case x if f(x) && x < 2 => x}) + testing("Option.collect")(Some(2) collect { case x if f(x) && x < 2 => x}) + testing("Stream.collect")((Stream(1, 2).collect { case x if f(x) && x < 2 => x}).toList) + testing("Stream.collectFirst")(Stream.continually(1) collectFirst { case x if f(x) && x < 2 => x}) + + import collection.parallel.ParIterable + import collection.parallel.immutable.ParVector + import collection.parallel.mutable.ParArray + testing("ParVector.collect") { + val counter = new Counter() + (ParVector(1, 2) collect { case x if counter(x) && x < 2 => x}, counter.synchronized(counter.count)) + } + + testing("ParArray.collect") { + val counter = new Counter() + (ParArray(1, 2) collect { case x if counter(x) && x < 2 => x}, counter.synchronized(counter.count)) + } + + object PendingTests { + testing("Iterator.collect")((Iterator(1, 2) collect { case x if f(x) && x < 2 => x}).toList) + + testing("List.view.collect")((List(1, 2).view collect { case x if f(x) && x < 2 => x}).force) + + // This would do the trick in Future.collect, but I haven't added this yet as there is a tradeoff + // with extra allocations to consider. + // + // pf.lift(v) match { + // case Some(x) => p success x + // case None => fail(v) + // } + testing("Future.collect") { + import concurrent.ExecutionContext.Implicits.global + import concurrent.Await + import concurrent.duration.Duration + val result = concurrent.future(1) collect { case x if f(x) => x} + Await.result(result, Duration.Inf) + } + + // TODO Future.{onSuccess, onFailure, recoverWith, andThen} + } + + } +} -- cgit v1.2.3 From c04a4edcac9e6ecf653159942394a82d579f1f88 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 4 Nov 2012 13:04:33 -0800 Subject: Revert "Convenience method commonSymbolOwner." I'm reverting 14704da1b8 because it isn't yet used anywhere, but leaving it in the history because I or someone is going to put it to work. --- src/reflect/scala/reflect/internal/Symbols.scala | 18 ------------------ 1 file changed, 18 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 9a4b272d6c..53a236fa3c 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -45,24 +45,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => m } - /** The deepest symbol which appears in the owner chains of all - * the given symbols. - */ - def commonSymbolOwner(syms: List[Symbol]): Symbol = { - def loop(owner: Symbol, rest: List[Symbol]): Symbol = rest match { - case Nil => owner - case x :: xs if x.ownerChain contains owner => loop(owner, xs) - case x :: xs if owner.ownerChain contains x => loop(x, xs) - case x :: xs => - x.ownerChain find (owner.ownerChain contains _) match { - case Some(common) => loop(common, xs) - case _ => NoSymbol - } - } - if (syms.isEmpty || (syms contains NoSymbol)) NoSymbol - else loop(syms.head.owner, syms.tail) - } - /** Create a new free term. Its owner is NoSymbol. 
*/ def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String): FreeTermSymbol = -- cgit v1.2.3 From ed3709a5dfd84f073a9a99e43418f693adbac07c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 6 Nov 2012 14:15:25 +0100 Subject: Revert "Refactor guards checking for a particular overload of Array.apply." This reverts commit 092345a24c22a821204fb358d33272ae8f7353be. --- src/compiler/scala/tools/nsc/transform/CleanUp.scala | 11 +++++++++-- src/reflect/scala/reflect/internal/Definitions.scala | 2 -- 2 files changed, 9 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 3b74cb1168..6af7b78181 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -624,10 +624,17 @@ abstract class CleanUp extends Transform with ast.TreeDSL { // // See SI-6611; we must *only* do this for literal vararg arrays. case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(arg @ StripCast(ArrayValue(_, _)))), _)) - if wrapRefArrayMeth.symbol == Predef_wrapRefArray && appMeth.symbol == ArrayModule_genericApply => + if (wrapRefArrayMeth.symbol == Predef_wrapRefArray && + appMeth.symbol == ArrayModule_overloadedApply.suchThat { + _.tpe.resultType.dealias.typeSymbol == ObjectClass // [T: ClassTag](xs: T*): Array[T] post erasure + }) => super.transform(arg) case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _))))) - if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) => + if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && + appMeth.symbol == ArrayModule_overloadedApply.suchThat { + tp => tp.tpe.paramss.flatten.lift.apply(1).exists(_.tpe.typeSymbol == SeqClass) && + tp.tpe.resultType =:= arrayType(elemtpt.tpe) // (p1: AnyVal1, ps: AnyVal1*): Array[AnyVal1] post erasure + } => super.transform(rest.copy(elems = elem0 :: rest.elems)) case _ => diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index df4258253f..60a1913548 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -471,8 +471,6 @@ trait Definitions extends api.StandardDefinitions { // arrays and their members lazy val ArrayModule = requiredModule[scala.Array.type] lazy val ArrayModule_overloadedApply = getMemberMethod(ArrayModule, nme.apply) - def ArrayModule_genericApply = ArrayModule_overloadedApply.suchThat(_.paramss.flatten.last.tpe.typeSymbol == ClassTagClass) // [T: ClassTag](xs: T*): Array[T] - def ArrayModule_apply(tp: Type) = ArrayModule_overloadedApply.suchThat(_.tpe.resultType =:= arrayType(tp)) // (p1: AnyVal1, ps: AnyVal1*): Array[AnyVal1] lazy val ArrayClass = getRequiredClass("scala.Array") // requiredClass[scala.Array[_]] lazy val Array_apply = getMemberMethod(ArrayClass, nme.apply) lazy val Array_update = getMemberMethod(ArrayClass, nme.update) -- cgit v1.2.3 From 46fc45e62a1f4ae5a17f5abcb346ff49cff5a7ea Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 6 Nov 2012 14:15:28 +0100 Subject: Revert "Expand optimization of Array(e1, ..., en) to primitive arrays." This reverts commit 8265175ecc42293997d59049f430396c77a2b891. 
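For illustration only, a self-contained sketch of the aliasing guarantee that constrains this whole family of Cleanup rewrites (values made up; the shape follows test/files/run/t6611.scala above): Array(xs: _*) applied to an existing array must build a fresh copy, so the copy may only be elided for literal vararg arrays.

    object VarargCopyDemo {
      def main(args: Array[String]) {
        val a  = Array("1")
        val a2 = Array(a: _*)   // not a literal vararg list: a fresh array must be built
        a2(0) = "2"
        assert(a(0) == "1")     // the original must not be affected

        val b = Array("x", "y") // literal vararg list: the intermediate copy can safely be elided
        assert(b.length == 2)
      }
    }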
--- .../scala/tools/nsc/transform/CleanUp.scala | 7 --- src/library/scala/Array.scala | 10 ---- .../scala/reflect/internal/Definitions.scala | 13 +++-- test/files/instrumented/t6611.scala | 24 +-------- test/files/run/t6611.scala | 63 ++-------------------- 5 files changed, 11 insertions(+), 106 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 6af7b78181..122a37c0c6 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -629,13 +629,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL { _.tpe.resultType.dealias.typeSymbol == ObjectClass // [T: ClassTag](xs: T*): Array[T] post erasure }) => super.transform(arg) - case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _))))) - if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && - appMeth.symbol == ArrayModule_overloadedApply.suchThat { - tp => tp.tpe.paramss.flatten.lift.apply(1).exists(_.tpe.typeSymbol == SeqClass) && - tp.tpe.resultType =:= arrayType(elemtpt.tpe) // (p1: AnyVal1, ps: AnyVal1*): Array[AnyVal1] post erasure - } => - super.transform(rest.copy(elems = elem0 :: rest.elems)) case _ => super.transform(tree) diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala index 514844a5fa..0b8550be37 100644 --- a/src/library/scala/Array.scala +++ b/src/library/scala/Array.scala @@ -115,8 +115,6 @@ object Array extends FallbackArrayBuilding { * @param xs the elements to put in the array * @return an array containing all elements from xs. */ - // Subject to a compiler optimization in Cleanup. - // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } def apply[T: ClassTag](xs: T*): Array[T] = { val array = new Array[T](xs.length) var i = 0 @@ -125,7 +123,6 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Boolean` objects */ - // Subject to a compiler optimization in Cleanup, see above. def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { val array = new Array[Boolean](xs.length + 1) array(0) = x @@ -135,7 +132,6 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Byte` objects */ - // Subject to a compiler optimization in Cleanup, see above. def apply(x: Byte, xs: Byte*): Array[Byte] = { val array = new Array[Byte](xs.length + 1) array(0) = x @@ -145,7 +141,6 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Short` objects */ - // Subject to a compiler optimization in Cleanup, see above. def apply(x: Short, xs: Short*): Array[Short] = { val array = new Array[Short](xs.length + 1) array(0) = x @@ -155,7 +150,6 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Char` objects */ - // Subject to a compiler optimization in Cleanup, see above. def apply(x: Char, xs: Char*): Array[Char] = { val array = new Array[Char](xs.length + 1) array(0) = x @@ -165,7 +159,6 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Int` objects */ - // Subject to a compiler optimization in Cleanup, see above. def apply(x: Int, xs: Int*): Array[Int] = { val array = new Array[Int](xs.length + 1) array(0) = x @@ -175,7 +168,6 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Long` objects */ - // Subject to a compiler optimization in Cleanup, see above. 
def apply(x: Long, xs: Long*): Array[Long] = { val array = new Array[Long](xs.length + 1) array(0) = x @@ -185,7 +177,6 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Float` objects */ - // Subject to a compiler optimization in Cleanup, see above. def apply(x: Float, xs: Float*): Array[Float] = { val array = new Array[Float](xs.length + 1) array(0) = x @@ -195,7 +186,6 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Double` objects */ - // Subject to a compiler optimization in Cleanup, see above. def apply(x: Double, xs: Double*): Array[Double] = { val array = new Array[Double](xs.length + 1) array(0) = x diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 60a1913548..5c982742bc 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -340,13 +340,12 @@ trait Definitions extends api.StandardDefinitions { lazy val PredefModule = requiredModule[scala.Predef.type] lazy val PredefModuleClass = PredefModule.moduleClass - def Predef_classOf = getMemberMethod(PredefModule, nme.classOf) - def Predef_identity = getMemberMethod(PredefModule, nme.identity) - def Predef_conforms = getMemberMethod(PredefModule, nme.conforms) - def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray) - def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp)) - def Predef_??? = getMemberMethod(PredefModule, nme.???) - def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly) + def Predef_classOf = getMemberMethod(PredefModule, nme.classOf) + def Predef_identity = getMemberMethod(PredefModule, nme.identity) + def Predef_conforms = getMemberMethod(PredefModule, nme.conforms) + def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray) + def Predef_??? = getMemberMethod(PredefModule, nme.???) + def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly) /** Is `sym` a member of Predef with the given name? 
* Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def` diff --git a/test/files/instrumented/t6611.scala b/test/files/instrumented/t6611.scala index 4c52f8a5ef..821d5f3fbf 100644 --- a/test/files/instrumented/t6611.scala +++ b/test/files/instrumented/t6611.scala @@ -5,29 +5,7 @@ object Test { startProfiling() // tests optimization in Cleanup for varargs reference arrays - Array("") - - - Array(true) - Array(true, false) - Array(1: Byte) - Array(1: Byte, 2: Byte) - Array(1: Short) - Array(1: Short, 2: Short) - Array(1) - Array(1, 2) - Array(1L) - Array(1L, 2L) - Array(1d) - Array(1d, 2d) - Array(1f) - Array(1f, 2f) - - /* Not currently optimized: - Array[Int](1, 2) etc - Array(()) - Array((), ()) - */ + val a = Array("") stopProfiling() printStatistics() diff --git a/test/files/run/t6611.scala b/test/files/run/t6611.scala index c295368aea..c0297372f0 100644 --- a/test/files/run/t6611.scala +++ b/test/files/run/t6611.scala @@ -1,61 +1,6 @@ object Test extends App { - locally { - val a = Array("1") - val a2 = Array(a: _*) - assert(a ne a2) - } - - locally { - val a = Array("1": Object) - val a2 = Array(a: _*) - assert(a ne a2) - } - - locally { - val a = Array(true) - val a2 = Array(a: _*) - assert(a ne a2) - } - - locally { - val a = Array(1: Short) - val a2 = Array(a: _*) - assert(a ne a2) - } - - locally { - val a = Array(1: Byte) - val a2 = Array(a: _*) - assert(a ne a2) - } - - locally { - val a = Array(1) - val a2 = Array(a: _*) - assert(a ne a2) - } - - locally { - val a = Array(1L) - val a2 = Array(a: _*) - assert(a ne a2) - } - - locally { - val a = Array(1f) - val a2 = Array(a: _*) - assert(a ne a2) - } - - locally { - val a = Array(1d) - val a2 = Array(a: _*) - assert(a ne a2) - } - - locally { - val a = Array(()) - val a2 = Array(a: _*) - assert(a ne a2) - } + val a = Array("1") + val a2 = Array(a: _*) + a2(0) = "2" + assert(a(0) == "1") } -- cgit v1.2.3 From 0625f0cbc2e0a213df39d6b261afab49d5d23f53 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 6 Nov 2012 12:01:52 +0100 Subject: comment / question in typers --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 3d80df405d..61baa978ba 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -853,6 +853,8 @@ trait Typers extends Modes with Adaptations with Tags { orElse { _ => debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original)) val tree1 = typed(resetAllAttrs(original), mode, WildcardType) + // Q: `typed` already calls `addAnnotations` and `adapt`. the only difference here is that + // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin. 
tree1.tpe = addAnnotations(tree1, tree1.tpe) if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree) } @@ -5212,7 +5214,6 @@ trait Typers extends Modes with Adaptations with Tags { lastTreeToTyper = tree indentTyping() - var alreadyTyped = false val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass) try { @@ -5222,7 +5223,7 @@ trait Typers extends Modes with Adaptations with Tags { if (tree.hasSymbolField) tree.symbol = NoSymbol } - alreadyTyped = tree.tpe ne null + val alreadyTyped = tree.tpe ne null val tree1: Tree = if (alreadyTyped) tree else { printTyping( ptLine("typing %s: pt = %s".format(ptTree(tree), pt), -- cgit v1.2.3 From e51e9b5c5a357967f642262d88244dae550c91b2 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 6 Nov 2012 13:35:21 -0800 Subject: Removed unused imports. A dizzying number of unused imports, limited to files in src/compiler. I especially like that the unused import option (not quite ready for checkin itself) finds places where feature implicits have been imported which are no longer necessary, e.g. this commit includes half a dozen removals of "import scala.language.implicitConversions". --- .../scala/reflect/macros/runtime/Enclosures.scala | 1 - .../scala/reflect/macros/runtime/ExprUtils.scala | 1 - src/compiler/scala/reflect/reify/Errors.scala | 1 - src/compiler/scala/reflect/reify/Phases.scala | 3 +-- src/compiler/scala/reflect/reify/States.scala | 1 - src/compiler/scala/reflect/reify/Taggers.scala | 1 - .../scala/reflect/reify/codegen/GenAnnotationInfos.scala | 3 +-- src/compiler/scala/reflect/reify/codegen/GenNames.scala | 3 +-- .../scala/reflect/reify/codegen/GenPositions.scala | 3 +-- .../scala/reflect/reify/codegen/GenSymbols.scala | 1 - src/compiler/scala/reflect/reify/codegen/GenUtils.scala | 3 +-- src/compiler/scala/reflect/reify/package.scala | 7 +++---- src/compiler/scala/reflect/reify/phases/Calculate.scala | 1 - src/compiler/scala/reflect/reify/phases/Metalevels.scala | 1 - src/compiler/scala/reflect/reify/phases/Reify.scala | 4 +--- .../scala/reflect/reify/utils/NodePrinters.scala | 2 -- .../scala/reflect/reify/utils/SymbolTables.scala | 2 -- src/compiler/scala/tools/cmd/CommandLine.scala | 2 +- src/compiler/scala/tools/cmd/FromString.scala | 2 +- src/compiler/scala/tools/cmd/gen/CodegenSpec.scala | 2 -- src/compiler/scala/tools/nsc/CompilationUnits.scala | 2 +- src/compiler/scala/tools/nsc/CompileClient.scala | 1 - src/compiler/scala/tools/nsc/CompileServer.scala | 2 +- src/compiler/scala/tools/nsc/CompileSocket.scala | 6 +----- src/compiler/scala/tools/nsc/CompilerCommand.scala | 1 - src/compiler/scala/tools/nsc/Driver.scala | 8 ++++---- src/compiler/scala/tools/nsc/Global.scala | 7 ++----- src/compiler/scala/tools/nsc/Main.scala | 5 +---- src/compiler/scala/tools/nsc/MainBench.scala | 16 ++++------------ src/compiler/scala/tools/nsc/MainGenericRunner.scala | 2 -- src/compiler/scala/tools/nsc/ObjectRunner.scala | 1 - src/compiler/scala/tools/nsc/Phases.scala | 1 - src/compiler/scala/tools/nsc/ScriptRunner.scala | 1 - src/compiler/scala/tools/nsc/ast/DocComments.scala | 3 --- src/compiler/scala/tools/nsc/ast/Printers.scala | 2 -- src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala | 1 - src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 1 - src/compiler/scala/tools/nsc/ast/TreeGen.scala | 1 - src/compiler/scala/tools/nsc/ast/TreeInfo.scala | 4 ---- .../scala/tools/nsc/ast/parser/MarkupParsers.scala | 
6 ++---- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 1 - .../scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 1 - src/compiler/scala/tools/nsc/backend/JavaPlatform.scala | 1 - .../scala/tools/nsc/backend/ScalaPrimitives.scala | 1 - .../scala/tools/nsc/backend/WorklistAlgorithm.scala | 1 - .../scala/tools/nsc/backend/icode/BasicBlocks.scala | 2 +- .../tools/nsc/backend/icode/ExceptionHandlers.scala | 2 +- .../scala/tools/nsc/backend/icode/GenICode.scala | 1 - src/compiler/scala/tools/nsc/backend/icode/ICodes.scala | 2 -- .../scala/tools/nsc/backend/icode/Linearizers.scala | 1 - src/compiler/scala/tools/nsc/backend/icode/Members.scala | 2 -- src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala | 1 - .../scala/tools/nsc/backend/icode/Printers.scala | 4 ---- .../scala/tools/nsc/backend/icode/TypeKinds.scala | 1 - .../scala/tools/nsc/backend/icode/TypeStacks.scala | 2 -- .../nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 1 - .../scala/tools/nsc/backend/jvm/BytecodeWriters.scala | 3 +-- src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 3 --- src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 3 --- .../scala/tools/nsc/backend/jvm/GenJVMUtil.scala | 1 - src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala | 3 +-- .../scala/tools/nsc/backend/opt/ClosureElimination.scala | 1 - .../tools/nsc/backend/opt/DeadCodeElimination.scala | 1 - .../tools/nsc/backend/opt/InlineExceptionHandlers.scala | 1 - .../tools/nsc/dependencies/DependencyAnalysis.scala | 1 - src/compiler/scala/tools/nsc/doc/DocFactory.scala | 4 +--- src/compiler/scala/tools/nsc/doc/Settings.scala | 2 -- src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala | 2 +- src/compiler/scala/tools/nsc/doc/html/page/Index.scala | 2 -- .../scala/tools/nsc/doc/html/page/IndexScript.scala | 1 - src/compiler/scala/tools/nsc/doc/html/page/Source.scala | 2 -- .../scala/tools/nsc/doc/html/page/Template.scala | 4 ---- .../nsc/doc/html/page/diagram/DotDiagramGenerator.scala | 1 - .../tools/nsc/doc/html/page/diagram/DotRunner.scala | 2 -- src/compiler/scala/tools/nsc/doc/model/LinkTo.scala | 2 -- .../scala/tools/nsc/doc/model/MemberLookup.scala | 2 -- .../nsc/doc/model/ModelFactoryImplicitSupport.scala | 6 ------ .../tools/nsc/doc/model/ModelFactoryTypeSupport.scala | 8 -------- .../scala/tools/nsc/doc/model/comment/Body.scala | 2 -- .../tools/nsc/doc/model/comment/CommentFactory.scala | 4 +--- .../nsc/doc/model/diagram/DiagramDirectiveParser.scala | 3 --- .../tools/nsc/doc/model/diagram/DiagramFactory.scala | 1 - .../scala/tools/nsc/interactive/BuildManager.scala | 5 ----- .../scala/tools/nsc/interactive/CompilerControl.scala | 2 -- .../scala/tools/nsc/interactive/ContextTrees.scala | 1 - src/compiler/scala/tools/nsc/interactive/Global.scala | 6 +----- src/compiler/scala/tools/nsc/interactive/Picklers.scala | 8 +++----- src/compiler/scala/tools/nsc/interactive/REPL.scala | 6 +----- .../scala/tools/nsc/interactive/RangePositions.scala | 3 +-- .../tools/nsc/interactive/RefinedBuildManager.scala | 1 - .../scala/tools/nsc/interactive/SimpleBuildManager.scala | 3 --- .../tools/nsc/interactive/tests/InteractiveTest.scala | 8 -------- .../nsc/interactive/tests/InteractiveTestSettings.scala | 7 +++---- .../tools/nsc/interactive/tests/core/CoreTestDefs.scala | 1 - .../tests/core/PresentationCompilerInstance.scala | 3 +-- .../tests/core/PresentationCompilerTestDef.scala | 3 +-- .../nsc/interactive/tests/core/SourcesCollector.scala | 1 - .../tools/nsc/interpreter/AbstractFileClassLoader.scala | 2 +- 
src/compiler/scala/tools/nsc/interpreter/ByteCode.scala | 1 - .../scala/tools/nsc/interpreter/CompletionAware.scala | 2 -- .../tools/nsc/interpreter/ConsoleReaderHelper.scala | 2 -- src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala | 3 +-- src/compiler/scala/tools/nsc/interpreter/ILoop.scala | 11 +---------- src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala | 2 -- src/compiler/scala/tools/nsc/interpreter/IMain.scala | 8 +------- .../scala/tools/nsc/interpreter/InteractiveReader.scala | 2 -- .../scala/tools/nsc/interpreter/JLineCompletion.scala | 2 -- .../scala/tools/nsc/interpreter/JLineReader.scala | 2 -- .../scala/tools/nsc/interpreter/MemberHandlers.scala | 2 -- src/compiler/scala/tools/nsc/interpreter/Parsed.scala | 1 - src/compiler/scala/tools/nsc/interpreter/Phased.scala | 2 +- src/compiler/scala/tools/nsc/interpreter/Power.scala | 5 ----- .../scala/tools/nsc/interpreter/ReplGlobal.scala | 1 - .../scala/tools/nsc/interpreter/ReplStrings.scala | 2 -- src/compiler/scala/tools/nsc/interpreter/ReplVals.scala | 1 - .../scala/tools/nsc/interpreter/TypeStrings.scala | 4 ---- src/compiler/scala/tools/nsc/io/Jar.scala | 1 - src/compiler/scala/tools/nsc/io/Lexer.scala | 4 +--- src/compiler/scala/tools/nsc/io/Pickler.scala | 1 - src/compiler/scala/tools/nsc/io/Replayer.scala | 2 +- src/compiler/scala/tools/nsc/io/SourceReader.scala | 2 +- src/compiler/scala/tools/nsc/io/package.scala | 3 +-- src/compiler/scala/tools/nsc/matching/MatchSupport.scala | 3 --- .../scala/tools/nsc/matching/MatrixAdditions.scala | 4 +--- .../scala/tools/nsc/matching/ParallelMatching.scala | 5 +---- .../scala/tools/nsc/matching/PatternBindings.scala | 2 -- src/compiler/scala/tools/nsc/matching/Patterns.scala | 1 - src/compiler/scala/tools/nsc/plugins/Plugin.scala | 5 +---- src/compiler/scala/tools/nsc/reporters/Reporter.scala | 1 - src/compiler/scala/tools/nsc/scratchpad/Mixer.scala | 3 --- .../scala/tools/nsc/scratchpad/SourceInserter.scala | 2 -- src/compiler/scala/tools/nsc/settings/FscSettings.scala | 2 +- .../scala/tools/nsc/settings/MutableSettings.scala | 1 - .../scala/tools/nsc/symtab/BrowsingLoaders.scala | 1 - src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 1 - src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala | 1 - .../tools/nsc/symtab/classfile/ClassfileParser.scala | 3 +-- .../scala/tools/nsc/symtab/classfile/ICodeReader.scala | 3 --- src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala | 5 ----- src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala | 1 - .../scala/tools/nsc/transform/AddInterfaces.scala | 2 -- .../scala/tools/nsc/transform/ExtensionMethods.scala | 4 ---- src/compiler/scala/tools/nsc/transform/Flatten.scala | 2 -- .../scala/tools/nsc/transform/InlineErasure.scala | 10 ++++++---- .../scala/tools/nsc/transform/SampleTransform.scala | 1 - src/compiler/scala/tools/nsc/transform/TailCalls.scala | 2 +- .../scala/tools/nsc/transform/TypingTransformers.scala | 2 -- src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 6 +----- .../scala/tools/nsc/typechecker/ConstantFolder.scala | 2 -- .../scala/tools/nsc/typechecker/ContextErrors.scala | 3 +-- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 2 -- .../scala/tools/nsc/typechecker/DestructureTypes.scala | 2 -- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 3 +-- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 3 +-- src/compiler/scala/tools/nsc/typechecker/Macros.scala | 5 ----- .../scala/tools/nsc/typechecker/MethodSynthesis.scala | 3 +-- 
src/compiler/scala/tools/nsc/typechecker/Namers.scala | 2 -- .../scala/tools/nsc/typechecker/NamesDefaults.scala | 1 - .../scala/tools/nsc/typechecker/PatternMatching.scala | 2 -- .../scala/tools/nsc/typechecker/SyntheticMethods.scala | 2 -- .../scala/tools/nsc/typechecker/TreeCheckers.scala | 1 - .../scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2 -- src/compiler/scala/tools/nsc/util/ClassPath.scala | 1 - .../scala/tools/nsc/util/CommandLineParser.scala | 1 - src/compiler/scala/tools/nsc/util/Exceptional.scala | 2 -- src/compiler/scala/tools/nsc/util/MsilClassPath.scala | 5 +---- src/compiler/scala/tools/nsc/util/ShowPickled.scala | 2 +- .../scala/tools/reflect/MacroImplementations.scala | 3 +-- src/compiler/scala/tools/reflect/ReflectMain.scala | 3 +-- src/compiler/scala/tools/reflect/StdTags.scala | 1 - src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 12 +++--------- src/compiler/scala/tools/reflect/package.scala | 1 - src/compiler/scala/tools/util/Javap.scala | 4 +--- src/compiler/scala/tools/util/PathResolver.scala | 1 - 174 files changed, 79 insertions(+), 393 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala index be5f2dbe83..d9f337b5ba 100644 --- a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala +++ b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala @@ -5,7 +5,6 @@ trait Enclosures { self: Context => import universe._ - import mirror._ private def site = callsiteTyper.context private def enclTrees = site.enclosingContextChain map (_.tree) diff --git a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala b/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala index 672699f00e..a719beed97 100644 --- a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala +++ b/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala @@ -5,7 +5,6 @@ trait ExprUtils { self: Context => import universe._ - import mirror._ def literalNull = Expr[Null](Literal(Constant(null)))(TypeTag.Null) diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala index b8b5f8033b..2cb9f22c72 100644 --- a/src/compiler/scala/reflect/reify/Errors.scala +++ b/src/compiler/scala/reflect/reify/Errors.scala @@ -7,7 +7,6 @@ trait Errors { self: Reifier => import global._ - import definitions._ def defaultErrorPosition = { val stack = currents collect { case t: Tree if t.pos != NoPosition => t.pos } diff --git a/src/compiler/scala/reflect/reify/Phases.scala b/src/compiler/scala/reflect/reify/Phases.scala index 5a10ad9282..d43532090c 100644 --- a/src/compiler/scala/reflect/reify/Phases.scala +++ b/src/compiler/scala/reflect/reify/Phases.scala @@ -10,7 +10,6 @@ trait Phases extends Reshape self: Reifier => import global._ - import definitions._ private var alreadyRun = false @@ -41,4 +40,4 @@ trait Phases extends Reshape result } -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/reify/States.scala b/src/compiler/scala/reflect/reify/States.scala index 58455c9f3c..29bfa19845 100644 --- a/src/compiler/scala/reflect/reify/States.scala +++ b/src/compiler/scala/reflect/reify/States.scala @@ -4,7 +4,6 @@ trait States { self: Reifier => import global._ - import definitions._ /** Encapsulates reifier state * diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala index cbaee41890..af0341fd38 100644 --- a/src/compiler/scala/reflect/reify/Taggers.scala +++ 
b/src/compiler/scala/reflect/reify/Taggers.scala @@ -8,7 +8,6 @@ abstract class Taggers { import c.universe._ import definitions._ - import treeBuild._ val coreTags = Map( ByteTpe -> nme.Byte, diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala index dec491aabe..5a454e1e07 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala @@ -5,7 +5,6 @@ trait GenAnnotationInfos { self: Reifier => import global._ - import definitions._ // usually annotations are reified as their originals from Modifiers // however, when reifying free and tough types, we're forced to reify annotation infos as is @@ -52,4 +51,4 @@ trait GenAnnotationInfos { val reifiedAssocs = ann.assocs map (assoc => scalaFactoryCall(nme.Tuple2, reify(assoc._1), reifyClassfileAnnotArg(assoc._2))) mirrorFactoryCall(nme.Annotation, reify(ann.atp), mkList(reifiedArgs), mkListMap(reifiedAssocs)) } -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/reify/codegen/GenNames.scala b/src/compiler/scala/reflect/reify/codegen/GenNames.scala index 4abf88f475..7c3c1d1149 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenNames.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenNames.scala @@ -5,10 +5,9 @@ trait GenNames { self: Reifier => import global._ - import definitions._ def reifyName(name: Name) = { val factory = if (name.isTypeName) nme.nmeNewTypeName else nme.nmeNewTermName mirrorCall(factory, Literal(Constant(name.toString))) } -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala index 8c5db04454..1d151c5135 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala @@ -5,7 +5,6 @@ trait GenPositions { self: Reifier => import global._ - import definitions._ // we do not reify positions because this inflates resulting trees, but doesn't buy as anything // where would one use positions? right, in error messages @@ -14,4 +13,4 @@ trait GenPositions { // however both macros and toolboxes have their own means to report errors in synthetic trees def reifyPosition(pos: Position): Tree = reifyMirrorObject(NoPosition) -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala index 22a834d2e4..39103b801e 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala @@ -5,7 +5,6 @@ trait GenSymbols { self: Reifier => import global._ - import definitions._ /** Symbol table of the reifee. 
* diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala index 49877b4286..6554947f88 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala @@ -5,7 +5,6 @@ trait GenUtils { self: Reifier => import global._ - import definitions._ def reifyList(xs: List[Any]): Tree = mkList(xs map reify) @@ -145,4 +144,4 @@ trait GenUtils { if (origin == "") origin = "of unknown origin" origin } -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala index 1ae6df14be..7be57c0cb7 100644 --- a/src/compiler/scala/reflect/reify/package.scala +++ b/src/compiler/scala/reflect/reify/package.scala @@ -1,7 +1,6 @@ package scala.reflect -import scala.language.implicitConversions -import scala.reflect.macros.{Context, ReificationException, UnexpectedReificationException} +import scala.reflect.macros.ReificationException import scala.tools.nsc.Global package object reify { @@ -24,7 +23,8 @@ package object reify { private[reify] def mkDefaultMirrorRef(global: Global)(universe: global.Tree, typer0: global.analyzer.Typer): global.Tree = { import global._ - import definitions._ + import definitions.JavaUniverseClass + val enclosingErasure = { val rClassTree = reifyEnclosingRuntimeClass(global)(typer0) // HACK around SI-6259 @@ -71,7 +71,6 @@ package object reify { // a class/object body, this will return an EmptyTree. def reifyEnclosingRuntimeClass(global: Global)(typer0: global.analyzer.Typer): global.Tree = { import global._ - import definitions._ def isThisInScope = typer0.context.enclosingContextChain exists (_.tree.isInstanceOf[ImplDef]) if (isThisInScope) { val enclosingClasses = typer0.context.enclosingContextChain map (_.tree) collect { case classDef: ClassDef => classDef } diff --git a/src/compiler/scala/reflect/reify/phases/Calculate.scala b/src/compiler/scala/reflect/reify/phases/Calculate.scala index 4d1e22abe7..5566fd7a77 100644 --- a/src/compiler/scala/reflect/reify/phases/Calculate.scala +++ b/src/compiler/scala/reflect/reify/phases/Calculate.scala @@ -5,7 +5,6 @@ trait Calculate { self: Reifier => import global._ - import definitions._ implicit class RichCalculateSymbol(sym: Symbol) { def metalevel: Int = { assert(sym != null && sym != NoSymbol); localSymbols.getOrElse(sym, 0) } diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala index 4c6ebbb288..92d951c3a1 100644 --- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala +++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala @@ -5,7 +5,6 @@ trait Metalevels { self: Reifier => import global._ - import definitions._ /** * Makes sense of cross-stage bindings. 
diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala index dc0028be38..2741785752 100644 --- a/src/compiler/scala/reflect/reify/phases/Reify.scala +++ b/src/compiler/scala/reflect/reify/phases/Reify.scala @@ -2,7 +2,6 @@ package scala.reflect.reify package phases import scala.runtime.ScalaRunTime.isAnyVal -import scala.runtime.ScalaRunTime.isTuple import scala.reflect.reify.codegen._ trait Reify extends GenSymbols @@ -16,7 +15,6 @@ trait Reify extends GenSymbols self: Reifier => import global._ - import definitions._ private object reifyStack { def currents: List[Any] = state.reifyStack @@ -56,4 +54,4 @@ trait Reify extends GenSymbols case _ => throw new Error("reifee %s of type %s is not supported".format(reifee, reifee.getClass)) }) -} \ No newline at end of file +} diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala index dbe4ccfb6a..97ec479a6c 100644 --- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala +++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala @@ -11,8 +11,6 @@ trait NodePrinters { self: Utils => import global._ - import definitions._ - import Flag._ object reifiedNodeToString extends (Tree => String) { def apply(tree: Tree): String = { diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala index babea450c1..99118c4f2e 100644 --- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala +++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala @@ -8,8 +8,6 @@ trait SymbolTables { self: Utils => import global._ - import definitions._ - import Flag._ class SymbolTable private[SymbolTable] ( private[SymbolTable] val symtab: immutable.ListMap[Symbol, Tree] = immutable.ListMap[Symbol, Tree](), diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala index 75f96d3c4b..cf0463423c 100644 --- a/src/compiler/scala/tools/cmd/CommandLine.scala +++ b/src/compiler/scala/tools/cmd/CommandLine.scala @@ -19,7 +19,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C def this(spec: Reference, line: String) = this(spec, Parser tokenize line) def this(spec: Reference, args: Array[String]) = this(spec, args.toList) - import spec.{ isAnyOption, isUnaryOption, isBinaryOption, isExpandOption } + import spec.{ isUnaryOption, isBinaryOption, isExpandOption } val Terminator = "--" val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala index cba2e99998..2a624875ee 100644 --- a/src/compiler/scala/tools/cmd/FromString.scala +++ b/src/compiler/scala/tools/cmd/FromString.scala @@ -6,7 +6,7 @@ package scala.tools package cmd -import nsc.io.{ Path, File, Directory } +import scala.tools.nsc.io.{ File, Directory } import scala.reflect.runtime.{universe => ru} import scala.tools.reflect.StdRuntimeTags._ diff --git a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala index 903517c5b4..ee7e605425 100644 --- a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala +++ b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala @@ -12,8 +12,6 @@ trait CodegenSpec extends Spec with Meta.StdOpts with Interpolation { def referenceSpec = CodegenSpec def programInfo = Spec.Info("codegen", "", "scala.tools.cmd.gen.Codegen") - 
import FromString.ExistingDir - help("Usage: codegen []") // val inDir = "in" / "directory containing templates" --^ ExistingDir diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 4e7ba60d5e..5be819c134 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -6,7 +6,7 @@ package scala.tools.nsc import util.FreshNameCreator -import scala.reflect.internal.util.{ Position, NoPosition, BatchSourceFile, SourceFile, NoSourceFile } +import scala.reflect.internal.util.{ SourceFile, NoSourceFile } import scala.collection.mutable import scala.collection.mutable.{ LinkedHashSet, ListBuffer } diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala index 731f6926f0..c756a1b0d9 100644 --- a/src/compiler/scala/tools/nsc/CompileClient.scala +++ b/src/compiler/scala/tools/nsc/CompileClient.scala @@ -5,7 +5,6 @@ package scala.tools.nsc -import java.io.{ BufferedReader, File, InputStreamReader, PrintWriter } import settings.FscSettings import scala.tools.util.CompileOutputCommon import sys.SystemProperties.preferIPv4Stack diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index c23c1e6154..521f788fa1 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -5,7 +5,7 @@ package scala.tools.nsc -import java.io.{ BufferedOutputStream, FileOutputStream, PrintStream } +import java.io.PrintStream import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} import scala.reflect.internal.util.FakePos //Position import scala.tools.util.SocketServer diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala index 9a3e8d1530..6b55537195 100644 --- a/src/compiler/scala/tools/nsc/CompileSocket.scala +++ b/src/compiler/scala/tools/nsc/CompileSocket.scala @@ -5,13 +5,9 @@ package scala.tools.nsc -import java.io.{ IOException, FileNotFoundException, PrintWriter, FileOutputStream } -import java.io.{ BufferedReader, FileReader } -import java.util.regex.Pattern -import java.net._ +import java.io.{ FileNotFoundException, PrintWriter, FileOutputStream } import java.security.SecureRandom import io.{ File, Path, Directory, Socket } -import scala.util.control.Exception.catching import scala.tools.util.CompileOutputCommon import scala.reflect.internal.util.StringOps.splitWhere import scala.sys.process._ diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index e994150f6f..829e097714 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -5,7 +5,6 @@ package scala.tools.nsc -import scala.collection.mutable.ListBuffer import io.File /** A class representing command line info for scalac */ diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala index 1775602122..b5fd20e1cc 100644 --- a/src/compiler/scala/tools/nsc/Driver.scala +++ b/src/compiler/scala/tools/nsc/Driver.scala @@ -1,11 +1,11 @@ package scala.tools.nsc -import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} +import scala.tools.nsc.reporters.ConsoleReporter import Properties.{ versionString, copyrightString, residentPromptString } -import scala.reflect.internal.util.{ BatchSourceFile, FakePos } +import 
scala.reflect.internal.util.FakePos abstract class Driver { - + val prompt = residentPromptString val versionMsg = "Scala compiler " + @@ -68,4 +68,4 @@ abstract class Driver { sys.exit(if (reporter.hasErrors) 1 else 0) } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 401eed9f75..f0984c2ebc 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -8,12 +8,11 @@ package scala.tools.nsc import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException } import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException } import scala.compat.Platform.currentTime -import scala.tools.util.PathResolver import scala.collection.{ mutable, immutable } import io.{ SourceReader, AbstractFile, Path } import reporters.{ Reporter, ConsoleReporter } -import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning, stackTraceString } -import scala.reflect.internal.util.{ NoPosition, OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile } +import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString } +import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile } import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat } import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers } import symtab.classfile.Pickler @@ -29,8 +28,6 @@ import backend.jvm.{GenJVM, GenASM} import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination } import backend.icode.analysis._ import scala.language.postfixOps -import scala.reflect.internal.StdAttachments -import scala.reflect.ClassTag class Global(var currentSettings: Settings, var reporter: Reporter) extends SymbolTable diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala index 7d112dfb3e..5a3ea56f67 100644 --- a/src/compiler/scala/tools/nsc/Main.scala +++ b/src/compiler/scala/tools/nsc/Main.scala @@ -7,15 +7,12 @@ package scala.tools.nsc import java.io.File import File.pathSeparator - import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager } import scala.tools.nsc.io.AbstractFile -import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} -import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position} import Properties.msilLibPath /** The main class for NSC, a compiler for the programming - * language Scala. + * language Scala. */ object Main extends Driver with EvalLoop { diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala index f18ff19d7d..03190a63f3 100644 --- a/src/compiler/scala/tools/nsc/MainBench.scala +++ b/src/compiler/scala/tools/nsc/MainBench.scala @@ -5,28 +5,20 @@ package scala.tools.nsc -import java.io.File -import File.pathSeparator - -import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager } -import scala.tools.nsc.io.AbstractFile -import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} -import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position} -import Properties.{ versionString, copyrightString, residentPromptString, msilLibPath } import scala.reflect.internal.util.Statistics /** The main class for NSC, a compiler for the programming * language Scala. 
*/ object MainBench extends Driver with EvalLoop { - + lazy val theCompiler = Global(settings, reporter) - + override def newCompiler() = theCompiler - + val NIter = 50 val NBest = 10 - + override def main(args: Array[String]) = { val times = new Array[Long](NIter) var start = System.nanoTime() diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/compiler/scala/tools/nsc/MainGenericRunner.scala index e4a20b4a8c..adb03ca374 100644 --- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala +++ b/src/compiler/scala/tools/nsc/MainGenericRunner.scala @@ -5,8 +5,6 @@ package scala.tools.nsc -import java.net.URL -import scala.tools.util.PathResolver import io.{ File } import util.{ ClassPath, ScalaClassLoader } import Properties.{ versionString, copyrightString } diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala index f5123513c4..3c75429311 100644 --- a/src/compiler/scala/tools/nsc/ObjectRunner.scala +++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala @@ -8,7 +8,6 @@ package scala.tools.nsc import java.net.URL import util.ScalaClassLoader -import java.lang.reflect.InvocationTargetException import util.Exceptional.unwrap trait CommonRunner { diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala index c914344fd5..aad70a9c5e 100644 --- a/src/compiler/scala/tools/nsc/Phases.scala +++ b/src/compiler/scala/tools/nsc/Phases.scala @@ -5,7 +5,6 @@ package scala.tools.nsc -import symtab.Flags import scala.reflect.internal.util.TableDef import scala.language.postfixOps diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index 0a50b2eefb..0b307a861e 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -7,7 +7,6 @@ package scala.tools.nsc import io.{ Directory, File, Path } import java.io.IOException -import java.net.URL import scala.tools.nsc.reporters.{Reporter,ConsoleReporter} import util.Exceptional.unwrap diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index 5a4be5125d..f6cbebe10c 100755 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -7,10 +7,7 @@ package scala.tools.nsc package ast import symtab._ -import reporters._ -import scala.reflect.internal.util.{Position, NoPosition} import util.DocStrings._ -import scala.reflect.internal.Chars._ import scala.collection.mutable /* diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala index 83222a24b4..0414e0f123 100644 --- a/src/compiler/scala/tools/nsc/ast/Printers.scala +++ b/src/compiler/scala/tools/nsc/ast/Printers.scala @@ -7,8 +7,6 @@ package scala.tools.nsc package ast import java.io.{ OutputStream, PrintWriter, StringWriter, Writer } -import symtab.Flags._ -import symtab.SymbolTable trait Printers extends scala.reflect.internal.Printers { this: Global => diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index 20f9bdd47e..a22ce7affd 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -17,7 +17,6 @@ import javax.swing.tree._ import scala.concurrent.Lock import scala.text._ import symtab.Flags._ -import symtab.SymbolTable import scala.language.implicitConversions /** diff --git 
a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index 9a5b92e795..3acefe9441 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -21,7 +21,6 @@ trait TreeDSL { import global._ import definitions._ - import gen.{ scalaDot } object CODE { // Add a null check to a Tree => Tree function diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index ac1ea7afa6..983f355c58 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -8,7 +8,6 @@ package ast import scala.collection.mutable.ListBuffer import symtab.Flags._ -import symtab.SymbolTable import scala.language.postfixOps /** XXX to resolve: TreeGen only assumes global is a SymbolTable, but diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala index cbbb4c8ba8..97227a5b6e 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala @@ -7,8 +7,6 @@ package scala.tools.nsc package ast import scala.reflect.internal.HasFlags -import scala.reflect.internal.Flags._ -import symtab._ /** This class ... * @@ -19,8 +17,6 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo { val global: Global import global._ - import definitions.ThrowableClass - /** Is tree legal as a member definition of an interface? */ override def isInterfaceMember(tree: Tree): Boolean = tree match { diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index 5df6fd8482..ab2afcb403 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -10,10 +10,8 @@ import scala.collection.mutable import mutable.{ Buffer, ArrayBuffer, ListBuffer } import scala.util.control.ControlThrowable import scala.tools.nsc.util.CharArrayReader -import scala.reflect.internal.util.SourceFile -import scala.xml.{ Text, TextBuffer } +import scala.xml.TextBuffer import scala.xml.parsing.MarkupParserCommon -import scala.xml.Utility.{ isNameStart, isNameChar, isSpace } import scala.reflect.internal.Chars.{ SU, LF } // XXX/Note: many/most of the functions in here are almost direct cut and pastes @@ -51,7 +49,7 @@ trait MarkupParsers { class MarkupParser(parser: SourceFileParser, final val preserveWS: Boolean) extends MarkupParserCommon { - import Tokens.{ EMPTY, LBRACE, RBRACE } + import Tokens.{ LBRACE, RBRACE } type PositionType = Position type InputType = CharArrayReader diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 9dda05db78..efcde1f74f 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -376,7 +376,6 @@ self => * } * }}} */ - import definitions._ def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) } def emptyInit = DefDef( diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala index e8ef670222..4329ccefc7 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala @@ -11,7 +11,6 @@ import scala.xml.{ EntityRef, Text } import scala.xml.XML.{ xmlns } import 
symtab.Flags.MUTABLE import scala.reflect.internal.util.StringOps.splitWhere -import scala.language.implicitConversions /** This class builds instance of `Tree` that represent XML. * diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index fc5d4372c5..fd4366baf1 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -8,7 +8,6 @@ package backend import io.AbstractFile import util.{ClassPath,JavaClassPath,MergedClassPath,DeltaClassPath} -import util.ClassPath.{ JavaContext, DefaultJavaContext } import scala.tools.util.PathResolver trait JavaPlatform extends Platform { diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala index 393f081f74..f6b0701f86 100644 --- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala +++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package backend -import scala.tools.nsc.backend.icode._ import scala.collection.{ mutable, immutable } /** Scala primitive operations are represented as methods in `Any` and diff --git a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala index 798a80ea37..93b37f415d 100644 --- a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala +++ b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package backend -import scala.tools.nsc.ast._ import scala.collection.{ mutable, immutable } /** diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index 068836fe4f..b62d5cb4e4 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -17,7 +17,7 @@ trait BasicBlocks { self: ICodes => import opcodes._ - import global.{ ifDebug, settings, log, nme } + import global.{ settings, log, nme } import nme.isExceptionResultName /** Override Array creation for efficiency (to not go through reflection). */ diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala index 2cebf7ad99..f35996eeb9 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala @@ -7,7 +7,7 @@ package scala.tools.nsc package backend package icode -import scala.collection.{ mutable, immutable } +import scala.collection.immutable /** * Exception handlers are pieces of code that `handle` exceptions on diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 9524309e25..720896d0b3 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -13,7 +13,6 @@ import scala.collection.mutable.{ ListBuffer, Buffer } import scala.tools.nsc.symtab._ import scala.annotation.switch import PartialFunction._ -import scala.language.postfixOps /** This class ... 
* diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala index 19a700f452..7c6f2a0620 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala @@ -8,8 +8,6 @@ package backend package icode import java.io.PrintWriter -import scala.collection.mutable -import scala.tools.nsc.symtab._ import analysis.{ Liveness, ReachingDefinitions } import scala.tools.nsc.symtab.classfile.ICodeReader diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala index a38eab4515..b8a98955c9 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala @@ -8,7 +8,6 @@ package scala.tools.nsc package backend package icode -import scala.tools.nsc.ast._ import scala.collection.{ mutable, immutable } import mutable.ListBuffer diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala index 7ba212f42e..07abe9d74f 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala @@ -7,10 +7,8 @@ package scala.tools.nsc package backend package icode -import java.io.PrintWriter import scala.collection.{ mutable, immutable } import scala.reflect.internal.util.{ SourceFile, NoSourceFile } -import symtab.Flags.{ DEFERRED } trait ReferenceEquality { override def hashCode = System.identityHashCode(this) diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala index 8c9a72638d..796e3a5629 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala @@ -9,7 +9,6 @@ package scala.tools.nsc package backend package icode -import scala.tools.nsc.ast._ import scala.reflect.internal.util.{Position,NoPosition} /* diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala index 6cac641e3e..61af6e5119 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala @@ -8,13 +8,9 @@ package backend package icode import java.io.PrintWriter -import scala.tools.nsc.symtab.Flags -import scala.reflect.internal.util.Position trait Printers { self: ICodes => import global._ - import global.icodes.opcodes._ - import global.icodes._ class TextPrinter(writer: PrintWriter, lin: Linearizer) { private var margin = 0 diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index 353cc6dd0a..f96dce9f1c 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -140,7 +140,6 @@ trait TypeKinds { self: ICodes => * sifting through the parents for a class type. 
*/ def lub0(tk1: TypeKind, tk2: TypeKind): Type = enteringUncurry { - import definitions._ val tp = global.lub(List(tk1.toType, tk2.toType)) val (front, rest) = tp.parents span (_.typeSymbol.isTrait) diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala index 23d3d05c64..c1bf4304ea 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala @@ -15,8 +15,6 @@ package icode trait TypeStacks { self: ICodes => - import opcodes._ - /* This class simulates the type of the operand * stack of the ICode. */ diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 26363a4170..15755f31ad 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -68,7 +68,6 @@ abstract class TypeFlowAnalysis { * names to types and a type stack. */ object typeFlowLattice extends SemiLattice { - import icodes._ type Elem = IState[VarBinding, icodes.TypeStack] val top = new Elem(new VarBinding, typeStackLattice.top) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala index d6410cb317..8c8950d295 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala @@ -10,8 +10,7 @@ import java.io.{ DataOutputStream, FileOutputStream, OutputStream, File => JFile import scala.tools.nsc.io._ import scala.tools.nsc.util.ScalaClassLoader import scala.tools.util.JavapClass -import java.util.jar.{ JarEntry, JarOutputStream, Attributes } -import Attributes.Name +import java.util.jar.Attributes.Name import scala.language.postfixOps /** For the last mile: turning generated bytecode in memory into diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 68701ddd2e..8e7b9f2ba2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -6,12 +6,9 @@ package scala.tools.nsc package backend.jvm -import java.nio.ByteBuffer import scala.collection.{ mutable, immutable } import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer } import scala.tools.nsc.symtab._ -import scala.tools.nsc.io.AbstractFile - import scala.tools.asm import asm.Label diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index 91796072d2..7fde3e1eaa 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package backend.jvm -import java.io.{ByteArrayOutputStream, DataOutputStream, OutputStream } import java.nio.ByteBuffer import scala.collection.{ mutable, immutable } import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer } @@ -16,8 +15,6 @@ import scala.reflect.internal.ClassfileConstants._ import ch.epfl.lamp.fjbg._ import JAccessFlags._ import JObjectType.{ JAVA_LANG_STRING, JAVA_LANG_OBJECT } -import java.util.jar.{ JarEntry, JarOutputStream } -import scala.tools.nsc.io.AbstractFile import scala.language.postfixOps /** This class ... 
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala index e002a614bd..613f8f893e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala @@ -14,7 +14,6 @@ trait GenJVMUtil { import global._ import icodes._ - import icodes.opcodes._ import definitions._ /** Map from type kinds to the Java reference types. It is used for diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala index 29fc32e492..93a1252553 100644 --- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala +++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala @@ -8,7 +8,6 @@ package scala.tools.nsc package backend.msil import java.io.{File, IOException} -import java.nio.{ByteBuffer, ByteOrder} import scala.collection.{ mutable, immutable } import scala.tools.nsc.symtab._ @@ -312,7 +311,7 @@ abstract class GenMSIL extends SubComponent { /* def getAttributeArgs(consts: List[Constant], nvPairs: List[(Name, Constant)]): Array[Byte] = { val buf = ByteBuffer.allocate(2048) // FIXME: this may be not enough! - buf.order(ByteOrder.LITTLE_ENDIAN) + buf.order(java.nio.ByteOrder.LITTLE_ENDIAN) buf.putShort(1.toShort) // signature def emitSerString(str: String) = { diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala index 49d6e1fd4b..8d6de821bb 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala @@ -7,7 +7,6 @@ package scala.tools.nsc package backend.opt import scala.tools.nsc.backend.icode.analysis.LubException -import scala.tools.nsc.symtab._ /** * @author Iulian Dragos diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index 5aa6e0f2da..f7e743a6f1 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -8,7 +8,6 @@ package scala.tools.nsc package backend.opt import scala.collection.{ mutable, immutable } -import symtab._ /** */ diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala index ab238af239..c534c2230c 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala @@ -4,7 +4,6 @@ package scala.tools.nsc package backend.opt -import scala.util.control.Breaks._ /** * This optimization phase inlines the exception handlers so that further phases can optimize the code better diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala index ad6ca68fec..4d4b6589a0 100644 --- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala +++ b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala @@ -3,7 +3,6 @@ package dependencies import io.Path import scala.collection._ -import symtab.Flags import scala.tools.nsc.io.AbstractFile import scala.reflect.internal.util.SourceFile diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala index 642e330a57..a091bc3e62 
100644 --- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala @@ -8,9 +8,7 @@ package doc import scala.util.control.ControlThrowable import reporters.Reporter -import scala.reflect.internal.util.{ NoPosition, BatchSourceFile} -import io.{ File, Directory } -import DocParser.Parsed +import scala.reflect.internal.util.BatchSourceFile /** A documentation processor controls the process of generating Scala * documentation, which is as follows. diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala index 10a0d8d879..16f3c3776b 100644 --- a/src/compiler/scala/tools/nsc/doc/Settings.scala +++ b/src/compiler/scala/tools/nsc/doc/Settings.scala @@ -6,9 +6,7 @@ package scala.tools.nsc package doc -import java.io.File import java.net.URI -import java.lang.System import scala.language.postfixOps /** An extended version of compiler settings, with additional Scaladoc-specific options. diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala index e8131e242b..c898348526 100644 --- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala +++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala @@ -10,7 +10,7 @@ package html import model._ import comment._ -import scala.xml.{XML, NodeSeq} +import scala.xml.NodeSeq import scala.xml.dtd.{DocType, PublicID} import scala.collection._ import java.io.Writer diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala index 86407fb9a3..daa9df690c 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala @@ -9,10 +9,8 @@ package html package page import model._ - import scala.collection._ import scala.xml._ -import scala.util.parsing.json.{JSONObject, JSONArray} class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage { diff --git a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala index a205e02533..e3c94505ab 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala @@ -8,7 +8,6 @@ package scala.tools.nsc.doc.html.page import scala.tools.nsc.doc import scala.tools.nsc.doc.model.{Package, DocTemplateEntity} import scala.tools.nsc.doc.html.{Page, HtmlFactory} -import java.nio.channels.Channels import scala.util.parsing.json.{JSONObject, JSONArray} class IndexScript(universe: doc.Universe, index: doc.Index) extends Page { diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala index 807a1bc11a..1d6404e1a4 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala @@ -8,8 +8,6 @@ package doc package html package page -import model._ -import comment._ import scala.xml.{NodeSeq, Unparsed} import java.io.File diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala index 7d5566c897..3f40e2cd0a 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala @@ -8,10 +8,6 @@ package doc package html package page -import model._ -import model.diagram._ -import diagram._ - import scala.xml.{ NodeSeq, Text, 
UnprefixedAttribute } import scala.language.postfixOps diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala index f4608bdb8e..df7c7d3dcd 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala @@ -10,7 +10,6 @@ package diagram import scala.xml.{NodeSeq, XML, PrefixedAttribute, Elem, MetaData, Null, UnprefixedAttribute} import scala.collection.immutable._ -import javax.xml.parsers.SAXParser import model._ import model.diagram._ diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala index be7c27a4ae..2fa1bf62f3 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala @@ -10,12 +10,10 @@ import java.io.InputStreamReader import java.io.OutputStreamWriter import java.io.BufferedWriter import java.io.BufferedReader -import java.io.IOException import scala.sys.process._ import scala.concurrent.SyncVar import model._ -import model.diagram._ /** This class takes care of running the graphviz dot utility */ class DotRunner(settings: doc.Settings) { diff --git a/src/compiler/scala/tools/nsc/doc/model/LinkTo.scala b/src/compiler/scala/tools/nsc/doc/model/LinkTo.scala index 6c13d5a6d3..361837b743 100644 --- a/src/compiler/scala/tools/nsc/doc/model/LinkTo.scala +++ b/src/compiler/scala/tools/nsc/doc/model/LinkTo.scala @@ -6,8 +6,6 @@ package scala.tools.nsc package doc package model -import scala.collection._ - abstract sealed class LinkTo final case class LinkToTpl(tpl: DocTemplateEntity) extends LinkTo final case class LinkToMember(mbr: MemberEntity, inTpl: DocTemplateEntity) extends LinkTo diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala index 2a28d4c589..4793716b9f 100644 --- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala +++ b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala @@ -4,8 +4,6 @@ package model import comment._ -import scala.reflect.internal.util.FakePos //Position - /** This trait extracts all required information for documentation from compilation units */ trait MemberLookup { thisFactory: ModelFactory => diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index 73db25405d..c1ca8c1448 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -13,12 +13,7 @@ package model import comment._ import scala.collection._ -import scala.util.matching.Regex - import symtab.Flags -import io._ - -import model.{ RootPackage => RootPackageEntity } /** * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them. 
@@ -58,7 +53,6 @@ trait ModelFactoryImplicitSupport { import global._ import global.analyzer._ import global.definitions._ - import rootMirror.{RootPackage, RootClass, EmptyPackage, EmptyPackageClass} import settings.hardcoded // debugging: diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala index 38b3855b69..1876415f2a 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala @@ -9,13 +9,6 @@ import comment._ import diagram._ import scala.collection._ -import scala.util.matching.Regex - -import symtab.Flags - -import io._ - -import model.{ RootPackage => RootPackageEntity } /** This trait extracts all required information for documentation from compilation units */ trait ModelFactoryTypeSupport { @@ -28,7 +21,6 @@ trait ModelFactoryTypeSupport { import global._ import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass } - import rootMirror.{ RootPackage, RootClass, EmptyPackage } protected val typeCache = new mutable.LinkedHashMap[Type, TypeEntity] diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala index 3e5e634e18..8848af95eb 100644 --- a/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala +++ b/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala @@ -10,8 +10,6 @@ package comment import scala.collection._ -import java.net.URL - /** A body of text. A comment has a single body, which is composed of * at least one block. Inside every body is exactly one summary (see * [[scala.tools.nsc.doc.model.comment.Summary]]). */ diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala index d22065f846..9617b15068 100644 --- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala @@ -8,11 +8,9 @@ package doc package model package comment -import reporters.Reporter import scala.collection._ import scala.util.matching.Regex -import scala.annotation.switch -import scala.reflect.internal.util.{NoPosition, Position} +import scala.reflect.internal.util.Position import scala.language.postfixOps /** The comment parser transforms raw comment strings into `Comment` objects. 
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala index 7f8268c7c5..6a6c60fb3e 100644 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala +++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala @@ -7,9 +7,6 @@ import comment.CommentFactory import java.util.regex.{Pattern, Matcher} import scala.util.matching.Regex -// statistics -import html.page.diagram.DiagramStats - /** * This trait takes care of parsing @{inheritance, content}Diagram annotations * diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala index 78bff9d349..849a2ac4b3 100644 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala @@ -4,7 +4,6 @@ package diagram import model._ import comment.CommentFactory -import scala.collection.mutable // statistics import html.page.diagram.DiagramStats diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala index 3e7ac573e9..a3f76994bc 100644 --- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala +++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala @@ -7,11 +7,6 @@ package scala.tools.nsc package interactive import scala.collection._ - -import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} -import scala.reflect.internal.util.FakePos - -import dependencies._ import io.AbstractFile import scala.language.implicitConversions diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala index b4af8f00d6..f3cd41f32f 100644 --- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala +++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala @@ -7,8 +7,6 @@ package interactive import scala.util.control.ControlThrowable import scala.tools.nsc.io.AbstractFile -import scala.tools.nsc.symtab._ -import scala.tools.nsc.ast._ import scala.tools.nsc.util.FailedInterrupt import scala.tools.nsc.util.EmptyAction import scala.tools.nsc.util.WorkScheduler diff --git a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala index b2568e34bd..93ef4c4d6c 100644 --- a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala +++ b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package interactive import scala.collection.mutable.ArrayBuffer -import scala.reflect.internal.util.Position trait ContextTrees { self: Global => diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index c86bcae412..92673157e4 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -8,17 +8,13 @@ package interactive import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter } import scala.collection.mutable import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet} -import scala.concurrent.SyncVar import scala.util.control.ControlThrowable import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer } -import scala.tools.nsc.util.{ WorkScheduler, MultiHashMap } +import 
scala.tools.nsc.util.MultiHashMap import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, RangePosition, NoPosition } import scala.tools.nsc.reporters._ import scala.tools.nsc.symtab._ -import scala.tools.nsc.ast._ -import scala.tools.nsc.io.Pickler._ import scala.tools.nsc.typechecker.DivergentImplicit -import scala.annotation.tailrec import symtab.Flags.{ACCESSOR, PARAMACCESSOR} import scala.language.implicitConversions diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala index ffad19fbaa..1dc891b984 100644 --- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala +++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala @@ -6,12 +6,10 @@ package scala.tools.nsc package interactive import util.InterruptReq -import scala.reflect.internal.util.{SourceFile, BatchSourceFile} -import io.{AbstractFile, PlainFile} - +import scala.reflect.internal.util.{ SourceFile, BatchSourceFile } +import io.{ AbstractFile, PlainFile, Pickler, CondPickler } import util.EmptyAction -import scala.reflect.internal.util.{Position, RangePosition, NoPosition, OffsetPosition, TransparentPosition} -import io.{Pickler, CondPickler} +import scala.reflect.internal.util.{ RangePosition, OffsetPosition, TransparentPosition } import io.Pickler._ import scala.collection.mutable import mutable.ListBuffer diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala index dacfa679dd..d1a29aeb07 100644 --- a/src/compiler/scala/tools/nsc/interactive/REPL.scala +++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala @@ -5,15 +5,11 @@ package scala.tools.nsc package interactive -import scala.concurrent.SyncVar import scala.reflect.internal.util._ -import scala.tools.nsc.symtab._ -import scala.tools.nsc.ast._ import scala.tools.nsc.reporters._ import scala.tools.nsc.io._ import scala.tools.nsc.scratchpad.SourceInserter -import scala.tools.nsc.interpreter.AbstractFileClassLoader -import java.io.{File, FileWriter} +import java.io.FileWriter /** Interface of interactive compiler to a client such as an IDE */ diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala index b95f1fa7ca..ecaa793da7 100644 --- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala +++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala @@ -8,7 +8,6 @@ package interactive import ast.Trees import ast.Positions import scala.reflect.internal.util.{SourceFile, Position, RangePosition, NoPosition} -import scala.tools.nsc.util.WorkScheduler import scala.collection.mutable.ListBuffer /** Handling range positions @@ -60,7 +59,7 @@ self: scala.tools.nsc.Global => } // -------------- ensuring no overlaps ------------------------------- - + /** Ensure that given tree has no positions that overlap with * any of the positions of `others`. 
This is done by * shortening the range, assigning TransparentPositions diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala index 3ccf482f0f..9873276f05 100644 --- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala +++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala @@ -12,7 +12,6 @@ import scala.util.control.Breaks._ import scala.tools.nsc.symtab.Flags import dependencies._ -import scala.reflect.internal.util.FakePos import util.ClassPath import io.AbstractFile import scala.tools.util.PathResolver diff --git a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala index 465dcaaf1c..ff25dac7ac 100644 --- a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala +++ b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala @@ -8,9 +8,6 @@ package interactive import scala.collection._ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} -import dependencies._ - -import scala.reflect.internal.util.FakePos import io.AbstractFile /** A simple build manager, using the default scalac dependency tracker. diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala index 360e17acee..f2614bcc42 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala @@ -7,14 +7,6 @@ package interactive package tests import core._ - -import java.io.File.pathSeparatorChar -import java.io.File.separatorChar - -import scala.annotation.migration -import scala.reflect.internal.util.Position -import scala.reflect.internal.util.SourceFile - import scala.collection.mutable.ListBuffer /** A base class for writing interactive compiler tests. diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala index 4d85ab9d88..ad5c61b2b0 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala @@ -25,7 +25,6 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst * test. 
*/ override protected def prepareSettings(settings: Settings) { - import java.io.File._ def adjustPaths(paths: settings.PathSetting*) { for (p <- paths if argsString.contains(p.name)) p.value = p.value.map { case '/' => separatorChar @@ -45,10 +44,10 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst case _ => () } - // Make the --sourcepath path provided in the .flags file (if any) relative to the test's base directory + // Make the --sourcepath path provided in the .flags file (if any) relative to the test's base directory if(settings.sourcepath.isSetByUser) settings.sourcepath.value = (baseDir / Path(settings.sourcepath.value)).path - + adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath) } @@ -67,4 +66,4 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst reporter.println("\targsString: %s".format(argsString)) super.printClassPath(reporter) } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala index 704d014eb9..9085eb56e6 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala @@ -3,7 +3,6 @@ package interactive package tests.core import scala.reflect.internal.util.Position -import scala.tools.nsc.interactive.tests.core._ /** Set of core test definitions that are executed for each test run. */ private[tests] trait CoreTestDefs diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala index 5c1837b3bf..b3f80168ff 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala @@ -3,7 +3,6 @@ package interactive package tests.core import reporters.{Reporter => CompilerReporter} -import scala.reflect.internal.util.Position /** Trait encapsulating the creation of a presentation compiler's instance.*/ private[tests] trait PresentationCompilerInstance extends TestSettings { @@ -28,4 +27,4 @@ private[tests] trait PresentationCompilerInstance extends TestSettings { reporter.println("\tbootClassPath: %s".format(settings.bootclasspath.value)) reporter.println("\tverbose: %b".format(settings.verbose.value)) } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala index 9cf2aa4fe4..4d5b4e1129 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala @@ -1,6 +1,5 @@ package scala.tools.nsc.interactive.tests.core -import scala.tools.nsc.interactive.Global import scala.reflect.internal.util.Position trait PresentationCompilerTestDef { @@ -16,4 +15,4 @@ trait PresentationCompilerTestDef { protected def format(pos: Position): String = (if(pos.isDefined) "(%d,%d)".format(pos.line, pos.column) else "") -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala 
index 471a05a44d..676feeba8a 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala @@ -4,7 +4,6 @@ import scala.reflect.internal.util.{SourceFile,BatchSourceFile} import scala.tools.nsc.io.{AbstractFile,Path} private[tests] object SourcesCollector { - import Path._ type SourceFilter = Path => Boolean /** diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala index 638bca8a72..fcb485defd 100644 --- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala @@ -5,7 +5,7 @@ package scala.tools.nsc package interpreter -import scala.tools.nsc.io.{ File, AbstractFile } +import scala.tools.nsc.io.AbstractFile import util.ScalaClassLoader import java.net.URL import scala.collection.{ mutable, immutable } diff --git a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala index 40e9d3d600..014661e525 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala @@ -7,7 +7,6 @@ package scala.tools.nsc package interpreter import java.lang.reflect -import java.util.concurrent.ConcurrentHashMap import util.ScalaClassLoader import ScalaClassLoader.appLoader import scala.reflect.NameTransformer._ diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala index ab96f415db..3a0b48ef57 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala @@ -6,8 +6,6 @@ package scala.tools.nsc package interpreter -import scala.reflect.NameTransformer - /** An interface for objects which are aware of tab completion and * will supply their own candidates and resolve their own paths. 
*/ diff --git a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala index 07e36f4f27..b5850d901c 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala @@ -7,8 +7,6 @@ package scala.tools.nsc package interpreter import scala.tools.jline.console.{ ConsoleReader, CursorBuffer } -import scala.tools.jline.console.completer.CompletionHandler -import Completion._ trait ConsoleReaderHelper extends ConsoleReader { def currentLine = "" + getCursorBuffer.buffer diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala index bb8c50d6fd..4b084c71a9 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package interpreter -import scala.reflect.internal.util.BatchSourceFile import scala.tools.nsc.ast.parser.Tokens.EOF trait ExprTyper { @@ -15,7 +14,7 @@ trait ExprTyper { import repl._ import global.{ reporter => _, Import => _, _ } import definitions._ - import syntaxAnalyzer.{ UnitParser, UnitScanner, token2name } + import syntaxAnalyzer.{ UnitParser, UnitScanner } import naming.freshInternalVarName object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] { diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index fb3578713a..350cc364ab 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -8,23 +8,17 @@ package interpreter import Predef.{ println => _, _ } import java.io.{ BufferedReader, FileReader } -import java.util.concurrent.locks.ReentrantLock -import scala.sys.process.Process import session._ import scala.util.Properties.{ jdkHome, javaVersion } import scala.tools.util.{ Javap } -import scala.annotation.tailrec -import scala.collection.mutable.ListBuffer -import scala.concurrent.ops import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream } -import interpreter._ import io.{ File, Directory } import scala.reflect.NameTransformer._ import util.ScalaClassLoader import ScalaClassLoader._ import scala.tools.util._ import scala.language.{implicitConversions, existentials} -import scala.reflect.{ClassTag, classTag} +import scala.reflect.classTag import scala.tools.reflect.StdRuntimeTags._ /** The Scala interactive shell. It provides a read-eval-print loop @@ -791,9 +785,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) // we can get at it in generated code. 
addThunk(intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain]))) addThunk({ - import scala.tools.nsc.io._ - import Properties.userHome - import scala.compat.Platform.EOL val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp()) if (autorun.isDefined) intp.quietRun(autorun.get) }) diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala index e3c0494fa3..b6c0f42abe 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala @@ -6,8 +6,6 @@ package scala.tools.nsc package interpreter -import scala.reflect.internal.util.Position -import scala.util.control.Exception.ignoring import scala.tools.nsc.util.stackTraceString /** diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 985d9677ac..db27531595 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -11,21 +11,15 @@ import util.stringFromWriter import scala.reflect.internal.util._ import java.net.URL import scala.sys.BooleanProp -import io.VirtualDirectory import scala.tools.nsc.io.AbstractFile import reporters._ -import symtab.Flags -import scala.reflect.internal.Names import scala.tools.util.PathResolver import scala.tools.nsc.util.ScalaClassLoader import ScalaClassLoader.URLClassLoader import scala.tools.nsc.util.Exceptional.unwrap import scala.collection.{ mutable, immutable } -import scala.util.control.Exception.{ ultimately } import IMain._ import java.util.concurrent.Future -import typechecker.Analyzer -import scala.language.implicitConversions import scala.reflect.runtime.{ universe => ru } import scala.reflect.{ ClassTag, classTag } import scala.tools.reflect.StdRuntimeTags._ @@ -179,7 +173,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } import global._ - import definitions.{ScalaPackage, JavaLangPackage, termMember, typeMember} + import definitions.{ termMember, typeMember } import rootMirror.{RootClass, getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass} implicit class ReplTypeOps(tp: Type) { diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala index 8331fddca6..6513381d77 100644 --- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala @@ -7,8 +7,6 @@ package scala.tools.nsc package interpreter import java.io.IOException -import java.nio.channels.ClosedByInterruptException -import scala.util.control.Exception._ import session.History import InteractiveReader._ import Properties.isMac diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala index bb19a4b48e..5ee5e5526d 100644 --- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala +++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala @@ -6,8 +6,6 @@ package scala.tools.nsc package interpreter -import scala.tools.jline._ -import scala.tools.jline.console.completer._ import Completion._ import scala.collection.mutable.ListBuffer import scala.reflect.internal.util.StringOps.longestCommonPrefix diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala 
b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala index 10f972452f..e033bab03b 100644 --- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala @@ -9,9 +9,7 @@ package interpreter import scala.tools.jline.console.ConsoleReader import scala.tools.jline.console.completer._ import session._ -import scala.collection.JavaConverters._ import Completion._ -import io.Streamable.slurp /** * Reads from the console using JLine. diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala index c3720db1b4..6348e428f8 100644 --- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -7,8 +7,6 @@ package scala.tools.nsc package interpreter import scala.collection.{ mutable, immutable } -import scala.PartialFunction.cond -import scala.reflect.internal.Chars import scala.reflect.internal.Flags._ import scala.language.implicitConversions diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala index b0be956df8..24c01e9ae6 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package interpreter -import scala.tools.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList } import util.returning /** One instance of a command buffer. diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/compiler/scala/tools/nsc/interpreter/Phased.scala index f60dc79a04..e6b780f177 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Phased.scala @@ -6,7 +6,7 @@ package scala.tools.nsc package interpreter -import scala.collection.{ mutable, immutable } +import scala.collection.immutable import scala.language.implicitConversions /** Mix this into an object and use it as a phasing diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala index af1cbd24eb..0af295c8af 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Power.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala @@ -8,8 +8,6 @@ package interpreter import scala.collection.{ mutable, immutable } import scala.util.matching.Regex -import scala.reflect.internal.util.{ BatchSourceFile } -import session.{ History } import scala.io.Codec import java.net.{ URL, MalformedURLException } import io.{ Path } @@ -48,7 +46,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re import intp.{ beQuietDuring, typeOfExpression, interpret, parse } import intp.global._ import definitions.{ compilerTypeFromTag, compilerSymbolFromTag} - import rootMirror.{ getClassIfDefined, getModuleIfDefined } abstract class SymSlurper { def isKeep(sym: Symbol): Boolean @@ -283,8 +280,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re abstract class PrettifierClass[T: Prettifier]() { val pretty = implicitly[Prettifier[T]] - import pretty._ - def value: Seq[T] def pp(f: Seq[T] => Seq[T]): Unit = diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala index 7c698a2f3e..16b22869e7 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala +++ 
b/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package interpreter -import reporters._ import typechecker.Analyzer /** A layer on top of Global so I can guarantee some extra diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala index f8ecc6c6fe..670bbf9bae 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala @@ -6,8 +6,6 @@ package scala.tools.nsc package interpreter -import scala.collection.{ mutable, immutable } -import scala.PartialFunction.cond import scala.reflect.internal.Chars trait ReplStrings { diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala index 53478bdc5d..ea100b25f2 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala @@ -57,7 +57,6 @@ object ReplVals { */ def mkCompilerTypeFromTag[T <: Global](global: T) = { import global._ - import definitions._ /** We can't use definitions.compilerTypeFromTag directly because we're passing * it to map and the compiler refuses to perform eta expansion on a method diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala index 558eba8d42..9fb79a9d6f 100644 --- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala @@ -13,15 +13,12 @@ import NameTransformer._ import scala.reflect.runtime.{universe => ru} import scala.reflect.{ClassTag, classTag} import typechecker.DestructureTypes -import scala.reflect.internal.util.StringOps.ojoin -import scala.language.implicitConversions /** A more principled system for turning types into strings. 
*/ trait StructuredTypeStrings extends DestructureTypes { val global: Global import global._ - import definitions._ case class LabelAndType(label: String, typeName: String) { } object LabelAndType { @@ -48,7 +45,6 @@ trait StructuredTypeStrings extends DestructureTypes { l1 +: l2 :+ l3 mkString "\n" } private def maybeBlock(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = { - import grouping._ val threshold = 70 val try1 = str(level)(name + grouping.join(nodes map (_.show(0, grouping.labels)): _*)) diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index e919621338..ef2c9b13c0 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -10,7 +10,6 @@ import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException, import java.util.jar._ import scala.collection.JavaConverters._ import Attributes.Name -import util.ClassPath import scala.language.implicitConversions // Attributes.Name instances: diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala index 5ffb5b4d4f..e843f8d5ce 100644 --- a/src/compiler/scala/tools/nsc/io/Lexer.scala +++ b/src/compiler/scala/tools/nsc/io/Lexer.scala @@ -1,8 +1,6 @@ package scala.tools.nsc.io -import java.io.{Reader, Writer, StringReader, StringWriter} -import scala.collection.mutable.{Buffer, ArrayBuffer} -import scala.math.BigInt +import java.io.Reader /** Companion object of class `Lexer` which defines tokens and some utility concepts * used for tokens and lexers diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala index b03a921e87..56ff4a57ee 100644 --- a/src/compiler/scala/tools/nsc/io/Pickler.scala +++ b/src/compiler/scala/tools/nsc/io/Pickler.scala @@ -1,6 +1,5 @@ package scala.tools.nsc.io -import scala.annotation.unchecked import Lexer._ import java.io.Writer import scala.language.implicitConversions diff --git a/src/compiler/scala/tools/nsc/io/Replayer.scala b/src/compiler/scala/tools/nsc/io/Replayer.scala index 5cb61b6cb1..e3dc8939a3 100644 --- a/src/compiler/scala/tools/nsc/io/Replayer.scala +++ b/src/compiler/scala/tools/nsc/io/Replayer.scala @@ -3,7 +3,7 @@ package scala.tools.nsc.io import java.io.{Reader, Writer} import Pickler._ -import Lexer.{Token, EOF} +import Lexer.EOF abstract class LogReplay { def logreplay(event: String, x: => Boolean): Boolean diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala index 569270f530..af745eb3e8 100644 --- a/src/compiler/scala/tools/nsc/io/SourceReader.scala +++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala @@ -9,7 +9,7 @@ package io import java.io.{ FileInputStream, InputStream, IOException } import java.nio.{ByteBuffer, CharBuffer} -import java.nio.channels.{FileChannel, ReadableByteChannel, Channels} +import java.nio.channels.{ ReadableByteChannel, Channels } import java.nio.charset.{CharsetDecoder, CoderResult} import scala.tools.nsc.reporters._ diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala index 711696bb6e..c29a7c96df 100644 --- a/src/compiler/scala/tools/nsc/io/package.scala +++ b/src/compiler/scala/tools/nsc/io/package.scala @@ -7,7 +7,6 @@ package scala.tools.nsc import java.util.concurrent.{ Future, Callable } import java.util.{ Timer, TimerTask } -import java.util.jar.{ Attributes } import scala.language.implicitConversions package 
object io { @@ -27,7 +26,7 @@ package object io { type VirtualFile = scala.reflect.io.VirtualFile val ZipArchive = scala.reflect.io.ZipArchive type ZipArchive = scala.reflect.io.ZipArchive - + implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning type JManifest = java.util.jar.Manifest diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala index 5ca9fd5062..5ce1aabcd8 100644 --- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala +++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala @@ -6,9 +6,6 @@ package scala.tools.nsc package matching -import transform.ExplicitOuter -import ast.{ Printers, Trees } -import java.io.{ StringWriter, PrintWriter } import scala.annotation.elidable import scala.language.postfixOps diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala index 7220253003..b1ca6e7b5a 100644 --- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala +++ b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala @@ -7,7 +7,6 @@ package scala.tools.nsc package matching import transform.ExplicitOuter -import PartialFunction._ /** Traits which are mixed into MatchMatrix, but separated out as * (somewhat) independent components to keep them on the sidelines. @@ -17,7 +16,6 @@ trait MatrixAdditions extends ast.TreeDSL { import global.{ typer => _, _ } import symtab.Flags - import CODE._ import Debug._ import treeInfo._ import definitions.{ isPrimitiveValueClass } @@ -190,4 +188,4 @@ trait MatrixAdditions extends ast.TreeDSL { } } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala index 9d01e73063..ea4d9cd3f4 100644 --- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala +++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala @@ -9,11 +9,8 @@ package matching import PartialFunction._ import scala.collection.{ mutable } -import scala.reflect.internal.util.Position import transform.ExplicitOuter -import symtab.Flags import mutable.ListBuffer -import scala.annotation.elidable import scala.language.postfixOps trait ParallelMatching extends ast.TreeDSL @@ -26,7 +23,7 @@ trait ParallelMatching extends ast.TreeDSL import global.{ typer => _, _ } import definitions.{ - AnyRefClass, IntClass, BooleanClass, SomeClass, OptionClass, + IntClass, BooleanClass, SomeClass, OptionClass, getProductArgs, productProj, Object_eq, Any_asInstanceOf } import CODE._ diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala index 5d3b4027de..3ff5ce83bb 100644 --- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala +++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala @@ -7,7 +7,6 @@ package scala.tools.nsc package matching import transform.ExplicitOuter -import PartialFunction._ import scala.language.postfixOps trait PatternBindings extends ast.TreeDSL @@ -17,7 +16,6 @@ trait PatternBindings extends ast.TreeDSL import global.{ typer => _, _ } import definitions.{ EqualsPatternClass } import CODE._ - import Debug._ /** EqualsPattern **/ def isEquals(tpe: Type) = tpe.typeSymbol == EqualsPatternClass diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala index 
48c78ef9e0..e92c43f1fd 100644 --- a/src/compiler/scala/tools/nsc/matching/Patterns.scala +++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package matching -import symtab.Flags import PartialFunction._ /** Patterns are wrappers for Trees with enhanced semantics. diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 2050ce7ffd..6c64ea907f 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -6,13 +6,10 @@ package scala.tools.nsc package plugins -import io.{ File, Path, Jar } +import io.{ Path, Jar } import java.net.URLClassLoader import java.util.jar.JarFile import java.util.zip.ZipException - -import scala.collection.mutable -import mutable.ListBuffer import scala.xml.XML /** Information about a plugin loaded from a jar file. diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala index c5321dd728..cddbd62994 100644 --- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala @@ -7,7 +7,6 @@ package scala.tools.nsc package reporters import scala.reflect.internal.util._ -import scala.reflect.internal.util.StringOps._ /** * This interface provides methods to issue information, warning and diff --git a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala index 10e9982594..3aecc06b1e 100644 --- a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala +++ b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala @@ -2,9 +2,6 @@ package scala.tools.nsc.scratchpad import java.io.{FileInputStream, InputStreamReader, IOException} -import scala.runtime.ScalaRunTime.stringOf -import java.lang.reflect.InvocationTargetException -import scala.reflect.runtime.ReflectionUtils._ import scala.collection.mutable.ArrayBuffer @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") diff --git a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala index 01dccd7521..61c1717fea 100644 --- a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala +++ b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala @@ -1,8 +1,6 @@ package scala.tools.nsc package scratchpad -import java.io.Writer -import scala.reflect.internal.util.SourceFile import scala.reflect.internal.Chars._ @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala index 06ebc20d3e..14b398e50a 100644 --- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala @@ -8,7 +8,7 @@ package nsc package settings import util.ClassPath -import io.{ Directory, Path, AbstractFile } +import io.{ Path, AbstractFile } class FscSettings(error: String => Unit) extends Settings(error) { outer => diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index f1f289ed4d..7eae2295f6 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -10,7 +10,6 @@ package settings import io.{ AbstractFile, Jar, Path, PlainFile, 
VirtualDirectory } import scala.reflect.internal.util.StringOps -import scala.collection.mutable.ListBuffer import scala.io.Source import scala.reflect.{ ClassTag, classTag } diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala index f2aab36b51..67fa908ee3 100644 --- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package symtab -import scala.reflect.internal.util.BatchSourceFile import scala.tools.nsc.io.AbstractFile /** A subclass of SymbolLoaders that implements browsing behavior. diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index b670cc93ae..2377c3979d 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -10,7 +10,6 @@ import java.io.IOException import scala.compat.Platform.currentTime import scala.tools.nsc.util.{ ClassPath } import classfile.ClassfileParser -import scala.reflect.internal.Flags._ import scala.reflect.internal.MissingRequirementError import scala.reflect.internal.util.Statistics import scala.tools.nsc.io.{ AbstractFile, MsilFile } diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala index 62bd16139e..035244e421 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package symtab -import scala.collection.{ mutable, immutable } import scala.language.implicitConversions import scala.language.postfixOps diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index bdb4000d16..b254ded8fe 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -23,7 +23,6 @@ import scala.tools.nsc.io.AbstractFile abstract class ClassfileParser { val global: Global import global._ - import definitions.{ AnnotationClass, ClassfileAnnotationClass } import scala.reflect.internal.ClassfileConstants._ import Flags._ @@ -1169,7 +1168,7 @@ abstract class ClassfileParser { originalName + " in " + outerName + "(" + externalName +")" } - object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] { + object innerClasses extends mutable.HashMap[Name, InnerClassEntry] { /** Return the Symbol of the top level class enclosing `name`, * or 'name's symbol if no entry found for `name`. */ diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index d2bb6ebe4c..fafeb12146 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -9,9 +9,7 @@ package classfile import scala.collection.{ mutable, immutable } import mutable.ListBuffer -import backend.icode._ import ClassfileConstants._ -import scala.reflect.internal.Flags._ /** ICode reader from Java bytecode. * @@ -716,7 +714,6 @@ abstract class ICodeReader extends ClassfileParser { val tfa = new analysis.MethodTFA() { import analysis._ - import analysis.typeFlowLattice.IState /** Abstract interpretation for one instruction. 
*/ override def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = { diff --git a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala b/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala index 40189b9444..624db027f1 100644 --- a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala +++ b/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala @@ -7,12 +7,8 @@ package scala.tools.nsc package symtab package clr -import java.io.File -import java.util.{Comparator, StringTokenizer} -import scala.util.Sorting import ch.epfl.lamp.compiler.msil._ import scala.collection.{ mutable, immutable } -import scala.reflect.internal.util.{Position, NoPosition} /** * Collects all types from all reference assemblies. @@ -21,7 +17,6 @@ abstract class CLRTypes { val global: Global import global.Symbol - import global.definitions //########################################################################## diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala index fa19963cbf..f0e49ce500 100644 --- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala @@ -12,7 +12,6 @@ import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute, import scala.collection.{ mutable, immutable } import scala.reflect.internal.pickling.UnPickler import ch.epfl.lamp.compiler.msil.Type.TMVarUsage -import scala.language.implicitConversions /** * @author Nikolay Mihaylov diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 1198ac773b..9b74e56f70 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -8,8 +8,6 @@ package transform import symtab._ import Flags._ -import scala.collection.{ mutable, immutable } -import scala.collection.mutable.ListBuffer abstract class AddInterfaces extends InfoTransform { self: Erasure => import global._ // the global environment diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 7bafbf58b9..b70d94a081 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -8,9 +8,6 @@ package transform import symtab._ import Flags._ import scala.collection.{ mutable, immutable } -import scala.collection.mutable -import scala.tools.nsc.util.FreshNameCreator -import scala.runtime.ScalaRunTime.{ isAnyVal, isTuple } /** * Perform Step 1 in the inline classes SIP: Creates extension methods for all @@ -23,7 +20,6 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { import global._ // the global environment import definitions._ // standard classes and methods - import typer.{ typed, atOwner } // methods to type trees /** the following two members override abstract members in Transform */ val phaseName: String = "extmethods" diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index ba64b3aa0a..a52dadb134 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -8,12 +8,10 @@ package transform import symtab._ import Flags._ -import scala.collection.{ mutable, immutable } import scala.collection.mutable.ListBuffer abstract 
class Flatten extends InfoTransform { import global._ - import definitions._ /** the following two members override abstract members in Transform */ val phaseName: String = "flatten" diff --git a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala index 0af3cf732f..83dbc23014 100644 --- a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala +++ b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala @@ -1,9 +1,11 @@ package scala.tools.nsc package transform -trait InlineErasure { self: Erasure => - +trait InlineErasure { + self: Erasure => + +/** import global._ import definitions._ - -} \ No newline at end of file + **/ +} diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala index 44d8860916..e2594468ab 100644 --- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala +++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala @@ -12,7 +12,6 @@ abstract class SampleTransform extends Transform { // inherits abstract value `global` and class `Phase` from Transform import global._ // the global environment - import definitions._ // standard classes and methods import typer.{typed, atOwner} // methods to type trees /** the following two members override abstract members in Transform */ diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index 95cb052fda..2e0cc3bd98 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -17,7 +17,7 @@ import Flags.SYNTHETIC abstract class TailCalls extends Transform { import global._ // the global environment import definitions._ // standard classes and methods - import typer.{ typed, typedPos } // methods to type trees + import typer.typedPos // methods to type trees val phaseName: String = "tailcalls" diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala index c7bc16f249..b7da0e0087 100644 --- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala +++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala @@ -6,8 +6,6 @@ package scala.tools.nsc package transform -import scala.collection.{ mutable, immutable } - /** A base class for transforms. * A transform contains a compiler phase which applies a tree transformer. 
*/ diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index e0800a95eb..166a9785fa 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -6,12 +6,8 @@ package scala.tools.nsc package typechecker -import scala.collection.{ mutable, immutable } -import scala.collection.mutable.ListBuffer -import scala.util.control.ControlThrowable -import symtab.Flags._ -import scala.annotation.tailrec import Checkability._ +import scala.language.postfixOps /** On pattern matcher checkability: * diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala index 89e2ee44be..a9f6e2517b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package typechecker - import java.lang.ArithmeticException /** This class ... @@ -18,7 +17,6 @@ abstract class ConstantFolder { val global: Global import global._ - import definitions._ /** If tree is a constant operation, replace with result. */ def apply(tree: Tree): Tree = fold(tree, tree match { diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index b548f685bd..bfc9f08553 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -6,9 +6,8 @@ package scala.tools.nsc package typechecker -import scala.collection.{ mutable, immutable } import scala.reflect.internal.util.StringOps.{ countElementsAsString, countAsString } -import symtab.Flags.{ PRIVATE, PROTECTED, IS_ERROR } +import symtab.Flags.IS_ERROR import scala.compat.Platform.EOL import scala.reflect.runtime.ReflectionUtils import scala.reflect.macros.runtime.AbortMacroException diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 0a4813b0cb..16794905a9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package typechecker -import symtab.Flags._ import scala.collection.mutable.{LinkedHashSet, Set} import scala.annotation.tailrec @@ -69,7 +68,6 @@ trait Contexts { self: Analyzer => def rootContext(unit: CompilationUnit): Context = rootContext(unit, EmptyTree, false) def rootContext(unit: CompilationUnit, tree: Tree): Context = rootContext(unit, tree, false) def rootContext(unit: CompilationUnit, tree: Tree, erasedTypes: Boolean): Context = { - import definitions._ var sc = startContext for (sym <- rootImports(unit)) { sc = sc.makeNewImport(sym) diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala index 3133c18839..79cd46e018 100644 --- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala +++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala @@ -6,8 +6,6 @@ package scala.tools.nsc package typechecker -import scala.language.implicitConversions - /** A generic means of breaking down types into their subcomponents. * Types are decomposed top down, and recognizable substructure is * dispatched via self-apparently named methods. 
Those methods can diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 43d1cd1264..576a21fe31 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -30,7 +30,7 @@ trait Implicits { import global._ import definitions._ import ImplicitsStats._ - import typeDebug.{ ptTree, ptBlock, ptLine } + import typeDebug.{ ptBlock, ptLine } import global.typer.{ printTyping, deindentTyping, indentTyping, printInference } def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult = @@ -1468,7 +1468,6 @@ trait Implicits { interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc? def validate: Option[String] = { - import scala.util.matching.Regex; import scala.collection.breakOut // is there a shorter way to avoid the intermediate toList? val refs = """\$\{([^}]+)\}""".r.findAllIn(msg).matchData.map(_ group 1).toSet val decls = typeParamNames.toSet diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 0084ebc65e..6e42481d60 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -6,11 +6,10 @@ package scala.tools.nsc package typechecker -import scala.collection.{ mutable, immutable } +import scala.collection.immutable import scala.collection.mutable.ListBuffer import scala.util.control.ControlThrowable import symtab.Flags._ -import scala.annotation.tailrec /** This trait ... * diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 2b78b37439..09f3fefeba 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -3,15 +3,10 @@ package typechecker import symtab.Flags._ import scala.tools.nsc.util._ -import scala.tools.nsc.util.ClassPath._ import scala.reflect.runtime.ReflectionUtils import scala.collection.mutable.ListBuffer -import scala.compat.Platform.EOL import scala.reflect.internal.util.Statistics import scala.reflect.macros.util._ -import java.lang.{Class => jClass} -import java.lang.reflect.{Array => jArray, Method => jMethod} -import scala.reflect.internal.util.Collections._ import scala.util.control.ControlThrowable import scala.reflect.macros.runtime.AbortMacroException diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 288b7d761f..6aafd32237 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package typechecker import symtab.Flags._ -import scala.collection.{ mutable, immutable } import scala.reflect.internal.util.StringOps.{ ojoin } import scala.reflect.ClassTag import scala.reflect.runtime.{ universe => ru } @@ -456,7 +455,7 @@ trait MethodSynthesis { case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) { class ChangeOwnerAndModuleClassTraverser(oldowner: Symbol, newowner: Symbol) extends ChangeOwnerTraverser(oldowner, newowner) { - + override def traverse(tree: Tree) { tree match { case _: DefTree => change(tree.symbol.moduleClass) diff --git 
a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 3146377d04..728eefc96e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -8,9 +8,7 @@ package typechecker import scala.collection.mutable import scala.annotation.tailrec -import scala.ref.WeakReference import symtab.Flags._ -import scala.tools.nsc.io.AbstractFile /** This trait declares methods to create symbols and to enter them into scopes. * diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 4332b9977c..252a738755 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -8,7 +8,6 @@ package typechecker import symtab.Flags._ import scala.collection.mutable -import scala.ref.WeakReference import scala.reflect.ClassTag /** diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index e3f2b946bd..129a4a05cb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -37,13 +37,11 @@ import scala.reflect.internal.Types * - recover exhaustivity/unreachability of user-defined extractors by partitioning the types they match on using an HList or similar type-level structure */ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL { // self: Analyzer => - import Statistics._ import PatternMatchingStats._ val global: Global // need to repeat here because otherwise last mixin defines global as // SymbolTable. If we had DOT this would not be an issue import global._ // the global environment - import definitions._ // standard classes and methods val phaseName: String = "patmat" diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 3608213028..07135c3af9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -6,9 +6,7 @@ package scala.tools.nsc package typechecker -import symtab.Flags import symtab.Flags._ -import scala.collection.mutable import scala.collection.mutable.ListBuffer /** Synthetic method implementations for case classes and case objects. 
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index d6073cddbe..710adf5a9c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package typechecker -import scala.tools.nsc.symtab.Flags._ import scala.collection.mutable import mutable.ListBuffer import util.returning diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 90dfd3180b..4f5291507e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -8,7 +8,6 @@ package typechecker import scala.collection.mutable import scala.collection.mutable.ListBuffer -import scala.util.control.ControlThrowable import scala.util.control.Exception.ultimately import symtab.Flags._ import PartialFunction._ @@ -37,7 +36,6 @@ trait TypeDiagnostics { import global._ import definitions._ - import global.typer.{ infer, context } /** The common situation of making sure nothing is erroneous could be * nicer if Symbols, Types, and Trees all implemented some common interface diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index 522d6d2b59..0c49b9b8e7 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -11,7 +11,6 @@ import java.net.URL import scala.collection.{ mutable, immutable } import io.{ File, Directory, Path, Jar, AbstractFile } import scala.reflect.internal.util.StringOps.splitWhere -import scala.reflect.ClassTag import Jar.isJarOrZip import File.pathSeparator import java.net.MalformedURLException diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala index 9cf2c535df..81c1b1d37a 100644 --- a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala +++ b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala @@ -7,7 +7,6 @@ package scala.tools.nsc package util import scala.util.parsing.combinator._ -import scala.util.parsing.input.{ Reader } import scala.util.parsing.input.CharArrayReader.EofCh import scala.collection.mutable.ListBuffer diff --git a/src/compiler/scala/tools/nsc/util/Exceptional.scala b/src/compiler/scala/tools/nsc/util/Exceptional.scala index 34344263e8..1608ffa425 100644 --- a/src/compiler/scala/tools/nsc/util/Exceptional.scala +++ b/src/compiler/scala/tools/nsc/util/Exceptional.scala @@ -3,8 +3,6 @@ package util import java.util.concurrent.ExecutionException import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException } -import scala.reflect.internal.util.StringOps._ -import scala.language.implicitConversions object Exceptional { def unwrap(x: Throwable): Throwable = x match { diff --git a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala index aa3b7c286d..2f209c550d 100644 --- a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala @@ -8,9 +8,6 @@ package scala.tools.nsc package util -import java.io.File -import java.net.URL -import java.util.StringTokenizer import scala.util.Sorting import scala.collection.mutable import scala.tools.nsc.io.{ AbstractFile, MsilFile } @@ -166,4 +163,4 @@ class 
AssemblyClassPath(types: Array[MSILType], namespace: String, val context: * MSILType values. */ class MsilClassPath(ext: String, user: String, source: String, context: MsilContext) -extends MergedClassPath[MsilFile](MsilClassPath.assembleEntries(ext, user, source, context), context) { } \ No newline at end of file +extends MergedClassPath[MsilFile](MsilClassPath.assembleEntries(ext, user, source, context), context) { } diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala index e04987be1b..759c06dc0f 100644 --- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala +++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala @@ -7,7 +7,7 @@ package scala.tools package nsc package util -import java.io.{File, FileInputStream, PrintStream} +import java.io.PrintStream import java.lang.Long.toHexString import java.lang.Float.intBitsToFloat import java.lang.Double.longBitsToDouble diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala index 86cd845c54..d7c50504a8 100644 --- a/src/compiler/scala/tools/reflect/MacroImplementations.scala +++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala @@ -1,6 +1,5 @@ package scala.tools.reflect -import scala.reflect.macros.{ReificationException, UnexpectedReificationException} import scala.reflect.macros.runtime.Context import scala.collection.mutable.ListBuffer import scala.collection.mutable.Stack @@ -147,4 +146,4 @@ abstract class MacroImplementations { Block(evals.toList, atPos(origApplyPos.focus)(expr)) setPos origApplyPos.makeTransparent } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala index 116ae24cdd..3ae21b6b98 100644 --- a/src/compiler/scala/tools/reflect/ReflectMain.scala +++ b/src/compiler/scala/tools/reflect/ReflectMain.scala @@ -4,7 +4,6 @@ package reflect import scala.tools.nsc.Driver import scala.tools.nsc.Global import scala.tools.nsc.Settings -import scala.tools.nsc.util.ClassPath.DefaultJavaContext import scala.tools.nsc.util.ScalaClassLoader import scala.tools.util.PathResolver @@ -16,4 +15,4 @@ object ReflectMain extends Driver { } override def newCompiler(): Global = new ReflectGlobal(settings, reporter, classloaderFromSettings(settings)) -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala index a3bc9b9bd1..5c62819f04 100644 --- a/src/compiler/scala/tools/reflect/StdTags.scala +++ b/src/compiler/scala/tools/reflect/StdTags.scala @@ -1,7 +1,6 @@ package scala.tools package reflect -import java.lang.{Class => jClass} import scala.reflect.{ClassTag, classTag} import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse} diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 996ff00d36..1c5cfe5faa 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -3,12 +3,8 @@ package reflect import scala.tools.nsc.reporters._ import scala.tools.nsc.CompilerCommand -import scala.tools.nsc.Global -import scala.tools.nsc.typechecker.Modes import scala.tools.nsc.io.VirtualDirectory import scala.tools.nsc.interpreter.AbstractFileClassLoader -import scala.tools.nsc.util.FreshNameCreator -import scala.reflect.internal.Flags import 
scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, NoFile} import java.lang.{Class => jClass} import scala.compat.Platform.EOL @@ -313,11 +309,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => // reporter doesn't accumulate errors, but the front-end does def throwIfErrors() = { - if (frontEnd.hasErrors) { - var msg = "reflective compilation has failed: " + EOL + EOL - msg += frontEnd.infos map (_.msg) mkString EOL - throw ToolBoxError(msg) - } + if (frontEnd.hasErrors) throw ToolBoxError( + "reflective compilation has failed: " + EOL + EOL + (frontEnd.infos map (_.msg) mkString EOL) + ) } } diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala index 3f880bf7f8..bf533766d0 100644 --- a/src/compiler/scala/tools/reflect/package.scala +++ b/src/compiler/scala/tools/reflect/package.scala @@ -76,7 +76,6 @@ package object reflect { private[reflect] def frontEndToReporter(frontEnd: FrontEnd, settings0: Settings): Reporter = new AbstractReporter { val settings = settings0 - import frontEnd.{Severity => ApiSeverity} val API_INFO = frontEnd.INFO val API_WARNING = frontEnd.WARNING val API_ERROR = frontEnd.ERROR diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala index c3264d0787..4d94581cc1 100644 --- a/src/compiler/scala/tools/util/Javap.scala +++ b/src/compiler/scala/tools/util/Javap.scala @@ -6,10 +6,8 @@ package scala.tools package util -import java.lang.reflect.{ GenericSignatureFormatError, Method, Constructor } -import java.lang.{ ClassLoader => JavaClassLoader } import scala.tools.nsc.util.ScalaClassLoader -import java.io.{ InputStream, PrintWriter, ByteArrayInputStream, FileNotFoundException } +import java.io.{ InputStream, PrintWriter, ByteArrayInputStream } import scala.tools.nsc.io.File import Javap._ import scala.language.reflectiveCalls diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index 0af1011bda..6b0821edf3 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -6,7 +6,6 @@ package scala.tools package util -import java.net.{ URL, MalformedURLException } import scala.tools.reflect.WrappedProperties.AccessControl import nsc.{ Settings, GenericRunnerSettings } import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader } -- cgit v1.2.3 From fc89074f50f278ee31313dd136f10bd046e137cc Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 6 Nov 2012 16:57:13 -0800 Subject: Deprecation patrol. Threw in deprecation warning reduction in src/compiler. 
--- src/compiler/scala/tools/nsc/ast/DocComments.scala | 2 +- src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala | 3 +-- .../scala/tools/nsc/backend/icode/ICodeCheckers.scala | 3 +-- src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 4 ++-- src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 4 ++-- src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala | 16 ++++++++-------- .../nsc/doc/model/diagram/DiagramDirectiveParser.scala | 2 +- .../scala/tools/nsc/symtab/BrowsingLoaders.scala | 2 +- src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 4 ++-- .../tools/nsc/symtab/classfile/ClassfileParser.scala | 6 +++--- .../scala/tools/nsc/symtab/classfile/ICodeReader.scala | 2 +- .../scala/tools/nsc/transform/AddInterfaces.scala | 2 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 4 ++-- .../scala/tools/nsc/transform/SpecializeTypes.scala | 2 +- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 4 ++-- .../scala/tools/nsc/typechecker/PatternMatching.scala | 4 ++-- src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 10 +++++----- src/reflect/scala/reflect/internal/HasFlags.scala | 5 +++-- 19 files changed, 40 insertions(+), 41 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index f6cbebe10c..21407289db 100755 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -461,7 +461,7 @@ trait DocComments { self: Global => //val (classes, pkgs) = site.ownerChain.span(!_.isPackageClass) //val sites = (classes ::: List(pkgs.head, rootMirror.RootClass))) //findIn(sites) - findIn(site.ownerChain ::: List(definitions.EmptyPackage)) + findIn(site.ownerChain ::: List(rootMirror.EmptyPackage)) } def getType(str: String, variable: String): Type = { diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index a22ce7affd..30a9348fb0 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -16,7 +16,6 @@ import javax.swing.tree._ import scala.concurrent.Lock import scala.text._ -import symtab.Flags._ import scala.language.implicitConversions /** @@ -530,7 +529,7 @@ abstract class TreeBrowsers { val s = t.symbol if ((s ne null) && (s != NoSymbol)) { - var str = flagsToString(s.flags) + var str = s.flagString if (s.isStaticMember) str = str + " isStatic "; (str + " annotations: " + s.annotations.mkString("", " ", "") + (if (s.isTypeSkolem) "\ndeSkolemized annotations: " + s.deSkolemize.annotations.mkString("", " ", "") else "")) diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index ec03343320..221652723d 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -9,7 +9,6 @@ package icode import scala.collection.mutable import scala.collection.mutable.ListBuffer -import scala.tools.nsc.symtab._ abstract class ICodeCheckers { val global: Global @@ -487,7 +486,7 @@ abstract class ICodeCheckers { case LOAD_MODULE(module) => checkBool((module.isModule || module.isModuleClass), - "Expected module: " + module + " flags: " + Flags.flagsToString(module.flags)); + "Expected module: " + module + " flags: " + module.flagString); pushStack(toTypeKind(module.tpe)); case 
STORE_THIS(kind) => diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 8e7b9f2ba2..8bae80c760 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -2429,7 +2429,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { case LOAD_MODULE(module) => // assert(module.isModule, "Expected module: " + module) - debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags)); + debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString); if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) { jmethod.visitVarInsn(Opcodes.ALOAD, 0) } else { @@ -2506,7 +2506,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { case lf @ LOAD_FIELD(field, isStatic) => val owner = javaName(lf.hostClass) - debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags)) + debuglog("LOAD_FIELD with owner: " + owner + " flags: " + field.owner.flagString) val fieldJName = javaName(field) val fieldDescr = descriptor(field) val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index 7fde3e1eaa..06f94ef46c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -1333,7 +1333,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with case lf @ LOAD_FIELD(field, isStatic) => val owner = javaName(lf.hostClass) debuglog("LOAD_FIELD with owner: " + owner + - " flags: " + Flags.flagsToString(field.owner.flags)) + " flags: " + field.owner.flagString) val fieldJName = javaName(field) val fieldJType = javaType(field) if (isStatic) jcode.emitGETSTATIC(owner, fieldJName, fieldJType) @@ -1341,7 +1341,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with case LOAD_MODULE(module) => // assert(module.isModule, "Expected module: " + module) - debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags)); + debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString); if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString) jcode.emitALOAD_0() else diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala index 93a1252553..2253ae6e15 100644 --- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala +++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala @@ -464,7 +464,7 @@ abstract class GenMSIL extends SubComponent { private[GenMSIL] def genClass(iclass: IClass) { val sym = iclass.symbol - debuglog("Generating class " + sym + " flags: " + Flags.flagsToString(sym.flags)) + debuglog("Generating class " + sym + " flags: " + sym.flagString) clasz = iclass val tBuilder = getType(sym).asInstanceOf[TypeBuilder] @@ -509,7 +509,7 @@ abstract class GenMSIL extends SubComponent { private def genMethod(m: IMethod) { - debuglog("Generating method " + m.symbol + " flags: " + Flags.flagsToString(m.symbol.flags) + + debuglog("Generating method " + m.symbol + " flags: " + m.symbol.flagString + " owner: " + m.symbol.owner) method = m localBuilders.clear @@ -524,8 +524,8 @@ abstract class GenMSIL extends SubComponent { mcode = 
mBuilder.GetILGenerator() } catch { case e: Exception => - java.lang.System.out.println("m.symbol = " + Flags.flagsToString(m.symbol.flags) + " " + m.symbol) - java.lang.System.out.println("m.symbol.owner = " + Flags.flagsToString(m.symbol.owner.flags) + " " + m.symbol.owner) + java.lang.System.out.println("m.symbol = " + m.symbol.flagString + " " + m.symbol) + java.lang.System.out.println("m.symbol.owner = " + m.symbol.owner.flagString + " " + m.symbol.owner) java.lang.System.out.println("mBuilder = " + mBuilder) java.lang.System.out.println("mBuilder.DeclaringType = " + TypeAttributes.toString(mBuilder.DeclaringType.Attributes) + @@ -821,7 +821,7 @@ abstract class GenMSIL extends SubComponent { def loadFieldOrAddress(field: Symbol, isStatic: Boolean, msg: String, loadAddr : Boolean) { debuglog(msg + " with owner: " + field.owner + - " flags: " + Flags.flagsToString(field.owner.flags)) + " flags: " + field.owner.flagString) val fieldInfo = fields.get(field) match { case Some(fInfo) => fInfo case None => @@ -1899,7 +1899,7 @@ abstract class GenMSIL extends SubComponent { if (iclass.symbol != definitions.ArrayClass) { for (m: IMethod <- iclass.methods) { val sym = m.symbol - debuglog("Creating MethodBuilder for " + Flags.flagsToString(sym.flags) + " " + + debuglog("Creating MethodBuilder for " + sym.flagString + " " + sym.owner.fullName + "::" + sym.name) val ownerType = getType(sym.enclClass).asInstanceOf[TypeBuilder] @@ -2243,8 +2243,8 @@ abstract class GenMSIL extends SubComponent { } private def showsym(sym: Symbol): String = (sym.toString + - "\n symbol = " + Flags.flagsToString(sym.flags) + " " + sym + - "\n owner = " + Flags.flagsToString(sym.owner.flags) + " " + sym.owner + "\n symbol = " + sym.flagString + " " + sym + + "\n owner = " + sym.owner.flagString + " " + sym.owner ) } // class BytecodeGenerator diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala index 6a6c60fb3e..fbf6e3386b 100644 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala +++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala @@ -179,7 +179,7 @@ trait DiagramDirectiveParser { def warning(message: String) = { // we need the position from the package object (well, ideally its comment, but yeah ...) 
val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym - assert((sym != global.NoSymbol) || (sym == global.definitions.RootPackage)) + assert((sym != global.NoSymbol) || (sym == global.rootMirror.RootPackage)) global.reporter.warning(sym.pos, message) } diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala index 67fa908ee3..4e4efef607 100644 --- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala @@ -27,7 +27,7 @@ abstract class BrowsingLoaders extends SymbolLoaders { override protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = { completer.sourcefile match { case Some(src) => - (if (member.isModule) member.moduleClass else member).sourceFile = src + (if (member.isModule) member.moduleClass else member).associatedFile = src case _ => } val decls = owner.info.decls diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 2377c3979d..19502f0d7e 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -161,8 +161,8 @@ abstract class SymbolLoaders { private def setSource(sym: Symbol) { sourcefile foreach (sf => sym match { - case cls: ClassSymbol => cls.sourceFile = sf - case mod: ModuleSymbol => mod.moduleClass.sourceFile = sf + case cls: ClassSymbol => cls.associatedFile = sf + case mod: ModuleSymbol => mod.moduleClass.associatedFile = sf case _ => () }) } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index b254ded8fe..67f6c3ec5d 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -185,7 +185,7 @@ abstract class ClassfileParser { if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start) val name = getExternalName(in.getChar(start + 1)) if (nme.isModuleName(name)) - c = rootMirror.getModule(nme.stripModuleSuffix(name)) + c = rootMirror.getModuleByName(nme.stripModuleSuffix(name)) else c = classNameToSymbol(name) @@ -236,7 +236,7 @@ abstract class ClassfileParser { //assert(name.endsWith("$"), "Not a module class: " + name) f = forceMangledName(name dropRight 1, true) if (f == NoSymbol) - f = rootMirror.getModule(name dropRight 1) + f = rootMirror.getModuleByName(name dropRight 1) } else { val origName = nme.originalName(name) val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol @@ -477,7 +477,7 @@ abstract class ClassfileParser { if (name.pos('.') == name.length) definitions.getMember(rootMirror.EmptyPackageClass, name.toTypeName) else - rootMirror.getClass(name) // see tickets #2464, #3756 + rootMirror.getClassByName(name) // see tickets #2464, #3756 } catch { case _: FatalError => loadClassSymbol(name) } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index fafeb12146..b7511377cc 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -165,7 +165,7 @@ abstract class ICodeReader extends ClassfileParser { } else if (nme.isModuleName(name)) { val strippedName = 
nme.stripModuleSuffix(name) - forceMangledName(newTermName(strippedName.decode), true) orElse rootMirror.getModule(strippedName) + forceMangledName(newTermName(strippedName.decode), true) orElse rootMirror.getModuleByName(strippedName) } else { forceMangledName(name, false) diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 9b74e56f70..5fbc15f858 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -92,7 +92,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => impl.typeOfThis = iface.typeOfThis impl.thisSym setName iface.thisSym.name } - impl.sourceFile = iface.sourceFile + impl.associatedFile = iface.sourceFile if (inClass) iface.owner.info.decls enter impl diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 11b734684d..8122dc38cf 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -384,7 +384,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { if (sourceModule != NoSymbol) { sourceModule setPos sym.pos if (sourceModule.flags != MODULE) { - log("!!! Directly setting sourceModule flags from %s to MODULE".format(flagsToString(sourceModule.flags))) + log("!!! Directly setting sourceModule flags from %s to MODULE".format(sourceModule.flagString)) sourceModule.flags = MODULE } } @@ -1204,7 +1204,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { tree case Select(qual, name) if sym.owner.isImplClass && !isStaticOnly(sym) => - assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, flagsToString(sym.flags))) + assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, sym.flagString)) // refer to fields in some implementation class via an abstract // getter in the interface. 
val iface = toInterface(sym.owner.tpe).typeSymbol diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index a0dd245b65..78fb725041 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -545,7 +545,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long, newName: Name = null) = member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED), newName) - sClass.sourceFile = clazz.sourceFile + sClass.associatedFile = clazz.sourceFile currentRun.symSource(sClass) = clazz.sourceFile // needed later on by mixin val env = mapAnyRefsInSpecSym(env0, clazz, sClass) diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index e69b1bc482..84803d0b6b 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -336,7 +336,7 @@ abstract class UnCurry extends InfoTransform // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = val applyOrElseMethodDef = { - val methSym = anonClass.newMethod(fun.pos, nme.applyOrElse) setFlag (FINAL | OVERRIDE) + val methSym = anonClass.newMethod(nme.applyOrElse, fun.pos, newFlags = FINAL | OVERRIDE) val List(argtpe) = formals val A1 = methSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argtpe) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 728eefc96e..3f5410eb45 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -338,7 +338,7 @@ trait Namers extends MethodSynthesis { if (clazz.sourceFile != null && clazz.sourceFile != contextFile) debugwarn("!!! Source mismatch in " + clazz + ": " + clazz.sourceFile + " vs. 
" + contextFile) - clazz.sourceFile = contextFile + clazz.associatedFile = contextFile if (clazz.sourceFile != null) { assert(currentRun.canRedefine(clazz) || clazz.sourceFile == currentRun.symSource(clazz), clazz.sourceFile) currentRun.symSource(clazz) = clazz.sourceFile @@ -426,7 +426,7 @@ trait Namers extends MethodSynthesis { setPrivateWithin(tree, m.moduleClass) } if (m.owner.isPackageClass && !m.isPackage) { - m.moduleClass.sourceFile = contextFile + m.moduleClass.associatedFile = contextFile currentRun.symSource(m) = m.moduleClass.sourceFile registerTopLevelSym(m) } diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 129a4a05cb..ab3476b91f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -1430,7 +1430,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def flatMap(prev: Tree, b: Symbol, next: Tree): Tree def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree def flatMapGuard(cond: Tree, next: Tree): Tree - def ifThenElseZero(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero + def ifThenElseZero(c: Tree, thenp: Tree): Tree = IF (c) THEN thenp ELSE zero protected def zero: Tree } @@ -1523,7 +1523,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // __match.zero protected def zero: Tree = _match(vpmName.zero) // __match.guard(`c`, `then`) - def guard(c: Tree, then: Tree): Tree = _match(vpmName.guard) APPLY (c, then) + def guard(c: Tree, thenp: Tree): Tree = _match(vpmName.guard) APPLY (c, thenp) //// methods in the monad instance -- used directly in translation // `prev`.flatMap(`b` => `next`) diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 1c5cfe5faa..f0c88eadea 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -331,15 +331,15 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => command.settings.outputDirs setSingleOutput virtualDirectory val instance = new ToolBoxGlobal(command.settings, frontEndToReporter(frontEnd, command.settings)) if (frontEnd.hasErrors) { - var msg = "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL - msg += frontEnd.infos map (_.msg) mkString EOL - throw ToolBoxError(msg) + throw ToolBoxError( + "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL + + (frontEnd.infos map (_.msg) mkString EOL) + ) } instance } catch { case ex: Throwable => - var msg = "reflective compilation has failed: cannot initialize the compiler due to %s".format(ex.toString) - throw ToolBoxError(msg, ex) + throw ToolBoxError(s"reflective compilation has failed: cannot initialize the compiler due to $ex", ex) } } diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala index 4a3663b8ea..6f8befd23e 100644 --- a/src/reflect/scala/reflect/internal/HasFlags.scala +++ b/src/reflect/scala/reflect/internal/HasFlags.scala @@ -158,13 +158,14 @@ trait HasFlags { else nonAccess + " " + access } + // Guess this can't be deprecated seeing as it's in the reflect API. 
+ def isParameter = hasFlag(PARAM) + // Backward compat section @deprecated( "Use isTrait", "2.10.0") def hasTraitFlag = hasFlag(TRAIT) @deprecated("Use hasDefault", "2.10.0") def hasDefaultFlag = hasFlag(DEFAULTPARAM) - @deprecated("Use isValueParameter or isTypeParameter", "2.10.0") - def isParameter = hasFlag(PARAM) @deprecated("Use flagString", "2.10.0") def defaultFlagString = flagString @deprecated("Use flagString(mask)", "2.10.0") -- cgit v1.2.3 From cac5a08611f9511ba4d94b99db630404efae190a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 4 Nov 2012 14:17:25 +0100 Subject: Optimize primitive Array(e1, ..., en) Expands an existing optimization for reference arrays to apply to primitives, as well. Fixes one aspect of SI-6247. --- .../scala/tools/nsc/transform/CleanUp.scala | 8 +-- src/library/scala/Array.scala | 10 ++++ .../scala/reflect/internal/Definitions.scala | 15 +++--- test/files/instrumented/t6611.scala | 24 ++++++++- test/files/run/t6611.scala | 63 ++++++++++++++++++++-- 5 files changed, 105 insertions(+), 15 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 16f6c80101..5318f98fa8 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -624,11 +624,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL { // // See SI-6611; we must *only* do this for literal vararg arrays. case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(arg @ StripCast(ArrayValue(_, _)))), _)) - if (wrapRefArrayMeth.symbol == Predef_wrapRefArray && - appMeth.symbol == ArrayModule_overloadedApply.suchThat { - _.tpe.resultType.dealias.typeSymbol == ObjectClass // [T: ClassTag](xs: T*): Array[T] post erasure - }) => + if wrapRefArrayMeth.symbol == Predef_wrapRefArray && appMeth.symbol == ArrayModule_genericApply => super.transform(arg) + case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _))))) + if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) => + super.transform(rest.copy(elems = elem0 :: rest.elems).copyAttrs(rest)) case _ => super.transform(tree) diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala index 90684b5fdd..b9f51803ec 100644 --- a/src/library/scala/Array.scala +++ b/src/library/scala/Array.scala @@ -115,6 +115,8 @@ object Array extends FallbackArrayBuilding { * @param xs the elements to put in the array * @return an array containing all elements from xs. */ + // Subject to a compiler optimization in Cleanup. + // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } def apply[T: ClassTag](xs: T*): Array[T] = { val array = new Array[T](xs.length) var i = 0 @@ -123,6 +125,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Boolean` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { val array = new Array[Boolean](xs.length + 1) array(0) = x @@ -132,6 +135,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Byte` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
def apply(x: Byte, xs: Byte*): Array[Byte] = { val array = new Array[Byte](xs.length + 1) array(0) = x @@ -141,6 +145,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Short` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Short, xs: Short*): Array[Short] = { val array = new Array[Short](xs.length + 1) array(0) = x @@ -150,6 +155,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Char` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Char, xs: Char*): Array[Char] = { val array = new Array[Char](xs.length + 1) array(0) = x @@ -159,6 +165,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Int` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Int, xs: Int*): Array[Int] = { val array = new Array[Int](xs.length + 1) array(0) = x @@ -168,6 +175,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Long` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Long, xs: Long*): Array[Long] = { val array = new Array[Long](xs.length + 1) array(0) = x @@ -177,6 +185,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Float` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Float, xs: Float*): Array[Float] = { val array = new Array[Float](xs.length + 1) array(0) = x @@ -186,6 +195,7 @@ object Array extends FallbackArrayBuilding { } /** Creates an array of `Double` objects */ + // Subject to a compiler optimization in Cleanup, see above. def apply(x: Double, xs: Double*): Array[Double] = { val array = new Array[Double](xs.length + 1) array(0) = x diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 9f515e18d7..03f71f20c4 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -340,12 +340,13 @@ trait Definitions extends api.StandardDefinitions { lazy val PredefModule = requiredModule[scala.Predef.type] lazy val PredefModuleClass = PredefModule.moduleClass - def Predef_classOf = getMemberMethod(PredefModule, nme.classOf) - def Predef_identity = getMemberMethod(PredefModule, nme.identity) - def Predef_conforms = getMemberMethod(PredefModule, nme.conforms) - def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray) - def Predef_??? = getMemberMethod(PredefModule, nme.???) - def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly) + def Predef_classOf = getMemberMethod(PredefModule, nme.classOf) + def Predef_identity = getMemberMethod(PredefModule, nme.identity) + def Predef_conforms = getMemberMethod(PredefModule, nme.conforms) + def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray) + def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp)) + def Predef_??? = getMemberMethod(PredefModule, nme.???) + def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly) /** Is `sym` a member of Predef with the given name? 
* Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def` @@ -470,6 +471,8 @@ trait Definitions extends api.StandardDefinitions { // arrays and their members lazy val ArrayModule = requiredModule[scala.Array.type] lazy val ArrayModule_overloadedApply = getMemberMethod(ArrayModule, nme.apply) + def ArrayModule_genericApply = ArrayModule_overloadedApply.suchThat(_.paramss.flatten.last.tpe.typeSymbol == ClassTagClass) // [T: ClassTag](xs: T*): Array[T] + def ArrayModule_apply(tp: Type) = ArrayModule_overloadedApply.suchThat(_.tpe.resultType =:= arrayType(tp)) // (p1: AnyVal1, ps: AnyVal1*): Array[AnyVal1] lazy val ArrayClass = getRequiredClass("scala.Array") // requiredClass[scala.Array[_]] lazy val Array_apply = getMemberMethod(ArrayClass, nme.apply) lazy val Array_update = getMemberMethod(ArrayClass, nme.update) diff --git a/test/files/instrumented/t6611.scala b/test/files/instrumented/t6611.scala index 821d5f3fbf..4c52f8a5ef 100644 --- a/test/files/instrumented/t6611.scala +++ b/test/files/instrumented/t6611.scala @@ -5,7 +5,29 @@ object Test { startProfiling() // tests optimization in Cleanup for varargs reference arrays - val a = Array("") + Array("") + + + Array(true) + Array(true, false) + Array(1: Byte) + Array(1: Byte, 2: Byte) + Array(1: Short) + Array(1: Short, 2: Short) + Array(1) + Array(1, 2) + Array(1L) + Array(1L, 2L) + Array(1d) + Array(1d, 2d) + Array(1f) + Array(1f, 2f) + + /* Not currently optimized: + Array[Int](1, 2) etc + Array(()) + Array((), ()) + */ stopProfiling() printStatistics() diff --git a/test/files/run/t6611.scala b/test/files/run/t6611.scala index c0297372f0..c295368aea 100644 --- a/test/files/run/t6611.scala +++ b/test/files/run/t6611.scala @@ -1,6 +1,61 @@ object Test extends App { - val a = Array("1") - val a2 = Array(a: _*) - a2(0) = "2" - assert(a(0) == "1") + locally { + val a = Array("1") + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array("1": Object) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(true) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1: Short) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1: Byte) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1L) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1f) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(1d) + val a2 = Array(a: _*) + assert(a ne a2) + } + + locally { + val a = Array(()) + val a2 = Array(a: _*) + assert(a ne a2) + } } -- cgit v1.2.3 From 36edc795c4edd829fad82d9bcd530272228d8eba Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 7 Nov 2012 11:08:28 -0800 Subject: A few straggling unused imports. 
--- src/compiler/scala/tools/nsc/ast/Positions.scala | 2 +- src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala | 2 +- src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala | 2 +- src/compiler/scala/tools/nsc/doc/Uncompilable.scala | 2 +- src/compiler/scala/tools/nsc/doc/html/page/Source.scala | 2 +- src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala | 2 +- src/compiler/scala/tools/nsc/io/MsilFile.scala | 2 +- src/compiler/scala/tools/nsc/plugins/PluginDescription.scala | 2 +- src/compiler/scala/tools/nsc/transform/SampleTransform.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- src/reflect/scala/reflect/internal/Trees.scala | 2 +- 11 files changed, 12 insertions(+), 12 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala index d8fb632f73..77acbba056 100644 --- a/src/compiler/scala/tools/nsc/ast/Positions.scala +++ b/src/compiler/scala/tools/nsc/ast/Positions.scala @@ -1,7 +1,7 @@ package scala.tools.nsc package ast -import scala.reflect.internal.util.{ SourceFile, Position, OffsetPosition, NoPosition } +import scala.reflect.internal.util.{ SourceFile, OffsetPosition } trait Positions extends scala.reflect.internal.Positions { self: Global => diff --git a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala index 93b37f415d..49dc105c79 100644 --- a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala +++ b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala @@ -6,7 +6,7 @@ package scala.tools.nsc package backend -import scala.collection.{ mutable, immutable } +import scala.collection.mutable /** * Simple implementation of a worklist algorithm. A processing diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala index 796e3a5629..0e7c75de50 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala @@ -66,7 +66,7 @@ import scala.reflect.internal.util.{Position,NoPosition} * in the source files. 
*/ trait Opcodes { self: ICodes => - import global.{Symbol, NoSymbol, Type, Name, Constant}; + import global.{Symbol, NoSymbol, Name, Constant}; // categories of ICode instructions final val localsCat = 1 diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala index d3e5c869e0..9447e36610 100644 --- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala +++ b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala @@ -15,7 +15,7 @@ trait Uncompilable { val global: Global val settings: Settings - import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, Name, DocComment, NoSymbol } + import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, DocComment, NoSymbol } import global.definitions.AnyRefClass import global.rootMirror.RootClass diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala index 1d6404e1a4..37145756d9 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala @@ -8,7 +8,7 @@ package doc package html package page -import scala.xml.{NodeSeq, Unparsed} +import scala.xml.NodeSeq import java.io.File class Source(sourceFile: File) extends HtmlPage { diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala index 4b084c71a9..ebd0030802 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala @@ -14,7 +14,7 @@ trait ExprTyper { import repl._ import global.{ reporter => _, Import => _, _ } import definitions._ - import syntaxAnalyzer.{ UnitParser, UnitScanner } + import syntaxAnalyzer.UnitParser import naming.freshInternalVarName object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] { diff --git a/src/compiler/scala/tools/nsc/io/MsilFile.scala b/src/compiler/scala/tools/nsc/io/MsilFile.scala index 2f0a71fc60..1a3a4f5c81 100644 --- a/src/compiler/scala/tools/nsc/io/MsilFile.scala +++ b/src/compiler/scala/tools/nsc/io/MsilFile.scala @@ -6,7 +6,7 @@ package scala.tools.nsc package io -import ch.epfl.lamp.compiler.msil.{ Type => MsilType, _ } +import ch.epfl.lamp.compiler.msil.{ Type => MsilType } /** This class wraps an MsilType. It exists only so * ClassPath can treat all of JVM/MSIL/bin/src files diff --git a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala index bd567400fb..9ecc098687 100644 --- a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala +++ b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala @@ -6,7 +6,7 @@ package scala.tools.nsc package plugins -import scala.xml.{Node,NodeSeq} +import scala.xml.Node /** A description of a compiler plugin, suitable for serialization * to XML for inclusion in the plugin's .jar file. 
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala index e2594468ab..cffb483072 100644 --- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala +++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala @@ -11,8 +11,8 @@ package transform abstract class SampleTransform extends Transform { // inherits abstract value `global` and class `Phase` from Transform - import global._ // the global environment - import typer.{typed, atOwner} // methods to type trees + import global._ // the global environment + import typer.typed // method to type trees /** the following two members override abstract members in Transform */ val phaseName: String = "sample-phase" diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index b1fd29ccdc..c203e62786 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2424,7 +2424,7 @@ trait Typers extends Modes with Adaptations with Tags { // (virtualized matches are expanded during type checking so they have the full context available) // otherwise, do nothing: matches are translated during phase `patmat` (unless -Xoldpatmat) def virtualizedMatch(match_ : Match, mode: Int, pt: Type) = { - import patmat.{vpmName, PureMatchTranslator, OptimizingMatchTranslator} + import patmat.{ vpmName, PureMatchTranslator } // TODO: add fallback __match sentinel to predef val matchStrategy: Tree = diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 9238f09ed5..ed08226ec7 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -1312,7 +1312,7 @@ trait Trees extends api.Trees { self: SymbolTable => class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol) extends Traverser { final def change(sym: Symbol) = { - if (sym != NoSymbol && sym.owner == oldowner) + if (sym != NoSymbol && sym.owner == oldowner) sym.owner = newowner } override def traverse(tree: Tree) { -- cgit v1.2.3 From a7cc894ff2b4c5b661890272ec401cde12da3f9e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 7 Nov 2012 22:03:13 +0100 Subject: More principled tree copying. Canonical > home-spun. --- src/compiler/scala/tools/nsc/transform/CleanUp.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 5318f98fa8..847ca574a9 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -628,7 +628,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL { super.transform(arg) case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _))))) if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) => - super.transform(rest.copy(elems = elem0 :: rest.elems).copyAttrs(rest)) + super.transform(treeCopy.ArrayValue(rest, rest.elemtpt, elem0 :: rest.elems)) case _ => super.transform(tree) -- cgit v1.2.3 From 20976578ee06411c0971b21836defa8a30246c9c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 7 Nov 2012 11:07:34 -0800 Subject: Warn about unused imports. Hidden behind -Xlint as usual. 
This commit also includes further simplification of the symbol lookup logic which I unearthed on the way to reporting unused imports. Plus unusually comprehensive documentation of same. --- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 16 +- .../scala/tools/nsc/typechecker/Contexts.scala | 191 +++++++++++++++------ test/files/neg/warn-unused-imports.check | 44 +++++ test/files/neg/warn-unused-imports.flags | 1 + test/files/neg/warn-unused-imports.scala | 125 ++++++++++++++ 5 files changed, 319 insertions(+), 58 deletions(-) create mode 100644 test/files/neg/warn-unused-imports.check create mode 100644 test/files/neg/warn-unused-imports.flags create mode 100644 test/files/neg/warn-unused-imports.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index feaa1907e7..941604b154 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -28,11 +28,21 @@ abstract class Pickler extends SubComponent { val phaseName = "pickler" - currentRun - def newPhase(prev: Phase): StdPhase = new PicklePhase(prev) class PicklePhase(prev: Phase) extends StdPhase(prev) { + override def run() { + super.run() + // This is run here rather than after typer because I found + // some symbols - usually annotations, possibly others - had not + // yet performed the necessary symbol lookup, leading to + // spurious claims of unusedness. + if (settings.lint.value) { + log("Clearing recorded import selectors.") + analyzer.clearUnusedImports() + } + } + def apply(unit: CompilationUnit) { def pickle(tree: Tree) { def add(sym: Symbol, pickle: Pickle) = { @@ -77,6 +87,8 @@ abstract class Pickler extends SubComponent { } pickle(unit.body) + if (settings.lint.value) + analyzer.warnUnusedImports(unit) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 16794905a9..dfc621d60e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -6,7 +6,7 @@ package scala.tools.nsc package typechecker -import scala.collection.mutable.{LinkedHashSet, Set} +import scala.collection.mutable import scala.annotation.tailrec /** @@ -15,6 +15,7 @@ import scala.annotation.tailrec */ trait Contexts { self: Analyzer => import global._ + import definitions.{ JavaLangPackage, ScalaPackage, PredefModule } object NoContext extends Context { outer = this @@ -27,7 +28,6 @@ trait Contexts { self: Analyzer => override def toString = "NoContext" } private object RootImports { - import definitions._ // Possible lists of root imports val javaList = JavaLangPackage :: Nil val javaAndScalaList = JavaLangPackage :: ScalaPackage :: Nil @@ -46,6 +46,28 @@ trait Contexts { self: Analyzer => rootMirror.RootClass.info.decls) } + private lazy val allUsedSelectors = + mutable.Map[ImportInfo, Set[ImportSelector]]() withDefaultValue Set() + private lazy val allImportInfos = + mutable.Map[CompilationUnit, List[ImportInfo]]() withDefaultValue Nil + + def clearUnusedImports() { + allUsedSelectors.clear() + allImportInfos.clear() + } + def warnUnusedImports(unit: CompilationUnit) = { + val imps = allImportInfos(unit).reverse.distinct + + for (imp <- imps) { + val used = allUsedSelectors(imp) + def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD + + imp.tree.selectors filterNot (s => isMask(s) || 
used(s)) foreach { sel => + unit.warning(imp posOf sel, "Unused import") + } + } + } + var lastAccessCheckDetails: String = "" /** List of symbols to import from in a root context. Typically that @@ -146,7 +168,7 @@ trait Contexts { self: Analyzer => var typingIndentLevel: Int = 0 def typingIndent = " " * typingIndentLevel - var buffer: Set[AbsTypeError] = _ + var buffer: mutable.Set[AbsTypeError] = _ def enclClassOrMethod: Context = if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this @@ -185,13 +207,13 @@ trait Contexts { self: Analyzer => def setThrowErrors() = mode &= (~AllMask) def setAmbiguousErrors(report: Boolean) = if (report) mode |= AmbiguousErrors else mode &= notThrowMask - def updateBuffer(errors: Set[AbsTypeError]) = buffer ++= errors + def updateBuffer(errors: mutable.Set[AbsTypeError]) = buffer ++= errors def condBufferFlush(removeP: AbsTypeError => Boolean) { val elems = buffer.filter(removeP) buffer --= elems } def flushBuffer() { buffer.clear() } - def flushAndReturnBuffer(): Set[AbsTypeError] = { + def flushAndReturnBuffer(): mutable.Set[AbsTypeError] = { val current = buffer.clone() buffer.clear() current @@ -284,7 +306,7 @@ trait Contexts { self: Analyzer => c.checking = this.checking c.retyping = this.retyping c.openImplicits = this.openImplicits - c.buffer = if (this.buffer == null) LinkedHashSet[AbsTypeError]() else this.buffer // need to initialize + c.buffer = if (this.buffer == null) mutable.LinkedHashSet[AbsTypeError]() else this.buffer // need to initialize registerContext(c.asInstanceOf[analyzer.Context]) debuglog("[context] ++ " + c.unit + " / " + tree.summaryString) c @@ -302,8 +324,13 @@ trait Contexts { self: Analyzer => def makeNewImport(sym: Symbol): Context = makeNewImport(gen.mkWildcardImport(sym)) - def makeNewImport(imp: Import): Context = - make(unit, imp, owner, scope, new ImportInfo(imp, depth) :: imports) + def makeNewImport(imp: Import): Context = { + val impInfo = new ImportInfo(imp, depth) + if (settings.lint.value && imp.pos.isDefined) // pos.isDefined excludes java.lang/scala/Predef imports + allImportInfos(unit) ::= impInfo + + make(unit, imp, owner, scope, impInfo :: imports) + } def make(tree: Tree, owner: Symbol, scope: Scope): Context = if (tree == this.tree && owner == this.owner && scope == this.scope) this @@ -326,7 +353,7 @@ trait Contexts { self: Analyzer => val c = make(newtree) c.setBufferErrors() c.setAmbiguousErrors(reportAmbiguousErrors) - c.buffer = new LinkedHashSet[AbsTypeError]() + c.buffer = mutable.LinkedHashSet[AbsTypeError]() c } @@ -672,10 +699,13 @@ trait Contexts { self: Analyzer => * package object foo { type InputStream = java.io.InputStream } * import foo._, java.io._ */ - def isAmbiguousImport(imp1: ImportInfo, imp2: ImportInfo, name: Name): Boolean = { - // The imported symbols from each import. 
- def imp1Symbol = importedAccessibleSymbol(imp1, name) - def imp2Symbol = importedAccessibleSymbol(imp2, name) + private def resolveAmbiguousImport(name: Name, imp1: ImportInfo, imp2: ImportInfo): Option[ImportInfo] = { + val imp1Explicit = imp1 isExplicitImport name + val imp2Explicit = imp2 isExplicitImport name + val ambiguous = if (imp1.depth == imp2.depth) imp1Explicit == imp2Explicit else !imp1Explicit && imp2Explicit + val imp1Symbol = (imp1 importedSymbol name).initialize filter (s => isAccessible(s, imp1.qual.tpe, superAccess = false)) + val imp2Symbol = (imp2 importedSymbol name).initialize filter (s => isAccessible(s, imp2.qual.tpe, superAccess = false)) + // The types of the qualifiers from which the ambiguous imports come. // If the ambiguous name is a value, these must be the same. def t1 = imp1.qual.tpe @@ -691,25 +721,27 @@ trait Contexts { self: Analyzer => s"member type 2: $mt2" ).mkString("\n ") - imp1Symbol.exists && imp2Symbol.exists && ( + if (!ambiguous || !imp2Symbol.exists) Some(imp1) + else if (!imp1Symbol.exists) Some(imp2) + else ( // The symbol names are checked rather than the symbols themselves because // each time an overloaded member is looked up it receives a new symbol. // So foo.member("x") != foo.member("x") if x is overloaded. This seems // likely to be the cause of other bugs too... if (t1 =:= t2 && imp1Symbol.name == imp2Symbol.name) { log(s"Suppressing ambiguous import: $t1 =:= $t2 && $imp1Symbol == $imp2Symbol") - false + Some(imp1) } // Monomorphism restriction on types is in part because type aliases could have the // same target type but attach different variance to the parameters. Maybe it can be // relaxed, but doesn't seem worth it at present. else if (mt1 =:= mt2 && name.isTypeName && imp1Symbol.isMonomorphicType && imp2Symbol.isMonomorphicType) { log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $imp1Symbol and $imp2Symbol are equivalent") - false + Some(imp1) } else { log(s"Import is genuinely ambiguous:\n " + characterize) - true + None } ) } @@ -717,9 +749,11 @@ trait Contexts { self: Analyzer => /** The symbol with name `name` imported via the import in `imp`, * if any such symbol is accessible from this context. */ - def importedAccessibleSymbol(imp: ImportInfo, name: Name) = { - imp importedSymbol name filter (s => isAccessible(s, imp.qual.tpe, superAccess = false)) - } + def importedAccessibleSymbol(imp: ImportInfo, name: Name): Symbol = + importedAccessibleSymbol(imp, name, requireExplicit = false) + + private def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean): Symbol = + imp.importedSymbol(name, requireExplicit) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false)) /** Is `sym` defined in package object of package `pkg`? 
* Since sym may be defined in some parent of the package object, @@ -814,11 +848,15 @@ trait Contexts { self: Analyzer => var imports = Context.this.imports def imp1 = imports.head def imp2 = imports.tail.head + def sameDepth = imp1.depth == imp2.depth def imp1Explicit = imp1 isExplicitImport name def imp2Explicit = imp2 isExplicitImport name - while (!qualifies(impSym) && imports.nonEmpty && imp1.depth > symbolDepth) { - impSym = importedAccessibleSymbol(imp1, name) + def lookupImport(imp: ImportInfo, requireExplicit: Boolean) = + importedAccessibleSymbol(imp, name, requireExplicit) filter qualifies + + while (!impSym.exists && imports.nonEmpty && imp1.depth > symbolDepth) { + impSym = lookupImport(imp1, requireExplicit = false) if (!impSym.exists) imports = imports.tail } @@ -843,33 +881,45 @@ trait Contexts { self: Analyzer => finish(EmptyTree, defSym) } else if (impSym.exists) { - def sameDepth = imp1.depth == imp2.depth - def needsCheck = if (sameDepth) imp1Explicit == imp2Explicit else imp1Explicit || imp2Explicit - def isDone = imports.tail.isEmpty || (!sameDepth && imp1Explicit) - def ambiguous = needsCheck && isAmbiguousImport(imp1, imp2, name) && { - lookupError = ambiguousImports(imp1, imp2) - true - } - // Ambiguity check between imports. - // The same name imported again is potentially ambiguous if the name is: - // - after explicit import, explicitly imported again at the same or lower depth - // - after explicit import, wildcard imported at lower depth - // - after wildcard import, wildcard imported at the same depth - // Under all such conditions isAmbiguousImport is called, which will - // examine the imports in case they are importing the same thing; if that - // can't be established conclusively, an error is issued. - while (lookupError == null && !isDone) { - val other = importedAccessibleSymbol(imp2, name) - // if the competing import is unambiguous and explicit, it is the new winner. - val isNewWinner = qualifies(other) && !ambiguous && imp2Explicit - // imports is imp1 :: imp2 :: rest. - // If there is a new winner, it is imp2, and imports drops imp1. - // If there is not, imp1 is still the winner, and it drops imp2. - if (isNewWinner) { - impSym = other - imports = imports.tail + // We continue walking down the imports as long as the tail is non-empty, which gives us: + // imports == imp1 :: imp2 :: _ + // And at least one of the following is true: + // - imp1 and imp2 are at the same depth + // - imp1 is a wildcard import, so all explicit imports from outer scopes must be checked + def keepLooking = ( + lookupError == null + && imports.tail.nonEmpty + && (sameDepth || !imp1Explicit) + ) + // If we find a competitor imp2 which imports the same name, possible outcomes are: + // + // - same depth, imp1 wild, imp2 explicit: imp2 wins, drop imp1 + // - same depth, imp1 wild, imp2 wild: ambiguity check + // - same depth, imp1 explicit, imp2 explicit: ambiguity check + // - differing depth, imp1 wild, imp2 explicit: ambiguity check + // - all others: imp1 wins, drop imp2 + // + // The ambiguity check is: if we can verify that both imports refer to the same + // symbol (e.g. import foo.X followed by import foo._) then we discard imp2 + // and proceed. If we cannot, issue an ambiguity error. + while (keepLooking) { + // If not at the same depth, limit the lookup to explicit imports. 
+ // This is desirable from a performance standpoint (compare to + // filtering after the fact) but also necessary to keep the unused + // import check from being misled by symbol lookups which are not + // actually used. + val other = lookupImport(imp2, requireExplicit = !sameDepth) + def imp1wins = { imports = imp1 :: imports.tail.tail } + def imp2wins = { impSym = other ; imports = imports.tail } + + if (!other.exists) // imp1 wins; drop imp2 and continue. + imp1wins + else if (sameDepth && !imp1Explicit && imp2Explicit) // imp2 wins; drop imp1 and continue. + imp2wins + else resolveAmbiguousImport(name, imp1, imp2) match { + case Some(imp) => if (imp eq imp1) imp1wins else imp2wins + case _ => lookupError = ambiguousImports(imp1, imp2) } - else imports = imp1 :: imports.tail.tail } // optimization: don't write out package prefixes finish(resetPos(imp1.qual.duplicate), impSym) @@ -901,6 +951,9 @@ trait Contexts { self: Analyzer => } //class Context class ImportInfo(val tree: Import, val depth: Int) { + def pos = tree.pos + def posOf(sel: ImportSelector) = tree.pos withPoint sel.namePos + /** The prefix expression */ def qual: Tree = tree.symbol.info match { case ImportType(expr) => expr @@ -914,22 +967,43 @@ trait Contexts { self: Analyzer => /** The symbol with name `name` imported from import clause `tree`. */ - def importedSymbol(name: Name): Symbol = { + def importedSymbol(name: Name): Symbol = importedSymbol(name, requireExplicit = false) + + private def recordUsage(sel: ImportSelector, result: Symbol) { + def posstr = pos.source.file.name + ":" + posOf(sel).safeLine + def resstr = if (tree.symbol.hasCompleteInfo) s"(qual=$qual, $result)" else s"(expr=${tree.expr}, ${result.fullLocationString})" + debuglog(s"In $this at $posstr, selector '${selectorString(sel)}' resolved to $resstr") + allUsedSelectors(this) += sel + } + + /** If requireExplicit is true, wildcard imports are not considered. 
*/ + def importedSymbol(name: Name, requireExplicit: Boolean): Symbol = { var result: Symbol = NoSymbol var renamed = false var selectors = tree.selectors - while (selectors != Nil && result == NoSymbol) { - if (selectors.head.rename == name.toTermName) + def current = selectors.head + while (selectors.nonEmpty && result == NoSymbol) { + if (current.rename == name.toTermName) result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports - if (name.isTypeName) selectors.head.name.toTypeName else selectors.head.name) - else if (selectors.head.name == name.toTermName) + if (name.isTypeName) current.name.toTypeName else current.name) + else if (current.name == name.toTermName) renamed = true - else if (selectors.head.name == nme.WILDCARD && !renamed) + else if (current.name == nme.WILDCARD && !renamed && !requireExplicit) result = qual.tpe.nonLocalMember(name) - selectors = selectors.tail + + if (result == NoSymbol) + selectors = selectors.tail } + if (settings.lint.value && selectors.nonEmpty && result != NoSymbol && pos != NoPosition) + recordUsage(current, result) + result } + private def selectorString(s: ImportSelector): String = { + if (s.name == nme.WILDCARD && s.rename == null) "_" + else if (s.name == s.rename) "" + s.name + else s.name + " => " + s.rename + } def allImportedSymbols: Iterable[Symbol] = qual.tpe.members flatMap (transformImport(tree.selectors, _)) @@ -943,7 +1017,12 @@ trait Contexts { self: Analyzer => case _ :: rest => transformImport(rest, sym) } - override def toString() = tree.toString() + override def hashCode = tree.## + override def equals(other: Any) = other match { + case that: ImportInfo => (tree == that.tree) + case _ => false + } + override def toString = tree.toString } case class ImportType(expr: Tree) extends Type { diff --git a/test/files/neg/warn-unused-imports.check b/test/files/neg/warn-unused-imports.check new file mode 100644 index 0000000000..e61ec267d3 --- /dev/null +++ b/test/files/neg/warn-unused-imports.check @@ -0,0 +1,44 @@ +warn-unused-imports.scala:7: warning: it is not recommended to define classes/objects inside of package objects. +If possible, define class A in package p1 instead. + class A + ^ +warn-unused-imports.scala:13: warning: it is not recommended to define classes/objects inside of package objects. +If possible, define class A in package p2 instead. + class A + ^ +warn-unused-imports.scala:99: warning: local trait Warn is never used + trait Warn { // warn about unused local trait for good measure + ^ +warn-unused-imports.scala:57: warning: Unused import + import p1.A // warn + ^ +warn-unused-imports.scala:62: warning: Unused import + import p1.{ A, B } // warn on A + ^ +warn-unused-imports.scala:67: warning: Unused import + import p1.{ A, B } // warn on both + ^ +warn-unused-imports.scala:67: warning: Unused import + import p1.{ A, B } // warn on both + ^ +warn-unused-imports.scala:73: warning: Unused import + import c._ // warn + ^ +warn-unused-imports.scala:78: warning: Unused import + import p1._ // warn + ^ +warn-unused-imports.scala:85: warning: Unused import + import c._ // warn + ^ +warn-unused-imports.scala:91: warning: Unused import + import p1.c._ // warn + ^ +warn-unused-imports.scala:98: warning: Unused import + import p1._ // warn + ^ +warn-unused-imports.scala:118: warning: Unused import + import p1.A // warn + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+13 warnings found +one error found diff --git a/test/files/neg/warn-unused-imports.flags b/test/files/neg/warn-unused-imports.flags new file mode 100644 index 0000000000..954eaba352 --- /dev/null +++ b/test/files/neg/warn-unused-imports.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Xlint diff --git a/test/files/neg/warn-unused-imports.scala b/test/files/neg/warn-unused-imports.scala new file mode 100644 index 0000000000..b7a2f1c414 --- /dev/null +++ b/test/files/neg/warn-unused-imports.scala @@ -0,0 +1,125 @@ +class Bippo { + def length: Int = 123 + class Tree +} + +package object p1 { + class A + implicit class B(val s: String) { def bippy = s } + val c: Bippo = new Bippo + type D = String +} +package object p2 { + class A + implicit class B(val s: String) { def bippy = s } + val c: Bippo = new Bippo + type D = Int +} + +trait NoWarn { + { + import p1._ // no warn + println("abc".bippy) + } + + { + import p1._ // no warn + println(new A) + } + + { + import p1.B // no warn + println("abc".bippy) + } + + { + import p1._ // no warn + import c._ // no warn + println(length) + } + + { + import p1._ // no warn + import c._ // no warn + val x: Tree = null + println(x) + } + + { + import p1.D // no warn + val x: D = null + println(x) + } +} + +trait Warn { + { + import p1.A // warn + println(123) + } + + { + import p1.{ A, B } // warn on A + println("abc".bippy) + } + + { + import p1.{ A, B } // warn on both + println(123) + } + + { + import p1._ // no warn (technically this could warn, but not worth the effort to unroll unusedness transitively) + import c._ // warn + println(123) + } + + { + import p1._ // warn + println(123) + } + + { + class Tree + import p1._ // no warn + import c._ // warn + val x: Tree = null + println(x) + } + + { + import p1.c._ // warn + println(123) + } +} + +trait Nested { + { + import p1._ // warn + trait Warn { // warn about unused local trait for good measure + import p2._ + println(new A) + println("abc".bippy) + } + println("") + } + + { + import p1._ // no warn + trait NoWarn { + import p2.B // no warn + println("abc".bippy) + println(new A) + } + println(new NoWarn { }) + } + + { + import p1.A // warn + trait Warn { + import p2.A + println(new A) + } + println(new Warn { }) + } +} -- cgit v1.2.3 From 31801561809834b06dc867c4b09fd8212fd23651 Mon Sep 17 00:00:00 2001 From: 杨博 Date: Thu, 8 Nov 2012 09:42:40 +0800 Subject: Treat name in getResourceAsStream as resource path The parameter for getResourceAsStream is not a class name. It's a resource path! 
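The point of the fix is easiest to see outside the compiler: getResourceAsStream takes a slash-separated resource path such as "scala/Option.class", not a dotted class name such as "scala.Option". The following self-contained Scala sketch illustrates the distinction; the object name and the simplified classNameToPath helper are illustrative stand-ins, not code from the patch.

object ResourcePathDemo {
  // Simplified stand-in for the loader's classNameToPath helper:
  // turn a dotted class name into the resource path a class loader expects.
  def classNameToPath(name: String): String = name.replace('.', '/') + ".class"

  def main(args: Array[String]): Unit = {
    val className    = "scala.Option"
    val resourcePath = classNameToPath(className)   // "scala/Option.class"
    // getResourceAsStream wants the path form; the dotted name would find nothing.
    val stream = getClass.getClassLoader.getResourceAsStream(resourcePath)
    println(className + " -> " + resourcePath + ", found: " + (stream != null))
    if (stream != null) stream.close()
  }
}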
--- .../scala/tools/nsc/interpreter/AbstractFileClassLoader.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala index fcb485defd..a71b9a76c8 100644 --- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala @@ -25,7 +25,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) protected def findAbstractFile(name: String): AbstractFile = { var file: AbstractFile = root - val pathParts = classNameToPath(name) split '/' + val pathParts = name split '/' for (dirPart <- pathParts.init) { file = file.lookupName(dirPart, true) @@ -59,7 +59,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) case null => super.getResourceAsStream(name) case file => file.input } - override def classBytes(name: String): Array[Byte] = findAbstractFile(name) match { + override def classBytes(name: String): Array[Byte] = findAbstractFile(classNameToPath(name)) match { case null => super.classBytes(name) case file => file.toByteArray } -- cgit v1.2.3 From bb9d367784829debcf09120cd3bd0b8f83c92508 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 8 Nov 2012 15:52:03 -0800 Subject: Remove hostile code from explicitouter. If that's the $outer accessor, what's that being created in newOuterAccessor? Sketchy, sketchy business. --- src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 3 --- 1 file changed, 3 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 8de248f4e6..073887a778 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -158,9 +158,6 @@ abstract class ExplicitOuter extends InfoTransform var decls1 = decls if (isInner(clazz) && !clazz.isInterface) { decls1 = decls.cloneScope - val outerAcc = clazz.newMethod(nme.OUTER, clazz.pos) // 3 - outerAcc expandName clazz - decls1 enter newOuterAccessor(clazz) if (hasOuterField(clazz)) //2 decls1 enter newOuterField(clazz) -- cgit v1.2.3 From b540aaee4ba30e2dd980456a44e8c6d732222df1 Mon Sep 17 00:00:00 2001 From: Jan Niehusmann Date: Fri, 9 Nov 2012 15:05:58 +0100 Subject: Fix SI-6637 (misoptimization in erasure) Move the optimization one level deeper so the expression being tested with isInstanceOf is always evaluated. --- .../scala/tools/nsc/transform/Erasure.scala | 22 +++++++++++----------- test/files/run/t6637.check | 1 + test/files/run/t6637.scala | 8 ++++++++ 3 files changed, 20 insertions(+), 11 deletions(-) create mode 100644 test/files/run/t6637.check create mode 100644 test/files/run/t6637.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 5c18d1dc6d..cfc3d0a377 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -1033,17 +1033,17 @@ abstract class Erasure extends AddInterfaces Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe))) } case RefinedType(parents, decls) if (parents.length >= 2) => - // Optimization: don't generate isInstanceOf tests if the static type - // conforms, because it always succeeds. (Or at least it had better.) 
- // At this writing the pattern matcher generates some instance tests - // involving intersections where at least one parent is statically known true. - // That needs fixing, but filtering the parents here adds an additional - // level of robustness (in addition to the short term fix.) - val parentTests = parents filterNot (qual.tpe <:< _) - - if (parentTests.isEmpty) Literal(Constant(true)) - else gen.evalOnce(qual, currentOwner, unit) { q => - atPos(tree.pos) { + gen.evalOnce(qual, currentOwner, unit) { q => + // Optimization: don't generate isInstanceOf tests if the static type + // conforms, because it always succeeds. (Or at least it had better.) + // At this writing the pattern matcher generates some instance tests + // involving intersections where at least one parent is statically known true. + // That needs fixing, but filtering the parents here adds an additional + // level of robustness (in addition to the short term fix.) + val parentTests = parents filterNot (qual.tpe <:< _) + + if (parentTests.isEmpty) Literal(Constant(true)) + else atPos(tree.pos) { parentTests map mkIsInstanceOf(q) reduceRight gen.mkAnd } } diff --git a/test/files/run/t6637.check b/test/files/run/t6637.check new file mode 100644 index 0000000000..9766475a41 --- /dev/null +++ b/test/files/run/t6637.check @@ -0,0 +1 @@ +ok diff --git a/test/files/run/t6637.scala b/test/files/run/t6637.scala new file mode 100644 index 0000000000..d3c380370b --- /dev/null +++ b/test/files/run/t6637.scala @@ -0,0 +1,8 @@ + +object Test extends App { + try { + class A ; class B ; List().head.isInstanceOf[A with B] + } catch { + case _ :java.util.NoSuchElementException => println("ok") + } +} -- cgit v1.2.3 From 2a1d0202f1b7f59f6312263b7a80ea750657296d Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 3 Nov 2012 09:20:09 -0700 Subject: Hardened JavaMirrors against linkage errors. After running into a ClassCircularityError and following it back to this, I simplified the method to route all linkage errors the same way. And added logging. --- src/reflect/scala/reflect/runtime/JavaMirrors.scala | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 0cfb3fd623..44fbd55162 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -496,13 +496,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni Class.forName(path, true, classLoader) /** Does `path` correspond to a Java class with that fully qualified name in the current class loader? */ - def tryJavaClass(path: String): Option[jClass[_]] = - try { - Some(javaClass(path)) - } catch { - case (_: ClassNotFoundException) | (_: NoClassDefFoundError) | (_: IncompatibleClassChangeError) => - None - } + def tryJavaClass(path: String): Option[jClass[_]] = ( + try Some(javaClass(path)) + catch { case ex @ (_: LinkageError | _: ClassNotFoundException) => None } // TODO - log + ) /** The mirror that corresponds to the classloader that original defined the given Java class */ def mirrorDefining(jclazz: jClass[_]): JavaMirror = { -- cgit v1.2.3 From 632daed4ed846f773bb9a730c0721d21f3fa53c0 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 28 Oct 2012 12:08:37 -0700 Subject: Minor tweaks in Types/Scopes. All methods to do with handling imports more uniformly and early filtering of symbols which cannot be imported. 
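Since the commit message is terse, here is a minimal, self-contained model of the early-filtering idea it describes. This is plain Scala, not compiler code; the Member class and the isUnimportable predicate below are illustrative stand-ins for the symbol-table versions. The idea: constructors, private[this] members, and members that every object already inherits from Any/AnyRef can never usefully be referenced through an import, so they are dropped before import lookup considers them.

object ImportableDemo {
  final case class Member(
    name: String,
    owner: String,
    isConstructor: Boolean = false,
    isPrivateThis: Boolean = false
  )

  // Stand-in for isUnimportable: names an import can never make visible.
  def isUnimportable(m: Member): Boolean =
    m.isConstructor || m.isPrivateThis || m.owner == "Any" || m.owner == "Object"

  def importableMembers(ms: List[Member]): List[Member] = ms filterNot isUnimportable

  def main(args: Array[String]): Unit = {
    val members = List(
      Member("<init>", "Foo", isConstructor = true),   // constructor
      Member("secret", "Foo", isPrivateThis = true),   // private[this] member
      Member("toString", "Any"),                       // universal member
      Member("bar", "Foo")                             // ordinary member
    )
    println(importableMembers(members) map (_.name))   // List(bar)
  }
}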
Also make TypeBounds treat a Wildcard in lower or upper bounds as an empty bound, so we don't see all these method signatures like def f[T >: ? <: ?] because that's not helpful. --- .../scala/tools/nsc/typechecker/Contexts.scala | 2 +- .../scala/reflect/internal/Definitions.scala | 20 ++++++++++++ src/reflect/scala/reflect/internal/Scopes.scala | 37 ++++++---------------- src/reflect/scala/reflect/internal/Types.scala | 12 ++++--- 4 files changed, 39 insertions(+), 32 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index dfc621d60e..78380ad054 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1006,7 +1006,7 @@ trait Contexts { self: Analyzer => } def allImportedSymbols: Iterable[Symbol] = - qual.tpe.members flatMap (transformImport(tree.selectors, _)) + importableMembers(qual.tpe) flatMap (transformImport(tree.selectors, _)) private def transformImport(selectors: List[ImportSelector], sym: Symbol): List[Symbol] = selectors match { case List() => List() diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 9f515e18d7..ed45010091 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -235,6 +235,26 @@ trait Definitions extends api.StandardDefinitions { scope.sorted foreach fullyInitializeSymbol scope } + /** Is this symbol a member of Object or Any? */ + def isUniversalMember(sym: Symbol) = ( + (sym ne NoSymbol) + && (ObjectClass isSubClass sym.owner) + ) + + /** Is this symbol unimportable? Unimportable symbols include: + * - constructors, because is not a real name + * - private[this] members, which cannot be referenced from anywhere else + * - members of Any or Object, because every instance will inherit a + * definition which supersedes the imported one + */ + def isUnimportable(sym: Symbol) = ( + (sym eq NoSymbol) + || sym.isConstructor + || sym.isPrivateLocal + || isUniversalMember(sym) + ) + def isImportable(sym: Symbol) = !isUnimportable(sym) + /** Is this type equivalent to Any, AnyVal, or AnyRef? */ def isTrivialTopType(tp: Type) = ( tp =:= AnyClass.tpe diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index be1c333634..0e1d52cc95 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -341,36 +341,19 @@ trait Scopes extends api.Scopes { self: SymbolTable => */ def iterator: Iterator[Symbol] = toList.iterator -/* - /** Does this scope contain an entry for `sym`? - */ - def contains(sym: Symbol): Boolean = lookupAll(sym.name) contains sym - - /** A scope that contains all symbols of this scope and that also contains `sym`. - */ - def +(sym: Symbol): Scope = - if (contains(sym)) this - else { - val result = cloneScope - result enter sym - result - } + def containsName(name: Name) = lookupEntry(name) != null + def containsSymbol(s: Symbol) = lookupAll(s.name) contains s - /** A scope that contains all symbols of this scope except `sym`. 
- */ - def -(sym: Symbol): Scope = - if (!contains(sym)) this - else { - val result = cloneScope - result unlink sym - result - } -*/ override def foreach[U](p: Symbol => U): Unit = toList foreach p - override def filter(p: Symbol => Boolean): Scope = - if (!(toList forall p)) newScopeWith(toList filter p: _*) else this - + override def filterNot(p: Symbol => Boolean): Scope = ( + if (toList exists p) newScopeWith(toList filterNot p: _*) + else this + ) + override def filter(p: Symbol => Boolean): Scope = ( + if (toList forall p) this + else newScopeWith(toList filter p: _*) + ) @deprecated("Use `toList.reverse` instead", "2.10.0") def reverse: List[Symbol] = toList.reverse diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 91a64def83..3104c1e74e 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1516,8 +1516,8 @@ trait Types extends api.Types { self: SymbolTable => } private def lowerString = if (emptyLowerBound) "" else " >: " + lo private def upperString = if (emptyUpperBound) "" else " <: " + hi - private def emptyLowerBound = typeIsNothing(lo) - private def emptyUpperBound = typeIsAny(hi) + private def emptyLowerBound = typeIsNothing(lo) || lo.isWildcard + private def emptyUpperBound = typeIsAny(hi) || hi.isWildcard def isEmptyBounds = emptyLowerBound && emptyUpperBound // override def isNullable: Boolean = NullClass.tpe <:< lo; @@ -7240,8 +7240,12 @@ trait Types extends api.Types { self: SymbolTable => /** Members of the given class, other than those inherited * from Any or AnyRef. */ - def nonTrivialMembers(clazz: Symbol): Iterable[Symbol] = - clazz.info.members filterNot (sym => sym.owner == ObjectClass || sym.owner == AnyClass) + def nonTrivialMembers(clazz: Symbol): Scope = clazz.info.members filterNot isUniversalMember + + /** Members which can be imported into other scopes. + */ + def importableMembers(clazz: Symbol): Scope = importableMembers(clazz.info) + def importableMembers(pre: Type): Scope = pre.members filter isImportable def objToAny(tp: Type): Type = if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe -- cgit v1.2.3 From 45c2d7f1dab5cb6a4afdac5e1d3f3d5caad86b62 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 28 Oct 2012 12:13:19 -0700 Subject: Massively simplified repl initialization. 
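The commit has no body, so as a reader aid here is a sketch of the initialization pattern visible in the diff that follows: the expensive setup runs on a background thread as a Future, the prompt is shown immediately, and the future is awaited before the first command is actually processed. Everything in the sketch (object name, sleep, timeout) is illustrative, not code taken from the patch.

import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object AsyncInitDemo {
  @volatile private var ready = false

  def main(args: Array[String]): Unit = {
    val init: Future[Boolean] = Future {
      Thread.sleep(500)   // stands in for interpreter/compiler startup
      ready = true
      true
    }
    println("scala> ")    // the user sees a prompt while initialization continues
    Await.ready(init, 60.seconds)
    println("first line can now be processed, ready = " + ready)
  }
}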
--- .../scala/tools/nsc/interpreter/ILoop.scala | 130 +++++++++++---------- .../scala/tools/nsc/interpreter/ILoopInit.scala | 123 ------------------- 2 files changed, 70 insertions(+), 183 deletions(-) delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index 350cc364ab..bb8aa13f6d 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -9,7 +9,7 @@ package interpreter import Predef.{ println => _, _ } import java.io.{ BufferedReader, FileReader } import session._ -import scala.util.Properties.{ jdkHome, javaVersion } +import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName } import scala.tools.util.{ Javap } import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream } import io.{ File, Directory } @@ -20,6 +20,8 @@ import scala.tools.util._ import scala.language.{implicitConversions, existentials} import scala.reflect.classTag import scala.tools.reflect.StdRuntimeTags._ +import scala.concurrent.{ ExecutionContext, Await, Future, future } +import ExecutionContext.Implicits._ /** The Scala interactive shell. It provides a read-eval-print loop * around the Interpreter class. @@ -36,17 +38,33 @@ import scala.tools.reflect.StdRuntimeTags._ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) extends AnyRef with LoopCommands - with ILoopInit { def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out) def this() = this(None, new JPrintWriter(Console.out, true)) + @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp + @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i + var in: InteractiveReader = _ // the input stream from which commands come var settings: Settings = _ var intp: IMain = _ - @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp - @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i + private var globalFuture: Future[Boolean] = _ + + /** Print a welcome message */ + def printWelcome() { + echo(s""" + |Welcome to Scala $versionString ($javaVmName, Java $javaVersion). + |Type in expressions to have them evaluated. + |Type :help for more information.""".trim.stripMargin + ) + replinfo("[info] started at " + new java.util.Date) + } + + protected def asyncMessage(msg: String) { + if (isReplInfo || isReplPower) + echoAndRefresh(msg) + } /** Having inherited the difficult "var-ness" of the repl instance, * I'm trying to work around it by moving operations into a class from @@ -495,33 +513,30 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) true } + // return false if repl should exit + def processLine(line: String): Boolean = { + import scala.concurrent.duration._ + Await.ready(globalFuture, 60.seconds) + + (line ne null) && (command(line) match { + case Result(false, _) => false + case Result(_, Some(line)) => addReplay(line) ; true + case _ => true + }) + } + + private def readOneLine() = { + out.flush() + in readLine prompt + } + /** The main read-eval-print loop for the repl. It calls * command() for each line of input, and stops when * command() returns false. 
*/ - def loop() { - def readOneLine() = { - out.flush() - in readLine prompt - } - // return false if repl should exit - def processLine(line: String): Boolean = { - if (isAsync) { - if (!awaitInitialized()) return false - runThunks() - } - if (line eq null) false // assume null means EOF - else command(line) match { - case Result(false, _) => false - case Result(_, Some(finalLine)) => addReplay(finalLine) ; true - case _ => true - } - } - def innerLoop() { - if ( try processLine(readOneLine()) catch crashRecovery ) - innerLoop() - } - innerLoop() + @tailrec final def loop() { + if ( try processLine(readOneLine()) catch crashRecovery ) + loop() } /** interpret all lines from a specified file */ @@ -767,45 +782,40 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) SimpleReader() } } - def process(settings: Settings): Boolean = savingContextLoader { - this.settings = settings - createInterpreter() - // sets in to some kind of reader depending on environmental cues - in = in0 match { - case Some(reader) => SimpleReader(reader, out, true) - case None => - // some post-initialization - chooseReader(settings) match { - case x: JLineReader => addThunk(x.consoleReader.postInit) ; x - case x => x - } + private def loopPostInit() { + in match { + case x: JLineReader => x.consoleReader.postInit + case _ => } // Bind intp somewhere out of the regular namespace where // we can get at it in generated code. - addThunk(intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain]))) - addThunk({ - val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp()) - if (autorun.isDefined) intp.quietRun(autorun.get) - }) - - loadFiles(settings) - // it is broken on startup; go ahead and exit - if (intp.reporter.hasErrors) - return false - - // This is about the illusion of snappiness. We call initialize() - // which spins off a separate thread, then print the prompt and try - // our best to look ready. The interlocking lazy vals tend to - // inter-deadlock, so we break the cycle with a single asynchronous - // message to an actor. - if (isAsync) { - intp initialize initializedCallback() - createAsyncListener() // listens for signal to run postInitialization + intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain])) + // Auto-run code via some setting. 
+ ( replProps.replAutorunCode.option + flatMap (f => io.File(f).safeSlurp()) + foreach (intp quietRun _) + ) + // classloader and power mode setup + intp.setContextClassLoader + if (isReplPower) { + replProps.power setValue true + unleashAndSetPhase() + asyncMessage(power.banner) } - else { + } + def process(settings: Settings): Boolean = savingContextLoader { + this.settings = settings + createInterpreter() + var thunks: List[() => Unit] = Nil + + // sets in to some kind of reader depending on environmental cues + in = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, true)) + globalFuture = future { intp.initializeSynchronous() - postInitialization() + loopPostInit() + loadFiles(settings) + !intp.reporter.hasErrors } printWelcome() diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala deleted file mode 100644 index b6c0f42abe..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala +++ /dev/null @@ -1,123 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import scala.tools.nsc.util.stackTraceString - -/** - * Machinery for the asynchronous initialization of the repl. - */ -trait ILoopInit { - self: ILoop => - - /** Print a welcome message */ - def printWelcome() { - import Properties._ - val welcomeMsg = - """|Welcome to Scala %s (%s, Java %s). - |Type in expressions to have them evaluated. - |Type :help for more information.""" . - stripMargin.format(versionString, javaVmName, javaVersion) - echo(welcomeMsg) - replinfo("[info] started at " + new java.util.Date) - } - - protected def asyncMessage(msg: String) { - if (isReplInfo || isReplPower) - echoAndRefresh(msg) - } - - private val initLock = new java.util.concurrent.locks.ReentrantLock() - private val initCompilerCondition = initLock.newCondition() // signal the compiler is initialized - private val initLoopCondition = initLock.newCondition() // signal the whole repl is initialized - private val initStart = System.nanoTime - - private def withLock[T](body: => T): T = { - initLock.lock() - try body - finally initLock.unlock() - } - // a condition used to ensure serial access to the compiler. - @volatile private var initIsComplete = false - @volatile private var initError: String = null - private def elapsed() = "%.3f".format((System.nanoTime - initStart).toDouble / 1000000000L) - - // the method to be called when the interpreter is initialized. - // Very important this method does nothing synchronous (i.e. do - // not try to use the interpreter) because until it returns, the - // repl's lazy val `global` is still locked. - protected def initializedCallback() = withLock(initCompilerCondition.signal()) - - // Spins off a thread which awaits a single message once the interpreter - // has been initialized. - protected def createAsyncListener() = { - io.spawn { - withLock(initCompilerCondition.await()) - asyncMessage("[info] compiler init time: " + elapsed() + " s.") - postInitialization() - } - } - - // called from main repl loop - protected def awaitInitialized(): Boolean = { - if (!initIsComplete) - withLock { while (!initIsComplete) initLoopCondition.await() } - if (initError != null) { - println(""" - |Failed to initialize the REPL due to an unexpected error. - |This is a bug, please, report it along with the error diagnostics printed below. 
- |%s.""".stripMargin.format(initError) - ) - false - } else true - } - // private def warningsThunks = List( - // () => intp.bind("lastWarnings", "" + typeTag[List[(Position, String)]], intp.lastWarnings _), - // ) - - protected def postInitThunks = List[Option[() => Unit]]( - Some(intp.setContextClassLoader _), - if (isReplPower) Some(() => enablePowerMode(true)) else None - ).flatten - // ++ ( - // warningsThunks - // ) - // called once after init condition is signalled - protected def postInitialization() { - try { - postInitThunks foreach (f => addThunk(f())) - runThunks() - } catch { - case ex: Throwable => - initError = stackTraceString(ex) - throw ex - } finally { - initIsComplete = true - - if (isAsync) { - asyncMessage("[info] total init time: " + elapsed() + " s.") - withLock(initLoopCondition.signal()) - } - } - } - // code to be executed only after the interpreter is initialized - // and the lazy val `global` can be accessed without risk of deadlock. - private var pendingThunks: List[() => Unit] = Nil - protected def addThunk(body: => Unit) = synchronized { - pendingThunks :+= (() => body) - } - protected def runThunks(): Unit = synchronized { - if (pendingThunks.nonEmpty) - repldbg("Clearing " + pendingThunks.size + " thunks.") - - while (pendingThunks.nonEmpty) { - val thunk = pendingThunks.head - pendingThunks = pendingThunks.tail - thunk() - } - } -} -- cgit v1.2.3 From f56f9a3c4b7b9903c732658f052be1172dfd9baa Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 28 Oct 2012 12:22:24 -0700 Subject: Massively simplified repl name resolution. --- .../scala/tools/nsc/interpreter/ILoop.scala | 32 +- .../scala/tools/nsc/interpreter/IMain.scala | 359 ++++++++++++--------- .../scala/tools/nsc/interpreter/Imports.scala | 14 +- .../tools/nsc/interpreter/MemberHandlers.scala | 42 +-- src/reflect/scala/reflect/internal/Scopes.scala | 1 - test/files/jvm/interpreter.check | 2 +- test/files/run/repl-colon-type.check | 2 +- 7 files changed, 241 insertions(+), 211 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index bb8aa13f6d..d99a1c18f9 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -394,32 +394,12 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } } - protected def newJavap() = new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp)) { - override def tryClass(path: String): Array[Byte] = { - val hd :: rest = path split '.' toList; - // If there are dots in the name, the first segment is the - // key to finding it. - if (rest.nonEmpty) { - intp optFlatName hd match { - case Some(flat) => - val clazz = flat :: rest mkString NAME_JOIN_STRING - val bytes = super.tryClass(clazz) - if (bytes.nonEmpty) bytes - else super.tryClass(clazz + MODULE_SUFFIX_STRING) - case _ => super.tryClass(path) - } - } - else { - // Look for Foo first, then Foo$, but if Foo$ is given explicitly, - // we have to drop the $ to find object Foo, then tack it back onto - // the end of the flattened name. 
- def className = intp flatName path - def moduleName = (intp flatName path.stripSuffix(MODULE_SUFFIX_STRING)) + MODULE_SUFFIX_STRING - - val bytes = super.tryClass(className) - if (bytes.nonEmpty) bytes - else super.tryClass(moduleName) - } + protected def newJavap() = { + val intp = ILoop.this.intp + import intp._ + + new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp)) { + override def tryClass(path: String) = super.tryClass(translatePath(path) getOrElse path) } } private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap()) diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index db27531595..c5f9553634 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -137,6 +137,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends catch AbstractOrMissingHandler() } private def tquoted(s: String) = "\"\"\"" + s + "\"\"\"" + private val logScope = scala.sys.props contains "scala.repl.scope" + private def scopelog(msg: String) = if (logScope) Console.err.println(msg) // argument is a thunk to execute after init is done def initialize(postInitSignal: => Unit) { @@ -173,8 +175,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } import global._ - import definitions.{ termMember, typeMember } - import rootMirror.{RootClass, getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass} + import definitions.{ ObjectClass, termMember, typeMember, dropNullaryMethod} implicit class ReplTypeOps(tp: Type) { def orElse(other: => Type): Type = if (tp ne NoType) tp else other @@ -190,7 +191,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends // make sure we don't overwrite their unwisely named res3 etc. def freshUserTermName(): TermName = { val name = newTermName(freshUserVarName()) - if (definedNameMap contains name) freshUserTermName() + if (replScope containsName name) freshUserTermName() else name } def isUserTermName(name: Name) = isUserVarName("" + name) @@ -280,20 +281,54 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends ensureClassLoader() _classLoader } + + def backticked(s: String): String = ( + (s split '.').toList map { + case "_" => "_" + case s if nme.keywords(newTermName(s)) => s"`$s`" + case s => s + } mkString "." 
+ ) + + abstract class PhaseDependentOps { + def shift[T](op: => T): T + + def lookup(name: Name): Symbol = shift(replScope lookup name) + def path(name: => Name): String = shift(path(symbolOfName(name))) + def path(sym: Symbol): String = backticked(shift(sym.fullName)) + def name(sym: Symbol): Name = shift(sym.name) + def info(sym: Symbol): Type = shift(sym.info) + def sig(sym: Symbol): String = shift(sym.defString) + } + object typerOp extends PhaseDependentOps { + def shift[T](op: => T): T = exitingTyper(op) + } + object flatOp extends PhaseDependentOps { + def shift[T](op: => T): T = exitingFlatten(op) + } + + def originalPath(name: Name): String = typerOp path name + def originalPath(sym: Symbol): String = typerOp path sym + def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName + // def translatePath(path: String) = symbolOfPath(path).fold(Option.empty[String])(flatPath) + def translatePath(path: String) = { + val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path) + sym match { + case NoSymbol => None + case _ => Some(flatPath(sym)) + } + } + private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(replOutput.dir, parent) { /** Overridden here to try translating a simple name to the generated * class name if the original attempt fails. This method is used by * getResourceAsStream as well as findClass. */ - override protected def findAbstractFile(name: String): AbstractFile = { + override protected def findAbstractFile(name: String): AbstractFile = super.findAbstractFile(name) match { - // deadlocks on startup if we try to translate names too early - case null if isInitializeComplete => - generatedName(name) map (x => super.findAbstractFile(x)) orNull - case file => - file + case null => translatePath(name) map (super.findAbstractFile(_)) orNull + case file => file } - } } private def makeClassLoader(): AbstractFileClassLoader = new TranslatingClassLoader(parentClassLoader match { @@ -306,26 +341,12 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends // Set the current Java "context" class loader to this interpreter's class loader def setContextClassLoader() = classLoader.setAsContext() - /** Given a simple repl-defined name, returns the real name of - * the class representing it, e.g. for "Bippy" it may return - * {{{ - * $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$Bippy - * }}} - */ - def generatedName(simpleName: String): Option[String] = { - if (simpleName endsWith nme.MODULE_SUFFIX_STRING) optFlatName(simpleName.init) map (_ + nme.MODULE_SUFFIX_STRING) - else optFlatName(simpleName) - } - def flatName(id: String) = optFlatName(id) getOrElse id - def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id) - - def allDefinedNames = definedNameMap.keys.toList.sorted + def allDefinedNames = exitingTyper(replScope.toList.map(_.name).sorted) def pathToType(id: String): String = pathToName(newTypeName(id)) def pathToTerm(id: String): String = pathToName(newTermName(id)) - def pathToName(name: Name): String = { - if (definedNameMap contains name) - definedNameMap(name) fullPath name - else name.toString + def pathToName(name: Name): String = replScope lookup name match { + case NoSymbol => name.toString + case sym => exitingTyper(sym.fullName) } /** Most recent tree handled which wasn't wholly synthetic. */ @@ -339,50 +360,50 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends None } - /** Stubs for work in progress. 
*/ - def handleTypeRedefinition(name: TypeName, old: Request, req: Request) = { - for (t1 <- old.simpleNameOfType(name) ; t2 <- req.simpleNameOfType(name)) { - repldbg("Redefining type '%s'\n %s -> %s".format(name, t1, t2)) - } - } + private def updateReplScope(sym: Symbol, isDefined: Boolean) { + def log(what: String) { + val mark = if (sym.isType) "t " else "v " + val name = exitingTyper(sym.nameString) + val info = cleanTypeAfterTyper(sym) + val defn = sym defStringSeenAs info - def handleTermRedefinition(name: TermName, old: Request, req: Request) = { - for (t1 <- old.compilerTypeOf get name ; t2 <- req.compilerTypeOf get name) { - // Printing the types here has a tendency to cause assertion errors, like - // assertion failed: fatal: has owner value x, but a class owner is required - // so DBG is by-name now to keep it in the family. (It also traps the assertion error, - // but we don't want to unnecessarily risk hosing the compiler's internal state.) - repldbg("Redefining term '%s'\n %s -> %s".format(name, t1, t2)) + scopelog(f"[$mark$what%6s] $name%-25s $defn%s") } + if (ObjectClass isSubClass sym.owner) return + // unlink previous + replScope lookupAll sym.name foreach { sym => + log("unlink") + replScope unlink sym + } + val what = if (isDefined) "define" else "import" + log(what) + replScope enter sym } def recordRequest(req: Request) { - if (req == null || referencedNameMap == null) + if (req == null) return prevRequests += req - req.referencedNames foreach (x => referencedNameMap(x) = req) // warning about serially defining companions. It'd be easy // enough to just redefine them together but that may not always // be what people want so I'm waiting until I can do it better. - for { - name <- req.definedNames filterNot (x => req.definedNames contains x.companionName) - oldReq <- definedNameMap get name.companionName - newSym <- req.definedSymbols get name - oldSym <- oldReq.definedSymbols get name.companionName - } { - exitingTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym.")) - replwarn("Companions must be defined together; you may wish to use :paste mode for this.") + exitingTyper { + req.defines filterNot (s => req.defines contains s.companionSymbol) foreach { newSym => + val companion = newSym.name.companionName + val found = replScope lookup companion + replScope lookup companion andAlso { oldSym => + replwarn(s"warning: previously defined $oldSym is not a companion to $newSym.") + replwarn("Companions must be defined together; you may wish to use :paste mode for this.") + } + } } - - // Updating the defined name map - req.definedNames foreach { name => - if (definedNameMap contains name) { - if (name.isTypeName) handleTypeRedefinition(name.toTypeName, definedNameMap(name), req) - else handleTermRedefinition(name.toTermName, definedNameMap(name), req) + exitingTyper { + req.imports foreach (sym => updateReplScope(sym, isDefined = false)) + req.defines foreach { sym => + updateReplScope(sym, isDefined = true) } - definedNameMap(name) = req } } @@ -639,8 +660,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends resetClassLoader() resetAllCreators() prevRequests.clear() - referencedNameMap.clear() - definedNameMap.clear() + resetReplScope() replOutput.dir.clear() } @@ -769,6 +789,11 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /** One line of code submitted by the user for interpretation */ // private class Request(val line: String, val trees: List[Tree]) { + def defines = 
defHandlers flatMap (_.definedSymbols) + def imports = importedSymbols + def references = referencedNames map symbolOfName + def value = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol + val reqId = nextReqId() val lineRep = new ReadEvalPrint() @@ -789,11 +814,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /** def and val names */ def termNames = handlers flatMap (_.definesTerm) def typeNames = handlers flatMap (_.definesType) - def definedOrImported = handlers flatMap (_.definedOrImported) - def definedSymbolList = defHandlers flatMap (_.definedSymbols) - - def definedTypeSymbol(name: String) = definedSymbols(newTypeName(name)) - def definedTermSymbol(name: String) = definedSymbols(newTermName(name)) + def importedSymbols = handlers flatMap { + case x: ImportHandler => x.importedSymbols + case _ => Nil + } /** Code to import bound names from previous lines - accessPath is code to * append to objectName to access anything bound by request. @@ -801,26 +825,14 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends val ComputedImports(importsPreamble, importsTrailer, accessPath) = importsCode(referencedNames.toSet) - /** Code to access a variable with the specified name */ - def fullPath(vname: String) = ( - lineRep.readPath + accessPath + ".`%s`".format(vname) - ) - /** Same as fullpath, but after it has been flattened, so: - * $line5.$iw.$iw.$iw.Bippy // fullPath - * $line5.$iw$$iw$$iw$Bippy // fullFlatName - */ - def fullFlatName(name: String) = - lineRep.readPath + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name - /** The unmangled symbol name, but supplemented with line info. */ def disambiguated(name: Name): String = name + " (in " + lineRep + ")" - /** Code to access a variable with the specified name */ - def fullPath(vname: Name): String = fullPath(vname.toString) - /** the line of code to compute */ def toCompute = line + def fullPath(vname: String) = s"${lineRep.readPath}$accessPath.`$vname`" + /** generate the source code for the object that computes this request */ private object ObjectSourceCode extends CodeAssembler[MemberHandler] { def path = pathToTerm("$intp") @@ -829,12 +841,13 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends // $intp is not bound; punt, but include the line. else if (path == "$intp") List( "def $line = " + tquoted(originalLine), + // "def $req = %s.requestForReqId(%s).orNull".format(path, reqId), "def $trees = Nil" ) else List( "def $line = " + tquoted(originalLine), - "def $req = %s.requestForReqId(%s).orNull".format(path, reqId), - "def $trees = if ($req eq null) Nil else $req.trees".format(lineRep.readName, path, reqId) + "def $trees = Nil" + // "def $trees = if ($req eq null) Nil else $req.trees".format(lineRep.readName, path, reqId) ) } @@ -850,13 +863,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /** We only want to generate this code when the result * is a value which can be referred to as-is. 
*/ - val evalResult = - if (!handlers.last.definesValue) "" - else handlers.last.definesTerm match { - case Some(vname) if typeOf contains vname => - "lazy val %s = %s".format(lineRep.resultName, fullPath(vname)) - case _ => "" - } + val evalResult = Request.this.value match { + case NoSymbol => "" + case sym => "lazy val %s = %s".format(lineRep.resultName, originalPath(sym)) + } // first line evaluates object to make sure constructor is run // initial "" so later code can uniformly be: + etc val preamble = """ @@ -878,15 +888,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends val generate = (m: MemberHandler) => m resultExtractionCode Request.this } - // get it - def getEvalTyped[T] : Option[T] = getEval map (_.asInstanceOf[T]) - def getEval: Option[AnyRef] = { - // ensure it has been compiled - compile - // try to load it and call the value method - lineRep.evalValue filterNot (_ == null) - } - /** Compile the object file. Returns whether the compilation succeeded. * If all goes well, the "types" map is computed. */ lazy val compile: Boolean = { @@ -905,7 +906,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends val name = dh.member.name definedSymbols get name foreach { sym => dh.member setSymbol sym - repldbg("Set symbol of " + name + " to " + sym.defString) + repldbg("Set symbol of " + name + " to " + symbolDefString(sym)) } } @@ -919,7 +920,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /* typeOf lookup with encoding */ def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString))) - def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbol.simpleName) + def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbolDirect.simpleName) private def typeMap[T](f: Type => T) = mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x))) @@ -934,7 +935,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends // } lazy val definedSymbols = ( termNames.map(x => x -> applyToResultMember(x, x => x)) ++ - typeNames.map(x => x -> compilerTypeOf(x).typeSymbol) + typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect) ).toMap[Name, Symbol] withDefaultValue NoSymbol lazy val typesOfDefinedTerms = mapFrom[Name, Name, Type](termNames)(x => applyToResultMember(x, _.tpe)) @@ -964,45 +965,64 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends private var mostRecentWarnings: List[(global.Position, String)] = Nil def lastWarnings = mostRecentWarnings - def treesForRequestId(id: Int): List[Tree] = - requestForReqId(id).toList flatMap (_.trees) + private lazy val globalImporter = global.mkImporter(ru) + private lazy val importer = ru.mkImporter(global) + + private implicit def importFromRu(sym: ru.Symbol): global.Symbol = + globalImporter importSymbol sym - def requestForReqId(id: Int): Option[Request] = - if (executingRequest != null && executingRequest.reqId == id) Some(executingRequest) - else prevRequests find (_.reqId == id) + private implicit def importToRu(sym: global.Symbol): ru.Symbol = + importer importSymbol sym - def requestForName(name: Name): Option[Request] = { - assert(definedNameMap != null, "definedNameMap is null") - definedNameMap get name + private def jmirror = ru.rootMirror match { + case j: ru.JavaMirror => j + case _ => null + } + def classOfTerm(id: String): Option[JClass] = symbolOfTerm(id) match { + case NoSymbol => 
None + case sym => Some(jmirror runtimeClass (importer importSymbol sym).asClass) } - def requestForIdent(line: String): Option[Request] = - requestForName(newTermName(line)) orElse requestForName(newTypeName(line)) + def typeOfTerm(id: String): Type = symbolOfTerm(id).tpe - def requestHistoryForName(name: Name): List[Request] = - prevRequests.toList.reverse filter (_.definedNames contains name) + def valueOfTerm(id: String): Option[Any] = exitingTyper { + def value() = { + val sym0 = symbolOfTerm(id) + val sym = (importer importSymbol sym0).asTerm + val mirror = ru.runtimeMirror(classLoader) + val module = mirror.reflectModule(sym.owner.companionSymbol.asModule).instance + val module1 = mirror.reflect(module) + val invoker = module1.reflectField(sym) - def definitionForName(name: Name): Option[MemberHandler] = - requestForName(name) flatMap { req => - req.handlers find (_.definedNames contains name) + invoker.get } - def valueOfTerm(id: String): Option[AnyRef] = - requestForName(newTermName(id)) flatMap (_.getEval) - - def classOfTerm(id: String): Option[JClass] = - valueOfTerm(id) map (_.getClass) + try Some(value()) catch { case _: Exception => None } + } - def typeOfTerm(id: String): Type = newTermName(id) match { - case nme.ROOTPKG => RootClass.tpe - case name => requestForName(name).fold(NoType: Type)(_ compilerTypeOf name) + def symbolOfPath(path: String): Symbol = { + if (path contains '.') { + tryTwice { + if (path endsWith "$") rmirror.staticModule(path.init) + else rmirror.staticModule(path) orElse rmirror.staticClass(path) + } + } + else { + if (path endsWith "$") symbolOfTerm(path.init) + else symbolOfIdent(path) orElse rumirror.staticClass(path) + } } - def symbolOfType(id: String): Symbol = - requestForName(newTypeName(id)).fold(NoSymbol: Symbol)(_ definedTypeSymbol id) + def tryTwice(op: => Symbol): Symbol = { + exitingTyper(op) orElse exitingFlatten(op) + } - def symbolOfTerm(id: String): Symbol = - requestForIdent(newTermName(id)).fold(NoSymbol: Symbol)(_ definedTermSymbol id) + def signatureOf(sym: Symbol) = typerOp sig sym + // exitingTyper(sym.defString) + def symbolOfIdent(id: String): Symbol = symbolOfTerm(id) orElse symbolOfType(id) + def symbolOfType(id: String): Symbol = tryTwice(replScope lookup (id: TypeName)) + def symbolOfTerm(id: String): Symbol = tryTwice(replScope lookup (id: TermName)) + def symbolOfName(id: Name): Symbol = replScope lookup id def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = { classOfTerm(id) flatMap { clazz => @@ -1023,14 +1043,18 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends else NoType } } - def cleanMemberDecl(owner: Symbol, member: Name): Type = exitingTyper { - normalizeNonPublic { - owner.info.nonPrivateDecl(member).tpe_* match { - case NullaryMethodType(tp) => tp - case tp => tp - } - } + + def cleanTypeAfterTyper(sym: => Symbol): Type = { + exitingTyper( + normalizeNonPublic( + dropNullaryMethod( + sym.tpe_* + ) + ) + ) } + def cleanMemberDecl(owner: Symbol, member: Name): Type = + cleanTypeAfterTyper(owner.info nonPrivateDecl member) object exprTyper extends { val repl: IMain.this.type = imain @@ -1049,40 +1073,65 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName def definedTypes = onlyTypes(allDefinedNames) - def definedSymbols = prevRequestList.flatMap(_.definedSymbols.values).toSet[Symbol] - def definedSymbolList = prevRequestList flatMap (_.definedSymbolList) 
filterNot (s => isInternalTermName(s.name)) + def definedSymbols = prevRequestList flatMap (_.defines) toSet + def definedSymbolList = prevRequestList flatMap (_.defines) filterNot (s => isInternalTermName(s.name)) // Terms with user-given names (i.e. not res0 and not synthetic) def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x)) - private def findName(name: Name) = definedSymbols find (_.name == name) getOrElse NoSymbol - /** Translate a repl-defined identifier into a Symbol. */ - def apply(name: String): Symbol = - types(name) orElse terms(name) + def apply(name: String): Symbol = types(name) orElse terms(name) + def types(name: String): Symbol = replScope lookup (name: TypeName) orElse getClassIfDefined(name) + def terms(name: String): Symbol = replScope lookup (name: TermName) orElse getModuleIfDefined(name) + + def types[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol + def terms[T: global.TypeTag] : Symbol = typeOf[T].termSymbol + def apply[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol + + lazy val DummyInfoSymbol = NoSymbol.newValue("replScopeDummy") + private lazy val DummyInfo = TypeRef(NoPrefix, DummyInfoSymbol, Nil) + private def enterDummySymbol(name: Name) = name match { + case x: TermName => replScope enter (NoSymbol.newValue(x) setInfo DummyInfo) + case x: TypeName => replScope enter (NoSymbol.newClass(x) setInfo DummyInfo) + } - def types(name: String): Symbol = { - val tpname = newTypeName(name) - findName(tpname) orElse getClassIfDefined(tpname) + private var _replScope: Scope = _ + private def resetReplScope() { + _replScope = newScope } - def terms(name: String): Symbol = { - val termname = newTypeName(name) - findName(termname) orElse getModuleIfDefined(termname) + def initReplScope() { + languageWildcardSyms foreach { clazz => + importableMembers(clazz) foreach { sym => + updateReplScope(sym, isDefined = false) + } + } } - // [Eugene to Paul] possibly you could make use of TypeTags here - def types[T: ClassTag] : Symbol = types(classTag[T].runtimeClass.getName) - def terms[T: ClassTag] : Symbol = terms(classTag[T].runtimeClass.getName) - def apply[T: ClassTag] : Symbol = apply(classTag[T].runtimeClass.getName) + def replScope = { + if (_replScope eq null) + _replScope = newScope - def classSymbols = allDefSymbols collect { case x: ClassSymbol => x } - def methodSymbols = allDefSymbols collect { case x: MethodSymbol => x } + _replScope + } + def lookupAll(name: String) = (replScope.lookupAll(name: TermName) ++ replScope.lookupAll(name: TypeName)).toList + def unlinkAll(name: String) = { + val syms = lookupAll(name) + syms foreach { sym => + replScope unlink sym + } + enterDummySymbol(name: TermName) + enterDummySymbol(name: TypeName) + syms + } + def isUnlinked(name: Name) = { + symbolOfName(name) match { + case NoSymbol => false + case sym => sym.info.typeSymbolDirect == DummyInfoSymbol + } + } - /** the previous requests this interpreter has processed */ private var executingRequest: Request = _ private val prevRequests = mutable.ListBuffer[Request]() - private val referencedNameMap = mutable.Map[Name, Request]() - private val definedNameMap = mutable.Map[Name, Request]() private val directlyBoundNames = mutable.Set[Name]() def allHandlers = prevRequestList flatMap (_.handlers) diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala index 50db23b042..021f07002b 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala +++ 
b/src/compiler/scala/tools/nsc/interpreter/Imports.scala @@ -12,7 +12,7 @@ trait Imports { self: IMain => import global._ - import definitions.{ ScalaPackage, JavaLangPackage, PredefModule } + import definitions.{ ObjectClass, ScalaPackage, JavaLangPackage, PredefModule } import memberHandlers._ def isNoImports = settings.noimports.value @@ -104,7 +104,9 @@ trait Imports { * last one imported is actually usable. */ case class ComputedImports(prepend: String, append: String, access: String) - protected def importsCode(wanted: Set[Name]): ComputedImports = { + protected def importsCode(wanted0: Set[Name]): ComputedImports = { + val wanted = wanted0 filterNot isUnlinked + /** Narrow down the list of requests from which imports * should be taken. Removes requests which cannot contribute * useful imports for the specified set of wanted names. @@ -173,11 +175,11 @@ trait Imports { // the name of the variable, so that we don't need to // handle quoting keywords separately. case x => - for (imv <- x.definedNames) { - if (currentImps contains imv) addWrapper() + for (sym <- x.definedSymbols) { + if (currentImps contains sym.name) addWrapper() - code append ("import " + (req fullPath imv) + "\n") - currentImps += imv + code append (s"import ${x.path}\n") + currentImps += sym.name } } } diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala index 6348e428f8..95482f1e46 100644 --- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -51,21 +51,20 @@ trait MemberHandlers { def chooseHandler(member: Tree): MemberHandler = member match { case member: DefDef => new DefHandler(member) case member: ValDef => new ValHandler(member) - case member: Assign => new AssignHandler(member) case member: ModuleDef => new ModuleHandler(member) case member: ClassDef => new ClassHandler(member) case member: TypeDef => new TypeAliasHandler(member) + case member: Assign => new AssignHandler(member) case member: Import => new ImportHandler(member) case DocDef(_, documented) => chooseHandler(documented) case member => new GenericHandler(member) } sealed abstract class MemberDefHandler(override val member: MemberDef) extends MemberHandler(member) { - def symbol = if (member.symbol eq null) NoSymbol else member.symbol - def name: Name = member.name - def mods: Modifiers = member.mods - def keyword = member.keyword - def prettyName = name.decode + override def name: Name = member.name + def mods: Modifiers = member.mods + def keyword = member.keyword + def prettyName = name.decode override def definesImplicit = member.mods.isImplicit override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName) @@ -77,6 +76,9 @@ trait MemberHandlers { * in a single interpreter request. 
*/ sealed abstract class MemberHandler(val member: Tree) { + def name: Name = nme.NO_NAME + def path = intp.originalPath(symbol) + def symbol = if (member.symbol eq null) NoSymbol else member.symbol def definesImplicit = false def definesValue = false def isLegalTopLevel = false @@ -87,7 +89,6 @@ trait MemberHandlers { lazy val referencedNames = ImportVarsTraverser(member) def importedNames = List[Name]() def definedNames = definesTerm.toList ++ definesType.toList - def definedOrImported = definedNames ++ importedNames def definedSymbols = List[Symbol]() def extraCodeToEvaluate(req: Request): String = "" @@ -110,10 +111,10 @@ trait MemberHandlers { // if this is a lazy val we avoid evaluating it here val resultString = if (mods.isLazy) codegenln(false, "") - else any2stringOf(req fullPath name, maxStringElements) + else any2stringOf(path, maxStringElements) val vidString = - if (replProps.vids) """" + " @ " + "%%8x".format(System.identityHashCode(%s)) + " """.trim.format(req fullPath name) + if (replProps.vids) s"""" + " @ " + "%%8x".format(System.identityHashCode($path)) + " """.trim else "" """ + "%s%s: %s = " + %s""".format(prettyName, vidString, string2code(req typeOf name), resultString) @@ -132,7 +133,7 @@ trait MemberHandlers { class AssignHandler(member: Assign) extends MemberHandler(member) { val Assign(lhs, rhs) = member - val name = newTermName(freshInternalVarName()) + override lazy val name = newTermName(freshInternalVarName()) override def definesTerm = Some(name) override def definesValue = true @@ -157,6 +158,7 @@ trait MemberHandlers { } class ClassHandler(member: ClassDef) extends MemberDefHandler(member) { + override def definedSymbols = List(symbol, symbol.companionSymbol) filterNot (_ == NoSymbol) override def definesType = Some(name.toTypeName) override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase) override def isLegalTopLevel = true @@ -175,7 +177,11 @@ trait MemberHandlers { class ImportHandler(imp: Import) extends MemberHandler(imp) { val Import(expr, selectors) = imp - def targetType: Type = intp.typeOfExpression("" + expr) + def targetType = intp.global.rootMirror.getModuleIfDefined("" + expr) match { + case NoSymbol => intp.typeOfExpression("" + expr) + case sym => sym.thisType + } + private def importableTargetMembers = importableMembers(targetType).toList override def isLegalTopLevel = true def createImportForName(name: Name): String = { @@ -198,22 +204,16 @@ trait MemberHandlers { /** Whether this import includes a wildcard import */ val importsWildcard = selectorWild.nonEmpty - /** Whether anything imported is implicit .*/ - def importsImplicit = implicitSymbols.nonEmpty - def implicitSymbols = importedSymbols filter (_.isImplicit) def importedSymbols = individualSymbols ++ wildcardSymbols - lazy val individualSymbols: List[Symbol] = - enteringPickler(individualNames map (targetType nonPrivateMember _)) - - lazy val wildcardSymbols: List[Symbol] = - if (importsWildcard) enteringPickler(targetType.nonPrivateMembers.toList) - else Nil + private val selectorNames = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames) toSet + lazy val individualSymbols: List[Symbol] = exitingTyper(importableTargetMembers filter (m => selectorNames(m.name))) + lazy val wildcardSymbols: List[Symbol] = exitingTyper(if (importsWildcard) importableTargetMembers else Nil) /** Complete list of names imported by a wildcard */ lazy val wildcardNames: List[Name] = wildcardSymbols map (_.name) - lazy val individualNames: List[Name] = selectorRenames 
filterNot (_ == nme.USCOREkw) flatMap (_.bothNames) + lazy val individualNames: List[Name] = individualSymbols map (_.name) /** The names imported by this statement */ override lazy val importedNames: List[Name] = wildcardNames ++ individualNames diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 0e1d52cc95..a593a412d7 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -341,7 +341,6 @@ trait Scopes extends api.Scopes { self: SymbolTable => */ def iterator: Iterator[Symbol] = toList.iterator - def containsName(name: Name) = lookupEntry(name) != null def containsSymbol(s: Symbol) = lookupAll(s.name) contains s override def foreach[U](p: Symbol => U): Unit = toList foreach p diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check index 6145b6c4d2..891ed36028 100644 --- a/test/files/jvm/interpreter.check +++ b/test/files/jvm/interpreter.check @@ -357,7 +357,7 @@ defined class Term scala> def f(e: Exp) = e match { // non-exhaustive warning here case _:Fact => 3 } -:18: warning: match is not exhaustive! +:16: warning: match is not exhaustive! missing combination Exp missing combination Term diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check index 35cd04ba87..2e8ce8c801 100644 --- a/test/files/run/repl-colon-type.check +++ b/test/files/run/repl-colon-type.check @@ -52,7 +52,7 @@ scala> :type protected lazy val f = 5 Access to protected value f not permitted because enclosing object $eval in package $line19 is not a subclass of object $iw where target is defined - lazy val $result = `f` + lazy val $result = f ^ -- cgit v1.2.3 From 31ed2e8da246da07a3318c34cdfae2ca02740524 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 29 Oct 2012 07:41:41 -0700 Subject: Moved IMain ops requiring stability into implicit class. A long-standing annoyance of having IMain stored in a var is that you can't call a method on it which returns a dependent type and then pass that to any other method. I realized I could get around this by creating an implicit class around the var; in the class, it is a val, so the method can be written there, and we implicitly convert from the var on demand. --- .../scala/tools/nsc/interpreter/ILoop.scala | 111 +---------------- .../scala/tools/nsc/interpreter/package.scala | 137 +++++++++++++++++++++ test/files/run/repl-colon-type.check | 2 - 3 files changed, 140 insertions(+), 110 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index d99a1c18f9..cde8d81611 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -66,54 +66,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) echoAndRefresh(msg) } - /** Having inherited the difficult "var-ness" of the repl instance, - * I'm trying to work around it by moving operations into a class from - * which it will appear a stable prefix. - */ - private def onIntp[T](f: IMain => T): T = f(intp) - - class IMainOps[T <: IMain](val intp: T) { - import intp._ - import global._ - - def printAfterTyper(msg: => String) = - intp.reporter printUntruncatedMessage exitingTyper(msg) - - /** Strip NullaryMethodType artifacts. 
*/ - private def replInfo(sym: Symbol) = { - sym.info match { - case NullaryMethodType(restpe) if sym.isAccessor => restpe - case info => info - } - } - def echoTypeStructure(sym: Symbol) = - printAfterTyper("" + deconstruct.show(replInfo(sym))) - - def echoTypeSignature(sym: Symbol, verbose: Boolean) = { - if (verbose) ILoop.this.echo("// Type signature") - printAfterTyper("" + replInfo(sym)) - - if (verbose) { - ILoop.this.echo("\n// Internal Type structure") - echoTypeStructure(sym) - } - } - } - implicit def stabilizeIMain(intp: IMain) = new IMainOps[intp.type](intp) - - /** TODO - - * -n normalize - * -l label with case class parameter names - * -c complete - leave nothing out - */ - private def typeCommandInternal(expr: String, verbose: Boolean): Result = { - onIntp { intp => - val sym = intp.symbolOfLine(expr) - if (sym.exists) intp.echoTypeSignature(sym, verbose) - else "" - } - } - override def echoCommandMessage(msg: String) { intp.reporter printUntruncatedMessage msg } @@ -269,7 +221,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) historyCommand, cmd("h?", "", "search the history", searchHistory), cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand), - cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand), + cmd("implicits", "[-v]", "show the implicits in scope", intp.implicitsCommand), cmd("javap", "", "disassemble a file or class name", javapCommand), cmd("load", "", "load and interpret a Scala file", loadCommand), nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand), @@ -312,63 +264,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } } - private def implicitsCommand(line: String): Result = onIntp { intp => - import intp._ - import global._ - - def p(x: Any) = intp.reporter.printMessage("" + x) - - // If an argument is given, only show a source with that - // in its name somewhere. - val args = line split "\\s+" - val filtered = intp.implicitSymbolsBySource filter { - case (source, syms) => - (args contains "-v") || { - if (line == "") (source.fullName.toString != "scala.Predef") - else (args exists (source.name.toString contains _)) - } - } - - if (filtered.isEmpty) - return "No implicits have been imported other than those in Predef." - - filtered foreach { - case (source, syms) => - p("/* " + syms.size + " implicit members imported from " + source.fullName + " */") - - // This groups the members by where the symbol is defined - val byOwner = syms groupBy (_.owner) - val sortedOwners = byOwner.toList sortBy { case (owner, _) => exitingTyper(source.info.baseClasses indexOf owner) } - - sortedOwners foreach { - case (owner, members) => - // Within each owner, we cluster results based on the final result type - // if there are more than a couple, and sort each cluster based on name. - // This is really just trying to make the 100 or so implicits imported - // by default into something readable. 
- val memberGroups: List[List[Symbol]] = { - val groups = members groupBy (_.tpe.finalResultType) toList - val (big, small) = groups partition (_._2.size > 3) - val xss = ( - (big sortBy (_._1.toString) map (_._2)) :+ - (small flatMap (_._2)) - ) - - xss map (xs => xs sortBy (_.name.toString)) - } - - val ownerMessage = if (owner == source) " defined in " else " inherited from " - p(" /* " + members.size + ownerMessage + owner.fullName + " */") - - memberGroups foreach { group => - group foreach (s => p(" " + intp.symbolDefString(s))) - p("") - } - } - p("") - } - } - private def findToolsJar() = { val jdkPath = Directory(jdkHome) val jar = jdkPath / "lib" / "tools.jar" toFile; @@ -408,8 +303,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) private def typeCommand(line0: String): Result = { line0.trim match { case "" => ":type [-v] " - case s if s startsWith "-v " => typeCommandInternal(s stripPrefix "-v " trim, true) - case s => typeCommandInternal(s, false) + case s if s startsWith "-v " => intp.typeCommandInternal(s stripPrefix "-v " trim, true) + case s => intp.typeCommandInternal(s, false) } } diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala index e3440c9f8b..6a2d69db2c 100644 --- a/src/compiler/scala/tools/nsc/interpreter/package.scala +++ b/src/compiler/scala/tools/nsc/interpreter/package.scala @@ -6,6 +6,10 @@ package scala.tools.nsc import scala.language.implicitConversions +import scala.reflect.{ classTag, ClassTag } +import scala.reflect.runtime.{ universe => ru } +import scala.reflect.{ClassTag, classTag} +import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse} /** The main REPL related classes and values are as follows. * In addition to standard compiler classes Global and Settings, there are: @@ -46,4 +50,137 @@ package object interpreter extends ReplConfig with ReplStrings { private[nsc] implicit def enrichAnyRefWithTap[T](x: T) = new TapMaker(x) private[nsc] def tracing[T](msg: String)(x: T): T = x.tapTrace(msg) private[nsc] def debugging[T](msg: String)(x: T) = x.tapDebug(msg) + + private val ourClassloader = getClass.getClassLoader + + def staticTypeTag[T: ClassTag]: ru.TypeTag[T] = ru.TypeTag[T]( + ru.runtimeMirror(ourClassloader), + new TypeCreator { + def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type = + m.staticClass(classTag[T].runtimeClass.getName).toTypeConstructor.asInstanceOf[U # Type] + }) + + /** This class serves to trick the compiler into treating a var + * (intp, in ILoop) as a stable identifier. + */ + implicit class IMainOps(val intp: IMain) { + import intp._ + import global.{ reporter => _, _ } + import definitions._ + + lazy val tagOfStdReplVals = staticTypeTag[scala.tools.nsc.interpreter.StdReplVals] + + protected def echo(msg: String) = { + Console.out println msg + Console.out.flush() + } + + def wrapCommand(line: String): String = { + def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T" + + words(line) match { + case Nil => + intp.executionWrapper match { + case "" => "No execution wrapper is set." + case s => "Current execution wrapper: " + s + } + case "clear" :: Nil => + intp.executionWrapper match { + case "" => "No execution wrapper is set." + case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper." 
+ } + case wrapper :: Nil => + intp.typeOfExpression(wrapper) match { + case PolyType(List(targ), MethodType(List(arg), restpe)) => + setExecutionWrapper(originalPath(wrapper)) + "Set wrapper to '" + wrapper + "'" + case tp => + failMsg + "\nFound: " + } + case _ => failMsg + } + } + + def implicitsCommand(line: String): String = { + def p(x: Any) = intp.reporter.printMessage("" + x) + + // If an argument is given, only show a source with that + // in its name somewhere. + val args = line split "\\s+" + val filtered = intp.implicitSymbolsBySource filter { + case (source, syms) => + (args contains "-v") || { + if (line == "") (source.fullName.toString != "scala.Predef") + else (args exists (source.name.toString contains _)) + } + } + + if (filtered.isEmpty) + return "No implicits have been imported other than those in Predef." + + filtered foreach { + case (source, syms) => + p("/* " + syms.size + " implicit members imported from " + source.fullName + " */") + + // This groups the members by where the symbol is defined + val byOwner = syms groupBy (_.owner) + val sortedOwners = byOwner.toList sortBy { case (owner, _) => exitingTyper(source.info.baseClasses indexOf owner) } + + sortedOwners foreach { + case (owner, members) => + // Within each owner, we cluster results based on the final result type + // if there are more than a couple, and sort each cluster based on name. + // This is really just trying to make the 100 or so implicits imported + // by default into something readable. + val memberGroups: List[List[Symbol]] = { + val groups = members groupBy (_.tpe.finalResultType) toList + val (big, small) = groups partition (_._2.size > 3) + val xss = ( + (big sortBy (_._1.toString) map (_._2)) :+ + (small flatMap (_._2)) + ) + + xss map (xs => xs sortBy (_.name.toString)) + } + + val ownerMessage = if (owner == source) " defined in " else " inherited from " + p(" /* " + members.size + ownerMessage + owner.fullName + " */") + + memberGroups foreach { group => + group foreach (s => p(" " + intp.symbolDefString(s))) + p("") + } + } + p("") + } + "" + } + + /** TODO - + * -n normalize + * -l label with case class parameter names + * -c complete - leave nothing out + */ + def typeCommandInternal(expr: String, verbose: Boolean): Unit = + symbolOfLine(expr) andAlso (echoTypeSignature(_, verbose)) + + def printAfterTyper(msg: => String) = + reporter printUntruncatedMessage exitingTyper(msg) + + private def replInfo(sym: Symbol) = + if (sym.isAccessor) dropNullaryMethod(sym.info) else sym.info + + def echoTypeStructure(sym: Symbol) = + printAfterTyper("" + deconstruct.show(replInfo(sym))) + + def echoTypeSignature(sym: Symbol, verbose: Boolean) = { + if (verbose) echo("// Type signature") + printAfterTyper("" + replInfo(sym)) + + if (verbose) { + echo("\n// Internal Type structure") + echoTypeStructure(sym) + } + } + } } diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check index 2e8ce8c801..7716221f54 100644 --- a/test/files/run/repl-colon-type.check +++ b/test/files/run/repl-colon-type.check @@ -14,7 +14,6 @@ scala> :type List[1, 2, 3] List[1, 2, 3] ^ - scala> :type List(1, 2, 3) List[Int] @@ -55,7 +54,6 @@ scala> :type protected lazy val f = 5 lazy val $result = f ^ - scala> :type def f = 5 => Int -- cgit v1.2.3 From 8da7e37674d771e177445cc0c56eab7b7016c2f2 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 28 Oct 2012 14:27:21 -0700 Subject: Cleanups to the previous repl commits. 
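This pass mostly tightens the previous two commits: getClassIfDefined / getModuleIfDefined now try the runtime mirror and fall back to the root mirror behind a small noFatal guard, a single lazily created runtimeMirror replaces the ad hoc mirrors used by classOfTerm and valueOfTerm, and the old pathToType / pathToTerm / pathToName helpers give way to originalPath.

As a rough standalone sketch of the fall-back lookup idea (the object name LookupSketch, the lookup method, the example paths and the choice to catch ScalaReflectionException are all illustrative; the patch itself guards against FatalError on the compiler side):

    import scala.reflect.runtime.{ universe => ru }

    object LookupSketch {
      private val mirror = ru.runtimeMirror(getClass.getClassLoader)

      // turn "not found" errors into NoSymbol so callers can try an alternative
      private def noFatal(body: => ru.Symbol): ru.Symbol =
        try body catch { case _: ScalaReflectionException => ru.NoSymbol }

      // try the class first, then the module, much like getPathIfDefined
      def lookup(path: String): ru.Symbol = {
        val cls = noFatal(mirror.staticClass(path))
        if (cls != ru.NoSymbol) cls else noFatal(mirror.staticModule(path))
      }

      def main(args: Array[String]): Unit = {
        println(lookup("scala.collection.immutable.List")) // class List
        println(lookup("scala.Predef"))                    // object Predef
        println(lookup("does.not.Exist"))                  // <none>
      }
    }

The two-step lookup presumably keeps names resolvable whether they live only in the REPL's own output directory or in pre-compiled classes known to the compiler's root mirror.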
--- .../scala/tools/nsc/interpreter/ILoop.scala | 3 +- .../scala/tools/nsc/interpreter/IMain.scala | 120 +++++++++------------ .../scala/tools/nsc/interpreter/Imports.scala | 64 ++++++----- .../scala/tools/nsc/interpreter/Power.scala | 25 ++--- test/files/jvm/interpreter.check | 2 +- 5 files changed, 97 insertions(+), 117 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index cde8d81611..cf525d5cfc 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -329,7 +329,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } } - private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent" + private def pathToPhaseWrapper = intp.originalPath("$r") + ".phased.atCurrent" + private def phaseCommand(name: String): Result = { val phased: Phased = power.phased import phased.NoPhaseName diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index c5f9553634..0ef27ac96a 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -177,6 +177,23 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends import global._ import definitions.{ ObjectClass, termMember, typeMember, dropNullaryMethod} + lazy val runtimeMirror = ru.runtimeMirror(classLoader) + + private def noFatal(body: => Symbol): Symbol = try body catch { case _: FatalError => NoSymbol } + + def getClassIfDefined(path: String) = ( + noFatal(runtimeMirror staticClass path) + orElse noFatal(rootMirror staticClass path) + ) + def getModuleIfDefined(path: String) = ( + noFatal(runtimeMirror staticModule path) + orElse noFatal(rootMirror staticModule path) + ) + def getPathIfDefined(path: String) = ( + if (path endsWith "$") getModuleIfDefined(path.init) + else getClassIfDefined(path) + ) + implicit class ReplTypeOps(tp: Type) { def orElse(other: => Type): Type = if (tp ne NoType) tp else other def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp) @@ -307,9 +324,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def shift[T](op: => T): T = exitingFlatten(op) } - def originalPath(name: Name): String = typerOp path name - def originalPath(sym: Symbol): String = typerOp path sym - def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName + def originalPath(name: String): String = originalPath(name: TermName) + def originalPath(name: Name): String = typerOp path name + def originalPath(sym: Symbol): String = typerOp path sym + def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName // def translatePath(path: String) = symbolOfPath(path).fold(Option.empty[String])(flatPath) def translatePath(path: String) = { val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path) @@ -341,13 +359,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends // Set the current Java "context" class loader to this interpreter's class loader def setContextClassLoader() = classLoader.setAsContext() - def allDefinedNames = exitingTyper(replScope.toList.map(_.name).sorted) - def pathToType(id: String): String = pathToName(newTypeName(id)) - def pathToTerm(id: String): String = pathToName(newTermName(id)) - def pathToName(name: Name): String = replScope lookup name match { - case NoSymbol => 
name.toString - case sym => exitingTyper(sym.fullName) - } + def allDefinedNames: List[Name] = exitingTyper(replScope.toList.map(_.name).sorted) + def unqualifiedIds: List[String] = allDefinedNames map (_.decode) sorted /** Most recent tree handled which wasn't wholly synthetic. */ private def mostRecentlyHandledTree: Option[Tree] = { @@ -401,9 +414,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } exitingTyper { req.imports foreach (sym => updateReplScope(sym, isDefined = false)) - req.defines foreach { sym => - updateReplScope(sym, isDefined = true) - } + req.defines foreach (sym => updateReplScope(sym, isDefined = true)) } } @@ -746,7 +757,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends * following accessPath into the outer one. */ def resolvePathToSymbol(accessPath: String): Symbol = { - val readRoot = getRequiredModule(readPath) // the outermost wrapper + val readRoot = getModuleIfDefined(readPath) // the outermost wrapper (accessPath split '.').foldLeft(readRoot: Symbol) { case (sym, "") => sym case (sym, name) => exitingTyper(termMember(sym, name)) @@ -823,7 +834,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends * append to objectName to access anything bound by request. */ val ComputedImports(importsPreamble, importsTrailer, accessPath) = - importsCode(referencedNames.toSet) + exitingTyper(importsCode(referencedNames.toSet)) /** The unmangled symbol name, but supplemented with line info. */ def disambiguated(name: Name): String = name + " (in " + lineRep + ")" @@ -835,7 +846,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /** generate the source code for the object that computes this request */ private object ObjectSourceCode extends CodeAssembler[MemberHandler] { - def path = pathToTerm("$intp") + def path = originalPath("$intp") def envLines = { if (!isReplPower) Nil // power mode only for now // $intp is not bound; punt, but include the line. @@ -930,9 +941,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /** String representations of same. 
*/ lazy val typeOf = typeMap[String](tp => exitingTyper(tp.toString)) - // lazy val definedTypes: Map[Name, Type] = { - // typeNames map (x => x -> exitingTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap - // } lazy val definedSymbols = ( termNames.map(x => x -> applyToResultMember(x, x => x)) ++ typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect) @@ -965,22 +973,18 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends private var mostRecentWarnings: List[(global.Position, String)] = Nil def lastWarnings = mostRecentWarnings - private lazy val globalImporter = global.mkImporter(ru) - private lazy val importer = ru.mkImporter(global) - - private implicit def importFromRu(sym: ru.Symbol): global.Symbol = - globalImporter importSymbol sym - - private implicit def importToRu(sym: global.Symbol): ru.Symbol = - importer importSymbol sym - - private def jmirror = ru.rootMirror match { - case j: ru.JavaMirror => j + private lazy val importToGlobal = global mkImporter ru + private lazy val importToRuntime = ru mkImporter global + private lazy val javaMirror = ru.rootMirror match { + case x: ru.JavaMirror => x case _ => null } + private implicit def importFromRu(sym: ru.Symbol): Symbol = importToGlobal importSymbol sym + private implicit def importToRu(sym: Symbol): ru.Symbol = importToRuntime importSymbol sym + def classOfTerm(id: String): Option[JClass] = symbolOfTerm(id) match { case NoSymbol => None - case sym => Some(jmirror runtimeClass (importer importSymbol sym).asClass) + case sym => Some(javaMirror runtimeClass importToRu(sym).asClass) } def typeOfTerm(id: String): Type = symbolOfTerm(id).tpe @@ -988,10 +992,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def valueOfTerm(id: String): Option[Any] = exitingTyper { def value() = { val sym0 = symbolOfTerm(id) - val sym = (importer importSymbol sym0).asTerm - val mirror = ru.runtimeMirror(classLoader) - val module = mirror.reflectModule(sym.owner.companionSymbol.asModule).instance - val module1 = mirror.reflect(module) + val sym = (importToRuntime importSymbol sym0).asTerm + val module = runtimeMirror.reflectModule(sym.owner.companionSymbol.asModule).instance + val module1 = runtimeMirror.reflect(module) val invoker = module1.reflectField(sym) invoker.get @@ -1000,29 +1003,20 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends try Some(value()) catch { case _: Exception => None } } - def symbolOfPath(path: String): Symbol = { - if (path contains '.') { - tryTwice { - if (path endsWith "$") rmirror.staticModule(path.init) - else rmirror.staticModule(path) orElse rmirror.staticClass(path) - } - } - else { - if (path endsWith "$") symbolOfTerm(path.init) - else symbolOfIdent(path) orElse rumirror.staticClass(path) - } - } - - def tryTwice(op: => Symbol): Symbol = { - exitingTyper(op) orElse exitingFlatten(op) - } + /** It's a bit of a shotgun approach, but for now we will gain in + * robustness. Try a symbol-producing operation at phase typer, and + * if that is NoSymbol, try again at phase flatten. I'll be able to + * lose this and run only from exitingTyper as soon as I figure out + * exactly where a flat name is sneaking in when calculating imports. 
+ */ + def tryTwice(op: => Symbol): Symbol = exitingTyper(op) orElse exitingFlatten(op) - def signatureOf(sym: Symbol) = typerOp sig sym - // exitingTyper(sym.defString) - def symbolOfIdent(id: String): Symbol = symbolOfTerm(id) orElse symbolOfType(id) - def symbolOfType(id: String): Symbol = tryTwice(replScope lookup (id: TypeName)) - def symbolOfTerm(id: String): Symbol = tryTwice(replScope lookup (id: TermName)) - def symbolOfName(id: Name): Symbol = replScope lookup id + def signatureOf(sym: Symbol) = typerOp sig sym + def symbolOfPath(path: String): Symbol = exitingTyper(getPathIfDefined(path)) + def symbolOfIdent(id: String): Symbol = symbolOfTerm(id) orElse symbolOfType(id) + def symbolOfType(id: String): Symbol = tryTwice(replScope lookup (id: TypeName)) + def symbolOfTerm(id: String): Symbol = tryTwice(replScope lookup (id: TermName)) + def symbolOfName(id: Name): Symbol = replScope lookup id def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = { classOfTerm(id) flatMap { clazz => @@ -1068,8 +1062,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def typeOfExpression(expr: String, silent: Boolean = true): Type = exprTyper.typeOfExpression(expr, silent) - protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x } - protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x } + protected def onlyTerms(xs: List[Name]): List[TermName] = xs collect { case x: TermName => x } + protected def onlyTypes(xs: List[Name]): List[TypeName] = xs collect { case x: TypeName => x } def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName def definedTypes = onlyTypes(allDefinedNames) @@ -1144,14 +1138,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames) def importHandlers = allHandlers collect { case x: ImportHandler => x } - def visibleTermNames: List[Name] = definedTerms ++ importedTerms distinct - - /** Another entry point for tab-completion, ids in scope */ - def unqualifiedIds = visibleTermNames map (_.toString) filterNot (_ contains "$") sorted - - /** Parse the ScalaSig to find type aliases */ - def aliasForType(path: String) = ByteCode.aliasForType(path) - def withoutUnwrapping(op: => Unit): Unit = { val saved = isettings.unwrapStrings isettings.unwrapStrings = false diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala index 021f07002b..c5048ebfd8 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Imports.scala @@ -148,44 +148,42 @@ trait Imports { code append "object %s {\n".format(impname) trailingBraces append "}\n" accessPath append ("." 
+ impname) - - currentImps.clear + currentImps.clear() + } + def maybeWrap(names: Name*) = if (names exists currentImps) addWrapper() + def wrapBeforeAndAfter[T](op: => T): T = { + addWrapper() + try op finally addWrapper() } - - addWrapper() // loop through previous requests, adding imports for each one - for (ReqAndHandler(req, handler) <- reqsToUse) { - handler match { - // If the user entered an import, then just use it; add an import wrapping - // level if the import might conflict with some other import - case x: ImportHandler => - if (x.importsWildcard || currentImps.exists(x.importedNames contains _)) - addWrapper() - - code append (x.member + "\n") - - // give wildcard imports a import wrapper all to their own - if (x.importsWildcard) addWrapper() - else currentImps ++= x.importedNames - - // For other requests, import each defined name. - // import them explicitly instead of with _, so that - // ambiguity errors will not be generated. Also, quote - // the name of the variable, so that we don't need to - // handle quoting keywords separately. - case x => - for (sym <- x.definedSymbols) { - if (currentImps contains sym.name) addWrapper() - - code append (s"import ${x.path}\n") - currentImps += sym.name - } + wrapBeforeAndAfter { + for (ReqAndHandler(req, handler) <- reqsToUse) { + handler match { + // If the user entered an import, then just use it; add an import wrapping + // level if the import might conflict with some other import + case x: ImportHandler if x.importsWildcard => + wrapBeforeAndAfter(code append (x.member + "\n")) + case x: ImportHandler => + maybeWrap(x.importedNames: _*) + code append (x.member + "\n") + currentImps ++= x.importedNames + + // For other requests, import each defined name. + // import them explicitly instead of with _, so that + // ambiguity errors will not be generated. Also, quote + // the name of the variable, so that we don't need to + // handle quoting keywords separately. + case x => + for (sym <- x.definedSymbols) { + maybeWrap(sym.name) + code append s"import ${x.path}\n" + currentImps += sym.name + } + } } } - // add one extra wrapper, to prevent warnings in the common case of - // redefining the value bound in the last interpreter request. - addWrapper() + ComputedImports(code.toString, trailingBraces.toString, accessPath.toString) } diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala index 0af295c8af..ab0f1c0033 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Power.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala @@ -145,7 +145,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re // First we create the ReplVals instance and bind it to $r intp.bind("$r", replVals) // Then we import everything from $r. - intp interpret ("import " + intp.pathToTerm("$r") + "._") + intp interpret ("import " + intp.originalPath("$r") + "._") // And whatever else there is to do. 
init.lines foreach (intp interpret _) } @@ -406,20 +406,15 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re lazy val rutil: ReplUtilities = new ReplUtilities { } lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { } - def context(code: String) = analyzer.rootContext(unit(code)) - def source(code: String) = newSourceFile(code) - def unit(code: String) = newCompilationUnit(code) - def trees(code: String) = parse(code) getOrElse Nil - def typeOf(id: String) = intp.typeOfExpression(id) + def context(code: String) = analyzer.rootContext(unit(code)) + def source(code: String) = newSourceFile(code) + def unit(code: String) = newCompilationUnit(code) + def trees(code: String) = parse(code) getOrElse Nil + def typeOf(id: String) = intp.typeOfExpression(id) - override def toString = """ + override def toString = s""" |** Power mode status ** - |Default phase: %s - |Names: %s - |Identifiers: %s - """.stripMargin.format( - phased.get, - intp.allDefinedNames mkString " ", - intp.unqualifiedIds mkString " " - ) + |Default phase: ${phased.get} + |Names: ${intp.unqualifiedIds mkString " "} + """.stripMargin } diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check index 891ed36028..6145b6c4d2 100644 --- a/test/files/jvm/interpreter.check +++ b/test/files/jvm/interpreter.check @@ -357,7 +357,7 @@ defined class Term scala> def f(e: Exp) = e match { // non-exhaustive warning here case _:Fact => 3 } -:16: warning: match is not exhaustive! +:18: warning: match is not exhaustive! missing combination Exp missing combination Term -- cgit v1.2.3 From 3292c4a8473d51bfe6d35522196d094081152fd9 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 5 Nov 2012 07:05:01 -0800 Subject: A utility function to summarize an exception. 
Now when I start the repl and trigger an exception during initialization, it offers a one line message like: uncaught exception during compilation: AssertionError("assertion failed: (14,13)") @ scala.reflect.internal.Symbols$Symbol.updateInfo(Symbols.scala:1309) Which is a marked debugging improvement from RC1: uncaught exception during compilation: java.lang.AssertionError --- src/compiler/scala/tools/nsc/Global.scala | 4 ++-- src/compiler/scala/tools/nsc/util/package.scala | 12 ++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index f0984c2ebc..552146b9d4 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1525,8 +1525,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val shown = if (settings.verbose.value) stackTraceString(ex) else - ex.getClass.getName - // ex.printStackTrace(Console.out) // DEBUG for fsc, note that error stacktraces do not print in fsc + stackTraceHeadString(ex) // note that error stacktraces do not print in fsc + globalError(supplementErrorMessage("uncaught exception during compilation: " + shown)) throw ex } diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala index d34d4ee092..e9dcaa8e16 100644 --- a/src/compiler/scala/tools/nsc/util/package.scala +++ b/src/compiler/scala/tools/nsc/util/package.scala @@ -83,6 +83,18 @@ package object util { } def stackTraceString(ex: Throwable): String = stringFromWriter(ex printStackTrace _) + /** A one line string which contains the class of the exception, the + * message if any, and the first non-Predef location in the stack trace + * (to exclude assert, require, etc.) + */ + def stackTraceHeadString(ex: Throwable): String = { + val frame = ex.getStackTrace.dropWhile(_.getClassName contains "Predef").head + val msg = ex.getMessage match { case null | "" => "" ; case s => s"""("$s")""" } + val clazz = ex.getClass.getName.split('.').last + + s"$clazz$msg @ $frame" + } + lazy val trace = new SimpleTracer(System.out) lazy val errtrace = new SimpleTracer(System.err) -- cgit v1.2.3 From c7a2e3961c7ae9cc2986ceba89427890c6a586d8 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 9 Nov 2012 16:49:37 -0700 Subject: Restore briefly awol // 3 in explicitouter --- src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 073887a778..13e7e17951 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -158,7 +158,7 @@ abstract class ExplicitOuter extends InfoTransform var decls1 = decls if (isInner(clazz) && !clazz.isInterface) { decls1 = decls.cloneScope - decls1 enter newOuterAccessor(clazz) + decls1 enter newOuterAccessor(clazz) // 3 if (hasOuterField(clazz)) //2 decls1 enter newOuterField(clazz) } -- cgit v1.2.3 From 3baa06debb0c8a1be401e4338960569d71fa1af8 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 9 Nov 2012 21:29:06 -0700 Subject: Fix merge error. I fixed that already and somehow it didn't appear in my push. 
--- src/compiler/scala/tools/nsc/Global.scala | 2 +- src/compiler/scala/tools/nsc/interpreter/ILoop.scala | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 552146b9d4..13bec828ca 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -11,7 +11,7 @@ import scala.compat.Platform.currentTime import scala.collection.{ mutable, immutable } import io.{ SourceReader, AbstractFile, Path } import reporters.{ Reporter, ConsoleReporter } -import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString } +import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString, stackTraceHeadString } import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile } import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat } import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers } diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index cf525d5cfc..74549ef558 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -9,6 +9,7 @@ package interpreter import Predef.{ println => _, _ } import java.io.{ BufferedReader, FileReader } import session._ +import scala.annotation.tailrec import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName } import scala.tools.util.{ Javap } import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream } -- cgit v1.2.3 From 92daa5eda501b1b3a4368a42963af6df578906c4 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 10 Nov 2012 20:39:01 -0700 Subject: Address obvious bug in MutableSettings. If x startsWith "-" it seems unlikely that x == "". Free with purchase: test case with 100 argument permutations. That's only a smidgen shy of infinity. 
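[Note] The subtlety behind this fix is worth spelling out: an empty string can never start with "-", so the old empty-string check nested inside the option branch could never fire, while an empty string is still a perfectly good argument to an option, as in -cp "". The fix therefore hoists the empty-string case to the top of the match. The sketch below uses invented names (ArgParseSketch, Parsed) and only illustrates that option-position versus argument-position distinction; it is not the actual MutableSettings code:

    object ArgParseSketch {
      final case class Parsed(classpath: Option[String] = None, deprecation: Boolean = false)

      /** Discard empty tokens only when they sit where an option should be;
       *  an empty string following -cp is consumed as that option's value. */
      def parse(args: List[String], acc: Parsed = Parsed()): Parsed = args match {
        case Nil                    => acc
        case "" :: rest             => parse(rest, acc)                            // empty in option position: drop it
        case "-cp" :: cp :: rest    => parse(rest, acc.copy(classpath = Some(cp))) // "" is a legal classpath value here
        case "-deprecation" :: rest => parse(rest, acc.copy(deprecation = true))
        case _ :: rest              => parse(rest, acc)                            // residual args such as source files
      }

      def main(args: Array[String]): Unit =
        // the stray leading "" is dropped, the empty classpath value survives
        println(parse(List("", "-cp", "", "-deprecation", "foo.scala")))
    }
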
--- .../scala/tools/nsc/settings/AbsSettings.scala | 2 +- .../scala/tools/nsc/settings/MutableSettings.scala | 35 +- test/files/run/settings-parse.check | 566 +++++++++++++++++++++ test/files/run/settings-parse.scala | 27 + 4 files changed, 608 insertions(+), 22 deletions(-) create mode 100644 test/files/run/settings-parse.check create mode 100644 test/files/run/settings-parse.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala index adabeb02a3..e965370713 100644 --- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala @@ -133,7 +133,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { case _ => false } override def hashCode() = name.hashCode + value.hashCode - override def toString() = name + " = " + value + override def toString() = name + " = " + (if (value == "") "\"\"" else value) } trait InternalSetting extends AbsSetting { diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 7eae2295f6..4f4f0544da 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -62,30 +62,23 @@ class MutableSettings(val errorFn: String => Unit) (checkDependencies, residualArgs) case "--" :: xs => (checkDependencies, xs) + // discard empties, sometimes they appear because of ant or etc. + // but discard carefully, because an empty string is valid as an argument + // to an option, e.g. -cp "" . So we discard them only when they appear + // where an option should be, not where an argument to an option should be. + case "" :: xs => + loop(xs, residualArgs) case x :: xs => - val isOpt = x startsWith "-" - if (isOpt) { - val newArgs = parseParams(args) - if (args eq newArgs) { - errorFn(s"bad option: '$x'") - (false, args) + if (x startsWith "-") { + parseParams(args) match { + case newArgs if newArgs eq args => errorFn(s"bad option: '$x'") ; (false, args) + case newArgs => loop(newArgs, residualArgs) } - // discard empties, sometimes they appear because of ant or etc. - // but discard carefully, because an empty string is valid as an argument - // to an option, e.g. -cp "" . So we discard them only when they appear - // in option position. - else if (x == "") { - loop(xs, residualArgs) - } - else lookupSetting(x) match { - case Some(s) if s.shouldStopProcessing => (checkDependencies, newArgs) - case _ => loop(newArgs, residualArgs) - } - } - else { - if (processAll) loop(xs, residualArgs :+ x) - else (checkDependencies, args) } + else if (processAll) + loop(xs, residualArgs :+ x) + else + (checkDependencies, args) } loop(arguments, Nil) } diff --git a/test/files/run/settings-parse.check b/test/files/run/settings-parse.check new file mode 100644 index 0000000000..18145c9100 --- /dev/null +++ b/test/files/run/settings-parse.check @@ -0,0 +1,566 @@ +0) List(-cp, ) ==> Settings { + -d = . + -classpath = "" +} + +1) List(-cp, , ) ==> Settings { + -d = . + -classpath = "" +} + +2) List(, -cp, ) ==> Settings { + -d = . + -classpath = "" +} + +3) List(-cp, , -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +4) List(-cp, , , -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +5) List(-cp, , -deprecation, ) ==> Settings { + -d = . 
+ -deprecation = true + -classpath = "" +} + +6) List(, -cp, , -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +7) List(-cp, , -deprecation, foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +8) List(-cp, , , -deprecation, foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +9) List(-cp, , -deprecation, , foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +10) List(-cp, , -deprecation, foo.scala, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +11) List(, -cp, , -deprecation, foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +12) List(-cp, , foo.scala) ==> Settings { + -d = . + -classpath = "" +} + +13) List(-cp, , , foo.scala) ==> Settings { + -d = . + -classpath = "" +} + +14) List(-cp, , foo.scala, ) ==> Settings { + -d = . + -classpath = "" +} + +15) List(, -cp, , foo.scala) ==> Settings { + -d = . + -classpath = "" +} + +16) List(-cp, , foo.scala, -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +17) List(-cp, , , foo.scala, -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +18) List(-cp, , foo.scala, , -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +19) List(-cp, , foo.scala, -deprecation, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +20) List(, -cp, , foo.scala, -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +21) List(-deprecation, -cp, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +22) List(, -deprecation, -cp, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +23) List(-deprecation, -cp, , ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +24) List(-deprecation, , -cp, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +25) List(-deprecation, -cp, , foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +26) List(, -deprecation, -cp, , foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +27) List(-deprecation, -cp, , , foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +28) List(-deprecation, -cp, , foo.scala, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +29) List(-deprecation, , -cp, , foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +30) List(-deprecation, foo.scala, -cp, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +31) List(, -deprecation, foo.scala, -cp, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +32) List(-deprecation, , foo.scala, -cp, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +33) List(-deprecation, foo.scala, -cp, , ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +34) List(-deprecation, foo.scala, , -cp, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +35) List(foo.scala, -cp, ) ==> Settings { + -d = . + -classpath = "" +} + +36) List(, foo.scala, -cp, ) ==> Settings { + -d = . + -classpath = "" +} + +37) List(foo.scala, -cp, , ) ==> Settings { + -d = . + -classpath = "" +} + +38) List(foo.scala, , -cp, ) ==> Settings { + -d = . + -classpath = "" +} + +39) List(foo.scala, -cp, , -deprecation) ==> Settings { + -d = . 
+ -deprecation = true + -classpath = "" +} + +40) List(, foo.scala, -cp, , -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +41) List(foo.scala, -cp, , , -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +42) List(foo.scala, -cp, , -deprecation, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +43) List(foo.scala, , -cp, , -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +44) List(foo.scala, -deprecation, -cp, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +45) List(, foo.scala, -deprecation, -cp, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +46) List(foo.scala, , -deprecation, -cp, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +47) List(foo.scala, -deprecation, -cp, , ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +48) List(foo.scala, -deprecation, , -cp, ) ==> Settings { + -d = . + -deprecation = true + -classpath = "" +} + +0) List(-cp, /tmp:/bippy) ==> Settings { + -d = . + -classpath = /tmp:/bippy +} + +1) List(-cp, /tmp:/bippy, ) ==> Settings { + -d = . + -classpath = /tmp:/bippy +} + +2) List(, -cp, /tmp:/bippy) ==> Settings { + -d = . + -classpath = /tmp:/bippy +} + +3) List(-cp, /tmp:/bippy, -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +4) List(-cp, /tmp:/bippy, , -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +5) List(-cp, /tmp:/bippy, -deprecation, ) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +6) List(, -cp, /tmp:/bippy, -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +7) List(-cp, /tmp:/bippy, -deprecation, foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +8) List(-cp, /tmp:/bippy, , -deprecation, foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +9) List(-cp, /tmp:/bippy, -deprecation, , foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +10) List(-cp, /tmp:/bippy, -deprecation, foo.scala, ) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +11) List(, -cp, /tmp:/bippy, -deprecation, foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +12) List(-cp, /tmp:/bippy, foo.scala) ==> Settings { + -d = . + -classpath = /tmp:/bippy +} + +13) List(-cp, /tmp:/bippy, , foo.scala) ==> Settings { + -d = . + -classpath = /tmp:/bippy +} + +14) List(-cp, /tmp:/bippy, foo.scala, ) ==> Settings { + -d = . + -classpath = /tmp:/bippy +} + +15) List(, -cp, /tmp:/bippy, foo.scala) ==> Settings { + -d = . + -classpath = /tmp:/bippy +} + +16) List(-cp, /tmp:/bippy, foo.scala, -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +17) List(-cp, /tmp:/bippy, , foo.scala, -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +18) List(-cp, /tmp:/bippy, foo.scala, , -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +19) List(-cp, /tmp:/bippy, foo.scala, -deprecation, ) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +20) List(, -cp, /tmp:/bippy, foo.scala, -deprecation) ==> Settings { + -d = . 
+ -deprecation = true + -classpath = /tmp:/bippy +} + +21) List(-deprecation, -cp, /tmp:/bippy) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +22) List(, -deprecation, -cp, /tmp:/bippy) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +23) List(-deprecation, -cp, /tmp:/bippy, ) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +24) List(-deprecation, , -cp, /tmp:/bippy) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +25) List(-deprecation, -cp, /tmp:/bippy, foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +26) List(, -deprecation, -cp, /tmp:/bippy, foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +27) List(-deprecation, -cp, /tmp:/bippy, , foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +28) List(-deprecation, -cp, /tmp:/bippy, foo.scala, ) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +29) List(-deprecation, , -cp, /tmp:/bippy, foo.scala) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +30) List(-deprecation, foo.scala, -cp, /tmp:/bippy) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +31) List(, -deprecation, foo.scala, -cp, /tmp:/bippy) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +32) List(-deprecation, , foo.scala, -cp, /tmp:/bippy) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +33) List(-deprecation, foo.scala, -cp, /tmp:/bippy, ) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +34) List(-deprecation, foo.scala, , -cp, /tmp:/bippy) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +35) List(foo.scala, -cp, /tmp:/bippy) ==> Settings { + -d = . + -classpath = /tmp:/bippy +} + +36) List(, foo.scala, -cp, /tmp:/bippy) ==> Settings { + -d = . + -classpath = /tmp:/bippy +} + +37) List(foo.scala, -cp, /tmp:/bippy, ) ==> Settings { + -d = . + -classpath = /tmp:/bippy +} + +38) List(foo.scala, , -cp, /tmp:/bippy) ==> Settings { + -d = . + -classpath = /tmp:/bippy +} + +39) List(foo.scala, -cp, /tmp:/bippy, -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +40) List(, foo.scala, -cp, /tmp:/bippy, -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +41) List(foo.scala, -cp, /tmp:/bippy, , -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +42) List(foo.scala, -cp, /tmp:/bippy, -deprecation, ) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +43) List(foo.scala, , -cp, /tmp:/bippy, -deprecation) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +44) List(foo.scala, -deprecation, -cp, /tmp:/bippy) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +45) List(, foo.scala, -deprecation, -cp, /tmp:/bippy) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +46) List(foo.scala, , -deprecation, -cp, /tmp:/bippy) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +47) List(foo.scala, -deprecation, -cp, /tmp:/bippy, ) ==> Settings { + -d = . + -deprecation = true + -classpath = /tmp:/bippy +} + +48) List(foo.scala, -deprecation, , -cp, /tmp:/bippy) ==> Settings { + -d = . 
+ -deprecation = true + -classpath = /tmp:/bippy +} + diff --git a/test/files/run/settings-parse.scala b/test/files/run/settings-parse.scala new file mode 100644 index 0000000000..2b04f55b24 --- /dev/null +++ b/test/files/run/settings-parse.scala @@ -0,0 +1,27 @@ +import scala.tools.nsc._ + +object Test { + val tokens = List("", "-deprecation", "foo.scala") + val subsets = tokens.toSet.subsets.toList + val permutations0 = subsets.flatMap(_.toList.permutations).distinct + + def runWithCp(cp: String) = { + val permutations = permutations0 flatMap ("-cp CPTOKEN" :: _ permutations) + + for ((p, i) <- permutations.distinct.sortBy(_ mkString "").zipWithIndex) { + val args = p flatMap (_ split "\\s+") map (x => if (x == "CPTOKEN") cp else x) + val s = new settings.MutableSettings(println) + val (ok, residual) = s.processArguments(args, processAll = true) + + val expected = args filter (_ == "foo.scala") + assert(residual == expected, residual) + assert(ok, args) + println(s"$i) $args ==> $s") + } + } + + def main(args0: Array[String]): Unit = { + runWithCp("") + runWithCp("/tmp:/bippy") + } +} -- cgit v1.2.3 From 085b6a5bbed3839238c5cc0434281cf8e4c1ad19 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 20 Oct 2012 21:46:23 -0700 Subject: SI-5330, SI-6014 deal with existential self-type This has been broken since https://github.com/scala/scala/commit/b7b81ca2#L0L567. The existential rash is treated in a similar manner as in fc24db4c. Conceptually, the fix would be `def selfTypeSkolemized = widen.skolemizeExistential.narrow`, but simply widening before narrowing achieves the same thing. Since we're in existential voodoo territory, let's go for the minimal fix: replacing `this.narrow` by `widen.narrow`. -- Original patch by @retronym in #1074, refined by @paulp to only perform widen.narrow incantation if there are existentials present in the widened type, as narrowing is expensive when the type is not a singleton. The result is that compiling the entirety of quick, that code path is hit only 143 times. All the other calls hit .narrow directly as before. It looks like the definition of negligible in the diff of -Ystatistics when compiling src/library/scala/collection: < #symbols : 306315 --- > #symbols : 306320 12c13 < #unique types : 293859 --- > #unique types : 293865 I'm assuming based on the 2/1000ths of a percent increase in symbol and type creation that wall clock is manageable, but I didn't measure it. --- src/reflect/scala/reflect/internal/Types.scala | 18 +++++++++++++++--- test/files/pos/t5330.scala | 22 ++++++++++++++++++++++ test/files/pos/t5330b.scala | 6 ++++++ test/files/pos/t5330c.scala | 5 +++++ test/files/pos/t6014.scala | 13 +++++++++++++ 5 files changed, 61 insertions(+), 3 deletions(-) create mode 100644 test/files/pos/t5330.scala create mode 100644 test/files/pos/t5330b.scala create mode 100644 test/files/pos/t5330c.scala create mode 100644 test/files/pos/t6014.scala (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 3104c1e74e..02cdac4046 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -321,6 +321,18 @@ trait Types extends api.Types { self: SymbolTable => } } + /** Same as a call to narrow unless existentials are visible + * after widening the type. In that case, narrow from the widened + * type instead of the proxy. This gives buried existentials a + * chance to make peace with the other types. See SI-5330. 
+ */ + private def narrowForFindMember(tp: Type): Type = { + val w = tp.widen + // Only narrow on widened type when we have to -- narrow is expensive unless the target is a singleton type. + if ((tp ne w) && containsExistential(w)) w.narrow + else tp.narrow + } + /** The base class for all types */ abstract class Type extends TypeApiImpl with Annotatable[Type] { /** Types for which asSeenFrom always is the identity, no matter what @@ -1070,7 +1082,7 @@ trait Types extends api.Types { self: SymbolTable => (other ne sym) && ((other.owner eq sym.owner) || (flags & PRIVATE) != 0 || { - if (self eq null) self = this.narrow + if (self eq null) self = narrowForFindMember(this) if (symtpe eq null) symtpe = self.memberType(sym) !(self.memberType(other) matches symtpe) })}) { @@ -1148,7 +1160,7 @@ trait Types extends api.Types { self: SymbolTable => if ((member ne sym) && ((member.owner eq sym.owner) || (flags & PRIVATE) != 0 || { - if (self eq null) self = this.narrow + if (self eq null) self = narrowForFindMember(this) if (membertpe eq null) membertpe = self.memberType(member) !(membertpe matches self.memberType(sym)) })) { @@ -1163,7 +1175,7 @@ trait Types extends api.Types { self: SymbolTable => (other ne sym) && ((other.owner eq sym.owner) || (flags & PRIVATE) != 0 || { - if (self eq null) self = this.narrow + if (self eq null) self = narrowForFindMember(this) if (symtpe eq null) symtpe = self.memberType(sym) !(self.memberType(other) matches symtpe) })}) { diff --git a/test/files/pos/t5330.scala b/test/files/pos/t5330.scala new file mode 100644 index 0000000000..813acd4b83 --- /dev/null +++ b/test/files/pos/t5330.scala @@ -0,0 +1,22 @@ +trait FM[A] { + def map(f: A => Any) +} + +trait M[A] extends FM[A] { + def map(f: A => Any) +} + +trait N[A] extends FM[A] + +object test { + def kaboom(xs: M[_]) = xs map (x => ()) // missing parameter type. + + def okay1[A](xs: M[A]) = xs map (x => ()) + def okay2(xs: FM[_]) = xs map (x => ()) + def okay3(xs: N[_]) = xs map (x => ()) +} + +class CC2(xs: List[_]) { + def f(x1: Any, x2: Any) = null + def g = xs map (x => f(x, x)) +} diff --git a/test/files/pos/t5330b.scala b/test/files/pos/t5330b.scala new file mode 100644 index 0000000000..dbeb165cd8 --- /dev/null +++ b/test/files/pos/t5330b.scala @@ -0,0 +1,6 @@ +abstract trait Base { + def foo: this.type +}; +class Derived[T] extends Base { + def foo: Nothing = sys.error("!!!") +} diff --git a/test/files/pos/t5330c.scala b/test/files/pos/t5330c.scala new file mode 100644 index 0000000000..af31f3dfd1 --- /dev/null +++ b/test/files/pos/t5330c.scala @@ -0,0 +1,5 @@ +object t5330c { + val s: Set[_ >: Char] = Set('A') + s forall ("ABC" contains _) + s.forall( c => "ABC".toSeq.contains( c )) +} diff --git a/test/files/pos/t6014.scala b/test/files/pos/t6014.scala new file mode 100644 index 0000000000..46e03bb552 --- /dev/null +++ b/test/files/pos/t6014.scala @@ -0,0 +1,13 @@ +object Test { + case class CC[T](key: T) + type Alias[T] = Seq[CC[T]] + + def f(xs: Seq[CC[_]]) = xs map { case CC(x) => CC(x) } // ok + def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // fails + // ./a.scala:11: error: missing parameter type for expanded function + // The argument types of an anonymous function must be fully known. (SLS 8.5) + // Expected type was: ? 
+ // def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // fails + // ^ + // one error found +} \ No newline at end of file -- cgit v1.2.3 From 823d77947e7f6502905cfbafee396fad0a908ede Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 14 Nov 2012 12:22:22 -0800 Subject: Fix for SI-6357, cycle with value classes. Don't force the owner info. --- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 3 ++- test/files/neg/t6357.check | 4 ++++ test/files/neg/t6357.scala | 6 ++++++ 3 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/t6357.check create mode 100644 test/files/neg/t6357.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 3f5410eb45..da2282c1dd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1316,7 +1316,8 @@ trait Namers extends MethodSynthesis { if (clazz.isDerivedValueClass) { log("Ensuring companion for derived value class " + name + " at " + cdef.pos.show) clazz setFlag FINAL - enclosingNamerWithScope(clazz.owner.info.decls).ensureCompanionObject(cdef) + // Don't force the owner's info lest we create cycles as in SI-6357. + enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef) } result diff --git a/test/files/neg/t6357.check b/test/files/neg/t6357.check new file mode 100644 index 0000000000..a534d1439a --- /dev/null +++ b/test/files/neg/t6357.check @@ -0,0 +1,4 @@ +t6357.scala:3: error: value class may not be a local class + final class Y(val j: Int) extends AnyVal + ^ +one error found diff --git a/test/files/neg/t6357.scala b/test/files/neg/t6357.scala new file mode 100644 index 0000000000..47f5629638 --- /dev/null +++ b/test/files/neg/t6357.scala @@ -0,0 +1,6 @@ +object K { + def q = { + final class Y(val j: Int) extends AnyVal + 3 + } +} -- cgit v1.2.3 From 24958f7a8b1748be0c462b4563e652c9f7e24d6a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 14 Nov 2012 11:40:12 -0800 Subject: Fix for SI-6664, cycle in case classes. Scope lookup of an overloaded symbol was accidentally forcing some lazy info. Since as usual the caller didn't even have any interest in the symbol, but only in whether the name existed at all, I changed it call the method I made for this exact purpose, containsName. Also I much reduced the number of checks being made in the quest for an inherited copy method. 
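[Note] The heart of the fix is asking a cheaper question: containsName only checks whether a name is present in the scope, whereas looking the name up and materializing an overloaded symbol may force the lazy info of every alternative, which is where the cycle started. The standalone toy below uses made-up Sym and Scope types that merely mimic that cost difference; they are not the real reflect.internal APIs:

    object LazyLookupSketch {
      // A symbol whose signature is computed lazily; in the compiler, forcing it
      // may re-enter the very completer that is populating the scope (the cycle).
      final class Sym(val name: String, sig: () => String) {
        lazy val info: String = sig()
      }

      final class Scope(syms: List[Sym]) {
        // Cheap membership test: never touches info.
        def containsName(name: String): Boolean = syms.exists(_.name == name)
        // Resolving a (possibly overloaded) name forces the info of each alternative.
        def lookupAll(name: String): List[String] = syms.filter(_.name == name).map(_.info)
      }

      def main(args: Array[String]): Unit = {
        val scope = new Scope(List(
          new Sym("copy", () => { println("forcing copy/1"); "(s2: String)A" }),
          new Sym("copy", () => { println("forcing copy/2"); "(i2: Int)A" })
        ))
        println(scope.containsName("copy")) // true, and nothing was forced
      }
    }
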
--- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 7 +++---- src/reflect/scala/reflect/internal/Scopes.scala | 8 +++++--- test/files/pos/t6664.scala | 4 ++++ test/files/pos/t6664b.scala | 5 +++++ 4 files changed, 17 insertions(+), 7 deletions(-) create mode 100644 test/files/pos/t6664.scala create mode 100644 test/files/pos/t6664b.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 3f5410eb45..04fb69671e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -909,11 +909,10 @@ trait Namers extends MethodSynthesis { val modClass = companionSymbolOf(clazz, context).moduleClass modClass.attachments.get[ClassForCaseCompanionAttachment] foreach { cma => val cdef = cma.caseClass - def hasCopy(decls: Scope) = (decls lookup nme.copy) != NoSymbol + def hasCopy = (decls containsName nme.copy) || parents.exists(_ member nme.copy exists) + // SI-5956 needs (cdef.symbol == clazz): there can be multiple class symbols with the same name - if (cdef.symbol == clazz && !hasCopy(decls) && - !parents.exists(p => hasCopy(p.typeSymbol.info.decls)) && - !parents.flatMap(_.baseClasses).distinct.exists(bc => hasCopy(bc.info.decls))) + if (cdef.symbol == clazz && !hasCopy) addCopyMethod(cdef, templateNamer) } } diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index a593a412d7..950e30dbc5 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -242,7 +242,8 @@ trait Scopes extends api.Scopes { self: SymbolTable => // than a non-deterministic bizarre one (see any bug involving overloads // in package objects.) val alts = lookupAll(name).toList - log("!!! scope lookup of $name found multiple symbols: $alts") + def alts_s = alts map (s => s.defString) mkString " " + log(s"!!! scope lookup of $name found multiple symbols: $alts_s") // FIXME - how is one supposed to create an overloaded symbol without // knowing the correct owner? Using the symbol owner is not correct; // say for instance this is List's scope and the symbols are its three @@ -254,8 +255,9 @@ trait Scopes extends api.Scopes { self: SymbolTable => // FIXME - a similar question for prefix, although there are more // clues from the symbols on that one, as implemented here. In general // the distinct list is one type and lub becomes the identity. 
- val prefix = lub(alts map (_.info.prefix) distinct) - NoSymbol.newOverloaded(prefix, alts) + // val prefix = lub(alts map (_.info.prefix) distinct) + // Now using NoSymbol and NoPrefix always to avoid forcing info (SI-6664) + NoSymbol.newOverloaded(NoPrefix, alts) } } diff --git a/test/files/pos/t6664.scala b/test/files/pos/t6664.scala new file mode 100644 index 0000000000..7eb85f619d --- /dev/null +++ b/test/files/pos/t6664.scala @@ -0,0 +1,4 @@ +final case class A(i: Int, s: String) { + protected def copy(s2: String): A = A(i, s2) + protected def copy(i2: Int): A = A(i2, s) +} diff --git a/test/files/pos/t6664b.scala b/test/files/pos/t6664b.scala new file mode 100644 index 0000000000..a622866838 --- /dev/null +++ b/test/files/pos/t6664b.scala @@ -0,0 +1,5 @@ +object T { + def A(s: String): A = new A(3, s) + def A(i: Int): A = A(i, "abc") + case class A(i: Int, s: String) +} -- cgit v1.2.3 From 666e375526f9fac2c491f514fc3020a0c209ee13 Mon Sep 17 00:00:00 2001 From: Jean-Remi Desjardins Date: Wed, 14 Nov 2012 17:32:58 -0500 Subject: Fix Documentation Typo There was a typo on the if. I also thought it might be worth rephrasing the paragraph because it seemed a bit contrived. It's a trivial fix, but it would allow us to close a bug in the issue tracker which seems worthwhile to me. --- src/library/scala/xml/transform/RewriteRule.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/library/scala/xml/transform/RewriteRule.scala b/src/library/scala/xml/transform/RewriteRule.scala index 1dca495a10..13210a6fd2 100644 --- a/src/library/scala/xml/transform/RewriteRule.scala +++ b/src/library/scala/xml/transform/RewriteRule.scala @@ -11,8 +11,8 @@ package scala.xml package transform -/** a RewriteRule, when applied to a term, yields either - * the resulting of rewriting or the term itself it the rule +/** A RewriteRule, when applied to a term, yields either + * the result of rewriting the term or the term itself if the rule * is not applied. * * @author Burak Emir -- cgit v1.2.3 From 62ebb7cb9d82986f640681ef5d5d16475855b2ef Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 14 Nov 2012 14:36:13 -0800 Subject: Lower confusion levels in typedApply. It's the usual thing, making logic more logical and removing comments which are incorrect and/or no longer useful. --- .../scala/tools/nsc/typechecker/Typers.scala | 68 ++++++++-------------- 1 file changed, 25 insertions(+), 43 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c203e62786..f5e9540d28 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4426,15 +4426,6 @@ trait Typers extends Modes with Adaptations with Tags { if (useTry) tryTypedApply(fun2, args) else doTypedApply(tree, fun2, args, mode, pt) - /* - if (fun2.hasSymbolField && fun2.symbol.isConstructor && (mode & EXPRmode) != 0) { - res.tpe = res.tpe.notNull - } - */ - // TODO: In theory we should be able to call: - //if (fun2.hasSymbolField && fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass) { - // But this causes cyclic reference for Array class in Cleanup. It is easy to overcome this - // by calling ArrayClass.info here (or some other place before specialize). 
if (fun2.symbol == Array_apply && !res.isErrorTyped) { val checked = gen.mkCheckInit(res) // this check is needed to avoid infinite recursion in Duplicators @@ -4449,38 +4440,36 @@ trait Typers extends Modes with Adaptations with Tags { } } - def typedApply(tree: Apply) = { - val fun = tree.fun - val args = tree.args - fun match { - case Block(stats, expr) => - typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt) - case _ => - normalTypedApply(tree, fun, args) match { - case Apply(Select(New(tpt), name), args) - if (tpt.tpe != null && - tpt.tpe.typeSymbol == ArrayClass && - args.length == 1 && - erasure.GenericArray.unapply(tpt.tpe).isDefined) => // !!! todo simplify by using extractor - // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len) - // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len), where Array HK gets applied (N-1) times - // [Eugene] no more MaxArrayDims. ClassTags are flexible enough to allow creation of arrays of arbitrary dimensionality (w.r.t JVM restrictions) - val Some((level, componentType)) = erasure.GenericArray.unapply(tpt.tpe) - val tagType = List.iterate(componentType, level)(tpe => appliedType(ArrayClass.toTypeConstructor, List(tpe))).last - val newArrayApp = atPos(tree.pos) { - val tag = resolveClassTag(tree.pos, tagType) - if (tag.isEmpty) MissingClassTagError(tree, tagType) - else new ApplyToImplicitArgs(Select(tag, nme.newArray), args) + // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len) + // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len) + // where Array HK gets applied (N-1) times + object ArrayInstantiation { + def unapply(tree: Apply) = tree match { + case Apply(Select(New(tpt), name), arg :: Nil) if tpt.tpe != null && tpt.tpe.typeSymbol == ArrayClass => + Some(tpt.tpe) collect { + case erasure.GenericArray(level, componentType) => + val tagType = (1 until level).foldLeft(componentType)((res, _) => arrayType(res)) + + resolveClassTag(tree.pos, tagType) match { + case EmptyTree => MissingClassTagError(tree, tagType) + case tag => atPos(tree.pos)(new ApplyToImplicitArgs(Select(tag, nme.newArray), arg :: Nil)) } - typed(newArrayApp, mode, pt) - case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => //SI-5696 - TooManyArgumentListsForConstructor(tree) - case tree1 => - tree1 } + case _ => None } } + def typedApply(tree: Apply) = tree match { + case Apply(Block(stats, expr), args) => + typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt) + case Apply(fun, args) => + normalTypedApply(tree, fun, args) match { + case ArrayInstantiation(tree1) => typed(tree1, mode, pt) + case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => TooManyArgumentListsForConstructor(tree) //SI-5696 + case tree1 => tree1 + } + } + def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree]): Tree = { val prefix = name.toTermName stripSuffix nme.EQL def mkAssign(vble: Tree): Tree = @@ -4534,8 +4523,6 @@ trait Typers extends Modes with Adaptations with Tags { case This(_) => qual1.symbol case _ => qual1.tpe.typeSymbol } - //println(clazz+"/"+qual1.tpe.typeSymbol+"/"+qual1) - def findMixinSuper(site: Type): Type = { var ps = site.parents filter (_.typeSymbol.name == mix) if (ps.isEmpty) @@ -4543,11 +4530,6 @@ trait Typers extends Modes with Adaptations with Tags { if (ps.isEmpty) { debuglog("Fatal: couldn't find site 
" + site + " in " + site.parents.map(_.typeSymbol.name)) if (phase.erasedTypes && context.enclClass.owner.isImplClass) { - // println(qual1) - // println(clazz) - // println(site) - // println(site.parents) - // println(mix) // the reference to super class got lost during erasure restrictionError(tree.pos, unit, "traits may not select fields or methods from super[C] where C is a class") ErrorType -- cgit v1.2.3 From 6023706458ca14ecd62a0b1b68352662e787020f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 14 Nov 2012 13:03:28 -0800 Subject: Error for SI-6355, overloading of applyDynamic. As long as it can never be called anyway, seems like we'd be doing people a kindness to fail the compile rather than letting it be ambiguous at every use site. --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 9 +++++++++ src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- test/files/neg/t6355.check | 4 ++++ test/files/neg/t6355.scala | 13 +++++++++++++ 4 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/t6355.check create mode 100644 test/files/neg/t6355.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index dbd2a0e49b..24b0611d6a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -130,6 +130,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } } } + + // Check for doomed attempt to overload applyDynamic + if (clazz isSubClass DynamicClass) { + clazz.info member nme.applyDynamic match { + case sym if sym.isOverloaded => unit.error(sym.pos, "implementation restriction: applyDynamic cannot be overloaded") + case _ => + } + } + if (settings.lint.value) { clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym => val alts = clazz.info.decl(sym.name).alternatives diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c203e62786..d734e2e861 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3871,7 +3871,7 @@ trait Typers extends Modes with Adaptations with Tags { * */ def mkInvoke(cxTree: Tree, tree: Tree, qual: Tree, name: Name): Option[Tree] = { - debuglog(s"mkInvoke($cxTree, $tree, $qual, $name)") + debuglog(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)") acceptsApplyDynamicWithType(qual, name) map { tp => // tp eq NoType => can call xxxDynamic, but not passing any type args (unless specified explicitly by the user) // in scala-virtualized, when not NoType, tp is passed as type argument (for selection on a staged Struct) diff --git a/test/files/neg/t6355.check b/test/files/neg/t6355.check new file mode 100644 index 0000000000..c1fa147f52 --- /dev/null +++ b/test/files/neg/t6355.check @@ -0,0 +1,4 @@ +t6355.scala:12: error: implementation restriction: applyDynamic cannot be overloaded + def applyDynamic(name: String)(x: Int): Int = 2 + ^ +one error found diff --git a/test/files/neg/t6355.scala b/test/files/neg/t6355.scala new file mode 100644 index 0000000000..3007dc49f6 --- /dev/null +++ b/test/files/neg/t6355.scala @@ -0,0 +1,13 @@ +package foo + +import scala.language.dynamics + +class DoesntExtendDynamic { + def applyDynamic(name: String)(s: String): Int = 1 + def applyDynamic(name: String)(x: Int): Int = 2 +} + +class A extends Dynamic { + def 
applyDynamic(name: String)(s: String): Int = 1 + def applyDynamic(name: String)(x: Int): Int = 2 +} -- cgit v1.2.3 From 768a4082a6090835afef34ee38c2c398da335b01 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 14 Nov 2012 10:54:27 -0800 Subject: Fix for overly eager package object initialization. A subtle change in the order in which symbol attributes were inspected (now you know why I avoid vals in the compiler) led to a cycle during initialization for slick. I'm afraid I don't know how to reproduce the issue outside of slick and sbt, so I added some logging instead. After some challenges juggling general correctness and cycle avoidance, I resorted to improving and documenting the logic as well. I predict reviewer will be pleased. --- .../scala/tools/nsc/typechecker/Contexts.scala | 41 ++++++++++++++++------ src/reflect/scala/reflect/internal/Symbols.scala | 8 +++++ 2 files changed, 38 insertions(+), 11 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 78380ad054..a8d7de6362 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -761,25 +761,44 @@ trait Contexts { self: Analyzer => * info of the package object. However to avoid cycles we'll check * what other ways we can before pushing that way. */ - def isInPackageObject(sym: Symbol, pkg: Symbol) = { - val pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg + def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean = { + def uninitialized(what: String) = { + log(s"Cannot look for $sym in package object of $pkg; $what is not initialized.") + false + } + def pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg def matchesInfo = ( - pkg.isInitialized && { - // need to be careful here to not get a cyclic reference during bootstrap + // need to be careful here to not get a cyclic reference during bootstrap + if (pkg.isInitialized) { val module = pkg.info member nme.PACKAGEkw - module.isInitialized && (module.info.member(sym.name).alternatives contains sym) + if (module.isInitialized) + module.info.member(sym.name).alternatives contains sym + else + uninitialized("" + module) } + else uninitialized("" + pkg) ) def inPackageObject(sym: Symbol) = ( - !sym.isPackage - && !sym.owner.isPackageClass - && (sym.owner ne NoSymbol) - && (sym.owner.owner == pkgClass || matchesInfo) + // To be in the package object, one of these must be true: + // 1) sym.owner is a package object class, and sym.owner.owner is the package class for `pkg` + // 2) sym.owner is inherited by the correct package object class + // We try to establish 1) by inspecting the owners directly, and then we try + // to rule out 2), and only if both those fail do we resort to looking in the info. + !sym.isPackage && (sym.owner ne NoSymbol) && ( + if (sym.owner.isPackageObjectClass) + sym.owner.owner == pkgClass + else + !sym.owner.isPackageClass && matchesInfo + ) ) + // An overloaded symbol might not have the expected owner! + // The alternatives must be inspected directly. 
pkgClass.isPackageClass && ( - if (sym.isOverloaded) sym.alternatives forall inPackageObject - else inPackageObject(sym) + if (sym.isOverloaded) + sym.alternatives forall (isInPackageObject(_, pkg)) + else + inPackageObject(sym) ) } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 30e5ecd643..b975ea5786 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2463,6 +2463,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isMethod = this hasFlag METHOD override def isModule = this hasFlag MODULE override def isOverloaded = this hasFlag OVERLOADED + /*** !!! TODO: shouldn't we do something like the following: + override def isOverloaded = ( + if (this.isInitialized) + this hasFlag OVERLOADED + else + (infos ne null) && infos.info.isInstanceOf[OverloadedType] + ) + ***/ override def isPackage = this hasFlag PACKAGE override def isValueParameter = this hasFlag PARAM -- cgit v1.2.3 From 7936ce55315c40886fad508df8e56f78a8efea8f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 14 Nov 2012 15:39:09 -0800 Subject: Added -Xdev setting... you know, for devs A setting we developers can give all the time and expect to hear useful things without being buried in debugging output. As the comment says: This is for WARNINGS which should reach the ears of scala developers whenever they occur, but are not useful for normal users. They should be precise, explanatory, and infrequent. Please don't use this as a logging mechanism. !!! is prefixed to all messages issued via this route to make them visually distinct. This is what I always intended for "debugwarn", the method I have deprecated in favor of the more accurate: def devWarning(msg: => String): Unit In this VERY SAME COMMIT, I performed the CLOSELY RELATED task of quieting down an -Xlint warning which had become too noisy thanks to implicit classes tickling it. I tightened that warn condition to include both -Xlint and -Xdev. 
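[Note] The shape of the new hook is simple enough to restate outside the compiler: a by-name message gated on a developer flag, so the string is not even built in the common case. The snippet below is a self-contained approximation; the mutable flags and the Console.err sink stand in for the real settings and reporter:

    object DevWarningSketch {
      var developer = false
      var debug     = false
      private def warning(msg: String): Unit = Console.err.println("warning: " + msg)

      // By-name parameter: the (possibly expensive) message is only evaluated when shown.
      @inline final def devWarning(msg: => String): Unit =
        if (developer || debug) warning("!!! " + msg)

      def main(args: Array[String]): Unit = {
        devWarning("suppressed, and never constructed")
        developer = true
        devWarning("visible once the developer flag is on")
      }
    }
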
--- src/compiler/scala/tools/nsc/Global.scala | 14 +++++++++----- src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 7 +++++-- src/compiler/scala/tools/nsc/backend/icode/ICodes.scala | 4 ++-- .../nsc/backend/icode/analysis/ReachingDefinitions.scala | 2 +- src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 1 + src/compiler/scala/tools/nsc/transform/Mixin.scala | 2 +- .../scala/tools/nsc/transform/SpecializeTypes.scala | 2 +- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 5 +++-- src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 6 ++++-- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 3 ++- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 2 +- .../scala/tools/nsc/typechecker/PatternMatching.scala | 4 +++- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 3 ++- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- src/reflect/scala/reflect/internal/Scopes.scala | 2 +- src/reflect/scala/reflect/internal/SymbolTable.scala | 7 +++++-- src/reflect/scala/reflect/internal/Symbols.scala | 6 +++--- src/reflect/scala/reflect/internal/TreeInfo.scala | 9 ++++----- src/reflect/scala/reflect/internal/Types.scala | 12 ++++++------ src/reflect/scala/reflect/internal/transform/Erasure.scala | 2 +- test/files/neg/overloaded-implicit.flags | 2 +- 23 files changed, 59 insertions(+), 42 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 13bec828ca..9c87ff9ad8 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -253,11 +253,15 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if (settings.debug.value) body } - // Warnings issued only under -Ydebug. For messages which should reach - // developer ears, but are not adequately actionable by users. - @inline final override def debugwarn(msg: => String) { - if (settings.debug.value) - warning(msg) + /** This is for WARNINGS which should reach the ears of scala developers + * whenever they occur, but are not useful for normal users. They should + * be precise, explanatory, and infrequent. Please don't use this as a + * logging mechanism. !!! is prefixed to all messages issued via this route + * to make them visually distinct. + */ + @inline final override def devWarning(msg: => String) { + if (settings.developer.value || settings.debug.value) + warning("!!! " + msg) } private def elapsedMessage(msg: String, start: Long) = diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 720896d0b3..03ad618b86 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1251,8 +1251,11 @@ abstract class GenICode extends SubComponent { val sym = ( if (!tree.symbol.isPackageClass) tree.symbol else tree.symbol.info.member(nme.PACKAGE) match { - case NoSymbol => assert(false, "Cannot use package as value: " + tree) ; NoSymbol - case s => debugwarn("Bug: found package class where package object expected. Converting.") ; s.moduleClass + case NoSymbol => + abort("Cannot use package as value: " + tree) + case s => + devWarning(s"Found ${tree.symbol} where a package object is required. 
Converting to ${s.moduleClass}") + s.moduleClass } ) debuglog("LOAD_MODULE from %s: %s".format(tree.shortClass, sym)) diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala index 7c6f2a0620..e2d387c65d 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala @@ -28,7 +28,7 @@ abstract class ICodes extends AnyRef with Repository { val global: Global - import global.{ log, definitions, settings, perRunCaches } + import global.{ log, definitions, settings, perRunCaches, devWarning } /** The ICode representation of classes */ val classes = perRunCaches.newMap[global.Symbol, IClass]() @@ -82,7 +82,7 @@ abstract class ICodes extends AnyRef // Something is leaving open/empty blocks around (see SI-4840) so // let's not kill the deal unless it's nonempty. if (b.isEmpty) { - log("!!! Found open but empty block while inlining " + m + ": removing from block list.") + devWarning(s"Found open but empty block while inlining $m: removing from block list.") m.code removeBlock b } else dumpMethodAndAbort(m, b) diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala index 45c85ff25a..48755d4424 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala @@ -52,7 +52,7 @@ abstract class ReachingDefinitions { // it makes it harder to spot the real problems. val result = (a.stack, b.stack).zipped map (_ ++ _) if (settings.debug.value && (a.stack.length != b.stack.length)) - debugwarn("Mismatched stacks in ReachingDefinitions#lub2: " + a.stack + ", " + b.stack + ", returning " + result) + devWarning(s"Mismatched stacks in ReachingDefinitions#lub2: ${a.stack}, ${b.stack}, returning $result") result } ) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 8dce48ee9a..af0e3c97b0 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -74,6 +74,7 @@ trait ScalaSettings extends AbsScalaSettings val assemextdirs = StringSetting ("-Xassem-extdirs", "dirs", "(Requires -target:msil) List of directories containing assemblies. 
default:lib", Defaults.scalaLibDir.path).dependsOn(target, "msil") val sourcedir = StringSetting ("-Xsourcedir", "directory", "(Requires -target:msil) Mirror source folder structure in output directory.", ".").dependsOn(target, "msil") val checkInit = BooleanSetting ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.") + val developer = BooleanSetting ("-Xdev", "Indicates user is a developer - issue warnings about anything which seems amiss") val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions or assumptions.") val elidebelow = IntSetting ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument", elidable.MINIMUM, None, elidable.byName get _) diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 8122dc38cf..2025891ab2 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -289,7 +289,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { for (mixinMember <- mixinClass.info.decls) { if (isConcreteAccessor(mixinMember)) { if (isOverriddenAccessor(mixinMember, clazz.info.baseClasses)) - debugwarn("!!! is overridden val: "+mixinMember.fullLocationString) + devWarning(s"Overridden concrete accessor: ${mixinMember.fullLocationString}") else { // mixin field accessors val mixedInAccessor = cloneAndAddMixinMember(mixinClass, mixinMember) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 78fb725041..3af9524f3e 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1482,7 +1482,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } // See SI-5583. Don't know why it happens now if it didn't before. if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) { - log("!!! Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs))) + devWarning("Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs))) localTyper.typed(sel) } else { diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 84803d0b6b..becc7f65ff 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -297,7 +297,8 @@ abstract class UnCurry extends InfoTransform * If there's a default case, the original match is used for applyOrElse, and isDefinedAt returns `true` */ def synthPartialFunction(fun: Function) = { - if (!settings.XoldPatmat.value) debugwarn("Under the new pattern matching scheme, PartialFunction should have been synthesized during typers.") + if (!settings.XoldPatmat.value) + devWarning("Under the new pattern matching scheme, PartialFunction should have been synthesized during typers.") val targs = fun.tpe.typeArgs val (formals, restpe) = (targs.init, targs.last) @@ -704,7 +705,7 @@ abstract class UnCurry extends InfoTransform val finalizer = tree.finalizer if (!settings.XoldPatmat.value) { if (catches exists (cd => !treeInfo.isCatchCase(cd))) - debugwarn("VPM BUG! 
illegal try/catch " + catches) + devWarning("VPM BUG - illegal try/catch " + catches) tree } else if (catches forall treeInfo.isCatchCase) { tree diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index 166a9785fa..9efa3f36b0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -130,7 +130,7 @@ trait Checkable { else if (P3) RuntimeCheckable else if (uncheckableType == NoType) { // Avoid warning (except ourselves) if we can't pinpoint the uncheckable type - debugwarn("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString) + debuglog("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString) CheckabilityError } else Uncheckable diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 78380ad054..0a9baca58b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -398,8 +398,10 @@ trait Contexts { self: Analyzer => unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg) @inline private def issueCommon(err: AbsTypeError)(pf: PartialFunction[AbsTypeError, Unit]) { - debugwarn("issue error: " + err.errMsg) - if (settings.Yissuedebug.value) (new Exception).printStackTrace() + if (settings.Yissuedebug.value) { + log("issue error: " + err.errMsg) + (new Exception).printStackTrace() + } if (pf isDefinedAt err) pf(err) else if (bufferErrors) { buffer += err } else throw new TypeError(err.errPos, err.errMsg) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 576a21fe31..710b7e9051 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -82,7 +82,7 @@ trait Implicits { val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos).bestImplicit if (saveAmbiguousDivergent && implicitSearchContext.hasErrors) { context.updateBuffer(implicitSearchContext.errBuffer.filter(err => err.kind == ErrorKinds.Ambiguous || err.kind == ErrorKinds.Divergent)) - debugwarn("update buffer: " + implicitSearchContext.errBuffer) + debuglog("update buffer: " + implicitSearchContext.errBuffer) } printInference("[infer implicit] inferred " + result) context.undetparams = context.undetparams filterNot result.subst.from.contains diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 6e42481d60..ac367dfde6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1319,7 +1319,8 @@ trait Infer extends Checkable { new TreeTypeSubstituter(undetparams, targs).traverse(tree) notifyUndetparamsInferred(undetparams, targs) case _ => - debugwarn("failed inferConstructorInstance for "+ tree +" : "+ tree.tpe +" under "+ undetparams +" pt = "+ pt +(if(isFullyDefined(pt)) " (fully defined)" else " (not fully defined)")) + def full = if (isFullyDefined(pt)) "(fully defined)" else "(not fully defined)" + devWarning(s"failed inferConstructorInstance for $tree: ${tree.tpe} undet=$undetparams, pt=$pt $full") // if (settings.explaintypes.value) explainTypes(resTp.instantiateTypeParams(undetparams, tvars), 
pt) ConstrInstantiationError(tree, resTp, pt) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 04fb69671e..ee1b1f9b37 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -336,7 +336,7 @@ trait Namers extends MethodSynthesis { private def enterClassSymbol(tree: ClassDef, clazz: ClassSymbol): Symbol = { if (clazz.sourceFile != null && clazz.sourceFile != contextFile) - debugwarn("!!! Source mismatch in " + clazz + ": " + clazz.sourceFile + " vs. " + contextFile) + devWarning(s"Source file mismatch in $clazz: ${clazz.sourceFile} vs. $contextFile") clazz.associatedFile = contextFile if (clazz.sourceFile != null) { diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 7cb420d2dc..6c916649f0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -271,7 +271,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // we don't transform after uncurry // (that would require more sophistication when generating trees, // and the only place that emits Matches after typers is for exception handling anyway) - if(phase.id >= currentRun.uncurryPhase.id) debugwarn("running translateMatch at "+ phase +" on "+ selector +" match "+ cases) + if (phase.id >= currentRun.uncurryPhase.id) + devWarning(s"running translateMatch past uncurry (at $phase) on $selector match $cases") + patmatDebug("translating "+ cases.mkString("{", "\n", "}")) val start = if (Statistics.canEnable) Statistics.startTimer(patmatNanos) else null diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 9eb06dbdbf..7a7c7c7d25 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -139,7 +139,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } } - if (settings.lint.value) { + // This has become noisy with implicit classes. + if (settings.lint.value && settings.developer.value) { clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym => val alts = clazz.info.decl(sym.name).alternatives if (alts.size > 1) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d9084af7bc..fb5c5e6f84 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1545,7 +1545,7 @@ trait Typers extends Modes with Adaptations with Tags { val preSuperVals = treeInfo.preSuperFields(templ.body) if (preSuperVals.isEmpty && preSuperStats.nonEmpty) - debugwarn("Wanted to zip empty presuper val list with " + preSuperStats) + devWarning("Wanted to zip empty presuper val list with " + preSuperStats) else map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe) diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 950e30dbc5..5b5097bcc2 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -243,7 +243,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => // in package objects.) 
val alts = lookupAll(name).toList def alts_s = alts map (s => s.defString) mkString " " - log(s"!!! scope lookup of $name found multiple symbols: $alts_s") + devWarning(s"scope lookup of $name found multiple symbols: $alts_s") // FIXME - how is one supposed to create an overloaded symbol without // knowing the correct owner? Using the symbol owner is not correct; // say for instance this is List's scope and the symbols are its three diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index fb1bf9ed9d..a3f814000f 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -54,13 +54,16 @@ abstract class SymbolTable extends macros.Universe @deprecated("Give us a reason", "2.10.0") def abort(): Nothing = abort("unknown error") + @deprecated("Use devWarning if this is really a warning; otherwise use log", "2.11.0") + def debugwarn(msg: => String): Unit = devWarning(msg) + /** Override with final implementation for inlining. */ def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg) - def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg) + def devWarning(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg) def throwableAsString(t: Throwable): String = "" + t /** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. */ - def debugStack(t: Throwable): Unit = debugwarn(throwableAsString(t)) + def debugStack(t: Throwable): Unit = devWarning(throwableAsString(t)) /** Overridden when we know more about what was happening during a failure. */ def supplementErrorMessage(msg: String): String = msg diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 0ad0275fba..7ae98c85af 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1272,13 +1272,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => cnt += 1 // allow for two completions: // one: sourceCompleter to LazyType, two: LazyType to completed type - if (cnt == 3) abort("no progress in completing " + this + ":" + tp) + if (cnt == 3) abort(s"no progress in completing $this: $tp") } rawInfo } catch { case ex: CyclicReference => - debugwarn("... hit cycle trying to complete " + this.fullLocationString) + devWarning("... 
hit cycle trying to complete " + this.fullLocationString) throw ex } @@ -3157,7 +3157,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def companionSymbol = fail(NoSymbol) locally { - debugwarn("creating stub symbol for " + stubWarning) + devWarning("creating stub symbol for " + stubWarning) } } class StubClassSymbol(owner0: Symbol, name0: TypeName) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 7ae7cf1821..18f685549d 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -147,11 +147,10 @@ abstract class TreeInfo { val plen = params.length val alen = args.length def fail() = { - global.debugwarn( - "Mismatch trying to zip method parameters and argument list:\n" + - " params = " + params + "\n" + - " args = " + args + "\n" - ) + global.devWarning( + s"""|Mismatch trying to zip method parameters and argument list: + | params = $params + | args = $args""".stripMargin) false } diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 42a9d9e456..bdf23a2b41 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1969,7 +1969,7 @@ trait Types extends api.Types { self: SymbolTable => case tr @ TypeRef(_, sym, args) if args.nonEmpty => val tparams = tr.initializedTypeParams if (settings.debug.value && !sameLength(tparams, args)) - debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args) + devWarning(s"Mismatched zip in computeRefs(): ${sym.info.typeParams}, $args") foreach2(tparams, args) { (tparam1, arg) => if (arg contains tparam) { @@ -2102,7 +2102,7 @@ trait Types extends api.Types { self: SymbolTable => // it later turns out not to have kind *. See SI-4070. Only // logging it for now. if (sym.typeParams.size != args.size) - log("!!! %s.transform(%s), but tparams.isEmpty and args=".format(this, tp, args)) + devWarning(s"$this.transform($tp), but tparams.isEmpty and args=$args") asSeenFromOwner(tp).instantiateTypeParams(sym.typeParams, args) } @@ -3691,7 +3691,7 @@ trait Types extends api.Types { self: SymbolTable => tycon match { case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => copyTypeRef(tycon, pre, sym, Nil) //@M drop type args to Any/Nothing case TypeRef(pre, sym, Nil) => copyTypeRef(tycon, pre, sym, args) - case TypeRef(pre, sym, bogons) => debugwarn(s"Dropping $bogons from $tycon in appliedType.") ; copyTypeRef(tycon, pre, sym, args) + case TypeRef(pre, sym, bogons) => devWarning(s"Dropping $bogons from $tycon in appliedType.") ; copyTypeRef(tycon, pre, sym, args) case PolyType(tparams, restpe) => restpe.instantiateTypeParams(tparams, args) case ExistentialType(tparams, restpe) => newExistentialType(tparams, appliedType(restpe, args)) case st: SingletonType => appliedType(st.widen, args) // @M TODO: what to do? 
see bug1 @@ -5036,7 +5036,7 @@ trait Types extends api.Types { self: SymbolTable => else { var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true) orElse { if (sym.isAliasType) throw missingAliasException - debugwarn(pre+"."+sym+" does no longer exist, phase = "+phase) + devWarning(s"$pre.$sym no longer exist at phase $phase") throw new MissingTypeControl // For build manager and presentation compiler purposes } /** The two symbols have the same fully qualified name */ @@ -5094,7 +5094,7 @@ trait Types extends api.Types { self: SymbolTable => if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) { tp } else if (sym1 == NoSymbol) { - debugwarn("adapt fail: "+pre+" "+pre1+" "+sym) + devWarning(s"adapt to new run failed: pre=$pre pre1=$pre1 sym=$sym") tp } else { copyTypeRef(tp, pre1, sym1, args1) @@ -7283,7 +7283,7 @@ trait Types extends api.Types { self: SymbolTable => protected def typeToString(tpe: Type): String = if (tostringRecursions >= maxTostringRecursions) { - debugwarn("Exceeded recursion depth attempting to print type.") + devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe)) if (settings.debug.value) (new Throwable).printStackTrace diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index a84f01031a..59bf51d638 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -233,7 +233,7 @@ trait Erasure { // It seems there is a deeper problem here, which needs // following up to. But we will not risk regressions // in 2.10 because of it. - log(s"!!! unexpected constructor erasure $tp for $clazz") + devWarning(s"unexpected constructor erasure $tp for $clazz") specialScalaErasure(tp) } } diff --git a/test/files/neg/overloaded-implicit.flags b/test/files/neg/overloaded-implicit.flags index 7949c2afa2..9c1e74e4ef 100644 --- a/test/files/neg/overloaded-implicit.flags +++ b/test/files/neg/overloaded-implicit.flags @@ -1 +1 @@ --Xlint -Xfatal-warnings +-Xlint -Xfatal-warnings -Xdev -- cgit v1.2.3 From 8fb4e9ecc76efcfc71d485ad78c9f474d1a89175 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 10 Nov 2012 11:03:30 -0700 Subject: Removed unused imports in swing. 
--- src/swing/scala/swing/Button.scala | 3 --- src/swing/scala/swing/ButtonGroup.scala | 4 +--- src/swing/scala/swing/EditorPane.scala | 3 --- src/swing/scala/swing/FormattedTextField.scala | 4 ---- src/swing/scala/swing/MainFrame.scala | 4 ---- src/swing/scala/swing/PasswordField.scala | 4 ---- src/swing/scala/swing/ProgressBar.scala | 4 ---- src/swing/scala/swing/Reactions.scala | 2 -- src/swing/scala/swing/SplitPane.scala | 3 --- src/swing/scala/swing/SwingActor.scala | 4 ---- src/swing/scala/swing/TextArea.scala | 4 ---- src/swing/scala/swing/TextComponent.scala | 3 --- src/swing/scala/swing/ToggleButton.scala | 3 --- src/swing/scala/swing/Window.scala | 3 --- 14 files changed, 1 insertion(+), 47 deletions(-) (limited to 'src') diff --git a/src/swing/scala/swing/Button.scala b/src/swing/scala/swing/Button.scala index f10d49d804..0170727e3b 100644 --- a/src/swing/scala/swing/Button.scala +++ b/src/swing/scala/swing/Button.scala @@ -6,11 +6,8 @@ ** |/ ** \* */ - - package scala.swing -import event._ import javax.swing._ object Button { diff --git a/src/swing/scala/swing/ButtonGroup.scala b/src/swing/scala/swing/ButtonGroup.scala index 2075df7c92..0b04d20837 100644 --- a/src/swing/scala/swing/ButtonGroup.scala +++ b/src/swing/scala/swing/ButtonGroup.scala @@ -8,9 +8,7 @@ package scala.swing -import event._ -import javax.swing.{AbstractButton => JAbstractButton,Icon} -import scala.collection.{ mutable, immutable } +import scala.collection.mutable /** * A button mutex. At most one of its associated buttons is selected diff --git a/src/swing/scala/swing/EditorPane.scala b/src/swing/scala/swing/EditorPane.scala index b8c506daf0..9b1aab7874 100644 --- a/src/swing/scala/swing/EditorPane.scala +++ b/src/swing/scala/swing/EditorPane.scala @@ -6,13 +6,10 @@ ** |/ ** \* */ - package scala.swing -import event._ import javax.swing._ import javax.swing.text._ -import java.awt.event._ /** * A text component that allows multiline text input and display. diff --git a/src/swing/scala/swing/FormattedTextField.scala b/src/swing/scala/swing/FormattedTextField.scala index 311ff42d0a..b08075850c 100644 --- a/src/swing/scala/swing/FormattedTextField.scala +++ b/src/swing/scala/swing/FormattedTextField.scala @@ -6,13 +6,9 @@ ** |/ ** \* */ - - package scala.swing -import event._ import javax.swing._ -import java.awt.event._ object FormattedTextField { /** diff --git a/src/swing/scala/swing/MainFrame.scala b/src/swing/scala/swing/MainFrame.scala index 85ce0755ac..1dfc155f9c 100644 --- a/src/swing/scala/swing/MainFrame.scala +++ b/src/swing/scala/swing/MainFrame.scala @@ -6,12 +6,8 @@ ** |/ ** \* */ - - package scala.swing -import event._ - /** * A frame that can be used for main application windows. Shuts down the * framework and quits the application when closed. diff --git a/src/swing/scala/swing/PasswordField.scala b/src/swing/scala/swing/PasswordField.scala index d2fdd0d38a..fd0b586a0f 100644 --- a/src/swing/scala/swing/PasswordField.scala +++ b/src/swing/scala/swing/PasswordField.scala @@ -6,13 +6,9 @@ ** |/ ** \* */ - - package scala.swing -import event._ import javax.swing._ -import java.awt.event._ /** * A password field, that displays a replacement character for each character in the password. 
diff --git a/src/swing/scala/swing/ProgressBar.scala b/src/swing/scala/swing/ProgressBar.scala index 33dd716524..81e2989c3e 100644 --- a/src/swing/scala/swing/ProgressBar.scala +++ b/src/swing/scala/swing/ProgressBar.scala @@ -6,12 +6,8 @@ ** |/ ** \* */ - - package scala.swing -import event._ - /** * A bar indicating progress of some action. Can be in indeterminate mode, * in which it indicates that the action is in progress (usually by some diff --git a/src/swing/scala/swing/Reactions.scala b/src/swing/scala/swing/Reactions.scala index d8a62aa99d..c32212cf3a 100644 --- a/src/swing/scala/swing/Reactions.scala +++ b/src/swing/scala/swing/Reactions.scala @@ -14,8 +14,6 @@ import event.Event import scala.collection.mutable.{Buffer, ListBuffer} object Reactions { - import scala.ref._ - class Impl extends Reactions { private val parts: Buffer[Reaction] = new ListBuffer[Reaction] def isDefinedAt(e: Event) = parts.exists(_ isDefinedAt e) diff --git a/src/swing/scala/swing/SplitPane.scala b/src/swing/scala/swing/SplitPane.scala index dd4f2908d5..f61dfedbf4 100644 --- a/src/swing/scala/swing/SplitPane.scala +++ b/src/swing/scala/swing/SplitPane.scala @@ -6,11 +6,8 @@ ** |/ ** \* */ - - package scala.swing -import event._ import Swing._ /** diff --git a/src/swing/scala/swing/SwingActor.scala b/src/swing/scala/swing/SwingActor.scala index 6692180aac..c665fa4c00 100644 --- a/src/swing/scala/swing/SwingActor.scala +++ b/src/swing/scala/swing/SwingActor.scala @@ -6,12 +6,8 @@ ** |/ ** \* */ - - package scala.swing -import scala.actors._ - // Dummy to keep ant from recompiling on every run. trait SwingActor { } diff --git a/src/swing/scala/swing/TextArea.scala b/src/swing/scala/swing/TextArea.scala index 01bf115d28..2f6bdca119 100644 --- a/src/swing/scala/swing/TextArea.scala +++ b/src/swing/scala/swing/TextArea.scala @@ -6,13 +6,9 @@ ** |/ ** \* */ - - package scala.swing -import event._ import javax.swing._ -import java.awt.event._ /** * A text component that allows multiline text input and display. diff --git a/src/swing/scala/swing/TextComponent.scala b/src/swing/scala/swing/TextComponent.scala index 48c03a5f54..4d23399737 100644 --- a/src/swing/scala/swing/TextComponent.scala +++ b/src/swing/scala/swing/TextComponent.scala @@ -6,12 +6,9 @@ ** |/ ** \* */ - - package scala.swing import event._ -import javax.swing._ import javax.swing.text._ import javax.swing.event._ diff --git a/src/swing/scala/swing/ToggleButton.scala b/src/swing/scala/swing/ToggleButton.scala index 3d3d0b957f..8f210d00d8 100644 --- a/src/swing/scala/swing/ToggleButton.scala +++ b/src/swing/scala/swing/ToggleButton.scala @@ -6,11 +6,8 @@ ** |/ ** \* */ - - package scala.swing -import event._ import javax.swing._ /** diff --git a/src/swing/scala/swing/Window.scala b/src/swing/scala/swing/Window.scala index 5bdb50e959..a9f4ae7538 100644 --- a/src/swing/scala/swing/Window.scala +++ b/src/swing/scala/swing/Window.scala @@ -6,13 +6,10 @@ ** |/ ** \* */ - - package scala.swing import java.awt.{Window => AWTWindow} import event._ -import javax.swing._ /** * A window with decoration such as a title, border, and action buttons. -- cgit v1.2.3 From 66d3540769628fb08c3946c98fee2b5a3fe176ce Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 10 Nov 2012 11:07:24 -0700 Subject: Remove unused imports in partest. 
--- src/partest/scala/tools/partest/ScaladocModelTest.scala | 4 +--- src/partest/scala/tools/partest/nest/ConsoleFileManager.scala | 2 -- src/partest/scala/tools/partest/nest/ConsoleRunner.scala | 1 - src/partest/scala/tools/partest/nest/DirectRunner.scala | 1 - src/partest/scala/tools/partest/nest/FileManager.scala | 1 - src/partest/scala/tools/partest/nest/PathSettings.scala | 1 - 6 files changed, 1 insertion(+), 9 deletions(-) (limited to 'src') diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala index e7134d0271..acaddff944 100644 --- a/src/partest/scala/tools/partest/ScaladocModelTest.scala +++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala @@ -5,8 +5,6 @@ package scala.tools.partest -import scala.tools.partest._ -import java.io._ import scala.tools.nsc._ import scala.tools.nsc.util.CommandLineParser import scala.tools.nsc.doc.{Settings, DocFactory, Universe} @@ -87,7 +85,7 @@ abstract class ScaladocModelTest extends DirectTest { settings = new Settings(_ => ()) settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! val args = extraSettings + " " + scaladocSettings - val command = new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) + new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think val docFact = new DocFactory(new ConsoleReporter(settings), settings) docFact } diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala index 891830b509..75aed449a8 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala @@ -18,8 +18,6 @@ import io.{ Path, Directory } import File.pathSeparator import ClassPath.{ join } import PathResolver.{ Environment, Defaults } -import RunnerUtils._ - class ConsoleFileManager extends FileManager { var testBuild: Option[String] = PartestDefaults.testBuild diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala index 308124e250..d23ee81e4d 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala @@ -11,7 +11,6 @@ package nest import java.io.{File, PrintStream, FileOutputStream, BufferedReader, InputStreamReader, StringWriter, PrintWriter} import utils.Properties._ -import RunnerUtils._ import scala.tools.nsc.Properties.{ versionMsg, setProp } import scala.tools.nsc.util.CommandLineParser import scala.tools.nsc.io diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala index 32ef8b41ea..3aaf784cad 100644 --- a/src/partest/scala/tools/partest/nest/DirectRunner.scala +++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala @@ -14,7 +14,6 @@ import scala.tools.nsc.util.ScalaClassLoader import scala.tools.nsc.io.Path import scala.collection.{ mutable, immutable } import java.util.concurrent._ -import scala.collection.convert.decorateAll._ case class TestRunParams(val scalaCheckParentClassLoader: ScalaClassLoader) diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala index 2823967ecf..21fd314552 100644 --- a/src/partest/scala/tools/partest/nest/FileManager.scala +++ b/src/partest/scala/tools/partest/nest/FileManager.scala @@ -13,7 +13,6 @@ 
import java.io.{File, FilenameFilter, IOException, StringWriter, FileReader, PrintWriter, FileWriter} import java.net.URI import scala.tools.nsc.io.{ Path, Directory, File => SFile } -import scala.sys.process._ import scala.collection.mutable trait FileUtil { diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala index a42c2219b1..02651c527b 100644 --- a/src/partest/scala/tools/partest/nest/PathSettings.scala +++ b/src/partest/scala/tools/partest/nest/PathSettings.scala @@ -9,7 +9,6 @@ import scala.tools.nsc.Properties.{ setProp, propOrEmpty, propOrNone, propOrElse import scala.tools.nsc.util.ClassPath import scala.tools.nsc.io import io.{ Path, File, Directory } -import RunnerUtils._ object PathSettings { import PartestDefaults.{ testRootDir, srcDirName } -- cgit v1.2.3 From c4395b373c259be98ea06b156499569d985c97a4 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 10 Nov 2012 11:10:22 -0700 Subject: Remove unused imports in continuations. --- src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala | 1 - .../plugin/scala/tools/selectivecps/SelectiveANFTransform.scala | 3 --- .../plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala | 5 ----- .../plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala | 5 ----- 4 files changed, 14 deletions(-) (limited to 'src') diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala index eab442aaef..c591030bce 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala @@ -7,7 +7,6 @@ import scala.tools.nsc.Global trait CPSUtils { val global: Global import global._ - import definitions._ var cpsEnabled = false val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true" diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala index ef13f8b1d8..f62eebaaa0 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala @@ -2,13 +2,10 @@ package scala.tools.selectivecps -import scala.tools.nsc._ import scala.tools.nsc.transform._ import scala.tools.nsc.symtab._ import scala.tools.nsc.plugins._ -import scala.tools.nsc.ast._ - /** * In methods marked @cps, explicitly name results of calls to other @cps methods */ diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala index 8a500d6c4d..90e64d8171 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala @@ -3,15 +3,11 @@ package scala.tools.selectivecps import scala.tools.nsc -import scala.tools.nsc.typechecker._ import nsc.Global -import nsc.Phase import nsc.plugins.Plugin import nsc.plugins.PluginComponent class SelectiveCPSPlugin(val global: Global) extends Plugin { - import global._ - val name = "continuations" val description = "applies selective cps conversion" @@ -26,7 +22,6 @@ class SelectiveCPSPlugin(val global: Global) extends Plugin { override val runsBefore = List("uncurry") } - val components = List[PluginComponent](anfPhase, cpsPhase) val checker = new CPSAnnotationChecker { diff 
--git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala index f4b0fb0419..f16cfb10f8 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala @@ -2,13 +2,8 @@ package scala.tools.selectivecps -import scala.collection._ - -import scala.tools.nsc._ import scala.tools.nsc.transform._ import scala.tools.nsc.plugins._ - -import scala.tools.nsc.ast.TreeBrowsers import scala.tools.nsc.ast._ /** -- cgit v1.2.3 From dbd7d718ae4a18a6b78b8d52fb554e15830eb30c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 11 Nov 2012 12:50:31 -0800 Subject: Remove unused imports in library. --- src/library/scala/collection/BitSetLike.scala | 1 - src/library/scala/collection/DefaultMap.scala | 6 +----- src/library/scala/collection/GenIterableLike.scala | 2 +- src/library/scala/collection/GenIterableView.scala | 7 ------- src/library/scala/collection/GenIterableViewLike.scala | 7 ------- src/library/scala/collection/GenSeqView.scala | 7 ------- src/library/scala/collection/GenTraversableView.scala | 7 ------- src/library/scala/collection/GenTraversableViewLike.scala | 2 -- src/library/scala/collection/IndexedSeqLike.scala | 1 - src/library/scala/collection/Iterable.scala | 1 - src/library/scala/collection/IterableProxy.scala | 2 -- src/library/scala/collection/IterableViewLike.scala | 1 - src/library/scala/collection/JavaConversions.scala | 1 - src/library/scala/collection/JavaConverters.scala | 2 -- src/library/scala/collection/LinearSeqLike.scala | 4 ---- src/library/scala/collection/LinearSeqOptimized.scala | 6 ++---- src/library/scala/collection/MapProxyLike.scala | 2 -- src/library/scala/collection/SeqViewLike.scala | 1 - src/library/scala/collection/SetProxyLike.scala | 3 --- src/library/scala/collection/Traversable.scala | 4 +--- src/library/scala/collection/TraversableView.scala | 1 - src/library/scala/collection/TraversableViewLike.scala | 1 - src/library/scala/collection/convert/Decorators.scala | 2 +- src/library/scala/collection/generic/IterableForwarder.scala | 5 +---- src/library/scala/collection/immutable/DefaultMap.scala | 4 ---- src/library/scala/collection/immutable/List.scala | 3 --- src/library/scala/collection/immutable/LongMap.scala | 2 -- src/library/scala/collection/immutable/NumericRange.scala | 3 --- src/library/scala/collection/immutable/StringLike.scala | 1 - src/library/scala/collection/immutable/StringOps.scala | 2 -- src/library/scala/collection/mutable/ArrayBuilder.scala | 3 --- src/library/scala/collection/mutable/ArrayLike.scala | 3 --- src/library/scala/collection/mutable/BufferProxy.scala | 3 --- src/library/scala/collection/mutable/IndexedSeqLike.scala | 3 --- src/library/scala/collection/mutable/IndexedSeqOptimized.scala | 3 --- src/library/scala/collection/mutable/LinkedListLike.scala | 3 --- src/library/scala/collection/mutable/MapLike.scala | 4 +--- src/library/scala/collection/mutable/SeqLike.scala | 1 - src/library/scala/collection/mutable/SetBuilder.scala | 3 --- src/library/scala/collection/mutable/SetLike.scala | 2 +- src/library/scala/collection/mutable/SynchronizedQueue.scala | 2 -- src/library/scala/collection/mutable/SynchronizedSet.scala | 2 -- src/library/scala/collection/mutable/WrappedArrayBuilder.scala | 1 - src/library/scala/collection/parallel/ParIterable.scala | 1 - src/library/scala/collection/parallel/ParSeq.scala | 3 --- 
src/library/scala/collection/parallel/ParSeqView.scala | 3 +-- src/library/scala/collection/parallel/ParSet.scala | 6 ------ src/library/scala/collection/parallel/ParSetLike.scala | 8 -------- src/library/scala/collection/parallel/immutable/ParIterable.scala | 2 -- src/library/scala/collection/parallel/immutable/ParSeq.scala | 3 --- src/library/scala/collection/parallel/immutable/ParSet.scala | 1 - src/library/scala/collection/parallel/mutable/ParIterable.scala | 2 -- src/library/scala/collection/parallel/mutable/ParMapLike.scala | 3 --- src/library/scala/collection/parallel/mutable/ParSeq.scala | 6 ------ src/library/scala/collection/parallel/mutable/ParSet.scala | 5 ----- src/library/scala/collection/parallel/mutable/ParSetLike.scala | 6 ------ src/library/scala/concurrent/impl/Future.scala | 2 +- src/library/scala/math/ScalaNumericConversions.scala | 2 -- src/library/scala/util/automata/WordBerrySethi.scala | 2 +- src/library/scala/util/parsing/combinator/PackratParsers.scala | 1 - src/library/scala/util/parsing/combinator/lexical/Scanners.scala | 3 --- src/library/scala/util/parsing/combinator/testing/Tester.scala | 1 - src/library/scala/util/parsing/json/JSON.scala | 3 --- src/library/scala/util/parsing/json/Lexer.scala | 1 - src/library/scala/util/parsing/json/Parser.scala | 1 - src/library/scala/xml/XML.scala | 2 -- src/library/scala/xml/factory/XMLLoader.scala | 2 +- src/library/scala/xml/include/sax/EncodingHeuristics.scala | 2 -- src/library/scala/xml/include/sax/XIncluder.scala | 2 -- src/library/scala/xml/parsing/MarkupParserCommon.scala | 1 - 70 files changed, 13 insertions(+), 185 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala index 4a1c0beaa6..d0f4e323c7 100644 --- a/src/library/scala/collection/BitSetLike.scala +++ b/src/library/scala/collection/BitSetLike.scala @@ -11,7 +11,6 @@ package scala.collection import BitSetLike._ -import generic._ import mutable.StringBuilder /** A template trait for bitsets. diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala index 5c91183891..cbd7e3f8b9 100644 --- a/src/library/scala/collection/DefaultMap.scala +++ b/src/library/scala/collection/DefaultMap.scala @@ -6,12 +6,8 @@ ** |/ ** \* */ - - package scala.collection -import generic._ - /** A default map which implements the `+` and `-` methods of maps. * * Instances that inherit from `DefaultMap[A, B]` still have to define: @@ -27,7 +23,7 @@ import generic._ * @since 2.8 */ trait DefaultMap[A, +B] extends Map[A, B] { self => - + /** A default implementation which creates a new immutable map. */ override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = { diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala index 2ba9a7283d..ceb97707e1 100644 --- a/src/library/scala/collection/GenIterableLike.scala +++ b/src/library/scala/collection/GenIterableLike.scala @@ -8,7 +8,7 @@ package scala.collection -import generic.{ CanBuildFrom => CBF, _ } +import generic.{ CanBuildFrom => CBF } /** A template trait for all iterable collections which may possibly * have their operations implemented in parallel. 
diff --git a/src/library/scala/collection/GenIterableView.scala b/src/library/scala/collection/GenIterableView.scala index ca0332e9ad..5ab48efdf3 100644 --- a/src/library/scala/collection/GenIterableView.scala +++ b/src/library/scala/collection/GenIterableView.scala @@ -8,11 +8,4 @@ package scala.collection - -import generic._ - - - trait GenIterableView[+A, +Coll] extends GenIterableViewLike[A, Coll, GenIterableView[A, Coll]] { } - - diff --git a/src/library/scala/collection/GenIterableViewLike.scala b/src/library/scala/collection/GenIterableViewLike.scala index 4e4ceb4cea..e8d264cdd4 100644 --- a/src/library/scala/collection/GenIterableViewLike.scala +++ b/src/library/scala/collection/GenIterableViewLike.scala @@ -8,13 +8,6 @@ package scala.collection - - -import generic._ -import TraversableView.NoBuilder - - - trait GenIterableViewLike[+A, +Coll, +This <: GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This]] diff --git a/src/library/scala/collection/GenSeqView.scala b/src/library/scala/collection/GenSeqView.scala index 92c8b779e9..423f8e305e 100644 --- a/src/library/scala/collection/GenSeqView.scala +++ b/src/library/scala/collection/GenSeqView.scala @@ -8,11 +8,4 @@ package scala.collection - -import generic._ - - - trait GenSeqView[+A, +Coll] extends GenSeqViewLike[A, Coll, GenSeqView[A, Coll]] { } - - diff --git a/src/library/scala/collection/GenTraversableView.scala b/src/library/scala/collection/GenTraversableView.scala index cceb06882e..1d98eff8c1 100644 --- a/src/library/scala/collection/GenTraversableView.scala +++ b/src/library/scala/collection/GenTraversableView.scala @@ -8,11 +8,4 @@ package scala.collection - -import generic._ - - - trait GenTraversableView[+A, +Coll] extends GenTraversableViewLike[A, Coll, GenTraversableView[A, Coll]] { } - - diff --git a/src/library/scala/collection/GenTraversableViewLike.scala b/src/library/scala/collection/GenTraversableViewLike.scala index 77fe0802bf..8c9607663b 100644 --- a/src/library/scala/collection/GenTraversableViewLike.scala +++ b/src/library/scala/collection/GenTraversableViewLike.scala @@ -11,8 +11,6 @@ package scala.collection import generic._ import mutable.{ Builder, ArrayBuffer } -import TraversableView.NoBuilder - trait GenTraversableViewLike[+A, +Coll, diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala index 7d87a8a630..1d8e2b1583 100644 --- a/src/library/scala/collection/IndexedSeqLike.scala +++ b/src/library/scala/collection/IndexedSeqLike.scala @@ -8,7 +8,6 @@ package scala.collection -import generic._ import mutable.ArrayBuffer import scala.annotation.tailrec diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala index 5b73d720a8..09c9ce122c 100644 --- a/src/library/scala/collection/Iterable.scala +++ b/src/library/scala/collection/Iterable.scala @@ -11,7 +11,6 @@ package scala.collection import generic._ -import scala.util.control.Breaks._ import mutable.Builder /** A base trait for iterable collections. diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala index 2d041928cc..ddb2502965 100644 --- a/src/library/scala/collection/IterableProxy.scala +++ b/src/library/scala/collection/IterableProxy.scala @@ -8,8 +8,6 @@ package scala.collection -import generic._ - /** This trait implements a proxy for iterable objects. It forwards all calls * to a different iterable object. 
* diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala index 3a81a3422f..b195ae4bc7 100644 --- a/src/library/scala/collection/IterableViewLike.scala +++ b/src/library/scala/collection/IterableViewLike.scala @@ -9,7 +9,6 @@ package scala.collection import generic._ -import TraversableView.NoBuilder import immutable.Stream import scala.language.implicitConversions diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala index ce4ba870d1..7ff29650fa 100644 --- a/src/library/scala/collection/JavaConversions.scala +++ b/src/library/scala/collection/JavaConversions.scala @@ -8,7 +8,6 @@ package scala.collection -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } import convert._ /** A collection of implicit conversions supporting interoperability between diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala index f00c8880d2..439991708e 100755 --- a/src/library/scala/collection/JavaConverters.scala +++ b/src/library/scala/collection/JavaConverters.scala @@ -8,14 +8,12 @@ package scala.collection -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } import convert._ // TODO: I cleaned all this documentation up in JavaConversions, but the // documentation in here is basically the pre-cleaned-up version with minor // additions. Would be nice to have in one place. - /** A collection of decorators that allow converting between * Scala and Java collections using `asScala` and `asJava` methods. * diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala index 78108a9c0f..2a824bcff3 100644 --- a/src/library/scala/collection/LinearSeqLike.scala +++ b/src/library/scala/collection/LinearSeqLike.scala @@ -6,13 +6,9 @@ ** |/ ** \* */ - package scala.collection -import generic._ -import mutable.ListBuffer import immutable.List -import scala.util.control.Breaks._ import scala.annotation.tailrec /** A template trait for linear sequences of type `LinearSeq[A]`. diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala index 280326d46f..f71fd227cd 100755 --- a/src/library/scala/collection/LinearSeqOptimized.scala +++ b/src/library/scala/collection/LinearSeqOptimized.scala @@ -8,10 +8,8 @@ package scala.collection -import generic._ import mutable.ListBuffer import immutable.List -import scala.util.control.Breaks._ /** A template trait for linear sequences of type `LinearSeq[A]` which optimizes * the implementation of several methods under the assumption of fast linear access. 
@@ -91,7 +89,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea } false } - + override /*IterableLike*/ def find(p: A => Boolean): Option[A] = { var these = this @@ -112,7 +110,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea } acc } - + override /*IterableLike*/ def foldRight[B](z: B)(f: (A, B) => B): B = if (this.isEmpty) z diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala index 44b39f65da..ad09f7b970 100644 --- a/src/library/scala/collection/MapProxyLike.scala +++ b/src/library/scala/collection/MapProxyLike.scala @@ -8,8 +8,6 @@ package scala.collection -import generic._ - // Methods could be printed by cat MapLike.scala | egrep '^ (override )?def' /** This trait implements a proxy for Map objects. It forwards diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala index 5f2bf902b1..27536791a2 100644 --- a/src/library/scala/collection/SeqViewLike.scala +++ b/src/library/scala/collection/SeqViewLike.scala @@ -10,7 +10,6 @@ package scala.collection import generic._ import Seq.fill -import TraversableView.NoBuilder /** A template trait for non-strict views of sequences. * $seqViewInfo diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala index 5196f39917..265d1c4806 100644 --- a/src/library/scala/collection/SetProxyLike.scala +++ b/src/library/scala/collection/SetProxyLike.scala @@ -6,11 +6,8 @@ ** |/ ** \* */ - package scala.collection -import generic._ - // Methods could be printed by cat SetLike.scala | egrep '^ (override )?def' /** This trait implements a proxy for sets. It forwards diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala index 36ef230a42..4ca2095f4c 100644 --- a/src/library/scala/collection/Traversable.scala +++ b/src/library/scala/collection/Traversable.scala @@ -6,12 +6,10 @@ ** |/ ** \* */ - - package scala.collection import generic._ -import mutable.{Builder, Buffer, ArrayBuffer, ListBuffer} +import mutable.Builder import scala.util.control.Breaks /** A trait for traversable collections. diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala index cce6b72257..af219084b8 100644 --- a/src/library/scala/collection/TraversableView.scala +++ b/src/library/scala/collection/TraversableView.scala @@ -10,7 +10,6 @@ package scala.collection import generic._ import mutable.Builder -import TraversableView.NoBuilder /** A base trait for non-strict views of traversable collections. 
* $traversableViewInfo diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala index 0925fe4770..6846a505bf 100644 --- a/src/library/scala/collection/TraversableViewLike.scala +++ b/src/library/scala/collection/TraversableViewLike.scala @@ -10,7 +10,6 @@ package scala.collection import generic._ import mutable.{ Builder, ArrayBuffer } -import TraversableView.NoBuilder import scala.annotation.migration import scala.language.implicitConversions diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala index e2c46c1e4f..f004e4712b 100644 --- a/src/library/scala/collection/convert/Decorators.scala +++ b/src/library/scala/collection/convert/Decorators.scala @@ -9,7 +9,7 @@ package scala.collection package convert -import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } +import java.{ util => ju } private[collection] trait Decorators { /** Generic class containing the `asJava` converter method */ diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala index 90ebcace84..8feace3f8b 100644 --- a/src/library/scala/collection/generic/IterableForwarder.scala +++ b/src/library/scala/collection/generic/IterableForwarder.scala @@ -6,12 +6,9 @@ ** |/ ** \* */ - - package scala.collection.generic -import scala.collection._ -import scala.collection.mutable.Buffer +import scala.collection._ /** This trait implements a forwarder for iterable objects. It forwards * all calls to a different iterable object, except for diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala index 4a0503adfd..620baec9a8 100755 --- a/src/library/scala/collection/immutable/DefaultMap.scala +++ b/src/library/scala/collection/immutable/DefaultMap.scala @@ -6,13 +6,9 @@ ** |/ ** \* */ - - package scala.collection package immutable -import generic._ - /** A default map which implements the `+` and `-` * methods of maps. It does so using the default builder for * maps defined in the `Map` object. diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 1ebbff53ea..aeaa479e2f 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -386,9 +386,6 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend * @define Coll `List` */ object List extends SeqFactory[List] { - - import scala.collection.{Iterable, Seq, IndexedSeq} - /** $genericCanBuildFromInfo */ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, List[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index 2a2910439a..fab1b7f00b 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -77,8 +77,6 @@ object LongMap { } } -import LongMap._ - // Iterator over a non-empty LongMap. 
private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] { diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index 9e64bee1ce..195aeed281 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -6,12 +6,10 @@ ** |/ ** \* */ - package scala.collection package immutable import mutable.{ Builder, ListBuffer } -import generic._ /** `NumericRange` is a more generic version of the * `Range` class which works with arbitrary types. @@ -176,7 +174,6 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { catch { case _: ClassCastException => false } final override def sum[B >: T](implicit num: Numeric[B]): B = { - import num.Ops if (isEmpty) this.num fromInt 0 else if (numRangeElements == 1) head else ((this.num fromInt numRangeElements) * (head + last) / (this.num fromInt 2)) diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index 68bef42c34..663318330c 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -9,7 +9,6 @@ package scala.collection package immutable -import generic._ import mutable.Builder import scala.util.matching.Regex import scala.math.ScalaNumber diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala index a650d98697..16c1f96cc2 100644 --- a/src/library/scala/collection/immutable/StringOps.scala +++ b/src/library/scala/collection/immutable/StringOps.scala @@ -6,8 +6,6 @@ ** |/ ** \* */ - - package scala.collection package immutable diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala index 0ce2cda32c..2fe3e91d68 100644 --- a/src/library/scala/collection/mutable/ArrayBuilder.scala +++ b/src/library/scala/collection/mutable/ArrayBuilder.scala @@ -6,12 +6,9 @@ ** |/ ** \* */ - - package scala.collection package mutable -import generic._ import scala.reflect.ClassTag import scala.runtime.ScalaRunTime diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala index 31f3d2a497..40017aa08e 100644 --- a/src/library/scala/collection/mutable/ArrayLike.scala +++ b/src/library/scala/collection/mutable/ArrayLike.scala @@ -6,11 +6,8 @@ ** |/ ** \* */ - - package scala.collection package mutable -import generic._ /** A common supertrait of `ArrayOps` and `WrappedArray` that factors out most * operations on arrays and wrapped arrays. diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala index 37aa1862fa..ade0b94230 100644 --- a/src/library/scala/collection/mutable/BufferProxy.scala +++ b/src/library/scala/collection/mutable/BufferProxy.scala @@ -6,12 +6,9 @@ ** |/ ** \* */ - - package scala.collection package mutable -import generic._ import script._ /** This is a simple proxy class for - import scala.collection.Traversable - /** A common implementation of `newBuilder` for all mutable maps * in terms of `empty`. 
* diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala index 447100cf4c..ddfde536c9 100644 --- a/src/library/scala/collection/mutable/SeqLike.scala +++ b/src/library/scala/collection/mutable/SeqLike.scala @@ -9,7 +9,6 @@ package scala.collection package mutable -import generic._ import parallel.mutable.ParSeq /** A template trait for mutable sequences of type `mutable.Seq[A]`. diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala index 42fd651d41..40f0b8932c 100644 --- a/src/library/scala/collection/mutable/SetBuilder.scala +++ b/src/library/scala/collection/mutable/SetBuilder.scala @@ -6,12 +6,9 @@ ** |/ ** \* */ - package scala.collection package mutable -import generic._ - /** The canonical builder for mutable Sets. * * @tparam A The type of the elements that will be contained in this set. diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala index 01f87447ae..4a907e7dc4 100644 --- a/src/library/scala/collection/mutable/SetLike.scala +++ b/src/library/scala/collection/mutable/SetLike.scala @@ -11,7 +11,7 @@ package mutable import generic._ import script._ -import scala.annotation.{ migration, bridge } +import scala.annotation.migration import parallel.mutable.ParSet /** A template trait for mutable sets of type `mutable.Set[A]`. diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala index 9559d5eaa5..c5f133eec7 100644 --- a/src/library/scala/collection/mutable/SynchronizedQueue.scala +++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala @@ -25,8 +25,6 @@ package mutable * @define coll synchronized queue */ class SynchronizedQueue[A] extends Queue[A] { - import scala.collection.Traversable - /** Checks if the queue is empty. * * @return true, iff there is no element in the queue. diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala index e4a44993ff..bc9873880c 100644 --- a/src/library/scala/collection/mutable/SynchronizedSet.scala +++ b/src/library/scala/collection/mutable/SynchronizedSet.scala @@ -24,8 +24,6 @@ import script._ * @define coll synchronized set */ trait SynchronizedSet[A] extends Set[A] { - import scala.collection.Traversable - abstract override def size: Int = synchronized { super.size } diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala index 7e0210311c..55328a5d3d 100644 --- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala +++ b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala @@ -11,7 +11,6 @@ package scala.collection package mutable -import generic._ import scala.reflect.ClassTag import scala.runtime.ScalaRunTime._ diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala index 2b24c88139..f170b944eb 100644 --- a/src/library/scala/collection/parallel/ParIterable.scala +++ b/src/library/scala/collection/parallel/ParIterable.scala @@ -11,7 +11,6 @@ package scala.collection.parallel import scala.collection.GenIterable import scala.collection.generic._ import scala.collection.parallel.mutable.ParArrayCombiner -import scala.collection.parallel.mutable.ParArray /** A template trait for parallel iterable collections. 
* diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala index b905d1d41f..dee523ad89 100644 --- a/src/library/scala/collection/parallel/ParSeq.scala +++ b/src/library/scala/collection/parallel/ParSeq.scala @@ -18,9 +18,6 @@ import scala.collection.generic.ParFactory import scala.collection.generic.CanCombineFrom import scala.collection.GenSeq import scala.collection.parallel.mutable.ParArrayCombiner -import scala.collection.parallel.mutable.ParArray - - /** A template trait for parallel sequences. * diff --git a/src/library/scala/collection/parallel/ParSeqView.scala b/src/library/scala/collection/parallel/ParSeqView.scala index 3e3c497352..9acc4b0b73 100644 --- a/src/library/scala/collection/parallel/ParSeqView.scala +++ b/src/library/scala/collection/parallel/ParSeqView.scala @@ -6,10 +6,9 @@ ** |/ ** \* */ - package scala.collection.parallel -import scala.collection.{ TraversableView, SeqView, Parallel, Iterator } +import scala.collection.{ SeqView, Parallel, Iterator } import scala.collection.generic.CanCombineFrom /** A template view of a non-strict view of a parallel sequence. diff --git a/src/library/scala/collection/parallel/ParSet.scala b/src/library/scala/collection/parallel/ParSet.scala index 6e5e9b4387..bc6d5c6245 100644 --- a/src/library/scala/collection/parallel/ParSet.scala +++ b/src/library/scala/collection/parallel/ParSet.scala @@ -17,14 +17,8 @@ package scala.collection.parallel import scala.collection.Set import scala.collection.GenSet -import scala.collection.mutable.Builder import scala.collection.generic._ - - - - - /** A template trait for parallel sets. * * $sideeffects diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala index c80b5ded26..20a5f693ce 100644 --- a/src/library/scala/collection/parallel/ParSetLike.scala +++ b/src/library/scala/collection/parallel/ParSetLike.scala @@ -15,14 +15,6 @@ import scala.collection.SetLike import scala.collection.GenSetLike import scala.collection.GenSet import scala.collection.Set -import scala.collection.mutable.Builder - - - - - - - /** A template trait for parallel sets. This trait is mixed in with concrete * parallel sets to override the representation type. diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala index 142f07ff26..ec07e44c4d 100644 --- a/src/library/scala/collection/parallel/immutable/ParIterable.scala +++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala @@ -15,8 +15,6 @@ import scala.collection.generic._ import scala.collection.parallel.ParIterableLike import scala.collection.parallel.Combiner -import scala.collection.GenIterable - /** A template trait for immutable parallel iterable collections. * diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala index aa19307387..b54a5f0205 100644 --- a/src/library/scala/collection/parallel/immutable/ParSeq.scala +++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala @@ -18,9 +18,6 @@ import scala.collection.generic.CanCombineFrom import scala.collection.generic.ParFactory import scala.collection.parallel.ParSeqLike import scala.collection.parallel.Combiner -import scala.collection.GenSeq - - /** An immutable variant of `ParSeq`. 
* diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala index 3622377a55..aba8486ab5 100644 --- a/src/library/scala/collection/parallel/immutable/ParSet.scala +++ b/src/library/scala/collection/parallel/immutable/ParSet.scala @@ -9,7 +9,6 @@ package scala.collection package parallel.immutable -import scala.collection.GenSet import scala.collection.generic._ import scala.collection.parallel.ParSetLike import scala.collection.parallel.Combiner diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala index 7090c510a0..d76e4b1745 100644 --- a/src/library/scala/collection/parallel/mutable/ParIterable.scala +++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala @@ -12,8 +12,6 @@ package scala.collection.parallel.mutable import scala.collection.generic._ import scala.collection.parallel.ParIterableLike import scala.collection.parallel.Combiner -import scala.collection.GenIterable - /** A template trait for mutable parallel iterable collections. * diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala index cdcfc59f8f..08bc706c8a 100644 --- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala +++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala @@ -12,13 +12,10 @@ package mutable import scala.collection.generic._ -import scala.collection.mutable.Builder import scala.collection.mutable.Cloneable import scala.collection.generic.Growable import scala.collection.generic.Shrinkable - - /** A template trait for mutable parallel maps. This trait is to be mixed in * with concrete parallel maps to override the representation type. * diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala index 95a4d4a13a..8a55ab83f1 100644 --- a/src/library/scala/collection/parallel/mutable/ParSeq.scala +++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala @@ -17,12 +17,6 @@ import scala.collection.generic.CanCombineFrom import scala.collection.generic.ParFactory import scala.collection.parallel.ParSeqLike import scala.collection.parallel.Combiner -import scala.collection.GenSeq - - - - - /** A mutable variant of `ParSeq`. * diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala index d8f821746c..ca41852512 100644 --- a/src/library/scala/collection/parallel/mutable/ParSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParSet.scala @@ -13,11 +13,6 @@ package scala.collection.parallel.mutable import scala.collection.generic._ import scala.collection.parallel.Combiner -import scala.collection.GenSet - - - - /** A mutable variant of `ParSet`. 
* diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala index 609888f1a9..0941229124 100644 --- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala +++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala @@ -10,17 +10,11 @@ package scala.collection package parallel.mutable - - -import scala.collection.mutable.Set -import scala.collection.mutable.Builder import scala.collection.mutable.Cloneable import scala.collection.GenSetLike import scala.collection.generic.Growable import scala.collection.generic.Shrinkable - - /** A template trait for mutable parallel sets. This trait is mixed in with concrete * parallel sets to override the representation type. * diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala index 8c2a77c75f..055ce6e4fa 100644 --- a/src/library/scala/concurrent/impl/Future.scala +++ b/src/library/scala/concurrent/impl/Future.scala @@ -12,7 +12,7 @@ package scala.concurrent.impl import scala.concurrent.ExecutionContext import scala.util.control.NonFatal -import scala.util.{Try, Success, Failure} +import scala.util.{ Success, Failure } private[concurrent] object Future { diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala index 6ddf48d03b..59fc7f27b2 100644 --- a/src/library/scala/math/ScalaNumericConversions.scala +++ b/src/library/scala/math/ScalaNumericConversions.scala @@ -8,8 +8,6 @@ package scala.math -import java.{ lang => jl } - /** A slightly more specific conversion trait for classes which * extend ScalaNumber (which excludes value classes.) */ diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala index 3dcbf65aca..2f4625da44 100644 --- a/src/library/scala/util/automata/WordBerrySethi.scala +++ b/src/library/scala/util/automata/WordBerrySethi.scala @@ -21,7 +21,7 @@ import scala.util.regexp.WordExp abstract class WordBerrySethi extends BaseBerrySethi { override val lang: WordExp - import lang.{ Alt, Eps, Letter, Meta, RegExp, Sequ, Star, _labelT } + import lang.{ Alt, Eps, Letter, RegExp, Sequ, Star, _labelT } protected var labels: mutable.HashSet[_labelT] = _ // don't let this fool you, only labelAt is a real, surjective mapping diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/library/scala/util/parsing/combinator/PackratParsers.scala index 16705d45f9..cd0907e40f 100644 --- a/src/library/scala/util/parsing/combinator/PackratParsers.scala +++ b/src/library/scala/util/parsing/combinator/PackratParsers.scala @@ -8,7 +8,6 @@ package scala.util.parsing.combinator -import scala.util.parsing.combinator._ import scala.util.parsing.input.{ Reader, Position } import scala.collection.mutable import scala.language.implicitConversions diff --git a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala index 5c23ad70cd..f6a8daabd9 100644 --- a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala +++ b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala @@ -6,13 +6,10 @@ ** |/ ** \* */ - - package scala.util.parsing package combinator package lexical -import token._ import input._ /** This component provides core functionality for lexical parsers. 
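The import clauses deleted in the hunks here are either unused or redundant: they name things that nothing in the file refers to, or things that are already in scope because the definition lives in the same package or is inherited from a parent. A small self-contained sketch of the second case, using hypothetical names rather than code from this series:

    package demo {
      class Tokens { def token = "word" }   // stands in for an inherited base class
      class Helper                          // stands in for a type defined in the same package

      class Scanner extends Tokens {
        def shout  = token.toUpperCase      // inherited member: no import required
        def helper = new Helper             // same-package type: no import required
      }
    }

    object ImportDemo extends App {
      println(new demo.Scanner().shout)     // prints WORD
    }

Removing such an import cannot change how any remaining reference resolves, which is why the hunks here touch only import lists and the blank lines around them.
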
diff --git a/src/library/scala/util/parsing/combinator/testing/Tester.scala b/src/library/scala/util/parsing/combinator/testing/Tester.scala index 95730ee292..3cdab2a885 100644 --- a/src/library/scala/util/parsing/combinator/testing/Tester.scala +++ b/src/library/scala/util/parsing/combinator/testing/Tester.scala @@ -7,7 +7,6 @@ \* */ package scala.util.parsing.combinator.testing -import scala.util.parsing.combinator._ import scala.util.parsing.combinator.lexical.Lexical import scala.util.parsing.combinator.syntactical.TokenParsers diff --git a/src/library/scala/util/parsing/json/JSON.scala b/src/library/scala/util/parsing/json/JSON.scala index 2f450ed864..8f951d519a 100644 --- a/src/library/scala/util/parsing/json/JSON.scala +++ b/src/library/scala/util/parsing/json/JSON.scala @@ -7,9 +7,6 @@ \* */ package scala.util.parsing.json -import scala.util.parsing.combinator._ -import scala.util.parsing.combinator.syntactical._ -import scala.util.parsing.combinator.lexical._ /** * This object provides a simple interface to the JSON parser class. diff --git a/src/library/scala/util/parsing/json/Lexer.scala b/src/library/scala/util/parsing/json/Lexer.scala index 991b5d5c6c..762c1352a7 100644 --- a/src/library/scala/util/parsing/json/Lexer.scala +++ b/src/library/scala/util/parsing/json/Lexer.scala @@ -11,7 +11,6 @@ package scala.util.parsing.json import scala.util.parsing.combinator._ -import scala.util.parsing.combinator.syntactical._ import scala.util.parsing.combinator.lexical._ import scala.util.parsing.input.CharArrayReader.EofCh diff --git a/src/library/scala/util/parsing/json/Parser.scala b/src/library/scala/util/parsing/json/Parser.scala index cb87866f07..bf1162000b 100644 --- a/src/library/scala/util/parsing/json/Parser.scala +++ b/src/library/scala/util/parsing/json/Parser.scala @@ -12,7 +12,6 @@ package scala.util.parsing.json import scala.util.parsing.combinator._ import scala.util.parsing.combinator.syntactical._ -import scala.util.parsing.combinator.lexical._ /** * A marker class for the JSON result types. diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala index d101684459..ec5e5e9e1c 100755 --- a/src/library/scala/xml/XML.scala +++ b/src/library/scala/xml/XML.scala @@ -45,8 +45,6 @@ object MinimizeMode extends Enumeration { val Never = Value } -import Source._ - /** The object `XML` provides constants, and functions to load * and save XML elements. Use this when data binding is not desired, i.e. * when XML is handled using `Symbol` nodes. 
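An import that has become unused is not an error, so a clean compile by itself says nothing about whether a given clause is still needed. Later compilers can report these directly; a minimal sketch, assuming a 2.11-or-newer scalac (the -Ywarn-unused-import option is not available in the 2012-era sources these patches modify):

    // UnusedImportDemo.scala, a hypothetical file used only for illustration
    import scala.collection.mutable.ListBuffer   // referenced nowhere below: reported under -Ywarn-unused-import
    import scala.util.matching.Regex             // referenced below: no warning

    object UnusedImportDemo {
      val digits: Regex = "[0-9]+".r
      def main(args: Array[String]): Unit =
        println(digits.findFirstIn("scala 2.10") getOrElse "no digits")
    }

Compiling with `scalac -Ywarn-unused-import UnusedImportDemo.scala` warns about the first import and accepts the second without comment.
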
diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/library/scala/xml/factory/XMLLoader.scala index 72e4c51b11..efa241e388 100644 --- a/src/library/scala/xml/factory/XMLLoader.scala +++ b/src/library/scala/xml/factory/XMLLoader.scala @@ -12,7 +12,7 @@ package factory import javax.xml.parsers.SAXParserFactory import parsing.{ FactoryAdapter, NoBindingFactoryAdapter } -import java.io.{ InputStream, Reader, StringReader, File, FileDescriptor, FileInputStream } +import java.io.{ InputStream, Reader, File, FileDescriptor } import java.net.URL /** Presents collection of XML loading methods which use the parser diff --git a/src/library/scala/xml/include/sax/EncodingHeuristics.scala b/src/library/scala/xml/include/sax/EncodingHeuristics.scala index 1340689cae..8d8ce5b290 100644 --- a/src/library/scala/xml/include/sax/EncodingHeuristics.scala +++ b/src/library/scala/xml/include/sax/EncodingHeuristics.scala @@ -6,10 +6,8 @@ ** |/ ** \* */ - package scala.xml package include.sax -import scala.xml.include._ import java.io.InputStream import scala.util.matching.Regex diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala index 5064d6b3d8..81c5613541 100644 --- a/src/library/scala/xml/include/sax/XIncluder.scala +++ b/src/library/scala/xml/include/sax/XIncluder.scala @@ -6,11 +6,9 @@ ** |/ ** \* */ - package scala.xml package include.sax -import scala.xml.include._ import scala.collection.mutable import org.xml.sax.{ ContentHandler, XMLReader, Locator, Attributes } import org.xml.sax.ext.LexicalHandler diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala index da640484e0..43ec539931 100644 --- a/src/library/scala/xml/parsing/MarkupParserCommon.scala +++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala @@ -10,7 +10,6 @@ package scala.xml package parsing import scala.io.Source -import scala.xml.dtd._ import scala.annotation.switch import Utility.Escapes.{ pairs => unescape } -- cgit v1.2.3 From 120879e2f5a10beaa94ef9e886e67a1c092fded0 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 10 Nov 2012 15:52:25 -0700 Subject: Deal with possibly spurious warning in Macros. --- src/compiler/scala/tools/nsc/typechecker/Macros.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 09f3fefeba..4b534b0d2e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -720,16 +720,15 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { /** Does the same as `macroExpand`, but without typechecking the expansion * Meant for internal use within the macro infrastructure, don't use it elsewhere. 
*/ - private def macroExpand1(typer: Typer, expandee: Tree): MacroExpansionResult = + private def macroExpand1(typer: Typer, expandee: Tree): MacroExpansionResult = { // verbose printing might cause recursive macro expansions, so I'm shutting it down here withInfoLevel(nodePrinters.InfoLevel.Quiet) { if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) { val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments" macroTraceVerbose("cancelled macro expansion because of %s: ".format(reason))(expandee) - return Cancel(typer.infer.setError(expandee)) + Cancel(typer.infer.setError(expandee)) } - - try { + else try { val runtime = macroRuntime(expandee.symbol) if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime) else macroExpandWithoutRuntime(typer, expandee) @@ -737,6 +736,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { case typer.TyperErrorGen.MacroExpansionException => Failure(expandee) } } + } /** Expands a macro when a runtime (i.e. the macro implementation) can be successfully loaded * Meant for internal use within the macro infrastructure, don't use it elsewhere. -- cgit v1.2.3 From 345f937b3441f248ac9156484758fa17b1a78941 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 11 Nov 2012 17:34:16 -0800 Subject: applyOrElse is a synthetic method. --- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 84803d0b6b..a3340bcc16 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -336,7 +336,7 @@ abstract class UnCurry extends InfoTransform // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = val applyOrElseMethodDef = { - val methSym = anonClass.newMethod(nme.applyOrElse, fun.pos, newFlags = FINAL | OVERRIDE) + val methSym = anonClass.newMethod(nme.applyOrElse, fun.pos, newFlags = FINAL | OVERRIDE | SYNTHETIC) val List(argtpe) = formals val A1 = methSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argtpe) -- cgit v1.2.3 From 645c2676dd6699ac24a57dfe750386bbdb827ee8 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 12 Nov 2012 23:21:47 -0800 Subject: Commenting out unused members. I want to get this commit into the history because the tests pass here, which demonstrates that every commented out method is not only unnecessary internally but has zero test coverage. Since I know (based on the occasional source code comment, or more often based on knowing something about other source bases) that some of these can't be removed without breaking other things, I want to at least record a snapshot of the identities of all these unused and untested methods. This commit will be reverted; then there will be another commit which removes the subset of these methods which I believe to be removable. The remainder are in great need of tests which exercise the interfaces upon which other repositories depend. 
--- src/compiler/scala/reflect/reify/Errors.scala | 8 +- .../scala/reflect/reify/codegen/GenUtils.scala | 40 ++-- .../scala/reflect/reify/phases/Metalevels.scala | 4 +- .../scala/reflect/reify/utils/SymbolTables.scala | 2 +- src/compiler/scala/tools/ant/sabbus/Settings.scala | 2 +- src/compiler/scala/tools/cmd/FromString.scala | 14 +- src/compiler/scala/tools/cmd/Reference.scala | 2 +- .../scala/tools/nsc/CompilationUnits.scala | 22 +- src/compiler/scala/tools/nsc/CompileServer.scala | 2 +- src/compiler/scala/tools/nsc/CompilerCommand.scala | 6 +- src/compiler/scala/tools/nsc/CompilerRun.scala | 36 +-- src/compiler/scala/tools/nsc/Global.scala | 107 +++++---- src/compiler/scala/tools/nsc/ObjectRunner.scala | 4 +- src/compiler/scala/tools/nsc/Phases.scala | 4 +- src/compiler/scala/tools/nsc/Properties.scala | 2 +- src/compiler/scala/tools/nsc/ScriptRunner.scala | 2 +- src/compiler/scala/tools/nsc/ast/DocComments.scala | 8 +- src/compiler/scala/tools/nsc/ast/Printers.scala | 82 +------ src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 76 +++---- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 170 +++++++-------- src/compiler/scala/tools/nsc/ast/TreeInfo.scala | 6 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 25 ++- .../scala/tools/nsc/ast/parser/Scanners.scala | 59 ++--- .../scala/tools/nsc/ast/parser/Tokens.scala | 62 +++--- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 26 +-- .../tools/nsc/backend/icode/BasicBlocks.scala | 36 +-- .../nsc/backend/icode/ExceptionHandlers.scala | 10 +- .../scala/tools/nsc/backend/icode/GenICode.scala | 28 +-- .../scala/tools/nsc/backend/icode/Members.scala | 50 ++--- .../scala/tools/nsc/backend/icode/Opcodes.scala | 20 +- .../scala/tools/nsc/backend/icode/Primitives.scala | 16 +- .../scala/tools/nsc/backend/icode/Repository.scala | 14 +- .../scala/tools/nsc/backend/icode/TypeKinds.scala | 4 +- .../scala/tools/nsc/backend/icode/TypeStacks.scala | 6 +- .../backend/icode/analysis/CopyPropagation.scala | 20 +- .../backend/icode/analysis/DataFlowAnalysis.scala | 10 +- .../backend/icode/analysis/TypeFlowAnalysis.scala | 62 +++--- .../scala/tools/nsc/backend/jvm/GenASM.scala | 12 +- .../scala/tools/nsc/backend/jvm/GenJVM.scala | 26 +-- .../scala/tools/nsc/backend/msil/GenMSIL.scala | 12 +- .../tools/nsc/backend/opt/ClosureElimination.scala | 4 +- .../scala/tools/nsc/backend/opt/Inliners.scala | 6 +- src/compiler/scala/tools/nsc/doc/html/Page.scala | 6 +- .../scala/tools/nsc/doc/model/Entity.scala | 14 +- .../tools/nsc/doc/model/IndexModelFactory.scala | 2 +- .../scala/tools/nsc/doc/model/ModelFactory.scala | 48 ++-- .../doc/model/ModelFactoryImplicitSupport.scala | 32 +-- .../tools/nsc/doc/model/comment/Comment.scala | 2 +- .../nsc/doc/model/comment/CommentFactory.scala | 36 +-- .../tools/nsc/doc/model/diagram/Diagram.scala | 18 +- .../scala/tools/nsc/interactive/BuildManager.scala | 2 +- .../scala/tools/nsc/interactive/Global.scala | 68 +++--- .../scala/tools/nsc/interpreter/ByteCode.scala | 28 +-- .../scala/tools/nsc/interpreter/CodeHandlers.scala | 100 ++++----- .../scala/tools/nsc/interpreter/CommandLine.scala | 2 +- .../scala/tools/nsc/interpreter/Completion.scala | 2 +- .../tools/nsc/interpreter/CompletionAware.scala | 46 ++-- .../tools/nsc/interpreter/CompletionOutput.scala | 2 +- .../nsc/interpreter/ConsoleReaderHelper.scala | 10 +- .../scala/tools/nsc/interpreter/Delimited.scala | 6 +- .../scala/tools/nsc/interpreter/ExprTyper.scala | 9 +- .../scala/tools/nsc/interpreter/ILoop.scala | 55 +++-- .../scala/tools/nsc/interpreter/IMain.scala | 199 
++++++++--------- .../scala/tools/nsc/interpreter/ISettings.scala | 10 +- .../scala/tools/nsc/interpreter/Imports.scala | 22 +- .../tools/nsc/interpreter/InteractiveReader.scala | 12 +- .../tools/nsc/interpreter/JLineCompletion.scala | 12 +- .../scala/tools/nsc/interpreter/JLineReader.scala | 8 +- .../scala/tools/nsc/interpreter/Logger.scala | 6 +- .../scala/tools/nsc/interpreter/LoopCommands.scala | 38 ++-- .../tools/nsc/interpreter/MemberHandlers.scala | 26 +-- .../scala/tools/nsc/interpreter/NamedParam.scala | 6 +- .../scala/tools/nsc/interpreter/Naming.scala | 2 +- .../scala/tools/nsc/interpreter/Parsed.scala | 14 +- .../scala/tools/nsc/interpreter/Phased.scala | 30 +-- .../scala/tools/nsc/interpreter/Power.scala | 152 ++++++------- .../scala/tools/nsc/interpreter/ReplConfig.scala | 24 +- .../scala/tools/nsc/interpreter/ReplProps.scala | 6 +- .../scala/tools/nsc/interpreter/ReplStrings.scala | 2 +- .../scala/tools/nsc/interpreter/RichClass.scala | 7 +- .../scala/tools/nsc/interpreter/SimpleReader.scala | 8 +- .../scala/tools/nsc/interpreter/TypeStrings.scala | 18 +- .../scala/tools/nsc/interpreter/package.scala | 54 ++--- .../tools/nsc/interpreter/session/History.scala | 10 +- .../nsc/interpreter/session/SimpleHistory.scala | 6 +- src/compiler/scala/tools/nsc/io/Fileish.scala | 52 ++--- src/compiler/scala/tools/nsc/io/Jar.scala | 24 +- src/compiler/scala/tools/nsc/io/MsilFile.scala | 2 +- src/compiler/scala/tools/nsc/io/Pickler.scala | 74 +++---- src/compiler/scala/tools/nsc/io/Socket.scala | 8 +- src/compiler/scala/tools/nsc/io/SourceReader.scala | 2 +- src/compiler/scala/tools/nsc/io/package.scala | 22 +- .../scala/tools/nsc/javac/JavaParsers.scala | 8 +- .../scala/tools/nsc/javac/JavaScanners.scala | 48 ++-- .../scala/tools/nsc/javac/JavaTokens.scala | 12 +- .../scala/tools/nsc/matching/MatchSupport.scala | 38 ++-- src/compiler/scala/tools/nsc/matching/Matrix.scala | 48 ++-- .../tools/nsc/matching/ParallelMatching.scala | 6 +- .../scala/tools/nsc/matching/PatternBindings.scala | 2 +- .../scala/tools/nsc/matching/Patterns.scala | 48 ++-- .../scala/tools/nsc/settings/AbsSettings.scala | 16 +- .../tools/nsc/settings/AdvancedScalaSettings.scala | 148 ++++++------- .../scala/tools/nsc/settings/MutableSettings.scala | 10 +- .../scala/tools/nsc/settings/ScalaSettings.scala | 10 +- .../tools/nsc/settings/StandardScalaSettings.scala | 2 +- .../scala/tools/nsc/settings/Warnings.scala | 14 +- .../nsc/symtab/classfile/AbstractFileReader.scala | 12 +- .../nsc/symtab/classfile/ClassfileParser.scala | 20 +- .../tools/nsc/symtab/classfile/ICodeReader.scala | 6 +- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 208 +++++++++--------- .../tools/nsc/transform/SpecializeTypes.scala | 28 +-- .../scala/tools/nsc/transform/TailCalls.scala | 2 +- .../tools/nsc/transform/TypingTransformers.scala | 2 +- .../scala/tools/nsc/typechecker/Contexts.scala | 32 +-- .../tools/nsc/typechecker/DestructureTypes.scala | 20 +- .../scala/tools/nsc/typechecker/Duplicators.scala | 38 ++-- .../scala/tools/nsc/typechecker/Implicits.scala | 12 +- .../tools/nsc/typechecker/MethodSynthesis.scala | 70 +++--- .../scala/tools/nsc/typechecker/Namers.scala | 22 +- .../tools/nsc/typechecker/NamesDefaults.scala | 2 +- .../tools/nsc/typechecker/PatternMatching.scala | 102 ++++----- .../scala/tools/nsc/typechecker/TreeCheckers.scala | 14 +- .../tools/nsc/typechecker/TypeDiagnostics.scala | 16 +- .../scala/tools/nsc/typechecker/Typers.scala | 14 +- .../scala/tools/nsc/typechecker/Unapplies.scala | 10 +- 
src/compiler/scala/tools/nsc/util/ClassPath.scala | 44 ++-- .../scala/tools/nsc/util/CommandLineParser.scala | 8 +- .../scala/tools/nsc/util/JavaCharArrayReader.scala | 51 ++--- .../scala/tools/nsc/util/ScalaClassLoader.scala | 62 +++--- .../scala/tools/nsc/util/SimpleTracer.scala | 2 +- src/compiler/scala/tools/nsc/util/package.scala | 22 +- .../scala/tools/reflect/ToolBoxFactory.scala | 4 +- src/compiler/scala/tools/util/Javap.scala | 4 +- src/compiler/scala/tools/util/PathResolver.scala | 12 +- .../scala/util/continuations/ControlContext.scala | 4 +- .../library/scala/util/continuations/package.scala | 6 +- src/detach/plugin/scala/tools/detach/Detach.scala | 2 +- src/partest/scala/tools/partest/CompilerTest.scala | 3 +- src/partest/scala/tools/partest/SecurityTest.scala | 22 +- src/partest/scala/tools/partest/TestUtil.scala | 14 +- .../partest/instrumented/Instrumentation.scala | 1 + .../tools/partest/nest/ConsoleFileManager.scala | 20 +- .../scala/tools/partest/nest/ConsoleRunner.scala | 2 +- .../scala/tools/partest/nest/FileManager.scala | 8 +- src/partest/scala/tools/partest/nest/NestUI.scala | 20 +- .../tools/partest/nest/ReflectiveRunner.scala | 9 +- .../scala/tools/partest/nest/RunnerManager.scala | 8 +- .../scala/tools/partest/nest/RunnerUtils.scala | 46 ++-- src/partest/scala/tools/partest/package.scala | 8 +- .../scala/tools/partest/utils/PrintMgr.scala | 104 ++++----- .../scala/reflect/internal/AnnotationInfos.scala | 11 +- .../scala/reflect/internal/BuildUtils.scala | 2 - .../reflect/internal/ClassfileConstants.scala | 12 +- .../scala/reflect/internal/Definitions.scala | 137 ++++++------ .../reflect/internal/ExistentialsAndSkolems.scala | 1 - src/reflect/scala/reflect/internal/Importers.scala | 2 +- src/reflect/scala/reflect/internal/Names.scala | 64 +++--- src/reflect/scala/reflect/internal/Printers.scala | 4 +- src/reflect/scala/reflect/internal/Scopes.scala | 2 +- src/reflect/scala/reflect/internal/StdNames.scala | 242 ++++++++++----------- .../scala/reflect/internal/SymbolTable.scala | 3 +- src/reflect/scala/reflect/internal/Symbols.scala | 125 ++++++----- src/reflect/scala/reflect/internal/TreeGen.scala | 12 +- src/reflect/scala/reflect/internal/TreeInfo.scala | 58 ++--- src/reflect/scala/reflect/internal/Trees.scala | 2 +- .../scala/reflect/internal/TypeDebugging.scala | 4 +- src/reflect/scala/reflect/internal/Types.scala | 196 ++++++++--------- .../reflect/internal/pickling/PickleBuffer.scala | 2 +- .../reflect/internal/pickling/PickleFormat.scala | 2 +- .../reflect/internal/pickling/UnPickler.scala | 14 +- .../scala/reflect/internal/util/Collections.scala | 54 ++--- .../scala/reflect/internal/util/HashSet.scala | 4 +- .../scala/reflect/internal/util/Origins.scala | 2 - .../scala/reflect/internal/util/Position.scala | 2 +- .../scala/reflect/internal/util/SourceFile.scala | 9 +- .../scala/reflect/internal/util/StringOps.scala | 46 ++-- .../scala/reflect/internal/util/TableDef.scala | 8 +- .../internal/util/TraceSymbolActivity.scala | 4 +- .../scala/reflect/internal/util/WeakHashSet.scala | 3 - src/reflect/scala/reflect/io/AbstractFile.scala | 12 +- src/reflect/scala/reflect/io/Directory.scala | 17 +- src/reflect/scala/reflect/io/File.scala | 104 ++++----- src/reflect/scala/reflect/io/Path.scala | 34 +-- src/reflect/scala/reflect/io/PlainFile.scala | 12 +- src/reflect/scala/reflect/io/Streamable.scala | 10 +- .../scala/reflect/io/VirtualDirectory.scala | 5 +- src/reflect/scala/reflect/io/VirtualFile.scala | 4 +- src/reflect/scala/reflect/io/ZipArchive.scala | 10 +- 
src/reflect/scala/reflect/macros/TreeBuilder.scala | 1 - .../scala/reflect/runtime/JavaMirrors.scala | 19 +- .../scala/reflect/runtime/JavaUniverse.scala | 5 +- .../reflect/runtime/SynchronizedSymbols.scala | 4 +- src/reflect/scala/reflect/runtime/package.scala | 2 +- 193 files changed, 2581 insertions(+), 2662 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala index a72233274e..9e59b40236 100644 --- a/src/compiler/scala/reflect/reify/Errors.scala +++ b/src/compiler/scala/reflect/reify/Errors.scala @@ -21,10 +21,10 @@ trait Errors { throw new ReificationException(defaultErrorPosition, msg) } - def CannotReifySymbol(sym: Symbol) = { - val msg = "implementation restriction: cannot reify symbol %s (%s)".format(sym, sym.accurateKindString) - throw new ReificationException(defaultErrorPosition, msg) - } + // def CannotReifySymbol(sym: Symbol) = { + // val msg = "implementation restriction: cannot reify symbol %s (%s)".format(sym, sym.accurateKindString) + // throw new ReificationException(defaultErrorPosition, msg) + // } def CannotReifyWeakType(details: Any) = { val msg = "cannot create a TypeTag" + details + ": use WeakTypeTag instead" diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala index 6554947f88..d0f8ae76e2 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala @@ -91,20 +91,20 @@ trait GenUtils { def termPath(fullname: String): Tree = path(fullname, newTermName) /** An (unreified) path that refers to type definition with given fully qualified name */ - def typePath(fullname: String): Tree = path(fullname, newTypeName) - - def isTough(tpe: Type) = { - def isTough(tpe: Type) = tpe match { - case _: RefinedType => true - case _: ExistentialType => true - case _: ClassInfoType => true - case _: MethodType => true - case _: PolyType => true - case _ => false - } + // def typePath(fullname: String): Tree = path(fullname, newTypeName) - tpe != null && (tpe exists isTough) - } + // def isTough(tpe: Type) = { + // def isTough(tpe: Type) = tpe match { + // case _: RefinedType => true + // case _: ExistentialType => true + // case _: ClassInfoType => true + // case _: MethodType => true + // case _: PolyType => true + // case _ => false + // } + + // tpe != null && (tpe exists isTough) + // } object TypedOrAnnotated { def unapply(tree: Tree): Option[Tree] = tree match { @@ -117,14 +117,14 @@ trait GenUtils { } } - def isAnnotated(tpe: Type) = { - def isAnnotated(tpe: Type) = tpe match { - case _: AnnotatedType => true - case _ => false - } + // def isAnnotated(tpe: Type) = { + // def isAnnotated(tpe: Type) = tpe match { + // case _: AnnotatedType => true + // case _ => false + // } - tpe != null && (tpe exists isAnnotated) - } + // tpe != null && (tpe exists isAnnotated) + // } def isSemiConcreteTypeMember(tpe: Type) = tpe match { case TypeRef(SingleType(_, _), sym, _) if sym.isAbstractType && !sym.isExistential => true diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala index 92d951c3a1..cccf080dbf 100644 --- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala +++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala @@ -1,6 +1,8 @@ package scala.reflect.reify package phases +import scala.collection.{ mutable } + trait Metalevels { self: Reifier => @@ -101,7 +103,7 @@ 
trait Metalevels { */ val metalevels = new Transformer { var insideSplice = false - var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]() + val inlineableBindings = mutable.Map[TermName, Tree]() def withinSplice[T](op: => T) = { val old = insideSplice diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala index 99118c4f2e..6e34d64847 100644 --- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala +++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala @@ -15,7 +15,7 @@ trait SymbolTables { private[SymbolTable] val original: Option[List[Tree]] = None) { def syms: List[Symbol] = symtab.keys.toList - def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined) + // def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined) // def aliases: Map[Symbol, List[TermName]] = aliases.distinct groupBy (_._1) mapValues (_ map (_._2)) diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala index fde61e9564..d0fefdaa03 100644 --- a/src/compiler/scala/tools/ant/sabbus/Settings.scala +++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala @@ -10,7 +10,7 @@ package scala.tools.ant.sabbus import java.io.File -import org.apache.tools.ant.types.{Path, Reference} +import org.apache.tools.ant.types.Path class Settings { diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala index 2a624875ee..c9df9f9145 100644 --- a/src/compiler/scala/tools/cmd/FromString.scala +++ b/src/compiler/scala/tools/cmd/FromString.scala @@ -25,17 +25,17 @@ abstract class FromString[+T](implicit t: ru.TypeTag[T]) extends PartialFunction object FromString { // We need these because we clash with the String => Path implicits. - private def toFile(s: String) = new File(new java.io.File(s)) + // private def toFile(s: String) = new File(new java.io.File(s)) private def toDir(s: String) = new Directory(new java.io.File(s)) /** Path related stringifiers. */ - val ExistingFile: FromString[File] = new FromString[File]()(tagOfFile) { - override def isDefinedAt(s: String) = toFile(s).isFile - def apply(s: String): File = - if (isDefinedAt(s)) toFile(s) - else cmd.runAndExit(println("'%s' is not an existing file." format s)) - } + // val ExistingFile: FromString[File] = new FromString[File]()(tagOfFile) { + // override def isDefinedAt(s: String) = toFile(s).isFile + // def apply(s: String): File = + // if (isDefinedAt(s)) toFile(s) + // else cmd.runAndExit(println("'%s' is not an existing file." 
format s)) + // } val ExistingDir: FromString[Directory] = new FromString[Directory]()(tagOfDirectory) { override def isDefinedAt(s: String) = toDir(s).isDirectory def apply(s: String): Directory = diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala index d4f2060f81..4f1620d61a 100644 --- a/src/compiler/scala/tools/cmd/Reference.scala +++ b/src/compiler/scala/tools/cmd/Reference.scala @@ -26,7 +26,7 @@ trait Reference extends Spec { def isUnaryOption(s: String) = unary contains toOpt(s) def isBinaryOption(s: String) = binary contains toOpt(s) def isExpandOption(s: String) = expansionMap contains toOpt(s) - def isAnyOption(s: String) = isUnaryOption(s) || isBinaryOption(s) || isExpandOption(s) + // def isAnyOption(s: String) = isUnaryOption(s) || isBinaryOption(s) || isExpandOption(s) def expandArg(arg: String) = expansionMap.getOrElse(fromOpt(arg), List(arg)) diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 5be819c134..6d523552b8 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -26,7 +26,7 @@ trait CompilationUnits { self: Global => class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { /** the fresh name creator */ - var fresh: FreshNameCreator = new FreshNameCreator.Default + val fresh: FreshNameCreator = new FreshNameCreator.Default def freshTermName(prefix: String): TermName = newTermName(fresh.newName(prefix)) def freshTypeName(prefix: String): TypeName = newTypeName(fresh.newName(prefix)) @@ -108,16 +108,16 @@ trait CompilationUnits { self: Global => override def toString() = source.toString() - def clear() { - fresh = new FreshNameCreator.Default - body = EmptyTree - depends.clear() - defined.clear() - synthetics.clear() - toCheck.clear() - checkedFeatures = Set() - icode.clear() - } + // def clear() { + // fresh = new FreshNameCreator.Default + // body = EmptyTree + // depends.clear() + // defined.clear() + // synthetics.clear() + // toCheck.clear() + // checkedFeatures = Set() + // icode.clear() + // } } } diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index 521f788fa1..11ee34af99 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -29,7 +29,7 @@ class StandardCompileServer extends SocketServer { var shutdown = false var verbose = false - val versionMsg = "Fast " + Properties.versionMsg + // val versionMsg = "Fast " + Properties.versionMsg val MaxCharge = 0.8 diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 829e097714..577d28f5f6 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -15,7 +15,7 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { type Setting = Settings#Setting /** file extensions of files that the compiler can process */ - lazy val fileEndings = Properties.fileEndings + // lazy val fileEndings = Properties.fileEndings private val processArgumentsResult = if (shouldProcessArguments) processArguments @@ -40,8 +40,8 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { """.stripMargin.trim + "\n" def shortUsage = "Usage: %s " format cmdName - def createUsagePreface(shouldExplain: Boolean) = - if (shouldExplain) 
shortUsage + "\n" + explainAdvanced else "" + // def createUsagePreface(shouldExplain: Boolean) = + // if (shouldExplain) shortUsage + "\n" + explainAdvanced else "" /** Creates a help message for a subset of options based on cond */ def createUsageMsg(cond: Setting => Boolean): String = { diff --git a/src/compiler/scala/tools/nsc/CompilerRun.scala b/src/compiler/scala/tools/nsc/CompilerRun.scala index 6746b08155..daad704534 100644 --- a/src/compiler/scala/tools/nsc/CompilerRun.scala +++ b/src/compiler/scala/tools/nsc/CompilerRun.scala @@ -1,21 +1,21 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky - */ +// /* NSC -- new Scala compiler +// * Copyright 2005-2013 LAMP/EPFL +// * @author Martin Odersky +// */ -package scala.tools.nsc +// package scala.tools.nsc -class CompilerRun { - def firstPhase: Phase = NoPhase - def terminalPhase: Phase = NoPhase - def namerPhase: Phase = NoPhase - def typerPhase: Phase = NoPhase - def refchecksPhase: Phase = NoPhase - def explicitouterPhase: Phase = NoPhase - def erasurePhase: Phase = NoPhase - def flattenPhase: Phase = NoPhase - def mixinPhase: Phase = NoPhase - def icodePhase: Phase = NoPhase - def phaseNamed(name: String): Phase = NoPhase -} +// class CompilerRun { +// def firstPhase: Phase = NoPhase +// def terminalPhase: Phase = NoPhase +// def namerPhase: Phase = NoPhase +// def typerPhase: Phase = NoPhase +// def refchecksPhase: Phase = NoPhase +// def explicitouterPhase: Phase = NoPhase +// def erasurePhase: Phase = NoPhase +// def flattenPhase: Phase = NoPhase +// def mixinPhase: Phase = NoPhase +// def icodePhase: Phase = NoPhase +// def phaseNamed(name: String): Phase = NoPhase +// } diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 13bec828ca..8802c3ec80 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -70,7 +70,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def this(settings: Settings) = this(settings, new ConsoleReporter(settings)) - def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym) + // def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym) def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase @@ -265,14 +265,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg)) def informProgress(msg: String) = if (settings.verbose.value) inform("[" + msg + "]") - def inform[T](msg: String, value: T): T = returning(value)(x => inform(msg + x)) + // def inform[T](msg: String, value: T): T = returning(value)(x => inform(msg + x)) def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start)) def logError(msg: String, t: Throwable): Unit = () - def logAfterEveryPhase[T](msg: String)(op: => T) { - log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op)) - } + // def logAfterEveryPhase[T](msg: String)(op: => T) { + // log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op)) + // } override def shouldLogAtThisPhase = settings.log.isSetByUser && ( (settings.log containsPhase globalPhase) || (settings.log containsPhase phase) @@ -419,8 +419,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } /** Switch to turn on detailed type logs */ - 
var printTypings = settings.Ytyperdebug.value - var printInfers = settings.Yinferdebug.value + val printTypings = settings.Ytyperdebug.value + val printInfers = settings.Yinferdebug.value // phaseName = "parser" object syntaxAnalyzer extends { @@ -639,11 +639,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } // phaseName = "SAMPLE PHASE" - object sampleTransform extends { - val global: Global.this.type = Global.this - val runsAfter = List[String]() - val runsRightAfter = None - } with SampleTransform + // object sampleTransform extends { + // val global: Global.this.type = Global.this + // val runsAfter = List[String]() + // val runsRightAfter = None + // } with SampleTransform /** The checkers are for validating the compiler data structures * at phase boundaries. @@ -778,7 +778,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Returns List of (phase, value) pairs, including only those * where the value compares unequal to the previous phase's value. */ - def afterEachPhase[T](op: => T): List[(Phase, T)] = { + def afterEachPhase[T](op: => T): List[(Phase, T)] = { // used in tests phaseDescriptors.map(_.ownPhase).filterNot(_ eq NoPhase).foldLeft(List[(Phase, T)]()) { (res, ph) => val value = exitingPhase(ph)(op) if (res.nonEmpty && res.head._2 == value) res @@ -790,17 +790,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter) * phase transitions where the result of the operation gave a different * list than it had when run during the previous phase. */ - def changesAfterEachPhase[T](op: => List[T]): List[ChangeAfterPhase[T]] = { - val ops = ((NoPhase, Nil)) :: afterEachPhase(op) - - ops sliding 2 map { - case (_, before) :: (ph, after) :: Nil => - val lost = before filterNot (after contains _) - val gained = after filterNot (before contains _) - ChangeAfterPhase(ph, lost, gained) - case _ => ??? - } toList - } + // def changesAfterEachPhase[T](op: => List[T]): List[ChangeAfterPhase[T]] = { + // val ops = ((NoPhase, Nil)) :: afterEachPhase(op) + + // ops sliding 2 map { + // case (_, before) :: (ph, after) :: Nil => + // val lost = before filterNot (after contains _) + // val gained = after filterNot (before contains _) + // ChangeAfterPhase(ph, lost, gained) + // case _ => ??? 
+ // } toList + // } private def numberedPhase(ph: Phase) = "%2d/%s".format(ph.id, ph.name) case class ChangeAfterPhase[+T](ph: Phase, lost: List[T], gained: List[T]) { @@ -811,14 +811,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) override def toString = mkStr("Lost", lost) + mkStr("Gained", gained) } - def describeAfterEachPhase[T](op: => T): List[String] = - afterEachPhase(op) map { case (ph, t) => "[after %-15s] %s".format(numberedPhase(ph), t) } + // def describeAfterEachPhase[T](op: => T): List[String] = + // afterEachPhase(op) map { case (ph, t) => "[after %-15s] %s".format(numberedPhase(ph), t) } - def describeAfterEveryPhase[T](op: => T): String = - describeAfterEachPhase(op) map (" " + _ + "\n") mkString + // def describeAfterEveryPhase[T](op: => T): String = + // describeAfterEachPhase(op) map (" " + _ + "\n") mkString - def printAfterEachPhase[T](op: => T): Unit = - describeAfterEachPhase(op) foreach (m => println(" " + m)) + // def printAfterEachPhase[T](op: => T): Unit = + // describeAfterEachPhase(op) foreach (m => println(" " + m)) // ------------ Invalidations --------------------------------- @@ -1057,7 +1057,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) @inline final def exitingPostErasure[T](op: => T): T = exitingPhase(currentRun.posterasurePhase)(op) @inline final def exitingExplicitOuter[T](op: => T): T = exitingPhase(currentRun.explicitouterPhase)(op) @inline final def exitingFlatten[T](op: => T): T = exitingPhase(currentRun.flattenPhase)(op) - @inline final def exitingIcode[T](op: => T): T = exitingPhase(currentRun.icodePhase)(op) + // @inline final def exitingIcode[T](op: => T): T = exitingPhase(currentRun.icodePhase)(op) @inline final def exitingMixin[T](op: => T): T = exitingPhase(currentRun.mixinPhase)(op) @inline final def exitingPickler[T](op: => T): T = exitingPhase(currentRun.picklerPhase)(op) @inline final def exitingRefchecks[T](op: => T): T = exitingPhase(currentRun.refchecksPhase)(op) @@ -1071,21 +1071,21 @@ class Global(var currentSettings: Settings, var reporter: Reporter) @inline final def enteringMixin[T](op: => T): T = enteringPhase(currentRun.mixinPhase)(op) @inline final def enteringPickler[T](op: => T): T = enteringPhase(currentRun.picklerPhase)(op) @inline final def enteringRefchecks[T](op: => T): T = enteringPhase(currentRun.refchecksPhase)(op) - @inline final def enteringSpecialize[T](op: => T): T = enteringPhase(currentRun.specializePhase)(op) + // @inline final def enteringSpecialize[T](op: => T): T = enteringPhase(currentRun.specializePhase)(op) @inline final def enteringTyper[T](op: => T): T = enteringPhase(currentRun.typerPhase)(op) @inline final def enteringUncurry[T](op: => T): T = enteringPhase(currentRun.uncurryPhase)(op) - def explainContext(c: analyzer.Context): String = ( - if (c == null) "" else ( - """| context owners: %s - | - |Enclosing block or template: - |%s""".format( - c.owner.ownerChain.takeWhile(!_.isPackageClass).mkString(" -> "), - nodePrinters.nodeToString(c.enclClassOrMethod.tree) - ) - ) - ) + // def explainContext(c: analyzer.Context): String = ( + // if (c == null) "" else ( + // """| context owners: %s + // | + // |Enclosing block or template: + // |%s""".format( + // c.owner.ownerChain.takeWhile(!_.isPackageClass).mkString(" -> "), + // nodePrinters.nodeToString(c.enclClassOrMethod.tree) + // ) + // ) + // ) // Owners up to and including the first package class. 
private def ownerChainString(sym: Symbol): String = ( if (sym == null) "" @@ -1098,8 +1098,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) pairs.toList collect { case (k, v) if v != null => "%20s: %s".format(k, v) } mkString "\n" ) - def explainTree(t: Tree): String = formatExplain( - ) + // def explainTree(t: Tree): String = formatExplain( + // ) /** Don't want to introduce new errors trying to report errors, * so swallow exceptions. @@ -1158,7 +1158,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } def newUnitParser(code: String) = new syntaxAnalyzer.UnitParser(newCompilationUnit(code)) - def newUnitScanner(code: String) = new syntaxAnalyzer.UnitScanner(newCompilationUnit(code)) + // def newUnitScanner(code: String) = new syntaxAnalyzer.UnitScanner(newCompilationUnit(code)) def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code)) def newSourceFile(code: String) = new BatchSourceFile("", code) @@ -1181,9 +1181,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings) val allConditionalWarnings = List(deprecationWarnings0, uncheckedWarnings0, featureWarnings, inlinerWarnings) - // for sbt's benefit - def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList - def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList + def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList // used in sbt + def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList // used in sbt var reportedFeature = Set[Symbol]() @@ -1350,7 +1349,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val namerPhase = phaseNamed("namer") // val packageobjectsPhase = phaseNamed("packageobjects") val typerPhase = phaseNamed("typer") - val inlineclassesPhase = phaseNamed("inlineclasses") + // val inlineclassesPhase = phaseNamed("inlineclasses") // val superaccessorsPhase = phaseNamed("superaccessors") val picklerPhase = phaseNamed("pickler") val refchecksPhase = phaseNamed("refchecks") @@ -1363,7 +1362,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val erasurePhase = phaseNamed("erasure") val posterasurePhase = phaseNamed("posterasure") // val lazyvalsPhase = phaseNamed("lazyvals") - val lambdaliftPhase = phaseNamed("lambdalift") + // val lambdaliftPhase = phaseNamed("lambdalift") // val constructorsPhase = phaseNamed("constructors") val flattenPhase = phaseNamed("flatten") val mixinPhase = phaseNamed("mixin") @@ -1373,11 +1372,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers") val closelimPhase = phaseNamed("closelim") val dcePhase = phaseNamed("dce") - val jvmPhase = phaseNamed("jvm") + // val jvmPhase = phaseNamed("jvm") // val msilPhase = phaseNamed("msil") def runIsAt(ph: Phase) = globalPhase.id == ph.id - def runIsPast(ph: Phase) = globalPhase.id > ph.id + // def runIsPast(ph: Phase) = globalPhase.id > ph.id // def runIsAtBytecodeGen = (runIsAt(jvmPhase) || runIsAt(msilPhase)) def runIsAtOptimiz = { runIsAt(inlinerPhase) || // listing phases in full for robustness when -Ystop-after has been given. @@ -1743,7 +1742,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // and forScaladoc default to onlyPresentation, which is the same as defaulting // to false except in old code. 
The downside is that this leaves us calling a // deprecated method: but I see no simple way out, so I leave it for now. - def forJVM = settings.target.value startsWith "jvm" + // def forJVM = settings.target.value startsWith "jvm" override def forMSIL = settings.target.value startsWith "msil" def forInteractive = false def forScaladoc = false diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala index 3c75429311..e36e154925 100644 --- a/src/compiler/scala/tools/nsc/ObjectRunner.scala +++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala @@ -13,8 +13,8 @@ import util.Exceptional.unwrap trait CommonRunner { /** Check whether a class with the specified name * exists on the specified class path. */ - def classExists(urls: List[URL], objectName: String): Boolean = - ScalaClassLoader.classExists(urls, objectName) + // def classExists(urls: List[URL], objectName: String): Boolean = + // ScalaClassLoader.classExists(urls, objectName) /** Run a given object, specified by name, using a * specified classpath and argument list. diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala index aad70a9c5e..e81d3ebc8a 100644 --- a/src/compiler/scala/tools/nsc/Phases.scala +++ b/src/compiler/scala/tools/nsc/Phases.scala @@ -20,7 +20,7 @@ object Phases { } val values = new Array[Cell](MaxPhases + 1) def results = values filterNot (_ == null) - def apply(ph: Phase): T = values(ph.id).value + // def apply(ph: Phase): T = values(ph.id).value def update(ph: Phase, value: T): Unit = values(ph.id) = Cell(ph, value) } /** A class for recording the elapsed time of each phase in the @@ -38,7 +38,7 @@ object Phases { >> ("ms" -> (_.value)) >+ " " << ("share" -> (_.value.toDouble * 100 / total formatted "%.2f")) } - def formatted = "" + table() + // def formatted = "" + table() } } diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index 55fd196716..028fc24efb 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -21,5 +21,5 @@ object Properties extends scala.util.PropertiesTrait { // derived values def isEmacsShell = propOrEmpty("env.emacs") != "" - def fileEndings = fileEndingString.split("""\|""").toList + // def fileEndings = fileEndingString.split("""\|""").toList } diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index 0b307a861e..344a60903a 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -48,7 +48,7 @@ class ScriptRunner extends HasCompileSocket { case x => x } - def isScript(settings: Settings) = settings.script.value != "" + // def isScript(settings: Settings) = settings.script.value != "" /** Choose a jar filename to hold the compiled version of a script. */ private def jarFileFor(scriptFile: String)= File( diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index 21407289db..40f97222a9 100755 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -22,9 +22,9 @@ trait DocComments { self: Global => val docComments = mutable.HashMap[Symbol, DocComment]() /** Associate comment with symbol `sym` at position `pos`. 
*/ - def docComment(sym: Symbol, docStr: String, pos: Position = NoPosition) = - if ((sym ne null) && (sym ne NoSymbol)) - docComments += (sym -> DocComment(docStr, pos)) + // def docComment(sym: Symbol, docStr: String, pos: Position = NoPosition) = + // if ((sym ne null) && (sym ne NoSymbol)) + // docComments += (sym -> DocComment(docStr, pos)) /** The raw doc comment of symbol `sym`, as it appears in the source text, "" if missing. */ @@ -120,7 +120,7 @@ trait DocComments { self: Global => getDocComment(sym) map getUseCases getOrElse List() } - def useCases(sym: Symbol): List[(Symbol, String, Position)] = useCases(sym, sym.enclClass) + // def useCases(sym: Symbol): List[(Symbol, String, Position)] = useCases(sym, sym.enclClass) /** Returns the javadoc format of doc comment string `s`, including wiki expansion */ diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala index 0414e0f123..d0aa004c9a 100644 --- a/src/compiler/scala/tools/nsc/ast/Printers.scala +++ b/src/compiler/scala/tools/nsc/ast/Printers.scala @@ -200,91 +200,17 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => override def printTree(tree: Tree) { print(safe(tree)) } } - class TreeMatchTemplate { - // non-trees defined in Trees - // - // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int) - // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position]) - // - def apply(t: Tree): Unit = t match { - // eliminated by typer - case Annotated(annot, arg) => - case AssignOrNamedArg(lhs, rhs) => - case DocDef(comment, definition) => - case Import(expr, selectors) => - - // eliminated by refchecks - case ModuleDef(mods, name, impl) => - case TypeTreeWithDeferredRefCheck() => - - // eliminated by erasure - case TypeDef(mods, name, tparams, rhs) => - case Typed(expr, tpt) => - - // eliminated by cleanup - case ApplyDynamic(qual, args) => - - // eliminated by explicitouter - case Alternative(trees) => - case Bind(name, body) => - case CaseDef(pat, guard, body) => - case Star(elem) => - case UnApply(fun, args) => - - // eliminated by lambdalift - case Function(vparams, body) => - - // eliminated by uncurry - case AppliedTypeTree(tpt, args) => - case CompoundTypeTree(templ) => - case ExistentialTypeTree(tpt, whereClauses) => - case SelectFromTypeTree(qual, selector) => - case SingletonTypeTree(ref) => - case TypeBoundsTree(lo, hi) => - - // survivors - case Apply(fun, args) => - case ArrayValue(elemtpt, trees) => - case Assign(lhs, rhs) => - case Block(stats, expr) => - case ClassDef(mods, name, tparams, impl) => - case DefDef(mods, name, tparams, vparamss, tpt, rhs) => - case EmptyTree => - case Ident(name) => - case If(cond, thenp, elsep) => - case LabelDef(name, params, rhs) => - case Literal(value) => - case Match(selector, cases) => - case New(tpt) => - case PackageDef(pid, stats) => - case Return(expr) => - case Select(qualifier, selector) => - case Super(qual, mix) => - case Template(parents, self, body) => - case This(qual) => - case Throw(expr) => - case Try(block, catches, finalizer) => - case TypeApply(fun, args) => - case TypeTree() => - case ValDef(mods, name, tpt, rhs) => - - // missing from the Trees comment - case Parens(args) => // only used during parsing - case SelectFromArray(qual, name, erasure) => // only used during erasure - } - } - def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, 
settings.Yshowsymkinds.value) def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value) def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true) def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer) - def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream)) - def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter)) + // def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream)) + // def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter)) def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer) - def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream)) - def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter)) + // def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream)) + // def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter)) override def newTreePrinter(writer: PrintWriter): TreePrinter = if (settings.Ycompacttrees.value) newCompactTreePrinter(writer) diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index 3acefe9441..0696b0e673 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -84,16 +84,16 @@ trait TreeDSL { def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectClass.tpe) def ANY_== (other: Tree) = fn(target, Any_==, other) def ANY_!= (other: Tree) = fn(target, Any_!=, other) - def OBJ_== (other: Tree) = fn(target, Object_==, other) + // def OBJ_== (other: Tree) = fn(target, Object_==, other) def OBJ_!= (other: Tree) = fn(target, Object_!=, other) def OBJ_EQ (other: Tree) = fn(target, Object_eq, other) def OBJ_NE (other: Tree) = fn(target, Object_ne, other) - def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other) - def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other) + // def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other) + // def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other) def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other) def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other) - def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other) + // def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other) // generic operations on ByteClass, IntClass, LongClass def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other) @@ -101,8 +101,8 @@ trait TreeDSL { def GEN_== (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.EQ), other) def GEN_!= (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.NE), other) - def BOOL_&& (other: Tree) = fn(target, Boolean_and, other) - def BOOL_|| (other: Tree) = fn(target, Boolean_or, other) + // def BOOL_&& (other: Tree) = fn(target, Boolean_and, other) + // def BOOL_|| (other: Tree) = fn(target, Boolean_or, other) /** Apply, Select, Match **/ def APPLY(params: Tree*) = Apply(target, params.toList) @@ -158,7 +158,7 @@ trait 
TreeDSL { def mkTree(rhs: Tree): ResultTreeType def ===(rhs: Tree): ResultTreeType - private var _mods: Modifiers = null + // private var _mods: Modifiers = null private var _tpt: Tree = null private var _pos: Position = null @@ -166,19 +166,19 @@ trait TreeDSL { _tpt = TypeTree(tp) this } - def withFlags(flags: Long*): this.type = { - if (_mods == null) - _mods = defaultMods + // def withFlags(flags: Long*): this.type = { + // if (_mods == null) + // _mods = defaultMods - _mods = flags.foldLeft(_mods)(_ | _) - this - } + // _mods = flags.foldLeft(_mods)(_ | _) + // this + // } def withPos(pos: Position): this.type = { _pos = pos this } - final def mods = if (_mods == null) defaultMods else _mods + final def mods = defaultMods // if (_mods == null) defaultMods else _mods final def tpt = if (_tpt == null) defaultTpt else _tpt final def pos = if (_pos == null) defaultPos else _pos } @@ -243,7 +243,7 @@ trait TreeDSL { } class TryStart(body: Tree, catches: List[CaseDef], fin: Tree) { def CATCH(xs: CaseDef*) = new TryStart(body, xs.toList, fin) - def FINALLY(x: Tree) = Try(body, catches, x) + // def FINALLY(x: Tree) = Try(body, catches, x) def ENDTRY = Try(body, catches, fin) } @@ -251,16 +251,16 @@ trait TreeDSL { def DEFAULT: CaseStart = new CaseStart(WILD.empty, EmptyTree) class SymbolMethods(target: Symbol) { - def BIND(body: Tree) = Bind(target, body) + // def BIND(body: Tree) = Bind(target, body) def IS_NULL() = REF(target) OBJ_EQ NULL - def NOT_NULL() = REF(target) OBJ_NE NULL + // def NOT_NULL() = REF(target) OBJ_NE NULL def GET() = fn(REF(target), nme.get) // name of nth indexed argument to a method (first parameter list), defaults to 1st - def ARG(idx: Int = 0) = Ident(target.paramss.head(idx)) + // def ARG(idx: Int = 0) = Ident(target.paramss.head(idx)) def ARGS = target.paramss.head - def ARGNAMES = ARGS map Ident + // def ARGNAMES = ARGS map Ident } /** Top level accessible. 
*/ @@ -268,31 +268,31 @@ trait TreeDSL { def THROW(sym: Symbol, msg: Tree): Throw = Throw(sym.tpe, msg.TOSTRING()) def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList)) - def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*) + // def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*) - def DEF(name: Name, tp: Type): DefTreeStart = DEF(name) withType tp - def DEF(name: Name): DefTreeStart = new DefTreeStart(name) + // def DEF(name: Name, tp: Type): DefTreeStart = DEF(name) withType tp + // def DEF(name: Name): DefTreeStart = new DefTreeStart(name) def DEF(sym: Symbol): DefSymStart = new DefSymStart(sym) - def VAL(name: Name, tp: Type): ValTreeStart = VAL(name) withType tp - def VAL(name: Name): ValTreeStart = new ValTreeStart(name) + // def VAL(name: Name, tp: Type): ValTreeStart = VAL(name) withType tp + // def VAL(name: Name): ValTreeStart = new ValTreeStart(name) def VAL(sym: Symbol): ValSymStart = new ValSymStart(sym) - def VAR(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.MUTABLE - def VAR(name: Name): ValTreeStart = VAL(name) withFlags Flags.MUTABLE - def VAR(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.MUTABLE + // def VAR(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.MUTABLE + // def VAR(name: Name): ValTreeStart = VAL(name) withFlags Flags.MUTABLE + // def VAR(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.MUTABLE - def LAZYVAL(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.LAZY - def LAZYVAL(name: Name): ValTreeStart = VAL(name) withFlags Flags.LAZY - def LAZYVAL(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.LAZY + // def LAZYVAL(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.LAZY + // def LAZYVAL(name: Name): ValTreeStart = VAL(name) withFlags Flags.LAZY + // def LAZYVAL(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.LAZY def AND(guards: Tree*) = if (guards.isEmpty) EmptyTree else guards reduceLeft gen.mkAnd - def OR(guards: Tree*) = - if (guards.isEmpty) EmptyTree - else guards reduceLeft gen.mkOr + // def OR(guards: Tree*) = + // if (guards.isEmpty) EmptyTree + // else guards reduceLeft gen.mkOr def IF(tree: Tree) = new IfStart(tree, EmptyTree) def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree) @@ -311,11 +311,11 @@ trait TreeDSL { case List(tree) if flattenUnary => tree case _ => Apply(TupleClass(trees.length).companionModule, trees: _*) } - def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match { - case Nil => gen.scalaUnitConstr - case List(tree) if flattenUnary => tree - case _ => AppliedTypeTree(REF(TupleClass(trees.length)), trees) - } + // def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match { + // case Nil => gen.scalaUnitConstr + // case List(tree) if flattenUnary => tree + // case _ => AppliedTypeTree(REF(TupleClass(trees.length)), trees) + // } /** Implicits - some of these should probably disappear **/ implicit def mkTreeMethods(target: Tree): TreeMethods = new TreeMethods(target) diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 983f355c58..ea7f674809 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -63,71 +63,71 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr) } // if it's a Match, mark the selector unchecked; otherwise nothing. 
- def mkUncheckedMatch(tree: Tree) = tree match { - case Match(selector, cases) => atPos(tree.pos)(Match(mkUnchecked(selector), cases)) - case _ => tree - } + // def mkUncheckedMatch(tree: Tree) = tree match { + // case Match(selector, cases) => atPos(tree.pos)(Match(mkUnchecked(selector), cases)) + // case _ => tree + // } - def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) { - // This can't be "Annotated(New(SwitchClass), expr)" because annotations - // are very picky about things and it crashes the compiler with "unexpected new". - Annotated(Ident(nme.synthSwitch), expr) - } + // def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) { + // // This can't be "Annotated(New(SwitchClass), expr)" because annotations + // // are very picky about things and it crashes the compiler with "unexpected new". + // Annotated(Ident(nme.synthSwitch), expr) + // } // TODO: would be so much nicer if we would know during match-translation (i.e., type checking) // whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum - class MatchMatcher { - def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig) - def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig) - def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig) - - def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef) - - def apply(matchExpr: Tree): Tree = matchExpr match { - // old-style match or virtpatmat switch - case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr ) - caseMatch(matchExpr, selector, cases, identity) - // old-style match or virtpatmat switch - case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr ) - caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m)) - // virtpatmat - case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if !settings.XoldPatmat.value => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr ) - caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher) - // optimized version of virtpatmat - case Block(stats, matchEndDef) if !settings.XoldPatmat.value && (stats forall treeInfo.hasSynthCaseSymbol) => - // the assumption is once we encounter a case, the remainder of the block will consist of cases - // the prologue may be empty, usually it is the valdef that stores the scrut - val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) - caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity) - // optimized version of virtpatmat - case Block(outerStats, orig@Block(stats, matchEndDef)) if !settings.XoldPatmat.value && (stats forall treeInfo.hasSynthCaseSymbol) => - val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) - caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m)) - case other => - unknownTree(other) - } - - def unknownTree(t: Tree): Tree = throw new MatchError(t) - def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr) - - def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] = - if (settings.XoldPatmat.value) cases 
- else cases filter { - case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false - case CaseDef(pat, guard, body) => true - } - } - - def mkCached(cvar: Symbol, expr: Tree): Tree = { - val cvarRef = mkUnattributedRef(cvar) - Block( - List( - If(Apply(Select(cvarRef, nme.eq), List(Literal(Constant(null)))), - Assign(cvarRef, expr), - EmptyTree)), - cvarRef - ) - } + // class MatchMatcher { + // def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig) + // def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig) + // def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig) + + // def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef) + + // def apply(matchExpr: Tree): Tree = matchExpr match { + // // old-style match or virtpatmat switch + // case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr ) + // caseMatch(matchExpr, selector, cases, identity) + // // old-style match or virtpatmat switch + // case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr ) + // caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m)) + // // virtpatmat + // case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if !settings.XoldPatmat.value => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr ) + // caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher) + // // optimized version of virtpatmat + // case Block(stats, matchEndDef) if !settings.XoldPatmat.value && (stats forall treeInfo.hasSynthCaseSymbol) => + // // the assumption is once we encounter a case, the remainder of the block will consist of cases + // // the prologue may be empty, usually it is the valdef that stores the scrut + // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) + // caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity) + // // optimized version of virtpatmat + // case Block(outerStats, orig@Block(stats, matchEndDef)) if !settings.XoldPatmat.value && (stats forall treeInfo.hasSynthCaseSymbol) => + // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) + // caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m)) + // case other => + // unknownTree(other) + // } + + // def unknownTree(t: Tree): Tree = throw new MatchError(t) + // def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr) + + // def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] = + // if (settings.XoldPatmat.value) cases + // else cases filter { + // case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false + // case CaseDef(pat, guard, body) => true + // } + // } + + // def mkCached(cvar: Symbol, expr: Tree): Tree = { + // val cvarRef = mkUnattributedRef(cvar) + // Block( + // List( + // If(Apply(Select(cvarRef, nme.eq), List(Literal(Constant(null)))), + // Assign(cvarRef, expr), + // EmptyTree)), + // cvarRef + // 
) + // } // Builds a tree of the form "{ lhs = rhs ; lhs }" def mkAssignAndReturn(lhs: Symbol, rhs: Tree): Tree = { @@ -152,8 +152,8 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { // def m: T = { if (m$ eq null) m$ = new m$class(...) m$ } // where (...) are eventual outer accessors - def mkCachedModuleAccessDef(accessor: Symbol, mvar: Symbol) = - DefDef(accessor, mkCached(mvar, newModule(accessor, mvar.tpe))) + // def mkCachedModuleAccessDef(accessor: Symbol, mvar: Symbol) = + // DefDef(accessor, mkCached(mvar, newModule(accessor, mvar.tpe))) def mkModuleAccessDef(accessor: Symbol, msym: Symbol) = DefDef(accessor, Select(This(msym.owner), msym)) @@ -165,8 +165,8 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { } // def m: T; - def mkModuleAccessDcl(accessor: Symbol) = - DefDef(accessor setFlag lateDEFERRED, EmptyTree) + // def mkModuleAccessDcl(accessor: Symbol) = + // DefDef(accessor setFlag lateDEFERRED, EmptyTree) def mkRuntimeCall(meth: Name, args: List[Tree]): Tree = mkRuntimeCall(meth, Nil, args) @@ -223,8 +223,8 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { if (isRepeatedParam) wildcardStar(arg) else arg /** Make forwarder to method `target`, passing all parameters in `params` */ - def mkForwarder(target: Tree, vparamss: List[List[Symbol]]) = - (target /: vparamss)((fn, vparams) => Apply(fn, vparams map paramToArg)) + // def mkForwarder(target: Tree, vparamss: List[List[Symbol]]) = + // (target /: vparamss)((fn, vparams) => Apply(fn, vparams map paramToArg)) /** Applies a wrapArray call to an array, making it a WrappedArray. * Don't let a reference type parameter be inferred, in case it's a singleton: @@ -264,24 +264,24 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { else mkCast(tree, pt) - def mkZeroContravariantAfterTyper(tp: Type): Tree = { - // contravariant -- for replacing an argument in a method call - // must use subtyping, as otherwise we miss types like `Any with Int` - val tree = - if (NullClass.tpe <:< tp) Literal(Constant(null)) - else if (UnitClass.tpe <:< tp) Literal(Constant()) - else if (BooleanClass.tpe <:< tp) Literal(Constant(false)) - else if (FloatClass.tpe <:< tp) Literal(Constant(0.0f)) - else if (DoubleClass.tpe <:< tp) Literal(Constant(0.0d)) - else if (ByteClass.tpe <:< tp) Literal(Constant(0.toByte)) - else if (ShortClass.tpe <:< tp) Literal(Constant(0.toShort)) - else if (IntClass.tpe <:< tp) Literal(Constant(0)) - else if (LongClass.tpe <:< tp) Literal(Constant(0L)) - else if (CharClass.tpe <:< tp) Literal(Constant(0.toChar)) - else mkCast(Literal(Constant(null)), tp) - - tree - } + // def mkZeroContravariantAfterTyper(tp: Type): Tree = { + // // contravariant -- for replacing an argument in a method call + // // must use subtyping, as otherwise we miss types like `Any with Int` + // val tree = + // if (NullClass.tpe <:< tp) Literal(Constant(null)) + // else if (UnitClass.tpe <:< tp) Literal(Constant()) + // else if (BooleanClass.tpe <:< tp) Literal(Constant(false)) + // else if (FloatClass.tpe <:< tp) Literal(Constant(0.0f)) + // else if (DoubleClass.tpe <:< tp) Literal(Constant(0.0d)) + // else if (ByteClass.tpe <:< tp) Literal(Constant(0.toByte)) + // else if (ShortClass.tpe <:< tp) Literal(Constant(0.toShort)) + // else if (IntClass.tpe <:< tp) Literal(Constant(0)) + // else if (LongClass.tpe <:< tp) Literal(Constant(0L)) + // else if (CharClass.tpe <:< tp) Literal(Constant(0.toChar)) + // else 
mkCast(Literal(Constant(null)), tp) + + // tree + // } /** Translate names in Select/Ident nodes to type names. */ diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala index 97227a5b6e..5c1ab29548 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala @@ -6,7 +6,7 @@ package scala.tools.nsc package ast -import scala.reflect.internal.HasFlags +// import scala.reflect.internal.HasFlags /** This class ... * @@ -39,6 +39,6 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo { case _ => super.firstDefinesClassOrObject(trees, name) } - def isInterface(mods: HasFlags, body: List[Tree]) = - mods.isTrait && (body forall isInterfaceMember) + // def isInterface(mods: HasFlags, body: List[Tree]) = + // mods.isTrait && (body forall isInterfaceMember) } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index efcde1f74f..501127865b 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -9,7 +9,8 @@ package scala.tools.nsc package ast.parser -import scala.collection.mutable.{ListBuffer, StringBuilder} +import scala.collection.{ mutable, immutable } +import mutable.{ ListBuffer, StringBuilder } import scala.reflect.internal.{ ModifierFlags => Flags } import scala.reflect.internal.Chars.{ isScalaLetter } import scala.reflect.internal.util.{ SourceFile, OffsetPosition } @@ -167,7 +168,7 @@ self => object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices val global: self.global.type = self.global - def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix) + // def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix) } def xmlLiteral : Tree = xmlp.xLiteral @@ -463,7 +464,7 @@ self => /* ------------- ERROR HANDLING ------------------------------------------- */ - var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) + val assumedClosingParens = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) private var inFunReturnType = false @inline private def fromWithinReturnType[T](body: => T): T = { @@ -640,7 +641,7 @@ self => case _ => false } - def isTypeIntro: Boolean = isTypeIntroToken(in.token) + // def isTypeIntro: Boolean = isTypeIntroToken(in.token) def isStatSeqEnd = in.token == RBRACE || in.token == EOF @@ -765,9 +766,9 @@ self => } } - def checkSize(kind: String, size: Int, max: Int) { - if (size > max) syntaxError("too many "+kind+", maximum = "+max, false) - } + // def checkSize(kind: String, size: Int, max: Int) { + // if (size > max) syntaxError("too many "+kind+", maximum = "+max, false) + // } def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) = if (treeInfo.isLeftAssoc(op) != leftAssoc) @@ -1218,10 +1219,10 @@ self => * EqualsExpr ::= `=' Expr * }}} */ - def equalsExpr(): Tree = { - accept(EQUALS) - expr() - } + // def equalsExpr(): Tree = { + // accept(EQUALS) + // expr() + // } def condExpr(): Tree = { if (in.token == LPAREN) { @@ -1964,7 +1965,7 @@ self => /** Default entry points into some pattern contexts. 
*/ def pattern(): Tree = noSeq.pattern() - def patterns(): List[Tree] = noSeq.patterns() + // def patterns(): List[Tree] = noSeq.patterns() def seqPatterns(): List[Tree] = seqOK.patterns() def xmlSeqPatterns(): List[Tree] = xmlSeqOK.patterns() // Called from xml parser def argumentPatterns(): List[Tree] = inParens { diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 1be5fb1782..b346ce0a14 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -10,7 +10,8 @@ import scala.reflect.internal.util._ import scala.reflect.internal.Chars._ import Tokens._ import scala.annotation.switch -import scala.collection.mutable.{ ListBuffer, ArrayBuffer } +import scala.collection.{ mutable, immutable } +import mutable.{ ListBuffer, ArrayBuffer } import scala.xml.Utility.{ isNameStart } /** See Parsers.scala / ParsersCommon for some explanation of ScannersCommon. @@ -26,7 +27,7 @@ trait ScannersCommon { trait ScannerCommon extends CommonTokenData { // things to fill in, in addition to buf, decodeUni which come from CharArrayReader - def warning(off: Int, msg: String): Unit + // def warning(off: Int, msg: String): Unit def error (off: Int, msg: String): Unit def incompleteInputError(off: Int, msg: String): Unit def deprecationWarning(off: Int, msg: String): Unit @@ -51,7 +52,7 @@ trait Scanners extends ScannersCommon { type Offset = Int /** An undefined offset */ - val NoOffset: Offset = -1 + // val NoOffset: Offset = -1 trait TokenData extends CommonTokenData { @@ -88,7 +89,7 @@ trait Scanners extends ScannersCommon { def isAtEnd = charOffset >= buf.length - def flush = { charOffset = offset; nextChar(); this } + // def flush = { charOffset = offset; nextChar(); this } def resume(lastCode: Int) = { token = lastCode @@ -100,7 +101,7 @@ trait Scanners extends ScannersCommon { /** the last error offset */ - var errOffset: Offset = NoOffset + // var errOffset: Offset = NoOffset /** A character buffer for literals */ @@ -1063,7 +1064,7 @@ trait Scanners extends ScannersCommon { def syntaxError(off: Offset, msg: String) { error(off, msg) token = ERROR - errOffset = off + // errOffset = off } /** generate an error at the current token offset @@ -1076,7 +1077,7 @@ trait Scanners extends ScannersCommon { def incompleteInputError(msg: String) { incompleteInputError(offset, msg) token = EOF - errOffset = offset + // errOffset = offset } override def toString() = token match { @@ -1241,7 +1242,7 @@ trait Scanners extends ScannersCommon { override val decodeUni: Boolean = !settings.nouescape.value // suppress warnings, throw exception on errors - def warning(off: Offset, msg: String): Unit = () + // def warning(off: Offset, msg: String): Unit = () def deprecationWarning(off: Offset, msg: String): Unit = () def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg) def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg) @@ -1252,7 +1253,7 @@ trait Scanners extends ScannersCommon { class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) { def this(unit: CompilationUnit) = this(unit, List()) - override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg) + // override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg) override def deprecationWarning(off: Offset, msg: String) = 
unit.deprecationWarning(unit.position(off), msg) override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg) override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg) @@ -1311,7 +1312,7 @@ trait Scanners extends ScannersCommon { } class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) { - var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) + val balance = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) init() @@ -1433,17 +1434,17 @@ trait Scanners extends ScannersCommon { else bp :: insertPatch(bps, patch) } - def leftColumn(offset: Int) = - if (offset == -1) -1 else column(lineStart(line(offset))) + // def leftColumn(offset: Int) = + // if (offset == -1) -1 else column(lineStart(line(offset))) - def rightColumn(offset: Int, default: Int) = - if (offset == -1) -1 - else { - val rlin = line(offset) - if (lineStart(rlin) == offset) column(offset) - else if (rlin + 1 < lineStart.length) column(lineStart(rlin + 1)) - else default - } + // def rightColumn(offset: Int, default: Int) = + // if (offset == -1) -1 + // else { + // val rlin = line(offset) + // if (lineStart(rlin) == offset) column(offset) + // else if (rlin + 1 < lineStart.length) column(lineStart(rlin + 1)) + // else default + // } def insertRBrace(): List[BracePatch] = { def insert(bps: List[BracePair]): List[BracePatch] = bps match { @@ -1486,16 +1487,16 @@ trait Scanners extends ScannersCommon { delete(bracePairs) } - def imbalanceMeasure: Int = { - def measureList(bps: List[BracePair]): Int = - (bps map measure).sum - def measure(bp: BracePair): Int = - (if (bp.lindent != bp.rindent) 1 else 0) + measureList(bp.nested) - measureList(bracePairs) - } + // def imbalanceMeasure: Int = { + // def measureList(bps: List[BracePair]): Int = + // (bps map measure).sum + // def measure(bp: BracePair): Int = + // (if (bp.lindent != bp.rindent) 1 else 0) + measureList(bp.nested) + // measureList(bracePairs) + // } - def improves(patches1: List[BracePatch]): Boolean = - imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure + // def improves(patches1: List[BracePatch]): Boolean = + // imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure override def error(offset: Int, msg: String) {} } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala index c3fd414426..be8e1bc8b4 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala @@ -6,14 +6,14 @@ package scala.tools.nsc package ast.parser -import scala.annotation.switch +// import scala.annotation.switch /** Common code between JavaTokens and Tokens. Not as much (and not as concrete) * as one might like because JavaTokens for no clear reason chose new numbers for * identical token sets. 
*/ abstract class Tokens { - import scala.reflect.internal.Chars._ + // import scala.reflect.internal.Chars._ /** special tokens */ final val EMPTY = -3 @@ -32,16 +32,16 @@ abstract class Tokens { def LPAREN: Int def RBRACE: Int - def isIdentifier(code: Int): Boolean + // def isIdentifier(code: Int): Boolean def isLiteral(code: Int): Boolean - def isKeyword(code: Int): Boolean - def isSymbol(code: Int): Boolean - - final def isSpace(at: Char) = at == ' ' || at == '\t' - final def isNewLine(at: Char) = at == CR || at == LF || at == FF - final def isBrace(code: Int) = code >= LPAREN && code <= RBRACE - final def isOpenBrace(code: Int) = isBrace(code) && (code % 2 == 0) - final def isCloseBrace(code: Int) = isBrace(code) && (code % 2 == 1) + // def isKeyword(code: Int): Boolean + // def isSymbol(code: Int): Boolean + + // final def isSpace(at: Char) = at == ' ' || at == '\t' + // final def isNewLine(at: Char) = at == CR || at == LF || at == FF + // final def isBrace(code: Int) = code >= LPAREN && code <= RBRACE + // final def isOpenBrace(code: Int) = isBrace(code) && (code % 2 == 0) + // final def isCloseBrace(code: Int) = isBrace(code) && (code % 2 == 1) } object Tokens extends Tokens { @@ -56,16 +56,16 @@ object Tokens extends Tokens { /** identifiers */ final val IDENTIFIER = 10 final val BACKQUOTED_IDENT = 11 - def isIdentifier(code: Int) = - code >= IDENTIFIER && code <= BACKQUOTED_IDENT + // def isIdentifier(code: Int) = + // code >= IDENTIFIER && code <= BACKQUOTED_IDENT - @switch def canBeginExpression(code: Int) = code match { - case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true - case LBRACE|LPAREN|LBRACKET|COMMENT => true - case IF|DO|WHILE|FOR|NEW|TRY|THROW => true - case NULL|THIS|TRUE|FALSE => true - case code => isLiteral(code) - } + // @switch def canBeginExpression(code: Int) = code match { + // case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true + // case LBRACE|LPAREN|LBRACKET|COMMENT => true + // case IF|DO|WHILE|FOR|NEW|TRY|THROW => true + // case NULL|THIS|TRUE|FALSE => true + // case code => isLiteral(code) + // } /** keywords */ final val IF = 20 @@ -113,16 +113,16 @@ object Tokens extends Tokens { final val MACRO = 62 // not yet used in 2.10 final val THEN = 63 // not yet used in 2.10 - def isKeyword(code: Int) = - code >= IF && code <= LAZY + // def isKeyword(code: Int) = + // code >= IF && code <= LAZY - @switch def isDefinition(code: Int) = code match { - case CLASS|TRAIT|OBJECT => true - case CASECLASS|CASEOBJECT => true - case DEF|VAL|VAR => true - case TYPE => true - case _ => false - } + // @switch def isDefinition(code: Int) = code match { + // case CLASS|TRAIT|OBJECT => true + // case CASECLASS|CASEOBJECT => true + // case DEF|VAL|VAR => true + // case TYPE => true + // case _ => false + // } /** special symbols */ final val COMMA = 70 @@ -141,8 +141,8 @@ object Tokens extends Tokens { final val AT = 83 final val VIEWBOUND = 84 - def isSymbol(code: Int) = - code >= COMMA && code <= VIEWBOUND + // def isSymbol(code: Int) = + // code >= COMMA && code <= VIEWBOUND /** parenthesis */ final val LPAREN = 90 diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 49b772ed2c..6dc2055121 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -26,15 +26,15 @@ abstract class TreeBuilder { def o2p(offset: Int): Position def r2p(start: Int, point: Int, end: Int): Position - def rootId(name: Name) = gen.rootId(name) + 
// def rootId(name: Name) = gen.rootId(name) def rootScalaDot(name: Name) = gen.rootScalaDot(name) def scalaDot(name: Name) = gen.scalaDot(name) def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) - def scalaAnyValConstr = scalaDot(tpnme.AnyVal) - def scalaAnyConstr = scalaDot(tpnme.Any) + // def scalaAnyValConstr = scalaDot(tpnme.AnyVal) + // def scalaAnyConstr = scalaDot(tpnme.Any) def scalaUnitConstr = scalaDot(tpnme.Unit) def productConstr = scalaDot(tpnme.Product) - def productConstrN(n: Int) = scalaDot(newTypeName("Product" + n)) + // def productConstrN(n: Int) = scalaDot(newTypeName("Product" + n)) def serializableConstr = scalaDot(tpnme.Serializable) def convertToTypeName(t: Tree) = gen.convertToTypeName(t) @@ -446,15 +446,15 @@ abstract class TreeBuilder { /** Create tree for a lifted expression XX-LIFTING */ - def makeLifted(gs: List[ValFrom], body: Tree): Tree = { - def combine(gs: List[ValFrom]): ValFrom = (gs: @unchecked) match { - case g :: Nil => g - case ValFrom(pos1, pat1, rhs1) :: gs2 => - val ValFrom(_, pat2, rhs2) = combine(gs2) - ValFrom(pos1, makeTuple(List(pat1, pat2), false), Apply(Select(rhs1, nme.zip), List(rhs2))) - } - makeForYield(List(combine(gs)), body) - } + // def makeLifted(gs: List[ValFrom], body: Tree): Tree = { + // def combine(gs: List[ValFrom]): ValFrom = (gs: @unchecked) match { + // case g :: Nil => g + // case ValFrom(pos1, pat1, rhs1) :: gs2 => + // val ValFrom(_, pat2, rhs2) = combine(gs2) + // ValFrom(pos1, makeTuple(List(pat1, pat2), false), Apply(Select(rhs1, nme.zip), List(rhs2))) + // } + // makeForYield(List(combine(gs)), body) + // } /** Create tree for a pattern alternative */ def makeAlternative(ts: List[Tree]): Tree = { diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index b62d5cb4e4..52fc3d08e1 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -281,12 +281,12 @@ trait BasicBlocks { } /** Insert instructions in 'is' immediately after index 'idx'. */ - def insertAfter(idx: Int, is: List[Instruction]) { - assert(closed, "Instructions can be replaced only after the basic block is closed") + // def insertAfter(idx: Int, is: List[Instruction]) { + // assert(closed, "Instructions can be replaced only after the basic block is closed") - instrs = instrs.patch(idx + 1, is, 0) - code.touched = true - } + // instrs = instrs.patch(idx + 1, is, 0) + // code.touched = true + // } /** Removes instructions found at the given positions. * @@ -436,10 +436,10 @@ trait BasicBlocks { ignore = true } - def exitIgnoreMode() { - assert(ignore, "Exit ignore mode when not in ignore mode: " + this) - ignore = false - } + // def exitIgnoreMode() { + // assert(ignore, "Exit ignore mode when not in ignore mode: " + this) + // ignore = false + // } /** Return the last instruction of this basic block. 
*/ def lastInstruction = @@ -498,15 +498,15 @@ trait BasicBlocks { override def hashCode = label * 41 + code.hashCode // Instead of it, rather use a printer - def print() { print(java.lang.System.out) } - - def print(out: java.io.PrintStream) { - out.println("block #"+label+" :") - foreach(i => out.println(" " + i)) - out.print("Successors: ") - successors.foreach((x: BasicBlock) => out.print(" "+x.label.toString())) - out.println() - } + // def print() { print(java.lang.System.out) } + + // def print(out: java.io.PrintStream) { + // out.println("block #"+label+" :") + // foreach(i => out.println(" " + i)) + // out.print("Successors: ") + // successors.foreach((x: BasicBlock) => out.print(" "+x.label.toString())) + // out.println() + // } private def succString = if (successors.isEmpty) "[S: N/A]" else successors.distinct.mkString("[S: ", ", ", "]") private def predString = if (predecessors.isEmpty) "[P: N/A]" else predecessors.distinct.mkString("[P: ", ", ", "]") diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala index f35996eeb9..e1732d5775 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala @@ -72,9 +72,9 @@ trait ExceptionHandlers { override def dup: Finalizer = new Finalizer(method, label, pos) } - object NoFinalizer extends Finalizer(null, newTermNameCached(""), NoPosition) { - override def startBlock: BasicBlock = sys.error("NoFinalizer cannot have a start block."); - override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block."); - override def dup = this - } + // object NoFinalizer extends Finalizer(null, newTermNameCached(""), NoPosition) { + // override def startBlock: BasicBlock = sys.error("NoFinalizer cannot have a start block."); + // override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block."); + // override def dup = this + // } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 720896d0b3..d521f893d1 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1960,12 +1960,12 @@ abstract class GenICode extends SubComponent { this } - def removeFinalizer(f: Tree): this.type = { - assert(cleanups.head contains f, - "Illegal nesting of cleanup operations: " + cleanups + " while exiting finalizer " + f); - cleanups = cleanups.tail - this - } + // def removeFinalizer(f: Tree): this.type = { + // assert(cleanups.head contains f, + // "Illegal nesting of cleanup operations: " + cleanups + " while exiting finalizer " + f); + // cleanups = cleanups.tail + // this + // } /** Prepare a new context upon entry into a method. * @@ -2044,14 +2044,14 @@ abstract class GenICode extends SubComponent { } /** Remove the given handler from the list of active exception handlers. */ - def removeActiveHandler(exh: ExceptionHandler): Unit = { - assert(handlerCount > 0 && handlers.head == exh, - "Wrong nesting of exception handlers." + this + " for " + exh) - handlerCount -= 1 - handlers = handlers.tail - debuglog("removed handler: " + exh); + // def removeActiveHandler(exh: ExceptionHandler): Unit = { + // assert(handlerCount > 0 && handlers.head == exh, + // "Wrong nesting of exception handlers." 
+ this + " for " + exh) + // handlerCount -= 1 + // handlers = handlers.tail + // debuglog("removed handler: " + exh); - } + // } /** Clone the current context */ def dup: Context = new Context(this) @@ -2339,7 +2339,7 @@ abstract class GenICode extends SubComponent { val locals: ListBuffer[Local] = new ListBuffer def add(l: Local) = locals += l - def remove(l: Local) = locals -= l + // def remove(l: Local) = locals -= l /** Return all locals that are in scope. */ def varsInScope: Buffer[Local] = outer.varsInScope.clone() ++= locals diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala index 07abe9d74f..4192d794f9 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala @@ -126,9 +126,9 @@ trait Members { override def toString() = symbol.fullName - def lookupField(s: Symbol) = fields find (_.symbol == s) + // def lookupField(s: Symbol) = fields find (_.symbol == s) def lookupMethod(s: Symbol) = methods find (_.symbol == s) - def lookupMethod(s: Name) = methods find (_.symbol.name == s) + // def lookupMethod(s: Name) = methods find (_.symbol.name == s) /* returns this methods static ctor if it has one. */ def lookupStaticCtor: Option[IMethod] = methods find (_.symbol.isStaticConstructor) @@ -159,7 +159,7 @@ trait Members { def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this def foreachBlock[U](f: BasicBlock => U): Unit = blocks foreach f - def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f) + // def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f) var native = false @@ -192,7 +192,7 @@ trait Members { } def addLocals(ls: List[Local]) = ls foreach addLocal - def addParams(as: List[Local]) = as foreach addParam + // def addParams(as: List[Local]) = as foreach addParam def lookupLocal(n: Name): Option[Local] = locals find (_.sym.name == n) def lookupLocal(sym: Symbol): Option[Local] = locals find (_.sym == sym) @@ -207,28 +207,28 @@ trait Members { override def toString() = symbol.fullName - def matchesSignature(other: IMethod) = { - (symbol.name == other.symbol.name) && - (params corresponds other.params)(_.kind == _.kind) && - (returnType == other.returnType) - } + // def matchesSignature(other: IMethod) = { + // (symbol.name == other.symbol.name) && + // (params corresponds other.params)(_.kind == _.kind) && + // (returnType == other.returnType) + // } import opcodes._ - def checkLocals(): Unit = { - def localsSet = (code.blocks flatMap { bb => - bb.iterator collect { - case LOAD_LOCAL(l) => l - case STORE_LOCAL(l) => l - } - }).toSet - - if (hasCode) { - log("[checking locals of " + this + "]") - locals filterNot localsSet foreach { l => - log("Local " + l + " is not declared in " + this) - } - } - } + // def checkLocals(): Unit = { + // def localsSet = (code.blocks flatMap { bb => + // bb.iterator collect { + // case LOAD_LOCAL(l) => l + // case STORE_LOCAL(l) => l + // } + // }).toSet + + // if (hasCode) { + // log("[checking locals of " + this + "]") + // locals filterNot localsSet foreach { l => + // log("Local " + l + " is not declared in " + this) + // } + // } + // } /** Merge together blocks that have a single successor which has a * single predecessor. Exception handlers are taken into account (they @@ -295,7 +295,7 @@ trait Members { var start: Int = _ /** Ending PC for this local's visibility range. 
*/ - var end: Int = _ + // var end: Int = _ /** PC-based ranges for this local variable's visibility */ var ranges: List[(Int, Int)] = Nil diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala index 0e7c75de50..6f7db042e6 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala @@ -111,7 +111,7 @@ trait Opcodes { self: ICodes => def producedTypes: List[TypeKind] = Nil /** This method returns the difference of size of the stack when the instruction is used */ - def difference = produced-consumed + // def difference = produced-consumed /** The corresponding position in the source file */ private var _pos: Position = NoPosition @@ -119,7 +119,7 @@ trait Opcodes { self: ICodes => def pos: Position = _pos /** Used by dead code elimination. */ - var useful: Boolean = false + // var useful: Boolean = false def setPos(p: Position): this.type = { _pos = p @@ -133,11 +133,11 @@ trait Opcodes { self: ICodes => object opcodes { - def mayThrow(i: Instruction): Boolean = i match { - case LOAD_LOCAL(_) | STORE_LOCAL(_) | CONSTANT(_) | THIS(_) | CZJUMP(_, _, _, _) - | DROP(_) | DUP(_) | RETURN(_) | LOAD_EXCEPTION(_) | JUMP(_) | CJUMP(_, _, _, _) => false - case _ => true - } + // def mayThrow(i: Instruction): Boolean = i match { + // case LOAD_LOCAL(_) | STORE_LOCAL(_) | CONSTANT(_) | THIS(_) | CZJUMP(_, _, _, _) + // | DROP(_) | DUP(_) | RETURN(_) | LOAD_EXCEPTION(_) | JUMP(_) | CJUMP(_, _, _, _) => false + // case _ => true + // } /** Loads "this" on top of the stack. * Stack: ... @@ -714,7 +714,7 @@ trait Opcodes { self: ICodes => /** Is this a static method call? */ def isStatic: Boolean = false - def isSuper: Boolean = false + // def isSuper: Boolean = false /** Is this an instance method call? */ def hasInstance: Boolean = true @@ -749,7 +749,7 @@ trait Opcodes { self: ICodes => * On JVM, translated to `invokespecial`. */ case class SuperCall(mix: Name) extends InvokeStyle { - override def isSuper = true + // override def isSuper = true override def toString(): String = { "super(" + mix + ")" } } @@ -814,7 +814,7 @@ trait Opcodes { self: ICodes => case class CIL_NEWOBJ(method: Symbol) extends Instruction { override def toString(): String = "CIL_NEWOBJ " + hostClass.fullName + method.fullName - var hostClass: Symbol = method.owner; + val hostClass: Symbol = method.owner; override def consumed = method.tpe.paramTypes.length override def consumedTypes = method.tpe.paramTypes map toTypeKind override def produced = 1 diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala index c8579041ba..ebfb4ad591 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala @@ -84,17 +84,17 @@ trait Primitives { self: ICodes => def print(o: AnyRef): PrimitivePrinter = print(o.toString()) - def printPrimitive(prim: Primitive) = prim match { - case Negation(kind) => - print("!") + // def printPrimitive(prim: Primitive) = prim match { + // case Negation(kind) => + // print("!") - case Test(op, kind, zero) => - print(op).print(kind) + // case Test(op, kind, zero) => + // print(op).print(kind) - case Comparison(op, kind) => - print(op).print("(").print(kind) + // case Comparison(op, kind) => + // print(op).print("(").print(kind) - } + // } } /** This class represents a comparison operation. 
*/ diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala index e73015c4da..c8168cbfa6 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala @@ -29,13 +29,13 @@ trait Repository { /** The icode of the given class. If not available, it loads * its bytecode. */ - def icode(sym: Symbol, force: Boolean): IClass = - icode(sym) getOrElse { - log("loading " + sym) - load(sym) - assert(available(sym)) - loaded(sym) - } + // def icode(sym: Symbol, force: Boolean): IClass = + // icode(sym) getOrElse { + // log("loading " + sym) + // load(sym) + // assert(available(sym)) + // loaded(sym) + // } /** Load bytecode for given symbol. */ def load(sym: Symbol): Boolean = { diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index f96dce9f1c..2df6811fd7 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -66,7 +66,7 @@ trait TypeKinds { self: ICodes => def isValueType = false def isBoxedType = false final def isRefOrArrayType = isReferenceType || isArrayType - final def isRefArrayOrBoxType = isRefOrArrayType || isBoxedType + // final def isRefArrayOrBoxType = isRefOrArrayType || isBoxedType final def isNothingType = this == NothingReference final def isNullType = this == NullReference final def isInterfaceType = this match { @@ -114,7 +114,7 @@ trait TypeKinds { self: ICodes => } } - var lubs0 = 0 + // var lubs0 = 0 /** * The least upper bound of two typekinds. They have to be either diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala index c1bf4304ea..c958f20853 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala @@ -20,7 +20,7 @@ trait TypeStacks { */ type Rep = List[TypeKind] - object NoTypeStack extends TypeStack(Nil) { } + // object NoTypeStack extends TypeStack(Nil) { } class TypeStack(var types: Rep) { if (types.nonEmpty) @@ -74,8 +74,8 @@ trait TypeStacks { * length and each type kind agrees position-wise. Two * types agree if one is a subtype of the other. */ - def agreesWith(other: TypeStack): Boolean = - (types corresponds other.types)((t1, t2) => t1 <:< t2 || t2 <:< t1) + // def agreesWith(other: TypeStack): Boolean = + // (types corresponds other.types)((t1, t2) => t1 <:< t2 || t2 <:< t1) /* This method returns a String representation of the stack */ override def toString() = diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala index 53111d0ade..6534cd83f1 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala @@ -27,10 +27,10 @@ abstract class CopyPropagation { /** Values that can be on the stack. */ abstract class Value { - def isRecord = false + // def isRecord = false } case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value { - override def isRecord = true + // override def isRecord = true } /** The value of some location in memory. 
*/ case class Deref(l: Location) extends Value @@ -92,14 +92,14 @@ abstract class CopyPropagation { } /* Return the binding for the given field of the given record */ - def getBinding(r: Record, f: Symbol): Value = { - assert(r.bindings contains f, "Record " + r + " does not contain a field " + f) - - r.bindings(f) match { - case Deref(LocalVar(l)) => getBinding(l) - case target => target - } - } + // def getBinding(r: Record, f: Symbol): Value = { + // assert(r.bindings contains f, "Record " + r + " does not contain a field " + f) + + // r.bindings(f) match { + // case Deref(LocalVar(l)) => getBinding(l) + // case target => target + // } + // } /** Return a local which contains the same value as this field, if any. * If the field holds a reference to a local, the returned value is the diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala index 04c3eedbad..c232c3692a 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala @@ -37,11 +37,11 @@ trait DataFlowAnalysis[L <: SemiLattice] { /** Reinitialize, but keep the old solutions. Should be used when reanalyzing the * same method, after some code transformation. */ - def reinit(f: => Unit): Unit = { - iterations = 0 - worklist.clear; visited.clear; - f - } + // def reinit(f: => Unit): Unit = { + // iterations = 0 + // worklist.clear; visited.clear; + // f + // } def run(): Unit diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 15755f31ad..6e7ed9d4c4 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -269,34 +269,34 @@ abstract class TypeFlowAnalysis { } // interpret - class SimulatedStack { - private var types: List[InferredType] = Nil - private var depth = 0 - - /** Remove and return the topmost element on the stack. If the - * stack is empty, return a reference to a negative index on the - * stack, meaning it refers to elements pushed by a predecessor block. - */ - def pop: InferredType = types match { - case head :: rest => - types = rest - head - case _ => - depth -= 1 - TypeOfStackPos(depth) - } - - def pop2: (InferredType, InferredType) = { - (pop, pop) - } - - def push(t: InferredType) { - depth += 1 - types = types ::: List(t) - } - - def push(k: TypeKind) { push(Const(k)) } - } + // class SimulatedStack { + // private var types: List[InferredType] = Nil + // private var depth = 0 + + // /** Remove and return the topmost element on the stack. If the + // * stack is empty, return a reference to a negative index on the + // * stack, meaning it refers to elements pushed by a predecessor block. + // */ + // def pop: InferredType = types match { + // case head :: rest => + // types = rest + // head + // case _ => + // depth -= 1 + // TypeOfStackPos(depth) + // } + + // def pop2: (InferredType, InferredType) = { + // (pop, pop) + // } + + // def push(t: InferredType) { + // depth += 1 + // types = types ::: List(t) + // } + + // def push(k: TypeKind) { push(Const(k)) } + // } abstract class InferredType { /** Return the type kind pointed by this inferred type. 
*/ @@ -737,9 +737,9 @@ abstract class TypeFlowAnalysis { private var lastStart = 0L - def reset() { - millis = 0L - } + // def reset() { + // millis = 0L + // } def start() { lastStart = System.currentTimeMillis diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 8bae80c760..4a46a64dae 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -455,7 +455,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { /** basic functionality for class file building */ abstract class JBuilder(bytecodeWriter: BytecodeWriter) { - val EMPTY_JTYPE_ARRAY = Array.empty[asm.Type] + // val EMPTY_JTYPE_ARRAY = Array.empty[asm.Type] val EMPTY_STRING_ARRAY = Array.empty[String] val mdesc_arglessvoid = "()V" @@ -523,7 +523,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { /** Specialized array conversion to prevent calling * java.lang.reflect.Array.newInstance via TraversableOnce.toArray */ - def mkArray(xs: Traversable[asm.Type]): Array[asm.Type] = { val a = new Array[asm.Type](xs.size); xs.copyToArray(a); a } + // def mkArray(xs: Traversable[asm.Type]): Array[asm.Type] = { val a = new Array[asm.Type](xs.size); xs.copyToArray(a); a } def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a } // ----------------------------------------------------------------------------------------- @@ -1757,10 +1757,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters { import asm.Opcodes; - def aconst(cst: AnyRef) { - if (cst == null) { jmethod.visitInsn(Opcodes.ACONST_NULL) } - else { jmethod.visitLdcInsn(cst) } - } + // def aconst(cst: AnyRef) { + // if (cst == null) { jmethod.visitInsn(Opcodes.ACONST_NULL) } + // else { jmethod.visitLdcInsn(cst) } + // } final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index 06f94ef46c..e258f38284 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -182,15 +182,15 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with val StringBuilderType = new JObjectType(StringBuilderClassName) // TODO use ASMType.getObjectType val toStringType = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY) // TODO use ASMType.getMethodType val arrayCloneType = new JMethodType(JAVA_LANG_OBJECT, JType.EMPTY_ARRAY) - val MethodTypeType = new JObjectType("java.dyn.MethodType") - val JavaLangClassType = new JObjectType("java.lang.Class") - val MethodHandleType = new JObjectType("java.dyn.MethodHandle") + // val MethodTypeType = new JObjectType("java.dyn.MethodType") + // val JavaLangClassType = new JObjectType("java.lang.Class") + // val MethodHandleType = new JObjectType("java.dyn.MethodHandle") // Scala attributes val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo") - val BeanInfoSkipAttr = rootMirror.getRequiredClass("scala.beans.BeanInfoSkip") - val BeanDisplayNameAttr = rootMirror.getRequiredClass("scala.beans.BeanDisplayName") - val BeanDescriptionAttr = rootMirror.getRequiredClass("scala.beans.BeanDescription") + // val BeanInfoSkipAttr = rootMirror.getRequiredClass("scala.beans.BeanInfoSkip") + // val BeanDisplayNameAttr = rootMirror.getRequiredClass("scala.beans.BeanDisplayName") + // val 
BeanDescriptionAttr = rootMirror.getRequiredClass("scala.beans.BeanDescription") final val ExcludedForwarderFlags = { import Flags._ @@ -264,8 +264,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with val fjbgContext = new FJBGContext(49, 0) - val emitSource = debugLevel >= 1 - val emitLines = debugLevel >= 2 + // val emitSource = debugLevel >= 1 + // val emitLines = debugLevel >= 2 val emitVars = debugLevel >= 3 // bug had phase with wrong name; leaving enabled for brief pseudo deprecation @@ -1843,14 +1843,14 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with ////////////////////// local vars /////////////////////// - def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe)) + // def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe)) def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1 - def indexOf(m: IMethod, sym: Symbol): Int = { - val Some(local) = m lookupLocal sym - indexOf(local) - } + // def indexOf(m: IMethod, sym: Symbol): Int = { + // val Some(local) = m lookupLocal sym + // indexOf(local) + // } def indexOf(local: Local): Int = { assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ") diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala index 2253ae6e15..f7278a7590 100644 --- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala +++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala @@ -23,7 +23,7 @@ abstract class GenMSIL extends SubComponent { import icodes._ import icodes.opcodes._ - val x = loaders + // val x = loaders /** Create a new phase */ override def newPhase(p: Phase) = new MsilPhase(p) @@ -83,9 +83,9 @@ abstract class GenMSIL extends SubComponent { SYMTAB_DEFAULT_CONSTR => SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR} val EXCEPTION = clrTypes.getType("System.Exception") - val MBYTE_ARRAY = clrTypes.mkArrayType(MBYTE) + // val MBYTE_ARRAY = clrTypes.mkArrayType(MBYTE) - val ICLONEABLE = clrTypes.getType("System.ICloneable") + // val ICLONEABLE = clrTypes.getType("System.ICloneable") val MEMBERWISE_CLONE = MOBJECT.GetMethod("MemberwiseClone", MsilType.EmptyTypes) val MMONITOR = clrTypes.getType("System.Threading.Monitor") @@ -102,8 +102,8 @@ abstract class GenMSIL extends SubComponent { val INT_PTR = clrTypes.getType("System.IntPtr") - val JOBJECT = definitions.ObjectClass - val JSTRING = definitions.StringClass + // val JOBJECT = definitions.ObjectClass + // val JSTRING = definitions.StringClass val SystemConvert = clrTypes.getType("System.Convert") @@ -622,7 +622,7 @@ abstract class GenMSIL extends SubComponent { * - emit `Leave handlerReturnLabel` instead of the Return * - emit code at the end: load the local and return its value */ - var currentHandlers = new mutable.Stack[ExceptionHandler] + val currentHandlers = new mutable.Stack[ExceptionHandler] // The IMethod the Local/Label/Kind below belong to var handlerReturnMethod: IMethod = _ // Stores the result when returning inside an exception block diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala index 8d6de821bb..b36e7a4636 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala @@ -189,8 +189,8 @@ abstract class ClosureElimination extends SubComponent { } /** is field 'f' accessible from method 'm'? 
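As an illustrative aside (not part of the patch): the sizeOf overload kept above encodes the JVM rule that long and double values occupy two local-variable slots while everything else takes one. A hypothetical standalone sketch of how parameter slot indices follow from that rule:

// hypothetical sketch; not from the patched sources
sealed trait Kind { def isWide: Boolean }
case object LongKind   extends Kind { val isWide = true }
case object DoubleKind extends Kind { val isWide = true }
case object IntKind    extends Kind { val isWide = false }

object JvmSlots {
  def sizeOf(k: Kind): Int = if (k.isWide) 2 else 1

  // first local slot of each parameter, slot 0 being the `this` reference
  def paramSlots(params: List[Kind]): List[Int] =
    params.scanLeft(1)((slot, k) => slot + sizeOf(k)).init

  // paramSlots(List(IntKind, LongKind, IntKind)) == List(1, 2, 4)
}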
*/ - def accessible(f: Symbol, m: Symbol): Boolean = - f.isPublic || (f.isProtected && (f.enclosingPackageClass == m.enclosingPackageClass)) + // def accessible(f: Symbol, m: Symbol): Boolean = + // f.isPublic || (f.isProtected && (f.enclosingPackageClass == m.enclosingPackageClass)) } /* class ClosureElim */ diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index 7d741aab60..eaeba69382 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -602,7 +602,7 @@ abstract class Inliners extends SubComponent { override def toString = m.toString val sym = m.symbol - val name = sym.name + // val name = sym.name def owner = sym.owner def paramTypes = sym.info.paramTypes def minimumStack = paramTypes.length + 1 @@ -624,7 +624,7 @@ abstract class Inliners extends SubComponent { def isLarge = length > MAX_INLINE_SIZE def isRecursive = m.recursive def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs - def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp)) + // def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp)) def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED) def hasNonFinalizerHandler = handlers exists { @@ -732,7 +732,7 @@ abstract class Inliners extends SubComponent { */ sealed abstract class InlineSafetyInfo { def isSafe = false - def isUnsafe = !isSafe + // def isUnsafe = !isSafe } case object NeverSafeToInline extends InlineSafetyInfo case object InlineableAtThisCaller extends InlineSafetyInfo { override def isSafe = true } diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/compiler/scala/tools/nsc/doc/html/Page.scala index 62166f7def..d502f19e31 100644 --- a/src/compiler/scala/tools/nsc/doc/html/Page.scala +++ b/src/compiler/scala/tools/nsc/doc/html/Page.scala @@ -90,9 +90,9 @@ abstract class Page { /** A relative link from this page to some destination page in the Scaladoc site. * @param destPage The page that the link will point to. */ - def relativeLinkTo(destPage: HtmlPage): String = { - relativeLinkTo(destPage.path) - } + // def relativeLinkTo(destPage: HtmlPage): String = { + // relativeLinkTo(destPage.path) + // } /** A relative link from this page to some destination path. * @param destPath The path that the link will point to. */ diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala index 6b24073339..496dfdefcf 100644 --- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala +++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala @@ -25,7 +25,7 @@ import diagram._ trait Entity { /** Similar to symbols, so we can track entities */ - def id: Int + // def id: Int /** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName` * instead. */ @@ -61,7 +61,7 @@ trait Entity { def isType: Boolean /** Indicates whether this entity lives in the terms namespace (objects, packages, methods, values) */ - def isTerm: Boolean + // def isTerm: Boolean } object Entity { @@ -98,7 +98,7 @@ trait TemplateEntity extends Entity { def isDocTemplate: Boolean /** Whether documentation is available for this template. */ - def isNoDocMemberTemplate: Boolean + // def isNoDocMemberTemplate: Boolean /** Whether this template is a case class. 
*/ def isCaseClass: Boolean @@ -175,10 +175,10 @@ trait MemberEntity extends Entity { def isAbstractType: Boolean /** Whether this member is a template. */ - def isTemplate: Boolean + // def isTemplate: Boolean /** Whether this member is implicit. */ - def isImplicit: Boolean + // def isImplicit: Boolean /** Whether this member is abstract. */ def isAbstract: Boolean @@ -388,7 +388,7 @@ trait NonTemplateMemberEntity extends MemberEntity { /** Whether this member is a bridge member. A bridge member does only exist for binary compatibility reasons * and should not appear in ScalaDoc. */ - def isBridge: Boolean + // def isBridge: Boolean } @@ -507,7 +507,7 @@ trait ImplicitConversion { * Note: not all targetTypes have a corresponding template. Examples include conversions resulting in refinement * types. Need to check it's not option! */ - def targetTemplate: Option[TemplateEntity] + // def targetTemplate: Option[TemplateEntity] /** The components of the implicit conversion type parents */ def targetTypeComponents: List[(TemplateEntity, TypeEntity)] diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala index 10e2f23142..0f2374a6f4 100755 --- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala @@ -18,7 +18,7 @@ object IndexModelFactory { object result extends mutable.HashMap[Char,SymbolMap] { /* Owner template ordering */ - implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase } + // implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase } /* symbol name ordering */ implicit def orderingMap = math.Ordering.String.on { x: String => x.toLowerCase } diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index f11f090b4b..739a1b836d 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -43,10 +43,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def modelFinished: Boolean = _modelFinished private var universe: Universe = null - protected def closestPackage(sym: Symbol) = { - if (sym.isPackage || sym.isPackageClass) sym - else sym.enclosingPackage - } + // protected def closestPackage(sym: Symbol) = { + // if (sym.isPackage || sym.isPackageClass) sym + // else sym.enclosingPackage + // } def makeModel: Option[Universe] = { val universe = new Universe { thisUniverse => @@ -77,7 +77,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */ abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity { - val id = { ids += 1; ids } + // val id = { ids += 1; ids } val name = optimize(sym.nameString) val universe = thisFactory.universe @@ -91,7 +91,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def annotations = sym.annotations.map(makeAnnotation) def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject def isType = sym.name.isTypeName - def isTerm = sym.name.isTermName + // def isTerm = sym.name.isTermName } trait TemplateImpl extends EntityImpl with TemplateEntity { @@ -103,7 +103,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def isObject = sym.isModule && !sym.isPackage def 
isCaseClass = sym.isCaseClass def isRootPackage = false - def isNoDocMemberTemplate = false + // def isNoDocMemberTemplate = false def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this)) } @@ -178,9 +178,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { }) else None - def inheritedFrom = - if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else - makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) }) + // def inheritedFrom = + // if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else + // makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) }) def resultType = { def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone case PolyType(_, res) => resultTpe(res) @@ -195,7 +195,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def isVal = false def isLazyVal = false def isVar = false - def isImplicit = sym.isImplicit + // def isImplicit = sym.isImplicit def isConstructor = false def isAliasType = false def isAbstractType = false @@ -203,7 +203,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { // for the explanation of conversion == null see comment on flags ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (!isImplicitlyInherited)) || sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic - def isTemplate = false + // def isTemplate = false def signature = externalSignature(sym) lazy val signatureCompat = { @@ -257,9 +257,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { */ abstract class MemberTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with MemberTemplateEntity { // no templates cache for this class, each owner gets its own instance - override def isTemplate = true + // override def isTemplate = true def isDocTemplate = false - override def isNoDocMemberTemplate = true + // override def isNoDocMemberTemplate = true lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." 
+ name) def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */ @@ -380,9 +380,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this)).toList // the inherited templates (classes, traits or objects) - var memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this)) + val memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this)) // the direct members (methods, values, vars, types and directly contained templates) - var memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_)) + val memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_)) // the members generated by the symbols in memberSymsEager val ownMembers = (memberSymsEager.flatMap(makeMember(_, None, this))) @@ -438,7 +438,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { else List() ) - override def isTemplate = true + // override def isTemplate = true override def isDocTemplate = true private[this] lazy val companionSymbol = if (sym.isAliasType || sym.isAbstractType) { @@ -545,7 +545,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { val qualifiedName = conversion.fold(inDefinitionTemplates.head.qualifiedName)(_.conversionQualifiedName) optimize(qualifiedName + "#" + name) } - def isBridge = sym.isBridge + // def isBridge = sym.isBridge def isUseCase = useCaseOf.isDefined override def byConversion: Option[ImplicitConversionImpl] = conversion override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined } @@ -707,7 +707,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { override def inTemplate = this override def toRoot = this :: Nil override def qualifiedName = "_root_" - override def inheritedFrom = Nil + // override def inheritedFrom = Nil override def isRootPackage = true override lazy val memberSyms = (bSym.info.members ++ EmptyPackage.info.members).toList filter { s => @@ -857,11 +857,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { inTpl.members.find(_.sym == aSym) } - @deprecated("Use `findLinkTarget` instead.", "2.10.0") - def findTemplate(query: String): Option[DocTemplateImpl] = { - assert(modelFinished) - docTemplatesCache.values find { (tpl: DocTemplateImpl) => tpl.qualifiedName == query && !packageDropped(tpl) && !tpl.isObject } - } + // @deprecated("Use `findLinkTarget` instead.", "2.10.0") + // def findTemplate(query: String): Option[DocTemplateImpl] = { + // assert(modelFinished) + // docTemplatesCache.values find { (tpl: DocTemplateImpl) => tpl.qualifiedName == query && !packageDropped(tpl) && !tpl.isObject } + // } def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = { assert(modelFinished) diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index c1ca8c1448..e9c32b1af0 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -345,14 +345,14 @@ trait ModelFactoryImplicitSupport { makeRootPackage } - def targetTemplate: Option[TemplateEntity] = toType match { - // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types - // such as refinement types because the template can't 
represent the type corectly (a template corresponds to a - // package, class, trait or object) - case t: TypeRef => Some(makeTemplate(t.sym)) - case RefinedType(parents, decls) => None - case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None - } + // def targetTemplate: Option[TemplateEntity] = toType match { + // // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types + // // such as refinement types because the template can't represent the type corectly (a template corresponds to a + // // package, class, trait or object) + // case t: TypeRef => Some(makeTemplate(t.sym)) + // case RefinedType(parents, decls) => None + // case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None + // } def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl) @@ -492,14 +492,14 @@ trait ModelFactoryImplicitSupport { /** * Make implicits explicit - Not used curently */ - object implicitToExplicit extends TypeMap { - def apply(tp: Type): Type = mapOver(tp) match { - case MethodType(params, resultType) => - MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType) - case other => - other - } - } + // object implicitToExplicit extends TypeMap { + // def apply(tp: Type): Type = mapOver(tp) match { + // case MethodType(params, resultType) => + // MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType) + // case other => + // other + // } + // } /** * removeImplicitParameters transforms implicit parameters from the view result type into constraints and diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala index 3e172544dd..4b75f3fd4d 100644 --- a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala +++ b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala @@ -103,7 +103,7 @@ abstract class Comment { def example: List[Body] /** The comment as it appears in the source text. 
*/ - def source: Option[String] + // def source: Option[String] /** A description for the primary constructor */ def constructor: Option[Body] diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala index 9617b15068..7897421bd7 100644 --- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala @@ -28,10 +28,10 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member protected val commentCache = mutable.HashMap.empty[(global.Symbol, TemplateImpl), Comment] - def addCommentBody(sym: global.Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): global.Symbol = { - commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None) - sym - } + // def addCommentBody(sym: global.Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): global.Symbol = { + // commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None) + // sym + // } def comment(sym: global.Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl): Option[Comment] = { val key = (sym, inTpl) @@ -132,7 +132,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member val note = note0 val example = example0 val constructor = constructor0 - val source = source0 + // val source = source0 val inheritDiagram = inheritDiagram0 val contentDiagram = contentDiagram0 val groupDesc = groupDesc0 @@ -957,19 +957,19 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member count } - final def jumpUntil(chars: String): Int = { - assert(chars.length > 0) - var count = 0 - val c = chars.charAt(0) - while (!check(chars) && char != endOfText) { - nextChar() - while (char != c && char != endOfText) { - nextChar() - count += 1 - } - } - count - } + // final def jumpUntil(chars: String): Int = { + // assert(chars.length > 0) + // var count = 0 + // val c = chars.charAt(0) + // while (!check(chars) && char != endOfText) { + // nextChar() + // while (char != c && char != endOfText) { + // nextChar() + // count += 1 + // } + // } + // count + // } final def jumpUntil(pred: => Boolean): Int = { var count = 0 diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala index c2aa1f17f3..cb4659a71c 100644 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala +++ b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala @@ -36,12 +36,12 @@ case class InheritanceDiagram(thisNode: ThisNode, override def isInheritanceDiagram = true lazy val depthInfo = new DepthInfo { def maxDepth = 3 - def nodeDepth(node: Node) = - if (node == thisNode) 1 - else if (superClasses.contains(node)) 0 - else if (subClasses.contains(node)) 2 - else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1 - else -1 + // def nodeDepth(node: Node) = + // if (node == thisNode) 1 + // else if (superClasses.contains(node)) 0 + // else if (subClasses.contains(node)) 2 + // else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1 + // else -1 } } @@ -49,7 +49,7 @@ trait DepthInfo { /** Gives the maximum depth */ def maxDepth: Int /** Gives the depth of any node in the diagram or -1 if the node is not in the diagram */ - def nodeDepth(node: Node): Int + // def nodeDepth(node: Node): Int } abstract class Node { @@ -142,5 +142,5 @@ class 
ContentDiagramDepth(pack: ContentDiagram) extends DepthInfo { } val maxDepth = _maxDepth - def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1) -} \ No newline at end of file + // def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1) +} diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala index a3f76994bc..921d4dc0e1 100644 --- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala +++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala @@ -16,7 +16,7 @@ trait BuildManager { def addSourceFiles(files: Set[AbstractFile]) /** Remove the given files from the managed build process. */ - def removeFiles(files: Set[AbstractFile]) + // def removeFiles(files: Set[AbstractFile]) /** The given files have been modified by the user. Recompile * them and their dependent files. diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index 4dedbcfd3d..f71415450c 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -395,40 +395,40 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") if (typerRun != currentTyperRun) demandNewCompilerRun() } - def debugInfo(source : SourceFile, start : Int, length : Int): String = { - println("DEBUG INFO "+source+"/"+start+"/"+length) - val end = start+length - val pos = rangePos(source, start, start, end) - - val tree = locateTree(pos) - val sw = new StringWriter - val pw = new PrintWriter(sw) - newTreePrinter(pw).print(tree) - pw.flush - - val typed = new Response[Tree] - askTypeAt(pos, typed) - val typ = typed.get.left.toOption match { - case Some(tree) => - val sw = new StringWriter - val pw = new PrintWriter(sw) - newTreePrinter(pw).print(tree) - pw.flush - sw.toString - case None => "" - } - - val completionResponse = new Response[List[Member]] - askTypeCompletion(pos, completionResponse) - val completion = completionResponse.get.left.toOption match { - case Some(members) => - members mkString "\n" - case None => "" - } - - source.content.view.drop(start).take(length).mkString+" : "+source.path+" ("+start+", "+end+ - ")\n\nlocateTree:\n"+sw.toString+"\n\naskTypeAt:\n"+typ+"\n\ncompletion:\n"+completion - } + // def debugInfo(source : SourceFile, start : Int, length : Int): String = { + // println("DEBUG INFO "+source+"/"+start+"/"+length) + // val end = start+length + // val pos = rangePos(source, start, start, end) + + // val tree = locateTree(pos) + // val sw = new StringWriter + // val pw = new PrintWriter(sw) + // newTreePrinter(pw).print(tree) + // pw.flush + + // val typed = new Response[Tree] + // askTypeAt(pos, typed) + // val typ = typed.get.left.toOption match { + // case Some(tree) => + // val sw = new StringWriter + // val pw = new PrintWriter(sw) + // newTreePrinter(pw).print(tree) + // pw.flush + // sw.toString + // case None => "" + // } + + // val completionResponse = new Response[List[Member]] + // askTypeCompletion(pos, completionResponse) + // val completion = completionResponse.get.left.toOption match { + // case Some(members) => + // members mkString "\n" + // case None => "" + // } + + // source.content.view.drop(start).take(length).mkString+" : "+source.path+" ("+start+", "+end+ + // ")\n\nlocateTree:\n"+sw.toString+"\n\naskTypeAt:\n"+typ+"\n\ncompletion:\n"+completion + // } // ----------------- The Background Runner Thread ----------------------- diff --git 
a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala index 014661e525..42dad4c50d 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala @@ -40,23 +40,23 @@ object ByteCode { /** Attempts to retrieve case parameter names for given class name. */ - def caseParamNamesForPath(path: String) = - for { - module <- DECODER - method <- decoderMethod("caseParamNames", classOf[String]) - names <- method.invoke(module, path).asInstanceOf[Option[List[String]]] - } - yield names + // def caseParamNamesForPath(path: String) = + // for { + // module <- DECODER + // method <- decoderMethod("caseParamNames", classOf[String]) + // names <- method.invoke(module, path).asInstanceOf[Option[List[String]]] + // } + // yield names def aliasesForPackage(pkg: String) = aliasMap flatMap (_(pkg)) /** Attempts to find type aliases in package objects. */ - def aliasForType(path: String): Option[String] = { - val (pkg, name) = (path lastIndexOf '.') match { - case -1 => return None - case idx => (path take idx, path drop (idx + 1)) - } - aliasesForPackage(pkg) flatMap (_ get name) - } + // def aliasForType(path: String): Option[String] = { + // val (pkg, name) = (path lastIndexOf '.') match { + // case -1 => return None + // case idx => (path take idx, path drop (idx + 1)) + // } + // aliasesForPackage(pkg) flatMap (_ get name) + // } } diff --git a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala index 1741a82775..a8eeee48b8 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala @@ -1,50 +1,50 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import CodeHandlers.NoSuccess -import scala.util.control.ControlThrowable - -/** - * The start of a simpler interface for utilizing the compiler with piecemeal - * code strings. The "T" here could potentially be a Tree, a Type, a Symbol, - * a Boolean, or something even more exotic. - */ -trait CodeHandlers[T] { - self => - - // Expressions are composed of operators and operands. - def expr(code: String): T - - // Statements occur as parts of blocks and templates. - // A statement can be an import, a definition or an expression, or it can be empty. - // Statements used in the template of a class definition can also be declarations. 
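As an illustrative aside (not part of the patch): the aliasForType method commented out above first splits a fully qualified name at its last '.', yielding the package prefix and the simple name, and gives up when there is no dot. A hypothetical standalone helper capturing just that split:

object PathSplit {
  // hypothetical helper; not from the patched sources
  def splitAtLastDot(path: String): Option[(String, String)] =
    path.lastIndexOf('.') match {
      case -1  => None
      case idx => Some((path.take(idx), path.drop(idx + 1)))
    }

  // splitAtLastDot("scala.collection.Seq") == Some(("scala.collection", "Seq"))
  // splitAtLastDot("Seq")                  == None
}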
- def stmt(code: String): T - def stmts(code: String): Seq[T] - - object opt extends CodeHandlers[Option[T]] { - val handler: PartialFunction[Throwable, Option[T]] = { - case _: NoSuccess => None - } - val handlerSeq: PartialFunction[Throwable, Seq[Option[T]]] = { - case _: NoSuccess => Nil - } - - def expr(code: String) = try Some(self.expr(code)) catch handler - def stmt(code: String) = try Some(self.stmt(code)) catch handler - def stmts(code: String) = try (self.stmts(code) map (x => Some(x))) catch handlerSeq - } -} - -object CodeHandlers { - def incomplete() = throw CodeIncomplete - def fail(msg: String) = throw new CodeException(msg) - - trait NoSuccess extends ControlThrowable - class CodeException(msg: String) extends RuntimeException(msg) with NoSuccess { } - object CodeIncomplete extends CodeException("CodeIncomplete") -} +// /* NSC -- new Scala compiler +// * Copyright 2005-2013 LAMP/EPFL +// * @author Paul Phillips +// */ + +// package scala.tools.nsc +// package interpreter + +// import CodeHandlers.NoSuccess +// import scala.util.control.ControlThrowable + +// /** +// * The start of a simpler interface for utilizing the compiler with piecemeal +// * code strings. The "T" here could potentially be a Tree, a Type, a Symbol, +// * a Boolean, or something even more exotic. +// */ +// trait CodeHandlers[T] { +// self => + +// // Expressions are composed of operators and operands. +// def expr(code: String): T + +// // Statements occur as parts of blocks and templates. +// // A statement can be an import, a definition or an expression, or it can be empty. +// // Statements used in the template of a class definition can also be declarations. +// def stmt(code: String): T +// def stmts(code: String): Seq[T] + +// object opt extends CodeHandlers[Option[T]] { +// val handler: PartialFunction[Throwable, Option[T]] = { +// case _: NoSuccess => None +// } +// val handlerSeq: PartialFunction[Throwable, Seq[Option[T]]] = { +// case _: NoSuccess => Nil +// } + +// def expr(code: String) = try Some(self.expr(code)) catch handler +// def stmt(code: String) = try Some(self.stmt(code)) catch handler +// def stmts(code: String) = try (self.stmts(code) map (x => Some(x))) catch handlerSeq +// } +// } + +// object CodeHandlers { +// def incomplete() = throw CodeIncomplete +// def fail(msg: String) = throw new CodeException(msg) + +// trait NoSuccess extends ControlThrowable +// class CodeException(msg: String) extends RuntimeException(msg) with NoSuccess { } +// object CodeIncomplete extends CodeException("CodeIncomplete") +// } diff --git a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala b/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala index 8042f0aee2..0b4ad1a29d 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala @@ -10,5 +10,5 @@ package interpreter */ class CommandLine(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) { override def cmdName = "scala" - override lazy val fileEndings = List(".scalaint") + // override lazy val fileEndings = List(".scalaint") } diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala index 1dfccbfbf7..bf53fbb04b 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Completion.scala @@ -23,7 +23,7 @@ object NoCompletion extends Completion { } object Completion { - def empty: 
Completion = NoCompletion + // def empty: Completion = NoCompletion case class Candidates(cursor: Int, candidates: List[String]) { } val NoCandidates = Candidates(-1, Nil) diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala index 3a0b48ef57..5b8a1791e4 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala @@ -52,30 +52,30 @@ trait CompletionAware { } } -object CompletionAware { - val Empty = new CompletionAware { def completions(verbosity: Int) = Nil } +// object CompletionAware { +// val Empty = new CompletionAware { def completions(verbosity: Int) = Nil } - def unapply(that: Any): Option[CompletionAware] = that match { - case x: CompletionAware => Some((x)) - case _ => None - } +// def unapply(that: Any): Option[CompletionAware] = that match { +// case x: CompletionAware => Some((x)) +// case _ => None +// } - /** Create a CompletionAware object from the given functions. - * The first should generate the list of completions whenever queried, - * and the second should return Some(CompletionAware) object if - * subcompletions are possible. - */ - def apply(terms: () => List[String], followFunction: String => Option[CompletionAware]): CompletionAware = - new CompletionAware { - def completions = terms() - def completions(verbosity: Int) = completions - override def follow(id: String) = followFunction(id) - } +// /** Create a CompletionAware object from the given functions. +// * The first should generate the list of completions whenever queried, +// * and the second should return Some(CompletionAware) object if +// * subcompletions are possible. +// */ +// def apply(terms: () => List[String], followFunction: String => Option[CompletionAware]): CompletionAware = +// new CompletionAware { +// def completions = terms() +// def completions(verbosity: Int) = completions +// override def follow(id: String) = followFunction(id) +// } - /** Convenience factories. - */ - def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None) - def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware = - apply(() => map.keys.toList, map.get _) -} +// /** Convenience factories. 
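As an illustrative aside (not part of the patch): the CompletionAware companion being commented out above bundled two factories -- build a completer from a thunk producing candidates plus a "follow" function for nested completions. A hypothetical, self-contained sketch of that shape and how it would be used:

// hypothetical sketch; not from the patched sources
trait MiniCompletion {
  def completions: List[String]
  def follow(id: String): Option[MiniCompletion] = None
}

object MiniCompletion {
  def apply(terms: () => List[String],
            followFn: String => Option[MiniCompletion]): MiniCompletion =
    new MiniCompletion {
      def completions = terms()
      override def follow(id: String) = followFn(id)
    }

  // usage: a completer with fixed candidates and no sub-completions
  // val colors = MiniCompletion(() => List("red", "green", "blue"), _ => None)
}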
+// */ +// def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None) +// def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware = +// apply(() => map.keys.toList, map.get _) +// } diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala index 13880bb8af..cddd7c930c 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala @@ -38,7 +38,7 @@ trait CompletionOutput { def relativize(str: String): String = quietString(str stripPrefix (pkg + ".")) def relativize(tp: Type): String = relativize(tp.normalize.toString) - def relativize(sym: Symbol): String = relativize(sym.info) + // def relativize(sym: Symbol): String = relativize(sym.info) def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]") def parenList(params: List[Any]) = params.mkString("(", ", ", ")") diff --git a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala index b5850d901c..0e3a2328a2 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala @@ -9,15 +9,15 @@ package interpreter import scala.tools.jline.console.{ ConsoleReader, CursorBuffer } trait ConsoleReaderHelper extends ConsoleReader { - def currentLine = "" + getCursorBuffer.buffer - def currentPos = getCursorBuffer.cursor + // def currentLine = "" + getCursorBuffer.buffer + // def currentPos = getCursorBuffer.cursor def terminal = getTerminal() def width = terminal.getWidth() def height = terminal.getHeight() - def paginate = isPaginationEnabled() - def paginate_=(value: Boolean) = setPaginationEnabled(value) + // def paginate = isPaginationEnabled() + // def paginate_=(value: Boolean) = setPaginationEnabled(value) - def goBack(num: Int): Unit + // def goBack(num: Int): Unit def readOneKey(prompt: String): Int def eraseLine(): Unit diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala index 80debfacb9..f09c9a9557 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala @@ -26,7 +26,7 @@ trait Delimited { def delimited: Char => Boolean def escapeChars: List[Char] = List('\\') - def quoteChars: List[(Char, Char)] = List(('\'', '\''), ('"', '"')) + // def quoteChars: List[(Char, Char)] = List(('\'', '\''), ('"', '"')) /** Break String into args based on delimiting function. 
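As an illustrative aside (not part of the patch): the Delimited trait above is driven by a `delimited: Char => Boolean` predicate, and the method documented here breaks a line into arguments wherever that predicate holds. A minimal, hypothetical splitter that ignores the escape and quote handling of the real trait:

object DelimitedSketch {
  // hypothetical splitter; not from the patched sources
  def toArgs(line: String, isDelim: Char => Boolean): List[String] =
    line.foldRight(List("")) { (c, acc) =>
      if (isDelim(c)) "" :: acc
      else (c + acc.head) :: acc.tail
    }.filter(_.nonEmpty)

  // toArgs("load foo.scala", _ == ' ') == List("load", "foo.scala")
}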
*/ @@ -39,6 +39,6 @@ trait Delimited { def isDelimiterChar(ch: Char) = delimited(ch) def isEscapeChar(ch: Char): Boolean = escapeChars contains ch - def isQuoteStart(ch: Char): Boolean = quoteChars map (_._1) contains ch - def isQuoteEnd(ch: Char): Boolean = quoteChars map (_._2) contains ch + // def isQuoteStart(ch: Char): Boolean = quoteChars map (_._1) contains ch + // def isQuoteEnd(ch: Char): Boolean = quoteChars map (_._2) contains ch } diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala index ebd0030802..fbd0d15962 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala @@ -17,7 +17,8 @@ trait ExprTyper { import syntaxAnalyzer.UnitParser import naming.freshInternalVarName - object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] { + object codeParser { + val global: repl.global.type = repl.global def applyRule[T](code: String, rule: UnitParser => T): T = { reporter.reset() val scanner = newUnitParser(code) @@ -29,10 +30,10 @@ trait ExprTyper { result } - def defns(code: String) = stmts(code) collect { case x: DefTree => x } - def expr(code: String) = applyRule(code, _.expr()) + // def defns(code: String) = stmts(code) collect { case x: DefTree => x } + // def expr(code: String) = applyRule(code, _.expr()) def stmts(code: String) = applyRule(code, _.templateStats()) - def stmt(code: String) = stmts(code).last // guaranteed nonempty + // def stmt(code: String) = stmts(code).last // guaranteed nonempty } /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */ diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index 74549ef558..0e3dc3147b 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -14,7 +14,7 @@ import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName } import scala.tools.util.{ Javap } import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream } import io.{ File, Directory } -import scala.reflect.NameTransformer._ +// import scala.reflect.NameTransformer._ import util.ScalaClassLoader import ScalaClassLoader._ import scala.tools.util._ @@ -71,12 +71,12 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) intp.reporter printUntruncatedMessage msg } - def isAsync = !settings.Yreplsync.value + // def isAsync = !settings.Yreplsync.value lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals]) def history = in.history /** The context class loader at the time this object was created */ - protected val originalClassLoader = Thread.currentThread.getContextClassLoader + // protected val originalClassLoader = Thread.currentThread.getContextClassLoader // classpath entries added via :cp var addedClasspath: String = "" @@ -131,20 +131,19 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def helpCommand(line: String): Result = { if (line == "") helpSummary() else uniqueCommand(line) match { - case Some(lc) => echo("\n" + lc.longHelp) + case Some(lc) => echo("\n" + lc.help) case _ => ambiguousError(line) } } private def helpSummary() = { val usageWidth = commands map (_.usageMsg.length) max - val formatStr = "%-" + usageWidth + "s %s %s" + val formatStr = "%-" + usageWidth + "s %s" echo("All commands 
can be abbreviated, e.g. :he instead of :help.") - echo("Those marked with a * have more detailed help, e.g. :help imports.\n") + // echo("Those marked with a * have more detailed help, e.g. :help imports.\n") commands foreach { cmd => - val star = if (cmd.hasLongHelp) "*" else " " - echo(formatStr.format(cmd.usageMsg, star, cmd.help)) + echo(formatStr.format(cmd.usageMsg, cmd.help)) } } private def ambiguousError(cmd: String): Result = { @@ -194,10 +193,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) out println msg out.flush() } - protected def echoNoNL(msg: String) = { - out print msg - out.flush() - } + // protected def echoNoNL(msg: String) = { + // out print msg + // out.flush() + // } /** Search the history */ def searchHistory(_cmdline: String) { @@ -208,8 +207,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) echo("%d %s".format(index + offset, line)) } - private var currentPrompt = Properties.shellPromptString - def setPrompt(prompt: String) = currentPrompt = prompt + private val currentPrompt = Properties.shellPromptString + // def setPrompt(prompt: String) = currentPrompt = prompt /** Prompt to print when awaiting input */ def prompt = currentPrompt @@ -684,7 +683,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def process(settings: Settings): Boolean = savingContextLoader { this.settings = settings createInterpreter() - var thunks: List[() => Unit] = Nil + // var thunks: List[() => Unit] = Nil // sets in to some kind of reader depending on environmental cues in = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, true)) @@ -704,21 +703,21 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } /** process command-line arguments and do as they request */ - def process(args: Array[String]): Boolean = { - val command = new CommandLine(args.toList, echo) - def neededHelp(): String = - (if (command.settings.help.value) command.usageMsg + "\n" else "") + - (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "") - - // if they asked for no help and command is valid, we call the real main - neededHelp() match { - case "" => command.ok && process(command.settings) - case help => echoNoNL(help) ; true - } - } + // def process(args: Array[String]): Boolean = { + // val command = new CommandLine(args.toList, echo) + // def neededHelp(): String = + // (if (command.settings.help.value) command.usageMsg + "\n" else "") + + // (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "") + + // // if they asked for no help and command is valid, we call the real main + // neededHelp() match { + // case "" => command.ok && process(command.settings) + // case help => echoNoNL(help) ; true + // } + // } @deprecated("Use `process` instead", "2.9.0") - def main(settings: Settings): Unit = process(settings) + def main(settings: Settings): Unit = process(settings) //used by sbt } object ILoop { diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 0ef27ac96a..8ba6573e64 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -63,6 +63,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends @deprecated("Use replOutput.dir instead", "2.11.0") def virtualDirectory = replOutput.dir + // Used in a test case. 
def showDirectory() = replOutput.show(out) private[nsc] var printResults = true // whether to print result lines @@ -82,20 +83,20 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends private var _classLoader: AbstractFileClassLoader = null // active classloader private val _compiler: Global = newCompiler(settings, reporter) // our private compiler - private val nextReqId = { - var counter = 0 - () => { counter += 1 ; counter } - } + // private val nextReqId = { + // var counter = 0 + // () => { counter += 1 ; counter } + // } def compilerClasspath: Seq[URL] = ( if (isInitializeComplete) global.classPath.asURLs else new PathResolver(settings).result.asURLs // the compiler's classpath ) def settings = initialSettings - def mostRecentLine = prevRequestList match { - case Nil => "" - case req :: _ => req.originalLine - } + // def mostRecentLine = prevRequestList match { + // case Nil => "" + // case req :: _ => req.originalLine + // } // Run the code body with the given boolean settings flipped to true. def withoutWarnings[T](body: => T): T = beQuietDuring { val saved = settings.nowarn.value @@ -110,12 +111,12 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true)) def this() = this(new Settings()) - lazy val repllog: Logger = new Logger { - val out: JPrintWriter = imain.out - val isInfo: Boolean = BooleanProp keyExists "scala.repl.info" - val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug" - val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace" - } + // lazy val repllog: Logger = new Logger { + // val out: JPrintWriter = imain.out + // val isInfo: Boolean = BooleanProp keyExists "scala.repl.info" + // val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug" + // val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace" + // } lazy val formatting: Formatting = new Formatting { val prompt = Properties.shellPromptString } @@ -175,7 +176,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } import global._ - import definitions.{ ObjectClass, termMember, typeMember, dropNullaryMethod} + import definitions.{ ObjectClass, termMember, dropNullaryMethod} lazy val runtimeMirror = ru.runtimeMirror(classLoader) @@ -189,13 +190,13 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends noFatal(runtimeMirror staticModule path) orElse noFatal(rootMirror staticModule path) ) - def getPathIfDefined(path: String) = ( - if (path endsWith "$") getModuleIfDefined(path.init) - else getClassIfDefined(path) - ) + // def getPathIfDefined(path: String) = ( + // if (path endsWith "$") getModuleIfDefined(path.init) + // else getClassIfDefined(path) + // ) implicit class ReplTypeOps(tp: Type) { - def orElse(other: => Type): Type = if (tp ne NoType) tp else other + // def orElse(other: => Type): Type = if (tp ne NoType) tp else other def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp) } @@ -211,7 +212,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends if (replScope containsName name) freshUserTermName() else name } - def isUserTermName(name: Name) = isUserVarName("" + name) + // def isUserTermName(name: Name) = isUserVarName("" + name) def isInternalTermName(name: Name) = isInternalVarName("" + name) } import naming._ @@ -310,11 +311,11 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends abstract 
class PhaseDependentOps { def shift[T](op: => T): T - def lookup(name: Name): Symbol = shift(replScope lookup name) + // def lookup(name: Name): Symbol = shift(replScope lookup name) def path(name: => Name): String = shift(path(symbolOfName(name))) def path(sym: Symbol): String = backticked(shift(sym.fullName)) - def name(sym: Symbol): Name = shift(sym.name) - def info(sym: Symbol): Type = shift(sym.info) + // def name(sym: Symbol): Name = shift(sym.name) + // def info(sym: Symbol): Type = shift(sym.info) def sig(sym: Symbol): String = shift(sym.defString) } object typerOp extends PhaseDependentOps { @@ -354,7 +355,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends case p => new URLClassLoader(compilerClasspath, p) }) - def getInterpreterClassLoader() = classLoader + // def getInterpreterClassLoader() = classLoader // Set the current Java "context" class loader to this interpreter's class loader def setContextClassLoader() = classLoader.setAsContext() @@ -405,7 +406,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends exitingTyper { req.defines filterNot (s => req.defines contains s.companionSymbol) foreach { newSym => val companion = newSym.name.companionName - val found = replScope lookup companion + // val found = replScope lookup companion replScope lookup companion andAlso { oldSym => replwarn(s"warning: previously defined $oldSym is not a companion to $newSym.") replwarn("Companions must be defined together; you may wish to use :paste mode for this.") @@ -423,18 +424,18 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends printMessage(msg) } - def isParseable(line: String): Boolean = { - beSilentDuring { - try parse(line) match { - case Some(xs) => xs.nonEmpty // parses as-is - case None => true // incomplete - } - catch { case x: Exception => // crashed the compiler - replwarn("Exception in isParseable(\"" + line + "\"): " + x) - false - } - } - } + // def isParseable(line: String): Boolean = { + // beSilentDuring { + // try parse(line) match { + // case Some(xs) => xs.nonEmpty // parses as-is + // case None => true // incomplete + // } + // catch { case x: Exception => // crashed the compiler + // replwarn("Exception in isParseable(\"" + line + "\"): " + x) + // false + // } + // } + // } def compileSourcesKeepingRun(sources: SourceFile*) = { val run = new Run() @@ -653,17 +654,17 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends quietRun("val %s = %s".format(tempName, name)) quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType)) } - def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*)) - def addImports(ids: String*): IR.Result = - if (ids.isEmpty) IR.Success - else interpret("import " + ids.mkString(", ")) + // def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*)) + // def addImports(ids: String*): IR.Result = + // if (ids.isEmpty) IR.Success + // else interpret("import " + ids.mkString(", ")) def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p)) def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value) def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value)) - def bindSyntheticValue(x: Any): IR.Result = bindValue(freshInternalVarName(), x) - def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x) - def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x) + // def bindSyntheticValue(x: Any): 
IR.Result = bindValue(freshInternalVarName(), x) + // def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x) + // def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x) /** Reset this interpreter, forgetting all user-specified requests. */ def reset() { @@ -719,7 +720,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def readPath = pathTo(readName) def evalPath = pathTo(evalName) - def printPath = pathTo(printName) + // def printPath = pathTo(printName) def call(name: String, args: Any*): AnyRef = { val m = evalMethod(name) @@ -734,9 +735,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends try Right(call(name, args: _*)) catch { case ex: Throwable => Left(ex) } - def callOpt(name: String, args: Any*): Option[AnyRef] = - try Some(call(name, args: _*)) - catch { case ex: Throwable => bindError(ex) ; None } + // def callOpt(name: String, args: Any*): Option[AnyRef] = + // try Some(call(name, args: _*)) + // catch { case ex: Throwable => bindError(ex) ; None } class EvalException(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { } @@ -749,7 +750,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } lazy val evalClass = load(evalPath) - lazy val evalValue = callOpt(resultName) + // lazy val evalValue = callOpt(resultName) def compile(source: String): Boolean = compileAndSaveRun("", source) @@ -802,10 +803,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends class Request(val line: String, val trees: List[Tree]) { def defines = defHandlers flatMap (_.definedSymbols) def imports = importedSymbols - def references = referencedNames map symbolOfName + // def references = referencedNames map symbolOfName def value = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol - val reqId = nextReqId() + // val reqId = nextReqId() val lineRep = new ReadEvalPrint() private var _originalLine: String = null @@ -817,7 +818,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def defHandlers = handlers collect { case x: MemberDefHandler => x } /** all (public) names defined by these statements */ - val definedNames = handlers flatMap (_.definedNames) + // val definedNames = handlers flatMap (_.definedNames) /** list of names used by this expression */ val referencedNames: List[Name] = handlers flatMap (_.referencedNames) @@ -837,7 +838,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends exitingTyper(importsCode(referencedNames.toSet)) /** The unmangled symbol name, but supplemented with line info. 
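As an illustrative aside (not part of the patch): callEither above (kept) and callOpt (removed) both wrap a reflective invocation so that failure becomes a value instead of a thrown exception. The general shape, as a hypothetical standalone helper:

object SafeCall {
  // hypothetical helper; not from the patched sources
  def attempt[A](thunk: => A): Either[Throwable, A] =
    try Right(thunk) catch { case ex: Throwable => Left(ex) }

  // attempt("42".toInt)          == Right(42)
  // attempt("nope".toInt).isLeft == true
}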
*/ - def disambiguated(name: Name): String = name + " (in " + lineRep + ")" + // def disambiguated(name: Name): String = name + " (in " + lineRep + ")" /** the line of code to compute */ def toCompute = line @@ -931,7 +932,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /* typeOf lookup with encoding */ def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString))) - def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbolDirect.simpleName) + // def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbolDirect.simpleName) private def typeMap[T](f: Type => T) = mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x))) @@ -1011,8 +1012,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends */ def tryTwice(op: => Symbol): Symbol = exitingTyper(op) orElse exitingFlatten(op) - def signatureOf(sym: Symbol) = typerOp sig sym - def symbolOfPath(path: String): Symbol = exitingTyper(getPathIfDefined(path)) + // def signatureOf(sym: Symbol) = typerOp sig sym + // def symbolOfPath(path: String): Symbol = exitingTyper(getPathIfDefined(path)) def symbolOfIdent(id: String): Symbol = symbolOfTerm(id) orElse symbolOfType(id) def symbolOfType(id: String): Symbol = tryTwice(replScope lookup (id: TypeName)) def symbolOfTerm(id: String): Symbol = tryTwice(replScope lookup (id: TermName)) @@ -1067,7 +1068,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName def definedTypes = onlyTypes(allDefinedNames) - def definedSymbols = prevRequestList flatMap (_.defines) toSet + // def definedSymbols = prevRequestList flatMap (_.defines) toSet def definedSymbolList = prevRequestList flatMap (_.defines) filterNot (s => isInternalTermName(s.name)) // Terms with user-given names (i.e. not res0 and not synthetic) @@ -1075,67 +1076,67 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /** Translate a repl-defined identifier into a Symbol. 
*/ - def apply(name: String): Symbol = types(name) orElse terms(name) - def types(name: String): Symbol = replScope lookup (name: TypeName) orElse getClassIfDefined(name) - def terms(name: String): Symbol = replScope lookup (name: TermName) orElse getModuleIfDefined(name) - - def types[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol - def terms[T: global.TypeTag] : Symbol = typeOf[T].termSymbol - def apply[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol - - lazy val DummyInfoSymbol = NoSymbol.newValue("replScopeDummy") - private lazy val DummyInfo = TypeRef(NoPrefix, DummyInfoSymbol, Nil) - private def enterDummySymbol(name: Name) = name match { - case x: TermName => replScope enter (NoSymbol.newValue(x) setInfo DummyInfo) - case x: TypeName => replScope enter (NoSymbol.newClass(x) setInfo DummyInfo) - } + // def apply(name: String): Symbol = types(name) orElse terms(name) + // def types(name: String): Symbol = replScope lookup (name: TypeName) orElse getClassIfDefined(name) + // def terms(name: String): Symbol = replScope lookup (name: TermName) orElse getModuleIfDefined(name) + + // def types[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol + // def terms[T: global.TypeTag] : Symbol = typeOf[T].termSymbol + // def apply[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol + + // lazy val DummyInfoSymbol = NoSymbol.newValue("replScopeDummy") + // private lazy val DummyInfo = TypeRef(NoPrefix, DummyInfoSymbol, Nil) + // private def enterDummySymbol(name: Name) = name match { + // case x: TermName => replScope enter (NoSymbol.newValue(x) setInfo DummyInfo) + // case x: TypeName => replScope enter (NoSymbol.newClass(x) setInfo DummyInfo) + // } private var _replScope: Scope = _ private def resetReplScope() { _replScope = newScope } - def initReplScope() { - languageWildcardSyms foreach { clazz => - importableMembers(clazz) foreach { sym => - updateReplScope(sym, isDefined = false) - } - } - } + // def initReplScope() { + // languageWildcardSyms foreach { clazz => + // importableMembers(clazz) foreach { sym => + // updateReplScope(sym, isDefined = false) + // } + // } + // } def replScope = { if (_replScope eq null) _replScope = newScope _replScope } - def lookupAll(name: String) = (replScope.lookupAll(name: TermName) ++ replScope.lookupAll(name: TypeName)).toList - def unlinkAll(name: String) = { - val syms = lookupAll(name) - syms foreach { sym => - replScope unlink sym - } - enterDummySymbol(name: TermName) - enterDummySymbol(name: TypeName) - syms - } - def isUnlinked(name: Name) = { - symbolOfName(name) match { - case NoSymbol => false - case sym => sym.info.typeSymbolDirect == DummyInfoSymbol - } - } + // def lookupAll(name: String) = (replScope.lookupAll(name: TermName) ++ replScope.lookupAll(name: TypeName)).toList + // def unlinkAll(name: String) = { + // val syms = lookupAll(name) + // syms foreach { sym => + // replScope unlink sym + // } + // enterDummySymbol(name: TermName) + // enterDummySymbol(name: TypeName) + // syms + // } + // def isUnlinked(name: Name) = { + // symbolOfName(name) match { + // case NoSymbol => false + // case sym => sym.info.typeSymbolDirect == DummyInfoSymbol + // } + // } private var executingRequest: Request = _ private val prevRequests = mutable.ListBuffer[Request]() private val directlyBoundNames = mutable.Set[Name]() def allHandlers = prevRequestList flatMap (_.handlers) - def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x } - def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol) + // def allDefHandlers = 
allHandlers collect { case x: MemberDefHandler => x } + // def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol) def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last def prevRequestList = prevRequests.toList - def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct - def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames) + // def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct + // def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames) def importHandlers = allHandlers collect { case x: ImportHandler => x } def withoutUnwrapping(op: => Unit): Unit = { diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala index 302ba7a8ac..d45146383b 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala @@ -12,12 +12,12 @@ package interpreter * @author Lex Spoon, 2007/3/24 **/ class ISettings(intp: IMain) { - /** A list of paths where :load should look */ - var loadPath = List(".") + // /** A list of paths where :load should look */ + // var loadPath = List(".") - /** Set this to true to see repl machinery under -Yrich-exceptions. - */ - var showInternalStackTraces = false + // /** Set this to true to see repl machinery under -Yrich-exceptions. + // */ + // var showInternalStackTraces = false /** The maximum length of toString to use when printing the result * of an evaluation. 0 means no maximum. If a printout requires diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala index c5048ebfd8..978e220fab 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Imports.scala @@ -15,8 +15,8 @@ trait Imports { import definitions.{ ObjectClass, ScalaPackage, JavaLangPackage, PredefModule } import memberHandlers._ - def isNoImports = settings.noimports.value - def isNoPredef = settings.nopredef.value + // def isNoImports = settings.noimports.value + // def isNoPredef = settings.nopredef.value /** Synthetic import handlers for the language defined imports. */ private def makeWildcardImportHandler(sym: Symbol): ImportHandler = { @@ -31,12 +31,12 @@ trait Imports { /** Symbols whose contents are language-defined to be imported. 
*/ def languageWildcardSyms: List[Symbol] = List(JavaLangPackage, ScalaPackage, PredefModule) - def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe) + // def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe) def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler def allImportedNames = importHandlers flatMap (_.importedNames) - def importedTerms = onlyTerms(allImportedNames) - def importedTypes = onlyTypes(allImportedNames) + // def importedTerms = onlyTerms(allImportedNames) + // def importedTypes = onlyTypes(allImportedNames) /** Types which have been wildcard imported, such as: * val x = "abc" ; import x._ // type java.lang.String @@ -52,17 +52,17 @@ trait Imports { def sessionWildcards: List[Type] = { importHandlers filter (_.importsWildcard) map (_.targetType) distinct } - def wildcardTypes = languageWildcards ++ sessionWildcards + // def wildcardTypes = languageWildcards ++ sessionWildcards def languageSymbols = languageWildcardSyms flatMap membersAtPickler def sessionImportedSymbols = importHandlers flatMap (_.importedSymbols) def importedSymbols = languageSymbols ++ sessionImportedSymbols def importedTermSymbols = importedSymbols collect { case x: TermSymbol => x } - def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x } - def implicitSymbols = importedSymbols filter (_.isImplicit) + // def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x } + // def implicitSymbols = importedSymbols filter (_.isImplicit) - def importedTermNamed(name: String): Symbol = - importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol + // def importedTermNamed(name: String): Symbol = + // importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol /** Tuples of (source, imported symbols) in the order they were imported. */ @@ -105,7 +105,7 @@ trait Imports { */ case class ComputedImports(prepend: String, append: String, access: String) protected def importsCode(wanted0: Set[Name]): ComputedImports = { - val wanted = wanted0 filterNot isUnlinked + val wanted = wanted0 // filterNot isUnlinked /** Narrow down the list of requests from which imports * should be taken. 
Removes requests which cannot contribute diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala index 6513381d77..a5f491fe6f 100644 --- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala @@ -15,22 +15,22 @@ import Properties.isMac trait InteractiveReader { val interactive: Boolean - def init(): Unit + // def init(): Unit def reset(): Unit def history: History def completion: Completion - def eraseLine(): Unit + // def eraseLine(): Unit def redrawLine(): Unit - def currentLine: String + // def currentLine: String def readYesOrNo(prompt: String, alt: => Boolean): Boolean = readOneKey(prompt) match { case 'y' => true case 'n' => false case _ => alt } - def readAssumingNo(prompt: String) = readYesOrNo(prompt, false) - def readAssumingYes(prompt: String) = readYesOrNo(prompt, true) + // def readAssumingNo(prompt: String) = readYesOrNo(prompt, false) + // def readAssumingYes(prompt: String) = readYesOrNo(prompt, true) protected def readOneLine(prompt: String): String protected def readOneKey(prompt: String): Int @@ -50,6 +50,6 @@ object InteractiveReader { def apply(): InteractiveReader = SimpleReader() @deprecated("Use `apply` instead.", "2.9.0") - def createDefault(): InteractiveReader = apply() + def createDefault(): InteractiveReader = apply() // used by sbt } diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala index 5ee5e5526d..5325c18710 100644 --- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala +++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala @@ -28,9 +28,9 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput if (isModule) getModuleIfDefined(name) else getModuleIfDefined(name) ) - def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe - def typeOf(name: String) = getType(name, false) - def moduleOf(name: String) = getType(name, true) + // def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe + // def typeOf(name: String) = getType(name, false) + // def moduleOf(name: String) = getType(name, true) trait CompilerCompletion { def tp: Type @@ -47,7 +47,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput def tos(sym: Symbol): String = sym.decodedName def memberNamed(s: String) = exitingTyper(effectiveTp member newTermName(s)) - def hasMethod(s: String) = memberNamed(s).isMethod + // def hasMethod(s: String) = memberNamed(s).isMethod // XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the // compiler to crash for reasons not yet known. @@ -280,8 +280,8 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput } // generic interface for querying (e.g. 
interpreter loop, testing) - def completions(buf: String): List[String] = - topLevelFor(Parsed.dotted(buf + ".", buf.length + 1)) + // def completions(buf: String): List[String] = + // topLevelFor(Parsed.dotted(buf + ".", buf.length + 1)) def completer(): ScalaCompleter = new JLineTabCompletion diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala index e033bab03b..7f8beba32b 100644 --- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala @@ -23,7 +23,7 @@ class JLineReader(_completion: => Completion) extends InteractiveReader { private def term = consoleReader.getTerminal() def reset() = term.reset() - def init() = term.init() + // def init() = term.init() def scalaToJline(tc: ScalaCompleter): Completer = new Completer { def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = { @@ -36,7 +36,7 @@ class JLineReader(_completion: => Completion) extends InteractiveReader { class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper { // working around protected/trait/java insufficiencies. - def goBack(num: Int): Unit = back(num) + // def goBack(num: Int): Unit = back(num) def readOneKey(prompt: String) = { this.print(prompt) this.flush() @@ -63,9 +63,9 @@ class JLineReader(_completion: => Completion) extends InteractiveReader { } } - def currentLine = consoleReader.getCursorBuffer.buffer.toString + // def currentLine = consoleReader.getCursorBuffer.buffer.toString def redrawLine() = consoleReader.redrawLineAndFlush() - def eraseLine() = consoleReader.eraseLine() + // def eraseLine() = consoleReader.eraseLine() // Alternate implementation, not sure if/when I need this. // def eraseLine() = while (consoleReader.delete()) { } def readOneLine(prompt: String) = consoleReader readLine prompt diff --git a/src/compiler/scala/tools/nsc/interpreter/Logger.scala b/src/compiler/scala/tools/nsc/interpreter/Logger.scala index aeb25fc688..df3e90f0e2 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Logger.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Logger.scala @@ -12,7 +12,7 @@ trait Logger { def isTrace: Boolean def out: JPrintWriter - def info(msg: => Any): Unit = if (isInfo) out println msg - def debug(msg: => Any): Unit = if (isDebug) out println msg - def trace(msg: => Any): Unit = if (isTrace) out println msg + // def info(msg: => Any): Unit = if (isInfo) out println msg + // def debug(msg: => Any): Unit = if (isDebug) out println msg + // def trace(msg: => Any): Unit = if (isTrace) out println msg } diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala index 60325ece30..168e611077 100644 --- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala +++ b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala @@ -19,13 +19,13 @@ class ProcessResult(val line: String) { val exitCode = builder ! 
logger def lines = buffer.toList - def show() = lines foreach println + // def show() = lines foreach println override def toString = "`%s` (%d lines, exit %d)".format(line, buffer.size, exitCode) } -object ProcessResult { - implicit def processResultToOutputLines(pr: ProcessResult): List[String] = pr.lines - def apply(line: String): ProcessResult = new ProcessResult(line) -} +// object ProcessResult { +// implicit def processResultToOutputLines(pr: ProcessResult): List[String] = pr.lines +// def apply(line: String): ProcessResult = new ProcessResult(line) +// } trait LoopCommands { protected def out: JPrintWriter @@ -35,14 +35,14 @@ trait LoopCommands { // a single interpreter command abstract class LoopCommand(val name: String, val help: String) extends (String => Result) { - private var _longHelp: String = null - final def defaultHelp = usageMsg + " (no extended help available.)" - def hasLongHelp = _longHelp != null || longHelp != defaultHelp - def withLongHelp(text: String): this.type = { _longHelp = text ; this } - def longHelp = _longHelp match { - case null => defaultHelp - case text => text - } + // private var _longHelp: String = null + // final def defaultHelp = usageMsg + " (no extended help available.)" + // def hasLongHelp = _longHelp != null || longHelp != defaultHelp + // def withLongHelp(text: String): this.type = { _longHelp = text ; this } + // def longHelp = _longHelp match { + // case null => defaultHelp + // case text => text + // } def usage: String = "" def usageMsg: String = ":" + name + ( if (usage == "") "" else " " + usage @@ -55,10 +55,10 @@ trait LoopCommands { Result(true, None) } - def onError(msg: String) = { - out.println("error: " + msg) - showUsage() - } + // def onError(msg: String) = { + // out.println("error: " + msg) + // showUsage() + // } } object LoopCommand { def nullary(name: String, help: String, f: () => Result): LoopCommand = @@ -68,8 +68,8 @@ trait LoopCommands { if (usage == "") new NullaryCmd(name, help, f) else new LineCmd(name, usage, help, f) - def varargs(name: String, usage: String, help: String, f: List[String] => Result): LoopCommand = - new VarArgsCmd(name, usage, help, f) + // def varargs(name: String, usage: String, help: String, f: List[String] => Result): LoopCommand = + // new VarArgsCmd(name, usage, help, f) } class NullaryCmd(name: String, help: String, f: String => Result) extends LoopCommand(name, help) { diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala index 95482f1e46..6e564f4d17 100644 --- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -81,7 +81,7 @@ trait MemberHandlers { def symbol = if (member.symbol eq null) NoSymbol else member.symbol def definesImplicit = false def definesValue = false - def isLegalTopLevel = false + // def isLegalTopLevel = false def definesTerm = Option.empty[TermName] def definesType = Option.empty[TypeName] @@ -152,7 +152,7 @@ trait MemberHandlers { class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) { override def definesTerm = Some(name) override def definesValue = true - override def isLegalTopLevel = true + // override def isLegalTopLevel = true override def resultExtractionCode(req: Request) = codegenln("defined module ", name) } @@ -161,7 +161,7 @@ trait MemberHandlers { override def definedSymbols = List(symbol, symbol.companionSymbol) filterNot (_ == NoSymbol) override def definesType = 
Some(name.toTypeName) override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase) - override def isLegalTopLevel = true + // override def isLegalTopLevel = true override def resultExtractionCode(req: Request) = codegenln("defined %s %s".format(keyword, name)) @@ -182,19 +182,19 @@ trait MemberHandlers { case sym => sym.thisType } private def importableTargetMembers = importableMembers(targetType).toList - override def isLegalTopLevel = true - - def createImportForName(name: Name): String = { - selectors foreach { - case sel @ ImportSelector(old, _, `name`, _) => return "import %s.{ %s }".format(expr, sel) - case _ => () - } - "import %s.%s".format(expr, name) - } + // override def isLegalTopLevel = true + + // def createImportForName(name: Name): String = { + // selectors foreach { + // case sel @ ImportSelector(old, _, `name`, _) => return "import %s.{ %s }".format(expr, sel) + // case _ => () + // } + // "import %s.%s".format(expr, name) + // } // TODO: Need to track these specially to honor Predef masking attempts, // because they must be the leading imports in the code generated for each // line. We can use the same machinery as Contexts now, anyway. - def isPredefImport = isReferenceToPredef(expr) + // def isPredefImport = isReferenceToPredef(expr) // wildcard imports, e.g. import foo._ private def selectorWild = selectors filter (_.name == nme.USCOREkw) diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala index eff0ef59c5..c10ba23691 100644 --- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala +++ b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala @@ -14,14 +14,14 @@ import scala.reflect.{ClassTag, classTag} trait NamedParamCreator { protected def freshName: () => String - def apply(name: String, tpe: String, value: Any): NamedParam = NamedParamClass(name, tpe, value) + // def apply(name: String, tpe: String, value: Any): NamedParam = NamedParamClass(name, tpe, value) def apply[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = new Typed[T](name, x) def apply[T: ru.TypeTag : ClassTag](x: T): NamedParam = apply(freshName(), x) def clazz(name: String, x: Any): NamedParam = new Untyped(name, x) - def clazz(x: Any): NamedParam = clazz(freshName(), x) + // def clazz(x: Any): NamedParam = clazz(freshName(), x) - implicit def namedValue[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = apply(name, x) + // implicit def namedValue[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = apply(name, x) implicit def tuple[T: ru.TypeTag : ClassTag](pair: (String, T)): NamedParam = apply(pair._1, pair._2) } diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/compiler/scala/tools/nsc/interpreter/Naming.scala index 0d03a8669a..57b1205fb3 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Naming.scala @@ -78,7 +78,7 @@ trait Naming { private lazy val userVar = new NameCreator(sessionNames.res) // var name, like res0 private lazy val internalVar = new NameCreator(sessionNames.ires) // internal var name, like $ires0 - def isLineName(name: String) = (name startsWith sessionNames.line) && (name stripPrefix sessionNames.line forall (_.isDigit)) + // def isLineName(name: String) = (name startsWith sessionNames.line) && (name stripPrefix sessionNames.line forall (_.isDigit)) def isUserVarName(name: String) = userVar didGenerate name def isInternalVarName(name: String) = internalVar 
didGenerate name diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala index 24c01e9ae6..3d72e4b2a4 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala @@ -17,7 +17,7 @@ class Parsed private ( ) extends Delimited { def isEmpty = args.isEmpty def isUnqualified = args.size == 1 - def isQualified = args.size > 1 + // def isQualified = args.size > 1 def isAtStart = cursor <= 0 private var _verbosity = 0 @@ -31,7 +31,7 @@ class Parsed private ( def bufferTail = new Parsed(buffer drop headLength, cursor - headLength, delimited) withVerbosity verbosity def prev = new Parsed(buffer, cursor - 1, delimited) withVerbosity verbosity - def next = new Parsed(buffer, cursor + 1, delimited) withVerbosity verbosity + // def next = new Parsed(buffer, cursor + 1, delimited) withVerbosity verbosity def currentChar = buffer(cursor) def currentArg = args.last def position = @@ -41,8 +41,8 @@ class Parsed private ( def isFirstDelimiter = !isEmpty && isDelimiterChar(buffer.head) def isLastDelimiter = !isEmpty && isDelimiterChar(buffer.last) - def firstIfDelimiter = if (isFirstDelimiter) buffer.head.toString else "" - def lastIfDelimiter = if (isLastDelimiter) buffer.last.toString else "" + // def firstIfDelimiter = if (isFirstDelimiter) buffer.head.toString else "" + // def lastIfDelimiter = if (isLastDelimiter) buffer.last.toString else "" def isQuoted = false // TODO def isEscaped = !isAtStart && isEscapeChar(currentChar) && !isEscapeChar(prev.currentChar) @@ -56,13 +56,13 @@ object Parsed { private def onull(s: String) = if (s == null) "" else s - def apply(s: String): Parsed = apply(onull(s), onull(s).length) + // def apply(s: String): Parsed = apply(onull(s), onull(s).length) def apply(s: String, cursor: Int): Parsed = apply(onull(s), cursor, DefaultDelimiters) def apply(s: String, cursor: Int, delimited: Char => Boolean): Parsed = new Parsed(onull(s), cursor, delimited) - def dotted(s: String): Parsed = dotted(onull(s), onull(s).length) + // def dotted(s: String): Parsed = dotted(onull(s), onull(s).length) def dotted(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ == '.') - def undelimited(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ => false) + // def undelimited(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ => false) } diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/compiler/scala/tools/nsc/interpreter/Phased.scala index e6b780f177..3cf448f4c2 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Phased.scala @@ -24,7 +24,7 @@ trait Phased { case NoPhaseName => false case name => active = name ; true } - def getMulti = multi + // def getMulti = multi def setMulti(phases: Seq[PhaseName]): Boolean = { if (phases contains NoPhaseName) false else { @@ -66,16 +66,16 @@ trait Phased { try parseInternal(str) catch { case _: Exception => NoPhaseName } - def apply[T](body: => T) = immutable.SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*) + // def apply[T](body: => T) = immutable.SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*) def atCurrent[T](body: => T): T = enteringPhase(get)(body) def multi[T](body: => T): Seq[T] = multi map (ph => at(ph)(body)) - def all[T](body: => T): Seq[T] = atMulti(PhaseName.all)(body) - def show[T](body: => T): Seq[T] = { - val pairs = atMap(PhaseName.all)(body) - pairs foreach { case (ph, 
op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) } - pairs map (_._2) - } + // def all[T](body: => T): Seq[T] = atMulti(PhaseName.all)(body) + // def show[T](body: => T): Seq[T] = { + // val pairs = atMap(PhaseName.all)(body) + // pairs foreach { case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) } + // pairs map (_._2) + // } def at[T](ph: PhaseName)(body: => T): T = { val saved = get @@ -90,10 +90,10 @@ trait Phased { finally setMulti(saved) } - def showAt[T](phs: Seq[PhaseName])(body: => T): Unit = - atMap[T](phs)(body) foreach { - case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) - } + // def showAt[T](phs: Seq[PhaseName])(body: => T): Unit = + // atMap[T](phs)(body) foreach { + // case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) + // } def atMap[T](phs: Seq[PhaseName])(body: => T): Seq[(PhaseName, T)] = phs zip atMulti(phs)(body) @@ -112,7 +112,7 @@ trait Phased { def apply(id: Int): PhaseName = all find (_.id == id) getOrElse NoPhaseName implicit def apply(s: String): PhaseName = nameMap(s) - implicit def defaultPhaseName: PhaseName = active + // implicit def defaultPhaseName: PhaseName = active } sealed abstract class PhaseName { lazy val id = phase.id @@ -121,7 +121,7 @@ trait Phased { def isEmpty = this eq NoPhaseName // Execute some code during this phase. - def apply[T](body: => T): T = enteringPhase(phase)(body) + // def apply[T](body: => T): T = enteringPhase(phase)(body) } case object Parser extends PhaseName @@ -158,5 +158,5 @@ trait Phased { } implicit def phaseEnumToPhase(name: PhaseName): Phase = name.phase - implicit def phaseNameToPhase(name: String): Phase = currentRun.phaseNamed(name) + // implicit def phaseNameToPhase(name: String): Phase = currentRun.phaseNamed(name) } diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala index ab0f1c0033..88c20c5096 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Power.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala @@ -149,17 +149,17 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re // And whatever else there is to do. init.lines foreach (intp interpret _) } - def valsDescription: String = { - def to_str(m: Symbol) = "%12s %s".format( - m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.") - - ( rutil.info[ReplValsImpl].membersDeclared - filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor) - sortBy (_.decodedName) - map to_str - mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "") - ) - } + // def valsDescription: String = { + // def to_str(m: Symbol) = "%12s %s".format( + // m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.") + + // ( rutil.info[ReplValsImpl].membersDeclared + // filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor) + // sortBy (_.decodedName) + // map to_str + // mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "") + // ) + // } trait LowPriorityInternalInfo { implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfo[T] = new InternalInfo[T](None) @@ -173,13 +173,13 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re * of the conveniences exist on that wrapper. 
*/ trait LowPriorityInternalInfoWrapper { - implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None) + // implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None) } - object InternalInfoWrapper extends LowPriorityInternalInfoWrapper { + // object InternalInfoWrapper extends LowPriorityInternalInfoWrapper { - } + // } class InternalInfoWrapper[T: ru.TypeTag : ClassTag](value: Option[T] = None) { - def ? : InternalInfo[T] = new InternalInfo[T](value) + // def ? : InternalInfo[T] = new InternalInfo[T](value) } /** Todos... @@ -187,7 +187,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re * customizable symbol filter (had to hardcode no-spec to reduce noise) */ class InternalInfo[T](value: Option[T] = None)(implicit typeEvidence: ru.TypeTag[T], runtimeClassEvidence: ClassTag[T]) { - private def newInfo[U: ru.TypeTag : ClassTag](value: U): InternalInfo[U] = new InternalInfo[U](Some(value)) + // private def newInfo[U: ru.TypeTag : ClassTag](value: U): InternalInfo[U] = new InternalInfo[U](Some(value)) private def isSpecialized(s: Symbol) = s.name.toString contains "$mc" private def isImplClass(s: Symbol) = s.name.toString endsWith "$class" @@ -200,45 +200,45 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re ) def symbol = compilerSymbolFromTag(tag) def tpe = compilerTypeFromTag(tag) - def name = symbol.name - def companion = symbol.companionSymbol - def info = symbol.info - def moduleClass = symbol.moduleClass - def owner = symbol.owner - def owners = symbol.ownerChain drop 1 - def signature = symbol.defString - - def decls = info.decls - def declsOverride = membersDeclared filter (_.isOverride) - def declsOriginal = membersDeclared filterNot (_.isOverride) + // def name = symbol.name + // def companion = symbol.companionSymbol + // def info = symbol.info + // def moduleClass = symbol.moduleClass + // def owner = symbol.owner + // def owners = symbol.ownerChain drop 1 + // def signature = symbol.defString + + // def decls = info.decls + // def declsOverride = membersDeclared filter (_.isOverride) + // def declsOriginal = membersDeclared filterNot (_.isOverride) def members = membersUnabridged filterNot excludeMember def membersUnabridged = tpe.members.toList - def membersDeclared = members filterNot excludeMember - def membersInherited = members filterNot (membersDeclared contains _) - def memberTypes = members filter (_.name.isTypeName) - def memberMethods = members filter (_.isMethod) + // def membersDeclared = members filterNot excludeMember + // def membersInherited = members filterNot (membersDeclared contains _) + // def memberTypes = members filter (_.name.isTypeName) + // def memberMethods = members filter (_.isMethod) def pkg = symbol.enclosingPackage - def pkgName = pkg.fullName - def pkgClass = symbol.enclosingPackageClass - def pkgMembers = pkg.info.members filterNot excludeMember - def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage) - def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember + // def pkgName = pkg.fullName + // def pkgClass = symbol.enclosingPackageClass + // def pkgMembers = pkg.info.members filterNot excludeMember + // def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage) + // def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember def tag = typeEvidence def runtimeClass = runtimeClassEvidence.runtimeClass def 
shortClass = runtimeClass.getName split "[$.]" last def baseClasses = tpe.baseClasses - def baseClassDecls = mapFrom(baseClasses)(_.info.decls.toList.sortBy(_.name)) - def ancestors = baseClasses drop 1 - def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol) - def baseTypes = tpe.baseTypeSeq.toList + // def baseClassDecls = mapFrom(baseClasses)(_.info.decls.toList.sortBy(_.name)) + // def ancestors = baseClasses drop 1 + // def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol) + // def baseTypes = tpe.baseTypeSeq.toList - def <:<[U: ru.TypeTag : ClassTag](other: U) = tpe <:< newInfo(other).tpe - def lub[U: ru.TypeTag : ClassTag](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe)) - def glb[U: ru.TypeTag : ClassTag](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe)) + // def <:<[U: ru.TypeTag : ClassTag](other: U) = tpe <:< newInfo(other).tpe + // def lub[U: ru.TypeTag : ClassTag](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe)) + // def glb[U: ru.TypeTag : ClassTag](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe)) override def toString = value match { case Some(x) => "%s (%s)".format(x, shortClass) @@ -264,7 +264,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re } object Prettifier extends LowPriorityPrettifier { def stringOf(x: Any): String = scala.runtime.ScalaRunTime.stringOf(x) - def prettify[T](value: T): TraversableOnce[String] = default[T] prettify value + // def prettify[T](value: T): TraversableOnce[String] = default[T] prettify value def default[T] = new Prettifier[T] { def prettify(x: T): TraversableOnce[String] = AnyPrettifier prettify x def show(x: T): Unit = AnyPrettifier show x @@ -274,7 +274,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re def show(x: T): Unit def prettify(x: T): TraversableOnce[String] - def show(xs: TraversableOnce[T]): Unit = prettify(xs) foreach println + // def show(xs: TraversableOnce[T]): Unit = prettify(xs) foreach println def prettify(xs: TraversableOnce[T]): TraversableOnce[String] = xs flatMap (x => prettify(x)) } @@ -286,31 +286,31 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re pretty prettify f(value) foreach (StringPrettifier show _) def freq[U](p: T => U) = (value.toSeq groupBy p mapValues (_.size)).toList sortBy (-_._2) map (_.swap) - def ppfreq[U](p: T => U): Unit = freq(p) foreach { case (count, key) => println("%5d %s".format(count, key)) } + // def ppfreq[U](p: T => U): Unit = freq(p) foreach { case (count, key) => println("%5d %s".format(count, key)) } - def |[U](f: Seq[T] => Seq[U]): Seq[U] = f(value) - def ^^[U](f: T => U): Seq[U] = value map f - def ^?[U](pf: PartialFunction[T, U]): Seq[U] = value collect pf + // def |[U](f: Seq[T] => Seq[U]): Seq[U] = f(value) + // def ^^[U](f: T => U): Seq[U] = value map f + // def ^?[U](pf: PartialFunction[T, U]): Seq[U] = value collect pf - def >>!(implicit ord: Ordering[T]): Unit = pp(_.sorted.distinct) + // def >>!(implicit ord: Ordering[T]): Unit = pp(_.sorted.distinct) def >>(implicit ord: Ordering[T]): Unit = pp(_.sorted) def >!(): Unit = pp(_.distinct) def >(): Unit = pp(identity) - def >#(): Unit = this ># (identity[T] _) - def >#[U](p: T => U): Unit = this ppfreq p + // def >#(): Unit = this ># (identity[T] _) + // def >#[U](p: T => U): Unit = this ppfreq p - def >?(p: T => Boolean): Unit = pp(_ filter p) - def >?(s: String): Unit = pp(_ filter (_.toString 
contains s)) - def >?(r: Regex): Unit = pp(_ filter (_.toString matches fixRegex(r))) + // def >?(p: T => Boolean): Unit = pp(_ filter p) + // def >?(s: String): Unit = pp(_ filter (_.toString contains s)) + // def >?(r: Regex): Unit = pp(_ filter (_.toString matches fixRegex(r))) - private def fixRegex(r: scala.util.matching.Regex): String = { - val s = r.pattern.toString - val prefix = if (s startsWith "^") "" else """^.*?""" - val suffix = if (s endsWith "$") "" else """.*$""" + // private def fixRegex(r: scala.util.matching.Regex): String = { + // val s = r.pattern.toString + // val prefix = if (s startsWith "^") "" else """^.*?""" + // val suffix = if (s endsWith "$") "" else """.*$""" - prefix + s + suffix - } + // prefix + s + suffix + // } } class MultiPrettifierClass[T: Prettifier](val value: Seq[T]) extends PrettifierClass[T]() { } @@ -335,8 +335,8 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re def slurp(): String = io.Streamable.slurp(url) } class RichSymbolList(syms: List[Symbol]) { - def sigs = syms map (_.defString) - def infos = syms map (_.info) + // def sigs = syms map (_.defString) + // def infos = syms map (_.info) } trait Implicits1 { @@ -344,7 +344,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) = new SinglePrettifierClass[T](x) - implicit def liftToTypeName(s: String): TypeName = newTypeName(s) + // implicit def liftToTypeName(s: String): TypeName = newTypeName(s) } trait Implicits2 extends Implicits1 { class RichSymbol(sym: Symbol) { @@ -370,8 +370,8 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in) implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec) - implicit def liftToTermName(s: String): TermName = newTermName(s) - implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs) + // implicit def liftToTermName(s: String): TermName = newTermName(s) + // implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs) } trait ReplUtilities { @@ -382,13 +382,13 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re def clazz[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isClass) def info[T: ru.TypeTag : ClassTag] = InternalInfo[T] def ?[T: ru.TypeTag : ClassTag] = InternalInfo[T] - def url(s: String) = { - try new URL(s) - catch { case _: MalformedURLException => - if (Path(s).exists) Path(s).toURL - else new URL("http://" + s) - } - } + // def url(s: String) = { + // try new URL(s) + // catch { case _: MalformedURLException => + // if (Path(s).exists) Path(s).toURL + // else new URL("http://" + s) + // } + // } def sanitize(s: String): String = sanitize(s.getBytes()) def sanitize(s: Array[Byte]): String = (s map { case x if x.toChar.isControl => '?' 
@@ -406,11 +406,11 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re lazy val rutil: ReplUtilities = new ReplUtilities { } lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { } - def context(code: String) = analyzer.rootContext(unit(code)) - def source(code: String) = newSourceFile(code) + // def context(code: String) = analyzer.rootContext(unit(code)) + // def source(code: String) = newSourceFile(code) def unit(code: String) = newCompilationUnit(code) def trees(code: String) = parse(code) getOrElse Nil - def typeOf(id: String) = intp.typeOfExpression(id) + // def typeOf(id: String) = intp.typeOfExpression(id) override def toString = s""" |** Power mode status ** diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala index 7cd0f436c4..d48ceb4eff 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala @@ -14,9 +14,9 @@ trait ReplConfig { lazy val replProps = new ReplProps class TapMaker[T](x: T) { - def tapInfo(msg: => String): T = tap(x => replinfo(parens(x))) + // def tapInfo(msg: => String): T = tap(x => replinfo(parens(x))) def tapDebug(msg: => String): T = tap(x => repldbg(parens(x))) - def tapTrace(msg: => String): T = tap(x => repltrace(parens(x))) + // def tapTrace(msg: => String): T = tap(x => repltrace(parens(x))) def tap[U](f: T => U): T = { f(x) x @@ -28,12 +28,12 @@ trait ReplConfig { try Console println msg catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) } - private[nsc] def repldbgex(ex: Throwable): Unit = { - if (isReplDebug) { - echo("Caught/suppressing: " + ex) - ex.printStackTrace - } - } + // private[nsc] def repldbgex(ex: Throwable): Unit = { + // if (isReplDebug) { + // echo("Caught/suppressing: " + ex) + // ex.printStackTrace + // } + // } private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg) private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg) private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg) @@ -45,14 +45,14 @@ trait ReplConfig { repltrace(stackTraceString(unwrap(t))) alt } - private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T = - substituteAndLog("" + alt, alt)(body) + // private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T = + // substituteAndLog("" + alt, alt)(body) private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = { try body catch logAndDiscard(label, alt) } - private[nsc] def squashAndLog(label: String)(body: => Unit): Unit = - substituteAndLog(label, ())(body) + // private[nsc] def squashAndLog(label: String)(body: => Unit): Unit = + // substituteAndLog(label, ())(body) def isReplTrace: Boolean = replProps.trace def isReplDebug: Boolean = replProps.debug || isReplTrace diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala index bc3e7a10d7..22c27513d3 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala @@ -13,15 +13,15 @@ class ReplProps { private def bool(name: String) = BooleanProp.keyExists(name) private def int(name: String) = IntProp(name) - val jlineDebug = bool("scala.tools.jline.internal.Log.debug") - val jlineTrace = bool("scala.tools.jline.internal.Log.trace") + // val jlineDebug = 
bool("scala.tools.jline.internal.Log.debug") + // val jlineTrace = bool("scala.tools.jline.internal.Log.trace") val info = bool("scala.repl.info") val debug = bool("scala.repl.debug") val trace = bool("scala.repl.trace") val power = bool("scala.repl.power") - val replInitCode = Prop[JFile]("scala.repl.initcode") + // val replInitCode = Prop[JFile]("scala.repl.initcode") val replAutorunCode = Prop[JFile]("scala.repl.autoruncode") val powerInitCode = Prop[JFile]("scala.repl.power.initcode") val powerBanner = Prop[JFile]("scala.repl.power.banner") diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala index 670bbf9bae..ed035f8a1a 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala @@ -29,5 +29,5 @@ trait ReplStrings { "scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen) def words(s: String) = s.trim split "\\s+" filterNot (_ == "") toList - def isQuoted(s: String) = (s.length >= 2) && (s.head == s.last) && ("\"'" contains s.head) + // def isQuoted(s: String) = (s.length >= 2) && (s.head == s.last) && ("\"'" contains s.head) } diff --git a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala index 4371f7fe05..7183db2dfb 100644 --- a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala +++ b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala @@ -10,7 +10,7 @@ import scala.reflect.{ ClassTag, classTag } class RichClass[T](val clazz: Class[T]) { def toTag: ClassTag[T] = ClassTag[T](clazz) - def toTypeString: String = TypeStrings.fromClazz(clazz) + // def toTypeString: String = TypeStrings.fromClazz(clazz) // Sadly isAnonymousClass does not return true for scala anonymous // classes because our naming scheme is not doing well against the @@ -20,14 +20,13 @@ class RichClass[T](val clazz: Class[T]) { catch { case _: java.lang.InternalError => false } // good ol' "Malformed class name" ) - /** It's not easy... to be... me... 
*/ - def supermans: List[ClassTag[_]] = supers map (_.toTag) + def supertags: List[ClassTag[_]] = supers map (_.toTag) def superNames: List[String] = supers map (_.getName) def interfaces: List[JClass] = supers filter (_.isInterface) def hasAncestorName(f: String => Boolean) = superNames exists f def hasAncestor(f: JClass => Boolean) = supers exists f - def hasAncestorInPackage(pkg: String) = hasAncestorName(_ startsWith (pkg + ".")) + // def hasAncestorInPackage(pkg: String) = hasAncestorName(_ startsWith (pkg + ".")) def supers: List[JClass] = { def loop(x: JClass): List[JClass] = x.getSuperclass match { diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala index bccd8158ec..a07f84cc10 100644 --- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala @@ -19,11 +19,11 @@ extends InteractiveReader val history = NoHistory val completion = NoCompletion - def init() = () + // def init() = () def reset() = () - def eraseLine() = () + // def eraseLine() = () def redrawLine() = () - def currentLine = "" + // def currentLine = "" def readOneLine(prompt: String): String = { if (interactive) { out.print(prompt) @@ -40,4 +40,4 @@ object SimpleReader { def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, interactive: Boolean = true): SimpleReader = new SimpleReader(in, out, interactive) -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala index 9fb79a9d6f..f9749feabe 100644 --- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala @@ -33,7 +33,7 @@ trait StructuredTypeStrings extends DestructureTypes { val NoGrouping = Grouping("", "", "", false) val ListGrouping = Grouping("(", ", ", ")", false) val ProductGrouping = Grouping("(", ", ", ")", true) - val ParamGrouping = Grouping("(", ", ", ")", true) + // val ParamGrouping = Grouping("(", ", ", ")", true) val BlockGrouping = Grouping(" { ", "; ", "}", false) private def str(level: Int)(body: => String): String = " " * level + body @@ -189,7 +189,7 @@ trait TypeStrings { else enclClass.getName + "." + (name stripPrefix enclPre) ) } - def scalaName(ct: ClassTag[_]): String = scalaName(ct.runtimeClass) + // def scalaName(ct: ClassTag[_]): String = scalaName(ct.runtimeClass) def anyClass(x: Any): JClass = if (x == null) null else x.getClass private def brackets(tps: String*): String = @@ -220,7 +220,7 @@ trait TypeStrings { * practice to rely on toString for correctness) generated the VALID string * representation of the type. */ - def fromTypedValue[T: ru.TypeTag : ClassTag](x: T): String = fromTag[T] + // def fromTypedValue[T: ru.TypeTag : ClassTag](x: T): String = fromTag[T] def fromValue(value: Any): String = if (value == null) "Null" else fromClazz(anyClass(value)) def fromClazz(clazz: JClass): String = scalaName(clazz) + tparamString(clazz) def fromTag[T: ru.TypeTag : ClassTag] : String = scalaName(classTag[T].runtimeClass) + tparamString[T] @@ -241,12 +241,12 @@ trait TypeStrings { } } - val typeTransforms = List( - "java.lang." -> "", - "scala.collection.immutable." -> "immutable.", - "scala.collection.mutable." -> "mutable.", - "scala.collection.generic." -> "generic." - ) + // val typeTransforms = List( + // "java.lang." -> "", + // "scala.collection.immutable." 
-> "immutable.", + // "scala.collection.mutable." -> "mutable.", + // "scala.collection.generic." -> "generic." + // ) } object TypeStrings extends TypeStrings { } diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala index 6a2d69db2c..e6c9adb296 100644 --- a/src/compiler/scala/tools/nsc/interpreter/package.scala +++ b/src/compiler/scala/tools/nsc/interpreter/package.scala @@ -48,7 +48,7 @@ package object interpreter extends ReplConfig with ReplStrings { private[nsc] implicit def enrichClass[T](clazz: Class[T]) = new RichClass[T](clazz) private[nsc] implicit def enrichAnyRefWithTap[T](x: T) = new TapMaker(x) - private[nsc] def tracing[T](msg: String)(x: T): T = x.tapTrace(msg) + // private[nsc] def tracing[T](msg: String)(x: T): T = x.tapTrace(msg) private[nsc] def debugging[T](msg: String)(x: T) = x.tapDebug(msg) private val ourClassloader = getClass.getClassLoader @@ -68,38 +68,38 @@ package object interpreter extends ReplConfig with ReplStrings { import global.{ reporter => _, _ } import definitions._ - lazy val tagOfStdReplVals = staticTypeTag[scala.tools.nsc.interpreter.StdReplVals] + // lazy val tagOfStdReplVals = staticTypeTag[scala.tools.nsc.interpreter.StdReplVals] protected def echo(msg: String) = { Console.out println msg Console.out.flush() } - def wrapCommand(line: String): String = { - def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T" - - words(line) match { - case Nil => - intp.executionWrapper match { - case "" => "No execution wrapper is set." - case s => "Current execution wrapper: " + s - } - case "clear" :: Nil => - intp.executionWrapper match { - case "" => "No execution wrapper is set." - case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper." - } - case wrapper :: Nil => - intp.typeOfExpression(wrapper) match { - case PolyType(List(targ), MethodType(List(arg), restpe)) => - setExecutionWrapper(originalPath(wrapper)) - "Set wrapper to '" + wrapper + "'" - case tp => - failMsg + "\nFound: " - } - case _ => failMsg - } - } + // def wrapCommand(line: String): String = { + // def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T" + + // words(line) match { + // case Nil => + // intp.executionWrapper match { + // case "" => "No execution wrapper is set." + // case s => "Current execution wrapper: " + s + // } + // case "clear" :: Nil => + // intp.executionWrapper match { + // case "" => "No execution wrapper is set." + // case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper." 
+ // } + // case wrapper :: Nil => + // intp.typeOfExpression(wrapper) match { + // case PolyType(List(targ), MethodType(List(arg), restpe)) => + // setExecutionWrapper(originalPath(wrapper)) + // "Set wrapper to '" + wrapper + "'" + // case tp => + // failMsg + "\nFound: " + // } + // case _ => failMsg + // } + // } def implicitsCommand(line: String): String = { def p(x: Any) = intp.reporter.printMessage("" + x) diff --git a/src/compiler/scala/tools/nsc/interpreter/session/History.scala b/src/compiler/scala/tools/nsc/interpreter/session/History.scala index daa05b86db..b727a0494f 100644 --- a/src/compiler/scala/tools/nsc/interpreter/session/History.scala +++ b/src/compiler/scala/tools/nsc/interpreter/session/History.scala @@ -14,15 +14,15 @@ trait History { def asStrings: List[String] def index: Int def size: Int - def grep(s: String): List[String] + // def grep(s: String): List[String] } object NoHistory extends History { def asStrings = Nil - def grep(s: String) = Nil + // def grep(s: String) = Nil def index = 0 def size = 0 } -object History { - def empty: History = NoHistory -} +// object History { +// def empty: History = NoHistory +// } diff --git a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala index 9f4e2b9df3..06e431fb30 100644 --- a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala +++ b/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala @@ -55,8 +55,8 @@ class SimpleHistory extends JLineHistory { def moveToEnd(): Unit = setTo(size) // scala legacy interface - def asList: List[JEntry] = toEntries().toList - def asJavaList = entries() + // def asList: List[JEntry] = toEntries().toList + // def asJavaList = entries() def asStrings = buf.toList - def grep(s: String) = buf.toList filter (_ contains s) + // def grep(s: String) = buf.toList filter (_ contains s) } diff --git a/src/compiler/scala/tools/nsc/io/Fileish.scala b/src/compiler/scala/tools/nsc/io/Fileish.scala index 7b4e385dd8..77d12ea022 100644 --- a/src/compiler/scala/tools/nsc/io/Fileish.scala +++ b/src/compiler/scala/tools/nsc/io/Fileish.scala @@ -1,33 +1,33 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ +// /* NSC -- new Scala compiler +// * Copyright 2005-2013 LAMP/EPFL +// * @author Paul Phillips +// */ -package scala.tools.nsc -package io +// package scala.tools.nsc +// package io -import java.io.{ InputStream } -import java.util.jar.JarEntry +// import java.io.{ InputStream } +// import java.util.jar.JarEntry -/** A common interface for File-based things and Stream-based things. - * (In particular, io.File and JarEntry.) - */ -class Fileish(val path: Path, val input: () => InputStream) extends Streamable.Chars { - def inputStream() = input() +// /** A common interface for File-based things and Stream-based things. +// * (In particular, io.File and JarEntry.) 
+// */ +// class Fileish(val path: Path, val input: () => InputStream) extends Streamable.Chars { +// def inputStream() = input() - def parent = path.parent - def name = path.name - def isSourceFile = path.hasExtension("java", "scala") +// def parent = path.parent +// def name = path.name +// def isSourceFile = path.hasExtension("java", "scala") - private lazy val pkgLines = lines() collect { case x if x startsWith "package " => x stripPrefix "package" trim } - lazy val pkgFromPath = parent.path.replaceAll("""[/\\]""", ".") - lazy val pkgFromSource = pkgLines map (_ stripSuffix ";") mkString "." +// private lazy val pkgLines = lines() collect { case x if x startsWith "package " => x stripPrefix "package" trim } +// lazy val pkgFromPath = parent.path.replaceAll("""[/\\]""", ".") +// lazy val pkgFromSource = pkgLines map (_ stripSuffix ";") mkString "." - override def toString = path.path -} +// override def toString = path.path +// } -object Fileish { - def apply(f: File): Fileish = new Fileish(f, () => f.inputStream()) - def apply(f: JarEntry, in: () => InputStream): Fileish = new Fileish(Path(f.getName), in) - def apply(path: String, in: () => InputStream): Fileish = new Fileish(Path(path), in) -} +// object Fileish { +// def apply(f: File): Fileish = new Fileish(f, () => f.inputStream()) +// def apply(f: JarEntry, in: () => InputStream): Fileish = new Fileish(Path(f.getName), in) +// def apply(path: String, in: () => InputStream): Fileish = new Fileish(Path(path), in) +// } diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index ef2c9b13c0..6f1c322391 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -36,9 +36,9 @@ class Jar(file: File) extends Iterable[JarEntry] { def this(jfile: JFile) = this(File(jfile)) def this(path: String) = this(File(path)) - protected def errorFn(msg: String): Unit = Console println msg + // protected def errorFn(msg: String): Unit = Console println msg - lazy val jarFile = new JarFile(file.jfile) + // lazy val jarFile = new JarFile(file.jfile) lazy val manifest = withJarInput(s => Option(s.getManifest)) def mainClass = manifest map (f => f(Name.MAIN_CLASS)) @@ -63,12 +63,12 @@ class Jar(file: File) extends Iterable[JarEntry] { Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f } override def iterator: Iterator[JarEntry] = this.toList.iterator - def fileishIterator: Iterator[Fileish] = jarFile.entries.asScala map (x => Fileish(x, () => getEntryStream(x))) + // def fileishIterator: Iterator[Fileish] = jarFile.entries.asScala map (x => Fileish(x, () => getEntryStream(x))) - private def getEntryStream(entry: JarEntry) = jarFile getInputStream entry match { - case null => errorFn("No such entry: " + entry) ; null - case x => x - } + // private def getEntryStream(entry: JarEntry) = jarFile getInputStream entry match { + // case null => errorFn("No such entry: " + entry) ; null + // case x => x + // } override def toString = "" + file } @@ -130,7 +130,7 @@ object Jar { m } def apply(manifest: JManifest): WManifest = new WManifest(manifest) - implicit def unenrichManifest(x: WManifest): JManifest = x.underlying + // implicit def unenrichManifest(x: WManifest): JManifest = x.underlying } class WManifest(manifest: JManifest) { for ((k, v) <- initialMainAttrs) @@ -147,12 +147,12 @@ object Jar { } def apply(name: Attributes.Name): String = attrs(name) - def apply(name: String): String = apply(new Attributes.Name(name)) + // def apply(name: 
String): String = apply(new Attributes.Name(name)) def update(key: Attributes.Name, value: String) = attrs.put(key, value) - def update(key: String, value: String) = attrs.put(new Attributes.Name(key), value) + // def update(key: String, value: String) = attrs.put(new Attributes.Name(key), value) - def mainClass: String = apply(Name.MAIN_CLASS) - def mainClass_=(value: String) = update(Name.MAIN_CLASS, value) + // def mainClass: String = apply(Name.MAIN_CLASS) + // def mainClass_=(value: String) = update(Name.MAIN_CLASS, value) } // See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html diff --git a/src/compiler/scala/tools/nsc/io/MsilFile.scala b/src/compiler/scala/tools/nsc/io/MsilFile.scala index 1a3a4f5c81..881af2e87a 100644 --- a/src/compiler/scala/tools/nsc/io/MsilFile.scala +++ b/src/compiler/scala/tools/nsc/io/MsilFile.scala @@ -15,4 +15,4 @@ import ch.epfl.lamp.compiler.msil.{ Type => MsilType } class MsilFile(val msilType: MsilType) extends VirtualFile(msilType.FullName, msilType.Namespace) { } -object NoMsilFile extends MsilFile(null) { } +// object NoMsilFile extends MsilFile(null) { } diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala index 56ff4a57ee..3f16784b2d 100644 --- a/src/compiler/scala/tools/nsc/io/Pickler.scala +++ b/src/compiler/scala/tools/nsc/io/Pickler.scala @@ -76,7 +76,7 @@ abstract class Pickler[T] { * @param fromNull an implicit evidence parameter ensuring that the type of values * handled by this pickler contains `null`. */ - def orNull(implicit fromNull: Null <:< T): Pickler[T] = nullablePickler(this) + // def orNull(implicit fromNull: Null <:< T): Pickler[T] = nullablePickler(this) /** A conditional pickler obtained from the current pickler. * @param cond the condition to test to find out whether pickler can handle @@ -93,7 +93,7 @@ abstract class Pickler[T] { object Pickler { - var picklerDebugMode = false + // var picklerDebugMode = false /** A base class representing unpickler result. It has two subclasses: * `UnpickleSucess` for successful unpicklings and `UnpickleFailure` for failures, @@ -176,14 +176,14 @@ object Pickler { /** A converter from binary functions to functions over `~`-pairs */ - implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) } + // implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) } /** An converter from unctions returning Options over pair to functions returning `~`-pairs * The converted function will raise a `MatchError` where the original function returned * a `None`. This converter is useful for turning `unapply` methods of case classes * into wrapper methods that can be passed as second argument to `wrap`. */ - implicit def toTilde[T1, T2, S](f: S => Option[(T1, T2)]): S => T1 ~ T2 = { x => (f(x): @unchecked) match { case Some((x1, x2)) => x1 ~ x2 } } + // implicit def toTilde[T1, T2, S](f: S => Option[(T1, T2)]): S => T1 ~ T2 = { x => (f(x): @unchecked) match { case Some((x1, x2)) => x1 ~ x2 } } /** Same as `p.labelled(label)`. 
*/ @@ -250,13 +250,13 @@ object Pickler { /** Same as `p.orNull` */ - def nullablePickler[T](p: Pickler[T])(implicit fromNull: Null <:< T): Pickler[T] = new Pickler[T] { - def pickle(wr: Writer, x: T) = - if (x == null) wr.write("null") else p.pickle(wr, x) - def unpickle(rd: Lexer): Unpickled[T] = - if (rd.token == NullLit) nextSuccess(rd, fromNull(null)) - else p.unpickle(rd) - } + // def nullablePickler[T](p: Pickler[T])(implicit fromNull: Null <:< T): Pickler[T] = new Pickler[T] { + // def pickle(wr: Writer, x: T) = + // if (x == null) wr.write("null") else p.pickle(wr, x) + // def unpickle(rd: Lexer): Unpickled[T] = + // if (rd.token == NullLit) nextSuccess(rd, fromNull(null)) + // else p.unpickle(rd) + // } /** A conditional pickler for singleton objects. It represents these * with the object's underlying class as a label. @@ -330,20 +330,20 @@ object Pickler { tokenPickler("integer literal") { case IntLit(s) => s.toLong } /** A pickler for values of type `Double`, represented as floating point literals */ - implicit val doublePickler: Pickler[Double] = - tokenPickler("floating point literal") { case FloatLit(s) => s.toDouble } + // implicit val doublePickler: Pickler[Double] = + // tokenPickler("floating point literal") { case FloatLit(s) => s.toDouble } /** A pickler for values of type `Byte`, represented as integer literals */ - implicit val bytePickler: Pickler[Byte] = longPickler.wrapped { _.toByte } { _.toLong } + // implicit val bytePickler: Pickler[Byte] = longPickler.wrapped { _.toByte } { _.toLong } /** A pickler for values of type `Short`, represented as integer literals */ - implicit val shortPickler: Pickler[Short] = longPickler.wrapped { _.toShort } { _.toLong } + // implicit val shortPickler: Pickler[Short] = longPickler.wrapped { _.toShort } { _.toLong } /** A pickler for values of type `Int`, represented as integer literals */ implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong } /** A pickler for values of type `Float`, represented as floating point literals */ - implicit val floatPickler: Pickler[Float] = doublePickler.wrapped { _.toFloat } { _.toLong } + // implicit val floatPickler: Pickler[Float] = doublePickler.wrapped { _.toFloat } { _.toLong } /** A conditional pickler for the boolean value `true` */ private val truePickler = @@ -373,15 +373,15 @@ object Pickler { } /** A pickler for values of type `Char`, represented as string literals of length 1 */ - implicit val charPickler: Pickler[Char] = - stringPickler - .wrapped { s => require(s.length == 1, "single character string literal expected, but "+quoted(s)+" found"); s(0) } { _.toString } + // implicit val charPickler: Pickler[Char] = + // stringPickler + // .wrapped { s => require(s.length == 1, "single character string literal expected, but "+quoted(s)+" found"); s(0) } { _.toString } /** A pickler for pairs, represented as `~`-pairs */ - implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] = - (pkl[T1] ~ pkl[T2]) - .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 } - .labelled ("tuple2") + // implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] = + // (pkl[T1] ~ pkl[T2]) + // .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 } + // .labelled ("tuple2") /** A pickler for 3-tuples, represented as `~`-tuples */ implicit def tuple3Pickler[T1, T2, T3](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3]): Pickler[(T1, T2, T3)] = @@ -390,34 +390,34 @@ object Pickler { .labelled ("tuple3") /** A pickler for 
4-tuples, represented as `~`-tuples */ - implicit def tuple4Pickler[T1, T2, T3, T4](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3], p4: Pickler[T4]): Pickler[(T1, T2, T3, T4)] = - (p1 ~ p2 ~ p3 ~ p4) - .wrapped { case x1 ~ x2 ~ x3 ~ x4 => (x1, x2, x3, x4) } { case (x1, x2, x3, x4) => x1 ~ x2 ~ x3 ~ x4 } - .labelled ("tuple4") + // implicit def tuple4Pickler[T1, T2, T3, T4](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3], p4: Pickler[T4]): Pickler[(T1, T2, T3, T4)] = + // (p1 ~ p2 ~ p3 ~ p4) + // .wrapped { case x1 ~ x2 ~ x3 ~ x4 => (x1, x2, x3, x4) } { case (x1, x2, x3, x4) => x1 ~ x2 ~ x3 ~ x4 } + // .labelled ("tuple4") /** A conditional pickler for the `scala.None` object */ - implicit val nonePickler = singletonPickler(None) + // implicit val nonePickler = singletonPickler(None) /** A conditional pickler for instances of class `scala.Some` */ - implicit def somePickler[T: Pickler]: CondPickler[Some[T]] = - pkl[T] - .wrapped { Some(_) } { _.get } - .asClass (classOf[Some[T]]) + // implicit def somePickler[T: Pickler]: CondPickler[Some[T]] = + // pkl[T] + // .wrapped { Some(_) } { _.get } + // .asClass (classOf[Some[T]]) /** A pickler for optional values */ - implicit def optionPickler[T: Pickler]: Pickler[Option[T]] = nonePickler | somePickler[T] + // implicit def optionPickler[T: Pickler]: Pickler[Option[T]] = nonePickler | somePickler[T] /** A pickler for list values */ implicit def listPickler[T: Pickler]: Pickler[List[T]] = iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List") /** A pickler for vector values */ - implicit def vectorPickler[T: Pickler]: Pickler[Vector[T]] = - iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector") + // implicit def vectorPickler[T: Pickler]: Pickler[Vector[T]] = + // iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector") /** A pickler for array values */ - implicit def array[T : ClassTag : Pickler]: Pickler[Array[T]] = - iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array") + // implicit def array[T : ClassTag : Pickler]: Pickler[Array[T]] = + // iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array") } /** A subclass of Pickler can indicate whether a particular value can be pickled by instances diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala index e766c1b2fd..cb264a671c 100644 --- a/src/compiler/scala/tools/nsc/io/Socket.scala +++ b/src/compiler/scala/tools/nsc/io/Socket.scala @@ -28,13 +28,13 @@ object Socket { private val optHandler = handlerFn[Option[T]](_ => None) private val eitherHandler = handlerFn[Either[Throwable, T]](x => Left(x)) - def getOrElse[T1 >: T](alt: T1): T1 = opt getOrElse alt + // def getOrElse[T1 >: T](alt: T1): T1 = opt getOrElse alt def either: Either[Throwable, T] = try Right(f()) catch eitherHandler def opt: Option[T] = try Some(f()) catch optHandler } - def newIPv4Server(port: Int = 0) = new Box(() => preferringIPv4(new ServerSocket(0))) - def newServer(port: Int = 0) = new Box(() => new ServerSocket(0)) + // def newIPv4Server(port: Int = 0) = new Box(() => preferringIPv4(new ServerSocket(0))) + // def newServer(port: Int = 0) = new Box(() => new ServerSocket(0)) def localhost(port: Int) = apply(InetAddress.getLocalHost(), port) def apply(host: InetAddress, port: Int) = new Box(() => new Socket(new JSocket(host, port))) def apply(host: String, port: Int) = new Box(() => new Socket(new JSocket(host, port))) @@ -62,4 
+62,4 @@ class Socket(jsocket: JSocket) extends Streamable.Bytes with Closeable { out.close() } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala index af745eb3e8..f6759be5eb 100644 --- a/src/compiler/scala/tools/nsc/io/SourceReader.scala +++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala @@ -34,7 +34,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) { } /** Reads the file with the specified name. */ - def read(filename: String): Array[Char]= read(new JFile(filename)) + // def read(filename: String): Array[Char]= read(new JFile(filename)) /** Reads the specified file. */ def read(file: JFile): Array[Char] = { diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala index c29a7c96df..e9fb8a6d98 100644 --- a/src/compiler/scala/tools/nsc/io/package.scala +++ b/src/compiler/scala/tools/nsc/io/package.scala @@ -20,14 +20,14 @@ package object io { type Path = scala.reflect.io.Path val Path = scala.reflect.io.Path type PlainFile = scala.reflect.io.PlainFile - val PlainFile = scala.reflect.io.PlainFile + // val PlainFile = scala.reflect.io.PlainFile val Streamable = scala.reflect.io.Streamable type VirtualDirectory = scala.reflect.io.VirtualDirectory type VirtualFile = scala.reflect.io.VirtualFile - val ZipArchive = scala.reflect.io.ZipArchive + // val ZipArchive = scala.reflect.io.ZipArchive type ZipArchive = scala.reflect.io.ZipArchive - implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning + // implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning type JManifest = java.util.jar.Manifest type JFile = java.io.File @@ -38,10 +38,10 @@ package object io { def runnable(body: => Unit): Runnable = new Runnable { override def run() = body } def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body } def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body) - def submit(runnable: Runnable) = daemonThreadPool submit runnable + // def submit(runnable: Runnable) = daemonThreadPool submit runnable // Create, start, and return a daemon thread - def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body) + // def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body) def newThread(f: Thread => Unit)(body: => Unit): Thread = { val thread = new Thread(runnable(body)) f(thread) @@ -50,11 +50,11 @@ package object io { } // Set a timer to execute the given code. 
- def timer(seconds: Int)(body: => Unit): Timer = { - val alarm = new Timer(true) // daemon - val tt = new TimerTask { def run() = body } + // def timer(seconds: Int)(body: => Unit): Timer = { + // val alarm = new Timer(true) // daemon + // val tt = new TimerTask { def run() = body } - alarm.schedule(tt, seconds * 1000) - alarm - } + // alarm.schedule(tt, seconds * 1000) + // alarm + // } } diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 2f6c13dd67..63f08c42ec 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -35,7 +35,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { abstract class JavaParser extends ParserCommon { val in: JavaScanner - protected def posToReport: Int = in.currentPos + // protected def posToReport: Int = in.currentPos def freshName(prefix : String): Name protected implicit def i2p(offset : Int) : Position private implicit def p2i(pos : Position): Int = if (pos.isDefined) pos.point else -1 @@ -94,11 +94,11 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { if (skipIt) skip() } - def warning(msg: String) : Unit = warning(in.currentPos, msg) + // def warning(msg: String) : Unit = warning(in.currentPos, msg) def errorTypeTree = TypeTree().setType(ErrorType) setPos in.currentPos - def errorTermTree = Literal(Constant(null)) setPos in.currentPos - def errorPatternTree = blankExpr setPos in.currentPos + // def errorTermTree = Literal(Constant(null)) setPos in.currentPos + // def errorPatternTree = blankExpr setPos in.currentPos // --------- tree building ----------------------------- diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index e230585a8b..95a22f847b 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -57,14 +57,14 @@ trait JavaScanners extends ast.parser.ScannersCommon { /** ... */ abstract class AbstractJavaScanner extends AbstractJavaTokenData { - implicit def p2g(pos: Position): ScanPosition + // implicit def p2g(pos: Position): ScanPosition implicit def g2p(pos: ScanPosition): Position /** the last error position */ - var errpos: ScanPosition - var lastPos: ScanPosition - def skipToken: ScanPosition + // var errpos: ScanPosition + // var lastPos: ScanPosition + // def skipToken: ScanPosition def nextToken(): Unit def next: AbstractJavaTokenData def intVal(negated: Boolean): Long @@ -73,7 +73,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { def floatVal: Double = floatVal(false) //def token2string(token : Int) : String = configuration.token2string(token) /** return recent scala doc, if any */ - def flushDoc: DocComment + // def flushDoc: DocComment def currentPos: Position } @@ -227,16 +227,16 @@ trait JavaScanners extends ast.parser.ScannersCommon { abstract class JavaScanner extends AbstractJavaScanner with JavaTokenData with Cloneable with ScannerCommon { override def intVal = super.intVal// todo: needed? 
override def floatVal = super.floatVal - override var errpos: Int = NoPos + // override var errpos: Int = NoPos def currentPos: Position = g2p(pos - 1) var in: JavaCharArrayReader = _ - def dup: JavaScanner = { - val dup = clone().asInstanceOf[JavaScanner] - dup.in = in.dup - dup - } + // def dup: JavaScanner = { + // val dup = clone().asInstanceOf[JavaScanner] + // dup.in = in.dup + // dup + // } /** character buffer for literals */ @@ -256,11 +256,11 @@ trait JavaScanners extends ast.parser.ScannersCommon { */ var docBuffer: StringBuilder = null - def flushDoc: DocComment = { - val ret = if (docBuffer != null) DocComment(docBuffer.toString, NoPosition) else null - docBuffer = null - ret - } + // def flushDoc: DocComment = { + // val ret = if (docBuffer != null) DocComment(docBuffer.toString, NoPosition) else null + // docBuffer = null + // ret + // } /** add the given character to the documentation buffer */ @@ -279,10 +279,10 @@ trait JavaScanners extends ast.parser.ScannersCommon { /** read next token and return last position */ - def skipToken: Int = { - val p = pos; nextToken - p - 1 - } + // def skipToken: Int = { + // val p = pos; nextToken + // p - 1 + // } def nextToken() { if (next.token == EMPTY) { @@ -868,7 +868,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { def syntaxError(pos: Int, msg: String) { error(pos, msg) token = ERROR - errpos = pos + // errpos = pos } /** generate an error at the current token position @@ -879,7 +879,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { def incompleteInputError(msg: String) { incompleteInputError(pos, msg) token = EOF - errpos = pos + // errpos = pos } override def toString() = token match { @@ -918,11 +918,11 @@ trait JavaScanners extends ast.parser.ScannersCommon { class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner { in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError) init - def warning(pos: Int, msg: String) = unit.warning(pos, msg) + // def warning(pos: Int, msg: String) = unit.warning(pos, msg) def error (pos: Int, msg: String) = unit. 
error(pos, msg) def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg) def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg) - implicit def p2g(pos: Position): Int = if (pos.isDefined) pos.point else -1 + // implicit def p2g(pos: Position): Int = if (pos.isDefined) pos.point else -1 implicit def g2p(pos: Int): Position = new OffsetPosition(unit.source, pos) } } diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala index a562de291d..90f73ec44a 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala @@ -13,8 +13,8 @@ object JavaTokens extends ast.parser.Tokens { /** identifiers */ final val IDENTIFIER = 10 - def isIdentifier(code : Int) = - code == IDENTIFIER + // def isIdentifier(code : Int) = + // code == IDENTIFIER /** keywords */ final val ABSTRACT = 20 @@ -68,8 +68,8 @@ object JavaTokens extends ast.parser.Tokens { final val VOLATILE = 68 final val WHILE = 69 - def isKeyword(code : Int) = - code >= ABSTRACT && code <= WHILE + // def isKeyword(code : Int) = + // code >= ABSTRACT && code <= WHILE /** special symbols */ final val COMMA = 70 @@ -115,8 +115,8 @@ object JavaTokens extends ast.parser.Tokens { final val GTGTEQ = 113 final val GTGTGTEQ = 114 - def isSymbol(code : Int) = - code >= COMMA && code <= GTGTGTEQ + // def isSymbol(code : Int) = + // code >= COMMA && code <= GTGTGTEQ /** parenthesis */ final val LPAREN = 115 diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala index 5ce1aabcd8..07a79a174b 100644 --- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala +++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala @@ -22,8 +22,8 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => def impossible: Nothing = abort("this never happens") - def treeCollect[T](tree: Tree, pf: PartialFunction[Tree, T]): List[T] = - tree filter (pf isDefinedAt _) map (x => pf(x)) + // def treeCollect[T](tree: Tree, pf: PartialFunction[Tree, T]): List[T] = + // tree filter (pf isDefinedAt _) map (x => pf(x)) object Types { import definitions._ @@ -36,24 +36,24 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => // These tests for final classes can inspect the typeSymbol private def is(s: Symbol) = tpe.typeSymbol eq s - def isByte = is(ByteClass) - def isShort = is(ShortClass) + // def isByte = is(ByteClass) + // def isShort = is(ShortClass) def isInt = is(IntClass) - def isChar = is(CharClass) - def isBoolean = is(BooleanClass) + // def isChar = is(CharClass) + // def isBoolean = is(BooleanClass) def isNothing = is(NothingClass) - def isArray = is(ArrayClass) + // def isArray = is(ArrayClass) } } object Debug { - def typeToString(t: Type): String = t match { - case NoType => "x" - case x => x.toString - } - def symbolToString(s: Symbol): String = s match { - case x => x.toString - } + // def typeToString(t: Type): String = t match { + // case NoType => "x" + // case x => x.toString + // } + // def symbolToString(s: Symbol): String = s match { + // case x => x.toString + // } def treeToString(t: Tree): String = treeInfo.unbind(t) match { case EmptyTree => "?" 
case WILD() => "_" @@ -66,10 +66,10 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => // Formatting for some error messages private val NPAD = 15 def pad(s: String): String = "%%%ds" format (NPAD-1) format s - def pad(s: Any): String = pad(s match { - case x: Tree => treeToString(x) - case x => x.toString - }) + // def pad(s: Any): String = pad(s match { + // case x: Tree => treeToString(x) + // case x => x.toString + // }) // pretty print for debugging def pp(x: Any): String = pp(x, false) @@ -117,7 +117,7 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => else x } - def indent(s: Any) = s.toString() split "\n" map (" " + _) mkString "\n" + // def indent(s: Any) = s.toString() split "\n" map (" " + _) mkString "\n" def indentAll(s: Seq[Any]) = s map (" " + _.toString() + "\n") mkString } diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala index d2f5a98411..44387b59fb 100644 --- a/src/compiler/scala/tools/nsc/matching/Matrix.scala +++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala @@ -140,7 +140,7 @@ trait Matrix extends MatrixAdditions { cases: List[CaseDef], default: Tree ) { - def tvars = roots map (_.lhs) + // def tvars = roots map (_.lhs) def valDefs = roots map (_.valDef) override def toString() = "MatrixInit(roots = %s, %d cases)".format(pp(roots), cases.size) } @@ -153,25 +153,25 @@ trait Matrix extends MatrixAdditions { def apply(xs: List[PatternVar]) = new PatternVarGroup(xs) // XXX - transitional - def fromBindings(vlist: List[Binding], freeVars: List[Symbol] = Nil) = { - def vmap(v: Symbol): Option[Binding] = vlist find (_.pvar eq v) - val info = - if (freeVars.isEmpty) vlist - else (freeVars map vmap).flatten - - val xs = - for (Binding(lhs, rhs) <- info) yield - new PatternVar(lhs, Ident(rhs) setType lhs.tpe, !(rhs hasFlag NO_EXHAUSTIVE)) - - new PatternVarGroup(xs) - } + // def fromBindings(vlist: List[Binding], freeVars: List[Symbol] = Nil) = { + // def vmap(v: Symbol): Option[Binding] = vlist find (_.pvar eq v) + // val info = + // if (freeVars.isEmpty) vlist + // else (freeVars map vmap).flatten + + // val xs = + // for (Binding(lhs, rhs) <- info) yield + // new PatternVar(lhs, Ident(rhs) setType lhs.tpe, !(rhs hasFlag NO_EXHAUSTIVE)) + + // new PatternVarGroup(xs) + // } } val emptyPatternVarGroup = PatternVarGroup() class PatternVarGroup(val pvs: List[PatternVar]) { def syms = pvs map (_.sym) def valDefs = pvs map (_.valDef) - def idents = pvs map (_.ident) + // def idents = pvs map (_.ident) def extractIndex(index: Int): (PatternVar, PatternVarGroup) = { val (t, ts) = self.extractIndex(pvs, index) @@ -180,16 +180,16 @@ trait Matrix extends MatrixAdditions { def isEmpty = pvs.isEmpty def size = pvs.size - def head = pvs.head - def ::(t: PatternVar) = PatternVarGroup(t :: pvs) + // def head = pvs.head + // def ::(t: PatternVar) = PatternVarGroup(t :: pvs) def :::(ts: List[PatternVar]) = PatternVarGroup(ts ::: pvs) - def ++(other: PatternVarGroup) = PatternVarGroup(pvs ::: other.pvs) + // def ++(other: PatternVarGroup) = PatternVarGroup(pvs ::: other.pvs) def apply(i: Int) = pvs(i) def zipWithIndex = pvs.zipWithIndex def indices = pvs.indices - def map[T](f: PatternVar => T) = pvs map f - def filter(p: PatternVar => Boolean) = PatternVarGroup(pvs filter p) + // def map[T](f: PatternVar => T) = pvs map f + // def filter(p: PatternVar => Boolean) = PatternVarGroup(pvs filter p) override def toString() = pp(pvs) } @@ -237,12 +237,12 @@ trait Matrix extends 
MatrixAdditions { tracing("create")(new PatternVar(lhs, rhs, checked)) } - def createLazy(tpe: Type, f: Symbol => Tree, checked: Boolean) = { - val lhs = newVar(owner.pos, tpe, Flags.LAZY :: flags(checked)) - val rhs = f(lhs) + // def createLazy(tpe: Type, f: Symbol => Tree, checked: Boolean) = { + // val lhs = newVar(owner.pos, tpe, Flags.LAZY :: flags(checked)) + // val rhs = f(lhs) - tracing("createLazy")(new PatternVar(lhs, rhs, checked)) - } + // tracing("createLazy")(new PatternVar(lhs, rhs, checked)) + // } private def newVar( pos: Position, diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala index ea4d9cd3f4..b5e25f3809 100644 --- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala +++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala @@ -126,7 +126,7 @@ trait ParallelMatching extends ast.TreeDSL // for propagating "unchecked" to synthetic vars def isChecked = !(sym hasFlag NO_EXHAUSTIVE) - def flags: List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _) + // def flags: List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _) // this is probably where this actually belongs def createVar(tpe: Type, f: Symbol => Tree) = context.createVar(tpe, f, isChecked) @@ -170,7 +170,7 @@ trait ParallelMatching extends ast.TreeDSL case class PatternMatch(scrut: Scrutinee, ps: List[Pattern]) { def head = ps.head def tail = ps.tail - def size = ps.length + // def size = ps.length def headType = head.necessaryType private val dummyCount = if (head.isCaseClass) headType.typeSymbol.caseFieldAccessors.length else 0 @@ -576,7 +576,7 @@ trait ParallelMatching extends ast.TreeDSL (_ys.toList, _ns.toList) } - val moreSpecific = yeses map (_.moreSpecific) + // val moreSpecific = yeses map (_.moreSpecific) val subsumed = yeses map (x => (x.bx, x.subsumed)) val remaining = noes map (x => (x.bx, x.remaining)) diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala index 3ff5ce83bb..1aad24c2d6 100644 --- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala +++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala @@ -61,7 +61,7 @@ trait PatternBindings extends ast.TreeDSL // This is for traversing the pattern tree - pattern types which might have // bound variables beneath them return a list of said patterns for flatMapping. - def subpatternsForVars: List[Pattern] = Nil + // def subpatternsForVars: List[Pattern] = Nil // The outermost Bind(x1, Bind(x2, ...)) surrounding the tree. 
private var _boundTree: Tree = tree diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala index e92c43f1fd..9cb91afb5b 100644 --- a/src/compiler/scala/tools/nsc/matching/Patterns.scala +++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala @@ -33,7 +33,7 @@ trait Patterns extends ast.TreeDSL { def NoPattern = WildcardPattern() // The constant null pattern - def NullPattern = LiteralPattern(NULL) + // def NullPattern = LiteralPattern(NULL) // The Nil pattern def NilPattern = Pattern(gen.mkNil) @@ -60,7 +60,7 @@ trait Patterns extends ast.TreeDSL { override def covers(sym: Symbol) = newMatchesPattern(sym, tpt.tpe) override def sufficientType = tpt.tpe - override def subpatternsForVars: List[Pattern] = List(Pattern(expr)) + // override def subpatternsForVars: List[Pattern] = List(Pattern(expr)) override def simplify(pv: PatternVar) = Pattern(expr) match { case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr case _ => this @@ -140,10 +140,10 @@ trait Patterns extends ast.TreeDSL { require(fn.isType && this.isCaseClass, "tree: " + tree + " fn: " + fn) def name = tpe.typeSymbol.name def cleanName = tpe.typeSymbol.decodedName - def hasPrefix = tpe.prefix.prefixString != "" - def prefixedName = - if (hasPrefix) "%s.%s".format(tpe.prefix.prefixString, cleanName) - else cleanName + // def hasPrefix = tpe.prefix.prefixString != "" + // def prefixedName = + // if (hasPrefix) "%s.%s".format(tpe.prefix.prefixString, cleanName) + // else cleanName private def isColonColon = cleanName == "::" @@ -222,15 +222,15 @@ trait Patterns extends ast.TreeDSL { // 8.1.8 (b) (literal ArrayValues) case class SequencePattern(tree: ArrayValue) extends Pattern with SequenceLikePattern { - lazy val ArrayValue(elemtpt, elems) = tree + lazy val ArrayValue(_, elems) = tree - override def subpatternsForVars: List[Pattern] = elemPatterns + // override def subpatternsForVars: List[Pattern] = elemPatterns override def description = "Seq(%s)".format(elemPatterns mkString ", ") } // 8.1.8 (c) case class StarPattern(tree: Star) extends Pattern { - lazy val Star(elem) = tree + // lazy val Star(_) = tree override def description = "_*" } // XXX temporary? 
@@ -389,10 +389,10 @@ trait Patterns extends ast.TreeDSL { // fn.tpe.finalResultType.typeSymbol == SomeClass override def necessaryType = arg.tpe - override def subpatternsForVars = args match { - case List(ArrayValue(elemtpe, elems)) => toPats(elems) - case _ => toPats(args) - } + // override def subpatternsForVars = args match { + // case List(ArrayValue(elemtpe, elems)) => toPats(elems) + // case _ => toPats(args) + // } def resTypes = analyzer.unapplyTypeList(unfn.symbol, unfn.tpe, args.length) def resTypesString = resTypes match { @@ -403,13 +403,13 @@ trait Patterns extends ast.TreeDSL { sealed trait ApplyPattern extends Pattern { lazy val Apply(fn, args) = tree - override def subpatternsForVars: List[Pattern] = toPats(args) + // override def subpatternsForVars: List[Pattern] = toPats(args) - override def dummies = - if (!this.isCaseClass) Nil - else emptyPatterns(sufficientType.typeSymbol.caseFieldAccessors.size) + // override def dummies = + // if (!this.isCaseClass) Nil + // else emptyPatterns(sufficientType.typeSymbol.caseFieldAccessors.size) - def isConstructorPattern = fn.isType + // def isConstructorPattern = fn.isType override def covers(sym: Symbol) = newMatchesPattern(sym, fn.tpe) } @@ -420,7 +420,7 @@ trait Patterns extends ast.TreeDSL { def simplify(pv: PatternVar): Pattern = this // the right number of dummies for this pattern - def dummies: List[Pattern] = Nil + // def dummies: List[Pattern] = Nil // Is this a default pattern (untyped "_" or an EmptyTree inserted by the matcher) def isDefault = false @@ -454,10 +454,10 @@ trait Patterns extends ast.TreeDSL { def hasStar = false - def setType(tpe: Type): this.type = { - tree setType tpe - this - } + // def setType(tpe: Type): this.type = { + // tree setType tpe + // this + // } def equalsCheck = tracing("equalsCheck")( @@ -475,7 +475,7 @@ trait Patterns extends ast.TreeDSL { final override def toString = description - def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType) + // def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType) def kindString = "" } diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala index e965370713..7b77613e2a 100644 --- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala @@ -47,7 +47,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { } }) - implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered + // implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered trait AbsSetting extends Ordered[Setting] with AbsSettingValue { def name: String @@ -84,12 +84,12 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { } /** If the appearance of the setting should halt argument processing. */ - private var isTerminatorSetting = false - def shouldStopProcessing = isTerminatorSetting - def stopProcessing(): this.type = { - isTerminatorSetting = true - this - } + // private var isTerminatorSetting = false + // def shouldStopProcessing = isTerminatorSetting + // def stopProcessing(): this.type = { + // isTerminatorSetting = true + // this + // } /** Issue error and return */ def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x } @@ -111,7 +111,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { /** Attempt to set from a properties file style property value. * Currently used by Eclipse SDT only. 
*/ - def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil) + def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil) // used in ide? /** These categorizations are so the help output shows -X and -P among * the standard options and -Y among the advanced options. diff --git a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala index 0bec113743..49b89392b9 100644 --- a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala @@ -1,77 +1,77 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ +// /* NSC -- new Scala compiler +// * Copyright 2005-2013 LAMP/EPFL +// * @author Paul Phillips +// */ -package scala.tools.nsc -package settings +// package scala.tools.nsc +// package settings -trait AdvancedScalaSettings { - self: AbsScalaSettings => +// trait AdvancedScalaSettings { +// self: AbsScalaSettings => - abstract class X extends SettingGroup("-X") { - val assemextdirs: StringSetting - val assemname: StringSetting - val assempath: StringSetting - val checkinit: BooleanSetting - val disableassertions: BooleanSetting - val elidebelow: IntSetting - val experimental: BooleanSetting - val future: BooleanSetting - val generatephasegraph: StringSetting - val logimplicits: BooleanSetting - val mainClass: StringSetting - val migration: BooleanSetting - val noforwarders: BooleanSetting - val nojline: BooleanSetting - val nouescape: BooleanSetting - val plugin: MultiStringSetting - val plugindisable: MultiStringSetting - val pluginlist: BooleanSetting - val pluginrequire: MultiStringSetting - val pluginsdir: StringSetting - val print: PhasesSetting - val printicode: BooleanSetting - val printpos: BooleanSetting - val printtypes: BooleanSetting - val prompt: BooleanSetting - val resident: BooleanSetting - val script: StringSetting - val showclass: StringSetting - val showobject: StringSetting - val showphases: BooleanSetting - val sourcedir: StringSetting - val sourcereader: StringSetting - } - // def Xexperimental = X.experimental - // def Xmigration28 = X.migration - // def Xnojline = X.nojline - // def Xprint = X.print - // def Xprintpos = X.printpos - // def Xshowcls = X.showclass - // def Xshowobj = X.showobject - // def assemextdirs = X.assemextdirs - // def assemname = X.assemname - // def assemrefs = X.assempath - // def checkInit = X.checkinit - // def disable = X.plugindisable - // def elideLevel = X.elidelevel - // def future = X.future - // def genPhaseGraph = X.generatephasegraph - // def logimplicits = X.logimplicits - // def noForwarders = X.noforwarders - // def noassertions = X.disableassertions - // def nouescape = X.nouescape - // def plugin = X.plugin - // def pluginsDir = X.pluginsdir - // def printtypes = X.printtypes - // def prompt = X.prompt - // def require = X.require - // def resident = X.resident - // def script = X.script - // def showPhases = X.showphases - // def showPlugins = X.pluginlist - // def sourceReader = X.sourcereader - // def sourcedir = X.sourcedir - // def writeICode = X.printicode -} \ No newline at end of file +// abstract class X extends SettingGroup("-X") { +// val assemextdirs: StringSetting +// val assemname: StringSetting +// val assempath: StringSetting +// val checkinit: BooleanSetting +// val disableassertions: BooleanSetting +// val elidebelow: IntSetting +// val experimental: BooleanSetting +// val future: 
BooleanSetting +// val generatephasegraph: StringSetting +// val logimplicits: BooleanSetting +// val mainClass: StringSetting +// val migration: BooleanSetting +// val noforwarders: BooleanSetting +// val nojline: BooleanSetting +// val nouescape: BooleanSetting +// val plugin: MultiStringSetting +// val plugindisable: MultiStringSetting +// val pluginlist: BooleanSetting +// val pluginrequire: MultiStringSetting +// val pluginsdir: StringSetting +// val print: PhasesSetting +// val printicode: BooleanSetting +// val printpos: BooleanSetting +// val printtypes: BooleanSetting +// val prompt: BooleanSetting +// val resident: BooleanSetting +// val script: StringSetting +// val showclass: StringSetting +// val showobject: StringSetting +// val showphases: BooleanSetting +// val sourcedir: StringSetting +// val sourcereader: StringSetting +// } +// // def Xexperimental = X.experimental +// // def Xmigration28 = X.migration +// // def Xnojline = X.nojline +// // def Xprint = X.print +// // def Xprintpos = X.printpos +// // def Xshowcls = X.showclass +// // def Xshowobj = X.showobject +// // def assemextdirs = X.assemextdirs +// // def assemname = X.assemname +// // def assemrefs = X.assempath +// // def checkInit = X.checkinit +// // def disable = X.plugindisable +// // def elideLevel = X.elidelevel +// // def future = X.future +// // def genPhaseGraph = X.generatephasegraph +// // def logimplicits = X.logimplicits +// // def noForwarders = X.noforwarders +// // def noassertions = X.disableassertions +// // def nouescape = X.nouescape +// // def plugin = X.plugin +// // def pluginsDir = X.pluginsdir +// // def printtypes = X.printtypes +// // def prompt = X.prompt +// // def require = X.require +// // def resident = X.resident +// // def script = X.script +// // def showPhases = X.showphases +// // def showPlugins = X.pluginlist +// // def sourceReader = X.sourcereader +// // def sourcedir = X.sourcedir +// // def writeICode = X.printicode +// } diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 4f4f0544da..748c6069f0 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -176,7 +176,7 @@ class MutableSettings(val errorFn: String => Unit) * The class loader defining `T` should provide resources `app.class.path` * and `boot.class.path`. These resources should contain the application * and boot classpaths in the same form as would be passed on the command line.*/ - def embeddedDefaults[T: ClassTag]: Unit = + def embeddedDefaults[T: ClassTag]: Unit = // called from sbt and repl embeddedDefaults(classTag[T].runtimeClass.getClassLoader) /** Initializes these settings for embedded use by a class from the given class loader. @@ -239,7 +239,7 @@ class MutableSettings(val errorFn: String => Unit) /** Add a destination directory for sources found under srcdir. * Both directories should exits. */ - def add(srcDir: String, outDir: String): Unit = + def add(srcDir: String, outDir: String): Unit = // used in ide? 
add(checkDir(AbstractFile.getDirectory(srcDir), srcDir), checkDir(AbstractFile.getDirectory(outDir), outDir)) @@ -434,7 +434,7 @@ class MutableSettings(val errorFn: String => Unit) def tryToSet(args: List[String]) = { value = true ; Some(args) } def unparse: List[String] = if (value) List(name) else Nil - override def tryToSetFromPropertyValue(s : String) { + override def tryToSetFromPropertyValue(s : String) { // used from ide value = s.equalsIgnoreCase("true") } } @@ -527,7 +527,7 @@ class MutableSettings(val errorFn: String => Unit) Some(rest) } override def tryToSetColon(args: List[String]) = tryToSet(args) - override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) + override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide def unparse: List[String] = value map (name + ":" + _) withHelpSyntax(name + ":<" + arg + ">") @@ -561,7 +561,7 @@ class MutableSettings(val errorFn: String => Unit) } def unparse: List[String] = if (value == default) Nil else List(name + ":" + value) - override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil) + override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil) // used from ide withHelpSyntax(name + ":<" + helpArg + ">") } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 8dce48ee9a..5074efbd01 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -52,14 +52,14 @@ trait ScalaSettings extends AbsScalaSettings val jvmargs = PrefixSetting("-J", "-J", "Pass directly to the runtime system.") val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.") - val toolcp = PathSetting("-toolcp", "Add to the runner classpath.", "") + /*val toolcp =*/ PathSetting("-toolcp", "Add to the runner classpath.", "") val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.") /** * Standard settings */ // argfiles is only for the help message - val argfiles = BooleanSetting ("@", "A text file containing compiler arguments (options and source files)") + /*val argfiles = */ BooleanSetting ("@", "A text file containing compiler arguments (options and source files)") val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp" val d = OutputSetting (outputDirs, ".") val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.") @@ -114,7 +114,7 @@ trait ScalaSettings extends AbsScalaSettings /** Compatibility stubs for options whose value name did * not previously match the option name. */ - def XO = optimise + // def XO = optimise def debuginfo = g def dependenciesFile = dependencyfile def nowarnings = nowarn @@ -180,12 +180,12 @@ trait ScalaSettings extends AbsScalaSettings val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly() - def stop = stopAfter + // def stop = stopAfter /** Area-specific debug output. 
*/ val Ybuildmanagerdebug = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.") - val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.") + // val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.") val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.") val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.") val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.") diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index e866ad6ae0..53d3557c67 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -52,5 +52,5 @@ trait StandardScalaSettings { /** These are @ and -Dkey=val style settings, which don't * nicely map to identifiers. */ - val argfiles: BooleanSetting // exists only to echo help message, should be done differently + // val argfiles: BooleanSetting // exists only to echo help message, should be done differently } diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index d6d77278ab..d678fc60a8 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -39,9 +39,13 @@ trait Warnings { BooleanSetting("-Xlint", "Enable recommended additional warnings.") withPostSetHook (_ => lintWarnings foreach (_.value = true)) ) - val warnEverything = ( + + /*val warnEverything = */ ( BooleanSetting("-Ywarn-all", "Enable all -Y warnings.") - withPostSetHook (_ => lintWarnings foreach (_.value = true)) + withPostSetHook { _ => + lint.value = true + allWarnings foreach (_.value = true) + } ) // Individual warnings. @@ -57,7 +61,7 @@ trait Warnings { val warnInferAny = BooleanSetting ("-Ywarn-infer-any", "Warn when a type argument is inferred to be `Any`.") // Backward compatibility. 
- def Xwarnfatal = fatalWarnings - def Xchecknull = warnSelectNullable - def Ywarndeadcode = warnDeadCode + def Xwarnfatal = fatalWarnings // used by sbt + // def Xchecknull = warnSelectNullable + // def Ywarndeadcode = warnDeadCode } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala index 427b5bf887..2d44d1e5f1 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala @@ -31,8 +31,8 @@ class AbstractFileReader(val file: AbstractFile) { /** return byte at offset 'pos' */ - @throws(classOf[IndexOutOfBoundsException]) - def byteAt(pos: Int): Byte = buf(pos) + // @throws(classOf[IndexOutOfBoundsException]) + // def byteAt(pos: Int): Byte = buf(pos) /** read a byte */ @@ -45,10 +45,10 @@ class AbstractFileReader(val file: AbstractFile) { /** read some bytes */ - def nextBytes(len: Int): Array[Byte] = { - bp += len - buf.slice(bp - len, bp) - } + // def nextBytes(len: Int): Array[Byte] = { + // bp += len + // buf.slice(bp - len, bp) + // } /** read a character */ diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 67f6c3ec5d..50a455b33f 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1172,16 +1172,16 @@ abstract class ClassfileParser { /** Return the Symbol of the top level class enclosing `name`, * or 'name's symbol if no entry found for `name`. */ - def topLevelClass(name: Name): Symbol = { - val tlName = if (isDefinedAt(name)) { - var entry = this(name) - while (isDefinedAt(entry.outerName)) - entry = this(entry.outerName) - entry.outerName - } else - name - classNameToSymbol(tlName) - } + // def topLevelClass(name: Name): Symbol = { + // val tlName = if (isDefinedAt(name)) { + // var entry = this(name) + // while (isDefinedAt(entry.outerName)) + // entry = this(entry.outerName) + // entry.outerName + // } else + // name + // classNameToSymbol(tlName) + // } /** Return the class symbol for `externalName`. It looks it up in its outer class. * Forces all outer class symbols to be completed. 
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index b7511377cc..b5459ec773 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -632,9 +632,9 @@ abstract class ICodeReader extends ClassfileParser { else instanceCode class LinearCode { - var instrs: ListBuffer[(Int, Instruction)] = new ListBuffer - var jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]() - var locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap() + val instrs: ListBuffer[(Int, Instruction)] = new ListBuffer + val jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]() + val locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap() var containsDUPX = false var containsNEW = false diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 941604b154..324d62b662 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -1002,110 +1002,110 @@ abstract class Pickler extends SubComponent { } /** Print entry for diagnostics */ - def printEntryAtIndex(idx: Int) = printEntry(entries(idx)) - def printEntry(entry: AnyRef) { - def printRef(ref: AnyRef) { - print(index(ref)+ - (if (ref.isInstanceOf[Name]) "("+ref+") " else " ")) - } - def printRefs(refs: List[AnyRef]) { refs foreach printRef } - def printSymInfo(sym: Symbol) { - printRef(sym.name) - printRef(localizedOwner(sym)) - print(flagsToString(sym.flags & PickledFlags)+" ") - if (sym.hasAccessBoundary) printRef(sym.privateWithin) - printRef(sym.info) - } - def printBody(entry: AnyRef) = entry match { - case name: Name => - print((if (name.isTermName) "TERMname " else "TYPEname ")+name) - case NoSymbol => - print("NONEsym") - case sym: Symbol if !isLocal(sym) => - if (sym.isModuleClass) { - print("EXTMODCLASSref "); printRef(sym.name.toTermName) - } else { - print("EXTref "); printRef(sym.name) - } - if (!sym.owner.isRoot) printRef(sym.owner) - case sym: ClassSymbol => - print("CLASSsym ") - printSymInfo(sym) - if (sym.thisSym.tpe != sym.tpe) printRef(sym.typeOfThis) - case sym: TypeSymbol => - print(if (sym.isAbstractType) "TYPEsym " else "ALIASsym ") - printSymInfo(sym) - case sym: TermSymbol => - print(if (sym.isModule) "MODULEsym " else "VALsym ") - printSymInfo(sym) - if (sym.alias != NoSymbol) printRef(sym.alias) - case NoType => - print("NOtpe") - case NoPrefix => - print("NOPREFIXtpe") - case ThisType(sym) => - print("THIStpe "); printRef(sym) - case SingleType(pre, sym) => - print("SINGLEtpe "); printRef(pre); printRef(sym); - case ConstantType(value) => - print("CONSTANTtpe "); printRef(value); - case TypeRef(pre, sym, args) => - print("TYPEREFtpe "); printRef(pre); printRef(sym); printRefs(args); - case TypeBounds(lo, hi) => - print("TYPEBOUNDStpe "); printRef(lo); printRef(hi); - case tp @ RefinedType(parents, decls) => - print("REFINEDtpe "); printRef(tp.typeSymbol); printRefs(parents); - case ClassInfoType(parents, decls, clazz) => - print("CLASSINFOtpe "); printRef(clazz); printRefs(parents); - case mt @ MethodType(formals, restpe) => - print("METHODtpe"); printRef(restpe); printRefs(formals) - case PolyType(tparams, restpe) => - print("POLYtpe "); printRef(restpe); printRefs(tparams); - case ExistentialType(tparams, restpe) => - print("EXISTENTIALtpe "); 
printRef(restpe); printRefs(tparams); - print("||| "+entry) - case c @ Constant(_) => - print("LITERAL ") - if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0)) - else if (c.tag == ByteTag) print("Byte "+c.longValue) - else if (c.tag == ShortTag) print("Short "+c.longValue) - else if (c.tag == CharTag) print("Char "+c.longValue) - else if (c.tag == IntTag) print("Int "+c.longValue) - else if (c.tag == LongTag) print("Long "+c.longValue) - else if (c.tag == FloatTag) print("Float "+c.floatValue) - else if (c.tag == DoubleTag) print("Double "+c.doubleValue) - else if (c.tag == StringTag) { print("String "); printRef(newTermName(c.stringValue)) } - else if (c.tag == ClazzTag) { print("Class "); printRef(c.typeValue) } - else if (c.tag == EnumTag) { print("Enum "); printRef(c.symbolValue) } - case AnnotatedType(annots, tp, selfsym) => - if (settings.selfInAnnots.value) { - print("ANNOTATEDWSELFtpe ") - printRef(tp) - printRef(selfsym) - printRefs(annots) - } else { - print("ANNOTATEDtpe ") - printRef(tp) - printRefs(annots) - } - case (target: Symbol, AnnotationInfo(atp, args, Nil)) => - print("SYMANNOT ") - printRef(target) - printRef(atp) - for (c <- args) printRef(c) - case (target: Symbol, children: List[_]) => - print("CHILDREN ") - printRef(target) - for (c <- children) printRef(c.asInstanceOf[Symbol]) - case AnnotationInfo(atp, args, Nil) => - print("ANNOTINFO") - printRef(atp) - for (c <- args) printRef(c) - case _ => - throw new FatalError("bad entry: " + entry + " " + entry.getClass) - } - printBody(entry); println() - } + // def printEntryAtIndex(idx: Int) = printEntry(entries(idx)) + // def printEntry(entry: AnyRef) { + // def printRef(ref: AnyRef) { + // print(index(ref)+ + // (if (ref.isInstanceOf[Name]) "("+ref+") " else " ")) + // } + // def printRefs(refs: List[AnyRef]) { refs foreach printRef } + // def printSymInfo(sym: Symbol) { + // printRef(sym.name) + // printRef(localizedOwner(sym)) + // print(flagsToString(sym.flags & PickledFlags)+" ") + // if (sym.hasAccessBoundary) printRef(sym.privateWithin) + // printRef(sym.info) + // } + // def printBody(entry: AnyRef) = entry match { + // case name: Name => + // print((if (name.isTermName) "TERMname " else "TYPEname ")+name) + // case NoSymbol => + // print("NONEsym") + // case sym: Symbol if !isLocal(sym) => + // if (sym.isModuleClass) { + // print("EXTMODCLASSref "); printRef(sym.name.toTermName) + // } else { + // print("EXTref "); printRef(sym.name) + // } + // if (!sym.owner.isRoot) printRef(sym.owner) + // case sym: ClassSymbol => + // print("CLASSsym ") + // printSymInfo(sym) + // if (sym.thisSym.tpe != sym.tpe) printRef(sym.typeOfThis) + // case sym: TypeSymbol => + // print(if (sym.isAbstractType) "TYPEsym " else "ALIASsym ") + // printSymInfo(sym) + // case sym: TermSymbol => + // print(if (sym.isModule) "MODULEsym " else "VALsym ") + // printSymInfo(sym) + // if (sym.alias != NoSymbol) printRef(sym.alias) + // case NoType => + // print("NOtpe") + // case NoPrefix => + // print("NOPREFIXtpe") + // case ThisType(sym) => + // print("THIStpe "); printRef(sym) + // case SingleType(pre, sym) => + // print("SINGLEtpe "); printRef(pre); printRef(sym); + // case ConstantType(value) => + // print("CONSTANTtpe "); printRef(value); + // case TypeRef(pre, sym, args) => + // print("TYPEREFtpe "); printRef(pre); printRef(sym); printRefs(args); + // case TypeBounds(lo, hi) => + // print("TYPEBOUNDStpe "); printRef(lo); printRef(hi); + // case tp @ RefinedType(parents, decls) => + // print("REFINEDtpe "); 
printRef(tp.typeSymbol); printRefs(parents); + // case ClassInfoType(parents, decls, clazz) => + // print("CLASSINFOtpe "); printRef(clazz); printRefs(parents); + // case mt @ MethodType(formals, restpe) => + // print("METHODtpe"); printRef(restpe); printRefs(formals) + // case PolyType(tparams, restpe) => + // print("POLYtpe "); printRef(restpe); printRefs(tparams); + // case ExistentialType(tparams, restpe) => + // print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams); + // print("||| "+entry) + // case c @ Constant(_) => + // print("LITERAL ") + // if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0)) + // else if (c.tag == ByteTag) print("Byte "+c.longValue) + // else if (c.tag == ShortTag) print("Short "+c.longValue) + // else if (c.tag == CharTag) print("Char "+c.longValue) + // else if (c.tag == IntTag) print("Int "+c.longValue) + // else if (c.tag == LongTag) print("Long "+c.longValue) + // else if (c.tag == FloatTag) print("Float "+c.floatValue) + // else if (c.tag == DoubleTag) print("Double "+c.doubleValue) + // else if (c.tag == StringTag) { print("String "); printRef(newTermName(c.stringValue)) } + // else if (c.tag == ClazzTag) { print("Class "); printRef(c.typeValue) } + // else if (c.tag == EnumTag) { print("Enum "); printRef(c.symbolValue) } + // case AnnotatedType(annots, tp, selfsym) => + // if (settings.selfInAnnots.value) { + // print("ANNOTATEDWSELFtpe ") + // printRef(tp) + // printRef(selfsym) + // printRefs(annots) + // } else { + // print("ANNOTATEDtpe ") + // printRef(tp) + // printRefs(annots) + // } + // case (target: Symbol, AnnotationInfo(atp, args, Nil)) => + // print("SYMANNOT ") + // printRef(target) + // printRef(atp) + // for (c <- args) printRef(c) + // case (target: Symbol, children: List[_]) => + // print("CHILDREN ") + // printRef(target) + // for (c <- children) printRef(c.asInstanceOf[Symbol]) + // case AnnotationInfo(atp, args, Nil) => + // print("ANNOTINFO") + // printRef(atp) + // for (c <- args) printRef(c) + // case _ => + // throw new FatalError("bad entry: " + entry + " " + entry.getClass) + // } + // printBody(entry); println() + // } /** Write byte array */ def writeArray() { diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 78fb725041..0fe72c992e 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -187,9 +187,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { /** Returns the generic class that was specialized to 'sClass', or * 'sClass' itself if sClass is not a specialized subclass. */ - def genericClass(sClass: Symbol): Symbol = - if (sClass.isSpecialized) sClass.superClass - else sClass + // def genericClass(sClass: Symbol): Symbol = + // if (sClass.isSpecialized) sClass.superClass + // else sClass case class Overload(sym: Symbol, env: TypeEnv) { override def toString = "specialized overload " + sym + " in " + env @@ -223,7 +223,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { */ def degenerate = false - def isAccessor = false + // def isAccessor = false } /** Symbol is a special overloaded method of 'original', in the environment env. */ @@ -248,7 +248,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { /** Symbol is a specialized accessor for the `target` field. 
*/ case class SpecializedAccessor(target: Symbol) extends SpecializedInfo { - override def isAccessor = true + // override def isAccessor = true } /** Symbol is a specialized method whose body should be the target's method body. */ @@ -288,8 +288,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def specializedParams(sym: Symbol): List[Symbol] = sym.info.typeParams filter (_.isSpecialized) - def splitParams(tps: List[Symbol]) = - tps partition (_.isSpecialized) + // def splitParams(tps: List[Symbol]) = + // tps partition (_.isSpecialized) /** Given an original class symbol and a list of types its type parameters are instantiated at * returns a list of type parameters that should remain in the TypeRef when instantiating a @@ -1185,7 +1185,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * * A conflicting type environment could still be satisfiable. */ - def conflicting(env: TypeEnv) = !nonConflicting(env) + // def conflicting(env: TypeEnv) = !nonConflicting(env) def nonConflicting(env: TypeEnv) = env forall { case (tvar, tpe) => (subst(env, tvar.info.bounds.lo) <:< tpe) && (tpe <:< subst(env, tvar.info.bounds.hi)) } @@ -1866,10 +1866,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - def printSpecStats() { - println(" concreteSpecMembers: %7d".format(concreteSpecMethods.size)) - println(" overloads: %7d".format(overloads.size)) - println(" typeEnv: %7d".format(typeEnv.size)) - println(" info: %7d".format(info.size)) - } + // def printSpecStats() { + // println(" concreteSpecMembers: %7d".format(concreteSpecMethods.size)) + // println(" overloads: %7d".format(overloads.size)) + // println(" typeEnv: %7d".format(typeEnv.size)) + // println(" info: %7d".format(info.size)) + // } } diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index 2e0cc3bd98..798e604be4 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -147,7 +147,7 @@ abstract class TailCalls extends Transform { } def enclosingType = method.enclClass.typeOfThis - def methodTypeParams = method.tpe.typeParams + // def methodTypeParams = method.tpe.typeParams def isEligible = method.isEffectivelyFinal // @tailrec annotation indicates mandatory transformation def isMandatory = method.hasAnnotation(TailrecClass) && !forMSIL diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala index b7da0e0087..f0414b8639 100644 --- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala +++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala @@ -21,7 +21,7 @@ trait TypingTransformers { else analyzer.newTyper(analyzer.rootContext(unit, EmptyTree, true)) protected var curTree: Tree = _ - protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) } + // protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) } override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a8d7de6362..9ea1ff4263 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -219,7 +219,7 @@ trait Contexts { self: 
Analyzer => current } - def logError(err: AbsTypeError) = buffer += err + // def logError(err: AbsTypeError) = buffer += err def withImplicitsEnabled[T](op: => T): T = { val saved = implicitsEnabled @@ -313,13 +313,13 @@ trait Contexts { self: Analyzer => } // TODO: remove? Doesn't seem to be used - def make(unit: CompilationUnit): Context = { - val c = make(unit, EmptyTree, owner, scope, imports) - c.setReportErrors() - c.implicitsEnabled = true - c.macrosEnabled = true - c - } + // def make(unit: CompilationUnit): Context = { + // val c = make(unit, EmptyTree, owner, scope, imports) + // c.setReportErrors() + // c.implicitsEnabled = true + // c.macrosEnabled = true + // c + // } def makeNewImport(sym: Symbol): Context = makeNewImport(gen.mkWildcardImport(sym)) @@ -491,14 +491,14 @@ trait Contexts { self: Analyzer => /** Return closest enclosing context that defines a superclass of `clazz`, or a * companion module of a superclass of `clazz`, or NoContext if none exists */ - def enclosingSuperClassContext(clazz: Symbol): Context = { - var c = this.enclClass - while (c != NoContext && - !clazz.isNonBottomSubClass(c.owner) && - !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.companionClass))) - c = c.outer.enclClass - c - } + // def enclosingSuperClassContext(clazz: Symbol): Context = { + // var c = this.enclClass + // while (c != NoContext && + // !clazz.isNonBottomSubClass(c.owner) && + // !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.companionClass))) + // c = c.outer.enclClass + // c + // } /** Return the closest enclosing context that defines a subclass of `clazz` * or a companion object thereof, or `NoContext` if no such context exists. diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala index 79cd46e018..856043bca9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala +++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala @@ -64,15 +64,15 @@ trait DestructureTypes { }, tree.productPrefix ) - def wrapSymbol(label: String, sym: Symbol): Node = { - if (sym eq NoSymbol) wrapEmpty - else atom(label, sym) - } - def wrapInfo(sym: Symbol) = sym.info match { - case TypeBounds(lo, hi) => typeBounds(lo, hi) - case PolyType(tparams, restpe) => polyFunction(tparams, restpe) - case _ => wrapEmpty - } + // def wrapSymbol(label: String, sym: Symbol): Node = { + // if (sym eq NoSymbol) wrapEmpty + // else atom(label, sym) + // } + // def wrapInfo(sym: Symbol) = sym.info match { + // case TypeBounds(lo, hi) => typeBounds(lo, hi) + // case PolyType(tparams, restpe) => polyFunction(tparams, restpe) + // case _ => wrapEmpty + // } def wrapSymbolInfo(sym: Symbol): Node = { if ((sym eq NoSymbol) || openSymbols(sym)) wrapEmpty else { @@ -95,7 +95,7 @@ trait DestructureTypes { def constant(label: String, const: Constant): Node = atom(label, const) def scope(decls: Scope): Node = node("decls", scopeMemberList(decls.toList)) - def const[T](named: (String, T)): Node = constant(named._1, Constant(named._2)) + // def const[T](named: (String, T)): Node = constant(named._1, Constant(named._2)) def resultType(restpe: Type): Node = this("resultType", restpe) def typeParams(tps: List[Symbol]): Node = node("typeParams", symbolList(tps)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 7d58155eb2..e5e52e91c3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala 
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -19,10 +19,10 @@ abstract class Duplicators extends Analyzer { import global._ import definitions.{ AnyRefClass, AnyValClass } - def retyped(context: Context, tree: Tree): Tree = { - resetClassOwners - (newBodyDuplicator(context)).typed(tree) - } + // def retyped(context: Context, tree: Tree): Tree = { + // resetClassOwners + // (newBodyDuplicator(context)).typed(tree) + // } /** Retype the given tree in the given context. Use this method when retyping * a method in a different class. The typer will replace references to the this of @@ -42,8 +42,8 @@ abstract class Duplicators extends Analyzer { protected def newBodyDuplicator(context: Context) = new BodyDuplicator(context) - def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree = - (newBodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis) + // def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree = + // (newBodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis) /** Return the special typer for duplicate method bodies. */ override def newTyper(context: Context): Typer = @@ -186,19 +186,19 @@ abstract class Duplicators extends Analyzer { stats.foreach(invalidate(_, owner)) } - def retypedMethod(ddef: DefDef, oldThis: Symbol, newThis: Symbol): Tree = { - oldClassOwner = oldThis - newClassOwner = newThis - invalidateAll(ddef.tparams) - mforeach(ddef.vparamss) { vdef => - invalidate(vdef) - vdef.tpe = null - } - ddef.symbol = NoSymbol - enterSym(context, ddef) - debuglog("remapping this of " + oldClassOwner + " to " + newClassOwner) - typed(ddef) - } + // def retypedMethod(ddef: DefDef, oldThis: Symbol, newThis: Symbol): Tree = { + // oldClassOwner = oldThis + // newClassOwner = newThis + // invalidateAll(ddef.tparams) + // mforeach(ddef.vparamss) { vdef => + // invalidate(vdef) + // vdef.tpe = null + // } + // ddef.symbol = NoSymbol + // enterSym(context, ddef) + // debuglog("remapping this of " + oldClassOwner + " to " + newClassOwner) + // typed(ddef) + // } /** Optionally cast this tree into some other type, if required. * Unless overridden, just returns the tree. diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 576a21fe31..c17586335c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -233,10 +233,10 @@ trait Implicits { object HasMember { private val hasMemberCache = perRunCaches.newMap[Name, Type]() def apply(name: Name): Type = hasMemberCache.getOrElseUpdate(name, memberWildcardType(name, WildcardType)) - def unapply(pt: Type): Option[Name] = pt match { - case RefinedType(List(WildcardType), Scope(sym)) if sym.tpe == WildcardType => Some(sym.name) - case _ => None - } + // def unapply(pt: Type): Option[Name] = pt match { + // case RefinedType(List(WildcardType), Scope(sym)) if sym.tpe == WildcardType => Some(sym.name) + // case _ => None + // } } /** An extractor for types of the form ? { name: (? 
>: argtpe <: Any*)restp } @@ -1493,9 +1493,9 @@ object ImplicitsStats { val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount) val findMemberImpl = Statistics.newSubCounter(" of which in implicit", findMemberCount) val subtypeAppInfos = Statistics.newSubCounter(" of which in app impl", subtypeCount) - val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount) + // val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount) val implicitSearchCount = Statistics.newCounter ("#implicit searches", "typer") - val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount) + // val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount) val plausiblyCompatibleImplicits = Statistics.newSubCounter(" #plausibly compatible", implicitSearchCount) val matchingImplicits = Statistics.newSubCounter(" #matching", implicitSearchCount) diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 6aafd32237..bd37f055b7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -29,43 +29,43 @@ trait MethodSynthesis { if (sym.isLazy) ValDef(sym, body) else DefDef(sym, body) - def applyTypeInternal(tags: List[TT[_]]): Type = { - val symbols = tags map compilerSymbolFromTag - val container :: args = symbols - val tparams = container.typeConstructor.typeParams + // def applyTypeInternal(tags: List[TT[_]]): Type = { + // val symbols = tags map compilerSymbolFromTag + // val container :: args = symbols + // val tparams = container.typeConstructor.typeParams - // Conservative at present - if manifests were more usable this could do a lot more. - // [Eugene to Paul] all right, they are now. what do you have in mind? - require(symbols forall (_ ne NoSymbol), "Must find all tags: " + symbols) - require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container) - require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args) + // // Conservative at present - if manifests were more usable this could do a lot more. + // // [Eugene to Paul] all right, they are now. what do you have in mind? 
+ // require(symbols forall (_ ne NoSymbol), "Must find all tags: " + symbols) + // require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container) + // require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args) - appliedType(container, args map (_.tpe): _*) - } + // appliedType(container, args map (_.tpe): _*) + // } - def companionType[T](implicit ct: CT[T]) = - rootMirror.getRequiredModule(ct.runtimeClass.getName).tpe + // def companionType[T](implicit ct: CT[T]) = + // rootMirror.getRequiredModule(ct.runtimeClass.getName).tpe // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]` - def applyType[CC](implicit t1: TT[CC]): Type = - applyTypeInternal(List(t1)) + // def applyType[CC](implicit t1: TT[CC]): Type = + // applyTypeInternal(List(t1)) - def applyType[CC[X1], X1](implicit t1: TT[CC[_]], t2: TT[X1]): Type = - applyTypeInternal(List(t1, t2)) + // def applyType[CC[X1], X1](implicit t1: TT[CC[_]], t2: TT[X1]): Type = + // applyTypeInternal(List(t1, t2)) - def applyType[CC[X1, X2], X1, X2](implicit t1: TT[CC[_,_]], t2: TT[X1], t3: TT[X2]): Type = - applyTypeInternal(List(t1, t2, t3)) + // def applyType[CC[X1, X2], X1, X2](implicit t1: TT[CC[_,_]], t2: TT[X1], t3: TT[X2]): Type = + // applyTypeInternal(List(t1, t2, t3)) - def applyType[CC[X1, X2, X3], X1, X2, X3](implicit t1: TT[CC[_,_,_]], t2: TT[X1], t3: TT[X2], t4: TT[X3]): Type = - applyTypeInternal(List(t1, t2, t3, t4)) + // def applyType[CC[X1, X2, X3], X1, X2, X3](implicit t1: TT[CC[_,_,_]], t2: TT[X1], t3: TT[X2], t4: TT[X3]): Type = + // applyTypeInternal(List(t1, t2, t3, t4)) - def newMethodType[F](owner: Symbol)(implicit t: TT[F]): Type = { - val fnSymbol = compilerSymbolFromTag(t) - val formals = compilerTypeFromTag(t).typeArguments - assert(fnSymbol isSubClass FunctionClass(formals.size - 1), (owner, t)) - val params = owner newSyntheticValueParams formals - MethodType(params, formals.last) - } + // def newMethodType[F](owner: Symbol)(implicit t: TT[F]): Type = { + // val fnSymbol = compilerSymbolFromTag(t) + // val formals = compilerTypeFromTag(t).typeArguments + // assert(fnSymbol isSubClass FunctionClass(formals.size - 1), (owner, t)) + // val params = owner newSyntheticValueParams formals + // MethodType(params, formals.last) + // } /** The annotations amongst those found on the original symbol which * should be propagated to this kind of accessor. @@ -118,8 +118,8 @@ trait MethodSynthesis { finishMethod(clazz.info.decls enter m, f) } - private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree = - cloneInternal(original, f, original.name) + // private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree = + // cloneInternal(original, f, original.name) def clazzMember(name: Name) = clazz.info nonPrivateMember name def typeInClazz(sym: Symbol) = clazz.thisType memberType sym @@ -128,11 +128,11 @@ trait MethodSynthesis { * the same type as `name` in clazz, and returns the tree to be * added to the template. 
*/ - def overrideMethod(name: Name)(f: Symbol => Tree): Tree = - overrideMethod(clazzMember(name))(f) + // def overrideMethod(name: Name)(f: Symbol => Tree): Tree = + // overrideMethod(clazzMember(name))(f) - def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree = - cloneInternal(original, sym => f(sym setFlag OVERRIDE)) + // def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree = + // cloneInternal(original, sym => f(sym setFlag OVERRIDE)) def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree = cloneInternal(original, f, nameFn(original.name)) @@ -311,7 +311,7 @@ trait MethodSynthesis { // Final methods to make the rest easier to reason about. final def mods = tree.mods final def basisSym = tree.symbol - final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra + // final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra } trait DerivedFromClassDef extends DerivedFromMemberDef { diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 28bed0f1bf..407749f833 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1388,11 +1388,11 @@ trait Namers extends MethodSynthesis { tpe } - def ensureParent(clazz: Symbol, parent: Symbol) = { - val info0 = clazz.info - val info1 = includeParent(info0, parent) - if (info0 ne info1) clazz setInfo info1 - } + // def ensureParent(clazz: Symbol, parent: Symbol) = { + // val info0 = clazz.info + // val info1 = includeParent(info0, parent) + // if (info0 ne info1) clazz setInfo info1 + // } class LogTransitions[S](onEnter: S => String, onExit: S => String) { val enabled = settings.debug.value @@ -1585,12 +1585,12 @@ trait Namers extends MethodSynthesis { } } - @deprecated("Use underlyingSymbol instead", "2.10.0") - def underlying(member: Symbol): Symbol = underlyingSymbol(member) - @deprecated("Use `companionSymbolOf` instead", "2.10.0") - def companionClassOf(module: Symbol, ctx: Context): Symbol = companionSymbolOf(module, ctx) - @deprecated("Use `companionSymbolOf` instead", "2.10.0") - def companionModuleOf(clazz: Symbol, ctx: Context): Symbol = companionSymbolOf(clazz, ctx) + // @deprecated("Use underlyingSymbol instead", "2.10.0") + // def underlying(member: Symbol): Symbol = underlyingSymbol(member) + // @deprecated("Use `companionSymbolOf` instead", "2.10.0") + // def companionClassOf(module: Symbol, ctx: Context): Symbol = companionSymbolOf(module, ctx) + // @deprecated("Use `companionSymbolOf` instead", "2.10.0") + // def companionModuleOf(clazz: Symbol, ctx: Context): Symbol = companionSymbolOf(clazz, ctx) /** The companion class or companion module of `original`. * Calling .companionModule does not work for classes defined inside methods. 
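For context, a minimal sketch (illustrative only, names hypothetical) of the situation the doc comment above refers to: a class whose companion object is defined alongside it inside a method body. For such local definitions the symbol-level companionModule lookup is unreliable, which is why the companionSymbolOf replacement shown in the hunk above takes a Context argument and searches the enclosing scopes instead.

object LocalCompanionSketch {
  def demo(): String = {
    case class Point(x: Int, y: Int)                 // class defined inside a method
    object Point { val origin = new Point(0, 0) }    // companion lives in the same block, not in a class scope
    Point.origin.toString                            // "Point(0,0)"
  }
  def main(args: Array[String]): Unit = println(demo())
}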
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 252a738755..1588380bca 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -41,7 +41,7 @@ trait NamesDefaults { self: Analyzer => blockTyper: Typer ) { } - val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null) + // val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null) def nameOf(arg: Tree) = arg match { case AssignOrNamedArg(Ident(name), rhs) => Some(name) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 7cb420d2dc..27e539abbf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -551,50 +551,50 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // NOTE: it's an apply, not a select, since in general an extractor call may have multiple argument lists (including an implicit one) // that we need to preserve, so we supply the scrutinee as Ident(nme.SELECTOR_DUMMY), // and replace that dummy by a reference to the actual binder in translateExtractorPattern - def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = { - // TODO: can we rework the typer so we don't have to do all this twice? - // undo rewrite performed in (5) of adapt - val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun} - val origSym = orig.symbol - val extractor = unapplyMember(origSym.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe) - - if((fun.tpe eq null) || fun.tpe.isError || (extractor eq NoSymbol)) { - None - } else { - // this is a tricky balance: pos/t602.scala, pos/sudoku.scala, run/virtpatmat_alts.scala must all be happy - // bypass typing at own risk: val extractorCall = Select(orig, extractor) setType caseClassApplyToUnapplyTp(fun.tpe) - // can't always infer type arguments (pos/t602): - /* case class Span[K <: Ordered[K]](low: Option[K]) { - override def equals(x: Any): Boolean = x match { - case Span((low0 @ _)) if low0 equals low => true - } - }*/ - // so... leave undetermined type params floating around if we have to - // (if we don't infer types, uninstantiated type params show up later: pos/sudoku.scala) - // (see also run/virtpatmat_alts.scala) - val savedUndets = context.undetparams - val extractorCall = try { - context.undetparams = Nil - silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { - case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError() - case _ => - // this fails to resolve overloading properly... 
- // Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway - - // patmatDebug("funtpe after = "+ fun.tpe.finalResultType) - // patmatDebug("orig: "+(orig, orig.tpe)) - val tgt = typed(orig, EXPRmode | QUALmode | POLYmode, HasMember(extractor.name)) // can't specify fun.tpe.finalResultType as the type for the extractor's arg, - // as it may have been inferred incorrectly (see t602, where it's com.mosol.sl.Span[Any], instead of com.mosol.sl.Span[?K]) - // patmatDebug("tgt = "+ (tgt, tgt.tpe)) - val oper = typed(Select(tgt, extractor.name), EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType) - // patmatDebug("oper: "+ (oper, oper.tpe)) - Apply(oper, List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway - } - } finally context.undetparams = savedUndets - - Some(this(extractorCall, args)) // TODO: simplify spliceApply? - } - } + // def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = { + // // TODO: can we rework the typer so we don't have to do all this twice? + // // undo rewrite performed in (5) of adapt + // val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun} + // val origSym = orig.symbol + // val extractor = unapplyMember(origSym.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe) + + // if((fun.tpe eq null) || fun.tpe.isError || (extractor eq NoSymbol)) { + // None + // } else { + // // this is a tricky balance: pos/t602.scala, pos/sudoku.scala, run/virtpatmat_alts.scala must all be happy + // // bypass typing at own risk: val extractorCall = Select(orig, extractor) setType caseClassApplyToUnapplyTp(fun.tpe) + // // can't always infer type arguments (pos/t602): + // /* case class Span[K <: Ordered[K]](low: Option[K]) { + // override def equals(x: Any): Boolean = x match { + // case Span((low0 @ _)) if low0 equals low => true + // } + // }*/ + // // so... leave undetermined type params floating around if we have to + // // (if we don't infer types, uninstantiated type params show up later: pos/sudoku.scala) + // // (see also run/virtpatmat_alts.scala) + // val savedUndets = context.undetparams + // val extractorCall = try { + // context.undetparams = Nil + // silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { + // case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError() + // case _ => + // // this fails to resolve overloading properly... 
+ // // Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway + + // // patmatDebug("funtpe after = "+ fun.tpe.finalResultType) + // // patmatDebug("orig: "+(orig, orig.tpe)) + // val tgt = typed(orig, EXPRmode | QUALmode | POLYmode, HasMember(extractor.name)) // can't specify fun.tpe.finalResultType as the type for the extractor's arg, + // // as it may have been inferred incorrectly (see t602, where it's com.mosol.sl.Span[Any], instead of com.mosol.sl.Span[?K]) + // // patmatDebug("tgt = "+ (tgt, tgt.tpe)) + // val oper = typed(Select(tgt, extractor.name), EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType) + // // patmatDebug("oper: "+ (oper, oper.tpe)) + // Apply(oper, List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway + // } + // } finally context.undetparams = savedUndets + + // Some(this(extractorCall, args)) // TODO: simplify spliceApply? + // } + // } } abstract class ExtractorCall(val args: List[Tree]) { @@ -1413,10 +1413,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // local / context-free def _asInstanceOf(b: Symbol, tp: Type): Tree - def _asInstanceOf(t: Tree, tp: Type): Tree + // def _asInstanceOf(t: Tree, tp: Type): Tree def _equals(checker: Tree, binder: Symbol): Tree def _isInstanceOf(b: Symbol, tp: Type): Tree - def and(a: Tree, b: Tree): Tree + // def and(a: Tree, b: Tree): Tree def drop(tgt: Tree)(n: Int): Tree def index(tgt: Tree)(i: Int): Tree def mkZero(tp: Type): Tree @@ -1458,12 +1458,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL abstract class CommonCodegen extends AbsCodegen { import CODE._ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body) - def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree) + // def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree) def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i)) def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n)) def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya - def and(a: Tree, b: Tree): Tree = a AND b + // def and(a: Tree, b: Tree): Tree = a AND b // drop annotations generated by CPS plugin etc, since its annotationchecker rejects T @cps[U] <: Any // let's assume for now annotations don't affect casts, drop them there, and bring them back using the outer Typed tree @@ -1471,7 +1471,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL Typed(gen.mkAsInstanceOf(t, tp.withoutAnnotations, true, false), TypeTree() setType tp) // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly) - def _asInstanceOf(t: Tree, tp: Type): Tree = if (t.tpe != NoType && t.isTyped && typesConform(t.tpe, tp)) t else mkCast(t, tp) + // def _asInstanceOf(t: Tree, tp: Type): Tree = if (t.tpe != NoType && t.isTyped && typesConform(t.tpe, tp)) t else mkCast(t, tp) def _asInstanceOf(b: Symbol, tp: Type): Tree = if 
(typesConform(b.info, tp)) REF(b) else mkCast(REF(b), tp) def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false) // if (typesConform(b.info, tpX)) { patmatDebug("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE } @@ -2879,7 +2879,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment }.mkString("\n") - def modelString(model: Model) = varAssignmentString(modelToVarAssignment(model)) + // def modelString(model: Model) = varAssignmentString(modelToVarAssignment(model)) // return constructor call when the model is a true counter example // (the variables don't take into account type information derived from other variables, @@ -3538,7 +3538,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // for the catch-cases in a try/catch private object typeSwitchMaker extends SwitchMaker { val unchecked = false - def switchableTpe(tp: Type) = true + // def switchableTpe(tp: Type) = true val alternativesSupported = false // TODO: needs either back-end support of flattening of alternatives during typers val canJump = false diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index 710adf5a9c..f1e6e48ccc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -142,13 +142,13 @@ abstract class TreeCheckers extends Analyzer { currentRun.units foreach (x => wrap(x)(check(x))) } - def printingTypings[T](body: => T): T = { - val saved = global.printTypings - global.printTypings = true - val result = body - global.printTypings = saved - result - } + // def printingTypings[T](body: => T): T = { + // val saved = global.printTypings + // global.printTypings = true + // val result = body + // global.printTypings = saved + // result + // } def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = { val unit0 = currentUnit currentRun.currentUnit = unit diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 4f5291507e..ebeb8ef2c8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -41,9 +41,9 @@ trait TypeDiagnostics { * nicer if Symbols, Types, and Trees all implemented some common interface * in which isErroneous and similar would be placed. */ - def noErroneousTypes(tps: Type*) = tps forall (x => !x.isErroneous) - def noErroneousSyms(syms: Symbol*) = syms forall (x => !x.isErroneous) - def noErroneousTrees(trees: Tree*) = trees forall (x => !x.isErroneous) + // def noErroneousTypes(tps: Type*) = tps forall (x => !x.isErroneous) + // def noErroneousSyms(syms: Symbol*) = syms forall (x => !x.isErroneous) + // def noErroneousTrees(trees: Tree*) = trees forall (x => !x.isErroneous) /** For errors which are artifacts of the implementation: such messages * indicate that the restriction may be lifted in the future. 
@@ -294,7 +294,7 @@ trait TypeDiagnostics { // distinguished from the other types in the same error message private val savedName = sym.name def restoreName() = sym.name = savedName - def isAltered = sym.name != savedName + // def isAltered = sym.name != savedName def modifyName(f: String => String) = sym setName newTypeName(f(sym.name.toString)) /** Prepend java.lang, scala., or Predef. if this type originated @@ -478,10 +478,10 @@ trait TypeDiagnostics { } super.traverse(t) } - def isUnused(t: Tree): Boolean = ( - if (t.symbol.isTerm) isUnusedTerm(t.symbol) - else isUnusedType(t.symbol) - ) + // def isUnused(t: Tree): Boolean = ( + // if (t.symbol.isTerm) isUnusedTerm(t.symbol) + // else isUnusedType(t.symbol) + // ) def isUnusedType(m: Symbol): Boolean = ( m.isType && !m.isTypeParameterOrSkolem // would be nice to improve this diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 0a0ab53852..0a295febf3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -394,8 +394,8 @@ trait Typers extends Modes with Adaptations with Tags { * @param tree ... * @return ... */ - def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T = - check(NoSymbol, scope, pt, tree) + // def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T = + // check(NoSymbol, scope, pt, tree) private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = { this.owner = owner @@ -5343,7 +5343,7 @@ trait Typers extends Modes with Adaptations with Tags { def typedHigherKindedType(tree: Tree, mode: Int): Tree = typed(tree, HKmode, WildcardType) - def typedHigherKindedType(tree: Tree): Tree = typedHigherKindedType(tree, NOmode) + // def typedHigherKindedType(tree: Tree): Tree = typedHigherKindedType(tree, NOmode) /** Types a type constructor tree used in a new or supertype */ def typedTypeConstructor(tree: Tree, mode: Int): Tree = { @@ -5431,16 +5431,16 @@ trait Typers extends Modes with Adaptations with Tags { object TypersStats { import scala.reflect.internal.TypesStats._ - import scala.reflect.internal.BaseTypeSeqsStats._ + // import scala.reflect.internal.BaseTypeSeqsStats._ val typedIdentCount = Statistics.newCounter("#typechecked identifiers") val typedSelectCount = Statistics.newCounter("#typechecked selections") val typedApplyCount = Statistics.newCounter("#typechecked applications") val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount) val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount) val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount) - val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount) - val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount) - val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount) + // val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount) + // val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount) + // val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount) val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos) val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos) val failedOpEqNanos = 
Statistics.newSubTimer(" failed op=", typerNanos) diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index bf44b65406..094b32673c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -51,11 +51,11 @@ trait Unapplies extends ast.TreeDSL * for n == 1, Some[T0] * else Some[Product[Ti]] */ - def unapplyReturnTypeExpected(argsLength: Int) = argsLength match { - case 0 => BooleanClass.tpe - case 1 => optionType(WildcardType) - case n => optionType(productType((List fill n)(WildcardType))) - } + // def unapplyReturnTypeExpected(argsLength: Int) = argsLength match { + // case 0 => BooleanClass.tpe + // case 1 => optionType(WildcardType) + // case n => optionType(productType((List fill n)(WildcardType))) + // } /** returns unapply or unapplySeq if available */ def unapplyMember(tp: Type): Symbol = (tp member nme.unapply) match { diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index 0c49b9b8e7..f82c504eb4 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -50,20 +50,20 @@ object ClassPath { def map(cp: String, f: String => String): String = join(split(cp) map f: _*) /** Split the classpath, filter according to predicate, and reassemble. */ - def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*) + // def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*) /** Split the classpath and map them into Paths */ - def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute) + // def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute) /** Make all classpath components absolute. 
*/ - def makeAbsolute(cp: String): String = fromPaths(toPaths(cp): _*) + // def makeAbsolute(cp: String): String = fromPaths(toPaths(cp): _*) /** Join the paths as a classpath */ - def fromPaths(paths: Path*): String = join(paths map (_.path): _*) - def fromURLs(urls: URL*): String = fromPaths(urls map (x => Path(x.getPath)) : _*) + // def fromPaths(paths: Path*): String = join(paths map (_.path): _*) + // def fromURLs(urls: URL*): String = fromPaths(urls map (x => Path(x.getPath)) : _*) /** Split the classpath and map them into URLs */ - def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL) + // def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL) /** Expand path and possibly expanding stars */ def expandPath(path: String, expandStar: Boolean = true): List[String] = @@ -124,12 +124,12 @@ object ClassPath { for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield newClassPath(entry) - def classesAtAllURLS(path: String): List[ClassPath[T]] = - (path split " ").toList flatMap classesAtURL + // def classesAtAllURLS(path: String): List[ClassPath[T]] = + // (path split " ").toList flatMap classesAtURL - def classesAtURL(spec: String) = - for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield - newClassPath(location) + // def classesAtURL(spec: String) = + // for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield + // newClassPath(location) def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] = classesInPathImpl(path, true).toIndexedSeq @@ -400,14 +400,14 @@ class JavaClassPath( context: JavaContext) extends MergedClassPath[AbstractFile](containers, context) { } -object JavaClassPath { - def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = { - val containers = { - for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield - new DirectoryClassPath(f, context) - } - new JavaClassPath(containers.toIndexedSeq, context) - } - def fromURLs(urls: Seq[URL]): JavaClassPath = - fromURLs(urls, ClassPath.DefaultJavaContext) -} +// object JavaClassPath { +// def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = { +// val containers = { +// for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield +// new DirectoryClassPath(f, context) +// } +// new JavaClassPath(containers.toIndexedSeq, context) +// } +// def fromURLs(urls: Seq[URL]): JavaClassPath = +// fromURLs(urls, ClassPath.DefaultJavaContext) +// } diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala index 81c1b1d37a..2baab177b8 100644 --- a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala +++ b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala @@ -21,7 +21,7 @@ import scala.collection.mutable.ListBuffer trait ParserUtil extends Parsers { protected implicit class ParserPlus[+T](underlying: Parser[T]) { def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b } - def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b => a } + // def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! 
p) ^^ { case a~b => a } } } @@ -37,7 +37,7 @@ case class CommandLine( def withUnaryArgs(xs: List[String]) = copy(unaryArguments = xs) def withBinaryArgs(xs: List[String]) = copy(binaryArguments = xs) - def originalArgs = args + // def originalArgs = args def assumeBinary = true def enforceArity = true def onlyKnownOptions = false @@ -105,7 +105,7 @@ case class CommandLine( def isSet(arg: String) = args contains arg def get(arg: String) = argMap get arg - def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse + // def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse def apply(arg: String) = argMap(arg) override def toString() = "CommandLine(\n%s)\n" format (args map (" " + _ + "\n") mkString) @@ -115,7 +115,7 @@ object CommandLineParser extends RegexParsers with ParserUtil { override def skipWhitespace = false def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x)) - def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _) + // def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _) def escaped(ch: Char): Parser[String] = "\\" + ch def mkQuoted(ch: Char): Parser[String] = ( elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString) diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala index b7ed7903bc..a056a97a7c 100644 --- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala +++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala @@ -17,46 +17,43 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, /** produce a duplicate of this char array reader which starts reading * at current position, independent of what happens to original reader */ - def dup: JavaCharArrayReader = clone().asInstanceOf[JavaCharArrayReader] + // def dup: JavaCharArrayReader = clone().asInstanceOf[JavaCharArrayReader] /** layout constant */ - val tabinc = 8 + // val tabinc = 8 /** the line and column position of the current character */ var ch: Char = _ var bp = start - var oldBp = -1 - var oldCh: Char = _ + // var oldBp = -1 + // var oldCh: Char = _ //private var cline: Int = _ //private var ccol: Int = _ def cpos = bp var isUnicode: Boolean = _ - var lastLineStartPos: Int = 0 - var lineStartPos: Int = 0 - var lastBlankLinePos: Int = 0 + // var lastLineStartPos: Int = 0 + // var lineStartPos: Int = 0 + // var lastBlankLinePos: Int = 0 - private var onlyBlankChars = false + // private var onlyBlankChars = false //private var nextline = startline //private var nextcol = startcol private def markNewLine() { - lastLineStartPos = lineStartPos - if (onlyBlankChars) lastBlankLinePos = lineStartPos - lineStartPos = bp - onlyBlankChars = true + // lastLineStartPos = lineStartPos + // if (onlyBlankChars) lastBlankLinePos = lineStartPos + // lineStartPos = bp + // onlyBlankChars = true //nextline += 1 //nextcol = 1 } - def hasNext: Boolean = if (bp < buf.length) true - else { - false - } + def hasNext = bp < buf.length - def last: Char = if (bp > start + 2) buf(bp - 2) else ' ' // XML literals + // def last: Char = if (bp > start + 2) buf(bp - 2) else ' ' // XML literals def next(): Char = { //cline = nextline @@ -66,8 +63,8 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, ch = SU return SU // there is an endless stream of SU's at the end } - oldBp = bp - oldCh = ch + // oldBp = bp + // oldCh = ch ch = buf(bp) isUnicode = false 
bp = bp + 1 @@ -104,19 +101,19 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, isUnicode = true } case _ => - if (ch > ' ') onlyBlankChars = false + // if (ch > ' ') onlyBlankChars = false // nextcol += 1 } ch } - def rewind() { - if (oldBp == -1) throw new IllegalArgumentException - bp = oldBp - ch = oldCh - oldBp = -1 - oldCh = 'x' - } + // def rewind() { + // if (oldBp == -1) throw new IllegalArgumentException + // bp = oldBp + // ch = oldCh + // oldBp = -1 + // oldCh = 'x' + // } def copy: JavaCharArrayReader = new JavaCharArrayReader(buf, bp, /* nextcol, nextline, */ decodeUni, error) diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala index a2994966fd..f09787ec4f 100644 --- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala +++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala @@ -46,8 +46,8 @@ trait ScalaClassLoader extends JClassLoader { def create(path: String): AnyRef = tryToInitializeClass[AnyRef](path) map (_.newInstance()) orNull - def constructorsOf[T <: AnyRef : ClassTag]: List[Constructor[T]] = - classTag[T].runtimeClass.getConstructors.toList map (_.asInstanceOf[Constructor[T]]) + // def constructorsOf[T <: AnyRef : ClassTag]: List[Constructor[T]] = + // classTag[T].runtimeClass.getConstructors.toList map (_.asInstanceOf[Constructor[T]]) /** The actual bytes for a class file, or an empty array if it can't be found. */ def classBytes(className: String): Array[Byte] = classAsStream(className) match { @@ -75,10 +75,10 @@ trait ScalaClassLoader extends JClassLoader { /** A list comprised of this classloader followed by all its * (non-null) parent classloaders, if any. */ - def loaderChain: List[ScalaClassLoader] = this :: (getParent match { - case null => Nil - case p => p.loaderChain - }) + // def loaderChain: List[ScalaClassLoader] = this :: (getParent match { + // case null => Nil + // case p => p.loaderChain + // }) } /** Methods for obtaining various classloaders. @@ -99,35 +99,35 @@ object ScalaClassLoader { } def contextLoader = apply(Thread.currentThread.getContextClassLoader) def appLoader = apply(JClassLoader.getSystemClassLoader) - def extLoader = apply(appLoader.getParent) - def bootLoader = apply(null) - def contextChain = loaderChain(contextLoader) + // def extLoader = apply(appLoader.getParent) + // def bootLoader = apply(null) + // def contextChain = loaderChain(contextLoader) - def pathToErasure[T: ClassTag] = pathToClass(classTag[T].runtimeClass) - def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class" - def locate[T: ClassTag] = contextLoader getResource pathToErasure[T] + // def pathToErasure[T: ClassTag] = pathToClass(classTag[T].runtimeClass) + // def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class" + // def locate[T: ClassTag] = contextLoader getResource pathToErasure[T] /** Tries to guess the classpath by type matching the context classloader * and its parents, looking for any classloaders which will reveal their * classpath elements as urls. It it can't find any, creates a classpath * from the supplied string. 
*/ - def guessClassPathString(default: String = ""): String = { - val classpathURLs = contextChain flatMap { - case x: HasClassPath => x.classPathURLs - case x: JURLClassLoader => x.getURLs.toSeq - case _ => Nil - } - if (classpathURLs.isEmpty) default - else JavaClassPath.fromURLs(classpathURLs).asClasspathString - } - - def loaderChain(head: JClassLoader) = { - def loop(cl: JClassLoader): List[JClassLoader] = - if (cl == null) Nil else cl :: loop(cl.getParent) - - loop(head) - } + // def guessClassPathString(default: String = ""): String = { + // val classpathURLs = contextChain flatMap { + // case x: HasClassPath => x.classPathURLs + // case x: JURLClassLoader => x.getURLs.toSeq + // case _ => Nil + // } + // if (classpathURLs.isEmpty) default + // else JavaClassPath.fromURLs(classpathURLs).asClasspathString + // } + + // def loaderChain(head: JClassLoader) = { + // def loop(cl: JClassLoader): List[JClassLoader] = + // if (cl == null) Nil else cl :: loop(cl.getParent) + + // loop(head) + // } def setContext(cl: JClassLoader) = Thread.currentThread.setContextClassLoader(cl) def savingContextLoader[T](body: => T): T = { @@ -143,14 +143,14 @@ object ScalaClassLoader { private var classloaderURLs: Seq[URL] = urls def classPathURLs: Seq[URL] = classloaderURLs - def classPath: ClassPath[_] = JavaClassPath fromURLs classPathURLs + // def classPath: ClassPath[_] = JavaClassPath fromURLs classPathURLs /** Override to widen to public */ override def addURL(url: URL) = { classloaderURLs :+= url super.addURL(url) } - def toLongString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n") + // def toLongString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n") } def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader = @@ -161,7 +161,7 @@ object ScalaClassLoader { fromURLs(urls) tryToLoadClass name isDefined /** Finding what jar a clazz or instance came from */ - def origin(x: Any): Option[URL] = originOfClass(x.getClass) + // def origin(x: Any): Option[URL] = originOfClass(x.getClass) def originOfClass(x: Class[_]): Option[URL] = Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation)) } diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala index b103ae9cb0..a3b92aa2df 100644 --- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala +++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala @@ -14,6 +14,6 @@ class SimpleTracer(out: PrintStream, enabled: Boolean = true) { if (enabled) out.println(msg+value) value } - def withOutput(out: PrintStream) = new SimpleTracer(out, enabled) + // def withOutput(out: PrintStream) = new SimpleTracer(out, enabled) def when(enabled: Boolean): SimpleTracer = new SimpleTracer(out, enabled) } diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala index e9dcaa8e16..b40d3094f3 100644 --- a/src/compiler/scala/tools/nsc/util/package.scala +++ b/src/compiler/scala/tools/nsc/util/package.scala @@ -18,15 +18,15 @@ package object util { type HashSet[T >: Null <: AnyRef] = scala.reflect.internal.util.HashSet[T] val HashSet = scala.reflect.internal.util.HashSet - def onull[T](value: T, orElse: => T): T = if (value == null) orElse else value + // def onull[T](value: T, orElse: => T): T = if (value == null) orElse else value /** Apply a function and return the passed value */ def returning[T](x: T)(f: T => Unit): T = { f(x) ; x } /** Frequency counter */ - def freq[T](xs: Traversable[T]): Map[T, Int] = xs groupBy 
identity mapValues (_.size) + // def freq[T](xs: Traversable[T]): Map[T, Int] = xs groupBy identity mapValues (_.size) - def freqrank[T](xs: Traversable[(T, Int)]): List[(Int, T)] = xs.toList map (_.swap) sortBy (-_._1) + // def freqrank[T](xs: Traversable[(T, Int)]): List[(Int, T)] = xs.toList map (_.swap) sortBy (-_._1) /** Execute code and then wait for all non-daemon Threads * created and begun during its execution to complete. @@ -57,14 +57,14 @@ package object util { /** Given a function and a block of code, evaluates code block, * calls function with milliseconds elapsed, and returns block result. */ - def millisElapsedTo[T](f: Long => Unit)(body: => T): T = { - val start = System.currentTimeMillis - val result = body - val end = System.currentTimeMillis + // def millisElapsedTo[T](f: Long => Unit)(body: => T): T = { + // val start = System.currentTimeMillis + // val result = body + // val end = System.currentTimeMillis - f(end - start) - result - } + // f(end - start) + // result + // } /** Generate a string using a routine that wants to write on a stream. */ def stringFromWriter(writer: PrintWriter => Unit): String = { @@ -96,7 +96,7 @@ package object util { } lazy val trace = new SimpleTracer(System.out) - lazy val errtrace = new SimpleTracer(System.err) + // lazy val errtrace = new SimpleTracer(System.err) @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0") val StringOps = scala.reflect.internal.util.StringOps diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index f0c88eadea..4fc3fede16 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -392,8 +392,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => uttree } - def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = - compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds) + // def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = + // compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds) def parse(code: String): u.Tree = { if (compiler.settings.verbose.value) println("parsing "+code) diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala index 4d94581cc1..b80c65caa6 100644 --- a/src/compiler/scala/tools/util/Javap.scala +++ b/src/compiler/scala/tools/util/Javap.scala @@ -107,8 +107,8 @@ object Javap { type FakeEnvironment = AnyRef type FakePrinter = AnyRef - def apply(path: String): Unit = apply(Seq(path)) - def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show()) + // def apply(path: String): Unit = apply(Seq(path)) + // def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show()) sealed trait JpResult { type ResultType diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index 6b0821edf3..c88a8e13c4 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -22,11 +22,11 @@ object PathResolver { // security exceptions. 
import AccessControl._ - def firstNonEmpty(xs: String*) = xs find (_ != "") getOrElse "" + // def firstNonEmpty(xs: String*) = xs find (_ != "") getOrElse "" /** Map all classpath elements to absolute paths and reconstruct the classpath. */ - def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path) + // def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path) /** pretty print class path */ def ppcp(s: String) = split(s) match { @@ -45,7 +45,7 @@ object PathResolver { /** Environment variables which java pays attention to so it * seems we do as well. */ - def classPathEnv = envOrElse("CLASSPATH", "") + // def classPathEnv = envOrElse("CLASSPATH", "") def sourcePathEnv = envOrElse("SOURCEPATH", "") def javaBootClassPath = propOrElse("sun.boot.class.path", searchForBootClasspath) @@ -85,7 +85,7 @@ object PathResolver { def scalaHome = Environment.scalaHome def scalaHomeDir = Directory(scalaHome) - def scalaHomeExists = scalaHomeDir.isDirectory + // def scalaHomeExists = scalaHomeDir.isDirectory def scalaLibDir = Directory(scalaHomeDir / "lib") def scalaClassesDir = Directory(scalaHomeDir / "classes") @@ -135,7 +135,7 @@ object PathResolver { ) } - def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = { + def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = { // called from scalap val s = new Settings() s.classpath.value = path new PathResolver(s, context) result @@ -160,7 +160,7 @@ object PathResolver { } } } -import PathResolver.{ Defaults, Environment, firstNonEmpty, ppcp } +import PathResolver.{ Defaults, Environment, ppcp } class PathResolver(settings: Settings, context: JavaContext) { def this(settings: Settings) = this(settings, if (settings.inline.value) new JavaContext else DefaultJavaContext) diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala index 44a5b537b6..c196809da9 100644 --- a/src/continuations/library/scala/util/continuations/ControlContext.scala +++ b/src/continuations/library/scala/util/continuations/ControlContext.scala @@ -183,7 +183,7 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val // need filter or other functions? 
- final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = { + final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = { // called by codegen from SelectiveCPSTransform if (fun eq null) this else { @@ -209,7 +209,7 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val } } - final def mapFinally(f: () => Unit): ControlContext[A,B,C] = { + final def mapFinally(f: () => Unit): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform if (fun eq null) { try { f() diff --git a/src/continuations/library/scala/util/continuations/package.scala b/src/continuations/library/scala/util/continuations/package.scala index 1b50956c93..573fae85e7 100644 --- a/src/continuations/library/scala/util/continuations/package.scala +++ b/src/continuations/library/scala/util/continuations/package.scala @@ -166,7 +166,7 @@ package object continuations { throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled") } - def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = { + def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = { // called in code generated by SelectiveCPSTransform new ControlContext[A, B, B](null, x) } @@ -176,11 +176,11 @@ package object continuations { * a final result. * @see shift */ - def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = { + def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform new ControlContext((f:A=>B,g:Exception=>B) => fun(f), null.asInstanceOf[A]) } - def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = { + def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform ctx } diff --git a/src/detach/plugin/scala/tools/detach/Detach.scala b/src/detach/plugin/scala/tools/detach/Detach.scala index 73f6cde58c..499a97b761 100644 --- a/src/detach/plugin/scala/tools/detach/Detach.scala +++ b/src/detach/plugin/scala/tools/detach/Detach.scala @@ -73,7 +73,7 @@ abstract class Detach extends PluginComponent } private val serializableAnnotationInfo = - AnnotationInfo(SerializableAttr.tpe, List(), List()) + AnnotationInfo(requiredClass[scala.annotation.serializable].tpe, List(), List()) /* private val throwsAnnotationInfo = { val RemoteExceptionClass = definitions.getClass("java.rmi.RemoteException") diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala index d73d99bc89..bb0732dcc6 100644 --- a/src/partest/scala/tools/partest/CompilerTest.scala +++ b/src/partest/scala/tools/partest/CompilerTest.scala @@ -21,7 +21,7 @@ abstract class CompilerTest extends DirectTest { lazy val global: Global = newCompiler() lazy val units = compilationUnits(global)(sources: _ *) import global._ - import definitions._ + import definitions.{ compilerTypeFromTag } override def extraSettings = "-usejavacp -d " + testOutput.path @@ -32,7 +32,6 @@ abstract class CompilerTest extends DirectTest { def sources: List[String] = List(code) // Utility functions - class MkType(sym: Symbol) { def apply[M](implicit t: ru.TypeTag[M]): Type = if (sym eq NoSymbol) NoType diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala index 2d6f61d0b1..8d57e7e38d 100644 --- 
a/src/partest/scala/tools/partest/SecurityTest.scala +++ b/src/partest/scala/tools/partest/SecurityTest.scala @@ -11,8 +11,8 @@ import java.util._ abstract class SecurityTest extends App { def throwIt(x: Any) = throw new AccessControlException("" + x) - def readPerm(p: PropertyPermission) = p.getActions contains "read" - def writePerm(p: PropertyPermission) = p.getActions contains "write" + // def readPerm(p: PropertyPermission) = p.getActions contains "read" + // def writePerm(p: PropertyPermission) = p.getActions contains "write" def propertyCheck(p: PropertyPermission): Unit = throwIt(p) def check(perm: Permission): Unit = perm match { @@ -20,13 +20,13 @@ abstract class SecurityTest extends App { case _ => () } - lazy val sm = new SecurityManager { - // these two are the choke points for all permissions checks - override def checkPermission(perm: Permission): Unit = check(perm) - override def checkPermission(perm: Permission, context: Object): Unit = check(perm) - } - def securityOn(): Boolean = { - try { System.setSecurityManager(sm) ; true } - catch { case _: SecurityException => false } - } + // lazy val sm = new SecurityManager { + // // these two are the choke points for all permissions checks + // override def checkPermission(perm: Permission): Unit = check(perm) + // override def checkPermission(perm: Permission, context: Object): Unit = check(perm) + // } + // def securityOn(): Boolean = { + // try { System.setSecurityManager(sm) ; true } + // catch { case _: SecurityException => false } + // } } diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala index 9bfd444180..bd5dc39498 100644 --- a/src/partest/scala/tools/partest/TestUtil.scala +++ b/src/partest/scala/tools/partest/TestUtil.scala @@ -24,13 +24,13 @@ trait TestUtil { } def nanos(body: => Unit): Long = alsoNanos(body)._1 - def verifySpeed(body1: => Unit, body2: => Unit, acceptableMultiple: Double) = { - val t1 = nanos(body1).toDouble - val t2 = nanos(body2).toDouble - val mult = if (t1 > t2) t1 / t2 else t2 / t1 + // def verifySpeed(body1: => Unit, body2: => Unit, acceptableMultiple: Double) = { + // val t1 = nanos(body1).toDouble + // val t2 = nanos(body2).toDouble + // val mult = if (t1 > t2) t1 / t2 else t2 / t1 - assert(mult <= acceptableMultiple, "Performance difference too great: multiple = " + mult) - } + // assert(mult <= acceptableMultiple, "Performance difference too great: multiple = " + mult) + // } def intercept[T <: Exception : ClassTag](code: => Unit): Unit = try { @@ -41,6 +41,6 @@ trait TestUtil { } } +// Used in tests. object TestUtil extends TestUtil { - } diff --git a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala index 8a284b313b..18dd740208 100644 --- a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala +++ b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala @@ -78,6 +78,7 @@ object Instrumentation { !t.className.startsWith("scala/util/DynamicVariable") } + // Used in tests. 
def printStatistics(stats: Statistics = getStatistics, filter: MethodCallTrace => Boolean = standardFilter): Unit = { val stats = getStatistics println("Method call statistics:") diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala index 75aed449a8..d8ae4b2403 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala @@ -79,7 +79,7 @@ class ConsoleFileManager extends FileManager { testClassesDir = Path(testClasses.get).toCanonical.toDirectory NestUI.verbose("Running with classes in "+testClassesDir) - latestFile = testClassesDir.parent / "bin" + // latestFile = testClassesDir.parent / "bin" latestLibFile = testClassesDir / "library" latestActorsFile = testClassesDir / "library" / "actors" latestReflectFile = testClassesDir / "reflect" @@ -90,7 +90,7 @@ class ConsoleFileManager extends FileManager { else if (testBuild.isDefined) { val dir = Path(testBuild.get) NestUI.verbose("Running on "+dir) - latestFile = dir / "bin" + // latestFile = dir / "bin" latestLibFile = dir / "lib/scala-library.jar" latestActorsFile = dir / "lib/scala-actors.jar" latestReflectFile = dir / "lib/scala-reflect.jar" @@ -101,7 +101,7 @@ class ConsoleFileManager extends FileManager { else { def setupQuick() { NestUI.verbose("Running build/quick") - latestFile = prefixFile("build/quick/bin") + // latestFile = prefixFile("build/quick/bin") latestLibFile = prefixFile("build/quick/classes/library") latestActorsFile = prefixFile("build/quick/classes/library/actors") latestReflectFile = prefixFile("build/quick/classes/reflect") @@ -112,7 +112,7 @@ class ConsoleFileManager extends FileManager { def setupInst() { NestUI.verbose("Running dist (installed)") val p = testParent.getParentFile - latestFile = prefixFileWith(p, "bin") + // latestFile = prefixFileWith(p, "bin") latestLibFile = prefixFileWith(p, "lib/scala-library.jar") latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar") latestReflectFile = prefixFileWith(p, "lib/scala-reflect.jar") @@ -122,7 +122,7 @@ class ConsoleFileManager extends FileManager { def setupDist() { NestUI.verbose("Running dists/latest") - latestFile = prefixFile("dists/latest/bin") + // latestFile = prefixFile("dists/latest/bin") latestLibFile = prefixFile("dists/latest/lib/scala-library.jar") latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar") latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar") @@ -132,7 +132,7 @@ class ConsoleFileManager extends FileManager { def setupPack() { NestUI.verbose("Running build/pack") - latestFile = prefixFile("build/pack/bin") + // latestFile = prefixFile("build/pack/bin") latestLibFile = prefixFile("build/pack/lib/scala-library.jar") latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar") latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar") @@ -175,7 +175,7 @@ class ConsoleFileManager extends FileManager { var LATEST_PARTEST: String = "" var LATEST_ACTORS: String = "" - var latestFile: File = _ + // var latestFile: File = _ var latestLibFile: File = _ var latestActorsFile: File = _ var latestReflectFile: File = _ @@ -187,7 +187,7 @@ class ConsoleFileManager extends FileManager { // initialize above fields findLatest() - var testFiles: List[io.Path] = Nil + // var testFiles: List[io.Path] = Nil def getFiles(kind: String, cond: Path => Boolean): List[File] = { def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _) @@ -197,9 
+197,7 @@ class ConsoleFileManager extends FileManager { if (dir.isDirectory) NestUI.verbose("look in %s for tests" format dir) else NestUI.failure("Directory '%s' not found" format dir) - val files = - if (testFiles.nonEmpty) testFiles filter (_.parent isSame dir) - else dir.list filterNot ignoreDir filter cond toList + val files = dir.list filterNot ignoreDir filter cond toList ( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile) } diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala index d23ee81e4d..35bce01684 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala @@ -51,7 +51,7 @@ class ConsoleRunner extends DirectRunner { private val testSetArgs = testSets map ("--" + _.kind) private val testSetArgMap = testSetArgs zip testSets toMap - def denotesTestSet(arg: String) = testSetArgs contains arg + // def denotesTestSet(arg: String) = testSetArgs contains arg private def printVersion() { NestUI outline (versionMsg + "\n") } diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala index 21fd314552..9e2a34a34e 100644 --- a/src/partest/scala/tools/partest/nest/FileManager.scala +++ b/src/partest/scala/tools/partest/nest/FileManager.scala @@ -74,15 +74,15 @@ trait FileManager extends FileUtil { var timeout = PartestDefaults.timeout // how can 15 minutes not be enough? What are you doing, run/lisp.scala? // You complete in 11 seconds on my machine. - var oneTestTimeout = 60 * 60 * 1000 + // var oneTestTimeout = 60 * 60 * 1000 /** Only when --debug is given. */ lazy val testTimings = new mutable.HashMap[String, Long] def recordTestTiming(name: String, milliseconds: Long) = synchronized { testTimings(name) = milliseconds } - def showTestTimings() { - testTimings.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %s".format(k, v)) } - } + // def showTestTimings() { + // testTimings.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %s".format(k, v)) } + // } def getLogFile(dir: File, fileBase: String, kind: String): File = new File(dir, fileBase + "-" + kind + ".log") diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala index 70db6d0ed1..00aa27bd34 100644 --- a/src/partest/scala/tools/partest/nest/NestUI.scala +++ b/src/partest/scala/tools/partest/nest/NestUI.scala @@ -54,9 +54,9 @@ object NestUI { } def warning(msg: String) = print(_warning + msg + _default) - def warning(msg: String, wr: PrintWriter) = synchronized { - wr.print(_warning + msg + _default) - } + // def warning(msg: String, wr: PrintWriter) = synchronized { + // wr.print(_warning + msg + _default) + // } def normal(msg: String) = print(_default + msg) def normal(msg: String, wr: PrintWriter) = synchronized { @@ -104,7 +104,7 @@ object NestUI { } var _verbose = false - var _debug = false + // var _debug = false def verbose(msg: String) { if (_verbose) { @@ -112,10 +112,10 @@ object NestUI { println(msg) } } - def debug(msg: String) { - if (isPartestDebug) { - outline("debug: ") - println(msg) - } - } + // def debug(msg: String) { + // if (isPartestDebug) { + // outline("debug: ") + // println(msg) + // } + // } } diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala index 5cb8589d66..4b0ed1f82a 100644 --- 
a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala +++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala @@ -3,8 +3,6 @@ * @author Philipp Haller */ -// $Id$ - package scala.tools.partest package nest @@ -12,7 +10,6 @@ import scala.tools.nsc.Properties.{ setProp, propOrEmpty } import scala.tools.nsc.util.ClassPath import scala.tools.nsc.io import io.Path -import RunnerUtils._ import java.net.URLClassLoader /* This class is used to load an instance of DirectRunner using @@ -28,6 +25,12 @@ class ReflectiveRunner { // was used to start the runner. val sepRunnerClassName = "scala.tools.partest.nest.ConsoleRunner" + private def searchPath(option: String, as: List[String]): Option[String] = as match { + case `option` :: r :: _ => Some(r) + case _ :: rest => searchPath(option, rest) + case Nil => None + } + def main(args: String) { val argList = (args.split("\\s")).toList diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala index 548c5abbd9..2651088018 100644 --- a/src/partest/scala/tools/partest/nest/RunnerManager.scala +++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala @@ -291,10 +291,10 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP ((swr, wr)) } - def fail(what: Any) = { - NestUI.verbose("scalac: compilation of "+what+" failed\n") - false - } + // def fail(what: Any) = { + // NestUI.verbose("scalac: compilation of "+what+" failed\n") + // false + // } def diffCheck(testFile: File, diff: String) = { testDiff = diff testDiff == "" diff --git a/src/partest/scala/tools/partest/nest/RunnerUtils.scala b/src/partest/scala/tools/partest/nest/RunnerUtils.scala index 6707a9338a..8a47989b7c 100644 --- a/src/partest/scala/tools/partest/nest/RunnerUtils.scala +++ b/src/partest/scala/tools/partest/nest/RunnerUtils.scala @@ -1,29 +1,29 @@ -/* NEST (New Scala Test) - * Copyright 2007-2013 LAMP/EPFL - * @author Philipp Haller - */ +// /* NEST (New Scala Test) +// * Copyright 2007-2013 LAMP/EPFL +// * @author Philipp Haller +// */ -// $Id$ +// // $Id$ -package scala.tools.partest -package nest +// package scala.tools.partest +// package nest -object RunnerUtils { - def splitArgs(str: String) = str split "\\s" filterNot (_ == "") toList +// object RunnerUtils { +// def splitArgs(str: String) = str split "\\s" filterNot (_ == "") toList - def searchPath(option: String, as: List[String]): Option[String] = as match { - case `option` :: r :: _ => Some(r) - case _ :: rest => searchPath(option, rest) - case Nil => None - } +// def searchPath(option: String, as: List[String]): Option[String] = as match { +// case `option` :: r :: _ => Some(r) +// case _ :: rest => searchPath(option, rest) +// case Nil => None +// } - def searchAndRemovePath(option: String, as: List[String]) = (as indexOf option) match { - case -1 => (None, as) - case idx => (Some(as(idx + 1)), (as take idx) ::: (as drop (idx + 2))) - } +// def searchAndRemovePath(option: String, as: List[String]) = (as indexOf option) match { +// case -1 => (None, as) +// case idx => (Some(as(idx + 1)), (as take idx) ::: (as drop (idx + 2))) +// } - def searchAndRemoveOption(option: String, as: List[String]) = (as indexOf option) match { - case -1 => (false, as) - case idx => (true, (as take idx) ::: (as drop (idx + 1))) - } -} +// def searchAndRemoveOption(option: String, as: List[String]) = (as indexOf option) match { +// case -1 => (false, as) +// case idx => (true, (as take idx) ::: (as drop (idx + 1))) +// } +// } 
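(Editorial note, not part of the patch: the ReflectiveRunner hunk above inlines the argument-scanning helper that previously lived in RunnerUtils, which is why the whole RunnerUtils object can be commented out without changing behaviour. Below is a minimal, self-contained sketch of how that helper reads a value-carrying option from a partest command line; the object name SearchPathSketch and the sample arguments are illustrative only, while the searchPath body mirrors the recursive pattern match shown in the diff.)

object SearchPathSketch {
  // Scan an argument list for `option` and return the token that follows it, if any.
  private def searchPath(option: String, as: List[String]): Option[String] = as match {
    case `option` :: r :: _ => Some(r)                  // option found: the next token is its value
    case _ :: rest          => searchPath(option, rest) // keep scanning the remainder
    case Nil                => None                     // option absent, or present without a value
  }

  def main(args: Array[String]): Unit = {
    val argv = List("--verbose", "--classpath", "/tmp/classes", "--run")
    println(searchPath("--classpath", argv)) // Some(/tmp/classes)
    println(searchPath("--buildpath", argv)) // None
  }
}
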
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala index 58cc7d5b0b..fa0c88a2b2 100644 --- a/src/partest/scala/tools/partest/package.scala +++ b/src/partest/scala/tools/partest/package.scala @@ -13,9 +13,9 @@ import java.util.concurrent.Callable package partest { class TestState { - def isOk = this eq TestState.Ok - def isFail = this eq TestState.Fail - def isTimeout = this eq TestState.Timeout + // def isOk = this eq TestState.Ok + // def isFail = this eq TestState.Fail + // def isTimeout = this eq TestState.Timeout } object TestState { val Ok = new TestState @@ -43,7 +43,7 @@ package object partest { def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body } - def path2String(path: String) = file2String(new JFile(path)) + // def path2String(path: String) = file2String(new JFile(path)) def file2String(f: JFile) = try SFile(f).slurp(scala.io.Codec.UTF8) catch { case _: FileNotFoundException => "" } diff --git a/src/partest/scala/tools/partest/utils/PrintMgr.scala b/src/partest/scala/tools/partest/utils/PrintMgr.scala index d25be87c1e..56fdcda2ea 100644 --- a/src/partest/scala/tools/partest/utils/PrintMgr.scala +++ b/src/partest/scala/tools/partest/utils/PrintMgr.scala @@ -1,52 +1,52 @@ -/* __ *\ -** ________ ___ / / ___ Scala Parallel Testing ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id$ - -package scala.tools.partest -package utils - -/** - * @author Thomas Hofer - */ -object PrintMgr { - - val NONE = 0 - val SOME = 1 - val MANY = 2 - - var outline = "" - var success = "" - var failure = "" - var warning = "" - var default = "" - - def initialization(number: Int) = number match { - case MANY => - outline = Console.BOLD + Console.BLACK - success = Console.BOLD + Console.GREEN - failure = Console.BOLD + Console.RED - warning = Console.BOLD + Console.YELLOW - default = Console.RESET - case SOME => - outline = Console.BOLD + Console.BLACK - success = Console.RESET - failure = Console.BOLD + Console.BLACK - warning = Console.BOLD + Console.BLACK - default = Console.RESET - case _ => - } - - def printOutline(msg: String) = print(outline + msg + default) - - def printSuccess(msg: String) = print(success + msg + default) - - def printFailure(msg: String) = print(failure + msg + default) - - def printWarning(msg: String) = print(warning + msg + default) -} +// /* __ *\ +// ** ________ ___ / / ___ Scala Parallel Testing ** +// ** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** +// ** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +// ** /____/\___/_/ |_/____/_/ | | ** +// ** |/ ** +// \* */ + +// // $Id$ + +// package scala.tools.partest +// package utils + +// /** +// * @author Thomas Hofer +// */ +// object PrintMgr { + +// val NONE = 0 +// val SOME = 1 +// val MANY = 2 + +// var outline = "" +// var success = "" +// var failure = "" +// var warning = "" +// var default = "" + +// def initialization(number: Int) = number match { +// case MANY => +// outline = Console.BOLD + Console.BLACK +// success = Console.BOLD + Console.GREEN +// failure = Console.BOLD + Console.RED +// warning = Console.BOLD + Console.YELLOW +// default = Console.RESET +// case SOME => +// outline = Console.BOLD + Console.BLACK +// success = Console.RESET +// failure = Console.BOLD + Console.BLACK +// warning = Console.BOLD + Console.BLACK +// default = Console.RESET +// case _ => +// } + +// 
def printOutline(msg: String) = print(outline + msg + default) + +// def printSuccess(msg: String) = print(success + msg + default) + +// def printFailure(msg: String) = print(failure + msg + default) + +// def printWarning(msg: String) = print(warning + msg + default) +// } diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 7c12b5979d..82be2fa04a 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -6,7 +6,6 @@ package scala.reflect package internal -import util._ import pickling.ByteCodecs import scala.annotation.tailrec import scala.collection.immutable.ListMap @@ -289,8 +288,8 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => def refsSymbol(sym: Symbol) = hasArgWhich(_.symbol == sym) /** Change all ident's with Symbol "from" to instead use symbol "to" */ - def substIdentSyms(from: Symbol, to: Symbol) = - AnnotationInfo(atp, args map (_ substituteSymbols (List(from), List(to))), assocs) setPos pos + // def substIdentSyms(from: Symbol, to: Symbol) = + // AnnotationInfo(atp, args map (_ substituteSymbols (List(from), List(to))), assocs) setPos pos def stringArg(index: Int) = constantAtIndex(index) map (_.stringValue) def intArg(index: Int) = constantAtIndex(index) map (_.intValue) @@ -325,14 +324,14 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => implicit val AnnotationTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo]) object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil) - + /** Extracts symbol of thrown exception from AnnotationInfo. - * + * * Supports both “old-style” `@throws(classOf[Exception])` * as well as “new-stye” `@throws[Exception]("cause")` annotations. 
*/ object ThrownException { - def unapply(ann: AnnotationInfo): Option[Symbol] = + def unapply(ann: AnnotationInfo): Option[Symbol] = ann match { case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass => None diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala index 9f41f0336e..9da6ad652a 100644 --- a/src/reflect/scala/reflect/internal/BuildUtils.scala +++ b/src/reflect/scala/reflect/internal/BuildUtils.scala @@ -1,8 +1,6 @@ package scala.reflect package internal -import Flags._ - trait BuildUtils { self: SymbolTable => class BuildImpl extends BuildApi { diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala index b1462e9709..c8af1732a9 100644 --- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala +++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala @@ -381,10 +381,10 @@ object ClassfileConstants { def toScalaClassFlags(flags: Int): Long = FlagTranslation classFlags flags def toScalaFieldFlags(flags: Int): Long = FlagTranslation fieldFlags flags - @deprecated("Use another method in this object", "2.10.0") - def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = ( - if (isClass) toScalaClassFlags(flags) - else if (isField) toScalaFieldFlags(flags) - else toScalaMethodFlags(flags) - ) + // @deprecated("Use another method in this object", "2.10.0") + // def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = ( + // if (isClass) toScalaClassFlags(flags) + // else if (isField) toScalaFieldFlags(flags) + // else toScalaMethodFlags(flags) + // ) } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 9a358cec46..8ecb189c27 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -9,7 +9,6 @@ package internal import scala.annotation.{ switch, meta } import scala.collection.{ mutable, immutable } import Flags._ -import PartialFunction._ import scala.reflect.api.{Universe => ApiUniverse} trait Definitions extends api.StandardDefinitions { @@ -149,7 +148,7 @@ trait Definitions extends api.StandardDefinitions { FloatClass, DoubleClass ) - def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol) + // def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol) def ScalaPrimitiveValueClasses: List[ClassSymbol] = ScalaValueClasses } @@ -158,7 +157,7 @@ trait Definitions extends api.StandardDefinitions { def isDefinitionsInitialized = isInitialized // symbols related to packages - var emptypackagescope: Scope = null //debug + // var emptypackagescope: Scope = null //debug @deprecated("Moved to rootMirror.RootPackage", "2.10.0") val RootPackage: ModuleSymbol = rootMirror.RootPackage @@ -181,7 +180,7 @@ trait Definitions extends api.StandardDefinitions { lazy val RuntimePackage = getRequiredPackage("scala.runtime") lazy val RuntimePackageClass = RuntimePackage.moduleClass.asClass - lazy val JavaLangEnumClass = requiredClass[java.lang.Enum[_]] + // lazy val JavaLangEnumClass = requiredClass[java.lang.Enum[_]] // convenient one-argument parameter lists lazy val anyparam = List(AnyClass.tpe) @@ -262,7 +261,7 @@ trait Definitions extends api.StandardDefinitions { || tp =:= AnyRefClass.tpe ) /** Does this type have a parent which is none of Any, AnyVal, or AnyRef? 
*/ - def hasNonTrivialParent(tp: Type) = tp.parents exists (t => !isTrivialTopType(tp)) + // def hasNonTrivialParent(tp: Type) = tp.parents exists (t => !isTrivialTopType(tp)) private def fixupAsAnyTrait(tpe: Type): Type = tpe match { case ClassInfoType(parents, decls, clazz) => @@ -358,11 +357,11 @@ trait Definitions extends api.StandardDefinitions { lazy val UnqualifiedOwners = UnqualifiedModules.toSet ++ UnqualifiedModules.map(_.moduleClass) lazy val PredefModule = requiredModule[scala.Predef.type] - lazy val PredefModuleClass = PredefModule.moduleClass + // lazy val PredefModuleClass = PredefModule.moduleClass def Predef_classOf = getMemberMethod(PredefModule, nme.classOf) - def Predef_identity = getMemberMethod(PredefModule, nme.identity) - def Predef_conforms = getMemberMethod(PredefModule, nme.conforms) + // def Predef_identity = getMemberMethod(PredefModule, nme.identity) + // def Predef_conforms = getMemberMethod(PredefModule, nme.conforms) def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray) def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp)) def Predef_??? = getMemberMethod(PredefModule, nme.???) @@ -382,7 +381,7 @@ trait Definitions extends api.StandardDefinitions { lazy val SpecializableModule = requiredModule[Specializable] lazy val GroupOfSpecializable = getMemberClass(SpecializableModule, tpnme.Group) - lazy val ConsoleModule = requiredModule[scala.Console.type] + // lazy val ConsoleModule = requiredModule[scala.Console.type] lazy val ScalaRunTimeModule = requiredModule[scala.runtime.ScalaRunTime.type] lazy val SymbolModule = requiredModule[scala.Symbol.type] lazy val Symbol_apply = getMemberMethod(SymbolModule, nme.apply) @@ -392,9 +391,9 @@ trait Definitions extends api.StandardDefinitions { def arrayLengthMethod = getMemberMethod(ScalaRunTimeModule, nme.array_length) def arrayCloneMethod = getMemberMethod(ScalaRunTimeModule, nme.array_clone) def ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible) - def scalaRuntimeSameElements = getMemberMethod(ScalaRunTimeModule, nme.sameElements) + // def scalaRuntimeSameElements = getMemberMethod(ScalaRunTimeModule, nme.sameElements) def arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass) - def arrayElementClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayElementClass) + // def arrayElementClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayElementClass) // classes with special meanings lazy val StringAddClass = requiredClass[scala.runtime.StringAdd] @@ -457,10 +456,10 @@ trait Definitions extends api.StandardDefinitions { case _ => tp } - def isPrimitiveArray(tp: Type) = tp match { - case TypeRef(_, ArrayClass, arg :: Nil) => isPrimitiveValueClass(arg.typeSymbol) - case _ => false - } + // def isPrimitiveArray(tp: Type) = tp match { + // case TypeRef(_, ArrayClass, arg :: Nil) => isPrimitiveValueClass(arg.typeSymbol) + // case _ => false + // } def isReferenceArray(tp: Type) = tp match { case TypeRef(_, ArrayClass, arg :: Nil) => arg <:< AnyRefClass.tpe case _ => false @@ -470,11 +469,11 @@ trait Definitions extends api.StandardDefinitions { case _ => false } - lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy") + // lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy") // collections classes lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]] - lazy val IterableClass = requiredClass[scala.collection.Iterable[_]] + // lazy val IterableClass = 
requiredClass[scala.collection.Iterable[_]] lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]] lazy val ListClass = requiredClass[scala.collection.immutable.List[_]] lazy val SeqClass = requiredClass[scala.collection.Seq[_]] @@ -485,8 +484,8 @@ trait Definitions extends api.StandardDefinitions { lazy val List_apply = getMemberMethod(ListModule, nme.apply) lazy val NilModule = requiredModule[scala.collection.immutable.Nil.type] lazy val SeqModule = requiredModule[scala.collection.Seq.type] - lazy val IteratorModule = requiredModule[scala.collection.Iterator.type] - lazy val Iterator_apply = getMemberMethod(IteratorModule, nme.apply) + // lazy val IteratorModule = requiredModule[scala.collection.Iterator.type] + // lazy val Iterator_apply = getMemberMethod(IteratorModule, nme.apply) // arrays and their members lazy val ArrayModule = requiredModule[scala.Array.type] @@ -501,9 +500,9 @@ trait Definitions extends api.StandardDefinitions { // reflection / structural types lazy val SoftReferenceClass = requiredClass[java.lang.ref.SoftReference[_]] - lazy val WeakReferenceClass = requiredClass[java.lang.ref.WeakReference[_]] + // lazy val WeakReferenceClass = requiredClass[java.lang.ref.WeakReference[_]] lazy val MethodClass = getClassByName(sn.MethodAsObject) - def methodClass_setAccessible = getMemberMethod(MethodClass, nme.setAccessible) + // def methodClass_setAccessible = getMemberMethod(MethodClass, nme.setAccessible) lazy val EmptyMethodCacheClass = requiredClass[scala.runtime.EmptyMethodCache] lazy val MethodCacheClass = requiredClass[scala.runtime.MethodCache] def methodCache_find = getMemberMethod(MethodCacheClass, nme.find_) @@ -527,7 +526,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ExprClass = if (ExprsClass != NoSymbol) getMemberClass(ExprsClass, tpnme.Expr) else NoSymbol def ExprSplice = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.splice) else NoSymbol def ExprValue = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.value) else NoSymbol - lazy val ExprModule = if (ExprsClass != NoSymbol) getMemberModule(ExprsClass, nme.Expr) else NoSymbol + // lazy val ExprModule = if (ExprsClass != NoSymbol) getMemberModule(ExprsClass, nme.Expr) else NoSymbol lazy val ClassTagModule = requiredModule[scala.reflect.ClassTag[_]] lazy val ClassTagClass = requiredClass[scala.reflect.ClassTag[_]] @@ -553,7 +552,7 @@ trait Definitions extends api.StandardDefinitions { def MacroContextPrefix = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.prefix) else NoSymbol def MacroContextPrefixType = if (MacroContextClass != NoSymbol) getTypeMember(MacroContextClass, tpnme.PrefixType) else NoSymbol def MacroContextUniverse = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.universe) else NoSymbol - def MacroContextMirror = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.mirror) else NoSymbol + // def MacroContextMirror = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.mirror) else NoSymbol lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl] lazy val StringContextClass = requiredClass[scala.StringContext] @@ -585,12 +584,12 @@ trait Definitions extends api.StandardDefinitions { // The given class has a main method. 
def hasJavaMainMethod(sym: Symbol): Boolean = (sym.tpe member nme.main).alternatives exists isJavaMainMethod - def hasJavaMainMethod(path: String): Boolean = - hasJavaMainMethod(getModuleIfDefined(path)) + // def hasJavaMainMethod(path: String): Boolean = + // hasJavaMainMethod(getModuleIfDefined(path)) - def isOptionType(tp: Type) = tp.typeSymbol isSubClass OptionClass - def isSomeType(tp: Type) = tp.typeSymbol eq SomeClass - def isNoneType(tp: Type) = tp.typeSymbol eq NoneModule + // def isOptionType(tp: Type) = tp.typeSymbol isSubClass OptionClass + // def isSomeType(tp: Type) = tp.typeSymbol eq SomeClass + // def isNoneType(tp: Type) = tp.typeSymbol eq NoneModule // Product, Tuple, Function, AbstractFunction private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[ClassSymbol] = { @@ -613,7 +612,7 @@ trait Definitions extends api.StandardDefinitions { /** Creators for TupleN, ProductN, FunctionN. */ def tupleType(elems: List[Type]) = aritySpecificType(TupleClass, elems) - def productType(elems: List[Type]) = aritySpecificType(ProductClass, elems) + // def productType(elems: List[Type]) = aritySpecificType(ProductClass, elems) def functionType(formals: List[Type], restpe: Type) = aritySpecificType(FunctionClass, formals, restpe) def abstractFunctionType(formals: List[Type], restpe: Type) = aritySpecificType(AbstractFunctionClass, formals, restpe) @@ -632,10 +631,10 @@ trait Definitions extends api.StandardDefinitions { else nme.genericWrapArray } - @deprecated("Use isTupleType", "2.10.0") - def isTupleTypeOrSubtype(tp: Type) = isTupleType(tp) + // @deprecated("Use isTupleType", "2.10.0") + // def isTupleTypeOrSubtype(tp: Type) = isTupleType(tp) - def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j)) + // def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j)) // NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intensional? 
def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym) def isProductNClass(sym: Symbol) = ProductClass contains sym @@ -684,10 +683,10 @@ trait Definitions extends api.StandardDefinitions { // def Product_productElementName = getMemberMethod(ProductRootClass, nme.productElementName) def productProj(z:Symbol, j: Int): TermSymbol = getMemberValue(z, nme.productAccessorName(j)) - def productProj(n: Int, j: Int): TermSymbol = productProj(ProductClass(n), j) + // def productProj(n: Int, j: Int): TermSymbol = productProj(ProductClass(n), j) /** returns true if this type is exactly ProductN[T1,...,Tn], not some subclass */ - def isExactProductType(tp: Type): Boolean = isProductNClass(tp.typeSymbol) + // def isExactProductType(tp: Type): Boolean = isProductNClass(tp.typeSymbol) /** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */ def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNClass match { @@ -705,7 +704,7 @@ trait Definitions extends api.StandardDefinitions { case tp => tp } - def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply) + // def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply) def abstractFunctionForFunctionType(tp: Type) = if (isFunctionType(tp)) abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last) @@ -724,7 +723,7 @@ trait Definitions extends api.StandardDefinitions { (sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass) } - def isSeqType(tp: Type) = elementType(SeqClass, tp.normalize) != NoType + // def isSeqType(tp: Type) = elementType(SeqClass, tp.normalize) != NoType def elementType(container: Symbol, tp: Type): Type = tp match { case TypeRef(_, `container`, arg :: Nil) => arg @@ -738,10 +737,10 @@ trait Definitions extends api.StandardDefinitions { def optionType(tp: Type) = appliedType(OptionClass, tp) def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg) def seqType(arg: Type) = appliedType(SeqClass, arg) - def someType(tp: Type) = appliedType(SomeClass, tp) + // def someType(tp: Type) = appliedType(SomeClass, tp) - def StringArray = arrayType(StringClass.tpe) - lazy val ObjectArray = arrayType(ObjectClass.tpe) + // def StringArray = arrayType(StringClass.tpe) + // lazy val ObjectArray = arrayType(ObjectClass.tpe) def ClassType(arg: Type) = if (phase.erasedTypes || forMSIL) ClassClass.tpe @@ -754,8 +753,8 @@ trait Definitions extends api.StandardDefinitions { // - .linkedClassOfClass: the ClassSymbol of the enumeration (class E) sym.owner.linkedClassOfClass.tpe - def vmClassType(arg: Type): Type = ClassType(arg) - def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!! + // def vmClassType(arg: Type): Type = ClassType(arg) + // def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!! /** Given a class symbol C with type parameters T1, T2, ... Tn * which have upper/lower bounds LB1/UB1, LB1/UB2, ..., LBn/UBn, @@ -768,14 +767,14 @@ trait Definitions extends api.StandardDefinitions { /** Given type U, creates a Type representing Class[_ <: U]. */ - def boundedClassType(upperBound: Type) = - appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound)) + // def boundedClassType(upperBound: Type) = + // appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound)) /** To avoid unchecked warnings on polymorphic classes, translate * a Foo[T] into a Foo[_] for use in the pattern matcher. 
*/ - @deprecated("Use classExistentialType", "2.10.0") - def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz) + // @deprecated("Use classExistentialType", "2.10.0") + // def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz) // // .NET backend @@ -783,7 +782,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ComparatorClass = getRequiredClass("scala.runtime.Comparator") // System.ValueType - lazy val ValueTypeClass: ClassSymbol = getClassByName(sn.ValueType) + // lazy val ValueTypeClass: ClassSymbol = getClassByName(sn.ValueType) // System.MulticastDelegate lazy val DelegateClass: ClassSymbol = getClassByName(sn.Delegate) var Delegate_scalaCallers: List[Symbol] = List() // Syncnote: No protection necessary yet as only for .NET where reflection is not supported. @@ -876,11 +875,11 @@ trait Definitions extends api.StandardDefinitions { x :: removeRedundantObjects(xs) } /** Order a list of types with non-trait classes before others. */ - def classesFirst(tps: List[Type]): List[Type] = { - val (classes, others) = tps partition (t => t.typeSymbol.isClass && !t.typeSymbol.isTrait) - if (classes.isEmpty || others.isEmpty || (tps startsWith classes)) tps - else classes ::: others - } + // def classesFirst(tps: List[Type]): List[Type] = { + // val (classes, others) = tps partition (t => t.typeSymbol.isClass && !t.typeSymbol.isTrait) + // if (classes.isEmpty || others.isEmpty || (tps startsWith classes)) tps + // else classes ::: others + // } /** The following transformations applied to a list of parents. * If any parent is a class/trait, all parents which normalize to * Object are discarded. Otherwise, all parents which normalize @@ -908,10 +907,10 @@ trait Definitions extends api.StandardDefinitions { def parentsString(parents: List[Type]) = normalizedParents(parents) mkString " with " - def typeParamsString(tp: Type) = tp match { - case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]") - case _ => "" - } + // def typeParamsString(tp: Type) = tp match { + // case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]") + // case _ => "" + // } def valueParamsString(tp: Type) = tp match { case MethodType(params, _) => params map (_.defString) mkString ("(", ",", ")") case _ => "" @@ -948,12 +947,12 @@ trait Definitions extends api.StandardDefinitions { lazy val BoxedNumberClass = getClassByName(sn.BoxedNumber) lazy val BoxedCharacterClass = getClassByName(sn.BoxedCharacter) lazy val BoxedBooleanClass = getClassByName(sn.BoxedBoolean) - lazy val BoxedByteClass = requiredClass[java.lang.Byte] - lazy val BoxedShortClass = requiredClass[java.lang.Short] - lazy val BoxedIntClass = requiredClass[java.lang.Integer] - lazy val BoxedLongClass = requiredClass[java.lang.Long] - lazy val BoxedFloatClass = requiredClass[java.lang.Float] - lazy val BoxedDoubleClass = requiredClass[java.lang.Double] + // lazy val BoxedByteClass = requiredClass[java.lang.Byte] + // lazy val BoxedShortClass = requiredClass[java.lang.Short] + // lazy val BoxedIntClass = requiredClass[java.lang.Integer] + // lazy val BoxedLongClass = requiredClass[java.lang.Long] + // lazy val BoxedFloatClass = requiredClass[java.lang.Float] + // lazy val BoxedDoubleClass = requiredClass[java.lang.Double] lazy val Boxes_isNumberOrBool = getDecl(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean) lazy val Boxes_isNumber = getDecl(BoxesRunTimeClass, nme.isBoxedNumber) @@ -974,7 +973,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ImplicitNotFoundClass = 
requiredClass[scala.annotation.implicitNotFound] lazy val MigrationAnnotationClass = requiredClass[scala.annotation.migration] lazy val ScalaStrictFPAttr = requiredClass[scala.annotation.strictfp] - lazy val SerializableAttr = requiredClass[scala.annotation.serializable] // @serializable is deprecated + // lazy val SerializableAttr = requiredClass[scala.annotation.serializable] // @serializable is deprecated lazy val SwitchClass = requiredClass[scala.annotation.switch] lazy val TailrecClass = requiredClass[scala.annotation.tailrec] lazy val VarargsClass = requiredClass[scala.annotation.varargs] @@ -1009,7 +1008,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ParamTargetClass = requiredClass[meta.param] lazy val SetterTargetClass = requiredClass[meta.setter] lazy val ClassTargetClass = requiredClass[meta.companionClass] - lazy val ObjectTargetClass = requiredClass[meta.companionObject] + // lazy val ObjectTargetClass = requiredClass[meta.companionObject] lazy val MethodTargetClass = requiredClass[meta.companionMethod] // TODO: module, moduleClass? package, packageObject? lazy val LanguageFeatureAnnot = requiredClass[meta.languageFeature] @@ -1054,7 +1053,7 @@ trait Definitions extends api.StandardDefinitions { def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol = getMember(owner, newTypeName(name)) def termMember(owner: Symbol, name: String): Symbol = owner.info.member(newTermName(name)) - def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name)) + // def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name)) def findNamedMember(fullName: Name, root: Symbol): Symbol = { val segs = nme.segments(fullName.toString, fullName.isTermName) @@ -1121,8 +1120,8 @@ trait Definitions extends api.StandardDefinitions { def getDeclIfDefined(owner: Symbol, name: Name): Symbol = owner.info.nonPrivateDecl(name) - def packageExists(packageName: String): Boolean = - getModuleIfDefined(packageName).isPackage + // def packageExists(packageName: String): Boolean = + // getModuleIfDefined(packageName).isPackage private def newAlias(owner: Symbol, name: TypeName, alias: Type): AliasTypeSymbol = owner.newAliasType(name) setInfoAndEnter alias @@ -1155,7 +1154,7 @@ trait Definitions extends api.StandardDefinitions { newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head))) } - lazy val boxedClassValues = boxedClass.values.toSet[Symbol] + // lazy val boxedClassValues = boxedClass.values.toSet[Symbol] lazy val isUnbox = unboxMethod.values.toSet[Symbol] lazy val isBox = boxMethod.values.toSet[Symbol] @@ -1215,8 +1214,8 @@ trait Definitions extends api.StandardDefinitions { /** Is symbol a value class? */ def isPrimitiveValueClass(sym: Symbol) = ScalaValueClasses contains sym - def isNonUnitValueClass(sym: Symbol) = isPrimitiveValueClass(sym) && (sym != UnitClass) - def isSpecializableClass(sym: Symbol) = isPrimitiveValueClass(sym) || (sym == AnyRefClass) + // def isNonUnitValueClass(sym: Symbol) = isPrimitiveValueClass(sym) && (sym != UnitClass) + // def isSpecializableClass(sym: Symbol) = isPrimitiveValueClass(sym) || (sym == AnyRefClass) def isPrimitiveValueType(tp: Type) = isPrimitiveValueClass(tp.typeSymbol) /** Is symbol a boxed value class, e.g. java.lang.Integer? 
*/ diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala index 59c027868e..2a0fe9d19a 100644 --- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala +++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala @@ -7,7 +7,6 @@ package scala.reflect package internal import scala.collection.{ mutable, immutable } -import util._ /** The name of this trait defines the eventual intent better than * it does the initial contents. diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index ea8d6078ff..592523c639 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ b/src/reflect/scala/reflect/internal/Importers.scala @@ -316,7 +316,7 @@ trait Importers extends api.Importers { self: SymbolTable => def importName(name: from.Name): Name = if (name.isTypeName) newTypeName(name.toString) else newTermName(name.toString) def importTypeName(name: from.TypeName): TypeName = importName(name).toTypeName - def importTermName(name: from.TermName): TermName = importName(name).toTermName + // def importTermName(name: from.TermName): TermName = importName(name).toTermName def importModifiers(mods: from.Modifiers): Modifiers = new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 9193674f3e..7d98823c7e 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -135,8 +135,8 @@ trait Names extends api.Names with LowPriorityNames { def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName = newTermName(bs, offset, len).toTypeName - def nameChars: Array[Char] = chrs - @deprecated("", "2.9.0") def view(s: String): TermName = newTermName(s) + // def nameChars: Array[Char] = chrs + // @deprecated("", "2.9.0") def view(s: String): TermName = newTermName(s) // Classes ---------------------------------------------------------------------- @@ -186,22 +186,22 @@ trait Names extends api.Names with LowPriorityNames { scala.compat.Platform.arraycopy(chrs, index, cs, offset, len) /** @return the ascii representation of this name */ - final def toChars: Array[Char] = { - val cs = new Array[Char](len) - copyChars(cs, 0) - cs - } + // final def toChars: Array[Char] = { + // val cs = new Array[Char](len) + // copyChars(cs, 0) + // cs + // } /** Write to UTF8 representation of this name to given character array. * Start copying to index `to`. Return index of next free byte in array. * Array must have enough remaining space for all bytes * (i.e. maximally 3*length bytes). 
*/ - final def copyUTF8(bs: Array[Byte], offset: Int): Int = { - val bytes = Codec.toUTF8(chrs, index, len) - scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length) - offset + bytes.length - } + // final def copyUTF8(bs: Array[Byte], offset: Int): Int = { + // val bytes = Codec.toUTF8(chrs, index, len) + // scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length) + // offset + bytes.length + // } /** @return the hash value of this name */ final override def hashCode(): Int = index @@ -283,7 +283,7 @@ trait Names extends api.Names with LowPriorityNames { */ final def lastPos(c: Char): Int = lastPos(c, len - 1) - final def lastPos(s: String): Int = lastPos(s, len - s.length) + // final def lastPos(s: String): Int = lastPos(s, len - s.length) /** Returns the index of the last occurrence of char c in this * name from start, -1 if not found. @@ -305,18 +305,18 @@ trait Names extends api.Names with LowPriorityNames { * @param start ... * @return the index of the last occurrence of s */ - final def lastPos(s: String, start: Int): Int = { - var i = lastPos(s.charAt(0), start) - while (i >= 0) { - var j = 1; - while (s.charAt(j) == chrs(index + i + j)) { - j += 1 - if (j == s.length()) return i; - } - i = lastPos(s.charAt(0), i - 1) - } - -s.length() - } + // final def lastPos(s: String, start: Int): Int = { + // var i = lastPos(s.charAt(0), start) + // while (i >= 0) { + // var j = 1; + // while (s.charAt(j) == chrs(index + i + j)) { + // j += 1 + // if (j == s.length()) return i; + // } + // i = lastPos(s.charAt(0), i - 1) + // } + // -s.length() + // } /** Does this name start with prefix? */ final def startsWith(prefix: Name): Boolean = startsWith(prefix, 0) @@ -379,7 +379,7 @@ trait Names extends api.Names with LowPriorityNames { if (idx == length) -1 else idx } def lastIndexOf(ch: Char) = lastPos(ch) - def lastIndexOf(ch: Char, fromIndex: Int) = lastPos(ch, fromIndex) + // def lastIndexOf(ch: Char, fromIndex: Int) = lastPos(ch, fromIndex) /** Replace all occurrences of `from` by `to` in * name; result is always a term name. @@ -428,12 +428,12 @@ trait Names extends api.Names with LowPriorityNames { def append(ch: Char) = newName("" + this + ch) def append(suffix: String) = newName("" + this + suffix) def append(suffix: Name) = newName("" + this + suffix) - def prepend(ch: Char) = newName("" + ch + this) + // def prepend(ch: Char) = newName("" + ch + this) def prepend(prefix: String) = newName("" + prefix + this) - def prepend(prefix: Name) = newName("" + prefix + this) + // def prepend(prefix: Name) = newName("" + prefix + this) def decodedName: ThisNameType = newName(decode) - def isOperatorName: Boolean = decode != toString + // def isOperatorName: Boolean = decode != toString def longString: String = nameKind + " " + decode def debugString = { val s = decode ; if (isTypeName) s + "!" 
else s } } @@ -445,7 +445,7 @@ trait Names extends api.Names with LowPriorityNames { def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name def dropRight(n: Int): T = name.subName(0, name.length - n).asInstanceOf[T] def drop(n: Int): T = name.subName(n, name.length).asInstanceOf[T] - def nonEmpty: Boolean = name.length > 0 + // def nonEmpty: Boolean = name.length > 0 } implicit val NameTag = ClassTag[Name](classOf[Name]) @@ -489,7 +489,7 @@ trait Names extends api.Names with LowPriorityNames { type ThisNameType = TermName protected[this] def thisName: TermName = this - var next: TermName = termHashtable(hash) + val next: TermName = termHashtable(hash) termHashtable(hash) = this def isTermName: Boolean = true def isTypeName: Boolean = false @@ -518,7 +518,7 @@ trait Names extends api.Names with LowPriorityNames { type ThisNameType = TypeName protected[this] def thisName: TypeName = this - var next: TypeName = typeHashtable(hash) + val next: TypeName = typeHashtable(hash) typeHashtable(hash) = this def isTermName: Boolean = false def isTypeName: Boolean = true diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 31f9cb7e59..58e3aff46f 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -475,8 +475,8 @@ trait Printers extends api.Printers { self: SymbolTable => } def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer) - def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream)) - def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter)) + // def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream)) + // def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter)) // provides footnotes for types and mirrors import scala.collection.mutable.{Map, WeakHashMap, SortedSet} diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 950e30dbc5..31907772cb 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -343,7 +343,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => */ def iterator: Iterator[Symbol] = toList.iterator - def containsSymbol(s: Symbol) = lookupAll(s.name) contains s + // def containsSymbol(s: Symbol) = lookupAll(s.name) contains s override def foreach[U](p: Symbol => U): Unit = toList foreach p diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 5e7f5777b2..7947a1a1e1 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -104,7 +104,7 @@ trait StdNames { val IMPORT: NameType = "" val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING val MODULE_VAR_SUFFIX: NameType = "$module" - val NAME_JOIN_NAME: NameType = NAME_JOIN_STRING + // val NAME_JOIN_NAME: NameType = NAME_JOIN_STRING val PACKAGE: NameType = "package" val ROOT: NameType = "" val SPECIALIZED_SUFFIX: NameType = "$sp" @@ -121,8 +121,8 @@ trait StdNames { final val Short: NameType = "Short" final val Unit: NameType = "Unit" - final val ScalaValueNames: scala.List[NameType] = - scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit) + // final val ScalaValueNames: scala.List[NameType] = + 
// scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit) // some types whose companions we utilize final val AnyRef: NameType = "AnyRef" @@ -130,7 +130,7 @@ trait StdNames { final val List: NameType = "List" final val Seq: NameType = "Seq" final val Symbol: NameType = "Symbol" - final val ClassTag: NameType = "ClassTag" + // final val ClassTag: NameType = "ClassTag" final val WeakTypeTag: NameType = "WeakTypeTag" final val TypeTag : NameType = "TypeTag" final val Expr: NameType = "Expr" @@ -220,12 +220,12 @@ trait StdNames { final val Any: NameType = "Any" final val AnyVal: NameType = "AnyVal" - final val ExprApi: NameType = "ExprApi" + // final val ExprApi: NameType = "ExprApi" final val Mirror: NameType = "Mirror" final val Nothing: NameType = "Nothing" final val Null: NameType = "Null" final val Object: NameType = "Object" - final val PartialFunction: NameType = "PartialFunction" + // final val PartialFunction: NameType = "PartialFunction" final val PrefixType: NameType = "PrefixType" final val Product: NameType = "Product" final val Serializable: NameType = "Serializable" @@ -239,7 +239,7 @@ trait StdNames { final val Group: NameType = "Group" final val Tree: NameType = "Tree" final val Type : NameType = "Type" - final val TypeTree: NameType = "TypeTree" + // final val TypeTree: NameType = "TypeTree" // Annotation simple names, used in Namer final val BeanPropertyAnnot: NameType = "BeanProperty" @@ -249,13 +249,13 @@ trait StdNames { // Classfile Attributes final val AnnotationDefaultATTR: NameType = "AnnotationDefault" final val BridgeATTR: NameType = "Bridge" - final val ClassfileAnnotationATTR: NameType = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009). + // final val ClassfileAnnotationATTR: NameType = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009). final val CodeATTR: NameType = "Code" final val ConstantValueATTR: NameType = "ConstantValue" final val DeprecatedATTR: NameType = "Deprecated" final val ExceptionsATTR: NameType = "Exceptions" final val InnerClassesATTR: NameType = "InnerClasses" - final val LineNumberTableATTR: NameType = "LineNumberTable" + // final val LineNumberTableATTR: NameType = "LineNumberTable" final val LocalVariableTableATTR: NameType = "LocalVariableTable" final val RuntimeAnnotationATTR: NameType = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME final val RuntimeParamAnnotationATTR: NameType = "RuntimeVisibleParameterAnnotations" // RetentionPolicy.RUNTIME (annotations on parameters) @@ -284,9 +284,9 @@ trait StdNames { val EXCEPTION_RESULT_PREFIX = "exceptionResult" val EXPAND_SEPARATOR_STRING = "$$" val INTERPRETER_IMPORT_WRAPPER = "$iw" - val INTERPRETER_LINE_PREFIX = "line" - val INTERPRETER_VAR_PREFIX = "res" - val INTERPRETER_WRAPPER_SUFFIX = "$object" + // val INTERPRETER_LINE_PREFIX = "line" + // val INTERPRETER_VAR_PREFIX = "res" + // val INTERPRETER_WRAPPER_SUFFIX = "$object" val LOCALDUMMY_PREFIX = " true - case _ => false - } + // def isDeprecatedIdentifierName(name: Name) = name.toTermName match { + // case nme.`then` | nme.`macro` => true + // case _ => false + // } def isOpAssignmentName(name: Name) = name match { case raw.NE | raw.LE | raw.GE | EMPTY => false @@ -461,14 +461,14 @@ trait StdNames { // If the name ends with $nn where nn are // all digits, strip the $ and the digits. // Otherwise return the argument. 
- def stripAnonNumberSuffix(name: Name): Name = { - var pos = name.length - while (pos > 0 && name.charAt(pos - 1).isDigit) - pos -= 1 + // def stripAnonNumberSuffix(name: Name): Name = { + // var pos = name.length + // while (pos > 0 && name.charAt(pos - 1).isDigit) + // pos -= 1 - if (pos <= 0 || pos == name.length || name.charAt(pos - 1) != '$') name - else name.subName(0, pos - 1) - } + // if (pos <= 0 || pos == name.length || name.charAt(pos - 1) != '$') name + // else name.subName(0, pos - 1) + // } def stripModuleSuffix(name: Name): Name = ( if (isModuleName(name)) name dropRight MODULE_SUFFIX_STRING.length else name @@ -484,8 +484,8 @@ trait StdNames { final val Nil: NameType = "Nil" final val Predef: NameType = "Predef" - final val ScalaRunTime: NameType = "ScalaRunTime" - final val Some: NameType = "Some" + // final val ScalaRunTime: NameType = "ScalaRunTime" + // final val Some: NameType = "Some" val _1 : NameType = "_1" val _2 : NameType = "_2" @@ -581,14 +581,14 @@ trait StdNames { val Annotation: NameType = "Annotation" val Any: NameType = "Any" val AnyVal: NameType = "AnyVal" - val AppliedTypeTree: NameType = "AppliedTypeTree" - val Apply: NameType = "Apply" + // val AppliedTypeTree: NameType = "AppliedTypeTree" + // val Apply: NameType = "Apply" val ArrayAnnotArg: NameType = "ArrayAnnotArg" - val Constant: NameType = "Constant" + // val Constant: NameType = "Constant" val ConstantType: NameType = "ConstantType" val EmptyPackage: NameType = "EmptyPackage" val EmptyPackageClass: NameType = "EmptyPackageClass" - val ExistentialTypeTree: NameType = "ExistentialTypeTree" + // val ExistentialTypeTree: NameType = "ExistentialTypeTree" val Flag : NameType = "Flag" val Ident: NameType = "Ident" val Import: NameType = "Import" @@ -597,10 +597,10 @@ trait StdNames { val Modifiers: NameType = "Modifiers" val NestedAnnotArg: NameType = "NestedAnnotArg" val NoFlags: NameType = "NoFlags" - val NoPrefix: NameType = "NoPrefix" + // val NoPrefix: NameType = "NoPrefix" val NoSymbol: NameType = "NoSymbol" val Nothing: NameType = "Nothing" - val NoType: NameType = "NoType" + // val NoType: NameType = "NoType" val Null: NameType = "Null" val Object: NameType = "Object" val RootPackage: NameType = "RootPackage" @@ -609,17 +609,17 @@ trait StdNames { val StringContext: NameType = "StringContext" val This: NameType = "This" val ThisType: NameType = "ThisType" - val Tree : NameType = "Tree" + // val Tree : NameType = "Tree" val Tuple2: NameType = "Tuple2" val TYPE_ : NameType = "TYPE" - val TypeApply: NameType = "TypeApply" + // val TypeApply: NameType = "TypeApply" val TypeRef: NameType = "TypeRef" val TypeTree: NameType = "TypeTree" val UNIT : NameType = "UNIT" val add_ : NameType = "add" val annotation: NameType = "annotation" val anyValClass: NameType = "anyValClass" - val append: NameType = "append" + // val append: NameType = "append" val apply: NameType = "apply" val applyDynamic: NameType = "applyDynamic" val applyDynamicNamed: NameType = "applyDynamicNamed" @@ -627,34 +627,34 @@ trait StdNames { val args : NameType = "args" val argv : NameType = "argv" val arrayClass: NameType = "arrayClass" - val arrayElementClass: NameType = "arrayElementClass" - val arrayValue: NameType = "arrayValue" + // val arrayElementClass: NameType = "arrayElementClass" + // val arrayValue: NameType = "arrayValue" val array_apply : NameType = "array_apply" val array_clone : NameType = "array_clone" val array_length : NameType = "array_length" val array_update : NameType = "array_update" - val arraycopy: NameType = 
"arraycopy" - val asTerm: NameType = "asTerm" + // val arraycopy: NameType = "arraycopy" + // val asTerm: NameType = "asTerm" val asModule: NameType = "asModule" - val asMethod: NameType = "asMethod" + // val asMethod: NameType = "asMethod" val asType: NameType = "asType" - val asClass: NameType = "asClass" + // val asClass: NameType = "asClass" val asInstanceOf_ : NameType = "asInstanceOf" val asInstanceOf_Ob : NameType = "$asInstanceOf" - val assert_ : NameType = "assert" - val assume_ : NameType = "assume" + // val assert_ : NameType = "assert" + // val assume_ : NameType = "assume" val box: NameType = "box" val build : NameType = "build" val bytes: NameType = "bytes" val canEqual_ : NameType = "canEqual" val checkInitialized: NameType = "checkInitialized" - val ClassManifestFactory: NameType = "ClassManifestFactory" + // val ClassManifestFactory: NameType = "ClassManifestFactory" val classOf: NameType = "classOf" val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure val conforms: NameType = "conforms" val copy: NameType = "copy" val currentMirror: NameType = "currentMirror" - val definitions: NameType = "definitions" + // val definitions: NameType = "definitions" val delayedInit: NameType = "delayedInit" val delayedInitArg: NameType = "delayedInit$body" val drop: NameType = "drop" @@ -667,7 +667,7 @@ trait StdNames { val equalsNumObject : NameType = "equalsNumObject" val equals_ : NameType = if (forMSIL) "Equals" else "equals" val error: NameType = "error" - val eval: NameType = "eval" + // val eval: NameType = "eval" val ex: NameType = "ex" val experimental: NameType = "experimental" val f: NameType = "f" @@ -678,17 +678,17 @@ trait StdNames { val flagsFromBits : NameType = "flagsFromBits" val flatMap: NameType = "flatMap" val foreach: NameType = "foreach" - val genericArrayOps: NameType = "genericArrayOps" + // val genericArrayOps: NameType = "genericArrayOps" val get: NameType = "get" - val getOrElse: NameType = "getOrElse" - val hasNext: NameType = "hasNext" + // val getOrElse: NameType = "getOrElse" + // val hasNext: NameType = "hasNext" val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode" val hash_ : NameType = "hash" - val head: NameType = "head" - val identity: NameType = "identity" + // val head: NameType = "head" + // val identity: NameType = "identity" val implicitly: NameType = "implicitly" val in: NameType = "in" - val info: NameType = "info" + // val info: NameType = "info" val inlinedEquals: NameType = "inlinedEquals" val isArray: NameType = "isArray" val isDefinedAt: NameType = "isDefinedAt" @@ -700,57 +700,57 @@ trait StdNames { val lang: NameType = "lang" val length: NameType = "length" val lengthCompare: NameType = "lengthCompare" - val liftedTree: NameType = "liftedTree" - val `macro` : NameType = "macro" - val macroThis : NameType = "_this" + // val liftedTree: NameType = "liftedTree" + // val `macro` : NameType = "macro" + // val macroThis : NameType = "_this" val macroContext : NameType = "c" val main: NameType = "main" - val manifest: NameType = "manifest" - val ManifestFactory: NameType = "ManifestFactory" + // val manifest: NameType = "manifest" + // val ManifestFactory: NameType = "ManifestFactory" val manifestToTypeTag: NameType = "manifestToTypeTag" val map: NameType = "map" val materializeClassTag: NameType = "materializeClassTag" val materializeWeakTypeTag: NameType = "materializeWeakTypeTag" val materializeTypeTag: NameType = "materializeTypeTag" - val mirror : NameType = "mirror" + // 
val mirror : NameType = "mirror" val moduleClass : NameType = "moduleClass" - val name: NameType = "name" + // val name: NameType = "name" val ne: NameType = "ne" val newArray: NameType = "newArray" val newFreeTerm: NameType = "newFreeTerm" val newFreeType: NameType = "newFreeType" val newNestedSymbol: NameType = "newNestedSymbol" val newScopeWith: NameType = "newScopeWith" - val next: NameType = "next" + // val next: NameType = "next" val nmeNewTermName: NameType = "newTermName" val nmeNewTypeName: NameType = "newTypeName" - val normalize: NameType = "normalize" + // val normalize: NameType = "normalize" val notifyAll_ : NameType = "notifyAll" val notify_ : NameType = "notify" val null_ : NameType = "null" - val ofDim: NameType = "ofDim" - val origin: NameType = "origin" + // val ofDim: NameType = "ofDim" + // val origin: NameType = "origin" val prefix : NameType = "prefix" val productArity: NameType = "productArity" val productElement: NameType = "productElement" val productIterator: NameType = "productIterator" val productPrefix: NameType = "productPrefix" val readResolve: NameType = "readResolve" - val reflect : NameType = "reflect" + // val reflect : NameType = "reflect" val reify : NameType = "reify" val rootMirror : NameType = "rootMirror" - val runOrElse: NameType = "runOrElse" + // val runOrElse: NameType = "runOrElse" val runtime: NameType = "runtime" val runtimeClass: NameType = "runtimeClass" val runtimeMirror: NameType = "runtimeMirror" - val sameElements: NameType = "sameElements" + // val sameElements: NameType = "sameElements" val scala_ : NameType = "scala" val selectDynamic: NameType = "selectDynamic" val selectOverloadedMethod: NameType = "selectOverloadedMethod" val selectTerm: NameType = "selectTerm" val selectType: NameType = "selectType" val self: NameType = "self" - val setAccessible: NameType = "setAccessible" + // val setAccessible: NameType = "setAccessible" val setAnnotations: NameType = "setAnnotations" val setSymbol: NameType = "setSymbol" val setType: NameType = "setType" @@ -760,15 +760,15 @@ trait StdNames { val staticModule : NameType = "staticModule" val staticPackage : NameType = "staticPackage" val synchronized_ : NameType = "synchronized" - val tail: NameType = "tail" - val `then` : NameType = "then" + // val tail: NameType = "tail" + // val `then` : NameType = "then" val this_ : NameType = "this" val thisPrefix : NameType = "thisPrefix" - val throw_ : NameType = "throw" + // val throw_ : NameType = "throw" val toArray: NameType = "toArray" - val toList: NameType = "toList" + // val toList: NameType = "toList" val toObjectArray : NameType = "toObjectArray" - val toSeq: NameType = "toSeq" + // val toSeq: NameType = "toSeq" val toString_ : NameType = if (forMSIL) "ToString" else "toString" val toTypeConstructor: NameType = "toTypeConstructor" val tpe : NameType = "tpe" @@ -788,14 +788,14 @@ trait StdNames { val view_ : NameType = "view" val wait_ : NameType = "wait" val withFilter: NameType = "withFilter" - val wrap: NameType = "wrap" - val zip: NameType = "zip" + // val wrap: NameType = "wrap" + // val zip: NameType = "zip" - val synthSwitch: NameType = "$synthSwitch" + // val synthSwitch: NameType = "$synthSwitch" // unencoded operators object raw { - final val AMP : NameType = "&" + // final val AMP : NameType = "&" final val BANG : NameType = "!" 
final val BAR : NameType = "|" final val DOLLAR: NameType = "$" @@ -804,7 +804,7 @@ trait StdNames { final val MINUS: NameType = "-" final val NE: NameType = "!=" final val PLUS : NameType = "+" - final val SLASH: NameType = "/" + // final val SLASH: NameType = "/" final val STAR : NameType = "*" final val TILDE: NameType = "~" @@ -860,14 +860,14 @@ trait StdNames { // Grouped here so Cleanup knows what tests to perform. val CommonOpNames = Set[Name](OR, XOR, AND, EQ, NE) - val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort) + // val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort) val BooleanOpNames = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames - val NumberOpNames = ( - Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT) - ++ Set(UNARY_+, UNARY_-, UNARY_!) - ++ ConversionNames - ++ CommonOpNames - ) + // val NumberOpNames = ( + // Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT) + // ++ Set(UNARY_+, UNARY_-, UNARY_!) + // ++ ConversionNames + // ++ CommonOpNames + // ) val add: NameType = "add" val complement: NameType = "complement" @@ -999,7 +999,7 @@ trait StdNames { object fulltpnme extends TypeNames { val RuntimeNothing: NameType = "scala.runtime.Nothing$" val RuntimeNull: NameType = "scala.runtime.Null$" - val JavaLangEnum: NameType = "java.lang.Enum" + // val JavaLangEnum: NameType = "java.lang.Enum" } /** Java binary names, like scala/runtime/Nothing$. @@ -1015,15 +1015,15 @@ trait StdNames { object nme extends TermNames { - def isModuleVarName(name: Name): Boolean = - stripAnonNumberSuffix(name) endsWith MODULE_VAR_SUFFIX + // def isModuleVarName(name: Name): Boolean = + // stripAnonNumberSuffix(name) endsWith MODULE_VAR_SUFFIX def moduleVarName(name: TermName): TermName = newTermNameCached("" + name + MODULE_VAR_SUFFIX) def getCause = sn.GetCause def getClass_ = sn.GetClass - def getComponentType = sn.GetComponentType + // def getComponentType = sn.GetComponentType def getMethod_ = sn.GetMethod def invoke_ = sn.Invoke @@ -1036,14 +1036,14 @@ trait StdNames { val reflMethodCacheName: NameType = "reflMethod$Cache" val reflMethodName: NameType = "reflMethod$Method" - private val reflectionCacheNames = Set[NameType]( - reflPolyCacheName, - reflClassCacheName, - reflParamsCacheName, - reflMethodCacheName, - reflMethodName - ) - def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _) + // private val reflectionCacheNames = Set[NameType]( + // reflPolyCacheName, + // reflClassCacheName, + // reflParamsCacheName, + // reflMethodCacheName, + // reflMethodName + // ) + // def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _) @deprecated("Use a method in tpnme", "2.10.0") def dropSingletonName(name: Name): TypeName = tpnme.dropSingletonName(name) @deprecated("Use a method in tpnme", "2.10.0") def singletonName(name: Name): TypeName = tpnme.singletonName(name) @@ -1056,28 +1056,28 @@ trait StdNames { protected val stringToTypeName = null protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s) - val BeanProperty : TypeName - val BooleanBeanProperty : TypeName + // val BeanProperty : TypeName + // val BooleanBeanProperty : TypeName val BoxedBoolean : TypeName val BoxedCharacter : TypeName val BoxedNumber : TypeName - val Class : TypeName + // val Class : TypeName val Delegate : TypeName val IOOBException : TypeName // IndexOutOfBoundsException val InvTargetException : TypeName // 
InvocationTargetException - val JavaSerializable : TypeName + // val JavaSerializable : TypeName val MethodAsObject : TypeName val NPException : TypeName // NullPointerException val Object : TypeName - val String : TypeName + // val String : TypeName val Throwable : TypeName val ValueType : TypeName - val ForName : TermName + // val ForName : TermName val GetCause : TermName val GetClass : TermName val GetClassLoader : TermName - val GetComponentType : TermName + // val GetComponentType : TermName val GetMethod : TermName val Invoke : TermName val JavaLang : TermName @@ -1152,22 +1152,22 @@ trait StdNames { final val BoxedLong: TypeName = "java.lang.Long" final val BoxedNumber: TypeName = "java.lang.Number" final val BoxedShort: TypeName = "java.lang.Short" - final val Class: TypeName = "java.lang.Class" + // final val Class: TypeName = "java.lang.Class" final val Delegate: TypeName = tpnme.NO_NAME final val IOOBException: TypeName = "java.lang.IndexOutOfBoundsException" final val InvTargetException: TypeName = "java.lang.reflect.InvocationTargetException" final val MethodAsObject: TypeName = "java.lang.reflect.Method" final val NPException: TypeName = "java.lang.NullPointerException" final val Object: TypeName = "java.lang.Object" - final val String: TypeName = "java.lang.String" + // final val String: TypeName = "java.lang.String" final val Throwable: TypeName = "java.lang.Throwable" final val ValueType: TypeName = tpnme.NO_NAME - final val ForName: TermName = newTermName("forName") + // final val ForName: TermName = newTermName("forName") final val GetCause: TermName = newTermName("getCause") final val GetClass: TermName = newTermName("getClass") final val GetClassLoader: TermName = newTermName("getClassLoader") - final val GetComponentType: TermName = newTermName("getComponentType") + // final val GetComponentType: TermName = newTermName("getComponentType") final val GetMethod: TermName = newTermName("getMethod") final val Invoke: TermName = newTermName("invoke") final val JavaLang: TermName = newTermName("java.lang") @@ -1185,28 +1185,28 @@ trait StdNames { } private class MSILNames extends SymbolNames { - final val BeanProperty: TypeName = tpnme.NO_NAME - final val BooleanBeanProperty: TypeName = tpnme.NO_NAME + // final val BeanProperty: TypeName = tpnme.NO_NAME + // final val BooleanBeanProperty: TypeName = tpnme.NO_NAME final val BoxedBoolean: TypeName = "System.IConvertible" final val BoxedCharacter: TypeName = "System.IConvertible" final val BoxedNumber: TypeName = "System.IConvertible" - final val Class: TypeName = "System.Type" + // final val Class: TypeName = "System.Type" final val Delegate: TypeName = "System.MulticastDelegate" final val IOOBException: TypeName = "System.IndexOutOfRangeException" final val InvTargetException: TypeName = "System.Reflection.TargetInvocationException" - final val JavaSerializable: TypeName = tpnme.NO_NAME + // final val JavaSerializable: TypeName = tpnme.NO_NAME final val MethodAsObject: TypeName = "System.Reflection.MethodInfo" final val NPException: TypeName = "System.NullReferenceException" final val Object: TypeName = "System.Object" - final val String: TypeName = "System.String" + // final val String: TypeName = "System.String" final val Throwable: TypeName = "System.Exception" final val ValueType: TypeName = "System.ValueType" - final val ForName: TermName = newTermName("GetType") + // final val ForName: TermName = newTermName("GetType") final val GetCause: TermName = newTermName("InnerException") /* 
System.Reflection.TargetInvocationException.InnerException */ final val GetClass: TermName = newTermName("GetType") final lazy val GetClassLoader: TermName = throw new UnsupportedOperationException("Scala reflection is not supported on this platform"); - final val GetComponentType: TermName = newTermName("GetElementType") + // final val GetComponentType: TermName = newTermName("GetElementType") final val GetMethod: TermName = newTermName("GetMethod") final val Invoke: TermName = newTermName("Invoke") final val JavaLang: TermName = newTermName("System") @@ -1223,13 +1223,13 @@ trait StdNames { ) } - private class J2SENames extends JavaNames { - final val BeanProperty: TypeName = "scala.beans.BeanProperty" - final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty" - final val JavaSerializable: TypeName = "java.io.Serializable" - } + // private class J2SENames extends JavaNames { + // final val BeanProperty: TypeName = "scala.beans.BeanProperty" + // final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty" + // final val JavaSerializable: TypeName = "java.io.Serializable" + // } lazy val sn: SymbolNames = if (forMSIL) new MSILNames - else new J2SENames + else new JavaNames { } } diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index fb1bf9ed9d..c72fb96611 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -184,7 +184,7 @@ abstract class SymbolTable extends macros.Universe final def phaseId(period: Period): Phase#Id = period & 0xFF /** The period at the start of run that includes `period`. */ - final def startRun(period: Period): Period = period & 0xFFFFFF00 + // final def startRun(period: Period): Period = period & 0xFFFFFF00 /** The current period. 
*/ final def currentPeriod: Period = { @@ -298,7 +298,6 @@ abstract class SymbolTable extends macros.Universe object perRunCaches { import java.lang.ref.WeakReference - import scala.runtime.ScalaRunTime.stringOf import scala.collection.generic.Clearable // Weak references so the garbage collector will take care of diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index eec780470e..89c3659b2e 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -19,7 +19,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => protected var ids = 0 - val emptySymbolArray = new Array[Symbol](0) + // val emptySymbolArray = new Array[Symbol](0) protected def nextId() = { ids += 1; ids } @@ -256,8 +256,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => (m, c) } - final def newPackageSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = - newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol] + // final def newPackageSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = + // newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol] final def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol = newClassSymbol(name, pos, newFlags).asInstanceOf[ModuleClassSymbol] @@ -323,8 +323,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Synthetic value parameters when parameter symbols are not available */ - final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] = - argtypess map (xs => newSyntheticValueParams(xs, freshNamer)) + // final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] = + // argtypess map (xs => newSyntheticValueParams(xs, freshNamer)) /** Synthetic value parameters when parameter symbols are not available. * Calling this method multiple times will re-use the same parameter names. 
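For orientation only, and not part of this patch: the doc comment above describes the synthetic-parameter helpers that survive this change. A minimal caller-side sketch, where `meth`, `argTpes` and `restpe` are assumed placeholder names and the single-list overload `newSyntheticValueParams(argtypes)` is taken to match the doc comment above:

    // hypothetical sketch: build fresh parameter symbols for a method whose
    // signature is known only as a list of argument types, then give the
    // method a MethodType built from those synthetic parameters
    val params: List[TermSymbol] = meth.newSyntheticValueParams(argTpes)
    meth setInfo MethodType(params, restpe)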
@@ -341,7 +341,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def newSyntheticValueParam(argtype: Type, name: TermName = nme.syntheticParamName(1)): TermSymbol = newValueParameter(name, owner.pos.focus, SYNTHETIC) setInfo argtype - def newSyntheticTypeParam(): TypeSymbol = newSyntheticTypeParam("T0", 0L) + // def newSyntheticTypeParam(): TypeSymbol = newSyntheticTypeParam("T0", 0L) def newSyntheticTypeParam(name: String, newFlags: Long): TypeSymbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty def newSyntheticTypeParams(num: Int): List[TypeSymbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L)) @@ -407,11 +407,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Create a new getter for current symbol (which must be a field) */ - final def newGetter: MethodSymbol = ( - owner.newMethod(nme.getterName(name.toTermName), NoPosition, getterFlags(flags)) - setPrivateWithin privateWithin - setInfo MethodType(Nil, tpe) - ) + // final def newGetter: MethodSymbol = ( + // owner.newMethod(nme.getterName(name.toTermName), NoPosition, getterFlags(flags)) + // setPrivateWithin privateWithin + // setInfo MethodType(Nil, tpe) + // ) final def newErrorSymbol(name: Name): Symbol = name match { case x: TypeName => newErrorClass(x) @@ -528,14 +528,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def isContravariant = false def isCovariant = false - def isExistentialQuantified = false + // def isExistentialQuantified = false def isExistentialSkolem = false def isExistentiallyBound = false def isGADTSkolem = false def isTypeParameter = false def isTypeParameterOrSkolem = false def isTypeSkolem = false - def isTypeMacro = false + // def isTypeMacro = false def isInvariant = !isCovariant && !isContravariant /** Qualities of Terms, always false for TypeSymbols. @@ -719,14 +719,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => = hasAnnotation(DeprecatedInheritanceAttr) def deprecatedInheritanceMessage = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0) - def deprecatedInheritanceVersion - = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1) + // def deprecatedInheritanceVersion + // = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1) def hasDeprecatedOverridingAnnotation = hasAnnotation(DeprecatedOverridingAttr) def deprecatedOverridingMessage = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0) - def deprecatedOverridingVersion - = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1) + // def deprecatedOverridingVersion + // = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1) // !!! when annotation arguments are not literal strings, but any sort of // assembly of strings, there is a fair chance they will turn up here not as @@ -806,7 +806,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isStaticOwner: Boolean = isPackageClass || isModuleClass && isStatic - def isTopLevelModule = hasFlag(MODULE) && owner.isPackageClass + // def isTopLevelModule = hasFlag(MODULE) && owner.isPackageClass /** A helper function for isEffectivelyFinal. 
*/ private def isNotOverridden = ( @@ -844,7 +844,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def isLocalClass = false - def isStableClass = false + // def isStableClass = false /* code for fixing nested objects override final def isModuleClass: Boolean = @@ -869,8 +869,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isPossibleInRefinement = !isConstructor && !isOverridingSymbol /** Is this symbol a member of class `clazz`? */ - def isMemberOf(clazz: Symbol) = - clazz.info.member(name).alternatives contains this + // def isMemberOf(clazz: Symbol) = + // clazz.info.member(name).alternatives contains this /** A a member of class `base` is incomplete if * (1) it is declared deferred or @@ -972,6 +972,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => def ownerChain: List[Symbol] = this :: owner.ownerChain def originalOwnerChain: List[Symbol] = this :: originalOwner.getOrElse(this, rawowner).originalOwnerChain + // All the symbols overridden by this symbol and this symbol at the head, + // or Nil if this is NoSymbol. + def overrideChain = ( + if (this eq NoSymbol) Nil + else if (!owner.isClass) this :: Nil + else this :: allOverriddenSymbols + ) + // Non-classes skip self and return rest of owner chain; overridden in ClassSymbol. def enclClassChain: List[Symbol] = owner.enclClassChain @@ -1081,8 +1089,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => protected def createImplClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol = new ClassSymbol(this, pos, name) with ImplClassSymbol initFlags newFlags - protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol = - new TermSymbol(this, pos, name) initFlags newFlags + // protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol = + // new TermSymbol(this, pos, name) initFlags newFlags protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol = new MethodSymbol(this, pos, name) initFlags newFlags @@ -1446,12 +1454,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => !isInitialized && (flags & LOCKED) == 0 && shouldTriggerCompleter(this, if (infos ne null) infos.info else null, isFlagRelated, mask) /** Was symbol's type updated during given phase? */ - final def isUpdatedAt(pid: Phase#Id): Boolean = { - assert(isCompilerUniverse) - var infos = this.infos - while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev - infos ne null - } + // final def isUpdatedAt(pid: Phase#Id): Boolean = { + // assert(isCompilerUniverse) + // var infos = this.infos + // while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev + // infos ne null + // } /** Was symbol's type updated during given phase? 
*/ final def hasTypeAt(pid: Phase#Id): Boolean = { @@ -1944,10 +1952,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => (this.rawInfo ne NoType) && (this.effectiveOwner == that.effectiveOwner) && ( !this.effectiveOwner.isPackageClass - || (this.sourceFile eq null) - || (that.sourceFile eq null) - || (this.sourceFile.path == that.sourceFile.path) // Cheap possibly wrong check, then expensive normalization - || (this.sourceFile.canonicalPath == that.sourceFile.canonicalPath) + || (this.associatedFile eq null) + || (that.associatedFile eq null) + || (this.associatedFile.path == that.associatedFile.path) // Cheap possibly wrong check, then expensive normalization + || (this.associatedFile.canonicalPath == that.associatedFile.canonicalPath) ) ) @@ -2074,9 +2082,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, ofclazz.thisType) /** Returns all symbols overriden by this symbol. */ - final def allOverriddenSymbols: List[Symbol] = - if (!owner.isClass) Nil + final def allOverriddenSymbols: List[Symbol] = ( + if ((this eq NoSymbol) || !owner.isClass) Nil else owner.ancestors map overriddenSymbol filter (_ != NoSymbol) + ) /** Equivalent to allOverriddenSymbols.nonEmpty, but more efficient. */ // !!! When if ever will this answer differ from .isOverride? @@ -2087,7 +2096,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => ) /** Equivalent to allOverriddenSymbols.head (or NoSymbol if no overrides) but more efficient. */ def nextOverriddenSymbol: Symbol = { - if (owner.isClass) owner.ancestors foreach { base => + if ((this ne NoSymbol) && owner.isClass) owner.ancestors foreach { base => val sym = overriddenSymbol(base) if (sym != NoSymbol) return sym @@ -2208,10 +2217,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => private def sourceFileOnly(file: AbstractFile): AbstractFile = if ((file eq null) || (file.path endsWith ".class")) null else file - private def binaryFileOnly(file: AbstractFile): AbstractFile = - if ((file eq null) || !(file.path endsWith ".class")) null else file + // private def binaryFileOnly(file: AbstractFile): AbstractFile = + // if ((file eq null) || !(file.path endsWith ".class")) null else file - final def binaryFile: AbstractFile = binaryFileOnly(associatedFile) + // final def binaryFile: AbstractFile = binaryFileOnly(associatedFile) final def sourceFile: AbstractFile = sourceFileOnly(associatedFile) /** Overridden in ModuleSymbols to delegate to the module class. 
*/ @@ -2236,7 +2245,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // ------ toString ------------------------------------------------------------------- /** A tag which (in the ideal case) uniquely identifies class symbols */ - final def tag: Int = fullName.## + // final def tag: Int = fullName.## /** The simple name of this Symbol */ final def simpleName: Name = name @@ -2705,7 +2714,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def asNameType(n: Name) = n.toTypeName override def isNonClassType = true - override def isTypeMacro = hasFlag(MACRO) + // override def isTypeMacro = hasFlag(MACRO) override def resolveOverloadedFlag(flag: Long) = flag match { case TRAIT => "" // DEFAULTPARAM @@ -2723,7 +2732,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isAbstractType = this hasFlag DEFERRED override def isContravariant = this hasFlag CONTRAVARIANT override def isCovariant = this hasFlag COVARIANT - override def isExistentialQuantified = isExistentiallyBound && !isSkolem + // override def isExistentialQuantified = isExistentiallyBound && !isSkolem override def isExistentiallyBound = this hasFlag EXISTENTIAL override def isTypeParameter = isTypeParameterOrSkolem && !isSkolem override def isTypeParameterOrSkolem = this hasFlag PARAM @@ -2854,7 +2863,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isTypeSkolem = this hasFlag PARAM override def isAbstractType = this hasFlag DEFERRED - override def isExistentialQuantified = false + // override def isExistentialQuantified = false override def existentialBound = if (isAbstractType) this.info else super.existentialBound /** If typeskolem comes from a type parameter, that parameter, otherwise skolem itself */ @@ -2940,21 +2949,21 @@ trait Symbols extends api.Symbols { self: SymbolTable => || isLocal || !owner.isPackageClass && owner.isLocalClass ) - override def isStableClass = (this hasFlag STABLE) || checkStable() - - private def checkStable() = { - def hasNoAbstractTypeMember(clazz: Symbol): Boolean = - (clazz hasFlag STABLE) || { - var e = clazz.info.decls.elems - while ((e ne null) && !(e.sym.isAbstractType && info.member(e.sym.name) == e.sym)) - e = e.next - e == null - } - (info.baseClasses forall hasNoAbstractTypeMember) && { - setFlag(STABLE) - true - } - } + // override def isStableClass = (this hasFlag STABLE) || checkStable() + + // private def checkStable() = { + // def hasNoAbstractTypeMember(clazz: Symbol): Boolean = + // (clazz hasFlag STABLE) || { + // var e = clazz.info.decls.elems + // while ((e ne null) && !(e.sym.isAbstractType && info.member(e.sym.name) == e.sym)) + // e = e.next + // e == null + // } + // (info.baseClasses forall hasNoAbstractTypeMember) && { + // setFlag(STABLE) + // true + // } + // } override def enclClassChain = this :: owner.enclClassChain diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 6ce93d93b2..6c8ba047d6 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -11,10 +11,10 @@ abstract class TreeGen extends macros.TreeBuilder { def rootScalaDot(name: Name) = Select(rootId(nme.scala_) setSymbol ScalaPackage, name) def scalaDot(name: Name) = Select(Ident(nme.scala_) setSymbol ScalaPackage, name) def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name) - def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass - def scalaUnitConstr = scalaDot(tpnme.Unit) 
setSymbol UnitClass - def productConstr = scalaDot(tpnme.Product) setSymbol ProductRootClass - def serializableConstr = scalaDot(tpnme.Serializable) setSymbol SerializableClass + // def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass + // def scalaUnitConstr = scalaDot(tpnme.Unit) setSymbol UnitClass + // def productConstr = scalaDot(tpnme.Product) setSymbol ProductRootClass + // def serializableConstr = scalaDot(tpnme.Serializable) setSymbol SerializableClass def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = { val cls = if (abstractFun) @@ -248,8 +248,8 @@ abstract class TreeGen extends macros.TreeBuilder { Literal(Constant(tp)) setType ConstantType(Constant(tp)) /** Builds a list with given head and tail. */ - def mkNewCons(head: Tree, tail: Tree): Tree = - New(Apply(mkAttributedRef(ConsClass), List(head, tail))) + // def mkNewCons(head: Tree, tail: Tree): Tree = + // New(Apply(mkAttributedRef(ConsClass), List(head, tail))) /** Builds a list with given head and tail. */ def mkNil: Tree = mkAttributedRef(NilModule) diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 7ae7cf1821..2b1292e145 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -135,8 +135,8 @@ abstract class TreeInfo { @deprecated("Use isExprSafeToInline instead", "2.10.0") def isPureExpr(tree: Tree) = isExprSafeToInline(tree) - def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] = - mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg))) + // def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] = + // mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg))) def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = { val b = List.newBuilder[R] @@ -186,25 +186,25 @@ abstract class TreeInfo { * * Also accounts for varargs. */ - private def applyMethodParameters(fn: Tree): List[Symbol] = { - val depth = applyDepth(fn) - // There could be applies which go beyond the parameter list(s), - // being applied to the result of the method call. - // !!! Note that this still doesn't seem correct, although it should - // be closer than what it replaced. - if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth) - else if (fn.symbol.paramss.isEmpty) Nil - else fn.symbol.paramss.last - } - - def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match { - case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args) - case _ => Nil - } - def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match { - case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f) - case _ => - } + // private def applyMethodParameters(fn: Tree): List[Symbol] = { + // val depth = applyDepth(fn) + // // There could be applies which go beyond the parameter list(s), + // // being applied to the result of the method call. + // // !!! Note that this still doesn't seem correct, although it should + // // be closer than what it replaced. 
+ // if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth) + // else if (fn.symbol.paramss.isEmpty) Nil + // else fn.symbol.paramss.last + // } + + // def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match { + // case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args) + // case _ => Nil + // } + // def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match { + // case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f) + // case _ => + // } /** Is symbol potentially a getter of a variable? */ @@ -354,10 +354,10 @@ abstract class TreeInfo { case x: Ident => !x.isBackquoted && nme.isVariableName(x.name) case _ => false } - def isDeprecatedIdentifier(tree: Tree): Boolean = tree match { - case x: Ident => !x.isBackquoted && nme.isDeprecatedIdentifierName(x.name) - case _ => false - } + // def isDeprecatedIdentifier(tree: Tree): Boolean = tree match { + // case x: Ident => !x.isBackquoted && nme.isDeprecatedIdentifierName(x.name) + // case _ => false + // } /** The first constructor definitions in `stats` */ def firstConstructor(stats: List[Tree]): Tree = stats find { @@ -417,10 +417,10 @@ abstract class TreeInfo { def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.endChar != ':') /** Is tree a `this` node which belongs to `enclClass`? */ - def isSelf(tree: Tree, enclClass: Symbol): Boolean = tree match { - case This(_) => tree.symbol == enclClass - case _ => false - } + // def isSelf(tree: Tree, enclClass: Symbol): Boolean = tree match { + // case This(_) => tree.symbol == enclClass + // case _ => false + // } /** a Match(Typed(_, tpt), _) must be translated into a switch if isSwitchAnnotation(tpt.tpe) */ def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation definitions.SwitchClass diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index ed08226ec7..c93750165d 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -847,7 +847,7 @@ trait Trees extends api.Trees { self: SymbolTable => /** Is the tree Predef, scala.Predef, or _root_.scala.Predef? */ def isReferenceToPredef(t: Tree) = isReferenceToScalaMember(t, nme.Predef) - def isReferenceToAnyVal(t: Tree) = isReferenceToScalaMember(t, tpnme.AnyVal) + // def isReferenceToAnyVal(t: Tree) = isReferenceToScalaMember(t, tpnme.AnyVal) // --- modifiers implementation --------------------------------------- diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala index 68b4fa69a1..9c2457e402 100644 --- a/src/reflect/scala/reflect/internal/TypeDebugging.scala +++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala @@ -9,8 +9,6 @@ package internal trait TypeDebugging { self: SymbolTable => - import definitions._ - // @M toString that is safe during debugging (does not normalize, ...) 
object typeDebug { private def to_s(x: Any): String = x match { @@ -20,7 +18,7 @@ trait TypeDebugging { case x: Product => x.productIterator mkString ("(", ", ", ")") case _ => "" + x } - def ptIndent(x: Any) = ("" + x).replaceAll("\\n", " ") + // def ptIndent(x: Any) = ("" + x).replaceAll("\\n", " ") def ptBlock(label: String, pairs: (String, Any)*): String = { if (pairs.isEmpty) label + "{ }" else { diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 42a9d9e456..0f92388993 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -167,10 +167,10 @@ trait Types extends api.Types { self: SymbolTable => log = Nil } finally unlock() } - def size = { - lock() - try log.size finally unlock() - } + // def size = { + // lock() + // try log.size finally unlock() + // } // `block` should not affect constraints on typevars def undo[T](block: => T): T = { @@ -184,18 +184,18 @@ trait Types extends api.Types { self: SymbolTable => } // if `block` evaluates to false, it should not affect constraints on typevars - def undoUnless(block: => Boolean): Boolean = { - lock() - try { - val before = log - var result = false + // def undoUnless(block: => Boolean): Boolean = { + // lock() + // try { + // val before = log + // var result = false - try result = block - finally if (!result) undoTo(before) + // try result = block + // finally if (!result) undoTo(before) - result - } finally unlock() - } + // result + // } finally unlock() + // } } /** A map from lists to compound types that have the given list as parents. @@ -292,7 +292,7 @@ trait Types extends api.Types { self: SymbolTable => abstract class TypeApiImpl extends TypeApi { this: Type => def declaration(name: Name): Symbol = decl(name) - def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name) + // def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name) def declarations = decls def typeArguments = typeArgs def erasure = this match { @@ -522,7 +522,7 @@ trait Types extends api.Types { self: SymbolTable => /** If this is a TypeRef `clazz`[`T`], return the argument `T` * otherwise return this type */ - def remove(clazz: Symbol): Type = this + // def remove(clazz: Symbol): Type = this /** For a curried/nullary method or poly type its non-method result type, * the type itself for all other types */ @@ -663,13 +663,13 @@ trait Types extends api.Types { self: SymbolTable => /** All members with the given flags, excluding bridges. */ - def membersWithFlags(requiredFlags: Long): Scope = - membersBasedOnFlags(BridgeFlags, requiredFlags) + // def membersWithFlags(requiredFlags: Long): Scope = + // membersBasedOnFlags(BridgeFlags, requiredFlags) /** All non-private members with the given flags, excluding bridges. */ - def nonPrivateMembersWithFlags(requiredFlags: Long): Scope = - membersBasedOnFlags(BridgeAndPrivateFlags, requiredFlags) + // def nonPrivateMembersWithFlags(requiredFlags: Long): Scope = + // membersBasedOnFlags(BridgeAndPrivateFlags, requiredFlags) /** The non-private member with given name, admitting members with given flags `admit`. 
* "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE @@ -806,7 +806,7 @@ trait Types extends api.Types { self: SymbolTable => else substThis(from, to).substSym(symsFrom, symsTo) /** Returns all parts of this type which satisfy predicate `p` */ - def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p) collect this + // def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p) collect this def withFilter(p: Type => Boolean) = new FilterMapForeach(p) class FilterMapForeach(p: Type => Boolean) extends FilterTypeCollector(p){ @@ -837,7 +837,7 @@ trait Types extends api.Types { self: SymbolTable => def contains(sym: Symbol): Boolean = new ContainsCollector(sym).collect(this) /** Does this type contain a reference to this type */ - def containsTp(tp: Type): Boolean = new ContainsTypeCollector(tp).collect(this) + // def containsTp(tp: Type): Boolean = new ContainsTypeCollector(tp).collect(this) /** Is this type a subtype of that type? */ def <:<(that: Type): Boolean = { @@ -900,9 +900,9 @@ trait Types extends api.Types { self: SymbolTable => ); /** Does this type implement symbol `sym` with same or stronger type? */ - def specializes(sym: Symbol): Boolean = - if (explainSwitch) explain("specializes", specializesSym, this, sym) - else specializesSym(this, sym) + // def specializes(sym: Symbol): Boolean = + // if (explainSwitch) explain("specializes", specializesSym, this, sym) + // else specializesSym(this, sym) /** Is this type close enough to that type so that members * with the two type would override each other? @@ -1243,7 +1243,7 @@ trait Types extends api.Types { self: SymbolTable => /** Remove any annotations from this type and from any * types embedded in this type. */ - def stripAnnotations = StripAnnotationsMap(this) + // def stripAnnotations = StripAnnotationsMap(this) /** Set the self symbol of an annotated type, or do nothing * otherwise. */ @@ -2917,14 +2917,14 @@ trait Types extends api.Types { self: SymbolTable => } // Not used yet. - object HasTypeParams { - def unapply(tp: Type): Option[(List[Symbol], Type)] = tp match { - case AnnotatedType(_, tp, _) => unapply(tp) - case ExistentialType(tparams, qtpe) => Some((tparams, qtpe)) - case PolyType(tparams, restpe) => Some((tparams, restpe)) - case _ => None - } - } + // object HasTypeParams { + // def unapply(tp: Type): Option[(List[Symbol], Type)] = tp match { + // case AnnotatedType(_, tp, _) => unapply(tp) + // case ExistentialType(tparams, qtpe) => Some((tparams, qtpe)) + // case PolyType(tparams, restpe) => Some((tparams, restpe)) + // case _ => None + // } + // } //@M // a TypeVar used to be a case class with only an origin and a constr @@ -3019,7 +3019,7 @@ trait Types extends api.Types { self: SymbolTable => require(params.nonEmpty, this) override def isHigherKinded = true - override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName) + // override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName) } /** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.) 
@@ -3035,9 +3035,9 @@ trait Types extends api.Types { self: SymbolTable => override def params: List[Symbol] = zippedArgs map (_._1) override def typeArgs: List[Type] = zippedArgs map (_._2) - override protected def typeVarString = ( - zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]") - ) + // override protected def typeVarString = ( + // zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]") + // ) } trait UntouchableTypeVar extends TypeVar { @@ -3357,7 +3357,7 @@ trait Types extends api.Types { self: SymbolTable => ).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#" } private def levelString = if (settings.explaintypes.value) level else "" - protected def typeVarString = originName + // protected def typeVarString = originName override def safeToString = ( if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>" else if (constr.inst ne NoType) "=?" + constr.inst @@ -3714,18 +3714,18 @@ trait Types extends api.Types { self: SymbolTable => * list given is List(AnyRefClass), the resulting type would be * e.g. Set[_ <: AnyRef] rather than Set[AnyRef] . */ - def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = { - tycon match { - case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) => - val eparams = typeParamsToExistentials(sym) - val bounds = args map (TypeBounds upper _) - foreach2(eparams, bounds)(_ setInfo _) - - newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe))) - case _ => - appliedType(tycon, args) - } - } + // def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = { + // tycon match { + // case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) => + // val eparams = typeParamsToExistentials(sym) + // val bounds = args map (TypeBounds upper _) + // foreach2(eparams, bounds)(_ setInfo _) + + // newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe))) + // case _ => + // appliedType(tycon, args) + // } + // } /** A creator and extractor for type parameterizations that strips empty type parameter lists. * Use this factory method to indicate the type has kind * (it's a polymorphic value) @@ -3829,16 +3829,16 @@ trait Types extends api.Types { self: SymbolTable => } /** Substitutes the empty scope for any non-empty decls in the type. 
*/ - object dropAllRefinements extends TypeMap { - def apply(tp: Type): Type = tp match { - case rt @ RefinedType(parents, decls) if !decls.isEmpty => - mapOver(copyRefinedType(rt, parents, EmptyScope)) - case ClassInfoType(parents, decls, clazz) if !decls.isEmpty => - mapOver(ClassInfoType(parents, EmptyScope, clazz)) - case _ => - mapOver(tp) - } - } + // object dropAllRefinements extends TypeMap { + // def apply(tp: Type): Type = tp match { + // case rt @ RefinedType(parents, decls) if !decls.isEmpty => + // mapOver(copyRefinedType(rt, parents, EmptyScope)) + // case ClassInfoType(parents, decls, clazz) if !decls.isEmpty => + // mapOver(ClassInfoType(parents, EmptyScope, clazz)) + // case _ => + // mapOver(tp) + // } + // } /** Type with all top-level occurrences of abstract types replaced by their bounds */ def abstractTypesToBounds(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala ) @@ -4855,14 +4855,14 @@ trait Types extends api.Types { self: SymbolTable => } } - object StripAnnotationsMap extends TypeMap { - def apply(tp: Type): Type = tp match { - case AnnotatedType(_, atp, _) => - mapOver(atp) - case tp => - mapOver(tp) - } - } + // object StripAnnotationsMap extends TypeMap { + // def apply(tp: Type): Type = tp match { + // case AnnotatedType(_, atp, _) => + // mapOver(atp) + // case tp => + // mapOver(tp) + // } + // } /** A map to convert every occurrence of a wildcard type to a fresh * type variable */ @@ -4925,7 +4925,7 @@ trait Types extends api.Types { self: SymbolTable => /** A map to implement the `filter` method. */ class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) { - def withFilter(q: Type => Boolean) = new FilterTypeCollector(tp => p(tp) && q(tp)) + // def withFilter(q: Type => Boolean) = new FilterTypeCollector(tp => p(tp) && q(tp)) override def collect(tp: Type) = super.collect(tp).reverse @@ -5868,7 +5868,7 @@ trait Types extends api.Types { self: SymbolTable => * useful as documentation; it is likely that !isNonValueType(tp) * will serve better than isValueType(tp). */ - def isValueType(tp: Type) = isValueElseNonValue(tp) + // def isValueType(tp: Type) = isValueElseNonValue(tp) /** SLS 3.3, Non-Value Types * Is the given type definitely a non-value type, as defined in SLS 3.3? @@ -5879,7 +5879,7 @@ trait Types extends api.Types { self: SymbolTable => * not designated non-value types because there is code which depends on using * them as type arguments, but their precise status is unclear. */ - def isNonValueType(tp: Type) = !isValueElseNonValue(tp) + // def isNonValueType(tp: Type) = !isValueElseNonValue(tp) def isNonRefinementClassType(tpe: Type) = tpe match { case SingleType(_, sym) => sym.isModuleClass @@ -5928,7 +5928,7 @@ trait Types extends api.Types { self: SymbolTable => corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg) } - def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1 + // def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1 /** Does type `tp1` conform to `tp2`? */ private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = { @@ -6136,14 +6136,14 @@ trait Types extends api.Types { self: SymbolTable => /** Are `tps1` and `tps2` lists of equal length such that all elements * of `tps1` conform to corresponding elements of `tps2`? 
*/ - def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _) + // def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _) /** Does type `tp` implement symbol `sym` with same or * stronger type? Exact only if `sym` is a member of some * refinement type, otherwise we might return false negatives. */ - def specializesSym(tp: Type, sym: Symbol): Boolean = - specializesSym(tp, sym, AnyDepth) + // def specializesSym(tp: Type, sym: Symbol): Boolean = + // specializesSym(tp, sym, AnyDepth) def specializesSym(tp: Type, sym: Symbol, depth: Int): Boolean = tp.typeSymbol == NothingClass || @@ -6586,10 +6586,10 @@ trait Types extends api.Types { self: SymbolTable => case _ => t } - def elimRefinement(t: Type) = t match { - case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents) - case _ => t - } + // def elimRefinement(t: Type) = t match { + // case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents) + // case _ => t + // } /** Eliminate from list of types all elements which are a subtype * of some other element of the list. */ @@ -6634,15 +6634,15 @@ trait Types extends api.Types { self: SymbolTable => (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true) else (lub(ts), false) - def weakGlb(ts: List[Type]) = { - if (ts.nonEmpty && (ts forall isNumericValueType)) { - val nglb = numericGlb(ts) - if (nglb != NoType) (nglb, true) - else (glb(ts), false) - } else if (ts exists typeHasAnnotations) { - (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true) - } else (glb(ts), false) - } + // def weakGlb(ts: List[Type]) = { + // if (ts.nonEmpty && (ts forall isNumericValueType)) { + // val nglb = numericGlb(ts) + // if (nglb != NoType) (nglb, true) + // else (glb(ts), false) + // } else if (ts exists typeHasAnnotations) { + // (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true) + // } else (glb(ts), false) + // } def numericLub(ts: List[Type]) = ts reduceLeft ((t1, t2) => @@ -6650,11 +6650,11 @@ trait Types extends api.Types { self: SymbolTable => else if (isNumericSubType(t2, t1)) t1 else IntClass.tpe) - def numericGlb(ts: List[Type]) = - ts reduceLeft ((t1, t2) => - if (isNumericSubType(t1, t2)) t1 - else if (isNumericSubType(t2, t1)) t2 - else NoType) + // def numericGlb(ts: List[Type]) = + // ts reduceLeft ((t1, t2) => + // if (isNumericSubType(t1, t2)) t1 + // else if (isNumericSubType(t2, t1)) t2 + // else NoType) def isWeakSubType(tp1: Type, tp2: Type) = tp1.deconst.normalize match { @@ -7017,8 +7017,8 @@ trait Types extends api.Types { self: SymbolTable => // Without this, the matchesType call would lead to type variables on both // sides of a subtyping/equality judgement, which can lead to recursive types // being constructed. See pos/t0851 for a situation where this happens. - def suspendingTypeVarsInType[T](tp: Type)(op: => T): T = - suspendingTypeVars(typeVarsInType(tp))(op) + // def suspendingTypeVarsInType[T](tp: Type)(op: => T): T = + // suspendingTypeVars(typeVarsInType(tp))(op) @inline final def suspendingTypeVars[T](tvs: List[TypeVar])(op: => T): T = { val saved = tvs map (_.suspended) @@ -7257,7 +7257,7 @@ trait Types extends api.Types { self: SymbolTable => /** Members which can be imported into other scopes. 
*/ - def importableMembers(clazz: Symbol): Scope = importableMembers(clazz.info) + // def importableMembers(clazz: Symbol): Scope = importableMembers(clazz.info) def importableMembers(pre: Type): Scope = pre.members filter isImportable def objToAny(tp: Type): Type = @@ -7353,7 +7353,7 @@ trait Types extends api.Types { self: SymbolTable => object TypesStats { import BaseTypeSeqsStats._ val rawTypeCount = Statistics.newCounter ("#raw type creations") - val asSeenFromCount = Statistics.newCounter ("#asSeenFrom ops") + // val asSeenFromCount = Statistics.newCounter ("#asSeenFrom ops") val subtypeCount = Statistics.newCounter ("#subtype ops") val sametypeCount = Statistics.newCounter ("#sametype ops") val lubCount = Statistics.newCounter ("#toplevel lubs/glbs") diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala index 6170fcbb90..4dfeb913ce 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala @@ -95,7 +95,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { // -- Basic input routines -------------------------------------------- /** Peek at the current byte without moving the read index */ - def peekByte(): Int = bytes(readIndex) + // def peekByte(): Int = bytes(readIndex) /** Read a byte */ def readByte(): Int = { diff --git a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala index 94b2f77ff9..1f522e8ee3 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala @@ -115,7 +115,7 @@ object PickleFormat { */ val MajorVersion = 5 val MinorVersion = 0 - def VersionString = "V" + MajorVersion + "." + MinorVersion + // def VersionString = "V" + MajorVersion + "." 
+ MinorVersion final val TERMname = 1 final val TYPEname = 2 diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index c82546b552..76a4af850c 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -186,8 +186,8 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { case _ => errorBadSignature("bad name tag: " + tag) } } - protected def readTermName(): TermName = readName().toTermName - protected def readTypeName(): TypeName = readName().toTypeName + // protected def readTermName(): TermName = readName().toTermName + // protected def readTypeName(): TypeName = readName().toTypeName private def readEnd() = readNat() + readIndex /** Read a symbol */ @@ -793,7 +793,7 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { protected def readTreeRef(): Tree = at(readNat(), readTree) protected def readTypeNameRef(): TypeName = readNameRef().toTypeName - protected def readTermNameRef(): TermName = readNameRef().toTermName + // protected def readTermNameRef(): TermName = readNameRef().toTermName protected def readTemplateRef(): Template = readTreeRef() match { @@ -829,10 +829,10 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { protected def errorBadSignature(msg: String) = throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg) - protected def errorMissingRequirement(name: Name, owner: Symbol): Symbol = - mirrorThatLoaded(owner).missingHook(owner, name) orElse MissingRequirementError.signal( - s"bad reference while unpickling $filename: ${name.longString} not found in ${owner.tpe.widen}" - ) + // protected def errorMissingRequirement(name: Name, owner: Symbol): Symbol = + // mirrorThatLoaded(owner).missingHook(owner, name) orElse MissingRequirementError.signal( + // s"bad reference while unpickling $filename: ${name.longString} not found in ${owner.tpe.widen}" + // ) def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type) {} // can't do it; need a compiler for that. 
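Another illustrative sketch, not taken from the patch: the `...Ref` readers kept above share one shape: read an entry index with `readNat()`, then decode the entry at that index through `at`. A hypothetical reader written in the same style, assuming `readSymbol()` from elsewhere in this file:

    // follows the same dereferencing pattern as readTreeRef above
    protected def readSymbolRef(): Symbol = at(readNat(), readSymbol)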
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 2ba15e0776..8d2ac3565e 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -40,8 +40,8 @@ trait Collections { mforeach(xss)(x => if ((res eq null) && p(x)) res = Some(x)) if (res eq null) None else res } - final def mfilter[A](xss: List[List[A]])(p: A => Boolean) = - for (xs <- xss; x <- xs; if p(x)) yield x + // final def mfilter[A](xss: List[List[A]])(p: A => Boolean) = + // for (xs <- xss; x <- xs; if p(x)) yield x final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = { val lb = new ListBuffer[C] @@ -78,18 +78,18 @@ trait Collections { lb.toList } - final def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = { - val buf = new ListBuffer[A] - val seen = mutable.Set[B]() - xs foreach { x => - val y = f(x) - if (!seen(y)) { - buf += x - seen += y - } - } - buf.toList - } + // final def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = { + // val buf = new ListBuffer[A] + // val seen = mutable.Set[B]() + // xs foreach { x => + // val y = f(x) + // if (!seen(y)) { + // buf += x + // seen += y + // } + // } + // buf.toList + // } @tailrec final def flattensToEmpty(xss: Seq[Seq[_]]): Boolean = { xss.isEmpty || xss.head.isEmpty && flattensToEmpty(xss.tail) @@ -189,18 +189,18 @@ trait Collections { } false } - final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = { - var ys1 = xs1 - var ys2 = xs2 - while (!ys1.isEmpty && !ys2.isEmpty) { - if (!f(ys1.head, ys2.head)) - return false - - ys1 = ys1.tail - ys2 = ys2.tail - } - true - } + // final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = { + // var ys1 = xs1 + // var ys2 = xs2 + // while (!ys1.isEmpty && !ys2.isEmpty) { + // if (!f(ys1.head, ys2.head)) + // return false + + // ys1 = ys1.tail + // ys2 = ys2.tail + // } + // true + // } final def forall3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Boolean): Boolean = { var ys1 = xs1 var ys2 = xs2 @@ -223,5 +223,5 @@ trait Collections { } } -object Collections extends Collections { } +// object Collections extends Collections { } diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala index 4135f3c469..e580315285 100644 --- a/src/reflect/scala/reflect/internal/util/HashSet.scala +++ b/src/reflect/scala/reflect/internal/util/HashSet.scala @@ -6,8 +6,8 @@ package scala.reflect.internal.util object HashSet { - def apply[T >: Null <: AnyRef](): HashSet[T] = this(16) - def apply[T >: Null <: AnyRef](label: String): HashSet[T] = this(label, 16) + // def apply[T >: Null <: AnyRef](): HashSet[T] = this(16) + // def apply[T >: Null <: AnyRef](label: String): HashSet[T] = this(label, 16) def apply[T >: Null <: AnyRef](initialCapacity: Int): HashSet[T] = this("No Label", initialCapacity) def apply[T >: Null <: AnyRef](label: String, initialCapacity: Int): HashSet[T] = new HashSet[T](label, initialCapacity) diff --git a/src/reflect/scala/reflect/internal/util/Origins.scala b/src/reflect/scala/reflect/internal/util/Origins.scala index 3259a12163..a2b9e24ebc 100644 --- a/src/reflect/scala/reflect/internal/util/Origins.scala +++ b/src/reflect/scala/reflect/internal/util/Origins.scala @@ -6,9 +6,7 @@ package scala.reflect package internal.util -import NameTransformer._ import scala.collection.{ mutable, immutable } 
-import Origins._ /** A debugging class for logging from whence a method is being called. * Say you wanted to discover who was calling phase_= in SymbolTable. diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala index 0725e9775b..bbc95feaab 100644 --- a/src/reflect/scala/reflect/internal/util/Position.scala +++ b/src/reflect/scala/reflect/internal/util/Position.scala @@ -128,7 +128,7 @@ abstract class Position extends scala.reflect.api.Position { self => def endOrPoint: Int = point @deprecated("use point instead", "2.9.0") - def offset: Option[Int] = if (isDefined) Some(point) else None + def offset: Option[Int] = if (isDefined) Some(point) else None // used by sbt /** The same position with a different start value (if a range) */ def withStart(off: Int): Position = this diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala index bc2d0ee4db..4d10372662 100644 --- a/src/reflect/scala/reflect/internal/util/SourceFile.scala +++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala @@ -24,7 +24,7 @@ abstract class SourceFile { assert(offset < length, file + ": " + offset + " >= " + length) new OffsetPosition(this, offset) } - def position(line: Int, column: Int) : Position = new OffsetPosition(this, lineToOffset(line) + column) + // def position(line: Int, column: Int) : Position = new OffsetPosition(this, lineToOffset(line) + column) def offsetToLine(offset: Int): Int def lineToOffset(index : Int): Int @@ -37,8 +37,8 @@ abstract class SourceFile { def dbg(offset: Int) = (new OffsetPosition(this, offset)).dbgString def path = file.path - def beginsWith(offset: Int, text: String): Boolean = - (content drop offset) startsWith text + // def beginsWith(offset: Int, text: String): Boolean = + // (content drop offset) startsWith text def lineToString(index: Int): String = content drop lineToOffset(index) takeWhile (c => !isLineBreakChar(c.toChar)) mkString "" @@ -81,7 +81,7 @@ object ScriptSourceFile { } else 0 } - def stripHeader(cs: Array[Char]): Array[Char] = cs drop headerLength(cs) + // def stripHeader(cs: Array[Char]): Array[Char] = cs drop headerLength(cs) def apply(file: AbstractFile, content: Array[Char]) = { val underlying = new BatchSourceFile(file, content) @@ -91,7 +91,6 @@ object ScriptSourceFile { stripped } } -import ScriptSourceFile._ class ScriptSourceFile(underlying: BatchSourceFile, content: Array[Char], override val start: Int) extends BatchSourceFile(underlying.file, content) { override def isSelfContained = false diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala index bc02ad1058..3e8de65869 100644 --- a/src/reflect/scala/reflect/internal/util/StringOps.scala +++ b/src/reflect/scala/reflect/internal/util/StringOps.scala @@ -16,24 +16,24 @@ package scala.reflect.internal.util * @version 1.0 */ trait StringOps { - def onull(s: String) = if (s == null) "" else s - def oempty(xs: String*) = xs filterNot (x => x == null || x == "") - def ojoin(xs: String*): String = oempty(xs: _*) mkString " " - def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep - def ojoinOr(xs: Seq[String], sep: String, orElse: String) = { - val ys = oempty(xs: _*) - if (ys.isEmpty) orElse else ys mkString sep - } - def trimTrailingSpace(s: String) = { - if (s.length == 0 || !s.charAt(s.length - 1).isWhitespace) s - else { - var idx = s.length - 1 - while (idx >= 
0 && s.charAt(idx).isWhitespace) - idx -= 1 + // def onull(s: String) = if (s == null) "" else s + def oempty(xs: String*) = xs filterNot (x => x == null || x == "") + def ojoin(xs: String*): String = oempty(xs: _*) mkString " " + // def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep + // def ojoinOr(xs: Seq[String], sep: String, orElse: String) = { + // val ys = oempty(xs: _*) + // if (ys.isEmpty) orElse else ys mkString sep + // } + // def trimTrailingSpace(s: String) = { + // if (s.length == 0 || !s.charAt(s.length - 1).isWhitespace) s + // else { + // var idx = s.length - 1 + // while (idx >= 0 && s.charAt(idx).isWhitespace) + // idx -= 1 - s.substring(0, idx + 1) - } - } + // s.substring(0, idx + 1) + // } + // } def longestCommonPrefix(xs: List[String]): String = { if (xs.isEmpty || xs.contains("")) "" else xs.head.head match { @@ -57,13 +57,13 @@ trait StringOps { def words(str: String): List[String] = decompose(str, ' ') - def stripPrefixOpt(str: String, prefix: String): Option[String] = - if (str startsWith prefix) Some(str drop prefix.length) - else None + // def stripPrefixOpt(str: String, prefix: String): Option[String] = + // if (str startsWith prefix) Some(str drop prefix.length) + // else None - def stripSuffixOpt(str: String, suffix: String): Option[String] = - if (str endsWith suffix) Some(str dropRight suffix.length) - else None + // def stripSuffixOpt(str: String, suffix: String): Option[String] = + // if (str endsWith suffix) Some(str dropRight suffix.length) + // else None def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] = splitAt(str, str indexWhere f, doDropIndex) diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala index 8e2bcc2ff7..8208097d5c 100644 --- a/src/reflect/scala/reflect/internal/util/TableDef.scala +++ b/src/reflect/scala/reflect/internal/util/TableDef.scala @@ -67,11 +67,11 @@ class TableDef[T](_cols: Column[T]*) { override def toString = allToSeq mkString "\n" } - def formatterFor(rows: Seq[T]): T => String = { - val formatStr = new Table(rows).rowFormat + // def formatterFor(rows: Seq[T]): T => String = { + // val formatStr = new Table(rows).rowFormat - x => formatStr.format(colApply(x) : _*) - } + // x => formatStr.format(colApply(x) : _*) + // } def table(rows: Seq[T]) = new Table(rows) diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala index 7ea8a75417..abedda8737 100644 --- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala +++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala @@ -12,12 +12,10 @@ trait TraceSymbolActivity { if (enabled && global.isCompilerUniverse) scala.sys addShutdownHook showAllSymbols() - private type Set[T] = scala.collection.immutable.Set[T] - val allSymbols = mutable.Map[Int, Symbol]() val allChildren = mutable.Map[Int, List[Int]]() withDefaultValue Nil val prevOwners = mutable.Map[Int, List[(Int, Phase)]]() withDefaultValue Nil - val symsCaused = mutable.Map[Int, Int]() withDefaultValue 0 + // val symsCaused = mutable.Map[Int, Int]() withDefaultValue 0 val allTrees = mutable.Set[Tree]() def recordSymbolsInTree(tree: Tree) { diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala index 9882aad5e5..41e74f80e9 100644 --- 
a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala +++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala @@ -1,9 +1,6 @@ package scala.reflect.internal.util import scala.collection.mutable -import scala.collection.mutable.ArrayBuffer -import scala.collection.mutable.Builder -import scala.collection.mutable.SetBuilder import scala.collection.generic.Clearable import scala.runtime.AbstractFunction1 diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index 15befb67f1..de37176cd5 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -14,9 +14,9 @@ import scala.collection.mutable.ArrayBuffer /** * An abstraction over files for use in the reflection/compiler libraries. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' - * + * * @author Philippe Altherr * @version 1.0, 23/03/2004 */ @@ -85,7 +85,7 @@ object AbstractFile { * all other cases, the class SourceFile is used, which honors * global.settings.encoding.value. *

- * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ abstract class AbstractFile extends Iterable[AbstractFile] { @@ -195,9 +195,9 @@ abstract class AbstractFile extends Iterable[AbstractFile] { * @param directory ... * @return ... */ - def lookupPath(path: String, directory: Boolean): AbstractFile = { - lookup((f, p, dir) => f.lookupName(p, dir), path, directory) - } + // def lookupPath(path: String, directory: Boolean): AbstractFile = { + // lookup((f, p, dir) => f.lookupName(p, dir), path, directory) + // } /** Return an abstract file that does not check that `path` denotes * an existing file. diff --git a/src/reflect/scala/reflect/io/Directory.scala b/src/reflect/scala/reflect/io/Directory.scala index c040d1eac5..3a21509457 100644 --- a/src/reflect/scala/reflect/io/Directory.scala +++ b/src/reflect/scala/reflect/io/Directory.scala @@ -14,12 +14,12 @@ import java.io.{ File => JFile } * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object Directory { - import scala.util.Properties.{ tmpDir, userHome, userDir } + import scala.util.Properties.{ userHome, userDir } private def normalizePath(s: String) = Some(apply(Path(s).normalize)) def Current: Option[Directory] = if (userDir == "") None else normalizePath(userDir) - def Home: Option[Directory] = if (userHome == "") None else normalizePath(userHome) - def TmpDir: Option[Directory] = if (tmpDir == "") None else normalizePath(tmpDir) + // def Home: Option[Directory] = if (userHome == "") None else normalizePath(userHome) + // def TmpDir: Option[Directory] = if (tmpDir == "") None else normalizePath(tmpDir) def apply(path: Path): Directory = path.toDirectory @@ -30,20 +30,19 @@ object Directory { path.createDirectory() } } -import Path._ /** An abstraction for directories. * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This is library is considered experimental and should not be used unless you know what you are doing.'' */ class Directory(jfile: JFile) extends Path(jfile) { override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory override def toDirectory: Directory = this override def toFile: File = new File(jfile) - override def isValid = jfile.isDirectory() || !jfile.exists() + // override def isValid = jfile.isDirectory() || !jfile.exists() override def normalize: Directory = super.normalize.toDirectory /** An iterator over the contents of this directory. @@ -60,7 +59,7 @@ class Directory(jfile: JFile) extends Path(jfile) { override def walkFilter(cond: Path => Boolean): Iterator[Path] = list filter cond flatMap (_ walkFilter cond) - def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList()) + // def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList()) def deepFiles: Iterator[File] = Path.onlyFiles(deepList()) /** If optional depth argument is not given, will recurse @@ -74,6 +73,6 @@ class Directory(jfile: JFile) extends Path(jfile) { /** An iterator over the directories underneath this directory, * to the (optionally) given depth. 
*/ - def subdirs(depth: Int = 1): Iterator[Directory] = - deepList(depth) collect { case x: Directory => x } + // def subdirs(depth: Int = 1): Iterator[Directory] = + // deepList(depth) collect { case x: Directory => x } } diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala index 736ba5d51e..04e122af67 100644 --- a/src/reflect/scala/reflect/io/File.scala +++ b/src/reflect/scala/reflect/io/File.scala @@ -35,12 +35,12 @@ object File { type HasClose = { def close(): Unit } - def closeQuietly(target: HasClose) { - try target.close() catch { case e: IOException => } - } - def closeQuietly(target: JCloseable) { - try target.close() catch { case e: IOException => } - } + // def closeQuietly(target: HasClose) { + // try target.close() catch { case e: IOException => } + // } + // def closeQuietly(target: JCloseable) { + // try target.close() catch { case e: IOException => } + // } // this is a workaround for http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6503430 // we are using a static initializer to statically initialize a java class so we don't @@ -65,8 +65,8 @@ object File { // case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => () // } } -import File._ -import Path._ +// import File._ +// import Path._ /** An abstraction for files. For character data, a Codec * can be supplied at either creation time or when a method @@ -76,19 +76,19 @@ import Path._ * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This is library is considered experimental and should not be used unless you know what you are doing.'' */ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars { override val creationCodec = constructorCodec - def withCodec(codec: Codec): File = new File(jfile)(codec) + // def withCodec(codec: Codec): File = new File(jfile)(codec) override def addExtension(ext: String): File = super.addExtension(ext).toFile override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile override def toDirectory: Directory = new Directory(jfile) override def toFile: File = this override def normalize: File = super.normalize.toFile - override def isValid = jfile.isFile() || !jfile.exists() + // override def isValid = jfile.isFile() || !jfile.exists() override def length = super[Path].length override def walkFilter(cond: Path => Boolean): Iterator[Path] = if (cond(this)) Iterator.single(this) else Iterator.empty @@ -99,14 +99,14 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w /** Obtains a OutputStream. */ def outputStream(append: Boolean = false) = new FileOutputStream(jfile, append) def bufferedOutput(append: Boolean = false) = new BufferedOutputStream(outputStream(append)) - def printStream(append: Boolean = false) = new PrintStream(outputStream(append), true) + // def printStream(append: Boolean = false) = new PrintStream(outputStream(append), true) /** Obtains an OutputStreamWriter wrapped around a FileOutputStream. * This should behave like a less broken version of java.io.FileWriter, * in that unlike the java version you can specify the encoding. 
*/ - def writer(): OutputStreamWriter = writer(false) - def writer(append: Boolean): OutputStreamWriter = writer(append, creationCodec) + // def writer(): OutputStreamWriter = writer(false) + // def writer(append: Boolean): OutputStreamWriter = writer(append, creationCodec) def writer(append: Boolean, codec: Codec): OutputStreamWriter = new OutputStreamWriter(outputStream(append), codec.charSet) @@ -118,7 +118,7 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w new BufferedWriter(writer(append, codec)) def printWriter(): PrintWriter = new PrintWriter(bufferedWriter(), true) - def printWriter(append: Boolean): PrintWriter = new PrintWriter(bufferedWriter(append), true) + // def printWriter(append: Boolean): PrintWriter = new PrintWriter(bufferedWriter(append), true) /** Creates a new file and writes all the Strings to it. */ def writeAll(strings: String*): Unit = { @@ -127,11 +127,11 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w finally out.close() } - def writeBytes(bytes: Array[Byte]): Unit = { - val out = bufferedOutput() - try out write bytes - finally out.close() - } + // def writeBytes(bytes: Array[Byte]): Unit = { + // val out = bufferedOutput() + // try out write bytes + // finally out.close() + // } def appendAll(strings: String*): Unit = { val out = bufferedWriter(append = true) @@ -150,38 +150,38 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w try Some(slurp()) catch { case _: IOException => None } - def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = { - val CHUNK = 1024 * 1024 * 16 // 16 MB - val dest = destPath.toFile - if (!isValid) fail("Source %s is not a valid file." format name) - if (this.normalize == dest.normalize) fail("Source and destination are the same.") - if (!dest.parent.exists) fail("Destination cannot be created.") - if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.") - if (dest.isDirectory) fail("Destination exists but is a directory.") - - lazy val in_s = inputStream() - lazy val out_s = dest.outputStream() - lazy val in = in_s.getChannel() - lazy val out = out_s.getChannel() - - try { - val size = in.size() - var pos, count = 0L - while (pos < size) { - count = (size - pos) min CHUNK - pos += out.transferFrom(in, pos, count) - } - } - finally List[HasClose](out, out_s, in, in_s) foreach closeQuietly - - if (this.length != dest.length) - fail("Failed to completely copy %s to %s".format(name, dest.name)) - - if (preserveFileDate) - dest.lastModified = this.lastModified - - true - } + // def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = { + // val CHUNK = 1024 * 1024 * 16 // 16 MB + // val dest = destPath.toFile + // if (!isValid) fail("Source %s is not a valid file." 
format name) + // if (this.normalize == dest.normalize) fail("Source and destination are the same.") + // if (!dest.parent.exists) fail("Destination cannot be created.") + // if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.") + // if (dest.isDirectory) fail("Destination exists but is a directory.") + + // lazy val in_s = inputStream() + // lazy val out_s = dest.outputStream() + // lazy val in = in_s.getChannel() + // lazy val out = out_s.getChannel() + + // try { + // val size = in.size() + // var pos, count = 0L + // while (pos < size) { + // count = (size - pos) min CHUNK + // pos += out.transferFrom(in, pos, count) + // } + // } + // finally List[HasClose](out, out_s, in, in_s) foreach closeQuietly + + // if (this.length != dest.length) + // fail("Failed to completely copy %s to %s".format(name, dest.name)) + + // if (preserveFileDate) + // dest.lastModified = this.lastModified + + // true + // } /** Reflection since we're into the java 6+ API. */ diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index 36fdc04db4..77b5065db1 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -27,7 +27,7 @@ import scala.language.implicitConversions * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object Path { @@ -65,11 +65,11 @@ object Path { def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs filter (_.isDirectory) map (_.toDirectory) def onlyDirs(xs: List[Path]): List[Directory] = xs filter (_.isDirectory) map (_.toDirectory) def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile) - def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile) + // def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile) def roots: List[Path] = java.io.File.listRoots().toList map Path.apply - def apply(segments: Seq[String]): Path = apply(segments mkString java.io.File.separator) + // def apply(segments: Seq[String]): Path = apply(segments mkString java.io.File.separator) def apply(path: String): Path = apply(new JFile(path)) def apply(jfile: JFile): Path = if (jfile.isFile) new File(jfile) @@ -84,7 +84,7 @@ import Path._ /** The Path constructor is private so we can enforce some * semantics regarding how a Path might relate to the world. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class Path private[io] (val jfile: JFile) { @@ -95,7 +95,7 @@ class Path private[io] (val jfile: JFile) { // contents of the filesystem are in agreement. All objects are // valid except File objects whose path points to a directory and // Directory objects whose path points to a file. 
- def isValid: Boolean = true + // def isValid: Boolean = true // conversions def toFile: File = new File(jfile) @@ -136,7 +136,7 @@ class Path private[io] (val jfile: JFile) { def name: String = jfile.getName() def path: String = jfile.getPath() def normalize: Path = Path(jfile.getAbsolutePath()) - def isRootPath: Boolean = roots exists (_ isSame this) + // def isRootPath: Boolean = roots exists (_ isSame this) def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other) def relativize(other: Path) = { @@ -153,7 +153,7 @@ class Path private[io] (val jfile: JFile) { } // derived from identity - def root: Option[Path] = roots find (this startsWith _) + // def root: Option[Path] = roots find (this startsWith _) def segments: List[String] = (path split separator).toList filterNot (_.length == 0) /** * @return The path of the parent directory, or root if path is already root @@ -213,22 +213,22 @@ class Path private[io] (val jfile: JFile) { def canRead = jfile.canRead() def canWrite = jfile.canWrite() def exists = jfile.exists() - def notExists = try !jfile.exists() catch { case ex: SecurityException => false } + // def notExists = try !jfile.exists() catch { case ex: SecurityException => false } def isFile = jfile.isFile() def isDirectory = jfile.isDirectory() def isAbsolute = jfile.isAbsolute() - def isHidden = jfile.isHidden() + // def isHidden = jfile.isHidden() def isEmpty = path.length == 0 // Information def lastModified = jfile.lastModified() - def lastModified_=(time: Long) = jfile setLastModified time // should use setXXX function? + // def lastModified_=(time: Long) = jfile setLastModified time // should use setXXX function? def length = jfile.length() // Boolean path comparisons def endsWith(other: Path) = segments endsWith other.segments - def startsWith(other: Path) = segments startsWith other.segments + // def startsWith(other: Path) = segments startsWith other.segments def isSame(other: Path) = toCanonical == other.toCanonical def isFresher(other: Path) = lastModified > other.lastModified @@ -248,7 +248,7 @@ class Path private[io] (val jfile: JFile) { // deletions def delete() = jfile.delete() - def deleteIfExists() = if (jfile.exists()) delete() else false + // def deleteIfExists() = if (jfile.exists()) delete() else false /** Deletes the path recursively. Returns false on failure. * Use with caution! @@ -270,11 +270,11 @@ class Path private[io] (val jfile: JFile) { length == 0 } - def touch(modTime: Long = System.currentTimeMillis) = { - createFile() - if (isFile) - lastModified = modTime - } + // def touch(modTime: Long = System.currentTimeMillis) = { + // createFile() + // if (isFile) + // lastModified = modTime + // } // todo // def copyTo(target: Path, options ...): Boolean diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala index 82b0568657..6ee51d3d37 100644 --- a/src/reflect/scala/reflect/io/PlainFile.scala +++ b/src/reflect/scala/reflect/io/PlainFile.scala @@ -8,17 +8,17 @@ package scala.reflect package io import java.io.{ FileInputStream, FileOutputStream, IOException } -import PartialFunction._ + /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object PlainFile { /** * If the specified File exists, returns an abstract file backed * by it. Otherwise, returns null. 
*/ - def fromPath(file: Path): PlainFile = - if (file.isDirectory) new PlainDirectory(file.toDirectory) - else if (file.isFile) new PlainFile(file) - else null + // def fromPath(file: Path): PlainFile = + // if (file.isDirectory) new PlainDirectory(file.toDirectory) + // else if (file.isFile) new PlainFile(file) + // else null } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { @@ -28,7 +28,7 @@ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { } /** This class implements an abstract file backed by a File. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainFile(val givenPath: Path) extends AbstractFile { diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala index 61ec8a4c23..615f44acc5 100644 --- a/src/reflect/scala/reflect/io/Streamable.scala +++ b/src/reflect/scala/reflect/io/Streamable.scala @@ -17,14 +17,14 @@ import Path.fail * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object Streamable { /** Traits which can be viewed as a sequence of bytes. Source types * which know their length should override def length: Long for more * efficient method implementations. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ trait Bytes { @@ -69,7 +69,7 @@ object Streamable { } /** For objects which can be viewed as Chars. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ trait Chars extends Bytes { @@ -81,7 +81,7 @@ object Streamable { */ def creationCodec: Codec = implicitly[Codec] - def chars(): BufferedSource = chars(creationCodec) + // def chars(): BufferedSource = chars(creationCodec) def chars(codec: Codec): BufferedSource = Source.fromInputStream(inputStream())(codec) def lines(): Iterator[String] = lines(creationCodec) @@ -89,7 +89,7 @@ object Streamable { /** Obtains an InputStreamReader wrapped around a FileInputStream. */ - def reader(): InputStreamReader = reader(creationCodec) + // def reader(): InputStreamReader = reader(creationCodec) def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream, codec.charSet) /** Wraps a BufferedReader around the result of reader(). diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala index 78713c2ae0..72ffff2aa9 100644 --- a/src/reflect/scala/reflect/io/VirtualDirectory.scala +++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala @@ -11,7 +11,7 @@ import scala.collection.mutable * An in-memory directory. 
* * @author Lex Spoon - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory]) @@ -26,7 +26,8 @@ extends AbstractFile { def container = maybeContainer.get def isDirectory = true - var lastModified: Long = System.currentTimeMillis + val lastModified: Long = System.currentTimeMillis + // var lastModified: Long = System.currentTimeMillis override def file = null override def input = sys.error("directories cannot be read") diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala index 95f4429fad..014e02c6cd 100644 --- a/src/reflect/scala/reflect/io/VirtualFile.scala +++ b/src/reflect/scala/reflect/io/VirtualFile.scala @@ -14,7 +14,7 @@ import java.io.{ File => JFile } * * @author Philippe Altherr * @version 1.0, 23/03/2004 - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class VirtualFile(val name: String, override val path: String) extends AbstractFile { @@ -65,7 +65,7 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF /** Returns the time that this abstract file was last modified. */ private var _lastModified: Long = 0 def lastModified: Long = _lastModified - def lastModified_=(x: Long) = _lastModified = x + // def lastModified_=(x: Long) = _lastModified = x /** Returns all abstract subfiles of this abstract directory. */ def iterator: Iterator[AbstractFile] = { diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 3b57721e89..0e69834d26 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -20,12 +20,12 @@ import scala.annotation.tailrec * @author Philippe Altherr (original version) * @author Paul Phillips (this one) * @version 2.0, - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object ZipArchive { - def fromPath(path: String): FileZipArchive = fromFile(new JFile(path)) - def fromPath(path: Path): FileZipArchive = fromFile(path.toFile) + // def fromPath(path: String): FileZipArchive = fromFile(new JFile(path)) + // def fromPath(path: Path): FileZipArchive = fromFile(path.toFile) /** * @param file a File @@ -41,7 +41,7 @@ object ZipArchive { * @return A ZipArchive backed by the given url. 
*/ def fromURL(url: URL): URLZipArchive = new URLZipArchive(url) - def fromURL(url: String): URLZipArchive = fromURL(new URL(url)) + // def fromURL(url: String): URLZipArchive = fromURL(new URL(url)) private def dirName(path: String) = splitPath(path, true) private def baseName(path: String) = splitPath(path, false) @@ -79,7 +79,7 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq else Iterator(f) } } - def deepIterator = walkIterator(iterator) + // def deepIterator = walkIterator(iterator) /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) { // have to keep this name for compat with sbt's compiler-interface diff --git a/src/reflect/scala/reflect/macros/TreeBuilder.scala b/src/reflect/scala/reflect/macros/TreeBuilder.scala index 204dc40858..fbbbe13201 100644 --- a/src/reflect/scala/reflect/macros/TreeBuilder.scala +++ b/src/reflect/scala/reflect/macros/TreeBuilder.scala @@ -11,7 +11,6 @@ abstract class TreeBuilder { val global: Universe import global._ - import definitions._ /** Builds a reference to value whose type is given stable prefix. * The type must be suitable for this. For example, it diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 44fbd55162..07599e095d 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -22,7 +22,6 @@ import internal.Flags._ import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance} import scala.language.existentials import scala.runtime.{ScalaRunTime, BoxesRunTime} -import scala.reflect.internal.util.Collections._ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable => @@ -841,13 +840,13 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni * @return A Scala field object that corresponds to `jfield`. * // ??? should we return the getter instead? */ - def fieldToScala(jfield: jField): TermSymbol = - toScala(fieldCache, jfield)(_ fieldToScala1 _) + // def fieldToScala(jfield: jField): TermSymbol = + // toScala(fieldCache, jfield)(_ fieldToScala1 _) - private def fieldToScala1(jfield: jField): TermSymbol = { - val owner = followStatic(classToScala(jfield.getDeclaringClass), jfield.getModifiers) - (lookup(owner, jfield.getName) suchThat (!_.isMethod) orElse jfieldAsScala(jfield)).asTerm - } + // private def fieldToScala1(jfield: jField): TermSymbol = { + // val owner = followStatic(classToScala(jfield.getDeclaringClass), jfield.getModifiers) + // (lookup(owner, jfield.getName) suchThat (!_.isMethod) orElse jfieldAsScala(jfield)).asTerm + // } /** * The Scala package corresponding to given Java package @@ -1115,9 +1114,9 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni /** Optionally, the Java package corresponding to a given Scala package, or None if no such Java package exists. * @param pkg The Scala package */ - def packageToJavaOption(pkg: ModuleSymbol): Option[jPackage] = packageCache.toJavaOption(pkg) { - Option(jPackage.getPackage(pkg.fullName.toString)) - } + // def packageToJavaOption(pkg: ModuleSymbol): Option[jPackage] = packageCache.toJavaOption(pkg) { + // Option(jPackage.getPackage(pkg.fullName.toString)) + // } /** The Java class corresponding to given Scala class. 
* Note: This only works for diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index 0f70a676fa..a12e7d43d4 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -1,8 +1,6 @@ package scala.reflect package runtime -import internal.{SomePhase, NoPhase, Phase, TreeGen} - /** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders. * * Should not be instantiated directly, use [[scala.reflect.runtime.universe]] instead. @@ -11,7 +9,7 @@ import internal.{SomePhase, NoPhase, Phase, TreeGen} */ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self => - def picklerPhase = SomePhase + def picklerPhase = internal.SomePhase def forInteractive = false def forScaladoc = false @@ -26,4 +24,3 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S init() } - diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index 366b4319c3..b415abecb1 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -83,8 +83,8 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb override protected def createPackageObjectClassSymbol(pos: Position, newFlags: Long): PackageObjectClassSymbol = new PackageObjectClassSymbol(this, pos) with SynchronizedClassSymbol initFlags newFlags - override protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol = - new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags + // override protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol = + // new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags override protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol = new MethodSymbol(this, pos, name) with SynchronizedMethodSymbol initFlags newFlags diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala index b97913daf0..eadbc0c52e 100644 --- a/src/reflect/scala/reflect/runtime/package.scala +++ b/src/reflect/scala/reflect/runtime/package.scala @@ -6,7 +6,7 @@ package scala.reflect package object runtime { /** The entry point into Scala runtime reflection. - * + * * To use Scala runtime reflection, simply use or import `scala.reflect.runtime.universe._` * * See [[scala.reflect.api.Universe]] or the -- cgit v1.2.3 From d5e3f85946af966111f88af90a666a709df0ba6f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 00:14:40 -0800 Subject: Revert "Commenting out unused members." This reverts commit 951fc3a486. 
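The revert below mechanically restores what 951fc3a486 ("Commenting out unused members.") had disabled: the commented-out defs and vals come back, and the accompanying import and val/var adjustments are undone with them, across the files listed in the diffstat that follows. As a sketch of how a commit of this shape is produced, assuming a checkout of the repository with 951fc3a486 in its history, the standard command is:

    # produce the inverse patch of 951fc3a486 and commit it with git's
    # auto-generated Revert "..." / "This reverts commit ..." message
    git revert 951fc3a486
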
--- src/compiler/scala/reflect/reify/Errors.scala | 8 +- .../scala/reflect/reify/codegen/GenUtils.scala | 40 ++-- .../scala/reflect/reify/phases/Metalevels.scala | 4 +- .../scala/reflect/reify/utils/SymbolTables.scala | 2 +- src/compiler/scala/tools/ant/sabbus/Settings.scala | 2 +- src/compiler/scala/tools/cmd/FromString.scala | 14 +- src/compiler/scala/tools/cmd/Reference.scala | 2 +- .../scala/tools/nsc/CompilationUnits.scala | 22 +- src/compiler/scala/tools/nsc/CompileServer.scala | 2 +- src/compiler/scala/tools/nsc/CompilerCommand.scala | 6 +- src/compiler/scala/tools/nsc/CompilerRun.scala | 36 +-- src/compiler/scala/tools/nsc/Global.scala | 107 ++++----- src/compiler/scala/tools/nsc/ObjectRunner.scala | 4 +- src/compiler/scala/tools/nsc/Phases.scala | 4 +- src/compiler/scala/tools/nsc/Properties.scala | 2 +- src/compiler/scala/tools/nsc/ScriptRunner.scala | 2 +- src/compiler/scala/tools/nsc/ast/DocComments.scala | 8 +- src/compiler/scala/tools/nsc/ast/Printers.scala | 82 ++++++- src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 76 +++---- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 170 +++++++-------- src/compiler/scala/tools/nsc/ast/TreeInfo.scala | 6 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 25 +-- .../scala/tools/nsc/ast/parser/Scanners.scala | 59 +++-- .../scala/tools/nsc/ast/parser/Tokens.scala | 62 +++--- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 26 +-- .../tools/nsc/backend/icode/BasicBlocks.scala | 36 +-- .../nsc/backend/icode/ExceptionHandlers.scala | 10 +- .../scala/tools/nsc/backend/icode/GenICode.scala | 28 +-- .../scala/tools/nsc/backend/icode/Members.scala | 50 ++--- .../scala/tools/nsc/backend/icode/Opcodes.scala | 20 +- .../scala/tools/nsc/backend/icode/Primitives.scala | 16 +- .../scala/tools/nsc/backend/icode/Repository.scala | 14 +- .../scala/tools/nsc/backend/icode/TypeKinds.scala | 4 +- .../scala/tools/nsc/backend/icode/TypeStacks.scala | 6 +- .../backend/icode/analysis/CopyPropagation.scala | 20 +- .../backend/icode/analysis/DataFlowAnalysis.scala | 10 +- .../backend/icode/analysis/TypeFlowAnalysis.scala | 62 +++--- .../scala/tools/nsc/backend/jvm/GenASM.scala | 12 +- .../scala/tools/nsc/backend/jvm/GenJVM.scala | 26 +-- .../scala/tools/nsc/backend/msil/GenMSIL.scala | 12 +- .../tools/nsc/backend/opt/ClosureElimination.scala | 4 +- .../scala/tools/nsc/backend/opt/Inliners.scala | 6 +- src/compiler/scala/tools/nsc/doc/html/Page.scala | 6 +- .../scala/tools/nsc/doc/model/Entity.scala | 14 +- .../tools/nsc/doc/model/IndexModelFactory.scala | 2 +- .../scala/tools/nsc/doc/model/ModelFactory.scala | 48 ++-- .../doc/model/ModelFactoryImplicitSupport.scala | 32 +-- .../tools/nsc/doc/model/comment/Comment.scala | 2 +- .../nsc/doc/model/comment/CommentFactory.scala | 36 +-- .../tools/nsc/doc/model/diagram/Diagram.scala | 18 +- .../scala/tools/nsc/interactive/BuildManager.scala | 2 +- .../scala/tools/nsc/interactive/Global.scala | 68 +++--- .../scala/tools/nsc/interpreter/ByteCode.scala | 28 +-- .../scala/tools/nsc/interpreter/CodeHandlers.scala | 100 ++++----- .../scala/tools/nsc/interpreter/CommandLine.scala | 2 +- .../scala/tools/nsc/interpreter/Completion.scala | 2 +- .../tools/nsc/interpreter/CompletionAware.scala | 46 ++-- .../tools/nsc/interpreter/CompletionOutput.scala | 2 +- .../nsc/interpreter/ConsoleReaderHelper.scala | 10 +- .../scala/tools/nsc/interpreter/Delimited.scala | 6 +- .../scala/tools/nsc/interpreter/ExprTyper.scala | 9 +- .../scala/tools/nsc/interpreter/ILoop.scala | 55 ++--- .../scala/tools/nsc/interpreter/IMain.scala | 199 
+++++++++-------- .../scala/tools/nsc/interpreter/ISettings.scala | 10 +- .../scala/tools/nsc/interpreter/Imports.scala | 22 +- .../tools/nsc/interpreter/InteractiveReader.scala | 12 +- .../tools/nsc/interpreter/JLineCompletion.scala | 12 +- .../scala/tools/nsc/interpreter/JLineReader.scala | 8 +- .../scala/tools/nsc/interpreter/Logger.scala | 6 +- .../scala/tools/nsc/interpreter/LoopCommands.scala | 38 ++-- .../tools/nsc/interpreter/MemberHandlers.scala | 26 +-- .../scala/tools/nsc/interpreter/NamedParam.scala | 6 +- .../scala/tools/nsc/interpreter/Naming.scala | 2 +- .../scala/tools/nsc/interpreter/Parsed.scala | 14 +- .../scala/tools/nsc/interpreter/Phased.scala | 30 +-- .../scala/tools/nsc/interpreter/Power.scala | 152 ++++++------- .../scala/tools/nsc/interpreter/ReplConfig.scala | 24 +- .../scala/tools/nsc/interpreter/ReplProps.scala | 6 +- .../scala/tools/nsc/interpreter/ReplStrings.scala | 2 +- .../scala/tools/nsc/interpreter/RichClass.scala | 7 +- .../scala/tools/nsc/interpreter/SimpleReader.scala | 8 +- .../scala/tools/nsc/interpreter/TypeStrings.scala | 18 +- .../scala/tools/nsc/interpreter/package.scala | 54 ++--- .../tools/nsc/interpreter/session/History.scala | 10 +- .../nsc/interpreter/session/SimpleHistory.scala | 6 +- src/compiler/scala/tools/nsc/io/Fileish.scala | 52 ++--- src/compiler/scala/tools/nsc/io/Jar.scala | 24 +- src/compiler/scala/tools/nsc/io/MsilFile.scala | 2 +- src/compiler/scala/tools/nsc/io/Pickler.scala | 74 +++---- src/compiler/scala/tools/nsc/io/Socket.scala | 8 +- src/compiler/scala/tools/nsc/io/SourceReader.scala | 2 +- src/compiler/scala/tools/nsc/io/package.scala | 22 +- .../scala/tools/nsc/javac/JavaParsers.scala | 8 +- .../scala/tools/nsc/javac/JavaScanners.scala | 48 ++-- .../scala/tools/nsc/javac/JavaTokens.scala | 12 +- .../scala/tools/nsc/matching/MatchSupport.scala | 38 ++-- src/compiler/scala/tools/nsc/matching/Matrix.scala | 48 ++-- .../tools/nsc/matching/ParallelMatching.scala | 6 +- .../scala/tools/nsc/matching/PatternBindings.scala | 2 +- .../scala/tools/nsc/matching/Patterns.scala | 48 ++-- .../scala/tools/nsc/settings/AbsSettings.scala | 16 +- .../tools/nsc/settings/AdvancedScalaSettings.scala | 148 ++++++------- .../scala/tools/nsc/settings/MutableSettings.scala | 10 +- .../scala/tools/nsc/settings/ScalaSettings.scala | 10 +- .../tools/nsc/settings/StandardScalaSettings.scala | 2 +- .../scala/tools/nsc/settings/Warnings.scala | 14 +- .../nsc/symtab/classfile/AbstractFileReader.scala | 12 +- .../nsc/symtab/classfile/ClassfileParser.scala | 20 +- .../tools/nsc/symtab/classfile/ICodeReader.scala | 6 +- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 208 +++++++++--------- .../tools/nsc/transform/SpecializeTypes.scala | 28 +-- .../scala/tools/nsc/transform/TailCalls.scala | 2 +- .../tools/nsc/transform/TypingTransformers.scala | 2 +- .../scala/tools/nsc/typechecker/Contexts.scala | 32 +-- .../tools/nsc/typechecker/DestructureTypes.scala | 20 +- .../scala/tools/nsc/typechecker/Duplicators.scala | 38 ++-- .../scala/tools/nsc/typechecker/Implicits.scala | 12 +- .../tools/nsc/typechecker/MethodSynthesis.scala | 70 +++--- .../scala/tools/nsc/typechecker/Namers.scala | 22 +- .../tools/nsc/typechecker/NamesDefaults.scala | 2 +- .../tools/nsc/typechecker/PatternMatching.scala | 102 ++++----- .../scala/tools/nsc/typechecker/TreeCheckers.scala | 14 +- .../tools/nsc/typechecker/TypeDiagnostics.scala | 16 +- .../scala/tools/nsc/typechecker/Typers.scala | 14 +- .../scala/tools/nsc/typechecker/Unapplies.scala | 10 +- 
src/compiler/scala/tools/nsc/util/ClassPath.scala | 44 ++-- .../scala/tools/nsc/util/CommandLineParser.scala | 8 +- .../scala/tools/nsc/util/JavaCharArrayReader.scala | 51 +++-- .../scala/tools/nsc/util/ScalaClassLoader.scala | 62 +++--- .../scala/tools/nsc/util/SimpleTracer.scala | 2 +- src/compiler/scala/tools/nsc/util/package.scala | 22 +- .../scala/tools/reflect/ToolBoxFactory.scala | 4 +- src/compiler/scala/tools/util/Javap.scala | 4 +- src/compiler/scala/tools/util/PathResolver.scala | 12 +- .../scala/util/continuations/ControlContext.scala | 4 +- .../library/scala/util/continuations/package.scala | 6 +- src/detach/plugin/scala/tools/detach/Detach.scala | 2 +- src/partest/scala/tools/partest/CompilerTest.scala | 3 +- src/partest/scala/tools/partest/SecurityTest.scala | 22 +- src/partest/scala/tools/partest/TestUtil.scala | 14 +- .../partest/instrumented/Instrumentation.scala | 1 - .../tools/partest/nest/ConsoleFileManager.scala | 20 +- .../scala/tools/partest/nest/ConsoleRunner.scala | 2 +- .../scala/tools/partest/nest/FileManager.scala | 8 +- src/partest/scala/tools/partest/nest/NestUI.scala | 20 +- .../tools/partest/nest/ReflectiveRunner.scala | 9 +- .../scala/tools/partest/nest/RunnerManager.scala | 8 +- .../scala/tools/partest/nest/RunnerUtils.scala | 46 ++-- src/partest/scala/tools/partest/package.scala | 8 +- .../scala/tools/partest/utils/PrintMgr.scala | 104 ++++----- .../scala/reflect/internal/AnnotationInfos.scala | 11 +- .../scala/reflect/internal/BuildUtils.scala | 2 + .../reflect/internal/ClassfileConstants.scala | 12 +- .../scala/reflect/internal/Definitions.scala | 133 +++++------ .../reflect/internal/ExistentialsAndSkolems.scala | 1 + src/reflect/scala/reflect/internal/Importers.scala | 2 +- src/reflect/scala/reflect/internal/Names.scala | 64 +++--- src/reflect/scala/reflect/internal/Printers.scala | 4 +- src/reflect/scala/reflect/internal/Scopes.scala | 2 +- src/reflect/scala/reflect/internal/StdNames.scala | 242 ++++++++++----------- .../scala/reflect/internal/SymbolTable.scala | 3 +- src/reflect/scala/reflect/internal/Symbols.scala | 125 +++++------ src/reflect/scala/reflect/internal/TreeGen.scala | 12 +- src/reflect/scala/reflect/internal/TreeInfo.scala | 58 ++--- src/reflect/scala/reflect/internal/Trees.scala | 2 +- .../scala/reflect/internal/TypeDebugging.scala | 4 +- src/reflect/scala/reflect/internal/Types.scala | 196 ++++++++--------- .../reflect/internal/pickling/PickleBuffer.scala | 2 +- .../reflect/internal/pickling/PickleFormat.scala | 2 +- .../reflect/internal/pickling/UnPickler.scala | 14 +- .../scala/reflect/internal/util/Collections.scala | 54 ++--- .../scala/reflect/internal/util/HashSet.scala | 4 +- .../scala/reflect/internal/util/Origins.scala | 2 + .../scala/reflect/internal/util/Position.scala | 2 +- .../scala/reflect/internal/util/SourceFile.scala | 9 +- .../scala/reflect/internal/util/StringOps.scala | 46 ++-- .../scala/reflect/internal/util/TableDef.scala | 8 +- .../internal/util/TraceSymbolActivity.scala | 4 +- .../scala/reflect/internal/util/WeakHashSet.scala | 3 + src/reflect/scala/reflect/io/AbstractFile.scala | 12 +- src/reflect/scala/reflect/io/Directory.scala | 17 +- src/reflect/scala/reflect/io/File.scala | 104 ++++----- src/reflect/scala/reflect/io/Path.scala | 34 +-- src/reflect/scala/reflect/io/PlainFile.scala | 12 +- src/reflect/scala/reflect/io/Streamable.scala | 10 +- .../scala/reflect/io/VirtualDirectory.scala | 5 +- src/reflect/scala/reflect/io/VirtualFile.scala | 4 +- src/reflect/scala/reflect/io/ZipArchive.scala | 10 +- 
src/reflect/scala/reflect/macros/TreeBuilder.scala | 1 + .../scala/reflect/runtime/JavaMirrors.scala | 19 +- .../scala/reflect/runtime/JavaUniverse.scala | 5 +- .../reflect/runtime/SynchronizedSymbols.scala | 4 +- src/reflect/scala/reflect/runtime/package.scala | 2 +- 193 files changed, 2660 insertions(+), 2579 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala index 9e59b40236..a72233274e 100644 --- a/src/compiler/scala/reflect/reify/Errors.scala +++ b/src/compiler/scala/reflect/reify/Errors.scala @@ -21,10 +21,10 @@ trait Errors { throw new ReificationException(defaultErrorPosition, msg) } - // def CannotReifySymbol(sym: Symbol) = { - // val msg = "implementation restriction: cannot reify symbol %s (%s)".format(sym, sym.accurateKindString) - // throw new ReificationException(defaultErrorPosition, msg) - // } + def CannotReifySymbol(sym: Symbol) = { + val msg = "implementation restriction: cannot reify symbol %s (%s)".format(sym, sym.accurateKindString) + throw new ReificationException(defaultErrorPosition, msg) + } def CannotReifyWeakType(details: Any) = { val msg = "cannot create a TypeTag" + details + ": use WeakTypeTag instead" diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala index d0f8ae76e2..6554947f88 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala @@ -91,20 +91,20 @@ trait GenUtils { def termPath(fullname: String): Tree = path(fullname, newTermName) /** An (unreified) path that refers to type definition with given fully qualified name */ - // def typePath(fullname: String): Tree = path(fullname, newTypeName) - - // def isTough(tpe: Type) = { - // def isTough(tpe: Type) = tpe match { - // case _: RefinedType => true - // case _: ExistentialType => true - // case _: ClassInfoType => true - // case _: MethodType => true - // case _: PolyType => true - // case _ => false - // } + def typePath(fullname: String): Tree = path(fullname, newTypeName) + + def isTough(tpe: Type) = { + def isTough(tpe: Type) = tpe match { + case _: RefinedType => true + case _: ExistentialType => true + case _: ClassInfoType => true + case _: MethodType => true + case _: PolyType => true + case _ => false + } - // tpe != null && (tpe exists isTough) - // } + tpe != null && (tpe exists isTough) + } object TypedOrAnnotated { def unapply(tree: Tree): Option[Tree] = tree match { @@ -117,14 +117,14 @@ trait GenUtils { } } - // def isAnnotated(tpe: Type) = { - // def isAnnotated(tpe: Type) = tpe match { - // case _: AnnotatedType => true - // case _ => false - // } + def isAnnotated(tpe: Type) = { + def isAnnotated(tpe: Type) = tpe match { + case _: AnnotatedType => true + case _ => false + } - // tpe != null && (tpe exists isAnnotated) - // } + tpe != null && (tpe exists isAnnotated) + } def isSemiConcreteTypeMember(tpe: Type) = tpe match { case TypeRef(SingleType(_, _), sym, _) if sym.isAbstractType && !sym.isExistential => true diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala index cccf080dbf..92d951c3a1 100644 --- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala +++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala @@ -1,8 +1,6 @@ package scala.reflect.reify package phases -import scala.collection.{ mutable } - trait Metalevels { self: Reifier => @@ -103,7 +101,7 @@ 
trait Metalevels { */ val metalevels = new Transformer { var insideSplice = false - val inlineableBindings = mutable.Map[TermName, Tree]() + var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]() def withinSplice[T](op: => T) = { val old = insideSplice diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala index 6e34d64847..99118c4f2e 100644 --- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala +++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala @@ -15,7 +15,7 @@ trait SymbolTables { private[SymbolTable] val original: Option[List[Tree]] = None) { def syms: List[Symbol] = symtab.keys.toList - // def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined) + def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined) // def aliases: Map[Symbol, List[TermName]] = aliases.distinct groupBy (_._1) mapValues (_ map (_._2)) diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala index d0fefdaa03..fde61e9564 100644 --- a/src/compiler/scala/tools/ant/sabbus/Settings.scala +++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala @@ -10,7 +10,7 @@ package scala.tools.ant.sabbus import java.io.File -import org.apache.tools.ant.types.Path +import org.apache.tools.ant.types.{Path, Reference} class Settings { diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala index c9df9f9145..2a624875ee 100644 --- a/src/compiler/scala/tools/cmd/FromString.scala +++ b/src/compiler/scala/tools/cmd/FromString.scala @@ -25,17 +25,17 @@ abstract class FromString[+T](implicit t: ru.TypeTag[T]) extends PartialFunction object FromString { // We need these because we clash with the String => Path implicits. - // private def toFile(s: String) = new File(new java.io.File(s)) + private def toFile(s: String) = new File(new java.io.File(s)) private def toDir(s: String) = new Directory(new java.io.File(s)) /** Path related stringifiers. */ - // val ExistingFile: FromString[File] = new FromString[File]()(tagOfFile) { - // override def isDefinedAt(s: String) = toFile(s).isFile - // def apply(s: String): File = - // if (isDefinedAt(s)) toFile(s) - // else cmd.runAndExit(println("'%s' is not an existing file." format s)) - // } + val ExistingFile: FromString[File] = new FromString[File]()(tagOfFile) { + override def isDefinedAt(s: String) = toFile(s).isFile + def apply(s: String): File = + if (isDefinedAt(s)) toFile(s) + else cmd.runAndExit(println("'%s' is not an existing file." 
format s)) + } val ExistingDir: FromString[Directory] = new FromString[Directory]()(tagOfDirectory) { override def isDefinedAt(s: String) = toDir(s).isDirectory def apply(s: String): Directory = diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala index 4f1620d61a..d4f2060f81 100644 --- a/src/compiler/scala/tools/cmd/Reference.scala +++ b/src/compiler/scala/tools/cmd/Reference.scala @@ -26,7 +26,7 @@ trait Reference extends Spec { def isUnaryOption(s: String) = unary contains toOpt(s) def isBinaryOption(s: String) = binary contains toOpt(s) def isExpandOption(s: String) = expansionMap contains toOpt(s) - // def isAnyOption(s: String) = isUnaryOption(s) || isBinaryOption(s) || isExpandOption(s) + def isAnyOption(s: String) = isUnaryOption(s) || isBinaryOption(s) || isExpandOption(s) def expandArg(arg: String) = expansionMap.getOrElse(fromOpt(arg), List(arg)) diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 6d523552b8..5be819c134 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -26,7 +26,7 @@ trait CompilationUnits { self: Global => class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { /** the fresh name creator */ - val fresh: FreshNameCreator = new FreshNameCreator.Default + var fresh: FreshNameCreator = new FreshNameCreator.Default def freshTermName(prefix: String): TermName = newTermName(fresh.newName(prefix)) def freshTypeName(prefix: String): TypeName = newTypeName(fresh.newName(prefix)) @@ -108,16 +108,16 @@ trait CompilationUnits { self: Global => override def toString() = source.toString() - // def clear() { - // fresh = new FreshNameCreator.Default - // body = EmptyTree - // depends.clear() - // defined.clear() - // synthetics.clear() - // toCheck.clear() - // checkedFeatures = Set() - // icode.clear() - // } + def clear() { + fresh = new FreshNameCreator.Default + body = EmptyTree + depends.clear() + defined.clear() + synthetics.clear() + toCheck.clear() + checkedFeatures = Set() + icode.clear() + } } } diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index 11ee34af99..521f788fa1 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -29,7 +29,7 @@ class StandardCompileServer extends SocketServer { var shutdown = false var verbose = false - // val versionMsg = "Fast " + Properties.versionMsg + val versionMsg = "Fast " + Properties.versionMsg val MaxCharge = 0.8 diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 577d28f5f6..829e097714 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -15,7 +15,7 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { type Setting = Settings#Setting /** file extensions of files that the compiler can process */ - // lazy val fileEndings = Properties.fileEndings + lazy val fileEndings = Properties.fileEndings private val processArgumentsResult = if (shouldProcessArguments) processArguments @@ -40,8 +40,8 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { """.stripMargin.trim + "\n" def shortUsage = "Usage: %s " format cmdName - // def createUsagePreface(shouldExplain: Boolean) = - // if (shouldExplain) 
shortUsage + "\n" + explainAdvanced else "" + def createUsagePreface(shouldExplain: Boolean) = + if (shouldExplain) shortUsage + "\n" + explainAdvanced else "" /** Creates a help message for a subset of options based on cond */ def createUsageMsg(cond: Setting => Boolean): String = { diff --git a/src/compiler/scala/tools/nsc/CompilerRun.scala b/src/compiler/scala/tools/nsc/CompilerRun.scala index daad704534..6746b08155 100644 --- a/src/compiler/scala/tools/nsc/CompilerRun.scala +++ b/src/compiler/scala/tools/nsc/CompilerRun.scala @@ -1,21 +1,21 @@ -// /* NSC -- new Scala compiler -// * Copyright 2005-2013 LAMP/EPFL -// * @author Martin Odersky -// */ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Martin Odersky + */ -// package scala.tools.nsc +package scala.tools.nsc -// class CompilerRun { -// def firstPhase: Phase = NoPhase -// def terminalPhase: Phase = NoPhase -// def namerPhase: Phase = NoPhase -// def typerPhase: Phase = NoPhase -// def refchecksPhase: Phase = NoPhase -// def explicitouterPhase: Phase = NoPhase -// def erasurePhase: Phase = NoPhase -// def flattenPhase: Phase = NoPhase -// def mixinPhase: Phase = NoPhase -// def icodePhase: Phase = NoPhase -// def phaseNamed(name: String): Phase = NoPhase -// } +class CompilerRun { + def firstPhase: Phase = NoPhase + def terminalPhase: Phase = NoPhase + def namerPhase: Phase = NoPhase + def typerPhase: Phase = NoPhase + def refchecksPhase: Phase = NoPhase + def explicitouterPhase: Phase = NoPhase + def erasurePhase: Phase = NoPhase + def flattenPhase: Phase = NoPhase + def mixinPhase: Phase = NoPhase + def icodePhase: Phase = NoPhase + def phaseNamed(name: String): Phase = NoPhase +} diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 8802c3ec80..13bec828ca 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -70,7 +70,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def this(settings: Settings) = this(settings, new ConsoleReporter(settings)) - // def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym) + def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym) def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase @@ -265,14 +265,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg)) def informProgress(msg: String) = if (settings.verbose.value) inform("[" + msg + "]") - // def inform[T](msg: String, value: T): T = returning(value)(x => inform(msg + x)) + def inform[T](msg: String, value: T): T = returning(value)(x => inform(msg + x)) def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start)) def logError(msg: String, t: Throwable): Unit = () - // def logAfterEveryPhase[T](msg: String)(op: => T) { - // log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op)) - // } + def logAfterEveryPhase[T](msg: String)(op: => T) { + log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op)) + } override def shouldLogAtThisPhase = settings.log.isSetByUser && ( (settings.log containsPhase globalPhase) || (settings.log containsPhase phase) @@ -419,8 +419,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } /** Switch to turn on detailed type logs */ - val 
printTypings = settings.Ytyperdebug.value - val printInfers = settings.Yinferdebug.value + var printTypings = settings.Ytyperdebug.value + var printInfers = settings.Yinferdebug.value // phaseName = "parser" object syntaxAnalyzer extends { @@ -639,11 +639,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } // phaseName = "SAMPLE PHASE" - // object sampleTransform extends { - // val global: Global.this.type = Global.this - // val runsAfter = List[String]() - // val runsRightAfter = None - // } with SampleTransform + object sampleTransform extends { + val global: Global.this.type = Global.this + val runsAfter = List[String]() + val runsRightAfter = None + } with SampleTransform /** The checkers are for validating the compiler data structures * at phase boundaries. @@ -778,7 +778,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Returns List of (phase, value) pairs, including only those * where the value compares unequal to the previous phase's value. */ - def afterEachPhase[T](op: => T): List[(Phase, T)] = { // used in tests + def afterEachPhase[T](op: => T): List[(Phase, T)] = { phaseDescriptors.map(_.ownPhase).filterNot(_ eq NoPhase).foldLeft(List[(Phase, T)]()) { (res, ph) => val value = exitingPhase(ph)(op) if (res.nonEmpty && res.head._2 == value) res @@ -790,17 +790,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter) * phase transitions where the result of the operation gave a different * list than it had when run during the previous phase. */ - // def changesAfterEachPhase[T](op: => List[T]): List[ChangeAfterPhase[T]] = { - // val ops = ((NoPhase, Nil)) :: afterEachPhase(op) - - // ops sliding 2 map { - // case (_, before) :: (ph, after) :: Nil => - // val lost = before filterNot (after contains _) - // val gained = after filterNot (before contains _) - // ChangeAfterPhase(ph, lost, gained) - // case _ => ??? - // } toList - // } + def changesAfterEachPhase[T](op: => List[T]): List[ChangeAfterPhase[T]] = { + val ops = ((NoPhase, Nil)) :: afterEachPhase(op) + + ops sliding 2 map { + case (_, before) :: (ph, after) :: Nil => + val lost = before filterNot (after contains _) + val gained = after filterNot (before contains _) + ChangeAfterPhase(ph, lost, gained) + case _ => ??? 
+ } toList + } private def numberedPhase(ph: Phase) = "%2d/%s".format(ph.id, ph.name) case class ChangeAfterPhase[+T](ph: Phase, lost: List[T], gained: List[T]) { @@ -811,14 +811,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) override def toString = mkStr("Lost", lost) + mkStr("Gained", gained) } - // def describeAfterEachPhase[T](op: => T): List[String] = - // afterEachPhase(op) map { case (ph, t) => "[after %-15s] %s".format(numberedPhase(ph), t) } + def describeAfterEachPhase[T](op: => T): List[String] = + afterEachPhase(op) map { case (ph, t) => "[after %-15s] %s".format(numberedPhase(ph), t) } - // def describeAfterEveryPhase[T](op: => T): String = - // describeAfterEachPhase(op) map (" " + _ + "\n") mkString + def describeAfterEveryPhase[T](op: => T): String = + describeAfterEachPhase(op) map (" " + _ + "\n") mkString - // def printAfterEachPhase[T](op: => T): Unit = - // describeAfterEachPhase(op) foreach (m => println(" " + m)) + def printAfterEachPhase[T](op: => T): Unit = + describeAfterEachPhase(op) foreach (m => println(" " + m)) // ------------ Invalidations --------------------------------- @@ -1057,7 +1057,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) @inline final def exitingPostErasure[T](op: => T): T = exitingPhase(currentRun.posterasurePhase)(op) @inline final def exitingExplicitOuter[T](op: => T): T = exitingPhase(currentRun.explicitouterPhase)(op) @inline final def exitingFlatten[T](op: => T): T = exitingPhase(currentRun.flattenPhase)(op) - // @inline final def exitingIcode[T](op: => T): T = exitingPhase(currentRun.icodePhase)(op) + @inline final def exitingIcode[T](op: => T): T = exitingPhase(currentRun.icodePhase)(op) @inline final def exitingMixin[T](op: => T): T = exitingPhase(currentRun.mixinPhase)(op) @inline final def exitingPickler[T](op: => T): T = exitingPhase(currentRun.picklerPhase)(op) @inline final def exitingRefchecks[T](op: => T): T = exitingPhase(currentRun.refchecksPhase)(op) @@ -1071,21 +1071,21 @@ class Global(var currentSettings: Settings, var reporter: Reporter) @inline final def enteringMixin[T](op: => T): T = enteringPhase(currentRun.mixinPhase)(op) @inline final def enteringPickler[T](op: => T): T = enteringPhase(currentRun.picklerPhase)(op) @inline final def enteringRefchecks[T](op: => T): T = enteringPhase(currentRun.refchecksPhase)(op) - // @inline final def enteringSpecialize[T](op: => T): T = enteringPhase(currentRun.specializePhase)(op) + @inline final def enteringSpecialize[T](op: => T): T = enteringPhase(currentRun.specializePhase)(op) @inline final def enteringTyper[T](op: => T): T = enteringPhase(currentRun.typerPhase)(op) @inline final def enteringUncurry[T](op: => T): T = enteringPhase(currentRun.uncurryPhase)(op) - // def explainContext(c: analyzer.Context): String = ( - // if (c == null) "" else ( - // """| context owners: %s - // | - // |Enclosing block or template: - // |%s""".format( - // c.owner.ownerChain.takeWhile(!_.isPackageClass).mkString(" -> "), - // nodePrinters.nodeToString(c.enclClassOrMethod.tree) - // ) - // ) - // ) + def explainContext(c: analyzer.Context): String = ( + if (c == null) "" else ( + """| context owners: %s + | + |Enclosing block or template: + |%s""".format( + c.owner.ownerChain.takeWhile(!_.isPackageClass).mkString(" -> "), + nodePrinters.nodeToString(c.enclClassOrMethod.tree) + ) + ) + ) // Owners up to and including the first package class. 
private def ownerChainString(sym: Symbol): String = ( if (sym == null) "" @@ -1098,8 +1098,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) pairs.toList collect { case (k, v) if v != null => "%20s: %s".format(k, v) } mkString "\n" ) - // def explainTree(t: Tree): String = formatExplain( - // ) + def explainTree(t: Tree): String = formatExplain( + ) /** Don't want to introduce new errors trying to report errors, * so swallow exceptions. @@ -1158,7 +1158,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } def newUnitParser(code: String) = new syntaxAnalyzer.UnitParser(newCompilationUnit(code)) - // def newUnitScanner(code: String) = new syntaxAnalyzer.UnitScanner(newCompilationUnit(code)) + def newUnitScanner(code: String) = new syntaxAnalyzer.UnitScanner(newCompilationUnit(code)) def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code)) def newSourceFile(code: String) = new BatchSourceFile("", code) @@ -1181,8 +1181,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings) val allConditionalWarnings = List(deprecationWarnings0, uncheckedWarnings0, featureWarnings, inlinerWarnings) - def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList // used in sbt - def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList // used in sbt + // for sbt's benefit + def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList + def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList var reportedFeature = Set[Symbol]() @@ -1349,7 +1350,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val namerPhase = phaseNamed("namer") // val packageobjectsPhase = phaseNamed("packageobjects") val typerPhase = phaseNamed("typer") - // val inlineclassesPhase = phaseNamed("inlineclasses") + val inlineclassesPhase = phaseNamed("inlineclasses") // val superaccessorsPhase = phaseNamed("superaccessors") val picklerPhase = phaseNamed("pickler") val refchecksPhase = phaseNamed("refchecks") @@ -1362,7 +1363,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val erasurePhase = phaseNamed("erasure") val posterasurePhase = phaseNamed("posterasure") // val lazyvalsPhase = phaseNamed("lazyvals") - // val lambdaliftPhase = phaseNamed("lambdalift") + val lambdaliftPhase = phaseNamed("lambdalift") // val constructorsPhase = phaseNamed("constructors") val flattenPhase = phaseNamed("flatten") val mixinPhase = phaseNamed("mixin") @@ -1372,11 +1373,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers") val closelimPhase = phaseNamed("closelim") val dcePhase = phaseNamed("dce") - // val jvmPhase = phaseNamed("jvm") + val jvmPhase = phaseNamed("jvm") // val msilPhase = phaseNamed("msil") def runIsAt(ph: Phase) = globalPhase.id == ph.id - // def runIsPast(ph: Phase) = globalPhase.id > ph.id + def runIsPast(ph: Phase) = globalPhase.id > ph.id // def runIsAtBytecodeGen = (runIsAt(jvmPhase) || runIsAt(msilPhase)) def runIsAtOptimiz = { runIsAt(inlinerPhase) || // listing phases in full for robustness when -Ystop-after has been given. @@ -1742,7 +1743,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // and forScaladoc default to onlyPresentation, which is the same as defaulting // to false except in old code. 
The downside is that this leaves us calling a // deprecated method: but I see no simple way out, so I leave it for now. - // def forJVM = settings.target.value startsWith "jvm" + def forJVM = settings.target.value startsWith "jvm" override def forMSIL = settings.target.value startsWith "msil" def forInteractive = false def forScaladoc = false diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala index e36e154925..3c75429311 100644 --- a/src/compiler/scala/tools/nsc/ObjectRunner.scala +++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala @@ -13,8 +13,8 @@ import util.Exceptional.unwrap trait CommonRunner { /** Check whether a class with the specified name * exists on the specified class path. */ - // def classExists(urls: List[URL], objectName: String): Boolean = - // ScalaClassLoader.classExists(urls, objectName) + def classExists(urls: List[URL], objectName: String): Boolean = + ScalaClassLoader.classExists(urls, objectName) /** Run a given object, specified by name, using a * specified classpath and argument list. diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala index e81d3ebc8a..aad70a9c5e 100644 --- a/src/compiler/scala/tools/nsc/Phases.scala +++ b/src/compiler/scala/tools/nsc/Phases.scala @@ -20,7 +20,7 @@ object Phases { } val values = new Array[Cell](MaxPhases + 1) def results = values filterNot (_ == null) - // def apply(ph: Phase): T = values(ph.id).value + def apply(ph: Phase): T = values(ph.id).value def update(ph: Phase, value: T): Unit = values(ph.id) = Cell(ph, value) } /** A class for recording the elapsed time of each phase in the @@ -38,7 +38,7 @@ object Phases { >> ("ms" -> (_.value)) >+ " " << ("share" -> (_.value.toDouble * 100 / total formatted "%.2f")) } - // def formatted = "" + table() + def formatted = "" + table() } } diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index 028fc24efb..55fd196716 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -21,5 +21,5 @@ object Properties extends scala.util.PropertiesTrait { // derived values def isEmacsShell = propOrEmpty("env.emacs") != "" - // def fileEndings = fileEndingString.split("""\|""").toList + def fileEndings = fileEndingString.split("""\|""").toList } diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index 344a60903a..0b307a861e 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -48,7 +48,7 @@ class ScriptRunner extends HasCompileSocket { case x => x } - // def isScript(settings: Settings) = settings.script.value != "" + def isScript(settings: Settings) = settings.script.value != "" /** Choose a jar filename to hold the compiled version of a script. */ private def jarFileFor(scriptFile: String)= File( diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index 40f97222a9..21407289db 100755 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -22,9 +22,9 @@ trait DocComments { self: Global => val docComments = mutable.HashMap[Symbol, DocComment]() /** Associate comment with symbol `sym` at position `pos`. 
*/ - // def docComment(sym: Symbol, docStr: String, pos: Position = NoPosition) = - // if ((sym ne null) && (sym ne NoSymbol)) - // docComments += (sym -> DocComment(docStr, pos)) + def docComment(sym: Symbol, docStr: String, pos: Position = NoPosition) = + if ((sym ne null) && (sym ne NoSymbol)) + docComments += (sym -> DocComment(docStr, pos)) /** The raw doc comment of symbol `sym`, as it appears in the source text, "" if missing. */ @@ -120,7 +120,7 @@ trait DocComments { self: Global => getDocComment(sym) map getUseCases getOrElse List() } - // def useCases(sym: Symbol): List[(Symbol, String, Position)] = useCases(sym, sym.enclClass) + def useCases(sym: Symbol): List[(Symbol, String, Position)] = useCases(sym, sym.enclClass) /** Returns the javadoc format of doc comment string `s`, including wiki expansion */ diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala index d0aa004c9a..0414e0f123 100644 --- a/src/compiler/scala/tools/nsc/ast/Printers.scala +++ b/src/compiler/scala/tools/nsc/ast/Printers.scala @@ -200,17 +200,91 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => override def printTree(tree: Tree) { print(safe(tree)) } } + class TreeMatchTemplate { + // non-trees defined in Trees + // + // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int) + // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position]) + // + def apply(t: Tree): Unit = t match { + // eliminated by typer + case Annotated(annot, arg) => + case AssignOrNamedArg(lhs, rhs) => + case DocDef(comment, definition) => + case Import(expr, selectors) => + + // eliminated by refchecks + case ModuleDef(mods, name, impl) => + case TypeTreeWithDeferredRefCheck() => + + // eliminated by erasure + case TypeDef(mods, name, tparams, rhs) => + case Typed(expr, tpt) => + + // eliminated by cleanup + case ApplyDynamic(qual, args) => + + // eliminated by explicitouter + case Alternative(trees) => + case Bind(name, body) => + case CaseDef(pat, guard, body) => + case Star(elem) => + case UnApply(fun, args) => + + // eliminated by lambdalift + case Function(vparams, body) => + + // eliminated by uncurry + case AppliedTypeTree(tpt, args) => + case CompoundTypeTree(templ) => + case ExistentialTypeTree(tpt, whereClauses) => + case SelectFromTypeTree(qual, selector) => + case SingletonTypeTree(ref) => + case TypeBoundsTree(lo, hi) => + + // survivors + case Apply(fun, args) => + case ArrayValue(elemtpt, trees) => + case Assign(lhs, rhs) => + case Block(stats, expr) => + case ClassDef(mods, name, tparams, impl) => + case DefDef(mods, name, tparams, vparamss, tpt, rhs) => + case EmptyTree => + case Ident(name) => + case If(cond, thenp, elsep) => + case LabelDef(name, params, rhs) => + case Literal(value) => + case Match(selector, cases) => + case New(tpt) => + case PackageDef(pid, stats) => + case Return(expr) => + case Select(qualifier, selector) => + case Super(qual, mix) => + case Template(parents, self, body) => + case This(qual) => + case Throw(expr) => + case Try(block, catches, finalizer) => + case TypeApply(fun, args) => + case TypeTree() => + case ValDef(mods, name, tpt, rhs) => + + // missing from the Trees comment + case Parens(args) => // only used during parsing + case SelectFromArray(qual, name, erasure) => // only used during erasure + } + } + def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, 
settings.Yshowsymkinds.value) def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value) def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true) def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer) - // def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream)) - // def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter)) + def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream)) + def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter)) def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer) - // def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream)) - // def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter)) + def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream)) + def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter)) override def newTreePrinter(writer: PrintWriter): TreePrinter = if (settings.Ycompacttrees.value) newCompactTreePrinter(writer) diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index 0696b0e673..3acefe9441 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -84,16 +84,16 @@ trait TreeDSL { def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectClass.tpe) def ANY_== (other: Tree) = fn(target, Any_==, other) def ANY_!= (other: Tree) = fn(target, Any_!=, other) - // def OBJ_== (other: Tree) = fn(target, Object_==, other) + def OBJ_== (other: Tree) = fn(target, Object_==, other) def OBJ_!= (other: Tree) = fn(target, Object_!=, other) def OBJ_EQ (other: Tree) = fn(target, Object_eq, other) def OBJ_NE (other: Tree) = fn(target, Object_ne, other) - // def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other) - // def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other) + def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other) + def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other) def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other) def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other) - // def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other) + def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other) // generic operations on ByteClass, IntClass, LongClass def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other) @@ -101,8 +101,8 @@ trait TreeDSL { def GEN_== (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.EQ), other) def GEN_!= (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.NE), other) - // def BOOL_&& (other: Tree) = fn(target, Boolean_and, other) - // def BOOL_|| (other: Tree) = fn(target, Boolean_or, other) + def BOOL_&& (other: Tree) = fn(target, Boolean_and, other) + def BOOL_|| (other: Tree) = fn(target, Boolean_or, other) /** Apply, Select, Match **/ def APPLY(params: Tree*) = Apply(target, params.toList) @@ -158,7 +158,7 @@ trait 
TreeDSL { def mkTree(rhs: Tree): ResultTreeType def ===(rhs: Tree): ResultTreeType - // private var _mods: Modifiers = null + private var _mods: Modifiers = null private var _tpt: Tree = null private var _pos: Position = null @@ -166,19 +166,19 @@ trait TreeDSL { _tpt = TypeTree(tp) this } - // def withFlags(flags: Long*): this.type = { - // if (_mods == null) - // _mods = defaultMods + def withFlags(flags: Long*): this.type = { + if (_mods == null) + _mods = defaultMods - // _mods = flags.foldLeft(_mods)(_ | _) - // this - // } + _mods = flags.foldLeft(_mods)(_ | _) + this + } def withPos(pos: Position): this.type = { _pos = pos this } - final def mods = defaultMods // if (_mods == null) defaultMods else _mods + final def mods = if (_mods == null) defaultMods else _mods final def tpt = if (_tpt == null) defaultTpt else _tpt final def pos = if (_pos == null) defaultPos else _pos } @@ -243,7 +243,7 @@ trait TreeDSL { } class TryStart(body: Tree, catches: List[CaseDef], fin: Tree) { def CATCH(xs: CaseDef*) = new TryStart(body, xs.toList, fin) - // def FINALLY(x: Tree) = Try(body, catches, x) + def FINALLY(x: Tree) = Try(body, catches, x) def ENDTRY = Try(body, catches, fin) } @@ -251,16 +251,16 @@ trait TreeDSL { def DEFAULT: CaseStart = new CaseStart(WILD.empty, EmptyTree) class SymbolMethods(target: Symbol) { - // def BIND(body: Tree) = Bind(target, body) + def BIND(body: Tree) = Bind(target, body) def IS_NULL() = REF(target) OBJ_EQ NULL - // def NOT_NULL() = REF(target) OBJ_NE NULL + def NOT_NULL() = REF(target) OBJ_NE NULL def GET() = fn(REF(target), nme.get) // name of nth indexed argument to a method (first parameter list), defaults to 1st - // def ARG(idx: Int = 0) = Ident(target.paramss.head(idx)) + def ARG(idx: Int = 0) = Ident(target.paramss.head(idx)) def ARGS = target.paramss.head - // def ARGNAMES = ARGS map Ident + def ARGNAMES = ARGS map Ident } /** Top level accessible. 
*/ @@ -268,31 +268,31 @@ trait TreeDSL { def THROW(sym: Symbol, msg: Tree): Throw = Throw(sym.tpe, msg.TOSTRING()) def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList)) - // def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*) + def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*) - // def DEF(name: Name, tp: Type): DefTreeStart = DEF(name) withType tp - // def DEF(name: Name): DefTreeStart = new DefTreeStart(name) + def DEF(name: Name, tp: Type): DefTreeStart = DEF(name) withType tp + def DEF(name: Name): DefTreeStart = new DefTreeStart(name) def DEF(sym: Symbol): DefSymStart = new DefSymStart(sym) - // def VAL(name: Name, tp: Type): ValTreeStart = VAL(name) withType tp - // def VAL(name: Name): ValTreeStart = new ValTreeStart(name) + def VAL(name: Name, tp: Type): ValTreeStart = VAL(name) withType tp + def VAL(name: Name): ValTreeStart = new ValTreeStart(name) def VAL(sym: Symbol): ValSymStart = new ValSymStart(sym) - // def VAR(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.MUTABLE - // def VAR(name: Name): ValTreeStart = VAL(name) withFlags Flags.MUTABLE - // def VAR(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.MUTABLE + def VAR(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.MUTABLE + def VAR(name: Name): ValTreeStart = VAL(name) withFlags Flags.MUTABLE + def VAR(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.MUTABLE - // def LAZYVAL(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.LAZY - // def LAZYVAL(name: Name): ValTreeStart = VAL(name) withFlags Flags.LAZY - // def LAZYVAL(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.LAZY + def LAZYVAL(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.LAZY + def LAZYVAL(name: Name): ValTreeStart = VAL(name) withFlags Flags.LAZY + def LAZYVAL(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.LAZY def AND(guards: Tree*) = if (guards.isEmpty) EmptyTree else guards reduceLeft gen.mkAnd - // def OR(guards: Tree*) = - // if (guards.isEmpty) EmptyTree - // else guards reduceLeft gen.mkOr + def OR(guards: Tree*) = + if (guards.isEmpty) EmptyTree + else guards reduceLeft gen.mkOr def IF(tree: Tree) = new IfStart(tree, EmptyTree) def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree) @@ -311,11 +311,11 @@ trait TreeDSL { case List(tree) if flattenUnary => tree case _ => Apply(TupleClass(trees.length).companionModule, trees: _*) } - // def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match { - // case Nil => gen.scalaUnitConstr - // case List(tree) if flattenUnary => tree - // case _ => AppliedTypeTree(REF(TupleClass(trees.length)), trees) - // } + def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match { + case Nil => gen.scalaUnitConstr + case List(tree) if flattenUnary => tree + case _ => AppliedTypeTree(REF(TupleClass(trees.length)), trees) + } /** Implicits - some of these should probably disappear **/ implicit def mkTreeMethods(target: Tree): TreeMethods = new TreeMethods(target) diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index ea7f674809..983f355c58 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -63,71 +63,71 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr) } // if it's a Match, mark the selector unchecked; otherwise nothing. 
- // def mkUncheckedMatch(tree: Tree) = tree match { - // case Match(selector, cases) => atPos(tree.pos)(Match(mkUnchecked(selector), cases)) - // case _ => tree - // } + def mkUncheckedMatch(tree: Tree) = tree match { + case Match(selector, cases) => atPos(tree.pos)(Match(mkUnchecked(selector), cases)) + case _ => tree + } - // def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) { - // // This can't be "Annotated(New(SwitchClass), expr)" because annotations - // // are very picky about things and it crashes the compiler with "unexpected new". - // Annotated(Ident(nme.synthSwitch), expr) - // } + def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) { + // This can't be "Annotated(New(SwitchClass), expr)" because annotations + // are very picky about things and it crashes the compiler with "unexpected new". + Annotated(Ident(nme.synthSwitch), expr) + } // TODO: would be so much nicer if we would know during match-translation (i.e., type checking) // whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum - // class MatchMatcher { - // def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig) - // def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig) - // def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig) - - // def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef) - - // def apply(matchExpr: Tree): Tree = matchExpr match { - // // old-style match or virtpatmat switch - // case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr ) - // caseMatch(matchExpr, selector, cases, identity) - // // old-style match or virtpatmat switch - // case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr ) - // caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m)) - // // virtpatmat - // case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if !settings.XoldPatmat.value => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr ) - // caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher) - // // optimized version of virtpatmat - // case Block(stats, matchEndDef) if !settings.XoldPatmat.value && (stats forall treeInfo.hasSynthCaseSymbol) => - // // the assumption is once we encounter a case, the remainder of the block will consist of cases - // // the prologue may be empty, usually it is the valdef that stores the scrut - // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) - // caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity) - // // optimized version of virtpatmat - // case Block(outerStats, orig@Block(stats, matchEndDef)) if !settings.XoldPatmat.value && (stats forall treeInfo.hasSynthCaseSymbol) => - // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) - // caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m)) - // case other => - // unknownTree(other) - // } - - // def unknownTree(t: Tree): Tree = throw new MatchError(t) - // def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr) - - // def 
dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] = - // if (settings.XoldPatmat.value) cases - // else cases filter { - // case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false - // case CaseDef(pat, guard, body) => true - // } - // } - - // def mkCached(cvar: Symbol, expr: Tree): Tree = { - // val cvarRef = mkUnattributedRef(cvar) - // Block( - // List( - // If(Apply(Select(cvarRef, nme.eq), List(Literal(Constant(null)))), - // Assign(cvarRef, expr), - // EmptyTree)), - // cvarRef - // ) - // } + class MatchMatcher { + def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig) + def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig) + def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig) + + def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef) + + def apply(matchExpr: Tree): Tree = matchExpr match { + // old-style match or virtpatmat switch + case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr ) + caseMatch(matchExpr, selector, cases, identity) + // old-style match or virtpatmat switch + case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr ) + caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m)) + // virtpatmat + case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if !settings.XoldPatmat.value => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr ) + caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher) + // optimized version of virtpatmat + case Block(stats, matchEndDef) if !settings.XoldPatmat.value && (stats forall treeInfo.hasSynthCaseSymbol) => + // the assumption is once we encounter a case, the remainder of the block will consist of cases + // the prologue may be empty, usually it is the valdef that stores the scrut + val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) + caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity) + // optimized version of virtpatmat + case Block(outerStats, orig@Block(stats, matchEndDef)) if !settings.XoldPatmat.value && (stats forall treeInfo.hasSynthCaseSymbol) => + val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) + caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m)) + case other => + unknownTree(other) + } + + def unknownTree(t: Tree): Tree = throw new MatchError(t) + def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr) + + def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] = + if (settings.XoldPatmat.value) cases + else cases filter { + case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false + case CaseDef(pat, guard, body) => true + } + } + + def mkCached(cvar: Symbol, expr: Tree): Tree = { + val cvarRef = mkUnattributedRef(cvar) + Block( + List( + If(Apply(Select(cvarRef, nme.eq), List(Literal(Constant(null)))), + Assign(cvarRef, expr), + EmptyTree)), + 
cvarRef + ) + } // Builds a tree of the form "{ lhs = rhs ; lhs }" def mkAssignAndReturn(lhs: Symbol, rhs: Tree): Tree = { @@ -152,8 +152,8 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { // def m: T = { if (m$ eq null) m$ = new m$class(...) m$ } // where (...) are eventual outer accessors - // def mkCachedModuleAccessDef(accessor: Symbol, mvar: Symbol) = - // DefDef(accessor, mkCached(mvar, newModule(accessor, mvar.tpe))) + def mkCachedModuleAccessDef(accessor: Symbol, mvar: Symbol) = + DefDef(accessor, mkCached(mvar, newModule(accessor, mvar.tpe))) def mkModuleAccessDef(accessor: Symbol, msym: Symbol) = DefDef(accessor, Select(This(msym.owner), msym)) @@ -165,8 +165,8 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { } // def m: T; - // def mkModuleAccessDcl(accessor: Symbol) = - // DefDef(accessor setFlag lateDEFERRED, EmptyTree) + def mkModuleAccessDcl(accessor: Symbol) = + DefDef(accessor setFlag lateDEFERRED, EmptyTree) def mkRuntimeCall(meth: Name, args: List[Tree]): Tree = mkRuntimeCall(meth, Nil, args) @@ -223,8 +223,8 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { if (isRepeatedParam) wildcardStar(arg) else arg /** Make forwarder to method `target`, passing all parameters in `params` */ - // def mkForwarder(target: Tree, vparamss: List[List[Symbol]]) = - // (target /: vparamss)((fn, vparams) => Apply(fn, vparams map paramToArg)) + def mkForwarder(target: Tree, vparamss: List[List[Symbol]]) = + (target /: vparamss)((fn, vparams) => Apply(fn, vparams map paramToArg)) /** Applies a wrapArray call to an array, making it a WrappedArray. * Don't let a reference type parameter be inferred, in case it's a singleton: @@ -264,24 +264,24 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { else mkCast(tree, pt) - // def mkZeroContravariantAfterTyper(tp: Type): Tree = { - // // contravariant -- for replacing an argument in a method call - // // must use subtyping, as otherwise we miss types like `Any with Int` - // val tree = - // if (NullClass.tpe <:< tp) Literal(Constant(null)) - // else if (UnitClass.tpe <:< tp) Literal(Constant()) - // else if (BooleanClass.tpe <:< tp) Literal(Constant(false)) - // else if (FloatClass.tpe <:< tp) Literal(Constant(0.0f)) - // else if (DoubleClass.tpe <:< tp) Literal(Constant(0.0d)) - // else if (ByteClass.tpe <:< tp) Literal(Constant(0.toByte)) - // else if (ShortClass.tpe <:< tp) Literal(Constant(0.toShort)) - // else if (IntClass.tpe <:< tp) Literal(Constant(0)) - // else if (LongClass.tpe <:< tp) Literal(Constant(0L)) - // else if (CharClass.tpe <:< tp) Literal(Constant(0.toChar)) - // else mkCast(Literal(Constant(null)), tp) - - // tree - // } + def mkZeroContravariantAfterTyper(tp: Type): Tree = { + // contravariant -- for replacing an argument in a method call + // must use subtyping, as otherwise we miss types like `Any with Int` + val tree = + if (NullClass.tpe <:< tp) Literal(Constant(null)) + else if (UnitClass.tpe <:< tp) Literal(Constant()) + else if (BooleanClass.tpe <:< tp) Literal(Constant(false)) + else if (FloatClass.tpe <:< tp) Literal(Constant(0.0f)) + else if (DoubleClass.tpe <:< tp) Literal(Constant(0.0d)) + else if (ByteClass.tpe <:< tp) Literal(Constant(0.toByte)) + else if (ShortClass.tpe <:< tp) Literal(Constant(0.toShort)) + else if (IntClass.tpe <:< tp) Literal(Constant(0)) + else if (LongClass.tpe <:< tp) Literal(Constant(0L)) + else if (CharClass.tpe <:< tp) Literal(Constant(0.toChar)) + else 
mkCast(Literal(Constant(null)), tp) + + tree + } /** Translate names in Select/Ident nodes to type names. */ diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala index 5c1ab29548..97227a5b6e 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala @@ -6,7 +6,7 @@ package scala.tools.nsc package ast -// import scala.reflect.internal.HasFlags +import scala.reflect.internal.HasFlags /** This class ... * @@ -39,6 +39,6 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo { case _ => super.firstDefinesClassOrObject(trees, name) } - // def isInterface(mods: HasFlags, body: List[Tree]) = - // mods.isTrait && (body forall isInterfaceMember) + def isInterface(mods: HasFlags, body: List[Tree]) = + mods.isTrait && (body forall isInterfaceMember) } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 501127865b..efcde1f74f 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -9,8 +9,7 @@ package scala.tools.nsc package ast.parser -import scala.collection.{ mutable, immutable } -import mutable.{ ListBuffer, StringBuilder } +import scala.collection.mutable.{ListBuffer, StringBuilder} import scala.reflect.internal.{ ModifierFlags => Flags } import scala.reflect.internal.Chars.{ isScalaLetter } import scala.reflect.internal.util.{ SourceFile, OffsetPosition } @@ -168,7 +167,7 @@ self => object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices val global: self.global.type = self.global - // def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix) + def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix) } def xmlLiteral : Tree = xmlp.xLiteral @@ -464,7 +463,7 @@ self => /* ------------- ERROR HANDLING ------------------------------------------- */ - val assumedClosingParens = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) + var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) private var inFunReturnType = false @inline private def fromWithinReturnType[T](body: => T): T = { @@ -641,7 +640,7 @@ self => case _ => false } - // def isTypeIntro: Boolean = isTypeIntroToken(in.token) + def isTypeIntro: Boolean = isTypeIntroToken(in.token) def isStatSeqEnd = in.token == RBRACE || in.token == EOF @@ -766,9 +765,9 @@ self => } } - // def checkSize(kind: String, size: Int, max: Int) { - // if (size > max) syntaxError("too many "+kind+", maximum = "+max, false) - // } + def checkSize(kind: String, size: Int, max: Int) { + if (size > max) syntaxError("too many "+kind+", maximum = "+max, false) + } def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) = if (treeInfo.isLeftAssoc(op) != leftAssoc) @@ -1219,10 +1218,10 @@ self => * EqualsExpr ::= `=' Expr * }}} */ - // def equalsExpr(): Tree = { - // accept(EQUALS) - // expr() - // } + def equalsExpr(): Tree = { + accept(EQUALS) + expr() + } def condExpr(): Tree = { if (in.token == LPAREN) { @@ -1965,7 +1964,7 @@ self => /** Default entry points into some pattern contexts. 
*/ def pattern(): Tree = noSeq.pattern() - // def patterns(): List[Tree] = noSeq.patterns() + def patterns(): List[Tree] = noSeq.patterns() def seqPatterns(): List[Tree] = seqOK.patterns() def xmlSeqPatterns(): List[Tree] = xmlSeqOK.patterns() // Called from xml parser def argumentPatterns(): List[Tree] = inParens { diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index b346ce0a14..1be5fb1782 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -10,8 +10,7 @@ import scala.reflect.internal.util._ import scala.reflect.internal.Chars._ import Tokens._ import scala.annotation.switch -import scala.collection.{ mutable, immutable } -import mutable.{ ListBuffer, ArrayBuffer } +import scala.collection.mutable.{ ListBuffer, ArrayBuffer } import scala.xml.Utility.{ isNameStart } /** See Parsers.scala / ParsersCommon for some explanation of ScannersCommon. @@ -27,7 +26,7 @@ trait ScannersCommon { trait ScannerCommon extends CommonTokenData { // things to fill in, in addition to buf, decodeUni which come from CharArrayReader - // def warning(off: Int, msg: String): Unit + def warning(off: Int, msg: String): Unit def error (off: Int, msg: String): Unit def incompleteInputError(off: Int, msg: String): Unit def deprecationWarning(off: Int, msg: String): Unit @@ -52,7 +51,7 @@ trait Scanners extends ScannersCommon { type Offset = Int /** An undefined offset */ - // val NoOffset: Offset = -1 + val NoOffset: Offset = -1 trait TokenData extends CommonTokenData { @@ -89,7 +88,7 @@ trait Scanners extends ScannersCommon { def isAtEnd = charOffset >= buf.length - // def flush = { charOffset = offset; nextChar(); this } + def flush = { charOffset = offset; nextChar(); this } def resume(lastCode: Int) = { token = lastCode @@ -101,7 +100,7 @@ trait Scanners extends ScannersCommon { /** the last error offset */ - // var errOffset: Offset = NoOffset + var errOffset: Offset = NoOffset /** A character buffer for literals */ @@ -1064,7 +1063,7 @@ trait Scanners extends ScannersCommon { def syntaxError(off: Offset, msg: String) { error(off, msg) token = ERROR - // errOffset = off + errOffset = off } /** generate an error at the current token offset @@ -1077,7 +1076,7 @@ trait Scanners extends ScannersCommon { def incompleteInputError(msg: String) { incompleteInputError(offset, msg) token = EOF - // errOffset = offset + errOffset = offset } override def toString() = token match { @@ -1242,7 +1241,7 @@ trait Scanners extends ScannersCommon { override val decodeUni: Boolean = !settings.nouescape.value // suppress warnings, throw exception on errors - // def warning(off: Offset, msg: String): Unit = () + def warning(off: Offset, msg: String): Unit = () def deprecationWarning(off: Offset, msg: String): Unit = () def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg) def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg) @@ -1253,7 +1252,7 @@ trait Scanners extends ScannersCommon { class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) { def this(unit: CompilationUnit) = this(unit, List()) - // override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg) + override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg) override def deprecationWarning(off: Offset, msg: String) = 
unit.deprecationWarning(unit.position(off), msg) override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg) override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg) @@ -1312,7 +1311,7 @@ trait Scanners extends ScannersCommon { } class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) { - val balance = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) + var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) init() @@ -1434,17 +1433,17 @@ trait Scanners extends ScannersCommon { else bp :: insertPatch(bps, patch) } - // def leftColumn(offset: Int) = - // if (offset == -1) -1 else column(lineStart(line(offset))) + def leftColumn(offset: Int) = + if (offset == -1) -1 else column(lineStart(line(offset))) - // def rightColumn(offset: Int, default: Int) = - // if (offset == -1) -1 - // else { - // val rlin = line(offset) - // if (lineStart(rlin) == offset) column(offset) - // else if (rlin + 1 < lineStart.length) column(lineStart(rlin + 1)) - // else default - // } + def rightColumn(offset: Int, default: Int) = + if (offset == -1) -1 + else { + val rlin = line(offset) + if (lineStart(rlin) == offset) column(offset) + else if (rlin + 1 < lineStart.length) column(lineStart(rlin + 1)) + else default + } def insertRBrace(): List[BracePatch] = { def insert(bps: List[BracePair]): List[BracePatch] = bps match { @@ -1487,16 +1486,16 @@ trait Scanners extends ScannersCommon { delete(bracePairs) } - // def imbalanceMeasure: Int = { - // def measureList(bps: List[BracePair]): Int = - // (bps map measure).sum - // def measure(bp: BracePair): Int = - // (if (bp.lindent != bp.rindent) 1 else 0) + measureList(bp.nested) - // measureList(bracePairs) - // } + def imbalanceMeasure: Int = { + def measureList(bps: List[BracePair]): Int = + (bps map measure).sum + def measure(bp: BracePair): Int = + (if (bp.lindent != bp.rindent) 1 else 0) + measureList(bp.nested) + measureList(bracePairs) + } - // def improves(patches1: List[BracePatch]): Boolean = - // imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure + def improves(patches1: List[BracePatch]): Boolean = + imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure override def error(offset: Int, msg: String) {} } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala index be8e1bc8b4..c3fd414426 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala @@ -6,14 +6,14 @@ package scala.tools.nsc package ast.parser -// import scala.annotation.switch +import scala.annotation.switch /** Common code between JavaTokens and Tokens. Not as much (and not as concrete) * as one might like because JavaTokens for no clear reason chose new numbers for * identical token sets. 
*/ abstract class Tokens { - // import scala.reflect.internal.Chars._ + import scala.reflect.internal.Chars._ /** special tokens */ final val EMPTY = -3 @@ -32,16 +32,16 @@ abstract class Tokens { def LPAREN: Int def RBRACE: Int - // def isIdentifier(code: Int): Boolean + def isIdentifier(code: Int): Boolean def isLiteral(code: Int): Boolean - // def isKeyword(code: Int): Boolean - // def isSymbol(code: Int): Boolean - - // final def isSpace(at: Char) = at == ' ' || at == '\t' - // final def isNewLine(at: Char) = at == CR || at == LF || at == FF - // final def isBrace(code: Int) = code >= LPAREN && code <= RBRACE - // final def isOpenBrace(code: Int) = isBrace(code) && (code % 2 == 0) - // final def isCloseBrace(code: Int) = isBrace(code) && (code % 2 == 1) + def isKeyword(code: Int): Boolean + def isSymbol(code: Int): Boolean + + final def isSpace(at: Char) = at == ' ' || at == '\t' + final def isNewLine(at: Char) = at == CR || at == LF || at == FF + final def isBrace(code: Int) = code >= LPAREN && code <= RBRACE + final def isOpenBrace(code: Int) = isBrace(code) && (code % 2 == 0) + final def isCloseBrace(code: Int) = isBrace(code) && (code % 2 == 1) } object Tokens extends Tokens { @@ -56,16 +56,16 @@ object Tokens extends Tokens { /** identifiers */ final val IDENTIFIER = 10 final val BACKQUOTED_IDENT = 11 - // def isIdentifier(code: Int) = - // code >= IDENTIFIER && code <= BACKQUOTED_IDENT + def isIdentifier(code: Int) = + code >= IDENTIFIER && code <= BACKQUOTED_IDENT - // @switch def canBeginExpression(code: Int) = code match { - // case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true - // case LBRACE|LPAREN|LBRACKET|COMMENT => true - // case IF|DO|WHILE|FOR|NEW|TRY|THROW => true - // case NULL|THIS|TRUE|FALSE => true - // case code => isLiteral(code) - // } + @switch def canBeginExpression(code: Int) = code match { + case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true + case LBRACE|LPAREN|LBRACKET|COMMENT => true + case IF|DO|WHILE|FOR|NEW|TRY|THROW => true + case NULL|THIS|TRUE|FALSE => true + case code => isLiteral(code) + } /** keywords */ final val IF = 20 @@ -113,16 +113,16 @@ object Tokens extends Tokens { final val MACRO = 62 // not yet used in 2.10 final val THEN = 63 // not yet used in 2.10 - // def isKeyword(code: Int) = - // code >= IF && code <= LAZY + def isKeyword(code: Int) = + code >= IF && code <= LAZY - // @switch def isDefinition(code: Int) = code match { - // case CLASS|TRAIT|OBJECT => true - // case CASECLASS|CASEOBJECT => true - // case DEF|VAL|VAR => true - // case TYPE => true - // case _ => false - // } + @switch def isDefinition(code: Int) = code match { + case CLASS|TRAIT|OBJECT => true + case CASECLASS|CASEOBJECT => true + case DEF|VAL|VAR => true + case TYPE => true + case _ => false + } /** special symbols */ final val COMMA = 70 @@ -141,8 +141,8 @@ object Tokens extends Tokens { final val AT = 83 final val VIEWBOUND = 84 - // def isSymbol(code: Int) = - // code >= COMMA && code <= VIEWBOUND + def isSymbol(code: Int) = + code >= COMMA && code <= VIEWBOUND /** parenthesis */ final val LPAREN = 90 diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 6dc2055121..49b772ed2c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -26,15 +26,15 @@ abstract class TreeBuilder { def o2p(offset: Int): Position def r2p(start: Int, point: Int, end: Int): Position - // def rootId(name: Name) = gen.rootId(name) 
+ def rootId(name: Name) = gen.rootId(name) def rootScalaDot(name: Name) = gen.rootScalaDot(name) def scalaDot(name: Name) = gen.scalaDot(name) def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) - // def scalaAnyValConstr = scalaDot(tpnme.AnyVal) - // def scalaAnyConstr = scalaDot(tpnme.Any) + def scalaAnyValConstr = scalaDot(tpnme.AnyVal) + def scalaAnyConstr = scalaDot(tpnme.Any) def scalaUnitConstr = scalaDot(tpnme.Unit) def productConstr = scalaDot(tpnme.Product) - // def productConstrN(n: Int) = scalaDot(newTypeName("Product" + n)) + def productConstrN(n: Int) = scalaDot(newTypeName("Product" + n)) def serializableConstr = scalaDot(tpnme.Serializable) def convertToTypeName(t: Tree) = gen.convertToTypeName(t) @@ -446,15 +446,15 @@ abstract class TreeBuilder { /** Create tree for a lifted expression XX-LIFTING */ - // def makeLifted(gs: List[ValFrom], body: Tree): Tree = { - // def combine(gs: List[ValFrom]): ValFrom = (gs: @unchecked) match { - // case g :: Nil => g - // case ValFrom(pos1, pat1, rhs1) :: gs2 => - // val ValFrom(_, pat2, rhs2) = combine(gs2) - // ValFrom(pos1, makeTuple(List(pat1, pat2), false), Apply(Select(rhs1, nme.zip), List(rhs2))) - // } - // makeForYield(List(combine(gs)), body) - // } + def makeLifted(gs: List[ValFrom], body: Tree): Tree = { + def combine(gs: List[ValFrom]): ValFrom = (gs: @unchecked) match { + case g :: Nil => g + case ValFrom(pos1, pat1, rhs1) :: gs2 => + val ValFrom(_, pat2, rhs2) = combine(gs2) + ValFrom(pos1, makeTuple(List(pat1, pat2), false), Apply(Select(rhs1, nme.zip), List(rhs2))) + } + makeForYield(List(combine(gs)), body) + } /** Create tree for a pattern alternative */ def makeAlternative(ts: List[Tree]): Tree = { diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index 52fc3d08e1..b62d5cb4e4 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -281,12 +281,12 @@ trait BasicBlocks { } /** Insert instructions in 'is' immediately after index 'idx'. */ - // def insertAfter(idx: Int, is: List[Instruction]) { - // assert(closed, "Instructions can be replaced only after the basic block is closed") + def insertAfter(idx: Int, is: List[Instruction]) { + assert(closed, "Instructions can be replaced only after the basic block is closed") - // instrs = instrs.patch(idx + 1, is, 0) - // code.touched = true - // } + instrs = instrs.patch(idx + 1, is, 0) + code.touched = true + } /** Removes instructions found at the given positions. * @@ -436,10 +436,10 @@ trait BasicBlocks { ignore = true } - // def exitIgnoreMode() { - // assert(ignore, "Exit ignore mode when not in ignore mode: " + this) - // ignore = false - // } + def exitIgnoreMode() { + assert(ignore, "Exit ignore mode when not in ignore mode: " + this) + ignore = false + } /** Return the last instruction of this basic block. 
*/ def lastInstruction = @@ -498,15 +498,15 @@ trait BasicBlocks { override def hashCode = label * 41 + code.hashCode // Instead of it, rather use a printer - // def print() { print(java.lang.System.out) } - - // def print(out: java.io.PrintStream) { - // out.println("block #"+label+" :") - // foreach(i => out.println(" " + i)) - // out.print("Successors: ") - // successors.foreach((x: BasicBlock) => out.print(" "+x.label.toString())) - // out.println() - // } + def print() { print(java.lang.System.out) } + + def print(out: java.io.PrintStream) { + out.println("block #"+label+" :") + foreach(i => out.println(" " + i)) + out.print("Successors: ") + successors.foreach((x: BasicBlock) => out.print(" "+x.label.toString())) + out.println() + } private def succString = if (successors.isEmpty) "[S: N/A]" else successors.distinct.mkString("[S: ", ", ", "]") private def predString = if (predecessors.isEmpty) "[P: N/A]" else predecessors.distinct.mkString("[P: ", ", ", "]") diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala index e1732d5775..f35996eeb9 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala @@ -72,9 +72,9 @@ trait ExceptionHandlers { override def dup: Finalizer = new Finalizer(method, label, pos) } - // object NoFinalizer extends Finalizer(null, newTermNameCached(""), NoPosition) { - // override def startBlock: BasicBlock = sys.error("NoFinalizer cannot have a start block."); - // override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block."); - // override def dup = this - // } + object NoFinalizer extends Finalizer(null, newTermNameCached(""), NoPosition) { + override def startBlock: BasicBlock = sys.error("NoFinalizer cannot have a start block."); + override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block."); + override def dup = this + } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index d521f893d1..720896d0b3 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1960,12 +1960,12 @@ abstract class GenICode extends SubComponent { this } - // def removeFinalizer(f: Tree): this.type = { - // assert(cleanups.head contains f, - // "Illegal nesting of cleanup operations: " + cleanups + " while exiting finalizer " + f); - // cleanups = cleanups.tail - // this - // } + def removeFinalizer(f: Tree): this.type = { + assert(cleanups.head contains f, + "Illegal nesting of cleanup operations: " + cleanups + " while exiting finalizer " + f); + cleanups = cleanups.tail + this + } /** Prepare a new context upon entry into a method. * @@ -2044,14 +2044,14 @@ abstract class GenICode extends SubComponent { } /** Remove the given handler from the list of active exception handlers. */ - // def removeActiveHandler(exh: ExceptionHandler): Unit = { - // assert(handlerCount > 0 && handlers.head == exh, - // "Wrong nesting of exception handlers." + this + " for " + exh) - // handlerCount -= 1 - // handlers = handlers.tail - // debuglog("removed handler: " + exh); + def removeActiveHandler(exh: ExceptionHandler): Unit = { + assert(handlerCount > 0 && handlers.head == exh, + "Wrong nesting of exception handlers." 
+ this + " for " + exh) + handlerCount -= 1 + handlers = handlers.tail + debuglog("removed handler: " + exh); - // } + } /** Clone the current context */ def dup: Context = new Context(this) @@ -2339,7 +2339,7 @@ abstract class GenICode extends SubComponent { val locals: ListBuffer[Local] = new ListBuffer def add(l: Local) = locals += l - // def remove(l: Local) = locals -= l + def remove(l: Local) = locals -= l /** Return all locals that are in scope. */ def varsInScope: Buffer[Local] = outer.varsInScope.clone() ++= locals diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala index 4192d794f9..07abe9d74f 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala @@ -126,9 +126,9 @@ trait Members { override def toString() = symbol.fullName - // def lookupField(s: Symbol) = fields find (_.symbol == s) + def lookupField(s: Symbol) = fields find (_.symbol == s) def lookupMethod(s: Symbol) = methods find (_.symbol == s) - // def lookupMethod(s: Name) = methods find (_.symbol.name == s) + def lookupMethod(s: Name) = methods find (_.symbol.name == s) /* returns this methods static ctor if it has one. */ def lookupStaticCtor: Option[IMethod] = methods find (_.symbol.isStaticConstructor) @@ -159,7 +159,7 @@ trait Members { def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this def foreachBlock[U](f: BasicBlock => U): Unit = blocks foreach f - // def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f) + def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f) var native = false @@ -192,7 +192,7 @@ trait Members { } def addLocals(ls: List[Local]) = ls foreach addLocal - // def addParams(as: List[Local]) = as foreach addParam + def addParams(as: List[Local]) = as foreach addParam def lookupLocal(n: Name): Option[Local] = locals find (_.sym.name == n) def lookupLocal(sym: Symbol): Option[Local] = locals find (_.sym == sym) @@ -207,28 +207,28 @@ trait Members { override def toString() = symbol.fullName - // def matchesSignature(other: IMethod) = { - // (symbol.name == other.symbol.name) && - // (params corresponds other.params)(_.kind == _.kind) && - // (returnType == other.returnType) - // } + def matchesSignature(other: IMethod) = { + (symbol.name == other.symbol.name) && + (params corresponds other.params)(_.kind == _.kind) && + (returnType == other.returnType) + } import opcodes._ - // def checkLocals(): Unit = { - // def localsSet = (code.blocks flatMap { bb => - // bb.iterator collect { - // case LOAD_LOCAL(l) => l - // case STORE_LOCAL(l) => l - // } - // }).toSet - - // if (hasCode) { - // log("[checking locals of " + this + "]") - // locals filterNot localsSet foreach { l => - // log("Local " + l + " is not declared in " + this) - // } - // } - // } + def checkLocals(): Unit = { + def localsSet = (code.blocks flatMap { bb => + bb.iterator collect { + case LOAD_LOCAL(l) => l + case STORE_LOCAL(l) => l + } + }).toSet + + if (hasCode) { + log("[checking locals of " + this + "]") + locals filterNot localsSet foreach { l => + log("Local " + l + " is not declared in " + this) + } + } + } /** Merge together blocks that have a single successor which has a * single predecessor. Exception handlers are taken into account (they @@ -295,7 +295,7 @@ trait Members { var start: Int = _ /** Ending PC for this local's visibility range. 
*/ - // var end: Int = _ + var end: Int = _ /** PC-based ranges for this local variable's visibility */ var ranges: List[(Int, Int)] = Nil diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala index 6f7db042e6..0e7c75de50 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala @@ -111,7 +111,7 @@ trait Opcodes { self: ICodes => def producedTypes: List[TypeKind] = Nil /** This method returns the difference of size of the stack when the instruction is used */ - // def difference = produced-consumed + def difference = produced-consumed /** The corresponding position in the source file */ private var _pos: Position = NoPosition @@ -119,7 +119,7 @@ trait Opcodes { self: ICodes => def pos: Position = _pos /** Used by dead code elimination. */ - // var useful: Boolean = false + var useful: Boolean = false def setPos(p: Position): this.type = { _pos = p @@ -133,11 +133,11 @@ trait Opcodes { self: ICodes => object opcodes { - // def mayThrow(i: Instruction): Boolean = i match { - // case LOAD_LOCAL(_) | STORE_LOCAL(_) | CONSTANT(_) | THIS(_) | CZJUMP(_, _, _, _) - // | DROP(_) | DUP(_) | RETURN(_) | LOAD_EXCEPTION(_) | JUMP(_) | CJUMP(_, _, _, _) => false - // case _ => true - // } + def mayThrow(i: Instruction): Boolean = i match { + case LOAD_LOCAL(_) | STORE_LOCAL(_) | CONSTANT(_) | THIS(_) | CZJUMP(_, _, _, _) + | DROP(_) | DUP(_) | RETURN(_) | LOAD_EXCEPTION(_) | JUMP(_) | CJUMP(_, _, _, _) => false + case _ => true + } /** Loads "this" on top of the stack. * Stack: ... @@ -714,7 +714,7 @@ trait Opcodes { self: ICodes => /** Is this a static method call? */ def isStatic: Boolean = false - // def isSuper: Boolean = false + def isSuper: Boolean = false /** Is this an instance method call? */ def hasInstance: Boolean = true @@ -749,7 +749,7 @@ trait Opcodes { self: ICodes => * On JVM, translated to `invokespecial`. */ case class SuperCall(mix: Name) extends InvokeStyle { - // override def isSuper = true + override def isSuper = true override def toString(): String = { "super(" + mix + ")" } } @@ -814,7 +814,7 @@ trait Opcodes { self: ICodes => case class CIL_NEWOBJ(method: Symbol) extends Instruction { override def toString(): String = "CIL_NEWOBJ " + hostClass.fullName + method.fullName - val hostClass: Symbol = method.owner; + var hostClass: Symbol = method.owner; override def consumed = method.tpe.paramTypes.length override def consumedTypes = method.tpe.paramTypes map toTypeKind override def produced = 1 diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala index ebfb4ad591..c8579041ba 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala @@ -84,17 +84,17 @@ trait Primitives { self: ICodes => def print(o: AnyRef): PrimitivePrinter = print(o.toString()) - // def printPrimitive(prim: Primitive) = prim match { - // case Negation(kind) => - // print("!") + def printPrimitive(prim: Primitive) = prim match { + case Negation(kind) => + print("!") - // case Test(op, kind, zero) => - // print(op).print(kind) + case Test(op, kind, zero) => + print(op).print(kind) - // case Comparison(op, kind) => - // print(op).print("(").print(kind) + case Comparison(op, kind) => + print(op).print("(").print(kind) - // } + } } /** This class represents a comparison operation. 
*/ diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala index c8168cbfa6..e73015c4da 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala @@ -29,13 +29,13 @@ trait Repository { /** The icode of the given class. If not available, it loads * its bytecode. */ - // def icode(sym: Symbol, force: Boolean): IClass = - // icode(sym) getOrElse { - // log("loading " + sym) - // load(sym) - // assert(available(sym)) - // loaded(sym) - // } + def icode(sym: Symbol, force: Boolean): IClass = + icode(sym) getOrElse { + log("loading " + sym) + load(sym) + assert(available(sym)) + loaded(sym) + } /** Load bytecode for given symbol. */ def load(sym: Symbol): Boolean = { diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index 2df6811fd7..f96dce9f1c 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -66,7 +66,7 @@ trait TypeKinds { self: ICodes => def isValueType = false def isBoxedType = false final def isRefOrArrayType = isReferenceType || isArrayType - // final def isRefArrayOrBoxType = isRefOrArrayType || isBoxedType + final def isRefArrayOrBoxType = isRefOrArrayType || isBoxedType final def isNothingType = this == NothingReference final def isNullType = this == NullReference final def isInterfaceType = this match { @@ -114,7 +114,7 @@ trait TypeKinds { self: ICodes => } } - // var lubs0 = 0 + var lubs0 = 0 /** * The least upper bound of two typekinds. They have to be either diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala index c958f20853..c1bf4304ea 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala @@ -20,7 +20,7 @@ trait TypeStacks { */ type Rep = List[TypeKind] - // object NoTypeStack extends TypeStack(Nil) { } + object NoTypeStack extends TypeStack(Nil) { } class TypeStack(var types: Rep) { if (types.nonEmpty) @@ -74,8 +74,8 @@ trait TypeStacks { * length and each type kind agrees position-wise. Two * types agree if one is a subtype of the other. */ - // def agreesWith(other: TypeStack): Boolean = - // (types corresponds other.types)((t1, t2) => t1 <:< t2 || t2 <:< t1) + def agreesWith(other: TypeStack): Boolean = + (types corresponds other.types)((t1, t2) => t1 <:< t2 || t2 <:< t1) /* This method returns a String representation of the stack */ override def toString() = diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala index 6534cd83f1..53111d0ade 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala @@ -27,10 +27,10 @@ abstract class CopyPropagation { /** Values that can be on the stack. */ abstract class Value { - // def isRecord = false + def isRecord = false } case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value { - // override def isRecord = true + override def isRecord = true } /** The value of some location in memory. 
*/ case class Deref(l: Location) extends Value @@ -92,14 +92,14 @@ abstract class CopyPropagation { } /* Return the binding for the given field of the given record */ - // def getBinding(r: Record, f: Symbol): Value = { - // assert(r.bindings contains f, "Record " + r + " does not contain a field " + f) - - // r.bindings(f) match { - // case Deref(LocalVar(l)) => getBinding(l) - // case target => target - // } - // } + def getBinding(r: Record, f: Symbol): Value = { + assert(r.bindings contains f, "Record " + r + " does not contain a field " + f) + + r.bindings(f) match { + case Deref(LocalVar(l)) => getBinding(l) + case target => target + } + } /** Return a local which contains the same value as this field, if any. * If the field holds a reference to a local, the returned value is the diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala index c232c3692a..04c3eedbad 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala @@ -37,11 +37,11 @@ trait DataFlowAnalysis[L <: SemiLattice] { /** Reinitialize, but keep the old solutions. Should be used when reanalyzing the * same method, after some code transformation. */ - // def reinit(f: => Unit): Unit = { - // iterations = 0 - // worklist.clear; visited.clear; - // f - // } + def reinit(f: => Unit): Unit = { + iterations = 0 + worklist.clear; visited.clear; + f + } def run(): Unit diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 6e7ed9d4c4..15755f31ad 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -269,34 +269,34 @@ abstract class TypeFlowAnalysis { } // interpret - // class SimulatedStack { - // private var types: List[InferredType] = Nil - // private var depth = 0 - - // /** Remove and return the topmost element on the stack. If the - // * stack is empty, return a reference to a negative index on the - // * stack, meaning it refers to elements pushed by a predecessor block. - // */ - // def pop: InferredType = types match { - // case head :: rest => - // types = rest - // head - // case _ => - // depth -= 1 - // TypeOfStackPos(depth) - // } - - // def pop2: (InferredType, InferredType) = { - // (pop, pop) - // } - - // def push(t: InferredType) { - // depth += 1 - // types = types ::: List(t) - // } - - // def push(k: TypeKind) { push(Const(k)) } - // } + class SimulatedStack { + private var types: List[InferredType] = Nil + private var depth = 0 + + /** Remove and return the topmost element on the stack. If the + * stack is empty, return a reference to a negative index on the + * stack, meaning it refers to elements pushed by a predecessor block. + */ + def pop: InferredType = types match { + case head :: rest => + types = rest + head + case _ => + depth -= 1 + TypeOfStackPos(depth) + } + + def pop2: (InferredType, InferredType) = { + (pop, pop) + } + + def push(t: InferredType) { + depth += 1 + types = types ::: List(t) + } + + def push(k: TypeKind) { push(Const(k)) } + } abstract class InferredType { /** Return the type kind pointed by this inferred type. 
*/ @@ -737,9 +737,9 @@ abstract class TypeFlowAnalysis { private var lastStart = 0L - // def reset() { - // millis = 0L - // } + def reset() { + millis = 0L + } def start() { lastStart = System.currentTimeMillis diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 4a46a64dae..8bae80c760 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -455,7 +455,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { /** basic functionality for class file building */ abstract class JBuilder(bytecodeWriter: BytecodeWriter) { - // val EMPTY_JTYPE_ARRAY = Array.empty[asm.Type] + val EMPTY_JTYPE_ARRAY = Array.empty[asm.Type] val EMPTY_STRING_ARRAY = Array.empty[String] val mdesc_arglessvoid = "()V" @@ -523,7 +523,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { /** Specialized array conversion to prevent calling * java.lang.reflect.Array.newInstance via TraversableOnce.toArray */ - // def mkArray(xs: Traversable[asm.Type]): Array[asm.Type] = { val a = new Array[asm.Type](xs.size); xs.copyToArray(a); a } + def mkArray(xs: Traversable[asm.Type]): Array[asm.Type] = { val a = new Array[asm.Type](xs.size); xs.copyToArray(a); a } def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a } // ----------------------------------------------------------------------------------------- @@ -1757,10 +1757,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters { import asm.Opcodes; - // def aconst(cst: AnyRef) { - // if (cst == null) { jmethod.visitInsn(Opcodes.ACONST_NULL) } - // else { jmethod.visitLdcInsn(cst) } - // } + def aconst(cst: AnyRef) { + if (cst == null) { jmethod.visitInsn(Opcodes.ACONST_NULL) } + else { jmethod.visitLdcInsn(cst) } + } final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index e258f38284..06f94ef46c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -182,15 +182,15 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with val StringBuilderType = new JObjectType(StringBuilderClassName) // TODO use ASMType.getObjectType val toStringType = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY) // TODO use ASMType.getMethodType val arrayCloneType = new JMethodType(JAVA_LANG_OBJECT, JType.EMPTY_ARRAY) - // val MethodTypeType = new JObjectType("java.dyn.MethodType") - // val JavaLangClassType = new JObjectType("java.lang.Class") - // val MethodHandleType = new JObjectType("java.dyn.MethodHandle") + val MethodTypeType = new JObjectType("java.dyn.MethodType") + val JavaLangClassType = new JObjectType("java.lang.Class") + val MethodHandleType = new JObjectType("java.dyn.MethodHandle") // Scala attributes val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo") - // val BeanInfoSkipAttr = rootMirror.getRequiredClass("scala.beans.BeanInfoSkip") - // val BeanDisplayNameAttr = rootMirror.getRequiredClass("scala.beans.BeanDisplayName") - // val BeanDescriptionAttr = rootMirror.getRequiredClass("scala.beans.BeanDescription") + val BeanInfoSkipAttr = rootMirror.getRequiredClass("scala.beans.BeanInfoSkip") + val BeanDisplayNameAttr = rootMirror.getRequiredClass("scala.beans.BeanDisplayName") + val 
BeanDescriptionAttr = rootMirror.getRequiredClass("scala.beans.BeanDescription") final val ExcludedForwarderFlags = { import Flags._ @@ -264,8 +264,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with val fjbgContext = new FJBGContext(49, 0) - // val emitSource = debugLevel >= 1 - // val emitLines = debugLevel >= 2 + val emitSource = debugLevel >= 1 + val emitLines = debugLevel >= 2 val emitVars = debugLevel >= 3 // bug had phase with wrong name; leaving enabled for brief pseudo deprecation @@ -1843,14 +1843,14 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with ////////////////////// local vars /////////////////////// - // def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe)) + def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe)) def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1 - // def indexOf(m: IMethod, sym: Symbol): Int = { - // val Some(local) = m lookupLocal sym - // indexOf(local) - // } + def indexOf(m: IMethod, sym: Symbol): Int = { + val Some(local) = m lookupLocal sym + indexOf(local) + } def indexOf(local: Local): Int = { assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ") diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala index f7278a7590..2253ae6e15 100644 --- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala +++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala @@ -23,7 +23,7 @@ abstract class GenMSIL extends SubComponent { import icodes._ import icodes.opcodes._ - // val x = loaders + val x = loaders /** Create a new phase */ override def newPhase(p: Phase) = new MsilPhase(p) @@ -83,9 +83,9 @@ abstract class GenMSIL extends SubComponent { SYMTAB_DEFAULT_CONSTR => SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR} val EXCEPTION = clrTypes.getType("System.Exception") - // val MBYTE_ARRAY = clrTypes.mkArrayType(MBYTE) + val MBYTE_ARRAY = clrTypes.mkArrayType(MBYTE) - // val ICLONEABLE = clrTypes.getType("System.ICloneable") + val ICLONEABLE = clrTypes.getType("System.ICloneable") val MEMBERWISE_CLONE = MOBJECT.GetMethod("MemberwiseClone", MsilType.EmptyTypes) val MMONITOR = clrTypes.getType("System.Threading.Monitor") @@ -102,8 +102,8 @@ abstract class GenMSIL extends SubComponent { val INT_PTR = clrTypes.getType("System.IntPtr") - // val JOBJECT = definitions.ObjectClass - // val JSTRING = definitions.StringClass + val JOBJECT = definitions.ObjectClass + val JSTRING = definitions.StringClass val SystemConvert = clrTypes.getType("System.Convert") @@ -622,7 +622,7 @@ abstract class GenMSIL extends SubComponent { * - emit `Leave handlerReturnLabel` instead of the Return * - emit code at the end: load the local and return its value */ - val currentHandlers = new mutable.Stack[ExceptionHandler] + var currentHandlers = new mutable.Stack[ExceptionHandler] // The IMethod the Local/Label/Kind below belong to var handlerReturnMethod: IMethod = _ // Stores the result when returning inside an exception block diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala index b36e7a4636..8d6de821bb 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala @@ -189,8 +189,8 @@ abstract class ClosureElimination extends SubComponent { } /** is field 'f' accessible from method 'm'? 
*/ - // def accessible(f: Symbol, m: Symbol): Boolean = - // f.isPublic || (f.isProtected && (f.enclosingPackageClass == m.enclosingPackageClass)) + def accessible(f: Symbol, m: Symbol): Boolean = + f.isPublic || (f.isProtected && (f.enclosingPackageClass == m.enclosingPackageClass)) } /* class ClosureElim */ diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index eaeba69382..7d741aab60 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -602,7 +602,7 @@ abstract class Inliners extends SubComponent { override def toString = m.toString val sym = m.symbol - // val name = sym.name + val name = sym.name def owner = sym.owner def paramTypes = sym.info.paramTypes def minimumStack = paramTypes.length + 1 @@ -624,7 +624,7 @@ abstract class Inliners extends SubComponent { def isLarge = length > MAX_INLINE_SIZE def isRecursive = m.recursive def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs - // def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp)) + def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp)) def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED) def hasNonFinalizerHandler = handlers exists { @@ -732,7 +732,7 @@ abstract class Inliners extends SubComponent { */ sealed abstract class InlineSafetyInfo { def isSafe = false - // def isUnsafe = !isSafe + def isUnsafe = !isSafe } case object NeverSafeToInline extends InlineSafetyInfo case object InlineableAtThisCaller extends InlineSafetyInfo { override def isSafe = true } diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/compiler/scala/tools/nsc/doc/html/Page.scala index d502f19e31..62166f7def 100644 --- a/src/compiler/scala/tools/nsc/doc/html/Page.scala +++ b/src/compiler/scala/tools/nsc/doc/html/Page.scala @@ -90,9 +90,9 @@ abstract class Page { /** A relative link from this page to some destination page in the Scaladoc site. * @param destPage The page that the link will point to. */ - // def relativeLinkTo(destPage: HtmlPage): String = { - // relativeLinkTo(destPage.path) - // } + def relativeLinkTo(destPage: HtmlPage): String = { + relativeLinkTo(destPage.path) + } /** A relative link from this page to some destination path. * @param destPath The path that the link will point to. */ diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala index 496dfdefcf..6b24073339 100644 --- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala +++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala @@ -25,7 +25,7 @@ import diagram._ trait Entity { /** Similar to symbols, so we can track entities */ - // def id: Int + def id: Int /** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName` * instead. */ @@ -61,7 +61,7 @@ trait Entity { def isType: Boolean /** Indicates whether this entity lives in the terms namespace (objects, packages, methods, values) */ - // def isTerm: Boolean + def isTerm: Boolean } object Entity { @@ -98,7 +98,7 @@ trait TemplateEntity extends Entity { def isDocTemplate: Boolean /** Whether documentation is available for this template. */ - // def isNoDocMemberTemplate: Boolean + def isNoDocMemberTemplate: Boolean /** Whether this template is a case class. 
*/ def isCaseClass: Boolean @@ -175,10 +175,10 @@ trait MemberEntity extends Entity { def isAbstractType: Boolean /** Whether this member is a template. */ - // def isTemplate: Boolean + def isTemplate: Boolean /** Whether this member is implicit. */ - // def isImplicit: Boolean + def isImplicit: Boolean /** Whether this member is abstract. */ def isAbstract: Boolean @@ -388,7 +388,7 @@ trait NonTemplateMemberEntity extends MemberEntity { /** Whether this member is a bridge member. A bridge member does only exist for binary compatibility reasons * and should not appear in ScalaDoc. */ - // def isBridge: Boolean + def isBridge: Boolean } @@ -507,7 +507,7 @@ trait ImplicitConversion { * Note: not all targetTypes have a corresponding template. Examples include conversions resulting in refinement * types. Need to check it's not option! */ - // def targetTemplate: Option[TemplateEntity] + def targetTemplate: Option[TemplateEntity] /** The components of the implicit conversion type parents */ def targetTypeComponents: List[(TemplateEntity, TypeEntity)] diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala index 0f2374a6f4..10e2f23142 100755 --- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala @@ -18,7 +18,7 @@ object IndexModelFactory { object result extends mutable.HashMap[Char,SymbolMap] { /* Owner template ordering */ - // implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase } + implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase } /* symbol name ordering */ implicit def orderingMap = math.Ordering.String.on { x: String => x.toLowerCase } diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index 739a1b836d..f11f090b4b 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -43,10 +43,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def modelFinished: Boolean = _modelFinished private var universe: Universe = null - // protected def closestPackage(sym: Symbol) = { - // if (sym.isPackage || sym.isPackageClass) sym - // else sym.enclosingPackage - // } + protected def closestPackage(sym: Symbol) = { + if (sym.isPackage || sym.isPackageClass) sym + else sym.enclosingPackage + } def makeModel: Option[Universe] = { val universe = new Universe { thisUniverse => @@ -77,7 +77,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */ abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity { - // val id = { ids += 1; ids } + val id = { ids += 1; ids } val name = optimize(sym.nameString) val universe = thisFactory.universe @@ -91,7 +91,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def annotations = sym.annotations.map(makeAnnotation) def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject def isType = sym.name.isTypeName - // def isTerm = sym.name.isTermName + def isTerm = sym.name.isTermName } trait TemplateImpl extends EntityImpl with TemplateEntity { @@ -103,7 +103,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def isObject = sym.isModule && !sym.isPackage def 
isCaseClass = sym.isCaseClass def isRootPackage = false - // def isNoDocMemberTemplate = false + def isNoDocMemberTemplate = false def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this)) } @@ -178,9 +178,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { }) else None - // def inheritedFrom = - // if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else - // makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) }) + def inheritedFrom = + if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else + makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) }) def resultType = { def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone case PolyType(_, res) => resultTpe(res) @@ -195,7 +195,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def isVal = false def isLazyVal = false def isVar = false - // def isImplicit = sym.isImplicit + def isImplicit = sym.isImplicit def isConstructor = false def isAliasType = false def isAbstractType = false @@ -203,7 +203,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { // for the explanation of conversion == null see comment on flags ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (!isImplicitlyInherited)) || sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic - // def isTemplate = false + def isTemplate = false def signature = externalSignature(sym) lazy val signatureCompat = { @@ -257,9 +257,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { */ abstract class MemberTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with MemberTemplateEntity { // no templates cache for this class, each owner gets its own instance - // override def isTemplate = true + override def isTemplate = true def isDocTemplate = false - // override def isNoDocMemberTemplate = true + override def isNoDocMemberTemplate = true lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." 
+ name) def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */ @@ -380,9 +380,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this)).toList // the inherited templates (classes, traits or objects) - val memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this)) + var memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this)) // the direct members (methods, values, vars, types and directly contained templates) - val memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_)) + var memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_)) // the members generated by the symbols in memberSymsEager val ownMembers = (memberSymsEager.flatMap(makeMember(_, None, this))) @@ -438,7 +438,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { else List() ) - // override def isTemplate = true + override def isTemplate = true override def isDocTemplate = true private[this] lazy val companionSymbol = if (sym.isAliasType || sym.isAbstractType) { @@ -545,7 +545,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { val qualifiedName = conversion.fold(inDefinitionTemplates.head.qualifiedName)(_.conversionQualifiedName) optimize(qualifiedName + "#" + name) } - // def isBridge = sym.isBridge + def isBridge = sym.isBridge def isUseCase = useCaseOf.isDefined override def byConversion: Option[ImplicitConversionImpl] = conversion override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined } @@ -707,7 +707,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { override def inTemplate = this override def toRoot = this :: Nil override def qualifiedName = "_root_" - // override def inheritedFrom = Nil + override def inheritedFrom = Nil override def isRootPackage = true override lazy val memberSyms = (bSym.info.members ++ EmptyPackage.info.members).toList filter { s => @@ -857,11 +857,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { inTpl.members.find(_.sym == aSym) } - // @deprecated("Use `findLinkTarget` instead.", "2.10.0") - // def findTemplate(query: String): Option[DocTemplateImpl] = { - // assert(modelFinished) - // docTemplatesCache.values find { (tpl: DocTemplateImpl) => tpl.qualifiedName == query && !packageDropped(tpl) && !tpl.isObject } - // } + @deprecated("Use `findLinkTarget` instead.", "2.10.0") + def findTemplate(query: String): Option[DocTemplateImpl] = { + assert(modelFinished) + docTemplatesCache.values find { (tpl: DocTemplateImpl) => tpl.qualifiedName == query && !packageDropped(tpl) && !tpl.isObject } + } def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = { assert(modelFinished) diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index e9c32b1af0..c1ca8c1448 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -345,14 +345,14 @@ trait ModelFactoryImplicitSupport { makeRootPackage } - // def targetTemplate: Option[TemplateEntity] = toType match { - // // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types - // // such as refinement types because the template 
can't represent the type corectly (a template corresponds to a - // // package, class, trait or object) - // case t: TypeRef => Some(makeTemplate(t.sym)) - // case RefinedType(parents, decls) => None - // case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None - // } + def targetTemplate: Option[TemplateEntity] = toType match { + // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types + // such as refinement types because the template can't represent the type corectly (a template corresponds to a + // package, class, trait or object) + case t: TypeRef => Some(makeTemplate(t.sym)) + case RefinedType(parents, decls) => None + case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None + } def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl) @@ -492,14 +492,14 @@ trait ModelFactoryImplicitSupport { /** * Make implicits explicit - Not used curently */ - // object implicitToExplicit extends TypeMap { - // def apply(tp: Type): Type = mapOver(tp) match { - // case MethodType(params, resultType) => - // MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType) - // case other => - // other - // } - // } + object implicitToExplicit extends TypeMap { + def apply(tp: Type): Type = mapOver(tp) match { + case MethodType(params, resultType) => + MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType) + case other => + other + } + } /** * removeImplicitParameters transforms implicit parameters from the view result type into constraints and diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala index 4b75f3fd4d..3e172544dd 100644 --- a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala +++ b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala @@ -103,7 +103,7 @@ abstract class Comment { def example: List[Body] /** The comment as it appears in the source text. 
*/ - // def source: Option[String] + def source: Option[String] /** A description for the primary constructor */ def constructor: Option[Body] diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala index 7897421bd7..9617b15068 100644 --- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala @@ -28,10 +28,10 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member protected val commentCache = mutable.HashMap.empty[(global.Symbol, TemplateImpl), Comment] - // def addCommentBody(sym: global.Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): global.Symbol = { - // commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None) - // sym - // } + def addCommentBody(sym: global.Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): global.Symbol = { + commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None) + sym + } def comment(sym: global.Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl): Option[Comment] = { val key = (sym, inTpl) @@ -132,7 +132,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member val note = note0 val example = example0 val constructor = constructor0 - // val source = source0 + val source = source0 val inheritDiagram = inheritDiagram0 val contentDiagram = contentDiagram0 val groupDesc = groupDesc0 @@ -957,19 +957,19 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member count } - // final def jumpUntil(chars: String): Int = { - // assert(chars.length > 0) - // var count = 0 - // val c = chars.charAt(0) - // while (!check(chars) && char != endOfText) { - // nextChar() - // while (char != c && char != endOfText) { - // nextChar() - // count += 1 - // } - // } - // count - // } + final def jumpUntil(chars: String): Int = { + assert(chars.length > 0) + var count = 0 + val c = chars.charAt(0) + while (!check(chars) && char != endOfText) { + nextChar() + while (char != c && char != endOfText) { + nextChar() + count += 1 + } + } + count + } final def jumpUntil(pred: => Boolean): Int = { var count = 0 diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala index cb4659a71c..c2aa1f17f3 100644 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala +++ b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala @@ -36,12 +36,12 @@ case class InheritanceDiagram(thisNode: ThisNode, override def isInheritanceDiagram = true lazy val depthInfo = new DepthInfo { def maxDepth = 3 - // def nodeDepth(node: Node) = - // if (node == thisNode) 1 - // else if (superClasses.contains(node)) 0 - // else if (subClasses.contains(node)) 2 - // else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1 - // else -1 + def nodeDepth(node: Node) = + if (node == thisNode) 1 + else if (superClasses.contains(node)) 0 + else if (subClasses.contains(node)) 2 + else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1 + else -1 } } @@ -49,7 +49,7 @@ trait DepthInfo { /** Gives the maximum depth */ def maxDepth: Int /** Gives the depth of any node in the diagram or -1 if the node is not in the diagram */ - // def nodeDepth(node: Node): Int + def nodeDepth(node: Node): Int } abstract class Node { @@ -142,5 +142,5 @@ class 
ContentDiagramDepth(pack: ContentDiagram) extends DepthInfo { } val maxDepth = _maxDepth - // def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1) -} + def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1) +} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala index 921d4dc0e1..a3f76994bc 100644 --- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala +++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala @@ -16,7 +16,7 @@ trait BuildManager { def addSourceFiles(files: Set[AbstractFile]) /** Remove the given files from the managed build process. */ - // def removeFiles(files: Set[AbstractFile]) + def removeFiles(files: Set[AbstractFile]) /** The given files have been modified by the user. Recompile * them and their dependent files. diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index f71415450c..4dedbcfd3d 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -395,40 +395,40 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") if (typerRun != currentTyperRun) demandNewCompilerRun() } - // def debugInfo(source : SourceFile, start : Int, length : Int): String = { - // println("DEBUG INFO "+source+"/"+start+"/"+length) - // val end = start+length - // val pos = rangePos(source, start, start, end) - - // val tree = locateTree(pos) - // val sw = new StringWriter - // val pw = new PrintWriter(sw) - // newTreePrinter(pw).print(tree) - // pw.flush - - // val typed = new Response[Tree] - // askTypeAt(pos, typed) - // val typ = typed.get.left.toOption match { - // case Some(tree) => - // val sw = new StringWriter - // val pw = new PrintWriter(sw) - // newTreePrinter(pw).print(tree) - // pw.flush - // sw.toString - // case None => "" - // } - - // val completionResponse = new Response[List[Member]] - // askTypeCompletion(pos, completionResponse) - // val completion = completionResponse.get.left.toOption match { - // case Some(members) => - // members mkString "\n" - // case None => "" - // } - - // source.content.view.drop(start).take(length).mkString+" : "+source.path+" ("+start+", "+end+ - // ")\n\nlocateTree:\n"+sw.toString+"\n\naskTypeAt:\n"+typ+"\n\ncompletion:\n"+completion - // } + def debugInfo(source : SourceFile, start : Int, length : Int): String = { + println("DEBUG INFO "+source+"/"+start+"/"+length) + val end = start+length + val pos = rangePos(source, start, start, end) + + val tree = locateTree(pos) + val sw = new StringWriter + val pw = new PrintWriter(sw) + newTreePrinter(pw).print(tree) + pw.flush + + val typed = new Response[Tree] + askTypeAt(pos, typed) + val typ = typed.get.left.toOption match { + case Some(tree) => + val sw = new StringWriter + val pw = new PrintWriter(sw) + newTreePrinter(pw).print(tree) + pw.flush + sw.toString + case None => "" + } + + val completionResponse = new Response[List[Member]] + askTypeCompletion(pos, completionResponse) + val completion = completionResponse.get.left.toOption match { + case Some(members) => + members mkString "\n" + case None => "" + } + + source.content.view.drop(start).take(length).mkString+" : "+source.path+" ("+start+", "+end+ + ")\n\nlocateTree:\n"+sw.toString+"\n\naskTypeAt:\n"+typ+"\n\ncompletion:\n"+completion + } // ----------------- The Background Runner Thread ----------------------- diff --git 
a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala index 42dad4c50d..014661e525 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala @@ -40,23 +40,23 @@ object ByteCode { /** Attempts to retrieve case parameter names for given class name. */ - // def caseParamNamesForPath(path: String) = - // for { - // module <- DECODER - // method <- decoderMethod("caseParamNames", classOf[String]) - // names <- method.invoke(module, path).asInstanceOf[Option[List[String]]] - // } - // yield names + def caseParamNamesForPath(path: String) = + for { + module <- DECODER + method <- decoderMethod("caseParamNames", classOf[String]) + names <- method.invoke(module, path).asInstanceOf[Option[List[String]]] + } + yield names def aliasesForPackage(pkg: String) = aliasMap flatMap (_(pkg)) /** Attempts to find type aliases in package objects. */ - // def aliasForType(path: String): Option[String] = { - // val (pkg, name) = (path lastIndexOf '.') match { - // case -1 => return None - // case idx => (path take idx, path drop (idx + 1)) - // } - // aliasesForPackage(pkg) flatMap (_ get name) - // } + def aliasForType(path: String): Option[String] = { + val (pkg, name) = (path lastIndexOf '.') match { + case -1 => return None + case idx => (path take idx, path drop (idx + 1)) + } + aliasesForPackage(pkg) flatMap (_ get name) + } } diff --git a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala index a8eeee48b8..1741a82775 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala @@ -1,50 +1,50 @@ -// /* NSC -- new Scala compiler -// * Copyright 2005-2013 LAMP/EPFL -// * @author Paul Phillips -// */ - -// package scala.tools.nsc -// package interpreter - -// import CodeHandlers.NoSuccess -// import scala.util.control.ControlThrowable - -// /** -// * The start of a simpler interface for utilizing the compiler with piecemeal -// * code strings. The "T" here could potentially be a Tree, a Type, a Symbol, -// * a Boolean, or something even more exotic. -// */ -// trait CodeHandlers[T] { -// self => - -// // Expressions are composed of operators and operands. -// def expr(code: String): T - -// // Statements occur as parts of blocks and templates. -// // A statement can be an import, a definition or an expression, or it can be empty. -// // Statements used in the template of a class definition can also be declarations. 
-// def stmt(code: String): T -// def stmts(code: String): Seq[T] - -// object opt extends CodeHandlers[Option[T]] { -// val handler: PartialFunction[Throwable, Option[T]] = { -// case _: NoSuccess => None -// } -// val handlerSeq: PartialFunction[Throwable, Seq[Option[T]]] = { -// case _: NoSuccess => Nil -// } - -// def expr(code: String) = try Some(self.expr(code)) catch handler -// def stmt(code: String) = try Some(self.stmt(code)) catch handler -// def stmts(code: String) = try (self.stmts(code) map (x => Some(x))) catch handlerSeq -// } -// } - -// object CodeHandlers { -// def incomplete() = throw CodeIncomplete -// def fail(msg: String) = throw new CodeException(msg) - -// trait NoSuccess extends ControlThrowable -// class CodeException(msg: String) extends RuntimeException(msg) with NoSuccess { } -// object CodeIncomplete extends CodeException("CodeIncomplete") -// } +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc +package interpreter + +import CodeHandlers.NoSuccess +import scala.util.control.ControlThrowable + +/** + * The start of a simpler interface for utilizing the compiler with piecemeal + * code strings. The "T" here could potentially be a Tree, a Type, a Symbol, + * a Boolean, or something even more exotic. + */ +trait CodeHandlers[T] { + self => + + // Expressions are composed of operators and operands. + def expr(code: String): T + + // Statements occur as parts of blocks and templates. + // A statement can be an import, a definition or an expression, or it can be empty. + // Statements used in the template of a class definition can also be declarations. + def stmt(code: String): T + def stmts(code: String): Seq[T] + + object opt extends CodeHandlers[Option[T]] { + val handler: PartialFunction[Throwable, Option[T]] = { + case _: NoSuccess => None + } + val handlerSeq: PartialFunction[Throwable, Seq[Option[T]]] = { + case _: NoSuccess => Nil + } + + def expr(code: String) = try Some(self.expr(code)) catch handler + def stmt(code: String) = try Some(self.stmt(code)) catch handler + def stmts(code: String) = try (self.stmts(code) map (x => Some(x))) catch handlerSeq + } +} + +object CodeHandlers { + def incomplete() = throw CodeIncomplete + def fail(msg: String) = throw new CodeException(msg) + + trait NoSuccess extends ControlThrowable + class CodeException(msg: String) extends RuntimeException(msg) with NoSuccess { } + object CodeIncomplete extends CodeException("CodeIncomplete") +} diff --git a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala b/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala index 0b4ad1a29d..8042f0aee2 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala @@ -10,5 +10,5 @@ package interpreter */ class CommandLine(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) { override def cmdName = "scala" - // override lazy val fileEndings = List(".scalaint") + override lazy val fileEndings = List(".scalaint") } diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala index bf53fbb04b..1dfccbfbf7 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Completion.scala @@ -23,7 +23,7 @@ object NoCompletion extends Completion { } object Completion { - // def empty: Completion = NoCompletion + def empty: Completion = 
NoCompletion case class Candidates(cursor: Int, candidates: List[String]) { } val NoCandidates = Candidates(-1, Nil) diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala index 5b8a1791e4..3a0b48ef57 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala @@ -52,30 +52,30 @@ trait CompletionAware { } } -// object CompletionAware { -// val Empty = new CompletionAware { def completions(verbosity: Int) = Nil } +object CompletionAware { + val Empty = new CompletionAware { def completions(verbosity: Int) = Nil } -// def unapply(that: Any): Option[CompletionAware] = that match { -// case x: CompletionAware => Some((x)) -// case _ => None -// } + def unapply(that: Any): Option[CompletionAware] = that match { + case x: CompletionAware => Some((x)) + case _ => None + } -// /** Create a CompletionAware object from the given functions. -// * The first should generate the list of completions whenever queried, -// * and the second should return Some(CompletionAware) object if -// * subcompletions are possible. -// */ -// def apply(terms: () => List[String], followFunction: String => Option[CompletionAware]): CompletionAware = -// new CompletionAware { -// def completions = terms() -// def completions(verbosity: Int) = completions -// override def follow(id: String) = followFunction(id) -// } + /** Create a CompletionAware object from the given functions. + * The first should generate the list of completions whenever queried, + * and the second should return Some(CompletionAware) object if + * subcompletions are possible. + */ + def apply(terms: () => List[String], followFunction: String => Option[CompletionAware]): CompletionAware = + new CompletionAware { + def completions = terms() + def completions(verbosity: Int) = completions + override def follow(id: String) = followFunction(id) + } -// /** Convenience factories. -// */ -// def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None) -// def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware = -// apply(() => map.keys.toList, map.get _) -// } + /** Convenience factories. 
+ */ + def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None) + def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware = + apply(() => map.keys.toList, map.get _) +} diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala index cddd7c930c..13880bb8af 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala @@ -38,7 +38,7 @@ trait CompletionOutput { def relativize(str: String): String = quietString(str stripPrefix (pkg + ".")) def relativize(tp: Type): String = relativize(tp.normalize.toString) - // def relativize(sym: Symbol): String = relativize(sym.info) + def relativize(sym: Symbol): String = relativize(sym.info) def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]") def parenList(params: List[Any]) = params.mkString("(", ", ", ")") diff --git a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala index 0e3a2328a2..b5850d901c 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala @@ -9,15 +9,15 @@ package interpreter import scala.tools.jline.console.{ ConsoleReader, CursorBuffer } trait ConsoleReaderHelper extends ConsoleReader { - // def currentLine = "" + getCursorBuffer.buffer - // def currentPos = getCursorBuffer.cursor + def currentLine = "" + getCursorBuffer.buffer + def currentPos = getCursorBuffer.cursor def terminal = getTerminal() def width = terminal.getWidth() def height = terminal.getHeight() - // def paginate = isPaginationEnabled() - // def paginate_=(value: Boolean) = setPaginationEnabled(value) + def paginate = isPaginationEnabled() + def paginate_=(value: Boolean) = setPaginationEnabled(value) - // def goBack(num: Int): Unit + def goBack(num: Int): Unit def readOneKey(prompt: String): Int def eraseLine(): Unit diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala index f09c9a9557..80debfacb9 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala @@ -26,7 +26,7 @@ trait Delimited { def delimited: Char => Boolean def escapeChars: List[Char] = List('\\') - // def quoteChars: List[(Char, Char)] = List(('\'', '\''), ('"', '"')) + def quoteChars: List[(Char, Char)] = List(('\'', '\''), ('"', '"')) /** Break String into args based on delimiting function. 
*/ @@ -39,6 +39,6 @@ trait Delimited { def isDelimiterChar(ch: Char) = delimited(ch) def isEscapeChar(ch: Char): Boolean = escapeChars contains ch - // def isQuoteStart(ch: Char): Boolean = quoteChars map (_._1) contains ch - // def isQuoteEnd(ch: Char): Boolean = quoteChars map (_._2) contains ch + def isQuoteStart(ch: Char): Boolean = quoteChars map (_._1) contains ch + def isQuoteEnd(ch: Char): Boolean = quoteChars map (_._2) contains ch } diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala index fbd0d15962..ebd0030802 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala @@ -17,8 +17,7 @@ trait ExprTyper { import syntaxAnalyzer.UnitParser import naming.freshInternalVarName - object codeParser { - val global: repl.global.type = repl.global + object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] { def applyRule[T](code: String, rule: UnitParser => T): T = { reporter.reset() val scanner = newUnitParser(code) @@ -30,10 +29,10 @@ trait ExprTyper { result } - // def defns(code: String) = stmts(code) collect { case x: DefTree => x } - // def expr(code: String) = applyRule(code, _.expr()) + def defns(code: String) = stmts(code) collect { case x: DefTree => x } + def expr(code: String) = applyRule(code, _.expr()) def stmts(code: String) = applyRule(code, _.templateStats()) - // def stmt(code: String) = stmts(code).last // guaranteed nonempty + def stmt(code: String) = stmts(code).last // guaranteed nonempty } /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */ diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index 0e3dc3147b..74549ef558 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -14,7 +14,7 @@ import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName } import scala.tools.util.{ Javap } import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream } import io.{ File, Directory } -// import scala.reflect.NameTransformer._ +import scala.reflect.NameTransformer._ import util.ScalaClassLoader import ScalaClassLoader._ import scala.tools.util._ @@ -71,12 +71,12 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) intp.reporter printUntruncatedMessage msg } - // def isAsync = !settings.Yreplsync.value + def isAsync = !settings.Yreplsync.value lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals]) def history = in.history /** The context class loader at the time this object was created */ - // protected val originalClassLoader = Thread.currentThread.getContextClassLoader + protected val originalClassLoader = Thread.currentThread.getContextClassLoader // classpath entries added via :cp var addedClasspath: String = "" @@ -131,19 +131,20 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def helpCommand(line: String): Result = { if (line == "") helpSummary() else uniqueCommand(line) match { - case Some(lc) => echo("\n" + lc.help) + case Some(lc) => echo("\n" + lc.longHelp) case _ => ambiguousError(line) } } private def helpSummary() = { val usageWidth = commands map (_.usageMsg.length) max - val formatStr = "%-" + usageWidth + "s %s" + val formatStr = "%-" + usageWidth + "s %s %s" echo("All commands 
can be abbreviated, e.g. :he instead of :help.") - // echo("Those marked with a * have more detailed help, e.g. :help imports.\n") + echo("Those marked with a * have more detailed help, e.g. :help imports.\n") commands foreach { cmd => - echo(formatStr.format(cmd.usageMsg, cmd.help)) + val star = if (cmd.hasLongHelp) "*" else " " + echo(formatStr.format(cmd.usageMsg, star, cmd.help)) } } private def ambiguousError(cmd: String): Result = { @@ -193,10 +194,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) out println msg out.flush() } - // protected def echoNoNL(msg: String) = { - // out print msg - // out.flush() - // } + protected def echoNoNL(msg: String) = { + out print msg + out.flush() + } /** Search the history */ def searchHistory(_cmdline: String) { @@ -207,8 +208,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) echo("%d %s".format(index + offset, line)) } - private val currentPrompt = Properties.shellPromptString - // def setPrompt(prompt: String) = currentPrompt = prompt + private var currentPrompt = Properties.shellPromptString + def setPrompt(prompt: String) = currentPrompt = prompt /** Prompt to print when awaiting input */ def prompt = currentPrompt @@ -683,7 +684,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def process(settings: Settings): Boolean = savingContextLoader { this.settings = settings createInterpreter() - // var thunks: List[() => Unit] = Nil + var thunks: List[() => Unit] = Nil // sets in to some kind of reader depending on environmental cues in = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, true)) @@ -703,21 +704,21 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } /** process command-line arguments and do as they request */ - // def process(args: Array[String]): Boolean = { - // val command = new CommandLine(args.toList, echo) - // def neededHelp(): String = - // (if (command.settings.help.value) command.usageMsg + "\n" else "") + - // (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "") - - // // if they asked for no help and command is valid, we call the real main - // neededHelp() match { - // case "" => command.ok && process(command.settings) - // case help => echoNoNL(help) ; true - // } - // } + def process(args: Array[String]): Boolean = { + val command = new CommandLine(args.toList, echo) + def neededHelp(): String = + (if (command.settings.help.value) command.usageMsg + "\n" else "") + + (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "") + + // if they asked for no help and command is valid, we call the real main + neededHelp() match { + case "" => command.ok && process(command.settings) + case help => echoNoNL(help) ; true + } + } @deprecated("Use `process` instead", "2.9.0") - def main(settings: Settings): Unit = process(settings) //used by sbt + def main(settings: Settings): Unit = process(settings) } object ILoop { diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 8ba6573e64..0ef27ac96a 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -63,7 +63,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends @deprecated("Use replOutput.dir instead", "2.11.0") def virtualDirectory = replOutput.dir - // Used in a test case. 
def showDirectory() = replOutput.show(out) private[nsc] var printResults = true // whether to print result lines @@ -83,20 +82,20 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends private var _classLoader: AbstractFileClassLoader = null // active classloader private val _compiler: Global = newCompiler(settings, reporter) // our private compiler - // private val nextReqId = { - // var counter = 0 - // () => { counter += 1 ; counter } - // } + private val nextReqId = { + var counter = 0 + () => { counter += 1 ; counter } + } def compilerClasspath: Seq[URL] = ( if (isInitializeComplete) global.classPath.asURLs else new PathResolver(settings).result.asURLs // the compiler's classpath ) def settings = initialSettings - // def mostRecentLine = prevRequestList match { - // case Nil => "" - // case req :: _ => req.originalLine - // } + def mostRecentLine = prevRequestList match { + case Nil => "" + case req :: _ => req.originalLine + } // Run the code body with the given boolean settings flipped to true. def withoutWarnings[T](body: => T): T = beQuietDuring { val saved = settings.nowarn.value @@ -111,12 +110,12 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true)) def this() = this(new Settings()) - // lazy val repllog: Logger = new Logger { - // val out: JPrintWriter = imain.out - // val isInfo: Boolean = BooleanProp keyExists "scala.repl.info" - // val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug" - // val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace" - // } + lazy val repllog: Logger = new Logger { + val out: JPrintWriter = imain.out + val isInfo: Boolean = BooleanProp keyExists "scala.repl.info" + val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug" + val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace" + } lazy val formatting: Formatting = new Formatting { val prompt = Properties.shellPromptString } @@ -176,7 +175,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } import global._ - import definitions.{ ObjectClass, termMember, dropNullaryMethod} + import definitions.{ ObjectClass, termMember, typeMember, dropNullaryMethod} lazy val runtimeMirror = ru.runtimeMirror(classLoader) @@ -190,13 +189,13 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends noFatal(runtimeMirror staticModule path) orElse noFatal(rootMirror staticModule path) ) - // def getPathIfDefined(path: String) = ( - // if (path endsWith "$") getModuleIfDefined(path.init) - // else getClassIfDefined(path) - // ) + def getPathIfDefined(path: String) = ( + if (path endsWith "$") getModuleIfDefined(path.init) + else getClassIfDefined(path) + ) implicit class ReplTypeOps(tp: Type) { - // def orElse(other: => Type): Type = if (tp ne NoType) tp else other + def orElse(other: => Type): Type = if (tp ne NoType) tp else other def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp) } @@ -212,7 +211,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends if (replScope containsName name) freshUserTermName() else name } - // def isUserTermName(name: Name) = isUserVarName("" + name) + def isUserTermName(name: Name) = isUserVarName("" + name) def isInternalTermName(name: Name) = isInternalVarName("" + name) } import naming._ @@ -311,11 +310,11 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends abstract 
class PhaseDependentOps { def shift[T](op: => T): T - // def lookup(name: Name): Symbol = shift(replScope lookup name) + def lookup(name: Name): Symbol = shift(replScope lookup name) def path(name: => Name): String = shift(path(symbolOfName(name))) def path(sym: Symbol): String = backticked(shift(sym.fullName)) - // def name(sym: Symbol): Name = shift(sym.name) - // def info(sym: Symbol): Type = shift(sym.info) + def name(sym: Symbol): Name = shift(sym.name) + def info(sym: Symbol): Type = shift(sym.info) def sig(sym: Symbol): String = shift(sym.defString) } object typerOp extends PhaseDependentOps { @@ -355,7 +354,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends case p => new URLClassLoader(compilerClasspath, p) }) - // def getInterpreterClassLoader() = classLoader + def getInterpreterClassLoader() = classLoader // Set the current Java "context" class loader to this interpreter's class loader def setContextClassLoader() = classLoader.setAsContext() @@ -406,7 +405,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends exitingTyper { req.defines filterNot (s => req.defines contains s.companionSymbol) foreach { newSym => val companion = newSym.name.companionName - // val found = replScope lookup companion + val found = replScope lookup companion replScope lookup companion andAlso { oldSym => replwarn(s"warning: previously defined $oldSym is not a companion to $newSym.") replwarn("Companions must be defined together; you may wish to use :paste mode for this.") @@ -424,18 +423,18 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends printMessage(msg) } - // def isParseable(line: String): Boolean = { - // beSilentDuring { - // try parse(line) match { - // case Some(xs) => xs.nonEmpty // parses as-is - // case None => true // incomplete - // } - // catch { case x: Exception => // crashed the compiler - // replwarn("Exception in isParseable(\"" + line + "\"): " + x) - // false - // } - // } - // } + def isParseable(line: String): Boolean = { + beSilentDuring { + try parse(line) match { + case Some(xs) => xs.nonEmpty // parses as-is + case None => true // incomplete + } + catch { case x: Exception => // crashed the compiler + replwarn("Exception in isParseable(\"" + line + "\"): " + x) + false + } + } + } def compileSourcesKeepingRun(sources: SourceFile*) = { val run = new Run() @@ -654,17 +653,17 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends quietRun("val %s = %s".format(tempName, name)) quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType)) } - // def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*)) - // def addImports(ids: String*): IR.Result = - // if (ids.isEmpty) IR.Success - // else interpret("import " + ids.mkString(", ")) + def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*)) + def addImports(ids: String*): IR.Result = + if (ids.isEmpty) IR.Success + else interpret("import " + ids.mkString(", ")) def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p)) def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value) def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value)) - // def bindSyntheticValue(x: Any): IR.Result = bindValue(freshInternalVarName(), x) - // def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x) - // def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x) + def bindSyntheticValue(x: 
Any): IR.Result = bindValue(freshInternalVarName(), x) + def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x) + def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x) /** Reset this interpreter, forgetting all user-specified requests. */ def reset() { @@ -720,7 +719,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def readPath = pathTo(readName) def evalPath = pathTo(evalName) - // def printPath = pathTo(printName) + def printPath = pathTo(printName) def call(name: String, args: Any*): AnyRef = { val m = evalMethod(name) @@ -735,9 +734,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends try Right(call(name, args: _*)) catch { case ex: Throwable => Left(ex) } - // def callOpt(name: String, args: Any*): Option[AnyRef] = - // try Some(call(name, args: _*)) - // catch { case ex: Throwable => bindError(ex) ; None } + def callOpt(name: String, args: Any*): Option[AnyRef] = + try Some(call(name, args: _*)) + catch { case ex: Throwable => bindError(ex) ; None } class EvalException(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { } @@ -750,7 +749,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } lazy val evalClass = load(evalPath) - // lazy val evalValue = callOpt(resultName) + lazy val evalValue = callOpt(resultName) def compile(source: String): Boolean = compileAndSaveRun("", source) @@ -803,10 +802,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends class Request(val line: String, val trees: List[Tree]) { def defines = defHandlers flatMap (_.definedSymbols) def imports = importedSymbols - // def references = referencedNames map symbolOfName + def references = referencedNames map symbolOfName def value = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol - // val reqId = nextReqId() + val reqId = nextReqId() val lineRep = new ReadEvalPrint() private var _originalLine: String = null @@ -818,7 +817,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def defHandlers = handlers collect { case x: MemberDefHandler => x } /** all (public) names defined by these statements */ - // val definedNames = handlers flatMap (_.definedNames) + val definedNames = handlers flatMap (_.definedNames) /** list of names used by this expression */ val referencedNames: List[Name] = handlers flatMap (_.referencedNames) @@ -838,7 +837,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends exitingTyper(importsCode(referencedNames.toSet)) /** The unmangled symbol name, but supplemented with line info. 
*/ - // def disambiguated(name: Name): String = name + " (in " + lineRep + ")" + def disambiguated(name: Name): String = name + " (in " + lineRep + ")" /** the line of code to compute */ def toCompute = line @@ -932,7 +931,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /* typeOf lookup with encoding */ def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString))) - // def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbolDirect.simpleName) + def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbolDirect.simpleName) private def typeMap[T](f: Type => T) = mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x))) @@ -1012,8 +1011,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends */ def tryTwice(op: => Symbol): Symbol = exitingTyper(op) orElse exitingFlatten(op) - // def signatureOf(sym: Symbol) = typerOp sig sym - // def symbolOfPath(path: String): Symbol = exitingTyper(getPathIfDefined(path)) + def signatureOf(sym: Symbol) = typerOp sig sym + def symbolOfPath(path: String): Symbol = exitingTyper(getPathIfDefined(path)) def symbolOfIdent(id: String): Symbol = symbolOfTerm(id) orElse symbolOfType(id) def symbolOfType(id: String): Symbol = tryTwice(replScope lookup (id: TypeName)) def symbolOfTerm(id: String): Symbol = tryTwice(replScope lookup (id: TermName)) @@ -1068,7 +1067,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName def definedTypes = onlyTypes(allDefinedNames) - // def definedSymbols = prevRequestList flatMap (_.defines) toSet + def definedSymbols = prevRequestList flatMap (_.defines) toSet def definedSymbolList = prevRequestList flatMap (_.defines) filterNot (s => isInternalTermName(s.name)) // Terms with user-given names (i.e. not res0 and not synthetic) @@ -1076,67 +1075,67 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /** Translate a repl-defined identifier into a Symbol. 
*/ - // def apply(name: String): Symbol = types(name) orElse terms(name) - // def types(name: String): Symbol = replScope lookup (name: TypeName) orElse getClassIfDefined(name) - // def terms(name: String): Symbol = replScope lookup (name: TermName) orElse getModuleIfDefined(name) - - // def types[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol - // def terms[T: global.TypeTag] : Symbol = typeOf[T].termSymbol - // def apply[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol - - // lazy val DummyInfoSymbol = NoSymbol.newValue("replScopeDummy") - // private lazy val DummyInfo = TypeRef(NoPrefix, DummyInfoSymbol, Nil) - // private def enterDummySymbol(name: Name) = name match { - // case x: TermName => replScope enter (NoSymbol.newValue(x) setInfo DummyInfo) - // case x: TypeName => replScope enter (NoSymbol.newClass(x) setInfo DummyInfo) - // } + def apply(name: String): Symbol = types(name) orElse terms(name) + def types(name: String): Symbol = replScope lookup (name: TypeName) orElse getClassIfDefined(name) + def terms(name: String): Symbol = replScope lookup (name: TermName) orElse getModuleIfDefined(name) + + def types[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol + def terms[T: global.TypeTag] : Symbol = typeOf[T].termSymbol + def apply[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol + + lazy val DummyInfoSymbol = NoSymbol.newValue("replScopeDummy") + private lazy val DummyInfo = TypeRef(NoPrefix, DummyInfoSymbol, Nil) + private def enterDummySymbol(name: Name) = name match { + case x: TermName => replScope enter (NoSymbol.newValue(x) setInfo DummyInfo) + case x: TypeName => replScope enter (NoSymbol.newClass(x) setInfo DummyInfo) + } private var _replScope: Scope = _ private def resetReplScope() { _replScope = newScope } - // def initReplScope() { - // languageWildcardSyms foreach { clazz => - // importableMembers(clazz) foreach { sym => - // updateReplScope(sym, isDefined = false) - // } - // } - // } + def initReplScope() { + languageWildcardSyms foreach { clazz => + importableMembers(clazz) foreach { sym => + updateReplScope(sym, isDefined = false) + } + } + } def replScope = { if (_replScope eq null) _replScope = newScope _replScope } - // def lookupAll(name: String) = (replScope.lookupAll(name: TermName) ++ replScope.lookupAll(name: TypeName)).toList - // def unlinkAll(name: String) = { - // val syms = lookupAll(name) - // syms foreach { sym => - // replScope unlink sym - // } - // enterDummySymbol(name: TermName) - // enterDummySymbol(name: TypeName) - // syms - // } - // def isUnlinked(name: Name) = { - // symbolOfName(name) match { - // case NoSymbol => false - // case sym => sym.info.typeSymbolDirect == DummyInfoSymbol - // } - // } + def lookupAll(name: String) = (replScope.lookupAll(name: TermName) ++ replScope.lookupAll(name: TypeName)).toList + def unlinkAll(name: String) = { + val syms = lookupAll(name) + syms foreach { sym => + replScope unlink sym + } + enterDummySymbol(name: TermName) + enterDummySymbol(name: TypeName) + syms + } + def isUnlinked(name: Name) = { + symbolOfName(name) match { + case NoSymbol => false + case sym => sym.info.typeSymbolDirect == DummyInfoSymbol + } + } private var executingRequest: Request = _ private val prevRequests = mutable.ListBuffer[Request]() private val directlyBoundNames = mutable.Set[Name]() def allHandlers = prevRequestList flatMap (_.handlers) - // def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x } - // def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol) + def 
allDefHandlers = allHandlers collect { case x: MemberDefHandler => x } + def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol) def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last def prevRequestList = prevRequests.toList - // def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct - // def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames) + def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct + def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames) def importHandlers = allHandlers collect { case x: ImportHandler => x } def withoutUnwrapping(op: => Unit): Unit = { diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala index d45146383b..302ba7a8ac 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala @@ -12,12 +12,12 @@ package interpreter * @author Lex Spoon, 2007/3/24 **/ class ISettings(intp: IMain) { - // /** A list of paths where :load should look */ - // var loadPath = List(".") + /** A list of paths where :load should look */ + var loadPath = List(".") - // /** Set this to true to see repl machinery under -Yrich-exceptions. - // */ - // var showInternalStackTraces = false + /** Set this to true to see repl machinery under -Yrich-exceptions. + */ + var showInternalStackTraces = false /** The maximum length of toString to use when printing the result * of an evaluation. 0 means no maximum. If a printout requires diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala index 978e220fab..c5048ebfd8 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Imports.scala @@ -15,8 +15,8 @@ trait Imports { import definitions.{ ObjectClass, ScalaPackage, JavaLangPackage, PredefModule } import memberHandlers._ - // def isNoImports = settings.noimports.value - // def isNoPredef = settings.nopredef.value + def isNoImports = settings.noimports.value + def isNoPredef = settings.nopredef.value /** Synthetic import handlers for the language defined imports. */ private def makeWildcardImportHandler(sym: Symbol): ImportHandler = { @@ -31,12 +31,12 @@ trait Imports { /** Symbols whose contents are language-defined to be imported. 
*/ def languageWildcardSyms: List[Symbol] = List(JavaLangPackage, ScalaPackage, PredefModule) - // def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe) + def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe) def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler def allImportedNames = importHandlers flatMap (_.importedNames) - // def importedTerms = onlyTerms(allImportedNames) - // def importedTypes = onlyTypes(allImportedNames) + def importedTerms = onlyTerms(allImportedNames) + def importedTypes = onlyTypes(allImportedNames) /** Types which have been wildcard imported, such as: * val x = "abc" ; import x._ // type java.lang.String @@ -52,17 +52,17 @@ trait Imports { def sessionWildcards: List[Type] = { importHandlers filter (_.importsWildcard) map (_.targetType) distinct } - // def wildcardTypes = languageWildcards ++ sessionWildcards + def wildcardTypes = languageWildcards ++ sessionWildcards def languageSymbols = languageWildcardSyms flatMap membersAtPickler def sessionImportedSymbols = importHandlers flatMap (_.importedSymbols) def importedSymbols = languageSymbols ++ sessionImportedSymbols def importedTermSymbols = importedSymbols collect { case x: TermSymbol => x } - // def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x } - // def implicitSymbols = importedSymbols filter (_.isImplicit) + def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x } + def implicitSymbols = importedSymbols filter (_.isImplicit) - // def importedTermNamed(name: String): Symbol = - // importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol + def importedTermNamed(name: String): Symbol = + importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol /** Tuples of (source, imported symbols) in the order they were imported. */ @@ -105,7 +105,7 @@ trait Imports { */ case class ComputedImports(prepend: String, append: String, access: String) protected def importsCode(wanted0: Set[Name]): ComputedImports = { - val wanted = wanted0 // filterNot isUnlinked + val wanted = wanted0 filterNot isUnlinked /** Narrow down the list of requests from which imports * should be taken. 
Removes requests which cannot contribute diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala index a5f491fe6f..6513381d77 100644 --- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala @@ -15,22 +15,22 @@ import Properties.isMac trait InteractiveReader { val interactive: Boolean - // def init(): Unit + def init(): Unit def reset(): Unit def history: History def completion: Completion - // def eraseLine(): Unit + def eraseLine(): Unit def redrawLine(): Unit - // def currentLine: String + def currentLine: String def readYesOrNo(prompt: String, alt: => Boolean): Boolean = readOneKey(prompt) match { case 'y' => true case 'n' => false case _ => alt } - // def readAssumingNo(prompt: String) = readYesOrNo(prompt, false) - // def readAssumingYes(prompt: String) = readYesOrNo(prompt, true) + def readAssumingNo(prompt: String) = readYesOrNo(prompt, false) + def readAssumingYes(prompt: String) = readYesOrNo(prompt, true) protected def readOneLine(prompt: String): String protected def readOneKey(prompt: String): Int @@ -50,6 +50,6 @@ object InteractiveReader { def apply(): InteractiveReader = SimpleReader() @deprecated("Use `apply` instead.", "2.9.0") - def createDefault(): InteractiveReader = apply() // used by sbt + def createDefault(): InteractiveReader = apply() } diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala index 5325c18710..5ee5e5526d 100644 --- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala +++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala @@ -28,9 +28,9 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput if (isModule) getModuleIfDefined(name) else getModuleIfDefined(name) ) - // def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe - // def typeOf(name: String) = getType(name, false) - // def moduleOf(name: String) = getType(name, true) + def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe + def typeOf(name: String) = getType(name, false) + def moduleOf(name: String) = getType(name, true) trait CompilerCompletion { def tp: Type @@ -47,7 +47,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput def tos(sym: Symbol): String = sym.decodedName def memberNamed(s: String) = exitingTyper(effectiveTp member newTermName(s)) - // def hasMethod(s: String) = memberNamed(s).isMethod + def hasMethod(s: String) = memberNamed(s).isMethod // XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the // compiler to crash for reasons not yet known. @@ -280,8 +280,8 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput } // generic interface for querying (e.g. 
interpreter loop, testing) - // def completions(buf: String): List[String] = - // topLevelFor(Parsed.dotted(buf + ".", buf.length + 1)) + def completions(buf: String): List[String] = + topLevelFor(Parsed.dotted(buf + ".", buf.length + 1)) def completer(): ScalaCompleter = new JLineTabCompletion diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala index 7f8beba32b..e033bab03b 100644 --- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala @@ -23,7 +23,7 @@ class JLineReader(_completion: => Completion) extends InteractiveReader { private def term = consoleReader.getTerminal() def reset() = term.reset() - // def init() = term.init() + def init() = term.init() def scalaToJline(tc: ScalaCompleter): Completer = new Completer { def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = { @@ -36,7 +36,7 @@ class JLineReader(_completion: => Completion) extends InteractiveReader { class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper { // working around protected/trait/java insufficiencies. - // def goBack(num: Int): Unit = back(num) + def goBack(num: Int): Unit = back(num) def readOneKey(prompt: String) = { this.print(prompt) this.flush() @@ -63,9 +63,9 @@ class JLineReader(_completion: => Completion) extends InteractiveReader { } } - // def currentLine = consoleReader.getCursorBuffer.buffer.toString + def currentLine = consoleReader.getCursorBuffer.buffer.toString def redrawLine() = consoleReader.redrawLineAndFlush() - // def eraseLine() = consoleReader.eraseLine() + def eraseLine() = consoleReader.eraseLine() // Alternate implementation, not sure if/when I need this. // def eraseLine() = while (consoleReader.delete()) { } def readOneLine(prompt: String) = consoleReader readLine prompt diff --git a/src/compiler/scala/tools/nsc/interpreter/Logger.scala b/src/compiler/scala/tools/nsc/interpreter/Logger.scala index df3e90f0e2..aeb25fc688 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Logger.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Logger.scala @@ -12,7 +12,7 @@ trait Logger { def isTrace: Boolean def out: JPrintWriter - // def info(msg: => Any): Unit = if (isInfo) out println msg - // def debug(msg: => Any): Unit = if (isDebug) out println msg - // def trace(msg: => Any): Unit = if (isTrace) out println msg + def info(msg: => Any): Unit = if (isInfo) out println msg + def debug(msg: => Any): Unit = if (isDebug) out println msg + def trace(msg: => Any): Unit = if (isTrace) out println msg } diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala index 168e611077..60325ece30 100644 --- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala +++ b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala @@ -19,13 +19,13 @@ class ProcessResult(val line: String) { val exitCode = builder ! 
logger def lines = buffer.toList - // def show() = lines foreach println + def show() = lines foreach println override def toString = "`%s` (%d lines, exit %d)".format(line, buffer.size, exitCode) } -// object ProcessResult { -// implicit def processResultToOutputLines(pr: ProcessResult): List[String] = pr.lines -// def apply(line: String): ProcessResult = new ProcessResult(line) -// } +object ProcessResult { + implicit def processResultToOutputLines(pr: ProcessResult): List[String] = pr.lines + def apply(line: String): ProcessResult = new ProcessResult(line) +} trait LoopCommands { protected def out: JPrintWriter @@ -35,14 +35,14 @@ trait LoopCommands { // a single interpreter command abstract class LoopCommand(val name: String, val help: String) extends (String => Result) { - // private var _longHelp: String = null - // final def defaultHelp = usageMsg + " (no extended help available.)" - // def hasLongHelp = _longHelp != null || longHelp != defaultHelp - // def withLongHelp(text: String): this.type = { _longHelp = text ; this } - // def longHelp = _longHelp match { - // case null => defaultHelp - // case text => text - // } + private var _longHelp: String = null + final def defaultHelp = usageMsg + " (no extended help available.)" + def hasLongHelp = _longHelp != null || longHelp != defaultHelp + def withLongHelp(text: String): this.type = { _longHelp = text ; this } + def longHelp = _longHelp match { + case null => defaultHelp + case text => text + } def usage: String = "" def usageMsg: String = ":" + name + ( if (usage == "") "" else " " + usage @@ -55,10 +55,10 @@ trait LoopCommands { Result(true, None) } - // def onError(msg: String) = { - // out.println("error: " + msg) - // showUsage() - // } + def onError(msg: String) = { + out.println("error: " + msg) + showUsage() + } } object LoopCommand { def nullary(name: String, help: String, f: () => Result): LoopCommand = @@ -68,8 +68,8 @@ trait LoopCommands { if (usage == "") new NullaryCmd(name, help, f) else new LineCmd(name, usage, help, f) - // def varargs(name: String, usage: String, help: String, f: List[String] => Result): LoopCommand = - // new VarArgsCmd(name, usage, help, f) + def varargs(name: String, usage: String, help: String, f: List[String] => Result): LoopCommand = + new VarArgsCmd(name, usage, help, f) } class NullaryCmd(name: String, help: String, f: String => Result) extends LoopCommand(name, help) { diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala index 6e564f4d17..95482f1e46 100644 --- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -81,7 +81,7 @@ trait MemberHandlers { def symbol = if (member.symbol eq null) NoSymbol else member.symbol def definesImplicit = false def definesValue = false - // def isLegalTopLevel = false + def isLegalTopLevel = false def definesTerm = Option.empty[TermName] def definesType = Option.empty[TypeName] @@ -152,7 +152,7 @@ trait MemberHandlers { class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) { override def definesTerm = Some(name) override def definesValue = true - // override def isLegalTopLevel = true + override def isLegalTopLevel = true override def resultExtractionCode(req: Request) = codegenln("defined module ", name) } @@ -161,7 +161,7 @@ trait MemberHandlers { override def definedSymbols = List(symbol, symbol.companionSymbol) filterNot (_ == NoSymbol) override def definesType = 
Some(name.toTypeName) override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase) - // override def isLegalTopLevel = true + override def isLegalTopLevel = true override def resultExtractionCode(req: Request) = codegenln("defined %s %s".format(keyword, name)) @@ -182,19 +182,19 @@ trait MemberHandlers { case sym => sym.thisType } private def importableTargetMembers = importableMembers(targetType).toList - // override def isLegalTopLevel = true - - // def createImportForName(name: Name): String = { - // selectors foreach { - // case sel @ ImportSelector(old, _, `name`, _) => return "import %s.{ %s }".format(expr, sel) - // case _ => () - // } - // "import %s.%s".format(expr, name) - // } + override def isLegalTopLevel = true + + def createImportForName(name: Name): String = { + selectors foreach { + case sel @ ImportSelector(old, _, `name`, _) => return "import %s.{ %s }".format(expr, sel) + case _ => () + } + "import %s.%s".format(expr, name) + } // TODO: Need to track these specially to honor Predef masking attempts, // because they must be the leading imports in the code generated for each // line. We can use the same machinery as Contexts now, anyway. - // def isPredefImport = isReferenceToPredef(expr) + def isPredefImport = isReferenceToPredef(expr) // wildcard imports, e.g. import foo._ private def selectorWild = selectors filter (_.name == nme.USCOREkw) diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala index c10ba23691..eff0ef59c5 100644 --- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala +++ b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala @@ -14,14 +14,14 @@ import scala.reflect.{ClassTag, classTag} trait NamedParamCreator { protected def freshName: () => String - // def apply(name: String, tpe: String, value: Any): NamedParam = NamedParamClass(name, tpe, value) + def apply(name: String, tpe: String, value: Any): NamedParam = NamedParamClass(name, tpe, value) def apply[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = new Typed[T](name, x) def apply[T: ru.TypeTag : ClassTag](x: T): NamedParam = apply(freshName(), x) def clazz(name: String, x: Any): NamedParam = new Untyped(name, x) - // def clazz(x: Any): NamedParam = clazz(freshName(), x) + def clazz(x: Any): NamedParam = clazz(freshName(), x) - // implicit def namedValue[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = apply(name, x) + implicit def namedValue[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = apply(name, x) implicit def tuple[T: ru.TypeTag : ClassTag](pair: (String, T)): NamedParam = apply(pair._1, pair._2) } diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/compiler/scala/tools/nsc/interpreter/Naming.scala index 57b1205fb3..0d03a8669a 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Naming.scala @@ -78,7 +78,7 @@ trait Naming { private lazy val userVar = new NameCreator(sessionNames.res) // var name, like res0 private lazy val internalVar = new NameCreator(sessionNames.ires) // internal var name, like $ires0 - // def isLineName(name: String) = (name startsWith sessionNames.line) && (name stripPrefix sessionNames.line forall (_.isDigit)) + def isLineName(name: String) = (name startsWith sessionNames.line) && (name stripPrefix sessionNames.line forall (_.isDigit)) def isUserVarName(name: String) = userVar didGenerate name def isInternalVarName(name: String) = internalVar 
didGenerate name diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala index 3d72e4b2a4..24c01e9ae6 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala @@ -17,7 +17,7 @@ class Parsed private ( ) extends Delimited { def isEmpty = args.isEmpty def isUnqualified = args.size == 1 - // def isQualified = args.size > 1 + def isQualified = args.size > 1 def isAtStart = cursor <= 0 private var _verbosity = 0 @@ -31,7 +31,7 @@ class Parsed private ( def bufferTail = new Parsed(buffer drop headLength, cursor - headLength, delimited) withVerbosity verbosity def prev = new Parsed(buffer, cursor - 1, delimited) withVerbosity verbosity - // def next = new Parsed(buffer, cursor + 1, delimited) withVerbosity verbosity + def next = new Parsed(buffer, cursor + 1, delimited) withVerbosity verbosity def currentChar = buffer(cursor) def currentArg = args.last def position = @@ -41,8 +41,8 @@ class Parsed private ( def isFirstDelimiter = !isEmpty && isDelimiterChar(buffer.head) def isLastDelimiter = !isEmpty && isDelimiterChar(buffer.last) - // def firstIfDelimiter = if (isFirstDelimiter) buffer.head.toString else "" - // def lastIfDelimiter = if (isLastDelimiter) buffer.last.toString else "" + def firstIfDelimiter = if (isFirstDelimiter) buffer.head.toString else "" + def lastIfDelimiter = if (isLastDelimiter) buffer.last.toString else "" def isQuoted = false // TODO def isEscaped = !isAtStart && isEscapeChar(currentChar) && !isEscapeChar(prev.currentChar) @@ -56,13 +56,13 @@ object Parsed { private def onull(s: String) = if (s == null) "" else s - // def apply(s: String): Parsed = apply(onull(s), onull(s).length) + def apply(s: String): Parsed = apply(onull(s), onull(s).length) def apply(s: String, cursor: Int): Parsed = apply(onull(s), cursor, DefaultDelimiters) def apply(s: String, cursor: Int, delimited: Char => Boolean): Parsed = new Parsed(onull(s), cursor, delimited) - // def dotted(s: String): Parsed = dotted(onull(s), onull(s).length) + def dotted(s: String): Parsed = dotted(onull(s), onull(s).length) def dotted(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ == '.') - // def undelimited(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ => false) + def undelimited(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ => false) } diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/compiler/scala/tools/nsc/interpreter/Phased.scala index 3cf448f4c2..e6b780f177 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Phased.scala @@ -24,7 +24,7 @@ trait Phased { case NoPhaseName => false case name => active = name ; true } - // def getMulti = multi + def getMulti = multi def setMulti(phases: Seq[PhaseName]): Boolean = { if (phases contains NoPhaseName) false else { @@ -66,16 +66,16 @@ trait Phased { try parseInternal(str) catch { case _: Exception => NoPhaseName } - // def apply[T](body: => T) = immutable.SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*) + def apply[T](body: => T) = immutable.SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*) def atCurrent[T](body: => T): T = enteringPhase(get)(body) def multi[T](body: => T): Seq[T] = multi map (ph => at(ph)(body)) - // def all[T](body: => T): Seq[T] = atMulti(PhaseName.all)(body) - // def show[T](body: => T): Seq[T] = { - // val pairs = atMap(PhaseName.all)(body) - // pairs foreach { 
case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) } - // pairs map (_._2) - // } + def all[T](body: => T): Seq[T] = atMulti(PhaseName.all)(body) + def show[T](body: => T): Seq[T] = { + val pairs = atMap(PhaseName.all)(body) + pairs foreach { case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) } + pairs map (_._2) + } def at[T](ph: PhaseName)(body: => T): T = { val saved = get @@ -90,10 +90,10 @@ trait Phased { finally setMulti(saved) } - // def showAt[T](phs: Seq[PhaseName])(body: => T): Unit = - // atMap[T](phs)(body) foreach { - // case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) - // } + def showAt[T](phs: Seq[PhaseName])(body: => T): Unit = + atMap[T](phs)(body) foreach { + case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) + } def atMap[T](phs: Seq[PhaseName])(body: => T): Seq[(PhaseName, T)] = phs zip atMulti(phs)(body) @@ -112,7 +112,7 @@ trait Phased { def apply(id: Int): PhaseName = all find (_.id == id) getOrElse NoPhaseName implicit def apply(s: String): PhaseName = nameMap(s) - // implicit def defaultPhaseName: PhaseName = active + implicit def defaultPhaseName: PhaseName = active } sealed abstract class PhaseName { lazy val id = phase.id @@ -121,7 +121,7 @@ trait Phased { def isEmpty = this eq NoPhaseName // Execute some code during this phase. - // def apply[T](body: => T): T = enteringPhase(phase)(body) + def apply[T](body: => T): T = enteringPhase(phase)(body) } case object Parser extends PhaseName @@ -158,5 +158,5 @@ trait Phased { } implicit def phaseEnumToPhase(name: PhaseName): Phase = name.phase - // implicit def phaseNameToPhase(name: String): Phase = currentRun.phaseNamed(name) + implicit def phaseNameToPhase(name: String): Phase = currentRun.phaseNamed(name) } diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala index 88c20c5096..ab0f1c0033 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Power.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala @@ -149,17 +149,17 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re // And whatever else there is to do. init.lines foreach (intp interpret _) } - // def valsDescription: String = { - // def to_str(m: Symbol) = "%12s %s".format( - // m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.") - - // ( rutil.info[ReplValsImpl].membersDeclared - // filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor) - // sortBy (_.decodedName) - // map to_str - // mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "") - // ) - // } + def valsDescription: String = { + def to_str(m: Symbol) = "%12s %s".format( + m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.") + + ( rutil.info[ReplValsImpl].membersDeclared + filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor) + sortBy (_.decodedName) + map to_str + mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "") + ) + } trait LowPriorityInternalInfo { implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfo[T] = new InternalInfo[T](None) @@ -173,13 +173,13 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re * of the conveniences exist on that wrapper. 
*/ trait LowPriorityInternalInfoWrapper { - // implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None) + implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None) } - // object InternalInfoWrapper extends LowPriorityInternalInfoWrapper { + object InternalInfoWrapper extends LowPriorityInternalInfoWrapper { - // } + } class InternalInfoWrapper[T: ru.TypeTag : ClassTag](value: Option[T] = None) { - // def ? : InternalInfo[T] = new InternalInfo[T](value) + def ? : InternalInfo[T] = new InternalInfo[T](value) } /** Todos... @@ -187,7 +187,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re * customizable symbol filter (had to hardcode no-spec to reduce noise) */ class InternalInfo[T](value: Option[T] = None)(implicit typeEvidence: ru.TypeTag[T], runtimeClassEvidence: ClassTag[T]) { - // private def newInfo[U: ru.TypeTag : ClassTag](value: U): InternalInfo[U] = new InternalInfo[U](Some(value)) + private def newInfo[U: ru.TypeTag : ClassTag](value: U): InternalInfo[U] = new InternalInfo[U](Some(value)) private def isSpecialized(s: Symbol) = s.name.toString contains "$mc" private def isImplClass(s: Symbol) = s.name.toString endsWith "$class" @@ -200,45 +200,45 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re ) def symbol = compilerSymbolFromTag(tag) def tpe = compilerTypeFromTag(tag) - // def name = symbol.name - // def companion = symbol.companionSymbol - // def info = symbol.info - // def moduleClass = symbol.moduleClass - // def owner = symbol.owner - // def owners = symbol.ownerChain drop 1 - // def signature = symbol.defString - - // def decls = info.decls - // def declsOverride = membersDeclared filter (_.isOverride) - // def declsOriginal = membersDeclared filterNot (_.isOverride) + def name = symbol.name + def companion = symbol.companionSymbol + def info = symbol.info + def moduleClass = symbol.moduleClass + def owner = symbol.owner + def owners = symbol.ownerChain drop 1 + def signature = symbol.defString + + def decls = info.decls + def declsOverride = membersDeclared filter (_.isOverride) + def declsOriginal = membersDeclared filterNot (_.isOverride) def members = membersUnabridged filterNot excludeMember def membersUnabridged = tpe.members.toList - // def membersDeclared = members filterNot excludeMember - // def membersInherited = members filterNot (membersDeclared contains _) - // def memberTypes = members filter (_.name.isTypeName) - // def memberMethods = members filter (_.isMethod) + def membersDeclared = members filterNot excludeMember + def membersInherited = members filterNot (membersDeclared contains _) + def memberTypes = members filter (_.name.isTypeName) + def memberMethods = members filter (_.isMethod) def pkg = symbol.enclosingPackage - // def pkgName = pkg.fullName - // def pkgClass = symbol.enclosingPackageClass - // def pkgMembers = pkg.info.members filterNot excludeMember - // def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage) - // def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember + def pkgName = pkg.fullName + def pkgClass = symbol.enclosingPackageClass + def pkgMembers = pkg.info.members filterNot excludeMember + def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage) + def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember def tag = typeEvidence def runtimeClass = runtimeClassEvidence.runtimeClass def 
shortClass = runtimeClass.getName split "[$.]" last def baseClasses = tpe.baseClasses - // def baseClassDecls = mapFrom(baseClasses)(_.info.decls.toList.sortBy(_.name)) - // def ancestors = baseClasses drop 1 - // def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol) - // def baseTypes = tpe.baseTypeSeq.toList + def baseClassDecls = mapFrom(baseClasses)(_.info.decls.toList.sortBy(_.name)) + def ancestors = baseClasses drop 1 + def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol) + def baseTypes = tpe.baseTypeSeq.toList - // def <:<[U: ru.TypeTag : ClassTag](other: U) = tpe <:< newInfo(other).tpe - // def lub[U: ru.TypeTag : ClassTag](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe)) - // def glb[U: ru.TypeTag : ClassTag](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe)) + def <:<[U: ru.TypeTag : ClassTag](other: U) = tpe <:< newInfo(other).tpe + def lub[U: ru.TypeTag : ClassTag](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe)) + def glb[U: ru.TypeTag : ClassTag](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe)) override def toString = value match { case Some(x) => "%s (%s)".format(x, shortClass) @@ -264,7 +264,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re } object Prettifier extends LowPriorityPrettifier { def stringOf(x: Any): String = scala.runtime.ScalaRunTime.stringOf(x) - // def prettify[T](value: T): TraversableOnce[String] = default[T] prettify value + def prettify[T](value: T): TraversableOnce[String] = default[T] prettify value def default[T] = new Prettifier[T] { def prettify(x: T): TraversableOnce[String] = AnyPrettifier prettify x def show(x: T): Unit = AnyPrettifier show x @@ -274,7 +274,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re def show(x: T): Unit def prettify(x: T): TraversableOnce[String] - // def show(xs: TraversableOnce[T]): Unit = prettify(xs) foreach println + def show(xs: TraversableOnce[T]): Unit = prettify(xs) foreach println def prettify(xs: TraversableOnce[T]): TraversableOnce[String] = xs flatMap (x => prettify(x)) } @@ -286,31 +286,31 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re pretty prettify f(value) foreach (StringPrettifier show _) def freq[U](p: T => U) = (value.toSeq groupBy p mapValues (_.size)).toList sortBy (-_._2) map (_.swap) - // def ppfreq[U](p: T => U): Unit = freq(p) foreach { case (count, key) => println("%5d %s".format(count, key)) } + def ppfreq[U](p: T => U): Unit = freq(p) foreach { case (count, key) => println("%5d %s".format(count, key)) } - // def |[U](f: Seq[T] => Seq[U]): Seq[U] = f(value) - // def ^^[U](f: T => U): Seq[U] = value map f - // def ^?[U](pf: PartialFunction[T, U]): Seq[U] = value collect pf + def |[U](f: Seq[T] => Seq[U]): Seq[U] = f(value) + def ^^[U](f: T => U): Seq[U] = value map f + def ^?[U](pf: PartialFunction[T, U]): Seq[U] = value collect pf - // def >>!(implicit ord: Ordering[T]): Unit = pp(_.sorted.distinct) + def >>!(implicit ord: Ordering[T]): Unit = pp(_.sorted.distinct) def >>(implicit ord: Ordering[T]): Unit = pp(_.sorted) def >!(): Unit = pp(_.distinct) def >(): Unit = pp(identity) - // def >#(): Unit = this ># (identity[T] _) - // def >#[U](p: T => U): Unit = this ppfreq p + def >#(): Unit = this ># (identity[T] _) + def >#[U](p: T => U): Unit = this ppfreq p - // def >?(p: T => Boolean): Unit = pp(_ filter p) - // def >?(s: String): Unit = pp(_ filter 
(_.toString contains s)) - // def >?(r: Regex): Unit = pp(_ filter (_.toString matches fixRegex(r))) + def >?(p: T => Boolean): Unit = pp(_ filter p) + def >?(s: String): Unit = pp(_ filter (_.toString contains s)) + def >?(r: Regex): Unit = pp(_ filter (_.toString matches fixRegex(r))) - // private def fixRegex(r: scala.util.matching.Regex): String = { - // val s = r.pattern.toString - // val prefix = if (s startsWith "^") "" else """^.*?""" - // val suffix = if (s endsWith "$") "" else """.*$""" + private def fixRegex(r: scala.util.matching.Regex): String = { + val s = r.pattern.toString + val prefix = if (s startsWith "^") "" else """^.*?""" + val suffix = if (s endsWith "$") "" else """.*$""" - // prefix + s + suffix - // } + prefix + s + suffix + } } class MultiPrettifierClass[T: Prettifier](val value: Seq[T]) extends PrettifierClass[T]() { } @@ -335,8 +335,8 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re def slurp(): String = io.Streamable.slurp(url) } class RichSymbolList(syms: List[Symbol]) { - // def sigs = syms map (_.defString) - // def infos = syms map (_.info) + def sigs = syms map (_.defString) + def infos = syms map (_.info) } trait Implicits1 { @@ -344,7 +344,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) = new SinglePrettifierClass[T](x) - // implicit def liftToTypeName(s: String): TypeName = newTypeName(s) + implicit def liftToTypeName(s: String): TypeName = newTypeName(s) } trait Implicits2 extends Implicits1 { class RichSymbol(sym: Symbol) { @@ -370,8 +370,8 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in) implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec) - // implicit def liftToTermName(s: String): TermName = newTermName(s) - // implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs) + implicit def liftToTermName(s: String): TermName = newTermName(s) + implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs) } trait ReplUtilities { @@ -382,13 +382,13 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re def clazz[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isClass) def info[T: ru.TypeTag : ClassTag] = InternalInfo[T] def ?[T: ru.TypeTag : ClassTag] = InternalInfo[T] - // def url(s: String) = { - // try new URL(s) - // catch { case _: MalformedURLException => - // if (Path(s).exists) Path(s).toURL - // else new URL("http://" + s) - // } - // } + def url(s: String) = { + try new URL(s) + catch { case _: MalformedURLException => + if (Path(s).exists) Path(s).toURL + else new URL("http://" + s) + } + } def sanitize(s: String): String = sanitize(s.getBytes()) def sanitize(s: Array[Byte]): String = (s map { case x if x.toChar.isControl => '?' 
@@ -406,11 +406,11 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re lazy val rutil: ReplUtilities = new ReplUtilities { } lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { } - // def context(code: String) = analyzer.rootContext(unit(code)) - // def source(code: String) = newSourceFile(code) + def context(code: String) = analyzer.rootContext(unit(code)) + def source(code: String) = newSourceFile(code) def unit(code: String) = newCompilationUnit(code) def trees(code: String) = parse(code) getOrElse Nil - // def typeOf(id: String) = intp.typeOfExpression(id) + def typeOf(id: String) = intp.typeOfExpression(id) override def toString = s""" |** Power mode status ** diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala index d48ceb4eff..7cd0f436c4 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala @@ -14,9 +14,9 @@ trait ReplConfig { lazy val replProps = new ReplProps class TapMaker[T](x: T) { - // def tapInfo(msg: => String): T = tap(x => replinfo(parens(x))) + def tapInfo(msg: => String): T = tap(x => replinfo(parens(x))) def tapDebug(msg: => String): T = tap(x => repldbg(parens(x))) - // def tapTrace(msg: => String): T = tap(x => repltrace(parens(x))) + def tapTrace(msg: => String): T = tap(x => repltrace(parens(x))) def tap[U](f: T => U): T = { f(x) x @@ -28,12 +28,12 @@ trait ReplConfig { try Console println msg catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) } - // private[nsc] def repldbgex(ex: Throwable): Unit = { - // if (isReplDebug) { - // echo("Caught/suppressing: " + ex) - // ex.printStackTrace - // } - // } + private[nsc] def repldbgex(ex: Throwable): Unit = { + if (isReplDebug) { + echo("Caught/suppressing: " + ex) + ex.printStackTrace + } + } private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg) private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg) private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg) @@ -45,14 +45,14 @@ trait ReplConfig { repltrace(stackTraceString(unwrap(t))) alt } - // private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T = - // substituteAndLog("" + alt, alt)(body) + private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T = + substituteAndLog("" + alt, alt)(body) private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = { try body catch logAndDiscard(label, alt) } - // private[nsc] def squashAndLog(label: String)(body: => Unit): Unit = - // substituteAndLog(label, ())(body) + private[nsc] def squashAndLog(label: String)(body: => Unit): Unit = + substituteAndLog(label, ())(body) def isReplTrace: Boolean = replProps.trace def isReplDebug: Boolean = replProps.debug || isReplTrace diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala index 22c27513d3..bc3e7a10d7 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala @@ -13,15 +13,15 @@ class ReplProps { private def bool(name: String) = BooleanProp.keyExists(name) private def int(name: String) = IntProp(name) - // val jlineDebug = bool("scala.tools.jline.internal.Log.debug") - // val jlineTrace = bool("scala.tools.jline.internal.Log.trace") + val jlineDebug = 
bool("scala.tools.jline.internal.Log.debug") + val jlineTrace = bool("scala.tools.jline.internal.Log.trace") val info = bool("scala.repl.info") val debug = bool("scala.repl.debug") val trace = bool("scala.repl.trace") val power = bool("scala.repl.power") - // val replInitCode = Prop[JFile]("scala.repl.initcode") + val replInitCode = Prop[JFile]("scala.repl.initcode") val replAutorunCode = Prop[JFile]("scala.repl.autoruncode") val powerInitCode = Prop[JFile]("scala.repl.power.initcode") val powerBanner = Prop[JFile]("scala.repl.power.banner") diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala index ed035f8a1a..670bbf9bae 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala @@ -29,5 +29,5 @@ trait ReplStrings { "scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen) def words(s: String) = s.trim split "\\s+" filterNot (_ == "") toList - // def isQuoted(s: String) = (s.length >= 2) && (s.head == s.last) && ("\"'" contains s.head) + def isQuoted(s: String) = (s.length >= 2) && (s.head == s.last) && ("\"'" contains s.head) } diff --git a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala index 7183db2dfb..4371f7fe05 100644 --- a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala +++ b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala @@ -10,7 +10,7 @@ import scala.reflect.{ ClassTag, classTag } class RichClass[T](val clazz: Class[T]) { def toTag: ClassTag[T] = ClassTag[T](clazz) - // def toTypeString: String = TypeStrings.fromClazz(clazz) + def toTypeString: String = TypeStrings.fromClazz(clazz) // Sadly isAnonymousClass does not return true for scala anonymous // classes because our naming scheme is not doing well against the @@ -20,13 +20,14 @@ class RichClass[T](val clazz: Class[T]) { catch { case _: java.lang.InternalError => false } // good ol' "Malformed class name" ) - def supertags: List[ClassTag[_]] = supers map (_.toTag) + /** It's not easy... to be... me... 
*/ + def supermans: List[ClassTag[_]] = supers map (_.toTag) def superNames: List[String] = supers map (_.getName) def interfaces: List[JClass] = supers filter (_.isInterface) def hasAncestorName(f: String => Boolean) = superNames exists f def hasAncestor(f: JClass => Boolean) = supers exists f - // def hasAncestorInPackage(pkg: String) = hasAncestorName(_ startsWith (pkg + ".")) + def hasAncestorInPackage(pkg: String) = hasAncestorName(_ startsWith (pkg + ".")) def supers: List[JClass] = { def loop(x: JClass): List[JClass] = x.getSuperclass match { diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala index a07f84cc10..bccd8158ec 100644 --- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala @@ -19,11 +19,11 @@ extends InteractiveReader val history = NoHistory val completion = NoCompletion - // def init() = () + def init() = () def reset() = () - // def eraseLine() = () + def eraseLine() = () def redrawLine() = () - // def currentLine = "" + def currentLine = "" def readOneLine(prompt: String): String = { if (interactive) { out.print(prompt) @@ -40,4 +40,4 @@ object SimpleReader { def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, interactive: Boolean = true): SimpleReader = new SimpleReader(in, out, interactive) -} +} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala index f9749feabe..9fb79a9d6f 100644 --- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala @@ -33,7 +33,7 @@ trait StructuredTypeStrings extends DestructureTypes { val NoGrouping = Grouping("", "", "", false) val ListGrouping = Grouping("(", ", ", ")", false) val ProductGrouping = Grouping("(", ", ", ")", true) - // val ParamGrouping = Grouping("(", ", ", ")", true) + val ParamGrouping = Grouping("(", ", ", ")", true) val BlockGrouping = Grouping(" { ", "; ", "}", false) private def str(level: Int)(body: => String): String = " " * level + body @@ -189,7 +189,7 @@ trait TypeStrings { else enclClass.getName + "." + (name stripPrefix enclPre) ) } - // def scalaName(ct: ClassTag[_]): String = scalaName(ct.runtimeClass) + def scalaName(ct: ClassTag[_]): String = scalaName(ct.runtimeClass) def anyClass(x: Any): JClass = if (x == null) null else x.getClass private def brackets(tps: String*): String = @@ -220,7 +220,7 @@ trait TypeStrings { * practice to rely on toString for correctness) generated the VALID string * representation of the type. */ - // def fromTypedValue[T: ru.TypeTag : ClassTag](x: T): String = fromTag[T] + def fromTypedValue[T: ru.TypeTag : ClassTag](x: T): String = fromTag[T] def fromValue(value: Any): String = if (value == null) "Null" else fromClazz(anyClass(value)) def fromClazz(clazz: JClass): String = scalaName(clazz) + tparamString(clazz) def fromTag[T: ru.TypeTag : ClassTag] : String = scalaName(classTag[T].runtimeClass) + tparamString[T] @@ -241,12 +241,12 @@ trait TypeStrings { } } - // val typeTransforms = List( - // "java.lang." -> "", - // "scala.collection.immutable." -> "immutable.", - // "scala.collection.mutable." -> "mutable.", - // "scala.collection.generic." -> "generic." - // ) + val typeTransforms = List( + "java.lang." -> "", + "scala.collection.immutable." -> "immutable.", + "scala.collection.mutable." 
-> "mutable.", + "scala.collection.generic." -> "generic." + ) } object TypeStrings extends TypeStrings { } diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala index e6c9adb296..6a2d69db2c 100644 --- a/src/compiler/scala/tools/nsc/interpreter/package.scala +++ b/src/compiler/scala/tools/nsc/interpreter/package.scala @@ -48,7 +48,7 @@ package object interpreter extends ReplConfig with ReplStrings { private[nsc] implicit def enrichClass[T](clazz: Class[T]) = new RichClass[T](clazz) private[nsc] implicit def enrichAnyRefWithTap[T](x: T) = new TapMaker(x) - // private[nsc] def tracing[T](msg: String)(x: T): T = x.tapTrace(msg) + private[nsc] def tracing[T](msg: String)(x: T): T = x.tapTrace(msg) private[nsc] def debugging[T](msg: String)(x: T) = x.tapDebug(msg) private val ourClassloader = getClass.getClassLoader @@ -68,38 +68,38 @@ package object interpreter extends ReplConfig with ReplStrings { import global.{ reporter => _, _ } import definitions._ - // lazy val tagOfStdReplVals = staticTypeTag[scala.tools.nsc.interpreter.StdReplVals] + lazy val tagOfStdReplVals = staticTypeTag[scala.tools.nsc.interpreter.StdReplVals] protected def echo(msg: String) = { Console.out println msg Console.out.flush() } - // def wrapCommand(line: String): String = { - // def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T" - - // words(line) match { - // case Nil => - // intp.executionWrapper match { - // case "" => "No execution wrapper is set." - // case s => "Current execution wrapper: " + s - // } - // case "clear" :: Nil => - // intp.executionWrapper match { - // case "" => "No execution wrapper is set." - // case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper." - // } - // case wrapper :: Nil => - // intp.typeOfExpression(wrapper) match { - // case PolyType(List(targ), MethodType(List(arg), restpe)) => - // setExecutionWrapper(originalPath(wrapper)) - // "Set wrapper to '" + wrapper + "'" - // case tp => - // failMsg + "\nFound: " - // } - // case _ => failMsg - // } - // } + def wrapCommand(line: String): String = { + def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T" + + words(line) match { + case Nil => + intp.executionWrapper match { + case "" => "No execution wrapper is set." + case s => "Current execution wrapper: " + s + } + case "clear" :: Nil => + intp.executionWrapper match { + case "" => "No execution wrapper is set." + case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper." 
+ } + case wrapper :: Nil => + intp.typeOfExpression(wrapper) match { + case PolyType(List(targ), MethodType(List(arg), restpe)) => + setExecutionWrapper(originalPath(wrapper)) + "Set wrapper to '" + wrapper + "'" + case tp => + failMsg + "\nFound: " + } + case _ => failMsg + } + } def implicitsCommand(line: String): String = { def p(x: Any) = intp.reporter.printMessage("" + x) diff --git a/src/compiler/scala/tools/nsc/interpreter/session/History.scala b/src/compiler/scala/tools/nsc/interpreter/session/History.scala index b727a0494f..daa05b86db 100644 --- a/src/compiler/scala/tools/nsc/interpreter/session/History.scala +++ b/src/compiler/scala/tools/nsc/interpreter/session/History.scala @@ -14,15 +14,15 @@ trait History { def asStrings: List[String] def index: Int def size: Int - // def grep(s: String): List[String] + def grep(s: String): List[String] } object NoHistory extends History { def asStrings = Nil - // def grep(s: String) = Nil + def grep(s: String) = Nil def index = 0 def size = 0 } -// object History { -// def empty: History = NoHistory -// } +object History { + def empty: History = NoHistory +} diff --git a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala index 06e431fb30..9f4e2b9df3 100644 --- a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala +++ b/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala @@ -55,8 +55,8 @@ class SimpleHistory extends JLineHistory { def moveToEnd(): Unit = setTo(size) // scala legacy interface - // def asList: List[JEntry] = toEntries().toList - // def asJavaList = entries() + def asList: List[JEntry] = toEntries().toList + def asJavaList = entries() def asStrings = buf.toList - // def grep(s: String) = buf.toList filter (_ contains s) + def grep(s: String) = buf.toList filter (_ contains s) } diff --git a/src/compiler/scala/tools/nsc/io/Fileish.scala b/src/compiler/scala/tools/nsc/io/Fileish.scala index 77d12ea022..7b4e385dd8 100644 --- a/src/compiler/scala/tools/nsc/io/Fileish.scala +++ b/src/compiler/scala/tools/nsc/io/Fileish.scala @@ -1,33 +1,33 @@ -// /* NSC -- new Scala compiler -// * Copyright 2005-2013 LAMP/EPFL -// * @author Paul Phillips -// */ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ -// package scala.tools.nsc -// package io +package scala.tools.nsc +package io -// import java.io.{ InputStream } -// import java.util.jar.JarEntry +import java.io.{ InputStream } +import java.util.jar.JarEntry -// /** A common interface for File-based things and Stream-based things. -// * (In particular, io.File and JarEntry.) -// */ -// class Fileish(val path: Path, val input: () => InputStream) extends Streamable.Chars { -// def inputStream() = input() +/** A common interface for File-based things and Stream-based things. + * (In particular, io.File and JarEntry.) + */ +class Fileish(val path: Path, val input: () => InputStream) extends Streamable.Chars { + def inputStream() = input() -// def parent = path.parent -// def name = path.name -// def isSourceFile = path.hasExtension("java", "scala") + def parent = path.parent + def name = path.name + def isSourceFile = path.hasExtension("java", "scala") -// private lazy val pkgLines = lines() collect { case x if x startsWith "package " => x stripPrefix "package" trim } -// lazy val pkgFromPath = parent.path.replaceAll("""[/\\]""", ".") -// lazy val pkgFromSource = pkgLines map (_ stripSuffix ";") mkString "." 
+ private lazy val pkgLines = lines() collect { case x if x startsWith "package " => x stripPrefix "package" trim } + lazy val pkgFromPath = parent.path.replaceAll("""[/\\]""", ".") + lazy val pkgFromSource = pkgLines map (_ stripSuffix ";") mkString "." -// override def toString = path.path -// } + override def toString = path.path +} -// object Fileish { -// def apply(f: File): Fileish = new Fileish(f, () => f.inputStream()) -// def apply(f: JarEntry, in: () => InputStream): Fileish = new Fileish(Path(f.getName), in) -// def apply(path: String, in: () => InputStream): Fileish = new Fileish(Path(path), in) -// } +object Fileish { + def apply(f: File): Fileish = new Fileish(f, () => f.inputStream()) + def apply(f: JarEntry, in: () => InputStream): Fileish = new Fileish(Path(f.getName), in) + def apply(path: String, in: () => InputStream): Fileish = new Fileish(Path(path), in) +} diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index 6f1c322391..ef2c9b13c0 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -36,9 +36,9 @@ class Jar(file: File) extends Iterable[JarEntry] { def this(jfile: JFile) = this(File(jfile)) def this(path: String) = this(File(path)) - // protected def errorFn(msg: String): Unit = Console println msg + protected def errorFn(msg: String): Unit = Console println msg - // lazy val jarFile = new JarFile(file.jfile) + lazy val jarFile = new JarFile(file.jfile) lazy val manifest = withJarInput(s => Option(s.getManifest)) def mainClass = manifest map (f => f(Name.MAIN_CLASS)) @@ -63,12 +63,12 @@ class Jar(file: File) extends Iterable[JarEntry] { Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f } override def iterator: Iterator[JarEntry] = this.toList.iterator - // def fileishIterator: Iterator[Fileish] = jarFile.entries.asScala map (x => Fileish(x, () => getEntryStream(x))) + def fileishIterator: Iterator[Fileish] = jarFile.entries.asScala map (x => Fileish(x, () => getEntryStream(x))) - // private def getEntryStream(entry: JarEntry) = jarFile getInputStream entry match { - // case null => errorFn("No such entry: " + entry) ; null - // case x => x - // } + private def getEntryStream(entry: JarEntry) = jarFile getInputStream entry match { + case null => errorFn("No such entry: " + entry) ; null + case x => x + } override def toString = "" + file } @@ -130,7 +130,7 @@ object Jar { m } def apply(manifest: JManifest): WManifest = new WManifest(manifest) - // implicit def unenrichManifest(x: WManifest): JManifest = x.underlying + implicit def unenrichManifest(x: WManifest): JManifest = x.underlying } class WManifest(manifest: JManifest) { for ((k, v) <- initialMainAttrs) @@ -147,12 +147,12 @@ object Jar { } def apply(name: Attributes.Name): String = attrs(name) - // def apply(name: String): String = apply(new Attributes.Name(name)) + def apply(name: String): String = apply(new Attributes.Name(name)) def update(key: Attributes.Name, value: String) = attrs.put(key, value) - // def update(key: String, value: String) = attrs.put(new Attributes.Name(key), value) + def update(key: String, value: String) = attrs.put(new Attributes.Name(key), value) - // def mainClass: String = apply(Name.MAIN_CLASS) - // def mainClass_=(value: String) = update(Name.MAIN_CLASS, value) + def mainClass: String = apply(Name.MAIN_CLASS) + def mainClass_=(value: String) = update(Name.MAIN_CLASS, value) } // See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html diff --git 
a/src/compiler/scala/tools/nsc/io/MsilFile.scala b/src/compiler/scala/tools/nsc/io/MsilFile.scala index 881af2e87a..1a3a4f5c81 100644 --- a/src/compiler/scala/tools/nsc/io/MsilFile.scala +++ b/src/compiler/scala/tools/nsc/io/MsilFile.scala @@ -15,4 +15,4 @@ import ch.epfl.lamp.compiler.msil.{ Type => MsilType } class MsilFile(val msilType: MsilType) extends VirtualFile(msilType.FullName, msilType.Namespace) { } -// object NoMsilFile extends MsilFile(null) { } +object NoMsilFile extends MsilFile(null) { } diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala index 3f16784b2d..56ff4a57ee 100644 --- a/src/compiler/scala/tools/nsc/io/Pickler.scala +++ b/src/compiler/scala/tools/nsc/io/Pickler.scala @@ -76,7 +76,7 @@ abstract class Pickler[T] { * @param fromNull an implicit evidence parameter ensuring that the type of values * handled by this pickler contains `null`. */ - // def orNull(implicit fromNull: Null <:< T): Pickler[T] = nullablePickler(this) + def orNull(implicit fromNull: Null <:< T): Pickler[T] = nullablePickler(this) /** A conditional pickler obtained from the current pickler. * @param cond the condition to test to find out whether pickler can handle @@ -93,7 +93,7 @@ abstract class Pickler[T] { object Pickler { - // var picklerDebugMode = false + var picklerDebugMode = false /** A base class representing unpickler result. It has two subclasses: * `UnpickleSucess` for successful unpicklings and `UnpickleFailure` for failures, @@ -176,14 +176,14 @@ object Pickler { /** A converter from binary functions to functions over `~`-pairs */ - // implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) } + implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) } /** An converter from unctions returning Options over pair to functions returning `~`-pairs * The converted function will raise a `MatchError` where the original function returned * a `None`. This converter is useful for turning `unapply` methods of case classes * into wrapper methods that can be passed as second argument to `wrap`. */ - // implicit def toTilde[T1, T2, S](f: S => Option[(T1, T2)]): S => T1 ~ T2 = { x => (f(x): @unchecked) match { case Some((x1, x2)) => x1 ~ x2 } } + implicit def toTilde[T1, T2, S](f: S => Option[(T1, T2)]): S => T1 ~ T2 = { x => (f(x): @unchecked) match { case Some((x1, x2)) => x1 ~ x2 } } /** Same as `p.labelled(label)`. */ @@ -250,13 +250,13 @@ object Pickler { /** Same as `p.orNull` */ - // def nullablePickler[T](p: Pickler[T])(implicit fromNull: Null <:< T): Pickler[T] = new Pickler[T] { - // def pickle(wr: Writer, x: T) = - // if (x == null) wr.write("null") else p.pickle(wr, x) - // def unpickle(rd: Lexer): Unpickled[T] = - // if (rd.token == NullLit) nextSuccess(rd, fromNull(null)) - // else p.unpickle(rd) - // } + def nullablePickler[T](p: Pickler[T])(implicit fromNull: Null <:< T): Pickler[T] = new Pickler[T] { + def pickle(wr: Writer, x: T) = + if (x == null) wr.write("null") else p.pickle(wr, x) + def unpickle(rd: Lexer): Unpickled[T] = + if (rd.token == NullLit) nextSuccess(rd, fromNull(null)) + else p.unpickle(rd) + } /** A conditional pickler for singleton objects. It represents these * with the object's underlying class as a label. 
@@ -330,20 +330,20 @@ object Pickler { tokenPickler("integer literal") { case IntLit(s) => s.toLong } /** A pickler for values of type `Double`, represented as floating point literals */ - // implicit val doublePickler: Pickler[Double] = - // tokenPickler("floating point literal") { case FloatLit(s) => s.toDouble } + implicit val doublePickler: Pickler[Double] = + tokenPickler("floating point literal") { case FloatLit(s) => s.toDouble } /** A pickler for values of type `Byte`, represented as integer literals */ - // implicit val bytePickler: Pickler[Byte] = longPickler.wrapped { _.toByte } { _.toLong } + implicit val bytePickler: Pickler[Byte] = longPickler.wrapped { _.toByte } { _.toLong } /** A pickler for values of type `Short`, represented as integer literals */ - // implicit val shortPickler: Pickler[Short] = longPickler.wrapped { _.toShort } { _.toLong } + implicit val shortPickler: Pickler[Short] = longPickler.wrapped { _.toShort } { _.toLong } /** A pickler for values of type `Int`, represented as integer literals */ implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong } /** A pickler for values of type `Float`, represented as floating point literals */ - // implicit val floatPickler: Pickler[Float] = doublePickler.wrapped { _.toFloat } { _.toLong } + implicit val floatPickler: Pickler[Float] = doublePickler.wrapped { _.toFloat } { _.toLong } /** A conditional pickler for the boolean value `true` */ private val truePickler = @@ -373,15 +373,15 @@ object Pickler { } /** A pickler for values of type `Char`, represented as string literals of length 1 */ - // implicit val charPickler: Pickler[Char] = - // stringPickler - // .wrapped { s => require(s.length == 1, "single character string literal expected, but "+quoted(s)+" found"); s(0) } { _.toString } + implicit val charPickler: Pickler[Char] = + stringPickler + .wrapped { s => require(s.length == 1, "single character string literal expected, but "+quoted(s)+" found"); s(0) } { _.toString } /** A pickler for pairs, represented as `~`-pairs */ - // implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] = - // (pkl[T1] ~ pkl[T2]) - // .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 } - // .labelled ("tuple2") + implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] = + (pkl[T1] ~ pkl[T2]) + .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 } + .labelled ("tuple2") /** A pickler for 3-tuples, represented as `~`-tuples */ implicit def tuple3Pickler[T1, T2, T3](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3]): Pickler[(T1, T2, T3)] = @@ -390,34 +390,34 @@ object Pickler { .labelled ("tuple3") /** A pickler for 4-tuples, represented as `~`-tuples */ - // implicit def tuple4Pickler[T1, T2, T3, T4](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3], p4: Pickler[T4]): Pickler[(T1, T2, T3, T4)] = - // (p1 ~ p2 ~ p3 ~ p4) - // .wrapped { case x1 ~ x2 ~ x3 ~ x4 => (x1, x2, x3, x4) } { case (x1, x2, x3, x4) => x1 ~ x2 ~ x3 ~ x4 } - // .labelled ("tuple4") + implicit def tuple4Pickler[T1, T2, T3, T4](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3], p4: Pickler[T4]): Pickler[(T1, T2, T3, T4)] = + (p1 ~ p2 ~ p3 ~ p4) + .wrapped { case x1 ~ x2 ~ x3 ~ x4 => (x1, x2, x3, x4) } { case (x1, x2, x3, x4) => x1 ~ x2 ~ x3 ~ x4 } + .labelled ("tuple4") /** A conditional pickler for the `scala.None` object */ - // implicit val nonePickler = singletonPickler(None) + implicit val nonePickler = singletonPickler(None) /** A conditional 
pickler for instances of class `scala.Some` */ - // implicit def somePickler[T: Pickler]: CondPickler[Some[T]] = - // pkl[T] - // .wrapped { Some(_) } { _.get } - // .asClass (classOf[Some[T]]) + implicit def somePickler[T: Pickler]: CondPickler[Some[T]] = + pkl[T] + .wrapped { Some(_) } { _.get } + .asClass (classOf[Some[T]]) /** A pickler for optional values */ - // implicit def optionPickler[T: Pickler]: Pickler[Option[T]] = nonePickler | somePickler[T] + implicit def optionPickler[T: Pickler]: Pickler[Option[T]] = nonePickler | somePickler[T] /** A pickler for list values */ implicit def listPickler[T: Pickler]: Pickler[List[T]] = iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List") /** A pickler for vector values */ - // implicit def vectorPickler[T: Pickler]: Pickler[Vector[T]] = - // iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector") + implicit def vectorPickler[T: Pickler]: Pickler[Vector[T]] = + iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector") /** A pickler for array values */ - // implicit def array[T : ClassTag : Pickler]: Pickler[Array[T]] = - // iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array") + implicit def array[T : ClassTag : Pickler]: Pickler[Array[T]] = + iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array") } /** A subclass of Pickler can indicate whether a particular value can be pickled by instances diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala index cb264a671c..e766c1b2fd 100644 --- a/src/compiler/scala/tools/nsc/io/Socket.scala +++ b/src/compiler/scala/tools/nsc/io/Socket.scala @@ -28,13 +28,13 @@ object Socket { private val optHandler = handlerFn[Option[T]](_ => None) private val eitherHandler = handlerFn[Either[Throwable, T]](x => Left(x)) - // def getOrElse[T1 >: T](alt: T1): T1 = opt getOrElse alt + def getOrElse[T1 >: T](alt: T1): T1 = opt getOrElse alt def either: Either[Throwable, T] = try Right(f()) catch eitherHandler def opt: Option[T] = try Some(f()) catch optHandler } - // def newIPv4Server(port: Int = 0) = new Box(() => preferringIPv4(new ServerSocket(0))) - // def newServer(port: Int = 0) = new Box(() => new ServerSocket(0)) + def newIPv4Server(port: Int = 0) = new Box(() => preferringIPv4(new ServerSocket(0))) + def newServer(port: Int = 0) = new Box(() => new ServerSocket(0)) def localhost(port: Int) = apply(InetAddress.getLocalHost(), port) def apply(host: InetAddress, port: Int) = new Box(() => new Socket(new JSocket(host, port))) def apply(host: String, port: Int) = new Box(() => new Socket(new JSocket(host, port))) @@ -62,4 +62,4 @@ class Socket(jsocket: JSocket) extends Streamable.Bytes with Closeable { out.close() } } -} +} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala index f6759be5eb..af745eb3e8 100644 --- a/src/compiler/scala/tools/nsc/io/SourceReader.scala +++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala @@ -34,7 +34,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) { } /** Reads the file with the specified name. */ - // def read(filename: String): Array[Char]= read(new JFile(filename)) + def read(filename: String): Array[Char]= read(new JFile(filename)) /** Reads the specified file. 
*/ def read(file: JFile): Array[Char] = { diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala index e9fb8a6d98..c29a7c96df 100644 --- a/src/compiler/scala/tools/nsc/io/package.scala +++ b/src/compiler/scala/tools/nsc/io/package.scala @@ -20,14 +20,14 @@ package object io { type Path = scala.reflect.io.Path val Path = scala.reflect.io.Path type PlainFile = scala.reflect.io.PlainFile - // val PlainFile = scala.reflect.io.PlainFile + val PlainFile = scala.reflect.io.PlainFile val Streamable = scala.reflect.io.Streamable type VirtualDirectory = scala.reflect.io.VirtualDirectory type VirtualFile = scala.reflect.io.VirtualFile - // val ZipArchive = scala.reflect.io.ZipArchive + val ZipArchive = scala.reflect.io.ZipArchive type ZipArchive = scala.reflect.io.ZipArchive - // implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning + implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning type JManifest = java.util.jar.Manifest type JFile = java.io.File @@ -38,10 +38,10 @@ package object io { def runnable(body: => Unit): Runnable = new Runnable { override def run() = body } def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body } def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body) - // def submit(runnable: Runnable) = daemonThreadPool submit runnable + def submit(runnable: Runnable) = daemonThreadPool submit runnable // Create, start, and return a daemon thread - // def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body) + def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body) def newThread(f: Thread => Unit)(body: => Unit): Thread = { val thread = new Thread(runnable(body)) f(thread) @@ -50,11 +50,11 @@ package object io { } // Set a timer to execute the given code. 
- // def timer(seconds: Int)(body: => Unit): Timer = { - // val alarm = new Timer(true) // daemon - // val tt = new TimerTask { def run() = body } + def timer(seconds: Int)(body: => Unit): Timer = { + val alarm = new Timer(true) // daemon + val tt = new TimerTask { def run() = body } - // alarm.schedule(tt, seconds * 1000) - // alarm - // } + alarm.schedule(tt, seconds * 1000) + alarm + } } diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 63f08c42ec..2f6c13dd67 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -35,7 +35,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { abstract class JavaParser extends ParserCommon { val in: JavaScanner - // protected def posToReport: Int = in.currentPos + protected def posToReport: Int = in.currentPos def freshName(prefix : String): Name protected implicit def i2p(offset : Int) : Position private implicit def p2i(pos : Position): Int = if (pos.isDefined) pos.point else -1 @@ -94,11 +94,11 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { if (skipIt) skip() } - // def warning(msg: String) : Unit = warning(in.currentPos, msg) + def warning(msg: String) : Unit = warning(in.currentPos, msg) def errorTypeTree = TypeTree().setType(ErrorType) setPos in.currentPos - // def errorTermTree = Literal(Constant(null)) setPos in.currentPos - // def errorPatternTree = blankExpr setPos in.currentPos + def errorTermTree = Literal(Constant(null)) setPos in.currentPos + def errorPatternTree = blankExpr setPos in.currentPos // --------- tree building ----------------------------- diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index 95a22f847b..e230585a8b 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -57,14 +57,14 @@ trait JavaScanners extends ast.parser.ScannersCommon { /** ... */ abstract class AbstractJavaScanner extends AbstractJavaTokenData { - // implicit def p2g(pos: Position): ScanPosition + implicit def p2g(pos: Position): ScanPosition implicit def g2p(pos: ScanPosition): Position /** the last error position */ - // var errpos: ScanPosition - // var lastPos: ScanPosition - // def skipToken: ScanPosition + var errpos: ScanPosition + var lastPos: ScanPosition + def skipToken: ScanPosition def nextToken(): Unit def next: AbstractJavaTokenData def intVal(negated: Boolean): Long @@ -73,7 +73,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { def floatVal: Double = floatVal(false) //def token2string(token : Int) : String = configuration.token2string(token) /** return recent scala doc, if any */ - // def flushDoc: DocComment + def flushDoc: DocComment def currentPos: Position } @@ -227,16 +227,16 @@ trait JavaScanners extends ast.parser.ScannersCommon { abstract class JavaScanner extends AbstractJavaScanner with JavaTokenData with Cloneable with ScannerCommon { override def intVal = super.intVal// todo: needed? 
override def floatVal = super.floatVal - // override var errpos: Int = NoPos + override var errpos: Int = NoPos def currentPos: Position = g2p(pos - 1) var in: JavaCharArrayReader = _ - // def dup: JavaScanner = { - // val dup = clone().asInstanceOf[JavaScanner] - // dup.in = in.dup - // dup - // } + def dup: JavaScanner = { + val dup = clone().asInstanceOf[JavaScanner] + dup.in = in.dup + dup + } /** character buffer for literals */ @@ -256,11 +256,11 @@ trait JavaScanners extends ast.parser.ScannersCommon { */ var docBuffer: StringBuilder = null - // def flushDoc: DocComment = { - // val ret = if (docBuffer != null) DocComment(docBuffer.toString, NoPosition) else null - // docBuffer = null - // ret - // } + def flushDoc: DocComment = { + val ret = if (docBuffer != null) DocComment(docBuffer.toString, NoPosition) else null + docBuffer = null + ret + } /** add the given character to the documentation buffer */ @@ -279,10 +279,10 @@ trait JavaScanners extends ast.parser.ScannersCommon { /** read next token and return last position */ - // def skipToken: Int = { - // val p = pos; nextToken - // p - 1 - // } + def skipToken: Int = { + val p = pos; nextToken + p - 1 + } def nextToken() { if (next.token == EMPTY) { @@ -868,7 +868,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { def syntaxError(pos: Int, msg: String) { error(pos, msg) token = ERROR - // errpos = pos + errpos = pos } /** generate an error at the current token position @@ -879,7 +879,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { def incompleteInputError(msg: String) { incompleteInputError(pos, msg) token = EOF - // errpos = pos + errpos = pos } override def toString() = token match { @@ -918,11 +918,11 @@ trait JavaScanners extends ast.parser.ScannersCommon { class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner { in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError) init - // def warning(pos: Int, msg: String) = unit.warning(pos, msg) + def warning(pos: Int, msg: String) = unit.warning(pos, msg) def error (pos: Int, msg: String) = unit. 
error(pos, msg) def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg) def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg) - // implicit def p2g(pos: Position): Int = if (pos.isDefined) pos.point else -1 + implicit def p2g(pos: Position): Int = if (pos.isDefined) pos.point else -1 implicit def g2p(pos: Int): Position = new OffsetPosition(unit.source, pos) } } diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala index 90f73ec44a..a562de291d 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala @@ -13,8 +13,8 @@ object JavaTokens extends ast.parser.Tokens { /** identifiers */ final val IDENTIFIER = 10 - // def isIdentifier(code : Int) = - // code == IDENTIFIER + def isIdentifier(code : Int) = + code == IDENTIFIER /** keywords */ final val ABSTRACT = 20 @@ -68,8 +68,8 @@ object JavaTokens extends ast.parser.Tokens { final val VOLATILE = 68 final val WHILE = 69 - // def isKeyword(code : Int) = - // code >= ABSTRACT && code <= WHILE + def isKeyword(code : Int) = + code >= ABSTRACT && code <= WHILE /** special symbols */ final val COMMA = 70 @@ -115,8 +115,8 @@ object JavaTokens extends ast.parser.Tokens { final val GTGTEQ = 113 final val GTGTGTEQ = 114 - // def isSymbol(code : Int) = - // code >= COMMA && code <= GTGTGTEQ + def isSymbol(code : Int) = + code >= COMMA && code <= GTGTGTEQ /** parenthesis */ final val LPAREN = 115 diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala index 07a79a174b..5ce1aabcd8 100644 --- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala +++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala @@ -22,8 +22,8 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => def impossible: Nothing = abort("this never happens") - // def treeCollect[T](tree: Tree, pf: PartialFunction[Tree, T]): List[T] = - // tree filter (pf isDefinedAt _) map (x => pf(x)) + def treeCollect[T](tree: Tree, pf: PartialFunction[Tree, T]): List[T] = + tree filter (pf isDefinedAt _) map (x => pf(x)) object Types { import definitions._ @@ -36,24 +36,24 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => // These tests for final classes can inspect the typeSymbol private def is(s: Symbol) = tpe.typeSymbol eq s - // def isByte = is(ByteClass) - // def isShort = is(ShortClass) + def isByte = is(ByteClass) + def isShort = is(ShortClass) def isInt = is(IntClass) - // def isChar = is(CharClass) - // def isBoolean = is(BooleanClass) + def isChar = is(CharClass) + def isBoolean = is(BooleanClass) def isNothing = is(NothingClass) - // def isArray = is(ArrayClass) + def isArray = is(ArrayClass) } } object Debug { - // def typeToString(t: Type): String = t match { - // case NoType => "x" - // case x => x.toString - // } - // def symbolToString(s: Symbol): String = s match { - // case x => x.toString - // } + def typeToString(t: Type): String = t match { + case NoType => "x" + case x => x.toString + } + def symbolToString(s: Symbol): String = s match { + case x => x.toString + } def treeToString(t: Tree): String = treeInfo.unbind(t) match { case EmptyTree => "?" 
case WILD() => "_" @@ -66,10 +66,10 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => // Formatting for some error messages private val NPAD = 15 def pad(s: String): String = "%%%ds" format (NPAD-1) format s - // def pad(s: Any): String = pad(s match { - // case x: Tree => treeToString(x) - // case x => x.toString - // }) + def pad(s: Any): String = pad(s match { + case x: Tree => treeToString(x) + case x => x.toString + }) // pretty print for debugging def pp(x: Any): String = pp(x, false) @@ -117,7 +117,7 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => else x } - // def indent(s: Any) = s.toString() split "\n" map (" " + _) mkString "\n" + def indent(s: Any) = s.toString() split "\n" map (" " + _) mkString "\n" def indentAll(s: Seq[Any]) = s map (" " + _.toString() + "\n") mkString } diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala index 44387b59fb..d2f5a98411 100644 --- a/src/compiler/scala/tools/nsc/matching/Matrix.scala +++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala @@ -140,7 +140,7 @@ trait Matrix extends MatrixAdditions { cases: List[CaseDef], default: Tree ) { - // def tvars = roots map (_.lhs) + def tvars = roots map (_.lhs) def valDefs = roots map (_.valDef) override def toString() = "MatrixInit(roots = %s, %d cases)".format(pp(roots), cases.size) } @@ -153,25 +153,25 @@ trait Matrix extends MatrixAdditions { def apply(xs: List[PatternVar]) = new PatternVarGroup(xs) // XXX - transitional - // def fromBindings(vlist: List[Binding], freeVars: List[Symbol] = Nil) = { - // def vmap(v: Symbol): Option[Binding] = vlist find (_.pvar eq v) - // val info = - // if (freeVars.isEmpty) vlist - // else (freeVars map vmap).flatten - - // val xs = - // for (Binding(lhs, rhs) <- info) yield - // new PatternVar(lhs, Ident(rhs) setType lhs.tpe, !(rhs hasFlag NO_EXHAUSTIVE)) - - // new PatternVarGroup(xs) - // } + def fromBindings(vlist: List[Binding], freeVars: List[Symbol] = Nil) = { + def vmap(v: Symbol): Option[Binding] = vlist find (_.pvar eq v) + val info = + if (freeVars.isEmpty) vlist + else (freeVars map vmap).flatten + + val xs = + for (Binding(lhs, rhs) <- info) yield + new PatternVar(lhs, Ident(rhs) setType lhs.tpe, !(rhs hasFlag NO_EXHAUSTIVE)) + + new PatternVarGroup(xs) + } } val emptyPatternVarGroup = PatternVarGroup() class PatternVarGroup(val pvs: List[PatternVar]) { def syms = pvs map (_.sym) def valDefs = pvs map (_.valDef) - // def idents = pvs map (_.ident) + def idents = pvs map (_.ident) def extractIndex(index: Int): (PatternVar, PatternVarGroup) = { val (t, ts) = self.extractIndex(pvs, index) @@ -180,16 +180,16 @@ trait Matrix extends MatrixAdditions { def isEmpty = pvs.isEmpty def size = pvs.size - // def head = pvs.head - // def ::(t: PatternVar) = PatternVarGroup(t :: pvs) + def head = pvs.head + def ::(t: PatternVar) = PatternVarGroup(t :: pvs) def :::(ts: List[PatternVar]) = PatternVarGroup(ts ::: pvs) - // def ++(other: PatternVarGroup) = PatternVarGroup(pvs ::: other.pvs) + def ++(other: PatternVarGroup) = PatternVarGroup(pvs ::: other.pvs) def apply(i: Int) = pvs(i) def zipWithIndex = pvs.zipWithIndex def indices = pvs.indices - // def map[T](f: PatternVar => T) = pvs map f - // def filter(p: PatternVar => Boolean) = PatternVarGroup(pvs filter p) + def map[T](f: PatternVar => T) = pvs map f + def filter(p: PatternVar => Boolean) = PatternVarGroup(pvs filter p) override def toString() = pp(pvs) } @@ -237,12 +237,12 @@ trait Matrix extends 
MatrixAdditions { tracing("create")(new PatternVar(lhs, rhs, checked)) } - // def createLazy(tpe: Type, f: Symbol => Tree, checked: Boolean) = { - // val lhs = newVar(owner.pos, tpe, Flags.LAZY :: flags(checked)) - // val rhs = f(lhs) + def createLazy(tpe: Type, f: Symbol => Tree, checked: Boolean) = { + val lhs = newVar(owner.pos, tpe, Flags.LAZY :: flags(checked)) + val rhs = f(lhs) - // tracing("createLazy")(new PatternVar(lhs, rhs, checked)) - // } + tracing("createLazy")(new PatternVar(lhs, rhs, checked)) + } private def newVar( pos: Position, diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala index b5e25f3809..ea4d9cd3f4 100644 --- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala +++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala @@ -126,7 +126,7 @@ trait ParallelMatching extends ast.TreeDSL // for propagating "unchecked" to synthetic vars def isChecked = !(sym hasFlag NO_EXHAUSTIVE) - // def flags: List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _) + def flags: List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _) // this is probably where this actually belongs def createVar(tpe: Type, f: Symbol => Tree) = context.createVar(tpe, f, isChecked) @@ -170,7 +170,7 @@ trait ParallelMatching extends ast.TreeDSL case class PatternMatch(scrut: Scrutinee, ps: List[Pattern]) { def head = ps.head def tail = ps.tail - // def size = ps.length + def size = ps.length def headType = head.necessaryType private val dummyCount = if (head.isCaseClass) headType.typeSymbol.caseFieldAccessors.length else 0 @@ -576,7 +576,7 @@ trait ParallelMatching extends ast.TreeDSL (_ys.toList, _ns.toList) } - // val moreSpecific = yeses map (_.moreSpecific) + val moreSpecific = yeses map (_.moreSpecific) val subsumed = yeses map (x => (x.bx, x.subsumed)) val remaining = noes map (x => (x.bx, x.remaining)) diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala index 1aad24c2d6..3ff5ce83bb 100644 --- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala +++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala @@ -61,7 +61,7 @@ trait PatternBindings extends ast.TreeDSL // This is for traversing the pattern tree - pattern types which might have // bound variables beneath them return a list of said patterns for flatMapping. - // def subpatternsForVars: List[Pattern] = Nil + def subpatternsForVars: List[Pattern] = Nil // The outermost Bind(x1, Bind(x2, ...)) surrounding the tree. 
private var _boundTree: Tree = tree diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala index 9cb91afb5b..e92c43f1fd 100644 --- a/src/compiler/scala/tools/nsc/matching/Patterns.scala +++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala @@ -33,7 +33,7 @@ trait Patterns extends ast.TreeDSL { def NoPattern = WildcardPattern() // The constant null pattern - // def NullPattern = LiteralPattern(NULL) + def NullPattern = LiteralPattern(NULL) // The Nil pattern def NilPattern = Pattern(gen.mkNil) @@ -60,7 +60,7 @@ trait Patterns extends ast.TreeDSL { override def covers(sym: Symbol) = newMatchesPattern(sym, tpt.tpe) override def sufficientType = tpt.tpe - // override def subpatternsForVars: List[Pattern] = List(Pattern(expr)) + override def subpatternsForVars: List[Pattern] = List(Pattern(expr)) override def simplify(pv: PatternVar) = Pattern(expr) match { case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr case _ => this @@ -140,10 +140,10 @@ trait Patterns extends ast.TreeDSL { require(fn.isType && this.isCaseClass, "tree: " + tree + " fn: " + fn) def name = tpe.typeSymbol.name def cleanName = tpe.typeSymbol.decodedName - // def hasPrefix = tpe.prefix.prefixString != "" - // def prefixedName = - // if (hasPrefix) "%s.%s".format(tpe.prefix.prefixString, cleanName) - // else cleanName + def hasPrefix = tpe.prefix.prefixString != "" + def prefixedName = + if (hasPrefix) "%s.%s".format(tpe.prefix.prefixString, cleanName) + else cleanName private def isColonColon = cleanName == "::" @@ -222,15 +222,15 @@ trait Patterns extends ast.TreeDSL { // 8.1.8 (b) (literal ArrayValues) case class SequencePattern(tree: ArrayValue) extends Pattern with SequenceLikePattern { - lazy val ArrayValue(_, elems) = tree + lazy val ArrayValue(elemtpt, elems) = tree - // override def subpatternsForVars: List[Pattern] = elemPatterns + override def subpatternsForVars: List[Pattern] = elemPatterns override def description = "Seq(%s)".format(elemPatterns mkString ", ") } // 8.1.8 (c) case class StarPattern(tree: Star) extends Pattern { - // lazy val Star(_) = tree + lazy val Star(elem) = tree override def description = "_*" } // XXX temporary? 
@@ -389,10 +389,10 @@ trait Patterns extends ast.TreeDSL { // fn.tpe.finalResultType.typeSymbol == SomeClass override def necessaryType = arg.tpe - // override def subpatternsForVars = args match { - // case List(ArrayValue(elemtpe, elems)) => toPats(elems) - // case _ => toPats(args) - // } + override def subpatternsForVars = args match { + case List(ArrayValue(elemtpe, elems)) => toPats(elems) + case _ => toPats(args) + } def resTypes = analyzer.unapplyTypeList(unfn.symbol, unfn.tpe, args.length) def resTypesString = resTypes match { @@ -403,13 +403,13 @@ trait Patterns extends ast.TreeDSL { sealed trait ApplyPattern extends Pattern { lazy val Apply(fn, args) = tree - // override def subpatternsForVars: List[Pattern] = toPats(args) + override def subpatternsForVars: List[Pattern] = toPats(args) - // override def dummies = - // if (!this.isCaseClass) Nil - // else emptyPatterns(sufficientType.typeSymbol.caseFieldAccessors.size) + override def dummies = + if (!this.isCaseClass) Nil + else emptyPatterns(sufficientType.typeSymbol.caseFieldAccessors.size) - // def isConstructorPattern = fn.isType + def isConstructorPattern = fn.isType override def covers(sym: Symbol) = newMatchesPattern(sym, fn.tpe) } @@ -420,7 +420,7 @@ trait Patterns extends ast.TreeDSL { def simplify(pv: PatternVar): Pattern = this // the right number of dummies for this pattern - // def dummies: List[Pattern] = Nil + def dummies: List[Pattern] = Nil // Is this a default pattern (untyped "_" or an EmptyTree inserted by the matcher) def isDefault = false @@ -454,10 +454,10 @@ trait Patterns extends ast.TreeDSL { def hasStar = false - // def setType(tpe: Type): this.type = { - // tree setType tpe - // this - // } + def setType(tpe: Type): this.type = { + tree setType tpe + this + } def equalsCheck = tracing("equalsCheck")( @@ -475,7 +475,7 @@ trait Patterns extends ast.TreeDSL { final override def toString = description - // def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType) + def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType) def kindString = "" } diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala index 7b77613e2a..e965370713 100644 --- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala @@ -47,7 +47,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { } }) - // implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered + implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered trait AbsSetting extends Ordered[Setting] with AbsSettingValue { def name: String @@ -84,12 +84,12 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { } /** If the appearance of the setting should halt argument processing. */ - // private var isTerminatorSetting = false - // def shouldStopProcessing = isTerminatorSetting - // def stopProcessing(): this.type = { - // isTerminatorSetting = true - // this - // } + private var isTerminatorSetting = false + def shouldStopProcessing = isTerminatorSetting + def stopProcessing(): this.type = { + isTerminatorSetting = true + this + } /** Issue error and return */ def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x } @@ -111,7 +111,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { /** Attempt to set from a properties file style property value. * Currently used by Eclipse SDT only. 
*/ - def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil) // used in ide? + def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil) /** These categorizations are so the help output shows -X and -P among * the standard options and -Y among the advanced options. diff --git a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala index 49b89392b9..0bec113743 100644 --- a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala @@ -1,77 +1,77 @@ -// /* NSC -- new Scala compiler -// * Copyright 2005-2013 LAMP/EPFL -// * @author Paul Phillips -// */ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ -// package scala.tools.nsc -// package settings +package scala.tools.nsc +package settings -// trait AdvancedScalaSettings { -// self: AbsScalaSettings => +trait AdvancedScalaSettings { + self: AbsScalaSettings => -// abstract class X extends SettingGroup("-X") { -// val assemextdirs: StringSetting -// val assemname: StringSetting -// val assempath: StringSetting -// val checkinit: BooleanSetting -// val disableassertions: BooleanSetting -// val elidebelow: IntSetting -// val experimental: BooleanSetting -// val future: BooleanSetting -// val generatephasegraph: StringSetting -// val logimplicits: BooleanSetting -// val mainClass: StringSetting -// val migration: BooleanSetting -// val noforwarders: BooleanSetting -// val nojline: BooleanSetting -// val nouescape: BooleanSetting -// val plugin: MultiStringSetting -// val plugindisable: MultiStringSetting -// val pluginlist: BooleanSetting -// val pluginrequire: MultiStringSetting -// val pluginsdir: StringSetting -// val print: PhasesSetting -// val printicode: BooleanSetting -// val printpos: BooleanSetting -// val printtypes: BooleanSetting -// val prompt: BooleanSetting -// val resident: BooleanSetting -// val script: StringSetting -// val showclass: StringSetting -// val showobject: StringSetting -// val showphases: BooleanSetting -// val sourcedir: StringSetting -// val sourcereader: StringSetting -// } -// // def Xexperimental = X.experimental -// // def Xmigration28 = X.migration -// // def Xnojline = X.nojline -// // def Xprint = X.print -// // def Xprintpos = X.printpos -// // def Xshowcls = X.showclass -// // def Xshowobj = X.showobject -// // def assemextdirs = X.assemextdirs -// // def assemname = X.assemname -// // def assemrefs = X.assempath -// // def checkInit = X.checkinit -// // def disable = X.plugindisable -// // def elideLevel = X.elidelevel -// // def future = X.future -// // def genPhaseGraph = X.generatephasegraph -// // def logimplicits = X.logimplicits -// // def noForwarders = X.noforwarders -// // def noassertions = X.disableassertions -// // def nouescape = X.nouescape -// // def plugin = X.plugin -// // def pluginsDir = X.pluginsdir -// // def printtypes = X.printtypes -// // def prompt = X.prompt -// // def require = X.require -// // def resident = X.resident -// // def script = X.script -// // def showPhases = X.showphases -// // def showPlugins = X.pluginlist -// // def sourceReader = X.sourcereader -// // def sourcedir = X.sourcedir -// // def writeICode = X.printicode -// } + abstract class X extends SettingGroup("-X") { + val assemextdirs: StringSetting + val assemname: StringSetting + val assempath: StringSetting + val checkinit: BooleanSetting + val disableassertions: BooleanSetting + 
val elidebelow: IntSetting + val experimental: BooleanSetting + val future: BooleanSetting + val generatephasegraph: StringSetting + val logimplicits: BooleanSetting + val mainClass: StringSetting + val migration: BooleanSetting + val noforwarders: BooleanSetting + val nojline: BooleanSetting + val nouescape: BooleanSetting + val plugin: MultiStringSetting + val plugindisable: MultiStringSetting + val pluginlist: BooleanSetting + val pluginrequire: MultiStringSetting + val pluginsdir: StringSetting + val print: PhasesSetting + val printicode: BooleanSetting + val printpos: BooleanSetting + val printtypes: BooleanSetting + val prompt: BooleanSetting + val resident: BooleanSetting + val script: StringSetting + val showclass: StringSetting + val showobject: StringSetting + val showphases: BooleanSetting + val sourcedir: StringSetting + val sourcereader: StringSetting + } + // def Xexperimental = X.experimental + // def Xmigration28 = X.migration + // def Xnojline = X.nojline + // def Xprint = X.print + // def Xprintpos = X.printpos + // def Xshowcls = X.showclass + // def Xshowobj = X.showobject + // def assemextdirs = X.assemextdirs + // def assemname = X.assemname + // def assemrefs = X.assempath + // def checkInit = X.checkinit + // def disable = X.plugindisable + // def elideLevel = X.elidelevel + // def future = X.future + // def genPhaseGraph = X.generatephasegraph + // def logimplicits = X.logimplicits + // def noForwarders = X.noforwarders + // def noassertions = X.disableassertions + // def nouescape = X.nouescape + // def plugin = X.plugin + // def pluginsDir = X.pluginsdir + // def printtypes = X.printtypes + // def prompt = X.prompt + // def require = X.require + // def resident = X.resident + // def script = X.script + // def showPhases = X.showphases + // def showPlugins = X.pluginlist + // def sourceReader = X.sourcereader + // def sourcedir = X.sourcedir + // def writeICode = X.printicode +} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 748c6069f0..4f4f0544da 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -176,7 +176,7 @@ class MutableSettings(val errorFn: String => Unit) * The class loader defining `T` should provide resources `app.class.path` * and `boot.class.path`. These resources should contain the application * and boot classpaths in the same form as would be passed on the command line.*/ - def embeddedDefaults[T: ClassTag]: Unit = // called from sbt and repl + def embeddedDefaults[T: ClassTag]: Unit = embeddedDefaults(classTag[T].runtimeClass.getClassLoader) /** Initializes these settings for embedded use by a class from the given class loader. @@ -239,7 +239,7 @@ class MutableSettings(val errorFn: String => Unit) /** Add a destination directory for sources found under srcdir. * Both directories should exits. */ - def add(srcDir: String, outDir: String): Unit = // used in ide? 
+ def add(srcDir: String, outDir: String): Unit = add(checkDir(AbstractFile.getDirectory(srcDir), srcDir), checkDir(AbstractFile.getDirectory(outDir), outDir)) @@ -434,7 +434,7 @@ class MutableSettings(val errorFn: String => Unit) def tryToSet(args: List[String]) = { value = true ; Some(args) } def unparse: List[String] = if (value) List(name) else Nil - override def tryToSetFromPropertyValue(s : String) { // used from ide + override def tryToSetFromPropertyValue(s : String) { value = s.equalsIgnoreCase("true") } } @@ -527,7 +527,7 @@ class MutableSettings(val errorFn: String => Unit) Some(rest) } override def tryToSetColon(args: List[String]) = tryToSet(args) - override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide + override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) def unparse: List[String] = value map (name + ":" + _) withHelpSyntax(name + ":<" + arg + ">") @@ -561,7 +561,7 @@ class MutableSettings(val errorFn: String => Unit) } def unparse: List[String] = if (value == default) Nil else List(name + ":" + value) - override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil) // used from ide + override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil) withHelpSyntax(name + ":<" + helpArg + ">") } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 5074efbd01..8dce48ee9a 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -52,14 +52,14 @@ trait ScalaSettings extends AbsScalaSettings val jvmargs = PrefixSetting("-J", "-J", "Pass directly to the runtime system.") val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.") - /*val toolcp =*/ PathSetting("-toolcp", "Add to the runner classpath.", "") + val toolcp = PathSetting("-toolcp", "Add to the runner classpath.", "") val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.") /** * Standard settings */ // argfiles is only for the help message - /*val argfiles = */ BooleanSetting ("@", "A text file containing compiler arguments (options and source files)") + val argfiles = BooleanSetting ("@", "A text file containing compiler arguments (options and source files)") val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp" val d = OutputSetting (outputDirs, ".") val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.") @@ -114,7 +114,7 @@ trait ScalaSettings extends AbsScalaSettings /** Compatibility stubs for options whose value name did * not previously match the option name. */ - // def XO = optimise + def XO = optimise def debuginfo = g def dependenciesFile = dependencyfile def nowarnings = nowarn @@ -180,12 +180,12 @@ trait ScalaSettings extends AbsScalaSettings val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly() - // def stop = stopAfter + def stop = stopAfter /** Area-specific debug output. 
*/ val Ybuildmanagerdebug = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.") - // val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.") + val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.") val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.") val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.") val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.") diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index 53d3557c67..e866ad6ae0 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -52,5 +52,5 @@ trait StandardScalaSettings { /** These are @ and -Dkey=val style settings, which don't * nicely map to identifiers. */ - // val argfiles: BooleanSetting // exists only to echo help message, should be done differently + val argfiles: BooleanSetting // exists only to echo help message, should be done differently } diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index d678fc60a8..d6d77278ab 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -39,13 +39,9 @@ trait Warnings { BooleanSetting("-Xlint", "Enable recommended additional warnings.") withPostSetHook (_ => lintWarnings foreach (_.value = true)) ) - - /*val warnEverything = */ ( + val warnEverything = ( BooleanSetting("-Ywarn-all", "Enable all -Y warnings.") - withPostSetHook { _ => - lint.value = true - allWarnings foreach (_.value = true) - } + withPostSetHook (_ => lintWarnings foreach (_.value = true)) ) // Individual warnings. @@ -61,7 +57,7 @@ trait Warnings { val warnInferAny = BooleanSetting ("-Ywarn-infer-any", "Warn when a type argument is inferred to be `Any`.") // Backward compatibility. 
- def Xwarnfatal = fatalWarnings // used by sbt - // def Xchecknull = warnSelectNullable - // def Ywarndeadcode = warnDeadCode + def Xwarnfatal = fatalWarnings + def Xchecknull = warnSelectNullable + def Ywarndeadcode = warnDeadCode } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala index 2d44d1e5f1..427b5bf887 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala @@ -31,8 +31,8 @@ class AbstractFileReader(val file: AbstractFile) { /** return byte at offset 'pos' */ - // @throws(classOf[IndexOutOfBoundsException]) - // def byteAt(pos: Int): Byte = buf(pos) + @throws(classOf[IndexOutOfBoundsException]) + def byteAt(pos: Int): Byte = buf(pos) /** read a byte */ @@ -45,10 +45,10 @@ class AbstractFileReader(val file: AbstractFile) { /** read some bytes */ - // def nextBytes(len: Int): Array[Byte] = { - // bp += len - // buf.slice(bp - len, bp) - // } + def nextBytes(len: Int): Array[Byte] = { + bp += len + buf.slice(bp - len, bp) + } /** read a character */ diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 50a455b33f..67f6c3ec5d 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1172,16 +1172,16 @@ abstract class ClassfileParser { /** Return the Symbol of the top level class enclosing `name`, * or 'name's symbol if no entry found for `name`. */ - // def topLevelClass(name: Name): Symbol = { - // val tlName = if (isDefinedAt(name)) { - // var entry = this(name) - // while (isDefinedAt(entry.outerName)) - // entry = this(entry.outerName) - // entry.outerName - // } else - // name - // classNameToSymbol(tlName) - // } + def topLevelClass(name: Name): Symbol = { + val tlName = if (isDefinedAt(name)) { + var entry = this(name) + while (isDefinedAt(entry.outerName)) + entry = this(entry.outerName) + entry.outerName + } else + name + classNameToSymbol(tlName) + } /** Return the class symbol for `externalName`. It looks it up in its outer class. * Forces all outer class symbols to be completed. 
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index b5459ec773..b7511377cc 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -632,9 +632,9 @@ abstract class ICodeReader extends ClassfileParser { else instanceCode class LinearCode { - val instrs: ListBuffer[(Int, Instruction)] = new ListBuffer - val jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]() - val locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap() + var instrs: ListBuffer[(Int, Instruction)] = new ListBuffer + var jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]() + var locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap() var containsDUPX = false var containsNEW = false diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 324d62b662..941604b154 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -1002,110 +1002,110 @@ abstract class Pickler extends SubComponent { } /** Print entry for diagnostics */ - // def printEntryAtIndex(idx: Int) = printEntry(entries(idx)) - // def printEntry(entry: AnyRef) { - // def printRef(ref: AnyRef) { - // print(index(ref)+ - // (if (ref.isInstanceOf[Name]) "("+ref+") " else " ")) - // } - // def printRefs(refs: List[AnyRef]) { refs foreach printRef } - // def printSymInfo(sym: Symbol) { - // printRef(sym.name) - // printRef(localizedOwner(sym)) - // print(flagsToString(sym.flags & PickledFlags)+" ") - // if (sym.hasAccessBoundary) printRef(sym.privateWithin) - // printRef(sym.info) - // } - // def printBody(entry: AnyRef) = entry match { - // case name: Name => - // print((if (name.isTermName) "TERMname " else "TYPEname ")+name) - // case NoSymbol => - // print("NONEsym") - // case sym: Symbol if !isLocal(sym) => - // if (sym.isModuleClass) { - // print("EXTMODCLASSref "); printRef(sym.name.toTermName) - // } else { - // print("EXTref "); printRef(sym.name) - // } - // if (!sym.owner.isRoot) printRef(sym.owner) - // case sym: ClassSymbol => - // print("CLASSsym ") - // printSymInfo(sym) - // if (sym.thisSym.tpe != sym.tpe) printRef(sym.typeOfThis) - // case sym: TypeSymbol => - // print(if (sym.isAbstractType) "TYPEsym " else "ALIASsym ") - // printSymInfo(sym) - // case sym: TermSymbol => - // print(if (sym.isModule) "MODULEsym " else "VALsym ") - // printSymInfo(sym) - // if (sym.alias != NoSymbol) printRef(sym.alias) - // case NoType => - // print("NOtpe") - // case NoPrefix => - // print("NOPREFIXtpe") - // case ThisType(sym) => - // print("THIStpe "); printRef(sym) - // case SingleType(pre, sym) => - // print("SINGLEtpe "); printRef(pre); printRef(sym); - // case ConstantType(value) => - // print("CONSTANTtpe "); printRef(value); - // case TypeRef(pre, sym, args) => - // print("TYPEREFtpe "); printRef(pre); printRef(sym); printRefs(args); - // case TypeBounds(lo, hi) => - // print("TYPEBOUNDStpe "); printRef(lo); printRef(hi); - // case tp @ RefinedType(parents, decls) => - // print("REFINEDtpe "); printRef(tp.typeSymbol); printRefs(parents); - // case ClassInfoType(parents, decls, clazz) => - // print("CLASSINFOtpe "); printRef(clazz); printRefs(parents); - // case mt @ MethodType(formals, restpe) => - // print("METHODtpe"); printRef(restpe); printRefs(formals) - // case 
PolyType(tparams, restpe) => - // print("POLYtpe "); printRef(restpe); printRefs(tparams); - // case ExistentialType(tparams, restpe) => - // print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams); - // print("||| "+entry) - // case c @ Constant(_) => - // print("LITERAL ") - // if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0)) - // else if (c.tag == ByteTag) print("Byte "+c.longValue) - // else if (c.tag == ShortTag) print("Short "+c.longValue) - // else if (c.tag == CharTag) print("Char "+c.longValue) - // else if (c.tag == IntTag) print("Int "+c.longValue) - // else if (c.tag == LongTag) print("Long "+c.longValue) - // else if (c.tag == FloatTag) print("Float "+c.floatValue) - // else if (c.tag == DoubleTag) print("Double "+c.doubleValue) - // else if (c.tag == StringTag) { print("String "); printRef(newTermName(c.stringValue)) } - // else if (c.tag == ClazzTag) { print("Class "); printRef(c.typeValue) } - // else if (c.tag == EnumTag) { print("Enum "); printRef(c.symbolValue) } - // case AnnotatedType(annots, tp, selfsym) => - // if (settings.selfInAnnots.value) { - // print("ANNOTATEDWSELFtpe ") - // printRef(tp) - // printRef(selfsym) - // printRefs(annots) - // } else { - // print("ANNOTATEDtpe ") - // printRef(tp) - // printRefs(annots) - // } - // case (target: Symbol, AnnotationInfo(atp, args, Nil)) => - // print("SYMANNOT ") - // printRef(target) - // printRef(atp) - // for (c <- args) printRef(c) - // case (target: Symbol, children: List[_]) => - // print("CHILDREN ") - // printRef(target) - // for (c <- children) printRef(c.asInstanceOf[Symbol]) - // case AnnotationInfo(atp, args, Nil) => - // print("ANNOTINFO") - // printRef(atp) - // for (c <- args) printRef(c) - // case _ => - // throw new FatalError("bad entry: " + entry + " " + entry.getClass) - // } - // printBody(entry); println() - // } + def printEntryAtIndex(idx: Int) = printEntry(entries(idx)) + def printEntry(entry: AnyRef) { + def printRef(ref: AnyRef) { + print(index(ref)+ + (if (ref.isInstanceOf[Name]) "("+ref+") " else " ")) + } + def printRefs(refs: List[AnyRef]) { refs foreach printRef } + def printSymInfo(sym: Symbol) { + printRef(sym.name) + printRef(localizedOwner(sym)) + print(flagsToString(sym.flags & PickledFlags)+" ") + if (sym.hasAccessBoundary) printRef(sym.privateWithin) + printRef(sym.info) + } + def printBody(entry: AnyRef) = entry match { + case name: Name => + print((if (name.isTermName) "TERMname " else "TYPEname ")+name) + case NoSymbol => + print("NONEsym") + case sym: Symbol if !isLocal(sym) => + if (sym.isModuleClass) { + print("EXTMODCLASSref "); printRef(sym.name.toTermName) + } else { + print("EXTref "); printRef(sym.name) + } + if (!sym.owner.isRoot) printRef(sym.owner) + case sym: ClassSymbol => + print("CLASSsym ") + printSymInfo(sym) + if (sym.thisSym.tpe != sym.tpe) printRef(sym.typeOfThis) + case sym: TypeSymbol => + print(if (sym.isAbstractType) "TYPEsym " else "ALIASsym ") + printSymInfo(sym) + case sym: TermSymbol => + print(if (sym.isModule) "MODULEsym " else "VALsym ") + printSymInfo(sym) + if (sym.alias != NoSymbol) printRef(sym.alias) + case NoType => + print("NOtpe") + case NoPrefix => + print("NOPREFIXtpe") + case ThisType(sym) => + print("THIStpe "); printRef(sym) + case SingleType(pre, sym) => + print("SINGLEtpe "); printRef(pre); printRef(sym); + case ConstantType(value) => + print("CONSTANTtpe "); printRef(value); + case TypeRef(pre, sym, args) => + print("TYPEREFtpe "); printRef(pre); printRef(sym); printRefs(args); + case 
TypeBounds(lo, hi) => + print("TYPEBOUNDStpe "); printRef(lo); printRef(hi); + case tp @ RefinedType(parents, decls) => + print("REFINEDtpe "); printRef(tp.typeSymbol); printRefs(parents); + case ClassInfoType(parents, decls, clazz) => + print("CLASSINFOtpe "); printRef(clazz); printRefs(parents); + case mt @ MethodType(formals, restpe) => + print("METHODtpe"); printRef(restpe); printRefs(formals) + case PolyType(tparams, restpe) => + print("POLYtpe "); printRef(restpe); printRefs(tparams); + case ExistentialType(tparams, restpe) => + print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams); + print("||| "+entry) + case c @ Constant(_) => + print("LITERAL ") + if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0)) + else if (c.tag == ByteTag) print("Byte "+c.longValue) + else if (c.tag == ShortTag) print("Short "+c.longValue) + else if (c.tag == CharTag) print("Char "+c.longValue) + else if (c.tag == IntTag) print("Int "+c.longValue) + else if (c.tag == LongTag) print("Long "+c.longValue) + else if (c.tag == FloatTag) print("Float "+c.floatValue) + else if (c.tag == DoubleTag) print("Double "+c.doubleValue) + else if (c.tag == StringTag) { print("String "); printRef(newTermName(c.stringValue)) } + else if (c.tag == ClazzTag) { print("Class "); printRef(c.typeValue) } + else if (c.tag == EnumTag) { print("Enum "); printRef(c.symbolValue) } + case AnnotatedType(annots, tp, selfsym) => + if (settings.selfInAnnots.value) { + print("ANNOTATEDWSELFtpe ") + printRef(tp) + printRef(selfsym) + printRefs(annots) + } else { + print("ANNOTATEDtpe ") + printRef(tp) + printRefs(annots) + } + case (target: Symbol, AnnotationInfo(atp, args, Nil)) => + print("SYMANNOT ") + printRef(target) + printRef(atp) + for (c <- args) printRef(c) + case (target: Symbol, children: List[_]) => + print("CHILDREN ") + printRef(target) + for (c <- children) printRef(c.asInstanceOf[Symbol]) + case AnnotationInfo(atp, args, Nil) => + print("ANNOTINFO") + printRef(atp) + for (c <- args) printRef(c) + case _ => + throw new FatalError("bad entry: " + entry + " " + entry.getClass) + } + printBody(entry); println() + } /** Write byte array */ def writeArray() { diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 0fe72c992e..78fb725041 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -187,9 +187,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { /** Returns the generic class that was specialized to 'sClass', or * 'sClass' itself if sClass is not a specialized subclass. */ - // def genericClass(sClass: Symbol): Symbol = - // if (sClass.isSpecialized) sClass.superClass - // else sClass + def genericClass(sClass: Symbol): Symbol = + if (sClass.isSpecialized) sClass.superClass + else sClass case class Overload(sym: Symbol, env: TypeEnv) { override def toString = "specialized overload " + sym + " in " + env @@ -223,7 +223,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { */ def degenerate = false - // def isAccessor = false + def isAccessor = false } /** Symbol is a special overloaded method of 'original', in the environment env. */ @@ -248,7 +248,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { /** Symbol is a specialized accessor for the `target` field. 
*/ case class SpecializedAccessor(target: Symbol) extends SpecializedInfo { - // override def isAccessor = true + override def isAccessor = true } /** Symbol is a specialized method whose body should be the target's method body. */ @@ -288,8 +288,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def specializedParams(sym: Symbol): List[Symbol] = sym.info.typeParams filter (_.isSpecialized) - // def splitParams(tps: List[Symbol]) = - // tps partition (_.isSpecialized) + def splitParams(tps: List[Symbol]) = + tps partition (_.isSpecialized) /** Given an original class symbol and a list of types its type parameters are instantiated at * returns a list of type parameters that should remain in the TypeRef when instantiating a @@ -1185,7 +1185,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * * A conflicting type environment could still be satisfiable. */ - // def conflicting(env: TypeEnv) = !nonConflicting(env) + def conflicting(env: TypeEnv) = !nonConflicting(env) def nonConflicting(env: TypeEnv) = env forall { case (tvar, tpe) => (subst(env, tvar.info.bounds.lo) <:< tpe) && (tpe <:< subst(env, tvar.info.bounds.hi)) } @@ -1866,10 +1866,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - // def printSpecStats() { - // println(" concreteSpecMembers: %7d".format(concreteSpecMethods.size)) - // println(" overloads: %7d".format(overloads.size)) - // println(" typeEnv: %7d".format(typeEnv.size)) - // println(" info: %7d".format(info.size)) - // } + def printSpecStats() { + println(" concreteSpecMembers: %7d".format(concreteSpecMethods.size)) + println(" overloads: %7d".format(overloads.size)) + println(" typeEnv: %7d".format(typeEnv.size)) + println(" info: %7d".format(info.size)) + } } diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index 798e604be4..2e0cc3bd98 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -147,7 +147,7 @@ abstract class TailCalls extends Transform { } def enclosingType = method.enclClass.typeOfThis - // def methodTypeParams = method.tpe.typeParams + def methodTypeParams = method.tpe.typeParams def isEligible = method.isEffectivelyFinal // @tailrec annotation indicates mandatory transformation def isMandatory = method.hasAnnotation(TailrecClass) && !forMSIL diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala index f0414b8639..b7da0e0087 100644 --- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala +++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala @@ -21,7 +21,7 @@ trait TypingTransformers { else analyzer.newTyper(analyzer.rootContext(unit, EmptyTree, true)) protected var curTree: Tree = _ - // protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) } + protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) } override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 9ea1ff4263..a8d7de6362 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -219,7 +219,7 @@ trait Contexts { self: 
Analyzer => current } - // def logError(err: AbsTypeError) = buffer += err + def logError(err: AbsTypeError) = buffer += err def withImplicitsEnabled[T](op: => T): T = { val saved = implicitsEnabled @@ -313,13 +313,13 @@ trait Contexts { self: Analyzer => } // TODO: remove? Doesn't seem to be used - // def make(unit: CompilationUnit): Context = { - // val c = make(unit, EmptyTree, owner, scope, imports) - // c.setReportErrors() - // c.implicitsEnabled = true - // c.macrosEnabled = true - // c - // } + def make(unit: CompilationUnit): Context = { + val c = make(unit, EmptyTree, owner, scope, imports) + c.setReportErrors() + c.implicitsEnabled = true + c.macrosEnabled = true + c + } def makeNewImport(sym: Symbol): Context = makeNewImport(gen.mkWildcardImport(sym)) @@ -491,14 +491,14 @@ trait Contexts { self: Analyzer => /** Return closest enclosing context that defines a superclass of `clazz`, or a * companion module of a superclass of `clazz`, or NoContext if none exists */ - // def enclosingSuperClassContext(clazz: Symbol): Context = { - // var c = this.enclClass - // while (c != NoContext && - // !clazz.isNonBottomSubClass(c.owner) && - // !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.companionClass))) - // c = c.outer.enclClass - // c - // } + def enclosingSuperClassContext(clazz: Symbol): Context = { + var c = this.enclClass + while (c != NoContext && + !clazz.isNonBottomSubClass(c.owner) && + !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.companionClass))) + c = c.outer.enclClass + c + } /** Return the closest enclosing context that defines a subclass of `clazz` * or a companion object thereof, or `NoContext` if no such context exists. diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala index 856043bca9..79cd46e018 100644 --- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala +++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala @@ -64,15 +64,15 @@ trait DestructureTypes { }, tree.productPrefix ) - // def wrapSymbol(label: String, sym: Symbol): Node = { - // if (sym eq NoSymbol) wrapEmpty - // else atom(label, sym) - // } - // def wrapInfo(sym: Symbol) = sym.info match { - // case TypeBounds(lo, hi) => typeBounds(lo, hi) - // case PolyType(tparams, restpe) => polyFunction(tparams, restpe) - // case _ => wrapEmpty - // } + def wrapSymbol(label: String, sym: Symbol): Node = { + if (sym eq NoSymbol) wrapEmpty + else atom(label, sym) + } + def wrapInfo(sym: Symbol) = sym.info match { + case TypeBounds(lo, hi) => typeBounds(lo, hi) + case PolyType(tparams, restpe) => polyFunction(tparams, restpe) + case _ => wrapEmpty + } def wrapSymbolInfo(sym: Symbol): Node = { if ((sym eq NoSymbol) || openSymbols(sym)) wrapEmpty else { @@ -95,7 +95,7 @@ trait DestructureTypes { def constant(label: String, const: Constant): Node = atom(label, const) def scope(decls: Scope): Node = node("decls", scopeMemberList(decls.toList)) - // def const[T](named: (String, T)): Node = constant(named._1, Constant(named._2)) + def const[T](named: (String, T)): Node = constant(named._1, Constant(named._2)) def resultType(restpe: Type): Node = this("resultType", restpe) def typeParams(tps: List[Symbol]): Node = node("typeParams", symbolList(tps)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index e5e52e91c3..7d58155eb2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala 
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -19,10 +19,10 @@ abstract class Duplicators extends Analyzer { import global._ import definitions.{ AnyRefClass, AnyValClass } - // def retyped(context: Context, tree: Tree): Tree = { - // resetClassOwners - // (newBodyDuplicator(context)).typed(tree) - // } + def retyped(context: Context, tree: Tree): Tree = { + resetClassOwners + (newBodyDuplicator(context)).typed(tree) + } /** Retype the given tree in the given context. Use this method when retyping * a method in a different class. The typer will replace references to the this of @@ -42,8 +42,8 @@ abstract class Duplicators extends Analyzer { protected def newBodyDuplicator(context: Context) = new BodyDuplicator(context) - // def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree = - // (newBodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis) + def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree = + (newBodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis) /** Return the special typer for duplicate method bodies. */ override def newTyper(context: Context): Typer = @@ -186,19 +186,19 @@ abstract class Duplicators extends Analyzer { stats.foreach(invalidate(_, owner)) } - // def retypedMethod(ddef: DefDef, oldThis: Symbol, newThis: Symbol): Tree = { - // oldClassOwner = oldThis - // newClassOwner = newThis - // invalidateAll(ddef.tparams) - // mforeach(ddef.vparamss) { vdef => - // invalidate(vdef) - // vdef.tpe = null - // } - // ddef.symbol = NoSymbol - // enterSym(context, ddef) - // debuglog("remapping this of " + oldClassOwner + " to " + newClassOwner) - // typed(ddef) - // } + def retypedMethod(ddef: DefDef, oldThis: Symbol, newThis: Symbol): Tree = { + oldClassOwner = oldThis + newClassOwner = newThis + invalidateAll(ddef.tparams) + mforeach(ddef.vparamss) { vdef => + invalidate(vdef) + vdef.tpe = null + } + ddef.symbol = NoSymbol + enterSym(context, ddef) + debuglog("remapping this of " + oldClassOwner + " to " + newClassOwner) + typed(ddef) + } /** Optionally cast this tree into some other type, if required. * Unless overridden, just returns the tree. diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index c17586335c..576a21fe31 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -233,10 +233,10 @@ trait Implicits { object HasMember { private val hasMemberCache = perRunCaches.newMap[Name, Type]() def apply(name: Name): Type = hasMemberCache.getOrElseUpdate(name, memberWildcardType(name, WildcardType)) - // def unapply(pt: Type): Option[Name] = pt match { - // case RefinedType(List(WildcardType), Scope(sym)) if sym.tpe == WildcardType => Some(sym.name) - // case _ => None - // } + def unapply(pt: Type): Option[Name] = pt match { + case RefinedType(List(WildcardType), Scope(sym)) if sym.tpe == WildcardType => Some(sym.name) + case _ => None + } } /** An extractor for types of the form ? { name: (? 
>: argtpe <: Any*)restp } @@ -1493,9 +1493,9 @@ object ImplicitsStats { val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount) val findMemberImpl = Statistics.newSubCounter(" of which in implicit", findMemberCount) val subtypeAppInfos = Statistics.newSubCounter(" of which in app impl", subtypeCount) - // val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount) + val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount) val implicitSearchCount = Statistics.newCounter ("#implicit searches", "typer") - // val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount) + val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount) val plausiblyCompatibleImplicits = Statistics.newSubCounter(" #plausibly compatible", implicitSearchCount) val matchingImplicits = Statistics.newSubCounter(" #matching", implicitSearchCount) diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index bd37f055b7..6aafd32237 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -29,43 +29,43 @@ trait MethodSynthesis { if (sym.isLazy) ValDef(sym, body) else DefDef(sym, body) - // def applyTypeInternal(tags: List[TT[_]]): Type = { - // val symbols = tags map compilerSymbolFromTag - // val container :: args = symbols - // val tparams = container.typeConstructor.typeParams + def applyTypeInternal(tags: List[TT[_]]): Type = { + val symbols = tags map compilerSymbolFromTag + val container :: args = symbols + val tparams = container.typeConstructor.typeParams - // // Conservative at present - if manifests were more usable this could do a lot more. - // // [Eugene to Paul] all right, they are now. what do you have in mind? - // require(symbols forall (_ ne NoSymbol), "Must find all tags: " + symbols) - // require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container) - // require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args) + // Conservative at present - if manifests were more usable this could do a lot more. + // [Eugene to Paul] all right, they are now. what do you have in mind? 
+ require(symbols forall (_ ne NoSymbol), "Must find all tags: " + symbols) + require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container) + require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args) - // appliedType(container, args map (_.tpe): _*) - // } + appliedType(container, args map (_.tpe): _*) + } - // def companionType[T](implicit ct: CT[T]) = - // rootMirror.getRequiredModule(ct.runtimeClass.getName).tpe + def companionType[T](implicit ct: CT[T]) = + rootMirror.getRequiredModule(ct.runtimeClass.getName).tpe // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]` - // def applyType[CC](implicit t1: TT[CC]): Type = - // applyTypeInternal(List(t1)) + def applyType[CC](implicit t1: TT[CC]): Type = + applyTypeInternal(List(t1)) - // def applyType[CC[X1], X1](implicit t1: TT[CC[_]], t2: TT[X1]): Type = - // applyTypeInternal(List(t1, t2)) + def applyType[CC[X1], X1](implicit t1: TT[CC[_]], t2: TT[X1]): Type = + applyTypeInternal(List(t1, t2)) - // def applyType[CC[X1, X2], X1, X2](implicit t1: TT[CC[_,_]], t2: TT[X1], t3: TT[X2]): Type = - // applyTypeInternal(List(t1, t2, t3)) + def applyType[CC[X1, X2], X1, X2](implicit t1: TT[CC[_,_]], t2: TT[X1], t3: TT[X2]): Type = + applyTypeInternal(List(t1, t2, t3)) - // def applyType[CC[X1, X2, X3], X1, X2, X3](implicit t1: TT[CC[_,_,_]], t2: TT[X1], t3: TT[X2], t4: TT[X3]): Type = - // applyTypeInternal(List(t1, t2, t3, t4)) + def applyType[CC[X1, X2, X3], X1, X2, X3](implicit t1: TT[CC[_,_,_]], t2: TT[X1], t3: TT[X2], t4: TT[X3]): Type = + applyTypeInternal(List(t1, t2, t3, t4)) - // def newMethodType[F](owner: Symbol)(implicit t: TT[F]): Type = { - // val fnSymbol = compilerSymbolFromTag(t) - // val formals = compilerTypeFromTag(t).typeArguments - // assert(fnSymbol isSubClass FunctionClass(formals.size - 1), (owner, t)) - // val params = owner newSyntheticValueParams formals - // MethodType(params, formals.last) - // } + def newMethodType[F](owner: Symbol)(implicit t: TT[F]): Type = { + val fnSymbol = compilerSymbolFromTag(t) + val formals = compilerTypeFromTag(t).typeArguments + assert(fnSymbol isSubClass FunctionClass(formals.size - 1), (owner, t)) + val params = owner newSyntheticValueParams formals + MethodType(params, formals.last) + } /** The annotations amongst those found on the original symbol which * should be propagated to this kind of accessor. @@ -118,8 +118,8 @@ trait MethodSynthesis { finishMethod(clazz.info.decls enter m, f) } - // private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree = - // cloneInternal(original, f, original.name) + private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree = + cloneInternal(original, f, original.name) def clazzMember(name: Name) = clazz.info nonPrivateMember name def typeInClazz(sym: Symbol) = clazz.thisType memberType sym @@ -128,11 +128,11 @@ trait MethodSynthesis { * the same type as `name` in clazz, and returns the tree to be * added to the template. 
*/ - // def overrideMethod(name: Name)(f: Symbol => Tree): Tree = - // overrideMethod(clazzMember(name))(f) + def overrideMethod(name: Name)(f: Symbol => Tree): Tree = + overrideMethod(clazzMember(name))(f) - // def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree = - // cloneInternal(original, sym => f(sym setFlag OVERRIDE)) + def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree = + cloneInternal(original, sym => f(sym setFlag OVERRIDE)) def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree = cloneInternal(original, f, nameFn(original.name)) @@ -311,7 +311,7 @@ trait MethodSynthesis { // Final methods to make the rest easier to reason about. final def mods = tree.mods final def basisSym = tree.symbol - // final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra + final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra } trait DerivedFromClassDef extends DerivedFromMemberDef { diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 407749f833..28bed0f1bf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1388,11 +1388,11 @@ trait Namers extends MethodSynthesis { tpe } - // def ensureParent(clazz: Symbol, parent: Symbol) = { - // val info0 = clazz.info - // val info1 = includeParent(info0, parent) - // if (info0 ne info1) clazz setInfo info1 - // } + def ensureParent(clazz: Symbol, parent: Symbol) = { + val info0 = clazz.info + val info1 = includeParent(info0, parent) + if (info0 ne info1) clazz setInfo info1 + } class LogTransitions[S](onEnter: S => String, onExit: S => String) { val enabled = settings.debug.value @@ -1585,12 +1585,12 @@ trait Namers extends MethodSynthesis { } } - // @deprecated("Use underlyingSymbol instead", "2.10.0") - // def underlying(member: Symbol): Symbol = underlyingSymbol(member) - // @deprecated("Use `companionSymbolOf` instead", "2.10.0") - // def companionClassOf(module: Symbol, ctx: Context): Symbol = companionSymbolOf(module, ctx) - // @deprecated("Use `companionSymbolOf` instead", "2.10.0") - // def companionModuleOf(clazz: Symbol, ctx: Context): Symbol = companionSymbolOf(clazz, ctx) + @deprecated("Use underlyingSymbol instead", "2.10.0") + def underlying(member: Symbol): Symbol = underlyingSymbol(member) + @deprecated("Use `companionSymbolOf` instead", "2.10.0") + def companionClassOf(module: Symbol, ctx: Context): Symbol = companionSymbolOf(module, ctx) + @deprecated("Use `companionSymbolOf` instead", "2.10.0") + def companionModuleOf(clazz: Symbol, ctx: Context): Symbol = companionSymbolOf(clazz, ctx) /** The companion class or companion module of `original`. * Calling .companionModule does not work for classes defined inside methods. 
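The Namers hunk above restores several @deprecated forwarders (`underlying`, `companionClassOf`, `companionModuleOf`) that simply delegate to their replacements. As a hedged, standalone illustration of that forwarding pattern only — the names below are invented for the sketch and are not part of the patch:

    object DeprecationForwarderSketch {
      // current entry point
      def currentName(x: Int): Int = x * 2

      // old entry point kept alive as a deprecated delegate, mirroring
      // the companionClassOf -> companionSymbolOf forwarding restored above
      @deprecated("Use currentName instead", "2.10.0")
      def oldName(x: Int): Int = currentName(x)
    }

    // usage: oldName(21) still returns 42, but emits a deprecation warning at compile time
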
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 1588380bca..252a738755 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -41,7 +41,7 @@ trait NamesDefaults { self: Analyzer => blockTyper: Typer ) { } - // val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null) + val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null) def nameOf(arg: Tree) = arg match { case AssignOrNamedArg(Ident(name), rhs) => Some(name) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 27e539abbf..7cb420d2dc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -551,50 +551,50 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // NOTE: it's an apply, not a select, since in general an extractor call may have multiple argument lists (including an implicit one) // that we need to preserve, so we supply the scrutinee as Ident(nme.SELECTOR_DUMMY), // and replace that dummy by a reference to the actual binder in translateExtractorPattern - // def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = { - // // TODO: can we rework the typer so we don't have to do all this twice? - // // undo rewrite performed in (5) of adapt - // val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun} - // val origSym = orig.symbol - // val extractor = unapplyMember(origSym.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe) - - // if((fun.tpe eq null) || fun.tpe.isError || (extractor eq NoSymbol)) { - // None - // } else { - // // this is a tricky balance: pos/t602.scala, pos/sudoku.scala, run/virtpatmat_alts.scala must all be happy - // // bypass typing at own risk: val extractorCall = Select(orig, extractor) setType caseClassApplyToUnapplyTp(fun.tpe) - // // can't always infer type arguments (pos/t602): - // /* case class Span[K <: Ordered[K]](low: Option[K]) { - // override def equals(x: Any): Boolean = x match { - // case Span((low0 @ _)) if low0 equals low => true - // } - // }*/ - // // so... leave undetermined type params floating around if we have to - // // (if we don't infer types, uninstantiated type params show up later: pos/sudoku.scala) - // // (see also run/virtpatmat_alts.scala) - // val savedUndets = context.undetparams - // val extractorCall = try { - // context.undetparams = Nil - // silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { - // case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError() - // case _ => - // // this fails to resolve overloading properly... 
- // // Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway - - // // patmatDebug("funtpe after = "+ fun.tpe.finalResultType) - // // patmatDebug("orig: "+(orig, orig.tpe)) - // val tgt = typed(orig, EXPRmode | QUALmode | POLYmode, HasMember(extractor.name)) // can't specify fun.tpe.finalResultType as the type for the extractor's arg, - // // as it may have been inferred incorrectly (see t602, where it's com.mosol.sl.Span[Any], instead of com.mosol.sl.Span[?K]) - // // patmatDebug("tgt = "+ (tgt, tgt.tpe)) - // val oper = typed(Select(tgt, extractor.name), EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType) - // // patmatDebug("oper: "+ (oper, oper.tpe)) - // Apply(oper, List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway - // } - // } finally context.undetparams = savedUndets - - // Some(this(extractorCall, args)) // TODO: simplify spliceApply? - // } - // } + def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = { + // TODO: can we rework the typer so we don't have to do all this twice? + // undo rewrite performed in (5) of adapt + val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun} + val origSym = orig.symbol + val extractor = unapplyMember(origSym.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe) + + if((fun.tpe eq null) || fun.tpe.isError || (extractor eq NoSymbol)) { + None + } else { + // this is a tricky balance: pos/t602.scala, pos/sudoku.scala, run/virtpatmat_alts.scala must all be happy + // bypass typing at own risk: val extractorCall = Select(orig, extractor) setType caseClassApplyToUnapplyTp(fun.tpe) + // can't always infer type arguments (pos/t602): + /* case class Span[K <: Ordered[K]](low: Option[K]) { + override def equals(x: Any): Boolean = x match { + case Span((low0 @ _)) if low0 equals low => true + } + }*/ + // so... leave undetermined type params floating around if we have to + // (if we don't infer types, uninstantiated type params show up later: pos/sudoku.scala) + // (see also run/virtpatmat_alts.scala) + val savedUndets = context.undetparams + val extractorCall = try { + context.undetparams = Nil + silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { + case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError() + case _ => + // this fails to resolve overloading properly... 
+ // Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway + + // patmatDebug("funtpe after = "+ fun.tpe.finalResultType) + // patmatDebug("orig: "+(orig, orig.tpe)) + val tgt = typed(orig, EXPRmode | QUALmode | POLYmode, HasMember(extractor.name)) // can't specify fun.tpe.finalResultType as the type for the extractor's arg, + // as it may have been inferred incorrectly (see t602, where it's com.mosol.sl.Span[Any], instead of com.mosol.sl.Span[?K]) + // patmatDebug("tgt = "+ (tgt, tgt.tpe)) + val oper = typed(Select(tgt, extractor.name), EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType) + // patmatDebug("oper: "+ (oper, oper.tpe)) + Apply(oper, List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway + } + } finally context.undetparams = savedUndets + + Some(this(extractorCall, args)) // TODO: simplify spliceApply? + } + } } abstract class ExtractorCall(val args: List[Tree]) { @@ -1413,10 +1413,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // local / context-free def _asInstanceOf(b: Symbol, tp: Type): Tree - // def _asInstanceOf(t: Tree, tp: Type): Tree + def _asInstanceOf(t: Tree, tp: Type): Tree def _equals(checker: Tree, binder: Symbol): Tree def _isInstanceOf(b: Symbol, tp: Type): Tree - // def and(a: Tree, b: Tree): Tree + def and(a: Tree, b: Tree): Tree def drop(tgt: Tree)(n: Int): Tree def index(tgt: Tree)(i: Int): Tree def mkZero(tp: Type): Tree @@ -1458,12 +1458,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL abstract class CommonCodegen extends AbsCodegen { import CODE._ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body) - // def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree) + def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree) def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i)) def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n)) def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya - // def and(a: Tree, b: Tree): Tree = a AND b + def and(a: Tree, b: Tree): Tree = a AND b // drop annotations generated by CPS plugin etc, since its annotationchecker rejects T @cps[U] <: Any // let's assume for now annotations don't affect casts, drop them there, and bring them back using the outer Typed tree @@ -1471,7 +1471,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL Typed(gen.mkAsInstanceOf(t, tp.withoutAnnotations, true, false), TypeTree() setType tp) // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly) - // def _asInstanceOf(t: Tree, tp: Type): Tree = if (t.tpe != NoType && t.isTyped && typesConform(t.tpe, tp)) t else mkCast(t, tp) + def _asInstanceOf(t: Tree, tp: Type): Tree = if (t.tpe != NoType && t.isTyped && typesConform(t.tpe, tp)) t else mkCast(t, tp) def _asInstanceOf(b: Symbol, tp: Type): Tree = if (typesConform(b.info, tp)) REF(b) else 
mkCast(REF(b), tp) def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false) // if (typesConform(b.info, tpX)) { patmatDebug("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE } @@ -2879,7 +2879,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment }.mkString("\n") - // def modelString(model: Model) = varAssignmentString(modelToVarAssignment(model)) + def modelString(model: Model) = varAssignmentString(modelToVarAssignment(model)) // return constructor call when the model is a true counter example // (the variables don't take into account type information derived from other variables, @@ -3538,7 +3538,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // for the catch-cases in a try/catch private object typeSwitchMaker extends SwitchMaker { val unchecked = false - // def switchableTpe(tp: Type) = true + def switchableTpe(tp: Type) = true val alternativesSupported = false // TODO: needs either back-end support of flattening of alternatives during typers val canJump = false diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index f1e6e48ccc..710adf5a9c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -142,13 +142,13 @@ abstract class TreeCheckers extends Analyzer { currentRun.units foreach (x => wrap(x)(check(x))) } - // def printingTypings[T](body: => T): T = { - // val saved = global.printTypings - // global.printTypings = true - // val result = body - // global.printTypings = saved - // result - // } + def printingTypings[T](body: => T): T = { + val saved = global.printTypings + global.printTypings = true + val result = body + global.printTypings = saved + result + } def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = { val unit0 = currentUnit currentRun.currentUnit = unit diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index ebeb8ef2c8..4f5291507e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -41,9 +41,9 @@ trait TypeDiagnostics { * nicer if Symbols, Types, and Trees all implemented some common interface * in which isErroneous and similar would be placed. */ - // def noErroneousTypes(tps: Type*) = tps forall (x => !x.isErroneous) - // def noErroneousSyms(syms: Symbol*) = syms forall (x => !x.isErroneous) - // def noErroneousTrees(trees: Tree*) = trees forall (x => !x.isErroneous) + def noErroneousTypes(tps: Type*) = tps forall (x => !x.isErroneous) + def noErroneousSyms(syms: Symbol*) = syms forall (x => !x.isErroneous) + def noErroneousTrees(trees: Tree*) = trees forall (x => !x.isErroneous) /** For errors which are artifacts of the implementation: such messages * indicate that the restriction may be lifted in the future. @@ -294,7 +294,7 @@ trait TypeDiagnostics { // distinguished from the other types in the same error message private val savedName = sym.name def restoreName() = sym.name = savedName - // def isAltered = sym.name != savedName + def isAltered = sym.name != savedName def modifyName(f: String => String) = sym setName newTypeName(f(sym.name.toString)) /** Prepend java.lang, scala., or Predef. 
if this type originated @@ -478,10 +478,10 @@ trait TypeDiagnostics { } super.traverse(t) } - // def isUnused(t: Tree): Boolean = ( - // if (t.symbol.isTerm) isUnusedTerm(t.symbol) - // else isUnusedType(t.symbol) - // ) + def isUnused(t: Tree): Boolean = ( + if (t.symbol.isTerm) isUnusedTerm(t.symbol) + else isUnusedType(t.symbol) + ) def isUnusedType(m: Symbol): Boolean = ( m.isType && !m.isTypeParameterOrSkolem // would be nice to improve this diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 0a295febf3..0a0ab53852 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -394,8 +394,8 @@ trait Typers extends Modes with Adaptations with Tags { * @param tree ... * @return ... */ - // def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T = - // check(NoSymbol, scope, pt, tree) + def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T = + check(NoSymbol, scope, pt, tree) private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = { this.owner = owner @@ -5343,7 +5343,7 @@ trait Typers extends Modes with Adaptations with Tags { def typedHigherKindedType(tree: Tree, mode: Int): Tree = typed(tree, HKmode, WildcardType) - // def typedHigherKindedType(tree: Tree): Tree = typedHigherKindedType(tree, NOmode) + def typedHigherKindedType(tree: Tree): Tree = typedHigherKindedType(tree, NOmode) /** Types a type constructor tree used in a new or supertype */ def typedTypeConstructor(tree: Tree, mode: Int): Tree = { @@ -5431,16 +5431,16 @@ trait Typers extends Modes with Adaptations with Tags { object TypersStats { import scala.reflect.internal.TypesStats._ - // import scala.reflect.internal.BaseTypeSeqsStats._ + import scala.reflect.internal.BaseTypeSeqsStats._ val typedIdentCount = Statistics.newCounter("#typechecked identifiers") val typedSelectCount = Statistics.newCounter("#typechecked selections") val typedApplyCount = Statistics.newCounter("#typechecked applications") val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount) val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount) val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount) - // val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount) - // val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount) - // val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount) + val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount) + val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount) + val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount) val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos) val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos) val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos) diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index 094b32673c..bf44b65406 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -51,11 +51,11 @@ trait Unapplies extends ast.TreeDSL * for n == 1, 
Some[T0] * else Some[Product[Ti]] */ - // def unapplyReturnTypeExpected(argsLength: Int) = argsLength match { - // case 0 => BooleanClass.tpe - // case 1 => optionType(WildcardType) - // case n => optionType(productType((List fill n)(WildcardType))) - // } + def unapplyReturnTypeExpected(argsLength: Int) = argsLength match { + case 0 => BooleanClass.tpe + case 1 => optionType(WildcardType) + case n => optionType(productType((List fill n)(WildcardType))) + } /** returns unapply or unapplySeq if available */ def unapplyMember(tp: Type): Symbol = (tp member nme.unapply) match { diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index f82c504eb4..0c49b9b8e7 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -50,20 +50,20 @@ object ClassPath { def map(cp: String, f: String => String): String = join(split(cp) map f: _*) /** Split the classpath, filter according to predicate, and reassemble. */ - // def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*) + def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*) /** Split the classpath and map them into Paths */ - // def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute) + def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute) /** Make all classpath components absolute. */ - // def makeAbsolute(cp: String): String = fromPaths(toPaths(cp): _*) + def makeAbsolute(cp: String): String = fromPaths(toPaths(cp): _*) /** Join the paths as a classpath */ - // def fromPaths(paths: Path*): String = join(paths map (_.path): _*) - // def fromURLs(urls: URL*): String = fromPaths(urls map (x => Path(x.getPath)) : _*) + def fromPaths(paths: Path*): String = join(paths map (_.path): _*) + def fromURLs(urls: URL*): String = fromPaths(urls map (x => Path(x.getPath)) : _*) /** Split the classpath and map them into URLs */ - // def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL) + def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL) /** Expand path and possibly expanding stars */ def expandPath(path: String, expandStar: Boolean = true): List[String] = @@ -124,12 +124,12 @@ object ClassPath { for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield newClassPath(entry) - // def classesAtAllURLS(path: String): List[ClassPath[T]] = - // (path split " ").toList flatMap classesAtURL + def classesAtAllURLS(path: String): List[ClassPath[T]] = + (path split " ").toList flatMap classesAtURL - // def classesAtURL(spec: String) = - // for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield - // newClassPath(location) + def classesAtURL(spec: String) = + for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield + newClassPath(location) def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] = classesInPathImpl(path, true).toIndexedSeq @@ -400,14 +400,14 @@ class JavaClassPath( context: JavaContext) extends MergedClassPath[AbstractFile](containers, context) { } -// object JavaClassPath { -// def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = { -// val containers = { -// for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield -// new DirectoryClassPath(f, context) -// } -// new JavaClassPath(containers.toIndexedSeq, context) -// } -// def fromURLs(urls: Seq[URL]): 
JavaClassPath = -// fromURLs(urls, ClassPath.DefaultJavaContext) -// } +object JavaClassPath { + def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = { + val containers = { + for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield + new DirectoryClassPath(f, context) + } + new JavaClassPath(containers.toIndexedSeq, context) + } + def fromURLs(urls: Seq[URL]): JavaClassPath = + fromURLs(urls, ClassPath.DefaultJavaContext) +} diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala index 2baab177b8..81c1b1d37a 100644 --- a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala +++ b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala @@ -21,7 +21,7 @@ import scala.collection.mutable.ListBuffer trait ParserUtil extends Parsers { protected implicit class ParserPlus[+T](underlying: Parser[T]) { def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b } - // def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b => a } + def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b => a } } } @@ -37,7 +37,7 @@ case class CommandLine( def withUnaryArgs(xs: List[String]) = copy(unaryArguments = xs) def withBinaryArgs(xs: List[String]) = copy(binaryArguments = xs) - // def originalArgs = args + def originalArgs = args def assumeBinary = true def enforceArity = true def onlyKnownOptions = false @@ -105,7 +105,7 @@ case class CommandLine( def isSet(arg: String) = args contains arg def get(arg: String) = argMap get arg - // def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse + def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse def apply(arg: String) = argMap(arg) override def toString() = "CommandLine(\n%s)\n" format (args map (" " + _ + "\n") mkString) @@ -115,7 +115,7 @@ object CommandLineParser extends RegexParsers with ParserUtil { override def skipWhitespace = false def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x)) - // def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _) + def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _) def escaped(ch: Char): Parser[String] = "\\" + ch def mkQuoted(ch: Char): Parser[String] = ( elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString) diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala index a056a97a7c..b7ed7903bc 100644 --- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala +++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala @@ -17,43 +17,46 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, /** produce a duplicate of this char array reader which starts reading * at current position, independent of what happens to original reader */ - // def dup: JavaCharArrayReader = clone().asInstanceOf[JavaCharArrayReader] + def dup: JavaCharArrayReader = clone().asInstanceOf[JavaCharArrayReader] /** layout constant */ - // val tabinc = 8 + val tabinc = 8 /** the line and column position of the current character */ var ch: Char = _ var bp = start - // var oldBp = -1 - // var oldCh: Char = _ + var oldBp = -1 + var oldCh: Char = _ //private var cline: Int = _ //private var ccol: Int = _ def cpos = bp var isUnicode: Boolean = _ - // var lastLineStartPos: Int = 0 - // var lineStartPos: Int = 0 - // var lastBlankLinePos: 
Int = 0 + var lastLineStartPos: Int = 0 + var lineStartPos: Int = 0 + var lastBlankLinePos: Int = 0 - // private var onlyBlankChars = false + private var onlyBlankChars = false //private var nextline = startline //private var nextcol = startcol private def markNewLine() { - // lastLineStartPos = lineStartPos - // if (onlyBlankChars) lastBlankLinePos = lineStartPos - // lineStartPos = bp - // onlyBlankChars = true + lastLineStartPos = lineStartPos + if (onlyBlankChars) lastBlankLinePos = lineStartPos + lineStartPos = bp + onlyBlankChars = true //nextline += 1 //nextcol = 1 } - def hasNext = bp < buf.length + def hasNext: Boolean = if (bp < buf.length) true + else { + false + } - // def last: Char = if (bp > start + 2) buf(bp - 2) else ' ' // XML literals + def last: Char = if (bp > start + 2) buf(bp - 2) else ' ' // XML literals def next(): Char = { //cline = nextline @@ -63,8 +66,8 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, ch = SU return SU // there is an endless stream of SU's at the end } - // oldBp = bp - // oldCh = ch + oldBp = bp + oldCh = ch ch = buf(bp) isUnicode = false bp = bp + 1 @@ -101,19 +104,19 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, isUnicode = true } case _ => - // if (ch > ' ') onlyBlankChars = false + if (ch > ' ') onlyBlankChars = false // nextcol += 1 } ch } - // def rewind() { - // if (oldBp == -1) throw new IllegalArgumentException - // bp = oldBp - // ch = oldCh - // oldBp = -1 - // oldCh = 'x' - // } + def rewind() { + if (oldBp == -1) throw new IllegalArgumentException + bp = oldBp + ch = oldCh + oldBp = -1 + oldCh = 'x' + } def copy: JavaCharArrayReader = new JavaCharArrayReader(buf, bp, /* nextcol, nextline, */ decodeUni, error) diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala index f09787ec4f..a2994966fd 100644 --- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala +++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala @@ -46,8 +46,8 @@ trait ScalaClassLoader extends JClassLoader { def create(path: String): AnyRef = tryToInitializeClass[AnyRef](path) map (_.newInstance()) orNull - // def constructorsOf[T <: AnyRef : ClassTag]: List[Constructor[T]] = - // classTag[T].runtimeClass.getConstructors.toList map (_.asInstanceOf[Constructor[T]]) + def constructorsOf[T <: AnyRef : ClassTag]: List[Constructor[T]] = + classTag[T].runtimeClass.getConstructors.toList map (_.asInstanceOf[Constructor[T]]) /** The actual bytes for a class file, or an empty array if it can't be found. */ def classBytes(className: String): Array[Byte] = classAsStream(className) match { @@ -75,10 +75,10 @@ trait ScalaClassLoader extends JClassLoader { /** A list comprised of this classloader followed by all its * (non-null) parent classloaders, if any. */ - // def loaderChain: List[ScalaClassLoader] = this :: (getParent match { - // case null => Nil - // case p => p.loaderChain - // }) + def loaderChain: List[ScalaClassLoader] = this :: (getParent match { + case null => Nil + case p => p.loaderChain + }) } /** Methods for obtaining various classloaders. 
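The restored `loaderChain` above builds the list consisting of the receiver classloader followed by its non-null parents. A minimal standalone sketch of the same walk, assuming nothing beyond the JDK `ClassLoader` API (the object and method names below are illustrative, not part of the patch):

    object LoaderChainSketch {
      // walk a classloader and its parents into a list,
      // stopping at the bootstrap loader (represented by null)
      def loaderChain(cl: ClassLoader): List[ClassLoader] =
        if (cl == null) Nil else cl :: loaderChain(cl.getParent)

      def main(args: Array[String]): Unit =
        loaderChain(Thread.currentThread.getContextClassLoader) foreach println
    }
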
@@ -99,35 +99,35 @@ object ScalaClassLoader { } def contextLoader = apply(Thread.currentThread.getContextClassLoader) def appLoader = apply(JClassLoader.getSystemClassLoader) - // def extLoader = apply(appLoader.getParent) - // def bootLoader = apply(null) - // def contextChain = loaderChain(contextLoader) + def extLoader = apply(appLoader.getParent) + def bootLoader = apply(null) + def contextChain = loaderChain(contextLoader) - // def pathToErasure[T: ClassTag] = pathToClass(classTag[T].runtimeClass) - // def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class" - // def locate[T: ClassTag] = contextLoader getResource pathToErasure[T] + def pathToErasure[T: ClassTag] = pathToClass(classTag[T].runtimeClass) + def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class" + def locate[T: ClassTag] = contextLoader getResource pathToErasure[T] /** Tries to guess the classpath by type matching the context classloader * and its parents, looking for any classloaders which will reveal their * classpath elements as urls. It it can't find any, creates a classpath * from the supplied string. */ - // def guessClassPathString(default: String = ""): String = { - // val classpathURLs = contextChain flatMap { - // case x: HasClassPath => x.classPathURLs - // case x: JURLClassLoader => x.getURLs.toSeq - // case _ => Nil - // } - // if (classpathURLs.isEmpty) default - // else JavaClassPath.fromURLs(classpathURLs).asClasspathString - // } - - // def loaderChain(head: JClassLoader) = { - // def loop(cl: JClassLoader): List[JClassLoader] = - // if (cl == null) Nil else cl :: loop(cl.getParent) - - // loop(head) - // } + def guessClassPathString(default: String = ""): String = { + val classpathURLs = contextChain flatMap { + case x: HasClassPath => x.classPathURLs + case x: JURLClassLoader => x.getURLs.toSeq + case _ => Nil + } + if (classpathURLs.isEmpty) default + else JavaClassPath.fromURLs(classpathURLs).asClasspathString + } + + def loaderChain(head: JClassLoader) = { + def loop(cl: JClassLoader): List[JClassLoader] = + if (cl == null) Nil else cl :: loop(cl.getParent) + + loop(head) + } def setContext(cl: JClassLoader) = Thread.currentThread.setContextClassLoader(cl) def savingContextLoader[T](body: => T): T = { @@ -143,14 +143,14 @@ object ScalaClassLoader { private var classloaderURLs: Seq[URL] = urls def classPathURLs: Seq[URL] = classloaderURLs - // def classPath: ClassPath[_] = JavaClassPath fromURLs classPathURLs + def classPath: ClassPath[_] = JavaClassPath fromURLs classPathURLs /** Override to widen to public */ override def addURL(url: URL) = { classloaderURLs :+= url super.addURL(url) } - // def toLongString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n") + def toLongString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n") } def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader = @@ -161,7 +161,7 @@ object ScalaClassLoader { fromURLs(urls) tryToLoadClass name isDefined /** Finding what jar a clazz or instance came from */ - // def origin(x: Any): Option[URL] = originOfClass(x.getClass) + def origin(x: Any): Option[URL] = originOfClass(x.getClass) def originOfClass(x: Class[_]): Option[URL] = Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation)) } diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala index a3b92aa2df..b103ae9cb0 100644 --- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala +++ 
b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala @@ -14,6 +14,6 @@ class SimpleTracer(out: PrintStream, enabled: Boolean = true) { if (enabled) out.println(msg+value) value } - // def withOutput(out: PrintStream) = new SimpleTracer(out, enabled) + def withOutput(out: PrintStream) = new SimpleTracer(out, enabled) def when(enabled: Boolean): SimpleTracer = new SimpleTracer(out, enabled) } diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala index b40d3094f3..e9dcaa8e16 100644 --- a/src/compiler/scala/tools/nsc/util/package.scala +++ b/src/compiler/scala/tools/nsc/util/package.scala @@ -18,15 +18,15 @@ package object util { type HashSet[T >: Null <: AnyRef] = scala.reflect.internal.util.HashSet[T] val HashSet = scala.reflect.internal.util.HashSet - // def onull[T](value: T, orElse: => T): T = if (value == null) orElse else value + def onull[T](value: T, orElse: => T): T = if (value == null) orElse else value /** Apply a function and return the passed value */ def returning[T](x: T)(f: T => Unit): T = { f(x) ; x } /** Frequency counter */ - // def freq[T](xs: Traversable[T]): Map[T, Int] = xs groupBy identity mapValues (_.size) + def freq[T](xs: Traversable[T]): Map[T, Int] = xs groupBy identity mapValues (_.size) - // def freqrank[T](xs: Traversable[(T, Int)]): List[(Int, T)] = xs.toList map (_.swap) sortBy (-_._1) + def freqrank[T](xs: Traversable[(T, Int)]): List[(Int, T)] = xs.toList map (_.swap) sortBy (-_._1) /** Execute code and then wait for all non-daemon Threads * created and begun during its execution to complete. @@ -57,14 +57,14 @@ package object util { /** Given a function and a block of code, evaluates code block, * calls function with milliseconds elapsed, and returns block result. */ - // def millisElapsedTo[T](f: Long => Unit)(body: => T): T = { - // val start = System.currentTimeMillis - // val result = body - // val end = System.currentTimeMillis + def millisElapsedTo[T](f: Long => Unit)(body: => T): T = { + val start = System.currentTimeMillis + val result = body + val end = System.currentTimeMillis - // f(end - start) - // result - // } + f(end - start) + result + } /** Generate a string using a routine that wants to write on a stream. 
*/ def stringFromWriter(writer: PrintWriter => Unit): String = { @@ -96,7 +96,7 @@ package object util { } lazy val trace = new SimpleTracer(System.out) - // lazy val errtrace = new SimpleTracer(System.err) + lazy val errtrace = new SimpleTracer(System.err) @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0") val StringOps = scala.reflect.internal.util.StringOps diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 4fc3fede16..f0c88eadea 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -392,8 +392,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => uttree } - // def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = - // compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds) + def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = + compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds) def parse(code: String): u.Tree = { if (compiler.settings.verbose.value) println("parsing "+code) diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala index b80c65caa6..4d94581cc1 100644 --- a/src/compiler/scala/tools/util/Javap.scala +++ b/src/compiler/scala/tools/util/Javap.scala @@ -107,8 +107,8 @@ object Javap { type FakeEnvironment = AnyRef type FakePrinter = AnyRef - // def apply(path: String): Unit = apply(Seq(path)) - // def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show()) + def apply(path: String): Unit = apply(Seq(path)) + def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show()) sealed trait JpResult { type ResultType diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index c88a8e13c4..6b0821edf3 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -22,11 +22,11 @@ object PathResolver { // security exceptions. import AccessControl._ - // def firstNonEmpty(xs: String*) = xs find (_ != "") getOrElse "" + def firstNonEmpty(xs: String*) = xs find (_ != "") getOrElse "" /** Map all classpath elements to absolute paths and reconstruct the classpath. */ - // def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path) + def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path) /** pretty print class path */ def ppcp(s: String) = split(s) match { @@ -45,7 +45,7 @@ object PathResolver { /** Environment variables which java pays attention to so it * seems we do as well. 
*/ - // def classPathEnv = envOrElse("CLASSPATH", "") + def classPathEnv = envOrElse("CLASSPATH", "") def sourcePathEnv = envOrElse("SOURCEPATH", "") def javaBootClassPath = propOrElse("sun.boot.class.path", searchForBootClasspath) @@ -85,7 +85,7 @@ object PathResolver { def scalaHome = Environment.scalaHome def scalaHomeDir = Directory(scalaHome) - // def scalaHomeExists = scalaHomeDir.isDirectory + def scalaHomeExists = scalaHomeDir.isDirectory def scalaLibDir = Directory(scalaHomeDir / "lib") def scalaClassesDir = Directory(scalaHomeDir / "classes") @@ -135,7 +135,7 @@ object PathResolver { ) } - def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = { // called from scalap + def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = { val s = new Settings() s.classpath.value = path new PathResolver(s, context) result @@ -160,7 +160,7 @@ object PathResolver { } } } -import PathResolver.{ Defaults, Environment, ppcp } +import PathResolver.{ Defaults, Environment, firstNonEmpty, ppcp } class PathResolver(settings: Settings, context: JavaContext) { def this(settings: Settings) = this(settings, if (settings.inline.value) new JavaContext else DefaultJavaContext) diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala index c196809da9..44a5b537b6 100644 --- a/src/continuations/library/scala/util/continuations/ControlContext.scala +++ b/src/continuations/library/scala/util/continuations/ControlContext.scala @@ -183,7 +183,7 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val // need filter or other functions? - final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = { // called by codegen from SelectiveCPSTransform + final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = { if (fun eq null) this else { @@ -209,7 +209,7 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val } } - final def mapFinally(f: () => Unit): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform + final def mapFinally(f: () => Unit): ControlContext[A,B,C] = { if (fun eq null) { try { f() diff --git a/src/continuations/library/scala/util/continuations/package.scala b/src/continuations/library/scala/util/continuations/package.scala index 573fae85e7..1b50956c93 100644 --- a/src/continuations/library/scala/util/continuations/package.scala +++ b/src/continuations/library/scala/util/continuations/package.scala @@ -166,7 +166,7 @@ package object continuations { throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled") } - def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = { // called in code generated by SelectiveCPSTransform + def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = { new ControlContext[A, B, B](null, x) } @@ -176,11 +176,11 @@ package object continuations { * a final result. 
* @see shift */ - def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform + def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = { new ControlContext((f:A=>B,g:Exception=>B) => fun(f), null.asInstanceOf[A]) } - def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform + def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = { ctx } diff --git a/src/detach/plugin/scala/tools/detach/Detach.scala b/src/detach/plugin/scala/tools/detach/Detach.scala index 499a97b761..73f6cde58c 100644 --- a/src/detach/plugin/scala/tools/detach/Detach.scala +++ b/src/detach/plugin/scala/tools/detach/Detach.scala @@ -73,7 +73,7 @@ abstract class Detach extends PluginComponent } private val serializableAnnotationInfo = - AnnotationInfo(requiredClass[scala.annotation.serializable].tpe, List(), List()) + AnnotationInfo(SerializableAttr.tpe, List(), List()) /* private val throwsAnnotationInfo = { val RemoteExceptionClass = definitions.getClass("java.rmi.RemoteException") diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala index bb0732dcc6..d73d99bc89 100644 --- a/src/partest/scala/tools/partest/CompilerTest.scala +++ b/src/partest/scala/tools/partest/CompilerTest.scala @@ -21,7 +21,7 @@ abstract class CompilerTest extends DirectTest { lazy val global: Global = newCompiler() lazy val units = compilationUnits(global)(sources: _ *) import global._ - import definitions.{ compilerTypeFromTag } + import definitions._ override def extraSettings = "-usejavacp -d " + testOutput.path @@ -32,6 +32,7 @@ abstract class CompilerTest extends DirectTest { def sources: List[String] = List(code) // Utility functions + class MkType(sym: Symbol) { def apply[M](implicit t: ru.TypeTag[M]): Type = if (sym eq NoSymbol) NoType diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala index 8d57e7e38d..2d6f61d0b1 100644 --- a/src/partest/scala/tools/partest/SecurityTest.scala +++ b/src/partest/scala/tools/partest/SecurityTest.scala @@ -11,8 +11,8 @@ import java.util._ abstract class SecurityTest extends App { def throwIt(x: Any) = throw new AccessControlException("" + x) - // def readPerm(p: PropertyPermission) = p.getActions contains "read" - // def writePerm(p: PropertyPermission) = p.getActions contains "write" + def readPerm(p: PropertyPermission) = p.getActions contains "read" + def writePerm(p: PropertyPermission) = p.getActions contains "write" def propertyCheck(p: PropertyPermission): Unit = throwIt(p) def check(perm: Permission): Unit = perm match { @@ -20,13 +20,13 @@ abstract class SecurityTest extends App { case _ => () } - // lazy val sm = new SecurityManager { - // // these two are the choke points for all permissions checks - // override def checkPermission(perm: Permission): Unit = check(perm) - // override def checkPermission(perm: Permission, context: Object): Unit = check(perm) - // } - // def securityOn(): Boolean = { - // try { System.setSecurityManager(sm) ; true } - // catch { case _: SecurityException => false } - // } + lazy val sm = new SecurityManager { + // these two are the choke points for all permissions checks + override def checkPermission(perm: Permission): Unit = check(perm) + override def checkPermission(perm: Permission, context: Object): Unit = check(perm) + } + def securityOn(): Boolean = { + try { 
System.setSecurityManager(sm) ; true } + catch { case _: SecurityException => false } + } } diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala index bd5dc39498..9bfd444180 100644 --- a/src/partest/scala/tools/partest/TestUtil.scala +++ b/src/partest/scala/tools/partest/TestUtil.scala @@ -24,13 +24,13 @@ trait TestUtil { } def nanos(body: => Unit): Long = alsoNanos(body)._1 - // def verifySpeed(body1: => Unit, body2: => Unit, acceptableMultiple: Double) = { - // val t1 = nanos(body1).toDouble - // val t2 = nanos(body2).toDouble - // val mult = if (t1 > t2) t1 / t2 else t2 / t1 + def verifySpeed(body1: => Unit, body2: => Unit, acceptableMultiple: Double) = { + val t1 = nanos(body1).toDouble + val t2 = nanos(body2).toDouble + val mult = if (t1 > t2) t1 / t2 else t2 / t1 - // assert(mult <= acceptableMultiple, "Performance difference too great: multiple = " + mult) - // } + assert(mult <= acceptableMultiple, "Performance difference too great: multiple = " + mult) + } def intercept[T <: Exception : ClassTag](code: => Unit): Unit = try { @@ -41,6 +41,6 @@ trait TestUtil { } } -// Used in tests. object TestUtil extends TestUtil { + } diff --git a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala index 18dd740208..8a284b313b 100644 --- a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala +++ b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala @@ -78,7 +78,6 @@ object Instrumentation { !t.className.startsWith("scala/util/DynamicVariable") } - // Used in tests. def printStatistics(stats: Statistics = getStatistics, filter: MethodCallTrace => Boolean = standardFilter): Unit = { val stats = getStatistics println("Method call statistics:") diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala index d8ae4b2403..75aed449a8 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala @@ -79,7 +79,7 @@ class ConsoleFileManager extends FileManager { testClassesDir = Path(testClasses.get).toCanonical.toDirectory NestUI.verbose("Running with classes in "+testClassesDir) - // latestFile = testClassesDir.parent / "bin" + latestFile = testClassesDir.parent / "bin" latestLibFile = testClassesDir / "library" latestActorsFile = testClassesDir / "library" / "actors" latestReflectFile = testClassesDir / "reflect" @@ -90,7 +90,7 @@ class ConsoleFileManager extends FileManager { else if (testBuild.isDefined) { val dir = Path(testBuild.get) NestUI.verbose("Running on "+dir) - // latestFile = dir / "bin" + latestFile = dir / "bin" latestLibFile = dir / "lib/scala-library.jar" latestActorsFile = dir / "lib/scala-actors.jar" latestReflectFile = dir / "lib/scala-reflect.jar" @@ -101,7 +101,7 @@ class ConsoleFileManager extends FileManager { else { def setupQuick() { NestUI.verbose("Running build/quick") - // latestFile = prefixFile("build/quick/bin") + latestFile = prefixFile("build/quick/bin") latestLibFile = prefixFile("build/quick/classes/library") latestActorsFile = prefixFile("build/quick/classes/library/actors") latestReflectFile = prefixFile("build/quick/classes/reflect") @@ -112,7 +112,7 @@ class ConsoleFileManager extends FileManager { def setupInst() { NestUI.verbose("Running dist (installed)") val p = testParent.getParentFile - // latestFile = prefixFileWith(p, "bin") + 
latestFile = prefixFileWith(p, "bin") latestLibFile = prefixFileWith(p, "lib/scala-library.jar") latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar") latestReflectFile = prefixFileWith(p, "lib/scala-reflect.jar") @@ -122,7 +122,7 @@ class ConsoleFileManager extends FileManager { def setupDist() { NestUI.verbose("Running dists/latest") - // latestFile = prefixFile("dists/latest/bin") + latestFile = prefixFile("dists/latest/bin") latestLibFile = prefixFile("dists/latest/lib/scala-library.jar") latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar") latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar") @@ -132,7 +132,7 @@ class ConsoleFileManager extends FileManager { def setupPack() { NestUI.verbose("Running build/pack") - // latestFile = prefixFile("build/pack/bin") + latestFile = prefixFile("build/pack/bin") latestLibFile = prefixFile("build/pack/lib/scala-library.jar") latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar") latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar") @@ -175,7 +175,7 @@ class ConsoleFileManager extends FileManager { var LATEST_PARTEST: String = "" var LATEST_ACTORS: String = "" - // var latestFile: File = _ + var latestFile: File = _ var latestLibFile: File = _ var latestActorsFile: File = _ var latestReflectFile: File = _ @@ -187,7 +187,7 @@ class ConsoleFileManager extends FileManager { // initialize above fields findLatest() - // var testFiles: List[io.Path] = Nil + var testFiles: List[io.Path] = Nil def getFiles(kind: String, cond: Path => Boolean): List[File] = { def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _) @@ -197,7 +197,9 @@ class ConsoleFileManager extends FileManager { if (dir.isDirectory) NestUI.verbose("look in %s for tests" format dir) else NestUI.failure("Directory '%s' not found" format dir) - val files = dir.list filterNot ignoreDir filter cond toList + val files = + if (testFiles.nonEmpty) testFiles filter (_.parent isSame dir) + else dir.list filterNot ignoreDir filter cond toList ( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile) } diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala index 35bce01684..d23ee81e4d 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala @@ -51,7 +51,7 @@ class ConsoleRunner extends DirectRunner { private val testSetArgs = testSets map ("--" + _.kind) private val testSetArgMap = testSetArgs zip testSets toMap - // def denotesTestSet(arg: String) = testSetArgs contains arg + def denotesTestSet(arg: String) = testSetArgs contains arg private def printVersion() { NestUI outline (versionMsg + "\n") } diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala index 9e2a34a34e..21fd314552 100644 --- a/src/partest/scala/tools/partest/nest/FileManager.scala +++ b/src/partest/scala/tools/partest/nest/FileManager.scala @@ -74,15 +74,15 @@ trait FileManager extends FileUtil { var timeout = PartestDefaults.timeout // how can 15 minutes not be enough? What are you doing, run/lisp.scala? // You complete in 11 seconds on my machine. - // var oneTestTimeout = 60 * 60 * 1000 + var oneTestTimeout = 60 * 60 * 1000 /** Only when --debug is given. 
*/ lazy val testTimings = new mutable.HashMap[String, Long] def recordTestTiming(name: String, milliseconds: Long) = synchronized { testTimings(name) = milliseconds } - // def showTestTimings() { - // testTimings.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %s".format(k, v)) } - // } + def showTestTimings() { + testTimings.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %s".format(k, v)) } + } def getLogFile(dir: File, fileBase: String, kind: String): File = new File(dir, fileBase + "-" + kind + ".log") diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala index 00aa27bd34..70db6d0ed1 100644 --- a/src/partest/scala/tools/partest/nest/NestUI.scala +++ b/src/partest/scala/tools/partest/nest/NestUI.scala @@ -54,9 +54,9 @@ object NestUI { } def warning(msg: String) = print(_warning + msg + _default) - // def warning(msg: String, wr: PrintWriter) = synchronized { - // wr.print(_warning + msg + _default) - // } + def warning(msg: String, wr: PrintWriter) = synchronized { + wr.print(_warning + msg + _default) + } def normal(msg: String) = print(_default + msg) def normal(msg: String, wr: PrintWriter) = synchronized { @@ -104,7 +104,7 @@ object NestUI { } var _verbose = false - // var _debug = false + var _debug = false def verbose(msg: String) { if (_verbose) { @@ -112,10 +112,10 @@ object NestUI { println(msg) } } - // def debug(msg: String) { - // if (isPartestDebug) { - // outline("debug: ") - // println(msg) - // } - // } + def debug(msg: String) { + if (isPartestDebug) { + outline("debug: ") + println(msg) + } + } } diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala index 4b0ed1f82a..5cb8589d66 100644 --- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala +++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala @@ -3,6 +3,8 @@ * @author Philipp Haller */ +// $Id$ + package scala.tools.partest package nest @@ -10,6 +12,7 @@ import scala.tools.nsc.Properties.{ setProp, propOrEmpty } import scala.tools.nsc.util.ClassPath import scala.tools.nsc.io import io.Path +import RunnerUtils._ import java.net.URLClassLoader /* This class is used to load an instance of DirectRunner using @@ -25,12 +28,6 @@ class ReflectiveRunner { // was used to start the runner. 
val sepRunnerClassName = "scala.tools.partest.nest.ConsoleRunner" - private def searchPath(option: String, as: List[String]): Option[String] = as match { - case `option` :: r :: _ => Some(r) - case _ :: rest => searchPath(option, rest) - case Nil => None - } - def main(args: String) { val argList = (args.split("\\s")).toList diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala index 2651088018..548c5abbd9 100644 --- a/src/partest/scala/tools/partest/nest/RunnerManager.scala +++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala @@ -291,10 +291,10 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP ((swr, wr)) } - // def fail(what: Any) = { - // NestUI.verbose("scalac: compilation of "+what+" failed\n") - // false - // } + def fail(what: Any) = { + NestUI.verbose("scalac: compilation of "+what+" failed\n") + false + } def diffCheck(testFile: File, diff: String) = { testDiff = diff testDiff == "" diff --git a/src/partest/scala/tools/partest/nest/RunnerUtils.scala b/src/partest/scala/tools/partest/nest/RunnerUtils.scala index 8a47989b7c..6707a9338a 100644 --- a/src/partest/scala/tools/partest/nest/RunnerUtils.scala +++ b/src/partest/scala/tools/partest/nest/RunnerUtils.scala @@ -1,29 +1,29 @@ -// /* NEST (New Scala Test) -// * Copyright 2007-2013 LAMP/EPFL -// * @author Philipp Haller -// */ +/* NEST (New Scala Test) + * Copyright 2007-2013 LAMP/EPFL + * @author Philipp Haller + */ -// // $Id$ +// $Id$ -// package scala.tools.partest -// package nest +package scala.tools.partest +package nest -// object RunnerUtils { -// def splitArgs(str: String) = str split "\\s" filterNot (_ == "") toList +object RunnerUtils { + def splitArgs(str: String) = str split "\\s" filterNot (_ == "") toList -// def searchPath(option: String, as: List[String]): Option[String] = as match { -// case `option` :: r :: _ => Some(r) -// case _ :: rest => searchPath(option, rest) -// case Nil => None -// } + def searchPath(option: String, as: List[String]): Option[String] = as match { + case `option` :: r :: _ => Some(r) + case _ :: rest => searchPath(option, rest) + case Nil => None + } -// def searchAndRemovePath(option: String, as: List[String]) = (as indexOf option) match { -// case -1 => (None, as) -// case idx => (Some(as(idx + 1)), (as take idx) ::: (as drop (idx + 2))) -// } + def searchAndRemovePath(option: String, as: List[String]) = (as indexOf option) match { + case -1 => (None, as) + case idx => (Some(as(idx + 1)), (as take idx) ::: (as drop (idx + 2))) + } -// def searchAndRemoveOption(option: String, as: List[String]) = (as indexOf option) match { -// case -1 => (false, as) -// case idx => (true, (as take idx) ::: (as drop (idx + 1))) -// } -// } + def searchAndRemoveOption(option: String, as: List[String]) = (as indexOf option) match { + case -1 => (false, as) + case idx => (true, (as take idx) ::: (as drop (idx + 1))) + } +} diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala index fa0c88a2b2..58cc7d5b0b 100644 --- a/src/partest/scala/tools/partest/package.scala +++ b/src/partest/scala/tools/partest/package.scala @@ -13,9 +13,9 @@ import java.util.concurrent.Callable package partest { class TestState { - // def isOk = this eq TestState.Ok - // def isFail = this eq TestState.Fail - // def isTimeout = this eq TestState.Timeout + def isOk = this eq TestState.Ok + def isFail = this eq TestState.Fail + def isTimeout = this eq 
TestState.Timeout } object TestState { val Ok = new TestState @@ -43,7 +43,7 @@ package object partest { def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body } - // def path2String(path: String) = file2String(new JFile(path)) + def path2String(path: String) = file2String(new JFile(path)) def file2String(f: JFile) = try SFile(f).slurp(scala.io.Codec.UTF8) catch { case _: FileNotFoundException => "" } diff --git a/src/partest/scala/tools/partest/utils/PrintMgr.scala b/src/partest/scala/tools/partest/utils/PrintMgr.scala index 56fdcda2ea..d25be87c1e 100644 --- a/src/partest/scala/tools/partest/utils/PrintMgr.scala +++ b/src/partest/scala/tools/partest/utils/PrintMgr.scala @@ -1,52 +1,52 @@ -// /* __ *\ -// ** ________ ___ / / ___ Scala Parallel Testing ** -// ** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -// ** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -// ** /____/\___/_/ |_/____/_/ | | ** -// ** |/ ** -// \* */ - -// // $Id$ - -// package scala.tools.partest -// package utils - -// /** -// * @author Thomas Hofer -// */ -// object PrintMgr { - -// val NONE = 0 -// val SOME = 1 -// val MANY = 2 - -// var outline = "" -// var success = "" -// var failure = "" -// var warning = "" -// var default = "" - -// def initialization(number: Int) = number match { -// case MANY => -// outline = Console.BOLD + Console.BLACK -// success = Console.BOLD + Console.GREEN -// failure = Console.BOLD + Console.RED -// warning = Console.BOLD + Console.YELLOW -// default = Console.RESET -// case SOME => -// outline = Console.BOLD + Console.BLACK -// success = Console.RESET -// failure = Console.BOLD + Console.BLACK -// warning = Console.BOLD + Console.BLACK -// default = Console.RESET -// case _ => -// } - -// def printOutline(msg: String) = print(outline + msg + default) - -// def printSuccess(msg: String) = print(success + msg + default) - -// def printFailure(msg: String) = print(failure + msg + default) - -// def printWarning(msg: String) = print(warning + msg + default) -// } +/* __ *\ +** ________ ___ / / ___ Scala Parallel Testing ** +** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +// $Id$ + +package scala.tools.partest +package utils + +/** + * @author Thomas Hofer + */ +object PrintMgr { + + val NONE = 0 + val SOME = 1 + val MANY = 2 + + var outline = "" + var success = "" + var failure = "" + var warning = "" + var default = "" + + def initialization(number: Int) = number match { + case MANY => + outline = Console.BOLD + Console.BLACK + success = Console.BOLD + Console.GREEN + failure = Console.BOLD + Console.RED + warning = Console.BOLD + Console.YELLOW + default = Console.RESET + case SOME => + outline = Console.BOLD + Console.BLACK + success = Console.RESET + failure = Console.BOLD + Console.BLACK + warning = Console.BOLD + Console.BLACK + default = Console.RESET + case _ => + } + + def printOutline(msg: String) = print(outline + msg + default) + + def printSuccess(msg: String) = print(success + msg + default) + + def printFailure(msg: String) = print(failure + msg + default) + + def printWarning(msg: String) = print(warning + msg + default) +} diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 82be2fa04a..7c12b5979d 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ 
b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -6,6 +6,7 @@ package scala.reflect package internal +import util._ import pickling.ByteCodecs import scala.annotation.tailrec import scala.collection.immutable.ListMap @@ -288,8 +289,8 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => def refsSymbol(sym: Symbol) = hasArgWhich(_.symbol == sym) /** Change all ident's with Symbol "from" to instead use symbol "to" */ - // def substIdentSyms(from: Symbol, to: Symbol) = - // AnnotationInfo(atp, args map (_ substituteSymbols (List(from), List(to))), assocs) setPos pos + def substIdentSyms(from: Symbol, to: Symbol) = + AnnotationInfo(atp, args map (_ substituteSymbols (List(from), List(to))), assocs) setPos pos def stringArg(index: Int) = constantAtIndex(index) map (_.stringValue) def intArg(index: Int) = constantAtIndex(index) map (_.intValue) @@ -324,14 +325,14 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => implicit val AnnotationTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo]) object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil) - + /** Extracts symbol of thrown exception from AnnotationInfo. - * + * * Supports both “old-style” `@throws(classOf[Exception])` * as well as “new-stye” `@throws[Exception]("cause")` annotations. */ object ThrownException { - def unapply(ann: AnnotationInfo): Option[Symbol] = + def unapply(ann: AnnotationInfo): Option[Symbol] = ann match { case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass => None diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala index 9da6ad652a..9f41f0336e 100644 --- a/src/reflect/scala/reflect/internal/BuildUtils.scala +++ b/src/reflect/scala/reflect/internal/BuildUtils.scala @@ -1,6 +1,8 @@ package scala.reflect package internal +import Flags._ + trait BuildUtils { self: SymbolTable => class BuildImpl extends BuildApi { diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala index c8af1732a9..b1462e9709 100644 --- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala +++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala @@ -381,10 +381,10 @@ object ClassfileConstants { def toScalaClassFlags(flags: Int): Long = FlagTranslation classFlags flags def toScalaFieldFlags(flags: Int): Long = FlagTranslation fieldFlags flags - // @deprecated("Use another method in this object", "2.10.0") - // def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = ( - // if (isClass) toScalaClassFlags(flags) - // else if (isField) toScalaFieldFlags(flags) - // else toScalaMethodFlags(flags) - // ) + @deprecated("Use another method in this object", "2.10.0") + def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = ( + if (isClass) toScalaClassFlags(flags) + else if (isField) toScalaFieldFlags(flags) + else toScalaMethodFlags(flags) + ) } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 8ecb189c27..1f751cde38 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -9,6 +9,7 @@ package internal import scala.annotation.{ switch, meta } import scala.collection.{ mutable, immutable } import Flags._ +import PartialFunction._ import scala.reflect.api.{Universe => ApiUniverse} trait Definitions extends 
api.StandardDefinitions { @@ -148,7 +149,7 @@ trait Definitions extends api.StandardDefinitions { FloatClass, DoubleClass ) - // def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol) + def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol) def ScalaPrimitiveValueClasses: List[ClassSymbol] = ScalaValueClasses } @@ -157,7 +158,7 @@ trait Definitions extends api.StandardDefinitions { def isDefinitionsInitialized = isInitialized // symbols related to packages - // var emptypackagescope: Scope = null //debug + var emptypackagescope: Scope = null //debug @deprecated("Moved to rootMirror.RootPackage", "2.10.0") val RootPackage: ModuleSymbol = rootMirror.RootPackage @@ -180,7 +181,7 @@ trait Definitions extends api.StandardDefinitions { lazy val RuntimePackage = getRequiredPackage("scala.runtime") lazy val RuntimePackageClass = RuntimePackage.moduleClass.asClass - // lazy val JavaLangEnumClass = requiredClass[java.lang.Enum[_]] + lazy val JavaLangEnumClass = requiredClass[java.lang.Enum[_]] // convenient one-argument parameter lists lazy val anyparam = List(AnyClass.tpe) @@ -261,7 +262,7 @@ trait Definitions extends api.StandardDefinitions { || tp =:= AnyRefClass.tpe ) /** Does this type have a parent which is none of Any, AnyVal, or AnyRef? */ - // def hasNonTrivialParent(tp: Type) = tp.parents exists (t => !isTrivialTopType(tp)) + def hasNonTrivialParent(tp: Type) = tp.parents exists (t => !isTrivialTopType(tp)) private def fixupAsAnyTrait(tpe: Type): Type = tpe match { case ClassInfoType(parents, decls, clazz) => @@ -357,7 +358,7 @@ trait Definitions extends api.StandardDefinitions { lazy val UnqualifiedOwners = UnqualifiedModules.toSet ++ UnqualifiedModules.map(_.moduleClass) lazy val PredefModule = requiredModule[scala.Predef.type] - // lazy val PredefModuleClass = PredefModule.moduleClass + lazy val PredefModuleClass = PredefModule.moduleClass def Predef_classOf = getMemberMethod(PredefModule, nme.classOf) // def Predef_identity = getMemberMethod(PredefModule, nme.identity) @@ -381,7 +382,7 @@ trait Definitions extends api.StandardDefinitions { lazy val SpecializableModule = requiredModule[Specializable] lazy val GroupOfSpecializable = getMemberClass(SpecializableModule, tpnme.Group) - // lazy val ConsoleModule = requiredModule[scala.Console.type] + lazy val ConsoleModule = requiredModule[scala.Console.type] lazy val ScalaRunTimeModule = requiredModule[scala.runtime.ScalaRunTime.type] lazy val SymbolModule = requiredModule[scala.Symbol.type] lazy val Symbol_apply = getMemberMethod(SymbolModule, nme.apply) @@ -391,9 +392,9 @@ trait Definitions extends api.StandardDefinitions { def arrayLengthMethod = getMemberMethod(ScalaRunTimeModule, nme.array_length) def arrayCloneMethod = getMemberMethod(ScalaRunTimeModule, nme.array_clone) def ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible) - // def scalaRuntimeSameElements = getMemberMethod(ScalaRunTimeModule, nme.sameElements) + def scalaRuntimeSameElements = getMemberMethod(ScalaRunTimeModule, nme.sameElements) def arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass) - // def arrayElementClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayElementClass) + def arrayElementClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayElementClass) // classes with special meanings lazy val StringAddClass = requiredClass[scala.runtime.StringAdd] @@ -456,10 +457,10 @@ trait Definitions extends api.StandardDefinitions { case _ => tp } - 
// def isPrimitiveArray(tp: Type) = tp match { - // case TypeRef(_, ArrayClass, arg :: Nil) => isPrimitiveValueClass(arg.typeSymbol) - // case _ => false - // } + def isPrimitiveArray(tp: Type) = tp match { + case TypeRef(_, ArrayClass, arg :: Nil) => isPrimitiveValueClass(arg.typeSymbol) + case _ => false + } def isReferenceArray(tp: Type) = tp match { case TypeRef(_, ArrayClass, arg :: Nil) => arg <:< AnyRefClass.tpe case _ => false @@ -469,11 +470,11 @@ trait Definitions extends api.StandardDefinitions { case _ => false } - // lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy") + lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy") // collections classes lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]] - // lazy val IterableClass = requiredClass[scala.collection.Iterable[_]] + lazy val IterableClass = requiredClass[scala.collection.Iterable[_]] lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]] lazy val ListClass = requiredClass[scala.collection.immutable.List[_]] lazy val SeqClass = requiredClass[scala.collection.Seq[_]] @@ -484,8 +485,8 @@ trait Definitions extends api.StandardDefinitions { lazy val List_apply = getMemberMethod(ListModule, nme.apply) lazy val NilModule = requiredModule[scala.collection.immutable.Nil.type] lazy val SeqModule = requiredModule[scala.collection.Seq.type] - // lazy val IteratorModule = requiredModule[scala.collection.Iterator.type] - // lazy val Iterator_apply = getMemberMethod(IteratorModule, nme.apply) + lazy val IteratorModule = requiredModule[scala.collection.Iterator.type] + lazy val Iterator_apply = getMemberMethod(IteratorModule, nme.apply) // arrays and their members lazy val ArrayModule = requiredModule[scala.Array.type] @@ -500,9 +501,9 @@ trait Definitions extends api.StandardDefinitions { // reflection / structural types lazy val SoftReferenceClass = requiredClass[java.lang.ref.SoftReference[_]] - // lazy val WeakReferenceClass = requiredClass[java.lang.ref.WeakReference[_]] + lazy val WeakReferenceClass = requiredClass[java.lang.ref.WeakReference[_]] lazy val MethodClass = getClassByName(sn.MethodAsObject) - // def methodClass_setAccessible = getMemberMethod(MethodClass, nme.setAccessible) + def methodClass_setAccessible = getMemberMethod(MethodClass, nme.setAccessible) lazy val EmptyMethodCacheClass = requiredClass[scala.runtime.EmptyMethodCache] lazy val MethodCacheClass = requiredClass[scala.runtime.MethodCache] def methodCache_find = getMemberMethod(MethodCacheClass, nme.find_) @@ -526,7 +527,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ExprClass = if (ExprsClass != NoSymbol) getMemberClass(ExprsClass, tpnme.Expr) else NoSymbol def ExprSplice = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.splice) else NoSymbol def ExprValue = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.value) else NoSymbol - // lazy val ExprModule = if (ExprsClass != NoSymbol) getMemberModule(ExprsClass, nme.Expr) else NoSymbol + lazy val ExprModule = if (ExprsClass != NoSymbol) getMemberModule(ExprsClass, nme.Expr) else NoSymbol lazy val ClassTagModule = requiredModule[scala.reflect.ClassTag[_]] lazy val ClassTagClass = requiredClass[scala.reflect.ClassTag[_]] @@ -552,7 +553,7 @@ trait Definitions extends api.StandardDefinitions { def MacroContextPrefix = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.prefix) else NoSymbol def MacroContextPrefixType = if (MacroContextClass != NoSymbol) 
getTypeMember(MacroContextClass, tpnme.PrefixType) else NoSymbol def MacroContextUniverse = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.universe) else NoSymbol - // def MacroContextMirror = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.mirror) else NoSymbol + def MacroContextMirror = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.mirror) else NoSymbol lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl] lazy val StringContextClass = requiredClass[scala.StringContext] @@ -584,12 +585,12 @@ trait Definitions extends api.StandardDefinitions { // The given class has a main method. def hasJavaMainMethod(sym: Symbol): Boolean = (sym.tpe member nme.main).alternatives exists isJavaMainMethod - // def hasJavaMainMethod(path: String): Boolean = - // hasJavaMainMethod(getModuleIfDefined(path)) + def hasJavaMainMethod(path: String): Boolean = + hasJavaMainMethod(getModuleIfDefined(path)) - // def isOptionType(tp: Type) = tp.typeSymbol isSubClass OptionClass - // def isSomeType(tp: Type) = tp.typeSymbol eq SomeClass - // def isNoneType(tp: Type) = tp.typeSymbol eq NoneModule + def isOptionType(tp: Type) = tp.typeSymbol isSubClass OptionClass + def isSomeType(tp: Type) = tp.typeSymbol eq SomeClass + def isNoneType(tp: Type) = tp.typeSymbol eq NoneModule // Product, Tuple, Function, AbstractFunction private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[ClassSymbol] = { @@ -612,7 +613,7 @@ trait Definitions extends api.StandardDefinitions { /** Creators for TupleN, ProductN, FunctionN. */ def tupleType(elems: List[Type]) = aritySpecificType(TupleClass, elems) - // def productType(elems: List[Type]) = aritySpecificType(ProductClass, elems) + def productType(elems: List[Type]) = aritySpecificType(ProductClass, elems) def functionType(formals: List[Type], restpe: Type) = aritySpecificType(FunctionClass, formals, restpe) def abstractFunctionType(formals: List[Type], restpe: Type) = aritySpecificType(AbstractFunctionClass, formals, restpe) @@ -631,10 +632,10 @@ trait Definitions extends api.StandardDefinitions { else nme.genericWrapArray } - // @deprecated("Use isTupleType", "2.10.0") - // def isTupleTypeOrSubtype(tp: Type) = isTupleType(tp) + @deprecated("Use isTupleType", "2.10.0") + def isTupleTypeOrSubtype(tp: Type) = isTupleType(tp) - // def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j)) + def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j)) // NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intensional? 
def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym) def isProductNClass(sym: Symbol) = ProductClass contains sym @@ -683,10 +684,10 @@ trait Definitions extends api.StandardDefinitions { // def Product_productElementName = getMemberMethod(ProductRootClass, nme.productElementName) def productProj(z:Symbol, j: Int): TermSymbol = getMemberValue(z, nme.productAccessorName(j)) - // def productProj(n: Int, j: Int): TermSymbol = productProj(ProductClass(n), j) + def productProj(n: Int, j: Int): TermSymbol = productProj(ProductClass(n), j) /** returns true if this type is exactly ProductN[T1,...,Tn], not some subclass */ - // def isExactProductType(tp: Type): Boolean = isProductNClass(tp.typeSymbol) + def isExactProductType(tp: Type): Boolean = isProductNClass(tp.typeSymbol) /** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */ def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNClass match { @@ -704,7 +705,7 @@ trait Definitions extends api.StandardDefinitions { case tp => tp } - // def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply) + def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply) def abstractFunctionForFunctionType(tp: Type) = if (isFunctionType(tp)) abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last) @@ -723,7 +724,7 @@ trait Definitions extends api.StandardDefinitions { (sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass) } - // def isSeqType(tp: Type) = elementType(SeqClass, tp.normalize) != NoType + def isSeqType(tp: Type) = elementType(SeqClass, tp.normalize) != NoType def elementType(container: Symbol, tp: Type): Type = tp match { case TypeRef(_, `container`, arg :: Nil) => arg @@ -737,10 +738,10 @@ trait Definitions extends api.StandardDefinitions { def optionType(tp: Type) = appliedType(OptionClass, tp) def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg) def seqType(arg: Type) = appliedType(SeqClass, arg) - // def someType(tp: Type) = appliedType(SomeClass, tp) + def someType(tp: Type) = appliedType(SomeClass, tp) - // def StringArray = arrayType(StringClass.tpe) - // lazy val ObjectArray = arrayType(ObjectClass.tpe) + def StringArray = arrayType(StringClass.tpe) + lazy val ObjectArray = arrayType(ObjectClass.tpe) def ClassType(arg: Type) = if (phase.erasedTypes || forMSIL) ClassClass.tpe @@ -753,8 +754,8 @@ trait Definitions extends api.StandardDefinitions { // - .linkedClassOfClass: the ClassSymbol of the enumeration (class E) sym.owner.linkedClassOfClass.tpe - // def vmClassType(arg: Type): Type = ClassType(arg) - // def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!! + def vmClassType(arg: Type): Type = ClassType(arg) + def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!! /** Given a class symbol C with type parameters T1, T2, ... Tn * which have upper/lower bounds LB1/UB1, LB1/UB2, ..., LBn/UBn, @@ -767,14 +768,14 @@ trait Definitions extends api.StandardDefinitions { /** Given type U, creates a Type representing Class[_ <: U]. */ - // def boundedClassType(upperBound: Type) = - // appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound)) + def boundedClassType(upperBound: Type) = + appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound)) /** To avoid unchecked warnings on polymorphic classes, translate * a Foo[T] into a Foo[_] for use in the pattern matcher. 
*/ - // @deprecated("Use classExistentialType", "2.10.0") - // def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz) + @deprecated("Use classExistentialType", "2.10.0") + def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz) // // .NET backend @@ -782,7 +783,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ComparatorClass = getRequiredClass("scala.runtime.Comparator") // System.ValueType - // lazy val ValueTypeClass: ClassSymbol = getClassByName(sn.ValueType) + lazy val ValueTypeClass: ClassSymbol = getClassByName(sn.ValueType) // System.MulticastDelegate lazy val DelegateClass: ClassSymbol = getClassByName(sn.Delegate) var Delegate_scalaCallers: List[Symbol] = List() // Syncnote: No protection necessary yet as only for .NET where reflection is not supported. @@ -875,11 +876,11 @@ trait Definitions extends api.StandardDefinitions { x :: removeRedundantObjects(xs) } /** Order a list of types with non-trait classes before others. */ - // def classesFirst(tps: List[Type]): List[Type] = { - // val (classes, others) = tps partition (t => t.typeSymbol.isClass && !t.typeSymbol.isTrait) - // if (classes.isEmpty || others.isEmpty || (tps startsWith classes)) tps - // else classes ::: others - // } + def classesFirst(tps: List[Type]): List[Type] = { + val (classes, others) = tps partition (t => t.typeSymbol.isClass && !t.typeSymbol.isTrait) + if (classes.isEmpty || others.isEmpty || (tps startsWith classes)) tps + else classes ::: others + } /** The following transformations applied to a list of parents. * If any parent is a class/trait, all parents which normalize to * Object are discarded. Otherwise, all parents which normalize @@ -907,10 +908,10 @@ trait Definitions extends api.StandardDefinitions { def parentsString(parents: List[Type]) = normalizedParents(parents) mkString " with " - // def typeParamsString(tp: Type) = tp match { - // case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]") - // case _ => "" - // } + def typeParamsString(tp: Type) = tp match { + case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]") + case _ => "" + } def valueParamsString(tp: Type) = tp match { case MethodType(params, _) => params map (_.defString) mkString ("(", ",", ")") case _ => "" @@ -947,12 +948,12 @@ trait Definitions extends api.StandardDefinitions { lazy val BoxedNumberClass = getClassByName(sn.BoxedNumber) lazy val BoxedCharacterClass = getClassByName(sn.BoxedCharacter) lazy val BoxedBooleanClass = getClassByName(sn.BoxedBoolean) - // lazy val BoxedByteClass = requiredClass[java.lang.Byte] - // lazy val BoxedShortClass = requiredClass[java.lang.Short] - // lazy val BoxedIntClass = requiredClass[java.lang.Integer] - // lazy val BoxedLongClass = requiredClass[java.lang.Long] - // lazy val BoxedFloatClass = requiredClass[java.lang.Float] - // lazy val BoxedDoubleClass = requiredClass[java.lang.Double] + lazy val BoxedByteClass = requiredClass[java.lang.Byte] + lazy val BoxedShortClass = requiredClass[java.lang.Short] + lazy val BoxedIntClass = requiredClass[java.lang.Integer] + lazy val BoxedLongClass = requiredClass[java.lang.Long] + lazy val BoxedFloatClass = requiredClass[java.lang.Float] + lazy val BoxedDoubleClass = requiredClass[java.lang.Double] lazy val Boxes_isNumberOrBool = getDecl(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean) lazy val Boxes_isNumber = getDecl(BoxesRunTimeClass, nme.isBoxedNumber) @@ -973,7 +974,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ImplicitNotFoundClass = 
requiredClass[scala.annotation.implicitNotFound] lazy val MigrationAnnotationClass = requiredClass[scala.annotation.migration] lazy val ScalaStrictFPAttr = requiredClass[scala.annotation.strictfp] - // lazy val SerializableAttr = requiredClass[scala.annotation.serializable] // @serializable is deprecated + lazy val SerializableAttr = requiredClass[scala.annotation.serializable] // @serializable is deprecated lazy val SwitchClass = requiredClass[scala.annotation.switch] lazy val TailrecClass = requiredClass[scala.annotation.tailrec] lazy val VarargsClass = requiredClass[scala.annotation.varargs] @@ -1008,7 +1009,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ParamTargetClass = requiredClass[meta.param] lazy val SetterTargetClass = requiredClass[meta.setter] lazy val ClassTargetClass = requiredClass[meta.companionClass] - // lazy val ObjectTargetClass = requiredClass[meta.companionObject] + lazy val ObjectTargetClass = requiredClass[meta.companionObject] lazy val MethodTargetClass = requiredClass[meta.companionMethod] // TODO: module, moduleClass? package, packageObject? lazy val LanguageFeatureAnnot = requiredClass[meta.languageFeature] @@ -1053,7 +1054,7 @@ trait Definitions extends api.StandardDefinitions { def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol = getMember(owner, newTypeName(name)) def termMember(owner: Symbol, name: String): Symbol = owner.info.member(newTermName(name)) - // def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name)) + def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name)) def findNamedMember(fullName: Name, root: Symbol): Symbol = { val segs = nme.segments(fullName.toString, fullName.isTermName) @@ -1120,8 +1121,8 @@ trait Definitions extends api.StandardDefinitions { def getDeclIfDefined(owner: Symbol, name: Name): Symbol = owner.info.nonPrivateDecl(name) - // def packageExists(packageName: String): Boolean = - // getModuleIfDefined(packageName).isPackage + def packageExists(packageName: String): Boolean = + getModuleIfDefined(packageName).isPackage private def newAlias(owner: Symbol, name: TypeName, alias: Type): AliasTypeSymbol = owner.newAliasType(name) setInfoAndEnter alias @@ -1154,7 +1155,7 @@ trait Definitions extends api.StandardDefinitions { newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head))) } - // lazy val boxedClassValues = boxedClass.values.toSet[Symbol] + lazy val boxedClassValues = boxedClass.values.toSet[Symbol] lazy val isUnbox = unboxMethod.values.toSet[Symbol] lazy val isBox = boxMethod.values.toSet[Symbol] @@ -1214,8 +1215,8 @@ trait Definitions extends api.StandardDefinitions { /** Is symbol a value class? */ def isPrimitiveValueClass(sym: Symbol) = ScalaValueClasses contains sym - // def isNonUnitValueClass(sym: Symbol) = isPrimitiveValueClass(sym) && (sym != UnitClass) - // def isSpecializableClass(sym: Symbol) = isPrimitiveValueClass(sym) || (sym == AnyRefClass) + def isNonUnitValueClass(sym: Symbol) = isPrimitiveValueClass(sym) && (sym != UnitClass) + def isSpecializableClass(sym: Symbol) = isPrimitiveValueClass(sym) || (sym == AnyRefClass) def isPrimitiveValueType(tp: Type) = isPrimitiveValueClass(tp.typeSymbol) /** Is symbol a boxed value class, e.g. java.lang.Integer? 
*/ diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala index 2a0fe9d19a..59c027868e 100644 --- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala +++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala @@ -7,6 +7,7 @@ package scala.reflect package internal import scala.collection.{ mutable, immutable } +import util._ /** The name of this trait defines the eventual intent better than * it does the initial contents. diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index 592523c639..ea8d6078ff 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ b/src/reflect/scala/reflect/internal/Importers.scala @@ -316,7 +316,7 @@ trait Importers extends api.Importers { self: SymbolTable => def importName(name: from.Name): Name = if (name.isTypeName) newTypeName(name.toString) else newTermName(name.toString) def importTypeName(name: from.TypeName): TypeName = importName(name).toTypeName - // def importTermName(name: from.TermName): TermName = importName(name).toTermName + def importTermName(name: from.TermName): TermName = importName(name).toTermName def importModifiers(mods: from.Modifiers): Modifiers = new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 7d98823c7e..9193674f3e 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -135,8 +135,8 @@ trait Names extends api.Names with LowPriorityNames { def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName = newTermName(bs, offset, len).toTypeName - // def nameChars: Array[Char] = chrs - // @deprecated("", "2.9.0") def view(s: String): TermName = newTermName(s) + def nameChars: Array[Char] = chrs + @deprecated("", "2.9.0") def view(s: String): TermName = newTermName(s) // Classes ---------------------------------------------------------------------- @@ -186,22 +186,22 @@ trait Names extends api.Names with LowPriorityNames { scala.compat.Platform.arraycopy(chrs, index, cs, offset, len) /** @return the ascii representation of this name */ - // final def toChars: Array[Char] = { - // val cs = new Array[Char](len) - // copyChars(cs, 0) - // cs - // } + final def toChars: Array[Char] = { + val cs = new Array[Char](len) + copyChars(cs, 0) + cs + } /** Write to UTF8 representation of this name to given character array. * Start copying to index `to`. Return index of next free byte in array. * Array must have enough remaining space for all bytes * (i.e. maximally 3*length bytes). 
*/ - // final def copyUTF8(bs: Array[Byte], offset: Int): Int = { - // val bytes = Codec.toUTF8(chrs, index, len) - // scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length) - // offset + bytes.length - // } + final def copyUTF8(bs: Array[Byte], offset: Int): Int = { + val bytes = Codec.toUTF8(chrs, index, len) + scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length) + offset + bytes.length + } /** @return the hash value of this name */ final override def hashCode(): Int = index @@ -283,7 +283,7 @@ trait Names extends api.Names with LowPriorityNames { */ final def lastPos(c: Char): Int = lastPos(c, len - 1) - // final def lastPos(s: String): Int = lastPos(s, len - s.length) + final def lastPos(s: String): Int = lastPos(s, len - s.length) /** Returns the index of the last occurrence of char c in this * name from start, -1 if not found. @@ -305,18 +305,18 @@ trait Names extends api.Names with LowPriorityNames { * @param start ... * @return the index of the last occurrence of s */ - // final def lastPos(s: String, start: Int): Int = { - // var i = lastPos(s.charAt(0), start) - // while (i >= 0) { - // var j = 1; - // while (s.charAt(j) == chrs(index + i + j)) { - // j += 1 - // if (j == s.length()) return i; - // } - // i = lastPos(s.charAt(0), i - 1) - // } - // -s.length() - // } + final def lastPos(s: String, start: Int): Int = { + var i = lastPos(s.charAt(0), start) + while (i >= 0) { + var j = 1; + while (s.charAt(j) == chrs(index + i + j)) { + j += 1 + if (j == s.length()) return i; + } + i = lastPos(s.charAt(0), i - 1) + } + -s.length() + } /** Does this name start with prefix? */ final def startsWith(prefix: Name): Boolean = startsWith(prefix, 0) @@ -379,7 +379,7 @@ trait Names extends api.Names with LowPriorityNames { if (idx == length) -1 else idx } def lastIndexOf(ch: Char) = lastPos(ch) - // def lastIndexOf(ch: Char, fromIndex: Int) = lastPos(ch, fromIndex) + def lastIndexOf(ch: Char, fromIndex: Int) = lastPos(ch, fromIndex) /** Replace all occurrences of `from` by `to` in * name; result is always a term name. @@ -428,12 +428,12 @@ trait Names extends api.Names with LowPriorityNames { def append(ch: Char) = newName("" + this + ch) def append(suffix: String) = newName("" + this + suffix) def append(suffix: Name) = newName("" + this + suffix) - // def prepend(ch: Char) = newName("" + ch + this) + def prepend(ch: Char) = newName("" + ch + this) def prepend(prefix: String) = newName("" + prefix + this) - // def prepend(prefix: Name) = newName("" + prefix + this) + def prepend(prefix: Name) = newName("" + prefix + this) def decodedName: ThisNameType = newName(decode) - // def isOperatorName: Boolean = decode != toString + def isOperatorName: Boolean = decode != toString def longString: String = nameKind + " " + decode def debugString = { val s = decode ; if (isTypeName) s + "!" 
else s } } @@ -445,7 +445,7 @@ trait Names extends api.Names with LowPriorityNames { def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name def dropRight(n: Int): T = name.subName(0, name.length - n).asInstanceOf[T] def drop(n: Int): T = name.subName(n, name.length).asInstanceOf[T] - // def nonEmpty: Boolean = name.length > 0 + def nonEmpty: Boolean = name.length > 0 } implicit val NameTag = ClassTag[Name](classOf[Name]) @@ -489,7 +489,7 @@ trait Names extends api.Names with LowPriorityNames { type ThisNameType = TermName protected[this] def thisName: TermName = this - val next: TermName = termHashtable(hash) + var next: TermName = termHashtable(hash) termHashtable(hash) = this def isTermName: Boolean = true def isTypeName: Boolean = false @@ -518,7 +518,7 @@ trait Names extends api.Names with LowPriorityNames { type ThisNameType = TypeName protected[this] def thisName: TypeName = this - val next: TypeName = typeHashtable(hash) + var next: TypeName = typeHashtable(hash) typeHashtable(hash) = this def isTermName: Boolean = false def isTypeName: Boolean = true diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 58e3aff46f..31f9cb7e59 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -475,8 +475,8 @@ trait Printers extends api.Printers { self: SymbolTable => } def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer) - // def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream)) - // def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter)) + def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream)) + def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter)) // provides footnotes for types and mirrors import scala.collection.mutable.{Map, WeakHashMap, SortedSet} diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 31907772cb..950e30dbc5 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -343,7 +343,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => */ def iterator: Iterator[Symbol] = toList.iterator - // def containsSymbol(s: Symbol) = lookupAll(s.name) contains s + def containsSymbol(s: Symbol) = lookupAll(s.name) contains s override def foreach[U](p: Symbol => U): Unit = toList foreach p diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 7947a1a1e1..5e7f5777b2 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -104,7 +104,7 @@ trait StdNames { val IMPORT: NameType = "" val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING val MODULE_VAR_SUFFIX: NameType = "$module" - // val NAME_JOIN_NAME: NameType = NAME_JOIN_STRING + val NAME_JOIN_NAME: NameType = NAME_JOIN_STRING val PACKAGE: NameType = "package" val ROOT: NameType = "" val SPECIALIZED_SUFFIX: NameType = "$sp" @@ -121,8 +121,8 @@ trait StdNames { final val Short: NameType = "Short" final val Unit: NameType = "Unit" - // final val ScalaValueNames: scala.List[NameType] = - // scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit) + final val ScalaValueNames: scala.List[NameType] 
= + scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit) // some types whose companions we utilize final val AnyRef: NameType = "AnyRef" @@ -130,7 +130,7 @@ trait StdNames { final val List: NameType = "List" final val Seq: NameType = "Seq" final val Symbol: NameType = "Symbol" - // final val ClassTag: NameType = "ClassTag" + final val ClassTag: NameType = "ClassTag" final val WeakTypeTag: NameType = "WeakTypeTag" final val TypeTag : NameType = "TypeTag" final val Expr: NameType = "Expr" @@ -220,12 +220,12 @@ trait StdNames { final val Any: NameType = "Any" final val AnyVal: NameType = "AnyVal" - // final val ExprApi: NameType = "ExprApi" + final val ExprApi: NameType = "ExprApi" final val Mirror: NameType = "Mirror" final val Nothing: NameType = "Nothing" final val Null: NameType = "Null" final val Object: NameType = "Object" - // final val PartialFunction: NameType = "PartialFunction" + final val PartialFunction: NameType = "PartialFunction" final val PrefixType: NameType = "PrefixType" final val Product: NameType = "Product" final val Serializable: NameType = "Serializable" @@ -239,7 +239,7 @@ trait StdNames { final val Group: NameType = "Group" final val Tree: NameType = "Tree" final val Type : NameType = "Type" - // final val TypeTree: NameType = "TypeTree" + final val TypeTree: NameType = "TypeTree" // Annotation simple names, used in Namer final val BeanPropertyAnnot: NameType = "BeanProperty" @@ -249,13 +249,13 @@ trait StdNames { // Classfile Attributes final val AnnotationDefaultATTR: NameType = "AnnotationDefault" final val BridgeATTR: NameType = "Bridge" - // final val ClassfileAnnotationATTR: NameType = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009). + final val ClassfileAnnotationATTR: NameType = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009). final val CodeATTR: NameType = "Code" final val ConstantValueATTR: NameType = "ConstantValue" final val DeprecatedATTR: NameType = "Deprecated" final val ExceptionsATTR: NameType = "Exceptions" final val InnerClassesATTR: NameType = "InnerClasses" - // final val LineNumberTableATTR: NameType = "LineNumberTable" + final val LineNumberTableATTR: NameType = "LineNumberTable" final val LocalVariableTableATTR: NameType = "LocalVariableTable" final val RuntimeAnnotationATTR: NameType = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME final val RuntimeParamAnnotationATTR: NameType = "RuntimeVisibleParameterAnnotations" // RetentionPolicy.RUNTIME (annotations on parameters) @@ -284,9 +284,9 @@ trait StdNames { val EXCEPTION_RESULT_PREFIX = "exceptionResult" val EXPAND_SEPARATOR_STRING = "$$" val INTERPRETER_IMPORT_WRAPPER = "$iw" - // val INTERPRETER_LINE_PREFIX = "line" - // val INTERPRETER_VAR_PREFIX = "res" - // val INTERPRETER_WRAPPER_SUFFIX = "$object" + val INTERPRETER_LINE_PREFIX = "line" + val INTERPRETER_VAR_PREFIX = "res" + val INTERPRETER_WRAPPER_SUFFIX = "$object" val LOCALDUMMY_PREFIX = " true - // case _ => false - // } + def isDeprecatedIdentifierName(name: Name) = name.toTermName match { + case nme.`then` | nme.`macro` => true + case _ => false + } def isOpAssignmentName(name: Name) = name match { case raw.NE | raw.LE | raw.GE | EMPTY => false @@ -461,14 +461,14 @@ trait StdNames { // If the name ends with $nn where nn are // all digits, strip the $ and the digits. // Otherwise return the argument. 
- // def stripAnonNumberSuffix(name: Name): Name = { - // var pos = name.length - // while (pos > 0 && name.charAt(pos - 1).isDigit) - // pos -= 1 + def stripAnonNumberSuffix(name: Name): Name = { + var pos = name.length + while (pos > 0 && name.charAt(pos - 1).isDigit) + pos -= 1 - // if (pos <= 0 || pos == name.length || name.charAt(pos - 1) != '$') name - // else name.subName(0, pos - 1) - // } + if (pos <= 0 || pos == name.length || name.charAt(pos - 1) != '$') name + else name.subName(0, pos - 1) + } def stripModuleSuffix(name: Name): Name = ( if (isModuleName(name)) name dropRight MODULE_SUFFIX_STRING.length else name @@ -484,8 +484,8 @@ trait StdNames { final val Nil: NameType = "Nil" final val Predef: NameType = "Predef" - // final val ScalaRunTime: NameType = "ScalaRunTime" - // final val Some: NameType = "Some" + final val ScalaRunTime: NameType = "ScalaRunTime" + final val Some: NameType = "Some" val _1 : NameType = "_1" val _2 : NameType = "_2" @@ -581,14 +581,14 @@ trait StdNames { val Annotation: NameType = "Annotation" val Any: NameType = "Any" val AnyVal: NameType = "AnyVal" - // val AppliedTypeTree: NameType = "AppliedTypeTree" - // val Apply: NameType = "Apply" + val AppliedTypeTree: NameType = "AppliedTypeTree" + val Apply: NameType = "Apply" val ArrayAnnotArg: NameType = "ArrayAnnotArg" - // val Constant: NameType = "Constant" + val Constant: NameType = "Constant" val ConstantType: NameType = "ConstantType" val EmptyPackage: NameType = "EmptyPackage" val EmptyPackageClass: NameType = "EmptyPackageClass" - // val ExistentialTypeTree: NameType = "ExistentialTypeTree" + val ExistentialTypeTree: NameType = "ExistentialTypeTree" val Flag : NameType = "Flag" val Ident: NameType = "Ident" val Import: NameType = "Import" @@ -597,10 +597,10 @@ trait StdNames { val Modifiers: NameType = "Modifiers" val NestedAnnotArg: NameType = "NestedAnnotArg" val NoFlags: NameType = "NoFlags" - // val NoPrefix: NameType = "NoPrefix" + val NoPrefix: NameType = "NoPrefix" val NoSymbol: NameType = "NoSymbol" val Nothing: NameType = "Nothing" - // val NoType: NameType = "NoType" + val NoType: NameType = "NoType" val Null: NameType = "Null" val Object: NameType = "Object" val RootPackage: NameType = "RootPackage" @@ -609,17 +609,17 @@ trait StdNames { val StringContext: NameType = "StringContext" val This: NameType = "This" val ThisType: NameType = "ThisType" - // val Tree : NameType = "Tree" + val Tree : NameType = "Tree" val Tuple2: NameType = "Tuple2" val TYPE_ : NameType = "TYPE" - // val TypeApply: NameType = "TypeApply" + val TypeApply: NameType = "TypeApply" val TypeRef: NameType = "TypeRef" val TypeTree: NameType = "TypeTree" val UNIT : NameType = "UNIT" val add_ : NameType = "add" val annotation: NameType = "annotation" val anyValClass: NameType = "anyValClass" - // val append: NameType = "append" + val append: NameType = "append" val apply: NameType = "apply" val applyDynamic: NameType = "applyDynamic" val applyDynamicNamed: NameType = "applyDynamicNamed" @@ -627,34 +627,34 @@ trait StdNames { val args : NameType = "args" val argv : NameType = "argv" val arrayClass: NameType = "arrayClass" - // val arrayElementClass: NameType = "arrayElementClass" - // val arrayValue: NameType = "arrayValue" + val arrayElementClass: NameType = "arrayElementClass" + val arrayValue: NameType = "arrayValue" val array_apply : NameType = "array_apply" val array_clone : NameType = "array_clone" val array_length : NameType = "array_length" val array_update : NameType = "array_update" - // val arraycopy: NameType 
= "arraycopy" - // val asTerm: NameType = "asTerm" + val arraycopy: NameType = "arraycopy" + val asTerm: NameType = "asTerm" val asModule: NameType = "asModule" - // val asMethod: NameType = "asMethod" + val asMethod: NameType = "asMethod" val asType: NameType = "asType" - // val asClass: NameType = "asClass" + val asClass: NameType = "asClass" val asInstanceOf_ : NameType = "asInstanceOf" val asInstanceOf_Ob : NameType = "$asInstanceOf" - // val assert_ : NameType = "assert" - // val assume_ : NameType = "assume" + val assert_ : NameType = "assert" + val assume_ : NameType = "assume" val box: NameType = "box" val build : NameType = "build" val bytes: NameType = "bytes" val canEqual_ : NameType = "canEqual" val checkInitialized: NameType = "checkInitialized" - // val ClassManifestFactory: NameType = "ClassManifestFactory" + val ClassManifestFactory: NameType = "ClassManifestFactory" val classOf: NameType = "classOf" val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure val conforms: NameType = "conforms" val copy: NameType = "copy" val currentMirror: NameType = "currentMirror" - // val definitions: NameType = "definitions" + val definitions: NameType = "definitions" val delayedInit: NameType = "delayedInit" val delayedInitArg: NameType = "delayedInit$body" val drop: NameType = "drop" @@ -667,7 +667,7 @@ trait StdNames { val equalsNumObject : NameType = "equalsNumObject" val equals_ : NameType = if (forMSIL) "Equals" else "equals" val error: NameType = "error" - // val eval: NameType = "eval" + val eval: NameType = "eval" val ex: NameType = "ex" val experimental: NameType = "experimental" val f: NameType = "f" @@ -678,17 +678,17 @@ trait StdNames { val flagsFromBits : NameType = "flagsFromBits" val flatMap: NameType = "flatMap" val foreach: NameType = "foreach" - // val genericArrayOps: NameType = "genericArrayOps" + val genericArrayOps: NameType = "genericArrayOps" val get: NameType = "get" - // val getOrElse: NameType = "getOrElse" - // val hasNext: NameType = "hasNext" + val getOrElse: NameType = "getOrElse" + val hasNext: NameType = "hasNext" val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode" val hash_ : NameType = "hash" - // val head: NameType = "head" - // val identity: NameType = "identity" + val head: NameType = "head" + val identity: NameType = "identity" val implicitly: NameType = "implicitly" val in: NameType = "in" - // val info: NameType = "info" + val info: NameType = "info" val inlinedEquals: NameType = "inlinedEquals" val isArray: NameType = "isArray" val isDefinedAt: NameType = "isDefinedAt" @@ -700,57 +700,57 @@ trait StdNames { val lang: NameType = "lang" val length: NameType = "length" val lengthCompare: NameType = "lengthCompare" - // val liftedTree: NameType = "liftedTree" - // val `macro` : NameType = "macro" - // val macroThis : NameType = "_this" + val liftedTree: NameType = "liftedTree" + val `macro` : NameType = "macro" + val macroThis : NameType = "_this" val macroContext : NameType = "c" val main: NameType = "main" - // val manifest: NameType = "manifest" - // val ManifestFactory: NameType = "ManifestFactory" + val manifest: NameType = "manifest" + val ManifestFactory: NameType = "ManifestFactory" val manifestToTypeTag: NameType = "manifestToTypeTag" val map: NameType = "map" val materializeClassTag: NameType = "materializeClassTag" val materializeWeakTypeTag: NameType = "materializeWeakTypeTag" val materializeTypeTag: NameType = "materializeTypeTag" - // val mirror : NameType = "mirror" + 
val mirror : NameType = "mirror" val moduleClass : NameType = "moduleClass" - // val name: NameType = "name" + val name: NameType = "name" val ne: NameType = "ne" val newArray: NameType = "newArray" val newFreeTerm: NameType = "newFreeTerm" val newFreeType: NameType = "newFreeType" val newNestedSymbol: NameType = "newNestedSymbol" val newScopeWith: NameType = "newScopeWith" - // val next: NameType = "next" + val next: NameType = "next" val nmeNewTermName: NameType = "newTermName" val nmeNewTypeName: NameType = "newTypeName" - // val normalize: NameType = "normalize" + val normalize: NameType = "normalize" val notifyAll_ : NameType = "notifyAll" val notify_ : NameType = "notify" val null_ : NameType = "null" - // val ofDim: NameType = "ofDim" - // val origin: NameType = "origin" + val ofDim: NameType = "ofDim" + val origin: NameType = "origin" val prefix : NameType = "prefix" val productArity: NameType = "productArity" val productElement: NameType = "productElement" val productIterator: NameType = "productIterator" val productPrefix: NameType = "productPrefix" val readResolve: NameType = "readResolve" - // val reflect : NameType = "reflect" + val reflect : NameType = "reflect" val reify : NameType = "reify" val rootMirror : NameType = "rootMirror" - // val runOrElse: NameType = "runOrElse" + val runOrElse: NameType = "runOrElse" val runtime: NameType = "runtime" val runtimeClass: NameType = "runtimeClass" val runtimeMirror: NameType = "runtimeMirror" - // val sameElements: NameType = "sameElements" + val sameElements: NameType = "sameElements" val scala_ : NameType = "scala" val selectDynamic: NameType = "selectDynamic" val selectOverloadedMethod: NameType = "selectOverloadedMethod" val selectTerm: NameType = "selectTerm" val selectType: NameType = "selectType" val self: NameType = "self" - // val setAccessible: NameType = "setAccessible" + val setAccessible: NameType = "setAccessible" val setAnnotations: NameType = "setAnnotations" val setSymbol: NameType = "setSymbol" val setType: NameType = "setType" @@ -760,15 +760,15 @@ trait StdNames { val staticModule : NameType = "staticModule" val staticPackage : NameType = "staticPackage" val synchronized_ : NameType = "synchronized" - // val tail: NameType = "tail" - // val `then` : NameType = "then" + val tail: NameType = "tail" + val `then` : NameType = "then" val this_ : NameType = "this" val thisPrefix : NameType = "thisPrefix" - // val throw_ : NameType = "throw" + val throw_ : NameType = "throw" val toArray: NameType = "toArray" - // val toList: NameType = "toList" + val toList: NameType = "toList" val toObjectArray : NameType = "toObjectArray" - // val toSeq: NameType = "toSeq" + val toSeq: NameType = "toSeq" val toString_ : NameType = if (forMSIL) "ToString" else "toString" val toTypeConstructor: NameType = "toTypeConstructor" val tpe : NameType = "tpe" @@ -788,14 +788,14 @@ trait StdNames { val view_ : NameType = "view" val wait_ : NameType = "wait" val withFilter: NameType = "withFilter" - // val wrap: NameType = "wrap" - // val zip: NameType = "zip" + val wrap: NameType = "wrap" + val zip: NameType = "zip" - // val synthSwitch: NameType = "$synthSwitch" + val synthSwitch: NameType = "$synthSwitch" // unencoded operators object raw { - // final val AMP : NameType = "&" + final val AMP : NameType = "&" final val BANG : NameType = "!" 
final val BAR : NameType = "|" final val DOLLAR: NameType = "$" @@ -804,7 +804,7 @@ trait StdNames { final val MINUS: NameType = "-" final val NE: NameType = "!=" final val PLUS : NameType = "+" - // final val SLASH: NameType = "/" + final val SLASH: NameType = "/" final val STAR : NameType = "*" final val TILDE: NameType = "~" @@ -860,14 +860,14 @@ trait StdNames { // Grouped here so Cleanup knows what tests to perform. val CommonOpNames = Set[Name](OR, XOR, AND, EQ, NE) - // val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort) + val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort) val BooleanOpNames = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames - // val NumberOpNames = ( - // Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT) - // ++ Set(UNARY_+, UNARY_-, UNARY_!) - // ++ ConversionNames - // ++ CommonOpNames - // ) + val NumberOpNames = ( + Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT) + ++ Set(UNARY_+, UNARY_-, UNARY_!) + ++ ConversionNames + ++ CommonOpNames + ) val add: NameType = "add" val complement: NameType = "complement" @@ -999,7 +999,7 @@ trait StdNames { object fulltpnme extends TypeNames { val RuntimeNothing: NameType = "scala.runtime.Nothing$" val RuntimeNull: NameType = "scala.runtime.Null$" - // val JavaLangEnum: NameType = "java.lang.Enum" + val JavaLangEnum: NameType = "java.lang.Enum" } /** Java binary names, like scala/runtime/Nothing$. @@ -1015,15 +1015,15 @@ trait StdNames { object nme extends TermNames { - // def isModuleVarName(name: Name): Boolean = - // stripAnonNumberSuffix(name) endsWith MODULE_VAR_SUFFIX + def isModuleVarName(name: Name): Boolean = + stripAnonNumberSuffix(name) endsWith MODULE_VAR_SUFFIX def moduleVarName(name: TermName): TermName = newTermNameCached("" + name + MODULE_VAR_SUFFIX) def getCause = sn.GetCause def getClass_ = sn.GetClass - // def getComponentType = sn.GetComponentType + def getComponentType = sn.GetComponentType def getMethod_ = sn.GetMethod def invoke_ = sn.Invoke @@ -1036,14 +1036,14 @@ trait StdNames { val reflMethodCacheName: NameType = "reflMethod$Cache" val reflMethodName: NameType = "reflMethod$Method" - // private val reflectionCacheNames = Set[NameType]( - // reflPolyCacheName, - // reflClassCacheName, - // reflParamsCacheName, - // reflMethodCacheName, - // reflMethodName - // ) - // def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _) + private val reflectionCacheNames = Set[NameType]( + reflPolyCacheName, + reflClassCacheName, + reflParamsCacheName, + reflMethodCacheName, + reflMethodName + ) + def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _) @deprecated("Use a method in tpnme", "2.10.0") def dropSingletonName(name: Name): TypeName = tpnme.dropSingletonName(name) @deprecated("Use a method in tpnme", "2.10.0") def singletonName(name: Name): TypeName = tpnme.singletonName(name) @@ -1056,28 +1056,28 @@ trait StdNames { protected val stringToTypeName = null protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s) - // val BeanProperty : TypeName - // val BooleanBeanProperty : TypeName + val BeanProperty : TypeName + val BooleanBeanProperty : TypeName val BoxedBoolean : TypeName val BoxedCharacter : TypeName val BoxedNumber : TypeName - // val Class : TypeName + val Class : TypeName val Delegate : TypeName val IOOBException : TypeName // IndexOutOfBoundsException val InvTargetException : TypeName // 
InvocationTargetException - // val JavaSerializable : TypeName + val JavaSerializable : TypeName val MethodAsObject : TypeName val NPException : TypeName // NullPointerException val Object : TypeName - // val String : TypeName + val String : TypeName val Throwable : TypeName val ValueType : TypeName - // val ForName : TermName + val ForName : TermName val GetCause : TermName val GetClass : TermName val GetClassLoader : TermName - // val GetComponentType : TermName + val GetComponentType : TermName val GetMethod : TermName val Invoke : TermName val JavaLang : TermName @@ -1152,22 +1152,22 @@ trait StdNames { final val BoxedLong: TypeName = "java.lang.Long" final val BoxedNumber: TypeName = "java.lang.Number" final val BoxedShort: TypeName = "java.lang.Short" - // final val Class: TypeName = "java.lang.Class" + final val Class: TypeName = "java.lang.Class" final val Delegate: TypeName = tpnme.NO_NAME final val IOOBException: TypeName = "java.lang.IndexOutOfBoundsException" final val InvTargetException: TypeName = "java.lang.reflect.InvocationTargetException" final val MethodAsObject: TypeName = "java.lang.reflect.Method" final val NPException: TypeName = "java.lang.NullPointerException" final val Object: TypeName = "java.lang.Object" - // final val String: TypeName = "java.lang.String" + final val String: TypeName = "java.lang.String" final val Throwable: TypeName = "java.lang.Throwable" final val ValueType: TypeName = tpnme.NO_NAME - // final val ForName: TermName = newTermName("forName") + final val ForName: TermName = newTermName("forName") final val GetCause: TermName = newTermName("getCause") final val GetClass: TermName = newTermName("getClass") final val GetClassLoader: TermName = newTermName("getClassLoader") - // final val GetComponentType: TermName = newTermName("getComponentType") + final val GetComponentType: TermName = newTermName("getComponentType") final val GetMethod: TermName = newTermName("getMethod") final val Invoke: TermName = newTermName("invoke") final val JavaLang: TermName = newTermName("java.lang") @@ -1185,28 +1185,28 @@ trait StdNames { } private class MSILNames extends SymbolNames { - // final val BeanProperty: TypeName = tpnme.NO_NAME - // final val BooleanBeanProperty: TypeName = tpnme.NO_NAME + final val BeanProperty: TypeName = tpnme.NO_NAME + final val BooleanBeanProperty: TypeName = tpnme.NO_NAME final val BoxedBoolean: TypeName = "System.IConvertible" final val BoxedCharacter: TypeName = "System.IConvertible" final val BoxedNumber: TypeName = "System.IConvertible" - // final val Class: TypeName = "System.Type" + final val Class: TypeName = "System.Type" final val Delegate: TypeName = "System.MulticastDelegate" final val IOOBException: TypeName = "System.IndexOutOfRangeException" final val InvTargetException: TypeName = "System.Reflection.TargetInvocationException" - // final val JavaSerializable: TypeName = tpnme.NO_NAME + final val JavaSerializable: TypeName = tpnme.NO_NAME final val MethodAsObject: TypeName = "System.Reflection.MethodInfo" final val NPException: TypeName = "System.NullReferenceException" final val Object: TypeName = "System.Object" - // final val String: TypeName = "System.String" + final val String: TypeName = "System.String" final val Throwable: TypeName = "System.Exception" final val ValueType: TypeName = "System.ValueType" - // final val ForName: TermName = newTermName("GetType") + final val ForName: TermName = newTermName("GetType") final val GetCause: TermName = newTermName("InnerException") /* 
System.Reflection.TargetInvocationException.InnerException */ final val GetClass: TermName = newTermName("GetType") final lazy val GetClassLoader: TermName = throw new UnsupportedOperationException("Scala reflection is not supported on this platform"); - // final val GetComponentType: TermName = newTermName("GetElementType") + final val GetComponentType: TermName = newTermName("GetElementType") final val GetMethod: TermName = newTermName("GetMethod") final val Invoke: TermName = newTermName("Invoke") final val JavaLang: TermName = newTermName("System") @@ -1223,13 +1223,13 @@ trait StdNames { ) } - // private class J2SENames extends JavaNames { - // final val BeanProperty: TypeName = "scala.beans.BeanProperty" - // final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty" - // final val JavaSerializable: TypeName = "java.io.Serializable" - // } + private class J2SENames extends JavaNames { + final val BeanProperty: TypeName = "scala.beans.BeanProperty" + final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty" + final val JavaSerializable: TypeName = "java.io.Serializable" + } lazy val sn: SymbolNames = if (forMSIL) new MSILNames - else new JavaNames { } + else new J2SENames } diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index c72fb96611..fb1bf9ed9d 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -184,7 +184,7 @@ abstract class SymbolTable extends macros.Universe final def phaseId(period: Period): Phase#Id = period & 0xFF /** The period at the start of run that includes `period`. */ - // final def startRun(period: Period): Period = period & 0xFFFFFF00 + final def startRun(period: Period): Period = period & 0xFFFFFF00 /** The current period. 
*/ final def currentPeriod: Period = { @@ -298,6 +298,7 @@ abstract class SymbolTable extends macros.Universe object perRunCaches { import java.lang.ref.WeakReference + import scala.runtime.ScalaRunTime.stringOf import scala.collection.generic.Clearable // Weak references so the garbage collector will take care of diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 89c3659b2e..eec780470e 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -19,7 +19,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => protected var ids = 0 - // val emptySymbolArray = new Array[Symbol](0) + val emptySymbolArray = new Array[Symbol](0) protected def nextId() = { ids += 1; ids } @@ -256,8 +256,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => (m, c) } - // final def newPackageSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = - // newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol] + final def newPackageSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = + newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol] final def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol = newClassSymbol(name, pos, newFlags).asInstanceOf[ModuleClassSymbol] @@ -323,8 +323,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Synthetic value parameters when parameter symbols are not available */ - // final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] = - // argtypess map (xs => newSyntheticValueParams(xs, freshNamer)) + final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] = + argtypess map (xs => newSyntheticValueParams(xs, freshNamer)) /** Synthetic value parameters when parameter symbols are not available. * Calling this method multiple times will re-use the same parameter names. 
@@ -341,7 +341,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def newSyntheticValueParam(argtype: Type, name: TermName = nme.syntheticParamName(1)): TermSymbol = newValueParameter(name, owner.pos.focus, SYNTHETIC) setInfo argtype - // def newSyntheticTypeParam(): TypeSymbol = newSyntheticTypeParam("T0", 0L) + def newSyntheticTypeParam(): TypeSymbol = newSyntheticTypeParam("T0", 0L) def newSyntheticTypeParam(name: String, newFlags: Long): TypeSymbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty def newSyntheticTypeParams(num: Int): List[TypeSymbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L)) @@ -407,11 +407,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Create a new getter for current symbol (which must be a field) */ - // final def newGetter: MethodSymbol = ( - // owner.newMethod(nme.getterName(name.toTermName), NoPosition, getterFlags(flags)) - // setPrivateWithin privateWithin - // setInfo MethodType(Nil, tpe) - // ) + final def newGetter: MethodSymbol = ( + owner.newMethod(nme.getterName(name.toTermName), NoPosition, getterFlags(flags)) + setPrivateWithin privateWithin + setInfo MethodType(Nil, tpe) + ) final def newErrorSymbol(name: Name): Symbol = name match { case x: TypeName => newErrorClass(x) @@ -528,14 +528,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def isContravariant = false def isCovariant = false - // def isExistentialQuantified = false + def isExistentialQuantified = false def isExistentialSkolem = false def isExistentiallyBound = false def isGADTSkolem = false def isTypeParameter = false def isTypeParameterOrSkolem = false def isTypeSkolem = false - // def isTypeMacro = false + def isTypeMacro = false def isInvariant = !isCovariant && !isContravariant /** Qualities of Terms, always false for TypeSymbols. @@ -719,14 +719,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => = hasAnnotation(DeprecatedInheritanceAttr) def deprecatedInheritanceMessage = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0) - // def deprecatedInheritanceVersion - // = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1) + def deprecatedInheritanceVersion + = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1) def hasDeprecatedOverridingAnnotation = hasAnnotation(DeprecatedOverridingAttr) def deprecatedOverridingMessage = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0) - // def deprecatedOverridingVersion - // = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1) + def deprecatedOverridingVersion + = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1) // !!! when annotation arguments are not literal strings, but any sort of // assembly of strings, there is a fair chance they will turn up here not as @@ -806,7 +806,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isStaticOwner: Boolean = isPackageClass || isModuleClass && isStatic - // def isTopLevelModule = hasFlag(MODULE) && owner.isPackageClass + def isTopLevelModule = hasFlag(MODULE) && owner.isPackageClass /** A helper function for isEffectivelyFinal. 
*/ private def isNotOverridden = ( @@ -844,7 +844,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def isLocalClass = false - // def isStableClass = false + def isStableClass = false /* code for fixing nested objects override final def isModuleClass: Boolean = @@ -869,8 +869,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isPossibleInRefinement = !isConstructor && !isOverridingSymbol /** Is this symbol a member of class `clazz`? */ - // def isMemberOf(clazz: Symbol) = - // clazz.info.member(name).alternatives contains this + def isMemberOf(clazz: Symbol) = + clazz.info.member(name).alternatives contains this /** A a member of class `base` is incomplete if * (1) it is declared deferred or @@ -972,14 +972,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => def ownerChain: List[Symbol] = this :: owner.ownerChain def originalOwnerChain: List[Symbol] = this :: originalOwner.getOrElse(this, rawowner).originalOwnerChain - // All the symbols overridden by this symbol and this symbol at the head, - // or Nil if this is NoSymbol. - def overrideChain = ( - if (this eq NoSymbol) Nil - else if (!owner.isClass) this :: Nil - else this :: allOverriddenSymbols - ) - // Non-classes skip self and return rest of owner chain; overridden in ClassSymbol. def enclClassChain: List[Symbol] = owner.enclClassChain @@ -1089,8 +1081,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => protected def createImplClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol = new ClassSymbol(this, pos, name) with ImplClassSymbol initFlags newFlags - // protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol = - // new TermSymbol(this, pos, name) initFlags newFlags + protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol = + new TermSymbol(this, pos, name) initFlags newFlags protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol = new MethodSymbol(this, pos, name) initFlags newFlags @@ -1454,12 +1446,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => !isInitialized && (flags & LOCKED) == 0 && shouldTriggerCompleter(this, if (infos ne null) infos.info else null, isFlagRelated, mask) /** Was symbol's type updated during given phase? */ - // final def isUpdatedAt(pid: Phase#Id): Boolean = { - // assert(isCompilerUniverse) - // var infos = this.infos - // while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev - // infos ne null - // } + final def isUpdatedAt(pid: Phase#Id): Boolean = { + assert(isCompilerUniverse) + var infos = this.infos + while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev + infos ne null + } /** Was symbol's type updated during given phase? 
*/ final def hasTypeAt(pid: Phase#Id): Boolean = { @@ -1952,10 +1944,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => (this.rawInfo ne NoType) && (this.effectiveOwner == that.effectiveOwner) && ( !this.effectiveOwner.isPackageClass - || (this.associatedFile eq null) - || (that.associatedFile eq null) - || (this.associatedFile.path == that.associatedFile.path) // Cheap possibly wrong check, then expensive normalization - || (this.associatedFile.canonicalPath == that.associatedFile.canonicalPath) + || (this.sourceFile eq null) + || (that.sourceFile eq null) + || (this.sourceFile.path == that.sourceFile.path) // Cheap possibly wrong check, then expensive normalization + || (this.sourceFile.canonicalPath == that.sourceFile.canonicalPath) ) ) @@ -2082,10 +2074,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, ofclazz.thisType) /** Returns all symbols overriden by this symbol. */ - final def allOverriddenSymbols: List[Symbol] = ( - if ((this eq NoSymbol) || !owner.isClass) Nil + final def allOverriddenSymbols: List[Symbol] = + if (!owner.isClass) Nil else owner.ancestors map overriddenSymbol filter (_ != NoSymbol) - ) /** Equivalent to allOverriddenSymbols.nonEmpty, but more efficient. */ // !!! When if ever will this answer differ from .isOverride? @@ -2096,7 +2087,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => ) /** Equivalent to allOverriddenSymbols.head (or NoSymbol if no overrides) but more efficient. */ def nextOverriddenSymbol: Symbol = { - if ((this ne NoSymbol) && owner.isClass) owner.ancestors foreach { base => + if (owner.isClass) owner.ancestors foreach { base => val sym = overriddenSymbol(base) if (sym != NoSymbol) return sym @@ -2217,10 +2208,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => private def sourceFileOnly(file: AbstractFile): AbstractFile = if ((file eq null) || (file.path endsWith ".class")) null else file - // private def binaryFileOnly(file: AbstractFile): AbstractFile = - // if ((file eq null) || !(file.path endsWith ".class")) null else file + private def binaryFileOnly(file: AbstractFile): AbstractFile = + if ((file eq null) || !(file.path endsWith ".class")) null else file - // final def binaryFile: AbstractFile = binaryFileOnly(associatedFile) + final def binaryFile: AbstractFile = binaryFileOnly(associatedFile) final def sourceFile: AbstractFile = sourceFileOnly(associatedFile) /** Overridden in ModuleSymbols to delegate to the module class. 
*/ @@ -2245,7 +2236,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // ------ toString ------------------------------------------------------------------- /** A tag which (in the ideal case) uniquely identifies class symbols */ - // final def tag: Int = fullName.## + final def tag: Int = fullName.## /** The simple name of this Symbol */ final def simpleName: Name = name @@ -2714,7 +2705,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def asNameType(n: Name) = n.toTypeName override def isNonClassType = true - // override def isTypeMacro = hasFlag(MACRO) + override def isTypeMacro = hasFlag(MACRO) override def resolveOverloadedFlag(flag: Long) = flag match { case TRAIT => "" // DEFAULTPARAM @@ -2732,7 +2723,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isAbstractType = this hasFlag DEFERRED override def isContravariant = this hasFlag CONTRAVARIANT override def isCovariant = this hasFlag COVARIANT - // override def isExistentialQuantified = isExistentiallyBound && !isSkolem + override def isExistentialQuantified = isExistentiallyBound && !isSkolem override def isExistentiallyBound = this hasFlag EXISTENTIAL override def isTypeParameter = isTypeParameterOrSkolem && !isSkolem override def isTypeParameterOrSkolem = this hasFlag PARAM @@ -2863,7 +2854,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isTypeSkolem = this hasFlag PARAM override def isAbstractType = this hasFlag DEFERRED - // override def isExistentialQuantified = false + override def isExistentialQuantified = false override def existentialBound = if (isAbstractType) this.info else super.existentialBound /** If typeskolem comes from a type parameter, that parameter, otherwise skolem itself */ @@ -2949,21 +2940,21 @@ trait Symbols extends api.Symbols { self: SymbolTable => || isLocal || !owner.isPackageClass && owner.isLocalClass ) - // override def isStableClass = (this hasFlag STABLE) || checkStable() - - // private def checkStable() = { - // def hasNoAbstractTypeMember(clazz: Symbol): Boolean = - // (clazz hasFlag STABLE) || { - // var e = clazz.info.decls.elems - // while ((e ne null) && !(e.sym.isAbstractType && info.member(e.sym.name) == e.sym)) - // e = e.next - // e == null - // } - // (info.baseClasses forall hasNoAbstractTypeMember) && { - // setFlag(STABLE) - // true - // } - // } + override def isStableClass = (this hasFlag STABLE) || checkStable() + + private def checkStable() = { + def hasNoAbstractTypeMember(clazz: Symbol): Boolean = + (clazz hasFlag STABLE) || { + var e = clazz.info.decls.elems + while ((e ne null) && !(e.sym.isAbstractType && info.member(e.sym.name) == e.sym)) + e = e.next + e == null + } + (info.baseClasses forall hasNoAbstractTypeMember) && { + setFlag(STABLE) + true + } + } override def enclClassChain = this :: owner.enclClassChain diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 6c8ba047d6..6ce93d93b2 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -11,10 +11,10 @@ abstract class TreeGen extends macros.TreeBuilder { def rootScalaDot(name: Name) = Select(rootId(nme.scala_) setSymbol ScalaPackage, name) def scalaDot(name: Name) = Select(Ident(nme.scala_) setSymbol ScalaPackage, name) def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name) - // def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass - // def scalaUnitConstr = 
scalaDot(tpnme.Unit) setSymbol UnitClass - // def productConstr = scalaDot(tpnme.Product) setSymbol ProductRootClass - // def serializableConstr = scalaDot(tpnme.Serializable) setSymbol SerializableClass + def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass + def scalaUnitConstr = scalaDot(tpnme.Unit) setSymbol UnitClass + def productConstr = scalaDot(tpnme.Product) setSymbol ProductRootClass + def serializableConstr = scalaDot(tpnme.Serializable) setSymbol SerializableClass def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = { val cls = if (abstractFun) @@ -248,8 +248,8 @@ abstract class TreeGen extends macros.TreeBuilder { Literal(Constant(tp)) setType ConstantType(Constant(tp)) /** Builds a list with given head and tail. */ - // def mkNewCons(head: Tree, tail: Tree): Tree = - // New(Apply(mkAttributedRef(ConsClass), List(head, tail))) + def mkNewCons(head: Tree, tail: Tree): Tree = + New(Apply(mkAttributedRef(ConsClass), List(head, tail))) /** Builds a list with given head and tail. */ def mkNil: Tree = mkAttributedRef(NilModule) diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 2b1292e145..7ae7cf1821 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -135,8 +135,8 @@ abstract class TreeInfo { @deprecated("Use isExprSafeToInline instead", "2.10.0") def isPureExpr(tree: Tree) = isExprSafeToInline(tree) - // def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] = - // mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg))) + def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] = + mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg))) def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = { val b = List.newBuilder[R] @@ -186,25 +186,25 @@ abstract class TreeInfo { * * Also accounts for varargs. */ - // private def applyMethodParameters(fn: Tree): List[Symbol] = { - // val depth = applyDepth(fn) - // // There could be applies which go beyond the parameter list(s), - // // being applied to the result of the method call. - // // !!! Note that this still doesn't seem correct, although it should - // // be closer than what it replaced. - // if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth) - // else if (fn.symbol.paramss.isEmpty) Nil - // else fn.symbol.paramss.last - // } - - // def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match { - // case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args) - // case _ => Nil - // } - // def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match { - // case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f) - // case _ => - // } + private def applyMethodParameters(fn: Tree): List[Symbol] = { + val depth = applyDepth(fn) + // There could be applies which go beyond the parameter list(s), + // being applied to the result of the method call. + // !!! Note that this still doesn't seem correct, although it should + // be closer than what it replaced. 
+ if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth) + else if (fn.symbol.paramss.isEmpty) Nil + else fn.symbol.paramss.last + } + + def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match { + case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args) + case _ => Nil + } + def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match { + case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f) + case _ => + } /** Is symbol potentially a getter of a variable? */ @@ -354,10 +354,10 @@ abstract class TreeInfo { case x: Ident => !x.isBackquoted && nme.isVariableName(x.name) case _ => false } - // def isDeprecatedIdentifier(tree: Tree): Boolean = tree match { - // case x: Ident => !x.isBackquoted && nme.isDeprecatedIdentifierName(x.name) - // case _ => false - // } + def isDeprecatedIdentifier(tree: Tree): Boolean = tree match { + case x: Ident => !x.isBackquoted && nme.isDeprecatedIdentifierName(x.name) + case _ => false + } /** The first constructor definitions in `stats` */ def firstConstructor(stats: List[Tree]): Tree = stats find { @@ -417,10 +417,10 @@ abstract class TreeInfo { def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.endChar != ':') /** Is tree a `this` node which belongs to `enclClass`? */ - // def isSelf(tree: Tree, enclClass: Symbol): Boolean = tree match { - // case This(_) => tree.symbol == enclClass - // case _ => false - // } + def isSelf(tree: Tree, enclClass: Symbol): Boolean = tree match { + case This(_) => tree.symbol == enclClass + case _ => false + } /** a Match(Typed(_, tpt), _) must be translated into a switch if isSwitchAnnotation(tpt.tpe) */ def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation definitions.SwitchClass diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index c93750165d..ed08226ec7 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -847,7 +847,7 @@ trait Trees extends api.Trees { self: SymbolTable => /** Is the tree Predef, scala.Predef, or _root_.scala.Predef? */ def isReferenceToPredef(t: Tree) = isReferenceToScalaMember(t, nme.Predef) - // def isReferenceToAnyVal(t: Tree) = isReferenceToScalaMember(t, tpnme.AnyVal) + def isReferenceToAnyVal(t: Tree) = isReferenceToScalaMember(t, tpnme.AnyVal) // --- modifiers implementation --------------------------------------- diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala index 9c2457e402..68b4fa69a1 100644 --- a/src/reflect/scala/reflect/internal/TypeDebugging.scala +++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala @@ -9,6 +9,8 @@ package internal trait TypeDebugging { self: SymbolTable => + import definitions._ + // @M toString that is safe during debugging (does not normalize, ...) 
object typeDebug { private def to_s(x: Any): String = x match { @@ -18,7 +20,7 @@ trait TypeDebugging { case x: Product => x.productIterator mkString ("(", ", ", ")") case _ => "" + x } - // def ptIndent(x: Any) = ("" + x).replaceAll("\\n", " ") + def ptIndent(x: Any) = ("" + x).replaceAll("\\n", " ") def ptBlock(label: String, pairs: (String, Any)*): String = { if (pairs.isEmpty) label + "{ }" else { diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 0f92388993..42a9d9e456 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -167,10 +167,10 @@ trait Types extends api.Types { self: SymbolTable => log = Nil } finally unlock() } - // def size = { - // lock() - // try log.size finally unlock() - // } + def size = { + lock() + try log.size finally unlock() + } // `block` should not affect constraints on typevars def undo[T](block: => T): T = { @@ -184,18 +184,18 @@ trait Types extends api.Types { self: SymbolTable => } // if `block` evaluates to false, it should not affect constraints on typevars - // def undoUnless(block: => Boolean): Boolean = { - // lock() - // try { - // val before = log - // var result = false + def undoUnless(block: => Boolean): Boolean = { + lock() + try { + val before = log + var result = false - // try result = block - // finally if (!result) undoTo(before) + try result = block + finally if (!result) undoTo(before) - // result - // } finally unlock() - // } + result + } finally unlock() + } } /** A map from lists to compound types that have the given list as parents. @@ -292,7 +292,7 @@ trait Types extends api.Types { self: SymbolTable => abstract class TypeApiImpl extends TypeApi { this: Type => def declaration(name: Name): Symbol = decl(name) - // def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name) + def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name) def declarations = decls def typeArguments = typeArgs def erasure = this match { @@ -522,7 +522,7 @@ trait Types extends api.Types { self: SymbolTable => /** If this is a TypeRef `clazz`[`T`], return the argument `T` * otherwise return this type */ - // def remove(clazz: Symbol): Type = this + def remove(clazz: Symbol): Type = this /** For a curried/nullary method or poly type its non-method result type, * the type itself for all other types */ @@ -663,13 +663,13 @@ trait Types extends api.Types { self: SymbolTable => /** All members with the given flags, excluding bridges. */ - // def membersWithFlags(requiredFlags: Long): Scope = - // membersBasedOnFlags(BridgeFlags, requiredFlags) + def membersWithFlags(requiredFlags: Long): Scope = + membersBasedOnFlags(BridgeFlags, requiredFlags) /** All non-private members with the given flags, excluding bridges. */ - // def nonPrivateMembersWithFlags(requiredFlags: Long): Scope = - // membersBasedOnFlags(BridgeAndPrivateFlags, requiredFlags) + def nonPrivateMembersWithFlags(requiredFlags: Long): Scope = + membersBasedOnFlags(BridgeAndPrivateFlags, requiredFlags) /** The non-private member with given name, admitting members with given flags `admit`. 
* "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE @@ -806,7 +806,7 @@ trait Types extends api.Types { self: SymbolTable => else substThis(from, to).substSym(symsFrom, symsTo) /** Returns all parts of this type which satisfy predicate `p` */ - // def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p) collect this + def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p) collect this def withFilter(p: Type => Boolean) = new FilterMapForeach(p) class FilterMapForeach(p: Type => Boolean) extends FilterTypeCollector(p){ @@ -837,7 +837,7 @@ trait Types extends api.Types { self: SymbolTable => def contains(sym: Symbol): Boolean = new ContainsCollector(sym).collect(this) /** Does this type contain a reference to this type */ - // def containsTp(tp: Type): Boolean = new ContainsTypeCollector(tp).collect(this) + def containsTp(tp: Type): Boolean = new ContainsTypeCollector(tp).collect(this) /** Is this type a subtype of that type? */ def <:<(that: Type): Boolean = { @@ -900,9 +900,9 @@ trait Types extends api.Types { self: SymbolTable => ); /** Does this type implement symbol `sym` with same or stronger type? */ - // def specializes(sym: Symbol): Boolean = - // if (explainSwitch) explain("specializes", specializesSym, this, sym) - // else specializesSym(this, sym) + def specializes(sym: Symbol): Boolean = + if (explainSwitch) explain("specializes", specializesSym, this, sym) + else specializesSym(this, sym) /** Is this type close enough to that type so that members * with the two type would override each other? @@ -1243,7 +1243,7 @@ trait Types extends api.Types { self: SymbolTable => /** Remove any annotations from this type and from any * types embedded in this type. */ - // def stripAnnotations = StripAnnotationsMap(this) + def stripAnnotations = StripAnnotationsMap(this) /** Set the self symbol of an annotated type, or do nothing * otherwise. */ @@ -2917,14 +2917,14 @@ trait Types extends api.Types { self: SymbolTable => } // Not used yet. - // object HasTypeParams { - // def unapply(tp: Type): Option[(List[Symbol], Type)] = tp match { - // case AnnotatedType(_, tp, _) => unapply(tp) - // case ExistentialType(tparams, qtpe) => Some((tparams, qtpe)) - // case PolyType(tparams, restpe) => Some((tparams, restpe)) - // case _ => None - // } - // } + object HasTypeParams { + def unapply(tp: Type): Option[(List[Symbol], Type)] = tp match { + case AnnotatedType(_, tp, _) => unapply(tp) + case ExistentialType(tparams, qtpe) => Some((tparams, qtpe)) + case PolyType(tparams, restpe) => Some((tparams, restpe)) + case _ => None + } + } //@M // a TypeVar used to be a case class with only an origin and a constr @@ -3019,7 +3019,7 @@ trait Types extends api.Types { self: SymbolTable => require(params.nonEmpty, this) override def isHigherKinded = true - // override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName) + override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName) } /** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.) 
@@ -3035,9 +3035,9 @@ trait Types extends api.Types { self: SymbolTable => override def params: List[Symbol] = zippedArgs map (_._1) override def typeArgs: List[Type] = zippedArgs map (_._2) - // override protected def typeVarString = ( - // zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]") - // ) + override protected def typeVarString = ( + zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]") + ) } trait UntouchableTypeVar extends TypeVar { @@ -3357,7 +3357,7 @@ trait Types extends api.Types { self: SymbolTable => ).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#" } private def levelString = if (settings.explaintypes.value) level else "" - // protected def typeVarString = originName + protected def typeVarString = originName override def safeToString = ( if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>" else if (constr.inst ne NoType) "=?" + constr.inst @@ -3714,18 +3714,18 @@ trait Types extends api.Types { self: SymbolTable => * list given is List(AnyRefClass), the resulting type would be * e.g. Set[_ <: AnyRef] rather than Set[AnyRef] . */ - // def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = { - // tycon match { - // case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) => - // val eparams = typeParamsToExistentials(sym) - // val bounds = args map (TypeBounds upper _) - // foreach2(eparams, bounds)(_ setInfo _) - - // newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe))) - // case _ => - // appliedType(tycon, args) - // } - // } + def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = { + tycon match { + case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) => + val eparams = typeParamsToExistentials(sym) + val bounds = args map (TypeBounds upper _) + foreach2(eparams, bounds)(_ setInfo _) + + newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe))) + case _ => + appliedType(tycon, args) + } + } /** A creator and extractor for type parameterizations that strips empty type parameter lists. * Use this factory method to indicate the type has kind * (it's a polymorphic value) @@ -3829,16 +3829,16 @@ trait Types extends api.Types { self: SymbolTable => } /** Substitutes the empty scope for any non-empty decls in the type. 
*/ - // object dropAllRefinements extends TypeMap { - // def apply(tp: Type): Type = tp match { - // case rt @ RefinedType(parents, decls) if !decls.isEmpty => - // mapOver(copyRefinedType(rt, parents, EmptyScope)) - // case ClassInfoType(parents, decls, clazz) if !decls.isEmpty => - // mapOver(ClassInfoType(parents, EmptyScope, clazz)) - // case _ => - // mapOver(tp) - // } - // } + object dropAllRefinements extends TypeMap { + def apply(tp: Type): Type = tp match { + case rt @ RefinedType(parents, decls) if !decls.isEmpty => + mapOver(copyRefinedType(rt, parents, EmptyScope)) + case ClassInfoType(parents, decls, clazz) if !decls.isEmpty => + mapOver(ClassInfoType(parents, EmptyScope, clazz)) + case _ => + mapOver(tp) + } + } /** Type with all top-level occurrences of abstract types replaced by their bounds */ def abstractTypesToBounds(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala ) @@ -4855,14 +4855,14 @@ trait Types extends api.Types { self: SymbolTable => } } - // object StripAnnotationsMap extends TypeMap { - // def apply(tp: Type): Type = tp match { - // case AnnotatedType(_, atp, _) => - // mapOver(atp) - // case tp => - // mapOver(tp) - // } - // } + object StripAnnotationsMap extends TypeMap { + def apply(tp: Type): Type = tp match { + case AnnotatedType(_, atp, _) => + mapOver(atp) + case tp => + mapOver(tp) + } + } /** A map to convert every occurrence of a wildcard type to a fresh * type variable */ @@ -4925,7 +4925,7 @@ trait Types extends api.Types { self: SymbolTable => /** A map to implement the `filter` method. */ class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) { - // def withFilter(q: Type => Boolean) = new FilterTypeCollector(tp => p(tp) && q(tp)) + def withFilter(q: Type => Boolean) = new FilterTypeCollector(tp => p(tp) && q(tp)) override def collect(tp: Type) = super.collect(tp).reverse @@ -5868,7 +5868,7 @@ trait Types extends api.Types { self: SymbolTable => * useful as documentation; it is likely that !isNonValueType(tp) * will serve better than isValueType(tp). */ - // def isValueType(tp: Type) = isValueElseNonValue(tp) + def isValueType(tp: Type) = isValueElseNonValue(tp) /** SLS 3.3, Non-Value Types * Is the given type definitely a non-value type, as defined in SLS 3.3? @@ -5879,7 +5879,7 @@ trait Types extends api.Types { self: SymbolTable => * not designated non-value types because there is code which depends on using * them as type arguments, but their precise status is unclear. */ - // def isNonValueType(tp: Type) = !isValueElseNonValue(tp) + def isNonValueType(tp: Type) = !isValueElseNonValue(tp) def isNonRefinementClassType(tpe: Type) = tpe match { case SingleType(_, sym) => sym.isModuleClass @@ -5928,7 +5928,7 @@ trait Types extends api.Types { self: SymbolTable => corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg) } - // def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1 + def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1 /** Does type `tp1` conform to `tp2`? */ private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = { @@ -6136,14 +6136,14 @@ trait Types extends api.Types { self: SymbolTable => /** Are `tps1` and `tps2` lists of equal length such that all elements * of `tps1` conform to corresponding elements of `tps2`? 
*/ - // def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _) + def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _) /** Does type `tp` implement symbol `sym` with same or * stronger type? Exact only if `sym` is a member of some * refinement type, otherwise we might return false negatives. */ - // def specializesSym(tp: Type, sym: Symbol): Boolean = - // specializesSym(tp, sym, AnyDepth) + def specializesSym(tp: Type, sym: Symbol): Boolean = + specializesSym(tp, sym, AnyDepth) def specializesSym(tp: Type, sym: Symbol, depth: Int): Boolean = tp.typeSymbol == NothingClass || @@ -6586,10 +6586,10 @@ trait Types extends api.Types { self: SymbolTable => case _ => t } - // def elimRefinement(t: Type) = t match { - // case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents) - // case _ => t - // } + def elimRefinement(t: Type) = t match { + case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents) + case _ => t + } /** Eliminate from list of types all elements which are a subtype * of some other element of the list. */ @@ -6634,15 +6634,15 @@ trait Types extends api.Types { self: SymbolTable => (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true) else (lub(ts), false) - // def weakGlb(ts: List[Type]) = { - // if (ts.nonEmpty && (ts forall isNumericValueType)) { - // val nglb = numericGlb(ts) - // if (nglb != NoType) (nglb, true) - // else (glb(ts), false) - // } else if (ts exists typeHasAnnotations) { - // (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true) - // } else (glb(ts), false) - // } + def weakGlb(ts: List[Type]) = { + if (ts.nonEmpty && (ts forall isNumericValueType)) { + val nglb = numericGlb(ts) + if (nglb != NoType) (nglb, true) + else (glb(ts), false) + } else if (ts exists typeHasAnnotations) { + (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true) + } else (glb(ts), false) + } def numericLub(ts: List[Type]) = ts reduceLeft ((t1, t2) => @@ -6650,11 +6650,11 @@ trait Types extends api.Types { self: SymbolTable => else if (isNumericSubType(t2, t1)) t1 else IntClass.tpe) - // def numericGlb(ts: List[Type]) = - // ts reduceLeft ((t1, t2) => - // if (isNumericSubType(t1, t2)) t1 - // else if (isNumericSubType(t2, t1)) t2 - // else NoType) + def numericGlb(ts: List[Type]) = + ts reduceLeft ((t1, t2) => + if (isNumericSubType(t1, t2)) t1 + else if (isNumericSubType(t2, t1)) t2 + else NoType) def isWeakSubType(tp1: Type, tp2: Type) = tp1.deconst.normalize match { @@ -7017,8 +7017,8 @@ trait Types extends api.Types { self: SymbolTable => // Without this, the matchesType call would lead to type variables on both // sides of a subtyping/equality judgement, which can lead to recursive types // being constructed. See pos/t0851 for a situation where this happens. - // def suspendingTypeVarsInType[T](tp: Type)(op: => T): T = - // suspendingTypeVars(typeVarsInType(tp))(op) + def suspendingTypeVarsInType[T](tp: Type)(op: => T): T = + suspendingTypeVars(typeVarsInType(tp))(op) @inline final def suspendingTypeVars[T](tvs: List[TypeVar])(op: => T): T = { val saved = tvs map (_.suspended) @@ -7257,7 +7257,7 @@ trait Types extends api.Types { self: SymbolTable => /** Members which can be imported into other scopes. 
*/ - // def importableMembers(clazz: Symbol): Scope = importableMembers(clazz.info) + def importableMembers(clazz: Symbol): Scope = importableMembers(clazz.info) def importableMembers(pre: Type): Scope = pre.members filter isImportable def objToAny(tp: Type): Type = @@ -7353,7 +7353,7 @@ trait Types extends api.Types { self: SymbolTable => object TypesStats { import BaseTypeSeqsStats._ val rawTypeCount = Statistics.newCounter ("#raw type creations") - // val asSeenFromCount = Statistics.newCounter ("#asSeenFrom ops") + val asSeenFromCount = Statistics.newCounter ("#asSeenFrom ops") val subtypeCount = Statistics.newCounter ("#subtype ops") val sametypeCount = Statistics.newCounter ("#sametype ops") val lubCount = Statistics.newCounter ("#toplevel lubs/glbs") diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala index 4dfeb913ce..6170fcbb90 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala @@ -95,7 +95,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { // -- Basic input routines -------------------------------------------- /** Peek at the current byte without moving the read index */ - // def peekByte(): Int = bytes(readIndex) + def peekByte(): Int = bytes(readIndex) /** Read a byte */ def readByte(): Int = { diff --git a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala index 1f522e8ee3..94b2f77ff9 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala @@ -115,7 +115,7 @@ object PickleFormat { */ val MajorVersion = 5 val MinorVersion = 0 - // def VersionString = "V" + MajorVersion + "." + MinorVersion + def VersionString = "V" + MajorVersion + "." 
+ MinorVersion final val TERMname = 1 final val TYPEname = 2 diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 76a4af850c..c82546b552 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -186,8 +186,8 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { case _ => errorBadSignature("bad name tag: " + tag) } } - // protected def readTermName(): TermName = readName().toTermName - // protected def readTypeName(): TypeName = readName().toTypeName + protected def readTermName(): TermName = readName().toTermName + protected def readTypeName(): TypeName = readName().toTypeName private def readEnd() = readNat() + readIndex /** Read a symbol */ @@ -793,7 +793,7 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { protected def readTreeRef(): Tree = at(readNat(), readTree) protected def readTypeNameRef(): TypeName = readNameRef().toTypeName - // protected def readTermNameRef(): TermName = readNameRef().toTermName + protected def readTermNameRef(): TermName = readNameRef().toTermName protected def readTemplateRef(): Template = readTreeRef() match { @@ -829,10 +829,10 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { protected def errorBadSignature(msg: String) = throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg) - // protected def errorMissingRequirement(name: Name, owner: Symbol): Symbol = - // mirrorThatLoaded(owner).missingHook(owner, name) orElse MissingRequirementError.signal( - // s"bad reference while unpickling $filename: ${name.longString} not found in ${owner.tpe.widen}" - // ) + protected def errorMissingRequirement(name: Name, owner: Symbol): Symbol = + mirrorThatLoaded(owner).missingHook(owner, name) orElse MissingRequirementError.signal( + s"bad reference while unpickling $filename: ${name.longString} not found in ${owner.tpe.widen}" + ) def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type) {} // can't do it; need a compiler for that. 
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 8d2ac3565e..2ba15e0776 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -40,8 +40,8 @@ trait Collections { mforeach(xss)(x => if ((res eq null) && p(x)) res = Some(x)) if (res eq null) None else res } - // final def mfilter[A](xss: List[List[A]])(p: A => Boolean) = - // for (xs <- xss; x <- xs; if p(x)) yield x + final def mfilter[A](xss: List[List[A]])(p: A => Boolean) = + for (xs <- xss; x <- xs; if p(x)) yield x final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = { val lb = new ListBuffer[C] @@ -78,18 +78,18 @@ trait Collections { lb.toList } - // final def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = { - // val buf = new ListBuffer[A] - // val seen = mutable.Set[B]() - // xs foreach { x => - // val y = f(x) - // if (!seen(y)) { - // buf += x - // seen += y - // } - // } - // buf.toList - // } + final def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = { + val buf = new ListBuffer[A] + val seen = mutable.Set[B]() + xs foreach { x => + val y = f(x) + if (!seen(y)) { + buf += x + seen += y + } + } + buf.toList + } @tailrec final def flattensToEmpty(xss: Seq[Seq[_]]): Boolean = { xss.isEmpty || xss.head.isEmpty && flattensToEmpty(xss.tail) @@ -189,18 +189,18 @@ trait Collections { } false } - // final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = { - // var ys1 = xs1 - // var ys2 = xs2 - // while (!ys1.isEmpty && !ys2.isEmpty) { - // if (!f(ys1.head, ys2.head)) - // return false - - // ys1 = ys1.tail - // ys2 = ys2.tail - // } - // true - // } + final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = { + var ys1 = xs1 + var ys2 = xs2 + while (!ys1.isEmpty && !ys2.isEmpty) { + if (!f(ys1.head, ys2.head)) + return false + + ys1 = ys1.tail + ys2 = ys2.tail + } + true + } final def forall3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Boolean): Boolean = { var ys1 = xs1 var ys2 = xs2 @@ -223,5 +223,5 @@ trait Collections { } } -// object Collections extends Collections { } +object Collections extends Collections { } diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala index e580315285..4135f3c469 100644 --- a/src/reflect/scala/reflect/internal/util/HashSet.scala +++ b/src/reflect/scala/reflect/internal/util/HashSet.scala @@ -6,8 +6,8 @@ package scala.reflect.internal.util object HashSet { - // def apply[T >: Null <: AnyRef](): HashSet[T] = this(16) - // def apply[T >: Null <: AnyRef](label: String): HashSet[T] = this(label, 16) + def apply[T >: Null <: AnyRef](): HashSet[T] = this(16) + def apply[T >: Null <: AnyRef](label: String): HashSet[T] = this(label, 16) def apply[T >: Null <: AnyRef](initialCapacity: Int): HashSet[T] = this("No Label", initialCapacity) def apply[T >: Null <: AnyRef](label: String, initialCapacity: Int): HashSet[T] = new HashSet[T](label, initialCapacity) diff --git a/src/reflect/scala/reflect/internal/util/Origins.scala b/src/reflect/scala/reflect/internal/util/Origins.scala index a2b9e24ebc..3259a12163 100644 --- a/src/reflect/scala/reflect/internal/util/Origins.scala +++ b/src/reflect/scala/reflect/internal/util/Origins.scala @@ -6,7 +6,9 @@ package scala.reflect package internal.util +import NameTransformer._ import scala.collection.{ mutable, immutable } 
+import Origins._ /** A debugging class for logging from whence a method is being called. * Say you wanted to discover who was calling phase_= in SymbolTable. diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala index bbc95feaab..0725e9775b 100644 --- a/src/reflect/scala/reflect/internal/util/Position.scala +++ b/src/reflect/scala/reflect/internal/util/Position.scala @@ -128,7 +128,7 @@ abstract class Position extends scala.reflect.api.Position { self => def endOrPoint: Int = point @deprecated("use point instead", "2.9.0") - def offset: Option[Int] = if (isDefined) Some(point) else None // used by sbt + def offset: Option[Int] = if (isDefined) Some(point) else None /** The same position with a different start value (if a range) */ def withStart(off: Int): Position = this diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala index 4d10372662..bc2d0ee4db 100644 --- a/src/reflect/scala/reflect/internal/util/SourceFile.scala +++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala @@ -24,7 +24,7 @@ abstract class SourceFile { assert(offset < length, file + ": " + offset + " >= " + length) new OffsetPosition(this, offset) } - // def position(line: Int, column: Int) : Position = new OffsetPosition(this, lineToOffset(line) + column) + def position(line: Int, column: Int) : Position = new OffsetPosition(this, lineToOffset(line) + column) def offsetToLine(offset: Int): Int def lineToOffset(index : Int): Int @@ -37,8 +37,8 @@ abstract class SourceFile { def dbg(offset: Int) = (new OffsetPosition(this, offset)).dbgString def path = file.path - // def beginsWith(offset: Int, text: String): Boolean = - // (content drop offset) startsWith text + def beginsWith(offset: Int, text: String): Boolean = + (content drop offset) startsWith text def lineToString(index: Int): String = content drop lineToOffset(index) takeWhile (c => !isLineBreakChar(c.toChar)) mkString "" @@ -81,7 +81,7 @@ object ScriptSourceFile { } else 0 } - // def stripHeader(cs: Array[Char]): Array[Char] = cs drop headerLength(cs) + def stripHeader(cs: Array[Char]): Array[Char] = cs drop headerLength(cs) def apply(file: AbstractFile, content: Array[Char]) = { val underlying = new BatchSourceFile(file, content) @@ -91,6 +91,7 @@ object ScriptSourceFile { stripped } } +import ScriptSourceFile._ class ScriptSourceFile(underlying: BatchSourceFile, content: Array[Char], override val start: Int) extends BatchSourceFile(underlying.file, content) { override def isSelfContained = false diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala index 3e8de65869..bc02ad1058 100644 --- a/src/reflect/scala/reflect/internal/util/StringOps.scala +++ b/src/reflect/scala/reflect/internal/util/StringOps.scala @@ -16,24 +16,24 @@ package scala.reflect.internal.util * @version 1.0 */ trait StringOps { - // def onull(s: String) = if (s == null) "" else s - def oempty(xs: String*) = xs filterNot (x => x == null || x == "") - def ojoin(xs: String*): String = oempty(xs: _*) mkString " " - // def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep - // def ojoinOr(xs: Seq[String], sep: String, orElse: String) = { - // val ys = oempty(xs: _*) - // if (ys.isEmpty) orElse else ys mkString sep - // } - // def trimTrailingSpace(s: String) = { - // if (s.length == 0 || !s.charAt(s.length - 1).isWhitespace) s - // else { - // var idx 
= s.length - 1 - // while (idx >= 0 && s.charAt(idx).isWhitespace) - // idx -= 1 + def onull(s: String) = if (s == null) "" else s + def oempty(xs: String*) = xs filterNot (x => x == null || x == "") + def ojoin(xs: String*): String = oempty(xs: _*) mkString " " + def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep + def ojoinOr(xs: Seq[String], sep: String, orElse: String) = { + val ys = oempty(xs: _*) + if (ys.isEmpty) orElse else ys mkString sep + } + def trimTrailingSpace(s: String) = { + if (s.length == 0 || !s.charAt(s.length - 1).isWhitespace) s + else { + var idx = s.length - 1 + while (idx >= 0 && s.charAt(idx).isWhitespace) + idx -= 1 - // s.substring(0, idx + 1) - // } - // } + s.substring(0, idx + 1) + } + } def longestCommonPrefix(xs: List[String]): String = { if (xs.isEmpty || xs.contains("")) "" else xs.head.head match { @@ -57,13 +57,13 @@ trait StringOps { def words(str: String): List[String] = decompose(str, ' ') - // def stripPrefixOpt(str: String, prefix: String): Option[String] = - // if (str startsWith prefix) Some(str drop prefix.length) - // else None + def stripPrefixOpt(str: String, prefix: String): Option[String] = + if (str startsWith prefix) Some(str drop prefix.length) + else None - // def stripSuffixOpt(str: String, suffix: String): Option[String] = - // if (str endsWith suffix) Some(str dropRight suffix.length) - // else None + def stripSuffixOpt(str: String, suffix: String): Option[String] = + if (str endsWith suffix) Some(str dropRight suffix.length) + else None def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] = splitAt(str, str indexWhere f, doDropIndex) diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala index 8208097d5c..8e2bcc2ff7 100644 --- a/src/reflect/scala/reflect/internal/util/TableDef.scala +++ b/src/reflect/scala/reflect/internal/util/TableDef.scala @@ -67,11 +67,11 @@ class TableDef[T](_cols: Column[T]*) { override def toString = allToSeq mkString "\n" } - // def formatterFor(rows: Seq[T]): T => String = { - // val formatStr = new Table(rows).rowFormat + def formatterFor(rows: Seq[T]): T => String = { + val formatStr = new Table(rows).rowFormat - // x => formatStr.format(colApply(x) : _*) - // } + x => formatStr.format(colApply(x) : _*) + } def table(rows: Seq[T]) = new Table(rows) diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala index abedda8737..7ea8a75417 100644 --- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala +++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala @@ -12,10 +12,12 @@ trait TraceSymbolActivity { if (enabled && global.isCompilerUniverse) scala.sys addShutdownHook showAllSymbols() + private type Set[T] = scala.collection.immutable.Set[T] + val allSymbols = mutable.Map[Int, Symbol]() val allChildren = mutable.Map[Int, List[Int]]() withDefaultValue Nil val prevOwners = mutable.Map[Int, List[(Int, Phase)]]() withDefaultValue Nil - // val symsCaused = mutable.Map[Int, Int]() withDefaultValue 0 + val symsCaused = mutable.Map[Int, Int]() withDefaultValue 0 val allTrees = mutable.Set[Tree]() def recordSymbolsInTree(tree: Tree) { diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala index 41e74f80e9..9882aad5e5 100644 --- 
a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala +++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala @@ -1,6 +1,9 @@ package scala.reflect.internal.util import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer +import scala.collection.mutable.Builder +import scala.collection.mutable.SetBuilder import scala.collection.generic.Clearable import scala.runtime.AbstractFunction1 diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index de37176cd5..15befb67f1 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -14,9 +14,9 @@ import scala.collection.mutable.ArrayBuffer /** * An abstraction over files for use in the reflection/compiler libraries. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' - * + * * @author Philippe Altherr * @version 1.0, 23/03/2004 */ @@ -85,7 +85,7 @@ object AbstractFile { * all other cases, the class SourceFile is used, which honors * global.settings.encoding.value. *
- * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ abstract class AbstractFile extends Iterable[AbstractFile] { @@ -195,9 +195,9 @@ abstract class AbstractFile extends Iterable[AbstractFile] { * @param directory ... * @return ... */ - // def lookupPath(path: String, directory: Boolean): AbstractFile = { - // lookup((f, p, dir) => f.lookupName(p, dir), path, directory) - // } + def lookupPath(path: String, directory: Boolean): AbstractFile = { + lookup((f, p, dir) => f.lookupName(p, dir), path, directory) + } /** Return an abstract file that does not check that `path` denotes * an existing file. diff --git a/src/reflect/scala/reflect/io/Directory.scala b/src/reflect/scala/reflect/io/Directory.scala index 3a21509457..c040d1eac5 100644 --- a/src/reflect/scala/reflect/io/Directory.scala +++ b/src/reflect/scala/reflect/io/Directory.scala @@ -14,12 +14,12 @@ import java.io.{ File => JFile } * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object Directory { - import scala.util.Properties.{ userHome, userDir } + import scala.util.Properties.{ tmpDir, userHome, userDir } private def normalizePath(s: String) = Some(apply(Path(s).normalize)) def Current: Option[Directory] = if (userDir == "") None else normalizePath(userDir) - // def Home: Option[Directory] = if (userHome == "") None else normalizePath(userHome) - // def TmpDir: Option[Directory] = if (tmpDir == "") None else normalizePath(tmpDir) + def Home: Option[Directory] = if (userHome == "") None else normalizePath(userHome) + def TmpDir: Option[Directory] = if (tmpDir == "") None else normalizePath(tmpDir) def apply(path: Path): Directory = path.toDirectory @@ -30,19 +30,20 @@ object Directory { path.createDirectory() } } +import Path._ /** An abstraction for directories. * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This is library is considered experimental and should not be used unless you know what you are doing.'' */ class Directory(jfile: JFile) extends Path(jfile) { override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory override def toDirectory: Directory = this override def toFile: File = new File(jfile) - // override def isValid = jfile.isDirectory() || !jfile.exists() + override def isValid = jfile.isDirectory() || !jfile.exists() override def normalize: Directory = super.normalize.toDirectory /** An iterator over the contents of this directory. @@ -59,7 +60,7 @@ class Directory(jfile: JFile) extends Path(jfile) { override def walkFilter(cond: Path => Boolean): Iterator[Path] = list filter cond flatMap (_ walkFilter cond) - // def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList()) + def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList()) def deepFiles: Iterator[File] = Path.onlyFiles(deepList()) /** If optional depth argument is not given, will recurse @@ -73,6 +74,6 @@ class Directory(jfile: JFile) extends Path(jfile) { /** An iterator over the directories underneath this directory, * to the (optionally) given depth. 
*/ - // def subdirs(depth: Int = 1): Iterator[Directory] = - // deepList(depth) collect { case x: Directory => x } + def subdirs(depth: Int = 1): Iterator[Directory] = + deepList(depth) collect { case x: Directory => x } } diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala index 04e122af67..736ba5d51e 100644 --- a/src/reflect/scala/reflect/io/File.scala +++ b/src/reflect/scala/reflect/io/File.scala @@ -35,12 +35,12 @@ object File { type HasClose = { def close(): Unit } - // def closeQuietly(target: HasClose) { - // try target.close() catch { case e: IOException => } - // } - // def closeQuietly(target: JCloseable) { - // try target.close() catch { case e: IOException => } - // } + def closeQuietly(target: HasClose) { + try target.close() catch { case e: IOException => } + } + def closeQuietly(target: JCloseable) { + try target.close() catch { case e: IOException => } + } // this is a workaround for http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6503430 // we are using a static initializer to statically initialize a java class so we don't @@ -65,8 +65,8 @@ object File { // case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => () // } } -// import File._ -// import Path._ +import File._ +import Path._ /** An abstraction for files. For character data, a Codec * can be supplied at either creation time or when a method @@ -76,19 +76,19 @@ object File { * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This is library is considered experimental and should not be used unless you know what you are doing.'' */ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars { override val creationCodec = constructorCodec - // def withCodec(codec: Codec): File = new File(jfile)(codec) + def withCodec(codec: Codec): File = new File(jfile)(codec) override def addExtension(ext: String): File = super.addExtension(ext).toFile override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile override def toDirectory: Directory = new Directory(jfile) override def toFile: File = this override def normalize: File = super.normalize.toFile - // override def isValid = jfile.isFile() || !jfile.exists() + override def isValid = jfile.isFile() || !jfile.exists() override def length = super[Path].length override def walkFilter(cond: Path => Boolean): Iterator[Path] = if (cond(this)) Iterator.single(this) else Iterator.empty @@ -99,14 +99,14 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w /** Obtains a OutputStream. */ def outputStream(append: Boolean = false) = new FileOutputStream(jfile, append) def bufferedOutput(append: Boolean = false) = new BufferedOutputStream(outputStream(append)) - // def printStream(append: Boolean = false) = new PrintStream(outputStream(append), true) + def printStream(append: Boolean = false) = new PrintStream(outputStream(append), true) /** Obtains an OutputStreamWriter wrapped around a FileOutputStream. * This should behave like a less broken version of java.io.FileWriter, * in that unlike the java version you can specify the encoding. 
*/ - // def writer(): OutputStreamWriter = writer(false) - // def writer(append: Boolean): OutputStreamWriter = writer(append, creationCodec) + def writer(): OutputStreamWriter = writer(false) + def writer(append: Boolean): OutputStreamWriter = writer(append, creationCodec) def writer(append: Boolean, codec: Codec): OutputStreamWriter = new OutputStreamWriter(outputStream(append), codec.charSet) @@ -118,7 +118,7 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w new BufferedWriter(writer(append, codec)) def printWriter(): PrintWriter = new PrintWriter(bufferedWriter(), true) - // def printWriter(append: Boolean): PrintWriter = new PrintWriter(bufferedWriter(append), true) + def printWriter(append: Boolean): PrintWriter = new PrintWriter(bufferedWriter(append), true) /** Creates a new file and writes all the Strings to it. */ def writeAll(strings: String*): Unit = { @@ -127,11 +127,11 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w finally out.close() } - // def writeBytes(bytes: Array[Byte]): Unit = { - // val out = bufferedOutput() - // try out write bytes - // finally out.close() - // } + def writeBytes(bytes: Array[Byte]): Unit = { + val out = bufferedOutput() + try out write bytes + finally out.close() + } def appendAll(strings: String*): Unit = { val out = bufferedWriter(append = true) @@ -150,38 +150,38 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w try Some(slurp()) catch { case _: IOException => None } - // def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = { - // val CHUNK = 1024 * 1024 * 16 // 16 MB - // val dest = destPath.toFile - // if (!isValid) fail("Source %s is not a valid file." format name) - // if (this.normalize == dest.normalize) fail("Source and destination are the same.") - // if (!dest.parent.exists) fail("Destination cannot be created.") - // if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.") - // if (dest.isDirectory) fail("Destination exists but is a directory.") - - // lazy val in_s = inputStream() - // lazy val out_s = dest.outputStream() - // lazy val in = in_s.getChannel() - // lazy val out = out_s.getChannel() - - // try { - // val size = in.size() - // var pos, count = 0L - // while (pos < size) { - // count = (size - pos) min CHUNK - // pos += out.transferFrom(in, pos, count) - // } - // } - // finally List[HasClose](out, out_s, in, in_s) foreach closeQuietly - - // if (this.length != dest.length) - // fail("Failed to completely copy %s to %s".format(name, dest.name)) - - // if (preserveFileDate) - // dest.lastModified = this.lastModified - - // true - // } + def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = { + val CHUNK = 1024 * 1024 * 16 // 16 MB + val dest = destPath.toFile + if (!isValid) fail("Source %s is not a valid file." 
format name) + if (this.normalize == dest.normalize) fail("Source and destination are the same.") + if (!dest.parent.exists) fail("Destination cannot be created.") + if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.") + if (dest.isDirectory) fail("Destination exists but is a directory.") + + lazy val in_s = inputStream() + lazy val out_s = dest.outputStream() + lazy val in = in_s.getChannel() + lazy val out = out_s.getChannel() + + try { + val size = in.size() + var pos, count = 0L + while (pos < size) { + count = (size - pos) min CHUNK + pos += out.transferFrom(in, pos, count) + } + } + finally List[HasClose](out, out_s, in, in_s) foreach closeQuietly + + if (this.length != dest.length) + fail("Failed to completely copy %s to %s".format(name, dest.name)) + + if (preserveFileDate) + dest.lastModified = this.lastModified + + true + } /** Reflection since we're into the java 6+ API. */ diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index 77b5065db1..36fdc04db4 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -27,7 +27,7 @@ import scala.language.implicitConversions * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object Path { @@ -65,11 +65,11 @@ object Path { def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs filter (_.isDirectory) map (_.toDirectory) def onlyDirs(xs: List[Path]): List[Directory] = xs filter (_.isDirectory) map (_.toDirectory) def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile) - // def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile) + def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile) def roots: List[Path] = java.io.File.listRoots().toList map Path.apply - // def apply(segments: Seq[String]): Path = apply(segments mkString java.io.File.separator) + def apply(segments: Seq[String]): Path = apply(segments mkString java.io.File.separator) def apply(path: String): Path = apply(new JFile(path)) def apply(jfile: JFile): Path = if (jfile.isFile) new File(jfile) @@ -84,7 +84,7 @@ import Path._ /** The Path constructor is private so we can enforce some * semantics regarding how a Path might relate to the world. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class Path private[io] (val jfile: JFile) { @@ -95,7 +95,7 @@ class Path private[io] (val jfile: JFile) { // contents of the filesystem are in agreement. All objects are // valid except File objects whose path points to a directory and // Directory objects whose path points to a file. 
- // def isValid: Boolean = true + def isValid: Boolean = true // conversions def toFile: File = new File(jfile) @@ -136,7 +136,7 @@ class Path private[io] (val jfile: JFile) { def name: String = jfile.getName() def path: String = jfile.getPath() def normalize: Path = Path(jfile.getAbsolutePath()) - // def isRootPath: Boolean = roots exists (_ isSame this) + def isRootPath: Boolean = roots exists (_ isSame this) def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other) def relativize(other: Path) = { @@ -153,7 +153,7 @@ class Path private[io] (val jfile: JFile) { } // derived from identity - // def root: Option[Path] = roots find (this startsWith _) + def root: Option[Path] = roots find (this startsWith _) def segments: List[String] = (path split separator).toList filterNot (_.length == 0) /** * @return The path of the parent directory, or root if path is already root @@ -213,22 +213,22 @@ class Path private[io] (val jfile: JFile) { def canRead = jfile.canRead() def canWrite = jfile.canWrite() def exists = jfile.exists() - // def notExists = try !jfile.exists() catch { case ex: SecurityException => false } + def notExists = try !jfile.exists() catch { case ex: SecurityException => false } def isFile = jfile.isFile() def isDirectory = jfile.isDirectory() def isAbsolute = jfile.isAbsolute() - // def isHidden = jfile.isHidden() + def isHidden = jfile.isHidden() def isEmpty = path.length == 0 // Information def lastModified = jfile.lastModified() - // def lastModified_=(time: Long) = jfile setLastModified time // should use setXXX function? + def lastModified_=(time: Long) = jfile setLastModified time // should use setXXX function? def length = jfile.length() // Boolean path comparisons def endsWith(other: Path) = segments endsWith other.segments - // def startsWith(other: Path) = segments startsWith other.segments + def startsWith(other: Path) = segments startsWith other.segments def isSame(other: Path) = toCanonical == other.toCanonical def isFresher(other: Path) = lastModified > other.lastModified @@ -248,7 +248,7 @@ class Path private[io] (val jfile: JFile) { // deletions def delete() = jfile.delete() - // def deleteIfExists() = if (jfile.exists()) delete() else false + def deleteIfExists() = if (jfile.exists()) delete() else false /** Deletes the path recursively. Returns false on failure. * Use with caution! @@ -270,11 +270,11 @@ class Path private[io] (val jfile: JFile) { length == 0 } - // def touch(modTime: Long = System.currentTimeMillis) = { - // createFile() - // if (isFile) - // lastModified = modTime - // } + def touch(modTime: Long = System.currentTimeMillis) = { + createFile() + if (isFile) + lastModified = modTime + } // todo // def copyTo(target: Path, options ...): Boolean diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala index 6ee51d3d37..82b0568657 100644 --- a/src/reflect/scala/reflect/io/PlainFile.scala +++ b/src/reflect/scala/reflect/io/PlainFile.scala @@ -8,17 +8,17 @@ package scala.reflect package io import java.io.{ FileInputStream, FileOutputStream, IOException } - +import PartialFunction._ /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object PlainFile { /** * If the specified File exists, returns an abstract file backed * by it. Otherwise, returns null. 
*/ - // def fromPath(file: Path): PlainFile = - // if (file.isDirectory) new PlainDirectory(file.toDirectory) - // else if (file.isFile) new PlainFile(file) - // else null + def fromPath(file: Path): PlainFile = + if (file.isDirectory) new PlainDirectory(file.toDirectory) + else if (file.isFile) new PlainFile(file) + else null } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { @@ -28,7 +28,7 @@ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { } /** This class implements an abstract file backed by a File. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainFile(val givenPath: Path) extends AbstractFile { diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala index 615f44acc5..61ec8a4c23 100644 --- a/src/reflect/scala/reflect/io/Streamable.scala +++ b/src/reflect/scala/reflect/io/Streamable.scala @@ -17,14 +17,14 @@ import Path.fail * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object Streamable { /** Traits which can be viewed as a sequence of bytes. Source types * which know their length should override def length: Long for more * efficient method implementations. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ trait Bytes { @@ -69,7 +69,7 @@ object Streamable { } /** For objects which can be viewed as Chars. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ trait Chars extends Bytes { @@ -81,7 +81,7 @@ object Streamable { */ def creationCodec: Codec = implicitly[Codec] - // def chars(): BufferedSource = chars(creationCodec) + def chars(): BufferedSource = chars(creationCodec) def chars(codec: Codec): BufferedSource = Source.fromInputStream(inputStream())(codec) def lines(): Iterator[String] = lines(creationCodec) @@ -89,7 +89,7 @@ object Streamable { /** Obtains an InputStreamReader wrapped around a FileInputStream. */ - // def reader(): InputStreamReader = reader(creationCodec) + def reader(): InputStreamReader = reader(creationCodec) def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream, codec.charSet) /** Wraps a BufferedReader around the result of reader(). diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala index 72ffff2aa9..78713c2ae0 100644 --- a/src/reflect/scala/reflect/io/VirtualDirectory.scala +++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala @@ -11,7 +11,7 @@ import scala.collection.mutable * An in-memory directory. 
* * @author Lex Spoon - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory]) @@ -26,8 +26,7 @@ extends AbstractFile { def container = maybeContainer.get def isDirectory = true - val lastModified: Long = System.currentTimeMillis - // var lastModified: Long = System.currentTimeMillis + var lastModified: Long = System.currentTimeMillis override def file = null override def input = sys.error("directories cannot be read") diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala index 014e02c6cd..95f4429fad 100644 --- a/src/reflect/scala/reflect/io/VirtualFile.scala +++ b/src/reflect/scala/reflect/io/VirtualFile.scala @@ -14,7 +14,7 @@ import java.io.{ File => JFile } * * @author Philippe Altherr * @version 1.0, 23/03/2004 - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class VirtualFile(val name: String, override val path: String) extends AbstractFile { @@ -65,7 +65,7 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF /** Returns the time that this abstract file was last modified. */ private var _lastModified: Long = 0 def lastModified: Long = _lastModified - // def lastModified_=(x: Long) = _lastModified = x + def lastModified_=(x: Long) = _lastModified = x /** Returns all abstract subfiles of this abstract directory. */ def iterator: Iterator[AbstractFile] = { diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 0e69834d26..3b57721e89 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -20,12 +20,12 @@ import scala.annotation.tailrec * @author Philippe Altherr (original version) * @author Paul Phillips (this one) * @version 2.0, - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object ZipArchive { - // def fromPath(path: String): FileZipArchive = fromFile(new JFile(path)) - // def fromPath(path: Path): FileZipArchive = fromFile(path.toFile) + def fromPath(path: String): FileZipArchive = fromFile(new JFile(path)) + def fromPath(path: Path): FileZipArchive = fromFile(path.toFile) /** * @param file a File @@ -41,7 +41,7 @@ object ZipArchive { * @return A ZipArchive backed by the given url. 
*/ def fromURL(url: URL): URLZipArchive = new URLZipArchive(url) - // def fromURL(url: String): URLZipArchive = fromURL(new URL(url)) + def fromURL(url: String): URLZipArchive = fromURL(new URL(url)) private def dirName(path: String) = splitPath(path, true) private def baseName(path: String) = splitPath(path, false) @@ -79,7 +79,7 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq else Iterator(f) } } - // def deepIterator = walkIterator(iterator) + def deepIterator = walkIterator(iterator) /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) { // have to keep this name for compat with sbt's compiler-interface diff --git a/src/reflect/scala/reflect/macros/TreeBuilder.scala b/src/reflect/scala/reflect/macros/TreeBuilder.scala index fbbbe13201..204dc40858 100644 --- a/src/reflect/scala/reflect/macros/TreeBuilder.scala +++ b/src/reflect/scala/reflect/macros/TreeBuilder.scala @@ -11,6 +11,7 @@ abstract class TreeBuilder { val global: Universe import global._ + import definitions._ /** Builds a reference to value whose type is given stable prefix. * The type must be suitable for this. For example, it diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 07599e095d..44fbd55162 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -22,6 +22,7 @@ import internal.Flags._ import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance} import scala.language.existentials import scala.runtime.{ScalaRunTime, BoxesRunTime} +import scala.reflect.internal.util.Collections._ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable => @@ -840,13 +841,13 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni * @return A Scala field object that corresponds to `jfield`. * // ??? should we return the getter instead? */ - // def fieldToScala(jfield: jField): TermSymbol = - // toScala(fieldCache, jfield)(_ fieldToScala1 _) + def fieldToScala(jfield: jField): TermSymbol = + toScala(fieldCache, jfield)(_ fieldToScala1 _) - // private def fieldToScala1(jfield: jField): TermSymbol = { - // val owner = followStatic(classToScala(jfield.getDeclaringClass), jfield.getModifiers) - // (lookup(owner, jfield.getName) suchThat (!_.isMethod) orElse jfieldAsScala(jfield)).asTerm - // } + private def fieldToScala1(jfield: jField): TermSymbol = { + val owner = followStatic(classToScala(jfield.getDeclaringClass), jfield.getModifiers) + (lookup(owner, jfield.getName) suchThat (!_.isMethod) orElse jfieldAsScala(jfield)).asTerm + } /** * The Scala package corresponding to given Java package @@ -1114,9 +1115,9 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni /** Optionally, the Java package corresponding to a given Scala package, or None if no such Java package exists. * @param pkg The Scala package */ - // def packageToJavaOption(pkg: ModuleSymbol): Option[jPackage] = packageCache.toJavaOption(pkg) { - // Option(jPackage.getPackage(pkg.fullName.toString)) - // } + def packageToJavaOption(pkg: ModuleSymbol): Option[jPackage] = packageCache.toJavaOption(pkg) { + Option(jPackage.getPackage(pkg.fullName.toString)) + } /** The Java class corresponding to given Scala class. 
* Note: This only works for diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index a12e7d43d4..0f70a676fa 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -1,6 +1,8 @@ package scala.reflect package runtime +import internal.{SomePhase, NoPhase, Phase, TreeGen} + /** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders. * * Should not be instantiated directly, use [[scala.reflect.runtime.universe]] instead. @@ -9,7 +11,7 @@ package runtime */ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self => - def picklerPhase = internal.SomePhase + def picklerPhase = SomePhase def forInteractive = false def forScaladoc = false @@ -24,3 +26,4 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S init() } + diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index b415abecb1..366b4319c3 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -83,8 +83,8 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb override protected def createPackageObjectClassSymbol(pos: Position, newFlags: Long): PackageObjectClassSymbol = new PackageObjectClassSymbol(this, pos) with SynchronizedClassSymbol initFlags newFlags - // override protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol = - // new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags + override protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol = + new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags override protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol = new MethodSymbol(this, pos, name) with SynchronizedMethodSymbol initFlags newFlags diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala index eadbc0c52e..b97913daf0 100644 --- a/src/reflect/scala/reflect/runtime/package.scala +++ b/src/reflect/scala/reflect/runtime/package.scala @@ -6,7 +6,7 @@ package scala.reflect package object runtime { /** The entry point into Scala runtime reflection. - * + * * To use Scala runtime reflection, simply use or import `scala.reflect.runtime.universe._` * * See [[scala.reflect.api.Universe]] or the -- cgit v1.2.3 From f89394ee3b3f95d982382d6ee2c2b74af0c02113 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 02:59:33 -0800 Subject: Removing ancient comments and pointless comments. Translating into backticks. Removed the "@param tree ..." blocks which have been taunting me for half a decade now. Removed commented-out blocks of code which had been sitting there for two years or more. 
--- src/compiler/scala/tools/nsc/PhaseAssembly.scala | 2 +- .../scala/tools/nsc/ast/parser/MarkupParsers.scala | 9 -- .../scala/tools/nsc/ast/parser/Parsers.scala | 2 +- .../tools/nsc/backend/WorklistAlgorithm.scala | 2 - .../tools/nsc/backend/icode/BasicBlocks.scala | 8 +- .../scala/tools/nsc/backend/icode/GenICode.scala | 24 +-- .../tools/nsc/backend/icode/ICodeCheckers.scala | 7 +- .../backend/icode/analysis/CopyPropagation.scala | 12 +- .../scala/tools/nsc/backend/jvm/GenJVM.scala | 8 - src/compiler/scala/tools/nsc/plugins/Plugin.scala | 2 +- .../tools/nsc/plugins/PluginDescription.scala | 2 +- .../tools/nsc/reporters/ConsoleReporter.scala | 8 - .../scala/tools/nsc/symtab/classfile/Pickler.scala | 2 - .../scala/tools/nsc/transform/ExplicitOuter.scala | 5 - src/compiler/scala/tools/nsc/transform/Mixin.scala | 12 +- .../scala/tools/nsc/transform/TailCalls.scala | 2 +- .../tools/nsc/typechecker/ConstantFolder.scala | 3 - .../scala/tools/nsc/typechecker/EtaExpansion.scala | 9 +- .../scala/tools/nsc/typechecker/Infer.scala | 31 ---- .../scala/tools/nsc/typechecker/Typers.scala | 168 +++------------------ .../scala/tools/nsc/typechecker/Variances.scala | 4 +- .../scala/collection/immutable/RedBlackTree.scala | 6 +- .../scala/concurrent/FutureTaskRunner.scala | 2 +- src/library/scala/xml/Elem.scala | 2 +- src/library/scala/xml/Node.scala | 10 +- src/reflect/scala/reflect/internal/Scopes.scala | 13 -- src/reflect/scala/reflect/internal/Symbols.scala | 7 +- src/reflect/scala/reflect/internal/Types.scala | 6 - .../reflect/internal/pickling/PickleBuffer.scala | 17 +-- .../reflect/internal/pickling/UnPickler.scala | 6 +- .../scala/reflect/internal/util/StringOps.scala | 7 - src/reflect/scala/reflect/io/AbstractFile.scala | 24 ++- src/reflect/scala/reflect/io/VirtualFile.scala | 11 -- 33 files changed, 77 insertions(+), 356 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala index bef81b6ff3..67dc1e3b66 100644 --- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala +++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala @@ -182,7 +182,7 @@ trait PhaseAssembly { /** Remove all nodes in the given graph, that have no phase object * Make sure to clean up all edges when removing the node object - * Inform with warnings, if an external phase has a + * `Inform` with warnings, if an external phase has a * dependency on something that is dropped. */ def removeDanglingNodes() { diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index ab2afcb403..639780149e 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -24,12 +24,6 @@ import scala.reflect.internal.Chars.{ SU, LF } // I rewrote most of these, but not as yet the library versions: so if you are // tempted to touch any of these, please be aware of that situation and try not // to let it get any worse. -- paulp - -/** This trait ... - * - * @author Burak Emir - * @version 1.0 - */ trait MarkupParsers { self: Parsers => @@ -216,9 +210,6 @@ trait MarkupParsers { /** Returns true if it encounters an end tag (without consuming it), * appends trees to ts as side-effect. - * - * @param ts ... - * @return ... 
*/ private def content_LT(ts: ArrayBuffer[Tree]): Boolean = { if (ch == '/') diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index efcde1f74f..c934f34398 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -94,7 +94,7 @@ trait ParsersCommon extends ScannersCommon { *
    *
  1. * Places all pattern variables in Bind nodes. In a pattern, for - * identifiers x:
    + *      identifiers `x`:
      *                 x  => x @ _
      *               x:T  => x @ (_ : T)
    *
  2. diff --git a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala index 49dc105c79..45ca39fee4 100644 --- a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala +++ b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala @@ -31,8 +31,6 @@ trait WorklistAlgorithm { * Run the iterative algorithm until the worklist remains empty. * The initializer is run once before the loop starts and should * initialize the worklist. - * - * @param initWorklist ... */ def run(initWorklist: => Unit) = { initWorklist diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index b62d5cb4e4..7c7777f761 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -122,7 +122,7 @@ trait BasicBlocks { def closed: Boolean = hasFlag(CLOSED) def closed_=(b: Boolean) = if (b) setFlag(CLOSED) else resetFlag(CLOSED) - /** When set, the emit methods will be ignored. */ + /** When set, the `emit` methods will be ignored. */ def ignore: Boolean = hasFlag(IGNORING) def ignore_=(b: Boolean) = if (b) setFlag(IGNORING) else resetFlag(IGNORING) @@ -260,7 +260,7 @@ trait BasicBlocks { } } - /** Replaces oldInstr with is. It does not update + /** Replaces `oldInstr` with `is`. It does not update * the position field in the newly inserted instructions, so it behaves * differently than the one-instruction versions of this function. * @@ -289,8 +289,6 @@ trait BasicBlocks { } /** Removes instructions found at the given positions. - * - * @param positions ... */ def removeInstructionsAt(positions: Int*) { assert(closed, this) @@ -311,8 +309,6 @@ trait BasicBlocks { } /** Replaces all instructions found in the map. - * - * @param map ... */ def subst(map: Map[Instruction, Instruction]): Unit = if (!closed) diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 720896d0b3..e2436d0e90 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -14,8 +14,7 @@ import scala.tools.nsc.symtab._ import scala.annotation.switch import PartialFunction._ -/** This class ... - * +/** * @author Iulian Dragos * @version 1.0 */ @@ -159,8 +158,6 @@ abstract class GenICode extends SubComponent { * and not produce any value. Use genLoad for expressions which leave * a value on top of the stack. * - * @param tree ... - * @param ctx ... * @return a new context. This is necessary for control flow instructions * which may change the current basic block. */ @@ -263,11 +260,6 @@ abstract class GenICode extends SubComponent { } /** Generate primitive array operations. - * - * @param tree ... - * @param ctx ... - * @param code ... - * @return ... */ private def genArrayOp(tree: Tree, ctx: Context, code: Int, expectedType: TypeKind): (Context, TypeKind) = { import scalaPrimitives._ @@ -1386,10 +1378,6 @@ abstract class GenICode extends SubComponent { // } /** Generate string concatenation. - * - * @param tree ... - * @param ctx ... - * @return ... 
*/ def genStringConcat(tree: Tree, ctx: Context): Context = { liftStringConcat(tree) match { @@ -1703,8 +1691,6 @@ abstract class GenICode extends SubComponent { * If the block consists of a single unconditional jump, prune * it by replacing the instructions in the predecessor to jump * directly to the JUMP target of the block. - * - * @param method ... */ def prune(method: IMethod) = { var changed = false @@ -1968,10 +1954,6 @@ abstract class GenICode extends SubComponent { } /** Prepare a new context upon entry into a method. - * - * @param m ... - * @param d ... - * @return ... */ def enterMethod(m: IMethod, d: DefDef): Context = { val ctx1 = new Context(this) setMethod(m) @@ -2071,14 +2053,14 @@ abstract class GenICode extends SubComponent { * It returns the resulting context, with the same active handlers as * before the call. Use it like: * - * ctx.Try( ctx => { + * ` ctx.Try( ctx => { * ctx.bb.emit(...) // protected block * }, (ThrowableClass, * ctx => { * ctx.bb.emit(...); // exception handler * }), (AnotherExceptionClass, * ctx => {... - * } )) + * } ))` */ def Try(body: Context => Context, handlers: List[(Symbol, TypeKind, Context => Context)], diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index 221652723d..95913c7768 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -48,7 +48,7 @@ abstract class ICodeCheckers { * @author Iulian Dragos * @version 1.0, 06/09/2005 * - * @todo Better checks for MONITOR_ENTER/EXIT + * @todo Better checks for `MONITOR_ENTER/EXIT` * Better checks for local var initializations * * @todo Iulian says: I think there's some outdated logic in the checker. @@ -413,10 +413,7 @@ abstract class ICodeCheckers { } /** Checks that the object passed as receiver has a method - * method and that it is callable from the current method. - * - * @param receiver ... - * @param method ... + * `method` and that it is callable from the current method. */ def checkMethod(receiver: TypeKind, method: Symbol) = receiver match { diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala index 53111d0ade..4a5844531a 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala @@ -463,14 +463,9 @@ abstract class CopyPropagation { } } - /** Update the state s after the call to method. + /** Update the state `s` after the call to `method`. * The stack elements are dropped and replaced by the result of the call. * If the method is impure, all bindings to record fields are cleared. - * - * @param state ... - * @param method ... - * @param static ... - * @return ... */ final def simulateCall(state: copyLattice.State, method: Symbol, static: Boolean): copyLattice.State = { val out = new copyLattice.State(state.bindings, state.stack); @@ -554,10 +549,7 @@ abstract class CopyPropagation { bindings } - /** Is symbol m a pure method? - * - * @param m ... - * @return ... + /** Is symbol `m` a pure method? 
*/ final def isPureMethod(m: Symbol): Boolean = m.isGetter // abstract getters are still pure, as we 'know' diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index 06f94ef46c..31a4554d97 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -1153,9 +1153,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with var linearization: List[BasicBlock] = Nil var isModuleInitialized = false - /** - * @param m ... - */ def genCode(m: IMethod) { val jcode = jmethod.getCode.asInstanceOf[JExtendedCode] @@ -1605,11 +1602,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with } } - - /** - * @param primitive ... - * @param pos ... - */ def genPrimitive(primitive: Primitive, pos: Position) { primitive match { case Negation(kind) => diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 6c64ea907f..093f8285e1 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -71,7 +71,7 @@ object Plugin { } /** Try to load a plugin description from the specified - * file, returning None if it does not work. + * file, returning `None` if it does not work. */ private def loadDescription(jarfile: Path): Option[PluginDescription] = // XXX Return to this once we have some ARM support diff --git a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala index 9ecc098687..f77123ba11 100644 --- a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala +++ b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala @@ -26,7 +26,7 @@ abstract class PluginDescription { val classname: String /** An XML representation of this description. It can be - * read back using PluginDescription.fromXML. + * read back using `PluginDescription.fromXML`. * It should be stored inside the jar archive file. */ def toXML: Node = { diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala index 245ac6adaa..bda195f9d3 100644 --- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala @@ -34,9 +34,6 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr } /** Returns the number of errors issued totally as a string. - * - * @param severity ... - * @return ... */ private def getCountString(severity: Severity): String = StringOps.countElementsAsString((severity).count, label(severity)) @@ -52,17 +49,12 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr printMessage(pos, clabel(severity) + msg) } - /** - * @param pos ... - */ def printSourceLine(pos: Position) { printMessage(pos.lineContent.stripLineEnd) printColumnMarker(pos) } /** Prints the column marker of the given position. - * - * @param pos ... 
*/ def printColumnMarker(pos: Position) = if (pos.isDefined) { printMessage(" " * (pos.column - 1) + "^") } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 941604b154..9a8db447c3 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -149,8 +149,6 @@ abstract class Pickler extends SubComponent { } /** Store symbol in index. If symbol is local, also store everything it references. - * - * @param sym ... */ def putSymbol(sym: Symbol) { if (putEntry(sym)) { diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 13e7e17951..01c22245cb 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -234,11 +234,6 @@ abstract class ExplicitOuter extends InfoTransform *
    `base'.$outer$$C1 ... .$outer$$Cn
    * which refers to the outer instance of class to of * value base. The result is typed but not positioned. - * - * @param base ... - * @param from ... - * @param to ... - * @return ... */ protected def outerPath(base: Tree, from: Symbol, to: Symbol): Tree = { //Console.println("outerPath from "+from+" to "+to+" at "+base+":"+base.tpe) diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 8122dc38cf..4797a231f8 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -588,8 +588,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { tree } - /** Create a static reference to given symbol sym of the - * form M.sym where M is the symbol's implementation module. + /** Create a static reference to given symbol `sym` of the + * form `M.sym` where M is the symbol's implementation module. */ private def staticRef(sym: Symbol): Tree = { sym.owner.info //todo: needed? @@ -671,8 +671,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { def addValDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), ValDef(sym, rhs)) /** Add `newdefs` to `stats`, removing any abstract method definitions - * in stats that are matched by some symbol defined in - * newDefs. + * in `stats` that are matched by some symbol defined in + * `newDefs`. */ def add(stats: List[Tree], newDefs: List[Tree]) = { val newSyms = newDefs map (_.symbol) @@ -1144,9 +1144,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { qual case Apply(Select(qual, _), args) => - /** Changes qual.m(args) where m refers to an implementation + /** Changes `qual.m(args)` where m refers to an implementation * class method to Q.m(S, args) where Q is the implementation module of - * m and S is the self parameter for the call, which + * `m` and S is the self parameter for the call, which * is determined as follows: * - if qual != super, qual itself * - if qual == super, and we are in an implementation class, diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index 2e0cc3bd98..488b8aad4e 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -82,7 +82,7 @@ abstract class TailCalls extends Transform { * that label. *

    *

     - * Assumes: Uncurry has been run already, and no multiple
     + * Assumes: `Uncurry` has been run already, and no multiple
     * parameter lists exit.
     *

    */ diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala index a9f6e2517b..65bfd8e34e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala @@ -27,9 +27,6 @@ abstract class ConstantFolder { /** If tree is a constant value that can be converted to type `pt`, perform * the conversion. - * - * @param tree ... - * @param pt ... */ def apply(tree: Tree, pt: Type): Tree = fold(apply(tree), tree.tpe match { case ConstantType(x) => x convertTo pt diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala index 9e21a2b82d..2806d7b2d9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala +++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala @@ -33,7 +33,7 @@ trait EtaExpansion { self: Analyzer => } /**

     - * Expand partial function applications of type type.
     + * Expand partial function applications of type `type`.
     *

        *  p.f(es_1)...(es_n)
        *     ==>  {
    @@ -56,11 +56,8 @@ trait EtaExpansion { self: Analyzer =>
         }
         val defs = new ListBuffer[Tree]
     
    -    /** Append to defs value definitions for all non-stable
    -     *  subexpressions of the function application tree.
    -     *
    -     *  @param tree ...
    -     *  @return     ...
    +    /** Append to `defs` value definitions for all non-stable
    +     *  subexpressions of the function application `tree`.
          */
         def liftoutPrefix(tree: Tree): Tree = {
           def liftout(tree: Tree, byName: Boolean): Tree =
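For illustration only (a minimal sketch, not part of the patch; the method name `add` is invented), this is eta expansion as it appears in user code:

    def add(x: Int, y: Int): Int = x + y
    val f: (Int, Int) => Int = add _   // expanded to roughly (x, y) => add(x, y)
    f(1, 2)                            // == 3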
    diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
    index 6e42481d60..e1f05e1279 100644
    --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
    +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
    @@ -130,9 +130,6 @@ trait Infer extends Checkable {
       }
     
       /** A fresh type variable with given type parameter as origin.
    -   *
    -   *  @param tparam ...
    -   *  @return       ...
        */
       def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam)
     
    @@ -169,9 +166,6 @@ trait Infer extends Checkable {
       }
     
       /** Is type fully defined, i.e. no embedded anytypes or wildcards in it?
    -   *
    -   *  @param tp ...
    -   *  @return   ...
        */
       private[typechecker] def isFullyDefined(tp: Type): Boolean = tp match {
         case WildcardType | BoundedWildcardType(_) | NoType =>
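A loose intuition for the check above (illustrative only; `half` is an invented name): during inference the expected type of an expression may be completely unknown, i.e. a wildcard, or fully known:

    def half[A](pair: (A, A)): A = pair._1
    val x = half((1, 2))              // expected type is a wildcard; A is inferred from the argument alone
    val y: String = half(("a", "b"))  // expected type String is fully defined and can guide inference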
    @@ -457,11 +451,6 @@ trait Infer extends Checkable {
          *  its type parameters and result type and a prototype `pt`.
          *  If no minimal type variables exist that make the
          *  instantiated type a subtype of `pt`, return null.
    -     *
    -     *  @param tparams ...
    -     *  @param restpe  ...
    -     *  @param pt      ...
    -     *  @return        ...
          */
         private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean = false): (List[Type], List[TypeVar]) = {
           val tvars = tparams map freshVar
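A minimal user-level sketch of what this inference computes (the name `empty` is invented): the type argument is chosen so that the instantiated type conforms to the prototype:

    def empty[A]: List[A] = Nil
    val xs: List[Int] = empty   // A is instantiated to Int so that List[A] conforms to the expected List[Int]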
    @@ -494,12 +483,6 @@ trait Infer extends Checkable {
         *  in the value parameter list.
         *  If instantiation of a type parameter fails,
         *  take WildcardType for the proto-type argument.
    -    *
    -    *  @param tparams ...
    -    *  @param formals ...
    -    *  @param restype ...
    -    *  @param pt      ...
    -    *  @return        ...
         */
         def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
                           pt: Type): List[Type] = {
    @@ -927,10 +910,6 @@ trait Infer extends Checkable {
         /** Is type `ftpe1` strictly more specific than type `ftpe2`
          *  when both are alternatives in an overloaded function?
          *  @see SLS (sec:overloading-resolution)
    -     *
    -     *  @param ftpe1 ...
    -     *  @param ftpe2 ...
    -     *  @return      ...
          */
         def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = ftpe1 match {
           case OverloadedType(pre, alts) =>
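For illustration (invented names, not part of the patch), the specificity test above is what drives ordinary overload resolution:

    object Printer {
      def show(x: Any): String    = "any"
      def show(x: String): String = "string"
    }
    Printer.show("hello")   // both alternatives apply; show(String) is strictly more specific and wins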
    @@ -1173,11 +1152,6 @@ trait Infer extends Checkable {
     
         /** Substitute free type variables `undetparams` of polymorphic argument
          *  expression `tree` to `targs`, Error if `targs` is null.
    -     *
    -     *  @param tree ...
    -     *  @param undetparams ...
    -     *  @param targs ...
    -     *  @param pt ...
          */
         private def substExpr(tree: Tree, undetparams: List[Symbol], targs: List[Type], pt: Type) {
           if (targs eq null) {
    @@ -1640,8 +1614,6 @@ trait Infer extends Checkable {
     
         /** Try inference twice, once without views and once with views,
          *  unless views are already disabled.
    -     *
    -     *  @param infer ...
          */
         def tryTwice(infer: Boolean => Unit): Unit = {
           if (context.implicitsEnabled) {
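A loose illustration of the views mentioned above (invented names; views are ordinary implicit conversions, which the second inference attempt is allowed to use):

    import scala.language.implicitConversions
    implicit def intAsString(i: Int): String = i.toString
    def describe(s: String): Int = s.length
    describe(42)   // fails without views, succeeds once the Int => String view may be applied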
    @@ -1679,9 +1651,6 @@ trait Infer extends Checkable {
         /** Assign `tree` the type of all polymorphic alternatives
          *  with `nparams` as the number of type parameters, if it exists.
          *  If no such polymorphic alternative exist, error.
    -     *
    -     *  @param tree ...
    -     *  @param nparams ...
          */
         def inferPolyAlternatives(tree: Tree, argtypes: List[Type]): Unit = {
           val OverloadedType(pre, alts) = tree.tpe
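A small sketch of selecting a polymorphic alternative by explicit type application (invented names; assumed to be the situation this method handles):

    object O {
      def f(x: Int): Int = x
      def f[A](xs: List[A]): Int = xs.length
    }
    O.f[String](List("a", "b"))   // the explicit type argument selects the f[A](xs: List[A]) alternative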
    diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
    index 0a0ab53852..ee5446ee87 100644
    --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
    +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
    @@ -57,16 +57,6 @@ trait Typers extends Modes with Adaptations with Tags {
           super.traverse(tree)
         }
       }
    -/* needed for experimental version where early types can be type arguments
    -  class EarlyMap(clazz: Symbol) extends TypeMap {
    -    def apply(tp: Type): Type = tp match {
    -      case TypeRef(NoPrefix, sym, List()) if (sym hasFlag PRESUPER) =>
    -        TypeRef(ThisType(clazz), sym, List())
    -      case _ =>
    -        mapOver(tp)
    -    }
    -  }
    -*/
     
       sealed abstract class SilentResult[+T] {
         @inline final def map[U](f: T => U): SilentResult[U] = this match {
    @@ -240,10 +230,7 @@ trait Typers extends Modes with Adaptations with Tags {
           case _ => tp
         }
     
    -    /** Check that tree is a stable expression.
    -     *
    -     *  @param tree ...
    -     *  @return     ...
    +    /** Check that `tree` is a stable expression.
          */
         def checkStable(tree: Tree): Tree = (
           if (treeInfo.isExprSafeToInline(tree)) tree
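For illustration (invented names, a minimal sketch), stability as it surfaces in user code, for example when importing from a value:

    object Registry { val default = 1 }
    val stable = Registry
    import stable._        // fine: stable is a val, hence a stable identifier

    var unstable = Registry
    // import unstable._   // rejected: a stable identifier is required, but unstable is a var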
    @@ -297,11 +284,7 @@ trait Typers extends Modes with Adaptations with Tags {
           )
         }
     
    -    /** Check that type tp is not a subtype of itself.
    -     *
    -     *  @param pos ...
    -     *  @param tp  ...
    -     *  @return    true if tp is not a subtype of itself.
    +    /** Check that type `tp` is not a subtype of itself.
          */
         def checkNonCyclic(pos: Position, tp: Type): Boolean = {
           def checkNotLocked(sym: Symbol) = {
    @@ -316,12 +299,6 @@ trait Typers extends Modes with Adaptations with Tags {
     
             case SingleType(pre, sym) =>
               checkNotLocked(sym)
    -/*
    -        case TypeBounds(lo, hi) =>
    -          var ok = true
    -          for (t <- lo) ok = ok & checkNonCyclic(pos, t)
    -          ok
    -*/
             case st: SubType =>
               checkNonCyclic(pos, st.supertype)
             case ct: CompoundType =>
    @@ -375,13 +352,9 @@ trait Typers extends Modes with Adaptations with Tags {
           private var scope: Scope = _
           private var hiddenSymbols: List[Symbol] = _
     
    -      /** Check that type tree does not refer to private
    +      /** Check that type `tree` does not refer to private
            *  components unless itself is wrapped in something private
    -       *  (owner tells where the type occurs).
    -       *
    -       *  @param owner ...
    -       *  @param tree  ...
    -       *  @return      ...
    +       *  (`owner` tells where the type occurs).
            */
           def privates[T <: Tree](owner: Symbol, tree: T): T =
             check(owner, EmptyScope, WildcardType, tree)
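For illustration (invented names), the kind of escape this check rejects:

    class Outer {
      private class Secret
      private def make: Secret = new Secret   // fine: the signature is itself private
      // def leak: Secret = new Secret        // rejected: private class Secret escapes its defining scope
    }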
    @@ -472,7 +445,7 @@ trait Typers extends Modes with Adaptations with Tags {
           }
     
         /** The qualifying class
    -     *  of a this or super with prefix qual.
    +     *  of a this or super with prefix `qual`.
          *  packageOk is equal false when qualifying class symbol
          */
         def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean) =
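A minimal sketch of a qualified `this` (invented names):

    class C {
      def name = "C"
      class Inner {
        def outerName = C.this.name   // C is the qualifying class of this `this` reference
      }
    }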
    @@ -558,7 +531,7 @@ trait Typers extends Modes with Adaptations with Tags {
           }
         }
     
    -    /** Does the context of tree tree require a stable type?
    +    /** Does the context of tree `tree` require a stable type?
          */
         private def isStableContext(tree: Tree, mode: Int, pt: Type) =
           isNarrowable(tree.tpe) && ((mode & (EXPRmode | LHSmode)) == EXPRmode) &&
    @@ -660,12 +633,6 @@ trait Typers extends Modes with Adaptations with Tags {
           case _                                    => !phase.erasedTypes
         }
     
    -    /**
    -     *  @param tree ...
    -     *  @param mode ...
    -     *  @param pt   ...
    -     *  @return     ...
    -     */
         def stabilizeFun(tree: Tree, mode: Int, pt: Type): Tree = {
           val sym = tree.symbol
           val pre = tree match {
    @@ -874,7 +841,6 @@ trait Typers extends Modes with Adaptations with Tags {
               debuglog("eta-expanding " + tree + ":" + tree.tpe + " to " + pt)
               checkParamsConvertible(tree, tree.tpe)
               val tree0 = etaExpand(context.unit, tree, this)
    -          // println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode))
     
               if (context.undetparams.nonEmpty) {
                 // #2624: need to infer type arguments for eta expansion of a polymorphic method
    @@ -960,9 +926,11 @@ trait Typers extends Modes with Adaptations with Tags {
           def adaptConstrPattern(): Tree = { // (5)
             def hasUnapplyMember(tp: Type) = reallyExists(unapplyMember(tp))
             val overloadedExtractorOfObject = tree.symbol filter (sym => hasUnapplyMember(sym.tpe))
    -        // if the tree's symbol's type does not define an extractor, maybe the tree's type does
    -        // this is the case when we encounter an arbitrary tree as the target of an unapply call (rather than something that looks like a constructor call)
    -        // (for now, this only happens due to wrapClassTagUnapply, but when we support parameterized extractors, it will become more common place)
    +        // if the tree's symbol's type does not define an extractor, maybe the tree's type does.
    +        // this is the case when we encounter an arbitrary tree as the target of an unapply call
    +        // (rather than something that looks like a constructor call.) (for now, this only happens
    +        // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become
     +        // more commonplace)
             val extractor = overloadedExtractorOfObject orElse unapplyMember(tree.tpe)
             if (extractor != NoSymbol) {
               // if we did some ad-hoc overloading resolution, update the tree's symbol
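For illustration (invented names), a pattern that is typed through an object's `unapply` member rather than through a constructor:

    object Even {
      def unapply(n: Int): Boolean = n % 2 == 0
    }
    def label(n: Int): String = n match {
      case Even() => "even"
      case _      => "odd"
    }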
    @@ -1555,16 +1523,6 @@ trait Typers extends Modes with Adaptations with Tags {
                 if (!supertparams.isEmpty)
                   MissingTypeArgumentsParentTpeError(supertpt)
             }
    -/* experimental: early types as type arguments
    -        val hasEarlyTypes = templ.body exists (treeInfo.isEarlyTypeDef)
    -        val earlyMap = new EarlyMap(clazz)
    -        List.mapConserve(supertpt :: mixins){ tpt =>
    -          val tpt1 = checkNoEscaping.privates(clazz, tpt)
    -          if (hasEarlyTypes) tpt1 else tpt1 setType earlyMap(tpt1.tpe)
    -        }
    -*/
    -
    -        //Console.println("parents("+clazz") = "+supertpt :: mixins);//DEBUG
     
             // Certain parents are added in the parser before it is known whether
             // that class also declared them as parents.  For instance, this is an
    @@ -1652,9 +1610,6 @@ trait Typers extends Modes with Adaptations with Tags {
                   !selfType.isErroneous &&
                   !parent.tpe.isErroneous)
               {
    -            //Console.println(context.owner);//DEBUG
    -            //Console.println(context.owner.unsafeTypeParams);//DEBUG
    -            //Console.println(List.fromArray(context.owner.info.closure));//DEBUG
                 pending += ParentSelfTypeConformanceError(parent, selfType)
                 if (settings.explaintypes.value) explainTypes(selfType, parent.tpe.typeOfThis)
               }
    @@ -1670,13 +1625,6 @@ trait Typers extends Modes with Adaptations with Tags {
             for (p <- parents) validateParentClass(p, superclazz)
           }
     
    -/*
    -      if (settings.Xshowcls.value != "" &&
    -          settings.Xshowcls.value == context.owner.fullName)
    -        println("INFO "+context.owner+
    -                ", baseclasses = "+(context.owner.info.baseClasses map (_.fullName))+
    -                ", lin = "+(context.owner.info.baseClasses map (context.owner.thisType.baseType)))
    -*/
           pending.foreach(ErrorUtils.issueTypeError)
         }
     
    @@ -1700,12 +1648,7 @@ trait Typers extends Modes with Adaptations with Tags {
           }
         }
     
    -    /**
    -     *  @param cdef ...
    -     *  @return     ...
    -     */
         def typedClassDef(cdef: ClassDef): Tree = {
    -//      attributes(cdef)
           val clazz = cdef.symbol
           val typedMods = typedModifiers(cdef.mods)
           assert(clazz != NoSymbol, cdef)
    @@ -1734,10 +1677,6 @@ trait Typers extends Modes with Adaptations with Tags {
             .setType(NoType)
         }
     
    -    /**
    -     *  @param mdef ...
    -     *  @return     ...
    -     */
         def typedModuleDef(mdef: ModuleDef): Tree = {
           // initialize all constructors of the linked class: the type completer (Namer.methodSig)
           // might add default getters to this object. example: "object T; class T(x: Int = 1)"
    @@ -1795,13 +1734,7 @@ trait Typers extends Modes with Adaptations with Tags {
           if (txt eq context) namer.enterSym(tree)
           else newNamer(txt).enterSym(tree)
     
    -    /**
    -     *  @param templ    ...
    -     *  @param parents1 ...
    -     *    
     - *    Check that inner classes do not inherit from Annotation
     - *
  4. - * @return ... + /** Check that inner classes do not inherit from Annotation */ def typedTemplate(templ: Template, parents1: List[Tree]): Template = { val clazz = context.owner @@ -1876,12 +1809,7 @@ trait Typers extends Modes with Adaptations with Tags { def typedModifiers(mods: Modifiers): Modifiers = mods.copy(annotations = Nil) setPositions mods.positions - /** - * @param vdef ... - * @return ... - */ def typedValDef(vdef: ValDef): ValDef = { -// attributes(vdef) val sym = vdef.symbol.initialize val typer1 = constrTyperIf(sym.isParameter && sym.owner.isConstructor) val typedMods = typedModifiers(vdef.mods) @@ -1922,10 +1850,6 @@ trait Typers extends Modes with Adaptations with Tags { } /** Enter all aliases of local parameter accessors. - * - * @param clazz ... - * @param vparamss ... - * @param rhs ... */ def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) { debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs") @@ -2020,7 +1944,7 @@ trait Typers extends Modes with Adaptations with Tags { f(subTree) } - /** Check if a structurally defined method violates implementation restrictions. + /** Check if a structurally defined method violates implementation restrictions. * A method cannot be called if it is a non-private member of a refinement type * and if its parameter's types are any of: * - the self-type of the refinement @@ -2101,10 +2025,6 @@ trait Typers extends Modes with Adaptations with Tags { useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) } - /** - * @param ddef ... - * @return ... - */ def typedDefDef(ddef: DefDef): DefDef = { val meth = ddef.symbol.initialize @@ -2257,12 +2177,6 @@ trait Typers extends Modes with Adaptations with Tags { } } - /** - * @param block ... - * @param mode ... - * @param pt ... - * @return ... - */ def typedBlock(block: Block, mode: Int, pt: Type): Block = { val syntheticPrivates = new ListBuffer[Symbol] try { @@ -2335,12 +2249,6 @@ trait Typers extends Modes with Adaptations with Tags { } } - /** - * @param cdef ... - * @param pattpe ... - * @param pt ... - * @return ... - */ def typedCase(cdef: CaseDef, pattpe: Type, pt: Type): CaseDef = { // verify no _* except in last position for (Apply(_, xs) <- cdef.pat ; x <- xs dropRight 1 ; if treeInfo isStar x) @@ -2606,12 +2514,6 @@ trait Typers extends Modes with Adaptations with Tags { override def mkSel(params: List[Symbol]) = sel.duplicate } - /** - * @param fun ... - * @param mode ... - * @param pt ... - * @return ... - */ private def typedFunction(fun: Function, mode: Int, pt: Type): Tree = { val numVparams = fun.vparams.length if (numVparams > definitions.MaxFunctionArity) @@ -3294,10 +3196,7 @@ trait Typers extends Modes with Adaptations with Tags { if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun)) else { val args1 = typedArgs(args, mode, formals, formalsExpanded) - // This used to be the following (failing) assert: - // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt) - // I modified as follows. See SI-1048. - val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt) + val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt) // SI-1048 val itype = glb(List(pt1, arg.tpe)) arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking) @@ -4300,10 +4199,6 @@ trait Typers extends Modes with Adaptations with Tags { UnderscoreEtaError(expr1) } - /** - * @param args ... - * @return ... 
- */ def tryTypedArgs(args: List[Tree], mode: Int): Option[List[Tree]] = { val c = context.makeSilent(false) c.retyping = true @@ -4563,12 +4458,8 @@ trait Typers extends Modes with Adaptations with Tags { if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree } - /** Attribute a selection where tree is qual.name. - * qual is already attributed. - * - * @param qual ... - * @param name ... - * @return ... + /** Attribute a selection where `tree` is `qual.name`. + * `qual` is already attributed. */ def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = { val t = typedSelectInternal(tree, qual, name) @@ -5186,12 +5077,6 @@ trait Typers extends Modes with Adaptations with Tags { } } - /** - * @param tree ... - * @param mode ... - * @param pt ... - * @return ... - */ def typed(tree: Tree, mode: Int, pt: Type): Tree = { lastTreeToTyper = tree indentTyping() @@ -5262,10 +5147,7 @@ trait Typers extends Modes with Adaptations with Tags { def atOwner(tree: Tree, owner: Symbol): Typer = newTyper(context.make(tree, owner)) - /** Types expression or definition tree. - * - * @param tree ... - * @return ... + /** Types expression or definition `tree`. */ def typed(tree: Tree): Tree = { val ret = typed(tree, EXPRmode, WildcardType) @@ -5278,23 +5160,19 @@ trait Typers extends Modes with Adaptations with Tags { // it makes for a lot less casting. // def typedPos[T <: Tree](pos: Position)(tree: T): T = typed(atPos(pos)(tree)).asInstanceOf[T] - /** Types expression tree with given prototype pt. - * - * @param tree ... - * @param pt ... - * @return ... + /** Types expression `tree` with given prototype `pt`. */ def typed(tree: Tree, pt: Type): Tree = typed(tree, EXPRmode, pt) - /** Types qualifier tree of a select node. - * E.g. is tree occurs in a context like tree.m. + /** Types qualifier `tree` of a select node. + * E.g. is tree occurs in a context like `tree.m`. */ def typedQualifier(tree: Tree, mode: Int, pt: Type): Tree = typed(tree, EXPRmode | QUALmode | POLYmode | mode & TYPEPATmode, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit - /** Types qualifier tree of a select node. - * E.g. is tree occurs in a context like tree.m. + /** Types qualifier `tree` of a select node. + * E.g. is tree occurs in a context like `tree.m`. */ def typedQualifier(tree: Tree, mode: Int): Tree = typedQualifier(tree, mode, WildcardType) @@ -5305,7 +5183,7 @@ trait Typers extends Modes with Adaptations with Tags { def typedOperator(tree: Tree): Tree = typed(tree, EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType) - /** Types a pattern with prototype pt */ + /** Types a pattern with prototype `pt` */ def typedPattern(tree: Tree, pt: Type): Tree = { // We disable implicits because otherwise some constructs will // type check which should not. The pattern matcher does not diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala index ea436a71fb..aa66a8d00a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Variances.scala @@ -40,7 +40,7 @@ trait Variances { (VARIANCES /: tps) ((v, tp) => v & varianceInType(tp)(tparam)) /** Compute variance of type parameter `tparam` in all type arguments - * tps which correspond to formal type parameters `tparams1`. + * `tps` which correspond to formal type parameters `tparams1`. 
*/ def varianceInArgs(tps: List[Type], tparams1: List[Symbol])(tparam: Symbol): Int = { var v: Int = VARIANCES; @@ -63,7 +63,7 @@ trait Variances { varianceInType(annot.atp)(tparam) } - /** Compute variance of type parameter tparam in type tp. */ + /** Compute variance of type parameter `tparam` in type `tp`. */ def varianceInType(tp: Type)(tparam: Symbol): Int = tp match { case ErrorType | WildcardType | NoType | NoPrefix | ThisType(_) | ConstantType(_) => VARIANCES diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 3ade581c8f..99f8d95517 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -18,7 +18,7 @@ import scala.annotation.meta.getter /** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. * * Implementation note: since efficiency is important for data structures this implementation - * uses null to represent empty trees. This also means pattern matching cannot + * uses `null` to represent empty trees. This also means pattern matching cannot * easily be used. The API represented by the RedBlackTree object tries to hide these * optimizations behind a reasonably clean API. * @@ -82,7 +82,7 @@ object RedBlackTree { f((tree.key, tree.value)) if (tree.right ne null) _foreach(tree.right, f) } - + def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f) private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U) { @@ -90,7 +90,7 @@ object RedBlackTree { f((tree.key)) if (tree.right ne null) _foreachKey(tree.right, f) } - + def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree) def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree) def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree) diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala index eeadaddb5e..cb4f8687f3 100644 --- a/src/library/scala/concurrent/FutureTaskRunner.scala +++ b/src/library/scala/concurrent/FutureTaskRunner.scala @@ -10,7 +10,7 @@ package scala.concurrent import scala.language.{implicitConversions, higherKinds} -/** The `FutureTaskRunner
    trait is a base trait of task runners +/** The `FutureTaskRunner` trait is a base trait of task runners * that provide some sort of future abstraction. * * @author Philipp Haller diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala index b9e665e292..fc32e45a5e 100755 --- a/src/library/scala/xml/Elem.scala +++ b/src/library/scala/xml/Elem.scala @@ -17,7 +17,7 @@ package scala.xml * @author Burak Emir */ object Elem { - /** Build an Elem, setting its minimizeEmpty property to true if it has no children. Note that this + /** Build an Elem, setting its minimizeEmpty property to `true` if it has no children. Note that this * default may not be exactly what you want, as some XML dialects don't permit some elements to be minimized. * * @deprecated This factory method is retained for backward compatibility; please use the other one, with which you diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala index 6b6c962692..dcd4c15969 100755 --- a/src/library/scala/xml/Node.scala +++ b/src/library/scala/xml/Node.scala @@ -55,7 +55,7 @@ abstract class Node extends NodeSeq { def scope: NamespaceBinding = TopScope /** - * convenience, same as getNamespace(this.prefix) + * convenience, same as `getNamespace(this.prefix)` */ def namespace = getNamespace(this.prefix) @@ -64,8 +64,8 @@ abstract class Node extends NodeSeq { * checks if scope is `'''null'''`. * * @param pre the prefix whose namespace name we would like to obtain - * @return the namespace if scope != null and prefix was - * found, else null + * @return the namespace if `scope != null` and prefix was + * found, else `null` */ def getNamespace(pre: String): String = if (scope eq null) null else scope.getURI(pre) @@ -74,8 +74,8 @@ abstract class Node extends NodeSeq { * Same as `attributes.getValue(key)` * * @param key of queried attribute. - * @return value of UnprefixedAttribute with given key - * in attributes, if it exists, otherwise null. + * @return value of `UnprefixedAttribute` with given key + * in attributes, if it exists, otherwise `null`. */ final def attribute(key: String): Option[Seq[Node]] = attributes.get(key) diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 950e30dbc5..6d0d34cfc1 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -30,11 +30,6 @@ trait Scopes extends api.Scopes { self: SymbolTable => override def toString() = s"$sym (depth=$depth)" } - /** - * @param sym ... - * @param owner ... - * @return ... - */ private def newScopeEntry(sym: Symbol, owner: Scope): ScopeEntry = { val e = new ScopeEntry(sym, owner) e.next = owner.elems @@ -101,8 +96,6 @@ trait Scopes extends api.Scopes { self: SymbolTable => } /** enter a scope entry - * - * @param e ... */ protected def enterEntry(e: ScopeEntry) { elemsCache = null @@ -119,8 +112,6 @@ trait Scopes extends api.Scopes { self: SymbolTable => } /** enter a symbol - * - * @param sym ... */ def enter[T <: Symbol](sym: T): T = { enterEntry(newScopeEntry(sym, this)) @@ -128,8 +119,6 @@ trait Scopes extends api.Scopes { self: SymbolTable => } /** enter a symbol, asserting that no symbol with same name exists in scope - * - * @param sym ... */ def enterUnique(sym: Symbol) { assert(lookup(sym.name) == NoSymbol, (sym.fullLocationString, lookup(sym.name).fullLocationString)) @@ -184,8 +173,6 @@ trait Scopes extends api.Scopes { self: SymbolTable => } /** remove entry - * - * @param e ... 
*/ def unlink(e: ScopeEntry) { if (elems == e) { diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index eec780470e..2e806dd6b1 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -876,9 +876,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => * (1) it is declared deferred or * (2) it is abstract override and its super symbol in `base` is * nonexistent or incomplete. - * - * @param base ... - * @return ... */ final def isIncompleteIn(base: Symbol): Boolean = this.isDeferred || @@ -1774,8 +1771,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** The type of `this` in a class, or else the type of the symbol itself. */ def typeOfThis = thisSym.tpe_* - /** If symbol is a class, the type this.type in this class, - * otherwise NoPrefix. + /** If symbol is a class, the type `this.type` in this class, + * otherwise `NoPrefix`. * We always have: thisType <:< typeOfThis */ def thisType: Type = NoPrefix diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 42a9d9e456..2f97b01ffa 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2060,8 +2060,6 @@ trait Types extends api.Types { self: SymbolTable => extends ClassInfoType(List(), decls, clazz) /** A class representing a constant type. - * - * @param value ... */ abstract case class ConstantType(value: Constant) extends SingletonType with ConstantTypeApi { override def underlying: Type = value.tpe @@ -3567,10 +3565,6 @@ trait Types extends api.Types { self: SymbolTable => } /** The canonical creator for a refined type with an initially empty scope. - * - * @param parents ... - * @param owner ... - * @return ... */ def refinedType(parents: List[Type], owner: Symbol): Type = refinedType(parents, owner, newScope, owner.pos) diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala index 6170fcbb90..e8ee202978 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala @@ -62,11 +62,8 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { writeByte((x & 0x7f).toInt) } - /** Write a natural number x at position pos. + /** Write a natural number `x` at position `pos`. * If number is more than one byte, shift rest of array to make space. - * - * @param pos ... - * @param x ... */ def patchNat(pos: Int, x: Int) { def patchNatPrefix(x: Int) { @@ -81,7 +78,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { if (y != 0) patchNatPrefix(y) } - /** Write a long number x in signed big endian format, base 256. + /** Write a long number `x` in signed big endian format, base 256. * * @param x The long number to be written. */ @@ -151,18 +148,14 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { result.toIndexedSeq } - /** Perform operation op until the condition - * readIndex == end is satisfied. + /** Perform operation `op` until the condition + * `readIndex == end` is satisfied. * Concatenate results into a list. - * - * @param end ... - * @param op ... - * @return ... */ def until[T](end: Int, op: () => T): List[T] = if (readIndex == end) List() else op() :: until(end, op); - /** Perform operation op the number of + /** Perform operation `op` the number of * times specified. 
Concatenate the results into a list. */ def times[T](n: Int, op: ()=>T): List[T] = diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index c82546b552..551ba4ee5c 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -159,9 +159,9 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { result } - /** If entry at i is undefined, define it by performing - * operation op with readIndex at start of i'th - * entry. Restore readIndex afterwards. + /** If entry at `i` is undefined, define it by performing + * operation `op` with `readIndex at start of i'th + * entry. Restore `readIndex` afterwards. */ protected def at[T <: AnyRef](i: Int, op: () => T): T = { var r = entries(i) diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala index bc02ad1058..5645eb4889 100644 --- a/src/reflect/scala/reflect/internal/util/StringOps.scala +++ b/src/reflect/scala/reflect/internal/util/StringOps.scala @@ -73,10 +73,6 @@ trait StringOps { else Some((str take idx, str drop (if (doDropIndex) idx + 1 else idx))) /** Returns a string meaning "n elements". - * - * @param n ... - * @param elements ... - * @return ... */ def countElementsAsString(n: Int, elements: String): String = n match { @@ -89,9 +85,6 @@ trait StringOps { } /** Turns a count into a friendly English description if n<=4. - * - * @param n ... - * @return ... */ def countAsString(n: Int): String = n match { diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index 15befb67f1..fa7298c726 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -27,7 +27,7 @@ object AbstractFile { /** * If the specified File exists and is a regular file, returns an - * abstract regular file backed by it. Otherwise, returns null. + * abstract regular file backed by it. Otherwise, returns `null`. */ def getFile(file: File): AbstractFile = if (file.isFile) new PlainFile(file) else null @@ -38,10 +38,7 @@ object AbstractFile { /** * If the specified File exists and is either a directory or a * readable zip or jar archive, returns an abstract directory - * backed by it. Otherwise, returns null. - * - * @param file ... - * @return ... + * backed by it. Otherwise, returns `null`. */ def getDirectory(file: File): AbstractFile = if (file.isDirectory) new PlainFile(file) @@ -51,10 +48,7 @@ object AbstractFile { /** * If the specified URL exists and is a readable zip or jar archive, * returns an abstract directory backed by it. Otherwise, returns - * null. - * - * @param file ... - * @return ... + * `null`. */ def getURL(url: URL): AbstractFile = { if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null @@ -80,10 +74,10 @@ object AbstractFile { *

    *

    * The interface does not allow to access the content. - * The class symtab.classfile.AbstractFileReader accesses + * The class `symtab.classfile.AbstractFileReader` accesses * bytes, knowing that the character set of classfiles is UTF-8. For - * all other cases, the class SourceFile is used, which honors - * global.settings.encoding.value. + * all other cases, the class `SourceFile` is used, which honors + * `global.settings.encoding.value`. *

    * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' @@ -148,7 +142,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] { def toURL: URL = if (file == null) null else file.toURI.toURL /** Returns contents of file (if applicable) in a Char array. - * warning: use Global.getSourceFile() to use the proper + * warning: use `Global.getSourceFile()` to use the proper * encoding when converting to the char array. */ @throws(classOf[IOException]) @@ -175,8 +169,8 @@ abstract class AbstractFile extends Iterable[AbstractFile] { def iterator: Iterator[AbstractFile] /** Returns the abstract file in this abstract directory with the specified - * name. If there is no such file, returns null. The argument - * directory tells whether to look for a directory or + * name. If there is no such file, returns `null`. The argument + * `directory` tells whether to look for a directory or * a regular file. */ def lookupName(name: String, directory: Boolean): AbstractFile diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala index 95f4429fad..eea81da290 100644 --- a/src/reflect/scala/reflect/io/VirtualFile.scala +++ b/src/reflect/scala/reflect/io/VirtualFile.scala @@ -3,7 +3,6 @@ * @author Martin Odersky */ - package scala.reflect package io @@ -33,12 +32,8 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF case _ => false } - //######################################################################## - // Private data private var content = Array.emptyByteArray - //######################################################################## - // Public Methods def absolute = this /** Returns null. */ @@ -84,10 +79,6 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF * specified name. If there is no such file, returns null. The * argument "directory" tells whether to look for a directory or * or a regular file. - * - * @param name ... - * @param directory ... - * @return ... */ def lookupName(name: String, directory: Boolean): AbstractFile = { assert(isDirectory, "not a directory '" + this + "'") @@ -98,6 +89,4 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF * check that it exists. */ def lookupNameUnchecked(name: String, directory: Boolean) = unsupported - - //######################################################################## } -- cgit v1.2.3 From d09bb9c6377de4664da9a0a7067f94644ba841d5 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 08:22:25 -0800 Subject: Some files removed in their entirety. And small associated changes. 
--- src/compiler/scala/tools/nsc/CompilerRun.scala | 21 ------ .../scala/tools/nsc/interpreter/CodeHandlers.scala | 50 -------------- .../scala/tools/nsc/interpreter/ExprTyper.scala | 7 +- src/compiler/scala/tools/nsc/io/Fileish.scala | 33 ---------- src/compiler/scala/tools/nsc/io/Jar.scala | 6 -- .../tools/nsc/settings/AdvancedScalaSettings.scala | 77 ---------------------- .../tools/partest/nest/ReflectiveRunner.scala | 7 +- .../scala/tools/partest/nest/RunnerUtils.scala | 29 -------- .../scala/tools/partest/utils/PrintMgr.scala | 52 --------------- 9 files changed, 8 insertions(+), 274 deletions(-) delete mode 100644 src/compiler/scala/tools/nsc/CompilerRun.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala delete mode 100644 src/compiler/scala/tools/nsc/io/Fileish.scala delete mode 100644 src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala delete mode 100644 src/partest/scala/tools/partest/nest/RunnerUtils.scala delete mode 100644 src/partest/scala/tools/partest/utils/PrintMgr.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/CompilerRun.scala b/src/compiler/scala/tools/nsc/CompilerRun.scala deleted file mode 100644 index 6746b08155..0000000000 --- a/src/compiler/scala/tools/nsc/CompilerRun.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky - */ - -package scala.tools.nsc - -class CompilerRun { - def firstPhase: Phase = NoPhase - def terminalPhase: Phase = NoPhase - def namerPhase: Phase = NoPhase - def typerPhase: Phase = NoPhase - def refchecksPhase: Phase = NoPhase - def explicitouterPhase: Phase = NoPhase - def erasurePhase: Phase = NoPhase - def flattenPhase: Phase = NoPhase - def mixinPhase: Phase = NoPhase - def icodePhase: Phase = NoPhase - def phaseNamed(name: String): Phase = NoPhase -} - diff --git a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala deleted file mode 100644 index 1741a82775..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import CodeHandlers.NoSuccess -import scala.util.control.ControlThrowable - -/** - * The start of a simpler interface for utilizing the compiler with piecemeal - * code strings. The "T" here could potentially be a Tree, a Type, a Symbol, - * a Boolean, or something even more exotic. - */ -trait CodeHandlers[T] { - self => - - // Expressions are composed of operators and operands. - def expr(code: String): T - - // Statements occur as parts of blocks and templates. - // A statement can be an import, a definition or an expression, or it can be empty. - // Statements used in the template of a class definition can also be declarations. 
- def stmt(code: String): T - def stmts(code: String): Seq[T] - - object opt extends CodeHandlers[Option[T]] { - val handler: PartialFunction[Throwable, Option[T]] = { - case _: NoSuccess => None - } - val handlerSeq: PartialFunction[Throwable, Seq[Option[T]]] = { - case _: NoSuccess => Nil - } - - def expr(code: String) = try Some(self.expr(code)) catch handler - def stmt(code: String) = try Some(self.stmt(code)) catch handler - def stmts(code: String) = try (self.stmts(code) map (x => Some(x))) catch handlerSeq - } -} - -object CodeHandlers { - def incomplete() = throw CodeIncomplete - def fail(msg: String) = throw new CodeException(msg) - - trait NoSuccess extends ControlThrowable - class CodeException(msg: String) extends RuntimeException(msg) with NoSuccess { } - object CodeIncomplete extends CodeException("CodeIncomplete") -} diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala index ebd0030802..5ebaf6e031 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala @@ -17,7 +17,8 @@ trait ExprTyper { import syntaxAnalyzer.UnitParser import naming.freshInternalVarName - object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] { + object codeParser { + val global: repl.global.type = repl.global def applyRule[T](code: String, rule: UnitParser => T): T = { reporter.reset() val scanner = newUnitParser(code) @@ -28,11 +29,7 @@ trait ExprTyper { result } - - def defns(code: String) = stmts(code) collect { case x: DefTree => x } - def expr(code: String) = applyRule(code, _.expr()) def stmts(code: String) = applyRule(code, _.templateStats()) - def stmt(code: String) = stmts(code).last // guaranteed nonempty } /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */ diff --git a/src/compiler/scala/tools/nsc/io/Fileish.scala b/src/compiler/scala/tools/nsc/io/Fileish.scala deleted file mode 100644 index 7b4e385dd8..0000000000 --- a/src/compiler/scala/tools/nsc/io/Fileish.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package io - -import java.io.{ InputStream } -import java.util.jar.JarEntry - -/** A common interface for File-based things and Stream-based things. - * (In particular, io.File and JarEntry.) - */ -class Fileish(val path: Path, val input: () => InputStream) extends Streamable.Chars { - def inputStream() = input() - - def parent = path.parent - def name = path.name - def isSourceFile = path.hasExtension("java", "scala") - - private lazy val pkgLines = lines() collect { case x if x startsWith "package " => x stripPrefix "package" trim } - lazy val pkgFromPath = parent.path.replaceAll("""[/\\]""", ".") - lazy val pkgFromSource = pkgLines map (_ stripSuffix ";") mkString "." 
- - override def toString = path.path -} - -object Fileish { - def apply(f: File): Fileish = new Fileish(f, () => f.inputStream()) - def apply(f: JarEntry, in: () => InputStream): Fileish = new Fileish(Path(f.getName), in) - def apply(path: String, in: () => InputStream): Fileish = new Fileish(Path(path), in) -} diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index ef2c9b13c0..3a9a878bc2 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -63,12 +63,6 @@ class Jar(file: File) extends Iterable[JarEntry] { Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f } override def iterator: Iterator[JarEntry] = this.toList.iterator - def fileishIterator: Iterator[Fileish] = jarFile.entries.asScala map (x => Fileish(x, () => getEntryStream(x))) - - private def getEntryStream(entry: JarEntry) = jarFile getInputStream entry match { - case null => errorFn("No such entry: " + entry) ; null - case x => x - } override def toString = "" + file } diff --git a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala deleted file mode 100644 index 0bec113743..0000000000 --- a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala +++ /dev/null @@ -1,77 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package settings - -trait AdvancedScalaSettings { - self: AbsScalaSettings => - - abstract class X extends SettingGroup("-X") { - val assemextdirs: StringSetting - val assemname: StringSetting - val assempath: StringSetting - val checkinit: BooleanSetting - val disableassertions: BooleanSetting - val elidebelow: IntSetting - val experimental: BooleanSetting - val future: BooleanSetting - val generatephasegraph: StringSetting - val logimplicits: BooleanSetting - val mainClass: StringSetting - val migration: BooleanSetting - val noforwarders: BooleanSetting - val nojline: BooleanSetting - val nouescape: BooleanSetting - val plugin: MultiStringSetting - val plugindisable: MultiStringSetting - val pluginlist: BooleanSetting - val pluginrequire: MultiStringSetting - val pluginsdir: StringSetting - val print: PhasesSetting - val printicode: BooleanSetting - val printpos: BooleanSetting - val printtypes: BooleanSetting - val prompt: BooleanSetting - val resident: BooleanSetting - val script: StringSetting - val showclass: StringSetting - val showobject: StringSetting - val showphases: BooleanSetting - val sourcedir: StringSetting - val sourcereader: StringSetting - } - // def Xexperimental = X.experimental - // def Xmigration28 = X.migration - // def Xnojline = X.nojline - // def Xprint = X.print - // def Xprintpos = X.printpos - // def Xshowcls = X.showclass - // def Xshowobj = X.showobject - // def assemextdirs = X.assemextdirs - // def assemname = X.assemname - // def assemrefs = X.assempath - // def checkInit = X.checkinit - // def disable = X.plugindisable - // def elideLevel = X.elidelevel - // def future = X.future - // def genPhaseGraph = X.generatephasegraph - // def logimplicits = X.logimplicits - // def noForwarders = X.noforwarders - // def noassertions = X.disableassertions - // def nouescape = X.nouescape - // def plugin = X.plugin - // def pluginsDir = X.pluginsdir - // def printtypes = X.printtypes - // def prompt = X.prompt - // def require = X.require - // def resident = X.resident - // def script = X.script - // 
def showPhases = X.showphases - // def showPlugins = X.pluginlist - // def sourceReader = X.sourcereader - // def sourcedir = X.sourcedir - // def writeICode = X.printicode -} \ No newline at end of file diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala index 5cb8589d66..22010d4b16 100644 --- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala +++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala @@ -12,7 +12,6 @@ import scala.tools.nsc.Properties.{ setProp, propOrEmpty } import scala.tools.nsc.util.ClassPath import scala.tools.nsc.io import io.Path -import RunnerUtils._ import java.net.URLClassLoader /* This class is used to load an instance of DirectRunner using @@ -28,6 +27,12 @@ class ReflectiveRunner { // was used to start the runner. val sepRunnerClassName = "scala.tools.partest.nest.ConsoleRunner" + private def searchPath(option: String, as: List[String]): Option[String] = as match { + case `option` :: r :: _ => Some(r) + case _ :: rest => searchPath(option, rest) + case Nil => None + } + def main(args: String) { val argList = (args.split("\\s")).toList diff --git a/src/partest/scala/tools/partest/nest/RunnerUtils.scala b/src/partest/scala/tools/partest/nest/RunnerUtils.scala deleted file mode 100644 index 6707a9338a..0000000000 --- a/src/partest/scala/tools/partest/nest/RunnerUtils.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2013 LAMP/EPFL - * @author Philipp Haller - */ - -// $Id$ - -package scala.tools.partest -package nest - -object RunnerUtils { - def splitArgs(str: String) = str split "\\s" filterNot (_ == "") toList - - def searchPath(option: String, as: List[String]): Option[String] = as match { - case `option` :: r :: _ => Some(r) - case _ :: rest => searchPath(option, rest) - case Nil => None - } - - def searchAndRemovePath(option: String, as: List[String]) = (as indexOf option) match { - case -1 => (None, as) - case idx => (Some(as(idx + 1)), (as take idx) ::: (as drop (idx + 2))) - } - - def searchAndRemoveOption(option: String, as: List[String]) = (as indexOf option) match { - case -1 => (false, as) - case idx => (true, (as take idx) ::: (as drop (idx + 1))) - } -} diff --git a/src/partest/scala/tools/partest/utils/PrintMgr.scala b/src/partest/scala/tools/partest/utils/PrintMgr.scala deleted file mode 100644 index d25be87c1e..0000000000 --- a/src/partest/scala/tools/partest/utils/PrintMgr.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala Parallel Testing ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id$ - -package scala.tools.partest -package utils - -/** - * @author Thomas Hofer - */ -object PrintMgr { - - val NONE = 0 - val SOME = 1 - val MANY = 2 - - var outline = "" - var success = "" - var failure = "" - var warning = "" - var default = "" - - def initialization(number: Int) = number match { - case MANY => - outline = Console.BOLD + Console.BLACK - success = Console.BOLD + Console.GREEN - failure = Console.BOLD + Console.RED - warning = Console.BOLD + Console.YELLOW - default = Console.RESET - case SOME => - outline = Console.BOLD + Console.BLACK - success = Console.RESET - failure = Console.BOLD + Console.BLACK - warning = Console.BOLD + Console.BLACK - default = Console.RESET - case _ => - } - - def printOutline(msg: String) = print(outline + msg + default) - - 
def printSuccess(msg: String) = print(success + msg + default) - - def printFailure(msg: String) = print(failure + msg + default) - - def printWarning(msg: String) = print(warning + msg + default) -} -- cgit v1.2.3 From 59c0c5dfce92e0ef70708b95b2712556cdab3623 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 08:25:33 -0800 Subject: Members removed from the repl. There were a whole lot of these. --- .../scala/tools/nsc/interpreter/ByteCode.scala | 20 ---- .../scala/tools/nsc/interpreter/CommandLine.scala | 1 - .../scala/tools/nsc/interpreter/Completion.scala | 2 - .../tools/nsc/interpreter/CompletionAware.scala | 28 ----- .../tools/nsc/interpreter/CompletionOutput.scala | 1 - .../nsc/interpreter/ConsoleReaderHelper.scala | 5 - .../scala/tools/nsc/interpreter/Delimited.scala | 3 - .../scala/tools/nsc/interpreter/ExprTyper.scala | 4 - .../scala/tools/nsc/interpreter/ILoop.scala | 38 +------ .../scala/tools/nsc/interpreter/IMain.scala | 126 +-------------------- .../scala/tools/nsc/interpreter/ISettings.scala | 7 -- .../scala/tools/nsc/interpreter/Imports.scala | 16 +-- .../tools/nsc/interpreter/InteractiveReader.scala | 8 +- .../tools/nsc/interpreter/JLineCompletion.scala | 8 -- .../scala/tools/nsc/interpreter/JLineReader.scala | 8 -- .../scala/tools/nsc/interpreter/Logger.scala | 4 - .../scala/tools/nsc/interpreter/LoopCommands.scala | 21 ---- .../tools/nsc/interpreter/MemberHandlers.scala | 17 --- .../scala/tools/nsc/interpreter/NamedParam.scala | 4 - .../scala/tools/nsc/interpreter/Naming.scala | 1 - .../scala/tools/nsc/interpreter/Parsed.scala | 8 -- .../scala/tools/nsc/interpreter/Phased.scala | 19 ---- .../scala/tools/nsc/interpreter/Power.scala | 110 ++---------------- .../scala/tools/nsc/interpreter/ReplConfig.scala | 12 -- .../scala/tools/nsc/interpreter/ReplProps.scala | 4 - .../scala/tools/nsc/interpreter/ReplStrings.scala | 1 - .../scala/tools/nsc/interpreter/RichClass.scala | 5 +- .../scala/tools/nsc/interpreter/SimpleReader.scala | 5 +- .../scala/tools/nsc/interpreter/TypeStrings.scala | 10 -- .../scala/tools/nsc/interpreter/package.scala | 29 ----- .../tools/nsc/interpreter/session/History.scala | 6 - .../nsc/interpreter/session/SimpleHistory.scala | 6 +- 32 files changed, 25 insertions(+), 512 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala index 014661e525..48890a21c6 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala @@ -38,25 +38,5 @@ object ByteCode { } yield names - /** Attempts to retrieve case parameter names for given class name. - */ - def caseParamNamesForPath(path: String) = - for { - module <- DECODER - method <- decoderMethod("caseParamNames", classOf[String]) - names <- method.invoke(module, path).asInstanceOf[Option[List[String]]] - } - yield names - def aliasesForPackage(pkg: String) = aliasMap flatMap (_(pkg)) - - /** Attempts to find type aliases in package objects. 
- */ - def aliasForType(path: String): Option[String] = { - val (pkg, name) = (path lastIndexOf '.') match { - case -1 => return None - case idx => (path take idx, path drop (idx + 1)) - } - aliasesForPackage(pkg) flatMap (_ get name) - } } diff --git a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala b/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala index 8042f0aee2..0ab92ab769 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala @@ -10,5 +10,4 @@ package interpreter */ class CommandLine(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) { override def cmdName = "scala" - override lazy val fileEndings = List(".scalaint") } diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala index 1dfccbfbf7..84a5cb49ae 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Completion.scala @@ -23,8 +23,6 @@ object NoCompletion extends Completion { } object Completion { - def empty: Completion = NoCompletion - case class Candidates(cursor: Int, candidates: List[String]) { } val NoCandidates = Candidates(-1, Nil) diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala index 3a0b48ef57..3dd5d93390 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala @@ -51,31 +51,3 @@ trait CompletionAware { results.sorted } } - -object CompletionAware { - val Empty = new CompletionAware { def completions(verbosity: Int) = Nil } - - def unapply(that: Any): Option[CompletionAware] = that match { - case x: CompletionAware => Some((x)) - case _ => None - } - - /** Create a CompletionAware object from the given functions. - * The first should generate the list of completions whenever queried, - * and the second should return Some(CompletionAware) object if - * subcompletions are possible. - */ - def apply(terms: () => List[String], followFunction: String => Option[CompletionAware]): CompletionAware = - new CompletionAware { - def completions = terms() - def completions(verbosity: Int) = completions - override def follow(id: String) = followFunction(id) - } - - /** Convenience factories. 
- */ - def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None) - def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware = - apply(() => map.keys.toList, map.get _) -} - diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala index 13880bb8af..c647ef6f51 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala @@ -38,7 +38,6 @@ trait CompletionOutput { def relativize(str: String): String = quietString(str stripPrefix (pkg + ".")) def relativize(tp: Type): String = relativize(tp.normalize.toString) - def relativize(sym: Symbol): String = relativize(sym.info) def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]") def parenList(params: List[Any]) = params.mkString("(", ", ", ")") diff --git a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala index b5850d901c..48af261937 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala @@ -9,15 +9,10 @@ package interpreter import scala.tools.jline.console.{ ConsoleReader, CursorBuffer } trait ConsoleReaderHelper extends ConsoleReader { - def currentLine = "" + getCursorBuffer.buffer - def currentPos = getCursorBuffer.cursor def terminal = getTerminal() def width = terminal.getWidth() def height = terminal.getHeight() - def paginate = isPaginationEnabled() - def paginate_=(value: Boolean) = setPaginationEnabled(value) - def goBack(num: Int): Unit def readOneKey(prompt: String): Int def eraseLine(): Unit diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala index 80debfacb9..e88a044931 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala @@ -26,7 +26,6 @@ trait Delimited { def delimited: Char => Boolean def escapeChars: List[Char] = List('\\') - def quoteChars: List[(Char, Char)] = List(('\'', '\''), ('"', '"')) /** Break String into args based on delimiting function. 
*/ @@ -39,6 +38,4 @@ trait Delimited { def isDelimiterChar(ch: Char) = delimited(ch) def isEscapeChar(ch: Char): Boolean = escapeChars contains ch - def isQuoteStart(ch: Char): Boolean = quoteChars map (_._1) contains ch - def isQuoteEnd(ch: Char): Boolean = quoteChars map (_._2) contains ch } diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala index 5ebaf6e031..b087547cf8 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala @@ -42,10 +42,6 @@ trait ExprTyper { else Some(trees) } } - // def parsesAsExpr(line: String) = { - // import codeParser._ - // (opt expr line).isDefined - // } def symbolOfLine(code: String): Symbol = { def asExpr(): Symbol = { diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index 74549ef558..612a90f3ea 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -14,7 +14,6 @@ import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName } import scala.tools.util.{ Javap } import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream } import io.{ File, Directory } -import scala.reflect.NameTransformer._ import util.ScalaClassLoader import ScalaClassLoader._ import scala.tools.util._ @@ -71,13 +70,9 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) intp.reporter printUntruncatedMessage msg } - def isAsync = !settings.Yreplsync.value lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals]) def history = in.history - /** The context class loader at the time this object was created */ - protected val originalClassLoader = Thread.currentThread.getContextClassLoader - // classpath entries added via :cp var addedClasspath: String = "" @@ -131,20 +126,18 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def helpCommand(line: String): Result = { if (line == "") helpSummary() else uniqueCommand(line) match { - case Some(lc) => echo("\n" + lc.longHelp) + case Some(lc) => echo("\n" + lc.help) case _ => ambiguousError(line) } } private def helpSummary() = { val usageWidth = commands map (_.usageMsg.length) max - val formatStr = "%-" + usageWidth + "s %s %s" + val formatStr = "%-" + usageWidth + "s %s" echo("All commands can be abbreviated, e.g. :he instead of :help.") - echo("Those marked with a * have more detailed help, e.g. 
:help imports.\n") commands foreach { cmd => - val star = if (cmd.hasLongHelp) "*" else " " - echo(formatStr.format(cmd.usageMsg, star, cmd.help)) + echo(formatStr.format(cmd.usageMsg, cmd.help)) } } private def ambiguousError(cmd: String): Result = { @@ -194,10 +187,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) out println msg out.flush() } - protected def echoNoNL(msg: String) = { - out print msg - out.flush() - } /** Search the history */ def searchHistory(_cmdline: String) { @@ -208,8 +197,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) echo("%d %s".format(index + offset, line)) } - private var currentPrompt = Properties.shellPromptString - def setPrompt(prompt: String) = currentPrompt = prompt + private val currentPrompt = Properties.shellPromptString + /** Prompt to print when awaiting input */ def prompt = currentPrompt @@ -684,7 +673,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def process(settings: Settings): Boolean = savingContextLoader { this.settings = settings createInterpreter() - var thunks: List[() => Unit] = Nil // sets in to some kind of reader depending on environmental cues in = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, true)) @@ -703,22 +691,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) true } - /** process command-line arguments and do as they request */ - def process(args: Array[String]): Boolean = { - val command = new CommandLine(args.toList, echo) - def neededHelp(): String = - (if (command.settings.help.value) command.usageMsg + "\n" else "") + - (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "") - - // if they asked for no help and command is valid, we call the real main - neededHelp() match { - case "" => command.ok && process(command.settings) - case help => echoNoNL(help) ; true - } - } - @deprecated("Use `process` instead", "2.9.0") - def main(settings: Settings): Unit = process(settings) + def main(settings: Settings): Unit = process(settings) //used by sbt } object ILoop { diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 0ef27ac96a..3f49e782b0 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -63,6 +63,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends @deprecated("Use replOutput.dir instead", "2.11.0") def virtualDirectory = replOutput.dir + // Used in a test case. def showDirectory() = replOutput.show(out) private[nsc] var printResults = true // whether to print result lines @@ -82,20 +83,11 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends private var _classLoader: AbstractFileClassLoader = null // active classloader private val _compiler: Global = newCompiler(settings, reporter) // our private compiler - private val nextReqId = { - var counter = 0 - () => { counter += 1 ; counter } - } - def compilerClasspath: Seq[URL] = ( if (isInitializeComplete) global.classPath.asURLs else new PathResolver(settings).result.asURLs // the compiler's classpath ) def settings = initialSettings - def mostRecentLine = prevRequestList match { - case Nil => "" - case req :: _ => req.originalLine - } // Run the code body with the given boolean settings flipped to true. 
def withoutWarnings[T](body: => T): T = beQuietDuring { val saved = settings.nowarn.value @@ -110,12 +102,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true)) def this() = this(new Settings()) - lazy val repllog: Logger = new Logger { - val out: JPrintWriter = imain.out - val isInfo: Boolean = BooleanProp keyExists "scala.repl.info" - val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug" - val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace" - } lazy val formatting: Formatting = new Formatting { val prompt = Properties.shellPromptString } @@ -175,7 +161,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } import global._ - import definitions.{ ObjectClass, termMember, typeMember, dropNullaryMethod} + import definitions.{ ObjectClass, termMember, dropNullaryMethod} lazy val runtimeMirror = ru.runtimeMirror(classLoader) @@ -189,13 +175,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends noFatal(runtimeMirror staticModule path) orElse noFatal(rootMirror staticModule path) ) - def getPathIfDefined(path: String) = ( - if (path endsWith "$") getModuleIfDefined(path.init) - else getClassIfDefined(path) - ) implicit class ReplTypeOps(tp: Type) { - def orElse(other: => Type): Type = if (tp ne NoType) tp else other def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp) } @@ -211,7 +192,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends if (replScope containsName name) freshUserTermName() else name } - def isUserTermName(name: Name) = isUserVarName("" + name) def isInternalTermName(name: Name) = isInternalVarName("" + name) } import naming._ @@ -310,11 +290,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends abstract class PhaseDependentOps { def shift[T](op: => T): T - def lookup(name: Name): Symbol = shift(replScope lookup name) def path(name: => Name): String = shift(path(symbolOfName(name))) def path(sym: Symbol): String = backticked(shift(sym.fullName)) - def name(sym: Symbol): Name = shift(sym.name) - def info(sym: Symbol): Type = shift(sym.info) def sig(sym: Symbol): String = shift(sym.defString) } object typerOp extends PhaseDependentOps { @@ -328,7 +305,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def originalPath(name: Name): String = typerOp path name def originalPath(sym: Symbol): String = typerOp path sym def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName - // def translatePath(path: String) = symbolOfPath(path).fold(Option.empty[String])(flatPath) def translatePath(path: String) = { val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path) sym match { @@ -354,8 +330,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends case p => new URLClassLoader(compilerClasspath, p) }) - def getInterpreterClassLoader() = classLoader - // Set the current Java "context" class loader to this interpreter's class loader def setContextClassLoader() = classLoader.setAsContext() @@ -405,7 +379,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends exitingTyper { req.defines filterNot (s => req.defines contains s.companionSymbol) foreach { newSym => val companion = newSym.name.companionName - val found = replScope lookup companion replScope lookup companion andAlso { oldSym => 
replwarn(s"warning: previously defined $oldSym is not a companion to $newSym.") replwarn("Companions must be defined together; you may wish to use :paste mode for this.") @@ -423,19 +396,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends printMessage(msg) } - def isParseable(line: String): Boolean = { - beSilentDuring { - try parse(line) match { - case Some(xs) => xs.nonEmpty // parses as-is - case None => true // incomplete - } - catch { case x: Exception => // crashed the compiler - replwarn("Exception in isParseable(\"" + line + "\"): " + x) - false - } - } - } - def compileSourcesKeepingRun(sources: SourceFile*) = { val run = new Run() reporter.reset() @@ -653,17 +613,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends quietRun("val %s = %s".format(tempName, name)) quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType)) } - def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*)) - def addImports(ids: String*): IR.Result = - if (ids.isEmpty) IR.Success - else interpret("import " + ids.mkString(", ")) - def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p)) def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value) def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value)) - def bindSyntheticValue(x: Any): IR.Result = bindValue(freshInternalVarName(), x) - def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x) - def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x) /** Reset this interpreter, forgetting all user-specified requests. */ def reset() { @@ -719,7 +671,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def readPath = pathTo(readName) def evalPath = pathTo(evalName) - def printPath = pathTo(printName) def call(name: String, args: Any*): AnyRef = { val m = evalMethod(name) @@ -734,10 +685,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends try Right(call(name, args: _*)) catch { case ex: Throwable => Left(ex) } - def callOpt(name: String, args: Any*): Option[AnyRef] = - try Some(call(name, args: _*)) - catch { case ex: Throwable => bindError(ex) ; None } - class EvalException(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { } private def evalError(path: String, ex: Throwable) = @@ -749,7 +696,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } lazy val evalClass = load(evalPath) - lazy val evalValue = callOpt(resultName) def compile(source: String): Boolean = compileAndSaveRun("", source) @@ -798,14 +744,11 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } /** One line of code submitted by the user for interpretation */ - // private class Request(val line: String, val trees: List[Tree]) { def defines = defHandlers flatMap (_.definedSymbols) def imports = importedSymbols - def references = referencedNames map symbolOfName def value = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol - val reqId = nextReqId() val lineRep = new ReadEvalPrint() private var _originalLine: String = null @@ -816,9 +759,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _) def defHandlers = handlers collect { case x: MemberDefHandler => x } - /** all (public) names defined by 
these statements */ - val definedNames = handlers flatMap (_.definedNames) - /** list of names used by this expression */ val referencedNames: List[Name] = handlers flatMap (_.referencedNames) @@ -836,9 +776,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends val ComputedImports(importsPreamble, importsTrailer, accessPath) = exitingTyper(importsCode(referencedNames.toSet)) - /** The unmangled symbol name, but supplemented with line info. */ - def disambiguated(name: Name): String = name + " (in " + lineRep + ")" - /** the line of code to compute */ def toCompute = line @@ -852,13 +789,11 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends // $intp is not bound; punt, but include the line. else if (path == "$intp") List( "def $line = " + tquoted(originalLine), - // "def $req = %s.requestForReqId(%s).orNull".format(path, reqId), "def $trees = Nil" ) else List( "def $line = " + tquoted(originalLine), "def $trees = Nil" - // "def $trees = if ($req eq null) Nil else $req.trees".format(lineRep.readName, path, reqId) ) } @@ -931,7 +866,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /* typeOf lookup with encoding */ def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString))) - def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbolDirect.simpleName) private def typeMap[T](f: Type => T) = mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x))) @@ -1011,8 +945,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends */ def tryTwice(op: => Symbol): Symbol = exitingTyper(op) orElse exitingFlatten(op) - def signatureOf(sym: Symbol) = typerOp sig sym - def symbolOfPath(path: String): Symbol = exitingTyper(getPathIfDefined(path)) def symbolOfIdent(id: String): Symbol = symbolOfTerm(id) orElse symbolOfType(id) def symbolOfType(id: String): Symbol = tryTwice(replScope lookup (id: TypeName)) def symbolOfTerm(id: String): Symbol = tryTwice(replScope lookup (id: TermName)) @@ -1067,76 +999,30 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName def definedTypes = onlyTypes(allDefinedNames) - def definedSymbols = prevRequestList flatMap (_.defines) toSet def definedSymbolList = prevRequestList flatMap (_.defines) filterNot (s => isInternalTermName(s.name)) // Terms with user-given names (i.e. not res0 and not synthetic) def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x)) - /** Translate a repl-defined identifier into a Symbol. 
- */ - def apply(name: String): Symbol = types(name) orElse terms(name) - def types(name: String): Symbol = replScope lookup (name: TypeName) orElse getClassIfDefined(name) - def terms(name: String): Symbol = replScope lookup (name: TermName) orElse getModuleIfDefined(name) - - def types[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol - def terms[T: global.TypeTag] : Symbol = typeOf[T].termSymbol - def apply[T: global.TypeTag] : Symbol = typeOf[T].typeSymbol - - lazy val DummyInfoSymbol = NoSymbol.newValue("replScopeDummy") - private lazy val DummyInfo = TypeRef(NoPrefix, DummyInfoSymbol, Nil) - private def enterDummySymbol(name: Name) = name match { - case x: TermName => replScope enter (NoSymbol.newValue(x) setInfo DummyInfo) - case x: TypeName => replScope enter (NoSymbol.newClass(x) setInfo DummyInfo) - } - private var _replScope: Scope = _ private def resetReplScope() { _replScope = newScope } - def initReplScope() { - languageWildcardSyms foreach { clazz => - importableMembers(clazz) foreach { sym => - updateReplScope(sym, isDefined = false) - } - } - } def replScope = { if (_replScope eq null) _replScope = newScope _replScope } - def lookupAll(name: String) = (replScope.lookupAll(name: TermName) ++ replScope.lookupAll(name: TypeName)).toList - def unlinkAll(name: String) = { - val syms = lookupAll(name) - syms foreach { sym => - replScope unlink sym - } - enterDummySymbol(name: TermName) - enterDummySymbol(name: TypeName) - syms - } - def isUnlinked(name: Name) = { - symbolOfName(name) match { - case NoSymbol => false - case sym => sym.info.typeSymbolDirect == DummyInfoSymbol - } - } private var executingRequest: Request = _ private val prevRequests = mutable.ListBuffer[Request]() private val directlyBoundNames = mutable.Set[Name]() - def allHandlers = prevRequestList flatMap (_.handlers) - def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x } - def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol) - - def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last - def prevRequestList = prevRequests.toList - def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct - def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames) - def importHandlers = allHandlers collect { case x: ImportHandler => x } + def allHandlers = prevRequestList flatMap (_.handlers) + def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last + def prevRequestList = prevRequests.toList + def importHandlers = allHandlers collect { case x: ImportHandler => x } def withoutUnwrapping(op: => Unit): Unit = { val saved = isettings.unwrapStrings diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala index 302ba7a8ac..d114ca2359 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala @@ -12,13 +12,6 @@ package interpreter * @author Lex Spoon, 2007/3/24 **/ class ISettings(intp: IMain) { - /** A list of paths where :load should look */ - var loadPath = List(".") - - /** Set this to true to see repl machinery under -Yrich-exceptions. - */ - var showInternalStackTraces = false - /** The maximum length of toString to use when printing the result * of an evaluation. 0 means no maximum. 
If a printout requires * more than this number of characters, then the printout is diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala index c5048ebfd8..ff7bfd432c 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Imports.scala @@ -15,9 +15,6 @@ trait Imports { import definitions.{ ObjectClass, ScalaPackage, JavaLangPackage, PredefModule } import memberHandlers._ - def isNoImports = settings.noimports.value - def isNoPredef = settings.nopredef.value - /** Synthetic import handlers for the language defined imports. */ private def makeWildcardImportHandler(sym: Symbol): ImportHandler = { val hd :: tl = sym.fullName.split('.').toList map newTermName @@ -31,12 +28,9 @@ trait Imports { /** Symbols whose contents are language-defined to be imported. */ def languageWildcardSyms: List[Symbol] = List(JavaLangPackage, ScalaPackage, PredefModule) - def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe) def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler def allImportedNames = importHandlers flatMap (_.importedNames) - def importedTerms = onlyTerms(allImportedNames) - def importedTypes = onlyTypes(allImportedNames) /** Types which have been wildcard imported, such as: * val x = "abc" ; import x._ // type java.lang.String @@ -52,17 +46,11 @@ trait Imports { def sessionWildcards: List[Type] = { importHandlers filter (_.importsWildcard) map (_.targetType) distinct } - def wildcardTypes = languageWildcards ++ sessionWildcards def languageSymbols = languageWildcardSyms flatMap membersAtPickler def sessionImportedSymbols = importHandlers flatMap (_.importedSymbols) def importedSymbols = languageSymbols ++ sessionImportedSymbols def importedTermSymbols = importedSymbols collect { case x: TermSymbol => x } - def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x } - def implicitSymbols = importedSymbols filter (_.isImplicit) - - def importedTermNamed(name: String): Symbol = - importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol /** Tuples of (source, imported symbols) in the order they were imported. */ @@ -104,9 +92,7 @@ trait Imports { * last one imported is actually usable. */ case class ComputedImports(prepend: String, append: String, access: String) - protected def importsCode(wanted0: Set[Name]): ComputedImports = { - val wanted = wanted0 filterNot isUnlinked - + protected def importsCode(wanted: Set[Name]): ComputedImports = { /** Narrow down the list of requests from which imports * should be taken. Removes requests which cannot contribute * useful imports for the specified set of wanted names. 
diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala index 6513381d77..28ddf2939c 100644 --- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala @@ -15,22 +15,16 @@ import Properties.isMac trait InteractiveReader { val interactive: Boolean - def init(): Unit def reset(): Unit - def history: History def completion: Completion - def eraseLine(): Unit def redrawLine(): Unit - def currentLine: String def readYesOrNo(prompt: String, alt: => Boolean): Boolean = readOneKey(prompt) match { case 'y' => true case 'n' => false case _ => alt } - def readAssumingNo(prompt: String) = readYesOrNo(prompt, false) - def readAssumingYes(prompt: String) = readYesOrNo(prompt, true) protected def readOneLine(prompt: String): String protected def readOneKey(prompt: String): Int @@ -50,6 +44,6 @@ object InteractiveReader { def apply(): InteractiveReader = SimpleReader() @deprecated("Use `apply` instead.", "2.9.0") - def createDefault(): InteractiveReader = apply() + def createDefault(): InteractiveReader = apply() // used by sbt } diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala index 5ee5e5526d..19fa562234 100644 --- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala +++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala @@ -28,9 +28,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput if (isModule) getModuleIfDefined(name) else getModuleIfDefined(name) ) - def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe - def typeOf(name: String) = getType(name, false) - def moduleOf(name: String) = getType(name, true) trait CompilerCompletion { def tp: Type @@ -47,7 +44,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput def tos(sym: Symbol): String = sym.decodedName def memberNamed(s: String) = exitingTyper(effectiveTp member newTermName(s)) - def hasMethod(s: String) = memberNamed(s).isMethod // XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the // compiler to crash for reasons not yet known. @@ -279,10 +275,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput if (parsed.isEmpty) xs map ("." + _) else xs } - // generic interface for querying (e.g. interpreter loop, testing) - def completions(buf: String): List[String] = - topLevelFor(Parsed.dotted(buf + ".", buf.length + 1)) - def completer(): ScalaCompleter = new JLineTabCompletion /** This gets a little bit hairy. 
It's no small feat delegating everything diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala index e033bab03b..a620c7c75a 100644 --- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala @@ -23,7 +23,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader { private def term = consoleReader.getTerminal() def reset() = term.reset() - def init() = term.init() def scalaToJline(tc: ScalaCompleter): Completer = new Completer { def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = { @@ -35,8 +34,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader { } class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper { - // working around protected/trait/java insufficiencies. - def goBack(num: Int): Unit = back(num) def readOneKey(prompt: String) = { this.print(prompt) this.flush() @@ -44,7 +41,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader { } def eraseLine() = consoleReader.resetPromptLine("", "", 0) def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() } - // override def readLine(prompt: String): String // A hook for running code after the repl is done initializing. lazy val postInit: Unit = { @@ -63,11 +59,7 @@ class JLineReader(_completion: => Completion) extends InteractiveReader { } } - def currentLine = consoleReader.getCursorBuffer.buffer.toString def redrawLine() = consoleReader.redrawLineAndFlush() - def eraseLine() = consoleReader.eraseLine() - // Alternate implementation, not sure if/when I need this. - // def eraseLine() = while (consoleReader.delete()) { } def readOneLine(prompt: String) = consoleReader readLine prompt def readOneKey(prompt: String) = consoleReader readOneKey prompt } diff --git a/src/compiler/scala/tools/nsc/interpreter/Logger.scala b/src/compiler/scala/tools/nsc/interpreter/Logger.scala index aeb25fc688..7407daf8d0 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Logger.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Logger.scala @@ -11,8 +11,4 @@ trait Logger { def isDebug: Boolean def isTrace: Boolean def out: JPrintWriter - - def info(msg: => Any): Unit = if (isInfo) out println msg - def debug(msg: => Any): Unit = if (isDebug) out println msg - def trace(msg: => Any): Unit = if (isTrace) out println msg } diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala index 60325ece30..39979c8fbe 100644 --- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala +++ b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala @@ -19,13 +19,8 @@ class ProcessResult(val line: String) { val exitCode = builder ! 
logger def lines = buffer.toList - def show() = lines foreach println override def toString = "`%s` (%d lines, exit %d)".format(line, buffer.size, exitCode) } -object ProcessResult { - implicit def processResultToOutputLines(pr: ProcessResult): List[String] = pr.lines - def apply(line: String): ProcessResult = new ProcessResult(line) -} trait LoopCommands { protected def out: JPrintWriter @@ -35,14 +30,6 @@ trait LoopCommands { // a single interpreter command abstract class LoopCommand(val name: String, val help: String) extends (String => Result) { - private var _longHelp: String = null - final def defaultHelp = usageMsg + " (no extended help available.)" - def hasLongHelp = _longHelp != null || longHelp != defaultHelp - def withLongHelp(text: String): this.type = { _longHelp = text ; this } - def longHelp = _longHelp match { - case null => defaultHelp - case text => text - } def usage: String = "" def usageMsg: String = ":" + name + ( if (usage == "") "" else " " + usage @@ -54,11 +41,6 @@ trait LoopCommands { "usage is " + usageMsg Result(true, None) } - - def onError(msg: String) = { - out.println("error: " + msg) - showUsage() - } } object LoopCommand { def nullary(name: String, help: String, f: () => Result): LoopCommand = @@ -67,9 +49,6 @@ trait LoopCommands { def cmd(name: String, usage: String, help: String, f: String => Result): LoopCommand = if (usage == "") new NullaryCmd(name, help, f) else new LineCmd(name, usage, help, f) - - def varargs(name: String, usage: String, help: String, f: List[String] => Result): LoopCommand = - new VarArgsCmd(name, usage, help, f) } class NullaryCmd(name: String, help: String, f: String => Result) extends LoopCommand(name, help) { diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala index 95482f1e46..381dfeb261 100644 --- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -81,7 +81,6 @@ trait MemberHandlers { def symbol = if (member.symbol eq null) NoSymbol else member.symbol def definesImplicit = false def definesValue = false - def isLegalTopLevel = false def definesTerm = Option.empty[TermName] def definesType = Option.empty[TypeName] @@ -152,7 +151,6 @@ trait MemberHandlers { class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) { override def definesTerm = Some(name) override def definesValue = true - override def isLegalTopLevel = true override def resultExtractionCode(req: Request) = codegenln("defined module ", name) } @@ -161,7 +159,6 @@ trait MemberHandlers { override def definedSymbols = List(symbol, symbol.companionSymbol) filterNot (_ == NoSymbol) override def definesType = Some(name.toTypeName) override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase) - override def isLegalTopLevel = true override def resultExtractionCode(req: Request) = codegenln("defined %s %s".format(keyword, name)) @@ -182,20 +179,6 @@ trait MemberHandlers { case sym => sym.thisType } private def importableTargetMembers = importableMembers(targetType).toList - override def isLegalTopLevel = true - - def createImportForName(name: Name): String = { - selectors foreach { - case sel @ ImportSelector(old, _, `name`, _) => return "import %s.{ %s }".format(expr, sel) - case _ => () - } - "import %s.%s".format(expr, name) - } - // TODO: Need to track these specially to honor Predef masking attempts, - // because they must be the leading imports in the code generated 
for each - // line. We can use the same machinery as Contexts now, anyway. - def isPredefImport = isReferenceToPredef(expr) - // wildcard imports, e.g. import foo._ private def selectorWild = selectors filter (_.name == nme.USCOREkw) // renamed imports, e.g. import foo.{ bar => baz } diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala index eff0ef59c5..627a881cae 100644 --- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala +++ b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala @@ -14,14 +14,10 @@ import scala.reflect.{ClassTag, classTag} trait NamedParamCreator { protected def freshName: () => String - def apply(name: String, tpe: String, value: Any): NamedParam = NamedParamClass(name, tpe, value) def apply[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = new Typed[T](name, x) def apply[T: ru.TypeTag : ClassTag](x: T): NamedParam = apply(freshName(), x) - def clazz(name: String, x: Any): NamedParam = new Untyped(name, x) - def clazz(x: Any): NamedParam = clazz(freshName(), x) - implicit def namedValue[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = apply(name, x) implicit def tuple[T: ru.TypeTag : ClassTag](pair: (String, T)): NamedParam = apply(pair._1, pair._2) } diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/compiler/scala/tools/nsc/interpreter/Naming.scala index 0d03a8669a..41ddf23de4 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Naming.scala @@ -78,7 +78,6 @@ trait Naming { private lazy val userVar = new NameCreator(sessionNames.res) // var name, like res0 private lazy val internalVar = new NameCreator(sessionNames.ires) // internal var name, like $ires0 - def isLineName(name: String) = (name startsWith sessionNames.line) && (name stripPrefix sessionNames.line forall (_.isDigit)) def isUserVarName(name: String) = userVar didGenerate name def isInternalVarName(name: String) = internalVar didGenerate name diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala index 24c01e9ae6..672a6fd28f 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala @@ -17,7 +17,6 @@ class Parsed private ( ) extends Delimited { def isEmpty = args.isEmpty def isUnqualified = args.size == 1 - def isQualified = args.size > 1 def isAtStart = cursor <= 0 private var _verbosity = 0 @@ -31,7 +30,6 @@ class Parsed private ( def bufferTail = new Parsed(buffer drop headLength, cursor - headLength, delimited) withVerbosity verbosity def prev = new Parsed(buffer, cursor - 1, delimited) withVerbosity verbosity - def next = new Parsed(buffer, cursor + 1, delimited) withVerbosity verbosity def currentChar = buffer(cursor) def currentArg = args.last def position = @@ -41,8 +39,6 @@ class Parsed private ( def isFirstDelimiter = !isEmpty && isDelimiterChar(buffer.head) def isLastDelimiter = !isEmpty && isDelimiterChar(buffer.last) - def firstIfDelimiter = if (isFirstDelimiter) buffer.head.toString else "" - def lastIfDelimiter = if (isLastDelimiter) buffer.last.toString else "" def isQuoted = false // TODO def isEscaped = !isAtStart && isEscapeChar(currentChar) && !isEscapeChar(prev.currentChar) @@ -56,13 +52,9 @@ object Parsed { private def onull(s: String) = if (s == null) "" else s - def apply(s: String): Parsed = apply(onull(s), onull(s).length) def apply(s: String, cursor: 
Int): Parsed = apply(onull(s), cursor, DefaultDelimiters) def apply(s: String, cursor: Int, delimited: Char => Boolean): Parsed = new Parsed(onull(s), cursor, delimited) - def dotted(s: String): Parsed = dotted(onull(s), onull(s).length) def dotted(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ == '.') - - def undelimited(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ => false) } diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/compiler/scala/tools/nsc/interpreter/Phased.scala index e6b780f177..f625124e70 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Phased.scala @@ -24,7 +24,6 @@ trait Phased { case NoPhaseName => false case name => active = name ; true } - def getMulti = multi def setMulti(phases: Seq[PhaseName]): Boolean = { if (phases contains NoPhaseName) false else { @@ -66,16 +65,8 @@ trait Phased { try parseInternal(str) catch { case _: Exception => NoPhaseName } - def apply[T](body: => T) = immutable.SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*) - def atCurrent[T](body: => T): T = enteringPhase(get)(body) def multi[T](body: => T): Seq[T] = multi map (ph => at(ph)(body)) - def all[T](body: => T): Seq[T] = atMulti(PhaseName.all)(body) - def show[T](body: => T): Seq[T] = { - val pairs = atMap(PhaseName.all)(body) - pairs foreach { case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) } - pairs map (_._2) - } def at[T](ph: PhaseName)(body: => T): T = { val saved = get @@ -90,11 +81,6 @@ trait Phased { finally setMulti(saved) } - def showAt[T](phs: Seq[PhaseName])(body: => T): Unit = - atMap[T](phs)(body) foreach { - case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) - } - def atMap[T](phs: Seq[PhaseName])(body: => T): Seq[(PhaseName, T)] = phs zip atMulti(phs)(body) @@ -112,16 +98,12 @@ trait Phased { def apply(id: Int): PhaseName = all find (_.id == id) getOrElse NoPhaseName implicit def apply(s: String): PhaseName = nameMap(s) - implicit def defaultPhaseName: PhaseName = active } sealed abstract class PhaseName { lazy val id = phase.id lazy val name = toString.toLowerCase def phase = currentRun.phaseNamed(name) def isEmpty = this eq NoPhaseName - - // Execute some code during this phase. - def apply[T](body: => T): T = enteringPhase(phase)(body) } case object Parser extends PhaseName @@ -158,5 +140,4 @@ trait Phased { } implicit def phaseEnumToPhase(name: PhaseName): Phase = name.phase - implicit def phaseNameToPhase(name: String): Phase = currentRun.phaseNamed(name) } diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala index ab0f1c0033..e517a16b32 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Power.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala @@ -149,17 +149,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re // And whatever else there is to do. 
init.lines foreach (intp interpret _) } - def valsDescription: String = { - def to_str(m: Symbol) = "%12s %s".format( - m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.") - - ( rutil.info[ReplValsImpl].membersDeclared - filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor) - sortBy (_.decodedName) - map to_str - mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "") - ) - } trait LowPriorityInternalInfo { implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfo[T] = new InternalInfo[T](None) @@ -172,12 +161,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re * symbol, by only implicitly installing one method, "?", and the rest * of the conveniences exist on that wrapper. */ - trait LowPriorityInternalInfoWrapper { - implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None) - } - object InternalInfoWrapper extends LowPriorityInternalInfoWrapper { - - } + trait LowPriorityInternalInfoWrapper { } class InternalInfoWrapper[T: ru.TypeTag : ClassTag](value: Option[T] = None) { def ? : InternalInfo[T] = new InternalInfo[T](value) } @@ -187,7 +171,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re * customizable symbol filter (had to hardcode no-spec to reduce noise) */ class InternalInfo[T](value: Option[T] = None)(implicit typeEvidence: ru.TypeTag[T], runtimeClassEvidence: ClassTag[T]) { - private def newInfo[U: ru.TypeTag : ClassTag](value: U): InternalInfo[U] = new InternalInfo[U](Some(value)) private def isSpecialized(s: Symbol) = s.name.toString contains "$mc" private def isImplClass(s: Symbol) = s.name.toString endsWith "$class" @@ -198,47 +181,15 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re || s.isAnonOrRefinementClass || s.isAnonymousFunction ) - def symbol = compilerSymbolFromTag(tag) - def tpe = compilerTypeFromTag(tag) - def name = symbol.name - def companion = symbol.companionSymbol - def info = symbol.info - def moduleClass = symbol.moduleClass - def owner = symbol.owner - def owners = symbol.ownerChain drop 1 - def signature = symbol.defString - - def decls = info.decls - def declsOverride = membersDeclared filter (_.isOverride) - def declsOriginal = membersDeclared filterNot (_.isOverride) - + def symbol = compilerSymbolFromTag(tag) + def tpe = compilerTypeFromTag(tag) def members = membersUnabridged filterNot excludeMember def membersUnabridged = tpe.members.toList - def membersDeclared = members filterNot excludeMember - def membersInherited = members filterNot (membersDeclared contains _) - def memberTypes = members filter (_.name.isTypeName) - def memberMethods = members filter (_.isMethod) - - def pkg = symbol.enclosingPackage - def pkgName = pkg.fullName - def pkgClass = symbol.enclosingPackageClass - def pkgMembers = pkg.info.members filterNot excludeMember - def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage) - def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember - - def tag = typeEvidence - def runtimeClass = runtimeClassEvidence.runtimeClass - def shortClass = runtimeClass.getName split "[$.]" last - - def baseClasses = tpe.baseClasses - def baseClassDecls = mapFrom(baseClasses)(_.info.decls.toList.sortBy(_.name)) - def ancestors = baseClasses drop 1 - def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol) - def baseTypes = 
tpe.baseTypeSeq.toList - - def <:<[U: ru.TypeTag : ClassTag](other: U) = tpe <:< newInfo(other).tpe - def lub[U: ru.TypeTag : ClassTag](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe)) - def glb[U: ru.TypeTag : ClassTag](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe)) + def pkg = symbol.enclosingPackage + def tag = typeEvidence + def runtimeClass = runtimeClassEvidence.runtimeClass + def shortClass = runtimeClass.getName split "[$.]" last + def baseClasses = tpe.baseClasses override def toString = value match { case Some(x) => "%s (%s)".format(x, shortClass) @@ -264,7 +215,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re } object Prettifier extends LowPriorityPrettifier { def stringOf(x: Any): String = scala.runtime.ScalaRunTime.stringOf(x) - def prettify[T](value: T): TraversableOnce[String] = default[T] prettify value def default[T] = new Prettifier[T] { def prettify(x: T): TraversableOnce[String] = AnyPrettifier prettify x def show(x: T): Unit = AnyPrettifier show x @@ -274,7 +224,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re def show(x: T): Unit def prettify(x: T): TraversableOnce[String] - def show(xs: TraversableOnce[T]): Unit = prettify(xs) foreach println def prettify(xs: TraversableOnce[T]): TraversableOnce[String] = xs flatMap (x => prettify(x)) } @@ -286,31 +235,10 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re pretty prettify f(value) foreach (StringPrettifier show _) def freq[U](p: T => U) = (value.toSeq groupBy p mapValues (_.size)).toList sortBy (-_._2) map (_.swap) - def ppfreq[U](p: T => U): Unit = freq(p) foreach { case (count, key) => println("%5d %s".format(count, key)) } - - def |[U](f: Seq[T] => Seq[U]): Seq[U] = f(value) - def ^^[U](f: T => U): Seq[U] = value map f - def ^?[U](pf: PartialFunction[T, U]): Seq[U] = value collect pf - def >>!(implicit ord: Ordering[T]): Unit = pp(_.sorted.distinct) def >>(implicit ord: Ordering[T]): Unit = pp(_.sorted) def >!(): Unit = pp(_.distinct) def >(): Unit = pp(identity) - - def >#(): Unit = this ># (identity[T] _) - def >#[U](p: T => U): Unit = this ppfreq p - - def >?(p: T => Boolean): Unit = pp(_ filter p) - def >?(s: String): Unit = pp(_ filter (_.toString contains s)) - def >?(r: Regex): Unit = pp(_ filter (_.toString matches fixRegex(r))) - - private def fixRegex(r: scala.util.matching.Regex): String = { - val s = r.pattern.toString - val prefix = if (s startsWith "^") "" else """^.*?""" - val suffix = if (s endsWith "$") "" else """.*$""" - - prefix + s + suffix - } } class MultiPrettifierClass[T: Prettifier](val value: Seq[T]) extends PrettifierClass[T]() { } @@ -334,17 +262,11 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re class RichReplURL(url: URL)(implicit codec: Codec) { def slurp(): String = io.Streamable.slurp(url) } - class RichSymbolList(syms: List[Symbol]) { - def sigs = syms map (_.defString) - def infos = syms map (_.info) - } trait Implicits1 { // fallback implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) = new SinglePrettifierClass[T](x) - - implicit def liftToTypeName(s: String): TypeName = newTypeName(s) } trait Implicits2 extends Implicits1 { class RichSymbol(sym: Symbol) { @@ -369,26 +291,13 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in) implicit def 
replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec) - - implicit def liftToTermName(s: String): TermName = newTermName(s) - implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs) } trait ReplUtilities { - // [Eugene to Paul] needs review! - // def module[T: Manifest] = getModuleIfDefined(manifest[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING) - // def clazz[T: Manifest] = getClassIfDefined(manifest[T].erasure.getName) def module[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isPackage) def clazz[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isClass) def info[T: ru.TypeTag : ClassTag] = InternalInfo[T] def ?[T: ru.TypeTag : ClassTag] = InternalInfo[T] - def url(s: String) = { - try new URL(s) - catch { case _: MalformedURLException => - if (Path(s).exists) Path(s).toURL - else new URL("http://" + s) - } - } def sanitize(s: String): String = sanitize(s.getBytes()) def sanitize(s: Array[Byte]): String = (s map { case x if x.toChar.isControl => '?' @@ -406,11 +315,8 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re lazy val rutil: ReplUtilities = new ReplUtilities { } lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { } - def context(code: String) = analyzer.rootContext(unit(code)) - def source(code: String) = newSourceFile(code) def unit(code: String) = newCompilationUnit(code) def trees(code: String) = parse(code) getOrElse Nil - def typeOf(id: String) = intp.typeOfExpression(id) override def toString = s""" |** Power mode status ** diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala index 7cd0f436c4..3392ea0b5e 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala @@ -14,9 +14,7 @@ trait ReplConfig { lazy val replProps = new ReplProps class TapMaker[T](x: T) { - def tapInfo(msg: => String): T = tap(x => replinfo(parens(x))) def tapDebug(msg: => String): T = tap(x => repldbg(parens(x))) - def tapTrace(msg: => String): T = tap(x => repltrace(parens(x))) def tap[U](f: T => U): T = { f(x) x @@ -28,12 +26,6 @@ trait ReplConfig { try Console println msg catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) } - private[nsc] def repldbgex(ex: Throwable): Unit = { - if (isReplDebug) { - echo("Caught/suppressing: " + ex) - ex.printStackTrace - } - } private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg) private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg) private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg) @@ -45,14 +37,10 @@ trait ReplConfig { repltrace(stackTraceString(unwrap(t))) alt } - private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T = - substituteAndLog("" + alt, alt)(body) private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = { try body catch logAndDiscard(label, alt) } - private[nsc] def squashAndLog(label: String)(body: => Unit): Unit = - substituteAndLog(label, ())(body) def isReplTrace: Boolean = replProps.trace def isReplDebug: Boolean = replProps.debug || isReplTrace diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala index bc3e7a10d7..2364918494 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala +++ 
b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala @@ -13,15 +13,11 @@ class ReplProps { private def bool(name: String) = BooleanProp.keyExists(name) private def int(name: String) = IntProp(name) - val jlineDebug = bool("scala.tools.jline.internal.Log.debug") - val jlineTrace = bool("scala.tools.jline.internal.Log.trace") - val info = bool("scala.repl.info") val debug = bool("scala.repl.debug") val trace = bool("scala.repl.trace") val power = bool("scala.repl.power") - val replInitCode = Prop[JFile]("scala.repl.initcode") val replAutorunCode = Prop[JFile]("scala.repl.autoruncode") val powerInitCode = Prop[JFile]("scala.repl.power.initcode") val powerBanner = Prop[JFile]("scala.repl.power.banner") diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala index 670bbf9bae..08472bbc64 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala @@ -29,5 +29,4 @@ trait ReplStrings { "scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen) def words(s: String) = s.trim split "\\s+" filterNot (_ == "") toList - def isQuoted(s: String) = (s.length >= 2) && (s.head == s.last) && ("\"'" contains s.head) } diff --git a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala index 4371f7fe05..36cdf65510 100644 --- a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala +++ b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala @@ -10,7 +10,6 @@ import scala.reflect.{ ClassTag, classTag } class RichClass[T](val clazz: Class[T]) { def toTag: ClassTag[T] = ClassTag[T](clazz) - def toTypeString: String = TypeStrings.fromClazz(clazz) // Sadly isAnonymousClass does not return true for scala anonymous // classes because our naming scheme is not doing well against the @@ -20,14 +19,12 @@ class RichClass[T](val clazz: Class[T]) { catch { case _: java.lang.InternalError => false } // good ol' "Malformed class name" ) - /** It's not easy... to be... me... 
*/ - def supermans: List[ClassTag[_]] = supers map (_.toTag) + def supertags: List[ClassTag[_]] = supers map (_.toTag) def superNames: List[String] = supers map (_.getName) def interfaces: List[JClass] = supers filter (_.isInterface) def hasAncestorName(f: String => Boolean) = superNames exists f def hasAncestor(f: JClass => Boolean) = supers exists f - def hasAncestorInPackage(pkg: String) = hasAncestorName(_ startsWith (pkg + ".")) def supers: List[JClass] = { def loop(x: JClass): List[JClass] = x.getSuperclass match { diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala index bccd8158ec..2d0917d91f 100644 --- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala @@ -19,11 +19,8 @@ extends InteractiveReader val history = NoHistory val completion = NoCompletion - def init() = () def reset() = () - def eraseLine() = () def redrawLine() = () - def currentLine = "" def readOneLine(prompt: String): String = { if (interactive) { out.print(prompt) @@ -40,4 +37,4 @@ object SimpleReader { def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, interactive: Boolean = true): SimpleReader = new SimpleReader(in, out, interactive) -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala index 9fb79a9d6f..239dbb8149 100644 --- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala @@ -33,7 +33,6 @@ trait StructuredTypeStrings extends DestructureTypes { val NoGrouping = Grouping("", "", "", false) val ListGrouping = Grouping("(", ", ", ")", false) val ProductGrouping = Grouping("(", ", ", ")", true) - val ParamGrouping = Grouping("(", ", ", ")", true) val BlockGrouping = Grouping(" { ", "; ", "}", false) private def str(level: Int)(body: => String): String = " " * level + body @@ -189,7 +188,6 @@ trait TypeStrings { else enclClass.getName + "." + (name stripPrefix enclPre) ) } - def scalaName(ct: ClassTag[_]): String = scalaName(ct.runtimeClass) def anyClass(x: Any): JClass = if (x == null) null else x.getClass private def brackets(tps: String*): String = @@ -220,7 +218,6 @@ trait TypeStrings { * practice to rely on toString for correctness) generated the VALID string * representation of the type. */ - def fromTypedValue[T: ru.TypeTag : ClassTag](x: T): String = fromTag[T] def fromValue(value: Any): String = if (value == null) "Null" else fromClazz(anyClass(value)) def fromClazz(clazz: JClass): String = scalaName(clazz) + tparamString(clazz) def fromTag[T: ru.TypeTag : ClassTag] : String = scalaName(classTag[T].runtimeClass) + tparamString[T] @@ -240,13 +237,6 @@ trait TypeStrings { case (res, (k, v)) => res.replaceAll(k, v) } } - - val typeTransforms = List( - "java.lang." -> "", - "scala.collection.immutable." -> "immutable.", - "scala.collection.mutable." -> "mutable.", - "scala.collection.generic." -> "generic." 
- ) } object TypeStrings extends TypeStrings { } diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala index 6a2d69db2c..52a085080b 100644 --- a/src/compiler/scala/tools/nsc/interpreter/package.scala +++ b/src/compiler/scala/tools/nsc/interpreter/package.scala @@ -48,7 +48,6 @@ package object interpreter extends ReplConfig with ReplStrings { private[nsc] implicit def enrichClass[T](clazz: Class[T]) = new RichClass[T](clazz) private[nsc] implicit def enrichAnyRefWithTap[T](x: T) = new TapMaker(x) - private[nsc] def tracing[T](msg: String)(x: T): T = x.tapTrace(msg) private[nsc] def debugging[T](msg: String)(x: T) = x.tapDebug(msg) private val ourClassloader = getClass.getClassLoader @@ -68,39 +67,11 @@ package object interpreter extends ReplConfig with ReplStrings { import global.{ reporter => _, _ } import definitions._ - lazy val tagOfStdReplVals = staticTypeTag[scala.tools.nsc.interpreter.StdReplVals] - protected def echo(msg: String) = { Console.out println msg Console.out.flush() } - def wrapCommand(line: String): String = { - def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T" - - words(line) match { - case Nil => - intp.executionWrapper match { - case "" => "No execution wrapper is set." - case s => "Current execution wrapper: " + s - } - case "clear" :: Nil => - intp.executionWrapper match { - case "" => "No execution wrapper is set." - case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper." - } - case wrapper :: Nil => - intp.typeOfExpression(wrapper) match { - case PolyType(List(targ), MethodType(List(arg), restpe)) => - setExecutionWrapper(originalPath(wrapper)) - "Set wrapper to '" + wrapper + "'" - case tp => - failMsg + "\nFound: " - } - case _ => failMsg - } - } - def implicitsCommand(line: String): String = { def p(x: Any) = intp.reporter.printMessage("" + x) diff --git a/src/compiler/scala/tools/nsc/interpreter/session/History.scala b/src/compiler/scala/tools/nsc/interpreter/session/History.scala index daa05b86db..794d41adc7 100644 --- a/src/compiler/scala/tools/nsc/interpreter/session/History.scala +++ b/src/compiler/scala/tools/nsc/interpreter/session/History.scala @@ -14,15 +14,9 @@ trait History { def asStrings: List[String] def index: Int def size: Int - def grep(s: String): List[String] } object NoHistory extends History { def asStrings = Nil - def grep(s: String) = Nil def index = 0 def size = 0 } - -object History { - def empty: History = NoHistory -} diff --git a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala index 9f4e2b9df3..89998e438a 100644 --- a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala +++ b/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala @@ -54,9 +54,5 @@ class SimpleHistory extends JLineHistory { def moveTo(idx: Int) = (idx > 0) && (idx <= lastIndex) && setTo(idx) def moveToEnd(): Unit = setTo(size) - // scala legacy interface - def asList: List[JEntry] = toEntries().toList - def asJavaList = entries() - def asStrings = buf.toList - def grep(s: String) = buf.toList filter (_ contains s) + def asStrings = buf.toList } -- cgit v1.2.3 From 9d4994b96c77d914687433586eb6d1f9e49c520f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 08:27:02 -0800 Subject: Members removed from scala.reflect.io. Mostly hailing from a long-ago day when I imagined I was writing a general purpose library. 
We dodged that bullet. --- src/reflect/scala/reflect/io/AbstractFile.scala | 19 +---- src/reflect/scala/reflect/io/Directory.scala | 15 +--- src/reflect/scala/reflect/io/File.scala | 84 +--------------------- src/reflect/scala/reflect/io/Path.scala | 49 ++----------- src/reflect/scala/reflect/io/PlainFile.scala | 20 +----- src/reflect/scala/reflect/io/Streamable.scala | 8 +-- .../scala/reflect/io/VirtualDirectory.scala | 4 +- src/reflect/scala/reflect/io/VirtualFile.scala | 3 +- src/reflect/scala/reflect/io/ZipArchive.scala | 7 +- 9 files changed, 20 insertions(+), 189 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index fa7298c726..1a8d1c4f5e 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -14,9 +14,9 @@ import scala.collection.mutable.ArrayBuffer /** * An abstraction over files for use in the reflection/compiler libraries. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' - * + * * @author Philippe Altherr * @version 1.0, 23/03/2004 */ @@ -79,7 +79,7 @@ object AbstractFile { * all other cases, the class `SourceFile` is used, which honors * `global.settings.encoding.value`. *

    - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ abstract class AbstractFile extends Iterable[AbstractFile] { @@ -180,19 +180,6 @@ abstract class AbstractFile extends Iterable[AbstractFile] { */ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile - /** Returns the abstract file in this abstract directory with the specified - * path relative to it, If there is no such file, returns null. The argument - * directory tells whether to look for a directory or a regular - * file. - * - * @param path ... - * @param directory ... - * @return ... - */ - def lookupPath(path: String, directory: Boolean): AbstractFile = { - lookup((f, p, dir) => f.lookupName(p, dir), path, directory) - } - /** Return an abstract file that does not check that `path` denotes * an existing file. */ diff --git a/src/reflect/scala/reflect/io/Directory.scala b/src/reflect/scala/reflect/io/Directory.scala index c040d1eac5..4bf9ed8a36 100644 --- a/src/reflect/scala/reflect/io/Directory.scala +++ b/src/reflect/scala/reflect/io/Directory.scala @@ -14,12 +14,10 @@ import java.io.{ File => JFile } * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object Directory { - import scala.util.Properties.{ tmpDir, userHome, userDir } + import scala.util.Properties.{ userHome, userDir } private def normalizePath(s: String) = Some(apply(Path(s).normalize)) def Current: Option[Directory] = if (userDir == "") None else normalizePath(userDir) - def Home: Option[Directory] = if (userHome == "") None else normalizePath(userHome) - def TmpDir: Option[Directory] = if (tmpDir == "") None else normalizePath(tmpDir) def apply(path: Path): Directory = path.toDirectory @@ -30,20 +28,18 @@ object Directory { path.createDirectory() } } -import Path._ /** An abstraction for directories. * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This is library is considered experimental and should not be used unless you know what you are doing.'' */ class Directory(jfile: JFile) extends Path(jfile) { override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory override def toDirectory: Directory = this override def toFile: File = new File(jfile) - override def isValid = jfile.isDirectory() || !jfile.exists() override def normalize: Directory = super.normalize.toDirectory /** An iterator over the contents of this directory. @@ -60,7 +56,6 @@ class Directory(jfile: JFile) extends Path(jfile) { override def walkFilter(cond: Path => Boolean): Iterator[Path] = list filter cond flatMap (_ walkFilter cond) - def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList()) def deepFiles: Iterator[File] = Path.onlyFiles(deepList()) /** If optional depth argument is not given, will recurse @@ -70,10 +65,4 @@ class Directory(jfile: JFile) extends Path(jfile) { if (depth < 0) list ++ (dirs flatMap (_ deepList (depth))) else if (depth == 0) Iterator.empty else list ++ (dirs flatMap (_ deepList (depth - 1))) - - /** An iterator over the directories underneath this directory, - * to the (optionally) given depth. 
- */ - def subdirs(depth: Int = 1): Iterator[Directory] = - deepList(depth) collect { case x: Directory => x } } diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala index 736ba5d51e..c74dc06501 100644 --- a/src/reflect/scala/reflect/io/File.scala +++ b/src/reflect/scala/reflect/io/File.scala @@ -22,8 +22,7 @@ import scala.language.{reflectiveCalls, implicitConversions} */ object File { def pathSeparator = java.io.File.pathSeparator - def separator = java.io.File.separator - + def separator = java.io.File.separator def apply(path: Path)(implicit codec: Codec) = new File(path.jfile)(codec) // Create a temporary file, which will be deleted upon jvm exit. @@ -32,41 +31,7 @@ object File { jfile.deleteOnExit() apply(jfile) } - - type HasClose = { def close(): Unit } - - def closeQuietly(target: HasClose) { - try target.close() catch { case e: IOException => } - } - def closeQuietly(target: JCloseable) { - try target.close() catch { case e: IOException => } - } - - // this is a workaround for http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6503430 - // we are using a static initializer to statically initialize a java class so we don't - // trigger java.lang.InternalErrors later when using it concurrently. We ignore all - // the exceptions so as not to cause spurious failures when no write access is available, - // e.g. google app engine. - // - // XXX need to put this behind a setting. - // - // try { - // import Streamable.closing - // val tmp = java.io.File.createTempFile("bug6503430", null, null) - // try closing(new FileInputStream(tmp)) { in => - // val inc = in.getChannel() - // closing(new FileOutputStream(tmp, true)) { out => - // out.getChannel().transferFrom(inc, 0, 0) - // } - // } - // finally tmp.delete() - // } - // catch { - // case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => () - // } } -import File._ -import Path._ /** An abstraction for files. For character data, a Codec * can be supplied at either creation time or when a method @@ -76,19 +41,17 @@ import Path._ * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This is library is considered experimental and should not be used unless you know what you are doing.'' */ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars { override val creationCodec = constructorCodec - def withCodec(codec: Codec): File = new File(jfile)(codec) override def addExtension(ext: String): File = super.addExtension(ext).toFile override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile override def toDirectory: Directory = new Directory(jfile) override def toFile: File = this override def normalize: File = super.normalize.toFile - override def isValid = jfile.isFile() || !jfile.exists() override def length = super[Path].length override def walkFilter(cond: Path => Boolean): Iterator[Path] = if (cond(this)) Iterator.single(this) else Iterator.empty @@ -99,14 +62,11 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w /** Obtains a OutputStream. */ def outputStream(append: Boolean = false) = new FileOutputStream(jfile, append) def bufferedOutput(append: Boolean = false) = new BufferedOutputStream(outputStream(append)) - def printStream(append: Boolean = false) = new PrintStream(outputStream(append), true) /** Obtains an OutputStreamWriter wrapped around a FileOutputStream. 
* This should behave like a less broken version of java.io.FileWriter, * in that unlike the java version you can specify the encoding. */ - def writer(): OutputStreamWriter = writer(false) - def writer(append: Boolean): OutputStreamWriter = writer(append, creationCodec) def writer(append: Boolean, codec: Codec): OutputStreamWriter = new OutputStreamWriter(outputStream(append), codec.charSet) @@ -118,7 +78,6 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w new BufferedWriter(writer(append, codec)) def printWriter(): PrintWriter = new PrintWriter(bufferedWriter(), true) - def printWriter(append: Boolean): PrintWriter = new PrintWriter(bufferedWriter(append), true) /** Creates a new file and writes all the Strings to it. */ def writeAll(strings: String*): Unit = { @@ -127,12 +86,6 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w finally out.close() } - def writeBytes(bytes: Array[Byte]): Unit = { - val out = bufferedOutput() - try out write bytes - finally out.close() - } - def appendAll(strings: String*): Unit = { val out = bufferedWriter(append = true) try strings foreach (out write _) @@ -150,39 +103,6 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w try Some(slurp()) catch { case _: IOException => None } - def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = { - val CHUNK = 1024 * 1024 * 16 // 16 MB - val dest = destPath.toFile - if (!isValid) fail("Source %s is not a valid file." format name) - if (this.normalize == dest.normalize) fail("Source and destination are the same.") - if (!dest.parent.exists) fail("Destination cannot be created.") - if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.") - if (dest.isDirectory) fail("Destination exists but is a directory.") - - lazy val in_s = inputStream() - lazy val out_s = dest.outputStream() - lazy val in = in_s.getChannel() - lazy val out = out_s.getChannel() - - try { - val size = in.size() - var pos, count = 0L - while (pos < size) { - count = (size - pos) min CHUNK - pos += out.transferFrom(in, pos, count) - } - } - finally List[HasClose](out, out_s, in, in_s) foreach closeQuietly - - if (this.length != dest.length) - fail("Failed to completely copy %s to %s".format(name, dest.name)) - - if (preserveFileDate) - dest.lastModified = this.lastModified - - true - } - /** Reflection since we're into the java 6+ API. 
*/ def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = { diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index 36fdc04db4..3b5d3079cd 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -27,7 +27,7 @@ import scala.language.implicitConversions * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object Path { @@ -49,27 +49,12 @@ object Path { implicit def string2path(s: String): Path = apply(s) implicit def jfile2path(jfile: JFile): Path = apply(jfile) - // java 7 style, we don't use it yet - // object AccessMode extends Enumeration { - // val EXECUTE, READ, WRITE = Value - // } - // def checkAccess(modes: AccessMode*): Boolean = { - // modes foreach { - // case EXECUTE => throw new Exception("Unsupported") // can't check in java 5 - // case READ => if (!jfile.canRead()) return false - // case WRITE => if (!jfile.canWrite()) return false - // } - // true - // } - def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs filter (_.isDirectory) map (_.toDirectory) def onlyDirs(xs: List[Path]): List[Directory] = xs filter (_.isDirectory) map (_.toDirectory) def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile) - def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile) def roots: List[Path] = java.io.File.listRoots().toList map Path.apply - def apply(segments: Seq[String]): Path = apply(segments mkString java.io.File.separator) def apply(path: String): Path = apply(new JFile(path)) def apply(jfile: JFile): Path = if (jfile.isFile) new File(jfile) @@ -84,19 +69,13 @@ import Path._ /** The Path constructor is private so we can enforce some * semantics regarding how a Path might relate to the world. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class Path private[io] (val jfile: JFile) { val separator = java.io.File.separatorChar val separatorStr = java.io.File.separator - // Validation: this verifies that the type of this object and the - // contents of the filesystem are in agreement. All objects are - // valid except File objects whose path points to a directory and - // Directory objects whose path points to a file. - def isValid: Boolean = true - // conversions def toFile: File = new File(jfile) def toDirectory: Directory = new Directory(jfile) @@ -104,6 +83,7 @@ class Path private[io] (val jfile: JFile) { def toCanonical: Path = Path(jfile.getCanonicalPath()) def toURI: URI = jfile.toURI() def toURL: URL = toURI.toURL() + /** If this path is absolute, returns it: otherwise, returns an absolute * path made up of root / this. 
*/ @@ -136,7 +116,6 @@ class Path private[io] (val jfile: JFile) { def name: String = jfile.getName() def path: String = jfile.getPath() def normalize: Path = Path(jfile.getAbsolutePath()) - def isRootPath: Boolean = roots exists (_ isSame this) def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other) def relativize(other: Path) = { @@ -152,9 +131,8 @@ class Path private[io] (val jfile: JFile) { Path(createRelativePath(segments, other.segments)) } - // derived from identity - def root: Option[Path] = roots find (this startsWith _) def segments: List[String] = (path split separator).toList filterNot (_.length == 0) + /** * @return The path of the parent directory, or root if path is already root */ @@ -185,10 +163,6 @@ class Path private[io] (val jfile: JFile) { if (i < 0) "" else name.substring(i + 1) } - // def extension: String = (name lastIndexOf '.') match { - // case -1 => "" - // case idx => name drop (idx + 1) - // } // compares against extensions in a CASE INSENSITIVE way. def hasExtension(ext: String, exts: String*) = { val lower = extension.toLowerCase @@ -213,22 +187,18 @@ class Path private[io] (val jfile: JFile) { def canRead = jfile.canRead() def canWrite = jfile.canWrite() def exists = jfile.exists() - def notExists = try !jfile.exists() catch { case ex: SecurityException => false } def isFile = jfile.isFile() def isDirectory = jfile.isDirectory() def isAbsolute = jfile.isAbsolute() - def isHidden = jfile.isHidden() def isEmpty = path.length == 0 // Information def lastModified = jfile.lastModified() - def lastModified_=(time: Long) = jfile setLastModified time // should use setXXX function? def length = jfile.length() // Boolean path comparisons def endsWith(other: Path) = segments endsWith other.segments - def startsWith(other: Path) = segments startsWith other.segments def isSame(other: Path) = toCanonical == other.toCanonical def isFresher(other: Path) = lastModified > other.lastModified @@ -248,7 +218,6 @@ class Path private[io] (val jfile: JFile) { // deletions def delete() = jfile.delete() - def deleteIfExists() = if (jfile.exists()) delete() else false /** Deletes the path recursively. Returns false on failure. * Use with caution! @@ -270,16 +239,6 @@ class Path private[io] (val jfile: JFile) { length == 0 } - def touch(modTime: Long = System.currentTimeMillis) = { - createFile() - if (isFile) - lastModified = modTime - } - - // todo - // def copyTo(target: Path, options ...): Boolean - // def moveTo(target: Path, options ...): Boolean - override def toString() = path override def equals(other: Any) = other match { case x: Path => path == x.path diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala index 82b0568657..0d4d55bdec 100644 --- a/src/reflect/scala/reflect/io/PlainFile.scala +++ b/src/reflect/scala/reflect/io/PlainFile.scala @@ -3,23 +3,11 @@ * @author Martin Odersky */ - package scala.reflect package io import java.io.{ FileInputStream, FileOutputStream, IOException } -import PartialFunction._ -/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -object PlainFile { - /** - * If the specified File exists, returns an abstract file backed - * by it. Otherwise, returns null. 
- */ - def fromPath(file: Path): PlainFile = - if (file.isDirectory) new PlainDirectory(file.toDirectory) - else if (file.isFile) new PlainFile(file) - else null -} + /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { override def isDirectory = true @@ -28,7 +16,7 @@ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { } /** This class implements an abstract file backed by a File. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainFile(val givenPath: Path) extends AbstractFile { @@ -77,10 +65,6 @@ class PlainFile(val givenPath: Path) extends AbstractFile { * specified name. If there is no such file, returns null. The * argument "directory" tells whether to look for a directory or * or a regular file. - * - * @param name ... - * @param directory ... - * @return ... */ def lookupName(name: String, directory: Boolean): AbstractFile = { val child = givenPath / name diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala index 61ec8a4c23..b45cffb150 100644 --- a/src/reflect/scala/reflect/io/Streamable.scala +++ b/src/reflect/scala/reflect/io/Streamable.scala @@ -17,14 +17,14 @@ import Path.fail * * @author Paul Phillips * @since 2.8 - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object Streamable { /** Traits which can be viewed as a sequence of bytes. Source types * which know their length should override def length: Long for more * efficient method implementations. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ trait Bytes { @@ -69,7 +69,7 @@ object Streamable { } /** For objects which can be viewed as Chars. - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ trait Chars extends Bytes { @@ -81,7 +81,6 @@ object Streamable { */ def creationCodec: Codec = implicitly[Codec] - def chars(): BufferedSource = chars(creationCodec) def chars(codec: Codec): BufferedSource = Source.fromInputStream(inputStream())(codec) def lines(): Iterator[String] = lines(creationCodec) @@ -89,7 +88,6 @@ object Streamable { /** Obtains an InputStreamReader wrapped around a FileInputStream. */ - def reader(): InputStreamReader = reader(creationCodec) def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream, codec.charSet) /** Wraps a BufferedReader around the result of reader(). diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala index 78713c2ae0..94cb52e9b5 100644 --- a/src/reflect/scala/reflect/io/VirtualDirectory.scala +++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala @@ -11,7 +11,7 @@ import scala.collection.mutable * An in-memory directory. 
* * @author Lex Spoon - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory]) @@ -26,7 +26,7 @@ extends AbstractFile { def container = maybeContainer.get def isDirectory = true - var lastModified: Long = System.currentTimeMillis + val lastModified: Long = System.currentTimeMillis override def file = null override def input = sys.error("directories cannot be read") diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala index eea81da290..09b977bd45 100644 --- a/src/reflect/scala/reflect/io/VirtualFile.scala +++ b/src/reflect/scala/reflect/io/VirtualFile.scala @@ -13,7 +13,7 @@ import java.io.{ File => JFile } * * @author Philippe Altherr * @version 1.0, 23/03/2004 - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class VirtualFile(val name: String, override val path: String) extends AbstractFile { @@ -60,7 +60,6 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF /** Returns the time that this abstract file was last modified. */ private var _lastModified: Long = 0 def lastModified: Long = _lastModified - def lastModified_=(x: Long) = _lastModified = x /** Returns all abstract subfiles of this abstract directory. */ def iterator: Iterator[AbstractFile] = { diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 3b57721e89..097d3cb71c 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -20,13 +20,10 @@ import scala.annotation.tailrec * @author Philippe Altherr (original version) * @author Paul Phillips (this one) * @version 2.0, - * + * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ object ZipArchive { - def fromPath(path: String): FileZipArchive = fromFile(new JFile(path)) - def fromPath(path: Path): FileZipArchive = fromFile(path.toFile) - /** * @param file a File * @return A ZipArchive if `file` is a readable zip file, otherwise null. @@ -41,7 +38,6 @@ object ZipArchive { * @return A ZipArchive backed by the given url. */ def fromURL(url: URL): URLZipArchive = new URLZipArchive(url) - def fromURL(url: String): URLZipArchive = fromURL(new URL(url)) private def dirName(path: String) = splitPath(path, true) private def baseName(path: String) = splitPath(path, false) @@ -79,7 +75,6 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq else Iterator(f) } } - def deepIterator = walkIterator(iterator) /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) { // have to keep this name for compat with sbt's compiler-interface -- cgit v1.2.3 From 427e02e945f012c916a1f698242cafb16e475421 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 08:28:16 -0800 Subject: Members removed from partest. It has accreted its share through the bumpy years. 
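For anything outside this repository that leaned on one of the dropped
one-liners, the surviving entry points cover the same ground. Purely as
an illustration (the log path and the call site below are invented, not
part of this patch), the deleted path2String helper in the partest
package object reduces to the retained file2String:

    import java.io.{ File => JFile }
    import scala.tools.partest.file2String

    // Previously: path2String("/tmp/run-pos.log")  (hypothetical path)
    // file2String still swallows FileNotFoundException and yields "" for a
    // missing file, which is exactly what path2String delegated to.
    val log: String = file2String(new JFile("/tmp/run-pos.log"))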
--- src/partest/scala/tools/partest/CompilerTest.scala | 3 +-- src/partest/scala/tools/partest/SecurityTest.scala | 13 ------------- src/partest/scala/tools/partest/TestUtil.scala | 10 +--------- .../scala/tools/partest/instrumented/Instrumentation.scala | 1 + .../scala/tools/partest/nest/ConsoleFileManager.scala | 13 +------------ src/partest/scala/tools/partest/nest/ConsoleRunner.scala | 2 -- src/partest/scala/tools/partest/nest/FileManager.scala | 6 ------ src/partest/scala/tools/partest/nest/NestUI.scala | 10 ---------- src/partest/scala/tools/partest/nest/ReflectiveRunner.scala | 2 -- src/partest/scala/tools/partest/nest/RunnerManager.scala | 5 ----- src/partest/scala/tools/partest/package.scala | 7 +------ 11 files changed, 5 insertions(+), 67 deletions(-) (limited to 'src') diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala index d73d99bc89..bb0732dcc6 100644 --- a/src/partest/scala/tools/partest/CompilerTest.scala +++ b/src/partest/scala/tools/partest/CompilerTest.scala @@ -21,7 +21,7 @@ abstract class CompilerTest extends DirectTest { lazy val global: Global = newCompiler() lazy val units = compilationUnits(global)(sources: _ *) import global._ - import definitions._ + import definitions.{ compilerTypeFromTag } override def extraSettings = "-usejavacp -d " + testOutput.path @@ -32,7 +32,6 @@ abstract class CompilerTest extends DirectTest { def sources: List[String] = List(code) // Utility functions - class MkType(sym: Symbol) { def apply[M](implicit t: ru.TypeTag[M]): Type = if (sym eq NoSymbol) NoType diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala index 2d6f61d0b1..1f1c8a95ea 100644 --- a/src/partest/scala/tools/partest/SecurityTest.scala +++ b/src/partest/scala/tools/partest/SecurityTest.scala @@ -10,23 +10,10 @@ import java.util._ abstract class SecurityTest extends App { def throwIt(x: Any) = throw new AccessControlException("" + x) - - def readPerm(p: PropertyPermission) = p.getActions contains "read" - def writePerm(p: PropertyPermission) = p.getActions contains "write" def propertyCheck(p: PropertyPermission): Unit = throwIt(p) def check(perm: Permission): Unit = perm match { case p: PropertyPermission => propertyCheck(p) case _ => () } - - lazy val sm = new SecurityManager { - // these two are the choke points for all permissions checks - override def checkPermission(perm: Permission): Unit = check(perm) - override def checkPermission(perm: Permission, context: Object): Unit = check(perm) - } - def securityOn(): Boolean = { - try { System.setSecurityManager(sm) ; true } - catch { case _: SecurityException => false } - } } diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala index 9bfd444180..5c177ac962 100644 --- a/src/partest/scala/tools/partest/TestUtil.scala +++ b/src/partest/scala/tools/partest/TestUtil.scala @@ -24,14 +24,6 @@ trait TestUtil { } def nanos(body: => Unit): Long = alsoNanos(body)._1 - def verifySpeed(body1: => Unit, body2: => Unit, acceptableMultiple: Double) = { - val t1 = nanos(body1).toDouble - val t2 = nanos(body2).toDouble - val mult = if (t1 > t2) t1 / t2 else t2 / t1 - - assert(mult <= acceptableMultiple, "Performance difference too great: multiple = " + mult) - } - def intercept[T <: Exception : ClassTag](code: => Unit): Unit = try { code @@ -41,6 +33,6 @@ trait TestUtil { } } +// Used in tests. 
object TestUtil extends TestUtil { - } diff --git a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala index 8a284b313b..18dd740208 100644 --- a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala +++ b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala @@ -78,6 +78,7 @@ object Instrumentation { !t.className.startsWith("scala/util/DynamicVariable") } + // Used in tests. def printStatistics(stats: Statistics = getStatistics, filter: MethodCallTrace => Boolean = standardFilter): Unit = { val stats = getStatistics println("Method call statistics:") diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala index 75aed449a8..7000e8280b 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala @@ -79,7 +79,6 @@ class ConsoleFileManager extends FileManager { testClassesDir = Path(testClasses.get).toCanonical.toDirectory NestUI.verbose("Running with classes in "+testClassesDir) - latestFile = testClassesDir.parent / "bin" latestLibFile = testClassesDir / "library" latestActorsFile = testClassesDir / "library" / "actors" latestReflectFile = testClassesDir / "reflect" @@ -90,7 +89,6 @@ class ConsoleFileManager extends FileManager { else if (testBuild.isDefined) { val dir = Path(testBuild.get) NestUI.verbose("Running on "+dir) - latestFile = dir / "bin" latestLibFile = dir / "lib/scala-library.jar" latestActorsFile = dir / "lib/scala-actors.jar" latestReflectFile = dir / "lib/scala-reflect.jar" @@ -101,7 +99,6 @@ class ConsoleFileManager extends FileManager { else { def setupQuick() { NestUI.verbose("Running build/quick") - latestFile = prefixFile("build/quick/bin") latestLibFile = prefixFile("build/quick/classes/library") latestActorsFile = prefixFile("build/quick/classes/library/actors") latestReflectFile = prefixFile("build/quick/classes/reflect") @@ -112,7 +109,6 @@ class ConsoleFileManager extends FileManager { def setupInst() { NestUI.verbose("Running dist (installed)") val p = testParent.getParentFile - latestFile = prefixFileWith(p, "bin") latestLibFile = prefixFileWith(p, "lib/scala-library.jar") latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar") latestReflectFile = prefixFileWith(p, "lib/scala-reflect.jar") @@ -122,7 +118,6 @@ class ConsoleFileManager extends FileManager { def setupDist() { NestUI.verbose("Running dists/latest") - latestFile = prefixFile("dists/latest/bin") latestLibFile = prefixFile("dists/latest/lib/scala-library.jar") latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar") latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar") @@ -132,7 +127,6 @@ class ConsoleFileManager extends FileManager { def setupPack() { NestUI.verbose("Running build/pack") - latestFile = prefixFile("build/pack/bin") latestLibFile = prefixFile("build/pack/lib/scala-library.jar") latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar") latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar") @@ -175,7 +169,6 @@ class ConsoleFileManager extends FileManager { var LATEST_PARTEST: String = "" var LATEST_ACTORS: String = "" - var latestFile: File = _ var latestLibFile: File = _ var latestActorsFile: File = _ var latestReflectFile: File = _ @@ -187,8 +180,6 @@ class ConsoleFileManager extends FileManager { // initialize above fields findLatest() - var testFiles: List[io.Path] 
= Nil - def getFiles(kind: String, cond: Path => Boolean): List[File] = { def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _) @@ -197,9 +188,7 @@ class ConsoleFileManager extends FileManager { if (dir.isDirectory) NestUI.verbose("look in %s for tests" format dir) else NestUI.failure("Directory '%s' not found" format dir) - val files = - if (testFiles.nonEmpty) testFiles filter (_.parent isSame dir) - else dir.list filterNot ignoreDir filter cond toList + val files = dir.list filterNot ignoreDir filter cond toList ( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile) } diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala index d23ee81e4d..d146618d0e 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala @@ -51,8 +51,6 @@ class ConsoleRunner extends DirectRunner { private val testSetArgs = testSets map ("--" + _.kind) private val testSetArgMap = testSetArgs zip testSets toMap - def denotesTestSet(arg: String) = testSetArgs contains arg - private def printVersion() { NestUI outline (versionMsg + "\n") } private val unaryArgs = List( diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala index 21fd314552..a4c4e7e6a6 100644 --- a/src/partest/scala/tools/partest/nest/FileManager.scala +++ b/src/partest/scala/tools/partest/nest/FileManager.scala @@ -72,17 +72,11 @@ trait FileManager extends FileUtil { var SCALAC_OPTS = PartestDefaults.scalacOpts.split(' ').toSeq var JAVA_OPTS = PartestDefaults.javaOpts var timeout = PartestDefaults.timeout - // how can 15 minutes not be enough? What are you doing, run/lisp.scala? - // You complete in 11 seconds on my machine. - var oneTestTimeout = 60 * 60 * 1000 /** Only when --debug is given. 
*/ lazy val testTimings = new mutable.HashMap[String, Long] def recordTestTiming(name: String, milliseconds: Long) = synchronized { testTimings(name) = milliseconds } - def showTestTimings() { - testTimings.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %s".format(k, v)) } - } def getLogFile(dir: File, fileBase: String, kind: String): File = new File(dir, fileBase + "-" + kind + ".log") diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala index 70db6d0ed1..ab90d387d0 100644 --- a/src/partest/scala/tools/partest/nest/NestUI.scala +++ b/src/partest/scala/tools/partest/nest/NestUI.scala @@ -54,9 +54,6 @@ object NestUI { } def warning(msg: String) = print(_warning + msg + _default) - def warning(msg: String, wr: PrintWriter) = synchronized { - wr.print(_warning + msg + _default) - } def normal(msg: String) = print(_default + msg) def normal(msg: String, wr: PrintWriter) = synchronized { @@ -104,7 +101,6 @@ object NestUI { } var _verbose = false - var _debug = false def verbose(msg: String) { if (_verbose) { @@ -112,10 +108,4 @@ object NestUI { println(msg) } } - def debug(msg: String) { - if (isPartestDebug) { - outline("debug: ") - println(msg) - } - } } diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala index 22010d4b16..4b0ed1f82a 100644 --- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala +++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala @@ -3,8 +3,6 @@ * @author Philipp Haller */ -// $Id$ - package scala.tools.partest package nest diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala index 548c5abbd9..f2ce19a950 100644 --- a/src/partest/scala/tools/partest/nest/RunnerManager.scala +++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala @@ -286,15 +286,10 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP def newTestWriters() = { val swr = new StringWriter val wr = new PrintWriter(swr, true) - // diff = "" ((swr, wr)) } - def fail(what: Any) = { - NestUI.verbose("scalac: compilation of "+what+" failed\n") - false - } def diffCheck(testFile: File, diff: String) = { testDiff = diff testDiff == "" diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala index 58cc7d5b0b..2b2ce2e435 100644 --- a/src/partest/scala/tools/partest/package.scala +++ b/src/partest/scala/tools/partest/package.scala @@ -12,11 +12,7 @@ import scala.sys.process.javaVmArguments import java.util.concurrent.Callable package partest { - class TestState { - def isOk = this eq TestState.Ok - def isFail = this eq TestState.Fail - def isTimeout = this eq TestState.Timeout - } + class TestState { } object TestState { val Ok = new TestState val Fail = new TestState @@ -43,7 +39,6 @@ package object partest { def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body } - def path2String(path: String) = file2String(new JFile(path)) def file2String(f: JFile) = try SFile(f).slurp(scala.io.Codec.UTF8) catch { case _: FileNotFoundException => "" } -- cgit v1.2.3 From 50712cf5639bd42f420c540f526393a110f3349c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 08:29:07 -0800 Subject: Members removed in the backend. 
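Of the removals below, only the force-loading overload of
Repository.icode changes a calling convention: a caller that wants
load-on-miss now spells out the two steps against the retained
icode(sym): Option[IClass] and load(sym). Repository itself lives inside
the compiler cake, so the following is only a standalone sketch of that
pattern -- the class name and the Map-backed cache are invented for the
illustration, not compiler API:

    import scala.collection.mutable

    // Stand-in for Repository: an Option-returning lookup plus an explicit
    // load step; getOrLoad mirrors what the removed icode(sym, force) did.
    final class LoadOnMissCache[K, V](loadFn: K => V) {
      private val loaded = mutable.Map.empty[K, V]
      def get(key: K): Option[V] = loaded.get(key)              // cf. icode(sym)
      def load(key: K): Unit = loaded.update(key, loadFn(key))  // cf. load(sym)
      def getOrLoad(key: K): V = get(key).getOrElse { load(key); loaded(key) }
    }

    val cache = new LoadOnMissCache[String, Int](_.length)
    assert(cache.getOrLoad("BasicBlocks") == 11)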
--- .../tools/nsc/backend/icode/BasicBlocks.scala | 28 ------------------ .../nsc/backend/icode/ExceptionHandlers.scala | 6 ---- .../scala/tools/nsc/backend/icode/GenICode.scala | 18 ------------ .../scala/tools/nsc/backend/icode/Members.scala | 28 ------------------ .../scala/tools/nsc/backend/icode/Opcodes.scala | 20 +------------ .../scala/tools/nsc/backend/icode/Primitives.scala | 13 --------- .../scala/tools/nsc/backend/icode/Repository.scala | 11 ------- .../scala/tools/nsc/backend/icode/TypeKinds.scala | 3 -- .../scala/tools/nsc/backend/icode/TypeStacks.scala | 10 ------- .../backend/icode/analysis/CopyPropagation.scala | 18 ++---------- .../backend/icode/analysis/DataFlowAnalysis.scala | 13 --------- .../backend/icode/analysis/TypeFlowAnalysis.scala | 34 ---------------------- .../scala/tools/nsc/backend/jvm/GenASM.scala | 14 --------- .../scala/tools/nsc/backend/jvm/GenJVM.scala | 17 ----------- .../scala/tools/nsc/backend/msil/GenMSIL.scala | 10 +------ .../tools/nsc/backend/opt/ClosureElimination.scala | 4 --- .../scala/tools/nsc/backend/opt/Inliners.scala | 4 --- 17 files changed, 4 insertions(+), 247 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index 7c7777f761..34bdc1ede4 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -280,14 +280,6 @@ trait BasicBlocks { } } - /** Insert instructions in 'is' immediately after index 'idx'. */ - def insertAfter(idx: Int, is: List[Instruction]) { - assert(closed, "Instructions can be replaced only after the basic block is closed") - - instrs = instrs.patch(idx + 1, is, 0) - code.touched = true - } - /** Removes instructions found at the given positions. */ def removeInstructionsAt(positions: Int*) { @@ -335,10 +327,6 @@ trait BasicBlocks { * is closed, which sets the DIRTYSUCCS flag. */ def emit(instr: Instruction, pos: Position) { -/* if (closed) { - print() - Console.println("trying to emit: " + instr) - } */ assert(!closed || ignore, this) if (ignore) { @@ -432,11 +420,6 @@ trait BasicBlocks { ignore = true } - def exitIgnoreMode() { - assert(ignore, "Exit ignore mode when not in ignore mode: " + this) - ignore = false - } - /** Return the last instruction of this basic block. 
*/ def lastInstruction = if (closed) instrs(instrs.length - 1) @@ -493,17 +476,6 @@ trait BasicBlocks { override def hashCode = label * 41 + code.hashCode - // Instead of it, rather use a printer - def print() { print(java.lang.System.out) } - - def print(out: java.io.PrintStream) { - out.println("block #"+label+" :") - foreach(i => out.println(" " + i)) - out.print("Successors: ") - successors.foreach((x: BasicBlock) => out.print(" "+x.label.toString())) - out.println() - } - private def succString = if (successors.isEmpty) "[S: N/A]" else successors.distinct.mkString("[S: ", ", ", "]") private def predString = if (predecessors.isEmpty) "[P: N/A]" else predecessors.distinct.mkString("[P: ", ", ", "]") diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala index f35996eeb9..7c2961778f 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala @@ -71,10 +71,4 @@ trait ExceptionHandlers { override def toString() = "finalizer_" + label override def dup: Finalizer = new Finalizer(method, label, pos) } - - object NoFinalizer extends Finalizer(null, newTermNameCached(""), NoPosition) { - override def startBlock: BasicBlock = sys.error("NoFinalizer cannot have a start block."); - override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block."); - override def dup = this - } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index e2436d0e90..61d0292707 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1946,13 +1946,6 @@ abstract class GenICode extends SubComponent { this } - def removeFinalizer(f: Tree): this.type = { - assert(cleanups.head contains f, - "Illegal nesting of cleanup operations: " + cleanups + " while exiting finalizer " + f); - cleanups = cleanups.tail - this - } - /** Prepare a new context upon entry into a method. */ def enterMethod(m: IMethod, d: DefDef): Context = { @@ -2025,16 +2018,6 @@ abstract class GenICode extends SubComponent { currentExceptionHandlers = currentExceptionHandlers.tail } - /** Remove the given handler from the list of active exception handlers. */ - def removeActiveHandler(exh: ExceptionHandler): Unit = { - assert(handlerCount > 0 && handlers.head == exh, - "Wrong nesting of exception handlers." + this + " for " + exh) - handlerCount -= 1 - handlers = handlers.tail - debuglog("removed handler: " + exh); - - } - /** Clone the current context */ def dup: Context = new Context(this) @@ -2321,7 +2304,6 @@ abstract class GenICode extends SubComponent { val locals: ListBuffer[Local] = new ListBuffer def add(l: Local) = locals += l - def remove(l: Local) = locals -= l /** Return all locals that are in scope. 
*/ def varsInScope: Buffer[Local] = outer.varsInScope.clone() ++= locals diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala index 07abe9d74f..12daa32186 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala @@ -126,9 +126,7 @@ trait Members { override def toString() = symbol.fullName - def lookupField(s: Symbol) = fields find (_.symbol == s) def lookupMethod(s: Symbol) = methods find (_.symbol == s) - def lookupMethod(s: Name) = methods find (_.symbol.name == s) /* returns this methods static ctor if it has one. */ def lookupStaticCtor: Option[IMethod] = methods find (_.symbol.isStaticConstructor) @@ -159,7 +157,6 @@ trait Members { def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this def foreachBlock[U](f: BasicBlock => U): Unit = blocks foreach f - def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f) var native = false @@ -192,7 +189,6 @@ trait Members { } def addLocals(ls: List[Local]) = ls foreach addLocal - def addParams(as: List[Local]) = as foreach addParam def lookupLocal(n: Name): Option[Local] = locals find (_.sym.name == n) def lookupLocal(sym: Symbol): Option[Local] = locals find (_.sym == sym) @@ -207,28 +203,7 @@ trait Members { override def toString() = symbol.fullName - def matchesSignature(other: IMethod) = { - (symbol.name == other.symbol.name) && - (params corresponds other.params)(_.kind == _.kind) && - (returnType == other.returnType) - } - import opcodes._ - def checkLocals(): Unit = { - def localsSet = (code.blocks flatMap { bb => - bb.iterator collect { - case LOAD_LOCAL(l) => l - case STORE_LOCAL(l) => l - } - }).toSet - - if (hasCode) { - log("[checking locals of " + this + "]") - locals filterNot localsSet foreach { l => - log("Local " + l + " is not declared in " + this) - } - } - } /** Merge together blocks that have a single successor which has a * single predecessor. Exception handlers are taken into account (they @@ -294,9 +269,6 @@ trait Members { /** Starting PC for this local's visibility range. */ var start: Int = _ - /** Ending PC for this local's visibility range. */ - var end: Int = _ - /** PC-based ranges for this local variable's visibility */ var ranges: List[(Int, Int)] = Nil diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala index 0e7c75de50..eaa742a1da 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala @@ -3,8 +3,6 @@ * @author Martin Odersky */ - - package scala.tools.nsc package backend package icode @@ -110,17 +108,11 @@ trait Opcodes { self: ICodes => // Vlad: I wonder why we keep producedTypes around -- it looks like an useless thing to have def producedTypes: List[TypeKind] = Nil - /** This method returns the difference of size of the stack when the instruction is used */ - def difference = produced-consumed - /** The corresponding position in the source file */ private var _pos: Position = NoPosition def pos: Position = _pos - /** Used by dead code elimination. 
*/ - var useful: Boolean = false - def setPos(p: Position): this.type = { _pos = p this @@ -132,13 +124,6 @@ trait Opcodes { self: ICodes => } object opcodes { - - def mayThrow(i: Instruction): Boolean = i match { - case LOAD_LOCAL(_) | STORE_LOCAL(_) | CONSTANT(_) | THIS(_) | CZJUMP(_, _, _, _) - | DROP(_) | DUP(_) | RETURN(_) | LOAD_EXCEPTION(_) | JUMP(_) | CJUMP(_, _, _, _) => false - case _ => true - } - /** Loads "this" on top of the stack. * Stack: ... * ->: ...:ref @@ -714,8 +699,6 @@ trait Opcodes { self: ICodes => /** Is this a static method call? */ def isStatic: Boolean = false - def isSuper: Boolean = false - /** Is this an instance method call? */ def hasInstance: Boolean = true @@ -749,7 +732,6 @@ trait Opcodes { self: ICodes => * On JVM, translated to `invokespecial`. */ case class SuperCall(mix: Name) extends InvokeStyle { - override def isSuper = true override def toString(): String = { "super(" + mix + ")" } } @@ -814,7 +796,7 @@ trait Opcodes { self: ICodes => case class CIL_NEWOBJ(method: Symbol) extends Instruction { override def toString(): String = "CIL_NEWOBJ " + hostClass.fullName + method.fullName - var hostClass: Symbol = method.owner; + val hostClass: Symbol = method.owner; override def consumed = method.tpe.paramTypes.length override def consumedTypes = method.tpe.paramTypes map toTypeKind override def produced = 1 diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala index c8579041ba..351d99f51a 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala @@ -76,25 +76,12 @@ trait Primitives { self: ICodes => /** Pretty printer for primitives */ class PrimitivePrinter(out: PrintWriter) { - def print(s: String): PrimitivePrinter = { out.print(s) this } def print(o: AnyRef): PrimitivePrinter = print(o.toString()) - - def printPrimitive(prim: Primitive) = prim match { - case Negation(kind) => - print("!") - - case Test(op, kind, zero) => - print(op).print(kind) - - case Comparison(op, kind) => - print(op).print("(").print(kind) - - } } /** This class represents a comparison operation. */ diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala index e73015c4da..e92e61c957 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala @@ -26,17 +26,6 @@ trait Repository { /** The icode of the given class, if available */ def icode(sym: Symbol): Option[IClass] = (classes get sym) orElse (loaded get sym) - /** The icode of the given class. If not available, it loads - * its bytecode. - */ - def icode(sym: Symbol, force: Boolean): IClass = - icode(sym) getOrElse { - log("loading " + sym) - load(sym) - assert(available(sym)) - loaded(sym) - } - /** Load bytecode for given symbol. 
*/ def load(sym: Symbol): Boolean = { try { diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index f96dce9f1c..0990cfba6f 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -66,7 +66,6 @@ trait TypeKinds { self: ICodes => def isValueType = false def isBoxedType = false final def isRefOrArrayType = isReferenceType || isArrayType - final def isRefArrayOrBoxType = isRefOrArrayType || isBoxedType final def isNothingType = this == NothingReference final def isNullType = this == NullReference final def isInterfaceType = this match { @@ -114,8 +113,6 @@ trait TypeKinds { self: ICodes => } } - var lubs0 = 0 - /** * The least upper bound of two typekinds. They have to be either * REFERENCE or ARRAY kinds. diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala index c1bf4304ea..57d51dad49 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala @@ -20,8 +20,6 @@ trait TypeStacks { */ type Rep = List[TypeKind] - object NoTypeStack extends TypeStack(Nil) { } - class TypeStack(var types: Rep) { if (types.nonEmpty) checkerDebug("Created " + this) @@ -69,14 +67,6 @@ trait TypeStacks { def apply(n: Int): TypeKind = types(n) - /** - * A TypeStack agrees with another one if they have the same - * length and each type kind agrees position-wise. Two - * types agree if one is a subtype of the other. - */ - def agreesWith(other: TypeStack): Boolean = - (types corresponds other.types)((t1, t2) => t1 <:< t2 || t2 <:< t1) - /* This method returns a String representation of the stack */ override def toString() = if (types.isEmpty) "[]" diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala index 4a5844531a..7f32b2b764 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala @@ -26,12 +26,8 @@ abstract class CopyPropagation { case object This extends Location /** Values that can be on the stack. */ - abstract class Value { - def isRecord = false - } - case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value { - override def isRecord = true - } + abstract class Value { } + case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value { } /** The value of some location in memory. */ case class Deref(l: Location) extends Value @@ -91,16 +87,6 @@ abstract class CopyPropagation { loop(l) getOrElse Deref(LocalVar(l)) } - /* Return the binding for the given field of the given record */ - def getBinding(r: Record, f: Symbol): Value = { - assert(r.bindings contains f, "Record " + r + " does not contain a field " + f) - - r.bindings(f) match { - case Deref(LocalVar(l)) => getBinding(l) - case target => target - } - } - /** Return a local which contains the same value as this field, if any. * If the field holds a reference to a local, the returned value is the * binding of that local. 
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala index 04c3eedbad..cc3a7eb876 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala @@ -34,15 +34,6 @@ trait DataFlowAnalysis[L <: SemiLattice] { f } - /** Reinitialize, but keep the old solutions. Should be used when reanalyzing the - * same method, after some code transformation. - */ - def reinit(f: => Unit): Unit = { - iterations = 0 - worklist.clear; visited.clear; - f - } - def run(): Unit /** Implements forward dataflow analysis: the transfer function is @@ -82,10 +73,6 @@ trait DataFlowAnalysis[L <: SemiLattice] { sys.error("Could not find element " + e.getMessage) } - /** ... - * - * @param f ... - */ def backwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = while (worklist.nonEmpty) { if (stat) iterations += 1 diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 15755f31ad..c9d295a350 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -268,36 +268,6 @@ abstract class TypeFlowAnalysis { out } // interpret - - class SimulatedStack { - private var types: List[InferredType] = Nil - private var depth = 0 - - /** Remove and return the topmost element on the stack. If the - * stack is empty, return a reference to a negative index on the - * stack, meaning it refers to elements pushed by a predecessor block. - */ - def pop: InferredType = types match { - case head :: rest => - types = rest - head - case _ => - depth -= 1 - TypeOfStackPos(depth) - } - - def pop2: (InferredType, InferredType) = { - (pop, pop) - } - - def push(t: InferredType) { - depth += 1 - types = types ::: List(t) - } - - def push(k: TypeKind) { push(Const(k)) } - } - abstract class InferredType { /** Return the type kind pointed by this inferred type. 
*/ def getKind(in: lattice.Elem): icodes.TypeKind = this match { @@ -737,10 +707,6 @@ abstract class TypeFlowAnalysis { private var lastStart = 0L - def reset() { - millis = 0L - } - def start() { lastStart = System.currentTimeMillis } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 8bae80c760..1aa80d5c5b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -455,7 +455,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { /** basic functionality for class file building */ abstract class JBuilder(bytecodeWriter: BytecodeWriter) { - val EMPTY_JTYPE_ARRAY = Array.empty[asm.Type] val EMPTY_STRING_ARRAY = Array.empty[String] val mdesc_arglessvoid = "()V" @@ -523,7 +522,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { /** Specialized array conversion to prevent calling * java.lang.reflect.Array.newInstance via TraversableOnce.toArray */ - def mkArray(xs: Traversable[asm.Type]): Array[asm.Type] = { val a = new Array[asm.Type](xs.size); xs.copyToArray(a); a } def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a } // ----------------------------------------------------------------------------------------- @@ -1757,11 +1755,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { import asm.Opcodes; - def aconst(cst: AnyRef) { - if (cst == null) { jmethod.visitInsn(Opcodes.ACONST_NULL) } - else { jmethod.visitLdcInsn(cst) } - } - final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) } def iconst(cst: Int) { @@ -2924,15 +2917,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters { ////////////////////// local vars /////////////////////// - // def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe)) - def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1 - // def indexOf(m: IMethod, sym: Symbol): Int = { - // val Some(local) = m lookupLocal sym - // indexOf(local) - // } - final def indexOf(local: Local): Int = { assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ") local.index diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index 31a4554d97..e1484d1f97 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -182,15 +182,9 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with val StringBuilderType = new JObjectType(StringBuilderClassName) // TODO use ASMType.getObjectType val toStringType = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY) // TODO use ASMType.getMethodType val arrayCloneType = new JMethodType(JAVA_LANG_OBJECT, JType.EMPTY_ARRAY) - val MethodTypeType = new JObjectType("java.dyn.MethodType") - val JavaLangClassType = new JObjectType("java.lang.Class") - val MethodHandleType = new JObjectType("java.dyn.MethodHandle") // Scala attributes val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo") - val BeanInfoSkipAttr = rootMirror.getRequiredClass("scala.beans.BeanInfoSkip") - val BeanDisplayNameAttr = rootMirror.getRequiredClass("scala.beans.BeanDisplayName") - val BeanDescriptionAttr = rootMirror.getRequiredClass("scala.beans.BeanDescription") final val ExcludedForwarderFlags = { import Flags._ @@ -254,7 +248,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with 
GenAndroid with var method: IMethod = _ var jclass: JClass = _ var jmethod: JMethod = _ - // var jcode: JExtendedCode = _ def isParcelableClass = isAndroidParcelableClass(clasz.symbol) def isRemoteClass = clasz.symbol hasAnnotation RemoteAttr @@ -263,9 +256,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with } val fjbgContext = new FJBGContext(49, 0) - - val emitSource = debugLevel >= 1 - val emitLines = debugLevel >= 2 val emitVars = debugLevel >= 3 // bug had phase with wrong name; leaving enabled for brief pseudo deprecation @@ -1835,15 +1825,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with ////////////////////// local vars /////////////////////// - def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe)) - def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1 - def indexOf(m: IMethod, sym: Symbol): Int = { - val Some(local) = m lookupLocal sym - indexOf(local) - } - def indexOf(local: Local): Int = { assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ") local.index diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala index 2253ae6e15..2fb6550239 100644 --- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala +++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala @@ -23,8 +23,6 @@ abstract class GenMSIL extends SubComponent { import icodes._ import icodes.opcodes._ - val x = loaders - /** Create a new phase */ override def newPhase(p: Phase) = new MsilPhase(p) @@ -83,9 +81,6 @@ abstract class GenMSIL extends SubComponent { SYMTAB_DEFAULT_CONSTR => SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR} val EXCEPTION = clrTypes.getType("System.Exception") - val MBYTE_ARRAY = clrTypes.mkArrayType(MBYTE) - - val ICLONEABLE = clrTypes.getType("System.ICloneable") val MEMBERWISE_CLONE = MOBJECT.GetMethod("MemberwiseClone", MsilType.EmptyTypes) val MMONITOR = clrTypes.getType("System.Threading.Monitor") @@ -102,9 +97,6 @@ abstract class GenMSIL extends SubComponent { val INT_PTR = clrTypes.getType("System.IntPtr") - val JOBJECT = definitions.ObjectClass - val JSTRING = definitions.StringClass - val SystemConvert = clrTypes.getType("System.Convert") val objParam = Array(MOBJECT) @@ -622,7 +614,7 @@ abstract class GenMSIL extends SubComponent { * - emit `Leave handlerReturnLabel` instead of the Return * - emit code at the end: load the local and return its value */ - var currentHandlers = new mutable.Stack[ExceptionHandler] + val currentHandlers = new mutable.Stack[ExceptionHandler] // The IMethod the Local/Label/Kind below belong to var handlerReturnMethod: IMethod = _ // Stores the result when returning inside an exception block diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala index 8d6de821bb..650775b259 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala @@ -187,10 +187,6 @@ abstract class ClosureElimination extends SubComponent { case Boxed(LocalVar(v)) => LOAD_LOCAL(v) } - - /** is field 'f' accessible from method 'm'? 
*/ - def accessible(f: Symbol, m: Symbol): Boolean = - f.isPublic || (f.isProtected && (f.enclosingPackageClass == m.enclosingPackageClass)) } /* class ClosureElim */ diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index 7d741aab60..ca1cfc8929 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -602,7 +602,6 @@ abstract class Inliners extends SubComponent { override def toString = m.toString val sym = m.symbol - val name = sym.name def owner = sym.owner def paramTypes = sym.info.paramTypes def minimumStack = paramTypes.length + 1 @@ -618,13 +617,11 @@ abstract class Inliners extends SubComponent { def length = blocks.length def openBlocks = blocks filterNot (_.closed) def instructions = m.code.instructions - // def linearized = linearizer linearize m def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10 def isLarge = length > MAX_INLINE_SIZE def isRecursive = m.recursive def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs - def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp)) def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED) def hasNonFinalizerHandler = handlers exists { @@ -732,7 +729,6 @@ abstract class Inliners extends SubComponent { */ sealed abstract class InlineSafetyInfo { def isSafe = false - def isUnsafe = !isSafe } case object NeverSafeToInline extends InlineSafetyInfo case object InlineableAtThisCaller extends InlineSafetyInfo { override def isSafe = true } -- cgit v1.2.3 From e5b050814deb2e7e1d6d05511d3a6cb6b013b549 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 08:29:42 -0800 Subject: Members removed in scala.reflect. Not a bad showing for a newcomer. Of course most of this code predates scala.reflect by a lot. 
--- src/compiler/scala/reflect/reify/Errors.scala | 5 - .../scala/reflect/reify/codegen/GenUtils.scala | 25 -- .../scala/reflect/reify/phases/Metalevels.scala | 4 +- .../scala/reflect/reify/utils/SymbolTables.scala | 3 - .../scala/reflect/internal/AnnotationInfos.scala | 11 +- .../scala/reflect/internal/BuildUtils.scala | 2 - .../reflect/internal/ClassfileConstants.scala | 7 - .../scala/reflect/internal/Definitions.scala | 114 +------- .../reflect/internal/ExistentialsAndSkolems.scala | 1 - src/reflect/scala/reflect/internal/Importers.scala | 1 - src/reflect/scala/reflect/internal/Names.scala | 44 +-- src/reflect/scala/reflect/internal/Printers.scala | 2 - src/reflect/scala/reflect/internal/Scopes.scala | 2 - src/reflect/scala/reflect/internal/StdNames.scala | 146 +--------- .../scala/reflect/internal/SymbolTable.scala | 4 - src/reflect/scala/reflect/internal/Symbols.scala | 103 +------ src/reflect/scala/reflect/internal/TreeGen.scala | 7 - src/reflect/scala/reflect/internal/TreeInfo.scala | 48 --- src/reflect/scala/reflect/internal/Trees.scala | 1 - .../scala/reflect/internal/TypeDebugging.scala | 3 - src/reflect/scala/reflect/internal/Types.scala | 322 +-------------------- .../reflect/internal/pickling/PickleBuffer.scala | 3 - .../reflect/internal/pickling/PickleFormat.scala | 1 - .../reflect/internal/pickling/UnPickler.scala | 9 - .../scala/reflect/internal/util/Collections.scala | 30 -- .../scala/reflect/internal/util/HashSet.scala | 2 - .../scala/reflect/internal/util/Origins.scala | 2 - .../scala/reflect/internal/util/Position.scala | 2 +- .../scala/reflect/internal/util/SourceFile.scala | 6 - .../scala/reflect/internal/util/StringOps.scala | 29 +- .../scala/reflect/internal/util/TableDef.scala | 6 - .../internal/util/TraceSymbolActivity.scala | 3 - .../scala/reflect/internal/util/WeakHashSet.scala | 3 - src/reflect/scala/reflect/macros/TreeBuilder.scala | 1 - .../scala/reflect/runtime/JavaMirrors.scala | 24 -- .../scala/reflect/runtime/JavaUniverse.scala | 5 +- .../reflect/runtime/SynchronizedSymbols.scala | 3 - src/reflect/scala/reflect/runtime/package.scala | 2 +- 38 files changed, 39 insertions(+), 947 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala index a72233274e..0ff098e1da 100644 --- a/src/compiler/scala/reflect/reify/Errors.scala +++ b/src/compiler/scala/reflect/reify/Errors.scala @@ -21,11 +21,6 @@ trait Errors { throw new ReificationException(defaultErrorPosition, msg) } - def CannotReifySymbol(sym: Symbol) = { - val msg = "implementation restriction: cannot reify symbol %s (%s)".format(sym, sym.accurateKindString) - throw new ReificationException(defaultErrorPosition, msg) - } - def CannotReifyWeakType(details: Any) = { val msg = "cannot create a TypeTag" + details + ": use WeakTypeTag instead" throw new ReificationException(defaultErrorPosition, msg) diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala index 6554947f88..e2275f79ff 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala @@ -90,22 +90,6 @@ trait GenUtils { /** An (unreified) path that refers to term definition with given fully qualified name */ def termPath(fullname: String): Tree = path(fullname, newTermName) - /** An (unreified) path that refers to type definition with given fully qualified name */ - def typePath(fullname: String): Tree = path(fullname, newTypeName) 
- - def isTough(tpe: Type) = { - def isTough(tpe: Type) = tpe match { - case _: RefinedType => true - case _: ExistentialType => true - case _: ClassInfoType => true - case _: MethodType => true - case _: PolyType => true - case _ => false - } - - tpe != null && (tpe exists isTough) - } - object TypedOrAnnotated { def unapply(tree: Tree): Option[Tree] = tree match { case ty @ Typed(_, _) => @@ -117,15 +101,6 @@ trait GenUtils { } } - def isAnnotated(tpe: Type) = { - def isAnnotated(tpe: Type) = tpe match { - case _: AnnotatedType => true - case _ => false - } - - tpe != null && (tpe exists isAnnotated) - } - def isSemiConcreteTypeMember(tpe: Type) = tpe match { case TypeRef(SingleType(_, _), sym, _) if sym.isAbstractType && !sym.isExistential => true case _ => false diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala index 92d951c3a1..cccf080dbf 100644 --- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala +++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala @@ -1,6 +1,8 @@ package scala.reflect.reify package phases +import scala.collection.{ mutable } + trait Metalevels { self: Reifier => @@ -101,7 +103,7 @@ trait Metalevels { */ val metalevels = new Transformer { var insideSplice = false - var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]() + val inlineableBindings = mutable.Map[TermName, Tree]() def withinSplice[T](op: => T) = { val old = insideSplice diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala index 99118c4f2e..5f8de9894f 100644 --- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala +++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala @@ -15,9 +15,6 @@ trait SymbolTables { private[SymbolTable] val original: Option[List[Tree]] = None) { def syms: List[Symbol] = symtab.keys.toList - def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined) - -// def aliases: Map[Symbol, List[TermName]] = aliases.distinct groupBy (_._1) mapValues (_ map (_._2)) def symDef(sym: Symbol): Tree = symtab.getOrElse(sym, EmptyTree) diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 7c12b5979d..da50c55fe1 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -6,7 +6,6 @@ package scala.reflect package internal -import util._ import pickling.ByteCodecs import scala.annotation.tailrec import scala.collection.immutable.ListMap @@ -288,10 +287,6 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => /** Check whether any of the arguments mention a symbol */ def refsSymbol(sym: Symbol) = hasArgWhich(_.symbol == sym) - /** Change all ident's with Symbol "from" to instead use symbol "to" */ - def substIdentSyms(from: Symbol, to: Symbol) = - AnnotationInfo(atp, args map (_ substituteSymbols (List(from), List(to))), assocs) setPos pos - def stringArg(index: Int) = constantAtIndex(index) map (_.stringValue) def intArg(index: Int) = constantAtIndex(index) map (_.intValue) def symbolArg(index: Int) = argAtIndex(index) collect { @@ -325,14 +320,14 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => implicit val AnnotationTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo]) object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil) - + /** Extracts symbol 
of thrown exception from AnnotationInfo. - * + * * Supports both “old-style” `@throws(classOf[Exception])` * as well as “new-stye” `@throws[Exception]("cause")` annotations. */ object ThrownException { - def unapply(ann: AnnotationInfo): Option[Symbol] = + def unapply(ann: AnnotationInfo): Option[Symbol] = ann match { case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass => None diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala index 9f41f0336e..9da6ad652a 100644 --- a/src/reflect/scala/reflect/internal/BuildUtils.scala +++ b/src/reflect/scala/reflect/internal/BuildUtils.scala @@ -1,8 +1,6 @@ package scala.reflect package internal -import Flags._ - trait BuildUtils { self: SymbolTable => class BuildImpl extends BuildApi { diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala index b1462e9709..2ab3caa19d 100644 --- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala +++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala @@ -380,11 +380,4 @@ object ClassfileConstants { def toScalaMethodFlags(flags: Int): Long = FlagTranslation methodFlags flags def toScalaClassFlags(flags: Int): Long = FlagTranslation classFlags flags def toScalaFieldFlags(flags: Int): Long = FlagTranslation fieldFlags flags - - @deprecated("Use another method in this object", "2.10.0") - def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = ( - if (isClass) toScalaClassFlags(flags) - else if (isField) toScalaFieldFlags(flags) - else toScalaMethodFlags(flags) - ) } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 1f751cde38..9a846179b9 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -9,7 +9,6 @@ package internal import scala.annotation.{ switch, meta } import scala.collection.{ mutable, immutable } import Flags._ -import PartialFunction._ import scala.reflect.api.{Universe => ApiUniverse} trait Definitions extends api.StandardDefinitions { @@ -149,7 +148,6 @@ trait Definitions extends api.StandardDefinitions { FloatClass, DoubleClass ) - def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol) def ScalaPrimitiveValueClasses: List[ClassSymbol] = ScalaValueClasses } @@ -157,9 +155,6 @@ trait Definitions extends api.StandardDefinitions { private var isInitialized = false def isDefinitionsInitialized = isInitialized - // symbols related to packages - var emptypackagescope: Scope = null //debug - @deprecated("Moved to rootMirror.RootPackage", "2.10.0") val RootPackage: ModuleSymbol = rootMirror.RootPackage @@ -181,8 +176,6 @@ trait Definitions extends api.StandardDefinitions { lazy val RuntimePackage = getRequiredPackage("scala.runtime") lazy val RuntimePackageClass = RuntimePackage.moduleClass.asClass - lazy val JavaLangEnumClass = requiredClass[java.lang.Enum[_]] - // convenient one-argument parameter lists lazy val anyparam = List(AnyClass.tpe) lazy val anyvalparam = List(AnyValClass.typeConstructor) @@ -261,8 +254,6 @@ trait Definitions extends api.StandardDefinitions { || tp =:= AnyValClass.tpe || tp =:= AnyRefClass.tpe ) - /** Does this type have a parent which is none of Any, AnyVal, or AnyRef? 
*/ - def hasNonTrivialParent(tp: Type) = tp.parents exists (t => !isTrivialTopType(tp)) private def fixupAsAnyTrait(tpe: Type): Type = tpe match { case ClassInfoType(parents, decls, clazz) => @@ -273,7 +264,6 @@ trait Definitions extends api.StandardDefinitions { } case PolyType(tparams, restpe) => PolyType(tparams, fixupAsAnyTrait(restpe)) -// case _ => tpe } // top types @@ -358,15 +348,11 @@ trait Definitions extends api.StandardDefinitions { lazy val UnqualifiedOwners = UnqualifiedModules.toSet ++ UnqualifiedModules.map(_.moduleClass) lazy val PredefModule = requiredModule[scala.Predef.type] - lazy val PredefModuleClass = PredefModule.moduleClass - def Predef_classOf = getMemberMethod(PredefModule, nme.classOf) - // def Predef_identity = getMemberMethod(PredefModule, nme.identity) - // def Predef_conforms = getMemberMethod(PredefModule, nme.conforms) - def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray) - def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp)) - def Predef_??? = getMemberMethod(PredefModule, nme.???) - def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly) + def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray) + def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp)) + def Predef_??? = getMemberMethod(PredefModule, nme.???) + def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly) /** Is `sym` a member of Predef with the given name? * Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def` @@ -382,7 +368,6 @@ trait Definitions extends api.StandardDefinitions { lazy val SpecializableModule = requiredModule[Specializable] lazy val GroupOfSpecializable = getMemberClass(SpecializableModule, tpnme.Group) - lazy val ConsoleModule = requiredModule[scala.Console.type] lazy val ScalaRunTimeModule = requiredModule[scala.runtime.ScalaRunTime.type] lazy val SymbolModule = requiredModule[scala.Symbol.type] lazy val Symbol_apply = getMemberMethod(SymbolModule, nme.apply) @@ -392,9 +377,7 @@ trait Definitions extends api.StandardDefinitions { def arrayLengthMethod = getMemberMethod(ScalaRunTimeModule, nme.array_length) def arrayCloneMethod = getMemberMethod(ScalaRunTimeModule, nme.array_clone) def ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible) - def scalaRuntimeSameElements = getMemberMethod(ScalaRunTimeModule, nme.sameElements) def arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass) - def arrayElementClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayElementClass) // classes with special meanings lazy val StringAddClass = requiredClass[scala.runtime.StringAdd] @@ -405,11 +388,6 @@ trait Definitions extends api.StandardDefinitions { lazy val TraitSetterAnnotationClass = requiredClass[scala.runtime.TraitSetter] lazy val DelayedInitClass = requiredClass[scala.DelayedInit] def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit) - // a dummy value that communicates that a delayedInit call is compiler-generated - // from phase UnCurry to phase Constructors - // !!! This is not used anywhere (it was checked in that way.) 
- // def delayedInitArgVal = EmptyPackageClass.newValue(NoPosition, nme.delayedInitArg) - // .setInfo(UnitClass.tpe) lazy val TypeConstraintClass = requiredClass[scala.annotation.TypeConstraint] lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, anyparam, ABSTRACT | TRAIT | FINAL) @@ -457,10 +435,6 @@ trait Definitions extends api.StandardDefinitions { case _ => tp } - def isPrimitiveArray(tp: Type) = tp match { - case TypeRef(_, ArrayClass, arg :: Nil) => isPrimitiveValueClass(arg.typeSymbol) - case _ => false - } def isReferenceArray(tp: Type) = tp match { case TypeRef(_, ArrayClass, arg :: Nil) => arg <:< AnyRefClass.tpe case _ => false @@ -470,11 +444,8 @@ trait Definitions extends api.StandardDefinitions { case _ => false } - lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy") - // collections classes lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]] - lazy val IterableClass = requiredClass[scala.collection.Iterable[_]] lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]] lazy val ListClass = requiredClass[scala.collection.immutable.List[_]] lazy val SeqClass = requiredClass[scala.collection.Seq[_]] @@ -485,8 +456,6 @@ trait Definitions extends api.StandardDefinitions { lazy val List_apply = getMemberMethod(ListModule, nme.apply) lazy val NilModule = requiredModule[scala.collection.immutable.Nil.type] lazy val SeqModule = requiredModule[scala.collection.Seq.type] - lazy val IteratorModule = requiredModule[scala.collection.Iterator.type] - lazy val Iterator_apply = getMemberMethod(IteratorModule, nme.apply) // arrays and their members lazy val ArrayModule = requiredModule[scala.Array.type] @@ -501,9 +470,7 @@ trait Definitions extends api.StandardDefinitions { // reflection / structural types lazy val SoftReferenceClass = requiredClass[java.lang.ref.SoftReference[_]] - lazy val WeakReferenceClass = requiredClass[java.lang.ref.WeakReference[_]] lazy val MethodClass = getClassByName(sn.MethodAsObject) - def methodClass_setAccessible = getMemberMethod(MethodClass, nme.setAccessible) lazy val EmptyMethodCacheClass = requiredClass[scala.runtime.EmptyMethodCache] lazy val MethodCacheClass = requiredClass[scala.runtime.MethodCache] def methodCache_find = getMemberMethod(MethodCacheClass, nme.find_) @@ -527,7 +494,6 @@ trait Definitions extends api.StandardDefinitions { lazy val ExprClass = if (ExprsClass != NoSymbol) getMemberClass(ExprsClass, tpnme.Expr) else NoSymbol def ExprSplice = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.splice) else NoSymbol def ExprValue = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.value) else NoSymbol - lazy val ExprModule = if (ExprsClass != NoSymbol) getMemberModule(ExprsClass, nme.Expr) else NoSymbol lazy val ClassTagModule = requiredModule[scala.reflect.ClassTag[_]] lazy val ClassTagClass = requiredClass[scala.reflect.ClassTag[_]] @@ -553,7 +519,6 @@ trait Definitions extends api.StandardDefinitions { def MacroContextPrefix = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.prefix) else NoSymbol def MacroContextPrefixType = if (MacroContextClass != NoSymbol) getTypeMember(MacroContextClass, tpnme.PrefixType) else NoSymbol def MacroContextUniverse = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.universe) else NoSymbol - def MacroContextMirror = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.mirror) else NoSymbol lazy val MacroImplAnnotation = 
requiredClass[scala.reflect.macros.internal.macroImpl] lazy val StringContextClass = requiredClass[scala.StringContext] @@ -585,12 +550,6 @@ trait Definitions extends api.StandardDefinitions { // The given class has a main method. def hasJavaMainMethod(sym: Symbol): Boolean = (sym.tpe member nme.main).alternatives exists isJavaMainMethod - def hasJavaMainMethod(path: String): Boolean = - hasJavaMainMethod(getModuleIfDefined(path)) - - def isOptionType(tp: Type) = tp.typeSymbol isSubClass OptionClass - def isSomeType(tp: Type) = tp.typeSymbol eq SomeClass - def isNoneType(tp: Type) = tp.typeSymbol eq NoneModule // Product, Tuple, Function, AbstractFunction private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[ClassSymbol] = { @@ -613,7 +572,6 @@ trait Definitions extends api.StandardDefinitions { /** Creators for TupleN, ProductN, FunctionN. */ def tupleType(elems: List[Type]) = aritySpecificType(TupleClass, elems) - def productType(elems: List[Type]) = aritySpecificType(ProductClass, elems) def functionType(formals: List[Type], restpe: Type) = aritySpecificType(FunctionClass, formals, restpe) def abstractFunctionType(formals: List[Type], restpe: Type) = aritySpecificType(AbstractFunctionClass, formals, restpe) @@ -632,10 +590,6 @@ trait Definitions extends api.StandardDefinitions { else nme.genericWrapArray } - @deprecated("Use isTupleType", "2.10.0") - def isTupleTypeOrSubtype(tp: Type) = isTupleType(tp) - - def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j)) // NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intensional? def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym) def isProductNClass(sym: Symbol) = ProductClass contains sym @@ -681,13 +635,8 @@ trait Definitions extends api.StandardDefinitions { def Product_iterator = getMemberMethod(ProductRootClass, nme.productIterator) def Product_productPrefix = getMemberMethod(ProductRootClass, nme.productPrefix) def Product_canEqual = getMemberMethod(ProductRootClass, nme.canEqual_) - // def Product_productElementName = getMemberMethod(ProductRootClass, nme.productElementName) def productProj(z:Symbol, j: Int): TermSymbol = getMemberValue(z, nme.productAccessorName(j)) - def productProj(n: Int, j: Int): TermSymbol = productProj(ProductClass(n), j) - - /** returns true if this type is exactly ProductN[T1,...,Tn], not some subclass */ - def isExactProductType(tp: Type): Boolean = isProductNClass(tp.typeSymbol) /** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */ def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNClass match { @@ -705,8 +654,6 @@ trait Definitions extends api.StandardDefinitions { case tp => tp } - def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply) - def abstractFunctionForFunctionType(tp: Type) = if (isFunctionType(tp)) abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last) else NoType @@ -724,8 +671,6 @@ trait Definitions extends api.StandardDefinitions { (sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass) } - def isSeqType(tp: Type) = elementType(SeqClass, tp.normalize) != NoType - def elementType(container: Symbol, tp: Type): Type = tp match { case TypeRef(_, `container`, arg :: Nil) => arg case _ => NoType @@ -738,10 +683,6 @@ trait Definitions extends api.StandardDefinitions { def optionType(tp: Type) = appliedType(OptionClass, tp) def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg) def 
seqType(arg: Type) = appliedType(SeqClass, arg) - def someType(tp: Type) = appliedType(SomeClass, tp) - - def StringArray = arrayType(StringClass.tpe) - lazy val ObjectArray = arrayType(ObjectClass.tpe) def ClassType(arg: Type) = if (phase.erasedTypes || forMSIL) ClassClass.tpe @@ -754,9 +695,6 @@ trait Definitions extends api.StandardDefinitions { // - .linkedClassOfClass: the ClassSymbol of the enumeration (class E) sym.owner.linkedClassOfClass.tpe - def vmClassType(arg: Type): Type = ClassType(arg) - def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!! - /** Given a class symbol C with type parameters T1, T2, ... Tn * which have upper/lower bounds LB1/UB1, LB1/UB2, ..., LBn/UBn, * returns an existential type of the form @@ -766,24 +704,11 @@ trait Definitions extends api.StandardDefinitions { def classExistentialType(clazz: Symbol): Type = newExistentialType(clazz.typeParams, clazz.tpe_*) - /** Given type U, creates a Type representing Class[_ <: U]. - */ - def boundedClassType(upperBound: Type) = - appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound)) - - /** To avoid unchecked warnings on polymorphic classes, translate - * a Foo[T] into a Foo[_] for use in the pattern matcher. - */ - @deprecated("Use classExistentialType", "2.10.0") - def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz) - // // .NET backend // lazy val ComparatorClass = getRequiredClass("scala.runtime.Comparator") - // System.ValueType - lazy val ValueTypeClass: ClassSymbol = getClassByName(sn.ValueType) // System.MulticastDelegate lazy val DelegateClass: ClassSymbol = getClassByName(sn.Delegate) var Delegate_scalaCallers: List[Symbol] = List() // Syncnote: No protection necessary yet as only for .NET where reflection is not supported. @@ -875,12 +800,7 @@ trait Definitions extends api.StandardDefinitions { else x :: removeRedundantObjects(xs) } - /** Order a list of types with non-trait classes before others. */ - def classesFirst(tps: List[Type]): List[Type] = { - val (classes, others) = tps partition (t => t.typeSymbol.isClass && !t.typeSymbol.isTrait) - if (classes.isEmpty || others.isEmpty || (tps startsWith classes)) tps - else classes ::: others - } + /** The following transformations applied to a list of parents. * If any parent is a class/trait, all parents which normalize to * Object are discarded. 
Otherwise, all parents which normalize @@ -908,10 +828,6 @@ trait Definitions extends api.StandardDefinitions { def parentsString(parents: List[Type]) = normalizedParents(parents) mkString " with " - def typeParamsString(tp: Type) = tp match { - case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]") - case _ => "" - } def valueParamsString(tp: Type) = tp match { case MethodType(params, _) => params map (_.defString) mkString ("(", ",", ")") case _ => "" @@ -948,12 +864,6 @@ trait Definitions extends api.StandardDefinitions { lazy val BoxedNumberClass = getClassByName(sn.BoxedNumber) lazy val BoxedCharacterClass = getClassByName(sn.BoxedCharacter) lazy val BoxedBooleanClass = getClassByName(sn.BoxedBoolean) - lazy val BoxedByteClass = requiredClass[java.lang.Byte] - lazy val BoxedShortClass = requiredClass[java.lang.Short] - lazy val BoxedIntClass = requiredClass[java.lang.Integer] - lazy val BoxedLongClass = requiredClass[java.lang.Long] - lazy val BoxedFloatClass = requiredClass[java.lang.Float] - lazy val BoxedDoubleClass = requiredClass[java.lang.Double] lazy val Boxes_isNumberOrBool = getDecl(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean) lazy val Boxes_isNumber = getDecl(BoxesRunTimeClass, nme.isBoxedNumber) @@ -974,7 +884,6 @@ trait Definitions extends api.StandardDefinitions { lazy val ImplicitNotFoundClass = requiredClass[scala.annotation.implicitNotFound] lazy val MigrationAnnotationClass = requiredClass[scala.annotation.migration] lazy val ScalaStrictFPAttr = requiredClass[scala.annotation.strictfp] - lazy val SerializableAttr = requiredClass[scala.annotation.serializable] // @serializable is deprecated lazy val SwitchClass = requiredClass[scala.annotation.switch] lazy val TailrecClass = requiredClass[scala.annotation.tailrec] lazy val VarargsClass = requiredClass[scala.annotation.varargs] @@ -1009,7 +918,6 @@ trait Definitions extends api.StandardDefinitions { lazy val ParamTargetClass = requiredClass[meta.param] lazy val SetterTargetClass = requiredClass[meta.setter] lazy val ClassTargetClass = requiredClass[meta.companionClass] - lazy val ObjectTargetClass = requiredClass[meta.companionObject] lazy val MethodTargetClass = requiredClass[meta.companionMethod] // TODO: module, moduleClass? package, packageObject? 
lazy val LanguageFeatureAnnot = requiredClass[meta.languageFeature] @@ -1054,7 +962,6 @@ trait Definitions extends api.StandardDefinitions { def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol = getMember(owner, newTypeName(name)) def termMember(owner: Symbol, name: String): Symbol = owner.info.member(newTermName(name)) - def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name)) def findNamedMember(fullName: Name, root: Symbol): Symbol = { val segs = nme.segments(fullName.toString, fullName.isTermName) @@ -1121,9 +1028,6 @@ trait Definitions extends api.StandardDefinitions { def getDeclIfDefined(owner: Symbol, name: Name): Symbol = owner.info.nonPrivateDecl(name) - def packageExists(packageName: String): Boolean = - getModuleIfDefined(packageName).isPackage - private def newAlias(owner: Symbol, name: TypeName, alias: Type): AliasTypeSymbol = owner.newAliasType(name) setInfoAndEnter alias @@ -1155,7 +1059,6 @@ trait Definitions extends api.StandardDefinitions { newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head))) } - lazy val boxedClassValues = boxedClass.values.toSet[Symbol] lazy val isUnbox = unboxMethod.values.toSet[Symbol] lazy val isBox = boxMethod.values.toSet[Symbol] @@ -1215,8 +1118,6 @@ trait Definitions extends api.StandardDefinitions { /** Is symbol a value class? */ def isPrimitiveValueClass(sym: Symbol) = ScalaValueClasses contains sym - def isNonUnitValueClass(sym: Symbol) = isPrimitiveValueClass(sym) && (sym != UnitClass) - def isSpecializableClass(sym: Symbol) = isPrimitiveValueClass(sym) || (sym == AnyRefClass) def isPrimitiveValueType(tp: Type) = isPrimitiveValueClass(tp.typeSymbol) /** Is symbol a boxed value class, e.g. java.lang.Integer? */ @@ -1281,11 +1182,6 @@ trait Definitions extends api.StandardDefinitions { newCaller } - // def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol, delType: Type) { - // assert(Delegate_scalaCallers contains scalaCaller) - // Delegate_scalaCallerInfos += (scalaCaller -> (methSym, delType)) - // } - def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol) { assert(Delegate_scalaCallers contains scalaCaller) Delegate_scalaCallerTargets += (scalaCaller -> methSym) diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala index 59c027868e..2a0fe9d19a 100644 --- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala +++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala @@ -7,7 +7,6 @@ package scala.reflect package internal import scala.collection.{ mutable, immutable } -import util._ /** The name of this trait defines the eventual intent better than * it does the initial contents. 
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index ea8d6078ff..6ad9a63822 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ b/src/reflect/scala/reflect/internal/Importers.scala @@ -316,7 +316,6 @@ trait Importers extends api.Importers { self: SymbolTable => def importName(name: from.Name): Name = if (name.isTypeName) newTypeName(name.toString) else newTermName(name.toString) def importTypeName(name: from.TypeName): TypeName = importName(name).toTypeName - def importTermName(name: from.TermName): TermName = importName(name).toTermName def importModifiers(mods: from.Modifiers): Modifiers = new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 9193674f3e..35ff9284e0 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -135,9 +135,6 @@ trait Names extends api.Names with LowPriorityNames { def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName = newTermName(bs, offset, len).toTypeName - def nameChars: Array[Char] = chrs - @deprecated("", "2.9.0") def view(s: String): TermName = newTermName(s) - // Classes ---------------------------------------------------------------------- /** The name class. @@ -192,17 +189,6 @@ trait Names extends api.Names with LowPriorityNames { cs } - /** Write to UTF8 representation of this name to given character array. - * Start copying to index `to`. Return index of next free byte in array. - * Array must have enough remaining space for all bytes - * (i.e. maximally 3*length bytes). - */ - final def copyUTF8(bs: Array[Byte], offset: Int): Int = { - val bytes = Codec.toUTF8(chrs, index, len) - scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length) - offset + bytes.length - } - /** @return the hash value of this name */ final override def hashCode(): Int = index @@ -283,8 +269,6 @@ trait Names extends api.Names with LowPriorityNames { */ final def lastPos(c: Char): Int = lastPos(c, len - 1) - final def lastPos(s: String): Int = lastPos(s, len - s.length) - /** Returns the index of the last occurrence of char c in this * name from start, -1 if not found. * @@ -298,26 +282,6 @@ trait Names extends api.Names with LowPriorityNames { i } - /** Returns the index of the last occurrence of string s in this - * name from start, -1 if not found. - * - * @param s the string - * @param start ... - * @return the index of the last occurrence of s - */ - final def lastPos(s: String, start: Int): Int = { - var i = lastPos(s.charAt(0), start) - while (i >= 0) { - var j = 1; - while (s.charAt(j) == chrs(index + i + j)) { - j += 1 - if (j == s.length()) return i; - } - i = lastPos(s.charAt(0), i - 1) - } - -s.length() - } - /** Does this name start with prefix? */ final def startsWith(prefix: Name): Boolean = startsWith(prefix, 0) @@ -379,7 +343,6 @@ trait Names extends api.Names with LowPriorityNames { if (idx == length) -1 else idx } def lastIndexOf(ch: Char) = lastPos(ch) - def lastIndexOf(ch: Char, fromIndex: Int) = lastPos(ch, fromIndex) /** Replace all occurrences of `from` by `to` in * name; result is always a term name. 
@@ -428,9 +391,7 @@ trait Names extends api.Names with LowPriorityNames { def append(ch: Char) = newName("" + this + ch) def append(suffix: String) = newName("" + this + suffix) def append(suffix: Name) = newName("" + this + suffix) - def prepend(ch: Char) = newName("" + ch + this) def prepend(prefix: String) = newName("" + prefix + this) - def prepend(prefix: Name) = newName("" + prefix + this) def decodedName: ThisNameType = newName(decode) def isOperatorName: Boolean = decode != toString @@ -445,7 +406,6 @@ trait Names extends api.Names with LowPriorityNames { def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name def dropRight(n: Int): T = name.subName(0, name.length - n).asInstanceOf[T] def drop(n: Int): T = name.subName(n, name.length).asInstanceOf[T] - def nonEmpty: Boolean = name.length > 0 } implicit val NameTag = ClassTag[Name](classOf[Name]) @@ -489,7 +449,7 @@ trait Names extends api.Names with LowPriorityNames { type ThisNameType = TermName protected[this] def thisName: TermName = this - var next: TermName = termHashtable(hash) + val next: TermName = termHashtable(hash) termHashtable(hash) = this def isTermName: Boolean = true def isTypeName: Boolean = false @@ -518,7 +478,7 @@ trait Names extends api.Names with LowPriorityNames { type ThisNameType = TypeName protected[this] def thisName: TypeName = this - var next: TypeName = typeHashtable(hash) + val next: TypeName = typeHashtable(hash) typeHashtable(hash) = this def isTermName: Boolean = false def isTypeName: Boolean = true diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 31f9cb7e59..08ba93520a 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -475,8 +475,6 @@ trait Printers extends api.Printers { self: SymbolTable => } def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer) - def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream)) - def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter)) // provides footnotes for types and mirrors import scala.collection.mutable.{Map, WeakHashMap, SortedSet} diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 6d0d34cfc1..525fd3fca4 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -330,8 +330,6 @@ trait Scopes extends api.Scopes { self: SymbolTable => */ def iterator: Iterator[Symbol] = toList.iterator - def containsSymbol(s: Symbol) = lookupAll(s.name) contains s - override def foreach[U](p: Symbol => U): Unit = toList foreach p override def filterNot(p: Symbol => Boolean): Scope = ( diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 5e7f5777b2..a5810c9c83 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -104,7 +104,6 @@ trait StdNames { val IMPORT: NameType = "" val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING val MODULE_VAR_SUFFIX: NameType = "$module" - val NAME_JOIN_NAME: NameType = NAME_JOIN_STRING val PACKAGE: NameType = "package" val ROOT: NameType = "" val SPECIALIZED_SUFFIX: NameType = "$sp" @@ -121,16 +120,12 @@ trait StdNames { final val Short: NameType = "Short" final val Unit: NameType = "Unit" - final 
val ScalaValueNames: scala.List[NameType] = - scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit) - // some types whose companions we utilize final val AnyRef: NameType = "AnyRef" final val Array: NameType = "Array" final val List: NameType = "List" final val Seq: NameType = "Seq" final val Symbol: NameType = "Symbol" - final val ClassTag: NameType = "ClassTag" final val WeakTypeTag: NameType = "WeakTypeTag" final val TypeTag : NameType = "TypeTag" final val Expr: NameType = "Expr" @@ -220,12 +215,10 @@ trait StdNames { final val Any: NameType = "Any" final val AnyVal: NameType = "AnyVal" - final val ExprApi: NameType = "ExprApi" final val Mirror: NameType = "Mirror" final val Nothing: NameType = "Nothing" final val Null: NameType = "Null" final val Object: NameType = "Object" - final val PartialFunction: NameType = "PartialFunction" final val PrefixType: NameType = "PrefixType" final val Product: NameType = "Product" final val Serializable: NameType = "Serializable" @@ -239,7 +232,6 @@ trait StdNames { final val Group: NameType = "Group" final val Tree: NameType = "Tree" final val Type : NameType = "Type" - final val TypeTree: NameType = "TypeTree" // Annotation simple names, used in Namer final val BeanPropertyAnnot: NameType = "BeanProperty" @@ -249,13 +241,11 @@ trait StdNames { // Classfile Attributes final val AnnotationDefaultATTR: NameType = "AnnotationDefault" final val BridgeATTR: NameType = "Bridge" - final val ClassfileAnnotationATTR: NameType = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009). final val CodeATTR: NameType = "Code" final val ConstantValueATTR: NameType = "ConstantValue" final val DeprecatedATTR: NameType = "Deprecated" final val ExceptionsATTR: NameType = "Exceptions" final val InnerClassesATTR: NameType = "InnerClasses" - final val LineNumberTableATTR: NameType = "LineNumberTable" final val LocalVariableTableATTR: NameType = "LocalVariableTable" final val RuntimeAnnotationATTR: NameType = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME final val RuntimeParamAnnotationATTR: NameType = "RuntimeVisibleParameterAnnotations" // RetentionPolicy.RUNTIME (annotations on parameters) @@ -284,9 +274,6 @@ trait StdNames { val EXCEPTION_RESULT_PREFIX = "exceptionResult" val EXPAND_SEPARATOR_STRING = "$$" val INTERPRETER_IMPORT_WRAPPER = "$iw" - val INTERPRETER_LINE_PREFIX = "line" - val INTERPRETER_VAR_PREFIX = "res" - val INTERPRETER_WRAPPER_SUFFIX = "$object" val LOCALDUMMY_PREFIX = " true - case _ => false - } - def isOpAssignmentName(name: Name) = name match { case raw.NE | raw.LE | raw.GE | EMPTY => false case _ => @@ -395,18 +375,6 @@ trait StdNames { else name ) - /* - def anonNumberSuffix(name: Name): Name = { - ("" + name) lastIndexOf '$' match { - case -1 => nme.EMPTY - case idx => - val s = name drop idx - if (s.toString forall (_.isDigit)) s - else nme.EMPTY - } - } - */ - /** Return the original name and the types on which this name * is specialized. For example, * {{{ @@ -458,18 +426,6 @@ trait StdNames { } else name.toTermName } - // If the name ends with $nn where nn are - // all digits, strip the $ and the digits. - // Otherwise return the argument. 
- def stripAnonNumberSuffix(name: Name): Name = { - var pos = name.length - while (pos > 0 && name.charAt(pos - 1).isDigit) - pos -= 1 - - if (pos <= 0 || pos == name.length || name.charAt(pos - 1) != '$') name - else name.subName(0, pos - 1) - } - def stripModuleSuffix(name: Name): Name = ( if (isModuleName(name)) name dropRight MODULE_SUFFIX_STRING.length else name ) @@ -484,8 +440,6 @@ trait StdNames { final val Nil: NameType = "Nil" final val Predef: NameType = "Predef" - final val ScalaRunTime: NameType = "ScalaRunTime" - final val Some: NameType = "Some" val _1 : NameType = "_1" val _2 : NameType = "_2" @@ -581,14 +535,10 @@ trait StdNames { val Annotation: NameType = "Annotation" val Any: NameType = "Any" val AnyVal: NameType = "AnyVal" - val AppliedTypeTree: NameType = "AppliedTypeTree" - val Apply: NameType = "Apply" val ArrayAnnotArg: NameType = "ArrayAnnotArg" - val Constant: NameType = "Constant" val ConstantType: NameType = "ConstantType" val EmptyPackage: NameType = "EmptyPackage" val EmptyPackageClass: NameType = "EmptyPackageClass" - val ExistentialTypeTree: NameType = "ExistentialTypeTree" val Flag : NameType = "Flag" val Ident: NameType = "Ident" val Import: NameType = "Import" @@ -597,10 +547,8 @@ trait StdNames { val Modifiers: NameType = "Modifiers" val NestedAnnotArg: NameType = "NestedAnnotArg" val NoFlags: NameType = "NoFlags" - val NoPrefix: NameType = "NoPrefix" val NoSymbol: NameType = "NoSymbol" val Nothing: NameType = "Nothing" - val NoType: NameType = "NoType" val Null: NameType = "Null" val Object: NameType = "Object" val RootPackage: NameType = "RootPackage" @@ -609,17 +557,14 @@ trait StdNames { val StringContext: NameType = "StringContext" val This: NameType = "This" val ThisType: NameType = "ThisType" - val Tree : NameType = "Tree" val Tuple2: NameType = "Tuple2" val TYPE_ : NameType = "TYPE" - val TypeApply: NameType = "TypeApply" val TypeRef: NameType = "TypeRef" val TypeTree: NameType = "TypeTree" val UNIT : NameType = "UNIT" val add_ : NameType = "add" val annotation: NameType = "annotation" val anyValClass: NameType = "anyValClass" - val append: NameType = "append" val apply: NameType = "apply" val applyDynamic: NameType = "applyDynamic" val applyDynamicNamed: NameType = "applyDynamicNamed" @@ -627,34 +572,24 @@ trait StdNames { val args : NameType = "args" val argv : NameType = "argv" val arrayClass: NameType = "arrayClass" - val arrayElementClass: NameType = "arrayElementClass" - val arrayValue: NameType = "arrayValue" val array_apply : NameType = "array_apply" val array_clone : NameType = "array_clone" val array_length : NameType = "array_length" val array_update : NameType = "array_update" - val arraycopy: NameType = "arraycopy" - val asTerm: NameType = "asTerm" val asModule: NameType = "asModule" - val asMethod: NameType = "asMethod" val asType: NameType = "asType" - val asClass: NameType = "asClass" val asInstanceOf_ : NameType = "asInstanceOf" val asInstanceOf_Ob : NameType = "$asInstanceOf" - val assert_ : NameType = "assert" - val assume_ : NameType = "assume" val box: NameType = "box" val build : NameType = "build" val bytes: NameType = "bytes" val canEqual_ : NameType = "canEqual" val checkInitialized: NameType = "checkInitialized" - val ClassManifestFactory: NameType = "ClassManifestFactory" val classOf: NameType = "classOf" val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure val conforms: NameType = "conforms" val copy: NameType = "copy" val currentMirror: NameType = "currentMirror" - 
val definitions: NameType = "definitions" val delayedInit: NameType = "delayedInit" val delayedInitArg: NameType = "delayedInit$body" val drop: NameType = "drop" @@ -667,7 +602,6 @@ trait StdNames { val equalsNumObject : NameType = "equalsNumObject" val equals_ : NameType = if (forMSIL) "Equals" else "equals" val error: NameType = "error" - val eval: NameType = "eval" val ex: NameType = "ex" val experimental: NameType = "experimental" val f: NameType = "f" @@ -678,17 +612,11 @@ trait StdNames { val flagsFromBits : NameType = "flagsFromBits" val flatMap: NameType = "flatMap" val foreach: NameType = "foreach" - val genericArrayOps: NameType = "genericArrayOps" val get: NameType = "get" - val getOrElse: NameType = "getOrElse" - val hasNext: NameType = "hasNext" val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode" val hash_ : NameType = "hash" - val head: NameType = "head" - val identity: NameType = "identity" val implicitly: NameType = "implicitly" val in: NameType = "in" - val info: NameType = "info" val inlinedEquals: NameType = "inlinedEquals" val isArray: NameType = "isArray" val isDefinedAt: NameType = "isDefinedAt" @@ -700,57 +628,42 @@ trait StdNames { val lang: NameType = "lang" val length: NameType = "length" val lengthCompare: NameType = "lengthCompare" - val liftedTree: NameType = "liftedTree" - val `macro` : NameType = "macro" - val macroThis : NameType = "_this" val macroContext : NameType = "c" val main: NameType = "main" - val manifest: NameType = "manifest" - val ManifestFactory: NameType = "ManifestFactory" val manifestToTypeTag: NameType = "manifestToTypeTag" val map: NameType = "map" val materializeClassTag: NameType = "materializeClassTag" val materializeWeakTypeTag: NameType = "materializeWeakTypeTag" val materializeTypeTag: NameType = "materializeTypeTag" - val mirror : NameType = "mirror" val moduleClass : NameType = "moduleClass" - val name: NameType = "name" val ne: NameType = "ne" val newArray: NameType = "newArray" val newFreeTerm: NameType = "newFreeTerm" val newFreeType: NameType = "newFreeType" val newNestedSymbol: NameType = "newNestedSymbol" val newScopeWith: NameType = "newScopeWith" - val next: NameType = "next" val nmeNewTermName: NameType = "newTermName" val nmeNewTypeName: NameType = "newTypeName" - val normalize: NameType = "normalize" val notifyAll_ : NameType = "notifyAll" val notify_ : NameType = "notify" val null_ : NameType = "null" - val ofDim: NameType = "ofDim" - val origin: NameType = "origin" val prefix : NameType = "prefix" val productArity: NameType = "productArity" val productElement: NameType = "productElement" val productIterator: NameType = "productIterator" val productPrefix: NameType = "productPrefix" val readResolve: NameType = "readResolve" - val reflect : NameType = "reflect" val reify : NameType = "reify" val rootMirror : NameType = "rootMirror" - val runOrElse: NameType = "runOrElse" val runtime: NameType = "runtime" val runtimeClass: NameType = "runtimeClass" val runtimeMirror: NameType = "runtimeMirror" - val sameElements: NameType = "sameElements" val scala_ : NameType = "scala" val selectDynamic: NameType = "selectDynamic" val selectOverloadedMethod: NameType = "selectOverloadedMethod" val selectTerm: NameType = "selectTerm" val selectType: NameType = "selectType" val self: NameType = "self" - val setAccessible: NameType = "setAccessible" val setAnnotations: NameType = "setAnnotations" val setSymbol: NameType = "setSymbol" val setType: NameType = "setType" @@ -760,15 +673,10 @@ trait StdNames { val staticModule 
: NameType = "staticModule" val staticPackage : NameType = "staticPackage" val synchronized_ : NameType = "synchronized" - val tail: NameType = "tail" - val `then` : NameType = "then" val this_ : NameType = "this" val thisPrefix : NameType = "thisPrefix" - val throw_ : NameType = "throw" val toArray: NameType = "toArray" - val toList: NameType = "toList" val toObjectArray : NameType = "toObjectArray" - val toSeq: NameType = "toSeq" val toString_ : NameType = if (forMSIL) "ToString" else "toString" val toTypeConstructor: NameType = "toTypeConstructor" val tpe : NameType = "tpe" @@ -788,14 +696,9 @@ trait StdNames { val view_ : NameType = "view" val wait_ : NameType = "wait" val withFilter: NameType = "withFilter" - val wrap: NameType = "wrap" - val zip: NameType = "zip" - - val synthSwitch: NameType = "$synthSwitch" // unencoded operators object raw { - final val AMP : NameType = "&" final val BANG : NameType = "!" final val BAR : NameType = "|" final val DOLLAR: NameType = "$" @@ -804,7 +707,6 @@ trait StdNames { final val MINUS: NameType = "-" final val NE: NameType = "!=" final val PLUS : NameType = "+" - final val SLASH: NameType = "/" final val STAR : NameType = "*" final val TILDE: NameType = "~" @@ -860,14 +762,7 @@ trait StdNames { // Grouped here so Cleanup knows what tests to perform. val CommonOpNames = Set[Name](OR, XOR, AND, EQ, NE) - val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort) val BooleanOpNames = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames - val NumberOpNames = ( - Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT) - ++ Set(UNARY_+, UNARY_-, UNARY_!) - ++ ConversionNames - ++ CommonOpNames - ) val add: NameType = "add" val complement: NameType = "complement" @@ -999,7 +894,6 @@ trait StdNames { object fulltpnme extends TypeNames { val RuntimeNothing: NameType = "scala.runtime.Nothing$" val RuntimeNull: NameType = "scala.runtime.Null$" - val JavaLangEnum: NameType = "java.lang.Enum" } /** Java binary names, like scala/runtime/Nothing$. 
@@ -1014,16 +908,11 @@ trait StdNames { val javanme = nme.javaKeywords object nme extends TermNames { - - def isModuleVarName(name: Name): Boolean = - stripAnonNumberSuffix(name) endsWith MODULE_VAR_SUFFIX - def moduleVarName(name: TermName): TermName = newTermNameCached("" + name + MODULE_VAR_SUFFIX) def getCause = sn.GetCause def getClass_ = sn.GetClass - def getComponentType = sn.GetComponentType def getMethod_ = sn.GetMethod def invoke_ = sn.Invoke @@ -1036,15 +925,6 @@ trait StdNames { val reflMethodCacheName: NameType = "reflMethod$Cache" val reflMethodName: NameType = "reflMethod$Method" - private val reflectionCacheNames = Set[NameType]( - reflPolyCacheName, - reflClassCacheName, - reflParamsCacheName, - reflMethodCacheName, - reflMethodName - ) - def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _) - @deprecated("Use a method in tpnme", "2.10.0") def dropSingletonName(name: Name): TypeName = tpnme.dropSingletonName(name) @deprecated("Use a method in tpnme", "2.10.0") def singletonName(name: Name): TypeName = tpnme.singletonName(name) @deprecated("Use a method in tpnme", "2.10.0") def implClassName(name: Name): TypeName = tpnme.implClassName(name) @@ -1056,28 +936,21 @@ trait StdNames { protected val stringToTypeName = null protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s) - val BeanProperty : TypeName - val BooleanBeanProperty : TypeName val BoxedBoolean : TypeName val BoxedCharacter : TypeName val BoxedNumber : TypeName - val Class : TypeName val Delegate : TypeName val IOOBException : TypeName // IndexOutOfBoundsException val InvTargetException : TypeName // InvocationTargetException - val JavaSerializable : TypeName val MethodAsObject : TypeName val NPException : TypeName // NullPointerException val Object : TypeName - val String : TypeName val Throwable : TypeName val ValueType : TypeName - val ForName : TermName val GetCause : TermName val GetClass : TermName val GetClassLoader : TermName - val GetComponentType : TermName val GetMethod : TermName val Invoke : TermName val JavaLang : TermName @@ -1152,22 +1025,18 @@ trait StdNames { final val BoxedLong: TypeName = "java.lang.Long" final val BoxedNumber: TypeName = "java.lang.Number" final val BoxedShort: TypeName = "java.lang.Short" - final val Class: TypeName = "java.lang.Class" final val Delegate: TypeName = tpnme.NO_NAME final val IOOBException: TypeName = "java.lang.IndexOutOfBoundsException" final val InvTargetException: TypeName = "java.lang.reflect.InvocationTargetException" final val MethodAsObject: TypeName = "java.lang.reflect.Method" final val NPException: TypeName = "java.lang.NullPointerException" final val Object: TypeName = "java.lang.Object" - final val String: TypeName = "java.lang.String" final val Throwable: TypeName = "java.lang.Throwable" final val ValueType: TypeName = tpnme.NO_NAME - final val ForName: TermName = newTermName("forName") final val GetCause: TermName = newTermName("getCause") final val GetClass: TermName = newTermName("getClass") final val GetClassLoader: TermName = newTermName("getClassLoader") - final val GetComponentType: TermName = newTermName("getComponentType") final val GetMethod: TermName = newTermName("getMethod") final val Invoke: TermName = newTermName("invoke") final val JavaLang: TermName = newTermName("java.lang") @@ -1185,28 +1054,21 @@ trait StdNames { } private class MSILNames extends SymbolNames { - final val BeanProperty: TypeName = tpnme.NO_NAME - final val BooleanBeanProperty: TypeName = tpnme.NO_NAME 
final val BoxedBoolean: TypeName = "System.IConvertible" final val BoxedCharacter: TypeName = "System.IConvertible" final val BoxedNumber: TypeName = "System.IConvertible" - final val Class: TypeName = "System.Type" final val Delegate: TypeName = "System.MulticastDelegate" final val IOOBException: TypeName = "System.IndexOutOfRangeException" final val InvTargetException: TypeName = "System.Reflection.TargetInvocationException" - final val JavaSerializable: TypeName = tpnme.NO_NAME final val MethodAsObject: TypeName = "System.Reflection.MethodInfo" final val NPException: TypeName = "System.NullReferenceException" final val Object: TypeName = "System.Object" - final val String: TypeName = "System.String" final val Throwable: TypeName = "System.Exception" final val ValueType: TypeName = "System.ValueType" - final val ForName: TermName = newTermName("GetType") final val GetCause: TermName = newTermName("InnerException") /* System.Reflection.TargetInvocationException.InnerException */ final val GetClass: TermName = newTermName("GetType") final lazy val GetClassLoader: TermName = throw new UnsupportedOperationException("Scala reflection is not supported on this platform"); - final val GetComponentType: TermName = newTermName("GetElementType") final val GetMethod: TermName = newTermName("GetMethod") final val Invoke: TermName = newTermName("Invoke") final val JavaLang: TermName = newTermName("System") @@ -1223,13 +1085,7 @@ trait StdNames { ) } - private class J2SENames extends JavaNames { - final val BeanProperty: TypeName = "scala.beans.BeanProperty" - final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty" - final val JavaSerializable: TypeName = "java.io.Serializable" - } - lazy val sn: SymbolNames = if (forMSIL) new MSILNames - else new J2SENames + else new JavaNames { } } diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index fb1bf9ed9d..ed3d4d4f6a 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -183,9 +183,6 @@ abstract class SymbolTable extends macros.Universe /** The phase identifier of the given period. */ final def phaseId(period: Period): Phase#Id = period & 0xFF - /** The period at the start of run that includes `period`. */ - final def startRun(period: Period): Period = period & 0xFFFFFF00 - /** The current period. 
*/ final def currentPeriod: Period = { //assert(per == (currentRunId << 8) + phase.id) @@ -298,7 +295,6 @@ abstract class SymbolTable extends macros.Universe object perRunCaches { import java.lang.ref.WeakReference - import scala.runtime.ScalaRunTime.stringOf import scala.collection.generic.Clearable // Weak references so the garbage collector will take care of diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 2e806dd6b1..e1c55e3fe0 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -19,8 +19,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => protected var ids = 0 - val emptySymbolArray = new Array[Symbol](0) - protected def nextId() = { ids += 1; ids } /** Used for deciding in the IDE whether we can interrupt the compiler */ @@ -256,9 +254,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => (m, c) } - final def newPackageSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = - newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol] - final def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol = newClassSymbol(name, pos, newFlags).asInstanceOf[ModuleClassSymbol] @@ -321,11 +316,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => () => { cnt += 1; nme.syntheticParamName(cnt) } } - /** Synthetic value parameters when parameter symbols are not available - */ - final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] = - argtypess map (xs => newSyntheticValueParams(xs, freshNamer)) - /** Synthetic value parameters when parameter symbols are not available. * Calling this method multiple times will re-use the same parameter names. */ @@ -341,7 +331,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def newSyntheticValueParam(argtype: Type, name: TermName = nme.syntheticParamName(1)): TermSymbol = newValueParameter(name, owner.pos.focus, SYNTHETIC) setInfo argtype - def newSyntheticTypeParam(): TypeSymbol = newSyntheticTypeParam("T0", 0L) def newSyntheticTypeParam(name: String, newFlags: Long): TypeSymbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty def newSyntheticTypeParams(num: Int): List[TypeSymbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L)) @@ -405,14 +394,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def newRefinementClass(pos: Position): RefinementClassSymbol = createRefinementClassSymbol(pos, 0L) - /** Create a new getter for current symbol (which must be a field) - */ - final def newGetter: MethodSymbol = ( - owner.newMethod(nme.getterName(name.toTermName), NoPosition, getterFlags(flags)) - setPrivateWithin privateWithin - setInfo MethodType(Nil, tpe) - ) - final def newErrorSymbol(name: Name): Symbol = name match { case x: TypeName => newErrorClass(x) case x: TermName => newErrorValue(x) @@ -528,14 +509,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def isContravariant = false def isCovariant = false - def isExistentialQuantified = false def isExistentialSkolem = false def isExistentiallyBound = false def isGADTSkolem = false def isTypeParameter = false def isTypeParameterOrSkolem = false def isTypeSkolem = false - def isTypeMacro = false def isInvariant = !isCovariant && !isContravariant /** Qualities of Terms, always false for TypeSymbols. 
@@ -719,14 +698,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => = hasAnnotation(DeprecatedInheritanceAttr) def deprecatedInheritanceMessage = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0) - def deprecatedInheritanceVersion - = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1) def hasDeprecatedOverridingAnnotation = hasAnnotation(DeprecatedOverridingAttr) def deprecatedOverridingMessage = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0) - def deprecatedOverridingVersion - = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1) // !!! when annotation arguments are not literal strings, but any sort of // assembly of strings, there is a fair chance they will turn up here not as @@ -806,8 +781,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isStaticOwner: Boolean = isPackageClass || isModuleClass && isStatic - def isTopLevelModule = hasFlag(MODULE) && owner.isPackageClass - /** A helper function for isEffectivelyFinal. */ private def isNotOverridden = ( owner.isClass && ( @@ -844,12 +817,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def isLocalClass = false - def isStableClass = false - -/* code for fixing nested objects - override final def isModuleClass: Boolean = - super.isModuleClass && !isExpandedModuleClass -*/ /** Is this class or type defined as a structural refinement type? */ final def isStructuralRefinement: Boolean = @@ -868,10 +835,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isStructuralRefinementMember = owner.isStructuralRefinement && isPossibleInRefinement && isPublic final def isPossibleInRefinement = !isConstructor && !isOverridingSymbol - /** Is this symbol a member of class `clazz`? */ - def isMemberOf(clazz: Symbol) = - clazz.info.member(name).alternatives contains this - /** A a member of class `base` is incomplete if * (1) it is declared deferred or * (2) it is abstract override and its super symbol in `base` is @@ -969,6 +932,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => def ownerChain: List[Symbol] = this :: owner.ownerChain def originalOwnerChain: List[Symbol] = this :: originalOwner.getOrElse(this, rawowner).originalOwnerChain + // All the symbols overridden by this symbol and this symbol at the head, + // or Nil if this is NoSymbol. + def overrideChain = ( + if (this eq NoSymbol) Nil + else if (!owner.isClass) this :: Nil + else this :: allOverriddenSymbols + ) + // Non-classes skip self and return rest of owner chain; overridden in ClassSymbol. def enclClassChain: List[Symbol] = owner.enclClassChain @@ -1078,9 +1049,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => protected def createImplClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol = new ClassSymbol(this, pos, name) with ImplClassSymbol initFlags newFlags - protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol = - new TermSymbol(this, pos, name) initFlags newFlags - protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol = new MethodSymbol(this, pos, name) initFlags newFlags @@ -1442,14 +1410,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => protected def needsInitialize(isFlagRelated: Boolean, mask: Long) = !isInitialized && (flags & LOCKED) == 0 && shouldTriggerCompleter(this, if (infos ne null) infos.info else null, isFlagRelated, mask) - /** Was symbol's type updated during given phase? 
*/ - final def isUpdatedAt(pid: Phase#Id): Boolean = { - assert(isCompilerUniverse) - var infos = this.infos - while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev - infos ne null - } - /** Was symbol's type updated during given phase? */ final def hasTypeAt(pid: Phase#Id): Boolean = { assert(isCompilerUniverse) @@ -1941,10 +1901,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => (this.rawInfo ne NoType) && (this.effectiveOwner == that.effectiveOwner) && ( !this.effectiveOwner.isPackageClass - || (this.sourceFile eq null) - || (that.sourceFile eq null) - || (this.sourceFile.path == that.sourceFile.path) // Cheap possibly wrong check, then expensive normalization - || (this.sourceFile.canonicalPath == that.sourceFile.canonicalPath) + || (this.associatedFile eq null) + || (that.associatedFile eq null) + || (this.associatedFile.path == that.associatedFile.path) // Cheap possibly wrong check, then expensive normalization + || (this.associatedFile.canonicalPath == that.associatedFile.canonicalPath) ) ) @@ -2071,9 +2031,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, ofclazz.thisType) /** Returns all symbols overriden by this symbol. */ - final def allOverriddenSymbols: List[Symbol] = - if (!owner.isClass) Nil + final def allOverriddenSymbols: List[Symbol] = ( + if ((this eq NoSymbol) || !owner.isClass) Nil else owner.ancestors map overriddenSymbol filter (_ != NoSymbol) + ) /** Equivalent to allOverriddenSymbols.nonEmpty, but more efficient. */ // !!! When if ever will this answer differ from .isOverride? @@ -2084,7 +2045,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => ) /** Equivalent to allOverriddenSymbols.head (or NoSymbol if no overrides) but more efficient. */ def nextOverriddenSymbol: Symbol = { - if (owner.isClass) owner.ancestors foreach { base => + if ((this ne NoSymbol) && owner.isClass) owner.ancestors foreach { base => val sym = overriddenSymbol(base) if (sym != NoSymbol) return sym @@ -2189,13 +2150,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => case p :: _ => p case _ => NoSymbol } -/* code for fixing nested objects - def expandModuleClassName() { - name = newTypeName(name.toString + "$") - } - - def isExpandedModuleClass: Boolean = name(name.length - 1) == '$' -*/ /** Desire to re-use the field in ClassSymbol which stores the source * file to also store the classfile, but without changing the behavior @@ -2205,10 +2159,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => private def sourceFileOnly(file: AbstractFile): AbstractFile = if ((file eq null) || (file.path endsWith ".class")) null else file - private def binaryFileOnly(file: AbstractFile): AbstractFile = - if ((file eq null) || !(file.path endsWith ".class")) null else file - - final def binaryFile: AbstractFile = binaryFileOnly(associatedFile) final def sourceFile: AbstractFile = sourceFileOnly(associatedFile) /** Overridden in ModuleSymbols to delegate to the module class. 
*/ @@ -2232,9 +2182,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => // ------ toString ------------------------------------------------------------------- - /** A tag which (in the ideal case) uniquely identifies class symbols */ - final def tag: Int = fullName.## - /** The simple name of this Symbol */ final def simpleName: Name = name @@ -2702,7 +2649,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def asNameType(n: Name) = n.toTypeName override def isNonClassType = true - override def isTypeMacro = hasFlag(MACRO) override def resolveOverloadedFlag(flag: Long) = flag match { case TRAIT => "" // DEFAULTPARAM @@ -2720,7 +2666,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isAbstractType = this hasFlag DEFERRED override def isContravariant = this hasFlag CONTRAVARIANT override def isCovariant = this hasFlag COVARIANT - override def isExistentialQuantified = isExistentiallyBound && !isSkolem override def isExistentiallyBound = this hasFlag EXISTENTIAL override def isTypeParameter = isTypeParameterOrSkolem && !isSkolem override def isTypeParameterOrSkolem = this hasFlag PARAM @@ -2851,7 +2796,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isTypeSkolem = this hasFlag PARAM override def isAbstractType = this hasFlag DEFERRED - override def isExistentialQuantified = false override def existentialBound = if (isAbstractType) this.info else super.existentialBound /** If typeskolem comes from a type parameter, that parameter, otherwise skolem itself */ @@ -2937,21 +2881,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => || isLocal || !owner.isPackageClass && owner.isLocalClass ) - override def isStableClass = (this hasFlag STABLE) || checkStable() - - private def checkStable() = { - def hasNoAbstractTypeMember(clazz: Symbol): Boolean = - (clazz hasFlag STABLE) || { - var e = clazz.info.decls.elems - while ((e ne null) && !(e.sym.isAbstractType && info.member(e.sym.name) == e.sym)) - e = e.next - e == null - } - (info.baseClasses forall hasNoAbstractTypeMember) && { - setFlag(STABLE) - true - } - } override def enclClassChain = this :: owner.enclClassChain diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 6ce93d93b2..f30807a26c 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -12,9 +12,6 @@ abstract class TreeGen extends macros.TreeBuilder { def scalaDot(name: Name) = Select(Ident(nme.scala_) setSymbol ScalaPackage, name) def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name) def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass - def scalaUnitConstr = scalaDot(tpnme.Unit) setSymbol UnitClass - def productConstr = scalaDot(tpnme.Product) setSymbol ProductRootClass - def serializableConstr = scalaDot(tpnme.Serializable) setSymbol SerializableClass def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = { val cls = if (abstractFun) @@ -247,10 +244,6 @@ abstract class TreeGen extends macros.TreeBuilder { def mkClassOf(tp: Type): Tree = Literal(Constant(tp)) setType ConstantType(Constant(tp)) - /** Builds a list with given head and tail. */ - def mkNewCons(head: Tree, tail: Tree): Tree = - New(Apply(mkAttributedRef(ConsClass), List(head, tail))) - /** Builds a list with given head and tail. 
*/ def mkNil: Tree = mkAttributedRef(NilModule) diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 7ae7cf1821..3fcac16222 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -131,13 +131,6 @@ abstract class TreeInfo { ) } - - @deprecated("Use isExprSafeToInline instead", "2.10.0") - def isPureExpr(tree: Tree) = isExprSafeToInline(tree) - - def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] = - mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg))) - def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = { val b = List.newBuilder[R] foreachMethodParamAndArg(params, args)((param, arg) => b += f(param, arg)) @@ -175,37 +168,6 @@ abstract class TreeInfo { true } - /** - * Selects the correct parameter list when there are nested applications. - * Given Apply(fn, args), args might correspond to any of fn.symbol's parameter - * lists. To choose the correct one before uncurry, we have to unwrap any - * applies: for instance Apply(fn @ Apply(Apply(_, _), _), args) implies args - * correspond to the third parameter list. - * - * The argument fn is the function part of the apply node being considered. - * - * Also accounts for varargs. - */ - private def applyMethodParameters(fn: Tree): List[Symbol] = { - val depth = applyDepth(fn) - // There could be applies which go beyond the parameter list(s), - // being applied to the result of the method call. - // !!! Note that this still doesn't seem correct, although it should - // be closer than what it replaced. - if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth) - else if (fn.symbol.paramss.isEmpty) Nil - else fn.symbol.paramss.last - } - - def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match { - case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args) - case _ => Nil - } - def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match { - case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f) - case _ => - } - /** Is symbol potentially a getter of a variable? */ def mayBeVarGetter(sym: Symbol): Boolean = sym.info match { @@ -354,10 +316,6 @@ abstract class TreeInfo { case x: Ident => !x.isBackquoted && nme.isVariableName(x.name) case _ => false } - def isDeprecatedIdentifier(tree: Tree): Boolean = tree match { - case x: Ident => !x.isBackquoted && nme.isDeprecatedIdentifierName(x.name) - case _ => false - } /** The first constructor definitions in `stats` */ def firstConstructor(stats: List[Tree]): Tree = stats find { @@ -416,12 +374,6 @@ abstract class TreeInfo { /** Is name a left-associative operator? */ def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.endChar != ':') - /** Is tree a `this` node which belongs to `enclClass`? 
*/ - def isSelf(tree: Tree, enclClass: Symbol): Boolean = tree match { - case This(_) => tree.symbol == enclClass - case _ => false - } - /** a Match(Typed(_, tpt), _) must be translated into a switch if isSwitchAnnotation(tpt.tpe) */ def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation definitions.SwitchClass diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index ed08226ec7..50b07f0cc8 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -847,7 +847,6 @@ trait Trees extends api.Trees { self: SymbolTable => /** Is the tree Predef, scala.Predef, or _root_.scala.Predef? */ def isReferenceToPredef(t: Tree) = isReferenceToScalaMember(t, nme.Predef) - def isReferenceToAnyVal(t: Tree) = isReferenceToScalaMember(t, tpnme.AnyVal) // --- modifiers implementation --------------------------------------- diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala index 68b4fa69a1..d437b1b058 100644 --- a/src/reflect/scala/reflect/internal/TypeDebugging.scala +++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala @@ -9,8 +9,6 @@ package internal trait TypeDebugging { self: SymbolTable => - import definitions._ - // @M toString that is safe during debugging (does not normalize, ...) object typeDebug { private def to_s(x: Any): String = x match { @@ -20,7 +18,6 @@ trait TypeDebugging { case x: Product => x.productIterator mkString ("(", ", ", ")") case _ => "" + x } - def ptIndent(x: Any) = ("" + x).replaceAll("\\n", " ") def ptBlock(label: String, pairs: (String, Any)*): String = { if (pairs.isEmpty) label + "{ }" else { diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 2f97b01ffa..579731f033 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -167,10 +167,6 @@ trait Types extends api.Types { self: SymbolTable => log = Nil } finally unlock() } - def size = { - lock() - try log.size finally unlock() - } // `block` should not affect constraints on typevars def undo[T](block: => T): T = { @@ -182,20 +178,6 @@ trait Types extends api.Types { self: SymbolTable => finally undoTo(before) } finally unlock() } - - // if `block` evaluates to false, it should not affect constraints on typevars - def undoUnless(block: => Boolean): Boolean = { - lock() - try { - val before = log - var result = false - - try result = block - finally if (!result) undoTo(before) - - result - } finally unlock() - } } /** A map from lists to compound types that have the given list as parents. @@ -292,7 +274,6 @@ trait Types extends api.Types { self: SymbolTable => abstract class TypeApiImpl extends TypeApi { this: Type => def declaration(name: Name): Symbol = decl(name) - def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name) def declarations = decls def typeArguments = typeArgs def erasure = this match { @@ -379,9 +360,6 @@ trait Types extends api.Types { self: SymbolTable => /** Is this type produced as a repair for an error? */ def isErroneous: Boolean = ErroneousCollector.collect(this) - /** Does this type denote a reference type which can be null? */ - // def isNullable: Boolean = false - /** Can this type only be subtyped by bottom types? * This is assessed to be the case if the class is final, * and all type parameters (if any) are invariant. 
@@ -519,11 +497,6 @@ trait Types extends api.Types { self: SymbolTable => /** Only used for dependent method types. */ def resultApprox: Type = ApproximateDependentMap(resultType) - /** If this is a TypeRef `clazz`[`T`], return the argument `T` - * otherwise return this type - */ - def remove(clazz: Symbol): Type = this - /** For a curried/nullary method or poly type its non-method result type, * the type itself for all other types */ def finalResultType: Type = this @@ -661,16 +634,6 @@ trait Types extends api.Types { self: SymbolTable => def nonPrivateMember(name: Name): Symbol = memberBasedOnName(name, BridgeAndPrivateFlags) - /** All members with the given flags, excluding bridges. - */ - def membersWithFlags(requiredFlags: Long): Scope = - membersBasedOnFlags(BridgeFlags, requiredFlags) - - /** All non-private members with the given flags, excluding bridges. - */ - def nonPrivateMembersWithFlags(requiredFlags: Long): Scope = - membersBasedOnFlags(BridgeAndPrivateFlags, requiredFlags) - /** The non-private member with given name, admitting members with given flags `admit`. * "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE * flag are usually excluded from findMember results, but supplying any of those flags @@ -691,7 +654,6 @@ trait Types extends api.Types { self: SymbolTable => */ def membersBasedOnFlags(excludedFlags: Long, requiredFlags: Long): Scope = findMembers(excludedFlags, requiredFlags) -// findMember(nme.ANYNAME, excludedFlags, requiredFlags, false).alternatives def memberBasedOnName(name: Name, excludedFlags: Long): Symbol = findMember(name, excludedFlags, 0, false) @@ -806,7 +768,6 @@ trait Types extends api.Types { self: SymbolTable => else substThis(from, to).substSym(symsFrom, symsTo) /** Returns all parts of this type which satisfy predicate `p` */ - def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p) collect this def withFilter(p: Type => Boolean) = new FilterMapForeach(p) class FilterMapForeach(p: Type => Boolean) extends FilterTypeCollector(p){ @@ -836,9 +797,6 @@ trait Types extends api.Types { self: SymbolTable => /** Does this type contain a reference to this symbol? */ def contains(sym: Symbol): Boolean = new ContainsCollector(sym).collect(this) - /** Does this type contain a reference to this type */ - def containsTp(tp: Type): Boolean = new ContainsTypeCollector(tp).collect(this) - /** Is this type a subtype of that type? */ def <:<(that: Type): Boolean = { if (Statistics.canEnable) stat_<:<(that) @@ -899,11 +857,6 @@ trait Types extends api.Types { self: SymbolTable => else isSameType(this, that)) ); - /** Does this type implement symbol `sym` with same or stronger type? */ - def specializes(sym: Symbol): Boolean = - if (explainSwitch) explain("specializes", specializesSym, this, sym) - else specializesSym(this, sym) - /** Is this type close enough to that type so that members * with the two type would override each other? * This means: @@ -1241,10 +1194,6 @@ trait Types extends api.Types { self: SymbolTable => def setAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this) def withAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this) - /** Remove any annotations from this type and from any - * types embedded in this type. */ - def stripAnnotations = StripAnnotationsMap(this) - /** Set the self symbol of an annotated type, or do nothing * otherwise. 
*/ def withSelfsym(sym: Symbol) = this @@ -1337,7 +1286,6 @@ trait Types extends api.Types { self: SymbolTable => override def baseType(clazz: Symbol): Type = this override def safeToString: String = "" override def narrow: Type = this - // override def isNullable: Boolean = true override def kind = "ErrorType" } @@ -1347,7 +1295,6 @@ trait Types extends api.Types { self: SymbolTable => case object WildcardType extends Type { override def isWildcard = true override def safeToString: String = "?" - // override def isNullable: Boolean = true override def kind = "WildcardType" } /** BoundedWildcardTypes, used only during type inference, are created in @@ -1372,7 +1319,6 @@ trait Types extends api.Types { self: SymbolTable => case object NoType extends Type { override def isTrivial: Boolean = true override def safeToString: String = "" - // override def isNullable: Boolean = true override def kind = "NoType" } @@ -1382,7 +1328,6 @@ trait Types extends api.Types { self: SymbolTable => override def isStable: Boolean = true override def prefixString = "" override def safeToString: String = "" - // override def isNullable: Boolean = true override def kind = "NoPrefixType" } @@ -1390,7 +1335,6 @@ trait Types extends api.Types { self: SymbolTable => */ abstract case class ThisType(sym: Symbol) extends SingletonType with ThisTypeApi { assert(sym.isClass, sym) - //assert(sym.isClass && !sym.isModuleClass || sym.isRoot, sym) override def isTrivial: Boolean = sym.isPackageClass override def isNotNull = true override def typeSymbol = sym @@ -1431,7 +1375,6 @@ trait Types extends api.Types { self: SymbolTable => } override def isGround = sym.isPackageClass || pre.isGround - // override def isNullable = underlying.isNullable override def isNotNull = underlying.isNotNull private[reflect] var underlyingCache: Type = NoType private[reflect] var underlyingPeriod = NoPeriod @@ -1533,7 +1476,6 @@ trait Types extends api.Types { self: SymbolTable => private def emptyUpperBound = typeIsAny(hi) || hi.isWildcard def isEmptyBounds = emptyLowerBound && emptyUpperBound - // override def isNullable: Boolean = NullClass.tpe <:< lo; override def safeToString = lowerString + upperString override def kind = "TypeBoundsType" } @@ -1618,9 +1560,6 @@ trait Types extends api.Types { self: SymbolTable => override def isStructuralRefinement: Boolean = typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement) - // override def isNullable: Boolean = - // parents forall (p => p.isNullable && !p.typeSymbol.isAbstractType); - override def safeToString: String = parentsString(parents) + ( (if (settings.debug.value || parents.isEmpty || (decls.elems ne null)) fullyInitializeScope(decls).mkString("{", "; ", "}") else "") @@ -2031,11 +1970,6 @@ trait Types extends api.Types { self: SymbolTable => change } - // override def isNullable: Boolean = - // symbol == AnyClass || - // symbol != NothingClass && (symbol isSubClass ObjectClass) && !(symbol isSubClass NonNullClass); - - // override def isNonNull: Boolean = symbol == NonNullClass || super.isNonNull; override def kind = "ClassInfoType" override def safeToString = @@ -2069,8 +2003,6 @@ trait Types extends api.Types { self: SymbolTable => override def deconst: Type = underlying override def safeToString: String = underlying.toString + "(" + value.escapedStringValue + ")" - // override def isNullable: Boolean = value.value eq null - // override def isNonNull: Boolean = value.value ne null override def kind = "ConstantType" } @@ -2332,7 +2264,6 @@ trait Types extends 
api.Types { self: SymbolTable => } override def isStable = bounds.hi.typeSymbol isSubClass SingletonClass override def bounds = thisInfo.bounds - // def transformInfo(tp: Type): Type = appliedType(tp.asSeenFrom(pre, sym.owner), typeArgsOrDummies) override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = transform(bounds.hi).baseTypeSeq prepend this override def kind = "AbstractTypeRef" } @@ -2885,21 +2816,9 @@ trait Types extends api.Types { self: SymbolTable => override def safeToString = pre.toString + targs.mkString("(with type arguments ", ", ", ")"); override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs) -// override def memberType(sym: Symbol) = pre.memberType(sym) match { -// case PolyType(tparams, restp) => -// restp.subst(tparams, targs) -// /* I don't think this is needed, as existential types close only over value types -// case ExistentialType(tparams, qtpe) => -// existentialAbstraction(tparams, qtpe.memberType(sym)) -// */ -// case ErrorType => -// ErrorType -// } override def kind = "AntiPolyType" } - //private var tidCount = 0 //DEBUG - object HasTypeMember { def apply(name: TypeName, tp: Type): Type = { val bound = refinedType(List(WildcardType), NoSymbol) @@ -2914,16 +2833,6 @@ trait Types extends api.Types { self: SymbolTable => } } - // Not used yet. - object HasTypeParams { - def unapply(tp: Type): Option[(List[Symbol], Type)] = tp match { - case AnnotatedType(_, tp, _) => unapply(tp) - case ExistentialType(tparams, qtpe) => Some((tparams, qtpe)) - case PolyType(tparams, restpe) => Some((tparams, restpe)) - case _ => None - } - } - //@M // a TypeVar used to be a case class with only an origin and a constr // then, constr became mutable (to support UndoLog, I guess), @@ -3017,7 +2926,6 @@ trait Types extends api.Types { self: SymbolTable => require(params.nonEmpty, this) override def isHigherKinded = true - override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName) } /** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.) @@ -3032,10 +2940,6 @@ trait Types extends api.Types { self: SymbolTable => override def params: List[Symbol] = zippedArgs map (_._1) override def typeArgs: List[Type] = zippedArgs map (_._2) - - override protected def typeVarString = ( - zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]") - ) } trait UntouchableTypeVar extends TypeVar { @@ -3079,7 +2983,6 @@ trait Types extends api.Types { self: SymbolTable => * in operations that are exposed from types. Hence, no syncing of `constr` * or `encounteredHigherLevel` or `suspended` accesses should be necessary. */ -// var constr = constr0 def instValid = constr.instValid override def isGround = instValid && constr.inst.isGround @@ -3355,7 +3258,6 @@ trait Types extends api.Types { self: SymbolTable => ).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#" } private def levelString = if (settings.explaintypes.value) level else "" - protected def typeVarString = originName override def safeToString = ( if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>" else if (constr.inst ne NoType) "=?" 
+ constr.inst @@ -3516,11 +3418,6 @@ trait Types extends api.Types { self: SymbolTable => (if (typeParams.isEmpty) "" else typeParamsString(this)) + super.safeToString } - // def mkLazyType(tparams: Symbol*)(f: Symbol => Unit): LazyType = ( - // if (tparams.isEmpty) new LazyType { override def complete(sym: Symbol) = f(sym) } - // else new LazyPolyType(tparams.toList) { override def complete(sym: Symbol) = f(sym) } - // ) - // Creators --------------------------------------------------------------- /** Rebind symbol `sym` to an overriding member in type `pre`. */ @@ -3702,25 +3599,6 @@ trait Types extends api.Types { self: SymbolTable => def appliedType(tyconSym: Symbol, args: Type*): Type = appliedType(tyconSym.typeConstructor, args.toList) - /** A creator for existential types where the type arguments, - * rather than being applied directly, are interpreted as the - * upper bounds of unknown types. For instance if the type argument - * list given is List(AnyRefClass), the resulting type would be - * e.g. Set[_ <: AnyRef] rather than Set[AnyRef] . - */ - def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = { - tycon match { - case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) => - val eparams = typeParamsToExistentials(sym) - val bounds = args map (TypeBounds upper _) - foreach2(eparams, bounds)(_ setInfo _) - - newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe))) - case _ => - appliedType(tycon, args) - } - } - /** A creator and extractor for type parameterizations that strips empty type parameter lists. * Use this factory method to indicate the type has kind * (it's a polymorphic value) * until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty). @@ -3822,18 +3700,6 @@ trait Types extends api.Types { self: SymbolTable => } } - /** Substitutes the empty scope for any non-empty decls in the type. */ - object dropAllRefinements extends TypeMap { - def apply(tp: Type): Type = tp match { - case rt @ RefinedType(parents, decls) if !decls.isEmpty => - mapOver(copyRefinedType(rt, parents, EmptyScope)) - case ClassInfoType(parents, decls, clazz) if !decls.isEmpty => - mapOver(ClassInfoType(parents, EmptyScope, clazz)) - case _ => - mapOver(tp) - } - } - /** Type with all top-level occurrences of abstract types replaced by their bounds */ def abstractTypesToBounds(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala ) case TypeRef(_, sym, _) if sym.isAbstractType => @@ -4165,8 +4031,6 @@ trait Types extends api.Types { self: SymbolTable => case rtp @ RefinedType(parents, decls) => val parents1 = parents mapConserve this val decls1 = mapOver(decls) - //if ((parents1 eq parents) && (decls1 eq decls)) tp - //else refinementOfClass(tp.typeSymbol, parents1, decls1) copyRefinedType(rtp, parents1, decls1) case ExistentialType(tparams, result) => val tparams1 = mapOver(tparams) @@ -4301,21 +4165,6 @@ trait Types extends api.Types { self: SymbolTable => } } - /** A collector that tests for existential types appearing at given variance in a type - * @PP: Commenting out due to not being used anywhere. 
- */ - // class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) with VariantTypeMap { - // variance = v - // - // def traverse(tp: Type) = tp match { - // case ExistentialType(_, _) if (variance == v) => result = true - // case _ => mapOver(tp) - // } - // } - // - // val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1) - // val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1) - def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = { val eparams = mapWithIndex(tparams)((tparam, i) => clazz.newExistential(newTypeName("?"+i), clazz.pos) setInfo tparam.info.bounds) @@ -4849,15 +4698,6 @@ trait Types extends api.Types { self: SymbolTable => } } - object StripAnnotationsMap extends TypeMap { - def apply(tp: Type): Type = tp match { - case AnnotatedType(_, atp, _) => - mapOver(atp) - case tp => - mapOver(tp) - } - } - /** A map to convert every occurrence of a wildcard type to a fresh * type variable */ object wildcardToTypeVarMap extends TypeMap { @@ -4919,8 +4759,6 @@ trait Types extends api.Types { self: SymbolTable => /** A map to implement the `filter` method. */ class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) { - def withFilter(q: Type => Boolean) = new FilterTypeCollector(tp => p(tp) && q(tp)) - override def collect(tp: Type) = super.collect(tp).reverse def traverse(tp: Type) { @@ -5381,108 +5219,7 @@ trait Types extends api.Types { self: SymbolTable => case _ => tp.normalize } */ -/* - private def isSameType0(tp1: Type, tp2: Type): Boolean = { - if (tp1 eq tp2) return true - ((tp1, tp2) match { - case (ErrorType, _) => true - case (WildcardType, _) => true - case (_, ErrorType) => true - case (_, WildcardType) => true - - case (NoType, _) => false - case (NoPrefix, _) => tp2.typeSymbol.isPackageClass - case (_, NoType) => false - case (_, NoPrefix) => tp1.typeSymbol.isPackageClass - - case (ThisType(sym1), ThisType(sym2)) - if (sym1 == sym2) => - true - case (SingleType(pre1, sym1), SingleType(pre2, sym2)) - if (equalSymsAndPrefixes(sym1, pre1, sym2, pre2)) => - true -/* - case (SingleType(pre1, sym1), ThisType(sym2)) - if (sym1.isModule && - sym1.moduleClass == sym2 && - pre1 =:= sym2.owner.thisType) => - true - case (ThisType(sym1), SingleType(pre2, sym2)) - if (sym2.isModule && - sym2.moduleClass == sym1 && - pre2 =:= sym1.owner.thisType) => - true -*/ - case (ConstantType(value1), ConstantType(value2)) => - value1 == value2 - case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) => - equalSymsAndPrefixes(sym1, pre1, sym2, pre2) && - ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) || - isSameTypes(args1, args2)) - // @M! normalize reduces higher-kinded case to PolyType's - case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) => - def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall { - sym2 => - var e1 = s1.lookupEntry(sym2.name) - (e1 ne null) && { - val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner.thisType) - var isEqual = false - while (!isEqual && (e1 ne null)) { - isEqual = e1.sym.info =:= substSym - e1 = s1.lookupNextEntry(e1) - } - isEqual - } - } - //Console.println("is same? 
" + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG - isSameTypes(parents1, parents2) && isSubScope(ref1, ref2) && isSubScope(ref2, ref1) - case (MethodType(params1, res1), MethodType(params2, res2)) => - // new dependent types: probably fix this, use substSym as done for PolyType - (isSameTypes(tp1.paramTypes, tp2.paramTypes) && - res1 =:= res2 && - tp1.isImplicit == tp2.isImplicit) - case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => - // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length))) - (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210 - res1 =:= res2.substSym(tparams2, tparams1) - case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) => - (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210 - res1 =:= res2.substSym(tparams2, tparams1) - case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => - lo1 =:= lo2 && hi1 =:= hi2 - case (BoundedWildcardType(bounds), _) => - bounds containsType tp2 - case (_, BoundedWildcardType(bounds)) => - bounds containsType tp1 - case (tv @ TypeVar(_,_), tp) => - tv.registerTypeEquality(tp, true) - case (tp, tv @ TypeVar(_,_)) => - tv.registerTypeEquality(tp, false) - case (AnnotatedType(_,_,_), _) => - annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations - case (_, AnnotatedType(_,_,_)) => - annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations - case (_: SingletonType, _: SingletonType) => - var origin1 = tp1 - while (origin1.underlying.isInstanceOf[SingletonType]) { - assert(origin1 ne origin1.underlying, origin1) - origin1 = origin1.underlying - } - var origin2 = tp2 - while (origin2.underlying.isInstanceOf[SingletonType]) { - assert(origin2 ne origin2.underlying, origin2) - origin2 = origin2.underlying - } - ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2) - case _ => - false - }) || { - val tp1n = normalizePlus(tp1) - val tp2n = normalizePlus(tp2) - ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n) - } - } -*/ + private def isSameType1(tp1: Type, tp2: Type): Boolean = { if ((tp1 eq tp2) || (tp1 eq ErrorType) || (tp1 eq WildcardType) || @@ -5755,18 +5492,6 @@ trait Types extends api.Types { self: SymbolTable => false } - @deprecated("The compiler doesn't use this so you shouldn't either - it will be removed", "2.10.0") - def instTypeVar(tp: Type): Type = tp match { - case TypeRef(pre, sym, args) => - copyTypeRef(tp, instTypeVar(pre), sym, args) - case SingleType(pre, sym) => - singleType(instTypeVar(pre), sym) - case TypeVar(_, constr) => - instTypeVar(constr.inst) - case _ => - tp - } - def isErrorOrWildcard(tp: Type) = (tp eq ErrorType) || (tp eq WildcardType) def isSingleType(tp: Type) = tp match { @@ -5862,7 +5587,7 @@ trait Types extends api.Types { self: SymbolTable => * useful as documentation; it is likely that !isNonValueType(tp) * will serve better than isValueType(tp). */ - def isValueType(tp: Type) = isValueElseNonValue(tp) + /** def isValueType(tp: Type) = isValueElseNonValue(tp) */ /** SLS 3.3, Non-Value Types * Is the given type definitely a non-value type, as defined in SLS 3.3? 
@@ -5873,7 +5598,7 @@ trait Types extends api.Types { self: SymbolTable => * not designated non-value types because there is code which depends on using * them as type arguments, but their precise status is unclear. */ - def isNonValueType(tp: Type) = !isValueElseNonValue(tp) + /** def isNonValueType(tp: Type) = !isValueElseNonValue(tp) */ def isNonRefinementClassType(tpe: Type) = tpe match { case SingleType(_, sym) => sym.isModuleClass @@ -5922,8 +5647,6 @@ trait Types extends api.Types { self: SymbolTable => corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg) } - def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1 - /** Does type `tp1` conform to `tp2`? */ private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = { if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true @@ -6127,18 +5850,6 @@ trait Types extends api.Types { self: SymbolTable => !(sym isNonBottomSubClass AnyValClass) && !(sym isNonBottomSubClass NotNullClass) - /** Are `tps1` and `tps2` lists of equal length such that all elements - * of `tps1` conform to corresponding elements of `tps2`? - */ - def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _) - - /** Does type `tp` implement symbol `sym` with same or - * stronger type? Exact only if `sym` is a member of some - * refinement type, otherwise we might return false negatives. - */ - def specializesSym(tp: Type, sym: Symbol): Boolean = - specializesSym(tp, sym, AnyDepth) - def specializesSym(tp: Type, sym: Symbol, depth: Int): Boolean = tp.typeSymbol == NothingClass || tp.typeSymbol == NullClass && containsNull(sym.owner) || { @@ -6580,10 +6291,6 @@ trait Types extends api.Types { self: SymbolTable => case _ => t } - def elimRefinement(t: Type) = t match { - case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents) - case _ => t - } /** Eliminate from list of types all elements which are a subtype * of some other element of the list. */ @@ -6628,28 +6335,12 @@ trait Types extends api.Types { self: SymbolTable => (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true) else (lub(ts), false) - def weakGlb(ts: List[Type]) = { - if (ts.nonEmpty && (ts forall isNumericValueType)) { - val nglb = numericGlb(ts) - if (nglb != NoType) (nglb, true) - else (glb(ts), false) - } else if (ts exists typeHasAnnotations) { - (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true) - } else (glb(ts), false) - } - def numericLub(ts: List[Type]) = ts reduceLeft ((t1, t2) => if (isNumericSubType(t1, t2)) t2 else if (isNumericSubType(t2, t1)) t1 else IntClass.tpe) - def numericGlb(ts: List[Type]) = - ts reduceLeft ((t1, t2) => - if (isNumericSubType(t1, t2)) t1 - else if (isNumericSubType(t2, t1)) t2 - else NoType) - def isWeakSubType(tp1: Type, tp2: Type) = tp1.deconst.normalize match { case TypeRef(_, sym1, _) if isNumericValueClass(sym1) => @@ -7011,9 +6702,6 @@ trait Types extends api.Types { self: SymbolTable => // Without this, the matchesType call would lead to type variables on both // sides of a subtyping/equality judgement, which can lead to recursive types // being constructed. See pos/t0851 for a situation where this happens. 
- def suspendingTypeVarsInType[T](tp: Type)(op: => T): T = - suspendingTypeVars(typeVarsInType(tp))(op) - @inline final def suspendingTypeVars[T](tvs: List[TypeVar])(op: => T): T = { val saved = tvs map (_.suspended) tvs foreach (_.suspended = true) @@ -7251,8 +6939,7 @@ trait Types extends api.Types { self: SymbolTable => /** Members which can be imported into other scopes. */ - def importableMembers(clazz: Symbol): Scope = importableMembers(clazz.info) - def importableMembers(pre: Type): Scope = pre.members filter isImportable + def importableMembers(pre: Type): Scope = pre.members filter isImportable def objToAny(tp: Type): Type = if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe @@ -7347,7 +7034,6 @@ trait Types extends api.Types { self: SymbolTable => object TypesStats { import BaseTypeSeqsStats._ val rawTypeCount = Statistics.newCounter ("#raw type creations") - val asSeenFromCount = Statistics.newCounter ("#asSeenFrom ops") val subtypeCount = Statistics.newCounter ("#subtype ops") val sametypeCount = Statistics.newCounter ("#sametype ops") val lubCount = Statistics.newCounter ("#toplevel lubs/glbs") diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala index e8ee202978..34c6fe234c 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala @@ -91,9 +91,6 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { // -- Basic input routines -------------------------------------------- - /** Peek at the current byte without moving the read index */ - def peekByte(): Int = bytes(readIndex) - /** Read a byte */ def readByte(): Int = { val x = bytes(readIndex); readIndex += 1; x diff --git a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala index 94b2f77ff9..3722c77aa2 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala @@ -115,7 +115,6 @@ object PickleFormat { */ val MajorVersion = 5 val MinorVersion = 0 - def VersionString = "V" + MajorVersion + "." 
+ MinorVersion final val TERMname = 1 final val TYPEname = 2 diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 551ba4ee5c..2dab9b37b4 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -186,8 +186,6 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { case _ => errorBadSignature("bad name tag: " + tag) } } - protected def readTermName(): TermName = readName().toTermName - protected def readTypeName(): TypeName = readName().toTypeName private def readEnd() = readNat() + readIndex /** Read a symbol */ @@ -793,7 +791,6 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { protected def readTreeRef(): Tree = at(readNat(), readTree) protected def readTypeNameRef(): TypeName = readNameRef().toTypeName - protected def readTermNameRef(): TermName = readNameRef().toTermName protected def readTemplateRef(): Template = readTreeRef() match { @@ -829,11 +826,6 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { protected def errorBadSignature(msg: String) = throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg) - protected def errorMissingRequirement(name: Name, owner: Symbol): Symbol = - mirrorThatLoaded(owner).missingHook(owner, name) orElse MissingRequirementError.signal( - s"bad reference while unpickling $filename: ${name.longString} not found in ${owner.tpe.widen}" - ) - def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type) {} // can't do it; need a compiler for that. def newLazyTypeRef(i: Int): LazyType = new LazyTypeRef(i) @@ -845,7 +837,6 @@ abstract class UnPickler /*extends scala.reflect.generic.UnPickler*/ { * error reporting, so we rely on the typechecker to report the error). 
*/ def toTypeError(e: MissingRequirementError) = { - // e.printStackTrace() new TypeError(e.msg) } diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 2ba15e0776..0d644aa73e 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -40,8 +40,6 @@ trait Collections { mforeach(xss)(x => if ((res eq null) && p(x)) res = Some(x)) if (res eq null) None else res } - final def mfilter[A](xss: List[List[A]])(p: A => Boolean) = - for (xs <- xss; x <- xs; if p(x)) yield x final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = { val lb = new ListBuffer[C] @@ -78,19 +76,6 @@ trait Collections { lb.toList } - final def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = { - val buf = new ListBuffer[A] - val seen = mutable.Set[B]() - xs foreach { x => - val y = f(x) - if (!seen(y)) { - buf += x - seen += y - } - } - buf.toList - } - @tailrec final def flattensToEmpty(xss: Seq[Seq[_]]): Boolean = { xss.isEmpty || xss.head.isEmpty && flattensToEmpty(xss.tail) } @@ -189,18 +174,6 @@ trait Collections { } false } - final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = { - var ys1 = xs1 - var ys2 = xs2 - while (!ys1.isEmpty && !ys2.isEmpty) { - if (!f(ys1.head, ys2.head)) - return false - - ys1 = ys1.tail - ys2 = ys2.tail - } - true - } final def forall3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Boolean): Boolean = { var ys1 = xs1 var ys2 = xs2 @@ -222,6 +195,3 @@ trait Collections { case _: IllegalArgumentException => None } } - -object Collections extends Collections { } - diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala index 4135f3c469..74b6a54c6e 100644 --- a/src/reflect/scala/reflect/internal/util/HashSet.scala +++ b/src/reflect/scala/reflect/internal/util/HashSet.scala @@ -6,8 +6,6 @@ package scala.reflect.internal.util object HashSet { - def apply[T >: Null <: AnyRef](): HashSet[T] = this(16) - def apply[T >: Null <: AnyRef](label: String): HashSet[T] = this(label, 16) def apply[T >: Null <: AnyRef](initialCapacity: Int): HashSet[T] = this("No Label", initialCapacity) def apply[T >: Null <: AnyRef](label: String, initialCapacity: Int): HashSet[T] = new HashSet[T](label, initialCapacity) diff --git a/src/reflect/scala/reflect/internal/util/Origins.scala b/src/reflect/scala/reflect/internal/util/Origins.scala index 3259a12163..a2b9e24ebc 100644 --- a/src/reflect/scala/reflect/internal/util/Origins.scala +++ b/src/reflect/scala/reflect/internal/util/Origins.scala @@ -6,9 +6,7 @@ package scala.reflect package internal.util -import NameTransformer._ import scala.collection.{ mutable, immutable } -import Origins._ /** A debugging class for logging from whence a method is being called. * Say you wanted to discover who was calling phase_= in SymbolTable. 
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala index 0725e9775b..bbc95feaab 100644 --- a/src/reflect/scala/reflect/internal/util/Position.scala +++ b/src/reflect/scala/reflect/internal/util/Position.scala @@ -128,7 +128,7 @@ abstract class Position extends scala.reflect.api.Position { self => def endOrPoint: Int = point @deprecated("use point instead", "2.9.0") - def offset: Option[Int] = if (isDefined) Some(point) else None + def offset: Option[Int] = if (isDefined) Some(point) else None // used by sbt /** The same position with a different start value (if a range) */ def withStart(off: Int): Position = this diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala index bc2d0ee4db..dd2a6e21f1 100644 --- a/src/reflect/scala/reflect/internal/util/SourceFile.scala +++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala @@ -24,7 +24,6 @@ abstract class SourceFile { assert(offset < length, file + ": " + offset + " >= " + length) new OffsetPosition(this, offset) } - def position(line: Int, column: Int) : Position = new OffsetPosition(this, lineToOffset(line) + column) def offsetToLine(offset: Int): Int def lineToOffset(index : Int): Int @@ -37,9 +36,6 @@ abstract class SourceFile { def dbg(offset: Int) = (new OffsetPosition(this, offset)).dbgString def path = file.path - def beginsWith(offset: Int, text: String): Boolean = - (content drop offset) startsWith text - def lineToString(index: Int): String = content drop lineToOffset(index) takeWhile (c => !isLineBreakChar(c.toChar)) mkString "" @@ -81,7 +77,6 @@ object ScriptSourceFile { } else 0 } - def stripHeader(cs: Array[Char]): Array[Char] = cs drop headerLength(cs) def apply(file: AbstractFile, content: Array[Char]) = { val underlying = new BatchSourceFile(file, content) @@ -91,7 +86,6 @@ object ScriptSourceFile { stripped } } -import ScriptSourceFile._ class ScriptSourceFile(underlying: BatchSourceFile, content: Array[Char], override val start: Int) extends BatchSourceFile(underlying.file, content) { override def isSelfContained = false diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala index 5645eb4889..93bbfdd273 100644 --- a/src/reflect/scala/reflect/internal/util/StringOps.scala +++ b/src/reflect/scala/reflect/internal/util/StringOps.scala @@ -6,7 +6,6 @@ ** |/ ** \* */ - package scala.reflect.internal.util /** This object provides utility methods to extract elements @@ -16,24 +15,8 @@ package scala.reflect.internal.util * @version 1.0 */ trait StringOps { - def onull(s: String) = if (s == null) "" else s - def oempty(xs: String*) = xs filterNot (x => x == null || x == "") - def ojoin(xs: String*): String = oempty(xs: _*) mkString " " - def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep - def ojoinOr(xs: Seq[String], sep: String, orElse: String) = { - val ys = oempty(xs: _*) - if (ys.isEmpty) orElse else ys mkString sep - } - def trimTrailingSpace(s: String) = { - if (s.length == 0 || !s.charAt(s.length - 1).isWhitespace) s - else { - var idx = s.length - 1 - while (idx >= 0 && s.charAt(idx).isWhitespace) - idx -= 1 - - s.substring(0, idx + 1) - } - } + def oempty(xs: String*) = xs filterNot (x => x == null || x == "") + def ojoin(xs: String*): String = oempty(xs: _*) mkString " " def longestCommonPrefix(xs: List[String]): String = { if (xs.isEmpty || xs.contains("")) "" else 
xs.head.head match { @@ -57,14 +40,6 @@ trait StringOps { def words(str: String): List[String] = decompose(str, ' ') - def stripPrefixOpt(str: String, prefix: String): Option[String] = - if (str startsWith prefix) Some(str drop prefix.length) - else None - - def stripSuffixOpt(str: String, suffix: String): Option[String] = - if (str endsWith suffix) Some(str dropRight suffix.length) - else None - def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] = splitAt(str, str indexWhere f, doDropIndex) diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala index 8e2bcc2ff7..04ecfe8d76 100644 --- a/src/reflect/scala/reflect/internal/util/TableDef.scala +++ b/src/reflect/scala/reflect/internal/util/TableDef.scala @@ -67,12 +67,6 @@ class TableDef[T](_cols: Column[T]*) { override def toString = allToSeq mkString "\n" } - def formatterFor(rows: Seq[T]): T => String = { - val formatStr = new Table(rows).rowFormat - - x => formatStr.format(colApply(x) : _*) - } - def table(rows: Seq[T]) = new Table(rows) override def toString = cols.mkString("TableDef(", ", ", ")") diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala index 7ea8a75417..632890d600 100644 --- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala +++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala @@ -12,12 +12,9 @@ trait TraceSymbolActivity { if (enabled && global.isCompilerUniverse) scala.sys addShutdownHook showAllSymbols() - private type Set[T] = scala.collection.immutable.Set[T] - val allSymbols = mutable.Map[Int, Symbol]() val allChildren = mutable.Map[Int, List[Int]]() withDefaultValue Nil val prevOwners = mutable.Map[Int, List[(Int, Phase)]]() withDefaultValue Nil - val symsCaused = mutable.Map[Int, Int]() withDefaultValue 0 val allTrees = mutable.Set[Tree]() def recordSymbolsInTree(tree: Tree) { diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala index 9882aad5e5..41e74f80e9 100644 --- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala +++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala @@ -1,9 +1,6 @@ package scala.reflect.internal.util import scala.collection.mutable -import scala.collection.mutable.ArrayBuffer -import scala.collection.mutable.Builder -import scala.collection.mutable.SetBuilder import scala.collection.generic.Clearable import scala.runtime.AbstractFunction1 diff --git a/src/reflect/scala/reflect/macros/TreeBuilder.scala b/src/reflect/scala/reflect/macros/TreeBuilder.scala index 204dc40858..fbbbe13201 100644 --- a/src/reflect/scala/reflect/macros/TreeBuilder.scala +++ b/src/reflect/scala/reflect/macros/TreeBuilder.scala @@ -11,7 +11,6 @@ abstract class TreeBuilder { val global: Universe import global._ - import definitions._ /** Builds a reference to value whose type is given stable prefix. * The type must be suitable for this. 
For example, it diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 44fbd55162..d264cc06b4 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -17,12 +17,9 @@ import internal.ClassfileConstants._ import internal.pickling.UnPickler import scala.collection.mutable.{ HashMap, ListBuffer } import internal.Flags._ -//import scala.tools.nsc.util.ScalaClassLoader -//import scala.tools.nsc.util.ScalaClassLoader._ import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance} import scala.language.existentials import scala.runtime.{ScalaRunTime, BoxesRunTime} -import scala.reflect.internal.util.Collections._ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable => @@ -835,20 +832,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni .asMethod } - /** - * The Scala field corresponding to given Java field. - * @param jfield The Java field - * @return A Scala field object that corresponds to `jfield`. - * // ??? should we return the getter instead? - */ - def fieldToScala(jfield: jField): TermSymbol = - toScala(fieldCache, jfield)(_ fieldToScala1 _) - - private def fieldToScala1(jfield: jField): TermSymbol = { - val owner = followStatic(classToScala(jfield.getDeclaringClass), jfield.getModifiers) - (lookup(owner, jfield.getName) suchThat (!_.isMethod) orElse jfieldAsScala(jfield)).asTerm - } - /** * The Scala package corresponding to given Java package */ @@ -1112,13 +1095,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni // -------------------- Scala to Java ----------------------------------- - /** Optionally, the Java package corresponding to a given Scala package, or None if no such Java package exists. - * @param pkg The Scala package - */ - def packageToJavaOption(pkg: ModuleSymbol): Option[jPackage] = packageCache.toJavaOption(pkg) { - Option(jPackage.getPackage(pkg.fullName.toString)) - } - /** The Java class corresponding to given Scala class. * Note: This only works for * - top-level classes diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index 0f70a676fa..a12e7d43d4 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -1,8 +1,6 @@ package scala.reflect package runtime -import internal.{SomePhase, NoPhase, Phase, TreeGen} - /** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders. * * Should not be instantiated directly, use [[scala.reflect.runtime.universe]] instead. 
@@ -11,7 +9,7 @@ import internal.{SomePhase, NoPhase, Phase, TreeGen} */ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self => - def picklerPhase = SomePhase + def picklerPhase = internal.SomePhase def forInteractive = false def forScaladoc = false @@ -26,4 +24,3 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S init() } - diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index 366b4319c3..1154927279 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -83,9 +83,6 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb override protected def createPackageObjectClassSymbol(pos: Position, newFlags: Long): PackageObjectClassSymbol = new PackageObjectClassSymbol(this, pos) with SynchronizedClassSymbol initFlags newFlags - override protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol = - new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags - override protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol = new MethodSymbol(this, pos, name) with SynchronizedMethodSymbol initFlags newFlags diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala index b97913daf0..eadbc0c52e 100644 --- a/src/reflect/scala/reflect/runtime/package.scala +++ b/src/reflect/scala/reflect/runtime/package.scala @@ -6,7 +6,7 @@ package scala.reflect package object runtime { /** The entry point into Scala runtime reflection. - * + * * To use Scala runtime reflection, simply use or import `scala.reflect.runtime.universe._` * * See [[scala.reflect.api.Universe]] or the -- cgit v1.2.3 From 009c57d4622fe69394fe031ad7577a4fdee0b1d9 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 08:30:55 -0800 Subject: Removed code from the typechecker. Removing code from this neighborhood is more difficult than elsewhere, making it all the more important that it be done. 
--- .../scala/tools/nsc/typechecker/Contexts.scala | 22 ----- .../tools/nsc/typechecker/DestructureTypes.scala | 10 -- .../scala/tools/nsc/typechecker/Duplicators.scala | 22 ----- .../scala/tools/nsc/typechecker/Implicits.scala | 6 -- .../tools/nsc/typechecker/MethodSynthesis.scala | 82 +++------------- .../scala/tools/nsc/typechecker/Namers.scala | 6 -- .../tools/nsc/typechecker/NamesDefaults.scala | 2 - .../tools/nsc/typechecker/PatternMatching.scala | 105 +++++++++------------ .../scala/tools/nsc/typechecker/TreeCheckers.scala | 7 -- .../tools/nsc/typechecker/TypeDiagnostics.scala | 13 --- .../scala/tools/nsc/typechecker/Typers.scala | 24 ----- .../scala/tools/nsc/typechecker/Unapplies.scala | 12 --- 12 files changed, 60 insertions(+), 251 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a8d7de6362..16e5e7d4a2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -219,8 +219,6 @@ trait Contexts { self: Analyzer => current } - def logError(err: AbsTypeError) = buffer += err - def withImplicitsEnabled[T](op: => T): T = { val saved = implicitsEnabled implicitsEnabled = true @@ -312,15 +310,6 @@ trait Contexts { self: Analyzer => c } - // TODO: remove? Doesn't seem to be used - def make(unit: CompilationUnit): Context = { - val c = make(unit, EmptyTree, owner, scope, imports) - c.setReportErrors() - c.implicitsEnabled = true - c.macrosEnabled = true - c - } - def makeNewImport(sym: Symbol): Context = makeNewImport(gen.mkWildcardImport(sym)) @@ -489,17 +478,6 @@ trait Contexts { self: Analyzer => sub.isNonBottomSubClass(base) || sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base) - /** Return closest enclosing context that defines a superclass of `clazz`, or a - * companion module of a superclass of `clazz`, or NoContext if none exists */ - def enclosingSuperClassContext(clazz: Symbol): Context = { - var c = this.enclClass - while (c != NoContext && - !clazz.isNonBottomSubClass(c.owner) && - !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.companionClass))) - c = c.outer.enclClass - c - } - /** Return the closest enclosing context that defines a subclass of `clazz` * or a companion object thereof, or `NoContext` if no such context exists. 
*/ diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala index 79cd46e018..73572bcae9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala +++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala @@ -64,15 +64,6 @@ trait DestructureTypes { }, tree.productPrefix ) - def wrapSymbol(label: String, sym: Symbol): Node = { - if (sym eq NoSymbol) wrapEmpty - else atom(label, sym) - } - def wrapInfo(sym: Symbol) = sym.info match { - case TypeBounds(lo, hi) => typeBounds(lo, hi) - case PolyType(tparams, restpe) => polyFunction(tparams, restpe) - case _ => wrapEmpty - } def wrapSymbolInfo(sym: Symbol): Node = { if ((sym eq NoSymbol) || openSymbols(sym)) wrapEmpty else { @@ -95,7 +86,6 @@ trait DestructureTypes { def constant(label: String, const: Constant): Node = atom(label, const) def scope(decls: Scope): Node = node("decls", scopeMemberList(decls.toList)) - def const[T](named: (String, T)): Node = constant(named._1, Constant(named._2)) def resultType(restpe: Type): Node = this("resultType", restpe) def typeParams(tps: List[Symbol]): Node = node("typeParams", symbolList(tps)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 7d58155eb2..9c23b8663c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -19,11 +19,6 @@ abstract class Duplicators extends Analyzer { import global._ import definitions.{ AnyRefClass, AnyValClass } - def retyped(context: Context, tree: Tree): Tree = { - resetClassOwners - (newBodyDuplicator(context)).typed(tree) - } - /** Retype the given tree in the given context. Use this method when retyping * a method in a different class. The typer will replace references to the this of * the old class with the new class, and map symbols through the given 'env'. The @@ -42,9 +37,6 @@ abstract class Duplicators extends Analyzer { protected def newBodyDuplicator(context: Context) = new BodyDuplicator(context) - def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree = - (newBodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis) - /** Return the special typer for duplicate method bodies. */ override def newTyper(context: Context): Typer = newBodyDuplicator(context) @@ -186,20 +178,6 @@ abstract class Duplicators extends Analyzer { stats.foreach(invalidate(_, owner)) } - def retypedMethod(ddef: DefDef, oldThis: Symbol, newThis: Symbol): Tree = { - oldClassOwner = oldThis - newClassOwner = newThis - invalidateAll(ddef.tparams) - mforeach(ddef.vparamss) { vdef => - invalidate(vdef) - vdef.tpe = null - } - ddef.symbol = NoSymbol - enterSym(context, ddef) - debuglog("remapping this of " + oldClassOwner + " to " + newClassOwner) - typed(ddef) - } - /** Optionally cast this tree into some other type, if required. * Unless overridden, just returns the tree. 
*/ diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 576a21fe31..c92f65afdd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -233,10 +233,6 @@ trait Implicits { object HasMember { private val hasMemberCache = perRunCaches.newMap[Name, Type]() def apply(name: Name): Type = hasMemberCache.getOrElseUpdate(name, memberWildcardType(name, WildcardType)) - def unapply(pt: Type): Option[Name] = pt match { - case RefinedType(List(WildcardType), Scope(sym)) if sym.tpe == WildcardType => Some(sym.name) - case _ => None - } } /** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp } @@ -1493,9 +1489,7 @@ object ImplicitsStats { val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount) val findMemberImpl = Statistics.newSubCounter(" of which in implicit", findMemberCount) val subtypeAppInfos = Statistics.newSubCounter(" of which in app impl", subtypeCount) - val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount) val implicitSearchCount = Statistics.newCounter ("#implicit searches", "typer") - val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount) val plausiblyCompatibleImplicits = Statistics.newSubCounter(" #plausibly compatible", implicitSearchCount) val matchingImplicits = Statistics.newSubCounter(" #matching", implicitSearchCount) diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 6aafd32237..e67d6814f1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -29,61 +29,23 @@ trait MethodSynthesis { if (sym.isLazy) ValDef(sym, body) else DefDef(sym, body) - def applyTypeInternal(tags: List[TT[_]]): Type = { - val symbols = tags map compilerSymbolFromTag - val container :: args = symbols - val tparams = container.typeConstructor.typeParams - - // Conservative at present - if manifests were more usable this could do a lot more. - // [Eugene to Paul] all right, they are now. what do you have in mind? 
- require(symbols forall (_ ne NoSymbol), "Must find all tags: " + symbols) - require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container) - require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args) - - appliedType(container, args map (_.tpe): _*) - } - - def companionType[T](implicit ct: CT[T]) = - rootMirror.getRequiredModule(ct.runtimeClass.getName).tpe - - // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]` - def applyType[CC](implicit t1: TT[CC]): Type = - applyTypeInternal(List(t1)) - - def applyType[CC[X1], X1](implicit t1: TT[CC[_]], t2: TT[X1]): Type = - applyTypeInternal(List(t1, t2)) - - def applyType[CC[X1, X2], X1, X2](implicit t1: TT[CC[_,_]], t2: TT[X1], t3: TT[X2]): Type = - applyTypeInternal(List(t1, t2, t3)) - - def applyType[CC[X1, X2, X3], X1, X2, X3](implicit t1: TT[CC[_,_,_]], t2: TT[X1], t3: TT[X2], t4: TT[X3]): Type = - applyTypeInternal(List(t1, t2, t3, t4)) - - def newMethodType[F](owner: Symbol)(implicit t: TT[F]): Type = { - val fnSymbol = compilerSymbolFromTag(t) - val formals = compilerTypeFromTag(t).typeArguments - assert(fnSymbol isSubClass FunctionClass(formals.size - 1), (owner, t)) - val params = owner newSyntheticValueParams formals - MethodType(params, formals.last) - } - - /** The annotations amongst those found on the original symbol which - * should be propagated to this kind of accessor. - */ - def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = { - initial filter { ann => - // There are no meta-annotation arguments attached to `ann` - if (ann.metaAnnotations.isEmpty) { - // A meta-annotation matching `annotKind` exists on `ann`'s definition. - (ann.defaultTargets contains category) || - // `ann`'s definition has no meta-annotations, and `keepClean` is true. - (ann.defaultTargets.isEmpty && keepClean) - } - // There are meta-annotation arguments, and one of them matches `annotKind` - else ann.metaAnnotations exists (_ matches category) + /** The annotations amongst those found on the original symbol which + * should be propagated to this kind of accessor. + */ + def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = { + initial filter { ann => + // There are no meta-annotation arguments attached to `ann` + if (ann.metaAnnotations.isEmpty) { + // A meta-annotation matching `annotKind` exists on `ann`'s definition. + (ann.defaultTargets contains category) || + // `ann`'s definition has no meta-annotations, and `keepClean` is true. + (ann.defaultTargets.isEmpty && keepClean) } + // There are meta-annotation arguments, and one of them matches `annotKind` + else ann.metaAnnotations exists (_ matches category) } - } + } + } import synthesisUtil._ class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) { @@ -118,22 +80,9 @@ trait MethodSynthesis { finishMethod(clazz.info.decls enter m, f) } - private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree = - cloneInternal(original, f, original.name) - def clazzMember(name: Name) = clazz.info nonPrivateMember name def typeInClazz(sym: Symbol) = clazz.thisType memberType sym - /** Function argument takes the newly created method symbol of - * the same type as `name` in clazz, and returns the tree to be - * added to the template. 
- */ - def overrideMethod(name: Name)(f: Symbol => Tree): Tree = - overrideMethod(clazzMember(name))(f) - - def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree = - cloneInternal(original, sym => f(sym setFlag OVERRIDE)) - def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree = cloneInternal(original, f, nameFn(original.name)) @@ -311,7 +260,6 @@ trait MethodSynthesis { // Final methods to make the rest easier to reason about. final def mods = tree.mods final def basisSym = tree.symbol - final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra } trait DerivedFromClassDef extends DerivedFromMemberDef { diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 28bed0f1bf..817b4b7542 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1388,12 +1388,6 @@ trait Namers extends MethodSynthesis { tpe } - def ensureParent(clazz: Symbol, parent: Symbol) = { - val info0 = clazz.info - val info1 = includeParent(info0, parent) - if (info0 ne info1) clazz setInfo info1 - } - class LogTransitions[S](onEnter: S => String, onExit: S => String) { val enabled = settings.debug.value @inline final def apply[T](entity: S)(body: => T): T = { diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 252a738755..14c8d85836 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -41,8 +41,6 @@ trait NamesDefaults { self: Analyzer => blockTyper: Typer ) { } - val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null) - def nameOf(arg: Tree) = arg match { case AssignOrNamedArg(Ident(name), rhs) => Some(name) case _ => None diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 7cb420d2dc..07c12f1034 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -291,8 +291,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // pt is the skolemized version val pt = repeatedToSeq(ptUnCPS) - // val packedPt = repeatedToSeq(typer.packedType(match_, context.owner)) - // the alternative to attaching the default case override would be to simply // append the default to the list of cases and suppress the unreachable case error that may arise (once we detect that...) val matchFailGenOverride = match_.attachments.get[DefaultOverrideMatchAttachment].map{case DefaultOverrideMatchAttachment(default) => ((scrut: Tree) => default)} @@ -547,54 +545,55 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args)) // THE PRINCIPLED SLOW PATH -- NOT USED + // !!! Use it, test it, or delete it, else it is unlikely to be an asset. 
// generate a call to the (synthetically generated) extractor of a case class // NOTE: it's an apply, not a select, since in general an extractor call may have multiple argument lists (including an implicit one) // that we need to preserve, so we supply the scrutinee as Ident(nme.SELECTOR_DUMMY), // and replace that dummy by a reference to the actual binder in translateExtractorPattern - def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = { - // TODO: can we rework the typer so we don't have to do all this twice? - // undo rewrite performed in (5) of adapt - val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun} - val origSym = orig.symbol - val extractor = unapplyMember(origSym.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe) - - if((fun.tpe eq null) || fun.tpe.isError || (extractor eq NoSymbol)) { - None - } else { - // this is a tricky balance: pos/t602.scala, pos/sudoku.scala, run/virtpatmat_alts.scala must all be happy - // bypass typing at own risk: val extractorCall = Select(orig, extractor) setType caseClassApplyToUnapplyTp(fun.tpe) - // can't always infer type arguments (pos/t602): - /* case class Span[K <: Ordered[K]](low: Option[K]) { - override def equals(x: Any): Boolean = x match { - case Span((low0 @ _)) if low0 equals low => true - } - }*/ - // so... leave undetermined type params floating around if we have to - // (if we don't infer types, uninstantiated type params show up later: pos/sudoku.scala) - // (see also run/virtpatmat_alts.scala) - val savedUndets = context.undetparams - val extractorCall = try { - context.undetparams = Nil - silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { - case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError() - case _ => - // this fails to resolve overloading properly... - // Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway - - // patmatDebug("funtpe after = "+ fun.tpe.finalResultType) - // patmatDebug("orig: "+(orig, orig.tpe)) - val tgt = typed(orig, EXPRmode | QUALmode | POLYmode, HasMember(extractor.name)) // can't specify fun.tpe.finalResultType as the type for the extractor's arg, - // as it may have been inferred incorrectly (see t602, where it's com.mosol.sl.Span[Any], instead of com.mosol.sl.Span[?K]) - // patmatDebug("tgt = "+ (tgt, tgt.tpe)) - val oper = typed(Select(tgt, extractor.name), EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType) - // patmatDebug("oper: "+ (oper, oper.tpe)) - Apply(oper, List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway - } - } finally context.undetparams = savedUndets - - Some(this(extractorCall, args)) // TODO: simplify spliceApply? - } - } + // def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = { + // // TODO: can we rework the typer so we don't have to do all this twice? 
+ // // undo rewrite performed in (5) of adapt + // val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun} + // val origSym = orig.symbol + // val extractor = unapplyMember(origSym.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe) + + // if((fun.tpe eq null) || fun.tpe.isError || (extractor eq NoSymbol)) { + // None + // } else { + // // this is a tricky balance: pos/t602.scala, pos/sudoku.scala, run/virtpatmat_alts.scala must all be happy + // // bypass typing at own risk: val extractorCall = Select(orig, extractor) setType caseClassApplyToUnapplyTp(fun.tpe) + // // can't always infer type arguments (pos/t602): + // /* case class Span[K <: Ordered[K]](low: Option[K]) { + // override def equals(x: Any): Boolean = x match { + // case Span((low0 @ _)) if low0 equals low => true + // } + // }*/ + // // so... leave undetermined type params floating around if we have to + // // (if we don't infer types, uninstantiated type params show up later: pos/sudoku.scala) + // // (see also run/virtpatmat_alts.scala) + // val savedUndets = context.undetparams + // val extractorCall = try { + // context.undetparams = Nil + // silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { + // case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError() + // case _ => + // // this fails to resolve overloading properly... + // // Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway + + // // patmatDebug("funtpe after = "+ fun.tpe.finalResultType) + // // patmatDebug("orig: "+(orig, orig.tpe)) + // val tgt = typed(orig, EXPRmode | QUALmode | POLYmode, HasMember(extractor.name)) // can't specify fun.tpe.finalResultType as the type for the extractor's arg, + // // as it may have been inferred incorrectly (see t602, where it's com.mosol.sl.Span[Any], instead of com.mosol.sl.Span[?K]) + // // patmatDebug("tgt = "+ (tgt, tgt.tpe)) + // val oper = typed(Select(tgt, extractor.name), EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType) + // // patmatDebug("oper: "+ (oper, oper.tpe)) + // Apply(oper, List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway + // } + // } finally context.undetparams = savedUndets + + // Some(this(extractorCall, args)) // TODO: simplify spliceApply? 
+ // } + // } } abstract class ExtractorCall(val args: List[Tree]) { @@ -1413,10 +1412,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // local / context-free def _asInstanceOf(b: Symbol, tp: Type): Tree - def _asInstanceOf(t: Tree, tp: Type): Tree def _equals(checker: Tree, binder: Symbol): Tree def _isInstanceOf(b: Symbol, tp: Type): Tree - def and(a: Tree, b: Tree): Tree def drop(tgt: Tree)(n: Int): Tree def index(tgt: Tree)(i: Int): Tree def mkZero(tp: Type): Tree @@ -1458,12 +1455,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL abstract class CommonCodegen extends AbsCodegen { import CODE._ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body) - def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree) def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i)) def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n)) def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya - def and(a: Tree, b: Tree): Tree = a AND b // drop annotations generated by CPS plugin etc, since its annotationchecker rejects T @cps[U] <: Any // let's assume for now annotations don't affect casts, drop them there, and bring them back using the outer Typed tree @@ -1471,10 +1466,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL Typed(gen.mkAsInstanceOf(t, tp.withoutAnnotations, true, false), TypeTree() setType tp) // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly) - def _asInstanceOf(t: Tree, tp: Type): Tree = if (t.tpe != NoType && t.isTyped && typesConform(t.tpe, tp)) t else mkCast(t, tp) def _asInstanceOf(b: Symbol, tp: Type): Tree = if (typesConform(b.info, tp)) REF(b) else mkCast(REF(b), tp) def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false) - // if (typesConform(b.info, tpX)) { patmatDebug("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE } // duplicated out of frustration with cast generation def mkZero(tp: Type): Tree = { @@ -2879,8 +2872,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment }.mkString("\n") - def modelString(model: Model) = varAssignmentString(modelToVarAssignment(model)) - // return constructor call when the model is a true counter example // (the variables don't take into account type information derived from other variables, // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _), @@ -3538,7 +3529,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // for the catch-cases in a try/catch private object typeSwitchMaker extends SwitchMaker { val unchecked = false - def switchableTpe(tp: Type) = true val alternativesSupported = false // TODO: needs either back-end support of flattening of alternatives during typers val canJump = false @@ -3584,11 +3574,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL trait OptimizedCodegen extends 
CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface { override def codegen: AbsCodegen = optimizedCodegen - // trait AbsOptimizedCodegen extends AbsCodegen { - // def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree - // } - // def optimizedCodegen: AbsOptimizedCodegen - // when we know we're targetting Option, do some inlining the optimizer won't do // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard // this is a special instance of the advanced inlining optimization that takes a method call on diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index 710adf5a9c..be7554abe2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -142,13 +142,6 @@ abstract class TreeCheckers extends Analyzer { currentRun.units foreach (x => wrap(x)(check(x))) } - def printingTypings[T](body: => T): T = { - val saved = global.printTypings - global.printTypings = true - val result = body - global.printTypings = saved - result - } def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = { val unit0 = currentUnit currentRun.currentUnit = unit diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 4f5291507e..19f0b56e94 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -37,14 +37,6 @@ trait TypeDiagnostics { import global._ import definitions._ - /** The common situation of making sure nothing is erroneous could be - * nicer if Symbols, Types, and Trees all implemented some common interface - * in which isErroneous and similar would be placed. - */ - def noErroneousTypes(tps: Type*) = tps forall (x => !x.isErroneous) - def noErroneousSyms(syms: Symbol*) = syms forall (x => !x.isErroneous) - def noErroneousTrees(trees: Tree*) = trees forall (x => !x.isErroneous) - /** For errors which are artifacts of the implementation: such messages * indicate that the restriction may be lifted in the future. */ @@ -294,7 +286,6 @@ trait TypeDiagnostics { // distinguished from the other types in the same error message private val savedName = sym.name def restoreName() = sym.name = savedName - def isAltered = sym.name != savedName def modifyName(f: String => String) = sym setName newTypeName(f(sym.name.toString)) /** Prepend java.lang, scala., or Predef. if this type originated @@ -478,10 +469,6 @@ trait TypeDiagnostics { } super.traverse(t) } - def isUnused(t: Tree): Boolean = ( - if (t.symbol.isTerm) isUnusedTerm(t.symbol) - else isUnusedType(t.symbol) - ) def isUnusedType(m: Symbol): Boolean = ( m.isType && !m.isTypeParameterOrSkolem // would be nice to improve this diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index ee5446ee87..670e3902dd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -359,17 +359,6 @@ trait Typers extends Modes with Adaptations with Tags { def privates[T <: Tree](owner: Symbol, tree: T): T = check(owner, EmptyScope, WildcardType, tree) - /** Check that type tree does not refer to entities - * defined in scope scope. - * - * @param scope ... - * @param pt ... 
- * @param tree ... - * @return ... - */ - def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T = - check(NoSymbol, scope, pt, tree) - private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = { this.owner = owner this.scope = scope @@ -5221,8 +5210,6 @@ trait Typers extends Modes with Adaptations with Tags { def typedHigherKindedType(tree: Tree, mode: Int): Tree = typed(tree, HKmode, WildcardType) - def typedHigherKindedType(tree: Tree): Tree = typedHigherKindedType(tree, NOmode) - /** Types a type constructor tree used in a new or supertype */ def typedTypeConstructor(tree: Tree, mode: Int): Tree = { val result = typed(tree, forTypeMode(mode) | FUNmode, WildcardType) @@ -5297,28 +5284,17 @@ trait Typers extends Modes with Adaptations with Tags { case Some(tree1) => transformed -= tree; tree1 case None => typed(tree, mode, pt) } - -/* - def convertToTypeTree(tree: Tree): Tree = tree match { - case TypeTree() => tree - case _ => TypeTree(tree.tpe) - } -*/ } } object TypersStats { import scala.reflect.internal.TypesStats._ - import scala.reflect.internal.BaseTypeSeqsStats._ val typedIdentCount = Statistics.newCounter("#typechecked identifiers") val typedSelectCount = Statistics.newCounter("#typechecked selections") val typedApplyCount = Statistics.newCounter("#typechecked applications") val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount) val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount) val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount) - val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount) - val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount) - val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount) val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos) val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos) val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos) diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index bf44b65406..061c6679da 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -23,7 +23,6 @@ trait Unapplies extends ast.TreeDSL private val unapplyParamName = nme.x_0 - // In the typeCompleter (templateSig) of a case class (resp it's module), // synthetic `copy` (reps `apply`, `unapply`) methods are added. To compute // their signatures, the corresponding ClassDef is needed. During naming (in @@ -46,17 +45,6 @@ trait Unapplies extends ast.TreeDSL } } - /** returns type of the unapply method returning T_0...T_n - * for n == 0, boolean - * for n == 1, Some[T0] - * else Some[Product[Ti]] - */ - def unapplyReturnTypeExpected(argsLength: Int) = argsLength match { - case 0 => BooleanClass.tpe - case 1 => optionType(WildcardType) - case n => optionType(productType((List fill n)(WildcardType))) - } - /** returns unapply or unapplySeq if available */ def unapplyMember(tp: Type): Symbol = (tp member nme.unapply) match { case NoSymbol => tp member nme.unapplySeq -- cgit v1.2.3 From c54432c3e209f1ffbe3a05d0d7ee0532d20e4dd0 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 08:32:27 -0800 Subject: Removed code from scaladoc. Nobody is immune! 
--- src/compiler/scala/tools/nsc/ast/DocComments.scala | 7 ---- src/compiler/scala/tools/nsc/doc/html/Page.scala | 6 --- .../scala/tools/nsc/doc/model/Entity.scala | 27 -------------- .../tools/nsc/doc/model/IndexModelFactory.scala | 2 - .../scala/tools/nsc/doc/model/ModelFactory.scala | 43 ++-------------------- .../doc/model/ModelFactoryImplicitSupport.scala | 25 ++++--------- .../tools/nsc/doc/model/comment/Comment.scala | 3 -- .../nsc/doc/model/comment/CommentFactory.scala | 20 ---------- .../tools/nsc/doc/model/diagram/Diagram.scala | 11 +----- 9 files changed, 13 insertions(+), 131 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index 21407289db..023f3c229c 100755 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -21,11 +21,6 @@ trait DocComments { self: Global => /** The raw doc comment map */ val docComments = mutable.HashMap[Symbol, DocComment]() - /** Associate comment with symbol `sym` at position `pos`. */ - def docComment(sym: Symbol, docStr: String, pos: Position = NoPosition) = - if ((sym ne null) && (sym ne NoSymbol)) - docComments += (sym -> DocComment(docStr, pos)) - /** The raw doc comment of symbol `sym`, as it appears in the source text, "" if missing. */ def rawDocComment(sym: Symbol): String = @@ -120,8 +115,6 @@ trait DocComments { self: Global => getDocComment(sym) map getUseCases getOrElse List() } - def useCases(sym: Symbol): List[(Symbol, String, Position)] = useCases(sym, sym.enclClass) - /** Returns the javadoc format of doc comment string `s`, including wiki expansion */ def toJavaDoc(s: String): String = expandWiki(s) diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/compiler/scala/tools/nsc/doc/html/Page.scala index 62166f7def..ef9beb1dce 100644 --- a/src/compiler/scala/tools/nsc/doc/html/Page.scala +++ b/src/compiler/scala/tools/nsc/doc/html/Page.scala @@ -88,12 +88,6 @@ abstract class Page { def relativeLinkTo(destClass: TemplateEntity): String = relativeLinkTo(templateToPath(destClass)) - /** A relative link from this page to some destination page in the Scaladoc site. - * @param destPage The page that the link will point to. */ - def relativeLinkTo(destPage: HtmlPage): String = { - relativeLinkTo(destPage.path) - } - /** A relative link from this page to some destination path. * @param destPath The path that the link will point to. */ def relativeLinkTo(destPath: List[String]): String = { diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala index 6b24073339..04046accc4 100644 --- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala +++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala @@ -23,10 +23,6 @@ import diagram._ * - type and value parameters; * - annotations. */ trait Entity { - - /** Similar to symbols, so we can track entities */ - def id: Int - /** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName` * instead. 
*/ def name : String @@ -59,9 +55,6 @@ trait Entity { /** Indicates whether this entity lives in the types namespace (classes, traits, abstract/alias types) */ def isType: Boolean - - /** Indicates whether this entity lives in the terms namespace (objects, packages, methods, values) */ - def isTerm: Boolean } object Entity { @@ -97,9 +90,6 @@ trait TemplateEntity extends Entity { /** Whether documentation is available for this template. */ def isDocTemplate: Boolean - /** Whether documentation is available for this template. */ - def isNoDocMemberTemplate: Boolean - /** Whether this template is a case class. */ def isCaseClass: Boolean @@ -174,12 +164,6 @@ trait MemberEntity extends Entity { /** Whether this member is an abstract type. */ def isAbstractType: Boolean - /** Whether this member is a template. */ - def isTemplate: Boolean - - /** Whether this member is implicit. */ - def isImplicit: Boolean - /** Whether this member is abstract. */ def isAbstract: Boolean @@ -381,14 +365,9 @@ trait RootPackage extends Package /** A non-template member (method, value, lazy value, variable, constructor, alias type, and abstract type). */ trait NonTemplateMemberEntity extends MemberEntity { - /** Whether this member is a use case. A use case is a member which does not exist in the documented code. * It corresponds to a real member, and provides a simplified, yet compatible signature for that member. */ def isUseCase: Boolean - - /** Whether this member is a bridge member. A bridge member does only exist for binary compatibility reasons - * and should not appear in ScalaDoc. */ - def isBridge: Boolean } @@ -503,12 +482,6 @@ trait ImplicitConversion { /** The result type after the conversion */ def targetType: TypeEntity - /** The result type after the conversion - * Note: not all targetTypes have a corresponding template. Examples include conversions resulting in refinement - * types. Need to check it's not option! 
- */ - def targetTemplate: Option[TemplateEntity] - /** The components of the implicit conversion type parents */ def targetTypeComponents: List[(TemplateEntity, TypeEntity)] diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala index 10e2f23142..1d6063255d 100755 --- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala @@ -17,8 +17,6 @@ object IndexModelFactory { object result extends mutable.HashMap[Char,SymbolMap] { - /* Owner template ordering */ - implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase } /* symbol name ordering */ implicit def orderingMap = math.Ordering.String.on { x: String => x.toLowerCase } diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index f11f090b4b..96ecf51e44 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -43,11 +43,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def modelFinished: Boolean = _modelFinished private var universe: Universe = null - protected def closestPackage(sym: Symbol) = { - if (sym.isPackage || sym.isPackageClass) sym - else sym.enclosingPackage - } - def makeModel: Option[Universe] = { val universe = new Universe { thisUniverse => thisFactory.universe = thisUniverse @@ -77,7 +72,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */ abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity { - val id = { ids += 1; ids } val name = optimize(sym.nameString) val universe = thisFactory.universe @@ -91,7 +85,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def annotations = sym.annotations.map(makeAnnotation) def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject def isType = sym.name.isTypeName - def isTerm = sym.name.isTermName } trait TemplateImpl extends EntityImpl with TemplateEntity { @@ -103,7 +96,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def isObject = sym.isModule && !sym.isPackage def isCaseClass = sym.isCaseClass def isRootPackage = false - def isNoDocMemberTemplate = false def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this)) } @@ -178,9 +170,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { }) else None - def inheritedFrom = - if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else - makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) }) + def resultType = { def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone case PolyType(_, res) => resultTpe(res) @@ -195,7 +185,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { def isVal = false def isLazyVal = false def isVar = false - def isImplicit = sym.isImplicit def isConstructor = false def isAliasType = false def isAbstractType = false @@ -203,7 +192,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { // for the explanation of conversion == null see comment on flags ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) 
&& (!isImplicitlyInherited)) || sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic - def isTemplate = false + def signature = externalSignature(sym) lazy val signatureCompat = { @@ -257,25 +246,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { */ abstract class MemberTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with MemberTemplateEntity { // no templates cache for this class, each owner gets its own instance - override def isTemplate = true def isDocTemplate = false - override def isNoDocMemberTemplate = true lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name) def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */ - // Seems unused - // def parentTemplates = - // if (sym.isPackage || sym == AnyClass) - // List() - // else - // sym.tpe.parents.flatMap { tpe: Type => - // val tSym = tpe.typeSymbol - // if (tSym != NoSymbol) - // List(makeTemplate(tSym)) - // else - // List() - // } filter (_.isInstanceOf[DocTemplateEntity]) - def parentTypes = if (sym.isPackage || sym == AnyClass) List() else { val tps = (this match { @@ -380,9 +354,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this)).toList // the inherited templates (classes, traits or objects) - var memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this)) + val memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this)) // the direct members (methods, values, vars, types and directly contained templates) - var memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_)) + val memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_)) // the members generated by the symbols in memberSymsEager val ownMembers = (memberSymsEager.flatMap(makeMember(_, None, this))) @@ -438,7 +412,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { else List() ) - override def isTemplate = true override def isDocTemplate = true private[this] lazy val companionSymbol = if (sym.isAliasType || sym.isAbstractType) { @@ -545,7 +518,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { val qualifiedName = conversion.fold(inDefinitionTemplates.head.qualifiedName)(_.conversionQualifiedName) optimize(qualifiedName + "#" + name) } - def isBridge = sym.isBridge def isUseCase = useCaseOf.isDefined override def byConversion: Option[ImplicitConversionImpl] = conversion override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined } @@ -707,7 +679,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { override def inTemplate = this override def toRoot = this :: Nil override def qualifiedName = "_root_" - override def inheritedFrom = Nil override def isRootPackage = true override lazy val memberSyms = (bSym.info.members ++ EmptyPackage.info.members).toList filter { s => @@ -857,12 +828,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { inTpl.members.find(_.sym == aSym) } - @deprecated("Use `findLinkTarget` instead.", "2.10.0") - def findTemplate(query: String): Option[DocTemplateImpl] = { - assert(modelFinished) - docTemplatesCache.values find { (tpl: DocTemplateImpl) => tpl.qualifiedName == query && !packageDropped(tpl) && !tpl.isObject } - } - def findTemplateMaybe(aSym: 
Symbol): Option[DocTemplateImpl] = { assert(modelFinished) docTemplatesCache.get(normalizeTemplate(aSym)).filterNot(packageDropped(_)) diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index c1ca8c1448..015fce294e 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -345,15 +345,6 @@ trait ModelFactoryImplicitSupport { makeRootPackage } - def targetTemplate: Option[TemplateEntity] = toType match { - // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types - // such as refinement types because the template can't represent the type corectly (a template corresponds to a - // package, class, trait or object) - case t: TypeRef => Some(makeTemplate(t.sym)) - case RefinedType(parents, decls) => None - case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None - } - def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl) def convertorMethod: Either[MemberEntity, String] = { @@ -492,14 +483,14 @@ trait ModelFactoryImplicitSupport { /** * Make implicits explicit - Not used curently */ - object implicitToExplicit extends TypeMap { - def apply(tp: Type): Type = mapOver(tp) match { - case MethodType(params, resultType) => - MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType) - case other => - other - } - } + // object implicitToExplicit extends TypeMap { + // def apply(tp: Type): Type = mapOver(tp) match { + // case MethodType(params, resultType) => + // MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType) + // case other => + // other + // } + // } /** * removeImplicitParameters transforms implicit parameters from the view result type into constraints and diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala index 3e172544dd..736727fc1a 100644 --- a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala +++ b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala @@ -102,9 +102,6 @@ abstract class Comment { /** A usage example related to the entity. */ def example: List[Body] - /** The comment as it appears in the source text. 
*/ - def source: Option[String] - /** A description for the primary constructor */ def constructor: Option[Body] diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala index 9617b15068..c798def4cb 100644 --- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala @@ -28,11 +28,6 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member protected val commentCache = mutable.HashMap.empty[(global.Symbol, TemplateImpl), Comment] - def addCommentBody(sym: global.Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): global.Symbol = { - commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None) - sym - } - def comment(sym: global.Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl): Option[Comment] = { val key = (sym, inTpl) if (commentCache isDefinedAt key) @@ -132,7 +127,6 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member val note = note0 val example = example0 val constructor = constructor0 - val source = source0 val inheritDiagram = inheritDiagram0 val contentDiagram = contentDiagram0 val groupDesc = groupDesc0 @@ -957,20 +951,6 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory with Member count } - final def jumpUntil(chars: String): Int = { - assert(chars.length > 0) - var count = 0 - val c = chars.charAt(0) - while (!check(chars) && char != endOfText) { - nextChar() - while (char != c && char != endOfText) { - nextChar() - count += 1 - } - } - count - } - final def jumpUntil(pred: => Boolean): Int = { var count = 0 while (!pred && char != endOfText) { diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala index c2aa1f17f3..150b293b81 100644 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala +++ b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala @@ -36,20 +36,12 @@ case class InheritanceDiagram(thisNode: ThisNode, override def isInheritanceDiagram = true lazy val depthInfo = new DepthInfo { def maxDepth = 3 - def nodeDepth(node: Node) = - if (node == thisNode) 1 - else if (superClasses.contains(node)) 0 - else if (subClasses.contains(node)) 2 - else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1 - else -1 } } trait DepthInfo { /** Gives the maximum depth */ def maxDepth: Int - /** Gives the depth of any node in the diagram or -1 if the node is not in the diagram */ - def nodeDepth(node: Node): Int } abstract class Node { @@ -142,5 +134,4 @@ class ContentDiagramDepth(pack: ContentDiagram) extends DepthInfo { } val maxDepth = _maxDepth - def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1) -} \ No newline at end of file +} -- cgit v1.2.3 From 69d850c8993765e4b3008f8bcc99b90937df9ffb Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 08:34:06 -0800 Subject: Remove code from misc bits of the compiler. They are everywhere. They defy categorization. They are... 
M I S C --- src/compiler/scala/tools/ant/sabbus/Settings.scala | 2 +- src/compiler/scala/tools/cmd/FromString.scala | 9 +-------- src/compiler/scala/tools/cmd/Reference.scala | 1 - .../scala/tools/reflect/ToolBoxFactory.scala | 3 --- src/compiler/scala/tools/util/Javap.scala | 8 -------- src/compiler/scala/tools/util/PathResolver.scala | 23 +++------------------- .../scala/util/continuations/ControlContext.scala | 4 ++-- .../library/scala/util/continuations/package.scala | 6 +++--- src/detach/plugin/scala/tools/detach/Detach.scala | 2 +- 9 files changed, 11 insertions(+), 47 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala index fde61e9564..d0fefdaa03 100644 --- a/src/compiler/scala/tools/ant/sabbus/Settings.scala +++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala @@ -10,7 +10,7 @@ package scala.tools.ant.sabbus import java.io.File -import org.apache.tools.ant.types.{Path, Reference} +import org.apache.tools.ant.types.Path class Settings { diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala index 2a624875ee..433bbb167e 100644 --- a/src/compiler/scala/tools/cmd/FromString.scala +++ b/src/compiler/scala/tools/cmd/FromString.scala @@ -24,18 +24,11 @@ abstract class FromString[+T](implicit t: ru.TypeTag[T]) extends PartialFunction } object FromString { - // We need these because we clash with the String => Path implicits. - private def toFile(s: String) = new File(new java.io.File(s)) + // We need this because we clash with the String => Path implicits. private def toDir(s: String) = new Directory(new java.io.File(s)) /** Path related stringifiers. */ - val ExistingFile: FromString[File] = new FromString[File]()(tagOfFile) { - override def isDefinedAt(s: String) = toFile(s).isFile - def apply(s: String): File = - if (isDefinedAt(s)) toFile(s) - else cmd.runAndExit(println("'%s' is not an existing file." 
format s)) - } val ExistingDir: FromString[Directory] = new FromString[Directory]()(tagOfDirectory) { override def isDefinedAt(s: String) = toDir(s).isDirectory def apply(s: String): Directory = diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala index d4f2060f81..ec2a414065 100644 --- a/src/compiler/scala/tools/cmd/Reference.scala +++ b/src/compiler/scala/tools/cmd/Reference.scala @@ -26,7 +26,6 @@ trait Reference extends Spec { def isUnaryOption(s: String) = unary contains toOpt(s) def isBinaryOption(s: String) = binary contains toOpt(s) def isExpandOption(s: String) = expansionMap contains toOpt(s) - def isAnyOption(s: String) = isUnaryOption(s) || isBinaryOption(s) || isExpandOption(s) def expandArg(arg: String) = expansionMap.getOrElse(fromOpt(arg), List(arg)) diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index f0c88eadea..b1d343cee9 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -392,9 +392,6 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => uttree } - def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = - compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds) - def parse(code: String): u.Tree = { if (compiler.settings.verbose.value) println("parsing "+code) val ctree: compiler.Tree = compiler.parse(code) diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala index 4d94581cc1..381dbd1d87 100644 --- a/src/compiler/scala/tools/util/Javap.scala +++ b/src/compiler/scala/tools/util/Javap.scala @@ -107,19 +107,11 @@ object Javap { type FakeEnvironment = AnyRef type FakePrinter = AnyRef - def apply(path: String): Unit = apply(Seq(path)) - def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show()) - sealed trait JpResult { type ResultType def isError: Boolean def value: ResultType def show(): Unit - // todo - // def header(): String - // def fields(): List[String] - // def methods(): List[String] - // def signatures(): List[String] } class JpError(msg: String) extends JpResult { type ResultType = String diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index 6b0821edf3..5d79a7d6cd 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -18,16 +18,9 @@ import scala.language.postfixOps // https://wiki.scala-lang.org/display/SW/Classpath object PathResolver { - // Imports property/environment functions which suppress - // security exceptions. + // Imports property/environment functions which suppress security exceptions. import AccessControl._ - def firstNonEmpty(xs: String*) = xs find (_ != "") getOrElse "" - - /** Map all classpath elements to absolute paths and reconstruct the classpath. - */ - def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path) - /** pretty print class path */ def ppcp(s: String) = split(s) match { case Nil => "" @@ -45,7 +38,6 @@ object PathResolver { /** Environment variables which java pays attention to so it * seems we do as well. 
*/ - def classPathEnv = envOrElse("CLASSPATH", "") def sourcePathEnv = envOrElse("SOURCEPATH", "") def javaBootClassPath = propOrElse("sun.boot.class.path", searchForBootClasspath) @@ -85,7 +77,6 @@ object PathResolver { def scalaHome = Environment.scalaHome def scalaHomeDir = Directory(scalaHome) - def scalaHomeExists = scalaHomeDir.isDirectory def scalaLibDir = Directory(scalaHomeDir / "lib") def scalaClassesDir = Directory(scalaHomeDir / "classes") @@ -108,15 +99,7 @@ object PathResolver { // classpath as set up by the runner (or regular classpath under -nobootcp) // and then again here. def scalaBootClassPath = "" - // scalaLibDirFound match { - // case Some(dir) if scalaHomeExists => - // val paths = ClassPath expandDir dir.path - // join(paths: _*) - // case _ => "" - // } - def scalaExtDirs = Environment.scalaExtDirs - def scalaPluginPath = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path override def toString = """ @@ -135,7 +118,7 @@ object PathResolver { ) } - def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = { + def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = { // called from scalap val s = new Settings() s.classpath.value = path new PathResolver(s, context) result @@ -160,7 +143,7 @@ object PathResolver { } } } -import PathResolver.{ Defaults, Environment, firstNonEmpty, ppcp } +import PathResolver.{ Defaults, Environment, ppcp } class PathResolver(settings: Settings, context: JavaContext) { def this(settings: Settings) = this(settings, if (settings.inline.value) new JavaContext else DefaultJavaContext) diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala index 44a5b537b6..c196809da9 100644 --- a/src/continuations/library/scala/util/continuations/ControlContext.scala +++ b/src/continuations/library/scala/util/continuations/ControlContext.scala @@ -183,7 +183,7 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val // need filter or other functions? - final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = { + final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = { // called by codegen from SelectiveCPSTransform if (fun eq null) this else { @@ -209,7 +209,7 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val } } - final def mapFinally(f: () => Unit): ControlContext[A,B,C] = { + final def mapFinally(f: () => Unit): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform if (fun eq null) { try { f() diff --git a/src/continuations/library/scala/util/continuations/package.scala b/src/continuations/library/scala/util/continuations/package.scala index 1b50956c93..573fae85e7 100644 --- a/src/continuations/library/scala/util/continuations/package.scala +++ b/src/continuations/library/scala/util/continuations/package.scala @@ -166,7 +166,7 @@ package object continuations { throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled") } - def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = { + def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = { // called in code generated by SelectiveCPSTransform new ControlContext[A, B, B](null, x) } @@ -176,11 +176,11 @@ package object continuations { * a final result. 
* @see shift */ - def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = { + def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform new ControlContext((f:A=>B,g:Exception=>B) => fun(f), null.asInstanceOf[A]) } - def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = { + def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform ctx } diff --git a/src/detach/plugin/scala/tools/detach/Detach.scala b/src/detach/plugin/scala/tools/detach/Detach.scala index 73f6cde58c..499a97b761 100644 --- a/src/detach/plugin/scala/tools/detach/Detach.scala +++ b/src/detach/plugin/scala/tools/detach/Detach.scala @@ -73,7 +73,7 @@ abstract class Detach extends PluginComponent } private val serializableAnnotationInfo = - AnnotationInfo(SerializableAttr.tpe, List(), List()) + AnnotationInfo(requiredClass[scala.annotation.serializable].tpe, List(), List()) /* private val throwsAnnotationInfo = { val RemoteExceptionClass = definitions.getClass("java.rmi.RemoteException") -- cgit v1.2.3 From 373ded2ad31e6c9d85a6e3ca40774913ba2ab4f9 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 13 Nov 2012 08:35:21 -0800 Subject: Remove code from compiler central. All those old-timey methods whose melodies have become unfashionable. --- .../scala/tools/nsc/CompilationUnits.scala | 25 +---- src/compiler/scala/tools/nsc/CompileServer.scala | 2 - src/compiler/scala/tools/nsc/CompilerCommand.scala | 5 - src/compiler/scala/tools/nsc/Global.scala | 84 ++-------------- src/compiler/scala/tools/nsc/ObjectRunner.scala | 5 - src/compiler/scala/tools/nsc/Phases.scala | 2 - src/compiler/scala/tools/nsc/Properties.scala | 1 - src/compiler/scala/tools/nsc/ScriptRunner.scala | 2 - src/compiler/scala/tools/nsc/ast/Printers.scala | 79 --------------- src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 57 +---------- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 94 ------------------ src/compiler/scala/tools/nsc/ast/TreeInfo.scala | 5 - .../scala/tools/nsc/ast/parser/Parsers.scala | 22 +---- .../scala/tools/nsc/ast/parser/Scanners.scala | 42 +------- .../scala/tools/nsc/ast/parser/Tokens.scala | 38 +------- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 16 ---- .../scala/tools/nsc/interactive/BuildManager.scala | 5 - .../scala/tools/nsc/interactive/Global.scala | 35 ------- src/compiler/scala/tools/nsc/io/Jar.scala | 9 -- src/compiler/scala/tools/nsc/io/MsilFile.scala | 5 +- src/compiler/scala/tools/nsc/io/Pickler.scala | 82 ---------------- src/compiler/scala/tools/nsc/io/Socket.scala | 5 +- src/compiler/scala/tools/nsc/io/SourceReader.scala | 3 - src/compiler/scala/tools/nsc/io/package.scala | 16 ---- .../scala/tools/nsc/javac/JavaParsers.scala | 46 +-------- .../scala/tools/nsc/javac/JavaScanners.scala | 37 ------- .../scala/tools/nsc/javac/JavaTokens.scala | 6 -- .../scala/tools/nsc/matching/MatchSupport.scala | 20 ---- src/compiler/scala/tools/nsc/matching/Matrix.scala | 27 ------ .../tools/nsc/matching/ParallelMatching.scala | 6 +- .../scala/tools/nsc/matching/PatternBindings.scala | 7 -- .../scala/tools/nsc/matching/Patterns.scala | 36 +------ .../scala/tools/nsc/settings/AbsSettings.scala | 11 +-- .../scala/tools/nsc/settings/MutableSettings.scala | 10 +- .../scala/tools/nsc/settings/ScalaSettings.scala | 8 +- .../tools/nsc/settings/StandardScalaSettings.scala | 5 - .../scala/tools/nsc/settings/Warnings.scala | 15 +-- .../nsc/symtab/classfile/AbstractFileReader.scala | 7 
+- .../nsc/symtab/classfile/ClassfileParser.scala | 14 --- .../tools/nsc/symtab/classfile/ICodeReader.scala | 6 +- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 106 --------------------- .../tools/nsc/transform/SpecializeTypes.scala | 27 +----- .../scala/tools/nsc/transform/TailCalls.scala | 1 - .../tools/nsc/transform/TypingTransformers.scala | 2 - src/compiler/scala/tools/nsc/util/ClassPath.scala | 35 ------- .../scala/tools/nsc/util/CommandLineParser.scala | 4 - .../scala/tools/nsc/util/JavaCharArrayReader.scala | 57 +---------- .../scala/tools/nsc/util/ScalaClassLoader.scala | 43 --------- .../scala/tools/nsc/util/SimpleTracer.scala | 1 - src/compiler/scala/tools/nsc/util/package.scala | 20 ---- src/reflect/scala/reflect/internal/Names.scala | 4 +- src/reflect/scala/reflect/internal/TreeGen.scala | 2 +- 52 files changed, 56 insertions(+), 1146 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 5be819c134..663fbeceb0 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -26,7 +26,7 @@ trait CompilationUnits { self: Global => class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { /** the fresh name creator */ - var fresh: FreshNameCreator = new FreshNameCreator.Default + val fresh: FreshNameCreator = new FreshNameCreator.Default def freshTermName(prefix: String): TermName = newTermName(fresh.newName(prefix)) def freshTypeName(prefix: String): TypeName = newTypeName(fresh.newName(prefix)) @@ -36,16 +36,6 @@ trait CompilationUnits { self: Global => def exists = source != NoSourceFile && source != null -// def parseSettings() = { -// val argsmarker = "SCALAC_ARGS" -// if(comments nonEmpty) { -// val pragmas = comments find (_.text.startsWith("//#")) // only parse first one -// pragmas foreach { p => -// val i = p.text.indexOf(argsmarker) -// if(i > 0) -// } -// } -// } /** Note: depends now contains toplevel classes. 
* To get their sourcefiles, you need to dereference with .sourcefile */ @@ -107,18 +97,5 @@ trait CompilationUnits { self: Global => lazy val isJava = source.file.name.endsWith(".java") override def toString() = source.toString() - - def clear() { - fresh = new FreshNameCreator.Default - body = EmptyTree - depends.clear() - defined.clear() - synthetics.clear() - toCheck.clear() - checkedFeatures = Set() - icode.clear() - } } } - - diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index 521f788fa1..f79990d526 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -29,8 +29,6 @@ class StandardCompileServer extends SocketServer { var shutdown = false var verbose = false - val versionMsg = "Fast " + Properties.versionMsg - val MaxCharge = 0.8 private val runtime = Runtime.getRuntime() diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 829e097714..0462e69f74 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -14,9 +14,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { type Setting = Settings#Setting - /** file extensions of files that the compiler can process */ - lazy val fileEndings = Properties.fileEndings - private val processArgumentsResult = if (shouldProcessArguments) processArguments else (true, Nil) @@ -40,8 +37,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { """.stripMargin.trim + "\n" def shortUsage = "Usage: %s " format cmdName - def createUsagePreface(shouldExplain: Boolean) = - if (shouldExplain) shortUsage + "\n" + explainAdvanced else "" /** Creates a help message for a subset of options based on cond */ def createUsageMsg(cond: Setting => Boolean): String = { diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 13bec828ca..1574be8d86 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -70,8 +70,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def this(settings: Settings) = this(settings, new ConsoleReporter(settings)) - def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym) - def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase // platform specific elements @@ -265,15 +263,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg)) def informProgress(msg: String) = if (settings.verbose.value) inform("[" + msg + "]") - def inform[T](msg: String, value: T): T = returning(value)(x => inform(msg + x)) def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start)) def logError(msg: String, t: Throwable): Unit = () - def logAfterEveryPhase[T](msg: String)(op: => T) { - log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op)) - } - override def shouldLogAtThisPhase = settings.log.isSetByUser && ( (settings.log containsPhase globalPhase) || (settings.log containsPhase phase) ) @@ -419,8 +412,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } /** Switch to turn on detailed type logs */ - var printTypings = settings.Ytyperdebug.value - var printInfers = settings.Yinferdebug.value + 
val printTypings = settings.Ytyperdebug.value + val printInfers = settings.Yinferdebug.value // phaseName = "parser" object syntaxAnalyzer extends { @@ -638,13 +631,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - // phaseName = "SAMPLE PHASE" - object sampleTransform extends { - val global: Global.this.type = Global.this - val runsAfter = List[String]() - val runsRightAfter = None - } with SampleTransform - /** The checkers are for validating the compiler data structures * at phase boundaries. */ @@ -778,7 +764,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Returns List of (phase, value) pairs, including only those * where the value compares unequal to the previous phase's value. */ - def afterEachPhase[T](op: => T): List[(Phase, T)] = { + def afterEachPhase[T](op: => T): List[(Phase, T)] = { // used in tests phaseDescriptors.map(_.ownPhase).filterNot(_ eq NoPhase).foldLeft(List[(Phase, T)]()) { (res, ph) => val value = exitingPhase(ph)(op) if (res.nonEmpty && res.head._2 == value) res @@ -786,40 +772,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } reverse } - /** Returns List of ChangeAfterPhase objects, encapsulating those - * phase transitions where the result of the operation gave a different - * list than it had when run during the previous phase. - */ - def changesAfterEachPhase[T](op: => List[T]): List[ChangeAfterPhase[T]] = { - val ops = ((NoPhase, Nil)) :: afterEachPhase(op) - - ops sliding 2 map { - case (_, before) :: (ph, after) :: Nil => - val lost = before filterNot (after contains _) - val gained = after filterNot (before contains _) - ChangeAfterPhase(ph, lost, gained) - case _ => ??? - } toList - } private def numberedPhase(ph: Phase) = "%2d/%s".format(ph.id, ph.name) - case class ChangeAfterPhase[+T](ph: Phase, lost: List[T], gained: List[T]) { - private def mkStr(what: String, xs: List[_]) = ( - if (xs.isEmpty) "" - else xs.mkString(what + " after " + numberedPhase(ph) + " {\n ", "\n ", "\n}\n") - ) - override def toString = mkStr("Lost", lost) + mkStr("Gained", gained) - } - - def describeAfterEachPhase[T](op: => T): List[String] = - afterEachPhase(op) map { case (ph, t) => "[after %-15s] %s".format(numberedPhase(ph), t) } - - def describeAfterEveryPhase[T](op: => T): String = - describeAfterEachPhase(op) map (" " + _ + "\n") mkString - - def printAfterEachPhase[T](op: => T): Unit = - describeAfterEachPhase(op) foreach (m => println(" " + m)) - // ------------ Invalidations --------------------------------- /** Is given package class a system package class that cannot be invalidated? 
@@ -1057,7 +1011,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) @inline final def exitingPostErasure[T](op: => T): T = exitingPhase(currentRun.posterasurePhase)(op) @inline final def exitingExplicitOuter[T](op: => T): T = exitingPhase(currentRun.explicitouterPhase)(op) @inline final def exitingFlatten[T](op: => T): T = exitingPhase(currentRun.flattenPhase)(op) - @inline final def exitingIcode[T](op: => T): T = exitingPhase(currentRun.icodePhase)(op) @inline final def exitingMixin[T](op: => T): T = exitingPhase(currentRun.mixinPhase)(op) @inline final def exitingPickler[T](op: => T): T = exitingPhase(currentRun.picklerPhase)(op) @inline final def exitingRefchecks[T](op: => T): T = exitingPhase(currentRun.refchecksPhase)(op) @@ -1071,21 +1024,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter) @inline final def enteringMixin[T](op: => T): T = enteringPhase(currentRun.mixinPhase)(op) @inline final def enteringPickler[T](op: => T): T = enteringPhase(currentRun.picklerPhase)(op) @inline final def enteringRefchecks[T](op: => T): T = enteringPhase(currentRun.refchecksPhase)(op) - @inline final def enteringSpecialize[T](op: => T): T = enteringPhase(currentRun.specializePhase)(op) @inline final def enteringTyper[T](op: => T): T = enteringPhase(currentRun.typerPhase)(op) @inline final def enteringUncurry[T](op: => T): T = enteringPhase(currentRun.uncurryPhase)(op) - def explainContext(c: analyzer.Context): String = ( - if (c == null) "" else ( - """| context owners: %s - | - |Enclosing block or template: - |%s""".format( - c.owner.ownerChain.takeWhile(!_.isPackageClass).mkString(" -> "), - nodePrinters.nodeToString(c.enclClassOrMethod.tree) - ) - ) - ) // Owners up to and including the first package class. private def ownerChainString(sym: Symbol): String = ( if (sym == null) "" @@ -1098,9 +1039,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) pairs.toList collect { case (k, v) if v != null => "%20s: %s".format(k, v) } mkString "\n" ) - def explainTree(t: Tree): String = formatExplain( - ) - /** Don't want to introduce new errors trying to report errors, * so swallow exceptions. 
*/ @@ -1158,7 +1096,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } def newUnitParser(code: String) = new syntaxAnalyzer.UnitParser(newCompilationUnit(code)) - def newUnitScanner(code: String) = new syntaxAnalyzer.UnitScanner(newCompilationUnit(code)) def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code)) def newSourceFile(code: String) = new BatchSourceFile("", code) @@ -1181,9 +1118,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings) val allConditionalWarnings = List(deprecationWarnings0, uncheckedWarnings0, featureWarnings, inlinerWarnings) - // for sbt's benefit - def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList - def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList + def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList // used in sbt + def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList // used in sbt var reportedFeature = Set[Symbol]() @@ -1350,7 +1286,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val namerPhase = phaseNamed("namer") // val packageobjectsPhase = phaseNamed("packageobjects") val typerPhase = phaseNamed("typer") - val inlineclassesPhase = phaseNamed("inlineclasses") + // val inlineclassesPhase = phaseNamed("inlineclasses") // val superaccessorsPhase = phaseNamed("superaccessors") val picklerPhase = phaseNamed("pickler") val refchecksPhase = phaseNamed("refchecks") @@ -1363,7 +1299,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val erasurePhase = phaseNamed("erasure") val posterasurePhase = phaseNamed("posterasure") // val lazyvalsPhase = phaseNamed("lazyvals") - val lambdaliftPhase = phaseNamed("lambdalift") + // val lambdaliftPhase = phaseNamed("lambdalift") // val constructorsPhase = phaseNamed("constructors") val flattenPhase = phaseNamed("flatten") val mixinPhase = phaseNamed("mixin") @@ -1373,12 +1309,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers") val closelimPhase = phaseNamed("closelim") val dcePhase = phaseNamed("dce") - val jvmPhase = phaseNamed("jvm") + // val jvmPhase = phaseNamed("jvm") // val msilPhase = phaseNamed("msil") def runIsAt(ph: Phase) = globalPhase.id == ph.id - def runIsPast(ph: Phase) = globalPhase.id > ph.id - // def runIsAtBytecodeGen = (runIsAt(jvmPhase) || runIsAt(msilPhase)) def runIsAtOptimiz = { runIsAt(inlinerPhase) || // listing phases in full for robustness when -Ystop-after has been given. runIsAt(inlineExceptionHandlersPhase) || @@ -1743,7 +1677,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // and forScaladoc default to onlyPresentation, which is the same as defaulting // to false except in old code. The downside is that this leaves us calling a // deprecated method: but I see no simple way out, so I leave it for now. 
- def forJVM = settings.target.value startsWith "jvm" + // def forJVM = settings.target.value startsWith "jvm" override def forMSIL = settings.target.value startsWith "msil" def forInteractive = false def forScaladoc = false diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala index 3c75429311..95264aeda6 100644 --- a/src/compiler/scala/tools/nsc/ObjectRunner.scala +++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala @@ -11,11 +11,6 @@ import util.ScalaClassLoader import util.Exceptional.unwrap trait CommonRunner { - /** Check whether a class with the specified name - * exists on the specified class path. */ - def classExists(urls: List[URL], objectName: String): Boolean = - ScalaClassLoader.classExists(urls, objectName) - /** Run a given object, specified by name, using a * specified classpath and argument list. * diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala index aad70a9c5e..1266622b6d 100644 --- a/src/compiler/scala/tools/nsc/Phases.scala +++ b/src/compiler/scala/tools/nsc/Phases.scala @@ -20,7 +20,6 @@ object Phases { } val values = new Array[Cell](MaxPhases + 1) def results = values filterNot (_ == null) - def apply(ph: Phase): T = values(ph.id).value def update(ph: Phase, value: T): Unit = values(ph.id) = Cell(ph, value) } /** A class for recording the elapsed time of each phase in the @@ -38,7 +37,6 @@ object Phases { >> ("ms" -> (_.value)) >+ " " << ("share" -> (_.value.toDouble * 100 / total formatted "%.2f")) } - def formatted = "" + table() } } diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index 55fd196716..570d5572d6 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -21,5 +21,4 @@ object Properties extends scala.util.PropertiesTrait { // derived values def isEmacsShell = propOrEmpty("env.emacs") != "" - def fileEndings = fileEndingString.split("""\|""").toList } diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index 0b307a861e..92b2dc79ed 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -48,8 +48,6 @@ class ScriptRunner extends HasCompileSocket { case x => x } - def isScript(settings: Settings) = settings.script.value != "" - /** Choose a jar filename to hold the compiled version of a script. 
*/ private def jarFileFor(scriptFile: String)= File( if (scriptFile endsWith ".jar") scriptFile diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala index 0414e0f123..b9f348632a 100644 --- a/src/compiler/scala/tools/nsc/ast/Printers.scala +++ b/src/compiler/scala/tools/nsc/ast/Printers.scala @@ -200,91 +200,12 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => override def printTree(tree: Tree) { print(safe(tree)) } } - class TreeMatchTemplate { - // non-trees defined in Trees - // - // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int) - // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position]) - // - def apply(t: Tree): Unit = t match { - // eliminated by typer - case Annotated(annot, arg) => - case AssignOrNamedArg(lhs, rhs) => - case DocDef(comment, definition) => - case Import(expr, selectors) => - - // eliminated by refchecks - case ModuleDef(mods, name, impl) => - case TypeTreeWithDeferredRefCheck() => - - // eliminated by erasure - case TypeDef(mods, name, tparams, rhs) => - case Typed(expr, tpt) => - - // eliminated by cleanup - case ApplyDynamic(qual, args) => - - // eliminated by explicitouter - case Alternative(trees) => - case Bind(name, body) => - case CaseDef(pat, guard, body) => - case Star(elem) => - case UnApply(fun, args) => - - // eliminated by lambdalift - case Function(vparams, body) => - - // eliminated by uncurry - case AppliedTypeTree(tpt, args) => - case CompoundTypeTree(templ) => - case ExistentialTypeTree(tpt, whereClauses) => - case SelectFromTypeTree(qual, selector) => - case SingletonTypeTree(ref) => - case TypeBoundsTree(lo, hi) => - - // survivors - case Apply(fun, args) => - case ArrayValue(elemtpt, trees) => - case Assign(lhs, rhs) => - case Block(stats, expr) => - case ClassDef(mods, name, tparams, impl) => - case DefDef(mods, name, tparams, vparamss, tpt, rhs) => - case EmptyTree => - case Ident(name) => - case If(cond, thenp, elsep) => - case LabelDef(name, params, rhs) => - case Literal(value) => - case Match(selector, cases) => - case New(tpt) => - case PackageDef(pid, stats) => - case Return(expr) => - case Select(qualifier, selector) => - case Super(qual, mix) => - case Template(parents, self, body) => - case This(qual) => - case Throw(expr) => - case Try(block, catches, finalizer) => - case TypeApply(fun, args) => - case TypeTree() => - case ValDef(mods, name, tpt, rhs) => - - // missing from the Trees comment - case Parens(args) => // only used during parsing - case SelectFromArray(qual, name, erasure) => // only used during erasure - } - } - def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value) def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value) def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true) def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer) - def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream)) - def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter)) - def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer) - def newCompactTreePrinter(stream: OutputStream): 
CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream)) - def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter)) override def newTreePrinter(writer: PrintWriter): TreePrinter = if (settings.Ycompacttrees.value) newCompactTreePrinter(writer) diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index 3acefe9441..e8bc932bf5 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -84,16 +84,12 @@ trait TreeDSL { def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectClass.tpe) def ANY_== (other: Tree) = fn(target, Any_==, other) def ANY_!= (other: Tree) = fn(target, Any_!=, other) - def OBJ_== (other: Tree) = fn(target, Object_==, other) def OBJ_!= (other: Tree) = fn(target, Object_!=, other) def OBJ_EQ (other: Tree) = fn(target, Object_eq, other) def OBJ_NE (other: Tree) = fn(target, Object_ne, other) - def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other) - def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other) def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other) def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other) - def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other) // generic operations on ByteClass, IntClass, LongClass def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other) @@ -101,9 +97,6 @@ trait TreeDSL { def GEN_== (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.EQ), other) def GEN_!= (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.NE), other) - def BOOL_&& (other: Tree) = fn(target, Boolean_and, other) - def BOOL_|| (other: Tree) = fn(target, Boolean_or, other) - /** Apply, Select, Match **/ def APPLY(params: Tree*) = Apply(target, params.toList) def APPLY(params: List[Tree]) = Apply(target, params) @@ -129,8 +122,6 @@ trait TreeDSL { def IS(tpe: Type) = gen.mkIsInstanceOf(target, tpe, true) def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, false) - // XXX having some difficulty expressing nullSafe in a way that doesn't freak out value types - // def TOSTRING() = nullSafe(fn(_: Tree, nme.toString_), LIT("null"))(target) def TOSTRING() = fn(target, nme.toString_) def GETCLASS() = fn(target, Object_getClass) } @@ -158,7 +149,6 @@ trait TreeDSL { def mkTree(rhs: Tree): ResultTreeType def ===(rhs: Tree): ResultTreeType - private var _mods: Modifiers = null private var _tpt: Tree = null private var _pos: Position = null @@ -166,19 +156,12 @@ trait TreeDSL { _tpt = TypeTree(tp) this } - def withFlags(flags: Long*): this.type = { - if (_mods == null) - _mods = defaultMods - - _mods = flags.foldLeft(_mods)(_ | _) - this - } def withPos(pos: Position): this.type = { _pos = pos this } - final def mods = if (_mods == null) defaultMods else _mods + final def mods = defaultMods final def tpt = if (_tpt == null) defaultTpt else _tpt final def pos = if (_pos == null) defaultPos else _pos } @@ -243,7 +226,6 @@ trait TreeDSL { } class TryStart(body: Tree, catches: List[CaseDef], fin: Tree) { def CATCH(xs: CaseDef*) = new TryStart(body, xs.toList, fin) - def FINALLY(x: Tree) = Try(body, catches, x) def ENDTRY = Try(body, catches, fin) } @@ -251,16 +233,9 @@ trait TreeDSL { def DEFAULT: CaseStart = new CaseStart(WILD.empty, EmptyTree) class SymbolMethods(target: Symbol) { - def BIND(body: Tree) = Bind(target, body) - def IS_NULL() = REF(target) OBJ_EQ NULL - 
def NOT_NULL() = REF(target) OBJ_NE NULL - - def GET() = fn(REF(target), nme.get) - - // name of nth indexed argument to a method (first parameter list), defaults to 1st - def ARG(idx: Int = 0) = Ident(target.paramss.head(idx)) - def ARGS = target.paramss.head - def ARGNAMES = ARGS map Ident + def IS_NULL() = REF(target) OBJ_EQ NULL + def GET() = fn(REF(target), nme.get) + def ARGS = target.paramss.head } /** Top level accessible. */ @@ -268,32 +243,13 @@ trait TreeDSL { def THROW(sym: Symbol, msg: Tree): Throw = Throw(sym.tpe, msg.TOSTRING()) def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList)) - def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*) - - def DEF(name: Name, tp: Type): DefTreeStart = DEF(name) withType tp - def DEF(name: Name): DefTreeStart = new DefTreeStart(name) def DEF(sym: Symbol): DefSymStart = new DefSymStart(sym) - - def VAL(name: Name, tp: Type): ValTreeStart = VAL(name) withType tp - def VAL(name: Name): ValTreeStart = new ValTreeStart(name) def VAL(sym: Symbol): ValSymStart = new ValSymStart(sym) - def VAR(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.MUTABLE - def VAR(name: Name): ValTreeStart = VAL(name) withFlags Flags.MUTABLE - def VAR(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.MUTABLE - - def LAZYVAL(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.LAZY - def LAZYVAL(name: Name): ValTreeStart = VAL(name) withFlags Flags.LAZY - def LAZYVAL(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.LAZY - def AND(guards: Tree*) = if (guards.isEmpty) EmptyTree else guards reduceLeft gen.mkAnd - def OR(guards: Tree*) = - if (guards.isEmpty) EmptyTree - else guards reduceLeft gen.mkOr - def IF(tree: Tree) = new IfStart(tree, EmptyTree) def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree) def BLOCK(xs: Tree*) = Block(xs.init.toList, xs.last) @@ -311,11 +267,6 @@ trait TreeDSL { case List(tree) if flattenUnary => tree case _ => Apply(TupleClass(trees.length).companionModule, trees: _*) } - def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match { - case Nil => gen.scalaUnitConstr - case List(tree) if flattenUnary => tree - case _ => AppliedTypeTree(REF(TupleClass(trees.length)), trees) - } /** Implicits - some of these should probably disappear **/ implicit def mkTreeMethods(target: Tree): TreeMethods = new TreeMethods(target) diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 983f355c58..1adcf46958 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -62,72 +62,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { // are very picky about things and it crashes the compiler with "unexpected new". Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr) } - // if it's a Match, mark the selector unchecked; otherwise nothing. - def mkUncheckedMatch(tree: Tree) = tree match { - case Match(selector, cases) => atPos(tree.pos)(Match(mkUnchecked(selector), cases)) - case _ => tree - } - - def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) { - // This can't be "Annotated(New(SwitchClass), expr)" because annotations - // are very picky about things and it crashes the compiler with "unexpected new". 
- Annotated(Ident(nme.synthSwitch), expr) - } - - // TODO: would be so much nicer if we would know during match-translation (i.e., type checking) - // whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum - class MatchMatcher { - def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig) - def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig) - def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig) - - def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef) - - def apply(matchExpr: Tree): Tree = matchExpr match { - // old-style match or virtpatmat switch - case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr ) - caseMatch(matchExpr, selector, cases, identity) - // old-style match or virtpatmat switch - case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr ) - caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m)) - // virtpatmat - case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if !settings.XoldPatmat.value => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr ) - caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher) - // optimized version of virtpatmat - case Block(stats, matchEndDef) if !settings.XoldPatmat.value && (stats forall treeInfo.hasSynthCaseSymbol) => - // the assumption is once we encounter a case, the remainder of the block will consist of cases - // the prologue may be empty, usually it is the valdef that stores the scrut - val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) - caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity) - // optimized version of virtpatmat - case Block(outerStats, orig@Block(stats, matchEndDef)) if !settings.XoldPatmat.value && (stats forall treeInfo.hasSynthCaseSymbol) => - val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) - caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m)) - case other => - unknownTree(other) - } - - def unknownTree(t: Tree): Tree = throw new MatchError(t) - def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr) - - def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] = - if (settings.XoldPatmat.value) cases - else cases filter { - case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false - case CaseDef(pat, guard, body) => true - } - } - - def mkCached(cvar: Symbol, expr: Tree): Tree = { - val cvarRef = mkUnattributedRef(cvar) - Block( - List( - If(Apply(Select(cvarRef, nme.eq), List(Literal(Constant(null)))), - Assign(cvarRef, expr), - EmptyTree)), - cvarRef - ) - } // Builds a tree of the form "{ lhs = rhs ; lhs }" def mkAssignAndReturn(lhs: Symbol, rhs: Tree): Tree = { @@ -150,11 +84,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { ValDef(mval) } - // def m: T = { if (m$ eq null) m$ = new m$class(...) m$ } - // where (...) 
are eventual outer accessors - def mkCachedModuleAccessDef(accessor: Symbol, mvar: Symbol) = - DefDef(accessor, mkCached(mvar, newModule(accessor, mvar.tpe))) - def mkModuleAccessDef(accessor: Symbol, msym: Symbol) = DefDef(accessor, Select(This(msym.owner), msym)) @@ -164,10 +93,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { else New(tpe, This(accessor.owner.enclClass)) } - // def m: T; - def mkModuleAccessDcl(accessor: Symbol) = - DefDef(accessor setFlag lateDEFERRED, EmptyTree) - def mkRuntimeCall(meth: Name, args: List[Tree]): Tree = mkRuntimeCall(meth, Nil, args) @@ -264,25 +189,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { else mkCast(tree, pt) - def mkZeroContravariantAfterTyper(tp: Type): Tree = { - // contravariant -- for replacing an argument in a method call - // must use subtyping, as otherwise we miss types like `Any with Int` - val tree = - if (NullClass.tpe <:< tp) Literal(Constant(null)) - else if (UnitClass.tpe <:< tp) Literal(Constant()) - else if (BooleanClass.tpe <:< tp) Literal(Constant(false)) - else if (FloatClass.tpe <:< tp) Literal(Constant(0.0f)) - else if (DoubleClass.tpe <:< tp) Literal(Constant(0.0d)) - else if (ByteClass.tpe <:< tp) Literal(Constant(0.toByte)) - else if (ShortClass.tpe <:< tp) Literal(Constant(0.toShort)) - else if (IntClass.tpe <:< tp) Literal(Constant(0)) - else if (LongClass.tpe <:< tp) Literal(Constant(0L)) - else if (CharClass.tpe <:< tp) Literal(Constant(0.toChar)) - else mkCast(Literal(Constant(null)), tp) - - tree - } - /** Translate names in Select/Ident nodes to type names. */ def convertToTypeName(tree: Tree): Option[RefTree] = tree match { diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala index 97227a5b6e..f53f99a279 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala @@ -6,8 +6,6 @@ package scala.tools.nsc package ast -import scala.reflect.internal.HasFlags - /** This class ... 
* * @author Martin Odersky @@ -38,7 +36,4 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo { case ClassDef(_, `name`, _, _) :: Nil => true case _ => super.firstDefinesClassOrObject(trees, name) } - - def isInterface(mods: HasFlags, body: List[Tree]) = - mods.isTrait && (body forall isInterfaceMember) } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index c934f34398..f430f1fc34 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -9,7 +9,8 @@ package scala.tools.nsc package ast.parser -import scala.collection.mutable.{ListBuffer, StringBuilder} +import scala.collection.{ mutable, immutable } +import mutable.{ ListBuffer, StringBuilder } import scala.reflect.internal.{ ModifierFlags => Flags } import scala.reflect.internal.Chars.{ isScalaLetter } import scala.reflect.internal.util.{ SourceFile, OffsetPosition } @@ -167,7 +168,6 @@ self => object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices val global: self.global.type = self.global - def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix) } def xmlLiteral : Tree = xmlp.xLiteral @@ -463,7 +463,7 @@ self => /* ------------- ERROR HANDLING ------------------------------------------- */ - var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) + val assumedClosingParens = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) private var inFunReturnType = false @inline private def fromWithinReturnType[T](body: => T): T = { @@ -640,8 +640,6 @@ self => case _ => false } - def isTypeIntro: Boolean = isTypeIntroToken(in.token) - def isStatSeqEnd = in.token == RBRACE || in.token == EOF def isStatSep(token: Int): Boolean = @@ -765,10 +763,6 @@ self => } } - def checkSize(kind: String, size: Int, max: Int) { - if (size > max) syntaxError("too many "+kind+", maximum = "+max, false) - } - def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) = if (treeInfo.isLeftAssoc(op) != leftAssoc) syntaxError( @@ -1214,15 +1208,6 @@ self => /* ----------- EXPRESSIONS ------------------------------------------------ */ - /** {{{ - * EqualsExpr ::= `=' Expr - * }}} - */ - def equalsExpr(): Tree = { - accept(EQUALS) - expr() - } - def condExpr(): Tree = { if (in.token == LPAREN) { in.nextToken() @@ -1964,7 +1949,6 @@ self => /** Default entry points into some pattern contexts. */ def pattern(): Tree = noSeq.pattern() - def patterns(): List[Tree] = noSeq.patterns() def seqPatterns(): List[Tree] = seqOK.patterns() def xmlSeqPatterns(): List[Tree] = xmlSeqOK.patterns() // Called from xml parser def argumentPatterns(): List[Tree] = inParens { diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 1be5fb1782..af7f48988f 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -10,7 +10,8 @@ import scala.reflect.internal.util._ import scala.reflect.internal.Chars._ import Tokens._ import scala.annotation.switch -import scala.collection.mutable.{ ListBuffer, ArrayBuffer } +import scala.collection.{ mutable, immutable } +import mutable.{ ListBuffer, ArrayBuffer } import scala.xml.Utility.{ isNameStart } /** See Parsers.scala / ParsersCommon for some explanation of ScannersCommon. 
@@ -26,7 +27,6 @@ trait ScannersCommon { trait ScannerCommon extends CommonTokenData { // things to fill in, in addition to buf, decodeUni which come from CharArrayReader - def warning(off: Int, msg: String): Unit def error (off: Int, msg: String): Unit def incompleteInputError(off: Int, msg: String): Unit def deprecationWarning(off: Int, msg: String): Unit @@ -50,9 +50,6 @@ trait Scanners extends ScannersCommon { /** Offset into source character array */ type Offset = Int - /** An undefined offset */ - val NoOffset: Offset = -1 - trait TokenData extends CommonTokenData { /** the next token */ @@ -88,8 +85,6 @@ trait Scanners extends ScannersCommon { def isAtEnd = charOffset >= buf.length - def flush = { charOffset = offset; nextChar(); this } - def resume(lastCode: Int) = { token = lastCode if (next.token != EMPTY && !reporter.hasErrors) @@ -98,10 +93,6 @@ trait Scanners extends ScannersCommon { nextToken() } - /** the last error offset - */ - var errOffset: Offset = NoOffset - /** A character buffer for literals */ val cbuf = new StringBuilder @@ -1063,7 +1054,6 @@ trait Scanners extends ScannersCommon { def syntaxError(off: Offset, msg: String) { error(off, msg) token = ERROR - errOffset = off } /** generate an error at the current token offset @@ -1076,7 +1066,6 @@ trait Scanners extends ScannersCommon { def incompleteInputError(msg: String) { incompleteInputError(offset, msg) token = EOF - errOffset = offset } override def toString() = token match { @@ -1241,7 +1230,6 @@ trait Scanners extends ScannersCommon { override val decodeUni: Boolean = !settings.nouescape.value // suppress warnings, throw exception on errors - def warning(off: Offset, msg: String): Unit = () def deprecationWarning(off: Offset, msg: String): Unit = () def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg) def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg) @@ -1252,7 +1240,6 @@ trait Scanners extends ScannersCommon { class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) { def this(unit: CompilationUnit) = this(unit, List()) - override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg) override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg) override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg) override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg) @@ -1311,7 +1298,7 @@ trait Scanners extends ScannersCommon { } class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) { - var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) + val balance = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) init() @@ -1433,18 +1420,6 @@ trait Scanners extends ScannersCommon { else bp :: insertPatch(bps, patch) } - def leftColumn(offset: Int) = - if (offset == -1) -1 else column(lineStart(line(offset))) - - def rightColumn(offset: Int, default: Int) = - if (offset == -1) -1 - else { - val rlin = line(offset) - if (lineStart(rlin) == offset) column(offset) - else if (rlin + 1 < lineStart.length) column(lineStart(rlin + 1)) - else default - } - def insertRBrace(): List[BracePatch] = { def insert(bps: List[BracePair]): List[BracePatch] = bps match { case List() => patches @@ -1486,17 +1461,6 @@ trait Scanners extends ScannersCommon { delete(bracePairs) } - def 
imbalanceMeasure: Int = { - def measureList(bps: List[BracePair]): Int = - (bps map measure).sum - def measure(bp: BracePair): Int = - (if (bp.lindent != bp.rindent) 1 else 0) + measureList(bp.nested) - measureList(bracePairs) - } - - def improves(patches1: List[BracePatch]): Boolean = - imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure - override def error(offset: Int, msg: String) {} } } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala index c3fd414426..5a7dc4950d 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala @@ -6,15 +6,11 @@ package scala.tools.nsc package ast.parser -import scala.annotation.switch - /** Common code between JavaTokens and Tokens. Not as much (and not as concrete) * as one might like because JavaTokens for no clear reason chose new numbers for * identical token sets. */ abstract class Tokens { - import scala.reflect.internal.Chars._ - /** special tokens */ final val EMPTY = -3 final val UNDEF = -2 @@ -34,14 +30,6 @@ abstract class Tokens { def isIdentifier(code: Int): Boolean def isLiteral(code: Int): Boolean - def isKeyword(code: Int): Boolean - def isSymbol(code: Int): Boolean - - final def isSpace(at: Char) = at == ' ' || at == '\t' - final def isNewLine(at: Char) = at == CR || at == LF || at == FF - final def isBrace(code: Int) = code >= LPAREN && code <= RBRACE - final def isOpenBrace(code: Int) = isBrace(code) && (code % 2 == 0) - final def isCloseBrace(code: Int) = isBrace(code) && (code % 2 == 1) } object Tokens extends Tokens { @@ -52,20 +40,10 @@ object Tokens extends Tokens { def isLiteral(code: Int) = code >= CHARLIT && code <= INTERPOLATIONID - /** identifiers */ final val IDENTIFIER = 10 final val BACKQUOTED_IDENT = 11 - def isIdentifier(code: Int) = - code >= IDENTIFIER && code <= BACKQUOTED_IDENT - - @switch def canBeginExpression(code: Int) = code match { - case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true - case LBRACE|LPAREN|LBRACKET|COMMENT => true - case IF|DO|WHILE|FOR|NEW|TRY|THROW => true - case NULL|THIS|TRUE|FALSE => true - case code => isLiteral(code) - } + def isIdentifier(code: Int) = code >= IDENTIFIER && code <= BACKQUOTED_IDENT // used by ide /** keywords */ final val IF = 20 @@ -113,17 +91,6 @@ object Tokens extends Tokens { final val MACRO = 62 // not yet used in 2.10 final val THEN = 63 // not yet used in 2.10 - def isKeyword(code: Int) = - code >= IF && code <= LAZY - - @switch def isDefinition(code: Int) = code match { - case CLASS|TRAIT|OBJECT => true - case CASECLASS|CASEOBJECT => true - case DEF|VAL|VAR => true - case TYPE => true - case _ => false - } - /** special symbols */ final val COMMA = 70 final val SEMI = 71 @@ -141,9 +108,6 @@ object Tokens extends Tokens { final val AT = 83 final val VIEWBOUND = 84 - def isSymbol(code: Int) = - code >= COMMA && code <= VIEWBOUND - /** parenthesis */ final val LPAREN = 90 final val RPAREN = 91 diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 49b772ed2c..d6c499d838 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -26,15 +26,11 @@ abstract class TreeBuilder { def o2p(offset: Int): Position def r2p(start: Int, point: Int, end: Int): Position - def rootId(name: Name) = gen.rootId(name) def rootScalaDot(name: Name) = gen.rootScalaDot(name) def 
scalaDot(name: Name) = gen.scalaDot(name) def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) - def scalaAnyValConstr = scalaDot(tpnme.AnyVal) - def scalaAnyConstr = scalaDot(tpnme.Any) def scalaUnitConstr = scalaDot(tpnme.Unit) def productConstr = scalaDot(tpnme.Product) - def productConstrN(n: Int) = scalaDot(newTypeName("Product" + n)) def serializableConstr = scalaDot(tpnme.Serializable) def convertToTypeName(t: Tree) = gen.convertToTypeName(t) @@ -444,18 +440,6 @@ abstract class TreeBuilder { def makeForYield(enums: List[Enumerator], body: Tree): Tree = makeFor(nme.map, nme.flatMap, enums, body) - /** Create tree for a lifted expression XX-LIFTING - */ - def makeLifted(gs: List[ValFrom], body: Tree): Tree = { - def combine(gs: List[ValFrom]): ValFrom = (gs: @unchecked) match { - case g :: Nil => g - case ValFrom(pos1, pat1, rhs1) :: gs2 => - val ValFrom(_, pat2, rhs2) = combine(gs2) - ValFrom(pos1, makeTuple(List(pat1, pat2), false), Apply(Select(rhs1, nme.zip), List(rhs2))) - } - makeForYield(List(combine(gs)), body) - } - /** Create tree for a pattern alternative */ def makeAlternative(ts: List[Tree]): Tree = { def alternatives(t: Tree): List[Tree] = t match { diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala index a3f76994bc..6b72eb12f8 100644 --- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala +++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala @@ -15,9 +15,6 @@ trait BuildManager { /** Add the given source files to the managed build process. */ def addSourceFiles(files: Set[AbstractFile]) - /** Remove the given files from the managed build process. */ - def removeFiles(files: Set[AbstractFile]) - /** The given files have been modified by the user. Recompile * them and their dependent files. 
*/ @@ -71,8 +68,6 @@ object BuildManagerTest extends EvalLoop { val settings = new Settings(buildError) settings.Ybuildmanagerdebug.value = true val command = new CompilerCommand(args.toList, settings) -// settings.make.value = "off" -// val buildManager: BuildManager = new SimpleBuildManager(settings) val buildManager: BuildManager = new RefinedBuildManager(settings) buildManager.addSourceFiles(command.files) diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index 4dedbcfd3d..e4bff1e192 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -395,41 +395,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") if (typerRun != currentTyperRun) demandNewCompilerRun() } - def debugInfo(source : SourceFile, start : Int, length : Int): String = { - println("DEBUG INFO "+source+"/"+start+"/"+length) - val end = start+length - val pos = rangePos(source, start, start, end) - - val tree = locateTree(pos) - val sw = new StringWriter - val pw = new PrintWriter(sw) - newTreePrinter(pw).print(tree) - pw.flush - - val typed = new Response[Tree] - askTypeAt(pos, typed) - val typ = typed.get.left.toOption match { - case Some(tree) => - val sw = new StringWriter - val pw = new PrintWriter(sw) - newTreePrinter(pw).print(tree) - pw.flush - sw.toString - case None => "" - } - - val completionResponse = new Response[List[Member]] - askTypeCompletion(pos, completionResponse) - val completion = completionResponse.get.left.toOption match { - case Some(members) => - members mkString "\n" - case None => "" - } - - source.content.view.drop(start).take(length).mkString+" : "+source.path+" ("+start+", "+end+ - ")\n\nlocateTree:\n"+sw.toString+"\n\naskTypeAt:\n"+typ+"\n\ncompletion:\n"+completion - } - // ----------------- The Background Runner Thread ----------------------- private var threadId = 0 diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index 3a9a878bc2..49a1ff114f 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -36,9 +36,6 @@ class Jar(file: File) extends Iterable[JarEntry] { def this(jfile: JFile) = this(File(jfile)) def this(path: String) = this(File(path)) - protected def errorFn(msg: String): Unit = Console println msg - - lazy val jarFile = new JarFile(file.jfile) lazy val manifest = withJarInput(s => Option(s.getManifest)) def mainClass = manifest map (f => f(Name.MAIN_CLASS)) @@ -124,7 +121,6 @@ object Jar { m } def apply(manifest: JManifest): WManifest = new WManifest(manifest) - implicit def unenrichManifest(x: WManifest): JManifest = x.underlying } class WManifest(manifest: JManifest) { for ((k, v) <- initialMainAttrs) @@ -141,12 +137,7 @@ object Jar { } def apply(name: Attributes.Name): String = attrs(name) - def apply(name: String): String = apply(new Attributes.Name(name)) def update(key: Attributes.Name, value: String) = attrs.put(key, value) - def update(key: String, value: String) = attrs.put(new Attributes.Name(key), value) - - def mainClass: String = apply(Name.MAIN_CLASS) - def mainClass_=(value: String) = update(Name.MAIN_CLASS, value) } // See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html diff --git a/src/compiler/scala/tools/nsc/io/MsilFile.scala b/src/compiler/scala/tools/nsc/io/MsilFile.scala index 1a3a4f5c81..bda13a5ed0 100644 --- a/src/compiler/scala/tools/nsc/io/MsilFile.scala +++ 
b/src/compiler/scala/tools/nsc/io/MsilFile.scala @@ -12,7 +12,4 @@ import ch.epfl.lamp.compiler.msil.{ Type => MsilType } * ClassPath can treat all of JVM/MSIL/bin/src files * uniformly, as AbstractFiles. */ -class MsilFile(val msilType: MsilType) extends VirtualFile(msilType.FullName, msilType.Namespace) { -} - -object NoMsilFile extends MsilFile(null) { } +class MsilFile(val msilType: MsilType) extends VirtualFile(msilType.FullName, msilType.Namespace) { } diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala index 56ff4a57ee..5d32c10143 100644 --- a/src/compiler/scala/tools/nsc/io/Pickler.scala +++ b/src/compiler/scala/tools/nsc/io/Pickler.scala @@ -70,14 +70,6 @@ abstract class Pickler[T] { */ def wrapped [U] (in: T => U)(out: U => T): Pickler[U] = wrappedPickler(this)(in)(out) - /** A pickler obtained from the current pickler by also admitting `null` as - * a handled value, represented as the token `null`. - * - * @param fromNull an implicit evidence parameter ensuring that the type of values - * handled by this pickler contains `null`. - */ - def orNull(implicit fromNull: Null <:< T): Pickler[T] = nullablePickler(this) - /** A conditional pickler obtained from the current pickler. * @param cond the condition to test to find out whether pickler can handle * some Scala value. @@ -92,9 +84,6 @@ abstract class Pickler[T] { } object Pickler { - - var picklerDebugMode = false - /** A base class representing unpickler result. It has two subclasses: * `UnpickleSucess` for successful unpicklings and `UnpickleFailure` for failures, * where a value of the given type `T` could not be unpickled from input. @@ -174,17 +163,6 @@ object Pickler { def ~ [T](y: T): S ~ T = new ~ (x, y) } - /** A converter from binary functions to functions over `~`-pairs - */ - implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) } - - /** An converter from unctions returning Options over pair to functions returning `~`-pairs - * The converted function will raise a `MatchError` where the original function returned - * a `None`. This converter is useful for turning `unapply` methods of case classes - * into wrapper methods that can be passed as second argument to `wrap`. - */ - implicit def toTilde[T1, T2, S](f: S => Option[(T1, T2)]): S => T1 ~ T2 = { x => (f(x): @unchecked) match { case Some((x1, x2)) => x1 ~ x2 } } - /** Same as `p.labelled(label)`. */ def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] { @@ -248,16 +226,6 @@ object Pickler { def unpickle(rd: Lexer) = p.unpickle(rd) orElse qq.unpickle(rd) } - /** Same as `p.orNull` - */ - def nullablePickler[T](p: Pickler[T])(implicit fromNull: Null <:< T): Pickler[T] = new Pickler[T] { - def pickle(wr: Writer, x: T) = - if (x == null) wr.write("null") else p.pickle(wr, x) - def unpickle(rd: Lexer): Unpickled[T] = - if (rd.token == NullLit) nextSuccess(rd, fromNull(null)) - else p.unpickle(rd) - } - /** A conditional pickler for singleton objects. It represents these * with the object's underlying class as a label. * Example: Object scala.None would be represented as `scala.None$()`. 
@@ -329,22 +297,9 @@ object Pickler { implicit val longPickler: Pickler[Long] = tokenPickler("integer literal") { case IntLit(s) => s.toLong } - /** A pickler for values of type `Double`, represented as floating point literals */ - implicit val doublePickler: Pickler[Double] = - tokenPickler("floating point literal") { case FloatLit(s) => s.toDouble } - - /** A pickler for values of type `Byte`, represented as integer literals */ - implicit val bytePickler: Pickler[Byte] = longPickler.wrapped { _.toByte } { _.toLong } - - /** A pickler for values of type `Short`, represented as integer literals */ - implicit val shortPickler: Pickler[Short] = longPickler.wrapped { _.toShort } { _.toLong } - /** A pickler for values of type `Int`, represented as integer literals */ implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong } - /** A pickler for values of type `Float`, represented as floating point literals */ - implicit val floatPickler: Pickler[Float] = doublePickler.wrapped { _.toFloat } { _.toLong } - /** A conditional pickler for the boolean value `true` */ private val truePickler = tokenPickler("boolean literal") { case TrueLit => true } cond { _ == true } @@ -372,52 +327,15 @@ object Pickler { } } - /** A pickler for values of type `Char`, represented as string literals of length 1 */ - implicit val charPickler: Pickler[Char] = - stringPickler - .wrapped { s => require(s.length == 1, "single character string literal expected, but "+quoted(s)+" found"); s(0) } { _.toString } - - /** A pickler for pairs, represented as `~`-pairs */ - implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] = - (pkl[T1] ~ pkl[T2]) - .wrapped { case x1 ~ x2 => (x1, x2) } { case (x1, x2) => x1 ~ x2 } - .labelled ("tuple2") - /** A pickler for 3-tuples, represented as `~`-tuples */ implicit def tuple3Pickler[T1, T2, T3](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3]): Pickler[(T1, T2, T3)] = (p1 ~ p2 ~ p3) .wrapped { case x1 ~ x2 ~ x3 => (x1, x2, x3) } { case (x1, x2, x3) => x1 ~ x2 ~ x3 } .labelled ("tuple3") - /** A pickler for 4-tuples, represented as `~`-tuples */ - implicit def tuple4Pickler[T1, T2, T3, T4](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3], p4: Pickler[T4]): Pickler[(T1, T2, T3, T4)] = - (p1 ~ p2 ~ p3 ~ p4) - .wrapped { case x1 ~ x2 ~ x3 ~ x4 => (x1, x2, x3, x4) } { case (x1, x2, x3, x4) => x1 ~ x2 ~ x3 ~ x4 } - .labelled ("tuple4") - - /** A conditional pickler for the `scala.None` object */ - implicit val nonePickler = singletonPickler(None) - - /** A conditional pickler for instances of class `scala.Some` */ - implicit def somePickler[T: Pickler]: CondPickler[Some[T]] = - pkl[T] - .wrapped { Some(_) } { _.get } - .asClass (classOf[Some[T]]) - - /** A pickler for optional values */ - implicit def optionPickler[T: Pickler]: Pickler[Option[T]] = nonePickler | somePickler[T] - /** A pickler for list values */ implicit def listPickler[T: Pickler]: Pickler[List[T]] = iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List") - - /** A pickler for vector values */ - implicit def vectorPickler[T: Pickler]: Pickler[Vector[T]] = - iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector") - - /** A pickler for array values */ - implicit def array[T : ClassTag : Pickler]: Pickler[Array[T]] = - iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array") } /** A subclass of Pickler can indicate whether a particular value can be pickled by instances diff --git 
a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala index e766c1b2fd..4925c50d85 100644 --- a/src/compiler/scala/tools/nsc/io/Socket.scala +++ b/src/compiler/scala/tools/nsc/io/Socket.scala @@ -28,13 +28,10 @@ object Socket { private val optHandler = handlerFn[Option[T]](_ => None) private val eitherHandler = handlerFn[Either[Throwable, T]](x => Left(x)) - def getOrElse[T1 >: T](alt: T1): T1 = opt getOrElse alt def either: Either[Throwable, T] = try Right(f()) catch eitherHandler def opt: Option[T] = try Some(f()) catch optHandler } - def newIPv4Server(port: Int = 0) = new Box(() => preferringIPv4(new ServerSocket(0))) - def newServer(port: Int = 0) = new Box(() => new ServerSocket(0)) def localhost(port: Int) = apply(InetAddress.getLocalHost(), port) def apply(host: InetAddress, port: Int) = new Box(() => new Socket(new JSocket(host, port))) def apply(host: String, port: Int) = new Box(() => new Socket(new JSocket(host, port))) @@ -62,4 +59,4 @@ class Socket(jsocket: JSocket) extends Streamable.Bytes with Closeable { out.close() } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala index af745eb3e8..ece78db2cf 100644 --- a/src/compiler/scala/tools/nsc/io/SourceReader.scala +++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala @@ -33,9 +33,6 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) { "Please try specifying another one using the -encoding option") } - /** Reads the file with the specified name. */ - def read(filename: String): Array[Char]= read(new JFile(filename)) - /** Reads the specified file. */ def read(file: JFile): Array[Char] = { val c = new FileInputStream(file).getChannel diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala index c29a7c96df..0b2db115fb 100644 --- a/src/compiler/scala/tools/nsc/io/package.scala +++ b/src/compiler/scala/tools/nsc/io/package.scala @@ -20,15 +20,11 @@ package object io { type Path = scala.reflect.io.Path val Path = scala.reflect.io.Path type PlainFile = scala.reflect.io.PlainFile - val PlainFile = scala.reflect.io.PlainFile val Streamable = scala.reflect.io.Streamable type VirtualDirectory = scala.reflect.io.VirtualDirectory type VirtualFile = scala.reflect.io.VirtualFile - val ZipArchive = scala.reflect.io.ZipArchive type ZipArchive = scala.reflect.io.ZipArchive - implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning - type JManifest = java.util.jar.Manifest type JFile = java.io.File @@ -38,23 +34,11 @@ package object io { def runnable(body: => Unit): Runnable = new Runnable { override def run() = body } def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body } def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body) - def submit(runnable: Runnable) = daemonThreadPool submit runnable - // Create, start, and return a daemon thread - def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body) def newThread(f: Thread => Unit)(body: => Unit): Thread = { val thread = new Thread(runnable(body)) f(thread) thread.start thread } - - // Set a timer to execute the given code. 
- def timer(seconds: Int)(body: => Unit): Timer = { - val alarm = new Timer(true) // daemon - val tt = new TimerTask { def run() = body } - - alarm.schedule(tt, seconds * 1000) - alarm - } } diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 2f6c13dd67..73cbeaa6c4 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -35,7 +35,6 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { abstract class JavaParser extends ParserCommon { val in: JavaScanner - protected def posToReport: Int = in.currentPos def freshName(prefix : String): Name protected implicit def i2p(offset : Int) : Position private implicit def p2i(pos : Position): Int = if (pos.isDefined) pos.point else -1 @@ -94,11 +93,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { if (skipIt) skip() } - def warning(msg: String) : Unit = warning(in.currentPos, msg) - def errorTypeTree = TypeTree().setType(ErrorType) setPos in.currentPos - def errorTermTree = Literal(Constant(null)) setPos in.currentPos - def errorPatternTree = blankExpr setPos in.currentPos // --------- tree building ----------------------------- @@ -178,11 +173,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def accept(token: Int): Int = { val pos = in.currentPos if (in.token != token) { - val posToReport = - //if (in.currentPos.line(unit.source).get(0) > in.lastPos.line(unit.source).get(0)) - // in.lastPos - //else - in.currentPos + val posToReport = in.currentPos val msg = JavaScannerConfiguration.token2string(token) + " expected but " + JavaScannerConfiguration.token2string(in.token) + " found." @@ -352,41 +343,6 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { if (in.token == LPAREN) { skipAhead(); accept(RPAREN) } else if (in.token == LBRACE) { skipAhead(); accept(RBRACE) } } -/* - def annotationArg() = { - val pos = in.token - if (in.token == IDENTIFIER && in.lookaheadToken == ASSIGN) { - val name = ident() - accept(ASSIGN) - atPos(pos) { - ValDef(Modifiers(Flags.JAVA), name, TypeTree(), elementValue()) - } - } else { - elementValue() - } - } - - def elementValue(): Tree = - if (in.token == AT) annotation() - else if (in.token == LBRACE) elementValueArrayInitializer() - else expression1() - - def elementValueArrayInitializer() = { - accept(LBRACE) - val buf = new ListBuffer[Tree] - def loop() = - if (in.token != RBRACE) { - buf += elementValue() - if (in.token == COMMA) { - in.nextToken - loop() - } - } - loop() - accept(RBRACE) - buf.toList - } - */ def modifiers(inInterface: Boolean): Modifiers = { var flags: Long = Flags.JAVA diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index e230585a8b..84eee36f18 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -57,23 +57,14 @@ trait JavaScanners extends ast.parser.ScannersCommon { /** ... 
*/ abstract class AbstractJavaScanner extends AbstractJavaTokenData { - implicit def p2g(pos: Position): ScanPosition implicit def g2p(pos: ScanPosition): Position - /** the last error position - */ - var errpos: ScanPosition - var lastPos: ScanPosition - def skipToken: ScanPosition def nextToken(): Unit def next: AbstractJavaTokenData def intVal(negated: Boolean): Long def floatVal(negated: Boolean): Double def intVal: Long = intVal(false) def floatVal: Double = floatVal(false) - //def token2string(token : Int) : String = configuration.token2string(token) - /** return recent scala doc, if any */ - def flushDoc: DocComment def currentPos: Position } @@ -227,17 +218,9 @@ trait JavaScanners extends ast.parser.ScannersCommon { abstract class JavaScanner extends AbstractJavaScanner with JavaTokenData with Cloneable with ScannerCommon { override def intVal = super.intVal// todo: needed? override def floatVal = super.floatVal - override var errpos: Int = NoPos def currentPos: Position = g2p(pos - 1) - var in: JavaCharArrayReader = _ - def dup: JavaScanner = { - val dup = clone().asInstanceOf[JavaScanner] - dup.in = in.dup - dup - } - /** character buffer for literals */ val cbuf = new StringBuilder() @@ -256,12 +239,6 @@ trait JavaScanners extends ast.parser.ScannersCommon { */ var docBuffer: StringBuilder = null - def flushDoc: DocComment = { - val ret = if (docBuffer != null) DocComment(docBuffer.toString, NoPosition) else null - docBuffer = null - ret - } - /** add the given character to the documentation buffer */ protected def putDocChar(c: Char) { @@ -277,13 +254,6 @@ trait JavaScanners extends ast.parser.ScannersCommon { // Get next token ------------------------------------------------------------ - /** read next token and return last position - */ - def skipToken: Int = { - val p = pos; nextToken - p - 1 - } - def nextToken() { if (next.token == EMPTY) { fetchToken() @@ -308,7 +278,6 @@ trait JavaScanners extends ast.parser.ScannersCommon { private def fetchToken() { if (token == EOF) return lastPos = in.cpos - 1 - //var index = bp while (true) { in.ch match { case ' ' | '\t' | CR | LF | FF => @@ -868,7 +837,6 @@ trait JavaScanners extends ast.parser.ScannersCommon { def syntaxError(pos: Int, msg: String) { error(pos, msg) token = ERROR - errpos = pos } /** generate an error at the current token position @@ -879,7 +847,6 @@ trait JavaScanners extends ast.parser.ScannersCommon { def incompleteInputError(msg: String) { incompleteInputError(pos, msg) token = EOF - errpos = pos } override def toString() = token match { @@ -913,16 +880,12 @@ trait JavaScanners extends ast.parser.ScannersCommon { } } - /** ... - */ class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner { in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError) init - def warning(pos: Int, msg: String) = unit.warning(pos, msg) def error (pos: Int, msg: String) = unit. 
error(pos, msg) def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg) def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg) - implicit def p2g(pos: Position): Int = if (pos.isDefined) pos.point else -1 implicit def g2p(pos: Int): Position = new OffsetPosition(unit.source, pos) } } diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala index a562de291d..953a3c6d82 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala @@ -68,9 +68,6 @@ object JavaTokens extends ast.parser.Tokens { final val VOLATILE = 68 final val WHILE = 69 - def isKeyword(code : Int) = - code >= ABSTRACT && code <= WHILE - /** special symbols */ final val COMMA = 70 final val SEMI = 71 @@ -115,9 +112,6 @@ object JavaTokens extends ast.parser.Tokens { final val GTGTEQ = 113 final val GTGTGTEQ = 114 - def isSymbol(code : Int) = - code >= COMMA && code <= GTGTGTEQ - /** parenthesis */ final val LPAREN = 115 final val RPAREN = 116 diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala index 5ce1aabcd8..3c26997cfe 100644 --- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala +++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala @@ -22,9 +22,6 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => def impossible: Nothing = abort("this never happens") - def treeCollect[T](tree: Tree, pf: PartialFunction[Tree, T]): List[T] = - tree filter (pf isDefinedAt _) map (x => pf(x)) - object Types { import definitions._ @@ -36,24 +33,12 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => // These tests for final classes can inspect the typeSymbol private def is(s: Symbol) = tpe.typeSymbol eq s - def isByte = is(ByteClass) - def isShort = is(ShortClass) def isInt = is(IntClass) - def isChar = is(CharClass) - def isBoolean = is(BooleanClass) def isNothing = is(NothingClass) - def isArray = is(ArrayClass) } } object Debug { - def typeToString(t: Type): String = t match { - case NoType => "x" - case x => x.toString - } - def symbolToString(s: Symbol): String = s match { - case x => x.toString - } def treeToString(t: Tree): String = treeInfo.unbind(t) match { case EmptyTree => "?" 
case WILD() => "_" @@ -66,10 +51,6 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => // Formatting for some error messages private val NPAD = 15 def pad(s: String): String = "%%%ds" format (NPAD-1) format s - def pad(s: Any): String = pad(s match { - case x: Tree => treeToString(x) - case x => x.toString - }) // pretty print for debugging def pp(x: Any): String = pp(x, false) @@ -117,7 +98,6 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => else x } - def indent(s: Any) = s.toString() split "\n" map (" " + _) mkString "\n" def indentAll(s: Seq[Any]) = s map (" " + _.toString() + "\n") mkString } diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala index d2f5a98411..ba966acf34 100644 --- a/src/compiler/scala/tools/nsc/matching/Matrix.scala +++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala @@ -140,7 +140,6 @@ trait Matrix extends MatrixAdditions { cases: List[CaseDef], default: Tree ) { - def tvars = roots map (_.lhs) def valDefs = roots map (_.valDef) override def toString() = "MatrixInit(roots = %s, %d cases)".format(pp(roots), cases.size) } @@ -151,27 +150,12 @@ trait Matrix extends MatrixAdditions { object PatternVarGroup { def apply(xs: PatternVar*) = new PatternVarGroup(xs.toList) def apply(xs: List[PatternVar]) = new PatternVarGroup(xs) - - // XXX - transitional - def fromBindings(vlist: List[Binding], freeVars: List[Symbol] = Nil) = { - def vmap(v: Symbol): Option[Binding] = vlist find (_.pvar eq v) - val info = - if (freeVars.isEmpty) vlist - else (freeVars map vmap).flatten - - val xs = - for (Binding(lhs, rhs) <- info) yield - new PatternVar(lhs, Ident(rhs) setType lhs.tpe, !(rhs hasFlag NO_EXHAUSTIVE)) - - new PatternVarGroup(xs) - } } val emptyPatternVarGroup = PatternVarGroup() class PatternVarGroup(val pvs: List[PatternVar]) { def syms = pvs map (_.sym) def valDefs = pvs map (_.valDef) - def idents = pvs map (_.ident) def extractIndex(index: Int): (PatternVar, PatternVarGroup) = { val (t, ts) = self.extractIndex(pvs, index) @@ -180,16 +164,11 @@ trait Matrix extends MatrixAdditions { def isEmpty = pvs.isEmpty def size = pvs.size - def head = pvs.head - def ::(t: PatternVar) = PatternVarGroup(t :: pvs) def :::(ts: List[PatternVar]) = PatternVarGroup(ts ::: pvs) - def ++(other: PatternVarGroup) = PatternVarGroup(pvs ::: other.pvs) def apply(i: Int) = pvs(i) def zipWithIndex = pvs.zipWithIndex def indices = pvs.indices - def map[T](f: PatternVar => T) = pvs map f - def filter(p: PatternVar => Boolean) = PatternVarGroup(pvs filter p) override def toString() = pp(pvs) } @@ -237,12 +216,6 @@ trait Matrix extends MatrixAdditions { tracing("create")(new PatternVar(lhs, rhs, checked)) } - def createLazy(tpe: Type, f: Symbol => Tree, checked: Boolean) = { - val lhs = newVar(owner.pos, tpe, Flags.LAZY :: flags(checked)) - val rhs = f(lhs) - - tracing("createLazy")(new PatternVar(lhs, rhs, checked)) - } private def newVar( pos: Position, diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala index ea4d9cd3f4..b5e25f3809 100644 --- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala +++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala @@ -126,7 +126,7 @@ trait ParallelMatching extends ast.TreeDSL // for propagating "unchecked" to synthetic vars def isChecked = !(sym hasFlag NO_EXHAUSTIVE) - def flags: List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _) + // def flags: 
List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _) // this is probably where this actually belongs def createVar(tpe: Type, f: Symbol => Tree) = context.createVar(tpe, f, isChecked) @@ -170,7 +170,7 @@ trait ParallelMatching extends ast.TreeDSL case class PatternMatch(scrut: Scrutinee, ps: List[Pattern]) { def head = ps.head def tail = ps.tail - def size = ps.length + // def size = ps.length def headType = head.necessaryType private val dummyCount = if (head.isCaseClass) headType.typeSymbol.caseFieldAccessors.length else 0 @@ -576,7 +576,7 @@ trait ParallelMatching extends ast.TreeDSL (_ys.toList, _ns.toList) } - val moreSpecific = yeses map (_.moreSpecific) + // val moreSpecific = yeses map (_.moreSpecific) val subsumed = yeses map (x => (x.bx, x.subsumed)) val remaining = noes map (x => (x.bx, x.remaining)) diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala index 3ff5ce83bb..c6fa6f6ba0 100644 --- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala +++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala @@ -59,10 +59,6 @@ trait PatternBindings extends ast.TreeDSL trait PatternBindingLogic { self: Pattern => - // This is for traversing the pattern tree - pattern types which might have - // bound variables beneath them return a list of said patterns for flatMapping. - def subpatternsForVars: List[Pattern] = Nil - // The outermost Bind(x1, Bind(x2, ...)) surrounding the tree. private var _boundTree: Tree = tree def boundTree = _boundTree @@ -113,9 +109,6 @@ trait PatternBindings extends ast.TreeDSL } class Bindings(private val vlist: List[Binding]) { - // if (!vlist.isEmpty) - // traceCategory("Bindings", this.toString) - def get() = vlist def toMap = vlist map (x => (x.pvar, x.tvar)) toMap diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala index e92c43f1fd..df536da108 100644 --- a/src/compiler/scala/tools/nsc/matching/Patterns.scala +++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala @@ -32,9 +32,6 @@ trait Patterns extends ast.TreeDSL { // An empty pattern def NoPattern = WildcardPattern() - // The constant null pattern - def NullPattern = LiteralPattern(NULL) - // The Nil pattern def NilPattern = Pattern(gen.mkNil) @@ -60,7 +57,6 @@ trait Patterns extends ast.TreeDSL { override def covers(sym: Symbol) = newMatchesPattern(sym, tpt.tpe) override def sufficientType = tpt.tpe - override def subpatternsForVars: List[Pattern] = List(Pattern(expr)) override def simplify(pv: PatternVar) = Pattern(expr) match { case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr case _ => this @@ -140,10 +136,6 @@ trait Patterns extends ast.TreeDSL { require(fn.isType && this.isCaseClass, "tree: " + tree + " fn: " + fn) def name = tpe.typeSymbol.name def cleanName = tpe.typeSymbol.decodedName - def hasPrefix = tpe.prefix.prefixString != "" - def prefixedName = - if (hasPrefix) "%s.%s".format(tpe.prefix.prefixString, cleanName) - else cleanName private def isColonColon = cleanName == "::" @@ -222,15 +214,13 @@ trait Patterns extends ast.TreeDSL { // 8.1.8 (b) (literal ArrayValues) case class SequencePattern(tree: ArrayValue) extends Pattern with SequenceLikePattern { - lazy val ArrayValue(elemtpt, elems) = tree + lazy val ArrayValue(_, elems) = tree - override def subpatternsForVars: List[Pattern] = elemPatterns override def description = "Seq(%s)".format(elemPatterns mkString ", ") } // 8.1.8 (c) case class 
StarPattern(tree: Star) extends Pattern { - lazy val Star(elem) = tree override def description = "_*" } // XXX temporary? @@ -384,15 +374,7 @@ trait Patterns extends ast.TreeDSL { // Covers if the symbol matches the unapply method's argument type, // and the return type of the unapply is Some. override def covers(sym: Symbol) = newMatchesPattern(sym, arg.tpe) - - // TODO: for alwaysCovers: - // fn.tpe.finalResultType.typeSymbol == SomeClass - override def necessaryType = arg.tpe - override def subpatternsForVars = args match { - case List(ArrayValue(elemtpe, elems)) => toPats(elems) - case _ => toPats(args) - } def resTypes = analyzer.unapplyTypeList(unfn.symbol, unfn.tpe, args.length) def resTypesString = resTypes match { @@ -403,13 +385,7 @@ trait Patterns extends ast.TreeDSL { sealed trait ApplyPattern extends Pattern { lazy val Apply(fn, args) = tree - override def subpatternsForVars: List[Pattern] = toPats(args) - - override def dummies = - if (!this.isCaseClass) Nil - else emptyPatterns(sufficientType.typeSymbol.caseFieldAccessors.size) - def isConstructorPattern = fn.isType override def covers(sym: Symbol) = newMatchesPattern(sym, fn.tpe) } @@ -419,9 +395,6 @@ trait Patterns extends ast.TreeDSL { // returns either a simplification of this pattern or identity. def simplify(pv: PatternVar): Pattern = this - // the right number of dummies for this pattern - def dummies: List[Pattern] = Nil - // Is this a default pattern (untyped "_" or an EmptyTree inserted by the matcher) def isDefault = false @@ -451,14 +424,8 @@ trait Patterns extends ast.TreeDSL { def isModule = sym.isModule || tpe.termSymbol.isModule def isCaseClass = tpe.typeSymbol.isCase def isObject = (sym != null) && (sym != NoSymbol) && tpe.prefix.isStable // XXX not entire logic - def hasStar = false - def setType(tpe: Type): this.type = { - tree setType tpe - this - } - def equalsCheck = tracing("equalsCheck")( if (sym.isValue) singleType(NoPrefix, sym) @@ -475,7 +442,6 @@ trait Patterns extends ast.TreeDSL { final override def toString = description - def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType) def kindString = "" } diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala index e965370713..4727e6d867 100644 --- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala @@ -47,8 +47,6 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { } }) - implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered - trait AbsSetting extends Ordered[Setting] with AbsSettingValue { def name: String def helpDescription: String @@ -83,14 +81,6 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { this } - /** If the appearance of the setting should halt argument processing. */ - private var isTerminatorSetting = false - def shouldStopProcessing = isTerminatorSetting - def stopProcessing(): this.type = { - isTerminatorSetting = true - this - } - /** Issue error and return */ def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x } @@ -110,6 +100,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings { /** Attempt to set from a properties file style property value. * Currently used by Eclipse SDT only. + * !!! Needs test. 
*/ def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil) diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 4f4f0544da..748c6069f0 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -176,7 +176,7 @@ class MutableSettings(val errorFn: String => Unit) * The class loader defining `T` should provide resources `app.class.path` * and `boot.class.path`. These resources should contain the application * and boot classpaths in the same form as would be passed on the command line.*/ - def embeddedDefaults[T: ClassTag]: Unit = + def embeddedDefaults[T: ClassTag]: Unit = // called from sbt and repl embeddedDefaults(classTag[T].runtimeClass.getClassLoader) /** Initializes these settings for embedded use by a class from the given class loader. @@ -239,7 +239,7 @@ class MutableSettings(val errorFn: String => Unit) /** Add a destination directory for sources found under srcdir. * Both directories should exits. */ - def add(srcDir: String, outDir: String): Unit = + def add(srcDir: String, outDir: String): Unit = // used in ide? add(checkDir(AbstractFile.getDirectory(srcDir), srcDir), checkDir(AbstractFile.getDirectory(outDir), outDir)) @@ -434,7 +434,7 @@ class MutableSettings(val errorFn: String => Unit) def tryToSet(args: List[String]) = { value = true ; Some(args) } def unparse: List[String] = if (value) List(name) else Nil - override def tryToSetFromPropertyValue(s : String) { + override def tryToSetFromPropertyValue(s : String) { // used from ide value = s.equalsIgnoreCase("true") } } @@ -527,7 +527,7 @@ class MutableSettings(val errorFn: String => Unit) Some(rest) } override def tryToSetColon(args: List[String]) = tryToSet(args) - override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) + override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide def unparse: List[String] = value map (name + ":" + _) withHelpSyntax(name + ":<" + arg + ">") @@ -561,7 +561,7 @@ class MutableSettings(val errorFn: String => Unit) } def unparse: List[String] = if (value == default) Nil else List(name + ":" + value) - override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil) + override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil) // used from ide withHelpSyntax(name + ":<" + helpArg + ">") } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 8dce48ee9a..c54745f6c6 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -52,14 +52,14 @@ trait ScalaSettings extends AbsScalaSettings val jvmargs = PrefixSetting("-J", "-J", "Pass directly to the runtime system.") val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.") - val toolcp = PathSetting("-toolcp", "Add to the runner classpath.", "") + /*val toolcp =*/ PathSetting("-toolcp", "Add to the runner classpath.", "") val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.") /** * Standard settings */ // argfiles is only for the help message - val argfiles = BooleanSetting ("@", "A text file containing compiler arguments (options and source files)") + /*val argfiles = */ BooleanSetting ("@", "A text file containing compiler 
arguments (options and source files)") val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp" val d = OutputSetting (outputDirs, ".") val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.") @@ -114,7 +114,6 @@ trait ScalaSettings extends AbsScalaSettings /** Compatibility stubs for options whose value name did * not previously match the option name. */ - def XO = optimise def debuginfo = g def dependenciesFile = dependencyfile def nowarnings = nowarn @@ -180,12 +179,9 @@ trait ScalaSettings extends AbsScalaSettings val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly() - def stop = stopAfter - /** Area-specific debug output. */ val Ybuildmanagerdebug = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.") - val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.") val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.") val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.") val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.") diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index e866ad6ae0..98ef74aee3 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -48,9 +48,4 @@ trait StandardScalaSettings { val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.") val verbose = BooleanSetting ("-verbose", "Output messages about what the compiler is doing.") val version = BooleanSetting ("-version", "Print product version and exit.") - - /** These are @ and -Dkey=val style settings, which don't - * nicely map to identifiers. - */ - val argfiles: BooleanSetting // exists only to echo help message, should be done differently } diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index d6d77278ab..2649a150ad 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -26,7 +26,6 @@ trait Warnings { // These warnings should be pretty quiet unless you're doing // something inadvisable. protected def lintWarnings = List( - // warnDeadCode, warnInaccessible, warnNullaryOverride, warnNullaryUnit, @@ -39,9 +38,13 @@ trait Warnings { BooleanSetting("-Xlint", "Enable recommended additional warnings.") withPostSetHook (_ => lintWarnings foreach (_.value = true)) ) - val warnEverything = ( + + /*val warnEverything = */ ( BooleanSetting("-Ywarn-all", "Enable all -Y warnings.") - withPostSetHook (_ => lintWarnings foreach (_.value = true)) + withPostSetHook { _ => + lint.value = true + allWarnings foreach (_.value = true) + } ) // Individual warnings. @@ -57,7 +60,7 @@ trait Warnings { val warnInferAny = BooleanSetting ("-Ywarn-infer-any", "Warn when a type argument is inferred to be `Any`.") // Backward compatibility. 
- def Xwarnfatal = fatalWarnings - def Xchecknull = warnSelectNullable - def Ywarndeadcode = warnDeadCode + @deprecated("Use fatalWarnings", "2.11.0") def Xwarnfatal = fatalWarnings // used by sbt + @deprecated("Use warnSelectNullable", "2.11.0") def Xchecknull = warnSelectNullable // used by ide + @deprecated("Use warnDeadCode", "2.11.0") def Ywarndeadcode = warnDeadCode // used by ide } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala index 427b5bf887..17e3b08ec2 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala @@ -29,11 +29,6 @@ class AbstractFileReader(val file: AbstractFile) { */ var bp: Int = 0 - /** return byte at offset 'pos' - */ - @throws(classOf[IndexOutOfBoundsException]) - def byteAt(pos: Int): Byte = buf(pos) - /** read a byte */ @throws(classOf[IndexOutOfBoundsException]) @@ -45,7 +40,7 @@ class AbstractFileReader(val file: AbstractFile) { /** read some bytes */ - def nextBytes(len: Int): Array[Byte] = { + def nextBytes(len: Int): Array[Byte] = { // used in ide bp += len buf.slice(bp - len, bp) } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 67f6c3ec5d..6d213af2b6 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1169,20 +1169,6 @@ abstract class ClassfileParser { } object innerClasses extends mutable.HashMap[Name, InnerClassEntry] { - /** Return the Symbol of the top level class enclosing `name`, - * or 'name's symbol if no entry found for `name`. - */ - def topLevelClass(name: Name): Symbol = { - val tlName = if (isDefinedAt(name)) { - var entry = this(name) - while (isDefinedAt(entry.outerName)) - entry = this(entry.outerName) - entry.outerName - } else - name - classNameToSymbol(tlName) - } - /** Return the class symbol for `externalName`. It looks it up in its outer class. * Forces all outer class symbols to be completed. 
* diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index b7511377cc..b5459ec773 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -632,9 +632,9 @@ abstract class ICodeReader extends ClassfileParser { else instanceCode class LinearCode { - var instrs: ListBuffer[(Int, Instruction)] = new ListBuffer - var jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]() - var locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap() + val instrs: ListBuffer[(Int, Instruction)] = new ListBuffer + val jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]() + val locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap() var containsDUPX = false var containsNEW = false diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 9a8db447c3..42ea7e61f0 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -999,112 +999,6 @@ abstract class Pickler extends SubComponent { patchNat(startpos + 1, writeIndex - (startpos + 2)) } - /** Print entry for diagnostics */ - def printEntryAtIndex(idx: Int) = printEntry(entries(idx)) - def printEntry(entry: AnyRef) { - def printRef(ref: AnyRef) { - print(index(ref)+ - (if (ref.isInstanceOf[Name]) "("+ref+") " else " ")) - } - def printRefs(refs: List[AnyRef]) { refs foreach printRef } - def printSymInfo(sym: Symbol) { - printRef(sym.name) - printRef(localizedOwner(sym)) - print(flagsToString(sym.flags & PickledFlags)+" ") - if (sym.hasAccessBoundary) printRef(sym.privateWithin) - printRef(sym.info) - } - def printBody(entry: AnyRef) = entry match { - case name: Name => - print((if (name.isTermName) "TERMname " else "TYPEname ")+name) - case NoSymbol => - print("NONEsym") - case sym: Symbol if !isLocal(sym) => - if (sym.isModuleClass) { - print("EXTMODCLASSref "); printRef(sym.name.toTermName) - } else { - print("EXTref "); printRef(sym.name) - } - if (!sym.owner.isRoot) printRef(sym.owner) - case sym: ClassSymbol => - print("CLASSsym ") - printSymInfo(sym) - if (sym.thisSym.tpe != sym.tpe) printRef(sym.typeOfThis) - case sym: TypeSymbol => - print(if (sym.isAbstractType) "TYPEsym " else "ALIASsym ") - printSymInfo(sym) - case sym: TermSymbol => - print(if (sym.isModule) "MODULEsym " else "VALsym ") - printSymInfo(sym) - if (sym.alias != NoSymbol) printRef(sym.alias) - case NoType => - print("NOtpe") - case NoPrefix => - print("NOPREFIXtpe") - case ThisType(sym) => - print("THIStpe "); printRef(sym) - case SingleType(pre, sym) => - print("SINGLEtpe "); printRef(pre); printRef(sym); - case ConstantType(value) => - print("CONSTANTtpe "); printRef(value); - case TypeRef(pre, sym, args) => - print("TYPEREFtpe "); printRef(pre); printRef(sym); printRefs(args); - case TypeBounds(lo, hi) => - print("TYPEBOUNDStpe "); printRef(lo); printRef(hi); - case tp @ RefinedType(parents, decls) => - print("REFINEDtpe "); printRef(tp.typeSymbol); printRefs(parents); - case ClassInfoType(parents, decls, clazz) => - print("CLASSINFOtpe "); printRef(clazz); printRefs(parents); - case mt @ MethodType(formals, restpe) => - print("METHODtpe"); printRef(restpe); printRefs(formals) - case PolyType(tparams, restpe) => - print("POLYtpe "); printRef(restpe); printRefs(tparams); - case ExistentialType(tparams, 
restpe) => - print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams); - print("||| "+entry) - case c @ Constant(_) => - print("LITERAL ") - if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0)) - else if (c.tag == ByteTag) print("Byte "+c.longValue) - else if (c.tag == ShortTag) print("Short "+c.longValue) - else if (c.tag == CharTag) print("Char "+c.longValue) - else if (c.tag == IntTag) print("Int "+c.longValue) - else if (c.tag == LongTag) print("Long "+c.longValue) - else if (c.tag == FloatTag) print("Float "+c.floatValue) - else if (c.tag == DoubleTag) print("Double "+c.doubleValue) - else if (c.tag == StringTag) { print("String "); printRef(newTermName(c.stringValue)) } - else if (c.tag == ClazzTag) { print("Class "); printRef(c.typeValue) } - else if (c.tag == EnumTag) { print("Enum "); printRef(c.symbolValue) } - case AnnotatedType(annots, tp, selfsym) => - if (settings.selfInAnnots.value) { - print("ANNOTATEDWSELFtpe ") - printRef(tp) - printRef(selfsym) - printRefs(annots) - } else { - print("ANNOTATEDtpe ") - printRef(tp) - printRefs(annots) - } - case (target: Symbol, AnnotationInfo(atp, args, Nil)) => - print("SYMANNOT ") - printRef(target) - printRef(atp) - for (c <- args) printRef(c) - case (target: Symbol, children: List[_]) => - print("CHILDREN ") - printRef(target) - for (c <- children) printRef(c.asInstanceOf[Symbol]) - case AnnotationInfo(atp, args, Nil) => - print("ANNOTINFO") - printRef(atp) - for (c <- args) printRef(c) - case _ => - throw new FatalError("bad entry: " + entry + " " + entry.getClass) - } - printBody(entry); println() - } - /** Write byte array */ def writeArray() { assert(writeIndex == 0) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 78fb725041..7eb0b4e12c 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -184,13 +184,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - /** Returns the generic class that was specialized to 'sClass', or - * 'sClass' itself if sClass is not a specialized subclass. - */ - def genericClass(sClass: Symbol): Symbol = - if (sClass.isSpecialized) sClass.superClass - else sClass - case class Overload(sym: Symbol, env: TypeEnv) { override def toString = "specialized overload " + sym + " in " + env def matchesSym(sym1: Symbol) = sym.info =:= sym1.info @@ -222,8 +215,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * type bounds of other @specialized type parameters (and not in its result type). */ def degenerate = false - - def isAccessor = false } /** Symbol is a special overloaded method of 'original', in the environment env. */ @@ -247,9 +238,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } /** Symbol is a specialized accessor for the `target` field. */ - case class SpecializedAccessor(target: Symbol) extends SpecializedInfo { - override def isAccessor = true - } + case class SpecializedAccessor(target: Symbol) extends SpecializedInfo { } /** Symbol is a specialized method whose body should be the target's method body. 
*/ case class Implementation(target: Symbol) extends SpecializedInfo @@ -288,9 +277,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def specializedParams(sym: Symbol): List[Symbol] = sym.info.typeParams filter (_.isSpecialized) - def splitParams(tps: List[Symbol]) = - tps partition (_.isSpecialized) - /** Given an original class symbol and a list of types its type parameters are instantiated at * returns a list of type parameters that should remain in the TypeRef when instantiating a * specialized type. @@ -1062,7 +1048,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (isPrimitiveValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1)) env + ((sym1, tp2)) else if (isSpecializedAnyRefSubtype(tp2, sym1)) - env + ((sym1, tp2)) // env + ((sym1, AnyRefClass.tpe)) + env + ((sym1, tp2)) else if (strict) unifyError(tp1, tp2) else @@ -1185,7 +1171,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * * A conflicting type environment could still be satisfiable. */ - def conflicting(env: TypeEnv) = !nonConflicting(env) def nonConflicting(env: TypeEnv) = env forall { case (tvar, tpe) => (subst(env, tvar.info.bounds.lo) <:< tpe) && (tpe <:< subst(env, tvar.info.bounds.hi)) } @@ -1668,7 +1653,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val symbol = tree.symbol debuglog("specializing body of" + symbol.defString) val DefDef(_, _, tparams, vparams :: Nil, tpt, _) = tree -// val (_, origtparams) = splitParams(source.typeParams) val env = typeEnv(symbol) val boundTvars = env.keySet val origtparams = source.typeParams.filter(tparam => !boundTvars(tparam) || !isPrimitiveValueType(env(tparam))) @@ -1865,11 +1849,4 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { resultTree } } - - def printSpecStats() { - println(" concreteSpecMembers: %7d".format(concreteSpecMethods.size)) - println(" overloads: %7d".format(overloads.size)) - println(" typeEnv: %7d".format(typeEnv.size)) - println(" info: %7d".format(info.size)) - } } diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index 488b8aad4e..6ab99eaec6 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -147,7 +147,6 @@ abstract class TailCalls extends Transform { } def enclosingType = method.enclClass.typeOfThis - def methodTypeParams = method.tpe.typeParams def isEligible = method.isEffectivelyFinal // @tailrec annotation indicates mandatory transformation def isMandatory = method.hasAnnotation(TailrecClass) && !forMSIL diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala index b7da0e0087..73f39225bd 100644 --- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala +++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala @@ -21,13 +21,11 @@ trait TypingTransformers { else analyzer.newTyper(analyzer.rootContext(unit, EmptyTree, true)) protected var curTree: Tree = _ - protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) } override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans) def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = { val savedLocalTyper = localTyper -// println("transformer atOwner: " + owner + " isPackage? 
" + owner.isPackage) localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner) val result = super.atOwner(owner)(trans) localTyper = savedLocalTyper diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index 0c49b9b8e7..ee66801c45 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -49,22 +49,6 @@ object ClassPath { /** Split the classpath, apply a transformation function, and reassemble it. */ def map(cp: String, f: String => String): String = join(split(cp) map f: _*) - /** Split the classpath, filter according to predicate, and reassemble. */ - def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*) - - /** Split the classpath and map them into Paths */ - def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute) - - /** Make all classpath components absolute. */ - def makeAbsolute(cp: String): String = fromPaths(toPaths(cp): _*) - - /** Join the paths as a classpath */ - def fromPaths(paths: Path*): String = join(paths map (_.path): _*) - def fromURLs(urls: URL*): String = fromPaths(urls map (x => Path(x.getPath)) : _*) - - /** Split the classpath and map them into URLs */ - def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL) - /** Expand path and possibly expanding stars */ def expandPath(path: String, expandStar: Boolean = true): List[String] = if (expandStar) split(path) flatMap expandS @@ -124,13 +108,6 @@ object ClassPath { for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield newClassPath(entry) - def classesAtAllURLS(path: String): List[ClassPath[T]] = - (path split " ").toList flatMap classesAtURL - - def classesAtURL(spec: String) = - for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield - newClassPath(location) - def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] = classesInPathImpl(path, true).toIndexedSeq @@ -399,15 +376,3 @@ class JavaClassPath( containers: IndexedSeq[ClassPath[AbstractFile]], context: JavaContext) extends MergedClassPath[AbstractFile](containers, context) { } - -object JavaClassPath { - def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = { - val containers = { - for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield - new DirectoryClassPath(f, context) - } - new JavaClassPath(containers.toIndexedSeq, context) - } - def fromURLs(urls: Seq[URL]): JavaClassPath = - fromURLs(urls, ClassPath.DefaultJavaContext) -} diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala index 81c1b1d37a..e8f962a9e2 100644 --- a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala +++ b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala @@ -21,7 +21,6 @@ import scala.collection.mutable.ListBuffer trait ParserUtil extends Parsers { protected implicit class ParserPlus[+T](underlying: Parser[T]) { def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b } - def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! 
p) ^^ { case a~b => a } } } @@ -37,7 +36,6 @@ case class CommandLine( def withUnaryArgs(xs: List[String]) = copy(unaryArguments = xs) def withBinaryArgs(xs: List[String]) = copy(binaryArguments = xs) - def originalArgs = args def assumeBinary = true def enforceArity = true def onlyKnownOptions = false @@ -105,7 +103,6 @@ case class CommandLine( def isSet(arg: String) = args contains arg def get(arg: String) = argMap get arg - def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse def apply(arg: String) = argMap(arg) override def toString() = "CommandLine(\n%s)\n" format (args map (" " + _ + "\n") mkString) @@ -115,7 +112,6 @@ object CommandLineParser extends RegexParsers with ParserUtil { override def skipWhitespace = false def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x)) - def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _) def escaped(ch: Char): Parser[String] = "\\" + ch def mkQuoted(ch: Char): Parser[String] = ( elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString) diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala index b7ed7903bc..fc3dd2bac2 100644 --- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala +++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala @@ -14,74 +14,32 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, def this(buf: IndexedSeq[Char], decodeUni: Boolean, error: String => Unit) = this(buf, 0, /* 1, 1, */ decodeUni, error) - /** produce a duplicate of this char array reader which starts reading - * at current position, independent of what happens to original reader - */ - def dup: JavaCharArrayReader = clone().asInstanceOf[JavaCharArrayReader] - - /** layout constant - */ - val tabinc = 8 - /** the line and column position of the current character */ var ch: Char = _ var bp = start - var oldBp = -1 - var oldCh: Char = _ - - //private var cline: Int = _ - //private var ccol: Int = _ def cpos = bp var isUnicode: Boolean = _ - var lastLineStartPos: Int = 0 - var lineStartPos: Int = 0 - var lastBlankLinePos: Int = 0 - - private var onlyBlankChars = false - //private var nextline = startline - //private var nextcol = startcol - - private def markNewLine() { - lastLineStartPos = lineStartPos - if (onlyBlankChars) lastBlankLinePos = lineStartPos - lineStartPos = bp - onlyBlankChars = true - //nextline += 1 - //nextcol = 1 - } - - def hasNext: Boolean = if (bp < buf.length) true - else { - false - } - def last: Char = if (bp > start + 2) buf(bp - 2) else ' ' // XML literals + def hasNext = bp < buf.length def next(): Char = { - //cline = nextline - //ccol = nextcol val buf = this.buf.asInstanceOf[collection.mutable.WrappedArray[Char]].array if(!hasNext) { ch = SU return SU // there is an endless stream of SU's at the end } - oldBp = bp - oldCh = ch ch = buf(bp) isUnicode = false bp = bp + 1 ch match { case '\t' => - // nextcol = ((nextcol - 1) / tabinc * tabinc) + tabinc + 1; case CR => if (bp < buf.size && buf(bp) == LF) { ch = LF bp += 1 } - markNewLine() case LF | FF => - markNewLine() case '\\' => def evenSlashPrefix: Boolean = { var p = bp - 2 @@ -90,11 +48,10 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, } def udigit: Int = { val d = digit2int(buf(bp), 16) - if (d >= 0) { bp += 1; /* nextcol = nextcol + 1 */ } + if (d >= 0) bp += 1 else error("error in unicode escape"); d } - // nextcol 
+= 1 if (buf(bp) == 'u' && decodeUni && evenSlashPrefix) { do { bp += 1 //; nextcol += 1 @@ -104,20 +61,10 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, isUnicode = true } case _ => - if (ch > ' ') onlyBlankChars = false - // nextcol += 1 } ch } - def rewind() { - if (oldBp == -1) throw new IllegalArgumentException - bp = oldBp - ch = oldCh - oldBp = -1 - oldCh = 'x' - } - def copy: JavaCharArrayReader = new JavaCharArrayReader(buf, bp, /* nextcol, nextline, */ decodeUni, error) } diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala index a2994966fd..1d2cc73c6b 100644 --- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala +++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala @@ -46,9 +46,6 @@ trait ScalaClassLoader extends JClassLoader { def create(path: String): AnyRef = tryToInitializeClass[AnyRef](path) map (_.newInstance()) orNull - def constructorsOf[T <: AnyRef : ClassTag]: List[Constructor[T]] = - classTag[T].runtimeClass.getConstructors.toList map (_.asInstanceOf[Constructor[T]]) - /** The actual bytes for a class file, or an empty array if it can't be found. */ def classBytes(className: String): Array[Byte] = classAsStream(className) match { case null => Array() @@ -71,14 +68,6 @@ trait ScalaClassLoader extends JClassLoader { try asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*)) // !!! : AnyRef shouldn't be necessary catch unwrapHandler({ case ex => throw ex }) } - - /** A list comprised of this classloader followed by all its - * (non-null) parent classloaders, if any. - */ - def loaderChain: List[ScalaClassLoader] = this :: (getParent match { - case null => Nil - case p => p.loaderChain - }) } /** Methods for obtaining various classloaders. @@ -99,35 +88,6 @@ object ScalaClassLoader { } def contextLoader = apply(Thread.currentThread.getContextClassLoader) def appLoader = apply(JClassLoader.getSystemClassLoader) - def extLoader = apply(appLoader.getParent) - def bootLoader = apply(null) - def contextChain = loaderChain(contextLoader) - - def pathToErasure[T: ClassTag] = pathToClass(classTag[T].runtimeClass) - def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class" - def locate[T: ClassTag] = contextLoader getResource pathToErasure[T] - - /** Tries to guess the classpath by type matching the context classloader - * and its parents, looking for any classloaders which will reveal their - * classpath elements as urls. It it can't find any, creates a classpath - * from the supplied string. 
- */ - def guessClassPathString(default: String = ""): String = { - val classpathURLs = contextChain flatMap { - case x: HasClassPath => x.classPathURLs - case x: JURLClassLoader => x.getURLs.toSeq - case _ => Nil - } - if (classpathURLs.isEmpty) default - else JavaClassPath.fromURLs(classpathURLs).asClasspathString - } - - def loaderChain(head: JClassLoader) = { - def loop(cl: JClassLoader): List[JClassLoader] = - if (cl == null) Nil else cl :: loop(cl.getParent) - - loop(head) - } def setContext(cl: JClassLoader) = Thread.currentThread.setContextClassLoader(cl) def savingContextLoader[T](body: => T): T = { @@ -143,14 +103,12 @@ object ScalaClassLoader { private var classloaderURLs: Seq[URL] = urls def classPathURLs: Seq[URL] = classloaderURLs - def classPath: ClassPath[_] = JavaClassPath fromURLs classPathURLs /** Override to widen to public */ override def addURL(url: URL) = { classloaderURLs :+= url super.addURL(url) } - def toLongString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n") } def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader = @@ -161,7 +119,6 @@ object ScalaClassLoader { fromURLs(urls) tryToLoadClass name isDefined /** Finding what jar a clazz or instance came from */ - def origin(x: Any): Option[URL] = originOfClass(x.getClass) def originOfClass(x: Class[_]): Option[URL] = Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation)) } diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala index b103ae9cb0..a33af1754d 100644 --- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala +++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala @@ -14,6 +14,5 @@ class SimpleTracer(out: PrintStream, enabled: Boolean = true) { if (enabled) out.println(msg+value) value } - def withOutput(out: PrintStream) = new SimpleTracer(out, enabled) def when(enabled: Boolean): SimpleTracer = new SimpleTracer(out, enabled) } diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala index e9dcaa8e16..792a659ad6 100644 --- a/src/compiler/scala/tools/nsc/util/package.scala +++ b/src/compiler/scala/tools/nsc/util/package.scala @@ -18,16 +18,9 @@ package object util { type HashSet[T >: Null <: AnyRef] = scala.reflect.internal.util.HashSet[T] val HashSet = scala.reflect.internal.util.HashSet - def onull[T](value: T, orElse: => T): T = if (value == null) orElse else value - /** Apply a function and return the passed value */ def returning[T](x: T)(f: T => Unit): T = { f(x) ; x } - /** Frequency counter */ - def freq[T](xs: Traversable[T]): Map[T, Int] = xs groupBy identity mapValues (_.size) - - def freqrank[T](xs: Traversable[(T, Int)]): List[(Int, T)] = xs.toList map (_.swap) sortBy (-_._1) - /** Execute code and then wait for all non-daemon Threads * created and begun during its execution to complete. */ @@ -54,18 +47,6 @@ package object util { (result, ts2 filterNot (ts1 contains _)) } - /** Given a function and a block of code, evaluates code block, - * calls function with milliseconds elapsed, and returns block result. - */ - def millisElapsedTo[T](f: Long => Unit)(body: => T): T = { - val start = System.currentTimeMillis - val result = body - val end = System.currentTimeMillis - - f(end - start) - result - } - /** Generate a string using a routine that wants to write on a stream. 
*/ def stringFromWriter(writer: PrintWriter => Unit): String = { val stringWriter = new StringWriter() @@ -96,7 +77,6 @@ package object util { } lazy val trace = new SimpleTracer(System.out) - lazy val errtrace = new SimpleTracer(System.err) @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0") val StringOps = scala.reflect.internal.util.StringOps diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 35ff9284e0..333651162e 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -183,7 +183,7 @@ trait Names extends api.Names with LowPriorityNames { scala.compat.Platform.arraycopy(chrs, index, cs, offset, len) /** @return the ascii representation of this name */ - final def toChars: Array[Char] = { + final def toChars: Array[Char] = { // used by ide val cs = new Array[Char](len) copyChars(cs, 0) cs @@ -394,7 +394,7 @@ trait Names extends api.Names with LowPriorityNames { def prepend(prefix: String) = newName("" + prefix + this) def decodedName: ThisNameType = newName(decode) - def isOperatorName: Boolean = decode != toString + def isOperatorName: Boolean = decode != toString // used by ide def longString: String = nameKind + " " + decode def debugString = { val s = decode ; if (isTypeName) s + "!" else s } } diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index f30807a26c..072e94e069 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -11,7 +11,7 @@ abstract class TreeGen extends macros.TreeBuilder { def rootScalaDot(name: Name) = Select(rootId(nme.scala_) setSymbol ScalaPackage, name) def scalaDot(name: Name) = Select(Ident(nme.scala_) setSymbol ScalaPackage, name) def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name) - def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass + def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass // used in ide def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = { val cls = if (abstractFun) -- cgit v1.2.3 From f9053e5ac67db484e8a9f2acc39cf8d39a2f1f33 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 24 Nov 2012 11:32:43 -0800 Subject: Updated asm to 4.1. Released 2012-10-14. One of the listed features is 316321: asm osgi bundles based on BND. in case that's interesting to someone. 
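The AnnotationVisitor hunks further down illustrate the ASM 4 visitor convention: each visitor is constructed with the API version it implements and may delegate to an optional wrapped visitor, and with this update the constructor actually enforces api == Opcodes.ASM4 instead of leaving that check commented out. A minimal sketch of a delegating visitor written against that convention (the class name and the println body are illustrative assumptions, not code from this patch):

    import scala.tools.asm.AnnotationVisitor;
    import scala.tools.asm.Opcodes;

    // Hypothetical example, not part of the patch: logs each primitive
    // annotation value before delegating to the wrapped visitor, if any.
    class LoggingAnnotationVisitor extends AnnotationVisitor {
        LoggingAnnotationVisitor(AnnotationVisitor next) {
            // As of this 4.1 update the constructor rejects any api value
            // other than Opcodes.ASM4 (the check is no longer commented out).
            super(Opcodes.ASM4, next);
        }

        @Override
        public void visit(String name, Object value) {
            System.out.println(name + " = " + value);
            super.visit(name, value); // forwards to 'next' when non-null
        }
    }
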
--- src/asm/scala/tools/asm/AnnotationVisitor.java | 66 +- src/asm/scala/tools/asm/AnnotationWriter.java | 58 +- src/asm/scala/tools/asm/Attribute.java | 193 +- src/asm/scala/tools/asm/ByteVector.java | 49 +- src/asm/scala/tools/asm/ClassReader.java | 2980 ++++++++++---------- src/asm/scala/tools/asm/ClassVisitor.java | 233 +- src/asm/scala/tools/asm/ClassWriter.java | 499 ++-- src/asm/scala/tools/asm/Context.java | 110 + src/asm/scala/tools/asm/FieldVisitor.java | 34 +- src/asm/scala/tools/asm/FieldWriter.java | 72 +- src/asm/scala/tools/asm/Frame.java | 1024 +++---- src/asm/scala/tools/asm/Handle.java | 48 +- src/asm/scala/tools/asm/Handler.java | 9 +- src/asm/scala/tools/asm/Item.java | 162 +- src/asm/scala/tools/asm/Label.java | 135 +- src/asm/scala/tools/asm/MethodVisitor.java | 516 ++-- src/asm/scala/tools/asm/MethodWriter.java | 1165 ++++---- src/asm/scala/tools/asm/Type.java | 254 +- .../scala/tools/asm/signature/SignatureReader.java | 181 +- .../tools/asm/signature/SignatureVisitor.java | 51 +- .../scala/tools/asm/signature/SignatureWriter.java | 2 +- src/asm/scala/tools/asm/tree/AbstractInsnNode.java | 30 +- src/asm/scala/tools/asm/tree/AnnotationNode.java | 55 +- src/asm/scala/tools/asm/tree/ClassNode.java | 102 +- src/asm/scala/tools/asm/tree/FieldInsnNode.java | 34 +- src/asm/scala/tools/asm/tree/FieldNode.java | 104 +- src/asm/scala/tools/asm/tree/FrameNode.java | 121 +- src/asm/scala/tools/asm/tree/IincInsnNode.java | 8 +- src/asm/scala/tools/asm/tree/InnerClassNode.java | 42 +- src/asm/scala/tools/asm/tree/InsnList.java | 120 +- src/asm/scala/tools/asm/tree/InsnNode.java | 33 +- src/asm/scala/tools/asm/tree/IntInsnNode.java | 13 +- .../tools/asm/tree/InvokeDynamicInsnNode.java | 22 +- src/asm/scala/tools/asm/tree/JumpInsnNode.java | 25 +- src/asm/scala/tools/asm/tree/LabelNode.java | 2 +- src/asm/scala/tools/asm/tree/LdcInsnNode.java | 11 +- src/asm/scala/tools/asm/tree/LineNumberNode.java | 8 +- .../scala/tools/asm/tree/LocalVariableNode.java | 45 +- .../scala/tools/asm/tree/LookupSwitchInsnNode.java | 21 +- src/asm/scala/tools/asm/tree/MethodInsnNode.java | 38 +- src/asm/scala/tools/asm/tree/MethodNode.java | 236 +- .../tools/asm/tree/MultiANewArrayInsnNode.java | 10 +- .../scala/tools/asm/tree/TableSwitchInsnNode.java | 30 +- .../scala/tools/asm/tree/TryCatchBlockNode.java | 32 +- src/asm/scala/tools/asm/tree/TypeInsnNode.java | 19 +- src/asm/scala/tools/asm/tree/VarInsnNode.java | 21 +- .../scala/tools/asm/tree/analysis/Analyzer.java | 160 +- .../tools/asm/tree/analysis/AnalyzerException.java | 11 +- .../tools/asm/tree/analysis/BasicInterpreter.java | 483 ++-- .../scala/tools/asm/tree/analysis/BasicValue.java | 9 +- .../tools/asm/tree/analysis/BasicVerifier.java | 598 ++-- src/asm/scala/tools/asm/tree/analysis/Frame.java | 854 +++--- .../scala/tools/asm/tree/analysis/Interpreter.java | 110 +- .../tools/asm/tree/analysis/SimpleVerifier.java | 119 +- .../tools/asm/tree/analysis/SourceInterpreter.java | 148 +- .../scala/tools/asm/tree/analysis/SourceValue.java | 8 +- .../scala/tools/asm/tree/analysis/Subroutine.java | 9 +- src/asm/scala/tools/asm/util/ASMifiable.java | 13 +- src/asm/scala/tools/asm/util/ASMifier.java | 485 ++-- .../tools/asm/util/CheckAnnotationAdapter.java | 28 +- .../scala/tools/asm/util/CheckClassAdapter.java | 587 +++- .../scala/tools/asm/util/CheckFieldAdapter.java | 23 +- .../scala/tools/asm/util/CheckMethodAdapter.java | 937 +++--- .../tools/asm/util/CheckSignatureAdapter.java | 57 +- src/asm/scala/tools/asm/util/Printer.java | 316 +-- 
src/asm/scala/tools/asm/util/SignatureChecker.java | 47 - src/asm/scala/tools/asm/util/Textifiable.java | 8 +- src/asm/scala/tools/asm/util/Textifier.java | 446 ++- .../tools/asm/util/TraceAnnotationVisitor.java | 23 +- .../scala/tools/asm/util/TraceClassVisitor.java | 159 +- .../scala/tools/asm/util/TraceFieldVisitor.java | 10 +- .../scala/tools/asm/util/TraceMethodVisitor.java | 89 +- .../tools/asm/util/TraceSignatureVisitor.java | 59 +- .../scala/tools/nsc/backend/jvm/GenASM.scala | 8 +- 74 files changed, 7406 insertions(+), 7421 deletions(-) create mode 100644 src/asm/scala/tools/asm/Context.java delete mode 100644 src/asm/scala/tools/asm/util/SignatureChecker.java (limited to 'src') diff --git a/src/asm/scala/tools/asm/AnnotationVisitor.java b/src/asm/scala/tools/asm/AnnotationVisitor.java index b96e730a73..c806ca71e8 100644 --- a/src/asm/scala/tools/asm/AnnotationVisitor.java +++ b/src/asm/scala/tools/asm/AnnotationVisitor.java @@ -54,8 +54,9 @@ public abstract class AnnotationVisitor { /** * Constructs a new {@link AnnotationVisitor}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. */ public AnnotationVisitor(final int api) { this(api, null); @@ -64,15 +65,17 @@ public abstract class AnnotationVisitor { /** * Constructs a new {@link AnnotationVisitor}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. - * @param av the annotation visitor to which this visitor must delegate - * method calls. May be null. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. + * @param av + * the annotation visitor to which this visitor must delegate + * method calls. May be null. */ public AnnotationVisitor(final int api, final AnnotationVisitor av) { - /*if (api != Opcodes.ASM4) { + if (api != Opcodes.ASM4) { throw new IllegalArgumentException(); - }*/ + } this.api = api; this.av = av; } @@ -80,14 +83,17 @@ public abstract class AnnotationVisitor { /** * Visits a primitive value of the annotation. * - * @param name the value name. - * @param value the actual value, whose type must be {@link Byte}, - * {@link Boolean}, {@link Character}, {@link Short}, {@link Integer} - * , {@link Long}, {@link Float}, {@link Double}, {@link String} or - * {@link Type} or OBJECT or ARRAY sort. This value can also be an - * array of byte, boolean, short, char, int, long, float or double - * values (this is equivalent to using {@link #visitArray visitArray} - * and visiting each array element in turn, but is more convenient). + * @param name + * the value name. + * @param value + * the actual value, whose type must be {@link Byte}, + * {@link Boolean}, {@link Character}, {@link Short}, + * {@link Integer} , {@link Long}, {@link Float}, {@link Double}, + * {@link String} or {@link Type} or OBJECT or ARRAY sort. This + * value can also be an array of byte, boolean, short, char, int, + * long, float or double values (this is equivalent to using + * {@link #visitArray visitArray} and visiting each array element + * in turn, but is more convenient). */ public void visit(String name, Object value) { if (av != null) { @@ -98,9 +104,12 @@ public abstract class AnnotationVisitor { /** * Visits an enumeration value of the annotation. * - * @param name the value name. - * @param desc the class descriptor of the enumeration class. 
- * @param value the actual enumeration value. + * @param name + * the value name. + * @param desc + * the class descriptor of the enumeration class. + * @param value + * the actual enumeration value. */ public void visitEnum(String name, String desc, String value) { if (av != null) { @@ -111,12 +120,14 @@ public abstract class AnnotationVisitor { /** * Visits a nested annotation value of the annotation. * - * @param name the value name. - * @param desc the class descriptor of the nested annotation class. + * @param name + * the value name. + * @param desc + * the class descriptor of the nested annotation class. * @return a visitor to visit the actual nested annotation value, or - * null if this visitor is not interested in visiting - * this nested annotation. The nested annotation value must be - * fully visited before calling other methods on this annotation + * null if this visitor is not interested in visiting this + * nested annotation. The nested annotation value must be fully + * visited before calling other methods on this annotation * visitor. */ public AnnotationVisitor visitAnnotation(String name, String desc) { @@ -132,10 +143,11 @@ public abstract class AnnotationVisitor { * can be passed as value to {@link #visit visit}. This is what * {@link ClassReader} does. * - * @param name the value name. + * @param name + * the value name. * @return a visitor to visit the actual array value elements, or - * null if this visitor is not interested in visiting - * these values. The 'name' parameters passed to the methods of this + * null if this visitor is not interested in visiting these + * values. The 'name' parameters passed to the methods of this * visitor are ignored. All the array values must be visited * before calling other methods on this annotation visitor. */ diff --git a/src/asm/scala/tools/asm/AnnotationWriter.java b/src/asm/scala/tools/asm/AnnotationWriter.java index e530780249..8eb5b2ef48 100644 --- a/src/asm/scala/tools/asm/AnnotationWriter.java +++ b/src/asm/scala/tools/asm/AnnotationWriter.java @@ -90,20 +90,20 @@ final class AnnotationWriter extends AnnotationVisitor { /** * Constructs a new {@link AnnotationWriter}. * - * @param cw the class writer to which this annotation must be added. - * @param named true if values are named, false otherwise. - * @param bv where the annotation values must be stored. - * @param parent where the number of annotation values must be stored. - * @param offset where in parent the number of annotation values must - * be stored. + * @param cw + * the class writer to which this annotation must be added. + * @param named + * true if values are named, false otherwise. + * @param bv + * where the annotation values must be stored. + * @param parent + * where the number of annotation values must be stored. + * @param offset + * where in parent the number of annotation values must + * be stored. 
*/ - AnnotationWriter( - final ClassWriter cw, - final boolean named, - final ByteVector bv, - final ByteVector parent, - final int offset) - { + AnnotationWriter(final ClassWriter cw, final boolean named, + final ByteVector bv, final ByteVector parent, final int offset) { super(Opcodes.ASM4); this.cw = cw; this.named = named; @@ -190,11 +190,8 @@ final class AnnotationWriter extends AnnotationVisitor { } @Override - public void visitEnum( - final String name, - final String desc, - final String value) - { + public void visitEnum(final String name, final String desc, + final String value) { ++size; if (named) { bv.putShort(cw.newUTF8(name)); @@ -203,10 +200,8 @@ final class AnnotationWriter extends AnnotationVisitor { } @Override - public AnnotationVisitor visitAnnotation( - final String name, - final String desc) - { + public AnnotationVisitor visitAnnotation(final String name, + final String desc) { ++size; if (named) { bv.putShort(cw.newUTF8(name)); @@ -259,7 +254,8 @@ final class AnnotationWriter extends AnnotationVisitor { * Puts the annotations of this annotation writer list into the given byte * vector. * - * @param out where the annotations must be put. + * @param out + * where the annotations must be put. */ void put(final ByteVector out) { int n = 0; @@ -286,15 +282,15 @@ final class AnnotationWriter extends AnnotationVisitor { /** * Puts the given annotation lists into the given byte vector. * - * @param panns an array of annotation writer lists. - * @param off index of the first annotation to be written. - * @param out where the annotations must be put. + * @param panns + * an array of annotation writer lists. + * @param off + * index of the first annotation to be written. + * @param out + * where the annotations must be put. */ - static void put( - final AnnotationWriter[] panns, - final int off, - final ByteVector out) - { + static void put(final AnnotationWriter[] panns, final int off, + final ByteVector out) { int size = 1 + 2 * (panns.length - off); for (int i = off; i < panns.length; ++i) { size += panns[i] == null ? 0 : panns[i].getSize(); diff --git a/src/asm/scala/tools/asm/Attribute.java b/src/asm/scala/tools/asm/Attribute.java index 408f21ce1e..ac40a758a2 100644 --- a/src/asm/scala/tools/asm/Attribute.java +++ b/src/asm/scala/tools/asm/Attribute.java @@ -55,7 +55,8 @@ public class Attribute { /** * Constructs a new empty attribute. * - * @param type the type of the attribute. + * @param type + * the type of the attribute. */ protected Attribute(final String type) { this.type = type; @@ -91,39 +92,39 @@ public class Attribute { } /** - * Reads a {@link #type type} attribute. This method must return a new - * {@link Attribute} object, of type {@link #type type}, corresponding to - * the len bytes starting at the given offset, in the given class - * reader. + * Reads a {@link #type type} attribute. This method must return a + * new {@link Attribute} object, of type {@link #type type}, + * corresponding to the len bytes starting at the given offset, in + * the given class reader. * - * @param cr the class that contains the attribute to be read. - * @param off index of the first byte of the attribute's content in {@link - * ClassReader#b cr.b}. The 6 attribute header bytes, containing the - * type and the length of the attribute, are not taken into account - * here. - * @param len the length of the attribute's content. 
- * @param buf buffer to be used to call - * {@link ClassReader#readUTF8 readUTF8}, - * {@link ClassReader#readClass(int,char[]) readClass} or - * {@link ClassReader#readConst readConst}. - * @param codeOff index of the first byte of code's attribute content in - * {@link ClassReader#b cr.b}, or -1 if the attribute to be read is - * not a code attribute. The 6 attribute header bytes, containing the - * type and the length of the attribute, are not taken into account - * here. - * @param labels the labels of the method's code, or null if the - * attribute to be read is not a code attribute. + * @param cr + * the class that contains the attribute to be read. + * @param off + * index of the first byte of the attribute's content in + * {@link ClassReader#b cr.b}. The 6 attribute header bytes, + * containing the type and the length of the attribute, are not + * taken into account here. + * @param len + * the length of the attribute's content. + * @param buf + * buffer to be used to call {@link ClassReader#readUTF8 + * readUTF8}, {@link ClassReader#readClass(int,char[]) readClass} + * or {@link ClassReader#readConst readConst}. + * @param codeOff + * index of the first byte of code's attribute content in + * {@link ClassReader#b cr.b}, or -1 if the attribute to be read + * is not a code attribute. The 6 attribute header bytes, + * containing the type and the length of the attribute, are not + * taken into account here. + * @param labels + * the labels of the method's code, or null if the + * attribute to be read is not a code attribute. * @return a new {@link Attribute} object corresponding to the given * bytes. */ - protected Attribute read( - final ClassReader cr, - final int off, - final int len, - final char[] buf, - final int codeOff, - final Label[] labels) - { + protected Attribute read(final ClassReader cr, final int off, + final int len, final char[] buf, final int codeOff, + final Label[] labels) { Attribute attr = new Attribute(type); attr.value = new byte[len]; System.arraycopy(cr.b, off, attr.value, 0, len); @@ -133,30 +134,30 @@ public class Attribute { /** * Returns the byte array form of this attribute. * - * @param cw the class to which this attribute must be added. This parameter - * can be used to add to the constant pool of this class the items - * that corresponds to this attribute. - * @param code the bytecode of the method corresponding to this code - * attribute, or null if this attribute is not a code - * attributes. - * @param len the length of the bytecode of the method corresponding to this - * code attribute, or null if this attribute is not a code - * attribute. - * @param maxStack the maximum stack size of the method corresponding to - * this code attribute, or -1 if this attribute is not a code - * attribute. - * @param maxLocals the maximum number of local variables of the method - * corresponding to this code attribute, or -1 if this attribute is - * not a code attribute. + * @param cw + * the class to which this attribute must be added. This + * parameter can be used to add to the constant pool of this + * class the items that corresponds to this attribute. + * @param code + * the bytecode of the method corresponding to this code + * attribute, or null if this attribute is not a code + * attributes. + * @param len + * the length of the bytecode of the method corresponding to this + * code attribute, or null if this attribute is not a + * code attribute. 
+ * @param maxStack + * the maximum stack size of the method corresponding to this + * code attribute, or -1 if this attribute is not a code + * attribute. + * @param maxLocals + * the maximum number of local variables of the method + * corresponding to this code attribute, or -1 if this attribute + * is not a code attribute. * @return the byte array form of this attribute. */ - protected ByteVector write( - final ClassWriter cw, - final byte[] code, - final int len, - final int maxStack, - final int maxLocals) - { + protected ByteVector write(final ClassWriter cw, final byte[] code, + final int len, final int maxStack, final int maxLocals) { ByteVector v = new ByteVector(); v.data = value; v.length = value.length; @@ -181,30 +182,30 @@ public class Attribute { /** * Returns the size of all the attributes in this attribute list. * - * @param cw the class writer to be used to convert the attributes into byte - * arrays, with the {@link #write write} method. - * @param code the bytecode of the method corresponding to these code - * attributes, or null if these attributes are not code - * attributes. - * @param len the length of the bytecode of the method corresponding to - * these code attributes, or null if these attributes are - * not code attributes. - * @param maxStack the maximum stack size of the method corresponding to - * these code attributes, or -1 if these attributes are not code - * attributes. - * @param maxLocals the maximum number of local variables of the method - * corresponding to these code attributes, or -1 if these attributes - * are not code attributes. + * @param cw + * the class writer to be used to convert the attributes into + * byte arrays, with the {@link #write write} method. + * @param code + * the bytecode of the method corresponding to these code + * attributes, or null if these attributes are not code + * attributes. + * @param len + * the length of the bytecode of the method corresponding to + * these code attributes, or null if these attributes + * are not code attributes. + * @param maxStack + * the maximum stack size of the method corresponding to these + * code attributes, or -1 if these attributes are not code + * attributes. + * @param maxLocals + * the maximum number of local variables of the method + * corresponding to these code attributes, or -1 if these + * attributes are not code attributes. * @return the size of all the attributes in this attribute list. This size * includes the size of the attribute headers. */ - final int getSize( - final ClassWriter cw, - final byte[] code, - final int len, - final int maxStack, - final int maxLocals) - { + final int getSize(final ClassWriter cw, final byte[] code, final int len, + final int maxStack, final int maxLocals) { Attribute attr = this; int size = 0; while (attr != null) { @@ -219,30 +220,30 @@ public class Attribute { * Writes all the attributes of this attribute list in the given byte * vector. * - * @param cw the class writer to be used to convert the attributes into byte - * arrays, with the {@link #write write} method. - * @param code the bytecode of the method corresponding to these code - * attributes, or null if these attributes are not code - * attributes. - * @param len the length of the bytecode of the method corresponding to - * these code attributes, or null if these attributes are - * not code attributes. - * @param maxStack the maximum stack size of the method corresponding to - * these code attributes, or -1 if these attributes are not code - * attributes. 
- * @param maxLocals the maximum number of local variables of the method - * corresponding to these code attributes, or -1 if these attributes - * are not code attributes. - * @param out where the attributes must be written. + * @param cw + * the class writer to be used to convert the attributes into + * byte arrays, with the {@link #write write} method. + * @param code + * the bytecode of the method corresponding to these code + * attributes, or null if these attributes are not code + * attributes. + * @param len + * the length of the bytecode of the method corresponding to + * these code attributes, or null if these attributes + * are not code attributes. + * @param maxStack + * the maximum stack size of the method corresponding to these + * code attributes, or -1 if these attributes are not code + * attributes. + * @param maxLocals + * the maximum number of local variables of the method + * corresponding to these code attributes, or -1 if these + * attributes are not code attributes. + * @param out + * where the attributes must be written. */ - final void put( - final ClassWriter cw, - final byte[] code, - final int len, - final int maxStack, - final int maxLocals, - final ByteVector out) - { + final void put(final ClassWriter cw, final byte[] code, final int len, + final int maxStack, final int maxLocals, final ByteVector out) { Attribute attr = this; while (attr != null) { ByteVector b = attr.write(cw, code, len, maxStack, maxLocals); diff --git a/src/asm/scala/tools/asm/ByteVector.java b/src/asm/scala/tools/asm/ByteVector.java index 5081f0184b..2bc63eb384 100644 --- a/src/asm/scala/tools/asm/ByteVector.java +++ b/src/asm/scala/tools/asm/ByteVector.java @@ -59,7 +59,8 @@ public class ByteVector { * Constructs a new {@link ByteVector ByteVector} with the given initial * size. * - * @param initialSize the initial size of the byte vector to be constructed. + * @param initialSize + * the initial size of the byte vector to be constructed. */ public ByteVector(final int initialSize) { data = new byte[initialSize]; @@ -69,7 +70,8 @@ public class ByteVector { * Puts a byte into this byte vector. The byte vector is automatically * enlarged if necessary. * - * @param b a byte. + * @param b + * a byte. * @return this byte vector. */ public ByteVector putByte(final int b) { @@ -86,8 +88,10 @@ public class ByteVector { * Puts two bytes into this byte vector. The byte vector is automatically * enlarged if necessary. * - * @param b1 a byte. - * @param b2 another byte. + * @param b1 + * a byte. + * @param b2 + * another byte. * @return this byte vector. */ ByteVector put11(final int b1, final int b2) { @@ -106,7 +110,8 @@ public class ByteVector { * Puts a short into this byte vector. The byte vector is automatically * enlarged if necessary. * - * @param s a short. + * @param s + * a short. * @return this byte vector. */ public ByteVector putShort(final int s) { @@ -125,8 +130,10 @@ public class ByteVector { * Puts a byte and a short into this byte vector. The byte vector is * automatically enlarged if necessary. * - * @param b a byte. - * @param s a short. + * @param b + * a byte. + * @param s + * a short. * @return this byte vector. */ ByteVector put12(final int b, final int s) { @@ -146,7 +153,8 @@ public class ByteVector { * Puts an int into this byte vector. The byte vector is automatically * enlarged if necessary. * - * @param i an int. + * @param i + * an int. * @return this byte vector. 
*/ public ByteVector putInt(final int i) { @@ -167,7 +175,8 @@ public class ByteVector { * Puts a long into this byte vector. The byte vector is automatically * enlarged if necessary. * - * @param l a long. + * @param l + * a long. * @return this byte vector. */ public ByteVector putLong(final long l) { @@ -194,7 +203,8 @@ public class ByteVector { * Puts an UTF8 string into this byte vector. The byte vector is * automatically enlarged if necessary. * - * @param s a String. + * @param s + * a String. * @return this byte vector. */ public ByteVector putUTF8(final String s) { @@ -259,14 +269,16 @@ public class ByteVector { * Puts an array of bytes into this byte vector. The byte vector is * automatically enlarged if necessary. * - * @param b an array of bytes. May be null to put len - * null bytes into this byte vector. - * @param off index of the fist byte of b that must be copied. - * @param len number of bytes of b that must be copied. + * @param b + * an array of bytes. May be null to put len + * null bytes into this byte vector. + * @param off + * index of the fist byte of b that must be copied. + * @param len + * number of bytes of b that must be copied. * @return this byte vector. */ - public ByteVector putByteArray(final byte[] b, final int off, final int len) - { + public ByteVector putByteArray(final byte[] b, final int off, final int len) { if (length + len > data.length) { enlarge(len); } @@ -280,8 +292,9 @@ public class ByteVector { /** * Enlarge this byte vector so that it can receive n more bytes. * - * @param size number of additional bytes that this byte vector should be - * able to receive. + * @param size + * number of additional bytes that this byte vector should be + * able to receive. */ private void enlarge(final int size) { int length1 = 2 * data.length; diff --git a/src/asm/scala/tools/asm/ClassReader.java b/src/asm/scala/tools/asm/ClassReader.java index f3287d41ae..cc655c1b62 100644 --- a/src/asm/scala/tools/asm/ClassReader.java +++ b/src/asm/scala/tools/asm/ClassReader.java @@ -112,9 +112,8 @@ public class ClassReader { public final byte[] b; /** - * The start index of each constant pool item in {@link #b b}, plus one. - * The one byte offset skips the constant pool item tag that indicates its - * type. + * The start index of each constant pool item in {@link #b b}, plus one. The + * one byte offset skips the constant pool item tag that indicates its type. */ private final int[] items; @@ -147,7 +146,8 @@ public class ClassReader { /** * Constructs a new {@link ClassReader} object. * - * @param b the bytecode of the class to be read. + * @param b + * the bytecode of the class to be read. */ public ClassReader(final byte[] b) { this(b, 0, b.length); @@ -156,14 +156,17 @@ public class ClassReader { /** * Constructs a new {@link ClassReader} object. * - * @param b the bytecode of the class to be read. - * @param off the start offset of the class data. - * @param len the length of the class data. + * @param b + * the bytecode of the class to be read. + * @param off + * the start offset of the class data. + * @param len + * the length of the class data. 
*/ public ClassReader(final byte[] b, final int off, final int len) { this.b = b; // checks the class version - if (readShort(6) > Opcodes.V1_7) { + if (readShort(off + 6) > Opcodes.V1_7) { throw new IllegalArgumentException(); } // parses the constant pool @@ -176,35 +179,35 @@ public class ClassReader { items[i] = index + 1; int size; switch (b[index]) { - case ClassWriter.FIELD: - case ClassWriter.METH: - case ClassWriter.IMETH: - case ClassWriter.INT: - case ClassWriter.FLOAT: - case ClassWriter.NAME_TYPE: - case ClassWriter.INDY: - size = 5; - break; - case ClassWriter.LONG: - case ClassWriter.DOUBLE: - size = 9; - ++i; - break; - case ClassWriter.UTF8: - size = 3 + readUnsignedShort(index + 1); - if (size > max) { - max = size; - } - break; - case ClassWriter.HANDLE: - size = 4; - break; - // case ClassWriter.CLASS: - // case ClassWriter.STR: - // case ClassWriter.MTYPE - default: - size = 3; - break; + case ClassWriter.FIELD: + case ClassWriter.METH: + case ClassWriter.IMETH: + case ClassWriter.INT: + case ClassWriter.FLOAT: + case ClassWriter.NAME_TYPE: + case ClassWriter.INDY: + size = 5; + break; + case ClassWriter.LONG: + case ClassWriter.DOUBLE: + size = 9; + ++i; + break; + case ClassWriter.UTF8: + size = 3 + readUnsignedShort(index + 1); + if (size > max) { + max = size; + } + break; + case ClassWriter.HANDLE: + size = 4; + break; + // case ClassWriter.CLASS: + // case ClassWriter.STR: + // case ClassWriter.MTYPE + default: + size = 3; + break; } index += size; } @@ -249,8 +252,7 @@ public class ClassReader { * @see ClassVisitor#visit(int, int, String, String, String, String[]) */ public String getSuperName() { - int n = items[readUnsignedShort(header + 4)]; - return n == 0 ? null : readUTF8(n, new char[maxStringLength]); + return readClass(header + 4, new char[maxStringLength]); } /** @@ -280,7 +282,8 @@ public class ClassReader { * Copies the constant pool data into the given {@link ClassWriter}. Should * be called before the {@link #accept(ClassVisitor,int)} method. * - * @param classWriter the {@link ClassWriter} to copy constant pool into. + * @param classWriter + * the {@link ClassWriter} to copy constant pool into. 
*/ void copyPool(final ClassWriter classWriter) { char[] buf = new char[maxStringLength]; @@ -292,82 +295,63 @@ public class ClassReader { Item item = new Item(i); int nameType; switch (tag) { - case ClassWriter.FIELD: - case ClassWriter.METH: - case ClassWriter.IMETH: - nameType = items[readUnsignedShort(index + 2)]; - item.set(tag, - readClass(index, buf), - readUTF8(nameType, buf), - readUTF8(nameType + 2, buf)); - break; - - case ClassWriter.INT: - item.set(readInt(index)); - break; - - case ClassWriter.FLOAT: - item.set(Float.intBitsToFloat(readInt(index))); - break; - - case ClassWriter.NAME_TYPE: - item.set(tag, - readUTF8(index, buf), - readUTF8(index + 2, buf), - null); - break; - - case ClassWriter.LONG: - item.set(readLong(index)); - ++i; - break; - - case ClassWriter.DOUBLE: - item.set(Double.longBitsToDouble(readLong(index))); - ++i; - break; - - case ClassWriter.UTF8: { - String s = strings[i]; - if (s == null) { - index = items[i]; - s = strings[i] = readUTF(index + 2, - readUnsignedShort(index), - buf); - } - item.set(tag, s, null, null); + case ClassWriter.FIELD: + case ClassWriter.METH: + case ClassWriter.IMETH: + nameType = items[readUnsignedShort(index + 2)]; + item.set(tag, readClass(index, buf), readUTF8(nameType, buf), + readUTF8(nameType + 2, buf)); + break; + case ClassWriter.INT: + item.set(readInt(index)); + break; + case ClassWriter.FLOAT: + item.set(Float.intBitsToFloat(readInt(index))); + break; + case ClassWriter.NAME_TYPE: + item.set(tag, readUTF8(index, buf), readUTF8(index + 2, buf), + null); + break; + case ClassWriter.LONG: + item.set(readLong(index)); + ++i; + break; + case ClassWriter.DOUBLE: + item.set(Double.longBitsToDouble(readLong(index))); + ++i; + break; + case ClassWriter.UTF8: { + String s = strings[i]; + if (s == null) { + index = items[i]; + s = strings[i] = readUTF(index + 2, + readUnsignedShort(index), buf); } - break; - - case ClassWriter.HANDLE: { - int fieldOrMethodRef = items[readUnsignedShort(index + 1)]; - nameType = items[readUnsignedShort(fieldOrMethodRef + 2)]; - item.set(ClassWriter.HANDLE_BASE + readByte(index), - readClass(fieldOrMethodRef, buf), - readUTF8(nameType, buf), - readUTF8(nameType + 2, buf)); - + item.set(tag, s, null, null); + break; + } + case ClassWriter.HANDLE: { + int fieldOrMethodRef = items[readUnsignedShort(index + 1)]; + nameType = items[readUnsignedShort(fieldOrMethodRef + 2)]; + item.set(ClassWriter.HANDLE_BASE + readByte(index), + readClass(fieldOrMethodRef, buf), + readUTF8(nameType, buf), readUTF8(nameType + 2, buf)); + break; + } + case ClassWriter.INDY: + if (classWriter.bootstrapMethods == null) { + copyBootstrapMethods(classWriter, items2, buf); } - break; - - - case ClassWriter.INDY: - if (classWriter.bootstrapMethods == null) { - copyBootstrapMethods(classWriter, items2, buf); - } - nameType = items[readUnsignedShort(index + 2)]; - item.set(readUTF8(nameType, buf), - readUTF8(nameType + 2, buf), - readUnsignedShort(index)); - break; - - - // case ClassWriter.STR: - // case ClassWriter.CLASS: - // case ClassWriter.MTYPE - default: - item.set(tag, readUTF8(index, buf), null, null); - break; + nameType = items[readUnsignedShort(index + 2)]; + item.set(readUTF8(nameType, buf), readUTF8(nameType + 2, buf), + readUnsignedShort(index)); + break; + // case ClassWriter.STR: + // case ClassWriter.CLASS: + // case ClassWriter.MTYPE + default: + item.set(tag, readUTF8(index, buf), null, null); + break; } int index2 = item.hashCode % items2.length; @@ -382,77 +366,59 @@ public class ClassReader { 
classWriter.index = ll; } - private void copyBootstrapMethods(ClassWriter classWriter, Item[] items2, char[] buf) { - int i, j, k, u, v; - - // skip class header - v = header; - v += 8 + (readUnsignedShort(v + 6) << 1); - - // skips fields and methods - i = readUnsignedShort(v); - v += 2; - for (; i > 0; --i) { - j = readUnsignedShort(v + 6); - v += 8; - for (; j > 0; --j) { - v += 6 + readInt(v + 2); + /** + * Copies the bootstrap method data into the given {@link ClassWriter}. + * Should be called before the {@link #accept(ClassVisitor,int)} method. + * + * @param classWriter + * the {@link ClassWriter} to copy bootstrap methods into. + */ + private void copyBootstrapMethods(final ClassWriter classWriter, + final Item[] items, final char[] c) { + // finds the "BootstrapMethods" attribute + int u = getAttributes(); + boolean found = false; + for (int i = readUnsignedShort(u); i > 0; --i) { + String attrName = readUTF8(u + 2, c); + if ("BootstrapMethods".equals(attrName)) { + found = true; + break; } + u += 6 + readInt(u + 4); } - i = readUnsignedShort(v); - v += 2; - for (; i > 0; --i) { - j = readUnsignedShort(v + 6); - v += 8; - for (; j > 0; --j) { - v += 6 + readInt(v + 2); - } + if (!found) { + return; } - - // read class attributes - i = readUnsignedShort(v); - v += 2; - for (; i > 0; --i) { - String attrName = readUTF8(v, buf); - int size = readInt(v + 2); - if ("BootstrapMethods".equals(attrName)) { - int boostrapMethodCount = readUnsignedShort(v + 6); - int x = v + 8; - for (j = 0; j < boostrapMethodCount; j++) { - int hashCode = readConst(readUnsignedShort(x), buf).hashCode(); - k = readUnsignedShort(x + 2); - u = x + 4; - for(; k > 0; --k) { - hashCode ^= readConst(readUnsignedShort(u), buf).hashCode(); - u += 2; - } - Item item = new Item(j); - item.set(x - v - 8, hashCode & 0x7FFFFFFF); - - int index2 = item.hashCode % items2.length; - item.next = items2[index2]; - items2[index2] = item; - - x = u; - } - - classWriter.bootstrapMethodsCount = boostrapMethodCount; - ByteVector bootstrapMethods = new ByteVector(size + 62); - bootstrapMethods.putByteArray(b, v + 8, size - 2); - classWriter.bootstrapMethods = bootstrapMethods; - return; + // copies the bootstrap methods in the class writer + int boostrapMethodCount = readUnsignedShort(u + 8); + for (int j = 0, v = u + 10; j < boostrapMethodCount; j++) { + int position = v - u - 10; + int hashCode = readConst(readUnsignedShort(v), c).hashCode(); + for (int k = readUnsignedShort(v + 2); k > 0; --k) { + hashCode ^= readConst(readUnsignedShort(v + 4), c).hashCode(); + v += 2; } - v += 6 + size; + v += 4; + Item item = new Item(j); + item.set(position, hashCode & 0x7FFFFFFF); + int index = item.hashCode % items.length; + item.next = items[index]; + items[index] = item; } - - // we are in trouble !!! + int attrSize = readInt(u + 4); + ByteVector bootstrapMethods = new ByteVector(attrSize + 62); + bootstrapMethods.putByteArray(b, u + 10, attrSize - 2); + classWriter.bootstrapMethodsCount = boostrapMethodCount; + classWriter.bootstrapMethods = bootstrapMethods; } /** * Constructs a new {@link ClassReader} object. * - * @param is an input stream from which to read the class. - * @throws IOException if a problem occurs during reading. + * @param is + * an input stream from which to read the class. + * @throws IOException + * if a problem occurs during reading. 
*/ public ClassReader(final InputStream is) throws IOException { this(readClass(is, false)); @@ -461,25 +427,30 @@ public class ClassReader { /** * Constructs a new {@link ClassReader} object. * - * @param name the binary qualified name of the class to be read. - * @throws IOException if an exception occurs during reading. + * @param name + * the binary qualified name of the class to be read. + * @throws IOException + * if an exception occurs during reading. */ public ClassReader(final String name) throws IOException { - this(readClass(ClassLoader.getSystemResourceAsStream(name.replace('.', '/') - + ".class"), true)); + this(readClass( + ClassLoader.getSystemResourceAsStream(name.replace('.', '/') + + ".class"), true)); } /** * Reads the bytecode of a class. * - * @param is an input stream from which to read the class. - * @param close true to close the input stream after reading. + * @param is + * an input stream from which to read the class. + * @param close + * true to close the input stream after reading. * @return the bytecode read from the given input stream. - * @throws IOException if a problem occurs during reading. + * @throws IOException + * if a problem occurs during reading. */ private static byte[] readClass(final InputStream is, boolean close) - throws IOException - { + throws IOException { if (is == null) { throw new IOException("Class not found"); } @@ -520,14 +491,16 @@ public class ClassReader { // ------------------------------------------------------------------------ /** - * Makes the given visitor visit the Java class of this {@link ClassReader}. - * This class is the one specified in the constructor (see + * Makes the given visitor visit the Java class of this {@link ClassReader} + * . This class is the one specified in the constructor (see * {@link #ClassReader(byte[]) ClassReader}). * - * @param classVisitor the visitor that must visit this class. - * @param flags option flags that can be used to modify the default behavior - * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES}, - * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}. + * @param classVisitor + * the visitor that must visit this class. + * @param flags + * option flags that can be used to modify the default behavior + * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES} + * , {@link #SKIP_FRAMES}, {@link #SKIP_CODE}. */ public void accept(final ClassVisitor classVisitor, final int flags) { accept(classVisitor, new Attribute[0], flags); @@ -538,1117 +511,923 @@ public class ClassReader { * This class is the one specified in the constructor (see * {@link #ClassReader(byte[]) ClassReader}). * - * @param classVisitor the visitor that must visit this class. - * @param attrs prototypes of the attributes that must be parsed during the - * visit of the class. Any attribute whose type is not equal to the - * type of one the prototypes will not be parsed: its byte array - * value will be passed unchanged to the ClassWriter. This may - * corrupt it if this value contains references to the constant pool, - * or has syntactic or semantic links with a class element that has - * been transformed by a class adapter between the reader and the - * writer. - * @param flags option flags that can be used to modify the default behavior - * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES}, - * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}. + * @param classVisitor + * the visitor that must visit this class. 
+ * @param attrs + * prototypes of the attributes that must be parsed during the + * visit of the class. Any attribute whose type is not equal to + * the type of one the prototypes will not be parsed: its byte + * array value will be passed unchanged to the ClassWriter. + * This may corrupt it if this value contains references to + * the constant pool, or has syntactic or semantic links with a + * class element that has been transformed by a class adapter + * between the reader and the writer. + * @param flags + * option flags that can be used to modify the default behavior + * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES} + * , {@link #SKIP_FRAMES}, {@link #SKIP_CODE}. */ - public void accept( - final ClassVisitor classVisitor, - final Attribute[] attrs, - final int flags) - { - byte[] b = this.b; // the bytecode array + public void accept(final ClassVisitor classVisitor, + final Attribute[] attrs, final int flags) { + int u = header; // current offset in the class file char[] c = new char[maxStringLength]; // buffer used to read strings - int i, j, k; // loop variables - int u, v, w; // indexes in b - Attribute attr; - - int access; - String name; - String desc; - String attrName; - String signature; - int anns = 0; - int ianns = 0; - Attribute cattrs = null; - - // visits the header - u = header; - access = readUnsignedShort(u); - name = readClass(u + 2, c); - v = items[readUnsignedShort(u + 4)]; - String superClassName = v == 0 ? null : readUTF8(v, c); - String[] implementedItfs = new String[readUnsignedShort(u + 6)]; - w = 0; + + Context context = new Context(); + context.attrs = attrs; + context.flags = flags; + context.buffer = c; + + // reads the class declaration + int access = readUnsignedShort(u); + String name = readClass(u + 2, c); + String superClass = readClass(u + 4, c); + String[] interfaces = new String[readUnsignedShort(u + 6)]; u += 8; - for (i = 0; i < implementedItfs.length; ++i) { - implementedItfs[i] = readClass(u, c); + for (int i = 0; i < interfaces.length; ++i) { + interfaces[i] = readClass(u, c); u += 2; } - boolean skipCode = (flags & SKIP_CODE) != 0; - boolean skipDebug = (flags & SKIP_DEBUG) != 0; - boolean unzip = (flags & EXPAND_FRAMES) != 0; - - // skips fields and methods - v = u; - i = readUnsignedShort(v); - v += 2; - for (; i > 0; --i) { - j = readUnsignedShort(v + 6); - v += 8; - for (; j > 0; --j) { - v += 6 + readInt(v + 2); - } - } - i = readUnsignedShort(v); - v += 2; - for (; i > 0; --i) { - j = readUnsignedShort(v + 6); - v += 8; - for (; j > 0; --j) { - v += 6 + readInt(v + 2); - } - } - // reads the class's attributes - signature = null; + // reads the class attributes + String signature = null; String sourceFile = null; String sourceDebug = null; String enclosingOwner = null; String enclosingName = null; String enclosingDesc = null; - int[] bootstrapMethods = null; // start indexed of the bsms + int anns = 0; + int ianns = 0; + int innerClasses = 0; + Attribute attributes = null; - i = readUnsignedShort(v); - v += 2; - for (; i > 0; --i) { - attrName = readUTF8(v, c); + u = getAttributes(); + for (int i = readUnsignedShort(u); i > 0; --i) { + String attrName = readUTF8(u + 2, c); // tests are sorted in decreasing frequency order // (based on frequencies observed on typical classes) if ("SourceFile".equals(attrName)) { - sourceFile = readUTF8(v + 6, c); + sourceFile = readUTF8(u + 8, c); } else if ("InnerClasses".equals(attrName)) { - w = v + 6; + innerClasses = u + 8; } else if ("EnclosingMethod".equals(attrName)) { - 
enclosingOwner = readClass(v + 6, c); - int item = readUnsignedShort(v + 8); + enclosingOwner = readClass(u + 8, c); + int item = readUnsignedShort(u + 10); if (item != 0) { enclosingName = readUTF8(items[item], c); enclosingDesc = readUTF8(items[item] + 2, c); } } else if (SIGNATURES && "Signature".equals(attrName)) { - signature = readUTF8(v + 6, c); - } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) { - anns = v + 6; + signature = readUTF8(u + 8, c); + } else if (ANNOTATIONS + && "RuntimeVisibleAnnotations".equals(attrName)) { + anns = u + 8; } else if ("Deprecated".equals(attrName)) { access |= Opcodes.ACC_DEPRECATED; } else if ("Synthetic".equals(attrName)) { - access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE; + access |= Opcodes.ACC_SYNTHETIC + | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE; } else if ("SourceDebugExtension".equals(attrName)) { - int len = readInt(v + 2); - sourceDebug = readUTF(v + 6, len, new char[len]); - } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) { - ianns = v + 6; + int len = readInt(u + 4); + sourceDebug = readUTF(u + 8, len, new char[len]); + } else if (ANNOTATIONS + && "RuntimeInvisibleAnnotations".equals(attrName)) { + ianns = u + 8; } else if ("BootstrapMethods".equals(attrName)) { - int boostrapMethodCount = readUnsignedShort(v + 6); - bootstrapMethods = new int[boostrapMethodCount]; - int x = v + 8; - for (j = 0; j < boostrapMethodCount; j++) { - bootstrapMethods[j] = x; - x += 2 + readUnsignedShort(x + 2) << 1; + int[] bootstrapMethods = new int[readUnsignedShort(u + 8)]; + for (int j = 0, v = u + 10; j < bootstrapMethods.length; j++) { + bootstrapMethods[j] = v; + v += 2 + readUnsignedShort(v + 2) << 1; } + context.bootstrapMethods = bootstrapMethods; } else { - attr = readAttribute(attrs, - attrName, - v + 6, - readInt(v + 2), - c, - -1, - null); + Attribute attr = readAttribute(attrs, attrName, u + 8, + readInt(u + 4), c, -1, null); if (attr != null) { - attr.next = cattrs; - cattrs = attr; + attr.next = attributes; + attributes = attr; } } - v += 6 + readInt(v + 2); + u += 6 + readInt(u + 4); } - // calls the visit method - classVisitor.visit(readInt(4), - access, - name, - signature, - superClassName, - implementedItfs); - - // calls the visitSource method - if (!skipDebug && (sourceFile != null || sourceDebug != null)) { + + // visits the class declaration + classVisitor.visit(readInt(items[1] - 7), access, name, signature, + superClass, interfaces); + + // visits the source and debug info + if ((flags & SKIP_DEBUG) == 0 + && (sourceFile != null || sourceDebug != null)) { classVisitor.visitSource(sourceFile, sourceDebug); } - // calls the visitOuterClass method + // visits the outer class if (enclosingOwner != null) { - classVisitor.visitOuterClass(enclosingOwner, - enclosingName, + classVisitor.visitOuterClass(enclosingOwner, enclosingName, enclosingDesc); } // visits the class annotations - if (ANNOTATIONS) { - for (i = 1; i >= 0; --i) { - v = i == 0 ? 
ianns : anns; - if (v != 0) { - j = readUnsignedShort(v); - v += 2; - for (; j > 0; --j) { - v = readAnnotationValues(v + 2, - c, - true, - classVisitor.visitAnnotation(readUTF8(v, c), i != 0)); - } - } + if (ANNOTATIONS && anns != 0) { + for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) { + v = readAnnotationValues(v + 2, c, true, + classVisitor.visitAnnotation(readUTF8(v, c), true)); + } + } + if (ANNOTATIONS && ianns != 0) { + for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) { + v = readAnnotationValues(v + 2, c, true, + classVisitor.visitAnnotation(readUTF8(v, c), false)); } } - // visits the class attributes - while (cattrs != null) { - attr = cattrs.next; - cattrs.next = null; - classVisitor.visitAttribute(cattrs); - cattrs = attr; + // visits the attributes + while (attributes != null) { + Attribute attr = attributes.next; + attributes.next = null; + classVisitor.visitAttribute(attributes); + attributes = attr; } - // calls the visitInnerClass method - if (w != 0) { - i = readUnsignedShort(w); - w += 2; - for (; i > 0; --i) { - classVisitor.visitInnerClass(readUnsignedShort(w) == 0 - ? null - : readClass(w, c), readUnsignedShort(w + 2) == 0 - ? null - : readClass(w + 2, c), readUnsignedShort(w + 4) == 0 - ? null - : readUTF8(w + 4, c), readUnsignedShort(w + 6)); - w += 8; + // visits the inner classes + if (innerClasses != 0) { + int v = innerClasses + 2; + for (int i = readUnsignedShort(innerClasses); i > 0; --i) { + classVisitor.visitInnerClass(readClass(v, c), + readClass(v + 2, c), readUTF8(v + 4, c), + readUnsignedShort(v + 6)); + v += 8; } } - // visits the fields - i = readUnsignedShort(u); + // visits the fields and methods + u = header + 10 + 2 * interfaces.length; + for (int i = readUnsignedShort(u - 2); i > 0; --i) { + u = readField(classVisitor, context, u); + } u += 2; - for (; i > 0; --i) { - access = readUnsignedShort(u); - name = readUTF8(u + 2, c); - desc = readUTF8(u + 4, c); - // visits the field's attributes and looks for a ConstantValue - // attribute - int fieldValueItem = 0; - signature = null; - anns = 0; - ianns = 0; - cattrs = null; - - j = readUnsignedShort(u + 6); - u += 8; - for (; j > 0; --j) { - attrName = readUTF8(u, c); - // tests are sorted in decreasing frequency order - // (based on frequencies observed on typical classes) - if ("ConstantValue".equals(attrName)) { - fieldValueItem = readUnsignedShort(u + 6); - } else if (SIGNATURES && "Signature".equals(attrName)) { - signature = readUTF8(u + 6, c); - } else if ("Deprecated".equals(attrName)) { - access |= Opcodes.ACC_DEPRECATED; - } else if ("Synthetic".equals(attrName)) { - access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE; - } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) { - anns = u + 6; - } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) { - ianns = u + 6; - } else { - attr = readAttribute(attrs, - attrName, - u + 6, - readInt(u + 2), - c, - -1, - null); - if (attr != null) { - attr.next = cattrs; - cattrs = attr; - } + for (int i = readUnsignedShort(u - 2); i > 0; --i) { + u = readMethod(classVisitor, context, u); + } + + // visits the end of the class + classVisitor.visitEnd(); + } + + /** + * Reads a field and makes the given visitor visit it. + * + * @param classVisitor + * the visitor that must visit the field. + * @param context + * information about the class being parsed. + * @param u + * the start offset of the field in the class file. 
+ * @return the offset of the first byte following the field in the class. + */ + private int readField(final ClassVisitor classVisitor, + final Context context, int u) { + // reads the field declaration + char[] c = context.buffer; + int access = readUnsignedShort(u); + String name = readUTF8(u + 2, c); + String desc = readUTF8(u + 4, c); + u += 6; + + // reads the field attributes + String signature = null; + int anns = 0; + int ianns = 0; + Object value = null; + Attribute attributes = null; + + for (int i = readUnsignedShort(u); i > 0; --i) { + String attrName = readUTF8(u + 2, c); + // tests are sorted in decreasing frequency order + // (based on frequencies observed on typical classes) + if ("ConstantValue".equals(attrName)) { + int item = readUnsignedShort(u + 8); + value = item == 0 ? null : readConst(item, c); + } else if (SIGNATURES && "Signature".equals(attrName)) { + signature = readUTF8(u + 8, c); + } else if ("Deprecated".equals(attrName)) { + access |= Opcodes.ACC_DEPRECATED; + } else if ("Synthetic".equals(attrName)) { + access |= Opcodes.ACC_SYNTHETIC + | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE; + } else if (ANNOTATIONS + && "RuntimeVisibleAnnotations".equals(attrName)) { + anns = u + 8; + } else if (ANNOTATIONS + && "RuntimeInvisibleAnnotations".equals(attrName)) { + ianns = u + 8; + } else { + Attribute attr = readAttribute(context.attrs, attrName, u + 8, + readInt(u + 4), c, -1, null); + if (attr != null) { + attr.next = attributes; + attributes = attr; } - u += 6 + readInt(u + 2); } - // visits the field - FieldVisitor fv = classVisitor.visitField(access, - name, - desc, - signature, - fieldValueItem == 0 ? null : readConst(fieldValueItem, c)); - // visits the field annotations and attributes - if (fv != null) { - if (ANNOTATIONS) { - for (j = 1; j >= 0; --j) { - v = j == 0 ? 
ianns : anns; - if (v != 0) { - k = readUnsignedShort(v); - v += 2; - for (; k > 0; --k) { - v = readAnnotationValues(v + 2, - c, - true, - fv.visitAnnotation(readUTF8(v, c), j != 0)); - } - } - } - } - while (cattrs != null) { - attr = cattrs.next; - cattrs.next = null; - fv.visitAttribute(cattrs); - cattrs = attr; - } - fv.visitEnd(); + u += 6 + readInt(u + 4); + } + u += 2; + + // visits the field declaration + FieldVisitor fv = classVisitor.visitField(access, name, desc, + signature, value); + if (fv == null) { + return u; + } + + // visits the field annotations + if (ANNOTATIONS && anns != 0) { + for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) { + v = readAnnotationValues(v + 2, c, true, + fv.visitAnnotation(readUTF8(v, c), true)); + } + } + if (ANNOTATIONS && ianns != 0) { + for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) { + v = readAnnotationValues(v + 2, c, true, + fv.visitAnnotation(readUTF8(v, c), false)); } } - // visits the methods - i = readUnsignedShort(u); - u += 2; - for (; i > 0; --i) { - int u0 = u + 6; - access = readUnsignedShort(u); - name = readUTF8(u + 2, c); - desc = readUTF8(u + 4, c); - signature = null; - anns = 0; - ianns = 0; - int dann = 0; - int mpanns = 0; - int impanns = 0; - cattrs = null; - v = 0; - w = 0; - - // looks for Code and Exceptions attributes - j = readUnsignedShort(u + 6); - u += 8; - for (; j > 0; --j) { - attrName = readUTF8(u, c); - int attrSize = readInt(u + 2); - u += 6; - // tests are sorted in decreasing frequency order - // (based on frequencies observed on typical classes) - if ("Code".equals(attrName)) { - if (!skipCode) { - v = u; - } - } else if ("Exceptions".equals(attrName)) { - w = u; - } else if (SIGNATURES && "Signature".equals(attrName)) { - signature = readUTF8(u, c); - } else if ("Deprecated".equals(attrName)) { - access |= Opcodes.ACC_DEPRECATED; - } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) { - anns = u; - } else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) { - dann = u; - } else if ("Synthetic".equals(attrName)) { - access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE; - } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) { - ianns = u; - } else if (ANNOTATIONS && "RuntimeVisibleParameterAnnotations".equals(attrName)) - { - mpanns = u; - } else if (ANNOTATIONS && "RuntimeInvisibleParameterAnnotations".equals(attrName)) - { - impanns = u; - } else { - attr = readAttribute(attrs, - attrName, - u, - attrSize, - c, - -1, - null); - if (attr != null) { - attr.next = cattrs; - cattrs = attr; - } + // visits the field attributes + while (attributes != null) { + Attribute attr = attributes.next; + attributes.next = null; + fv.visitAttribute(attributes); + attributes = attr; + } + + // visits the end of the field + fv.visitEnd(); + + return u; + } + + /** + * Reads a method and makes the given visitor visit it. + * + * @param classVisitor + * the visitor that must visit the method. + * @param context + * information about the class being parsed. + * @param u + * the start offset of the method in the class file. + * @return the offset of the first byte following the method in the class. 
+ */ + private int readMethod(final ClassVisitor classVisitor, + final Context context, int u) { + // reads the method declaration + char[] c = context.buffer; + int access = readUnsignedShort(u); + String name = readUTF8(u + 2, c); + String desc = readUTF8(u + 4, c); + u += 6; + + // reads the method attributes + int code = 0; + int exception = 0; + String[] exceptions = null; + String signature = null; + int anns = 0; + int ianns = 0; + int dann = 0; + int mpanns = 0; + int impanns = 0; + int firstAttribute = u; + Attribute attributes = null; + + for (int i = readUnsignedShort(u); i > 0; --i) { + String attrName = readUTF8(u + 2, c); + // tests are sorted in decreasing frequency order + // (based on frequencies observed on typical classes) + if ("Code".equals(attrName)) { + if ((context.flags & SKIP_CODE) == 0) { + code = u + 8; } - u += attrSize; - } - // reads declared exceptions - String[] exceptions; - if (w == 0) { - exceptions = null; + } else if ("Exceptions".equals(attrName)) { + exceptions = new String[readUnsignedShort(u + 8)]; + exception = u + 10; + for (int j = 0; j < exceptions.length; ++j) { + exceptions[j] = readClass(exception, c); + exception += 2; + } + } else if (SIGNATURES && "Signature".equals(attrName)) { + signature = readUTF8(u + 8, c); + } else if ("Deprecated".equals(attrName)) { + access |= Opcodes.ACC_DEPRECATED; + } else if (ANNOTATIONS + && "RuntimeVisibleAnnotations".equals(attrName)) { + anns = u + 8; + } else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) { + dann = u + 8; + } else if ("Synthetic".equals(attrName)) { + access |= Opcodes.ACC_SYNTHETIC + | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE; + } else if (ANNOTATIONS + && "RuntimeInvisibleAnnotations".equals(attrName)) { + ianns = u + 8; + } else if (ANNOTATIONS + && "RuntimeVisibleParameterAnnotations".equals(attrName)) { + mpanns = u + 8; + } else if (ANNOTATIONS + && "RuntimeInvisibleParameterAnnotations".equals(attrName)) { + impanns = u + 8; } else { - exceptions = new String[readUnsignedShort(w)]; - w += 2; - for (j = 0; j < exceptions.length; ++j) { - exceptions[j] = readClass(w, c); - w += 2; + Attribute attr = readAttribute(context.attrs, attrName, u + 8, + readInt(u + 4), c, -1, null); + if (attr != null) { + attr.next = attributes; + attributes = attr; } } + u += 6 + readInt(u + 4); + } + u += 2; - // visits the method's code, if any - MethodVisitor mv = classVisitor.visitMethod(access, - name, - desc, - signature, - exceptions); + // visits the method declaration + MethodVisitor mv = classVisitor.visitMethod(access, name, desc, + signature, exceptions); + if (mv == null) { + return u; + } - if (mv != null) { - /* - * if the returned MethodVisitor is in fact a MethodWriter, it - * means there is no method adapter between the reader and the - * writer. If, in addition, the writer's constant pool was - * copied from this reader (mw.cw.cr == this), and the signature - * and exceptions of the method have not been changed, then it - * is possible to skip all visit events and just copy the - * original code of the method to the writer (the access, name - * and descriptor can have been changed, this is not important - * since they are not copied as is from the reader). 
- */ - if (WRITER && mv instanceof MethodWriter) { - MethodWriter mw = (MethodWriter) mv; - if (mw.cw.cr == this) { - if (signature == mw.signature) { - boolean sameExceptions = false; - if (exceptions == null) { - sameExceptions = mw.exceptionCount == 0; - } else { - if (exceptions.length == mw.exceptionCount) { - sameExceptions = true; - for (j = exceptions.length - 1; j >= 0; --j) - { - w -= 2; - if (mw.exceptions[j] != readUnsignedShort(w)) - { - sameExceptions = false; - break; - } - } - } - } - if (sameExceptions) { - /* - * we do not copy directly the code into - * MethodWriter to save a byte array copy - * operation. The real copy will be done in - * ClassWriter.toByteArray(). - */ - mw.classReaderOffset = u0; - mw.classReaderLength = u - u0; - continue; - } + /* + * if the returned MethodVisitor is in fact a MethodWriter, it means + * there is no method adapter between the reader and the writer. If, in + * addition, the writer's constant pool was copied from this reader + * (mw.cw.cr == this), and the signature and exceptions of the method + * have not been changed, then it is possible to skip all visit events + * and just copy the original code of the method to the writer (the + * access, name and descriptor can have been changed, this is not + * important since they are not copied as is from the reader). + */ + if (WRITER && mv instanceof MethodWriter) { + MethodWriter mw = (MethodWriter) mv; + if (mw.cw.cr == this && signature == mw.signature) { + boolean sameExceptions = false; + if (exceptions == null) { + sameExceptions = mw.exceptionCount == 0; + } else if (exceptions.length == mw.exceptionCount) { + sameExceptions = true; + for (int j = exceptions.length - 1; j >= 0; --j) { + exception -= 2; + if (mw.exceptions[j] != readUnsignedShort(exception)) { + sameExceptions = false; + break; } } } - - if (ANNOTATIONS && dann != 0) { - AnnotationVisitor dv = mv.visitAnnotationDefault(); - readAnnotationValue(dann, c, null, dv); - if (dv != null) { - dv.visitEnd(); - } - } - if (ANNOTATIONS) { - for (j = 1; j >= 0; --j) { - w = j == 0 ? ianns : anns; - if (w != 0) { - k = readUnsignedShort(w); - w += 2; - for (; k > 0; --k) { - w = readAnnotationValues(w + 2, - c, - true, - mv.visitAnnotation(readUTF8(w, c), j != 0)); - } - } - } + if (sameExceptions) { + /* + * we do not copy directly the code into MethodWriter to + * save a byte array copy operation. The real copy will be + * done in ClassWriter.toByteArray(). 
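
The optimization described in the comment above boils down to recording a byte range (classReaderOffset / classReaderLength) and copying it verbatim when ClassWriter.toByteArray() later serializes the class, instead of replaying visit events. A rough standalone illustration of that idea (class and parameter names are made up for the sketch, not part of the patch):

    class RawMethodCopySketch {
        // Copies an unchanged method verbatim from the original class file bytes.
        static byte[] copyMethodBytes(byte[] classFile, int offset, int length) {
            byte[] out = new byte[length];
            System.arraycopy(classFile, offset, out, 0, length);
            return out;
        }
    }
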
+ */ + mw.classReaderOffset = firstAttribute; + mw.classReaderLength = u - firstAttribute; + return u; } - if (ANNOTATIONS && mpanns != 0) { - readParameterAnnotations(mpanns, desc, c, true, mv); + } + } + + // visits the method annotations + if (ANNOTATIONS && dann != 0) { + AnnotationVisitor dv = mv.visitAnnotationDefault(); + readAnnotationValue(dann, c, null, dv); + if (dv != null) { + dv.visitEnd(); + } + } + if (ANNOTATIONS && anns != 0) { + for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) { + v = readAnnotationValues(v + 2, c, true, + mv.visitAnnotation(readUTF8(v, c), true)); + } + } + if (ANNOTATIONS && ianns != 0) { + for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) { + v = readAnnotationValues(v + 2, c, true, + mv.visitAnnotation(readUTF8(v, c), false)); + } + } + if (ANNOTATIONS && mpanns != 0) { + readParameterAnnotations(mpanns, desc, c, true, mv); + } + if (ANNOTATIONS && impanns != 0) { + readParameterAnnotations(impanns, desc, c, false, mv); + } + + // visits the method attributes + while (attributes != null) { + Attribute attr = attributes.next; + attributes.next = null; + mv.visitAttribute(attributes); + attributes = attr; + } + + // visits the method code + if (code != 0) { + context.access = access; + context.name = name; + context.desc = desc; + mv.visitCode(); + readCode(mv, context, code); + } + + // visits the end of the method + mv.visitEnd(); + + return u; + } + + /** + * Reads the bytecode of a method and makes the given visitor visit it. + * + * @param mv + * the visitor that must visit the method's code. + * @param context + * information about the class being parsed. + * @param u + * the start offset of the code attribute in the class file. + */ + private void readCode(final MethodVisitor mv, final Context context, int u) { + // reads the header + byte[] b = this.b; + char[] c = context.buffer; + int maxStack = readUnsignedShort(u); + int maxLocals = readUnsignedShort(u + 2); + int codeLength = readInt(u + 4); + u += 8; + + // reads the bytecode to find the labels + int codeStart = u; + int codeEnd = u + codeLength; + Label[] labels = new Label[codeLength + 2]; + readLabel(codeLength + 1, labels); + while (u < codeEnd) { + int offset = u - codeStart; + int opcode = b[u] & 0xFF; + switch (ClassWriter.TYPE[opcode]) { + case ClassWriter.NOARG_INSN: + case ClassWriter.IMPLVAR_INSN: + u += 1; + break; + case ClassWriter.LABEL_INSN: + readLabel(offset + readShort(u + 1), labels); + u += 3; + break; + case ClassWriter.LABELW_INSN: + readLabel(offset + readInt(u + 1), labels); + u += 5; + break; + case ClassWriter.WIDE_INSN: + opcode = b[u + 1] & 0xFF; + if (opcode == Opcodes.IINC) { + u += 6; + } else { + u += 4; } - if (ANNOTATIONS && impanns != 0) { - readParameterAnnotations(impanns, desc, c, false, mv); + break; + case ClassWriter.TABL_INSN: + // skips 0 to 3 padding bytes + u = u + 4 - (offset & 3); + // reads instruction + readLabel(offset + readInt(u), labels); + for (int i = readInt(u + 8) - readInt(u + 4) + 1; i > 0; --i) { + readLabel(offset + readInt(u + 12), labels); + u += 4; } - while (cattrs != null) { - attr = cattrs.next; - cattrs.next = null; - mv.visitAttribute(cattrs); - cattrs = attr; + u += 12; + break; + case ClassWriter.LOOK_INSN: + // skips 0 to 3 padding bytes + u = u + 4 - (offset & 3); + // reads instruction + readLabel(offset + readInt(u), labels); + for (int i = readInt(u + 4); i > 0; --i) { + readLabel(offset + readInt(u + 12), labels); + u += 8; } + u += 8; + break; + case ClassWriter.VAR_INSN: + case 
ClassWriter.SBYTE_INSN: + case ClassWriter.LDC_INSN: + u += 2; + break; + case ClassWriter.SHORT_INSN: + case ClassWriter.LDCW_INSN: + case ClassWriter.FIELDORMETH_INSN: + case ClassWriter.TYPE_INSN: + case ClassWriter.IINC_INSN: + u += 3; + break; + case ClassWriter.ITFMETH_INSN: + case ClassWriter.INDYMETH_INSN: + u += 5; + break; + // case MANA_INSN: + default: + u += 4; + break; } + } - if (mv != null && v != 0) { - int maxStack = readUnsignedShort(v); - int maxLocals = readUnsignedShort(v + 2); - int codeLength = readInt(v + 4); - v += 8; + // reads the try catch entries to find the labels, and also visits them + for (int i = readUnsignedShort(u); i > 0; --i) { + Label start = readLabel(readUnsignedShort(u + 2), labels); + Label end = readLabel(readUnsignedShort(u + 4), labels); + Label handler = readLabel(readUnsignedShort(u + 6), labels); + String type = readUTF8(items[readUnsignedShort(u + 8)], c); + mv.visitTryCatchBlock(start, end, handler, type); + u += 8; + } + u += 2; - int codeStart = v; - int codeEnd = v + codeLength; - - mv.visitCode(); - - // 1st phase: finds the labels - int label; - Label[] labels = new Label[codeLength + 2]; - readLabel(codeLength + 1, labels); - while (v < codeEnd) { - w = v - codeStart; - int opcode = b[v] & 0xFF; - switch (ClassWriter.TYPE[opcode]) { - case ClassWriter.NOARG_INSN: - case ClassWriter.IMPLVAR_INSN: - v += 1; - break; - case ClassWriter.LABEL_INSN: - readLabel(w + readShort(v + 1), labels); - v += 3; - break; - case ClassWriter.LABELW_INSN: - readLabel(w + readInt(v + 1), labels); - v += 5; - break; - case ClassWriter.WIDE_INSN: - opcode = b[v + 1] & 0xFF; - if (opcode == Opcodes.IINC) { - v += 6; - } else { - v += 4; - } - break; - case ClassWriter.TABL_INSN: - // skips 0 to 3 padding bytes* - v = v + 4 - (w & 3); - // reads instruction - readLabel(w + readInt(v), labels); - j = readInt(v + 8) - readInt(v + 4) + 1; - v += 12; - for (; j > 0; --j) { - readLabel(w + readInt(v), labels); - v += 4; - } - break; - case ClassWriter.LOOK_INSN: - // skips 0 to 3 padding bytes* - v = v + 4 - (w & 3); - // reads instruction - readLabel(w + readInt(v), labels); - j = readInt(v + 4); - v += 8; - for (; j > 0; --j) { - readLabel(w + readInt(v + 4), labels); - v += 8; - } - break; - case ClassWriter.VAR_INSN: - case ClassWriter.SBYTE_INSN: - case ClassWriter.LDC_INSN: - v += 2; - break; - case ClassWriter.SHORT_INSN: - case ClassWriter.LDCW_INSN: - case ClassWriter.FIELDORMETH_INSN: - case ClassWriter.TYPE_INSN: - case ClassWriter.IINC_INSN: - v += 3; - break; - case ClassWriter.ITFMETH_INSN: - case ClassWriter.INDYMETH_INSN: - v += 5; - break; - // case MANA_INSN: - default: - v += 4; - break; - } - } - // parses the try catch entries - j = readUnsignedShort(v); - v += 2; - for (; j > 0; --j) { - Label start = readLabel(readUnsignedShort(v), labels); - Label end = readLabel(readUnsignedShort(v + 2), labels); - Label handler = readLabel(readUnsignedShort(v + 4), labels); - int type = readUnsignedShort(v + 6); - if (type == 0) { - mv.visitTryCatchBlock(start, end, handler, null); - } else { - mv.visitTryCatchBlock(start, - end, - handler, - readUTF8(items[type], c)); - } - v += 8; - } - // parses the local variable, line number tables, and code - // attributes - int varTable = 0; - int varTypeTable = 0; - int stackMap = 0; - int stackMapSize = 0; - int frameCount = 0; - int frameMode = 0; - int frameOffset = 0; - int frameLocalCount = 0; - int frameLocalDiff = 0; - int frameStackCount = 0; - Object[] frameLocal = null; - Object[] frameStack = null; - 
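
The try/catch loop above walks the standard exception_table, whose entries are four unsigned shorts each (start_pc, end_pc, handler_pc, catch_type, 8 bytes per entry), again with u kept two bytes behind the entry being decoded. A minimal standalone sketch of one entry (helper names are illustrative, not part of the patch):

    class ExceptionEntrySketch {
        // Decodes one exception_table entry into {start_pc, end_pc, handler_pc, catch_type}.
        static int[] readEntry(byte[] b, int entryStart) {
            int[] entry = new int[4];
            for (int i = 0; i < 4; i++) {
                int off = entryStart + 2 * i;
                entry[i] = ((b[off] & 0xFF) << 8) | (b[off + 1] & 0xFF);
            }
            return entry;
        }
    }
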
boolean zip = true; - cattrs = null; - j = readUnsignedShort(v); - v += 2; - for (; j > 0; --j) { - attrName = readUTF8(v, c); - if ("LocalVariableTable".equals(attrName)) { - if (!skipDebug) { - varTable = v + 6; - k = readUnsignedShort(v + 6); - w = v + 8; - for (; k > 0; --k) { - label = readUnsignedShort(w); - if (labels[label] == null) { - readLabel(label, labels).status |= Label.DEBUG; - } - label += readUnsignedShort(w + 2); - if (labels[label] == null) { - readLabel(label, labels).status |= Label.DEBUG; - } - w += 10; - } - } - } else if ("LocalVariableTypeTable".equals(attrName)) { - varTypeTable = v + 6; - } else if ("LineNumberTable".equals(attrName)) { - if (!skipDebug) { - k = readUnsignedShort(v + 6); - w = v + 8; - for (; k > 0; --k) { - label = readUnsignedShort(w); - if (labels[label] == null) { - readLabel(label, labels).status |= Label.DEBUG; - } - labels[label].line = readUnsignedShort(w + 2); - w += 4; - } - } - } else if (FRAMES && "StackMapTable".equals(attrName)) { - if ((flags & SKIP_FRAMES) == 0) { - stackMap = v + 8; - stackMapSize = readInt(v + 2); - frameCount = readUnsignedShort(v + 6); + // reads the code attributes + int varTable = 0; + int varTypeTable = 0; + boolean zip = true; + boolean unzip = (context.flags & EXPAND_FRAMES) != 0; + int stackMap = 0; + int stackMapSize = 0; + int frameCount = 0; + Context frame = null; + Attribute attributes = null; + + for (int i = readUnsignedShort(u); i > 0; --i) { + String attrName = readUTF8(u + 2, c); + if ("LocalVariableTable".equals(attrName)) { + if ((context.flags & SKIP_DEBUG) == 0) { + varTable = u + 8; + for (int j = readUnsignedShort(u + 8), v = u; j > 0; --j) { + int label = readUnsignedShort(v + 10); + if (labels[label] == null) { + readLabel(label, labels).status |= Label.DEBUG; } - /* - * here we do not extract the labels corresponding to - * the attribute content. This would require a full - * parsing of the attribute, which would need to be - * repeated in the second phase (see below). Instead the - * content of the attribute is read one frame at a time - * (i.e. after a frame has been visited, the next frame - * is read), and the labels it contains are also - * extracted one frame at a time. Thanks to the ordering - * of frames, having only a "one frame lookahead" is not - * a problem, i.e. it is not possible to see an offset - * smaller than the offset of the current insn and for - * which no Label exist. - */ - /* - * This is not true for UNINITIALIZED type offsets. We - * solve this by parsing the stack map table without a - * full decoding (see below). - */ - } else if (FRAMES && "StackMap".equals(attrName)) { - if ((flags & SKIP_FRAMES) == 0) { - stackMap = v + 8; - stackMapSize = readInt(v + 2); - frameCount = readUnsignedShort(v + 6); - zip = false; - } - /* - * IMPORTANT! here we assume that the frames are - * ordered, as in the StackMapTable attribute, although - * this is not guaranteed by the attribute format. 
- */ - } else { - for (k = 0; k < attrs.length; ++k) { - if (attrs[k].type.equals(attrName)) { - attr = attrs[k].read(this, - v + 6, - readInt(v + 2), - c, - codeStart - 8, - labels); - if (attr != null) { - attr.next = cattrs; - cattrs = attr; - } - } + label += readUnsignedShort(v + 12); + if (labels[label] == null) { + readLabel(label, labels).status |= Label.DEBUG; } + v += 10; } - v += 6 + readInt(v + 2); } - - // 2nd phase: visits each instruction - if (FRAMES && stackMap != 0) { - // creates the very first (implicit) frame from the method - // descriptor - frameLocal = new Object[maxLocals]; - frameStack = new Object[maxStack]; - if (unzip) { - int local = 0; - if ((access & Opcodes.ACC_STATIC) == 0) { - if ("".equals(name)) { - frameLocal[local++] = Opcodes.UNINITIALIZED_THIS; - } else { - frameLocal[local++] = readClass(header + 2, c); - } - } - j = 1; - loop: while (true) { - k = j; - switch (desc.charAt(j++)) { - case 'Z': - case 'C': - case 'B': - case 'S': - case 'I': - frameLocal[local++] = Opcodes.INTEGER; - break; - case 'F': - frameLocal[local++] = Opcodes.FLOAT; - break; - case 'J': - frameLocal[local++] = Opcodes.LONG; - break; - case 'D': - frameLocal[local++] = Opcodes.DOUBLE; - break; - case '[': - while (desc.charAt(j) == '[') { - ++j; - } - if (desc.charAt(j) == 'L') { - ++j; - while (desc.charAt(j) != ';') { - ++j; - } - } - frameLocal[local++] = desc.substring(k, ++j); - break; - case 'L': - while (desc.charAt(j) != ';') { - ++j; - } - frameLocal[local++] = desc.substring(k + 1, - j++); - break; - default: - break loop; - } + } else if ("LocalVariableTypeTable".equals(attrName)) { + varTypeTable = u + 8; + } else if ("LineNumberTable".equals(attrName)) { + if ((context.flags & SKIP_DEBUG) == 0) { + for (int j = readUnsignedShort(u + 8), v = u; j > 0; --j) { + int label = readUnsignedShort(v + 10); + if (labels[label] == null) { + readLabel(label, labels).status |= Label.DEBUG; } - frameLocalCount = local; + labels[label].line = readUnsignedShort(v + 12); + v += 4; } - /* - * for the first explicit frame the offset is not - * offset_delta + 1 but only offset_delta; setting the - * implicit frame offset to -1 allow the use of the - * "offset_delta + 1" rule in all cases - */ - frameOffset = -1; - /* - * Finds labels for UNINITIALIZED frame types. Instead of - * decoding each element of the stack map table, we look - * for 3 consecutive bytes that "look like" an UNINITIALIZED - * type (tag 8, offset within code bounds, NEW instruction - * at this offset). We may find false positives (i.e. not - * real UNINITIALIZED types), but this should be rare, and - * the only consequence will be the creation of an unneeded - * label. This is better than creating a label for each NEW - * instruction, and faster than fully decoding the whole - * stack map table. - */ - for (j = stackMap; j < stackMap + stackMapSize - 2; ++j) { - if (b[j] == 8) { // UNINITIALIZED FRAME TYPE - k = readUnsignedShort(j + 1); - if (k >= 0 && k < codeLength) { // potential offset - if ((b[codeStart + k] & 0xFF) == Opcodes.NEW) { // NEW at this offset - readLabel(k, labels); - } - } + } + } else if (FRAMES && "StackMapTable".equals(attrName)) { + if ((context.flags & SKIP_FRAMES) == 0) { + stackMap = u + 10; + stackMapSize = readInt(u + 4); + frameCount = readUnsignedShort(u + 8); + } + /* + * here we do not extract the labels corresponding to the + * attribute content. This would require a full parsing of the + * attribute, which would need to be repeated in the second + * phase (see below). 
Instead the content of the attribute is + * read one frame at a time (i.e. after a frame has been + * visited, the next frame is read), and the labels it contains + * are also extracted one frame at a time. Thanks to the + * ordering of frames, having only a "one frame lookahead" is + * not a problem, i.e. it is not possible to see an offset + * smaller than the offset of the current insn and for which no + * Label exist. + */ + /* + * This is not true for UNINITIALIZED type offsets. We solve + * this by parsing the stack map table without a full decoding + * (see below). + */ + } else if (FRAMES && "StackMap".equals(attrName)) { + if ((context.flags & SKIP_FRAMES) == 0) { + zip = false; + stackMap = u + 10; + stackMapSize = readInt(u + 4); + frameCount = readUnsignedShort(u + 8); + } + /* + * IMPORTANT! here we assume that the frames are ordered, as in + * the StackMapTable attribute, although this is not guaranteed + * by the attribute format. + */ + } else { + for (int j = 0; j < context.attrs.length; ++j) { + if (context.attrs[j].type.equals(attrName)) { + Attribute attr = context.attrs[j].read(this, u + 8, + readInt(u + 4), c, codeStart - 8, labels); + if (attr != null) { + attr.next = attributes; + attributes = attr; } } } - v = codeStart; - Label l; - while (v < codeEnd) { - w = v - codeStart; - - l = labels[w]; - if (l != null) { - mv.visitLabel(l); - if (!skipDebug && l.line > 0) { - mv.visitLineNumber(l.line, l); + } + u += 6 + readInt(u + 4); + } + u += 2; + + // generates the first (implicit) stack map frame + if (FRAMES && stackMap != 0) { + /* + * for the first explicit frame the offset is not offset_delta + 1 + * but only offset_delta; setting the implicit frame offset to -1 + * allow the use of the "offset_delta + 1" rule in all cases + */ + frame = context; + frame.offset = -1; + frame.mode = 0; + frame.localCount = 0; + frame.localDiff = 0; + frame.stackCount = 0; + frame.local = new Object[maxLocals]; + frame.stack = new Object[maxStack]; + if (unzip) { + getImplicitFrame(context); + } + /* + * Finds labels for UNINITIALIZED frame types. Instead of decoding + * each element of the stack map table, we look for 3 consecutive + * bytes that "look like" an UNINITIALIZED type (tag 8, offset + * within code bounds, NEW instruction at this offset). We may find + * false positives (i.e. not real UNINITIALIZED types), but this + * should be rare, and the only consequence will be the creation of + * an unneeded label. This is better than creating a label for each + * NEW instruction, and faster than fully decoding the whole stack + * map table. + */ + for (int i = stackMap; i < stackMap + stackMapSize - 2; ++i) { + if (b[i] == 8) { // UNINITIALIZED FRAME TYPE + int v = readUnsignedShort(i + 1); + if (v >= 0 && v < codeLength) { + if ((b[codeStart + v] & 0xFF) == Opcodes.NEW) { + readLabel(v, labels); } } + } + } + } - while (FRAMES && frameLocal != null - && (frameOffset == w || frameOffset == -1)) - { - // if there is a frame for this offset, - // makes the visitor visit it, - // and reads the next frame if there is one. 
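
Seeding the implicit frame at offset -1, as done above, makes the reconstruction rule uniform: every frame applies offset_delta + 1 bytecodes after the previous one, and only the very first explicit frame applies at offset_delta itself. A small worked sketch of that arithmetic (not part of the patch):

    class FrameOffsetSketch {
        // Rebuilds absolute frame offsets from a sequence of offset_delta values.
        static int[] frameOffsets(int[] deltas) {
            int[] offsets = new int[deltas.length];
            int previous = -1; // the implicit initial frame
            for (int i = 0; i < deltas.length; i++) {
                previous += deltas[i] + 1;
                offsets[i] = previous;
            }
            return offsets;
        }
        // frameOffsets(new int[] { 5, 0, 12 }) yields { 5, 6, 19 }.
    }
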
- if (!zip || unzip) { - mv.visitFrame(Opcodes.F_NEW, - frameLocalCount, - frameLocal, - frameStackCount, - frameStack); - } else if (frameOffset != -1) { - mv.visitFrame(frameMode, - frameLocalDiff, - frameLocal, - frameStackCount, - frameStack); - } + // visits the instructions + u = codeStart; + while (u < codeEnd) { + int offset = u - codeStart; + + // visits the label and line number for this offset, if any + Label l = labels[offset]; + if (l != null) { + mv.visitLabel(l); + if ((context.flags & SKIP_DEBUG) == 0 && l.line > 0) { + mv.visitLineNumber(l.line, l); + } + } - if (frameCount > 0) { - int tag, delta, n; - if (zip) { - tag = b[stackMap++] & 0xFF; - } else { - tag = MethodWriter.FULL_FRAME; - frameOffset = -1; - } - frameLocalDiff = 0; - if (tag < MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME) - { - delta = tag; - frameMode = Opcodes.F_SAME; - frameStackCount = 0; - } else if (tag < MethodWriter.RESERVED) { - delta = tag - - MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME; - stackMap = readFrameType(frameStack, - 0, - stackMap, - c, - labels); - frameMode = Opcodes.F_SAME1; - frameStackCount = 1; - } else { - delta = readUnsignedShort(stackMap); - stackMap += 2; - if (tag == MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED) - { - stackMap = readFrameType(frameStack, - 0, - stackMap, - c, - labels); - frameMode = Opcodes.F_SAME1; - frameStackCount = 1; - } else if (tag >= MethodWriter.CHOP_FRAME - && tag < MethodWriter.SAME_FRAME_EXTENDED) - { - frameMode = Opcodes.F_CHOP; - frameLocalDiff = MethodWriter.SAME_FRAME_EXTENDED - - tag; - frameLocalCount -= frameLocalDiff; - frameStackCount = 0; - } else if (tag == MethodWriter.SAME_FRAME_EXTENDED) - { - frameMode = Opcodes.F_SAME; - frameStackCount = 0; - } else if (tag < MethodWriter.FULL_FRAME) { - j = unzip ? frameLocalCount : 0; - for (k = tag - - MethodWriter.SAME_FRAME_EXTENDED; k > 0; k--) - { - stackMap = readFrameType(frameLocal, - j++, - stackMap, - c, - labels); - } - frameMode = Opcodes.F_APPEND; - frameLocalDiff = tag - - MethodWriter.SAME_FRAME_EXTENDED; - frameLocalCount += frameLocalDiff; - frameStackCount = 0; - } else { // if (tag == FULL_FRAME) { - frameMode = Opcodes.F_FULL; - n = frameLocalDiff = frameLocalCount = readUnsignedShort(stackMap); - stackMap += 2; - for (j = 0; n > 0; n--) { - stackMap = readFrameType(frameLocal, - j++, - stackMap, - c, - labels); - } - n = frameStackCount = readUnsignedShort(stackMap); - stackMap += 2; - for (j = 0; n > 0; n--) { - stackMap = readFrameType(frameStack, - j++, - stackMap, - c, - labels); - } - } - } - frameOffset += delta + 1; - readLabel(frameOffset, labels); - - --frameCount; - } else { - frameLocal = null; - } + // visits the frame for this offset, if any + while (FRAMES && frame != null + && (frame.offset == offset || frame.offset == -1)) { + // if there is a frame for this offset, makes the visitor visit + // it, and reads the next frame if there is one. 
+ if (frame.offset != -1) { + if (!zip || unzip) { + mv.visitFrame(Opcodes.F_NEW, frame.localCount, + frame.local, frame.stackCount, frame.stack); + } else { + mv.visitFrame(frame.mode, frame.localDiff, frame.local, + frame.stackCount, frame.stack); } + } + if (frameCount > 0) { + stackMap = readFrame(stackMap, zip, unzip, labels, frame); + --frameCount; + } else { + frame = null; + } + } - int opcode = b[v] & 0xFF; - switch (ClassWriter.TYPE[opcode]) { - case ClassWriter.NOARG_INSN: - mv.visitInsn(opcode); - v += 1; - break; - case ClassWriter.IMPLVAR_INSN: - if (opcode > Opcodes.ISTORE) { - opcode -= 59; // ISTORE_0 - mv.visitVarInsn(Opcodes.ISTORE + (opcode >> 2), - opcode & 0x3); - } else { - opcode -= 26; // ILOAD_0 - mv.visitVarInsn(Opcodes.ILOAD + (opcode >> 2), - opcode & 0x3); - } - v += 1; - break; - case ClassWriter.LABEL_INSN: - mv.visitJumpInsn(opcode, labels[w - + readShort(v + 1)]); - v += 3; - break; - case ClassWriter.LABELW_INSN: - mv.visitJumpInsn(opcode - 33, labels[w - + readInt(v + 1)]); - v += 5; - break; - case ClassWriter.WIDE_INSN: - opcode = b[v + 1] & 0xFF; - if (opcode == Opcodes.IINC) { - mv.visitIincInsn(readUnsignedShort(v + 2), - readShort(v + 4)); - v += 6; - } else { - mv.visitVarInsn(opcode, - readUnsignedShort(v + 2)); - v += 4; - } - break; - case ClassWriter.TABL_INSN: - // skips 0 to 3 padding bytes - v = v + 4 - (w & 3); - // reads instruction - label = w + readInt(v); - int min = readInt(v + 4); - int max = readInt(v + 8); - v += 12; - Label[] table = new Label[max - min + 1]; - for (j = 0; j < table.length; ++j) { - table[j] = labels[w + readInt(v)]; - v += 4; - } - mv.visitTableSwitchInsn(min, - max, - labels[label], - table); - break; - case ClassWriter.LOOK_INSN: - // skips 0 to 3 padding bytes - v = v + 4 - (w & 3); - // reads instruction - label = w + readInt(v); - j = readInt(v + 4); - v += 8; - int[] keys = new int[j]; - Label[] values = new Label[j]; - for (j = 0; j < keys.length; ++j) { - keys[j] = readInt(v); - values[j] = labels[w + readInt(v + 4)]; - v += 8; - } - mv.visitLookupSwitchInsn(labels[label], - keys, - values); - break; - case ClassWriter.VAR_INSN: - mv.visitVarInsn(opcode, b[v + 1] & 0xFF); - v += 2; - break; - case ClassWriter.SBYTE_INSN: - mv.visitIntInsn(opcode, b[v + 1]); - v += 2; - break; - case ClassWriter.SHORT_INSN: - mv.visitIntInsn(opcode, readShort(v + 1)); - v += 3; - break; - case ClassWriter.LDC_INSN: - mv.visitLdcInsn(readConst(b[v + 1] & 0xFF, c)); - v += 2; - break; - case ClassWriter.LDCW_INSN: - mv.visitLdcInsn(readConst(readUnsignedShort(v + 1), - c)); - v += 3; - break; - case ClassWriter.FIELDORMETH_INSN: - case ClassWriter.ITFMETH_INSN: { - int cpIndex = items[readUnsignedShort(v + 1)]; - String iowner = readClass(cpIndex, c); - cpIndex = items[readUnsignedShort(cpIndex + 2)]; - String iname = readUTF8(cpIndex, c); - String idesc = readUTF8(cpIndex + 2, c); - if (opcode < Opcodes.INVOKEVIRTUAL) { - mv.visitFieldInsn(opcode, iowner, iname, idesc); - } else { - mv.visitMethodInsn(opcode, iowner, iname, idesc); - } - if (opcode == Opcodes.INVOKEINTERFACE) { - v += 5; - } else { - v += 3; - } - break; - } - case ClassWriter.INDYMETH_INSN: { - int cpIndex = items[readUnsignedShort(v + 1)]; - int bsmIndex = bootstrapMethods[readUnsignedShort(cpIndex)]; - cpIndex = items[readUnsignedShort(cpIndex + 2)]; - String iname = readUTF8(cpIndex, c); - String idesc = readUTF8(cpIndex + 2, c); - - int mhIndex = readUnsignedShort(bsmIndex); - Handle bsm = (Handle) readConst(mhIndex, c); - int bsmArgCount = 
readUnsignedShort(bsmIndex + 2); - Object[] bsmArgs = new Object[bsmArgCount]; - bsmIndex += 4; - for(int a = 0; a < bsmArgCount; a++) { - int argIndex = readUnsignedShort(bsmIndex); - bsmArgs[a] = readConst(argIndex, c); - bsmIndex += 2; - } - mv.visitInvokeDynamicInsn(iname, idesc, bsm, bsmArgs); - - v += 5; - break; - } - case ClassWriter.TYPE_INSN: - mv.visitTypeInsn(opcode, readClass(v + 1, c)); - v += 3; - break; - case ClassWriter.IINC_INSN: - mv.visitIincInsn(b[v + 1] & 0xFF, b[v + 2]); - v += 3; - break; - // case MANA_INSN: - default: - mv.visitMultiANewArrayInsn(readClass(v + 1, c), - b[v + 3] & 0xFF); - v += 4; - break; - } + // visits the instruction at this offset + int opcode = b[u] & 0xFF; + switch (ClassWriter.TYPE[opcode]) { + case ClassWriter.NOARG_INSN: + mv.visitInsn(opcode); + u += 1; + break; + case ClassWriter.IMPLVAR_INSN: + if (opcode > Opcodes.ISTORE) { + opcode -= 59; // ISTORE_0 + mv.visitVarInsn(Opcodes.ISTORE + (opcode >> 2), + opcode & 0x3); + } else { + opcode -= 26; // ILOAD_0 + mv.visitVarInsn(Opcodes.ILOAD + (opcode >> 2), opcode & 0x3); } - l = labels[codeEnd - codeStart]; - if (l != null) { - mv.visitLabel(l); + u += 1; + break; + case ClassWriter.LABEL_INSN: + mv.visitJumpInsn(opcode, labels[offset + readShort(u + 1)]); + u += 3; + break; + case ClassWriter.LABELW_INSN: + mv.visitJumpInsn(opcode - 33, labels[offset + readInt(u + 1)]); + u += 5; + break; + case ClassWriter.WIDE_INSN: + opcode = b[u + 1] & 0xFF; + if (opcode == Opcodes.IINC) { + mv.visitIincInsn(readUnsignedShort(u + 2), readShort(u + 4)); + u += 6; + } else { + mv.visitVarInsn(opcode, readUnsignedShort(u + 2)); + u += 4; } - // visits the local variable tables - if (!skipDebug && varTable != 0) { - int[] typeTable = null; - if (varTypeTable != 0) { - k = readUnsignedShort(varTypeTable) * 3; - w = varTypeTable + 2; - typeTable = new int[k]; - while (k > 0) { - typeTable[--k] = w + 6; // signature - typeTable[--k] = readUnsignedShort(w + 8); // index - typeTable[--k] = readUnsignedShort(w); // start - w += 10; - } - } - k = readUnsignedShort(varTable); - w = varTable + 2; - for (; k > 0; --k) { - int start = readUnsignedShort(w); - int length = readUnsignedShort(w + 2); - int index = readUnsignedShort(w + 8); - String vsignature = null; - if (typeTable != null) { - for (int a = 0; a < typeTable.length; a += 3) { - if (typeTable[a] == start - && typeTable[a + 1] == index) - { - vsignature = readUTF8(typeTable[a + 2], c); - break; - } - } - } - mv.visitLocalVariable(readUTF8(w + 4, c), - readUTF8(w + 6, c), - vsignature, - labels[start], - labels[start + length], - index); - w += 10; - } + break; + case ClassWriter.TABL_INSN: { + // skips 0 to 3 padding bytes + u = u + 4 - (offset & 3); + // reads instruction + int label = offset + readInt(u); + int min = readInt(u + 4); + int max = readInt(u + 8); + Label[] table = new Label[max - min + 1]; + u += 12; + for (int i = 0; i < table.length; ++i) { + table[i] = labels[offset + readInt(u)]; + u += 4; + } + mv.visitTableSwitchInsn(min, max, labels[label], table); + break; + } + case ClassWriter.LOOK_INSN: { + // skips 0 to 3 padding bytes + u = u + 4 - (offset & 3); + // reads instruction + int label = offset + readInt(u); + int len = readInt(u + 4); + int[] keys = new int[len]; + Label[] values = new Label[len]; + u += 8; + for (int i = 0; i < len; ++i) { + keys[i] = readInt(u); + values[i] = labels[offset + readInt(u + 4)]; + u += 8; + } + mv.visitLookupSwitchInsn(labels[label], keys, values); + break; + } + case ClassWriter.VAR_INSN: + 
mv.visitVarInsn(opcode, b[u + 1] & 0xFF); + u += 2; + break; + case ClassWriter.SBYTE_INSN: + mv.visitIntInsn(opcode, b[u + 1]); + u += 2; + break; + case ClassWriter.SHORT_INSN: + mv.visitIntInsn(opcode, readShort(u + 1)); + u += 3; + break; + case ClassWriter.LDC_INSN: + mv.visitLdcInsn(readConst(b[u + 1] & 0xFF, c)); + u += 2; + break; + case ClassWriter.LDCW_INSN: + mv.visitLdcInsn(readConst(readUnsignedShort(u + 1), c)); + u += 3; + break; + case ClassWriter.FIELDORMETH_INSN: + case ClassWriter.ITFMETH_INSN: { + int cpIndex = items[readUnsignedShort(u + 1)]; + String iowner = readClass(cpIndex, c); + cpIndex = items[readUnsignedShort(cpIndex + 2)]; + String iname = readUTF8(cpIndex, c); + String idesc = readUTF8(cpIndex + 2, c); + if (opcode < Opcodes.INVOKEVIRTUAL) { + mv.visitFieldInsn(opcode, iowner, iname, idesc); + } else { + mv.visitMethodInsn(opcode, iowner, iname, idesc); } - // visits the other attributes - while (cattrs != null) { - attr = cattrs.next; - cattrs.next = null; - mv.visitAttribute(cattrs); - cattrs = attr; + if (opcode == Opcodes.INVOKEINTERFACE) { + u += 5; + } else { + u += 3; } - // visits the max stack and max locals values - mv.visitMaxs(maxStack, maxLocals); + break; } + case ClassWriter.INDYMETH_INSN: { + int cpIndex = items[readUnsignedShort(u + 1)]; + int bsmIndex = context.bootstrapMethods[readUnsignedShort(cpIndex)]; + Handle bsm = (Handle) readConst(readUnsignedShort(bsmIndex), c); + int bsmArgCount = readUnsignedShort(bsmIndex + 2); + Object[] bsmArgs = new Object[bsmArgCount]; + bsmIndex += 4; + for (int i = 0; i < bsmArgCount; i++) { + bsmArgs[i] = readConst(readUnsignedShort(bsmIndex), c); + bsmIndex += 2; + } + cpIndex = items[readUnsignedShort(cpIndex + 2)]; + String iname = readUTF8(cpIndex, c); + String idesc = readUTF8(cpIndex + 2, c); + mv.visitInvokeDynamicInsn(iname, idesc, bsm, bsmArgs); + u += 5; + break; + } + case ClassWriter.TYPE_INSN: + mv.visitTypeInsn(opcode, readClass(u + 1, c)); + u += 3; + break; + case ClassWriter.IINC_INSN: + mv.visitIincInsn(b[u + 1] & 0xFF, b[u + 2]); + u += 3; + break; + // case MANA_INSN: + default: + mv.visitMultiANewArrayInsn(readClass(u + 1, c), b[u + 3] & 0xFF); + u += 4; + break; + } + } + if (labels[codeLength] != null) { + mv.visitLabel(labels[codeLength]); + } - if (mv != null) { - mv.visitEnd(); + // visits the local variable tables + if ((context.flags & SKIP_DEBUG) == 0 && varTable != 0) { + int[] typeTable = null; + if (varTypeTable != 0) { + u = varTypeTable + 2; + typeTable = new int[readUnsignedShort(varTypeTable) * 3]; + for (int i = typeTable.length; i > 0;) { + typeTable[--i] = u + 6; // signature + typeTable[--i] = readUnsignedShort(u + 8); // index + typeTable[--i] = readUnsignedShort(u); // start + u += 10; + } + } + u = varTable + 2; + for (int i = readUnsignedShort(varTable); i > 0; --i) { + int start = readUnsignedShort(u); + int length = readUnsignedShort(u + 2); + int index = readUnsignedShort(u + 8); + String vsignature = null; + if (typeTable != null) { + for (int j = 0; j < typeTable.length; j += 3) { + if (typeTable[j] == start && typeTable[j + 1] == index) { + vsignature = readUTF8(typeTable[j + 2], c); + break; + } + } + } + mv.visitLocalVariable(readUTF8(u + 4, c), readUTF8(u + 6, c), + vsignature, labels[start], labels[start + length], + index); + u += 10; } } - // visits the end of the class - classVisitor.visitEnd(); + // visits the code attributes + while (attributes != null) { + Attribute attr = attributes.next; + attributes.next = null; + 
mv.visitAttribute(attributes); + attributes = attr; + } + + // visits the max stack and max locals values + mv.visitMaxs(maxStack, maxLocals); } /** * Reads parameter annotations and makes the given visitor visit them. * - * @param v start offset in {@link #b b} of the annotations to be read. - * @param desc the method descriptor. - * @param buf buffer to be used to call {@link #readUTF8 readUTF8}, - * {@link #readClass(int,char[]) readClass} or - * {@link #readConst readConst}. - * @param visible true if the annotations to be read are visible - * at runtime. - * @param mv the visitor that must visit the annotations. + * @param v + * start offset in {@link #b b} of the annotations to be read. + * @param desc + * the method descriptor. + * @param buf + * buffer to be used to call {@link #readUTF8 readUTF8}, + * {@link #readClass(int,char[]) readClass} or {@link #readConst + * readConst}. + * @param visible + * true if the annotations to be read are visible at + * runtime. + * @param mv + * the visitor that must visit the annotations. */ - private void readParameterAnnotations( - int v, - final String desc, - final char[] buf, - final boolean visible, - final MethodVisitor mv) - { + private void readParameterAnnotations(int v, final String desc, + final char[] buf, final boolean visible, final MethodVisitor mv) { int i; int n = b[v++] & 0xFF; // workaround for a bug in javac (javac compiler generates a parameter @@ -1679,21 +1458,22 @@ public class ClassReader { /** * Reads the values of an annotation and makes the given visitor visit them. * - * @param v the start offset in {@link #b b} of the values to be read - * (including the unsigned short that gives the number of values). - * @param buf buffer to be used to call {@link #readUTF8 readUTF8}, - * {@link #readClass(int,char[]) readClass} or - * {@link #readConst readConst}. - * @param named if the annotation values are named or not. - * @param av the visitor that must visit the values. + * @param v + * the start offset in {@link #b b} of the values to be read + * (including the unsigned short that gives the number of + * values). + * @param buf + * buffer to be used to call {@link #readUTF8 readUTF8}, + * {@link #readClass(int,char[]) readClass} or {@link #readConst + * readConst}. + * @param named + * if the annotation values are named or not. + * @param av + * the visitor that must visit the values. * @return the end offset of the annotation values. */ - private int readAnnotationValues( - int v, - final char[] buf, - final boolean named, - final AnnotationVisitor av) - { + private int readAnnotationValues(int v, final char[] buf, + final boolean named, final AnnotationVisitor av) { int i = readUnsignedShort(v); v += 2; if (named) { @@ -1714,210 +1494,371 @@ public class ClassReader { /** * Reads a value of an annotation and makes the given visitor visit it. * - * @param v the start offset in {@link #b b} of the value to be read (not - * including the value name constant pool index). - * @param buf buffer to be used to call {@link #readUTF8 readUTF8}, - * {@link #readClass(int,char[]) readClass} or - * {@link #readConst readConst}. - * @param name the name of the value to be read. - * @param av the visitor that must visit the value. + * @param v + * the start offset in {@link #b b} of the value to be read + * (not including the value name constant pool index). + * @param buf + * buffer to be used to call {@link #readUTF8 readUTF8}, + * {@link #readClass(int,char[]) readClass} or {@link #readConst + * readConst}. 
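
readAnnotationValue below dispatches on the element_value tag byte defined in JVMS 4.7.16.1. As a compact reference, a sketch of what each tag denotes (summary only, not part of the patch):

    class ElementValueTagSketch {
        static String kind(char tag) {
            switch (tag) {
            case 'B': case 'C': case 'D': case 'F':
            case 'I': case 'J': case 'S': case 'Z':
                return "primitive constant (const_value_index)";
            case 's': return "String constant (CONSTANT_Utf8)";
            case 'e': return "enum constant (type name + constant name)";
            case 'c': return "class literal (return descriptor)";
            case '@': return "nested annotation";
            case '[': return "array of element_values";
            default:  return "unknown tag";
            }
        }
    }
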
+ * @param name + * the name of the value to be read. + * @param av + * the visitor that must visit the value. * @return the end offset of the annotation value. */ - private int readAnnotationValue( - int v, - final char[] buf, - final String name, - final AnnotationVisitor av) - { + private int readAnnotationValue(int v, final char[] buf, final String name, + final AnnotationVisitor av) { int i; if (av == null) { switch (b[v] & 0xFF) { - case 'e': // enum_const_value - return v + 5; - case '@': // annotation_value - return readAnnotationValues(v + 3, buf, true, null); - case '[': // array_value - return readAnnotationValues(v + 1, buf, false, null); - default: - return v + 3; + case 'e': // enum_const_value + return v + 5; + case '@': // annotation_value + return readAnnotationValues(v + 3, buf, true, null); + case '[': // array_value + return readAnnotationValues(v + 1, buf, false, null); + default: + return v + 3; } } switch (b[v++] & 0xFF) { - case 'I': // pointer to CONSTANT_Integer - case 'J': // pointer to CONSTANT_Long - case 'F': // pointer to CONSTANT_Float - case 'D': // pointer to CONSTANT_Double - av.visit(name, readConst(readUnsignedShort(v), buf)); - v += 2; - break; - case 'B': // pointer to CONSTANT_Byte - av.visit(name, - new Byte((byte) readInt(items[readUnsignedShort(v)]))); - v += 2; - break; - case 'Z': // pointer to CONSTANT_Boolean - av.visit(name, readInt(items[readUnsignedShort(v)]) == 0 - ? Boolean.FALSE - : Boolean.TRUE); - v += 2; - break; - case 'S': // pointer to CONSTANT_Short - av.visit(name, - new Short((short) readInt(items[readUnsignedShort(v)]))); - v += 2; + case 'I': // pointer to CONSTANT_Integer + case 'J': // pointer to CONSTANT_Long + case 'F': // pointer to CONSTANT_Float + case 'D': // pointer to CONSTANT_Double + av.visit(name, readConst(readUnsignedShort(v), buf)); + v += 2; + break; + case 'B': // pointer to CONSTANT_Byte + av.visit(name, + new Byte((byte) readInt(items[readUnsignedShort(v)]))); + v += 2; + break; + case 'Z': // pointer to CONSTANT_Boolean + av.visit(name, + readInt(items[readUnsignedShort(v)]) == 0 ? 
Boolean.FALSE + : Boolean.TRUE); + v += 2; + break; + case 'S': // pointer to CONSTANT_Short + av.visit(name, new Short( + (short) readInt(items[readUnsignedShort(v)]))); + v += 2; + break; + case 'C': // pointer to CONSTANT_Char + av.visit(name, new Character( + (char) readInt(items[readUnsignedShort(v)]))); + v += 2; + break; + case 's': // pointer to CONSTANT_Utf8 + av.visit(name, readUTF8(v, buf)); + v += 2; + break; + case 'e': // enum_const_value + av.visitEnum(name, readUTF8(v, buf), readUTF8(v + 2, buf)); + v += 4; + break; + case 'c': // class_info + av.visit(name, Type.getType(readUTF8(v, buf))); + v += 2; + break; + case '@': // annotation_value + v = readAnnotationValues(v + 2, buf, true, + av.visitAnnotation(name, readUTF8(v, buf))); + break; + case '[': // array_value + int size = readUnsignedShort(v); + v += 2; + if (size == 0) { + return readAnnotationValues(v - 2, buf, false, + av.visitArray(name)); + } + switch (this.b[v++] & 0xFF) { + case 'B': + byte[] bv = new byte[size]; + for (i = 0; i < size; i++) { + bv[i] = (byte) readInt(items[readUnsignedShort(v)]); + v += 3; + } + av.visit(name, bv); + --v; break; - case 'C': // pointer to CONSTANT_Char - av.visit(name, - new Character((char) readInt(items[readUnsignedShort(v)]))); - v += 2; + case 'Z': + boolean[] zv = new boolean[size]; + for (i = 0; i < size; i++) { + zv[i] = readInt(items[readUnsignedShort(v)]) != 0; + v += 3; + } + av.visit(name, zv); + --v; break; - case 's': // pointer to CONSTANT_Utf8 - av.visit(name, readUTF8(v, buf)); - v += 2; + case 'S': + short[] sv = new short[size]; + for (i = 0; i < size; i++) { + sv[i] = (short) readInt(items[readUnsignedShort(v)]); + v += 3; + } + av.visit(name, sv); + --v; break; - case 'e': // enum_const_value - av.visitEnum(name, readUTF8(v, buf), readUTF8(v + 2, buf)); - v += 4; + case 'C': + char[] cv = new char[size]; + for (i = 0; i < size; i++) { + cv[i] = (char) readInt(items[readUnsignedShort(v)]); + v += 3; + } + av.visit(name, cv); + --v; break; - case 'c': // class_info - av.visit(name, Type.getType(readUTF8(v, buf))); - v += 2; + case 'I': + int[] iv = new int[size]; + for (i = 0; i < size; i++) { + iv[i] = readInt(items[readUnsignedShort(v)]); + v += 3; + } + av.visit(name, iv); + --v; break; - case '@': // annotation_value - v = readAnnotationValues(v + 2, - buf, - true, - av.visitAnnotation(name, readUTF8(v, buf))); + case 'J': + long[] lv = new long[size]; + for (i = 0; i < size; i++) { + lv[i] = readLong(items[readUnsignedShort(v)]); + v += 3; + } + av.visit(name, lv); + --v; break; - case '[': // array_value - int size = readUnsignedShort(v); - v += 2; - if (size == 0) { - return readAnnotationValues(v - 2, - buf, - false, - av.visitArray(name)); + case 'F': + float[] fv = new float[size]; + for (i = 0; i < size; i++) { + fv[i] = Float + .intBitsToFloat(readInt(items[readUnsignedShort(v)])); + v += 3; } - switch (this.b[v++] & 0xFF) { - case 'B': - byte[] bv = new byte[size]; - for (i = 0; i < size; i++) { - bv[i] = (byte) readInt(items[readUnsignedShort(v)]); - v += 3; - } - av.visit(name, bv); - --v; - break; - case 'Z': - boolean[] zv = new boolean[size]; - for (i = 0; i < size; i++) { - zv[i] = readInt(items[readUnsignedShort(v)]) != 0; - v += 3; - } - av.visit(name, zv); - --v; - break; - case 'S': - short[] sv = new short[size]; - for (i = 0; i < size; i++) { - sv[i] = (short) readInt(items[readUnsignedShort(v)]); - v += 3; - } - av.visit(name, sv); - --v; - break; - case 'C': - char[] cv = new char[size]; - for (i = 0; i < size; i++) { - cv[i] = 
(char) readInt(items[readUnsignedShort(v)]); - v += 3; - } - av.visit(name, cv); - --v; - break; - case 'I': - int[] iv = new int[size]; - for (i = 0; i < size; i++) { - iv[i] = readInt(items[readUnsignedShort(v)]); - v += 3; - } - av.visit(name, iv); - --v; - break; - case 'J': - long[] lv = new long[size]; - for (i = 0; i < size; i++) { - lv[i] = readLong(items[readUnsignedShort(v)]); - v += 3; - } - av.visit(name, lv); - --v; - break; - case 'F': - float[] fv = new float[size]; - for (i = 0; i < size; i++) { - fv[i] = Float.intBitsToFloat(readInt(items[readUnsignedShort(v)])); - v += 3; - } - av.visit(name, fv); - --v; - break; - case 'D': - double[] dv = new double[size]; - for (i = 0; i < size; i++) { - dv[i] = Double.longBitsToDouble(readLong(items[readUnsignedShort(v)])); - v += 3; - } - av.visit(name, dv); - --v; - break; - default: - v = readAnnotationValues(v - 3, - buf, - false, - av.visitArray(name)); + av.visit(name, fv); + --v; + break; + case 'D': + double[] dv = new double[size]; + for (i = 0; i < size; i++) { + dv[i] = Double + .longBitsToDouble(readLong(items[readUnsignedShort(v)])); + v += 3; } + av.visit(name, dv); + --v; + break; + default: + v = readAnnotationValues(v - 3, buf, false, av.visitArray(name)); + } } return v; } - private int readFrameType( - final Object[] frame, - final int index, - int v, - final char[] buf, - final Label[] labels) - { - int type = b[v++] & 0xFF; - switch (type) { - case 0: - frame[index] = Opcodes.TOP; - break; - case 1: - frame[index] = Opcodes.INTEGER; - break; - case 2: - frame[index] = Opcodes.FLOAT; + /** + * Computes the implicit frame of the method currently being parsed (as + * defined in the given {@link Context}) and stores it in the given context. + * + * @param frame + * information about the class being parsed. + */ + private void getImplicitFrame(final Context frame) { + String desc = frame.desc; + Object[] locals = frame.local; + int local = 0; + if ((frame.access & Opcodes.ACC_STATIC) == 0) { + if ("".equals(frame.name)) { + locals[local++] = Opcodes.UNINITIALIZED_THIS; + } else { + locals[local++] = readClass(header + 2, frame.buffer); + } + } + int i = 1; + loop: while (true) { + int j = i; + switch (desc.charAt(i++)) { + case 'Z': + case 'C': + case 'B': + case 'S': + case 'I': + locals[local++] = Opcodes.INTEGER; break; - case 3: - frame[index] = Opcodes.DOUBLE; + case 'F': + locals[local++] = Opcodes.FLOAT; break; - case 4: - frame[index] = Opcodes.LONG; + case 'J': + locals[local++] = Opcodes.LONG; break; - case 5: - frame[index] = Opcodes.NULL; + case 'D': + locals[local++] = Opcodes.DOUBLE; break; - case 6: - frame[index] = Opcodes.UNINITIALIZED_THIS; + case '[': + while (desc.charAt(i) == '[') { + ++i; + } + if (desc.charAt(i) == 'L') { + ++i; + while (desc.charAt(i) != ';') { + ++i; + } + } + locals[local++] = desc.substring(j, ++i); break; - case 7: // Object - frame[index] = readClass(v, buf); - v += 2; + case 'L': + while (desc.charAt(i) != ';') { + ++i; + } + locals[local++] = desc.substring(j + 1, i++); break; - default: // Uninitialized - frame[index] = readLabel(readUnsignedShort(v), labels); - v += 2; + default: + break loop; + } + } + frame.localCount = local; + } + + /** + * Reads a stack map frame and stores the result in the given + * {@link Context} object. + * + * @param stackMap + * the start offset of a stack map frame in the class file. + * @param zip + * if the stack map frame at stackMap is compressed or not. + * @param unzip + * if the stack map frame must be uncompressed. 
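
getImplicitFrame above derives the initial frame locals directly from the method descriptor: primitives collapse to the INTEGER/FLOAT/LONG/DOUBLE verification types, object parameters keep their internal names, arrays keep their full descriptors, and an instance method starts with the receiver (or UNINITIALIZED_THIS in a constructor). A self-contained sketch of the same mapping, with string placeholders instead of the Opcodes constants (names are illustrative, not part of the patch):

    import java.util.ArrayList;
    import java.util.List;

    class ImplicitFrameSketch {
        static List<Object> locals(String owner, String methodName, String desc, boolean isStatic) {
            List<Object> locals = new ArrayList<Object>();
            if (!isStatic) {
                locals.add("<init>".equals(methodName) ? "UNINITIALIZED_THIS" : owner);
            }
            int i = 1; // skip '('
            while (desc.charAt(i) != ')') {
                int start = i;
                switch (desc.charAt(i++)) {
                case 'Z': case 'C': case 'B': case 'S': case 'I':
                    locals.add("INTEGER"); break;
                case 'F': locals.add("FLOAT"); break;
                case 'J': locals.add("LONG"); break;
                case 'D': locals.add("DOUBLE"); break;
                case '[':
                    while (desc.charAt(i) == '[') { ++i; }
                    if (desc.charAt(i) == 'L') { while (desc.charAt(i) != ';') { ++i; } }
                    locals.add(desc.substring(start, ++i)); // full array descriptor
                    break;
                default: // 'L...;'
                    while (desc.charAt(i) != ';') { ++i; }
                    locals.add(desc.substring(start + 1, i++)); // internal class name
                    break;
                }
            }
            return locals;
        }

        public static void main(String[] args) {
            // e.g. an instance method  void m(int, long, String, double[])  on class Foo:
            System.out.println(locals("Foo", "m", "(IJLjava/lang/String;[D)V", false));
            // prints [Foo, INTEGER, LONG, java/lang/String, [D]
        }
    }
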
+ * @param labels + * the labels of the method currently being parsed, indexed by + * their offset. A new label for the parsed stack map frame is + * stored in this array if it does not already exist. + * @param frame + * where the parsed stack map frame must be stored. + * @return the offset of the first byte following the parsed frame. + */ + private int readFrame(int stackMap, boolean zip, boolean unzip, + Label[] labels, Context frame) { + char[] c = frame.buffer; + int tag; + int delta; + if (zip) { + tag = b[stackMap++] & 0xFF; + } else { + tag = MethodWriter.FULL_FRAME; + frame.offset = -1; + } + frame.localDiff = 0; + if (tag < MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME) { + delta = tag; + frame.mode = Opcodes.F_SAME; + frame.stackCount = 0; + } else if (tag < MethodWriter.RESERVED) { + delta = tag - MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME; + stackMap = readFrameType(frame.stack, 0, stackMap, c, labels); + frame.mode = Opcodes.F_SAME1; + frame.stackCount = 1; + } else { + delta = readUnsignedShort(stackMap); + stackMap += 2; + if (tag == MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED) { + stackMap = readFrameType(frame.stack, 0, stackMap, c, labels); + frame.mode = Opcodes.F_SAME1; + frame.stackCount = 1; + } else if (tag >= MethodWriter.CHOP_FRAME + && tag < MethodWriter.SAME_FRAME_EXTENDED) { + frame.mode = Opcodes.F_CHOP; + frame.localDiff = MethodWriter.SAME_FRAME_EXTENDED - tag; + frame.localCount -= frame.localDiff; + frame.stackCount = 0; + } else if (tag == MethodWriter.SAME_FRAME_EXTENDED) { + frame.mode = Opcodes.F_SAME; + frame.stackCount = 0; + } else if (tag < MethodWriter.FULL_FRAME) { + int local = unzip ? frame.localCount : 0; + for (int i = tag - MethodWriter.SAME_FRAME_EXTENDED; i > 0; i--) { + stackMap = readFrameType(frame.local, local++, stackMap, c, + labels); + } + frame.mode = Opcodes.F_APPEND; + frame.localDiff = tag - MethodWriter.SAME_FRAME_EXTENDED; + frame.localCount += frame.localDiff; + frame.stackCount = 0; + } else { // if (tag == FULL_FRAME) { + frame.mode = Opcodes.F_FULL; + int n = readUnsignedShort(stackMap); + stackMap += 2; + frame.localDiff = n; + frame.localCount = n; + for (int local = 0; n > 0; n--) { + stackMap = readFrameType(frame.local, local++, stackMap, c, + labels); + } + n = readUnsignedShort(stackMap); + stackMap += 2; + frame.stackCount = n; + for (int stack = 0; n > 0; n--) { + stackMap = readFrameType(frame.stack, stack++, stackMap, c, + labels); + } + } + } + frame.offset += delta + 1; + readLabel(frame.offset, labels); + return stackMap; + } + + /** + * Reads a stack map frame type and stores it at the given index in the + * given array. + * + * @param frame + * the array where the parsed type must be stored. + * @param index + * the index in 'frame' where the parsed type must be stored. + * @param v + * the start offset of the stack map frame type to read. + * @param buf + * a buffer to read strings. + * @param labels + * the labels of the method currently being parsed, indexed by + * their offset. If the parsed type is an Uninitialized type, a + * new label for the corresponding NEW instruction is stored in + * this array if it does not already exist. + * @return the offset of the first byte after the parsed type. 
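
The tag ranges decoded by readFrame above are the StackMapTable frame kinds of JVMS 4.7.4; the MethodWriter constants it compares against (SAME_LOCALS_1_STACK_ITEM_FRAME, RESERVED, CHOP_FRAME, ...) mirror the boundaries of those ranges. A compact reference sketch (not part of the patch):

    class FrameTagSketch {
        static String kind(int tag) {
            if (tag <= 63)  return "SAME (offset_delta = tag)";
            if (tag <= 127) return "SAME_LOCALS_1_STACK_ITEM (offset_delta = tag - 64)";
            if (tag <= 246) return "reserved";
            if (tag == 247) return "SAME_LOCALS_1_STACK_ITEM_EXTENDED";
            if (tag <= 250) return "CHOP (drops 251 - tag locals)";
            if (tag == 251) return "SAME_EXTENDED";
            if (tag <= 254) return "APPEND (adds tag - 251 locals)";
            return "FULL_FRAME"; // tag == 255
        }
    }
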
+ */ + private int readFrameType(final Object[] frame, final int index, int v, + final char[] buf, final Label[] labels) { + int type = b[v++] & 0xFF; + switch (type) { + case 0: + frame[index] = Opcodes.TOP; + break; + case 1: + frame[index] = Opcodes.INTEGER; + break; + case 2: + frame[index] = Opcodes.FLOAT; + break; + case 3: + frame[index] = Opcodes.DOUBLE; + break; + case 4: + frame[index] = Opcodes.LONG; + break; + case 5: + frame[index] = Opcodes.NULL; + break; + case 6: + frame[index] = Opcodes.UNINITIALIZED_THIS; + break; + case 7: // Object + frame[index] = readClass(v, buf); + v += 2; + break; + default: // Uninitialized + frame[index] = readLabel(readUnsignedShort(v), labels); + v += 2; } return v; } @@ -1927,10 +1868,12 @@ public class ClassReader { * implementation of this method creates a label for the given offset if it * has not been already created. * - * @param offset a bytecode offset in a method. - * @param labels the already created labels, indexed by their offset. If a - * label already exists for offset this method must not create a new - * one. Otherwise it must store the new label in this array. + * @param offset + * a bytecode offset in a method. + * @param labels + * the already created labels, indexed by their offset. If a + * label already exists for offset this method must not create a + * new one. Otherwise it must store the new label in this array. * @return a non null Label, which must be equal to labels[offset]. */ protected Label readLabel(int offset, Label[] labels) { @@ -1940,40 +1883,68 @@ public class ClassReader { return labels[offset]; } + /** + * Returns the start index of the attribute_info structure of this class. + * + * @return the start index of the attribute_info structure of this class. + */ + private int getAttributes() { + // skips the header + int u = header + 8 + readUnsignedShort(header + 6) * 2; + // skips fields and methods + for (int i = readUnsignedShort(u); i > 0; --i) { + for (int j = readUnsignedShort(u + 8); j > 0; --j) { + u += 6 + readInt(u + 12); + } + u += 8; + } + u += 2; + for (int i = readUnsignedShort(u); i > 0; --i) { + for (int j = readUnsignedShort(u + 8); j > 0; --j) { + u += 6 + readInt(u + 12); + } + u += 8; + } + // the attribute_info structure starts just after the methods + return u + 2; + } + /** * Reads an attribute in {@link #b b}. * - * @param attrs prototypes of the attributes that must be parsed during the - * visit of the class. Any attribute whose type is not equal to the - * type of one the prototypes is ignored (i.e. an empty - * {@link Attribute} instance is returned). - * @param type the type of the attribute. - * @param off index of the first byte of the attribute's content in - * {@link #b b}. The 6 attribute header bytes, containing the type - * and the length of the attribute, are not taken into account here - * (they have already been read). - * @param len the length of the attribute's content. - * @param buf buffer to be used to call {@link #readUTF8 readUTF8}, - * {@link #readClass(int,char[]) readClass} or - * {@link #readConst readConst}. - * @param codeOff index of the first byte of code's attribute content in - * {@link #b b}, or -1 if the attribute to be read is not a code - * attribute. The 6 attribute header bytes, containing the type and - * the length of the attribute, are not taken into account here. - * @param labels the labels of the method's code, or null if the - * attribute to be read is not a code attribute. 
+ * @param attrs + * prototypes of the attributes that must be parsed during the + * visit of the class. Any attribute whose type is not equal to + * the type of one the prototypes is ignored (i.e. an empty + * {@link Attribute} instance is returned). + * @param type + * the type of the attribute. + * @param off + * index of the first byte of the attribute's content in + * {@link #b b}. The 6 attribute header bytes, containing the + * type and the length of the attribute, are not taken into + * account here (they have already been read). + * @param len + * the length of the attribute's content. + * @param buf + * buffer to be used to call {@link #readUTF8 readUTF8}, + * {@link #readClass(int,char[]) readClass} or {@link #readConst + * readConst}. + * @param codeOff + * index of the first byte of code's attribute content in + * {@link #b b}, or -1 if the attribute to be read is not a code + * attribute. The 6 attribute header bytes, containing the type + * and the length of the attribute, are not taken into account + * here. + * @param labels + * the labels of the method's code, or null if the + * attribute to be read is not a code attribute. * @return the attribute that has been read, or null to skip this * attribute. */ - private Attribute readAttribute( - final Attribute[] attrs, - final String type, - final int off, - final int len, - final char[] buf, - final int codeOff, - final Label[] labels) - { + private Attribute readAttribute(final Attribute[] attrs, final String type, + final int off, final int len, final char[] buf, final int codeOff, + final Label[] labels) { for (int i = 0; i < attrs.length; ++i) { if (attrs[i].type.equals(type)) { return attrs[i].read(this, off, len, buf, codeOff, labels); @@ -1987,9 +1958,9 @@ public class ClassReader { // ------------------------------------------------------------------------ /** - * Returns the number of constant pool items in {@link #b b}. + * Returns the number of constant pool items in {@link #b b}. * - * @return the number of constant pool items in {@link #b b}. + * @return the number of constant pool items in {@link #b b}. */ public int getItemCount() { return items.length; @@ -2000,7 +1971,8 @@ public class ClassReader { * one. This method is intended for {@link Attribute} sub classes, and is * normally not needed by class generators or adapters. * - * @param item the index a constant pool item. + * @param item + * the index a constant pool item. * @return the start index of the constant pool item in {@link #b b}, plus * one. */ @@ -2024,7 +1996,8 @@ public class ClassReader { * {@link Attribute} sub classes, and is normally not needed by class * generators or adapters. * - * @param index the start index of the value to be read in {@link #b b}. + * @param index + * the start index of the value to be read in {@link #b b}. * @return the read value. */ public int readByte(final int index) { @@ -2032,11 +2005,12 @@ public class ClassReader { } /** - * Reads an unsigned short value in {@link #b b}. This method is - * intended for {@link Attribute} sub classes, and is normally not needed by - * class generators or adapters. + * Reads an unsigned short value in {@link #b b}. This method is intended + * for {@link Attribute} sub classes, and is normally not needed by class + * generators or adapters. * - * @param index the start index of the value to be read in {@link #b b}. + * @param index + * the start index of the value to be read in {@link #b b}. * @return the read value. 
*/ public int readUnsignedShort(final int index) { @@ -2049,7 +2023,8 @@ public class ClassReader { * for {@link Attribute} sub classes, and is normally not needed by class * generators or adapters. * - * @param index the start index of the value to be read in {@link #b b}. + * @param index + * the start index of the value to be read in {@link #b b}. * @return the read value. */ public short readShort(final int index) { @@ -2062,7 +2037,8 @@ public class ClassReader { * {@link Attribute} sub classes, and is normally not needed by class * generators or adapters. * - * @param index the start index of the value to be read in {@link #b b}. + * @param index + * the start index of the value to be read in {@link #b b}. * @return the read value. */ public int readInt(final int index) { @@ -2072,11 +2048,12 @@ public class ClassReader { } /** - * Reads a signed long value in {@link #b b}. This method is intended - * for {@link Attribute} sub classes, and is normally not needed by class + * Reads a signed long value in {@link #b b}. This method is intended for + * {@link Attribute} sub classes, and is normally not needed by class * generators or adapters. * - * @param index the start index of the value to be read in {@link #b b}. + * @param index + * the start index of the value to be read in {@link #b b}. * @return the read value. */ public long readLong(final int index) { @@ -2090,14 +2067,19 @@ public class ClassReader { * is intended for {@link Attribute} sub classes, and is normally not needed * by class generators or adapters. * - * @param index the start index of an unsigned short value in {@link #b b}, - * whose value is the index of an UTF8 constant pool item. - * @param buf buffer to be used to read the item. This buffer must be - * sufficiently large. It is not automatically resized. + * @param index + * the start index of an unsigned short value in {@link #b b}, + * whose value is the index of an UTF8 constant pool item. + * @param buf + * buffer to be used to read the item. This buffer must be + * sufficiently large. It is not automatically resized. * @return the String corresponding to the specified UTF8 item. */ public String readUTF8(int index, final char[] buf) { int item = readUnsignedShort(index); + if (index == 0 || item == 0) { + return null; + } String s = strings[item]; if (s != null) { return s; @@ -2109,10 +2091,13 @@ public class ClassReader { /** * Reads UTF8 string in {@link #b b}. * - * @param index start offset of the UTF8 string to be read. - * @param utfLen length of the UTF8 string to be read. - * @param buf buffer to be used to read the string. This buffer must be - * sufficiently large. It is not automatically resized. + * @param index + * start offset of the UTF8 string to be read. + * @param utfLen + * length of the UTF8 string to be read. + * @param buf + * buffer to be used to read the string. This buffer must be + * sufficiently large. It is not automatically resized. * @return the String corresponding to the specified UTF8 string. 
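// Illustrative sketch, not part of this patch: the guard added above makes
// readUTF8 return null when the constant pool index it reads is zero, which is
// how absent optional names (for instance the inner-class name of an anonymous
// class) are encoded, so callers can test the result instead of the raw index.
import scala.tools.asm.ClassReader;

class OptionalUtf8 {
    static String readOptionalUtf8(ClassReader cr, int index) {
        char[] buf = new char[cr.getMaxStringLength()];
        return cr.readUTF8(index, buf); // null when the u2 at 'index' is zero
    }
}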
*/ private String readUTF(int index, final int utfLen, final char[] buf) { @@ -2125,28 +2110,28 @@ public class ClassReader { while (index < endIndex) { c = b[index++]; switch (st) { - case 0: - c = c & 0xFF; - if (c < 0x80) { // 0xxxxxxx - buf[strLen++] = (char) c; - } else if (c < 0xE0 && c > 0xBF) { // 110x xxxx 10xx xxxx - cc = (char) (c & 0x1F); - st = 1; - } else { // 1110 xxxx 10xx xxxx 10xx xxxx - cc = (char) (c & 0x0F); - st = 2; - } - break; + case 0: + c = c & 0xFF; + if (c < 0x80) { // 0xxxxxxx + buf[strLen++] = (char) c; + } else if (c < 0xE0 && c > 0xBF) { // 110x xxxx 10xx xxxx + cc = (char) (c & 0x1F); + st = 1; + } else { // 1110 xxxx 10xx xxxx 10xx xxxx + cc = (char) (c & 0x0F); + st = 2; + } + break; - case 1: // byte 2 of 2-byte char or byte 3 of 3-byte char - buf[strLen++] = (char) ((cc << 6) | (c & 0x3F)); - st = 0; - break; + case 1: // byte 2 of 2-byte char or byte 3 of 3-byte char + buf[strLen++] = (char) ((cc << 6) | (c & 0x3F)); + st = 0; + break; - case 2: // byte 2 of 3-byte char - cc = (char) ((cc << 6) | (c & 0x3F)); - st = 1; - break; + case 2: // byte 2 of 3-byte char + cc = (char) ((cc << 6) | (c & 0x3F)); + st = 1; + break; } } return new String(buf, 0, strLen); @@ -2157,10 +2142,12 @@ public class ClassReader { * intended for {@link Attribute} sub classes, and is normally not needed by * class generators or adapters. * - * @param index the start index of an unsigned short value in {@link #b b}, - * whose value is the index of a class constant pool item. - * @param buf buffer to be used to read the item. This buffer must be - * sufficiently large. It is not automatically resized. + * @param index + * the start index of an unsigned short value in {@link #b b}, + * whose value is the index of a class constant pool item. + * @param buf + * buffer to be used to read the item. This buffer must be + * sufficiently large. It is not automatically resized. * @return the String corresponding to the specified class item. */ public String readClass(final int index, final char[] buf) { @@ -2175,9 +2162,11 @@ public class ClassReader { * method is intended for {@link Attribute} sub classes, and is normally not * needed by class generators or adapters. * - * @param item the index of a constant pool item. - * @param buf buffer to be used to read the item. This buffer must be - * sufficiently large. It is not automatically resized. + * @param item + * the index of a constant pool item. + * @param buf + * buffer to be used to read the item. This buffer must be + * sufficiently large. It is not automatically resized. * @return the {@link Integer}, {@link Float}, {@link Long}, {@link Double}, * {@link String}, {@link Type} or {@link Handle} corresponding to * the given constant pool item. 
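// Illustrative sketch, not part of this patch: readConst returns a plain
// Object, so callers dispatch on the runtime type, mirroring the list in the
// javadoc above. The helper name is made up for the example.
import scala.tools.asm.ClassReader;
import scala.tools.asm.Handle;
import scala.tools.asm.Type;

class ConstantPrinter {
    static String describe(ClassReader cr, int item, char[] buf) {
        Object cst = cr.readConst(item, buf);
        if (cst instanceof Integer || cst instanceof Float
                || cst instanceof Long || cst instanceof Double) {
            return "numeric constant " + cst;
        } else if (cst instanceof String) {
            return "string constant " + cst;
        } else if (cst instanceof Type) {
            return "class or method type " + ((Type) cst).getDescriptor();
        } else {
            return "method handle " + (Handle) cst; // CONSTANT_MethodHandle item
        }
    }
}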
@@ -2185,32 +2174,29 @@ public class ClassReader { public Object readConst(final int item, final char[] buf) { int index = items[item]; switch (b[index - 1]) { - case ClassWriter.INT: - return new Integer(readInt(index)); - case ClassWriter.FLOAT: - return new Float(Float.intBitsToFloat(readInt(index))); - case ClassWriter.LONG: - return new Long(readLong(index)); - case ClassWriter.DOUBLE: - return new Double(Double.longBitsToDouble(readLong(index))); - case ClassWriter.CLASS: - return Type.getObjectType(readUTF8(index, buf)); - case ClassWriter.STR: - return readUTF8(index, buf); - case ClassWriter.MTYPE: - return Type.getMethodType(readUTF8(index, buf)); - - //case ClassWriter.HANDLE_BASE + [1..9]: - default: { - int tag = readByte(index); - int[] items = this.items; - int cpIndex = items[readUnsignedShort(index + 1)]; - String owner = readClass(cpIndex, buf); - cpIndex = items[readUnsignedShort(cpIndex + 2)]; - String name = readUTF8(cpIndex, buf); - String desc = readUTF8(cpIndex + 2, buf); - return new Handle(tag, owner, name, desc); - } + case ClassWriter.INT: + return new Integer(readInt(index)); + case ClassWriter.FLOAT: + return new Float(Float.intBitsToFloat(readInt(index))); + case ClassWriter.LONG: + return new Long(readLong(index)); + case ClassWriter.DOUBLE: + return new Double(Double.longBitsToDouble(readLong(index))); + case ClassWriter.CLASS: + return Type.getObjectType(readUTF8(index, buf)); + case ClassWriter.STR: + return readUTF8(index, buf); + case ClassWriter.MTYPE: + return Type.getMethodType(readUTF8(index, buf)); + default: // case ClassWriter.HANDLE_BASE + [1..9]: + int tag = readByte(index); + int[] items = this.items; + int cpIndex = items[readUnsignedShort(index + 1)]; + String owner = readClass(cpIndex, buf); + cpIndex = items[readUnsignedShort(cpIndex + 2)]; + String name = readUTF8(cpIndex, buf); + String desc = readUTF8(cpIndex + 2, buf); + return new Handle(tag, owner, name, desc); } } } diff --git a/src/asm/scala/tools/asm/ClassVisitor.java b/src/asm/scala/tools/asm/ClassVisitor.java index ae38ae0ab9..3fc364d5e5 100644 --- a/src/asm/scala/tools/asm/ClassVisitor.java +++ b/src/asm/scala/tools/asm/ClassVisitor.java @@ -30,11 +30,11 @@ package scala.tools.asm; /** - * A visitor to visit a Java class. The methods of this class must be called - * in the following order: visit [ visitSource ] [ + * A visitor to visit a Java class. The methods of this class must be called in + * the following order: visit [ visitSource ] [ * visitOuterClass ] ( visitAnnotation | - * visitAttribute )* ( visitInnerClass | - * visitField | visitMethod )* visitEnd. + * visitAttribute )* ( visitInnerClass | visitField | + * visitMethod )* visitEnd. * * @author Eric Bruneton */ @@ -55,8 +55,9 @@ public abstract class ClassVisitor { /** * Constructs a new {@link ClassVisitor}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. */ public ClassVisitor(final int api) { this(api, null); @@ -65,15 +66,17 @@ public abstract class ClassVisitor { /** * Constructs a new {@link ClassVisitor}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. - * @param cv the class visitor to which this visitor must delegate method - * calls. May be null. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. 
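// Illustrative sketch, not part of this patch: the calling-order contract in
// the class comment above is what a from-scratch generator follows when it
// drives a ClassWriter (itself a ClassVisitor) directly. The class name
// "org/example/Empty" is made up.
import scala.tools.asm.ClassWriter;
import scala.tools.asm.Opcodes;

class GenerateEmptyClass {
    static byte[] generate() {
        ClassWriter cw = new ClassWriter(0);
        cw.visit(Opcodes.V1_5, Opcodes.ACC_PUBLIC, "org/example/Empty",
                null, "java/lang/Object", null);   // visit always comes first
        cw.visitSource("Empty.java", null);        // optional, directly after visit
        cw.visitEnd();                             // visitEnd closes the class
        return cw.toByteArray();
    }
}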
+ * @param cv + * the class visitor to which this visitor must delegate method + * calls. May be null. */ public ClassVisitor(final int api, final ClassVisitor cv) { - /*if (api != Opcodes.ASM4) { + if (api != Opcodes.ASM4) { throw new IllegalArgumentException(); - }*/ + } this.api = api; this.cv = cv; } @@ -81,30 +84,30 @@ public abstract class ClassVisitor { /** * Visits the header of the class. * - * @param version the class version. - * @param access the class's access flags (see {@link Opcodes}). This - * parameter also indicates if the class is deprecated. - * @param name the internal name of the class (see - * {@link Type#getInternalName() getInternalName}). - * @param signature the signature of this class. May be null if - * the class is not a generic one, and does not extend or implement - * generic classes or interfaces. - * @param superName the internal of name of the super class (see - * {@link Type#getInternalName() getInternalName}). For interfaces, - * the super class is {@link Object}. May be null, but - * only for the {@link Object} class. - * @param interfaces the internal names of the class's interfaces (see - * {@link Type#getInternalName() getInternalName}). May be - * null. + * @param version + * the class version. + * @param access + * the class's access flags (see {@link Opcodes}). This parameter + * also indicates if the class is deprecated. + * @param name + * the internal name of the class (see + * {@link Type#getInternalName() getInternalName}). + * @param signature + * the signature of this class. May be null if the class + * is not a generic one, and does not extend or implement generic + * classes or interfaces. + * @param superName + * the internal of name of the super class (see + * {@link Type#getInternalName() getInternalName}). For + * interfaces, the super class is {@link Object}. May be + * null, but only for the {@link Object} class. + * @param interfaces + * the internal names of the class's interfaces (see + * {@link Type#getInternalName() getInternalName}). May be + * null. */ - public void visit( - int version, - int access, - String name, - String signature, - String superName, - String[] interfaces) - { + public void visit(int version, int access, String name, String signature, + String superName, String[] interfaces) { if (cv != null) { cv.visit(version, access, name, signature, superName, interfaces); } @@ -113,11 +116,13 @@ public abstract class ClassVisitor { /** * Visits the source of the class. * - * @param source the name of the source file from which the class was - * compiled. May be null. - * @param debug additional debug information to compute the correspondance - * between source and compiled elements of the class. May be - * null. + * @param source + * the name of the source file from which the class was compiled. + * May be null. + * @param debug + * additional debug information to compute the correspondance + * between source and compiled elements of the class. May be + * null. */ public void visitSource(String source, String debug) { if (cv != null) { @@ -129,16 +134,19 @@ public abstract class ClassVisitor { * Visits the enclosing class of the class. This method must be called only * if the class has an enclosing class. * - * @param owner internal name of the enclosing class of the class. - * @param name the name of the method that contains the class, or - * null if the class is not enclosed in a method of its - * enclosing class. 
- * @param desc the descriptor of the method that contains the class, or - * null if the class is not enclosed in a method of its - * enclosing class. + * @param owner + * internal name of the enclosing class of the class. + * @param name + * the name of the method that contains the class, or + * null if the class is not enclosed in a method of its + * enclosing class. + * @param desc + * the descriptor of the method that contains the class, or + * null if the class is not enclosed in a method of its + * enclosing class. */ public void visitOuterClass(String owner, String name, String desc) { - if (cv != null) { + if (cv != null) { cv.visitOuterClass(owner, name, desc); } } @@ -146,8 +154,10 @@ public abstract class ClassVisitor { /** * Visits an annotation of the class. * - * @param desc the class descriptor of the annotation class. - * @param visible true if the annotation is visible at runtime. + * @param desc + * the class descriptor of the annotation class. + * @param visible + * true if the annotation is visible at runtime. * @return a visitor to visit the annotation values, or null if * this visitor is not interested in visiting this annotation. */ @@ -161,7 +171,8 @@ public abstract class ClassVisitor { /** * Visits a non standard attribute of the class. * - * @param attr an attribute. + * @param attr + * an attribute. */ public void visitAttribute(Attribute attr) { if (cv != null) { @@ -173,23 +184,22 @@ public abstract class ClassVisitor { * Visits information about an inner class. This inner class is not * necessarily a member of the class being visited. * - * @param name the internal name of an inner class (see - * {@link Type#getInternalName() getInternalName}). - * @param outerName the internal name of the class to which the inner class - * belongs (see {@link Type#getInternalName() getInternalName}). May - * be null for not member classes. - * @param innerName the (simple) name of the inner class inside its - * enclosing class. May be null for anonymous inner - * classes. - * @param access the access flags of the inner class as originally declared - * in the enclosing class. + * @param name + * the internal name of an inner class (see + * {@link Type#getInternalName() getInternalName}). + * @param outerName + * the internal name of the class to which the inner class + * belongs (see {@link Type#getInternalName() getInternalName}). + * May be null for not member classes. + * @param innerName + * the (simple) name of the inner class inside its enclosing + * class. May be null for anonymous inner classes. + * @param access + * the access flags of the inner class as originally declared in + * the enclosing class. */ - public void visitInnerClass( - String name, - String outerName, - String innerName, - int access) - { + public void visitInnerClass(String name, String outerName, + String innerName, int access) { if (cv != null) { cv.visitInnerClass(name, outerName, innerName, access); } @@ -198,33 +208,32 @@ public abstract class ClassVisitor { /** * Visits a field of the class. * - * @param access the field's access flags (see {@link Opcodes}). This - * parameter also indicates if the field is synthetic and/or - * deprecated. - * @param name the field's name. - * @param desc the field's descriptor (see {@link Type Type}). - * @param signature the field's signature. May be null if the - * field's type does not use generic types. - * @param value the field's initial value. 
This parameter, which may be - * null if the field does not have an initial value, must - * be an {@link Integer}, a {@link Float}, a {@link Long}, a - * {@link Double} or a {@link String} (for int, - * float, long or String fields - * respectively). This parameter is only used for static fields. - * Its value is ignored for non static fields, which must be - * initialized through bytecode instructions in constructors or - * methods. + * @param access + * the field's access flags (see {@link Opcodes}). This parameter + * also indicates if the field is synthetic and/or deprecated. + * @param name + * the field's name. + * @param desc + * the field's descriptor (see {@link Type Type}). + * @param signature + * the field's signature. May be null if the field's + * type does not use generic types. + * @param value + * the field's initial value. This parameter, which may be + * null if the field does not have an initial value, + * must be an {@link Integer}, a {@link Float}, a {@link Long}, a + * {@link Double} or a {@link String} (for int, + * float, long or String fields + * respectively). This parameter is only used for static + * fields. Its value is ignored for non static fields, which + * must be initialized through bytecode instructions in + * constructors or methods. * @return a visitor to visit field annotations and attributes, or - * null if this class visitor is not interested in - * visiting these annotations and attributes. + * null if this class visitor is not interested in visiting + * these annotations and attributes. */ - public FieldVisitor visitField( - int access, - String name, - String desc, - String signature, - Object value) - { + public FieldVisitor visitField(int access, String name, String desc, + String signature, Object value) { if (cv != null) { return cv.visitField(access, name, desc, signature, value); } @@ -233,31 +242,31 @@ public abstract class ClassVisitor { /** * Visits a method of the class. This method must return a new - * {@link MethodVisitor} instance (or null) each time it is - * called, i.e., it should not return a previously returned visitor. + * {@link MethodVisitor} instance (or null) each time it is called, + * i.e., it should not return a previously returned visitor. * - * @param access the method's access flags (see {@link Opcodes}). This - * parameter also indicates if the method is synthetic and/or - * deprecated. - * @param name the method's name. - * @param desc the method's descriptor (see {@link Type Type}). - * @param signature the method's signature. May be null if the - * method parameters, return type and exceptions do not use generic - * types. - * @param exceptions the internal names of the method's exception classes - * (see {@link Type#getInternalName() getInternalName}). May be - * null. + * @param access + * the method's access flags (see {@link Opcodes}). This + * parameter also indicates if the method is synthetic and/or + * deprecated. + * @param name + * the method's name. + * @param desc + * the method's descriptor (see {@link Type Type}). + * @param signature + * the method's signature. May be null if the method + * parameters, return type and exceptions do not use generic + * types. + * @param exceptions + * the internal names of the method's exception classes (see + * {@link Type#getInternalName() getInternalName}). May be + * null. * @return an object to visit the byte code of the method, or null * if this class visitor is not interested in visiting the code of * this method. 
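// Illustrative sketch, not part of this patch: the visitMethod contract above
// (return a fresh MethodVisitor on every call, never a cached one) is what
// allows adapters like this one to be chained. "MethodLoggingVisitor" is a
// made-up name.
import scala.tools.asm.ClassVisitor;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;

class MethodLoggingVisitor extends ClassVisitor {
    MethodLoggingVisitor(ClassVisitor next) { super(Opcodes.ASM4, next); }

    @Override
    public MethodVisitor visitMethod(int access, String name, String desc,
            String signature, String[] exceptions) {
        System.out.println("visiting method " + name + desc);
        // delegate; the next visitor hands back a visitor for this one method
        return super.visitMethod(access, name, desc, signature, exceptions);
    }
}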
*/ - public MethodVisitor visitMethod( - int access, - String name, - String desc, - String signature, - String[] exceptions) - { + public MethodVisitor visitMethod(int access, String name, String desc, + String signature, String[] exceptions) { if (cv != null) { return cv.visitMethod(access, name, desc, signature, exceptions); } diff --git a/src/asm/scala/tools/asm/ClassWriter.java b/src/asm/scala/tools/asm/ClassWriter.java index c7a0736b51..93ed7313c7 100644 --- a/src/asm/scala/tools/asm/ClassWriter.java +++ b/src/asm/scala/tools/asm/ClassWriter.java @@ -66,11 +66,17 @@ public class ClassWriter extends ClassVisitor { public static final int COMPUTE_FRAMES = 2; /** - * Pseudo access flag to distinguish between the synthetic attribute and - * the synthetic access flag. + * Pseudo access flag to distinguish between the synthetic attribute and the + * synthetic access flag. */ static final int ACC_SYNTHETIC_ATTRIBUTE = 0x40000; + /** + * Factor to convert from ACC_SYNTHETIC_ATTRIBUTE to Opcode.ACC_SYNTHETIC. + */ + static final int TO_ACC_SYNTHETIC = ACC_SYNTHETIC_ATTRIBUTE + / Opcodes.ACC_SYNTHETIC; + /** * The type of instructions without any argument. */ @@ -238,8 +244,8 @@ public class ClassWriter extends ClassVisitor { /** * The base value for all CONSTANT_MethodHandle constant pool items. - * Internally, ASM store the 9 variations of CONSTANT_MethodHandle into - * 9 different items. + * Internally, ASM store the 9 variations of CONSTANT_MethodHandle into 9 + * different items. */ static final int HANDLE_BASE = 20; @@ -266,9 +272,8 @@ public class ClassWriter extends ClassVisitor { static final int TYPE_MERGED = 32; /** - * The type of BootstrapMethods items. These items are stored in a - * special class attribute named BootstrapMethods and - * not in the constant pool. + * The type of BootstrapMethods items. These items are stored in a special + * class attribute named BootstrapMethods and not in the constant pool. */ static final int BSM = 33; @@ -327,10 +332,10 @@ public class ClassWriter extends ClassVisitor { * necessarily be stored in the constant pool. This type table is used by * the control flow and data flow analysis algorithm used to compute stack * map frames from scratch. This array associates to each index i - * the Item whose index is i. All Item objects stored in this - * array are also stored in the {@link #items} hash table. These two arrays - * allow to retrieve an Item from its index or, conversely, to get the index - * of an Item from its value. Each Item stores an internal name in its + * the Item whose index is i. All Item objects stored in this array + * are also stored in the {@link #items} hash table. These two arrays allow + * to retrieve an Item from its index or, conversely, to get the index of an + * Item from its value. Each Item stores an internal name in its * {@link Item#strVal1} field. */ Item[] typeTable; @@ -439,16 +444,16 @@ public class ClassWriter extends ClassVisitor { /** * The fields of this class. These fields are stored in a linked list of * {@link FieldWriter} objects, linked to each other by their - * {@link FieldWriter#fv} field. This field stores the first element of - * this list. + * {@link FieldWriter#fv} field. This field stores the first element of this + * list. */ FieldWriter firstField; /** * The fields of this class. These fields are stored in a linked list of * {@link FieldWriter} objects, linked to each other by their - * {@link FieldWriter#fv} field. This field stores the last element of - * this list. 
+ * {@link FieldWriter#fv} field. This field stores the last element of this + * list. */ FieldWriter lastField; @@ -463,8 +468,8 @@ public class ClassWriter extends ClassVisitor { /** * The methods of this class. These methods are stored in a linked list of * {@link MethodWriter} objects, linked to each other by their - * {@link MethodWriter#mv} field. This field stores the last element of - * this list. + * {@link MethodWriter#mv} field. This field stores the last element of this + * list. */ MethodWriter lastMethod; @@ -584,8 +589,10 @@ public class ClassWriter extends ClassVisitor { /** * Constructs a new {@link ClassWriter} object. * - * @param flags option flags that can be used to modify the default behavior - * of this class. See {@link #COMPUTE_MAXS}, {@link #COMPUTE_FRAMES}. + * @param flags + * option flags that can be used to modify the default behavior + * of this class. See {@link #COMPUTE_MAXS}, + * {@link #COMPUTE_FRAMES}. */ public ClassWriter(final int flags) { super(Opcodes.ASM4); @@ -606,26 +613,32 @@ public class ClassWriter extends ClassVisitor { * "mostly add" bytecode transformations. These optimizations are the * following: * - *
• The constant pool from the original class is copied as is in the
- * new class, which saves time. New constant pool entries will be added at
- * the end if necessary, but unused constant pool entries won't be
- * removed.
- * • Methods that are not transformed are copied as is
- * in the new class, directly from the original class bytecode (i.e. without
- * emitting visit events for all the method instructions), which saves a
- * lot of time. Untransformed methods are detected by the fact that
- * the {@link ClassReader} receives {@link MethodVisitor} objects that come
- * from a {@link ClassWriter} (and not from any other {@link ClassVisitor}
- * instance).
+ *
+ * • The constant pool from the original class is copied as is in the new
+ * class, which saves time. New constant pool entries will be added at the
+ * end if necessary, but unused constant pool entries won't be
+ * removed.
+ * • Methods that are not transformed are copied as is in the new class,
+ * directly from the original class bytecode (i.e. without emitting visit
+ * events for all the method instructions), which saves a lot of
+ * time. Untransformed methods are detected by the fact that the
+ * {@link ClassReader} receives {@link MethodVisitor} objects that come from
+ * a {@link ClassWriter} (and not from any other {@link ClassVisitor}
+ * instance).
    * - * @param classReader the {@link ClassReader} used to read the original - * class. It will be used to copy the entire constant pool from the - * original class and also to copy other fragments of original - * bytecode where applicable. - * @param flags option flags that can be used to modify the default behavior - * of this class. These option flags do not affect methods that - * are copied as is in the new class. This means that the maximum - * stack size nor the stack frames will be computed for these - * methods. See {@link #COMPUTE_MAXS}, {@link #COMPUTE_FRAMES}. + * @param classReader + * the {@link ClassReader} used to read the original class. It + * will be used to copy the entire constant pool from the + * original class and also to copy other fragments of original + * bytecode where applicable. + * @param flags + * option flags that can be used to modify the default behavior + * of this class. These option flags do not affect methods + * that are copied as is in the new class. This means that the + * maximum stack size nor the stack frames will be computed for + * these methods. See {@link #COMPUTE_MAXS}, + * {@link #COMPUTE_FRAMES}. */ public ClassWriter(final ClassReader classReader, final int flags) { this(flags); @@ -638,14 +651,9 @@ public class ClassWriter extends ClassVisitor { // ------------------------------------------------------------------------ @Override - public final void visit( - final int version, - final int access, - final String name, - final String signature, - final String superName, - final String[] interfaces) - { + public final void visit(final int version, final int access, + final String name, final String signature, final String superName, + final String[] interfaces) { this.version = version; this.access = access; this.name = newClass(name); @@ -674,11 +682,8 @@ public class ClassWriter extends ClassVisitor { } @Override - public final void visitOuterClass( - final String owner, - final String name, - final String desc) - { + public final void visitOuterClass(final String owner, final String name, + final String desc) { enclosingMethodOwner = newClass(owner); if (name != null && desc != null) { enclosingMethod = newNameType(name, desc); @@ -686,10 +691,8 @@ public class ClassWriter extends ClassVisitor { } @Override - public final AnnotationVisitor visitAnnotation( - final String desc, - final boolean visible) - { + public final AnnotationVisitor visitAnnotation(final String desc, + final boolean visible) { if (!ClassReader.ANNOTATIONS) { return null; } @@ -714,12 +717,8 @@ public class ClassWriter extends ClassVisitor { } @Override - public final void visitInnerClass( - final String name, - final String outerName, - final String innerName, - final int access) - { + public final void visitInnerClass(final String name, + final String outerName, final String innerName, final int access) { if (innerClasses == null) { innerClasses = new ByteVector(); } @@ -731,32 +730,16 @@ public class ClassWriter extends ClassVisitor { } @Override - public final FieldVisitor visitField( - final int access, - final String name, - final String desc, - final String signature, - final Object value) - { + public final FieldVisitor visitField(final int access, final String name, + final String desc, final String signature, final Object value) { return new FieldWriter(this, access, name, desc, signature, value); } @Override - public final MethodVisitor visitMethod( - final int access, - final String name, - final String desc, - final String signature, - final String[] 
exceptions) - { - return new MethodWriter(this, - access, - name, - desc, - signature, - exceptions, - computeMaxs, - computeFrames); + public final MethodVisitor visitMethod(final int access, final String name, + final String desc, final String signature, final String[] exceptions) { + return new MethodWriter(this, access, name, desc, signature, + exceptions, computeMaxs, computeFrames); } @Override @@ -773,7 +756,7 @@ public class ClassWriter extends ClassVisitor { * @return the bytecode of the class that was build with this class writer. */ public byte[] toByteArray() { - if (index > Short.MAX_VALUE) { + if (index > 0xFFFF) { throw new RuntimeException("Class file too large!"); } // computes the real size of the bytecode of this class @@ -793,8 +776,9 @@ public class ClassWriter extends ClassVisitor { mb = (MethodWriter) mb.mv; } int attributeCount = 0; - if (bootstrapMethods != null) { // we put it as first argument in order - // to improve a bit ClassReader.copyBootstrapMethods + if (bootstrapMethods != null) { + // we put it as first attribute in order to improve a bit + // ClassReader.copyBootstrapMethods ++attributeCount; size += 8 + bootstrapMethods.length; newUTF8("BootstrapMethods"); @@ -824,12 +808,13 @@ public class ClassWriter extends ClassVisitor { size += 6; newUTF8("Deprecated"); } - if ((access & Opcodes.ACC_SYNTHETIC) != 0 - && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0)) - { - ++attributeCount; - size += 6; - newUTF8("Synthetic"); + if ((access & Opcodes.ACC_SYNTHETIC) != 0) { + if ((version & 0xFFFF) < Opcodes.V1_5 + || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0) { + ++attributeCount; + size += 6; + newUTF8("Synthetic"); + } } if (innerClasses != null) { ++attributeCount; @@ -856,9 +841,8 @@ public class ClassWriter extends ClassVisitor { ByteVector out = new ByteVector(size); out.putInt(0xCAFEBABE).putInt(version); out.putShort(index).putByteArray(pool.data, 0, pool.length); - int mask = Opcodes.ACC_DEPRECATED - | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE - | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC)); + int mask = Opcodes.ACC_DEPRECATED | ACC_SYNTHETIC_ATTRIBUTE + | ((access & ACC_SYNTHETIC_ATTRIBUTE) / TO_ACC_SYNTHETIC); out.putShort(access & ~mask).putShort(name).putShort(superName); out.putShort(interfaceCount); for (int i = 0; i < interfaceCount; ++i) { @@ -877,9 +861,10 @@ public class ClassWriter extends ClassVisitor { mb = (MethodWriter) mb.mv; } out.putShort(attributeCount); - if (bootstrapMethods != null) { // should be the first class attribute ? 
+ if (bootstrapMethods != null) { out.putShort(newUTF8("BootstrapMethods")); - out.putInt(bootstrapMethods.length + 2).putShort(bootstrapMethodsCount); + out.putInt(bootstrapMethods.length + 2).putShort( + bootstrapMethodsCount); out.putByteArray(bootstrapMethods.data, 0, bootstrapMethods.length); } if (ClassReader.SIGNATURES && signature != 0) { @@ -900,10 +885,11 @@ public class ClassWriter extends ClassVisitor { if ((access & Opcodes.ACC_DEPRECATED) != 0) { out.putShort(newUTF8("Deprecated")).putInt(0); } - if ((access & Opcodes.ACC_SYNTHETIC) != 0 - && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0)) - { - out.putShort(newUTF8("Synthetic")).putInt(0); + if ((access & Opcodes.ACC_SYNTHETIC) != 0) { + if ((version & 0xFFFF) < Opcodes.V1_5 + || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0) { + out.putShort(newUTF8("Synthetic")).putInt(0); + } } if (innerClasses != null) { out.putShort(newUTF8("InnerClasses")); @@ -937,10 +923,11 @@ public class ClassWriter extends ClassVisitor { * Adds a number or string constant to the constant pool of the class being * build. Does nothing if the constant pool already contains a similar item. * - * @param cst the value of the constant to be added to the constant pool. - * This parameter must be an {@link Integer}, a {@link Float}, a - * {@link Long}, a {@link Double}, a {@link String} or a - * {@link Type}. + * @param cst + * the value of the constant to be added to the constant pool. + * This parameter must be an {@link Integer}, a {@link Float}, a + * {@link Long}, a {@link Double}, a {@link String} or a + * {@link Type}. * @return a new or already existing constant item with the given value. */ Item newConstItem(final Object cst) { @@ -973,12 +960,12 @@ public class ClassWriter extends ClassVisitor { } else if (cst instanceof Type) { Type t = (Type) cst; int s = t.getSort(); - if (s == Type.ARRAY) { - return newClassItem(t.getDescriptor()); - } else if (s == Type.OBJECT) { + if (s == Type.OBJECT) { return newClassItem(t.getInternalName()); - } else { // s == Type.METHOD + } else if (s == Type.METHOD) { return newMethodTypeItem(t.getDescriptor()); + } else { // s == primitive type or array + return newClassItem(t.getDescriptor()); } } else if (cst instanceof Handle) { Handle h = (Handle) cst; @@ -994,9 +981,10 @@ public class ClassWriter extends ClassVisitor { * This method is intended for {@link Attribute} sub classes, and is * normally not needed by class generators or adapters. * - * @param cst the value of the constant to be added to the constant pool. - * This parameter must be an {@link Integer}, a {@link Float}, a - * {@link Long}, a {@link Double} or a {@link String}. + * @param cst + * the value of the constant to be added to the constant pool. + * This parameter must be an {@link Integer}, a {@link Float}, a + * {@link Long}, a {@link Double} or a {@link String}. * @return the index of a new or already existing constant item with the * given value. */ @@ -1010,7 +998,8 @@ public class ClassWriter extends ClassVisitor { * method is intended for {@link Attribute} sub classes, and is normally not * needed by class generators or adapters. * - * @param value the String value. + * @param value + * the String value. * @return the index of a new or already existing UTF8 item. */ public int newUTF8(final String value) { @@ -1030,7 +1019,8 @@ public class ClassWriter extends ClassVisitor { * This method is intended for {@link Attribute} sub classes, and is * normally not needed by class generators or adapters. 
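// Illustrative sketch, not part of this patch: the newUTF8/newClass/newConst
// family above is what an Attribute subclass calls from its write method to
// intern constants before emitting its body. This continues the hypothetical
// "org.example.Comment" attribute sketched earlier.
import scala.tools.asm.Attribute;
import scala.tools.asm.ByteVector;
import scala.tools.asm.ClassWriter;

class CommentAttributeWriter extends Attribute {
    final String text;

    CommentAttributeWriter(String text) {
        super("org.example.Comment"); // same made-up attribute name as before
        this.text = text;
    }

    @Override
    protected ByteVector write(ClassWriter cw, byte[] code, int len,
            int maxStack, int maxLocals) {
        return new ByteVector().putShort(cw.newUTF8(text)); // body: one UTF8 index
    }
}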
* - * @param value the internal name of the class. + * @param value + * the internal name of the class. * @return a new or already existing class reference item. */ Item newClassItem(final String value) { @@ -1050,7 +1040,8 @@ public class ClassWriter extends ClassVisitor { * This method is intended for {@link Attribute} sub classes, and is * normally not needed by class generators or adapters. * - * @param value the internal name of the class. + * @param value + * the internal name of the class. * @return the index of a new or already existing class reference item. */ public int newClass(final String value) { @@ -1063,7 +1054,8 @@ public class ClassWriter extends ClassVisitor { * This method is intended for {@link Attribute} sub classes, and is * normally not needed by class generators or adapters. * - * @param methodDesc method descriptor of the method type. + * @param methodDesc + * method descriptor of the method type. * @return a new or already existing method type reference item. */ Item newMethodTypeItem(final String methodDesc) { @@ -1083,7 +1075,8 @@ public class ClassWriter extends ClassVisitor { * This method is intended for {@link Attribute} sub classes, and is * normally not needed by class generators or adapters. * - * @param methodDesc method descriptor of the method type. + * @param methodDesc + * method descriptor of the method type. * @return the index of a new or already existing method type reference * item. */ @@ -1097,33 +1090,34 @@ public class ClassWriter extends ClassVisitor { * intended for {@link Attribute} sub classes, and is normally not needed by * class generators or adapters. * - * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD}, - * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD}, - * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL}, - * {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL}, - * {@link Opcodes#H_NEWINVOKESPECIAL} or - * {@link Opcodes#H_INVOKEINTERFACE}. - * @param owner the internal name of the field or method owner class. - * @param name the name of the field or method. - * @param desc the descriptor of the field or method. + * @param tag + * the kind of this handle. Must be {@link Opcodes#H_GETFIELD}, + * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD}, + * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL}, + * {@link Opcodes#H_INVOKESTATIC}, + * {@link Opcodes#H_INVOKESPECIAL}, + * {@link Opcodes#H_NEWINVOKESPECIAL} or + * {@link Opcodes#H_INVOKEINTERFACE}. + * @param owner + * the internal name of the field or method owner class. + * @param name + * the name of the field or method. + * @param desc + * the descriptor of the field or method. * @return a new or an already existing method type reference item. 
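// Illustrative sketch, not part of this patch: a Handle names the bootstrap
// method of an invokedynamic instruction; when the instruction is visited, the
// ClassWriter interns it through newHandleItem and records a BootstrapMethods
// entry. Owner, names and descriptors below are made up.
import scala.tools.asm.Handle;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;

class IndyExample {
    static void emit(MethodVisitor mv) {
        Handle bootstrap = new Handle(Opcodes.H_INVOKESTATIC,
                "org/example/Bootstraps", "bootstrap",
                "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
                        + "Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
        mv.visitInvokeDynamicInsn("greet", "()Ljava/lang/String;", bootstrap);
    }
}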
*/ - Item newHandleItem( - final int tag, - final String owner, - final String name, - final String desc) - { + Item newHandleItem(final int tag, final String owner, final String name, + final String desc) { key4.set(HANDLE_BASE + tag, owner, name, desc); Item result = get(key4); if (result == null) { if (tag <= Opcodes.H_PUTSTATIC) { put112(HANDLE, tag, newField(owner, name, desc)); } else { - put112(HANDLE, tag, newMethod(owner, - name, - desc, - tag == Opcodes.H_INVOKEINTERFACE)); + put112(HANDLE, + tag, + newMethod(owner, name, desc, + tag == Opcodes.H_INVOKEINTERFACE)); } result = new Item(index++, key4); put(result); @@ -1132,29 +1126,30 @@ public class ClassWriter extends ClassVisitor { } /** - * Adds a handle to the constant pool of the class being - * build. Does nothing if the constant pool already contains a similar item. - * This method is intended for {@link Attribute} sub classes, and is - * normally not needed by class generators or adapters. + * Adds a handle to the constant pool of the class being build. Does nothing + * if the constant pool already contains a similar item. This method is + * intended for {@link Attribute} sub classes, and is normally not needed by + * class generators or adapters. * - * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD}, - * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD}, - * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL}, - * {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL}, - * {@link Opcodes#H_NEWINVOKESPECIAL} or - * {@link Opcodes#H_INVOKEINTERFACE}. - * @param owner the internal name of the field or method owner class. - * @param name the name of the field or method. - * @param desc the descriptor of the field or method. + * @param tag + * the kind of this handle. Must be {@link Opcodes#H_GETFIELD}, + * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD}, + * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL}, + * {@link Opcodes#H_INVOKESTATIC}, + * {@link Opcodes#H_INVOKESPECIAL}, + * {@link Opcodes#H_NEWINVOKESPECIAL} or + * {@link Opcodes#H_INVOKEINTERFACE}. + * @param owner + * the internal name of the field or method owner class. + * @param name + * the name of the field or method. + * @param desc + * the descriptor of the field or method. * @return the index of a new or already existing method type reference * item. */ - public int newHandle( - final int tag, - final String owner, - final String name, - final String desc) - { + public int newHandle(final int tag, final String owner, final String name, + final String desc) { return newHandleItem(tag, owner, name, desc).index; } @@ -1164,19 +1159,19 @@ public class ClassWriter extends ClassVisitor { * This method is intended for {@link Attribute} sub classes, and is * normally not needed by class generators or adapters. * - * @param name name of the invoked method. - * @param desc descriptor of the invoke method. - * @param bsm the bootstrap method. - * @param bsmArgs the bootstrap method constant arguments. + * @param name + * name of the invoked method. + * @param desc + * descriptor of the invoke method. + * @param bsm + * the bootstrap method. + * @param bsmArgs + * the bootstrap method constant arguments. * * @return a new or an already existing invokedynamic type reference item. */ - Item newInvokeDynamicItem( - final String name, - final String desc, - final Handle bsm, - final Object... bsmArgs) - { + Item newInvokeDynamicItem(final String name, final String desc, + final Handle bsm, final Object... 
bsmArgs) { // cache for performance ByteVector bootstrapMethods = this.bootstrapMethods; if (bootstrapMethods == null) { @@ -1186,9 +1181,7 @@ public class ClassWriter extends ClassVisitor { int position = bootstrapMethods.length; // record current position int hashCode = bsm.hashCode(); - bootstrapMethods.putShort(newHandle(bsm.tag, - bsm.owner, - bsm.name, + bootstrapMethods.putShort(newHandle(bsm.tag, bsm.owner, bsm.name, bsm.desc)); int argsLength = bsmArgs.length; @@ -1250,20 +1243,20 @@ public class ClassWriter extends ClassVisitor { * This method is intended for {@link Attribute} sub classes, and is * normally not needed by class generators or adapters. * - * @param name name of the invoked method. - * @param desc descriptor of the invoke method. - * @param bsm the bootstrap method. - * @param bsmArgs the bootstrap method constant arguments. + * @param name + * name of the invoked method. + * @param desc + * descriptor of the invoke method. + * @param bsm + * the bootstrap method. + * @param bsmArgs + * the bootstrap method constant arguments. * - * @return the index of a new or already existing invokedynamic - * reference item. - */ - public int newInvokeDynamic( - final String name, - final String desc, - final Handle bsm, - final Object... bsmArgs) - { + * @return the index of a new or already existing invokedynamic reference + * item. + */ + public int newInvokeDynamic(final String name, final String desc, + final Handle bsm, final Object... bsmArgs) { return newInvokeDynamicItem(name, desc, bsm, bsmArgs).index; } @@ -1271,13 +1264,15 @@ public class ClassWriter extends ClassVisitor { * Adds a field reference to the constant pool of the class being build. * Does nothing if the constant pool already contains a similar item. * - * @param owner the internal name of the field's owner class. - * @param name the field's name. - * @param desc the field's descriptor. + * @param owner + * the internal name of the field's owner class. + * @param name + * the field's name. + * @param desc + * the field's descriptor. * @return a new or already existing field reference item. */ - Item newFieldItem(final String owner, final String name, final String desc) - { + Item newFieldItem(final String owner, final String name, final String desc) { key3.set(FIELD, owner, name, desc); Item result = get(key3); if (result == null) { @@ -1294,13 +1289,15 @@ public class ClassWriter extends ClassVisitor { * This method is intended for {@link Attribute} sub classes, and is * normally not needed by class generators or adapters. * - * @param owner the internal name of the field's owner class. - * @param name the field's name. - * @param desc the field's descriptor. + * @param owner + * the internal name of the field's owner class. + * @param name + * the field's name. + * @param desc + * the field's descriptor. * @return the index of a new or already existing field reference item. */ - public int newField(final String owner, final String name, final String desc) - { + public int newField(final String owner, final String name, final String desc) { return newFieldItem(owner, name, desc).index; } @@ -1308,18 +1305,18 @@ public class ClassWriter extends ClassVisitor { * Adds a method reference to the constant pool of the class being build. * Does nothing if the constant pool already contains a similar item. * - * @param owner the internal name of the method's owner class. - * @param name the method's name. - * @param desc the method's descriptor. - * @param itf true if owner is an interface. 
+ * @param owner + * the internal name of the method's owner class. + * @param name + * the method's name. + * @param desc + * the method's descriptor. + * @param itf + * true if owner is an interface. * @return a new or already existing method reference item. */ - Item newMethodItem( - final String owner, - final String name, - final String desc, - final boolean itf) - { + Item newMethodItem(final String owner, final String name, + final String desc, final boolean itf) { int type = itf ? IMETH : METH; key3.set(type, owner, name, desc); Item result = get(key3); @@ -1337,18 +1334,18 @@ public class ClassWriter extends ClassVisitor { * This method is intended for {@link Attribute} sub classes, and is * normally not needed by class generators or adapters. * - * @param owner the internal name of the method's owner class. - * @param name the method's name. - * @param desc the method's descriptor. - * @param itf true if owner is an interface. + * @param owner + * the internal name of the method's owner class. + * @param name + * the method's name. + * @param desc + * the method's descriptor. + * @param itf + * true if owner is an interface. * @return the index of a new or already existing method reference item. */ - public int newMethod( - final String owner, - final String name, - final String desc, - final boolean itf) - { + public int newMethod(final String owner, final String name, + final String desc, final boolean itf) { return newMethodItem(owner, name, desc, itf).index; } @@ -1356,7 +1353,8 @@ public class ClassWriter extends ClassVisitor { * Adds an integer to the constant pool of the class being build. Does * nothing if the constant pool already contains a similar item. * - * @param value the int value. + * @param value + * the int value. * @return a new or already existing int item. */ Item newInteger(final int value) { @@ -1374,7 +1372,8 @@ public class ClassWriter extends ClassVisitor { * Adds a float to the constant pool of the class being build. Does nothing * if the constant pool already contains a similar item. * - * @param value the float value. + * @param value + * the float value. * @return a new or already existing float item. */ Item newFloat(final float value) { @@ -1392,7 +1391,8 @@ public class ClassWriter extends ClassVisitor { * Adds a long to the constant pool of the class being build. Does nothing * if the constant pool already contains a similar item. * - * @param value the long value. + * @param value + * the long value. * @return a new or already existing long item. */ Item newLong(final long value) { @@ -1411,7 +1411,8 @@ public class ClassWriter extends ClassVisitor { * Adds a double to the constant pool of the class being build. Does nothing * if the constant pool already contains a similar item. * - * @param value the double value. + * @param value + * the double value. * @return a new or already existing double item. */ Item newDouble(final double value) { @@ -1430,7 +1431,8 @@ public class ClassWriter extends ClassVisitor { * Adds a string to the constant pool of the class being build. Does nothing * if the constant pool already contains a similar item. * - * @param value the String value. + * @param value + * the String value. * @return a new or already existing string item. */ private Item newString(final String value) { @@ -1450,8 +1452,10 @@ public class ClassWriter extends ClassVisitor { * method is intended for {@link Attribute} sub classes, and is normally not * needed by class generators or adapters. * - * @param name a name. 
- * @param desc a type descriptor. + * @param name + * a name. + * @param desc + * a type descriptor. * @return the index of a new or already existing name and type item. */ public int newNameType(final String name, final String desc) { @@ -1462,8 +1466,10 @@ public class ClassWriter extends ClassVisitor { * Adds a name and type to the constant pool of the class being build. Does * nothing if the constant pool already contains a similar item. * - * @param name a name. - * @param desc a type descriptor. + * @param name + * a name. + * @param desc + * a type descriptor. * @return a new or already existing name and type item. */ Item newNameTypeItem(final String name, final String desc) { @@ -1481,7 +1487,8 @@ public class ClassWriter extends ClassVisitor { * Adds the given internal name to {@link #typeTable} and returns its index. * Does nothing if the type table already contains this internal name. * - * @param type the internal name to be added to the type table. + * @param type + * the internal name to be added to the type table. * @return the index of this internal name in the type table. */ int addType(final String type) { @@ -1498,9 +1505,11 @@ public class ClassWriter extends ClassVisitor { * index. This method is used for UNINITIALIZED types, made of an internal * name and a bytecode offset. * - * @param type the internal name to be added to the type table. - * @param offset the bytecode offset of the NEW instruction that created - * this UNINITIALIZED type value. + * @param type + * the internal name to be added to the type table. + * @param offset + * the bytecode offset of the NEW instruction that created this + * UNINITIALIZED type value. * @return the index of this internal name in the type table. */ int addUninitializedType(final String type, final int offset) { @@ -1518,7 +1527,8 @@ public class ClassWriter extends ClassVisitor { /** * Adds the given Item to {@link #typeTable}. * - * @param item the value to be added to the type table. + * @param item + * the value to be added to the type table. * @return the added Item, which a new Item instance with the same value as * the given Item. */ @@ -1544,8 +1554,10 @@ public class ClassWriter extends ClassVisitor { * {@link #items} hash table to speedup future calls with the same * parameters. * - * @param type1 index of an internal name in {@link #typeTable}. - * @param type2 index of an internal name in {@link #typeTable}. + * @param type1 + * index of an internal name in {@link #typeTable}. + * @param type2 + * index of an internal name in {@link #typeTable}. * @return the index of the common super type of the two given types. */ int getMergedType(final int type1, final int type2) { @@ -1572,13 +1584,14 @@ public class ClassWriter extends ClassVisitor { * that is currently being generated by this ClassWriter, which can of * course not be loaded since it is under construction. * - * @param type1 the internal name of a class. - * @param type2 the internal name of another class. + * @param type1 + * the internal name of a class. + * @param type2 + * the internal name of another class. * @return the internal name of the common super class of the two given * classes. 
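// Illustrative sketch, not part of this patch: because the default
// implementation below loads both classes, frameworks that compute frames at
// build time often subclass ClassWriter and answer from their own class
// hierarchy data instead; falling back to java/lang/Object is a common
// conservative choice, not something this patch introduces.
import scala.tools.asm.ClassWriter;

class NonLoadingClassWriter extends ClassWriter {
    NonLoadingClassWriter(int flags) { super(flags); }

    @Override
    protected String getCommonSuperClass(String type1, String type2) {
        if (type1.equals(type2)) {
            return type1;
        }
        // a real implementation would consult a precomputed hierarchy here
        return "java/lang/Object";
    }
}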
*/ - protected String getCommonSuperClass(final String type1, final String type2) - { + protected String getCommonSuperClass(final String type1, final String type2) { Class c, d; ClassLoader classLoader = getClass().getClassLoader(); try { @@ -1607,7 +1620,8 @@ public class ClassWriter extends ClassVisitor { * Returns the constant pool's hash table item which is equal to the given * item. * - * @param key a constant pool item. + * @param key + * a constant pool item. * @return the constant pool's hash table item which is equal to the given * item, or null if there is no such item. */ @@ -1623,7 +1637,8 @@ public class ClassWriter extends ClassVisitor { * Puts the given item in the constant pool's hash table. The hash table * must not already contains this item. * - * @param i the item to be added to the constant pool's hash table. + * @param i + * the item to be added to the constant pool's hash table. */ private void put(final Item i) { if (index + typeCount > threshold) { @@ -1651,9 +1666,12 @@ public class ClassWriter extends ClassVisitor { /** * Puts one byte and two shorts into the constant pool. * - * @param b a byte. - * @param s1 a short. - * @param s2 another short. + * @param b + * a byte. + * @param s1 + * a short. + * @param s2 + * another short. */ private void put122(final int b, final int s1, final int s2) { pool.put12(b, s1).putShort(s2); @@ -1662,9 +1680,12 @@ public class ClassWriter extends ClassVisitor { /** * Puts two bytes and one short into the constant pool. * - * @param b1 a byte. - * @param b2 another byte. - * @param s a short. + * @param b1 + * a byte. + * @param b2 + * another byte. + * @param s + * a short. */ private void put112(final int b1, final int b2, final int s) { pool.put11(b1, b2).putShort(s); diff --git a/src/asm/scala/tools/asm/Context.java b/src/asm/scala/tools/asm/Context.java new file mode 100644 index 0000000000..7b3a2ad9dd --- /dev/null +++ b/src/asm/scala/tools/asm/Context.java @@ -0,0 +1,110 @@ +/*** + * ASM: a very small and fast Java bytecode manipulation framework + * Copyright (c) 2000-2011 INRIA, France Telecom + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. Neither the name of the copyright holders nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ + +package scala.tools.asm; + +/** + * Information about a class being parsed in a {@link ClassReader}. + * + * @author Eric Bruneton + */ +class Context { + + /** + * Prototypes of the attributes that must be parsed for this class. + */ + Attribute[] attrs; + + /** + * The {@link ClassReader} option flags for the parsing of this class. + */ + int flags; + + /** + * The buffer used to read strings. + */ + char[] buffer; + + /** + * The start index of each bootstrap method. + */ + int[] bootstrapMethods; + + /** + * The access flags of the method currently being parsed. + */ + int access; + + /** + * The name of the method currently being parsed. + */ + String name; + + /** + * The descriptor of the method currently being parsed. + */ + String desc; + + /** + * The offset of the latest stack map frame that has been parsed. + */ + int offset; + + /** + * The encoding of the latest stack map frame that has been parsed. + */ + int mode; + + /** + * The number of locals in the latest stack map frame that has been parsed. + */ + int localCount; + + /** + * The number locals in the latest stack map frame that has been parsed, + * minus the number of locals in the previous frame. + */ + int localDiff; + + /** + * The local values of the latest stack map frame that has been parsed. + */ + Object[] local; + + /** + * The stack size of the latest stack map frame that has been parsed. + */ + int stackCount; + + /** + * The stack values of the latest stack map frame that has been parsed. + */ + Object[] stack; +} diff --git a/src/asm/scala/tools/asm/FieldVisitor.java b/src/asm/scala/tools/asm/FieldVisitor.java index 9ac0f6236f..9171f331e5 100644 --- a/src/asm/scala/tools/asm/FieldVisitor.java +++ b/src/asm/scala/tools/asm/FieldVisitor.java @@ -30,9 +30,9 @@ package scala.tools.asm; /** - * A visitor to visit a Java field. The methods of this class must be called - * in the following order: ( visitAnnotation | - * visitAttribute )* visitEnd. + * A visitor to visit a Java field. The methods of this class must be called in + * the following order: ( visitAnnotation | visitAttribute )* + * visitEnd. * * @author Eric Bruneton */ @@ -53,8 +53,9 @@ public abstract class FieldVisitor { /** * Constructs a new {@link FieldVisitor}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. */ public FieldVisitor(final int api) { this(api, null); @@ -63,15 +64,17 @@ public abstract class FieldVisitor { /** * Constructs a new {@link FieldVisitor}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. - * @param fv the field visitor to which this visitor must delegate method - * calls. May be null. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. 
+ * @param fv + * the field visitor to which this visitor must delegate method + * calls. May be null. */ public FieldVisitor(final int api, final FieldVisitor fv) { - /*if (api != Opcodes.ASM4) { + if (api != Opcodes.ASM4) { throw new IllegalArgumentException(); - }*/ + } this.api = api; this.fv = fv; } @@ -79,8 +82,10 @@ public abstract class FieldVisitor { /** * Visits an annotation of the field. * - * @param desc the class descriptor of the annotation class. - * @param visible true if the annotation is visible at runtime. + * @param desc + * the class descriptor of the annotation class. + * @param visible + * true if the annotation is visible at runtime. * @return a visitor to visit the annotation values, or null if * this visitor is not interested in visiting this annotation. */ @@ -94,7 +99,8 @@ public abstract class FieldVisitor { /** * Visits a non standard attribute of the field. * - * @param attr an attribute. + * @param attr + * an attribute. */ public void visitAttribute(Attribute attr) { if (fv != null) { diff --git a/src/asm/scala/tools/asm/FieldWriter.java b/src/asm/scala/tools/asm/FieldWriter.java index 45ef6d0df3..02c6059b91 100644 --- a/src/asm/scala/tools/asm/FieldWriter.java +++ b/src/asm/scala/tools/asm/FieldWriter.java @@ -92,21 +92,21 @@ final class FieldWriter extends FieldVisitor { /** * Constructs a new {@link FieldWriter}. * - * @param cw the class writer to which this field must be added. - * @param access the field's access flags (see {@link Opcodes}). - * @param name the field's name. - * @param desc the field's descriptor (see {@link Type}). - * @param signature the field's signature. May be null. - * @param value the field's constant value. May be null. + * @param cw + * the class writer to which this field must be added. + * @param access + * the field's access flags (see {@link Opcodes}). + * @param name + * the field's name. + * @param desc + * the field's descriptor (see {@link Type}). + * @param signature + * the field's signature. May be null. + * @param value + * the field's constant value. May be null. */ - FieldWriter( - final ClassWriter cw, - final int access, - final String name, - final String desc, - final String signature, - final Object value) - { + FieldWriter(final ClassWriter cw, final int access, final String name, + final String desc, final String signature, final Object value) { super(Opcodes.ASM4); if (cw.firstField == null) { cw.firstField = this; @@ -131,10 +131,8 @@ final class FieldWriter extends FieldVisitor { // ------------------------------------------------------------------------ @Override - public AnnotationVisitor visitAnnotation( - final String desc, - final boolean visible) - { + public AnnotationVisitor visitAnnotation(final String desc, + final boolean visible) { if (!ClassReader.ANNOTATIONS) { return null; } @@ -177,11 +175,12 @@ final class FieldWriter extends FieldVisitor { cw.newUTF8("ConstantValue"); size += 8; } - if ((access & Opcodes.ACC_SYNTHETIC) != 0 - && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0)) - { - cw.newUTF8("Synthetic"); - size += 6; + if ((access & Opcodes.ACC_SYNTHETIC) != 0) { + if ((cw.version & 0xFFFF) < Opcodes.V1_5 + || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) { + cw.newUTF8("Synthetic"); + size += 6; + } } if ((access & Opcodes.ACC_DEPRECATED) != 0) { cw.newUTF8("Deprecated"); @@ -208,21 +207,23 @@ final class FieldWriter extends FieldVisitor { /** * Puts the content of this field into the given byte vector. 
* - * @param out where the content of this field must be put. + * @param out + * where the content of this field must be put. */ void put(final ByteVector out) { - int mask = Opcodes.ACC_DEPRECATED - | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE - | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC)); + final int FACTOR = ClassWriter.TO_ACC_SYNTHETIC; + int mask = Opcodes.ACC_DEPRECATED | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE + | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / FACTOR); out.putShort(access & ~mask).putShort(name).putShort(desc); int attributeCount = 0; if (value != 0) { ++attributeCount; } - if ((access & Opcodes.ACC_SYNTHETIC) != 0 - && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0)) - { - ++attributeCount; + if ((access & Opcodes.ACC_SYNTHETIC) != 0) { + if ((cw.version & 0xFFFF) < Opcodes.V1_5 + || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) { + ++attributeCount; + } } if ((access & Opcodes.ACC_DEPRECATED) != 0) { ++attributeCount; @@ -244,10 +245,11 @@ final class FieldWriter extends FieldVisitor { out.putShort(cw.newUTF8("ConstantValue")); out.putInt(2).putShort(value); } - if ((access & Opcodes.ACC_SYNTHETIC) != 0 - && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0)) - { - out.putShort(cw.newUTF8("Synthetic")).putInt(0); + if ((access & Opcodes.ACC_SYNTHETIC) != 0) { + if ((cw.version & 0xFFFF) < Opcodes.V1_5 + || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) { + out.putShort(cw.newUTF8("Synthetic")).putInt(0); + } } if ((access & Opcodes.ACC_DEPRECATED) != 0) { out.putShort(cw.newUTF8("Deprecated")).putInt(0); diff --git a/src/asm/scala/tools/asm/Frame.java b/src/asm/scala/tools/asm/Frame.java index 387b56796d..bcc3e8450b 100644 --- a/src/asm/scala/tools/asm/Frame.java +++ b/src/asm/scala/tools/asm/Frame.java @@ -80,13 +80,13 @@ final class Frame { * table contains only internal type names (array type descriptors are * forbidden - dimensions must be represented through the DIM field). * - * The LONG and DOUBLE types are always represented by using two slots (LONG + - * TOP or DOUBLE + TOP), for local variable types as well as in the operand - * stack. This is necessary to be able to simulate DUPx_y instructions, - * whose effect would be dependent on the actual type values if types were - * always represented by a single slot in the stack (and this is not - * possible, since actual type values are not always known - cf LOCAL and - * STACK type kinds). + * The LONG and DOUBLE types are always represented by using two slots (LONG + * + TOP or DOUBLE + TOP), for local variable types as well as in the + * operand stack. This is necessary to be able to simulate DUPx_y + * instructions, whose effect would be dependent on the actual type values + * if types were always represented by a single slot in the stack (and this + * is not possible, since actual type values are not always known - cf LOCAL + * and STACK type kinds). */ /** @@ -117,9 +117,9 @@ final class Frame { /** * Flag used for LOCAL and STACK types. Indicates that if this type happens * to be a long or double type (during the computations of input frames), - * then it must be set to TOP because the second word of this value has - * been reused to store other data in the basic block. Hence the first word - * no longer stores a valid long or double value. 
+ * then it must be set to TOP because the second word of this value has been + * reused to store other data in the basic block. Hence the first word no + * longer stores a valid long or double value. */ static final int TOP_IF_LONG_OR_DOUBLE = 0x800000; @@ -523,7 +523,8 @@ final class Frame { /** * Returns the output frame local variable type at the given index. * - * @param local the index of the local that must be returned. + * @param local + * the index of the local that must be returned. * @return the output frame local variable type at the given index. */ private int get(final int local) { @@ -545,8 +546,10 @@ final class Frame { /** * Sets the output frame local variable type at the given index. * - * @param local the index of the local that must be set. - * @param type the value of the local that must be set. + * @param local + * the index of the local that must be set. + * @param type + * the value of the local that must be set. */ private void set(final int local, final int type) { // creates and/or resizes the output local variables array if necessary @@ -566,7 +569,8 @@ final class Frame { /** * Pushes a new type onto the output frame stack. * - * @param type the type that must be pushed. + * @param type + * the type that must be pushed. */ private void push(final int type) { // creates and/or resizes the output stack array if necessary @@ -591,10 +595,12 @@ final class Frame { /** * Pushes a new type onto the output frame stack. * - * @param cw the ClassWriter to which this label belongs. - * @param desc the descriptor of the type to be pushed. Can also be a method - * descriptor (in this case this method pushes its return type onto - * the output frame stack). + * @param cw + * the ClassWriter to which this label belongs. + * @param desc + * the descriptor of the type to be pushed. Can also be a method + * descriptor (in this case this method pushes its return type + * onto the output frame stack). */ private void push(final ClassWriter cw, final String desc) { int type = type(cw, desc); @@ -609,72 +615,74 @@ final class Frame { /** * Returns the int encoding of the given type. * - * @param cw the ClassWriter to which this label belongs. - * @param desc a type descriptor. + * @param cw + * the ClassWriter to which this label belongs. + * @param desc + * a type descriptor. * @return the int encoding of the given type. */ private static int type(final ClassWriter cw, final String desc) { String t; int index = desc.charAt(0) == '(' ? desc.indexOf(')') + 1 : 0; switch (desc.charAt(index)) { - case 'V': - return 0; + case 'V': + return 0; + case 'Z': + case 'C': + case 'B': + case 'S': + case 'I': + return INTEGER; + case 'F': + return FLOAT; + case 'J': + return LONG; + case 'D': + return DOUBLE; + case 'L': + // stores the internal name, not the descriptor! + t = desc.substring(index + 1, desc.length() - 1); + return OBJECT | cw.addType(t); + // case '[': + default: + // extracts the dimensions and the element type + int data; + int dims = index + 1; + while (desc.charAt(dims) == '[') { + ++dims; + } + switch (desc.charAt(dims)) { case 'Z': + data = BOOLEAN; + break; case 'C': + data = CHAR; + break; case 'B': + data = BYTE; + break; case 'S': + data = SHORT; + break; case 'I': - return INTEGER; + data = INTEGER; + break; case 'F': - return FLOAT; + data = FLOAT; + break; case 'J': - return LONG; + data = LONG; + break; case 'D': - return DOUBLE; - case 'L': - // stores the internal name, not the descriptor! 
- t = desc.substring(index + 1, desc.length() - 1); - return OBJECT | cw.addType(t); - // case '[': + data = DOUBLE; + break; + // case 'L': default: - // extracts the dimensions and the element type - int data; - int dims = index + 1; - while (desc.charAt(dims) == '[') { - ++dims; - } - switch (desc.charAt(dims)) { - case 'Z': - data = BOOLEAN; - break; - case 'C': - data = CHAR; - break; - case 'B': - data = BYTE; - break; - case 'S': - data = SHORT; - break; - case 'I': - data = INTEGER; - break; - case 'F': - data = FLOAT; - break; - case 'J': - data = LONG; - break; - case 'D': - data = DOUBLE; - break; - // case 'L': - default: - // stores the internal name, not the descriptor - t = desc.substring(dims + 1, desc.length() - 1); - data = OBJECT | cw.addType(t); - } - return (dims - index) << 28 | data; + // stores the internal name, not the descriptor + t = desc.substring(dims + 1, desc.length() - 1); + data = OBJECT | cw.addType(t); + } + return (dims - index) << 28 | data; } } @@ -695,7 +703,8 @@ final class Frame { /** * Pops the given number of types from the output frame stack. * - * @param elements the number of types that must be popped. + * @param elements + * the number of types that must be popped. */ private void pop(final int elements) { if (outputStackTop >= elements) { @@ -712,9 +721,10 @@ final class Frame { /** * Pops a type from the output frame stack. * - * @param desc the descriptor of the type to be popped. Can also be a method - * descriptor (in this case this method pops the types corresponding - * to the method arguments). + * @param desc + * the descriptor of the type to be popped. Can also be a method + * descriptor (in this case this method pops the types + * corresponding to the method arguments). */ private void pop(final String desc) { char c = desc.charAt(0); @@ -731,7 +741,8 @@ final class Frame { * Adds a new type to the list of types on which a constructor is invoked in * the basic block. * - * @param var a type on a which a constructor is invoked. + * @param var + * a type on a which a constructor is invoked. */ private void init(final int var) { // creates and/or resizes the initializations array if necessary @@ -752,8 +763,10 @@ final class Frame { * Replaces the given type with the appropriate type if it is one of the * types on which a constructor is invoked in the basic block. * - * @param cw the ClassWriter to which this label belongs. - * @param t a type + * @param cw + * the ClassWriter to which this label belongs. + * @param t + * a type * @return t or, if t is one of the types on which a constructor is invoked * in the basic block, the type corresponding to this constructor. */ @@ -787,17 +800,17 @@ final class Frame { * Initializes the input frame of the first basic block from the method * descriptor. * - * @param cw the ClassWriter to which this label belongs. - * @param access the access flags of the method to which this label belongs. - * @param args the formal parameter types of this method. - * @param maxLocals the maximum number of local variables of this method. + * @param cw + * the ClassWriter to which this label belongs. + * @param access + * the access flags of the method to which this label belongs. + * @param args + * the formal parameter types of this method. + * @param maxLocals + * the maximum number of local variables of this method. 
*/ - void initInputFrame( - final ClassWriter cw, - final int access, - final Type[] args, - final int maxLocals) - { + void initInputFrame(final ClassWriter cw, final int access, + final Type[] args, final int maxLocals) { inputLocals = new int[maxLocals]; inputStack = new int[0]; int i = 0; @@ -823,435 +836,435 @@ final class Frame { /** * Simulates the action of the given instruction on the output stack frame. * - * @param opcode the opcode of the instruction. - * @param arg the operand of the instruction, if any. - * @param cw the class writer to which this label belongs. - * @param item the operand of the instructions, if any. + * @param opcode + * the opcode of the instruction. + * @param arg + * the operand of the instruction, if any. + * @param cw + * the class writer to which this label belongs. + * @param item + * the operand of the instructions, if any. */ - void execute( - final int opcode, - final int arg, - final ClassWriter cw, - final Item item) - { + void execute(final int opcode, final int arg, final ClassWriter cw, + final Item item) { int t1, t2, t3, t4; switch (opcode) { - case Opcodes.NOP: - case Opcodes.INEG: - case Opcodes.LNEG: - case Opcodes.FNEG: - case Opcodes.DNEG: - case Opcodes.I2B: - case Opcodes.I2C: - case Opcodes.I2S: - case Opcodes.GOTO: - case Opcodes.RETURN: - break; - case Opcodes.ACONST_NULL: - push(NULL); - break; - case Opcodes.ICONST_M1: - case Opcodes.ICONST_0: - case Opcodes.ICONST_1: - case Opcodes.ICONST_2: - case Opcodes.ICONST_3: - case Opcodes.ICONST_4: - case Opcodes.ICONST_5: - case Opcodes.BIPUSH: - case Opcodes.SIPUSH: - case Opcodes.ILOAD: + case Opcodes.NOP: + case Opcodes.INEG: + case Opcodes.LNEG: + case Opcodes.FNEG: + case Opcodes.DNEG: + case Opcodes.I2B: + case Opcodes.I2C: + case Opcodes.I2S: + case Opcodes.GOTO: + case Opcodes.RETURN: + break; + case Opcodes.ACONST_NULL: + push(NULL); + break; + case Opcodes.ICONST_M1: + case Opcodes.ICONST_0: + case Opcodes.ICONST_1: + case Opcodes.ICONST_2: + case Opcodes.ICONST_3: + case Opcodes.ICONST_4: + case Opcodes.ICONST_5: + case Opcodes.BIPUSH: + case Opcodes.SIPUSH: + case Opcodes.ILOAD: + push(INTEGER); + break; + case Opcodes.LCONST_0: + case Opcodes.LCONST_1: + case Opcodes.LLOAD: + push(LONG); + push(TOP); + break; + case Opcodes.FCONST_0: + case Opcodes.FCONST_1: + case Opcodes.FCONST_2: + case Opcodes.FLOAD: + push(FLOAT); + break; + case Opcodes.DCONST_0: + case Opcodes.DCONST_1: + case Opcodes.DLOAD: + push(DOUBLE); + push(TOP); + break; + case Opcodes.LDC: + switch (item.type) { + case ClassWriter.INT: push(INTEGER); break; - case Opcodes.LCONST_0: - case Opcodes.LCONST_1: - case Opcodes.LLOAD: + case ClassWriter.LONG: push(LONG); push(TOP); break; - case Opcodes.FCONST_0: - case Opcodes.FCONST_1: - case Opcodes.FCONST_2: - case Opcodes.FLOAD: + case ClassWriter.FLOAT: push(FLOAT); break; - case Opcodes.DCONST_0: - case Opcodes.DCONST_1: - case Opcodes.DLOAD: + case ClassWriter.DOUBLE: push(DOUBLE); push(TOP); break; - case Opcodes.LDC: - switch (item.type) { - case ClassWriter.INT: - push(INTEGER); - break; - case ClassWriter.LONG: - push(LONG); - push(TOP); - break; - case ClassWriter.FLOAT: - push(FLOAT); - break; - case ClassWriter.DOUBLE: - push(DOUBLE); - push(TOP); - break; - case ClassWriter.CLASS: - push(OBJECT | cw.addType("java/lang/Class")); - break; - case ClassWriter.STR: - push(OBJECT | cw.addType("java/lang/String")); - break; - case ClassWriter.MTYPE: - push(OBJECT | cw.addType("java/lang/invoke/MethodType")); - break; - // case ClassWriter.HANDLE_BASE + 
[1..9]: - default: - push(OBJECT | cw.addType("java/lang/invoke/MethodHandle")); - } - break; - case Opcodes.ALOAD: - push(get(arg)); - break; - case Opcodes.IALOAD: - case Opcodes.BALOAD: - case Opcodes.CALOAD: - case Opcodes.SALOAD: - pop(2); - push(INTEGER); - break; - case Opcodes.LALOAD: - case Opcodes.D2L: - pop(2); - push(LONG); - push(TOP); + case ClassWriter.CLASS: + push(OBJECT | cw.addType("java/lang/Class")); break; - case Opcodes.FALOAD: - pop(2); - push(FLOAT); + case ClassWriter.STR: + push(OBJECT | cw.addType("java/lang/String")); break; - case Opcodes.DALOAD: - case Opcodes.L2D: - pop(2); - push(DOUBLE); - push(TOP); + case ClassWriter.MTYPE: + push(OBJECT | cw.addType("java/lang/invoke/MethodType")); break; - case Opcodes.AALOAD: - pop(1); - t1 = pop(); - push(ELEMENT_OF + t1); - break; - case Opcodes.ISTORE: - case Opcodes.FSTORE: - case Opcodes.ASTORE: - t1 = pop(); - set(arg, t1); - if (arg > 0) { - t2 = get(arg - 1); - // if t2 is of kind STACK or LOCAL we cannot know its size! - if (t2 == LONG || t2 == DOUBLE) { - set(arg - 1, TOP); - } else if ((t2 & KIND) != BASE) { - set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE); - } + // case ClassWriter.HANDLE_BASE + [1..9]: + default: + push(OBJECT | cw.addType("java/lang/invoke/MethodHandle")); + } + break; + case Opcodes.ALOAD: + push(get(arg)); + break; + case Opcodes.IALOAD: + case Opcodes.BALOAD: + case Opcodes.CALOAD: + case Opcodes.SALOAD: + pop(2); + push(INTEGER); + break; + case Opcodes.LALOAD: + case Opcodes.D2L: + pop(2); + push(LONG); + push(TOP); + break; + case Opcodes.FALOAD: + pop(2); + push(FLOAT); + break; + case Opcodes.DALOAD: + case Opcodes.L2D: + pop(2); + push(DOUBLE); + push(TOP); + break; + case Opcodes.AALOAD: + pop(1); + t1 = pop(); + push(ELEMENT_OF + t1); + break; + case Opcodes.ISTORE: + case Opcodes.FSTORE: + case Opcodes.ASTORE: + t1 = pop(); + set(arg, t1); + if (arg > 0) { + t2 = get(arg - 1); + // if t2 is of kind STACK or LOCAL we cannot know its size! + if (t2 == LONG || t2 == DOUBLE) { + set(arg - 1, TOP); + } else if ((t2 & KIND) != BASE) { + set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE); } - break; - case Opcodes.LSTORE: - case Opcodes.DSTORE: - pop(1); - t1 = pop(); - set(arg, t1); - set(arg + 1, TOP); - if (arg > 0) { - t2 = get(arg - 1); - // if t2 is of kind STACK or LOCAL we cannot know its size! - if (t2 == LONG || t2 == DOUBLE) { - set(arg - 1, TOP); - } else if ((t2 & KIND) != BASE) { - set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE); - } + } + break; + case Opcodes.LSTORE: + case Opcodes.DSTORE: + pop(1); + t1 = pop(); + set(arg, t1); + set(arg + 1, TOP); + if (arg > 0) { + t2 = get(arg - 1); + // if t2 is of kind STACK or LOCAL we cannot know its size! 
+ if (t2 == LONG || t2 == DOUBLE) { + set(arg - 1, TOP); + } else if ((t2 & KIND) != BASE) { + set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE); } - break; - case Opcodes.IASTORE: - case Opcodes.BASTORE: - case Opcodes.CASTORE: - case Opcodes.SASTORE: - case Opcodes.FASTORE: - case Opcodes.AASTORE: - pop(3); - break; - case Opcodes.LASTORE: - case Opcodes.DASTORE: - pop(4); - break; - case Opcodes.POP: - case Opcodes.IFEQ: - case Opcodes.IFNE: - case Opcodes.IFLT: - case Opcodes.IFGE: - case Opcodes.IFGT: - case Opcodes.IFLE: - case Opcodes.IRETURN: - case Opcodes.FRETURN: - case Opcodes.ARETURN: - case Opcodes.TABLESWITCH: - case Opcodes.LOOKUPSWITCH: - case Opcodes.ATHROW: - case Opcodes.MONITORENTER: - case Opcodes.MONITOREXIT: - case Opcodes.IFNULL: - case Opcodes.IFNONNULL: - pop(1); - break; - case Opcodes.POP2: - case Opcodes.IF_ICMPEQ: - case Opcodes.IF_ICMPNE: - case Opcodes.IF_ICMPLT: - case Opcodes.IF_ICMPGE: - case Opcodes.IF_ICMPGT: - case Opcodes.IF_ICMPLE: - case Opcodes.IF_ACMPEQ: - case Opcodes.IF_ACMPNE: - case Opcodes.LRETURN: - case Opcodes.DRETURN: - pop(2); - break; - case Opcodes.DUP: - t1 = pop(); - push(t1); - push(t1); - break; - case Opcodes.DUP_X1: - t1 = pop(); - t2 = pop(); - push(t1); - push(t2); - push(t1); - break; - case Opcodes.DUP_X2: - t1 = pop(); - t2 = pop(); - t3 = pop(); - push(t1); - push(t3); - push(t2); - push(t1); - break; - case Opcodes.DUP2: - t1 = pop(); - t2 = pop(); - push(t2); - push(t1); - push(t2); - push(t1); - break; - case Opcodes.DUP2_X1: - t1 = pop(); - t2 = pop(); - t3 = pop(); - push(t2); - push(t1); - push(t3); - push(t2); - push(t1); - break; - case Opcodes.DUP2_X2: - t1 = pop(); - t2 = pop(); - t3 = pop(); - t4 = pop(); - push(t2); - push(t1); - push(t4); - push(t3); - push(t2); - push(t1); - break; - case Opcodes.SWAP: + } + break; + case Opcodes.IASTORE: + case Opcodes.BASTORE: + case Opcodes.CASTORE: + case Opcodes.SASTORE: + case Opcodes.FASTORE: + case Opcodes.AASTORE: + pop(3); + break; + case Opcodes.LASTORE: + case Opcodes.DASTORE: + pop(4); + break; + case Opcodes.POP: + case Opcodes.IFEQ: + case Opcodes.IFNE: + case Opcodes.IFLT: + case Opcodes.IFGE: + case Opcodes.IFGT: + case Opcodes.IFLE: + case Opcodes.IRETURN: + case Opcodes.FRETURN: + case Opcodes.ARETURN: + case Opcodes.TABLESWITCH: + case Opcodes.LOOKUPSWITCH: + case Opcodes.ATHROW: + case Opcodes.MONITORENTER: + case Opcodes.MONITOREXIT: + case Opcodes.IFNULL: + case Opcodes.IFNONNULL: + pop(1); + break; + case Opcodes.POP2: + case Opcodes.IF_ICMPEQ: + case Opcodes.IF_ICMPNE: + case Opcodes.IF_ICMPLT: + case Opcodes.IF_ICMPGE: + case Opcodes.IF_ICMPGT: + case Opcodes.IF_ICMPLE: + case Opcodes.IF_ACMPEQ: + case Opcodes.IF_ACMPNE: + case Opcodes.LRETURN: + case Opcodes.DRETURN: + pop(2); + break; + case Opcodes.DUP: + t1 = pop(); + push(t1); + push(t1); + break; + case Opcodes.DUP_X1: + t1 = pop(); + t2 = pop(); + push(t1); + push(t2); + push(t1); + break; + case Opcodes.DUP_X2: + t1 = pop(); + t2 = pop(); + t3 = pop(); + push(t1); + push(t3); + push(t2); + push(t1); + break; + case Opcodes.DUP2: + t1 = pop(); + t2 = pop(); + push(t2); + push(t1); + push(t2); + push(t1); + break; + case Opcodes.DUP2_X1: + t1 = pop(); + t2 = pop(); + t3 = pop(); + push(t2); + push(t1); + push(t3); + push(t2); + push(t1); + break; + case Opcodes.DUP2_X2: + t1 = pop(); + t2 = pop(); + t3 = pop(); + t4 = pop(); + push(t2); + push(t1); + push(t4); + push(t3); + push(t2); + push(t1); + break; + case Opcodes.SWAP: + t1 = pop(); + t2 = pop(); + push(t1); + push(t2); + break; + case 
Opcodes.IADD: + case Opcodes.ISUB: + case Opcodes.IMUL: + case Opcodes.IDIV: + case Opcodes.IREM: + case Opcodes.IAND: + case Opcodes.IOR: + case Opcodes.IXOR: + case Opcodes.ISHL: + case Opcodes.ISHR: + case Opcodes.IUSHR: + case Opcodes.L2I: + case Opcodes.D2I: + case Opcodes.FCMPL: + case Opcodes.FCMPG: + pop(2); + push(INTEGER); + break; + case Opcodes.LADD: + case Opcodes.LSUB: + case Opcodes.LMUL: + case Opcodes.LDIV: + case Opcodes.LREM: + case Opcodes.LAND: + case Opcodes.LOR: + case Opcodes.LXOR: + pop(4); + push(LONG); + push(TOP); + break; + case Opcodes.FADD: + case Opcodes.FSUB: + case Opcodes.FMUL: + case Opcodes.FDIV: + case Opcodes.FREM: + case Opcodes.L2F: + case Opcodes.D2F: + pop(2); + push(FLOAT); + break; + case Opcodes.DADD: + case Opcodes.DSUB: + case Opcodes.DMUL: + case Opcodes.DDIV: + case Opcodes.DREM: + pop(4); + push(DOUBLE); + push(TOP); + break; + case Opcodes.LSHL: + case Opcodes.LSHR: + case Opcodes.LUSHR: + pop(3); + push(LONG); + push(TOP); + break; + case Opcodes.IINC: + set(arg, INTEGER); + break; + case Opcodes.I2L: + case Opcodes.F2L: + pop(1); + push(LONG); + push(TOP); + break; + case Opcodes.I2F: + pop(1); + push(FLOAT); + break; + case Opcodes.I2D: + case Opcodes.F2D: + pop(1); + push(DOUBLE); + push(TOP); + break; + case Opcodes.F2I: + case Opcodes.ARRAYLENGTH: + case Opcodes.INSTANCEOF: + pop(1); + push(INTEGER); + break; + case Opcodes.LCMP: + case Opcodes.DCMPL: + case Opcodes.DCMPG: + pop(4); + push(INTEGER); + break; + case Opcodes.JSR: + case Opcodes.RET: + throw new RuntimeException( + "JSR/RET are not supported with computeFrames option"); + case Opcodes.GETSTATIC: + push(cw, item.strVal3); + break; + case Opcodes.PUTSTATIC: + pop(item.strVal3); + break; + case Opcodes.GETFIELD: + pop(1); + push(cw, item.strVal3); + break; + case Opcodes.PUTFIELD: + pop(item.strVal3); + pop(); + break; + case Opcodes.INVOKEVIRTUAL: + case Opcodes.INVOKESPECIAL: + case Opcodes.INVOKESTATIC: + case Opcodes.INVOKEINTERFACE: + pop(item.strVal3); + if (opcode != Opcodes.INVOKESTATIC) { t1 = pop(); - t2 = pop(); - push(t1); - push(t2); - break; - case Opcodes.IADD: - case Opcodes.ISUB: - case Opcodes.IMUL: - case Opcodes.IDIV: - case Opcodes.IREM: - case Opcodes.IAND: - case Opcodes.IOR: - case Opcodes.IXOR: - case Opcodes.ISHL: - case Opcodes.ISHR: - case Opcodes.IUSHR: - case Opcodes.L2I: - case Opcodes.D2I: - case Opcodes.FCMPL: - case Opcodes.FCMPG: - pop(2); - push(INTEGER); - break; - case Opcodes.LADD: - case Opcodes.LSUB: - case Opcodes.LMUL: - case Opcodes.LDIV: - case Opcodes.LREM: - case Opcodes.LAND: - case Opcodes.LOR: - case Opcodes.LXOR: - pop(4); - push(LONG); - push(TOP); - break; - case Opcodes.FADD: - case Opcodes.FSUB: - case Opcodes.FMUL: - case Opcodes.FDIV: - case Opcodes.FREM: - case Opcodes.L2F: - case Opcodes.D2F: - pop(2); - push(FLOAT); - break; - case Opcodes.DADD: - case Opcodes.DSUB: - case Opcodes.DMUL: - case Opcodes.DDIV: - case Opcodes.DREM: - pop(4); - push(DOUBLE); - push(TOP); - break; - case Opcodes.LSHL: - case Opcodes.LSHR: - case Opcodes.LUSHR: - pop(3); - push(LONG); - push(TOP); - break; - case Opcodes.IINC: - set(arg, INTEGER); - break; - case Opcodes.I2L: - case Opcodes.F2L: - pop(1); - push(LONG); - push(TOP); - break; - case Opcodes.I2F: - pop(1); - push(FLOAT); - break; - case Opcodes.I2D: - case Opcodes.F2D: - pop(1); - push(DOUBLE); - push(TOP); - break; - case Opcodes.F2I: - case Opcodes.ARRAYLENGTH: - case Opcodes.INSTANCEOF: - pop(1); - push(INTEGER); - break; - case Opcodes.LCMP: - case Opcodes.DCMPL: - 
case Opcodes.DCMPG: - pop(4); - push(INTEGER); - break; - case Opcodes.JSR: - case Opcodes.RET: - throw new RuntimeException("JSR/RET are not supported with computeFrames option"); - case Opcodes.GETSTATIC: - push(cw, item.strVal3); - break; - case Opcodes.PUTSTATIC: - pop(item.strVal3); - break; - case Opcodes.GETFIELD: - pop(1); - push(cw, item.strVal3); - break; - case Opcodes.PUTFIELD: - pop(item.strVal3); - pop(); - break; - case Opcodes.INVOKEVIRTUAL: - case Opcodes.INVOKESPECIAL: - case Opcodes.INVOKESTATIC: - case Opcodes.INVOKEINTERFACE: - pop(item.strVal3); - if (opcode != Opcodes.INVOKESTATIC) { - t1 = pop(); - if (opcode == Opcodes.INVOKESPECIAL - && item.strVal2.charAt(0) == '<') - { - init(t1); - } + if (opcode == Opcodes.INVOKESPECIAL + && item.strVal2.charAt(0) == '<') { + init(t1); } - push(cw, item.strVal3); + } + push(cw, item.strVal3); + break; + case Opcodes.INVOKEDYNAMIC: + pop(item.strVal2); + push(cw, item.strVal2); + break; + case Opcodes.NEW: + push(UNINITIALIZED | cw.addUninitializedType(item.strVal1, arg)); + break; + case Opcodes.NEWARRAY: + pop(); + switch (arg) { + case Opcodes.T_BOOLEAN: + push(ARRAY_OF | BOOLEAN); break; - case Opcodes.INVOKEDYNAMIC: - pop(item.strVal2); - push(cw, item.strVal2); + case Opcodes.T_CHAR: + push(ARRAY_OF | CHAR); break; - case Opcodes.NEW: - push(UNINITIALIZED | cw.addUninitializedType(item.strVal1, arg)); + case Opcodes.T_BYTE: + push(ARRAY_OF | BYTE); break; - case Opcodes.NEWARRAY: - pop(); - switch (arg) { - case Opcodes.T_BOOLEAN: - push(ARRAY_OF | BOOLEAN); - break; - case Opcodes.T_CHAR: - push(ARRAY_OF | CHAR); - break; - case Opcodes.T_BYTE: - push(ARRAY_OF | BYTE); - break; - case Opcodes.T_SHORT: - push(ARRAY_OF | SHORT); - break; - case Opcodes.T_INT: - push(ARRAY_OF | INTEGER); - break; - case Opcodes.T_FLOAT: - push(ARRAY_OF | FLOAT); - break; - case Opcodes.T_DOUBLE: - push(ARRAY_OF | DOUBLE); - break; - // case Opcodes.T_LONG: - default: - push(ARRAY_OF | LONG); - break; - } + case Opcodes.T_SHORT: + push(ARRAY_OF | SHORT); break; - case Opcodes.ANEWARRAY: - String s = item.strVal1; - pop(); - if (s.charAt(0) == '[') { - push(cw, '[' + s); - } else { - push(ARRAY_OF | OBJECT | cw.addType(s)); - } + case Opcodes.T_INT: + push(ARRAY_OF | INTEGER); break; - case Opcodes.CHECKCAST: - s = item.strVal1; - pop(); - if (s.charAt(0) == '[') { - push(cw, s); - } else { - push(OBJECT | cw.addType(s)); - } + case Opcodes.T_FLOAT: + push(ARRAY_OF | FLOAT); break; - // case Opcodes.MULTIANEWARRAY: + case Opcodes.T_DOUBLE: + push(ARRAY_OF | DOUBLE); + break; + // case Opcodes.T_LONG: default: - pop(arg); - push(cw, item.strVal1); + push(ARRAY_OF | LONG); break; + } + break; + case Opcodes.ANEWARRAY: + String s = item.strVal1; + pop(); + if (s.charAt(0) == '[') { + push(cw, '[' + s); + } else { + push(ARRAY_OF | OBJECT | cw.addType(s)); + } + break; + case Opcodes.CHECKCAST: + s = item.strVal1; + pop(); + if (s.charAt(0) == '[') { + push(cw, s); + } else { + push(OBJECT | cw.addType(s)); + } + break; + // case Opcodes.MULTIANEWARRAY: + default: + pop(arg); + push(cw, item.strVal1); + break; } } @@ -1260,10 +1273,13 @@ final class Frame { * frames of this basic block. Returns true if the input frame of * the given label has been changed by this operation. * - * @param cw the ClassWriter to which this label belongs. - * @param frame the basic block whose input frame must be updated. - * @param edge the kind of the {@link Edge} between this label and 'label'. - * See {@link Edge#info}. 
+ * @param cw + * the ClassWriter to which this label belongs. + * @param frame + * the basic block whose input frame must be updated. + * @param edge + * the kind of the {@link Edge} between this label and 'label'. + * See {@link Edge#info}. * @return true if the input frame of the given label has been * changed by this operation. */ @@ -1294,7 +1310,8 @@ final class Frame { } else { t = dim + inputStack[nStack - (s & VALUE)]; } - if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) { + if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 + && (t == LONG || t == DOUBLE)) { t = TOP; } } @@ -1346,7 +1363,8 @@ final class Frame { } else { t = dim + inputStack[nStack - (s & VALUE)]; } - if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) { + if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 + && (t == LONG || t == DOUBLE)) { t = TOP; } } @@ -1363,19 +1381,19 @@ final class Frame { * type. Returns true if the type array has been modified by this * operation. * - * @param cw the ClassWriter to which this label belongs. - * @param t the type with which the type array element must be merged. - * @param types an array of types. - * @param index the index of the type that must be merged in 'types'. + * @param cw + * the ClassWriter to which this label belongs. + * @param t + * the type with which the type array element must be merged. + * @param types + * an array of types. + * @param index + * the index of the type that must be merged in 'types'. * @return true if the type array has been modified by this * operation. */ - private static boolean merge( - final ClassWriter cw, - int t, - final int[] types, - final int index) - { + private static boolean merge(final ClassWriter cw, int t, + final int[] types, final int index) { int u = types[index]; if (u == t) { // if the types are equal, merge(u,t)=u, so there is no change diff --git a/src/asm/scala/tools/asm/Handle.java b/src/asm/scala/tools/asm/Handle.java index be8f334192..5dd06a54b9 100644 --- a/src/asm/scala/tools/asm/Handle.java +++ b/src/asm/scala/tools/asm/Handle.java @@ -66,18 +66,23 @@ public final class Handle { /** * Constructs a new field or method handle. * - * @param tag the kind of field or method designated by this Handle. Must be - * {@link Opcodes#H_GETFIELD}, {@link Opcodes#H_GETSTATIC}, - * {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC}, - * {@link Opcodes#H_INVOKEVIRTUAL}, {@link Opcodes#H_INVOKESTATIC}, - * {@link Opcodes#H_INVOKESPECIAL}, - * {@link Opcodes#H_NEWINVOKESPECIAL} or - * {@link Opcodes#H_INVOKEINTERFACE}. - * @param owner the internal name of the field or method designed by this - * handle. - * @param name the name of the field or method designated by this handle. - * @param desc the descriptor of the field or method designated by this - * handle. + * @param tag + * the kind of field or method designated by this Handle. Must be + * {@link Opcodes#H_GETFIELD}, {@link Opcodes#H_GETSTATIC}, + * {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC}, + * {@link Opcodes#H_INVOKEVIRTUAL}, + * {@link Opcodes#H_INVOKESTATIC}, + * {@link Opcodes#H_INVOKESPECIAL}, + * {@link Opcodes#H_NEWINVOKESPECIAL} or + * {@link Opcodes#H_INVOKEINTERFACE}. + * @param owner + * the internal name of the field or method designed by this + * handle. + * @param name + * the name of the field or method designated by this handle. + * @param desc + * the descriptor of the field or method designated by this + * handle. 
*/ public Handle(int tag, String owner, String name, String desc) { this.tag = tag; @@ -101,11 +106,9 @@ public final class Handle { } /** - * Returns the internal name of the field or method designed by this - * handle. + * Returns the internal name of the field or method designed by this handle. * - * @return the internal name of the field or method designed by this - * handle. + * @return the internal name of the field or method designed by this handle. */ public String getOwner() { return owner; @@ -138,8 +141,8 @@ public final class Handle { return false; } Handle h = (Handle) obj; - return tag == h.tag && owner.equals(h.owner) - && name.equals(h.name) && desc.equals(h.desc); + return tag == h.tag && owner.equals(h.owner) && name.equals(h.name) + && desc.equals(h.desc); } @Override @@ -149,8 +152,13 @@ public final class Handle { /** * Returns the textual representation of this handle. The textual - * representation is:
    owner '.' name desc ' ' '(' tag ')'
    . As - * this format is unambiguous, it can be parsed if necessary. + * representation is: + * + *
    +     * owner '.' name desc ' ' '(' tag ')'
    +     * 
    + * + * . As this format is unambiguous, it can be parsed if necessary. */ @Override public String toString() { diff --git a/src/asm/scala/tools/asm/Handler.java b/src/asm/scala/tools/asm/Handler.java index 9e92bb98be..a06cb8152a 100644 --- a/src/asm/scala/tools/asm/Handler.java +++ b/src/asm/scala/tools/asm/Handler.java @@ -72,9 +72,12 @@ class Handler { * Removes the range between start and end from the given exception * handlers. * - * @param h an exception handler list. - * @param start the start of the range to be removed. - * @param end the end of the range to be removed. Maybe null. + * @param h + * an exception handler list. + * @param start + * the start of the range to be removed. + * @param end + * the end of the range to be removed. Maybe null. * @return the exception handler list with the start-end range removed. */ static Handler remove(Handler h, Label start, Label end) { diff --git a/src/asm/scala/tools/asm/Item.java b/src/asm/scala/tools/asm/Item.java index 021a0b11d3..94195a1082 100644 --- a/src/asm/scala/tools/asm/Item.java +++ b/src/asm/scala/tools/asm/Item.java @@ -53,8 +53,8 @@ final class Item { * {@link ClassWriter#METH}, {@link ClassWriter#IMETH}, * {@link ClassWriter#MTYPE}, {@link ClassWriter#INDY}. * - * MethodHandle constant 9 variations are stored using a range - * of 9 values from {@link ClassWriter#HANDLE_BASE} + 1 to + * MethodHandle constant 9 variations are stored using a range of 9 values + * from {@link ClassWriter#HANDLE_BASE} + 1 to * {@link ClassWriter#HANDLE_BASE} + 9. * * Special Item types are used for Items that are stored in the ClassWriter @@ -115,7 +115,8 @@ final class Item { * Constructs an uninitialized {@link Item} for constant pool element at * given position. * - * @param index index of the item to be constructed. + * @param index + * index of the item to be constructed. */ Item(final int index) { this.index = index; @@ -124,8 +125,10 @@ final class Item { /** * Constructs a copy of the given item. * - * @param index index of the item to be constructed. - * @param i the item that must be copied into the item to be constructed. + * @param index + * index of the item to be constructed. + * @param i + * the item that must be copied into the item to be constructed. */ Item(final int index, final Item i) { this.index = index; @@ -141,7 +144,8 @@ final class Item { /** * Sets this item to an integer item. * - * @param intVal the value of this item. + * @param intVal + * the value of this item. */ void set(final int intVal) { this.type = ClassWriter.INT; @@ -152,7 +156,8 @@ final class Item { /** * Sets this item to a long item. * - * @param longVal the value of this item. + * @param longVal + * the value of this item. */ void set(final long longVal) { this.type = ClassWriter.LONG; @@ -163,7 +168,8 @@ final class Item { /** * Sets this item to a float item. * - * @param floatVal the value of this item. + * @param floatVal + * the value of this item. */ void set(final float floatVal) { this.type = ClassWriter.FLOAT; @@ -174,7 +180,8 @@ final class Item { /** * Sets this item to a double item. * - * @param doubleVal the value of this item. + * @param doubleVal + * the value of this item. */ void set(final double doubleVal) { this.type = ClassWriter.DOUBLE; @@ -185,49 +192,53 @@ final class Item { /** * Sets this item to an item that do not hold a primitive value. * - * @param type the type of this item. - * @param strVal1 first part of the value of this item. - * @param strVal2 second part of the value of this item. 
- * @param strVal3 third part of the value of this item. + * @param type + * the type of this item. + * @param strVal1 + * first part of the value of this item. + * @param strVal2 + * second part of the value of this item. + * @param strVal3 + * third part of the value of this item. */ - void set( - final int type, - final String strVal1, - final String strVal2, - final String strVal3) - { + void set(final int type, final String strVal1, final String strVal2, + final String strVal3) { this.type = type; this.strVal1 = strVal1; this.strVal2 = strVal2; this.strVal3 = strVal3; switch (type) { - case ClassWriter.UTF8: - case ClassWriter.STR: - case ClassWriter.CLASS: - case ClassWriter.MTYPE: - case ClassWriter.TYPE_NORMAL: - hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()); - return; - case ClassWriter.NAME_TYPE: - hashCode = 0x7FFFFFFF & (type + strVal1.hashCode() - * strVal2.hashCode()); - return; - // ClassWriter.FIELD: - // ClassWriter.METH: - // ClassWriter.IMETH: - // ClassWriter.HANDLE_BASE + 1..9 - default: - hashCode = 0x7FFFFFFF & (type + strVal1.hashCode() - * strVal2.hashCode() * strVal3.hashCode()); + case ClassWriter.UTF8: + case ClassWriter.STR: + case ClassWriter.CLASS: + case ClassWriter.MTYPE: + case ClassWriter.TYPE_NORMAL: + hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()); + return; + case ClassWriter.NAME_TYPE: { + hashCode = 0x7FFFFFFF & (type + strVal1.hashCode() + * strVal2.hashCode()); + return; + } + // ClassWriter.FIELD: + // ClassWriter.METH: + // ClassWriter.IMETH: + // ClassWriter.HANDLE_BASE + 1..9 + default: + hashCode = 0x7FFFFFFF & (type + strVal1.hashCode() + * strVal2.hashCode() * strVal3.hashCode()); } } /** * Sets the item to an InvokeDynamic item. * - * @param name invokedynamic's name. - * @param desc invokedynamic's desc. - * @param bsmIndex zero based index into the class attribute BootrapMethods. + * @param name + * invokedynamic's name. + * @param desc + * invokedynamic's desc. + * @param bsmIndex + * zero based index into the class attribute BootrapMethods. */ void set(String name, String desc, int bsmIndex) { this.type = ClassWriter.INDY; @@ -241,10 +252,12 @@ final class Item { /** * Sets the item to a BootstrapMethod item. * - * @param position position in byte in the class attribute BootrapMethods. - * @param hashCode hashcode of the item. This hashcode is processed from - * the hashcode of the bootstrap method and the hashcode of - * all bootstrap arguments. + * @param position + * position in byte in the class attribute BootrapMethods. + * @param hashCode + * hashcode of the item. This hashcode is processed from the + * hashcode of the bootstrap method and the hashcode of all + * bootstrap arguments. */ void set(int position, int hashCode) { this.type = ClassWriter.BSM; @@ -256,41 +269,42 @@ final class Item { * Indicates if the given item is equal to this one. This method assumes * that the two items have the same {@link #type}. * - * @param i the item to be compared to this one. Both items must have the - * same {@link #type}. + * @param i + * the item to be compared to this one. Both items must have the + * same {@link #type}. * @return true if the given item if equal to this one, * false otherwise. 
*/ boolean isEqualTo(final Item i) { switch (type) { - case ClassWriter.UTF8: - case ClassWriter.STR: - case ClassWriter.CLASS: - case ClassWriter.MTYPE: - case ClassWriter.TYPE_NORMAL: - return i.strVal1.equals(strVal1); - case ClassWriter.TYPE_MERGED: - case ClassWriter.LONG: - case ClassWriter.DOUBLE: - return i.longVal == longVal; - case ClassWriter.INT: - case ClassWriter.FLOAT: - return i.intVal == intVal; - case ClassWriter.TYPE_UNINIT: - return i.intVal == intVal && i.strVal1.equals(strVal1); - case ClassWriter.NAME_TYPE: - return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2); - case ClassWriter.INDY: - return i.longVal == longVal && i.strVal1.equals(strVal1) - && i.strVal2.equals(strVal2); - - // case ClassWriter.FIELD: - // case ClassWriter.METH: - // case ClassWriter.IMETH: - // case ClassWriter.HANDLE_BASE + 1..9 - default: - return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2) - && i.strVal3.equals(strVal3); + case ClassWriter.UTF8: + case ClassWriter.STR: + case ClassWriter.CLASS: + case ClassWriter.MTYPE: + case ClassWriter.TYPE_NORMAL: + return i.strVal1.equals(strVal1); + case ClassWriter.TYPE_MERGED: + case ClassWriter.LONG: + case ClassWriter.DOUBLE: + return i.longVal == longVal; + case ClassWriter.INT: + case ClassWriter.FLOAT: + return i.intVal == intVal; + case ClassWriter.TYPE_UNINIT: + return i.intVal == intVal && i.strVal1.equals(strVal1); + case ClassWriter.NAME_TYPE: + return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2); + case ClassWriter.INDY: { + return i.longVal == longVal && i.strVal1.equals(strVal1) + && i.strVal2.equals(strVal2); + } + // case ClassWriter.FIELD: + // case ClassWriter.METH: + // case ClassWriter.IMETH: + // case ClassWriter.HANDLE_BASE + 1..9 + default: + return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2) + && i.strVal3.equals(strVal3); } } diff --git a/src/asm/scala/tools/asm/Label.java b/src/asm/scala/tools/asm/Label.java index 712c7f251f..5d5529ce74 100644 --- a/src/asm/scala/tools/asm/Label.java +++ b/src/asm/scala/tools/asm/Label.java @@ -32,9 +32,9 @@ package scala.tools.asm; /** * A label represents a position in the bytecode of a method. Labels are used * for jump, goto, and switch instructions, and for try catch blocks. A label - * designates the instruction that is just after. Note however that - * there can be other elements between a label and the instruction it - * designates (such as other labels, stack map frames, line numbers, etc.). + * designates the instruction that is just after. Note however that there + * can be other elements between a label and the instruction it designates (such + * as other labels, stack map frames, line numbers, etc.). * * @author Eric Bruneton */ @@ -110,8 +110,8 @@ public class Label { /** * Field used to associate user information to a label. Warning: this field * is used by the ASM tree package. In order to use it with the ASM tree - * package you must override the {@link - * org.objectweb.asm.tree.MethodNode#getLabelNode} method. + * package you must override the + * {@link scala.tools.asm.tree.MethodNode#getLabelNode} method. */ public Object info; @@ -154,7 +154,7 @@ public class Label { * indicates if this reference uses 2 or 4 bytes, and its absolute value * gives the position of the bytecode instruction. This array is also used * as a bitset to store the subroutines to which a basic block belongs. 
This - * information is needed in {@linked MethodWriter#visitMaxs}, after all + * information is needed in {@linked MethodWriter#visitMaxs}, after all * forward references have been resolved. Hence the same array can be used * for both purposes without problems. */ @@ -177,11 +177,11 @@ public class Label { * state of the local variables and the operand stack at the end of each * basic block, called the "output frame", relatively to the frame * state at the beginning of the basic block, which is called the "input - * frame", and which is unknown during this step. The second step, - * in {@link MethodWriter#visitMaxs}, is a fix point algorithm that - * computes information about the input frame of each basic block, from the - * input state of the first basic block (known from the method signature), - * and by the using the previously computed relative output frames. + * frame", and which is unknown during this step. The second step, in + * {@link MethodWriter#visitMaxs}, is a fix point algorithm that computes + * information about the input frame of each basic block, from the input + * state of the first basic block (known from the method signature), and by + * the using the previously computed relative output frames. * * The algorithm used to compute the maximum stack size only computes the * relative output and absolute input stack heights, while the algorithm @@ -266,11 +266,13 @@ public class Label { * generators or adapters. * * @return the offset corresponding to this label. - * @throws IllegalStateException if this label is not resolved yet. + * @throws IllegalStateException + * if this label is not resolved yet. */ public int getOffset() { if ((status & RESOLVED) == 0) { - throw new IllegalStateException("Label offset position has not been resolved yet"); + throw new IllegalStateException( + "Label offset position has not been resolved yet"); } return position; } @@ -281,21 +283,21 @@ public class Label { * directly. Otherwise, a null offset is written and a new forward reference * is declared for this label. * - * @param owner the code writer that calls this method. - * @param out the bytecode of the method. - * @param source the position of first byte of the bytecode instruction that - * contains this label. - * @param wideOffset true if the reference must be stored in 4 - * bytes, or false if it must be stored with 2 bytes. - * @throws IllegalArgumentException if this label has not been created by - * the given code writer. - */ - void put( - final MethodWriter owner, - final ByteVector out, - final int source, - final boolean wideOffset) - { + * @param owner + * the code writer that calls this method. + * @param out + * the bytecode of the method. + * @param source + * the position of first byte of the bytecode instruction that + * contains this label. + * @param wideOffset + * true if the reference must be stored in 4 bytes, or + * false if it must be stored with 2 bytes. + * @throws IllegalArgumentException + * if this label has not been created by the given code writer. + */ + void put(final MethodWriter owner, final ByteVector out, final int source, + final boolean wideOffset) { if ((status & RESOLVED) == 0) { if (wideOffset) { addReference(-1 - source, out.length); @@ -319,25 +321,21 @@ public class Label { * yet. For backward references, the offset of the reference can be, and * must be, computed and stored directly. * - * @param sourcePosition the position of the referencing instruction. This - * position will be used to compute the offset of this forward - * reference. 
- * @param referencePosition the position where the offset for this forward - * reference must be stored. - */ - private void addReference( - final int sourcePosition, - final int referencePosition) - { + * @param sourcePosition + * the position of the referencing instruction. This position + * will be used to compute the offset of this forward reference. + * @param referencePosition + * the position where the offset for this forward reference must + * be stored. + */ + private void addReference(final int sourcePosition, + final int referencePosition) { if (srcAndRefPositions == null) { srcAndRefPositions = new int[6]; } if (referenceCount >= srcAndRefPositions.length) { int[] a = new int[srcAndRefPositions.length + 6]; - System.arraycopy(srcAndRefPositions, - 0, - a, - 0, + System.arraycopy(srcAndRefPositions, 0, a, 0, srcAndRefPositions.length); srcAndRefPositions = a; } @@ -351,9 +349,12 @@ public class Label { * position becomes known. This method fills in the blanks that where left * in the bytecode by each forward reference previously added to this label. * - * @param owner the code writer that calls this method. - * @param position the position of this label in the bytecode. - * @param data the bytecode of the method. + * @param owner + * the code writer that calls this method. + * @param position + * the position of this label in the bytecode. + * @param data + * the bytecode of the method. * @return true if a blank that was left for this label was to * small to store the offset. In such a case the corresponding jump * instruction is replaced with a pseudo instruction (using unused @@ -361,14 +362,12 @@ public class Label { * instructions will need to be replaced with true instructions with * wider offsets (4 bytes instead of 2). This is done in * {@link MethodWriter#resizeInstructions}. - * @throws IllegalArgumentException if this label has already been resolved, - * or if it has not been created by the given code writer. - */ - boolean resolve( - final MethodWriter owner, - final int position, - final byte[] data) - { + * @throws IllegalArgumentException + * if this label has already been resolved, or if it has not + * been created by the given code writer. + */ + boolean resolve(final MethodWriter owner, final int position, + final byte[] data) { boolean needUpdate = false; this.status |= RESOLVED; this.position = position; @@ -431,7 +430,8 @@ public class Label { /** * Returns true is this basic block belongs to the given subroutine. * - * @param id a subroutine id. + * @param id + * a subroutine id. * @return true is this basic block belongs to the given subroutine. */ boolean inSubroutine(final long id) { @@ -445,7 +445,8 @@ public class Label { * Returns true if this basic block and the given one belong to a common * subroutine. * - * @param block another basic block. + * @param block + * another basic block. * @return true if this basic block and the given one belong to a common * subroutine. */ @@ -464,8 +465,10 @@ public class Label { /** * Marks this basic block as belonging to the given subroutine. * - * @param id a subroutine id. - * @param nbSubroutines the total number of subroutines in the method. + * @param id + * a subroutine id. + * @param nbSubroutines + * the total number of subroutines in the method. */ void addToSubroutine(final long id, final int nbSubroutines) { if ((status & VISITED) == 0) { @@ -481,14 +484,16 @@ public class Label { * flow graph to find all the blocks that are reachable from the current * block WITHOUT following any JSR target. 
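For orientation, a minimal sketch of the forward-reference mechanism documented above, seen from the bytecode-generator side: the GOTO below is emitted before its target position is known, so the label records a forward reference that is back-patched once visitLabel fixes the position. This is not part of the patch; the class name "Demo" and the generator class are illustrative only.

import scala.tools.asm.ClassWriter;
import scala.tools.asm.Label;
import scala.tools.asm.MethodVisitor;
import scala.tools.asm.Opcodes;

// Illustrative generator: emits "public static int one()" containing a forward GOTO.
public class ForwardJumpSketch {
    public static byte[] generate() {
        ClassWriter cw = new ClassWriter(0);
        cw.visit(Opcodes.V1_5, Opcodes.ACC_PUBLIC, "Demo", null, "java/lang/Object", null);
        MethodVisitor mv = cw.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC,
                "one", "()I", null, null);
        mv.visitCode();
        Label target = new Label();
        mv.visitJumpInsn(Opcodes.GOTO, target); // forward reference: the target offset is still unknown
        mv.visitLabel(target);                  // position now known; the blank left earlier is back-patched
        mv.visitInsn(Opcodes.ICONST_1);
        mv.visitInsn(Opcodes.IRETURN);
        mv.visitMaxs(1, 0);
        mv.visitEnd();
        cw.visitEnd();
        return cw.toByteArray();
    }
}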
* - * @param JSR a JSR block that jumps to this subroutine. If this JSR is not - * null it is added to the successor of the RET blocks found in the - * subroutine. - * @param id the id of this subroutine. - * @param nbSubroutines the total number of subroutines in the method. - */ - void visitSubroutine(final Label JSR, final long id, final int nbSubroutines) - { + * @param JSR + * a JSR block that jumps to this subroutine. If this JSR is not + * null it is added to the successor of the RET blocks found in + * the subroutine. + * @param id + * the id of this subroutine. + * @param nbSubroutines + * the total number of subroutines in the method. + */ + void visitSubroutine(final Label JSR, final long id, final int nbSubroutines) { // user managed stack of labels, to avoid using a recursive method // (recursivity can lead to stack overflow with very large methods) Label stack = this; diff --git a/src/asm/scala/tools/asm/MethodVisitor.java b/src/asm/scala/tools/asm/MethodVisitor.java index a8a859a6a9..e43ca97823 100644 --- a/src/asm/scala/tools/asm/MethodVisitor.java +++ b/src/asm/scala/tools/asm/MethodVisitor.java @@ -30,19 +30,19 @@ package scala.tools.asm; /** - * A visitor to visit a Java method. The methods of this class must be - * called in the following order: [ visitAnnotationDefault ] ( + * A visitor to visit a Java method. The methods of this class must be called in + * the following order: [ visitAnnotationDefault ] ( * visitAnnotation | visitParameterAnnotation | * visitAttribute )* [ visitCode ( visitFrame | - * visitXInsn
    | visitLabel | visitTryCatchBlock | - * visitLocalVariable | visitLineNumber )* visitMaxs ] - * visitEnd. In addition, the visitXInsn
    - * and visitLabel methods must be called in the sequential order of - * the bytecode instructions of the visited code, visitTryCatchBlock - * must be called before the labels passed as arguments have been - * visited, and the visitLocalVariable and visitLineNumber - * methods must be called after the labels passed as arguments have been - * visited. + * visitXInsn | visitLabel | + * visitTryCatchBlock | visitLocalVariable | + * visitLineNumber )* visitMaxs ] visitEnd. In + * addition, the visitXInsn and visitLabel methods + * must be called in the sequential order of the bytecode instructions of the + * visited code, visitTryCatchBlock must be called before the + * labels passed as arguments have been visited, and the + * visitLocalVariable and visitLineNumber methods must be + * called after the labels passed as arguments have been visited. * * @author Eric Bruneton */ @@ -63,8 +63,9 @@ public abstract class MethodVisitor { /** * Constructs a new {@link MethodVisitor}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. */ public MethodVisitor(final int api) { this(api, null); @@ -73,15 +74,17 @@ public abstract class MethodVisitor { /** * Constructs a new {@link MethodVisitor}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. - * @param mv the method visitor to which this visitor must delegate method - * calls. May be null. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. + * @param mv + * the method visitor to which this visitor must delegate method + * calls. May be null. */ public MethodVisitor(final int api, final MethodVisitor mv) { - /*if (api != Opcodes.ASM4) { + if (api != Opcodes.ASM4) { throw new IllegalArgumentException(); - }*/ + } this.api = api; this.mv = mv; } @@ -94,8 +97,8 @@ public abstract class MethodVisitor { * Visits the default value of this annotation interface method. * * @return a visitor to the visit the actual default value of this - * annotation interface method, or null if this visitor - * is not interested in visiting this default value. The 'name' + * annotation interface method, or null if this visitor is + * not interested in visiting this default value. The 'name' * parameters passed to the methods of this annotation visitor are * ignored. Moreover, exacly one visit method must be called on this * annotation visitor, followed by visitEnd. @@ -110,8 +113,10 @@ public abstract class MethodVisitor { /** * Visits an annotation of this method. * - * @param desc the class descriptor of the annotation class. - * @param visible true if the annotation is visible at runtime. + * @param desc + * the class descriptor of the annotation class. + * @param visible + * true if the annotation is visible at runtime. * @return a visitor to visit the annotation values, or null if * this visitor is not interested in visiting this annotation. */ @@ -125,17 +130,17 @@ public abstract class MethodVisitor { /** * Visits an annotation of a parameter this method. * - * @param parameter the parameter index. - * @param desc the class descriptor of the annotation class. - * @param visible true if the annotation is visible at runtime. + * @param parameter + * the parameter index. + * @param desc + * the class descriptor of the annotation class. 
+ * @param visible + * true if the annotation is visible at runtime. * @return a visitor to visit the annotation values, or null if * this visitor is not interested in visiting this annotation. */ - public AnnotationVisitor visitParameterAnnotation( - int parameter, - String desc, - boolean visible) - { + public AnnotationVisitor visitParameterAnnotation(int parameter, + String desc, boolean visible) { if (mv != null) { return mv.visitParameterAnnotation(parameter, desc, visible); } @@ -145,7 +150,8 @@ public abstract class MethodVisitor { /** * Visits a non standard attribute of this method. * - * @param attr an attribute. + * @param attr + * an attribute. */ public void visitAttribute(Attribute attr) { if (mv != null) { @@ -169,57 +175,74 @@ public abstract class MethodVisitor { * such as GOTO or THROW, that is the target of a jump instruction, or that * starts an exception handler block. The visited types must describe the * values of the local variables and of the operand stack elements just - * before i is executed.

-     * (*) this is mandatory only
-     * for classes whose version is greater than or equal to
-     * {@link Opcodes#V1_6 V1_6}.
-     * Packed frames are basically
-     * "deltas" from the state of the previous frame (very first frame is
-     * implicitly defined by the method's parameters and access flags):
+     * before i is executed.<br>
+     * <br>
+     * (*) this is mandatory only for classes whose version is greater than or
+     * equal to {@link Opcodes#V1_6 V1_6}.<br>
+     * <br>
+     * The frames of a method must be given either in expanded form, or in
+     * compressed form (all frames must use the same format, i.e. you must not
+     * mix expanded and compressed frames within a single method):
+     * <ul>
+     * <li>In expanded form, all frames must have the F_NEW type.</li>
+     * <li>In compressed form, frames are basically "deltas" from the state of
+     * the previous frame:
+     * <ul>
-     * <li>{@link Opcodes#F_SAME} representing frame with exactly the same
-     * locals as the previous frame and with the empty stack.</li>
-     * <li>{@link Opcodes#F_SAME1}
-     * representing frame with exactly the same locals as the previous frame and
-     * with single value on the stack (nStack is 1 and
-     * stack[0] contains value for the type of the stack item).</li>
+     * <li>{@link Opcodes#F_SAME} representing frame with exactly the same
+     * locals as the previous frame and with the empty stack.</li>
+     * <li>{@link Opcodes#F_SAME1} representing frame with exactly the same
+     * locals as the previous frame and with single value on the stack (
+     * nStack is 1 and stack[0] contains value for the
+     * type of the stack item).</li>
      * <li>{@link Opcodes#F_APPEND} representing frame with current locals are
      * the same as the locals in the previous frame, except that additional
      * locals are defined (nLocal is 1, 2 or 3 and
      * local elements contains values representing added types).</li>
-     * <li>{@link Opcodes#F_CHOP} representing frame with current locals are
-     * the same as the locals in the previous frame, except that the last 1-3
-     * locals are absent and with the empty stack (nLocals is 1,
-     * 2 or 3).</li>
-     * <li>{@link Opcodes#F_FULL} representing complete frame
-     * data.</li>
+     * <li>{@link Opcodes#F_CHOP} representing frame with current locals are the
+     * same as the locals in the previous frame, except that the last 1-3 locals
+     * are absent and with the empty stack (nLocals is 1, 2 or 3).</li>
+     * <li>{@link Opcodes#F_FULL} representing complete frame data.</li>
+     * </ul>
+     * </li>
+     * </ul>
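Editor's note: a minimal sketch of the compressed form described in the list above, not part of the patch. The class name demo/Abs, the method name and the FrameSketch wrapper are illustrative; the ClassWriter is created with flag 0 so that neither frames nor max sizes are recomputed and the visited frame is emitted as-is.

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    public class FrameSketch implements Opcodes {
        // Generates the equivalent of: static int abs(int x) { if (x < 0) x = -x; return x; }
        public static byte[] generate() {
            ClassWriter cw = new ClassWriter(0);   // no COMPUTE_FRAMES / COMPUTE_MAXS
            cw.visit(V1_6, ACC_PUBLIC, "demo/Abs", null, "java/lang/Object", null);
            MethodVisitor mv = cw.visitMethod(ACC_PUBLIC | ACC_STATIC, "abs", "(I)I", null, null);
            mv.visitCode();
            mv.visitVarInsn(ILOAD, 0);
            Label end = new Label();
            mv.visitJumpInsn(IFGE, end);           // skip the negation when x >= 0
            mv.visitVarInsn(ILOAD, 0);
            mv.visitInsn(INEG);
            mv.visitVarInsn(ISTORE, 0);
            mv.visitLabel(end);
            // 'end' is a jump target, so a frame is visited just before its first
            // instruction: same locals as the implicit first frame, empty stack.
            mv.visitFrame(F_SAME, 0, null, 0, null);
            mv.visitVarInsn(ILOAD, 0);
            mv.visitInsn(IRETURN);
            mv.visitMaxs(1, 1);
            mv.visitEnd();
            cw.visitEnd();
            return cw.toByteArray();
        }
    }

With ClassWriter.COMPUTE_FRAMES the frame would instead be recomputed from scratch and the visitFrame call ignored, which is the compute == FRAMES early return visible in MethodWriter.visitFrame further down in this patch.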
    + * In both cases the first frame, corresponding to the method's parameters + * and access flags, is implicit and must not be visited. Also, it is + * illegal to visit two or more frames for the same code location (i.e., at + * least one instruction must be visited between two calls to visitFrame). * - * @param type the type of this stack map frame. Must be - * {@link Opcodes#F_NEW} for expanded frames, or - * {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND}, - * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or - * {@link Opcodes#F_APPEND}, {@link Opcodes#F_SAME1} for compressed - * frames. - * @param nLocal the number of local variables in the visited frame. - * @param local the local variable types in this frame. This array must not - * be modified. Primitive types are represented by - * {@link Opcodes#TOP}, {@link Opcodes#INTEGER}, - * {@link Opcodes#FLOAT}, {@link Opcodes#LONG}, - * {@link Opcodes#DOUBLE},{@link Opcodes#NULL} or - * {@link Opcodes#UNINITIALIZED_THIS} (long and double are - * represented by a single element). Reference types are represented - * by String objects (representing internal names), and uninitialized - * types by Label objects (this label designates the NEW instruction - * that created this uninitialized value). - * @param nStack the number of operand stack elements in the visited frame. - * @param stack the operand stack types in this frame. This array must not - * be modified. Its content has the same format as the "local" array. - * @throws IllegalStateException if a frame is visited just after another - * one, without any instruction between the two (unless this frame - * is a Opcodes#F_SAME frame, in which case it is silently ignored). + * @param type + * the type of this stack map frame. Must be + * {@link Opcodes#F_NEW} for expanded frames, or + * {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND}, + * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or + * {@link Opcodes#F_APPEND}, {@link Opcodes#F_SAME1} for + * compressed frames. + * @param nLocal + * the number of local variables in the visited frame. + * @param local + * the local variable types in this frame. This array must not be + * modified. Primitive types are represented by + * {@link Opcodes#TOP}, {@link Opcodes#INTEGER}, + * {@link Opcodes#FLOAT}, {@link Opcodes#LONG}, + * {@link Opcodes#DOUBLE},{@link Opcodes#NULL} or + * {@link Opcodes#UNINITIALIZED_THIS} (long and double are + * represented by a single element). Reference types are + * represented by String objects (representing internal names), + * and uninitialized types by Label objects (this label + * designates the NEW instruction that created this uninitialized + * value). + * @param nStack + * the number of operand stack elements in the visited frame. + * @param stack + * the operand stack types in this frame. This array must not be + * modified. Its content has the same format as the "local" + * array. + * @throws IllegalStateException + * if a frame is visited just after another one, without any + * instruction between the two (unless this frame is a + * Opcodes#F_SAME frame, in which case it is silently ignored). */ - public void visitFrame( - int type, - int nLocal, - Object[] local, - int nStack, - Object[] stack) - { + public void visitFrame(int type, int nLocal, Object[] local, int nStack, + Object[] stack) { if (mv != null) { mv.visitFrame(type, nLocal, local, nStack, stack); } @@ -232,20 +255,22 @@ public abstract class MethodVisitor { /** * Visits a zero operand instruction. 
* - * @param opcode the opcode of the instruction to be visited. This opcode is - * either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, ICONST_2, - * ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1, FCONST_0, - * FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD, FALOAD, - * DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE, FASTORE, - * DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2, DUP, - * DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD, FADD, - * DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, - * FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL, - * LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, - * I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, - * I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, - * FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, - * MONITORENTER, or MONITOREXIT. + * @param opcode + * the opcode of the instruction to be visited. This opcode is + * either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, + * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1, + * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, + * LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, + * IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, + * SASTORE, POP, POP2, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, + * DUP2_X2, SWAP, IADD, LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB, + * IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, FDIV, DDIV, IREM, LREM, + * FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL, LSHL, ISHR, LSHR, + * IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, I2L, I2F, I2D, + * L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, I2C, I2S, + * LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, FRETURN, + * DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, MONITORENTER, + * or MONITOREXIT. */ public void visitInsn(int opcode) { if (mv != null) { @@ -256,17 +281,20 @@ public abstract class MethodVisitor { /** * Visits an instruction with a single int operand. * - * @param opcode the opcode of the instruction to be visited. This opcode is - * either BIPUSH, SIPUSH or NEWARRAY. - * @param operand the operand of the instruction to be visited.
-     * When
-     * opcode is BIPUSH, operand value should be between Byte.MIN_VALUE
-     * and Byte.MAX_VALUE.
-     * When opcode is SIPUSH, operand value
-     * should be between Short.MIN_VALUE and Short.MAX_VALUE.
    When - * opcode is NEWARRAY, operand value should be one of - * {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR}, - * {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE}, - * {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT}, - * {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}. + * @param opcode + * the opcode of the instruction to be visited. This opcode is + * either BIPUSH, SIPUSH or NEWARRAY. + * @param operand + * the operand of the instruction to be visited.
+     * When opcode is BIPUSH, operand value should be between
+     * Byte.MIN_VALUE and Byte.MAX_VALUE.
+     * When opcode is SIPUSH, operand value should be between
+     * Short.MIN_VALUE and Short.MAX_VALUE.
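Editor's note: a small illustrative helper, not part of the patch, that follows the operand ranges just described and picks the shortest instruction able to push a given int constant. The class and method names are hypothetical.

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    final class PushSketch {
        // Pushes 'value' with ICONST_*, BIPUSH, SIPUSH or LDC, whichever is smallest.
        static void pushInt(MethodVisitor mv, int value) {
            if (value >= -1 && value <= 5) {
                mv.visitInsn(Opcodes.ICONST_0 + value);      // ICONST_M1 .. ICONST_5
            } else if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE) {
                mv.visitIntInsn(Opcodes.BIPUSH, value);
            } else if (value >= Short.MIN_VALUE && value <= Short.MAX_VALUE) {
                mv.visitIntInsn(Opcodes.SIPUSH, value);
            } else {
                mv.visitLdcInsn(Integer.valueOf(value));
            }
        }
    }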
    + * When opcode is NEWARRAY, operand value should be one of + * {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR}, + * {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE}, + * {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT}, + * {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}. */ public void visitIntInsn(int opcode, int operand) { if (mv != null) { @@ -278,11 +306,13 @@ public abstract class MethodVisitor { * Visits a local variable instruction. A local variable instruction is an * instruction that loads or stores the value of a local variable. * - * @param opcode the opcode of the local variable instruction to be visited. - * This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, - * LSTORE, FSTORE, DSTORE, ASTORE or RET. - * @param var the operand of the instruction to be visited. This operand is - * the index of a local variable. + * @param opcode + * the opcode of the local variable instruction to be visited. + * This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, + * ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET. + * @param var + * the operand of the instruction to be visited. This operand is + * the index of a local variable. */ public void visitVarInsn(int opcode, int var) { if (mv != null) { @@ -294,11 +324,13 @@ public abstract class MethodVisitor { * Visits a type instruction. A type instruction is an instruction that * takes the internal name of a class as parameter. * - * @param opcode the opcode of the type instruction to be visited. This - * opcode is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF. - * @param type the operand of the instruction to be visited. This operand - * must be the internal name of an object or array class (see {@link - * Type#getInternalName() getInternalName}). + * @param opcode + * the opcode of the type instruction to be visited. This opcode + * is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF. + * @param type + * the operand of the instruction to be visited. This operand + * must be the internal name of an object or array class (see + * {@link Type#getInternalName() getInternalName}). */ public void visitTypeInsn(int opcode, String type) { if (mv != null) { @@ -310,14 +342,19 @@ public abstract class MethodVisitor { * Visits a field instruction. A field instruction is an instruction that * loads or stores the value of a field of an object. * - * @param opcode the opcode of the type instruction to be visited. This - * opcode is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD. - * @param owner the internal name of the field's owner class (see {@link - * Type#getInternalName() getInternalName}). - * @param name the field's name. - * @param desc the field's descriptor (see {@link Type Type}). + * @param opcode + * the opcode of the type instruction to be visited. This opcode + * is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD. + * @param owner + * the internal name of the field's owner class (see + * {@link Type#getInternalName() getInternalName}). + * @param name + * the field's name. + * @param desc + * the field's descriptor (see {@link Type Type}). */ - public void visitFieldInsn(int opcode, String owner, String name, String desc) { + public void visitFieldInsn(int opcode, String owner, String name, + String desc) { if (mv != null) { mv.visitFieldInsn(opcode, owner, name, desc); } @@ -327,15 +364,20 @@ public abstract class MethodVisitor { * Visits a method instruction. A method instruction is an instruction that * invokes a method. * - * @param opcode the opcode of the type instruction to be visited. 
This - * opcode is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC - * or INVOKEINTERFACE. - * @param owner the internal name of the method's owner class (see {@link - * Type#getInternalName() getInternalName}). - * @param name the method's name. - * @param desc the method's descriptor (see {@link Type Type}). + * @param opcode + * the opcode of the type instruction to be visited. This opcode + * is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or + * INVOKEINTERFACE. + * @param owner + * the internal name of the method's owner class (see + * {@link Type#getInternalName() getInternalName}). + * @param name + * the method's name. + * @param desc + * the method's descriptor (see {@link Type Type}). */ - public void visitMethodInsn(int opcode, String owner, String name, String desc) { + public void visitMethodInsn(int opcode, String owner, String name, + String desc) { if (mv != null) { mv.visitMethodInsn(opcode, owner, name, desc); } @@ -344,16 +386,21 @@ public abstract class MethodVisitor { /** * Visits an invokedynamic instruction. * - * @param name the method's name. - * @param desc the method's descriptor (see {@link Type Type}). - * @param bsm the bootstrap method. - * @param bsmArgs the bootstrap method constant arguments. Each argument - * must be an {@link Integer}, {@link Float}, {@link Long}, - * {@link Double}, {@link String}, {@link Type} or {@link Handle} - * value. This method is allowed to modify the content of the array - * so a caller should expect that this array may change. + * @param name + * the method's name. + * @param desc + * the method's descriptor (see {@link Type Type}). + * @param bsm + * the bootstrap method. + * @param bsmArgs + * the bootstrap method constant arguments. Each argument must be + * an {@link Integer}, {@link Float}, {@link Long}, + * {@link Double}, {@link String}, {@link Type} or {@link Handle} + * value. This method is allowed to modify the content of the + * array so a caller should expect that this array may change. */ - public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) { + public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, + Object... bsmArgs) { if (mv != null) { mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs); } @@ -363,13 +410,15 @@ public abstract class MethodVisitor { * Visits a jump instruction. A jump instruction is an instruction that may * jump to another instruction. * - * @param opcode the opcode of the type instruction to be visited. This - * opcode is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, - * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, - * IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL. - * @param label the operand of the instruction to be visited. This operand - * is a label that designates the instruction to which the jump - * instruction may jump. + * @param opcode + * the opcode of the type instruction to be visited. This opcode + * is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, + * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, + * IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL. + * @param label + * the operand of the instruction to be visited. This operand is + * a label that designates the instruction to which the jump + * instruction may jump. */ public void visitJumpInsn(int opcode, Label label) { if (mv != null) { @@ -381,7 +430,8 @@ public abstract class MethodVisitor { * Visits a label. A label designates the instruction that will be visited * just after it. 
* - * @param label a {@link Label Label} object. + * @param label + * a {@link Label Label} object. */ public void visitLabel(Label label) { if (mv != null) { @@ -398,41 +448,44 @@ public abstract class MethodVisitor { * future versions of the Java Virtual Machine. To easily detect new * constant types, implementations of this method should check for * unexpected constant types, like this: + * *
          * if (cst instanceof Integer) {
    -     *   // ...
    +     *     // ...
          * } else if (cst instanceof Float) {
    -     *   // ...
    +     *     // ...
          * } else if (cst instanceof Long) {
    -     *   // ...
    -     * } else if (cst instanceof Double) {
    -     *   // ...
    -     * } else if (cst instanceof String) {
    -     *   // ...
    -     * } else if (cst instanceof Type) {
    -     *   int sort = ((Type) cst).getSort();
    -     *   if (sort == Type.OBJECT) {
          *     // ...
    -     *   } else if (sort == Type.ARRAY) {
    +     * } else if (cst instanceof Double) {
          *     // ...
    -     *   } else if (sort == Type.METHOD) {
    +     * } else if (cst instanceof String) {
          *     // ...
    -     *   } else {
    -     *     // throw an exception
    -     *   }
    +     * } else if (cst instanceof Type) {
    +     *     int sort = ((Type) cst).getSort();
    +     *     if (sort == Type.OBJECT) {
    +     *         // ...
    +     *     } else if (sort == Type.ARRAY) {
    +     *         // ...
    +     *     } else if (sort == Type.METHOD) {
    +     *         // ...
    +     *     } else {
    +     *         // throw an exception
    +     *     }
          * } else if (cst instanceof Handle) {
    -     *   // ...
    +     *     // ...
          * } else {
    -     *   // throw an exception
    -     * }
+     *     // throw an exception
+     * }
+     *
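Editor's note: a sketch of an adapter built on the pattern recommended in the javadoc sample above, not part of the patch. The class name and the logging are illustrative; the adapter inspects the constant, rejects unexpected types, and then delegates to the wrapped visitor.

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    final class LdcLoggingVisitor extends MethodVisitor {
        LdcLoggingVisitor(MethodVisitor mv) {
            super(Opcodes.ASM4, mv);
        }

        @Override
        public void visitLdcInsn(Object cst) {
            if (cst instanceof Integer || cst instanceof Float
                    || cst instanceof Long || cst instanceof Double
                    || cst instanceof String) {
                System.out.println("primitive or String constant: " + cst);
            } else if (cst instanceof scala.tools.asm.Type
                    || cst instanceof scala.tools.asm.Handle) {
                System.out.println("class, method type or handle constant: " + cst);
            } else {
                throw new IllegalArgumentException("unexpected constant: " + cst);
            }
            super.visitLdcInsn(cst);   // delegates to the wrapped visitor, if any
        }
    }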
    * - * @param cst the constant to be loaded on the stack. This parameter must be - * a non null {@link Integer}, a {@link Float}, a {@link Long}, a - * {@link Double}, a {@link String}, a {@link Type} of OBJECT or ARRAY - * sort for .class constants, for classes whose version is - * 49.0, a {@link Type} of METHOD sort or a {@link Handle} for - * MethodType and MethodHandle constants, for classes whose version - * is 51.0. + * @param cst + * the constant to be loaded on the stack. This parameter must be + * a non null {@link Integer}, a {@link Float}, a {@link Long}, a + * {@link Double}, a {@link String}, a {@link Type} of OBJECT or + * ARRAY sort for .class constants, for classes whose + * version is 49.0, a {@link Type} of METHOD sort or a + * {@link Handle} for MethodType and MethodHandle constants, for + * classes whose version is 51.0. */ public void visitLdcInsn(Object cst) { if (mv != null) { @@ -443,8 +496,10 @@ public abstract class MethodVisitor { /** * Visits an IINC instruction. * - * @param var index of the local variable to be incremented. - * @param increment amount to increment the local variable by. + * @param var + * index of the local variable to be incremented. + * @param increment + * amount to increment the local variable by. */ public void visitIincInsn(int var, int increment) { if (mv != null) { @@ -455,13 +510,18 @@ public abstract class MethodVisitor { /** * Visits a TABLESWITCH instruction. * - * @param min the minimum key value. - * @param max the maximum key value. - * @param dflt beginning of the default handler block. - * @param labels beginnings of the handler blocks. labels[i] is - * the beginning of the handler block for the min + i key. + * @param min + * the minimum key value. + * @param max + * the maximum key value. + * @param dflt + * beginning of the default handler block. + * @param labels + * beginnings of the handler blocks. labels[i] is the + * beginning of the handler block for the min + i key. */ - public void visitTableSwitchInsn(int min, int max, Label dflt, Label... labels) { + public void visitTableSwitchInsn(int min, int max, Label dflt, + Label... labels) { if (mv != null) { mv.visitTableSwitchInsn(min, max, dflt, labels); } @@ -470,10 +530,13 @@ public abstract class MethodVisitor { /** * Visits a LOOKUPSWITCH instruction. * - * @param dflt beginning of the default handler block. - * @param keys the values of the keys. - * @param labels beginnings of the handler blocks. labels[i] is - * the beginning of the handler block for the keys[i] key. + * @param dflt + * beginning of the default handler block. + * @param keys + * the values of the keys. + * @param labels + * beginnings of the handler blocks. labels[i] is the + * beginning of the handler block for the keys[i] key. */ public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) { if (mv != null) { @@ -484,8 +547,10 @@ public abstract class MethodVisitor { /** * Visits a MULTIANEWARRAY instruction. * - * @param desc an array type descriptor (see {@link Type Type}). - * @param dims number of dimensions of the array to allocate. + * @param desc + * an array type descriptor (see {@link Type Type}). + * @param dims + * number of dimensions of the array to allocate. */ public void visitMultiANewArrayInsn(String desc, int dims) { if (mv != null) { @@ -500,17 +565,22 @@ public abstract class MethodVisitor { /** * Visits a try catch block. * - * @param start beginning of the exception handler's scope (inclusive). 
- * @param end end of the exception handler's scope (exclusive). - * @param handler beginning of the exception handler's code. - * @param type internal name of the type of exceptions handled by the - * handler, or null to catch any exceptions (for "finally" - * blocks). - * @throws IllegalArgumentException if one of the labels has already been - * visited by this visitor (by the {@link #visitLabel visitLabel} - * method). + * @param start + * beginning of the exception handler's scope (inclusive). + * @param end + * end of the exception handler's scope (exclusive). + * @param handler + * beginning of the exception handler's code. + * @param type + * internal name of the type of exceptions handled by the + * handler, or null to catch any exceptions (for + * "finally" blocks). + * @throws IllegalArgumentException + * if one of the labels has already been visited by this visitor + * (by the {@link #visitLabel visitLabel} method). */ - public void visitTryCatchBlock(Label start, Label end, Label handler, String type) { + public void visitTryCatchBlock(Label start, Label end, Label handler, + String type) { if (mv != null) { mv.visitTryCatchBlock(start, end, handler, type); } @@ -519,28 +589,28 @@ public abstract class MethodVisitor { /** * Visits a local variable declaration. * - * @param name the name of a local variable. - * @param desc the type descriptor of this local variable. - * @param signature the type signature of this local variable. May be - * null if the local variable type does not use generic - * types. - * @param start the first instruction corresponding to the scope of this - * local variable (inclusive). - * @param end the last instruction corresponding to the scope of this local - * variable (exclusive). - * @param index the local variable's index. - * @throws IllegalArgumentException if one of the labels has not already - * been visited by this visitor (by the - * {@link #visitLabel visitLabel} method). + * @param name + * the name of a local variable. + * @param desc + * the type descriptor of this local variable. + * @param signature + * the type signature of this local variable. May be + * null if the local variable type does not use generic + * types. + * @param start + * the first instruction corresponding to the scope of this local + * variable (inclusive). + * @param end + * the last instruction corresponding to the scope of this local + * variable (exclusive). + * @param index + * the local variable's index. + * @throws IllegalArgumentException + * if one of the labels has not already been visited by this + * visitor (by the {@link #visitLabel visitLabel} method). */ - public void visitLocalVariable( - String name, - String desc, - String signature, - Label start, - Label end, - int index) - { + public void visitLocalVariable(String name, String desc, String signature, + Label start, Label end, int index) { if (mv != null) { mv.visitLocalVariable(name, desc, signature, start, end, index); } @@ -549,12 +619,14 @@ public abstract class MethodVisitor { /** * Visits a line number declaration. * - * @param line a line number. This number refers to the source file from - * which the class was compiled. - * @param start the first instruction corresponding to this line number. - * @throws IllegalArgumentException if start has not already been - * visited by this visitor (by the {@link #visitLabel visitLabel} - * method). + * @param line + * a line number. This number refers to the source file from + * which the class was compiled. 
+ * @param start + * the first instruction corresponding to this line number. + * @throws IllegalArgumentException + * if start has not already been visited by this + * visitor (by the {@link #visitLabel visitLabel} method). */ public void visitLineNumber(int line, Label start) { if (mv != null) { @@ -566,8 +638,10 @@ public abstract class MethodVisitor { * Visits the maximum stack size and the maximum number of local variables * of the method. * - * @param maxStack maximum stack size of the method. - * @param maxLocals maximum number of local variables for the method. + * @param maxStack + * maximum stack size of the method. + * @param maxLocals + * maximum number of local variables for the method. */ public void visitMaxs(int maxStack, int maxLocals) { if (mv != null) { diff --git a/src/asm/scala/tools/asm/MethodWriter.java b/src/asm/scala/tools/asm/MethodWriter.java index 321bacb6fc..f5fbd1e74f 100644 --- a/src/asm/scala/tools/asm/MethodWriter.java +++ b/src/asm/scala/tools/asm/MethodWriter.java @@ -42,7 +42,7 @@ class MethodWriter extends MethodVisitor { /** * Pseudo access flag used to denote constructors. */ - static final int ACC_CONSTRUCTOR = 262144; + static final int ACC_CONSTRUCTOR = 0x80000; /** * Frame has exactly the same locals as the previous stack map frame and @@ -229,7 +229,7 @@ class MethodWriter extends MethodVisitor { private int maxLocals; /** - * Number of local variables in the current stack map frame. + * Number of local variables in the current stack map frame. */ private int currentLocals; @@ -256,11 +256,6 @@ class MethodWriter extends MethodVisitor { */ private int[] previousFrame; - /** - * Index of the next element to be added in {@link #frame}. - */ - private int frameIndex; - /** * The current stack map frame. The first element contains the offset of the * instruction to which the frame corresponds, the second element is the @@ -357,7 +352,8 @@ class MethodWriter extends MethodVisitor { * A list of labels. This list is the list of basic blocks in the method, * i.e. a list of Label objects linked to each other by their * {@link Label#successor} field, in the order they are visited by - * {@link MethodVisitor#visitLabel}, and starting with the first basic block. + * {@link MethodVisitor#visitLabel}, and starting with the first basic + * block. */ private Label labels; @@ -396,28 +392,30 @@ class MethodWriter extends MethodVisitor { /** * Constructs a new {@link MethodWriter}. * - * @param cw the class writer in which the method must be added. - * @param access the method's access flags (see {@link Opcodes}). - * @param name the method's name. - * @param desc the method's descriptor (see {@link Type}). - * @param signature the method's signature. May be null. - * @param exceptions the internal names of the method's exceptions. May be - * null. - * @param computeMaxs true if the maximum stack size and number - * of local variables must be automatically computed. - * @param computeFrames true if the stack map tables must be - * recomputed from scratch. - */ - MethodWriter( - final ClassWriter cw, - final int access, - final String name, - final String desc, - final String signature, - final String[] exceptions, - final boolean computeMaxs, - final boolean computeFrames) - { + * @param cw + * the class writer in which the method must be added. + * @param access + * the method's access flags (see {@link Opcodes}). + * @param name + * the method's name. + * @param desc + * the method's descriptor (see {@link Type}). + * @param signature + * the method's signature. 
May be null. + * @param exceptions + * the internal names of the method's exceptions. May be + * null. + * @param computeMaxs + * true if the maximum stack size and number of local + * variables must be automatically computed. + * @param computeFrames + * true if the stack map tables must be recomputed from + * scratch. + */ + MethodWriter(final ClassWriter cw, final int access, final String name, + final String desc, final String signature, + final String[] exceptions, final boolean computeMaxs, + final boolean computeFrames) { super(Opcodes.ASM4); if (cw.firstMethod == null) { cw.firstMethod = this; @@ -427,6 +425,9 @@ class MethodWriter extends MethodVisitor { cw.lastMethod = this; this.cw = cw; this.access = access; + if ("".equals(name)) { + this.access |= ACC_CONSTRUCTOR; + } this.name = cw.newUTF8(name); this.desc = cw.newUTF8(desc); this.descriptor = desc; @@ -442,9 +443,6 @@ class MethodWriter extends MethodVisitor { } this.compute = computeFrames ? FRAMES : (computeMaxs ? MAXS : NOTHING); if (computeMaxs || computeFrames) { - if (computeFrames && "".equals(name)) { - this.access |= ACC_CONSTRUCTOR; - } // updates maxLocals int size = Type.getArgumentsAndReturnSizes(descriptor) >> 2; if ((access & Opcodes.ACC_STATIC) != 0) { @@ -473,10 +471,8 @@ class MethodWriter extends MethodVisitor { } @Override - public AnnotationVisitor visitAnnotation( - final String desc, - final boolean visible) - { + public AnnotationVisitor visitAnnotation(final String desc, + final boolean visible) { if (!ClassReader.ANNOTATIONS) { return null; } @@ -495,11 +491,8 @@ class MethodWriter extends MethodVisitor { } @Override - public AnnotationVisitor visitParameterAnnotation( - final int parameter, - final String desc, - final boolean visible) - { + public AnnotationVisitor visitParameterAnnotation(final int parameter, + final String desc, final boolean visible) { if (!ClassReader.ANNOTATIONS) { return null; } @@ -545,20 +538,18 @@ class MethodWriter extends MethodVisitor { } @Override - public void visitFrame( - final int type, - final int nLocal, - final Object[] local, - final int nStack, - final Object[] stack) - { + public void visitFrame(final int type, final int nLocal, + final Object[] local, final int nStack, final Object[] stack) { if (!ClassReader.FRAMES || compute == FRAMES) { return; } if (type == Opcodes.F_NEW) { + if (previousFrame == null) { + visitImplicitFirstFrame(); + } currentLocals = nLocal; - startFrame(code.length, nLocal, nStack); + int frameIndex = startFrame(code.length, nLocal, nStack); for (int i = 0; i < nLocal; ++i) { if (local[i] instanceof String) { frame[frameIndex++] = Frame.OBJECT @@ -601,48 +592,44 @@ class MethodWriter extends MethodVisitor { } switch (type) { - case Opcodes.F_FULL: - currentLocals = nLocal; - stackMap.putByte(FULL_FRAME) - .putShort(delta) - .putShort(nLocal); - for (int i = 0; i < nLocal; ++i) { - writeFrameType(local[i]); - } - stackMap.putShort(nStack); - for (int i = 0; i < nStack; ++i) { - writeFrameType(stack[i]); - } - break; - case Opcodes.F_APPEND: - currentLocals += nLocal; - stackMap.putByte(SAME_FRAME_EXTENDED + nLocal) - .putShort(delta); - for (int i = 0; i < nLocal; ++i) { - writeFrameType(local[i]); - } - break; - case Opcodes.F_CHOP: - currentLocals -= nLocal; - stackMap.putByte(SAME_FRAME_EXTENDED - nLocal) + case Opcodes.F_FULL: + currentLocals = nLocal; + stackMap.putByte(FULL_FRAME).putShort(delta).putShort(nLocal); + for (int i = 0; i < nLocal; ++i) { + writeFrameType(local[i]); + } + stackMap.putShort(nStack); + for (int i = 0; 
i < nStack; ++i) { + writeFrameType(stack[i]); + } + break; + case Opcodes.F_APPEND: + currentLocals += nLocal; + stackMap.putByte(SAME_FRAME_EXTENDED + nLocal).putShort(delta); + for (int i = 0; i < nLocal; ++i) { + writeFrameType(local[i]); + } + break; + case Opcodes.F_CHOP: + currentLocals -= nLocal; + stackMap.putByte(SAME_FRAME_EXTENDED - nLocal).putShort(delta); + break; + case Opcodes.F_SAME: + if (delta < 64) { + stackMap.putByte(delta); + } else { + stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta); + } + break; + case Opcodes.F_SAME1: + if (delta < 64) { + stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta); + } else { + stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED) .putShort(delta); - break; - case Opcodes.F_SAME: - if (delta < 64) { - stackMap.putByte(delta); - } else { - stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta); - } - break; - case Opcodes.F_SAME1: - if (delta < 64) { - stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta); - } else { - stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED) - .putShort(delta); - } - writeFrameType(stack[0]); - break; + } + writeFrameType(stack[0]); + break; } previousFrameOffset = code.length; @@ -672,8 +659,7 @@ class MethodWriter extends MethodVisitor { } // if opcode == ATHROW or xRETURN, ends current block (no successor) if ((opcode >= Opcodes.IRETURN && opcode <= Opcodes.RETURN) - || opcode == Opcodes.ATHROW) - { + || opcode == Opcodes.ATHROW) { noSuccessor(); } } @@ -731,8 +717,7 @@ class MethodWriter extends MethodVisitor { // updates max locals int n; if (opcode == Opcodes.LLOAD || opcode == Opcodes.DLOAD - || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE) - { + || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE) { n = var + 2; } else { n = var + 1; @@ -784,12 +769,8 @@ class MethodWriter extends MethodVisitor { } @Override - public void visitFieldInsn( - final int opcode, - final String owner, - final String name, - final String desc) - { + public void visitFieldInsn(final int opcode, final String owner, + final String name, final String desc) { Item i = cw.newFieldItem(owner, name, desc); // Label currentBlock = this.currentBlock; if (currentBlock != null) { @@ -800,19 +781,19 @@ class MethodWriter extends MethodVisitor { // computes the stack size variation char c = desc.charAt(0); switch (opcode) { - case Opcodes.GETSTATIC: - size = stackSize + (c == 'D' || c == 'J' ? 2 : 1); - break; - case Opcodes.PUTSTATIC: - size = stackSize + (c == 'D' || c == 'J' ? -2 : -1); - break; - case Opcodes.GETFIELD: - size = stackSize + (c == 'D' || c == 'J' ? 1 : 0); - break; - // case Constants.PUTFIELD: - default: - size = stackSize + (c == 'D' || c == 'J' ? -3 : -2); - break; + case Opcodes.GETSTATIC: + size = stackSize + (c == 'D' || c == 'J' ? 2 : 1); + break; + case Opcodes.PUTSTATIC: + size = stackSize + (c == 'D' || c == 'J' ? -2 : -1); + break; + case Opcodes.GETFIELD: + size = stackSize + (c == 'D' || c == 'J' ? 1 : 0); + break; + // case Constants.PUTFIELD: + default: + size = stackSize + (c == 'D' || c == 'J' ? 
-3 : -2); + break; } // updates current and max stack sizes if (size > maxStackSize) { @@ -826,12 +807,8 @@ class MethodWriter extends MethodVisitor { } @Override - public void visitMethodInsn( - final int opcode, - final String owner, - final String name, - final String desc) - { + public void visitMethodInsn(final int opcode, final String owner, + final String name, final String desc) { boolean itf = opcode == Opcodes.INVOKEINTERFACE; Item i = cw.newMethodItem(owner, name, desc, itf); int argSize = i.intVal; @@ -882,12 +859,8 @@ class MethodWriter extends MethodVisitor { } @Override - public void visitInvokeDynamicInsn( - final String name, - final String desc, - final Handle bsm, - final Object... bsmArgs) - { + public void visitInvokeDynamicInsn(final String name, final String desc, + final Handle bsm, final Object... bsmArgs) { Item i = cw.newInvokeDynamicItem(name, desc, bsm, bsmArgs); int argSize = i.intVal; // Label currentBlock = this.currentBlock; @@ -967,8 +940,7 @@ class MethodWriter extends MethodVisitor { } // adds the instruction to the bytecode of the method if ((label.status & Label.RESOLVED) != 0 - && label.position - code.length < Short.MIN_VALUE) - { + && label.position - code.length < Short.MIN_VALUE) { /* * case of a backward jump with an offset < -32768. In this case we * automatically replace GOTO with GOTO_W, JSR with JSR_W and IFxxx @@ -986,8 +958,7 @@ class MethodWriter extends MethodVisitor { if (nextInsn != null) { nextInsn.status |= Label.TARGET; } - code.putByte(opcode <= 166 - ? ((opcode + 1) ^ 1) - 1 + code.putByte(opcode <= 166 ? ((opcode + 1) ^ 1) - 1 : opcode ^ 1); code.putShort(8); // jump offset code.putByte(200); // GOTO_W @@ -1082,8 +1053,7 @@ class MethodWriter extends MethodVisitor { } else { int size; // computes the stack size variation - if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE) - { + if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE) { size = stackSize + 2; } else { size = stackSize + 1; @@ -1122,8 +1092,7 @@ class MethodWriter extends MethodVisitor { } // adds the instruction to the bytecode of the method if ((var > 255) || (increment > 127) || (increment < -128)) { - code.putByte(196 /* WIDE */) - .put12(Opcodes.IINC, var) + code.putByte(196 /* WIDE */).put12(Opcodes.IINC, var) .putShort(increment); } else { code.putByte(Opcodes.IINC).put11(var, increment); @@ -1131,12 +1100,8 @@ class MethodWriter extends MethodVisitor { } @Override - public void visitTableSwitchInsn( - final int min, - final int max, - final Label dflt, - final Label... labels) - { + public void visitTableSwitchInsn(final int min, final int max, + final Label dflt, final Label... 
labels) { // adds the instruction to the bytecode of the method int source = code.length; code.putByte(Opcodes.TABLESWITCH); @@ -1151,11 +1116,8 @@ class MethodWriter extends MethodVisitor { } @Override - public void visitLookupSwitchInsn( - final Label dflt, - final int[] keys, - final Label[] labels) - { + public void visitLookupSwitchInsn(final Label dflt, final int[] keys, + final Label[] labels) { // adds the instruction to the bytecode of the method int source = code.length; code.putByte(Opcodes.LOOKUPSWITCH); @@ -1214,12 +1176,8 @@ class MethodWriter extends MethodVisitor { } @Override - public void visitTryCatchBlock( - final Label start, - final Label end, - final Label handler, - final String type) - { + public void visitTryCatchBlock(final Label start, final Label end, + final Label handler, final String type) { ++handlerCount; Handler h = new Handler(); h.start = start; @@ -1236,14 +1194,9 @@ class MethodWriter extends MethodVisitor { } @Override - public void visitLocalVariable( - final String name, - final String desc, - final String signature, - final Label start, - final Label end, - final int index) - { + public void visitLocalVariable(final String name, final String desc, + final String signature, final Label start, final Label end, + final int index) { if (signature != null) { if (localVarType == null) { localVarType = new ByteVector(); @@ -1251,8 +1204,7 @@ class MethodWriter extends MethodVisitor { ++localVarTypeCount; localVarType.putShort(start.position) .putShort(end.position - start.position) - .putShort(cw.newUTF8(name)) - .putShort(cw.newUTF8(signature)) + .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(signature)) .putShort(index); } if (localVar == null) { @@ -1261,8 +1213,7 @@ class MethodWriter extends MethodVisitor { ++localVarCount; localVar.putShort(start.position) .putShort(end.position - start.position) - .putShort(cw.newUTF8(name)) - .putShort(cw.newUTF8(desc)) + .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(desc)) .putShort(index); if (compute != NOTHING) { // updates max locals @@ -1294,8 +1245,7 @@ class MethodWriter extends MethodVisitor { Label h = handler.handler.getFirst(); Label e = handler.end.getFirst(); // computes the kind of the edges to 'h' - String t = handler.desc == null - ? "java/lang/Throwable" + String t = handler.desc == null ? "java/lang/Throwable" : handler.desc; int kind = Frame.OBJECT | cw.addType(t); // h is an exception handler @@ -1382,11 +1332,12 @@ class MethodWriter extends MethodVisitor { } code.data[end] = (byte) Opcodes.ATHROW; // emits a frame for this unreachable block - startFrame(start, 0, 1); - frame[frameIndex++] = Frame.OBJECT + int frameIndex = startFrame(start, 0, 1); + frame[frameIndex] = Frame.OBJECT | cw.addType("java/lang/Throwable"); endFrame(); - // removes the start-end range from the exception handlers + // removes the start-end range from the exception + // handlers firstHandler = Handler.remove(firstHandler, l, k); } } @@ -1535,8 +1486,10 @@ class MethodWriter extends MethodVisitor { /** * Adds a successor to the {@link #currentBlock currentBlock} block. * - * @param info information about the control flow edge to be added. - * @param successor the successor block to be added to the current block. + * @param info + * information about the control flow edge to be added. + * @param successor + * the successor block to be added to the current block. */ private void addSuccessor(final int info, final Label successor) { // creates and initializes an Edge object... 
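Editor's note: the ordering constraint documented above for visitTryCatchBlock (it must be called before the labels passed as arguments have been visited, otherwise an IllegalArgumentException is documented) looks like this in practice. A rough sketch, not part of the patch; the class and method names are illustrative and the elided instruction sequences are placeholders.

    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;

    final class TryCatchSketch {
        // Declares the protected region before any of its labels is visited.
        static void emitGuarded(MethodVisitor mv) {
            Label start = new Label();
            Label end = new Label();
            Label handler = new Label();
            mv.visitTryCatchBlock(start, end, handler, "java/lang/Exception");
            mv.visitLabel(start);
            // ... instructions of the protected region ...
            mv.visitLabel(end);
            // ... instructions that follow the protected region ...
            mv.visitLabel(handler);
            // ... handler code; the thrown exception is on the operand stack here ...
        }
    }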
@@ -1573,7 +1526,8 @@ class MethodWriter extends MethodVisitor { /** * Visits a frame that has been computed from scratch. * - * @param f the frame that must be visited. + * @param f + * the frame that must be visited. */ private void visitFrame(final Frame f) { int i, t; @@ -1606,7 +1560,7 @@ class MethodWriter extends MethodVisitor { } } // visits the frame and its content - startFrame(f.owner.position, nLocal, nStack); + int frameIndex = startFrame(f.owner.position, nLocal, nStack); for (i = 0; nLocal > 0; ++i, --nLocal) { t = locals[i]; frame[frameIndex++] = t; @@ -1624,16 +1578,79 @@ class MethodWriter extends MethodVisitor { endFrame(); } + /** + * Visit the implicit first frame of this method. + */ + private void visitImplicitFirstFrame() { + // There can be at most descriptor.length() + 1 locals + int frameIndex = startFrame(0, descriptor.length() + 1, 0); + if ((access & Opcodes.ACC_STATIC) == 0) { + if ((access & ACC_CONSTRUCTOR) == 0) { + frame[frameIndex++] = Frame.OBJECT | cw.addType(cw.thisName); + } else { + frame[frameIndex++] = 6; // Opcodes.UNINITIALIZED_THIS; + } + } + int i = 1; + loop: while (true) { + int j = i; + switch (descriptor.charAt(i++)) { + case 'Z': + case 'C': + case 'B': + case 'S': + case 'I': + frame[frameIndex++] = 1; // Opcodes.INTEGER; + break; + case 'F': + frame[frameIndex++] = 2; // Opcodes.FLOAT; + break; + case 'J': + frame[frameIndex++] = 4; // Opcodes.LONG; + break; + case 'D': + frame[frameIndex++] = 3; // Opcodes.DOUBLE; + break; + case '[': + while (descriptor.charAt(i) == '[') { + ++i; + } + if (descriptor.charAt(i) == 'L') { + ++i; + while (descriptor.charAt(i) != ';') { + ++i; + } + } + frame[frameIndex++] = Frame.OBJECT + | cw.addType(descriptor.substring(j, ++i)); + break; + case 'L': + while (descriptor.charAt(i) != ';') { + ++i; + } + frame[frameIndex++] = Frame.OBJECT + | cw.addType(descriptor.substring(j + 1, i++)); + break; + default: + break loop; + } + } + frame[1] = frameIndex - 3; + endFrame(); + } + /** * Starts the visit of a stack map frame. * - * @param offset the offset of the instruction to which the frame - * corresponds. - * @param nLocal the number of local variables in the frame. - * @param nStack the number of stack elements in the frame. - */ - private void startFrame(final int offset, final int nLocal, final int nStack) - { + * @param offset + * the offset of the instruction to which the frame corresponds. + * @param nLocal + * the number of local variables in the frame. + * @param nStack + * the number of stack elements in the frame. + * @return the index of the next element to be written in this frame. + */ + private int startFrame(final int offset, final int nLocal, final int nStack) { int n = 3 + nLocal + nStack; if (frame == null || frame.length < n) { frame = new int[n]; @@ -1641,7 +1658,7 @@ class MethodWriter extends MethodVisitor { frame[0] = offset; frame[1] = nLocal; frame[2] = nStack; - frameIndex = 3; + return 3; } /** @@ -1686,24 +1703,23 @@ class MethodWriter extends MethodVisitor { if (cstackSize == 0) { k = clocalsSize - localsSize; switch (k) { - case -3: - case -2: - case -1: - type = CHOP_FRAME; - localsSize = clocalsSize; - break; - case 0: - type = delta < 64 ? SAME_FRAME : SAME_FRAME_EXTENDED; - break; - case 1: - case 2: - case 3: - type = APPEND_FRAME; - break; + case -3: + case -2: + case -1: + type = CHOP_FRAME; + localsSize = clocalsSize; + break; + case 0: + type = delta < 64 ? 
SAME_FRAME : SAME_FRAME_EXTENDED; + break; + case 1: + case 2: + case 3: + type = APPEND_FRAME; + break; } } else if (clocalsSize == localsSize && cstackSize == 1) { - type = delta < 63 - ? SAME_LOCALS_1_STACK_ITEM_FRAME + type = delta < 63 ? SAME_LOCALS_1_STACK_ITEM_FRAME : SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED; } if (type != FULL_FRAME) { @@ -1718,36 +1734,34 @@ class MethodWriter extends MethodVisitor { } } switch (type) { - case SAME_FRAME: - stackMap.putByte(delta); - break; - case SAME_LOCALS_1_STACK_ITEM_FRAME: - stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta); - writeFrameTypes(3 + clocalsSize, 4 + clocalsSize); - break; - case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED: - stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED) - .putShort(delta); - writeFrameTypes(3 + clocalsSize, 4 + clocalsSize); - break; - case SAME_FRAME_EXTENDED: - stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta); - break; - case CHOP_FRAME: - stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta); - break; - case APPEND_FRAME: - stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta); - writeFrameTypes(3 + localsSize, 3 + clocalsSize); - break; - // case FULL_FRAME: - default: - stackMap.putByte(FULL_FRAME) - .putShort(delta) - .putShort(clocalsSize); - writeFrameTypes(3, 3 + clocalsSize); - stackMap.putShort(cstackSize); - writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize); + case SAME_FRAME: + stackMap.putByte(delta); + break; + case SAME_LOCALS_1_STACK_ITEM_FRAME: + stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta); + writeFrameTypes(3 + clocalsSize, 4 + clocalsSize); + break; + case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED: + stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED).putShort( + delta); + writeFrameTypes(3 + clocalsSize, 4 + clocalsSize); + break; + case SAME_FRAME_EXTENDED: + stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta); + break; + case CHOP_FRAME: + stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta); + break; + case APPEND_FRAME: + stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta); + writeFrameTypes(3 + localsSize, 3 + clocalsSize); + break; + // case FULL_FRAME: + default: + stackMap.putByte(FULL_FRAME).putShort(delta).putShort(clocalsSize); + writeFrameTypes(3, 3 + clocalsSize); + stackMap.putShort(cstackSize); + writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize); } } @@ -1757,8 +1771,10 @@ class MethodWriter extends MethodVisitor { * in {@link Label} to the format used in StackMapTable attributes. In * particular, it converts type table indexes to constant pool indexes. * - * @param start index of the first type in {@link #frame} to write. - * @param end index of last type in {@link #frame} to write (exclusive). + * @param start + * index of the first type in {@link #frame} to write. + * @param end + * index of last type in {@link #frame} to write (exclusive). 
*/ private void writeFrameTypes(final int start, final int end) { for (int i = start; i < end; ++i) { @@ -1767,15 +1783,15 @@ class MethodWriter extends MethodVisitor { if (d == 0) { int v = t & Frame.BASE_VALUE; switch (t & Frame.BASE_KIND) { - case Frame.OBJECT: - stackMap.putByte(7) - .putShort(cw.newClass(cw.typeTable[v].strVal1)); - break; - case Frame.UNINITIALIZED: - stackMap.putByte(8).putShort(cw.typeTable[v].intVal); - break; - default: - stackMap.putByte(v); + case Frame.OBJECT: + stackMap.putByte(7).putShort( + cw.newClass(cw.typeTable[v].strVal1)); + break; + case Frame.UNINITIALIZED: + stackMap.putByte(8).putShort(cw.typeTable[v].intVal); + break; + default: + stackMap.putByte(v); } } else { StringBuffer buf = new StringBuffer(); @@ -1789,29 +1805,29 @@ class MethodWriter extends MethodVisitor { buf.append(';'); } else { switch (t & 0xF) { - case 1: - buf.append('I'); - break; - case 2: - buf.append('F'); - break; - case 3: - buf.append('D'); - break; - case 9: - buf.append('Z'); - break; - case 10: - buf.append('B'); - break; - case 11: - buf.append('C'); - break; - case 12: - buf.append('S'); - break; - default: - buf.append('J'); + case 1: + buf.append('I'); + break; + case 2: + buf.append('F'); + break; + case 3: + buf.append('D'); + break; + case 9: + buf.append('Z'); + break; + case 10: + buf.append('B'); + break; + case 11: + buf.append('C'); + break; + case 12: + buf.append('S'); + break; + default: + buf.append('J'); } } stackMap.putByte(7).putShort(cw.newClass(buf.toString())); @@ -1875,10 +1891,7 @@ class MethodWriter extends MethodVisitor { size += 8 + stackMap.length; } if (cattrs != null) { - size += cattrs.getSize(cw, - code.data, - code.length, - maxStack, + size += cattrs.getSize(cw, code.data, code.length, maxStack, maxLocals); } } @@ -1886,11 +1899,12 @@ class MethodWriter extends MethodVisitor { cw.newUTF8("Exceptions"); size += 8 + 2 * exceptionCount; } - if ((access & Opcodes.ACC_SYNTHETIC) != 0 - && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0)) - { - cw.newUTF8("Synthetic"); - size += 6; + if ((access & Opcodes.ACC_SYNTHETIC) != 0) { + if ((cw.version & 0xFFFF) < Opcodes.V1_5 + || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) { + cw.newUTF8("Synthetic"); + size += 6; + } } if ((access & Opcodes.ACC_DEPRECATED) != 0) { cw.newUTF8("Deprecated"); @@ -1936,13 +1950,15 @@ class MethodWriter extends MethodVisitor { /** * Puts the bytecode of this method in the given byte vector. * - * @param out the byte vector into which the bytecode of this method must be - * copied. + * @param out + * the byte vector into which the bytecode of this method must be + * copied. 
*/ final void put(final ByteVector out) { - int mask = Opcodes.ACC_DEPRECATED + final int FACTOR = ClassWriter.TO_ACC_SYNTHETIC; + int mask = ACC_CONSTRUCTOR | Opcodes.ACC_DEPRECATED | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE - | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC)); + | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / FACTOR); out.putShort(access & ~mask).putShort(name).putShort(desc); if (classReaderOffset != 0) { out.putByteArray(cw.cr.b, classReaderOffset, classReaderLength); @@ -1955,10 +1971,11 @@ class MethodWriter extends MethodVisitor { if (exceptionCount > 0) { ++attributeCount; } - if ((access & Opcodes.ACC_SYNTHETIC) != 0 - && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0)) - { - ++attributeCount; + if ((access & Opcodes.ACC_SYNTHETIC) != 0) { + if ((cw.version & 0xFFFF) < Opcodes.V1_5 + || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) { + ++attributeCount; + } } if ((access & Opcodes.ACC_DEPRECATED) != 0) { ++attributeCount; @@ -2000,10 +2017,7 @@ class MethodWriter extends MethodVisitor { size += 8 + stackMap.length; } if (cattrs != null) { - size += cattrs.getSize(cw, - code.data, - code.length, - maxStack, + size += cattrs.getSize(cw, code.data, code.length, maxStack, maxLocals); } out.putShort(cw.newUTF8("Code")).putInt(size); @@ -2013,10 +2027,8 @@ class MethodWriter extends MethodVisitor { if (handlerCount > 0) { Handler h = firstHandler; while (h != null) { - out.putShort(h.start.position) - .putShort(h.end.position) - .putShort(h.handler.position) - .putShort(h.type); + out.putShort(h.start.position).putShort(h.end.position) + .putShort(h.handler.position).putShort(h.type); h = h.next; } } @@ -2063,24 +2075,24 @@ class MethodWriter extends MethodVisitor { } } if (exceptionCount > 0) { - out.putShort(cw.newUTF8("Exceptions")) - .putInt(2 * exceptionCount + 2); + out.putShort(cw.newUTF8("Exceptions")).putInt( + 2 * exceptionCount + 2); out.putShort(exceptionCount); for (int i = 0; i < exceptionCount; ++i) { out.putShort(exceptions[i]); } } - if ((access & Opcodes.ACC_SYNTHETIC) != 0 - && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0)) - { - out.putShort(cw.newUTF8("Synthetic")).putInt(0); + if ((access & Opcodes.ACC_SYNTHETIC) != 0) { + if ((cw.version & 0xFFFF) < Opcodes.V1_5 + || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) { + out.putShort(cw.newUTF8("Synthetic")).putInt(0); + } } if ((access & Opcodes.ACC_DEPRECATED) != 0) { out.putShort(cw.newUTF8("Deprecated")).putInt(0); } if (ClassReader.SIGNATURES && signature != null) { - out.putShort(cw.newUTF8("Signature")) - .putInt(2) + out.putShort(cw.newUTF8("Signature")).putInt(2) .putShort(cw.newUTF8(signature)); } if (ClassReader.ANNOTATIONS && annd != null) { @@ -2123,10 +2135,12 @@ class MethodWriter extends MethodVisitor { * 32768, in which case IFEQ 32766 must be replaced with IFNEQ 8 GOTO_W * 32765. This, in turn, may require to increase the size of another jump * instruction, and so on... All these operations are handled automatically - * by this method.

-     * This method must be called after all the method
-     * that is being built has been visited. In particular, the
-     * {@link Label Label} objects used to construct the method are no longer
-     * valid after this method has been called.
+     * by this method.
+     *
    + * This method must be called after all the method that is being built + * has been visited. In particular, the {@link Label Label} objects used + * to construct the method are no longer valid after this method has been + * called. */ private void resizeInstructions() { byte[] b = code.data; // bytecode of the method @@ -2176,158 +2190,14 @@ class MethodWriter extends MethodVisitor { int insert = 0; // bytes to be added after this instruction switch (ClassWriter.TYPE[opcode]) { - case ClassWriter.NOARG_INSN: - case ClassWriter.IMPLVAR_INSN: - u += 1; - break; - case ClassWriter.LABEL_INSN: - if (opcode > 201) { - // converts temporary opcodes 202 to 217, 218 and - // 219 to IFEQ ... JSR (inclusive), IFNULL and - // IFNONNULL - opcode = opcode < 218 ? opcode - 49 : opcode - 20; - label = u + readUnsignedShort(b, u + 1); - } else { - label = u + readShort(b, u + 1); - } - newOffset = getNewOffset(allIndexes, allSizes, u, label); - if (newOffset < Short.MIN_VALUE - || newOffset > Short.MAX_VALUE) - { - if (!resize[u]) { - if (opcode == Opcodes.GOTO - || opcode == Opcodes.JSR) - { - // two additional bytes will be required to - // replace this GOTO or JSR instruction with - // a GOTO_W or a JSR_W - insert = 2; - } else { - // five additional bytes will be required to - // replace this IFxxx instruction with - // IFNOTxxx GOTO_W , where IFNOTxxx - // is the "opposite" opcode of IFxxx (i.e., - // IFNE for IFEQ) and where designates - // the instruction just after the GOTO_W. - insert = 5; - } - resize[u] = true; - } - } - u += 3; - break; - case ClassWriter.LABELW_INSN: - u += 5; - break; - case ClassWriter.TABL_INSN: - if (state == 1) { - // true number of bytes to be added (or removed) - // from this instruction = (future number of padding - // bytes - current number of padding byte) - - // previously over estimated variation = - // = ((3 - newOffset%4) - (3 - u%4)) - u%4 - // = (-newOffset%4 + u%4) - u%4 - // = -(newOffset & 3) - newOffset = getNewOffset(allIndexes, allSizes, 0, u); - insert = -(newOffset & 3); - } else if (!resize[u]) { - // over estimation of the number of bytes to be - // added to this instruction = 3 - current number - // of padding bytes = 3 - (3 - u%4) = u%4 = u & 3 - insert = u & 3; - resize[u] = true; - } - // skips instruction - u = u + 4 - (u & 3); - u += 4 * (readInt(b, u + 8) - readInt(b, u + 4) + 1) + 12; - break; - case ClassWriter.LOOK_INSN: - if (state == 1) { - // like TABL_INSN - newOffset = getNewOffset(allIndexes, allSizes, 0, u); - insert = -(newOffset & 3); - } else if (!resize[u]) { - // like TABL_INSN - insert = u & 3; - resize[u] = true; - } - // skips instruction - u = u + 4 - (u & 3); - u += 8 * readInt(b, u + 4) + 8; - break; - case ClassWriter.WIDE_INSN: - opcode = b[u + 1] & 0xFF; - if (opcode == Opcodes.IINC) { - u += 6; - } else { - u += 4; - } - break; - case ClassWriter.VAR_INSN: - case ClassWriter.SBYTE_INSN: - case ClassWriter.LDC_INSN: - u += 2; - break; - case ClassWriter.SHORT_INSN: - case ClassWriter.LDCW_INSN: - case ClassWriter.FIELDORMETH_INSN: - case ClassWriter.TYPE_INSN: - case ClassWriter.IINC_INSN: - u += 3; - break; - case ClassWriter.ITFMETH_INSN: - case ClassWriter.INDYMETH_INSN: - u += 5; - break; - // case ClassWriter.MANA_INSN: - default: - u += 4; - break; - } - if (insert != 0) { - // adds a new (u, insert) entry in the allIndexes and - // allSizes arrays - int[] newIndexes = new int[allIndexes.length + 1]; - int[] newSizes = new int[allSizes.length + 1]; - System.arraycopy(allIndexes, - 0, - newIndexes, - 0, - 
allIndexes.length); - System.arraycopy(allSizes, 0, newSizes, 0, allSizes.length); - newIndexes[allIndexes.length] = u; - newSizes[allSizes.length] = insert; - allIndexes = newIndexes; - allSizes = newSizes; - if (insert > 0) { - state = 3; - } - } - } - if (state < 3) { - --state; - } - } while (state != 0); - - // 2nd step: - // copies the bytecode of the method into a new bytevector, updates the - // offsets, and inserts (or removes) bytes as requested. - - ByteVector newCode = new ByteVector(code.length); - - u = 0; - while (u < code.length) { - int opcode = b[u] & 0xFF; - switch (ClassWriter.TYPE[opcode]) { case ClassWriter.NOARG_INSN: case ClassWriter.IMPLVAR_INSN: - newCode.putByte(opcode); u += 1; break; case ClassWriter.LABEL_INSN: if (opcode > 201) { - // changes temporary opcodes 202 to 217 (inclusive), 218 - // and 219 to IFEQ ... JSR (inclusive), IFNULL and + // converts temporary opcodes 202 to 217, 218 and + // 219 to IFEQ ... JSR (inclusive), IFNULL and // IFNONNULL opcode = opcode < 218 ? opcode - 49 : opcode - 20; label = u + readUnsignedShort(b, u + 1); @@ -2335,100 +2205,78 @@ class MethodWriter extends MethodVisitor { label = u + readShort(b, u + 1); } newOffset = getNewOffset(allIndexes, allSizes, u, label); - if (resize[u]) { - // replaces GOTO with GOTO_W, JSR with JSR_W and IFxxx - // with IFNOTxxx GOTO_W , where IFNOTxxx is - // the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ) - // and where designates the instruction just after - // the GOTO_W. - if (opcode == Opcodes.GOTO) { - newCode.putByte(200); // GOTO_W - } else if (opcode == Opcodes.JSR) { - newCode.putByte(201); // JSR_W - } else { - newCode.putByte(opcode <= 166 - ? ((opcode + 1) ^ 1) - 1 - : opcode ^ 1); - newCode.putShort(8); // jump offset - newCode.putByte(200); // GOTO_W - // newOffset now computed from start of GOTO_W - newOffset -= 3; + if (newOffset < Short.MIN_VALUE + || newOffset > Short.MAX_VALUE) { + if (!resize[u]) { + if (opcode == Opcodes.GOTO || opcode == Opcodes.JSR) { + // two additional bytes will be required to + // replace this GOTO or JSR instruction with + // a GOTO_W or a JSR_W + insert = 2; + } else { + // five additional bytes will be required to + // replace this IFxxx instruction with + // IFNOTxxx GOTO_W , where IFNOTxxx + // is the "opposite" opcode of IFxxx (i.e., + // IFNE for IFEQ) and where designates + // the instruction just after the GOTO_W. 
+ insert = 5; + } + resize[u] = true; } - newCode.putInt(newOffset); - } else { - newCode.putByte(opcode); - newCode.putShort(newOffset); } u += 3; break; case ClassWriter.LABELW_INSN: - label = u + readInt(b, u + 1); - newOffset = getNewOffset(allIndexes, allSizes, u, label); - newCode.putByte(opcode); - newCode.putInt(newOffset); u += 5; break; case ClassWriter.TABL_INSN: - // skips 0 to 3 padding bytes - v = u; - u = u + 4 - (v & 3); - // reads and copies instruction - newCode.putByte(Opcodes.TABLESWITCH); - newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4); - label = v + readInt(b, u); - u += 4; - newOffset = getNewOffset(allIndexes, allSizes, v, label); - newCode.putInt(newOffset); - j = readInt(b, u); - u += 4; - newCode.putInt(j); - j = readInt(b, u) - j + 1; - u += 4; - newCode.putInt(readInt(b, u - 4)); - for (; j > 0; --j) { - label = v + readInt(b, u); - u += 4; - newOffset = getNewOffset(allIndexes, allSizes, v, label); - newCode.putInt(newOffset); + if (state == 1) { + // true number of bytes to be added (or removed) + // from this instruction = (future number of padding + // bytes - current number of padding byte) - + // previously over estimated variation = + // = ((3 - newOffset%4) - (3 - u%4)) - u%4 + // = (-newOffset%4 + u%4) - u%4 + // = -(newOffset & 3) + newOffset = getNewOffset(allIndexes, allSizes, 0, u); + insert = -(newOffset & 3); + } else if (!resize[u]) { + // over estimation of the number of bytes to be + // added to this instruction = 3 - current number + // of padding bytes = 3 - (3 - u%4) = u%4 = u & 3 + insert = u & 3; + resize[u] = true; } + // skips instruction + u = u + 4 - (u & 3); + u += 4 * (readInt(b, u + 8) - readInt(b, u + 4) + 1) + 12; break; case ClassWriter.LOOK_INSN: - // skips 0 to 3 padding bytes - v = u; - u = u + 4 - (v & 3); - // reads and copies instruction - newCode.putByte(Opcodes.LOOKUPSWITCH); - newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4); - label = v + readInt(b, u); - u += 4; - newOffset = getNewOffset(allIndexes, allSizes, v, label); - newCode.putInt(newOffset); - j = readInt(b, u); - u += 4; - newCode.putInt(j); - for (; j > 0; --j) { - newCode.putInt(readInt(b, u)); - u += 4; - label = v + readInt(b, u); - u += 4; - newOffset = getNewOffset(allIndexes, allSizes, v, label); - newCode.putInt(newOffset); + if (state == 1) { + // like TABL_INSN + newOffset = getNewOffset(allIndexes, allSizes, 0, u); + insert = -(newOffset & 3); + } else if (!resize[u]) { + // like TABL_INSN + insert = u & 3; + resize[u] = true; } + // skips instruction + u = u + 4 - (u & 3); + u += 8 * readInt(b, u + 4) + 8; break; case ClassWriter.WIDE_INSN: opcode = b[u + 1] & 0xFF; if (opcode == Opcodes.IINC) { - newCode.putByteArray(b, u, 6); u += 6; } else { - newCode.putByteArray(b, u, 4); u += 4; } break; case ClassWriter.VAR_INSN: case ClassWriter.SBYTE_INSN: case ClassWriter.LDC_INSN: - newCode.putByteArray(b, u, 2); u += 2; break; case ClassWriter.SHORT_INSN: @@ -2436,19 +2284,178 @@ class MethodWriter extends MethodVisitor { case ClassWriter.FIELDORMETH_INSN: case ClassWriter.TYPE_INSN: case ClassWriter.IINC_INSN: - newCode.putByteArray(b, u, 3); u += 3; break; case ClassWriter.ITFMETH_INSN: case ClassWriter.INDYMETH_INSN: - newCode.putByteArray(b, u, 5); u += 5; break; - // case MANA_INSN: + // case ClassWriter.MANA_INSN: default: - newCode.putByteArray(b, u, 4); u += 4; break; + } + if (insert != 0) { + // adds a new (u, insert) entry in the allIndexes and + // allSizes arrays + int[] newIndexes = new int[allIndexes.length + 1]; 
+ int[] newSizes = new int[allSizes.length + 1]; + System.arraycopy(allIndexes, 0, newIndexes, 0, + allIndexes.length); + System.arraycopy(allSizes, 0, newSizes, 0, allSizes.length); + newIndexes[allIndexes.length] = u; + newSizes[allSizes.length] = insert; + allIndexes = newIndexes; + allSizes = newSizes; + if (insert > 0) { + state = 3; + } + } + } + if (state < 3) { + --state; + } + } while (state != 0); + + // 2nd step: + // copies the bytecode of the method into a new bytevector, updates the + // offsets, and inserts (or removes) bytes as requested. + + ByteVector newCode = new ByteVector(code.length); + + u = 0; + while (u < code.length) { + int opcode = b[u] & 0xFF; + switch (ClassWriter.TYPE[opcode]) { + case ClassWriter.NOARG_INSN: + case ClassWriter.IMPLVAR_INSN: + newCode.putByte(opcode); + u += 1; + break; + case ClassWriter.LABEL_INSN: + if (opcode > 201) { + // changes temporary opcodes 202 to 217 (inclusive), 218 + // and 219 to IFEQ ... JSR (inclusive), IFNULL and + // IFNONNULL + opcode = opcode < 218 ? opcode - 49 : opcode - 20; + label = u + readUnsignedShort(b, u + 1); + } else { + label = u + readShort(b, u + 1); + } + newOffset = getNewOffset(allIndexes, allSizes, u, label); + if (resize[u]) { + // replaces GOTO with GOTO_W, JSR with JSR_W and IFxxx + // with IFNOTxxx GOTO_W , where IFNOTxxx is + // the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ) + // and where designates the instruction just after + // the GOTO_W. + if (opcode == Opcodes.GOTO) { + newCode.putByte(200); // GOTO_W + } else if (opcode == Opcodes.JSR) { + newCode.putByte(201); // JSR_W + } else { + newCode.putByte(opcode <= 166 ? ((opcode + 1) ^ 1) - 1 + : opcode ^ 1); + newCode.putShort(8); // jump offset + newCode.putByte(200); // GOTO_W + // newOffset now computed from start of GOTO_W + newOffset -= 3; + } + newCode.putInt(newOffset); + } else { + newCode.putByte(opcode); + newCode.putShort(newOffset); + } + u += 3; + break; + case ClassWriter.LABELW_INSN: + label = u + readInt(b, u + 1); + newOffset = getNewOffset(allIndexes, allSizes, u, label); + newCode.putByte(opcode); + newCode.putInt(newOffset); + u += 5; + break; + case ClassWriter.TABL_INSN: + // skips 0 to 3 padding bytes + v = u; + u = u + 4 - (v & 3); + // reads and copies instruction + newCode.putByte(Opcodes.TABLESWITCH); + newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4); + label = v + readInt(b, u); + u += 4; + newOffset = getNewOffset(allIndexes, allSizes, v, label); + newCode.putInt(newOffset); + j = readInt(b, u); + u += 4; + newCode.putInt(j); + j = readInt(b, u) - j + 1; + u += 4; + newCode.putInt(readInt(b, u - 4)); + for (; j > 0; --j) { + label = v + readInt(b, u); + u += 4; + newOffset = getNewOffset(allIndexes, allSizes, v, label); + newCode.putInt(newOffset); + } + break; + case ClassWriter.LOOK_INSN: + // skips 0 to 3 padding bytes + v = u; + u = u + 4 - (v & 3); + // reads and copies instruction + newCode.putByte(Opcodes.LOOKUPSWITCH); + newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4); + label = v + readInt(b, u); + u += 4; + newOffset = getNewOffset(allIndexes, allSizes, v, label); + newCode.putInt(newOffset); + j = readInt(b, u); + u += 4; + newCode.putInt(j); + for (; j > 0; --j) { + newCode.putInt(readInt(b, u)); + u += 4; + label = v + readInt(b, u); + u += 4; + newOffset = getNewOffset(allIndexes, allSizes, v, label); + newCode.putInt(newOffset); + } + break; + case ClassWriter.WIDE_INSN: + opcode = b[u + 1] & 0xFF; + if (opcode == Opcodes.IINC) { + newCode.putByteArray(b, u, 6); + u += 6; 
+ } else { + newCode.putByteArray(b, u, 4); + u += 4; + } + break; + case ClassWriter.VAR_INSN: + case ClassWriter.SBYTE_INSN: + case ClassWriter.LDC_INSN: + newCode.putByteArray(b, u, 2); + u += 2; + break; + case ClassWriter.SHORT_INSN: + case ClassWriter.LDCW_INSN: + case ClassWriter.FIELDORMETH_INSN: + case ClassWriter.TYPE_INSN: + case ClassWriter.IINC_INSN: + newCode.putByteArray(b, u, 3); + u += 3; + break; + case ClassWriter.ITFMETH_INSN: + case ClassWriter.INDYMETH_INSN: + newCode.putByteArray(b, u, 5); + u += 5; + break; + // case MANA_INSN: + default: + newCode.putByteArray(b, u, 4); + u += 4; + break; } } @@ -2471,8 +2478,7 @@ class MethodWriter extends MethodVisitor { * must therefore never have been called for this label. */ u = l.position - 3; - if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u])) - { + if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u])) { getNewOffset(allIndexes, allSizes, l); // TODO update offsets in UNINITIALIZED values visitFrame(l.frame); @@ -2528,10 +2534,11 @@ class MethodWriter extends MethodVisitor { b = lineNumber.data; u = 0; while (u < lineNumber.length) { - writeShort(b, u, getNewOffset(allIndexes, - allSizes, - 0, - readUnsignedShort(b, u))); + writeShort( + b, + u, + getNewOffset(allIndexes, allSizes, 0, + readUnsignedShort(b, u))); u += 4; } } @@ -2554,8 +2561,10 @@ class MethodWriter extends MethodVisitor { /** * Reads an unsigned short value in the given byte array. * - * @param b a byte array. - * @param index the start index of the value to be read. + * @param b + * a byte array. + * @param index + * the start index of the value to be read. * @return the read value. */ static int readUnsignedShort(final byte[] b, final int index) { @@ -2565,8 +2574,10 @@ class MethodWriter extends MethodVisitor { /** * Reads a signed short value in the given byte array. * - * @param b a byte array. - * @param index the start index of the value to be read. + * @param b + * a byte array. + * @param index + * the start index of the value to be read. * @return the read value. */ static short readShort(final byte[] b, final int index) { @@ -2576,8 +2587,10 @@ class MethodWriter extends MethodVisitor { /** * Reads a signed int value in the given byte array. * - * @param b a byte array. - * @param index the start index of the value to be read. + * @param b + * a byte array. + * @param index + * the start index of the value to be read. * @return the read value. */ static int readInt(final byte[] b, final int index) { @@ -2588,9 +2601,12 @@ class MethodWriter extends MethodVisitor { /** * Writes a short value in the given byte array. * - * @param b a byte array. - * @param index where the first byte of the short value must be written. - * @param s the value to be written in the given byte array. + * @param b + * a byte array. + * @param index + * where the first byte of the short value must be written. + * @param s + * the value to be written in the given byte array. */ static void writeShort(final byte[] b, final int index, final int s) { b[index] = (byte) (s >>> 8); @@ -2598,32 +2614,34 @@ class MethodWriter extends MethodVisitor { } /** - * Computes the future value of a bytecode offset.
<p>
    Note: it is possible - * to have several entries for the same instruction in the indexes - * and sizes: two entries (index=a,size=b) and (index=a,size=b') - * are equivalent to a single entry (index=a,size=b+b'). + * Computes the future value of a bytecode offset. + * <p>
    + * Note: it is possible to have several entries for the same instruction in + * the indexes and sizes: two entries (index=a,size=b) and + * (index=a,size=b') are equivalent to a single entry (index=a,size=b+b'). * - * @param indexes current positions of the instructions to be resized. Each - * instruction must be designated by the index of its last - * byte, plus one (or, in other words, by the index of the first - * byte of the next instruction). - * @param sizes the number of bytes to be added to the above - * instructions. More precisely, for each i < len, - * sizes[i] bytes will be added at the end of the - * instruction designated by indexes[i] or, if - * sizes[i] is negative, the last |sizes[i]| - * bytes of the instruction will be removed (the instruction size - * must not become negative or null). - * @param begin index of the first byte of the source instruction. - * @param end index of the first byte of the target instruction. + * @param indexes + * current positions of the instructions to be resized. Each + * instruction must be designated by the index of its last + * byte, plus one (or, in other words, by the index of the + * first byte of the next instruction). + * @param sizes + * the number of bytes to be added to the above + * instructions. More precisely, for each i < len, + * sizes[i] bytes will be added at the end of the + * instruction designated by indexes[i] or, if + * sizes[i] is negative, the last | + * sizes[i]| bytes of the instruction will be removed + * (the instruction size must not become negative or + * null). + * @param begin + * index of the first byte of the source instruction. + * @param end + * index of the first byte of the target instruction. * @return the future value of the given bytecode offset. */ - static int getNewOffset( - final int[] indexes, - final int[] sizes, - final int begin, - final int end) - { + static int getNewOffset(final int[] indexes, final int[] sizes, + final int begin, final int end) { int offset = end - begin; for (int i = 0; i < indexes.length; ++i) { if (begin < indexes[i] && indexes[i] <= end) { @@ -2640,24 +2658,25 @@ class MethodWriter extends MethodVisitor { /** * Updates the offset of the given label. * - * @param indexes current positions of the instructions to be resized. Each - * instruction must be designated by the index of its last - * byte, plus one (or, in other words, by the index of the first - * byte of the next instruction). - * @param sizes the number of bytes to be added to the above - * instructions. More precisely, for each i < len, - * sizes[i] bytes will be added at the end of the - * instruction designated by indexes[i] or, if - * sizes[i] is negative, the last |sizes[i]| - * bytes of the instruction will be removed (the instruction size - * must not become negative or null). - * @param label the label whose offset must be updated. - */ - static void getNewOffset( - final int[] indexes, - final int[] sizes, - final Label label) - { + * @param indexes + * current positions of the instructions to be resized. Each + * instruction must be designated by the index of its last + * byte, plus one (or, in other words, by the index of the + * first byte of the next instruction). + * @param sizes + * the number of bytes to be added to the above + * instructions. 
More precisely, for each i < len, + * sizes[i] bytes will be added at the end of the + * instruction designated by indexes[i] or, if + * sizes[i] is negative, the last | + * sizes[i]| bytes of the instruction will be removed + * (the instruction size must not become negative or + * null). + * @param label + * the label whose offset must be updated. + */ + static void getNewOffset(final int[] indexes, final int[] sizes, + final Label label) { if ((label.status & Label.RESIZED) == 0) { label.position = getNewOffset(indexes, sizes, 0, label.position); label.status |= Label.RESIZED; diff --git a/src/asm/scala/tools/asm/Type.java b/src/asm/scala/tools/asm/Type.java index bf1107182a..7821a492e6 100644 --- a/src/asm/scala/tools/asm/Type.java +++ b/src/asm/scala/tools/asm/Type.java @@ -190,13 +190,16 @@ public class Type { /** * Constructs a reference type. * - * @param sort the sort of the reference type to be constructed. - * @param buf a buffer containing the descriptor of the previous type. - * @param off the offset of this descriptor in the previous buffer. - * @param len the length of this descriptor. - */ - private Type(final int sort, final char[] buf, final int off, final int len) - { + * @param sort + * the sort of the reference type to be constructed. + * @param buf + * a buffer containing the descriptor of the previous type. + * @param off + * the offset of this descriptor in the previous buffer. + * @param len + * the length of this descriptor. + */ + private Type(final int sort, final char[] buf, final int off, final int len) { this.sort = sort; this.buf = buf; this.off = off; @@ -206,7 +209,8 @@ public class Type { /** * Returns the Java type corresponding to the given type descriptor. * - * @param typeDescriptor a field or method type descriptor. + * @param typeDescriptor + * a field or method type descriptor. * @return the Java type corresponding to the given type descriptor. */ public static Type getType(final String typeDescriptor) { @@ -216,7 +220,8 @@ public class Type { /** * Returns the Java type corresponding to the given internal name. * - * @param internalName an internal name. + * @param internalName + * an internal name. * @return the Java type corresponding to the given internal name. */ public static Type getObjectType(final String internalName) { @@ -228,7 +233,8 @@ public class Type { * Returns the Java type corresponding to the given method descriptor. * Equivalent to Type.getType(methodDescriptor). * - * @param methodDescriptor a method descriptor. + * @param methodDescriptor + * a method descriptor. * @return the Java type corresponding to the given method descriptor. */ public static Type getMethodType(final String methodDescriptor) { @@ -239,18 +245,23 @@ public class Type { * Returns the Java method type corresponding to the given argument and * return types. * - * @param returnType the return type of the method. - * @param argumentTypes the argument types of the method. - * @return the Java type corresponding to the given argument and return types. + * @param returnType + * the return type of the method. + * @param argumentTypes + * the argument types of the method. + * @return the Java type corresponding to the given argument and return + * types. */ - public static Type getMethodType(final Type returnType, final Type... argumentTypes) { + public static Type getMethodType(final Type returnType, + final Type... argumentTypes) { return getType(getMethodDescriptor(returnType, argumentTypes)); } /** * Returns the Java type corresponding to the given class. 
* - * @param c a class. + * @param c + * a class. * @return the Java type corresponding to the given class. */ public static Type getType(final Class c) { @@ -282,7 +293,8 @@ public class Type { /** * Returns the Java method type corresponding to the given constructor. * - * @param c a {@link Constructor Constructor} object. + * @param c + * a {@link Constructor Constructor} object. * @return the Java method type corresponding to the given constructor. */ public static Type getType(final Constructor c) { @@ -292,7 +304,8 @@ public class Type { /** * Returns the Java method type corresponding to the given method. * - * @param m a {@link Method Method} object. + * @param m + * a {@link Method Method} object. * @return the Java method type corresponding to the given method. */ public static Type getType(final Method m) { @@ -303,7 +316,8 @@ public class Type { * Returns the Java types corresponding to the argument types of the given * method descriptor. * - * @param methodDescriptor a method descriptor. + * @param methodDescriptor + * a method descriptor. * @return the Java types corresponding to the argument types of the given * method descriptor. */ @@ -338,7 +352,8 @@ public class Type { * Returns the Java types corresponding to the argument types of the given * method. * - * @param method a method. + * @param method + * a method. * @return the Java types corresponding to the argument types of the given * method. */ @@ -355,7 +370,8 @@ public class Type { * Returns the Java type corresponding to the return type of the given * method descriptor. * - * @param methodDescriptor a method descriptor. + * @param methodDescriptor + * a method descriptor. * @return the Java type corresponding to the return type of the given * method descriptor. */ @@ -368,7 +384,8 @@ public class Type { * Returns the Java type corresponding to the return type of the given * method. * - * @param method a method. + * @param method + * a method. * @return the Java type corresponding to the return type of the given * method. */ @@ -379,12 +396,13 @@ public class Type { /** * Computes the size of the arguments and of the return value of a method. * - * @param desc the descriptor of a method. + * @param desc + * the descriptor of a method. * @return the size of the arguments of the method (plus one for the * implicit this argument), argSize, and the size of its return * value, retSize, packed into a single int i = - * (argSize << 2) | retSize (argSize is therefore equal - * to i >> 2, and retSize to i & 0x03). + * (argSize << 2) | retSize (argSize is therefore equal to + * i >> 2, and retSize to i & 0x03). */ public static int getArgumentsAndReturnSizes(final String desc) { int n = 1; @@ -419,52 +437,54 @@ public class Type { * method descriptors, buf is supposed to contain nothing more than the * descriptor itself. * - * @param buf a buffer containing a type descriptor. - * @param off the offset of this descriptor in the previous buffer. + * @param buf + * a buffer containing a type descriptor. + * @param off + * the offset of this descriptor in the previous buffer. * @return the Java type corresponding to the given type descriptor. 
*/ private static Type getType(final char[] buf, final int off) { int len; switch (buf[off]) { - case 'V': - return VOID_TYPE; - case 'Z': - return BOOLEAN_TYPE; - case 'C': - return CHAR_TYPE; - case 'B': - return BYTE_TYPE; - case 'S': - return SHORT_TYPE; - case 'I': - return INT_TYPE; - case 'F': - return FLOAT_TYPE; - case 'J': - return LONG_TYPE; - case 'D': - return DOUBLE_TYPE; - case '[': - len = 1; - while (buf[off + len] == '[') { - ++len; - } - if (buf[off + len] == 'L') { - ++len; - while (buf[off + len] != ';') { - ++len; - } - } - return new Type(ARRAY, buf, off, len + 1); - case 'L': - len = 1; + case 'V': + return VOID_TYPE; + case 'Z': + return BOOLEAN_TYPE; + case 'C': + return CHAR_TYPE; + case 'B': + return BYTE_TYPE; + case 'S': + return SHORT_TYPE; + case 'I': + return INT_TYPE; + case 'F': + return FLOAT_TYPE; + case 'J': + return LONG_TYPE; + case 'D': + return DOUBLE_TYPE; + case '[': + len = 1; + while (buf[off + len] == '[') { + ++len; + } + if (buf[off + len] == 'L') { + ++len; while (buf[off + len] != ';') { ++len; } - return new Type(OBJECT, buf, off + 1, len - 1); + } + return new Type(ARRAY, buf, off, len + 1); + case 'L': + len = 1; + while (buf[off + len] != ';') { + ++len; + } + return new Type(OBJECT, buf, off + 1, len - 1); // case '(': - default: - return new Type(METHOD, buf, 0, buf.length); + default: + return new Type(METHOD, buf, off, buf.length - off); } } @@ -475,11 +495,11 @@ public class Type { /** * Returns the sort of this Java type. * - * @return {@link #VOID VOID}, {@link #BOOLEAN BOOLEAN}, - * {@link #CHAR CHAR}, {@link #BYTE BYTE}, {@link #SHORT SHORT}, - * {@link #INT INT}, {@link #FLOAT FLOAT}, {@link #LONG LONG}, - * {@link #DOUBLE DOUBLE}, {@link #ARRAY ARRAY}, - * {@link #OBJECT OBJECT} or {@link #METHOD METHOD}. + * @return {@link #VOID VOID}, {@link #BOOLEAN BOOLEAN}, {@link #CHAR CHAR}, + * {@link #BYTE BYTE}, {@link #SHORT SHORT}, {@link #INT INT}, + * {@link #FLOAT FLOAT}, {@link #LONG LONG}, {@link #DOUBLE DOUBLE}, + * {@link #ARRAY ARRAY}, {@link #OBJECT OBJECT} or {@link #METHOD + * METHOD}. */ public int getSort() { return sort; @@ -517,34 +537,34 @@ public class Type { */ public String getClassName() { switch (sort) { - case VOID: - return "void"; - case BOOLEAN: - return "boolean"; - case CHAR: - return "char"; - case BYTE: - return "byte"; - case SHORT: - return "short"; - case INT: - return "int"; - case FLOAT: - return "float"; - case LONG: - return "long"; - case DOUBLE: - return "double"; - case ARRAY: - StringBuffer b = new StringBuffer(getElementType().getClassName()); - for (int i = getDimensions(); i > 0; --i) { - b.append("[]"); - } - return b.toString(); - case OBJECT: - return new String(buf, off, len).replace('/', '.'); - default: - return null; + case VOID: + return "void"; + case BOOLEAN: + return "boolean"; + case CHAR: + return "char"; + case BYTE: + return "byte"; + case SHORT: + return "short"; + case INT: + return "int"; + case FLOAT: + return "float"; + case LONG: + return "long"; + case DOUBLE: + return "double"; + case ARRAY: + StringBuffer b = new StringBuffer(getElementType().getClassName()); + for (int i = getDimensions(); i > 0; --i) { + b.append("[]"); + } + return b.toString(); + case OBJECT: + return new String(buf, off, len).replace('/', '.'); + default: + return null; } } @@ -613,15 +633,15 @@ public class Type { * Returns the descriptor corresponding to the given argument and return * types. * - * @param returnType the return type of the method. 
- * @param argumentTypes the argument types of the method. + * @param returnType + * the return type of the method. + * @param argumentTypes + * the argument types of the method. * @return the descriptor corresponding to the given argument and return * types. */ - public static String getMethodDescriptor( - final Type returnType, - final Type... argumentTypes) - { + public static String getMethodDescriptor(final Type returnType, + final Type... argumentTypes) { StringBuffer buf = new StringBuffer(); buf.append('('); for (int i = 0; i < argumentTypes.length; ++i) { @@ -636,11 +656,13 @@ public class Type { * Appends the descriptor corresponding to this Java type to the given * string buffer. * - * @param buf the string buffer to which the descriptor must be appended. + * @param buf + * the string buffer to which the descriptor must be appended. */ private void getDescriptor(final StringBuffer buf) { if (this.buf == null) { - // descriptor is in byte 3 of 'off' for primitive types (buf == null) + // descriptor is in byte 3 of 'off' for primitive types (buf == + // null) buf.append((char) ((off & 0xFF000000) >>> 24)); } else if (sort == OBJECT) { buf.append('L'); @@ -661,7 +683,8 @@ public class Type { * class is its fully qualified name, as returned by Class.getName(), where * '.' are replaced by '/'. * - * @param c an object or array class. + * @param c + * an object or array class. * @return the internal name of the given class. */ public static String getInternalName(final Class c) { @@ -671,7 +694,8 @@ public class Type { /** * Returns the descriptor corresponding to the given Java type. * - * @param c an object class, a primitive class or an array class. + * @param c + * an object class, a primitive class or an array class. * @return the descriptor corresponding to the given class. */ public static String getDescriptor(final Class c) { @@ -683,7 +707,8 @@ public class Type { /** * Returns the descriptor corresponding to the given constructor. * - * @param c a {@link Constructor Constructor} object. + * @param c + * a {@link Constructor Constructor} object. * @return the descriptor of the given constructor. */ public static String getConstructorDescriptor(final Constructor c) { @@ -699,7 +724,8 @@ public class Type { /** * Returns the descriptor corresponding to the given method. * - * @param m a {@link Method Method} object. + * @param m + * a {@link Method Method} object. * @return the descriptor of the given method. */ public static String getMethodDescriptor(final Method m) { @@ -717,8 +743,10 @@ public class Type { /** * Appends the descriptor of the given class to the given string buffer. * - * @param buf the string buffer to which the descriptor must be appended. - * @param c the class whose descriptor must be computed. + * @param buf + * the string buffer to which the descriptor must be appended. + * @param c + * the class whose descriptor must be computed. */ private static void getDescriptor(final StringBuffer buf, final Class c) { Class d = c; @@ -783,9 +811,10 @@ public class Type { * Returns a JVM instruction opcode adapted to this Java type. This method * must not be used for method types. * - * @param opcode a JVM instruction opcode. This opcode must be one of ILOAD, - * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL, - * ISHR, IUSHR, IAND, IOR, IXOR and IRETURN. + * @param opcode + * a JVM instruction opcode. 
This opcode must be one of ILOAD, + * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, + * ISHL, ISHR, IUSHR, IAND, IOR, IXOR and IRETURN. * @return an opcode that is similar to the given opcode, but adapted to * this Java type. For example, if this type is float and * opcode is IRETURN, this method returns FRETURN. @@ -809,7 +838,8 @@ public class Type { /** * Tests if the given object is equal to this type. * - * @param o the object to be compared to this type. + * @param o + * the object to be compared to this type. * @return true if the given object is equal to this type. */ @Override diff --git a/src/asm/scala/tools/asm/signature/SignatureReader.java b/src/asm/scala/tools/asm/signature/SignatureReader.java index 22e6427e63..9c7c3880d9 100644 --- a/src/asm/scala/tools/asm/signature/SignatureReader.java +++ b/src/asm/scala/tools/asm/signature/SignatureReader.java @@ -46,8 +46,9 @@ public class SignatureReader { /** * Constructs a {@link SignatureReader} for the given signature. * - * @param signature A ClassSignature, MethodTypeSignature, - * or FieldTypeSignature. + * @param signature + * A ClassSignature, MethodTypeSignature, or + * FieldTypeSignature. */ public SignatureReader(final String signature) { this.signature = signature; @@ -58,15 +59,15 @@ public class SignatureReader { * {@link SignatureReader}. This signature is the one specified in the * constructor (see {@link #SignatureReader(String) SignatureReader}). This * method is intended to be called on a {@link SignatureReader} that was - * created using a ClassSignature (such as the + * created using a ClassSignature (such as the signature + * parameter of the {@link scala.tools.asm.ClassVisitor#visit + * ClassVisitor.visit} method) or a MethodTypeSignature (such as the * signature parameter of the - * {@link org.objectweb.asm.ClassVisitor#visit ClassVisitor.visit} method) - * or a MethodTypeSignature (such as the signature - * parameter of the - * {@link org.objectweb.asm.ClassVisitor#visitMethod ClassVisitor.visitMethod} - * method). + * {@link scala.tools.asm.ClassVisitor#visitMethod + * ClassVisitor.visitMethod} method). * - * @param v the visitor that must visit this signature. + * @param v + * the visitor that must visit this signature. */ public void accept(final SignatureVisitor v) { String signature = this.signature; @@ -118,12 +119,12 @@ public class SignatureReader { * method is intended to be called on a {@link SignatureReader} that was * created using a FieldTypeSignature, such as the * signature parameter of the - * {@link org.objectweb.asm.ClassVisitor#visitField - * ClassVisitor.visitField} or {@link - * org.objectweb.asm.MethodVisitor#visitLocalVariable + * {@link scala.tools.asm.ClassVisitor#visitField ClassVisitor.visitField} + * or {@link scala.tools.asm.MethodVisitor#visitLocalVariable * MethodVisitor.visitLocalVariable} methods. * - * @param v the visitor that must visit this signature. + * @param v + * the visitor that must visit this signature. */ public void acceptType(final SignatureVisitor v) { parseType(this.signature, 0, v); @@ -132,98 +133,96 @@ public class SignatureReader { /** * Parses a field type signature and makes the given visitor visit it. * - * @param signature a string containing the signature that must be parsed. - * @param pos index of the first character of the signature to parsed. - * @param v the visitor that must visit this signature. + * @param signature + * a string containing the signature that must be parsed. 
+ * @param pos + * index of the first character of the signature to parsed. + * @param v + * the visitor that must visit this signature. * @return the index of the first character after the parsed signature. */ - private static int parseType( - final String signature, - int pos, - final SignatureVisitor v) - { + private static int parseType(final String signature, int pos, + final SignatureVisitor v) { char c; int start, end; boolean visited, inner; String name; switch (c = signature.charAt(pos++)) { - case 'Z': - case 'C': - case 'B': - case 'S': - case 'I': - case 'F': - case 'J': - case 'D': - case 'V': - v.visitBaseType(c); - return pos; + case 'Z': + case 'C': + case 'B': + case 'S': + case 'I': + case 'F': + case 'J': + case 'D': + case 'V': + v.visitBaseType(c); + return pos; - case '[': - return parseType(signature, pos, v.visitArrayType()); + case '[': + return parseType(signature, pos, v.visitArrayType()); - case 'T': - end = signature.indexOf(';', pos); - v.visitTypeVariable(signature.substring(pos, end)); - return end + 1; + case 'T': + end = signature.indexOf(';', pos); + v.visitTypeVariable(signature.substring(pos, end)); + return end + 1; - default: // case 'L': - start = pos; - visited = false; - inner = false; - for (;;) { - switch (c = signature.charAt(pos++)) { - case '.': - case ';': - if (!visited) { - name = signature.substring(start, pos - 1); - if (inner) { - v.visitInnerClassType(name); - } else { - v.visitClassType(name); - } - } - if (c == ';') { - v.visitEnd(); - return pos; - } - start = pos; - visited = false; - inner = true; - break; + default: // case 'L': + start = pos; + visited = false; + inner = false; + for (;;) { + switch (c = signature.charAt(pos++)) { + case '.': + case ';': + if (!visited) { + name = signature.substring(start, pos - 1); + if (inner) { + v.visitInnerClassType(name); + } else { + v.visitClassType(name); + } + } + if (c == ';') { + v.visitEnd(); + return pos; + } + start = pos; + visited = false; + inner = true; + break; - case '<': - name = signature.substring(start, pos - 1); - if (inner) { - v.visitInnerClassType(name); - } else { - v.visitClassType(name); - } - visited = true; - top: for (;;) { - switch (c = signature.charAt(pos)) { - case '>': - break top; - case '*': - ++pos; - v.visitTypeArgument(); - break; - case '+': - case '-': - pos = parseType(signature, - pos + 1, - v.visitTypeArgument(c)); - break; - default: - pos = parseType(signature, - pos, - v.visitTypeArgument('=')); - break; - } - } + case '<': + name = signature.substring(start, pos - 1); + if (inner) { + v.visitInnerClassType(name); + } else { + v.visitClassType(name); + } + visited = true; + top: for (;;) { + switch (c = signature.charAt(pos)) { + case '>': + break top; + case '*': + ++pos; + v.visitTypeArgument(); + break; + case '+': + case '-': + pos = parseType(signature, pos + 1, + v.visitTypeArgument(c)); + break; + default: + pos = parseType(signature, pos, + v.visitTypeArgument('=')); + break; + } } } + } } } } diff --git a/src/asm/scala/tools/asm/signature/SignatureVisitor.java b/src/asm/scala/tools/asm/signature/SignatureVisitor.java index 2fc364e374..f38f81f53b 100644 --- a/src/asm/scala/tools/asm/signature/SignatureVisitor.java +++ b/src/asm/scala/tools/asm/signature/SignatureVisitor.java @@ -35,21 +35,21 @@ import scala.tools.asm.Opcodes; * A visitor to visit a generic signature. 
The methods of this interface must be * called in one of the three following orders (the last one is the only valid * order for a {@link SignatureVisitor} that is returned by a method of this - * interface):
    • ClassSignature = ( - * visitFormalTypeParameter - * visitClassBound? - * visitInterfaceBound* )* ( visitSuperClass - * visitInterface* )
    • + * interface): + *
        + *
      • ClassSignature = ( visitFormalTypeParameter + * visitClassBound? visitInterfaceBound* )* ( + * visitSuperClass visitInterface* )
      • *
      • MethodSignature = ( visitFormalTypeParameter - * visitClassBound? - * visitInterfaceBound* )* ( visitParameterType* - * visitReturnType - * visitExceptionType* )
      • TypeSignature = - * visitBaseType | visitTypeVariable | - * visitArrayType | ( + * visitClassBound? visitInterfaceBound* )* ( + * visitParameterType* visitReturnType + * visitExceptionType* )
      • + *
      • TypeSignature = visitBaseType | + * visitTypeVariable | visitArrayType | ( * visitClassType visitTypeArgument* ( - * visitInnerClassType visitTypeArgument* )* - * visitEnd ) )
      + * visitInnerClassType visitTypeArgument* )* visitEnd + * ) ) + *
    * * @author Thomas Hallgren * @author Eric Bruneton @@ -80,8 +80,9 @@ public abstract class SignatureVisitor { /** * Constructs a new {@link SignatureVisitor}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. */ public SignatureVisitor(final int api) { this.api = api; @@ -90,7 +91,8 @@ public abstract class SignatureVisitor { /** * Visits a formal type parameter. * - * @param name the name of the formal parameter. + * @param name + * the name of the formal parameter. */ public void visitFormalTypeParameter(String name) { } @@ -162,8 +164,9 @@ public abstract class SignatureVisitor { /** * Visits a signature corresponding to a primitive type. * - * @param descriptor the descriptor of the primitive type, or 'V' for - * void. + * @param descriptor + * the descriptor of the primitive type, or 'V' for void + * . */ public void visitBaseType(char descriptor) { } @@ -171,7 +174,8 @@ public abstract class SignatureVisitor { /** * Visits a signature corresponding to a type variable. * - * @param name the name of the type variable. + * @param name + * the name of the type variable. */ public void visitTypeVariable(String name) { } @@ -190,7 +194,8 @@ public abstract class SignatureVisitor { * Starts the visit of a signature corresponding to a class or interface * type. * - * @param name the internal name of the class or interface. + * @param name + * the internal name of the class or interface. */ public void visitClassType(String name) { } @@ -198,7 +203,8 @@ public abstract class SignatureVisitor { /** * Visits an inner class. * - * @param name the local name of the inner class in its enclosing class. + * @param name + * the local name of the inner class in its enclosing class. */ public void visitInnerClassType(String name) { } @@ -213,7 +219,8 @@ public abstract class SignatureVisitor { /** * Visits a type argument of the last visited class or inner class type. * - * @param wildcard '+', '-' or '='. + * @param wildcard + * '+', '-' or '='. * @return a non null visitor to visit the signature of the type argument. */ public SignatureVisitor visitTypeArgument(char wildcard) { diff --git a/src/asm/scala/tools/asm/signature/SignatureWriter.java b/src/asm/scala/tools/asm/signature/SignatureWriter.java index a59fdfde2b..ebf4fe07b4 100644 --- a/src/asm/scala/tools/asm/signature/SignatureWriter.java +++ b/src/asm/scala/tools/asm/signature/SignatureWriter.java @@ -224,4 +224,4 @@ public class SignatureWriter extends SignatureVisitor { } argumentStack /= 2; } -} \ No newline at end of file +} diff --git a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java index 471f842ffc..411eead3c7 100644 --- a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java +++ b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java @@ -148,7 +148,8 @@ public abstract class AbstractInsnNode { /** * Constructs a new {@link AbstractInsnNode}. * - * @param opcode the opcode of the instruction to be constructed. + * @param opcode + * the opcode of the instruction to be constructed. */ protected AbstractInsnNode(final int opcode) { this.opcode = opcode; @@ -197,38 +198,47 @@ public abstract class AbstractInsnNode { /** * Makes the given code visitor visit this instruction. * - * @param cv a code visitor. + * @param cv + * a code visitor. 
*/ public abstract void accept(final MethodVisitor cv); /** * Returns a copy of this instruction. * - * @param labels a map from LabelNodes to cloned LabelNodes. + * @param labels + * a map from LabelNodes to cloned LabelNodes. * @return a copy of this instruction. The returned instruction does not * belong to any {@link InsnList}. */ - public abstract AbstractInsnNode clone(final Map labels); + public abstract AbstractInsnNode clone( + final Map labels); /** * Returns the clone of the given label. * - * @param label a label. - * @param map a map from LabelNodes to cloned LabelNodes. + * @param label + * a label. + * @param map + * a map from LabelNodes to cloned LabelNodes. * @return the clone of the given label. */ - static LabelNode clone(final LabelNode label, final Map map) { + static LabelNode clone(final LabelNode label, + final Map map) { return map.get(label); } /** * Returns the clones of the given labels. * - * @param labels a list of labels. - * @param map a map from LabelNodes to cloned LabelNodes. + * @param labels + * a list of labels. + * @param map + * a map from LabelNodes to cloned LabelNodes. * @return the clones of the given labels. */ - static LabelNode[] clone(final List labels, final Map map) { + static LabelNode[] clone(final List labels, + final Map map) { LabelNode[] clones = new LabelNode[labels.size()]; for (int i = 0; i < clones.length; ++i) { clones[i] = map.get(labels.get(i)); diff --git a/src/asm/scala/tools/asm/tree/AnnotationNode.java b/src/asm/scala/tools/asm/tree/AnnotationNode.java index 9f132550e6..1f4beef9f7 100644 --- a/src/asm/scala/tools/asm/tree/AnnotationNode.java +++ b/src/asm/scala/tools/asm/tree/AnnotationNode.java @@ -52,11 +52,11 @@ public class AnnotationNode extends AnnotationVisitor { * as two consecutive elements in the list. The name is a {@link String}, * and the value may be a {@link Byte}, {@link Boolean}, {@link Character}, * {@link Short}, {@link Integer}, {@link Long}, {@link Float}, - * {@link Double}, {@link String} or {@link org.objectweb.asm.Type}, or an + * {@link Double}, {@link String} or {@link scala.tools.asm.Type}, or an * two elements String array (for enumeration values), a * {@link AnnotationNode}, or a {@link List} of values of one of the - * preceding types. The list may be null if there is no name - * value pair. + * preceding types. The list may be null if there is no name value + * pair. */ public List values; @@ -65,7 +65,8 @@ public class AnnotationNode extends AnnotationVisitor { * constructor. Instead, they must use the * {@link #AnnotationNode(int, String)} version. * - * @param desc the class descriptor of the annotation class. + * @param desc + * the class descriptor of the annotation class. */ public AnnotationNode(final String desc) { this(Opcodes.ASM4, desc); @@ -74,9 +75,11 @@ public class AnnotationNode extends AnnotationVisitor { /** * Constructs a new {@link AnnotationNode}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. - * @param desc the class descriptor of the annotation class. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. + * @param desc + * the class descriptor of the annotation class. */ public AnnotationNode(final int api, final String desc) { super(api); @@ -86,7 +89,8 @@ public class AnnotationNode extends AnnotationVisitor { /** * Constructs a new {@link AnnotationNode} to visit an array value. * - * @param values where the visited values must be stored. 
+ * @param values + * where the visited values must be stored. */ AnnotationNode(final List values) { super(Opcodes.ASM4); @@ -109,11 +113,8 @@ public class AnnotationNode extends AnnotationVisitor { } @Override - public void visitEnum( - final String name, - final String desc, - final String value) - { + public void visitEnum(final String name, final String desc, + final String value) { if (values == null) { values = new ArrayList(this.desc != null ? 2 : 1); } @@ -124,10 +125,8 @@ public class AnnotationNode extends AnnotationVisitor { } @Override - public AnnotationVisitor visitAnnotation( - final String name, - final String desc) - { + public AnnotationVisitor visitAnnotation(final String name, + final String desc) { if (values == null) { values = new ArrayList(this.desc != null ? 2 : 1); } @@ -166,7 +165,8 @@ public class AnnotationNode extends AnnotationVisitor { * recursively, do not contain elements that were introduced in more recent * versions of the ASM API than the given version. * - * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}. + * @param api + * an ASM API version. Must be one of {@link Opcodes#ASM4}. */ public void check(final int api) { // nothing to do @@ -175,7 +175,8 @@ public class AnnotationNode extends AnnotationVisitor { /** * Makes the given visitor visit this annotation. * - * @param av an annotation visitor. Maybe null. + * @param av + * an annotation visitor. Maybe null. */ public void accept(final AnnotationVisitor av) { if (av != null) { @@ -193,15 +194,15 @@ public class AnnotationNode extends AnnotationVisitor { /** * Makes the given visitor visit a given annotation value. * - * @param av an annotation visitor. Maybe null. - * @param name the value name. - * @param value the actual value. + * @param av + * an annotation visitor. Maybe null. + * @param name + * the value name. + * @param value + * the actual value. */ - static void accept( - final AnnotationVisitor av, - final String name, - final Object value) - { + static void accept(final AnnotationVisitor av, final String name, + final Object value) { if (av != null) { if (value instanceof String[]) { String[] typeconst = (String[]) value; diff --git a/src/asm/scala/tools/asm/tree/ClassNode.java b/src/asm/scala/tools/asm/tree/ClassNode.java index 64effae698..c3d999985a 100644 --- a/src/asm/scala/tools/asm/tree/ClassNode.java +++ b/src/asm/scala/tools/asm/tree/ClassNode.java @@ -53,33 +53,33 @@ public class ClassNode extends ClassVisitor { public int version; /** - * The class's access flags (see {@link org.objectweb.asm.Opcodes}). This + * The class's access flags (see {@link scala.tools.asm.Opcodes}). This * field also indicates if the class is deprecated. */ public int access; /** * The internal name of the class (see - * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). + * {@link scala.tools.asm.Type#getInternalName() getInternalName}). */ public String name; /** - * The signature of the class. Mayt be null. + * The signature of the class. May be null. */ public String signature; /** * The internal of name of the super class (see - * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). For - * interfaces, the super class is {@link Object}. May be null, - * but only for the {@link Object} class. + * {@link scala.tools.asm.Type#getInternalName() getInternalName}). For + * interfaces, the super class is {@link Object}. May be null, but + * only for the {@link Object} class. 
*/ public String superName; /** * The internal names of the class's interfaces (see - * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). This + * {@link scala.tools.asm.Type#getInternalName() getInternalName}). This * list is a list of {@link String} objects. */ public List interfaces; @@ -91,7 +91,7 @@ public class ClassNode extends ClassVisitor { public String sourceFile; /** - * Debug information to compute the correspondance between source and + * Debug information to compute the correspondence between source and * compiled elements of the class. May be null. */ public String sourceDebug; @@ -109,8 +109,8 @@ public class ClassNode extends ClassVisitor { public String outerMethod; /** - * The descriptor of the method that contains the class, or null - * if the class is not enclosed in a method. + * The descriptor of the method that contains the class, or null if + * the class is not enclosed in a method. */ public String outerMethodDesc; @@ -118,7 +118,7 @@ public class ClassNode extends ClassVisitor { * The runtime visible annotations of this class. This list is a list of * {@link AnnotationNode} objects. May be null. * - * @associates org.objectweb.asm.tree.AnnotationNode + * @associates scala.tools.asm.tree.AnnotationNode * @label visible */ public List visibleAnnotations; @@ -127,7 +127,7 @@ public class ClassNode extends ClassVisitor { * The runtime invisible annotations of this class. This list is a list of * {@link AnnotationNode} objects. May be null. * - * @associates org.objectweb.asm.tree.AnnotationNode + * @associates scala.tools.asm.tree.AnnotationNode * @label invisible */ public List invisibleAnnotations; @@ -136,7 +136,7 @@ public class ClassNode extends ClassVisitor { * The non standard attributes of this class. This list is a list of * {@link Attribute} objects. May be null. * - * @associates org.objectweb.asm.Attribute + * @associates scala.tools.asm.Attribute */ public List attrs; @@ -144,7 +144,7 @@ public class ClassNode extends ClassVisitor { * Informations about the inner classes of this class. This list is a list * of {@link InnerClassNode} objects. * - * @associates org.objectweb.asm.tree.InnerClassNode + * @associates scala.tools.asm.tree.InnerClassNode */ public List innerClasses; @@ -152,7 +152,7 @@ public class ClassNode extends ClassVisitor { * The fields of this class. This list is a list of {@link FieldNode} * objects. * - * @associates org.objectweb.asm.tree.FieldNode + * @associates scala.tools.asm.tree.FieldNode */ public List fields; @@ -160,7 +160,7 @@ public class ClassNode extends ClassVisitor { * The methods of this class. This list is a list of {@link MethodNode} * objects. * - * @associates org.objectweb.asm.tree.MethodNode + * @associates scala.tools.asm.tree.MethodNode */ public List methods; @@ -176,8 +176,9 @@ public class ClassNode extends ClassVisitor { /** * Constructs a new {@link ClassNode}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. 
*/ public ClassNode(final int api) { super(api); @@ -192,14 +193,9 @@ public class ClassNode extends ClassVisitor { // ------------------------------------------------------------------------ @Override - public void visit( - final int version, - final int access, - final String name, - final String signature, - final String superName, - final String[] interfaces) - { + public void visit(final int version, final int access, final String name, + final String signature, final String superName, + final String[] interfaces) { this.version = version; this.access = access; this.name = name; @@ -217,21 +213,16 @@ public class ClassNode extends ClassVisitor { } @Override - public void visitOuterClass( - final String owner, - final String name, - final String desc) - { + public void visitOuterClass(final String owner, final String name, + final String desc) { outerClass = owner; outerMethod = name; outerMethodDesc = desc; } @Override - public AnnotationVisitor visitAnnotation( - final String desc, - final boolean visible) - { + public AnnotationVisitor visitAnnotation(final String desc, + final boolean visible) { AnnotationNode an = new AnnotationNode(desc); if (visible) { if (visibleAnnotations == null) { @@ -256,44 +247,25 @@ public class ClassNode extends ClassVisitor { } @Override - public void visitInnerClass( - final String name, - final String outerName, - final String innerName, - final int access) - { - InnerClassNode icn = new InnerClassNode(name, - outerName, - innerName, + public void visitInnerClass(final String name, final String outerName, + final String innerName, final int access) { + InnerClassNode icn = new InnerClassNode(name, outerName, innerName, access); innerClasses.add(icn); } @Override - public FieldVisitor visitField( - final int access, - final String name, - final String desc, - final String signature, - final Object value) - { + public FieldVisitor visitField(final int access, final String name, + final String desc, final String signature, final Object value) { FieldNode fn = new FieldNode(access, name, desc, signature, value); fields.add(fn); return fn; } @Override - public MethodVisitor visitMethod( - final int access, - final String name, - final String desc, - final String signature, - final String[] exceptions) - { - MethodNode mn = new MethodNode(access, - name, - desc, - signature, + public MethodVisitor visitMethod(final int access, final String name, + final String desc, final String signature, final String[] exceptions) { + MethodNode mn = new MethodNode(access, name, desc, signature, exceptions); methods.add(mn); return mn; @@ -313,7 +285,8 @@ public class ClassNode extends ClassVisitor { * contain elements that were introduced in more recent versions of the ASM * API than the given version. * - * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}. + * @param api + * an ASM API version. Must be one of {@link Opcodes#ASM4}. */ public void check(final int api) { // nothing to do @@ -322,7 +295,8 @@ public class ClassNode extends ClassVisitor { /** * Makes the given class visitor visit this class. * - * @param cv a class visitor. + * @param cv + * a class visitor. 
*/ public void accept(final ClassVisitor cv) { // visits header diff --git a/src/asm/scala/tools/asm/tree/FieldInsnNode.java b/src/asm/scala/tools/asm/tree/FieldInsnNode.java index 6b7a6a142a..0c94f18adf 100644 --- a/src/asm/scala/tools/asm/tree/FieldInsnNode.java +++ b/src/asm/scala/tools/asm/tree/FieldInsnNode.java @@ -43,7 +43,7 @@ public class FieldInsnNode extends AbstractInsnNode { /** * The internal name of the field's owner class (see - * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). + * {@link scala.tools.asm.Type#getInternalName() getInternalName}). */ public String owner; @@ -53,26 +53,27 @@ public class FieldInsnNode extends AbstractInsnNode { public String name; /** - * The field's descriptor (see {@link org.objectweb.asm.Type}). + * The field's descriptor (see {@link scala.tools.asm.Type}). */ public String desc; /** * Constructs a new {@link FieldInsnNode}. * - * @param opcode the opcode of the type instruction to be constructed. This - * opcode must be GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD. - * @param owner the internal name of the field's owner class (see - * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). - * @param name the field's name. - * @param desc the field's descriptor (see {@link org.objectweb.asm.Type}). + * @param opcode + * the opcode of the type instruction to be constructed. This + * opcode must be GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD. + * @param owner + * the internal name of the field's owner class (see + * {@link scala.tools.asm.Type#getInternalName() + * getInternalName}). + * @param name + * the field's name. + * @param desc + * the field's descriptor (see {@link scala.tools.asm.Type}). */ - public FieldInsnNode( - final int opcode, - final String owner, - final String name, - final String desc) - { + public FieldInsnNode(final int opcode, final String owner, + final String name, final String desc) { super(opcode); this.owner = owner; this.name = name; @@ -82,8 +83,9 @@ public class FieldInsnNode extends AbstractInsnNode { /** * Sets the opcode of this instruction. * - * @param opcode the new instruction opcode. This opcode must be GETSTATIC, - * PUTSTATIC, GETFIELD or PUTFIELD. + * @param opcode + * the new instruction opcode. This opcode must be GETSTATIC, + * PUTSTATIC, GETFIELD or PUTFIELD. */ public void setOpcode(final int opcode) { this.opcode = opcode; diff --git a/src/asm/scala/tools/asm/tree/FieldNode.java b/src/asm/scala/tools/asm/tree/FieldNode.java index 9a1e17033c..61b614ec59 100644 --- a/src/asm/scala/tools/asm/tree/FieldNode.java +++ b/src/asm/scala/tools/asm/tree/FieldNode.java @@ -46,7 +46,7 @@ import scala.tools.asm.Opcodes; public class FieldNode extends FieldVisitor { /** - * The field's access flags (see {@link org.objectweb.asm.Opcodes}). This + * The field's access flags (see {@link scala.tools.asm.Opcodes}). This * field also indicates if the field is synthetic and/or deprecated. */ public int access; @@ -57,7 +57,7 @@ public class FieldNode extends FieldVisitor { public String name; /** - * The field's descriptor (see {@link org.objectweb.asm.Type}). + * The field's descriptor (see {@link scala.tools.asm.Type}). */ public String desc; @@ -67,8 +67,8 @@ public class FieldNode extends FieldVisitor { public String signature; /** - * The field's initial value. This field, which may be null if - * the field does not have an initial value, must be an {@link Integer}, a + * The field's initial value. 
This field, which may be null if the + * field does not have an initial value, must be an {@link Integer}, a * {@link Float}, a {@link Long}, a {@link Double} or a {@link String}. */ public Object value; @@ -77,7 +77,7 @@ public class FieldNode extends FieldVisitor { * The runtime visible annotations of this field. This list is a list of * {@link AnnotationNode} objects. May be null. * - * @associates org.objectweb.asm.tree.AnnotationNode + * @associates scala.tools.asm.tree.AnnotationNode * @label visible */ public List visibleAnnotations; @@ -86,7 +86,7 @@ public class FieldNode extends FieldVisitor { * The runtime invisible annotations of this field. This list is a list of * {@link AnnotationNode} objects. May be null. * - * @associates org.objectweb.asm.tree.AnnotationNode + * @associates scala.tools.asm.tree.AnnotationNode * @label invisible */ public List invisibleAnnotations; @@ -95,7 +95,7 @@ public class FieldNode extends FieldVisitor { * The non standard attributes of this field. This list is a list of * {@link Attribute} objects. May be null. * - * @associates org.objectweb.asm.Attribute + * @associates scala.tools.asm.Attribute */ public List attrs; @@ -104,25 +104,25 @@ public class FieldNode extends FieldVisitor { * constructor. Instead, they must use the * {@link #FieldNode(int, int, String, String, String, Object)} version. * - * @param access the field's access flags (see - * {@link org.objectweb.asm.Opcodes}). This parameter also indicates - * if the field is synthetic and/or deprecated. - * @param name the field's name. - * @param desc the field's descriptor (see {@link org.objectweb.asm.Type - * Type}). - * @param signature the field's signature. - * @param value the field's initial value. This parameter, which may be - * null if the field does not have an initial value, must be - * an {@link Integer}, a {@link Float}, a {@link Long}, a - * {@link Double} or a {@link String}. + * @param access + * the field's access flags (see + * {@link scala.tools.asm.Opcodes}). This parameter also + * indicates if the field is synthetic and/or deprecated. + * @param name + * the field's name. + * @param desc + * the field's descriptor (see {@link scala.tools.asm.Type + * Type}). + * @param signature + * the field's signature. + * @param value + * the field's initial value. This parameter, which may be + * null if the field does not have an initial value, + * must be an {@link Integer}, a {@link Float}, a {@link Long}, a + * {@link Double} or a {@link String}. */ - public FieldNode( - final int access, - final String name, - final String desc, - final String signature, - final Object value) - { + public FieldNode(final int access, final String name, final String desc, + final String signature, final Object value) { this(Opcodes.ASM4, access, name, desc, signature, value); } @@ -131,28 +131,28 @@ public class FieldNode extends FieldVisitor { * constructor. Instead, they must use the * {@link #FieldNode(int, int, String, String, String, Object)} version. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. - * @param access the field's access flags (see - * {@link org.objectweb.asm.Opcodes}). This parameter also indicates - * if the field is synthetic and/or deprecated. - * @param name the field's name. - * @param desc the field's descriptor (see {@link org.objectweb.asm.Type - * Type}). - * @param signature the field's signature. - * @param value the field's initial value. 
This parameter, which may be - * null if the field does not have an initial value, must be - * an {@link Integer}, a {@link Float}, a {@link Long}, a - * {@link Double} or a {@link String}. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. + * @param access + * the field's access flags (see + * {@link scala.tools.asm.Opcodes}). This parameter also + * indicates if the field is synthetic and/or deprecated. + * @param name + * the field's name. + * @param desc + * the field's descriptor (see {@link scala.tools.asm.Type + * Type}). + * @param signature + * the field's signature. + * @param value + * the field's initial value. This parameter, which may be + * null if the field does not have an initial value, + * must be an {@link Integer}, a {@link Float}, a {@link Long}, a + * {@link Double} or a {@link String}. */ - public FieldNode( - final int api, - final int access, - final String name, - final String desc, - final String signature, - final Object value) - { + public FieldNode(final int api, final int access, final String name, + final String desc, final String signature, final Object value) { super(api); this.access = access; this.name = name; @@ -166,10 +166,8 @@ public class FieldNode extends FieldVisitor { // ------------------------------------------------------------------------ @Override - public AnnotationVisitor visitAnnotation( - final String desc, - final boolean visible) - { + public AnnotationVisitor visitAnnotation(final String desc, + final boolean visible) { AnnotationNode an = new AnnotationNode(desc); if (visible) { if (visibleAnnotations == null) { @@ -207,7 +205,8 @@ public class FieldNode extends FieldVisitor { * contain elements that were introduced in more recent versions of the ASM * API than the given version. * - * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}. + * @param api + * an ASM API version. Must be one of {@link Opcodes#ASM4}. */ public void check(final int api) { // nothing to do @@ -216,7 +215,8 @@ public class FieldNode extends FieldVisitor { /** * Makes the given class visitor visit this field. * - * @param cv a class visitor. + * @param cv + * a class visitor. */ public void accept(final ClassVisitor cv) { FieldVisitor fv = cv.visitField(access, name, desc, signature, value); diff --git a/src/asm/scala/tools/asm/tree/FrameNode.java b/src/asm/scala/tools/asm/tree/FrameNode.java index 66825de0ac..f13fc66749 100644 --- a/src/asm/scala/tools/asm/tree/FrameNode.java +++ b/src/asm/scala/tools/asm/tree/FrameNode.java @@ -45,8 +45,9 @@ import scala.tools.asm.Opcodes; * the target of a jump instruction, or that starts an exception handler block. * The stack map frame types must describe the values of the local variables and * of the operand stack elements just before i is executed.
    - *
    (*) this is mandatory only for classes whose version is greater than or - * equal to {@link Opcodes#V1_6 V1_6}. + *
    + * (*) this is mandatory only for classes whose version is greater than or equal + * to {@link Opcodes#V1_6 V1_6}. * * @author Eric Bruneton */ @@ -83,48 +84,48 @@ public class FrameNode extends AbstractInsnNode { /** * Constructs a new {@link FrameNode}. * - * @param type the type of this frame. Must be {@link Opcodes#F_NEW} for - * expanded frames, or {@link Opcodes#F_FULL}, - * {@link Opcodes#F_APPEND}, {@link Opcodes#F_CHOP}, - * {@link Opcodes#F_SAME} or {@link Opcodes#F_APPEND}, - * {@link Opcodes#F_SAME1} for compressed frames. - * @param nLocal number of local variables of this stack map frame. - * @param local the types of the local variables of this stack map frame. - * Elements of this list can be Integer, String or LabelNode objects - * (for primitive, reference and uninitialized types respectively - - * see {@link MethodVisitor}). - * @param nStack number of operand stack elements of this stack map frame. - * @param stack the types of the operand stack elements of this stack map - * frame. Elements of this list can be Integer, String or LabelNode - * objects (for primitive, reference and uninitialized types - * respectively - see {@link MethodVisitor}). + * @param type + * the type of this frame. Must be {@link Opcodes#F_NEW} for + * expanded frames, or {@link Opcodes#F_FULL}, + * {@link Opcodes#F_APPEND}, {@link Opcodes#F_CHOP}, + * {@link Opcodes#F_SAME} or {@link Opcodes#F_APPEND}, + * {@link Opcodes#F_SAME1} for compressed frames. + * @param nLocal + * number of local variables of this stack map frame. + * @param local + * the types of the local variables of this stack map frame. + * Elements of this list can be Integer, String or LabelNode + * objects (for primitive, reference and uninitialized types + * respectively - see {@link MethodVisitor}). + * @param nStack + * number of operand stack elements of this stack map frame. + * @param stack + * the types of the operand stack elements of this stack map + * frame. Elements of this list can be Integer, String or + * LabelNode objects (for primitive, reference and uninitialized + * types respectively - see {@link MethodVisitor}). */ - public FrameNode( - final int type, - final int nLocal, - final Object[] local, - final int nStack, - final Object[] stack) - { + public FrameNode(final int type, final int nLocal, final Object[] local, + final int nStack, final Object[] stack) { super(-1); this.type = type; switch (type) { - case Opcodes.F_NEW: - case Opcodes.F_FULL: - this.local = asList(nLocal, local); - this.stack = asList(nStack, stack); - break; - case Opcodes.F_APPEND: - this.local = asList(nLocal, local); - break; - case Opcodes.F_CHOP: - this.local = Arrays.asList(new Object[nLocal]); - break; - case Opcodes.F_SAME: - break; - case Opcodes.F_SAME1: - this.stack = asList(1, stack); - break; + case Opcodes.F_NEW: + case Opcodes.F_FULL: + this.local = asList(nLocal, local); + this.stack = asList(nStack, stack); + break; + case Opcodes.F_APPEND: + this.local = asList(nLocal, local); + break; + case Opcodes.F_CHOP: + this.local = Arrays.asList(new Object[nLocal]); + break; + case Opcodes.F_SAME: + break; + case Opcodes.F_SAME1: + this.stack = asList(1, stack); + break; } } @@ -136,31 +137,29 @@ public class FrameNode extends AbstractInsnNode { /** * Makes the given visitor visit this stack map frame. * - * @param mv a method visitor. + * @param mv + * a method visitor. 
*/ @Override public void accept(final MethodVisitor mv) { switch (type) { - case Opcodes.F_NEW: - case Opcodes.F_FULL: - mv.visitFrame(type, - local.size(), - asArray(local), - stack.size(), - asArray(stack)); - break; - case Opcodes.F_APPEND: - mv.visitFrame(type, local.size(), asArray(local), 0, null); - break; - case Opcodes.F_CHOP: - mv.visitFrame(type, local.size(), null, 0, null); - break; - case Opcodes.F_SAME: - mv.visitFrame(type, 0, null, 0, null); - break; - case Opcodes.F_SAME1: - mv.visitFrame(type, 0, null, 1, asArray(stack)); - break; + case Opcodes.F_NEW: + case Opcodes.F_FULL: + mv.visitFrame(type, local.size(), asArray(local), stack.size(), + asArray(stack)); + break; + case Opcodes.F_APPEND: + mv.visitFrame(type, local.size(), asArray(local), 0, null); + break; + case Opcodes.F_CHOP: + mv.visitFrame(type, local.size(), null, 0, null); + break; + case Opcodes.F_SAME: + mv.visitFrame(type, 0, null, 0, null); + break; + case Opcodes.F_SAME1: + mv.visitFrame(type, 0, null, 1, asArray(stack)); + break; } } diff --git a/src/asm/scala/tools/asm/tree/IincInsnNode.java b/src/asm/scala/tools/asm/tree/IincInsnNode.java index 75ac40884d..f9adf2e38c 100644 --- a/src/asm/scala/tools/asm/tree/IincInsnNode.java +++ b/src/asm/scala/tools/asm/tree/IincInsnNode.java @@ -54,8 +54,10 @@ public class IincInsnNode extends AbstractInsnNode { /** * Constructs a new {@link IincInsnNode}. * - * @param var index of the local variable to be incremented. - * @param incr increment amount to increment the local variable by. + * @param var + * index of the local variable to be incremented. + * @param incr + * increment amount to increment the local variable by. */ public IincInsnNode(final int var, final int incr) { super(Opcodes.IINC); @@ -77,4 +79,4 @@ public class IincInsnNode extends AbstractInsnNode { public AbstractInsnNode clone(final Map labels) { return new IincInsnNode(var, incr); } -} \ No newline at end of file +} diff --git a/src/asm/scala/tools/asm/tree/InnerClassNode.java b/src/asm/scala/tools/asm/tree/InnerClassNode.java index 4579488921..aa3810c759 100644 --- a/src/asm/scala/tools/asm/tree/InnerClassNode.java +++ b/src/asm/scala/tools/asm/tree/InnerClassNode.java @@ -40,14 +40,14 @@ public class InnerClassNode { /** * The internal name of an inner class (see - * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). + * {@link scala.tools.asm.Type#getInternalName() getInternalName}). */ public String name; /** * The internal name of the class to which the inner class belongs (see - * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). May - * be null. + * {@link scala.tools.asm.Type#getInternalName() getInternalName}). May be + * null. */ public String outerName; @@ -66,24 +66,23 @@ public class InnerClassNode { /** * Constructs a new {@link InnerClassNode}. * - * @param name the internal name of an inner class (see - * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). - * @param outerName the internal name of the class to which the inner class - * belongs (see - * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). - * May be null. - * @param innerName the (simple) name of the inner class inside its - * enclosing class. May be null for anonymous inner - * classes. - * @param access the access flags of the inner class as originally declared - * in the enclosing class. + * @param name + * the internal name of an inner class (see + * {@link scala.tools.asm.Type#getInternalName() + * getInternalName}). 
+ * @param outerName + * the internal name of the class to which the inner class + * belongs (see {@link scala.tools.asm.Type#getInternalName() + * getInternalName}). May be null. + * @param innerName + * the (simple) name of the inner class inside its enclosing + * class. May be null for anonymous inner classes. + * @param access + * the access flags of the inner class as originally declared in + * the enclosing class. */ - public InnerClassNode( - final String name, - final String outerName, - final String innerName, - final int access) - { + public InnerClassNode(final String name, final String outerName, + final String innerName, final int access) { this.name = name; this.outerName = outerName; this.innerName = innerName; @@ -93,7 +92,8 @@ public class InnerClassNode { /** * Makes the given class visitor visit this inner class. * - * @param cv a class visitor. + * @param cv + * a class visitor. */ public void accept(final ClassVisitor cv) { cv.visitInnerClass(name, outerName, innerName, access); diff --git a/src/asm/scala/tools/asm/tree/InsnList.java b/src/asm/scala/tools/asm/tree/InsnList.java index dedd3bba73..55d83c2e8b 100644 --- a/src/asm/scala/tools/asm/tree/InsnList.java +++ b/src/asm/scala/tools/asm/tree/InsnList.java @@ -73,8 +73,8 @@ public class InsnList { /** * Returns the first instruction in this list. * - * @return the first instruction in this list, or null if the - * list is empty. + * @return the first instruction in this list, or null if the list + * is empty. */ public AbstractInsnNode getFirst() { return first; @@ -96,9 +96,11 @@ public class InsnList { * time it is called. Once the cache is built, this method run in constant * time. This cache is invalidated by all the methods that modify the list. * - * @param index the index of the instruction that must be returned. + * @param index + * the index of the instruction that must be returned. * @return the instruction whose index is given. - * @throws IndexOutOfBoundsException if (index < 0 || index >= size()). + * @throws IndexOutOfBoundsException + * if (index < 0 || index >= size()). */ public AbstractInsnNode get(final int index) { if (index < 0 || index >= size) { @@ -111,11 +113,12 @@ public class InsnList { } /** - * Returns true if the given instruction belongs to this list. - * This method always scans the instructions of this list until it finds the + * Returns true if the given instruction belongs to this list. This + * method always scans the instructions of this list until it finds the * given instruction or reaches the end of the list. * - * @param insn an instruction. + * @param insn + * an instruction. * @return true if the given instruction belongs to this list. */ public boolean contains(final AbstractInsnNode insn) { @@ -133,7 +136,8 @@ public class InsnList { * constant time. The cache is invalidated by all the methods that modify * the list. * - * @param insn an instruction of this list. + * @param insn + * an instruction of this list. * @return the index of the given instruction in this list. The result of * this method is undefined if the given instruction does not belong * to this list. Use {@link #contains contains} to test if an @@ -149,7 +153,8 @@ public class InsnList { /** * Makes the given visitor visit all of the instructions in this list. * - * @param mv the method visitor that must visit the instructions. + * @param mv + * the method visitor that must visit the instructions. 
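For readers skimming this reformatting patch, a small stand-alone sketch of how the tree classes touched above (ClassNode, FieldNode, InnerClassNode) are typically used together may help. It is illustrative only, not part of the patch; it assumes the repackaged scala.tools.asm API behaves like upstream ASM 4, and the class being read ("java.lang.Integer") and the demo class name are arbitrary.

    import java.io.IOException;
    import scala.tools.asm.ClassReader;
    import scala.tools.asm.tree.ClassNode;
    import scala.tools.asm.tree.FieldNode;
    import scala.tools.asm.tree.InnerClassNode;

    public class ListFields {
        public static void main(String[] args) throws IOException {
            // Parse a class from the classpath into its tree representation.
            ClassNode cn = new ClassNode();
            new ClassReader("java.lang.Integer").accept(cn, ClassReader.SKIP_CODE);

            // The tree API exposes the class contents as plain lists.
            for (Object o : cn.fields) {
                FieldNode fn = (FieldNode) o;
                System.out.println("field: " + fn.name + " " + fn.desc);
            }
            for (Object o : cn.innerClasses) {
                InnerClassNode icn = (InnerClassNode) o;
                System.out.println("inner class: " + icn.name);
            }
        }
    }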
*/ public void accept(final MethodVisitor mv) { AbstractInsnNode insn = first; @@ -198,9 +203,11 @@ public class InsnList { /** * Replaces an instruction of this list with another instruction. * - * @param location an instruction of this list. - * @param insn another instruction, which must not belong to any - * {@link InsnList}. + * @param location + * an instruction of this list. + * @param insn + * another instruction, which must not belong to any + * {@link InsnList}. */ public void set(final AbstractInsnNode location, final AbstractInsnNode insn) { AbstractInsnNode next = location.next; @@ -232,8 +239,9 @@ public class InsnList { /** * Adds the given instruction to the end of this list. * - * @param insn an instruction, which must not belong to any - * {@link InsnList}. + * @param insn + * an instruction, which must not belong to any + * {@link InsnList}. */ public void add(final AbstractInsnNode insn) { ++size; @@ -252,8 +260,9 @@ public class InsnList { /** * Adds the given instructions to the end of this list. * - * @param insns an instruction list, which is cleared during the process. - * This list must be different from 'this'. + * @param insns + * an instruction list, which is cleared during the process. This + * list must be different from 'this'. */ public void add(final InsnList insns) { if (insns.size == 0) { @@ -276,8 +285,9 @@ public class InsnList { /** * Inserts the given instruction at the begining of this list. * - * @param insn an instruction, which must not belong to any - * {@link InsnList}. + * @param insn + * an instruction, which must not belong to any + * {@link InsnList}. */ public void insert(final AbstractInsnNode insn) { ++size; @@ -296,8 +306,9 @@ public class InsnList { /** * Inserts the given instructions at the begining of this list. * - * @param insns an instruction list, which is cleared during the process. - * This list must be different from 'this'. + * @param insns + * an instruction list, which is cleared during the process. This + * list must be different from 'this'. */ public void insert(final InsnList insns) { if (insns.size == 0) { @@ -320,12 +331,15 @@ public class InsnList { /** * Inserts the given instruction after the specified instruction. * - * @param location an instruction of this list after which insn must be - * inserted. - * @param insn the instruction to be inserted, which must not belong to - * any {@link InsnList}. + * @param location + * an instruction of this list after which insn must be + * inserted. + * @param insn + * the instruction to be inserted, which must not belong to + * any {@link InsnList}. */ - public void insert(final AbstractInsnNode location, final AbstractInsnNode insn) { + public void insert(final AbstractInsnNode location, + final AbstractInsnNode insn) { ++size; AbstractInsnNode next = location.next; if (next == null) { @@ -343,10 +357,12 @@ public class InsnList { /** * Inserts the given instructions after the specified instruction. * - * @param location an instruction of this list after which the - * instructions must be inserted. - * @param insns the instruction list to be inserted, which is cleared during - * the process. This list must be different from 'this'. + * @param location + * an instruction of this list after which the + * instructions must be inserted. + * @param insns + * the instruction list to be inserted, which is cleared during + * the process. This list must be different from 'this'. 
*/ public void insert(final AbstractInsnNode location, final InsnList insns) { if (insns.size == 0) { @@ -371,12 +387,15 @@ public class InsnList { /** * Inserts the given instruction before the specified instruction. * - * @param location an instruction of this list before which insn must be - * inserted. - * @param insn the instruction to be inserted, which must not belong to - * any {@link InsnList}. + * @param location + * an instruction of this list before which insn must be + * inserted. + * @param insn + * the instruction to be inserted, which must not belong to + * any {@link InsnList}. */ - public void insertBefore(final AbstractInsnNode location, final AbstractInsnNode insn) { + public void insertBefore(final AbstractInsnNode location, + final AbstractInsnNode insn) { ++size; AbstractInsnNode prev = location.prev; if (prev == null) { @@ -394,37 +413,39 @@ public class InsnList { /** * Inserts the given instructions before the specified instruction. * - * @param location an instruction of this list before which the instructions - * must be inserted. - * @param insns the instruction list to be inserted, which is cleared during - * the process. This list must be different from 'this'. + * @param location + * an instruction of this list before which the + * instructions must be inserted. + * @param insns + * the instruction list to be inserted, which is cleared during + * the process. This list must be different from 'this'. */ - public void insertBefore(final AbstractInsnNode location, final InsnList insns) { + public void insertBefore(final AbstractInsnNode location, + final InsnList insns) { if (insns.size == 0) { return; } size += insns.size; AbstractInsnNode ifirst = insns.first; AbstractInsnNode ilast = insns.last; - AbstractInsnNode prev = location .prev; + AbstractInsnNode prev = location.prev; if (prev == null) { first = ifirst; } else { prev.next = ifirst; } - location .prev = ilast; - ilast.next = location ; + location.prev = ilast; + ilast.next = location; ifirst.prev = prev; cache = null; insns.removeAll(false); } - - /** * Removes the given instruction from this list. * - * @param insn the instruction of this list that must be removed. + * @param insn + * the instruction of this list that must be removed. */ public void remove(final AbstractInsnNode insn) { --size; @@ -456,8 +477,9 @@ public class InsnList { /** * Removes all of the instructions of this list. * - * @param mark if the instructions must be marked as no longer belonging to - * any {@link InsnList}. + * @param mark + * if the instructions must be marked as no longer belonging to + * any {@link InsnList}. */ void removeAll(final boolean mark) { if (mark) { @@ -499,14 +521,14 @@ public class InsnList { } // this class is not generified because it will create bridges - private final class InsnListIterator implements ListIterator/**/ { + private final class InsnListIterator implements ListIterator { AbstractInsnNode next; AbstractInsnNode prev; InsnListIterator(int index) { - if(index==size()) { + if (index == size()) { next = null; prev = getLast(); } else { diff --git a/src/asm/scala/tools/asm/tree/InsnNode.java b/src/asm/scala/tools/asm/tree/InsnNode.java index d4664d23c2..4d5288cafa 100644 --- a/src/asm/scala/tools/asm/tree/InsnNode.java +++ b/src/asm/scala/tools/asm/tree/InsnNode.java @@ -43,20 +43,22 @@ public class InsnNode extends AbstractInsnNode { /** * Constructs a new {@link InsnNode}. * - * @param opcode the opcode of the instruction to be constructed. 
This - * opcode must be NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, - * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1, - * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD, - * FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE, - * FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2, - * DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD, - * FADD, DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV, - * LDIV, FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG, - * ISHL, LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, - * LXOR, I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, - * I2B, I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, - * FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, - * MONITORENTER, or MONITOREXIT. + * @param opcode + * the opcode of the instruction to be constructed. This opcode + * must be NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, + * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1, + * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, + * LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, + * IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, + * SASTORE, POP, POP2, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, + * DUP2_X2, SWAP, IADD, LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB, + * IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, FDIV, DDIV, IREM, LREM, + * FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL, LSHL, ISHR, LSHR, + * IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, I2L, I2F, I2D, + * L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, I2C, I2S, + * LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, FRETURN, + * DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, MONITORENTER, + * or MONITOREXIT. */ public InsnNode(final int opcode) { super(opcode); @@ -70,7 +72,8 @@ public class InsnNode extends AbstractInsnNode { /** * Makes the given visitor visit this instruction. * - * @param mv a method visitor. + * @param mv + * a method visitor. */ @Override public void accept(final MethodVisitor mv) { diff --git a/src/asm/scala/tools/asm/tree/IntInsnNode.java b/src/asm/scala/tools/asm/tree/IntInsnNode.java index b61270c786..e0aeed4bc8 100644 --- a/src/asm/scala/tools/asm/tree/IntInsnNode.java +++ b/src/asm/scala/tools/asm/tree/IntInsnNode.java @@ -48,9 +48,11 @@ public class IntInsnNode extends AbstractInsnNode { /** * Constructs a new {@link IntInsnNode}. * - * @param opcode the opcode of the instruction to be constructed. This - * opcode must be BIPUSH, SIPUSH or NEWARRAY. - * @param operand the operand of the instruction to be constructed. + * @param opcode + * the opcode of the instruction to be constructed. This opcode + * must be BIPUSH, SIPUSH or NEWARRAY. + * @param operand + * the operand of the instruction to be constructed. */ public IntInsnNode(final int opcode, final int operand) { super(opcode); @@ -60,8 +62,9 @@ public class IntInsnNode extends AbstractInsnNode { /** * Sets the opcode of this instruction. * - * @param opcode the new instruction opcode. This opcode must be BIPUSH, - * SIPUSH or NEWARRAY. + * @param opcode + * the new instruction opcode. This opcode must be BIPUSH, SIPUSH + * or NEWARRAY. 
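To make the InsnList operations reformatted above concrete, here is a minimal usage sketch (illustrative only, not part of the patch); it assumes the repackaged scala.tools.asm API matches upstream ASM 4, and the demo class name is arbitrary.

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.InsnList;
    import scala.tools.asm.tree.InsnNode;
    import scala.tools.asm.tree.IntInsnNode;

    public class InsnListDemo {
        public static void main(String[] args) {
            // Roughly the body of "static int f() { return 42; }".
            InsnList insns = new InsnList();
            insns.add(new IntInsnNode(Opcodes.BIPUSH, 42));
            insns.add(new InsnNode(Opcodes.IRETURN));

            // Nodes can also be inserted relative to existing instructions,
            // here a NOP placed just before the final IRETURN.
            insns.insertBefore(insns.getLast(), new InsnNode(Opcodes.NOP));

            System.out.println(insns.size()); // prints 3
        }
    }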
*/ public void setOpcode(final int opcode) { this.opcode = opcode; diff --git a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java index d993b5a054..7ee84b875b 100644 --- a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java +++ b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java @@ -65,17 +65,17 @@ public class InvokeDynamicInsnNode extends AbstractInsnNode { /** * Constructs a new {@link InvokeDynamicInsnNode}. * - * @param name invokedynamic name. - * @param desc invokedynamic descriptor (see {@link org.objectweb.asm.Type}). - * @param bsm the bootstrap method. - * @param bsmArgs the boostrap constant arguments. + * @param name + * invokedynamic name. + * @param desc + * invokedynamic descriptor (see {@link scala.tools.asm.Type}). + * @param bsm + * the bootstrap method. + * @param bsmArgs + * the boostrap constant arguments. */ - public InvokeDynamicInsnNode( - final String name, - final String desc, - final Handle bsm, - final Object... bsmArgs) - { + public InvokeDynamicInsnNode(final String name, final String desc, + final Handle bsm, final Object... bsmArgs) { super(Opcodes.INVOKEDYNAMIC); this.name = name; this.desc = desc; @@ -97,4 +97,4 @@ public class InvokeDynamicInsnNode extends AbstractInsnNode { public AbstractInsnNode clone(final Map labels) { return new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs); } -} \ No newline at end of file +} diff --git a/src/asm/scala/tools/asm/tree/JumpInsnNode.java b/src/asm/scala/tools/asm/tree/JumpInsnNode.java index 339ebbd2d0..81e1e09deb 100644 --- a/src/asm/scala/tools/asm/tree/JumpInsnNode.java +++ b/src/asm/scala/tools/asm/tree/JumpInsnNode.java @@ -50,13 +50,15 @@ public class JumpInsnNode extends AbstractInsnNode { /** * Constructs a new {@link JumpInsnNode}. * - * @param opcode the opcode of the type instruction to be constructed. This - * opcode must be IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, - * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, - * IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL. - * @param label the operand of the instruction to be constructed. This - * operand is a label that designates the instruction to which the - * jump instruction may jump. + * @param opcode + * the opcode of the type instruction to be constructed. This + * opcode must be IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, + * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, + * IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL. + * @param label + * the operand of the instruction to be constructed. This operand + * is a label that designates the instruction to which the jump + * instruction may jump. */ public JumpInsnNode(final int opcode, final LabelNode label) { super(opcode); @@ -66,10 +68,11 @@ public class JumpInsnNode extends AbstractInsnNode { /** * Sets the opcode of this instruction. * - * @param opcode the new instruction opcode. This opcode must be IFEQ, IFNE, - * IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT, - * IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, - * IFNULL or IFNONNULL. + * @param opcode + * the new instruction opcode. This opcode must be IFEQ, IFNE, + * IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT, + * IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, IF_ACMPNE, GOTO, + * JSR, IFNULL or IFNONNULL. 
*/ public void setOpcode(final int opcode) { this.opcode = opcode; diff --git a/src/asm/scala/tools/asm/tree/LabelNode.java b/src/asm/scala/tools/asm/tree/LabelNode.java index 523a8d6442..44c48c1160 100644 --- a/src/asm/scala/tools/asm/tree/LabelNode.java +++ b/src/asm/scala/tools/asm/tree/LabelNode.java @@ -75,4 +75,4 @@ public class LabelNode extends AbstractInsnNode { public void resetLabel() { label = null; } -} \ No newline at end of file +} diff --git a/src/asm/scala/tools/asm/tree/LdcInsnNode.java b/src/asm/scala/tools/asm/tree/LdcInsnNode.java index f8d115acd5..4e328f9b39 100644 --- a/src/asm/scala/tools/asm/tree/LdcInsnNode.java +++ b/src/asm/scala/tools/asm/tree/LdcInsnNode.java @@ -44,16 +44,17 @@ public class LdcInsnNode extends AbstractInsnNode { /** * The constant to be loaded on the stack. This parameter must be a non null * {@link Integer}, a {@link Float}, a {@link Long}, a {@link Double}, a - * {@link String} or a {@link org.objectweb.asm.Type}. + * {@link String} or a {@link scala.tools.asm.Type}. */ public Object cst; /** * Constructs a new {@link LdcInsnNode}. * - * @param cst the constant to be loaded on the stack. This parameter must be - * a non null {@link Integer}, a {@link Float}, a {@link Long}, a - * {@link Double} or a {@link String}. + * @param cst + * the constant to be loaded on the stack. This parameter must be + * a non null {@link Integer}, a {@link Float}, a {@link Long}, a + * {@link Double} or a {@link String}. */ public LdcInsnNode(final Object cst) { super(Opcodes.LDC); @@ -74,4 +75,4 @@ public class LdcInsnNode extends AbstractInsnNode { public AbstractInsnNode clone(final Map labels) { return new LdcInsnNode(cst); } -} \ No newline at end of file +} diff --git a/src/asm/scala/tools/asm/tree/LineNumberNode.java b/src/asm/scala/tools/asm/tree/LineNumberNode.java index acc83c8d30..9947aa70a9 100644 --- a/src/asm/scala/tools/asm/tree/LineNumberNode.java +++ b/src/asm/scala/tools/asm/tree/LineNumberNode.java @@ -55,9 +55,11 @@ public class LineNumberNode extends AbstractInsnNode { /** * Constructs a new {@link LineNumberNode}. * - * @param line a line number. This number refers to the source file from - * which the class was compiled. - * @param start the first instruction corresponding to this line number. + * @param line + * a line number. This number refers to the source file from + * which the class was compiled. + * @param start + * the first instruction corresponding to this line number. */ public LineNumberNode(final int line, final LabelNode start) { super(-1); diff --git a/src/asm/scala/tools/asm/tree/LocalVariableNode.java b/src/asm/scala/tools/asm/tree/LocalVariableNode.java index 51cbd3ca00..0d8e27356f 100644 --- a/src/asm/scala/tools/asm/tree/LocalVariableNode.java +++ b/src/asm/scala/tools/asm/tree/LocalVariableNode.java @@ -73,24 +73,24 @@ public class LocalVariableNode { /** * Constructs a new {@link LocalVariableNode}. * - * @param name the name of a local variable. - * @param desc the type descriptor of this local variable. - * @param signature the signature of this local variable. May be - * null. - * @param start the first instruction corresponding to the scope of this - * local variable (inclusive). - * @param end the last instruction corresponding to the scope of this local - * variable (exclusive). - * @param index the local variable's index. + * @param name + * the name of a local variable. + * @param desc + * the type descriptor of this local variable. + * @param signature + * the signature of this local variable. May be null. 
+ * @param start + * the first instruction corresponding to the scope of this local + * variable (inclusive). + * @param end + * the last instruction corresponding to the scope of this local + * variable (exclusive). + * @param index + * the local variable's index. */ - public LocalVariableNode( - final String name, - final String desc, - final String signature, - final LabelNode start, - final LabelNode end, - final int index) - { + public LocalVariableNode(final String name, final String desc, + final String signature, final LabelNode start, final LabelNode end, + final int index) { this.name = name; this.desc = desc; this.signature = signature; @@ -102,14 +102,11 @@ public class LocalVariableNode { /** * Makes the given visitor visit this local variable declaration. * - * @param mv a method visitor. + * @param mv + * a method visitor. */ public void accept(final MethodVisitor mv) { - mv.visitLocalVariable(name, - desc, - signature, - start.getLabel(), - end.getLabel(), - index); + mv.visitLocalVariable(name, desc, signature, start.getLabel(), + end.getLabel(), index); } } diff --git a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java index 6d0f971c29..d2479b4814 100644 --- a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java +++ b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java @@ -64,20 +64,21 @@ public class LookupSwitchInsnNode extends AbstractInsnNode { /** * Constructs a new {@link LookupSwitchInsnNode}. * - * @param dflt beginning of the default handler block. - * @param keys the values of the keys. - * @param labels beginnings of the handler blocks. labels[i] is - * the beginning of the handler block for the keys[i] key. + * @param dflt + * beginning of the default handler block. + * @param keys + * the values of the keys. + * @param labels + * beginnings of the handler blocks. labels[i] is the + * beginning of the handler block for the keys[i] key. */ - public LookupSwitchInsnNode( - final LabelNode dflt, - final int[] keys, - final LabelNode[] labels) - { + public LookupSwitchInsnNode(final LabelNode dflt, final int[] keys, + final LabelNode[] labels) { super(Opcodes.LOOKUPSWITCH); this.dflt = dflt; this.keys = new ArrayList(keys == null ? 0 : keys.length); - this.labels = new ArrayList(labels == null ? 0 : labels.length); + this.labels = new ArrayList(labels == null ? 0 + : labels.length); if (keys != null) { for (int i = 0; i < keys.length; ++i) { this.keys.add(new Integer(keys[i])); diff --git a/src/asm/scala/tools/asm/tree/MethodInsnNode.java b/src/asm/scala/tools/asm/tree/MethodInsnNode.java index c3036bc6b4..bf09f556d8 100644 --- a/src/asm/scala/tools/asm/tree/MethodInsnNode.java +++ b/src/asm/scala/tools/asm/tree/MethodInsnNode.java @@ -43,7 +43,7 @@ public class MethodInsnNode extends AbstractInsnNode { /** * The internal name of the method's owner class (see - * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). + * {@link scala.tools.asm.Type#getInternalName() getInternalName}). */ public String owner; @@ -53,27 +53,28 @@ public class MethodInsnNode extends AbstractInsnNode { public String name; /** - * The method's descriptor (see {@link org.objectweb.asm.Type}). + * The method's descriptor (see {@link scala.tools.asm.Type}). */ public String desc; /** * Constructs a new {@link MethodInsnNode}. * - * @param opcode the opcode of the type instruction to be constructed. This - * opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or - * INVOKEINTERFACE. 
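The LookupSwitchInsnNode constructor reformatted above takes parallel key and handler arrays; a short sketch follows (illustrative only, not part of the patch; keys and class name are invented, and it assumes the repackaged scala.tools.asm API mirrors upstream ASM 4).

    import scala.tools.asm.tree.LabelNode;
    import scala.tools.asm.tree.LookupSwitchInsnNode;

    public class SwitchDemo {
        public static LookupSwitchInsnNode threeWaySwitch() {
            // One handler label per key, plus a default handler; keys are kept sorted.
            LabelNode dflt = new LabelNode();
            LabelNode[] handlers = { new LabelNode(), new LabelNode(), new LabelNode() };
            int[] keys = { 10, 20, 30 };

            // The constructor copies the keys and labels into the node's lists.
            return new LookupSwitchInsnNode(dflt, keys, handlers);
        }
    }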
- * @param owner the internal name of the method's owner class (see - * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). - * @param name the method's name. - * @param desc the method's descriptor (see {@link org.objectweb.asm.Type}). + * @param opcode + * the opcode of the type instruction to be constructed. This + * opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or + * INVOKEINTERFACE. + * @param owner + * the internal name of the method's owner class (see + * {@link scala.tools.asm.Type#getInternalName() + * getInternalName}). + * @param name + * the method's name. + * @param desc + * the method's descriptor (see {@link scala.tools.asm.Type}). */ - public MethodInsnNode( - final int opcode, - final String owner, - final String name, - final String desc) - { + public MethodInsnNode(final int opcode, final String owner, + final String name, final String desc) { super(opcode); this.owner = owner; this.name = name; @@ -83,8 +84,9 @@ public class MethodInsnNode extends AbstractInsnNode { /** * Sets the opcode of this instruction. * - * @param opcode the new instruction opcode. This opcode must be - * INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or INVOKEINTERFACE. + * @param opcode + * the new instruction opcode. This opcode must be INVOKEVIRTUAL, + * INVOKESPECIAL, INVOKESTATIC or INVOKEINTERFACE. */ public void setOpcode(final int opcode) { this.opcode = opcode; @@ -104,4 +106,4 @@ public class MethodInsnNode extends AbstractInsnNode { public AbstractInsnNode clone(final Map labels) { return new MethodInsnNode(opcode, owner, name, desc); } -} \ No newline at end of file +} diff --git a/src/asm/scala/tools/asm/tree/MethodNode.java b/src/asm/scala/tools/asm/tree/MethodNode.java index 70ec39e058..5f9c778e0c 100644 --- a/src/asm/scala/tools/asm/tree/MethodNode.java +++ b/src/asm/scala/tools/asm/tree/MethodNode.java @@ -81,7 +81,7 @@ public class MethodNode extends MethodVisitor { * The runtime visible annotations of this method. This list is a list of * {@link AnnotationNode} objects. May be null. * - * @associates org.objectweb.asm.tree.AnnotationNode + * @associates scala.tools.asm.tree.AnnotationNode * @label visible */ public List visibleAnnotations; @@ -90,7 +90,7 @@ public class MethodNode extends MethodVisitor { * The runtime invisible annotations of this method. This list is a list of * {@link AnnotationNode} objects. May be null. * - * @associates org.objectweb.asm.tree.AnnotationNode + * @associates scala.tools.asm.tree.AnnotationNode * @label invisible */ public List invisibleAnnotations; @@ -99,7 +99,7 @@ public class MethodNode extends MethodVisitor { * The non standard attributes of this method. This list is a list of * {@link Attribute} objects. May be null. * - * @associates org.objectweb.asm.Attribute + * @associates scala.tools.asm.Attribute */ public List attrs; @@ -117,7 +117,7 @@ public class MethodNode extends MethodVisitor { * The runtime visible parameter annotations of this method. These lists are * lists of {@link AnnotationNode} objects. May be null. * - * @associates org.objectweb.asm.tree.AnnotationNode + * @associates scala.tools.asm.tree.AnnotationNode * @label invisible parameters */ public List[] visibleParameterAnnotations; @@ -126,7 +126,7 @@ public class MethodNode extends MethodVisitor { * The runtime invisible parameter annotations of this method. These lists * are lists of {@link AnnotationNode} objects. May be null. 
* - * @associates org.objectweb.asm.tree.AnnotationNode + * @associates scala.tools.asm.tree.AnnotationNode * @label visible parameters */ public List[] invisibleParameterAnnotations; @@ -135,7 +135,7 @@ public class MethodNode extends MethodVisitor { * The instructions of this method. This list is a list of * {@link AbstractInsnNode} objects. * - * @associates org.objectweb.asm.tree.AbstractInsnNode + * @associates scala.tools.asm.tree.AbstractInsnNode * @label instructions */ public InsnList instructions; @@ -144,7 +144,7 @@ public class MethodNode extends MethodVisitor { * The try catch blocks of this method. This list is a list of * {@link TryCatchBlockNode} objects. * - * @associates org.objectweb.asm.tree.TryCatchBlockNode + * @associates scala.tools.asm.tree.TryCatchBlockNode */ public List tryCatchBlocks; @@ -162,7 +162,7 @@ public class MethodNode extends MethodVisitor { * The local variables of this method. This list is a list of * {@link LocalVariableNode} objects. May be null * - * @associates org.objectweb.asm.tree.LocalVariableNode + * @associates scala.tools.asm.tree.LocalVariableNode */ public List localVariables; @@ -170,7 +170,7 @@ public class MethodNode extends MethodVisitor { * If the accept method has been called on this object. */ private boolean visited; - + /** * Constructs an uninitialized {@link MethodNode}. Subclasses must not * use this constructor. Instead, they must use the @@ -183,8 +183,9 @@ public class MethodNode extends MethodVisitor { /** * Constructs an uninitialized {@link MethodNode}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. */ public MethodNode(final int api) { super(api); @@ -196,56 +197,55 @@ public class MethodNode extends MethodVisitor { * constructor. Instead, they must use the * {@link #MethodNode(int, int, String, String, String, String[])} version. * - * @param access the method's access flags (see {@link Opcodes}). This - * parameter also indicates if the method is synthetic and/or - * deprecated. - * @param name the method's name. - * @param desc the method's descriptor (see {@link Type}). - * @param signature the method's signature. May be null. - * @param exceptions the internal names of the method's exception classes - * (see {@link Type#getInternalName() getInternalName}). May be - * null. + * @param access + * the method's access flags (see {@link Opcodes}). This + * parameter also indicates if the method is synthetic and/or + * deprecated. + * @param name + * the method's name. + * @param desc + * the method's descriptor (see {@link Type}). + * @param signature + * the method's signature. May be null. + * @param exceptions + * the internal names of the method's exception classes (see + * {@link Type#getInternalName() getInternalName}). May be + * null. */ - public MethodNode( - final int access, - final String name, - final String desc, - final String signature, - final String[] exceptions) - { + public MethodNode(final int access, final String name, final String desc, + final String signature, final String[] exceptions) { this(Opcodes.ASM4, access, name, desc, signature, exceptions); } /** * Constructs a new {@link MethodNode}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. - * @param access the method's access flags (see {@link Opcodes}). 
This - * parameter also indicates if the method is synthetic and/or - * deprecated. - * @param name the method's name. - * @param desc the method's descriptor (see {@link Type}). - * @param signature the method's signature. May be null. - * @param exceptions the internal names of the method's exception classes - * (see {@link Type#getInternalName() getInternalName}). May be - * null. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. + * @param access + * the method's access flags (see {@link Opcodes}). This + * parameter also indicates if the method is synthetic and/or + * deprecated. + * @param name + * the method's name. + * @param desc + * the method's descriptor (see {@link Type}). + * @param signature + * the method's signature. May be null. + * @param exceptions + * the internal names of the method's exception classes (see + * {@link Type#getInternalName() getInternalName}). May be + * null. */ - public MethodNode( - final int api, - final int access, - final String name, - final String desc, - final String signature, - final String[] exceptions) - { + public MethodNode(final int api, final int access, final String name, + final String desc, final String signature, final String[] exceptions) { super(api); this.access = access; this.name = name; this.desc = desc; this.signature = signature; - this.exceptions = new ArrayList(exceptions == null - ? 0 + this.exceptions = new ArrayList(exceptions == null ? 0 : exceptions.length); boolean isAbstract = (access & Opcodes.ACC_ABSTRACT) != 0; if (!isAbstract) { @@ -274,10 +274,8 @@ public class MethodNode extends MethodVisitor { } @Override - public AnnotationVisitor visitAnnotation( - final String desc, - final boolean visible) - { + public AnnotationVisitor visitAnnotation(final String desc, + final boolean visible) { AnnotationNode an = new AnnotationNode(desc); if (visible) { if (visibleAnnotations == null) { @@ -294,28 +292,27 @@ public class MethodNode extends MethodVisitor { } @Override - public AnnotationVisitor visitParameterAnnotation( - final int parameter, - final String desc, - final boolean visible) - { + public AnnotationVisitor visitParameterAnnotation(final int parameter, + final String desc, final boolean visible) { AnnotationNode an = new AnnotationNode(desc); if (visible) { if (visibleParameterAnnotations == null) { int params = Type.getArgumentTypes(this.desc).length; - visibleParameterAnnotations = (List[])new List[params]; + visibleParameterAnnotations = (List[]) new List[params]; } if (visibleParameterAnnotations[parameter] == null) { - visibleParameterAnnotations[parameter] = new ArrayList(1); + visibleParameterAnnotations[parameter] = new ArrayList( + 1); } visibleParameterAnnotations[parameter].add(an); } else { if (invisibleParameterAnnotations == null) { int params = Type.getArgumentTypes(this.desc).length; - invisibleParameterAnnotations = (List[])new List[params]; + invisibleParameterAnnotations = (List[]) new List[params]; } if (invisibleParameterAnnotations[parameter] == null) { - invisibleParameterAnnotations[parameter] = new ArrayList(1); + invisibleParameterAnnotations[parameter] = new ArrayList( + 1); } invisibleParameterAnnotations[parameter].add(an); } @@ -335,17 +332,10 @@ public class MethodNode extends MethodVisitor { } @Override - public void visitFrame( - final int type, - final int nLocal, - final Object[] local, - final int nStack, - final Object[] stack) - { - instructions.add(new FrameNode(type, nLocal, local == null - ? 
null - : getLabelNodes(local), nStack, stack == null - ? null + public void visitFrame(final int type, final int nLocal, + final Object[] local, final int nStack, final Object[] stack) { + instructions.add(new FrameNode(type, nLocal, local == null ? null + : getLabelNodes(local), nStack, stack == null ? null : getLabelNodes(stack))); } @@ -370,32 +360,20 @@ public class MethodNode extends MethodVisitor { } @Override - public void visitFieldInsn( - final int opcode, - final String owner, - final String name, - final String desc) - { + public void visitFieldInsn(final int opcode, final String owner, + final String name, final String desc) { instructions.add(new FieldInsnNode(opcode, owner, name, desc)); } @Override - public void visitMethodInsn( - final int opcode, - final String owner, - final String name, - final String desc) - { + public void visitMethodInsn(final int opcode, final String owner, + final String name, final String desc) { instructions.add(new MethodInsnNode(opcode, owner, name, desc)); } @Override - public void visitInvokeDynamicInsn( - String name, - String desc, - Handle bsm, - Object... bsmArgs) - { + public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, + Object... bsmArgs) { instructions.add(new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs)); } @@ -420,26 +398,16 @@ public class MethodNode extends MethodVisitor { } @Override - public void visitTableSwitchInsn( - final int min, - final int max, - final Label dflt, - final Label... labels) - { - instructions.add(new TableSwitchInsnNode(min, - max, - getLabelNode(dflt), + public void visitTableSwitchInsn(final int min, final int max, + final Label dflt, final Label... labels) { + instructions.add(new TableSwitchInsnNode(min, max, getLabelNode(dflt), getLabelNodes(labels))); } @Override - public void visitLookupSwitchInsn( - final Label dflt, - final int[] keys, - final Label[] labels) - { - instructions.add(new LookupSwitchInsnNode(getLabelNode(dflt), - keys, + public void visitLookupSwitchInsn(final Label dflt, final int[] keys, + final Label[] labels) { + instructions.add(new LookupSwitchInsnNode(getLabelNode(dflt), keys, getLabelNodes(labels))); } @@ -449,33 +417,18 @@ public class MethodNode extends MethodVisitor { } @Override - public void visitTryCatchBlock( - final Label start, - final Label end, - final Label handler, - final String type) - { + public void visitTryCatchBlock(final Label start, final Label end, + final Label handler, final String type) { tryCatchBlocks.add(new TryCatchBlockNode(getLabelNode(start), - getLabelNode(end), - getLabelNode(handler), - type)); + getLabelNode(end), getLabelNode(handler), type)); } @Override - public void visitLocalVariable( - final String name, - final String desc, - final String signature, - final Label start, - final Label end, - final int index) - { - localVariables.add(new LocalVariableNode(name, - desc, - signature, - getLabelNode(start), - getLabelNode(end), - index)); + public void visitLocalVariable(final String name, final String desc, + final String signature, final Label start, final Label end, + final int index) { + localVariables.add(new LocalVariableNode(name, desc, signature, + getLabelNode(start), getLabelNode(end), index)); } @Override @@ -499,12 +452,13 @@ public class MethodNode extends MethodVisitor { * the {@link Label#info} field to store associations between labels and * label nodes. * - * @param l a Label. + * @param l + * a Label. * @return the LabelNode corresponding to l. 
*/ protected LabelNode getLabelNode(final Label l) { if (!(l.info instanceof LabelNode)) { - l.info = new LabelNode(l); + l.info = new LabelNode(); } return (LabelNode) l.info; } @@ -539,7 +493,8 @@ public class MethodNode extends MethodVisitor { * recursively, do not contain elements that were introduced in more recent * versions of the ASM API than the given version. * - * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}. + * @param api + * an ASM API version. Must be one of {@link Opcodes#ASM4}. */ public void check(final int api) { // nothing to do @@ -548,15 +503,13 @@ public class MethodNode extends MethodVisitor { /** * Makes the given class visitor visit this method. * - * @param cv a class visitor. + * @param cv + * a class visitor. */ public void accept(final ClassVisitor cv) { String[] exceptions = new String[this.exceptions.size()]; this.exceptions.toArray(exceptions); - MethodVisitor mv = cv.visitMethod(access, - name, - desc, - signature, + MethodVisitor mv = cv.visitMethod(access, name, desc, signature, exceptions); if (mv != null) { accept(mv); @@ -566,7 +519,8 @@ public class MethodNode extends MethodVisitor { /** * Makes the given method visitor visit this method. * - * @param mv a method visitor. + * @param mv + * a method visitor. */ public void accept(final MethodVisitor mv) { // visits the method attributes @@ -588,8 +542,7 @@ public class MethodNode extends MethodVisitor { AnnotationNode an = invisibleAnnotations.get(i); an.accept(mv.visitAnnotation(an.desc, false)); } - n = visibleParameterAnnotations == null - ? 0 + n = visibleParameterAnnotations == null ? 0 : visibleParameterAnnotations.length; for (i = 0; i < n; ++i) { List l = visibleParameterAnnotations[i]; @@ -601,8 +554,7 @@ public class MethodNode extends MethodVisitor { an.accept(mv.visitParameterAnnotation(i, an.desc, true)); } } - n = invisibleParameterAnnotations == null - ? 0 + n = invisibleParameterAnnotations == null ? 0 : invisibleParameterAnnotations.length; for (i = 0; i < n; ++i) { List l = invisibleParameterAnnotations[i]; diff --git a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java index 9dfba77335..fe5e8832b3 100644 --- a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java +++ b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java @@ -42,7 +42,7 @@ import scala.tools.asm.Opcodes; public class MultiANewArrayInsnNode extends AbstractInsnNode { /** - * An array type descriptor (see {@link org.objectweb.asm.Type}). + * An array type descriptor (see {@link scala.tools.asm.Type}). */ public String desc; @@ -54,8 +54,10 @@ public class MultiANewArrayInsnNode extends AbstractInsnNode { /** * Constructs a new {@link MultiANewArrayInsnNode}. * - * @param desc an array type descriptor (see {@link org.objectweb.asm.Type}). - * @param dims number of dimensions of the array to allocate. + * @param desc + * an array type descriptor (see {@link scala.tools.asm.Type}). + * @param dims + * number of dimensions of the array to allocate. 
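Since MethodNode is the largest class touched by this reformatting, a compact end-to-end sketch of building a class through the tree API and serializing it may be useful. It is illustrative only, not part of the patch; the class name "demo/Empty" is invented, and it assumes the repackaged scala.tools.asm classes behave like upstream ASM 4.

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.tree.ClassNode;
    import scala.tools.asm.tree.InsnNode;
    import scala.tools.asm.tree.MethodInsnNode;
    import scala.tools.asm.tree.MethodNode;
    import scala.tools.asm.tree.VarInsnNode;

    public class BuildEmptyClass {
        public static byte[] build() {
            ClassNode cn = new ClassNode();
            cn.version = Opcodes.V1_5;
            cn.access = Opcodes.ACC_PUBLIC;
            cn.name = "demo/Empty";
            cn.superName = "java/lang/Object";

            // A no-arg constructor: aload_0; invokespecial Object.<init>; return.
            MethodNode init = new MethodNode(Opcodes.ACC_PUBLIC, "<init>", "()V", null, null);
            init.instructions.add(new VarInsnNode(Opcodes.ALOAD, 0));
            init.instructions.add(new MethodInsnNode(Opcodes.INVOKESPECIAL,
                    "java/lang/Object", "<init>", "()V"));
            init.instructions.add(new InsnNode(Opcodes.RETURN));
            init.maxStack = 1;
            init.maxLocals = 1;
            cn.methods.add(init);

            // ClassNode.accept replays the whole class into any ClassVisitor.
            ClassWriter cw = new ClassWriter(0);
            cn.accept(cw);
            return cw.toByteArray();
        }
    }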
*/ public MultiANewArrayInsnNode(final String desc, final int dims) { super(Opcodes.MULTIANEWARRAY); @@ -78,4 +80,4 @@ public class MultiANewArrayInsnNode extends AbstractInsnNode { return new MultiANewArrayInsnNode(desc, dims); } -} \ No newline at end of file +} diff --git a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java index 929ad9b32b..9b3c2a3437 100644 --- a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java +++ b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java @@ -69,18 +69,18 @@ public class TableSwitchInsnNode extends AbstractInsnNode { /** * Constructs a new {@link TableSwitchInsnNode}. * - * @param min the minimum key value. - * @param max the maximum key value. - * @param dflt beginning of the default handler block. - * @param labels beginnings of the handler blocks. labels[i] is - * the beginning of the handler block for the min + i key. + * @param min + * the minimum key value. + * @param max + * the maximum key value. + * @param dflt + * beginning of the default handler block. + * @param labels + * beginnings of the handler blocks. labels[i] is the + * beginning of the handler block for the min + i key. */ - public TableSwitchInsnNode( - final int min, - final int max, - final LabelNode dflt, - final LabelNode... labels) - { + public TableSwitchInsnNode(final int min, final int max, + final LabelNode dflt, final LabelNode... labels) { super(Opcodes.TABLESWITCH); this.min = min; this.max = max; @@ -107,9 +107,7 @@ public class TableSwitchInsnNode extends AbstractInsnNode { @Override public AbstractInsnNode clone(final Map labels) { - return new TableSwitchInsnNode(min, - max, - clone(dflt, labels), - clone(this.labels, labels)); + return new TableSwitchInsnNode(min, max, clone(dflt, labels), clone( + this.labels, labels)); } -} \ No newline at end of file +} diff --git a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java index 375b4cfcb9..ab4fa97c34 100644 --- a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java +++ b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java @@ -62,19 +62,19 @@ public class TryCatchBlockNode { /** * Constructs a new {@link TryCatchBlockNode}. * - * @param start beginning of the exception handler's scope (inclusive). - * @param end end of the exception handler's scope (exclusive). - * @param handler beginning of the exception handler's code. - * @param type internal name of the type of exceptions handled by the - * handler, or null to catch any exceptions (for "finally" - * blocks). + * @param start + * beginning of the exception handler's scope (inclusive). + * @param end + * end of the exception handler's scope (exclusive). + * @param handler + * beginning of the exception handler's code. + * @param type + * internal name of the type of exceptions handled by the + * handler, or null to catch any exceptions (for + * "finally" blocks). */ - public TryCatchBlockNode( - final LabelNode start, - final LabelNode end, - final LabelNode handler, - final String type) - { + public TryCatchBlockNode(final LabelNode start, final LabelNode end, + final LabelNode handler, final String type) { this.start = start; this.end = end; this.handler = handler; @@ -84,11 +84,11 @@ public class TryCatchBlockNode { /** * Makes the given visitor visit this try catch block. * - * @param mv a method visitor. + * @param mv + * a method visitor. 
*/ public void accept(final MethodVisitor mv) { - mv.visitTryCatchBlock(start.getLabel(), end.getLabel(), handler == null - ? null - : handler.getLabel(), type); + mv.visitTryCatchBlock(start.getLabel(), end.getLabel(), + handler == null ? null : handler.getLabel(), type); } } diff --git a/src/asm/scala/tools/asm/tree/TypeInsnNode.java b/src/asm/scala/tools/asm/tree/TypeInsnNode.java index 0b2666c498..3210dd60e6 100644 --- a/src/asm/scala/tools/asm/tree/TypeInsnNode.java +++ b/src/asm/scala/tools/asm/tree/TypeInsnNode.java @@ -43,17 +43,19 @@ public class TypeInsnNode extends AbstractInsnNode { /** * The operand of this instruction. This operand is an internal name (see - * {@link org.objectweb.asm.Type}). + * {@link scala.tools.asm.Type}). */ public String desc; /** * Constructs a new {@link TypeInsnNode}. * - * @param opcode the opcode of the type instruction to be constructed. This - * opcode must be NEW, ANEWARRAY, CHECKCAST or INSTANCEOF. - * @param desc the operand of the instruction to be constructed. This - * operand is an internal name (see {@link org.objectweb.asm.Type}). + * @param opcode + * the opcode of the type instruction to be constructed. This + * opcode must be NEW, ANEWARRAY, CHECKCAST or INSTANCEOF. + * @param desc + * the operand of the instruction to be constructed. This operand + * is an internal name (see {@link scala.tools.asm.Type}). */ public TypeInsnNode(final int opcode, final String desc) { super(opcode); @@ -63,8 +65,9 @@ public class TypeInsnNode extends AbstractInsnNode { /** * Sets the opcode of this instruction. * - * @param opcode the new instruction opcode. This opcode must be NEW, - * ANEWARRAY, CHECKCAST or INSTANCEOF. + * @param opcode + * the new instruction opcode. This opcode must be NEW, + * ANEWARRAY, CHECKCAST or INSTANCEOF. */ public void setOpcode(final int opcode) { this.opcode = opcode; @@ -84,4 +87,4 @@ public class TypeInsnNode extends AbstractInsnNode { public AbstractInsnNode clone(final Map labels) { return new TypeInsnNode(opcode, desc); } -} \ No newline at end of file +} diff --git a/src/asm/scala/tools/asm/tree/VarInsnNode.java b/src/asm/scala/tools/asm/tree/VarInsnNode.java index 89f572db59..5dd9ef6726 100644 --- a/src/asm/scala/tools/asm/tree/VarInsnNode.java +++ b/src/asm/scala/tools/asm/tree/VarInsnNode.java @@ -51,11 +51,13 @@ public class VarInsnNode extends AbstractInsnNode { /** * Constructs a new {@link VarInsnNode}. * - * @param opcode the opcode of the local variable instruction to be - * constructed. This opcode must be ILOAD, LLOAD, FLOAD, DLOAD, - * ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET. - * @param var the operand of the instruction to be constructed. This operand - * is the index of a local variable. + * @param opcode + * the opcode of the local variable instruction to be + * constructed. This opcode must be ILOAD, LLOAD, FLOAD, DLOAD, + * ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET. + * @param var + * the operand of the instruction to be constructed. This operand + * is the index of a local variable. */ public VarInsnNode(final int opcode, final int var) { super(opcode); @@ -65,9 +67,10 @@ public class VarInsnNode extends AbstractInsnNode { /** * Sets the opcode of this instruction. * - * @param opcode the new instruction opcode. This opcode must be ILOAD, - * LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE - * or RET. + * @param opcode + * the new instruction opcode. 
This opcode must be ILOAD, LLOAD, + * FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or + * RET. */ public void setOpcode(final int opcode) { this.opcode = opcode; @@ -87,4 +90,4 @@ public class VarInsnNode extends AbstractInsnNode { public AbstractInsnNode clone(final Map labels) { return new VarInsnNode(opcode, var); } -} \ No newline at end of file +} diff --git a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java index df387b0b8e..0134555f10 100644 --- a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java +++ b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java @@ -51,9 +51,10 @@ import scala.tools.asm.tree.VarInsnNode; * A semantic bytecode analyzer. This class does not fully check that JSR and * RET instructions are valid. * - * @param type of the Value used for the analysis. + * @param + * type of the Value used for the analysis. * - * @author Eric Bruneton + * @author Eric Bruneton */ public class Analyzer implements Opcodes { @@ -78,8 +79,9 @@ public class Analyzer implements Opcodes { /** * Constructs a new {@link Analyzer}. * - * @param interpreter the interpreter to be used to symbolically interpret - * the bytecode instructions. + * @param interpreter + * the interpreter to be used to symbolically interpret the + * bytecode instructions. */ public Analyzer(final Interpreter interpreter) { this.interpreter = interpreter; @@ -88,26 +90,28 @@ public class Analyzer implements Opcodes { /** * Analyzes the given method. * - * @param owner the internal name of the class to which the method belongs. - * @param m the method to be analyzed. + * @param owner + * the internal name of the class to which the method belongs. + * @param m + * the method to be analyzed. * @return the symbolic state of the execution stack frame at each bytecode * instruction of the method. The size of the returned array is * equal to the number of instructions (and labels) of the method. A * given frame is null if and only if the corresponding * instruction cannot be reached (dead code). - * @throws AnalyzerException if a problem occurs during the analysis. + * @throws AnalyzerException + * if a problem occurs during the analysis. 
*/ public Frame[] analyze(final String owner, final MethodNode m) - throws AnalyzerException - { + throws AnalyzerException { if ((m.access & (ACC_ABSTRACT | ACC_NATIVE)) != 0) { - frames = (Frame[])new Frame[0]; + frames = (Frame[]) new Frame[0]; return frames; } n = m.instructions.size(); insns = m.instructions; - handlers = (List[])new List[n]; - frames = (Frame[])new Frame[n]; + handlers = (List[]) new List[n]; + frames = (Frame[]) new Frame[n]; subroutines = new Subroutine[n]; queued = new boolean[n]; queue = new int[n]; @@ -188,8 +192,7 @@ public class Analyzer implements Opcodes { if (insnType == AbstractInsnNode.LABEL || insnType == AbstractInsnNode.LINE - || insnType == AbstractInsnNode.FRAME) - { + || insnType == AbstractInsnNode.FRAME) { merge(insn + 1, f, subroutine); newControlFlowEdge(insn, insn + 1); } else { @@ -205,8 +208,7 @@ public class Analyzer implements Opcodes { int jump = insns.indexOf(j.label); if (insnOpcode == JSR) { merge(jump, current, new Subroutine(j.label, - m.maxLocals, - j)); + m.maxLocals, j)); } else { merge(jump, current, subroutine); } @@ -235,31 +237,27 @@ public class Analyzer implements Opcodes { } } else if (insnOpcode == RET) { if (subroutine == null) { - throw new AnalyzerException(insnNode, "RET instruction outside of a sub routine"); + throw new AnalyzerException(insnNode, + "RET instruction outside of a sub routine"); } for (int i = 0; i < subroutine.callers.size(); ++i) { JumpInsnNode caller = subroutine.callers.get(i); int call = insns.indexOf(caller); if (frames[call] != null) { - merge(call + 1, - frames[call], - current, - subroutines[call], - subroutine.access); + merge(call + 1, frames[call], current, + subroutines[call], subroutine.access); newControlFlowEdge(insn, call + 1); } } } else if (insnOpcode != ATHROW - && (insnOpcode < IRETURN || insnOpcode > RETURN)) - { + && (insnOpcode < IRETURN || insnOpcode > RETURN)) { if (subroutine != null) { if (insnNode instanceof VarInsnNode) { int var = ((VarInsnNode) insnNode).var; subroutine.access[var] = true; if (insnOpcode == LLOAD || insnOpcode == DLOAD || insnOpcode == LSTORE - || insnOpcode == DSTORE) - { + || insnOpcode == DSTORE) { subroutine.access[var + 1] = true; } } else if (insnNode instanceof IincInsnNode) { @@ -292,23 +290,23 @@ public class Analyzer implements Opcodes { } } } catch (AnalyzerException e) { - throw new AnalyzerException(e.node, "Error at instruction " + insn - + ": " + e.getMessage(), e); + throw new AnalyzerException(e.node, "Error at instruction " + + insn + ": " + e.getMessage(), e); } catch (Exception e) { - throw new AnalyzerException(insnNode, "Error at instruction " + insn - + ": " + e.getMessage(), e); + throw new AnalyzerException(insnNode, "Error at instruction " + + insn + ": " + e.getMessage(), e); } } return frames; } - private void findSubroutine(int insn, final Subroutine sub, final List calls) - throws AnalyzerException - { + private void findSubroutine(int insn, final Subroutine sub, + final List calls) throws AnalyzerException { while (true) { if (insn < 0 || insn >= n) { - throw new AnalyzerException(null, "Execution can fall off end of the code"); + throw new AnalyzerException(null, + "Execution can fall off end of the code"); } if (subroutines[insn] != null) { return; @@ -352,18 +350,18 @@ public class Analyzer implements Opcodes { // if insn does not falls through to the next instruction, return. 
switch (node.getOpcode()) { - case GOTO: - case RET: - case TABLESWITCH: - case LOOKUPSWITCH: - case IRETURN: - case LRETURN: - case FRETURN: - case DRETURN: - case ARETURN: - case RETURN: - case ATHROW: - return; + case GOTO: + case RET: + case TABLESWITCH: + case LOOKUPSWITCH: + case IRETURN: + case LRETURN: + case FRETURN: + case DRETURN: + case ARETURN: + case RETURN: + case ATHROW: + return; } insn++; } @@ -387,8 +385,9 @@ public class Analyzer implements Opcodes { /** * Returns the exception handlers for the given instruction. * - * @param insn the index of an instruction of the last recently analyzed - * method. + * @param insn + * the index of an instruction of the last recently analyzed + * method. * @return a list of {@link TryCatchBlockNode} objects. */ public List getHandlers(final int insn) { @@ -400,9 +399,12 @@ public class Analyzer implements Opcodes { * execution of control flow analysis loop in #analyze. The default * implementation of this method does nothing. * - * @param owner the internal name of the class to which the method belongs. - * @param m the method to be analyzed. - * @throws AnalyzerException if a problem occurs. + * @param owner + * the internal name of the class to which the method belongs. + * @param m + * the method to be analyzed. + * @throws AnalyzerException + * if a problem occurs. */ protected void init(String owner, MethodNode m) throws AnalyzerException { } @@ -410,8 +412,10 @@ public class Analyzer implements Opcodes { /** * Constructs a new frame with the given size. * - * @param nLocals the maximum number of local variables of the frame. - * @param nStack the maximum stack size of the frame. + * @param nLocals + * the maximum number of local variables of the frame. + * @param nStack + * the maximum stack size of the frame. * @return the created frame. */ protected Frame newFrame(final int nLocals, final int nStack) { @@ -421,7 +425,8 @@ public class Analyzer implements Opcodes { /** * Constructs a new frame that is identical to the given frame. * - * @param src a frame. + * @param src + * a frame. * @return the created frame. */ protected Frame newFrame(final Frame src) { @@ -434,8 +439,10 @@ public class Analyzer implements Opcodes { * control flow graph of a method (this method is called by the * {@link #analyze analyze} method during its visit of the method's code). * - * @param insn an instruction index. - * @param successor index of a successor instruction. + * @param insn + * an instruction index. + * @param successor + * index of a successor instruction. */ protected void newControlFlowEdge(final int insn, final int successor) { } @@ -447,16 +454,16 @@ public class Analyzer implements Opcodes { * method is called by the {@link #analyze analyze} method during its visit * of the method's code). * - * @param insn an instruction index. - * @param successor index of a successor instruction. + * @param insn + * an instruction index. + * @param successor + * index of a successor instruction. * @return true if this edge must be considered in the data flow analysis * performed by this analyzer, or false otherwise. The default * implementation of this method always returns true. */ - protected boolean newControlFlowExceptionEdge( - final int insn, - final int successor) - { + protected boolean newControlFlowExceptionEdge(final int insn, + final int successor) { return true; } @@ -469,28 +476,25 @@ public class Analyzer implements Opcodes { * the {@link #analyze analyze} method during its visit of the method's * code). 
* - * @param insn an instruction index. - * @param tcb TryCatchBlockNode corresponding to this edge. + * @param insn + * an instruction index. + * @param tcb + * TryCatchBlockNode corresponding to this edge. * @return true if this edge must be considered in the data flow analysis * performed by this analyzer, or false otherwise. The default * implementation of this method delegates to * {@link #newControlFlowExceptionEdge(int, int) * newControlFlowExceptionEdge(int, int)}. */ - protected boolean newControlFlowExceptionEdge( - final int insn, - final TryCatchBlockNode tcb) - { + protected boolean newControlFlowExceptionEdge(final int insn, + final TryCatchBlockNode tcb) { return newControlFlowExceptionEdge(insn, insns.indexOf(tcb.handler)); } // ------------------------------------------------------------------------- - private void merge( - final int insn, - final Frame frame, - final Subroutine subroutine) throws AnalyzerException - { + private void merge(final int insn, final Frame frame, + final Subroutine subroutine) throws AnalyzerException { Frame oldFrame = frames[insn]; Subroutine oldSubroutine = subroutines[insn]; boolean changes; @@ -518,13 +522,9 @@ public class Analyzer implements Opcodes { } } - private void merge( - final int insn, - final Frame beforeJSR, - final Frame afterRET, - final Subroutine subroutineBeforeJSR, - final boolean[] access) throws AnalyzerException - { + private void merge(final int insn, final Frame beforeJSR, + final Frame afterRET, final Subroutine subroutineBeforeJSR, + final boolean[] access) throws AnalyzerException { Frame oldFrame = frames[insn]; Subroutine oldSubroutine = subroutines[insn]; boolean changes; diff --git a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java index a89bb3513f..5e3f51f21a 100644 --- a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java +++ b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java @@ -46,17 +46,14 @@ public class AnalyzerException extends Exception { this.node = node; } - public AnalyzerException(final AbstractInsnNode node, final String msg, final Throwable exception) { + public AnalyzerException(final AbstractInsnNode node, final String msg, + final Throwable exception) { super(msg, exception); this.node = node; } - public AnalyzerException( - final AbstractInsnNode node, - final String msg, - final Object expected, - final Value encountered) - { + public AnalyzerException(final AbstractInsnNode node, final String msg, + final Object expected, final Value encountered) { super((msg == null ? 
"Expected " : msg + ": expected ") + expected + ", but found " + encountered); this.node = node; diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java index 64ddcc11e6..8d6653c1c5 100644 --- a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java +++ b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java @@ -50,8 +50,7 @@ import scala.tools.asm.tree.TypeInsnNode; * @author Bing Ran */ public class BasicInterpreter extends Interpreter implements - Opcodes -{ + Opcodes { public BasicInterpreter() { super(ASM4); @@ -67,292 +66,286 @@ public class BasicInterpreter extends Interpreter implements return BasicValue.UNINITIALIZED_VALUE; } switch (type.getSort()) { - case Type.VOID: - return null; - case Type.BOOLEAN: - case Type.CHAR: - case Type.BYTE: - case Type.SHORT: - case Type.INT: - return BasicValue.INT_VALUE; - case Type.FLOAT: - return BasicValue.FLOAT_VALUE; - case Type.LONG: - return BasicValue.LONG_VALUE; - case Type.DOUBLE: - return BasicValue.DOUBLE_VALUE; - case Type.ARRAY: - case Type.OBJECT: - return BasicValue.REFERENCE_VALUE; - default: - throw new Error("Internal error"); + case Type.VOID: + return null; + case Type.BOOLEAN: + case Type.CHAR: + case Type.BYTE: + case Type.SHORT: + case Type.INT: + return BasicValue.INT_VALUE; + case Type.FLOAT: + return BasicValue.FLOAT_VALUE; + case Type.LONG: + return BasicValue.LONG_VALUE; + case Type.DOUBLE: + return BasicValue.DOUBLE_VALUE; + case Type.ARRAY: + case Type.OBJECT: + return BasicValue.REFERENCE_VALUE; + default: + throw new Error("Internal error"); } } @Override public BasicValue newOperation(final AbstractInsnNode insn) - throws AnalyzerException - { + throws AnalyzerException { switch (insn.getOpcode()) { - case ACONST_NULL: - return newValue(Type.getObjectType("null")); - case ICONST_M1: - case ICONST_0: - case ICONST_1: - case ICONST_2: - case ICONST_3: - case ICONST_4: - case ICONST_5: + case ACONST_NULL: + return newValue(Type.getObjectType("null")); + case ICONST_M1: + case ICONST_0: + case ICONST_1: + case ICONST_2: + case ICONST_3: + case ICONST_4: + case ICONST_5: + return BasicValue.INT_VALUE; + case LCONST_0: + case LCONST_1: + return BasicValue.LONG_VALUE; + case FCONST_0: + case FCONST_1: + case FCONST_2: + return BasicValue.FLOAT_VALUE; + case DCONST_0: + case DCONST_1: + return BasicValue.DOUBLE_VALUE; + case BIPUSH: + case SIPUSH: + return BasicValue.INT_VALUE; + case LDC: + Object cst = ((LdcInsnNode) insn).cst; + if (cst instanceof Integer) { return BasicValue.INT_VALUE; - case LCONST_0: - case LCONST_1: - return BasicValue.LONG_VALUE; - case FCONST_0: - case FCONST_1: - case FCONST_2: + } else if (cst instanceof Float) { return BasicValue.FLOAT_VALUE; - case DCONST_0: - case DCONST_1: + } else if (cst instanceof Long) { + return BasicValue.LONG_VALUE; + } else if (cst instanceof Double) { return BasicValue.DOUBLE_VALUE; - case BIPUSH: - case SIPUSH: - return BasicValue.INT_VALUE; - case LDC: - Object cst = ((LdcInsnNode) insn).cst; - if (cst instanceof Integer) { - return BasicValue.INT_VALUE; - } else if (cst instanceof Float) { - return BasicValue.FLOAT_VALUE; - } else if (cst instanceof Long) { - return BasicValue.LONG_VALUE; - } else if (cst instanceof Double) { - return BasicValue.DOUBLE_VALUE; - } else if (cst instanceof String) { - return newValue(Type.getObjectType("java/lang/String")); - } else if (cst instanceof Type) { - int sort = ((Type) cst).getSort(); - if (sort == Type.OBJECT || sort == 
Type.ARRAY) { - return newValue(Type.getObjectType("java/lang/Class")); - } else if (sort == Type.METHOD) { - return newValue(Type.getObjectType("java/lang/invoke/MethodType")); - } else { - throw new IllegalArgumentException("Illegal LDC constant " + cst); - } - } else if (cst instanceof Handle) { - return newValue(Type.getObjectType("java/lang/invoke/MethodHandle")); + } else if (cst instanceof String) { + return newValue(Type.getObjectType("java/lang/String")); + } else if (cst instanceof Type) { + int sort = ((Type) cst).getSort(); + if (sort == Type.OBJECT || sort == Type.ARRAY) { + return newValue(Type.getObjectType("java/lang/Class")); + } else if (sort == Type.METHOD) { + return newValue(Type + .getObjectType("java/lang/invoke/MethodType")); } else { - throw new IllegalArgumentException("Illegal LDC constant " + cst); + throw new IllegalArgumentException("Illegal LDC constant " + + cst); } - case JSR: - return BasicValue.RETURNADDRESS_VALUE; - case GETSTATIC: - return newValue(Type.getType(((FieldInsnNode) insn).desc)); - case NEW: - return newValue(Type.getObjectType(((TypeInsnNode) insn).desc)); - default: - throw new Error("Internal error."); + } else if (cst instanceof Handle) { + return newValue(Type + .getObjectType("java/lang/invoke/MethodHandle")); + } else { + throw new IllegalArgumentException("Illegal LDC constant " + + cst); + } + case JSR: + return BasicValue.RETURNADDRESS_VALUE; + case GETSTATIC: + return newValue(Type.getType(((FieldInsnNode) insn).desc)); + case NEW: + return newValue(Type.getObjectType(((TypeInsnNode) insn).desc)); + default: + throw new Error("Internal error."); } } @Override - public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value) - throws AnalyzerException - { + public BasicValue copyOperation(final AbstractInsnNode insn, + final BasicValue value) throws AnalyzerException { return value; } @Override - public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value) - throws AnalyzerException - { + public BasicValue unaryOperation(final AbstractInsnNode insn, + final BasicValue value) throws AnalyzerException { switch (insn.getOpcode()) { - case INEG: - case IINC: - case L2I: - case F2I: - case D2I: - case I2B: - case I2C: - case I2S: - return BasicValue.INT_VALUE; - case FNEG: - case I2F: - case L2F: - case D2F: - return BasicValue.FLOAT_VALUE; - case LNEG: - case I2L: - case F2L: - case D2L: - return BasicValue.LONG_VALUE; - case DNEG: - case I2D: - case L2D: - case F2D: - return BasicValue.DOUBLE_VALUE; - case IFEQ: - case IFNE: - case IFLT: - case IFGE: - case IFGT: - case IFLE: - case TABLESWITCH: - case LOOKUPSWITCH: - case IRETURN: - case LRETURN: - case FRETURN: - case DRETURN: - case ARETURN: - case PUTSTATIC: - return null; - case GETFIELD: - return newValue(Type.getType(((FieldInsnNode) insn).desc)); - case NEWARRAY: - switch (((IntInsnNode) insn).operand) { - case T_BOOLEAN: - return newValue(Type.getType("[Z")); - case T_CHAR: - return newValue(Type.getType("[C")); - case T_BYTE: - return newValue(Type.getType("[B")); - case T_SHORT: - return newValue(Type.getType("[S")); - case T_INT: - return newValue(Type.getType("[I")); - case T_FLOAT: - return newValue(Type.getType("[F")); - case T_DOUBLE: - return newValue(Type.getType("[D")); - case T_LONG: - return newValue(Type.getType("[J")); - default: - throw new AnalyzerException(insn, "Invalid array type"); - } - case ANEWARRAY: - String desc = ((TypeInsnNode) insn).desc; - return newValue(Type.getType("[" + Type.getObjectType(desc))); 
- case ARRAYLENGTH: - return BasicValue.INT_VALUE; - case ATHROW: - return null; - case CHECKCAST: - desc = ((TypeInsnNode) insn).desc; - return newValue(Type.getObjectType(desc)); - case INSTANCEOF: - return BasicValue.INT_VALUE; - case MONITORENTER: - case MONITOREXIT: - case IFNULL: - case IFNONNULL: - return null; + case INEG: + case IINC: + case L2I: + case F2I: + case D2I: + case I2B: + case I2C: + case I2S: + return BasicValue.INT_VALUE; + case FNEG: + case I2F: + case L2F: + case D2F: + return BasicValue.FLOAT_VALUE; + case LNEG: + case I2L: + case F2L: + case D2L: + return BasicValue.LONG_VALUE; + case DNEG: + case I2D: + case L2D: + case F2D: + return BasicValue.DOUBLE_VALUE; + case IFEQ: + case IFNE: + case IFLT: + case IFGE: + case IFGT: + case IFLE: + case TABLESWITCH: + case LOOKUPSWITCH: + case IRETURN: + case LRETURN: + case FRETURN: + case DRETURN: + case ARETURN: + case PUTSTATIC: + return null; + case GETFIELD: + return newValue(Type.getType(((FieldInsnNode) insn).desc)); + case NEWARRAY: + switch (((IntInsnNode) insn).operand) { + case T_BOOLEAN: + return newValue(Type.getType("[Z")); + case T_CHAR: + return newValue(Type.getType("[C")); + case T_BYTE: + return newValue(Type.getType("[B")); + case T_SHORT: + return newValue(Type.getType("[S")); + case T_INT: + return newValue(Type.getType("[I")); + case T_FLOAT: + return newValue(Type.getType("[F")); + case T_DOUBLE: + return newValue(Type.getType("[D")); + case T_LONG: + return newValue(Type.getType("[J")); default: - throw new Error("Internal error."); + throw new AnalyzerException(insn, "Invalid array type"); + } + case ANEWARRAY: + String desc = ((TypeInsnNode) insn).desc; + return newValue(Type.getType("[" + Type.getObjectType(desc))); + case ARRAYLENGTH: + return BasicValue.INT_VALUE; + case ATHROW: + return null; + case CHECKCAST: + desc = ((TypeInsnNode) insn).desc; + return newValue(Type.getObjectType(desc)); + case INSTANCEOF: + return BasicValue.INT_VALUE; + case MONITORENTER: + case MONITOREXIT: + case IFNULL: + case IFNONNULL: + return null; + default: + throw new Error("Internal error."); } } @Override - public BasicValue binaryOperation( - final AbstractInsnNode insn, - final BasicValue value1, - final BasicValue value2) throws AnalyzerException - { + public BasicValue binaryOperation(final AbstractInsnNode insn, + final BasicValue value1, final BasicValue value2) + throws AnalyzerException { switch (insn.getOpcode()) { - case IALOAD: - case BALOAD: - case CALOAD: - case SALOAD: - case IADD: - case ISUB: - case IMUL: - case IDIV: - case IREM: - case ISHL: - case ISHR: - case IUSHR: - case IAND: - case IOR: - case IXOR: - return BasicValue.INT_VALUE; - case FALOAD: - case FADD: - case FSUB: - case FMUL: - case FDIV: - case FREM: - return BasicValue.FLOAT_VALUE; - case LALOAD: - case LADD: - case LSUB: - case LMUL: - case LDIV: - case LREM: - case LSHL: - case LSHR: - case LUSHR: - case LAND: - case LOR: - case LXOR: - return BasicValue.LONG_VALUE; - case DALOAD: - case DADD: - case DSUB: - case DMUL: - case DDIV: - case DREM: - return BasicValue.DOUBLE_VALUE; - case AALOAD: - return BasicValue.REFERENCE_VALUE; - case LCMP: - case FCMPL: - case FCMPG: - case DCMPL: - case DCMPG: - return BasicValue.INT_VALUE; - case IF_ICMPEQ: - case IF_ICMPNE: - case IF_ICMPLT: - case IF_ICMPGE: - case IF_ICMPGT: - case IF_ICMPLE: - case IF_ACMPEQ: - case IF_ACMPNE: - case PUTFIELD: - return null; - default: - throw new Error("Internal error."); + case IALOAD: + case BALOAD: + case CALOAD: + case SALOAD: + case IADD: + 
case ISUB: + case IMUL: + case IDIV: + case IREM: + case ISHL: + case ISHR: + case IUSHR: + case IAND: + case IOR: + case IXOR: + return BasicValue.INT_VALUE; + case FALOAD: + case FADD: + case FSUB: + case FMUL: + case FDIV: + case FREM: + return BasicValue.FLOAT_VALUE; + case LALOAD: + case LADD: + case LSUB: + case LMUL: + case LDIV: + case LREM: + case LSHL: + case LSHR: + case LUSHR: + case LAND: + case LOR: + case LXOR: + return BasicValue.LONG_VALUE; + case DALOAD: + case DADD: + case DSUB: + case DMUL: + case DDIV: + case DREM: + return BasicValue.DOUBLE_VALUE; + case AALOAD: + return BasicValue.REFERENCE_VALUE; + case LCMP: + case FCMPL: + case FCMPG: + case DCMPL: + case DCMPG: + return BasicValue.INT_VALUE; + case IF_ICMPEQ: + case IF_ICMPNE: + case IF_ICMPLT: + case IF_ICMPGE: + case IF_ICMPGT: + case IF_ICMPLE: + case IF_ACMPEQ: + case IF_ACMPNE: + case PUTFIELD: + return null; + default: + throw new Error("Internal error."); } } @Override - public BasicValue ternaryOperation( - final AbstractInsnNode insn, - final BasicValue value1, - final BasicValue value2, - final BasicValue value3) throws AnalyzerException - { + public BasicValue ternaryOperation(final AbstractInsnNode insn, + final BasicValue value1, final BasicValue value2, + final BasicValue value3) throws AnalyzerException { return null; } @Override - public BasicValue naryOperation(final AbstractInsnNode insn, final List values) - throws AnalyzerException - { + public BasicValue naryOperation(final AbstractInsnNode insn, + final List values) throws AnalyzerException { int opcode = insn.getOpcode(); if (opcode == MULTIANEWARRAY) { return newValue(Type.getType(((MultiANewArrayInsnNode) insn).desc)); - } else if (opcode == INVOKEDYNAMIC){ - return newValue(Type.getReturnType(((InvokeDynamicInsnNode) insn).desc)); + } else if (opcode == INVOKEDYNAMIC) { + return newValue(Type + .getReturnType(((InvokeDynamicInsnNode) insn).desc)); } else { return newValue(Type.getReturnType(((MethodInsnNode) insn).desc)); } } @Override - public void returnOperation( - final AbstractInsnNode insn, - final BasicValue value, - final BasicValue expected) throws AnalyzerException - { + public void returnOperation(final AbstractInsnNode insn, + final BasicValue value, final BasicValue expected) + throws AnalyzerException { } @Override diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicValue.java b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java index 6c449db9b0..439941fb9f 100644 --- a/src/asm/scala/tools/asm/tree/analysis/BasicValue.java +++ b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java @@ -48,11 +48,14 @@ public class BasicValue implements Value { public static final BasicValue LONG_VALUE = new BasicValue(Type.LONG_TYPE); - public static final BasicValue DOUBLE_VALUE = new BasicValue(Type.DOUBLE_TYPE); + public static final BasicValue DOUBLE_VALUE = new BasicValue( + Type.DOUBLE_TYPE); - public static final BasicValue REFERENCE_VALUE = new BasicValue(Type.getObjectType("java/lang/Object")); + public static final BasicValue REFERENCE_VALUE = new BasicValue( + Type.getObjectType("java/lang/Object")); - public static final BasicValue RETURNADDRESS_VALUE = new BasicValue(Type.VOID_TYPE); + public static final BasicValue RETURNADDRESS_VALUE = new BasicValue( + Type.VOID_TYPE); private final Type type; diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java index 9297dd9294..71666edb74 100644 --- a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java 
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java @@ -55,47 +55,41 @@ public class BasicVerifier extends BasicInterpreter { } @Override - public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value) - throws AnalyzerException - { + public BasicValue copyOperation(final AbstractInsnNode insn, + final BasicValue value) throws AnalyzerException { Value expected; switch (insn.getOpcode()) { - case ILOAD: - case ISTORE: - expected = BasicValue.INT_VALUE; - break; - case FLOAD: - case FSTORE: - expected = BasicValue.FLOAT_VALUE; - break; - case LLOAD: - case LSTORE: - expected = BasicValue.LONG_VALUE; - break; - case DLOAD: - case DSTORE: - expected = BasicValue.DOUBLE_VALUE; - break; - case ALOAD: - if (!value.isReference()) { - throw new AnalyzerException(insn, - null, - "an object reference", - value); - } - return value; - case ASTORE: - if (!value.isReference() - && !BasicValue.RETURNADDRESS_VALUE.equals(value)) - { - throw new AnalyzerException(insn, - null, - "an object reference or a return address", - value); - } - return value; - default: - return value; + case ILOAD: + case ISTORE: + expected = BasicValue.INT_VALUE; + break; + case FLOAD: + case FSTORE: + expected = BasicValue.FLOAT_VALUE; + break; + case LLOAD: + case LSTORE: + expected = BasicValue.LONG_VALUE; + break; + case DLOAD: + case DSTORE: + expected = BasicValue.DOUBLE_VALUE; + break; + case ALOAD: + if (!value.isReference()) { + throw new AnalyzerException(insn, null, "an object reference", + value); + } + return value; + case ASTORE: + if (!value.isReference() + && !BasicValue.RETURNADDRESS_VALUE.equals(value)) { + throw new AnalyzerException(insn, null, + "an object reference or a return address", value); + } + return value; + default: + return value; } if (!expected.equals(value)) { throw new AnalyzerException(insn, null, expected, value); @@ -104,91 +98,85 @@ public class BasicVerifier extends BasicInterpreter { } @Override - public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value) - throws AnalyzerException - { + public BasicValue unaryOperation(final AbstractInsnNode insn, + final BasicValue value) throws AnalyzerException { BasicValue expected; switch (insn.getOpcode()) { - case INEG: - case IINC: - case I2F: - case I2L: - case I2D: - case I2B: - case I2C: - case I2S: - case IFEQ: - case IFNE: - case IFLT: - case IFGE: - case IFGT: - case IFLE: - case TABLESWITCH: - case LOOKUPSWITCH: - case IRETURN: - case NEWARRAY: - case ANEWARRAY: - expected = BasicValue.INT_VALUE; - break; - case FNEG: - case F2I: - case F2L: - case F2D: - case FRETURN: - expected = BasicValue.FLOAT_VALUE; - break; - case LNEG: - case L2I: - case L2F: - case L2D: - case LRETURN: - expected = BasicValue.LONG_VALUE; - break; - case DNEG: - case D2I: - case D2F: - case D2L: - case DRETURN: - expected = BasicValue.DOUBLE_VALUE; - break; - case GETFIELD: - expected = newValue(Type.getObjectType(((FieldInsnNode) insn).owner)); - break; - case CHECKCAST: - if (!value.isReference()) { - throw new AnalyzerException(insn, - null, - "an object reference", - value); - } - return super.unaryOperation(insn, value); - case ARRAYLENGTH: - if (!isArrayValue(value)) { - throw new AnalyzerException(insn, - null, - "an array reference", - value); - } - return super.unaryOperation(insn, value); - case ARETURN: - case ATHROW: - case INSTANCEOF: - case MONITORENTER: - case MONITOREXIT: - case IFNULL: - case IFNONNULL: - if (!value.isReference()) { - throw new AnalyzerException(insn, - null, - "an 
object reference", - value); - } - return super.unaryOperation(insn, value); - case PUTSTATIC: - expected = newValue(Type.getType(((FieldInsnNode) insn).desc)); - break; - default: - throw new Error("Internal error."); + case INEG: + case IINC: + case I2F: + case I2L: + case I2D: + case I2B: + case I2C: + case I2S: + case IFEQ: + case IFNE: + case IFLT: + case IFGE: + case IFGT: + case IFLE: + case TABLESWITCH: + case LOOKUPSWITCH: + case IRETURN: + case NEWARRAY: + case ANEWARRAY: + expected = BasicValue.INT_VALUE; + break; + case FNEG: + case F2I: + case F2L: + case F2D: + case FRETURN: + expected = BasicValue.FLOAT_VALUE; + break; + case LNEG: + case L2I: + case L2F: + case L2D: + case LRETURN: + expected = BasicValue.LONG_VALUE; + break; + case DNEG: + case D2I: + case D2F: + case D2L: + case DRETURN: + expected = BasicValue.DOUBLE_VALUE; + break; + case GETFIELD: + expected = newValue(Type + .getObjectType(((FieldInsnNode) insn).owner)); + break; + case CHECKCAST: + if (!value.isReference()) { + throw new AnalyzerException(insn, null, "an object reference", + value); + } + return super.unaryOperation(insn, value); + case ARRAYLENGTH: + if (!isArrayValue(value)) { + throw new AnalyzerException(insn, null, "an array reference", + value); + } + return super.unaryOperation(insn, value); + case ARETURN: + case ATHROW: + case INSTANCEOF: + case MONITORENTER: + case MONITOREXIT: + case IFNULL: + case IFNONNULL: + if (!value.isReference()) { + throw new AnalyzerException(insn, null, "an object reference", + value); + } + return super.unaryOperation(insn, value); + case PUTSTATIC: + expected = newValue(Type.getType(((FieldInsnNode) insn).desc)); + break; + default: + throw new Error("Internal error."); } if (!isSubTypeOf(value, expected)) { throw new AnalyzerException(insn, null, expected, value); @@ -197,125 +185,125 @@ public class BasicVerifier extends BasicInterpreter { } @Override - public BasicValue binaryOperation( - final AbstractInsnNode insn, - final BasicValue value1, - final BasicValue value2) throws AnalyzerException - { + public BasicValue binaryOperation(final AbstractInsnNode insn, + final BasicValue value1, final BasicValue value2) + throws AnalyzerException { BasicValue expected1; BasicValue expected2; switch (insn.getOpcode()) { - case IALOAD: - expected1 = newValue(Type.getType("[I")); - expected2 = BasicValue.INT_VALUE; - break; - case BALOAD: - if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) { - expected1 = newValue(Type.getType("[Z")); - } else { - expected1 = newValue(Type.getType("[B")); - } - expected2 = BasicValue.INT_VALUE; - break; - case CALOAD: - expected1 = newValue(Type.getType("[C")); - expected2 = BasicValue.INT_VALUE; - break; - case SALOAD: - expected1 = newValue(Type.getType("[S")); - expected2 = BasicValue.INT_VALUE; - break; - case LALOAD: - expected1 = newValue(Type.getType("[J")); - expected2 = BasicValue.INT_VALUE; - break; - case FALOAD: - expected1 = newValue(Type.getType("[F")); - expected2 = BasicValue.INT_VALUE; - break; - case DALOAD: - expected1 = newValue(Type.getType("[D")); - expected2 = BasicValue.INT_VALUE; - break; - case AALOAD: - expected1 = newValue(Type.getType("[Ljava/lang/Object;")); - expected2 = BasicValue.INT_VALUE; - break; - case IADD: - case ISUB: - case IMUL: - case IDIV: - case IREM: - case ISHL: - case ISHR: - case IUSHR: - case IAND: - case IOR: - case IXOR: - case IF_ICMPEQ: - case IF_ICMPNE: - case IF_ICMPLT: - case IF_ICMPGE: - case IF_ICMPGT: - case IF_ICMPLE: - expected1 = BasicValue.INT_VALUE; - expected2 = 
BasicValue.INT_VALUE; - break; - case FADD: - case FSUB: - case FMUL: - case FDIV: - case FREM: - case FCMPL: - case FCMPG: - expected1 = BasicValue.FLOAT_VALUE; - expected2 = BasicValue.FLOAT_VALUE; - break; - case LADD: - case LSUB: - case LMUL: - case LDIV: - case LREM: - case LAND: - case LOR: - case LXOR: - case LCMP: - expected1 = BasicValue.LONG_VALUE; - expected2 = BasicValue.LONG_VALUE; - break; - case LSHL: - case LSHR: - case LUSHR: - expected1 = BasicValue.LONG_VALUE; - expected2 = BasicValue.INT_VALUE; - break; - case DADD: - case DSUB: - case DMUL: - case DDIV: - case DREM: - case DCMPL: - case DCMPG: - expected1 = BasicValue.DOUBLE_VALUE; - expected2 = BasicValue.DOUBLE_VALUE; - break; - case IF_ACMPEQ: - case IF_ACMPNE: - expected1 = BasicValue.REFERENCE_VALUE; - expected2 = BasicValue.REFERENCE_VALUE; - break; - case PUTFIELD: - FieldInsnNode fin = (FieldInsnNode) insn; - expected1 = newValue(Type.getObjectType(fin.owner)); - expected2 = newValue(Type.getType(fin.desc)); - break; - default: - throw new Error("Internal error."); + case IALOAD: + expected1 = newValue(Type.getType("[I")); + expected2 = BasicValue.INT_VALUE; + break; + case BALOAD: + if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) { + expected1 = newValue(Type.getType("[Z")); + } else { + expected1 = newValue(Type.getType("[B")); + } + expected2 = BasicValue.INT_VALUE; + break; + case CALOAD: + expected1 = newValue(Type.getType("[C")); + expected2 = BasicValue.INT_VALUE; + break; + case SALOAD: + expected1 = newValue(Type.getType("[S")); + expected2 = BasicValue.INT_VALUE; + break; + case LALOAD: + expected1 = newValue(Type.getType("[J")); + expected2 = BasicValue.INT_VALUE; + break; + case FALOAD: + expected1 = newValue(Type.getType("[F")); + expected2 = BasicValue.INT_VALUE; + break; + case DALOAD: + expected1 = newValue(Type.getType("[D")); + expected2 = BasicValue.INT_VALUE; + break; + case AALOAD: + expected1 = newValue(Type.getType("[Ljava/lang/Object;")); + expected2 = BasicValue.INT_VALUE; + break; + case IADD: + case ISUB: + case IMUL: + case IDIV: + case IREM: + case ISHL: + case ISHR: + case IUSHR: + case IAND: + case IOR: + case IXOR: + case IF_ICMPEQ: + case IF_ICMPNE: + case IF_ICMPLT: + case IF_ICMPGE: + case IF_ICMPGT: + case IF_ICMPLE: + expected1 = BasicValue.INT_VALUE; + expected2 = BasicValue.INT_VALUE; + break; + case FADD: + case FSUB: + case FMUL: + case FDIV: + case FREM: + case FCMPL: + case FCMPG: + expected1 = BasicValue.FLOAT_VALUE; + expected2 = BasicValue.FLOAT_VALUE; + break; + case LADD: + case LSUB: + case LMUL: + case LDIV: + case LREM: + case LAND: + case LOR: + case LXOR: + case LCMP: + expected1 = BasicValue.LONG_VALUE; + expected2 = BasicValue.LONG_VALUE; + break; + case LSHL: + case LSHR: + case LUSHR: + expected1 = BasicValue.LONG_VALUE; + expected2 = BasicValue.INT_VALUE; + break; + case DADD: + case DSUB: + case DMUL: + case DDIV: + case DREM: + case DCMPL: + case DCMPG: + expected1 = BasicValue.DOUBLE_VALUE; + expected2 = BasicValue.DOUBLE_VALUE; + break; + case IF_ACMPEQ: + case IF_ACMPNE: + expected1 = BasicValue.REFERENCE_VALUE; + expected2 = BasicValue.REFERENCE_VALUE; + break; + case PUTFIELD: + FieldInsnNode fin = (FieldInsnNode) insn; + expected1 = newValue(Type.getObjectType(fin.owner)); + expected2 = newValue(Type.getType(fin.desc)); + break; + default: + throw new Error("Internal error."); } if (!isSubTypeOf(value1, expected1)) { - throw new AnalyzerException(insn, "First argument", expected1, value1); + throw new AnalyzerException(insn, "First 
argument", expected1, + value1); } else if (!isSubTypeOf(value2, expected2)) { - throw new AnalyzerException(insn, "Second argument", expected2, value2); + throw new AnalyzerException(insn, "Second argument", expected2, + value2); } if (insn.getOpcode() == AALOAD) { return getElementValue(value1); @@ -325,79 +313,73 @@ public class BasicVerifier extends BasicInterpreter { } @Override - public BasicValue ternaryOperation( - final AbstractInsnNode insn, - final BasicValue value1, - final BasicValue value2, - final BasicValue value3) throws AnalyzerException - { + public BasicValue ternaryOperation(final AbstractInsnNode insn, + final BasicValue value1, final BasicValue value2, + final BasicValue value3) throws AnalyzerException { BasicValue expected1; BasicValue expected3; switch (insn.getOpcode()) { - case IASTORE: - expected1 = newValue(Type.getType("[I")); - expected3 = BasicValue.INT_VALUE; - break; - case BASTORE: - if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) { - expected1 = newValue(Type.getType("[Z")); - } else { - expected1 = newValue(Type.getType("[B")); - } - expected3 = BasicValue.INT_VALUE; - break; - case CASTORE: - expected1 = newValue(Type.getType("[C")); - expected3 = BasicValue.INT_VALUE; - break; - case SASTORE: - expected1 = newValue(Type.getType("[S")); - expected3 = BasicValue.INT_VALUE; - break; - case LASTORE: - expected1 = newValue(Type.getType("[J")); - expected3 = BasicValue.LONG_VALUE; - break; - case FASTORE: - expected1 = newValue(Type.getType("[F")); - expected3 = BasicValue.FLOAT_VALUE; - break; - case DASTORE: - expected1 = newValue(Type.getType("[D")); - expected3 = BasicValue.DOUBLE_VALUE; - break; - case AASTORE: - expected1 = value1; - expected3 = BasicValue.REFERENCE_VALUE; - break; - default: - throw new Error("Internal error."); + case IASTORE: + expected1 = newValue(Type.getType("[I")); + expected3 = BasicValue.INT_VALUE; + break; + case BASTORE: + if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) { + expected1 = newValue(Type.getType("[Z")); + } else { + expected1 = newValue(Type.getType("[B")); + } + expected3 = BasicValue.INT_VALUE; + break; + case CASTORE: + expected1 = newValue(Type.getType("[C")); + expected3 = BasicValue.INT_VALUE; + break; + case SASTORE: + expected1 = newValue(Type.getType("[S")); + expected3 = BasicValue.INT_VALUE; + break; + case LASTORE: + expected1 = newValue(Type.getType("[J")); + expected3 = BasicValue.LONG_VALUE; + break; + case FASTORE: + expected1 = newValue(Type.getType("[F")); + expected3 = BasicValue.FLOAT_VALUE; + break; + case DASTORE: + expected1 = newValue(Type.getType("[D")); + expected3 = BasicValue.DOUBLE_VALUE; + break; + case AASTORE: + expected1 = value1; + expected3 = BasicValue.REFERENCE_VALUE; + break; + default: + throw new Error("Internal error."); } if (!isSubTypeOf(value1, expected1)) { - throw new AnalyzerException(insn, "First argument", "a " + expected1 - + " array reference", value1); + throw new AnalyzerException(insn, "First argument", "a " + + expected1 + " array reference", value1); } else if (!BasicValue.INT_VALUE.equals(value2)) { throw new AnalyzerException(insn, "Second argument", - BasicValue.INT_VALUE, - value2); + BasicValue.INT_VALUE, value2); } else if (!isSubTypeOf(value3, expected3)) { - throw new AnalyzerException(insn, "Third argument", expected3, value3); + throw new AnalyzerException(insn, "Third argument", expected3, + value3); } return null; } @Override - public BasicValue naryOperation(final AbstractInsnNode insn, final List values) - throws 
AnalyzerException - { + public BasicValue naryOperation(final AbstractInsnNode insn, + final List values) throws AnalyzerException { int opcode = insn.getOpcode(); if (opcode == MULTIANEWARRAY) { for (int i = 0; i < values.size(); ++i) { if (!BasicValue.INT_VALUE.equals(values.get(i))) { - throw new AnalyzerException(insn, - null, - BasicValue.INT_VALUE, - values.get(i)); + throw new AnalyzerException(insn, null, + BasicValue.INT_VALUE, values.get(i)); } } } else { @@ -407,22 +389,18 @@ public class BasicVerifier extends BasicInterpreter { Type owner = Type.getObjectType(((MethodInsnNode) insn).owner); if (!isSubTypeOf(values.get(i++), newValue(owner))) { throw new AnalyzerException(insn, "Method owner", - newValue(owner), - values.get(0)); + newValue(owner), values.get(0)); } } - String desc = (opcode == INVOKEDYNAMIC)? - ((InvokeDynamicInsnNode) insn).desc: - ((MethodInsnNode) insn).desc; + String desc = (opcode == INVOKEDYNAMIC) ? ((InvokeDynamicInsnNode) insn).desc + : ((MethodInsnNode) insn).desc; Type[] args = Type.getArgumentTypes(desc); while (i < values.size()) { BasicValue expected = newValue(args[j++]); BasicValue encountered = values.get(i++); if (!isSubTypeOf(encountered, expected)) { - throw new AnalyzerException(insn, - "Argument " + j, - expected, - encountered); + throw new AnalyzerException(insn, "Argument " + j, + expected, encountered); } } } @@ -430,16 +408,12 @@ public class BasicVerifier extends BasicInterpreter { } @Override - public void returnOperation( - final AbstractInsnNode insn, - final BasicValue value, - final BasicValue expected) throws AnalyzerException - { + public void returnOperation(final AbstractInsnNode insn, + final BasicValue value, final BasicValue expected) + throws AnalyzerException { if (!isSubTypeOf(value, expected)) { - throw new AnalyzerException(insn, - "Incompatible return type", - expected, - value); + throw new AnalyzerException(insn, "Incompatible return type", + expected, value); } } @@ -448,12 +422,12 @@ public class BasicVerifier extends BasicInterpreter { } protected BasicValue getElementValue(final BasicValue objectArrayValue) - throws AnalyzerException - { + throws AnalyzerException { return BasicValue.REFERENCE_VALUE; } - protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) { + protected boolean isSubTypeOf(final BasicValue value, + final BasicValue expected) { return value.equals(expected); } } diff --git a/src/asm/scala/tools/asm/tree/analysis/Frame.java b/src/asm/scala/tools/asm/tree/analysis/Frame.java index fe19c2c9ae..0d92edc4d6 100644 --- a/src/asm/scala/tools/asm/tree/analysis/Frame.java +++ b/src/asm/scala/tools/asm/tree/analysis/Frame.java @@ -44,10 +44,11 @@ import scala.tools.asm.tree.VarInsnNode; /** * A symbolic execution stack frame. A stack frame contains a set of local * variable slots, and an operand stack. Warning: long and double values are - * represented by two slots in local variables, and by one slot - * in the operand stack. + * represented by two slots in local variables, and by one slot in + * the operand stack. * - * @param type of the Value used for the analysis. + * @param + * type of the Value used for the analysis. * * @author Eric Bruneton */ @@ -77,8 +78,10 @@ public class Frame { /** * Constructs a new frame with the given size. * - * @param nLocals the maximum number of local variables of the frame. - * @param nStack the maximum stack size of the frame. + * @param nLocals + * the maximum number of local variables of the frame. 
+ * @param nStack + * the maximum stack size of the frame. */ public Frame(final int nLocals, final int nStack) { this.values = (V[]) new Value[nLocals + nStack]; @@ -88,7 +91,8 @@ public class Frame { /** * Constructs a new frame that is identical to the given frame. * - * @param src a frame. + * @param src + * a frame. */ public Frame(final Frame src) { this(src.locals, src.values.length - src.locals); @@ -98,7 +102,8 @@ public class Frame { /** * Copies the state of the given frame into this frame. * - * @param src a frame. + * @param src + * a frame. * @return this frame. */ public Frame init(final Frame src) { @@ -111,8 +116,9 @@ public class Frame { /** * Sets the expected return type of the analyzed method. * - * @param v the expected return type of the analyzed method, or - * null if the method returns void. + * @param v + * the expected return type of the analyzed method, or + * null if the method returns void. */ public void setReturn(final V v) { returnValue = v; @@ -130,13 +136,16 @@ public class Frame { /** * Returns the value of the given local variable. * - * @param i a local variable index. + * @param i + * a local variable index. * @return the value of the given local variable. - * @throws IndexOutOfBoundsException if the variable does not exist. + * @throws IndexOutOfBoundsException + * if the variable does not exist. */ public V getLocal(final int i) throws IndexOutOfBoundsException { if (i >= locals) { - throw new IndexOutOfBoundsException("Trying to access an inexistant local variable"); + throw new IndexOutOfBoundsException( + "Trying to access an inexistant local variable"); } return values[i]; } @@ -144,15 +153,18 @@ public class Frame { /** * Sets the value of the given local variable. * - * @param i a local variable index. - * @param value the new value of this local variable. - * @throws IndexOutOfBoundsException if the variable does not exist. + * @param i + * a local variable index. + * @param value + * the new value of this local variable. + * @throws IndexOutOfBoundsException + * if the variable does not exist. */ public void setLocal(final int i, final V value) - throws IndexOutOfBoundsException - { + throws IndexOutOfBoundsException { if (i >= locals) { - throw new IndexOutOfBoundsException("Trying to access an inexistant local variable "+i); + throw new IndexOutOfBoundsException( + "Trying to access an inexistant local variable " + i); } values[i] = value; } @@ -170,10 +182,11 @@ public class Frame { /** * Returns the value of the given operand stack slot. * - * @param i the index of an operand stack slot. + * @param i + * the index of an operand stack slot. * @return the value of the given operand stack slot. - * @throws IndexOutOfBoundsException if the operand stack slot does not - * exist. + * @throws IndexOutOfBoundsException + * if the operand stack slot does not exist. */ public V getStack(final int i) throws IndexOutOfBoundsException { return values[i + locals]; @@ -190,11 +203,13 @@ public class Frame { * Pops a value from the operand stack of this frame. * * @return the value that has been popped from the stack. - * @throws IndexOutOfBoundsException if the operand stack is empty. + * @throws IndexOutOfBoundsException + * if the operand stack is empty. 
*/ public V pop() throws IndexOutOfBoundsException { if (top == 0) { - throw new IndexOutOfBoundsException("Cannot pop operand off an empty stack."); + throw new IndexOutOfBoundsException( + "Cannot pop operand off an empty stack."); } return values[--top + locals]; } @@ -202,466 +217,469 @@ public class Frame { /** * Pushes a value into the operand stack of this frame. * - * @param value the value that must be pushed into the stack. - * @throws IndexOutOfBoundsException if the operand stack is full. + * @param value + * the value that must be pushed into the stack. + * @throws IndexOutOfBoundsException + * if the operand stack is full. */ public void push(final V value) throws IndexOutOfBoundsException { if (top + locals >= values.length) { - throw new IndexOutOfBoundsException("Insufficient maximum stack size."); + throw new IndexOutOfBoundsException( + "Insufficient maximum stack size."); } values[top++ + locals] = value; } - public void execute( - final AbstractInsnNode insn, - final Interpreter interpreter) throws AnalyzerException - { + public void execute(final AbstractInsnNode insn, + final Interpreter interpreter) throws AnalyzerException { V value1, value2, value3, value4; List values; int var; switch (insn.getOpcode()) { - case Opcodes.NOP: - break; - case Opcodes.ACONST_NULL: - case Opcodes.ICONST_M1: - case Opcodes.ICONST_0: - case Opcodes.ICONST_1: - case Opcodes.ICONST_2: - case Opcodes.ICONST_3: - case Opcodes.ICONST_4: - case Opcodes.ICONST_5: - case Opcodes.LCONST_0: - case Opcodes.LCONST_1: - case Opcodes.FCONST_0: - case Opcodes.FCONST_1: - case Opcodes.FCONST_2: - case Opcodes.DCONST_0: - case Opcodes.DCONST_1: - case Opcodes.BIPUSH: - case Opcodes.SIPUSH: - case Opcodes.LDC: - push(interpreter.newOperation(insn)); - break; - case Opcodes.ILOAD: - case Opcodes.LLOAD: - case Opcodes.FLOAD: - case Opcodes.DLOAD: - case Opcodes.ALOAD: - push(interpreter.copyOperation(insn, - getLocal(((VarInsnNode) insn).var))); - break; - case Opcodes.IALOAD: - case Opcodes.LALOAD: - case Opcodes.FALOAD: - case Opcodes.DALOAD: - case Opcodes.AALOAD: - case Opcodes.BALOAD: - case Opcodes.CALOAD: - case Opcodes.SALOAD: - value2 = pop(); - value1 = pop(); - push(interpreter.binaryOperation(insn, value1, value2)); - break; - case Opcodes.ISTORE: - case Opcodes.LSTORE: - case Opcodes.FSTORE: - case Opcodes.DSTORE: - case Opcodes.ASTORE: - value1 = interpreter.copyOperation(insn, pop()); - var = ((VarInsnNode) insn).var; - setLocal(var, value1); - if (value1.getSize() == 2) { - setLocal(var + 1, interpreter.newValue(null)); + case Opcodes.NOP: + break; + case Opcodes.ACONST_NULL: + case Opcodes.ICONST_M1: + case Opcodes.ICONST_0: + case Opcodes.ICONST_1: + case Opcodes.ICONST_2: + case Opcodes.ICONST_3: + case Opcodes.ICONST_4: + case Opcodes.ICONST_5: + case Opcodes.LCONST_0: + case Opcodes.LCONST_1: + case Opcodes.FCONST_0: + case Opcodes.FCONST_1: + case Opcodes.FCONST_2: + case Opcodes.DCONST_0: + case Opcodes.DCONST_1: + case Opcodes.BIPUSH: + case Opcodes.SIPUSH: + case Opcodes.LDC: + push(interpreter.newOperation(insn)); + break; + case Opcodes.ILOAD: + case Opcodes.LLOAD: + case Opcodes.FLOAD: + case Opcodes.DLOAD: + case Opcodes.ALOAD: + push(interpreter.copyOperation(insn, + getLocal(((VarInsnNode) insn).var))); + break; + case Opcodes.IALOAD: + case Opcodes.LALOAD: + case Opcodes.FALOAD: + case Opcodes.DALOAD: + case Opcodes.AALOAD: + case Opcodes.BALOAD: + case Opcodes.CALOAD: + case Opcodes.SALOAD: + value2 = pop(); + value1 = pop(); + push(interpreter.binaryOperation(insn, 
value1, value2)); + break; + case Opcodes.ISTORE: + case Opcodes.LSTORE: + case Opcodes.FSTORE: + case Opcodes.DSTORE: + case Opcodes.ASTORE: + value1 = interpreter.copyOperation(insn, pop()); + var = ((VarInsnNode) insn).var; + setLocal(var, value1); + if (value1.getSize() == 2) { + setLocal(var + 1, interpreter.newValue(null)); + } + if (var > 0) { + Value local = getLocal(var - 1); + if (local != null && local.getSize() == 2) { + setLocal(var - 1, interpreter.newValue(null)); } - if (var > 0) { - Value local = getLocal(var - 1); - if (local != null && local.getSize() == 2) { - setLocal(var - 1, interpreter.newValue(null)); - } + } + break; + case Opcodes.IASTORE: + case Opcodes.LASTORE: + case Opcodes.FASTORE: + case Opcodes.DASTORE: + case Opcodes.AASTORE: + case Opcodes.BASTORE: + case Opcodes.CASTORE: + case Opcodes.SASTORE: + value3 = pop(); + value2 = pop(); + value1 = pop(); + interpreter.ternaryOperation(insn, value1, value2, value3); + break; + case Opcodes.POP: + if (pop().getSize() == 2) { + throw new AnalyzerException(insn, "Illegal use of POP"); + } + break; + case Opcodes.POP2: + if (pop().getSize() == 1) { + if (pop().getSize() != 1) { + throw new AnalyzerException(insn, "Illegal use of POP2"); } - break; - case Opcodes.IASTORE: - case Opcodes.LASTORE: - case Opcodes.FASTORE: - case Opcodes.DASTORE: - case Opcodes.AASTORE: - case Opcodes.BASTORE: - case Opcodes.CASTORE: - case Opcodes.SASTORE: - value3 = pop(); + } + break; + case Opcodes.DUP: + value1 = pop(); + if (value1.getSize() != 1) { + throw new AnalyzerException(insn, "Illegal use of DUP"); + } + push(value1); + push(interpreter.copyOperation(insn, value1)); + break; + case Opcodes.DUP_X1: + value1 = pop(); + value2 = pop(); + if (value1.getSize() != 1 || value2.getSize() != 1) { + throw new AnalyzerException(insn, "Illegal use of DUP_X1"); + } + push(interpreter.copyOperation(insn, value1)); + push(value2); + push(value1); + break; + case Opcodes.DUP_X2: + value1 = pop(); + if (value1.getSize() == 1) { value2 = pop(); - value1 = pop(); - interpreter.ternaryOperation(insn, value1, value2, value3); - break; - case Opcodes.POP: - if (pop().getSize() == 2) { - throw new AnalyzerException(insn, "Illegal use of POP"); - } - break; - case Opcodes.POP2: - if (pop().getSize() == 1) { - if (pop().getSize() != 1) { - throw new AnalyzerException(insn, "Illegal use of POP2"); + if (value2.getSize() == 1) { + value3 = pop(); + if (value3.getSize() == 1) { + push(interpreter.copyOperation(insn, value1)); + push(value3); + push(value2); + push(value1); + break; } + } else { + push(interpreter.copyOperation(insn, value1)); + push(value2); + push(value1); + break; } - break; - case Opcodes.DUP: - value1 = pop(); - if (value1.getSize() != 1) { - throw new AnalyzerException(insn, "Illegal use of DUP"); + } + throw new AnalyzerException(insn, "Illegal use of DUP_X2"); + case Opcodes.DUP2: + value1 = pop(); + if (value1.getSize() == 1) { + value2 = pop(); + if (value2.getSize() == 1) { + push(value2); + push(value1); + push(interpreter.copyOperation(insn, value2)); + push(interpreter.copyOperation(insn, value1)); + break; } + } else { push(value1); push(interpreter.copyOperation(insn, value1)); break; - case Opcodes.DUP_X1: - value1 = pop(); + } + throw new AnalyzerException(insn, "Illegal use of DUP2"); + case Opcodes.DUP2_X1: + value1 = pop(); + if (value1.getSize() == 1) { value2 = pop(); - if (value1.getSize() != 1 || value2.getSize() != 1) { - throw new AnalyzerException(insn, "Illegal use of DUP_X1"); - } - 
push(interpreter.copyOperation(insn, value1)); - push(value2); - push(value1); - break; - case Opcodes.DUP_X2: - value1 = pop(); - if (value1.getSize() == 1) { - value2 = pop(); - if (value2.getSize() == 1) { - value3 = pop(); - if (value3.getSize() == 1) { - push(interpreter.copyOperation(insn, value1)); - push(value3); - push(value2); - push(value1); - break; - } - } else { + if (value2.getSize() == 1) { + value3 = pop(); + if (value3.getSize() == 1) { + push(interpreter.copyOperation(insn, value2)); push(interpreter.copyOperation(insn, value1)); + push(value3); push(value2); push(value1); break; } } - throw new AnalyzerException(insn, "Illegal use of DUP_X2"); - case Opcodes.DUP2: - value1 = pop(); - if (value1.getSize() == 1) { - value2 = pop(); - if (value2.getSize() == 1) { - push(value2); - push(value1); - push(interpreter.copyOperation(insn, value2)); - push(interpreter.copyOperation(insn, value1)); - break; - } - } else { - push(value1); + } else { + value2 = pop(); + if (value2.getSize() == 1) { push(interpreter.copyOperation(insn, value1)); + push(value2); + push(value1); break; } - throw new AnalyzerException(insn, "Illegal use of DUP2"); - case Opcodes.DUP2_X1: - value1 = pop(); - if (value1.getSize() == 1) { - value2 = pop(); - if (value2.getSize() == 1) { - value3 = pop(); - if (value3.getSize() == 1) { + } + throw new AnalyzerException(insn, "Illegal use of DUP2_X1"); + case Opcodes.DUP2_X2: + value1 = pop(); + if (value1.getSize() == 1) { + value2 = pop(); + if (value2.getSize() == 1) { + value3 = pop(); + if (value3.getSize() == 1) { + value4 = pop(); + if (value4.getSize() == 1) { push(interpreter.copyOperation(insn, value2)); push(interpreter.copyOperation(insn, value1)); + push(value4); push(value3); push(value2); push(value1); break; } - } - } else { - value2 = pop(); - if (value2.getSize() == 1) { + } else { + push(interpreter.copyOperation(insn, value2)); push(interpreter.copyOperation(insn, value1)); + push(value3); push(value2); push(value1); break; } } - throw new AnalyzerException(insn, "Illegal use of DUP2_X1"); - case Opcodes.DUP2_X2: - value1 = pop(); - if (value1.getSize() == 1) { - value2 = pop(); - if (value2.getSize() == 1) { - value3 = pop(); - if (value3.getSize() == 1) { - value4 = pop(); - if (value4.getSize() == 1) { - push(interpreter.copyOperation(insn, value2)); - push(interpreter.copyOperation(insn, value1)); - push(value4); - push(value3); - push(value2); - push(value1); - break; - } - } else { - push(interpreter.copyOperation(insn, value2)); - push(interpreter.copyOperation(insn, value1)); - push(value3); - push(value2); - push(value1); - break; - } - } - } else { - value2 = pop(); - if (value2.getSize() == 1) { - value3 = pop(); - if (value3.getSize() == 1) { - push(interpreter.copyOperation(insn, value1)); - push(value3); - push(value2); - push(value1); - break; - } - } else { + } else { + value2 = pop(); + if (value2.getSize() == 1) { + value3 = pop(); + if (value3.getSize() == 1) { push(interpreter.copyOperation(insn, value1)); + push(value3); push(value2); push(value1); break; } - } - throw new AnalyzerException(insn, "Illegal use of DUP2_X2"); - case Opcodes.SWAP: - value2 = pop(); - value1 = pop(); - if (value1.getSize() != 1 || value2.getSize() != 1) { - throw new AnalyzerException(insn, "Illegal use of SWAP"); - } - push(interpreter.copyOperation(insn, value2)); - push(interpreter.copyOperation(insn, value1)); - break; - case Opcodes.IADD: - case Opcodes.LADD: - case Opcodes.FADD: - case Opcodes.DADD: - case Opcodes.ISUB: - case 
Opcodes.LSUB: - case Opcodes.FSUB: - case Opcodes.DSUB: - case Opcodes.IMUL: - case Opcodes.LMUL: - case Opcodes.FMUL: - case Opcodes.DMUL: - case Opcodes.IDIV: - case Opcodes.LDIV: - case Opcodes.FDIV: - case Opcodes.DDIV: - case Opcodes.IREM: - case Opcodes.LREM: - case Opcodes.FREM: - case Opcodes.DREM: - value2 = pop(); - value1 = pop(); - push(interpreter.binaryOperation(insn, value1, value2)); - break; - case Opcodes.INEG: - case Opcodes.LNEG: - case Opcodes.FNEG: - case Opcodes.DNEG: - push(interpreter.unaryOperation(insn, pop())); - break; - case Opcodes.ISHL: - case Opcodes.LSHL: - case Opcodes.ISHR: - case Opcodes.LSHR: - case Opcodes.IUSHR: - case Opcodes.LUSHR: - case Opcodes.IAND: - case Opcodes.LAND: - case Opcodes.IOR: - case Opcodes.LOR: - case Opcodes.IXOR: - case Opcodes.LXOR: - value2 = pop(); - value1 = pop(); - push(interpreter.binaryOperation(insn, value1, value2)); - break; - case Opcodes.IINC: - var = ((IincInsnNode) insn).var; - setLocal(var, interpreter.unaryOperation(insn, getLocal(var))); - break; - case Opcodes.I2L: - case Opcodes.I2F: - case Opcodes.I2D: - case Opcodes.L2I: - case Opcodes.L2F: - case Opcodes.L2D: - case Opcodes.F2I: - case Opcodes.F2L: - case Opcodes.F2D: - case Opcodes.D2I: - case Opcodes.D2L: - case Opcodes.D2F: - case Opcodes.I2B: - case Opcodes.I2C: - case Opcodes.I2S: - push(interpreter.unaryOperation(insn, pop())); - break; - case Opcodes.LCMP: - case Opcodes.FCMPL: - case Opcodes.FCMPG: - case Opcodes.DCMPL: - case Opcodes.DCMPG: - value2 = pop(); - value1 = pop(); - push(interpreter.binaryOperation(insn, value1, value2)); - break; - case Opcodes.IFEQ: - case Opcodes.IFNE: - case Opcodes.IFLT: - case Opcodes.IFGE: - case Opcodes.IFGT: - case Opcodes.IFLE: - interpreter.unaryOperation(insn, pop()); - break; - case Opcodes.IF_ICMPEQ: - case Opcodes.IF_ICMPNE: - case Opcodes.IF_ICMPLT: - case Opcodes.IF_ICMPGE: - case Opcodes.IF_ICMPGT: - case Opcodes.IF_ICMPLE: - case Opcodes.IF_ACMPEQ: - case Opcodes.IF_ACMPNE: - value2 = pop(); - value1 = pop(); - interpreter.binaryOperation(insn, value1, value2); - break; - case Opcodes.GOTO: - break; - case Opcodes.JSR: - push(interpreter.newOperation(insn)); - break; - case Opcodes.RET: - break; - case Opcodes.TABLESWITCH: - case Opcodes.LOOKUPSWITCH: - interpreter.unaryOperation(insn, pop()); - break; - case Opcodes.IRETURN: - case Opcodes.LRETURN: - case Opcodes.FRETURN: - case Opcodes.DRETURN: - case Opcodes.ARETURN: - value1 = pop(); - interpreter.unaryOperation(insn, value1); - interpreter.returnOperation(insn, value1, returnValue); - break; - case Opcodes.RETURN: - if (returnValue != null) { - throw new AnalyzerException(insn, "Incompatible return type"); - } - break; - case Opcodes.GETSTATIC: - push(interpreter.newOperation(insn)); - break; - case Opcodes.PUTSTATIC: - interpreter.unaryOperation(insn, pop()); - break; - case Opcodes.GETFIELD: - push(interpreter.unaryOperation(insn, pop())); - break; - case Opcodes.PUTFIELD: - value2 = pop(); - value1 = pop(); - interpreter.binaryOperation(insn, value1, value2); - break; - case Opcodes.INVOKEVIRTUAL: - case Opcodes.INVOKESPECIAL: - case Opcodes.INVOKESTATIC: - case Opcodes.INVOKEINTERFACE: { - values = new ArrayList(); - String desc = ((MethodInsnNode) insn).desc; - for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) { - values.add(0, pop()); - } - if (insn.getOpcode() != Opcodes.INVOKESTATIC) { - values.add(0, pop()); - } - if (Type.getReturnType(desc) == Type.VOID_TYPE) { - interpreter.naryOperation(insn, values); } else { - 
push(interpreter.naryOperation(insn, values)); + push(interpreter.copyOperation(insn, value1)); + push(value2); + push(value1); + break; } - break; } - case Opcodes.INVOKEDYNAMIC: { - values = new ArrayList(); - String desc = ((InvokeDynamicInsnNode) insn).desc; - for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) { - values.add(0, pop()); - } - if (Type.getReturnType(desc) == Type.VOID_TYPE) { - interpreter.naryOperation(insn, values); - } else { - push(interpreter.naryOperation(insn, values)); - } - break; + throw new AnalyzerException(insn, "Illegal use of DUP2_X2"); + case Opcodes.SWAP: + value2 = pop(); + value1 = pop(); + if (value1.getSize() != 1 || value2.getSize() != 1) { + throw new AnalyzerException(insn, "Illegal use of SWAP"); } - case Opcodes.NEW: - push(interpreter.newOperation(insn)); - break; - case Opcodes.NEWARRAY: - case Opcodes.ANEWARRAY: - case Opcodes.ARRAYLENGTH: - push(interpreter.unaryOperation(insn, pop())); - break; - case Opcodes.ATHROW: - interpreter.unaryOperation(insn, pop()); - break; - case Opcodes.CHECKCAST: - case Opcodes.INSTANCEOF: - push(interpreter.unaryOperation(insn, pop())); - break; - case Opcodes.MONITORENTER: - case Opcodes.MONITOREXIT: - interpreter.unaryOperation(insn, pop()); - break; - case Opcodes.MULTIANEWARRAY: - values = new ArrayList(); - for (int i = ((MultiANewArrayInsnNode) insn).dims; i > 0; --i) { - values.add(0, pop()); - } + push(interpreter.copyOperation(insn, value2)); + push(interpreter.copyOperation(insn, value1)); + break; + case Opcodes.IADD: + case Opcodes.LADD: + case Opcodes.FADD: + case Opcodes.DADD: + case Opcodes.ISUB: + case Opcodes.LSUB: + case Opcodes.FSUB: + case Opcodes.DSUB: + case Opcodes.IMUL: + case Opcodes.LMUL: + case Opcodes.FMUL: + case Opcodes.DMUL: + case Opcodes.IDIV: + case Opcodes.LDIV: + case Opcodes.FDIV: + case Opcodes.DDIV: + case Opcodes.IREM: + case Opcodes.LREM: + case Opcodes.FREM: + case Opcodes.DREM: + value2 = pop(); + value1 = pop(); + push(interpreter.binaryOperation(insn, value1, value2)); + break; + case Opcodes.INEG: + case Opcodes.LNEG: + case Opcodes.FNEG: + case Opcodes.DNEG: + push(interpreter.unaryOperation(insn, pop())); + break; + case Opcodes.ISHL: + case Opcodes.LSHL: + case Opcodes.ISHR: + case Opcodes.LSHR: + case Opcodes.IUSHR: + case Opcodes.LUSHR: + case Opcodes.IAND: + case Opcodes.LAND: + case Opcodes.IOR: + case Opcodes.LOR: + case Opcodes.IXOR: + case Opcodes.LXOR: + value2 = pop(); + value1 = pop(); + push(interpreter.binaryOperation(insn, value1, value2)); + break; + case Opcodes.IINC: + var = ((IincInsnNode) insn).var; + setLocal(var, interpreter.unaryOperation(insn, getLocal(var))); + break; + case Opcodes.I2L: + case Opcodes.I2F: + case Opcodes.I2D: + case Opcodes.L2I: + case Opcodes.L2F: + case Opcodes.L2D: + case Opcodes.F2I: + case Opcodes.F2L: + case Opcodes.F2D: + case Opcodes.D2I: + case Opcodes.D2L: + case Opcodes.D2F: + case Opcodes.I2B: + case Opcodes.I2C: + case Opcodes.I2S: + push(interpreter.unaryOperation(insn, pop())); + break; + case Opcodes.LCMP: + case Opcodes.FCMPL: + case Opcodes.FCMPG: + case Opcodes.DCMPL: + case Opcodes.DCMPG: + value2 = pop(); + value1 = pop(); + push(interpreter.binaryOperation(insn, value1, value2)); + break; + case Opcodes.IFEQ: + case Opcodes.IFNE: + case Opcodes.IFLT: + case Opcodes.IFGE: + case Opcodes.IFGT: + case Opcodes.IFLE: + interpreter.unaryOperation(insn, pop()); + break; + case Opcodes.IF_ICMPEQ: + case Opcodes.IF_ICMPNE: + case Opcodes.IF_ICMPLT: + case Opcodes.IF_ICMPGE: + case 
Opcodes.IF_ICMPGT: + case Opcodes.IF_ICMPLE: + case Opcodes.IF_ACMPEQ: + case Opcodes.IF_ACMPNE: + value2 = pop(); + value1 = pop(); + interpreter.binaryOperation(insn, value1, value2); + break; + case Opcodes.GOTO: + break; + case Opcodes.JSR: + push(interpreter.newOperation(insn)); + break; + case Opcodes.RET: + break; + case Opcodes.TABLESWITCH: + case Opcodes.LOOKUPSWITCH: + interpreter.unaryOperation(insn, pop()); + break; + case Opcodes.IRETURN: + case Opcodes.LRETURN: + case Opcodes.FRETURN: + case Opcodes.DRETURN: + case Opcodes.ARETURN: + value1 = pop(); + interpreter.unaryOperation(insn, value1); + interpreter.returnOperation(insn, value1, returnValue); + break; + case Opcodes.RETURN: + if (returnValue != null) { + throw new AnalyzerException(insn, "Incompatible return type"); + } + break; + case Opcodes.GETSTATIC: + push(interpreter.newOperation(insn)); + break; + case Opcodes.PUTSTATIC: + interpreter.unaryOperation(insn, pop()); + break; + case Opcodes.GETFIELD: + push(interpreter.unaryOperation(insn, pop())); + break; + case Opcodes.PUTFIELD: + value2 = pop(); + value1 = pop(); + interpreter.binaryOperation(insn, value1, value2); + break; + case Opcodes.INVOKEVIRTUAL: + case Opcodes.INVOKESPECIAL: + case Opcodes.INVOKESTATIC: + case Opcodes.INVOKEINTERFACE: { + values = new ArrayList(); + String desc = ((MethodInsnNode) insn).desc; + for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) { + values.add(0, pop()); + } + if (insn.getOpcode() != Opcodes.INVOKESTATIC) { + values.add(0, pop()); + } + if (Type.getReturnType(desc) == Type.VOID_TYPE) { + interpreter.naryOperation(insn, values); + } else { push(interpreter.naryOperation(insn, values)); - break; - case Opcodes.IFNULL: - case Opcodes.IFNONNULL: - interpreter.unaryOperation(insn, pop()); - break; - default: - throw new RuntimeException("Illegal opcode "+insn.getOpcode()); + } + break; + } + case Opcodes.INVOKEDYNAMIC: { + values = new ArrayList(); + String desc = ((InvokeDynamicInsnNode) insn).desc; + for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) { + values.add(0, pop()); + } + if (Type.getReturnType(desc) == Type.VOID_TYPE) { + interpreter.naryOperation(insn, values); + } else { + push(interpreter.naryOperation(insn, values)); + } + break; + } + case Opcodes.NEW: + push(interpreter.newOperation(insn)); + break; + case Opcodes.NEWARRAY: + case Opcodes.ANEWARRAY: + case Opcodes.ARRAYLENGTH: + push(interpreter.unaryOperation(insn, pop())); + break; + case Opcodes.ATHROW: + interpreter.unaryOperation(insn, pop()); + break; + case Opcodes.CHECKCAST: + case Opcodes.INSTANCEOF: + push(interpreter.unaryOperation(insn, pop())); + break; + case Opcodes.MONITORENTER: + case Opcodes.MONITOREXIT: + interpreter.unaryOperation(insn, pop()); + break; + case Opcodes.MULTIANEWARRAY: + values = new ArrayList(); + for (int i = ((MultiANewArrayInsnNode) insn).dims; i > 0; --i) { + values.add(0, pop()); + } + push(interpreter.naryOperation(insn, values)); + break; + case Opcodes.IFNULL: + case Opcodes.IFNONNULL: + interpreter.unaryOperation(insn, pop()); + break; + default: + throw new RuntimeException("Illegal opcode " + insn.getOpcode()); } } /** * Merges this frame with the given frame. * - * @param frame a frame. - * @param interpreter the interpreter used to merge values. + * @param frame + * a frame. + * @param interpreter + * the interpreter used to merge values. * @return true if this frame has been changed as a result of the * merge operation, or false otherwise. 
- * @throws AnalyzerException if the frames have incompatible sizes. + * @throws AnalyzerException + * if the frames have incompatible sizes. */ - public boolean merge(final Frame frame, final Interpreter interpreter) - throws AnalyzerException - { + public boolean merge(final Frame frame, + final Interpreter interpreter) throws AnalyzerException { if (top != frame.top) { throw new AnalyzerException(null, "Incompatible stack heights"); } boolean changes = false; for (int i = 0; i < locals + top; ++i) { V v = interpreter.merge(values[i], frame.values[i]); - if (v != values[i]) { + if (!v.equals(values[i])) { values[i] = v; changes = true; } @@ -672,9 +690,11 @@ public class Frame { /** * Merges this frame with the given frame (case of a RET instruction). * - * @param frame a frame - * @param access the local variables that have been accessed by the - * subroutine to which the RET instruction corresponds. + * @param frame + * a frame + * @param access + * the local variables that have been accessed by the subroutine + * to which the RET instruction corresponds. * @return true if this frame has been changed as a result of the * merge operation, or false otherwise. */ diff --git a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java index 930c8f4af8..56f4bedc00 100644 --- a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java +++ b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java @@ -42,7 +42,8 @@ import scala.tools.asm.tree.AbstractInsnNode; * various semantic interpreters, without needing to duplicate the code to * simulate the transfer of values. * - * @param type of the Value used for the analysis. + * @param + * type of the Value used for the analysis. * * @author Eric Bruneton */ @@ -57,12 +58,13 @@ public abstract class Interpreter { /** * Creates a new value that represents the given type. * - * Called for method parameters (including this), - * exception handler variable and with null type - * for variables reserved by long and double types. + * Called for method parameters (including this), exception + * handler variable and with null type for variables reserved + * by long and double types. * - * @param type a primitive or reference type, or null to - * represent an uninitialized value. + * @param type + * a primitive or reference type, or null to represent + * an uninitialized value. * @return a value that represents the given type. The size of the returned * value must be equal to the size of the given type. */ @@ -76,9 +78,11 @@ public abstract class Interpreter { * ICONST_5, LCONST_0, LCONST_1, FCONST_0, FCONST_1, FCONST_2, DCONST_0, * DCONST_1, BIPUSH, SIPUSH, LDC, JSR, GETSTATIC, NEW * - * @param insn the bytecode instruction to be interpreted. + * @param insn + * the bytecode instruction to be interpreted. * @return the result of the interpretation of the given instruction. - * @throws AnalyzerException if an error occured during the interpretation. + * @throws AnalyzerException + * if an error occured during the interpretation. */ public abstract V newOperation(AbstractInsnNode insn) throws AnalyzerException; @@ -90,11 +94,14 @@ public abstract class Interpreter { * ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, * ASTORE, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP * - * @param insn the bytecode instruction to be interpreted. - * @param value the value that must be moved by the instruction. + * @param insn + * the bytecode instruction to be interpreted. 
+ * @param value + * the value that must be moved by the instruction. * @return the result of the interpretation of the given instruction. The * returned value must be equal to the given value. - * @throws AnalyzerException if an error occured during the interpretation. + * @throws AnalyzerException + * if an error occured during the interpretation. */ public abstract V copyOperation(AbstractInsnNode insn, V value) throws AnalyzerException; @@ -109,10 +116,13 @@ public abstract class Interpreter { * PUTSTATIC, GETFIELD, NEWARRAY, ANEWARRAY, ARRAYLENGTH, ATHROW, CHECKCAST, * INSTANCEOF, MONITORENTER, MONITOREXIT, IFNULL, IFNONNULL * - * @param insn the bytecode instruction to be interpreted. - * @param value the argument of the instruction to be interpreted. + * @param insn + * the bytecode instruction to be interpreted. + * @param value + * the argument of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. - * @throws AnalyzerException if an error occured during the interpretation. + * @throws AnalyzerException + * if an error occured during the interpretation. */ public abstract V unaryOperation(AbstractInsnNode insn, V value) throws AnalyzerException; @@ -128,11 +138,15 @@ public abstract class Interpreter { * DCMPG, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, * IF_ACMPEQ, IF_ACMPNE, PUTFIELD * - * @param insn the bytecode instruction to be interpreted. - * @param value1 the first argument of the instruction to be interpreted. - * @param value2 the second argument of the instruction to be interpreted. + * @param insn + * the bytecode instruction to be interpreted. + * @param value1 + * the first argument of the instruction to be interpreted. + * @param value2 + * the second argument of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. - * @throws AnalyzerException if an error occured during the interpretation. + * @throws AnalyzerException + * if an error occured during the interpretation. */ public abstract V binaryOperation(AbstractInsnNode insn, V value1, V value2) throws AnalyzerException; @@ -143,18 +157,20 @@ public abstract class Interpreter { * * IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE * - * @param insn the bytecode instruction to be interpreted. - * @param value1 the first argument of the instruction to be interpreted. - * @param value2 the second argument of the instruction to be interpreted. - * @param value3 the third argument of the instruction to be interpreted. + * @param insn + * the bytecode instruction to be interpreted. + * @param value1 + * the first argument of the instruction to be interpreted. + * @param value2 + * the second argument of the instruction to be interpreted. + * @param value3 + * the third argument of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. - * @throws AnalyzerException if an error occured during the interpretation. + * @throws AnalyzerException + * if an error occured during the interpretation. */ - public abstract V ternaryOperation( - AbstractInsnNode insn, - V value1, - V value2, - V value3) throws AnalyzerException; + public abstract V ternaryOperation(AbstractInsnNode insn, V value1, + V value2, V value3) throws AnalyzerException; /** * Interprets a bytecode instruction with a variable number of arguments. 
@@ -163,14 +179,16 @@ public abstract class Interpreter { * INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC, INVOKEINTERFACE, * MULTIANEWARRAY and INVOKEDYNAMIC * - * @param insn the bytecode instruction to be interpreted. - * @param values the arguments of the instruction to be interpreted. + * @param insn + * the bytecode instruction to be interpreted. + * @param values + * the arguments of the instruction to be interpreted. * @return the result of the interpretation of the given instruction. - * @throws AnalyzerException if an error occured during the interpretation. + * @throws AnalyzerException + * if an error occured during the interpretation. */ - public abstract V naryOperation( - AbstractInsnNode insn, - List< ? extends V> values) throws AnalyzerException; + public abstract V naryOperation(AbstractInsnNode insn, + List values) throws AnalyzerException; /** * Interprets a bytecode return instruction. This method is called for the @@ -178,15 +196,17 @@ public abstract class Interpreter { * * IRETURN, LRETURN, FRETURN, DRETURN, ARETURN * - * @param insn the bytecode instruction to be interpreted. - * @param value the argument of the instruction to be interpreted. - * @param expected the expected return type of the analyzed method. - * @throws AnalyzerException if an error occured during the interpretation. + * @param insn + * the bytecode instruction to be interpreted. + * @param value + * the argument of the instruction to be interpreted. + * @param expected + * the expected return type of the analyzed method. + * @throws AnalyzerException + * if an error occured during the interpretation. */ - public abstract void returnOperation( - AbstractInsnNode insn, - V value, - V expected) throws AnalyzerException; + public abstract void returnOperation(AbstractInsnNode insn, V value, + V expected) throws AnalyzerException; /** * Merges two values. The merge operation must return a value that @@ -195,8 +215,10 @@ public abstract class Interpreter { * values are integer intervals, the merged value must be an interval that * contains the previous ones. Likewise for other types of values). * - * @param v a value. - * @param w another value. + * @param v + * a value. + * @param w + * another value. * @return the merged value. If the merged value is equal to v, * this method must return v. */ diff --git a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java index c4f515d328..eaecd057ea 100644 --- a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java +++ b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java @@ -79,15 +79,15 @@ public class SimpleVerifier extends BasicVerifier { * Constructs a new {@link SimpleVerifier} to verify a specific class. This * class will not be loaded into the JVM since it may be incorrect. * - * @param currentClass the class that is verified. - * @param currentSuperClass the super class of the class that is verified. - * @param isInterface if the class that is verified is an interface. + * @param currentClass + * the class that is verified. + * @param currentSuperClass + * the super class of the class that is verified. + * @param isInterface + * if the class that is verified is an interface. 
*/ - public SimpleVerifier( - final Type currentClass, - final Type currentSuperClass, - final boolean isInterface) - { + public SimpleVerifier(final Type currentClass, + final Type currentSuperClass, final boolean isInterface) { this(currentClass, currentSuperClass, null, isInterface); } @@ -95,32 +95,25 @@ public class SimpleVerifier extends BasicVerifier { * Constructs a new {@link SimpleVerifier} to verify a specific class. This * class will not be loaded into the JVM since it may be incorrect. * - * @param currentClass the class that is verified. - * @param currentSuperClass the super class of the class that is verified. - * @param currentClassInterfaces the interfaces implemented by the class - * that is verified. - * @param isInterface if the class that is verified is an interface. + * @param currentClass + * the class that is verified. + * @param currentSuperClass + * the super class of the class that is verified. + * @param currentClassInterfaces + * the interfaces implemented by the class that is verified. + * @param isInterface + * if the class that is verified is an interface. */ - public SimpleVerifier( - final Type currentClass, - final Type currentSuperClass, - final List currentClassInterfaces, - final boolean isInterface) - { - this(ASM4, - currentClass, - currentSuperClass, - currentClassInterfaces, + public SimpleVerifier(final Type currentClass, + final Type currentSuperClass, + final List currentClassInterfaces, final boolean isInterface) { + this(ASM4, currentClass, currentSuperClass, currentClassInterfaces, isInterface); } - protected SimpleVerifier( - final int api, - final Type currentClass, - final Type currentSuperClass, - final List currentClassInterfaces, - final boolean isInterface) - { + protected SimpleVerifier(final int api, final Type currentClass, + final Type currentSuperClass, + final List currentClassInterfaces, final boolean isInterface) { super(api); this.currentClass = currentClass; this.currentSuperClass = currentSuperClass; @@ -133,7 +126,8 @@ public class SimpleVerifier extends BasicVerifier { * classes. This is useful if you are verifying multiple interdependent * classes. 
* - * @param loader a ClassLoader to use + * @param loader + * a ClassLoader to use */ public void setClassLoader(final ClassLoader loader) { this.loader = loader; @@ -148,11 +142,11 @@ public class SimpleVerifier extends BasicVerifier { boolean isArray = type.getSort() == Type.ARRAY; if (isArray) { switch (type.getElementType().getSort()) { - case Type.BOOLEAN: - case Type.CHAR: - case Type.BYTE: - case Type.SHORT: - return new BasicValue(type); + case Type.BOOLEAN: + case Type.CHAR: + case Type.BYTE: + case Type.SHORT: + return new BasicValue(type); } } @@ -181,8 +175,7 @@ public class SimpleVerifier extends BasicVerifier { @Override protected BasicValue getElementValue(final BasicValue objectArrayValue) - throws AnalyzerException - { + throws AnalyzerException { Type arrayType = objectArrayValue.getType(); if (arrayType != null) { if (arrayType.getSort() == Type.ARRAY) { @@ -196,28 +189,28 @@ public class SimpleVerifier extends BasicVerifier { } @Override - protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) { + protected boolean isSubTypeOf(final BasicValue value, + final BasicValue expected) { Type expectedType = expected.getType(); Type type = value.getType(); switch (expectedType.getSort()) { - case Type.INT: - case Type.FLOAT: - case Type.LONG: - case Type.DOUBLE: - return type.equals(expectedType); - case Type.ARRAY: - case Type.OBJECT: - if ("Lnull;".equals(type.getDescriptor())) { - return true; - } else if (type.getSort() == Type.OBJECT - || type.getSort() == Type.ARRAY) - { - return isAssignableFrom(expectedType, type); - } else { - return false; - } - default: - throw new Error("Internal error"); + case Type.INT: + case Type.FLOAT: + case Type.LONG: + case Type.DOUBLE: + return type.equals(expectedType); + case Type.ARRAY: + case Type.OBJECT: + if ("Lnull;".equals(type.getDescriptor())) { + return true; + } else if (type.getSort() == Type.OBJECT + || type.getSort() == Type.ARRAY) { + return isAssignableFrom(expectedType, type); + } else { + return false; + } + default: + throw new Error("Internal error"); } } @@ -227,11 +220,9 @@ public class SimpleVerifier extends BasicVerifier { Type t = v.getType(); Type u = w.getType(); if (t != null - && (t.getSort() == Type.OBJECT || t.getSort() == Type.ARRAY)) - { + && (t.getSort() == Type.OBJECT || t.getSort() == Type.ARRAY)) { if (u != null - && (u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY)) - { + && (u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY)) { if ("Lnull;".equals(t.getDescriptor())) { return w; } @@ -288,7 +279,8 @@ public class SimpleVerifier extends BasicVerifier { return false; } else { if (isInterface) { - return u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY; + return u.getSort() == Type.OBJECT + || u.getSort() == Type.ARRAY; } return isAssignableFrom(t, getSuperClass(u)); } @@ -318,8 +310,7 @@ public class SimpleVerifier extends BasicVerifier { try { if (t.getSort() == Type.ARRAY) { return Class.forName(t.getDescriptor().replace('/', '.'), - false, - loader); + false, loader); } return Class.forName(t.getClassName(), false, loader); } catch (ClassNotFoundException e) { diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java index 067200b51e..a68086c073 100644 --- a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java +++ b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java @@ -47,8 +47,7 @@ import scala.tools.asm.tree.MethodInsnNode; * @author Eric Bruneton */ 
public class SourceInterpreter extends Interpreter implements - Opcodes -{ + Opcodes { public SourceInterpreter() { super(ASM4); @@ -70,125 +69,118 @@ public class SourceInterpreter extends Interpreter implements public SourceValue newOperation(final AbstractInsnNode insn) { int size; switch (insn.getOpcode()) { - case LCONST_0: - case LCONST_1: - case DCONST_0: - case DCONST_1: - size = 2; - break; - case LDC: - Object cst = ((LdcInsnNode) insn).cst; - size = cst instanceof Long || cst instanceof Double ? 2 : 1; - break; - case GETSTATIC: - size = Type.getType(((FieldInsnNode) insn).desc).getSize(); - break; - default: - size = 1; + case LCONST_0: + case LCONST_1: + case DCONST_0: + case DCONST_1: + size = 2; + break; + case LDC: + Object cst = ((LdcInsnNode) insn).cst; + size = cst instanceof Long || cst instanceof Double ? 2 : 1; + break; + case GETSTATIC: + size = Type.getType(((FieldInsnNode) insn).desc).getSize(); + break; + default: + size = 1; } return new SourceValue(size, insn); } @Override - public SourceValue copyOperation(final AbstractInsnNode insn, final SourceValue value) { + public SourceValue copyOperation(final AbstractInsnNode insn, + final SourceValue value) { return new SourceValue(value.getSize(), insn); } @Override - public SourceValue unaryOperation(final AbstractInsnNode insn, final SourceValue value) - { + public SourceValue unaryOperation(final AbstractInsnNode insn, + final SourceValue value) { int size; switch (insn.getOpcode()) { - case LNEG: - case DNEG: - case I2L: - case I2D: - case L2D: - case F2L: - case F2D: - case D2L: - size = 2; - break; - case GETFIELD: - size = Type.getType(((FieldInsnNode) insn).desc).getSize(); - break; - default: - size = 1; + case LNEG: + case DNEG: + case I2L: + case I2D: + case L2D: + case F2L: + case F2D: + case D2L: + size = 2; + break; + case GETFIELD: + size = Type.getType(((FieldInsnNode) insn).desc).getSize(); + break; + default: + size = 1; } return new SourceValue(size, insn); } @Override - public SourceValue binaryOperation( - final AbstractInsnNode insn, - final SourceValue value1, - final SourceValue value2) - { + public SourceValue binaryOperation(final AbstractInsnNode insn, + final SourceValue value1, final SourceValue value2) { int size; switch (insn.getOpcode()) { - case LALOAD: - case DALOAD: - case LADD: - case DADD: - case LSUB: - case DSUB: - case LMUL: - case DMUL: - case LDIV: - case DDIV: - case LREM: - case DREM: - case LSHL: - case LSHR: - case LUSHR: - case LAND: - case LOR: - case LXOR: - size = 2; - break; - default: - size = 1; + case LALOAD: + case DALOAD: + case LADD: + case DADD: + case LSUB: + case DSUB: + case LMUL: + case DMUL: + case LDIV: + case DDIV: + case LREM: + case DREM: + case LSHL: + case LSHR: + case LUSHR: + case LAND: + case LOR: + case LXOR: + size = 2; + break; + default: + size = 1; } return new SourceValue(size, insn); } @Override - public SourceValue ternaryOperation( - final AbstractInsnNode insn, - final SourceValue value1, - final SourceValue value2, - final SourceValue value3) - { + public SourceValue ternaryOperation(final AbstractInsnNode insn, + final SourceValue value1, final SourceValue value2, + final SourceValue value3) { return new SourceValue(1, insn); } @Override - public SourceValue naryOperation(final AbstractInsnNode insn, final List values) { + public SourceValue naryOperation(final AbstractInsnNode insn, + final List values) { int size; int opcode = insn.getOpcode(); if (opcode == MULTIANEWARRAY) { size = 1; } else { - String desc = (opcode == 
INVOKEDYNAMIC)? - ((InvokeDynamicInsnNode) insn).desc: - ((MethodInsnNode) insn).desc; + String desc = (opcode == INVOKEDYNAMIC) ? ((InvokeDynamicInsnNode) insn).desc + : ((MethodInsnNode) insn).desc; size = Type.getReturnType(desc).getSize(); } return new SourceValue(size, insn); } @Override - public void returnOperation( - final AbstractInsnNode insn, - final SourceValue value, - final SourceValue expected) - { + public void returnOperation(final AbstractInsnNode insn, + final SourceValue value, final SourceValue expected) { } @Override public SourceValue merge(final SourceValue d, final SourceValue w) { if (d.insns instanceof SmallSet && w.insns instanceof SmallSet) { - Set s = ((SmallSet) d.insns).union((SmallSet) w.insns); + Set s = ((SmallSet) d.insns) + .union((SmallSet) w.insns); if (s == d.insns && d.size == w.size) { return d; } else { diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceValue.java b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java index 57ff212fb2..40d6b68180 100644 --- a/src/asm/scala/tools/asm/tree/analysis/SourceValue.java +++ b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java @@ -48,8 +48,8 @@ public class SourceValue implements Value { /** * The instructions that can produce this value. For example, for the Java - * code below, the instructions that can produce the value of i - * at line 5 are the txo ISTORE instructions at line 1 and 3: + * code below, the instructions that can produce the value of i at + * line 5 are the txo ISTORE instructions at line 1 and 3: * *
          * 1: i = 0;
    @@ -64,7 +64,7 @@ public class SourceValue implements Value {
          public final Set<AbstractInsnNode> insns;
     
         public SourceValue(final int size) {
     -        this(size, SmallSet.<AbstractInsnNode>emptySet());
     +        this(size, SmallSet.<AbstractInsnNode> emptySet());
         }
     
         public SourceValue(final int size, final AbstractInsnNode insn) {
    @@ -84,7 +84,7 @@ public class SourceValue implements Value {
         @Override
         public boolean equals(final Object value) {
             if (!(value instanceof SourceValue)) {
    -        	return false;
    +            return false;
             }
             SourceValue v = (SourceValue) value;
             return size == v.size && insns.equals(v.insns);
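
A note on the behavioural detail these ASM hunks interact with: SourceValue defines equality by size plus the set of producer instructions, and the Frame.merge hunk earlier in this patch switches its change test from reference inequality to !v.equals(values[i]). The sketch below is illustrative only; SimpleValue and its fields are hypothetical stand-ins rather than ASM types. It shows why a value-based equals is what lets the dataflow loop detect a fixpoint when merging allocates fresh objects.

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    // Hypothetical stand-in for an analysis value: equality is content-based.
    public final class SimpleValue {
        final int size;
        final Set<String> sources; // e.g. "ISTORE@1", "ISTORE@3"

        SimpleValue(int size, Set<String> sources) {
            this.size = size;
            this.sources = sources;
        }

        // Merging always builds a new object, even when nothing new was learned.
        SimpleValue merge(SimpleValue other) {
            Set<String> union = new HashSet<String>(sources);
            union.addAll(other.sources);
            return new SimpleValue(Math.max(size, other.size), union);
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof SimpleValue)) {
                return false;
            }
            SimpleValue v = (SimpleValue) o;
            return size == v.size && sources.equals(v.sources);
        }

        @Override
        public int hashCode() {
            return size + sources.hashCode();
        }

        public static void main(String[] args) {
            SimpleValue a = new SimpleValue(1,
                    new HashSet<String>(Arrays.asList("ISTORE@1")));
            SimpleValue b = new SimpleValue(1,
                    new HashSet<String>(Arrays.asList("ISTORE@1")));
            SimpleValue merged = a.merge(b);
            // A reference check reports a "change" forever; equals() sees the fixpoint.
            System.out.println(merged != a);        // true  (spurious change)
            System.out.println(!merged.equals(a));  // false (no real change)
        }
    }
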
    diff --git a/src/asm/scala/tools/asm/tree/analysis/Subroutine.java b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
    index 038880ddcd..d734bbd499 100644
    --- a/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
    +++ b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
    @@ -51,11 +51,8 @@ class Subroutine {
         private Subroutine() {
         }
     
    -    Subroutine(
    -        final LabelNode start,
    -        final int maxLocals,
    -        final JumpInsnNode caller)
    -    {
    +    Subroutine(final LabelNode start, final int maxLocals,
    +            final JumpInsnNode caller) {
             this.start = start;
             this.access = new boolean[maxLocals];
              this.callers = new ArrayList<JumpInsnNode>();
    @@ -90,4 +87,4 @@ class Subroutine {
             }
             return changes;
         }
    -}
    \ No newline at end of file
    +}
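
Subroutine records, for each JSR subroutine, which locals it accesses (the boolean[] access allocated in the constructor above) and which JSR instructions call it; its merge method, whose tail is visible in this hunk, reports whether merging brought in new information. The helper below is a rough sketch of that change-detection pattern under assumed semantics, not the actual Subroutine.merge implementation; the class and method names are made up.

    public final class AccessMergeSketch {

        // Fold "locals accessed" flags from one subroutine record into another,
        // reporting whether any flag was newly set.
        static boolean mergeAccess(boolean[] into, boolean[] from) {
            boolean changes = false;
            for (int i = 0; i < into.length; ++i) {
                if (from[i] && !into[i]) {
                    into[i] = true;
                    changes = true;
                }
            }
            return changes;
        }

        public static void main(String[] args) {
            boolean[] outer = { true, false, false };
            boolean[] inner = { true, true, false };
            System.out.println(mergeAccess(outer, inner)); // true: local 1 newly accessed
            System.out.println(mergeAccess(outer, inner)); // false: nothing new, fixpoint
        }
    }
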
    diff --git a/src/asm/scala/tools/asm/util/ASMifiable.java b/src/asm/scala/tools/asm/util/ASMifiable.java
    index 6a31dd508f..95cc6e3a74 100644
    --- a/src/asm/scala/tools/asm/util/ASMifiable.java
    +++ b/src/asm/scala/tools/asm/util/ASMifiable.java
    @@ -34,7 +34,7 @@ import java.util.Map;
     import scala.tools.asm.Label;
     
     /**
    - * An {@link org.objectweb.asm.Attribute Attribute} that can print the ASM code
    + * An {@link scala.tools.asm.Attribute Attribute} that can print the ASM code
      * to create an equivalent attribute.
      *
      * @author Eugene Kuleshov
    @@ -44,10 +44,13 @@ public interface ASMifiable {
         /**
          * Prints the ASM code to create an attribute equal to this attribute.
          *
    -     * @param buf a buffer used for printing Java code.
    -     * @param varName name of the variable in a printed code used to store
    -     *        attribute instance.
    -     * @param labelNames map of label instances to their names.
    +     * @param buf
    +     *            a buffer used for printing Java code.
    +     * @param varName
    +     *            name of the variable in a printed code used to store attribute
    +     *            instance.
    +     * @param labelNames
    +     *            map of label instances to their names.
          */
          void asmify(StringBuffer buf, String varName, Map<Label, String> labelNames);
     }
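
The ASMifier changes below include its main method, which wires an ASMifier printer into a TraceClassVisitor and feeds it a ClassReader. For readers who want the same output programmatically, a minimal driver along those lines could look like the following; the class name ASMifierDemo and the choice of java.lang.Runnable as input are illustrative, and the packages are this repository's shaded scala.tools.asm copy rather than org.objectweb.asm.

    import java.io.PrintWriter;

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.util.ASMifier;
    import scala.tools.asm.util.TraceClassVisitor;

    // Prints Java source that would regenerate java.lang.Runnable with ASM.
    public class ASMifierDemo {
        public static void main(String[] args) throws Exception {
            ClassReader cr = new ClassReader("java.lang.Runnable");
            cr.accept(new TraceClassVisitor(null, new ASMifier(),
                    new PrintWriter(System.out)), 0);
        }
    }
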
    diff --git a/src/asm/scala/tools/asm/util/ASMifier.java b/src/asm/scala/tools/asm/util/ASMifier.java
    index 5967c877d1..7e6b223853 100644
    --- a/src/asm/scala/tools/asm/util/ASMifier.java
    +++ b/src/asm/scala/tools/asm/util/ASMifier.java
    @@ -91,11 +91,14 @@ public class ASMifier extends Printer {
         /**
          * Constructs a new {@link ASMifier}.
          *
    -     * @param api the ASM API version implemented by this class. Must be one of
    -     *        {@link Opcodes#ASM4}.
    -     * @param name the name of the visitor variable in the produced code.
    -     * @param id identifier of the annotation visitor variable in the produced
    -     *        code.
    +     * @param api
    +     *            the ASM API version implemented by this class. Must be one of
    +     *            {@link Opcodes#ASM4}.
    +     * @param name
    +     *            the name of the visitor variable in the produced code.
    +     * @param id
    +     *            identifier of the annotation visitor variable in the produced
    +     *            code.
          */
         protected ASMifier(final int api, final String name, final int id) {
             super(api);
    @@ -105,13 +108,15 @@ public class ASMifier extends Printer {
     
         /**
          * Prints the ASM source code to generate the given class to the standard
     -     * output. <p> Usage: ASMifier [-debug] &lt;binary
     -     * class name or class file name&gt;
     +     * output.
     +     * <p>
    + * Usage: ASMifier [-debug] <binary class name or class file name> * - * @param args the command line arguments. + * @param args + * the command line arguments. * - * @throws Exception if the class cannot be found, or if an IO exception - * occurs. + * @throws Exception + * if the class cannot be found, or if an IO exception occurs. */ public static void main(final String[] args) throws Exception { int i = 0; @@ -129,22 +134,21 @@ public class ASMifier extends Printer { } } if (!ok) { - System.err.println("Prints the ASM code to generate the given class."); + System.err + .println("Prints the ASM code to generate the given class."); System.err.println("Usage: ASMifier [-debug] " + ""); return; } ClassReader cr; if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1 - || args[i].indexOf('/') > -1) - { + || args[i].indexOf('/') > -1) { cr = new ClassReader(new FileInputStream(args[i])); } else { cr = new ClassReader(args[i]); } - cr.accept(new TraceClassVisitor(null, - new ASMifier(), - new PrintWriter(System.out)), flags); + cr.accept(new TraceClassVisitor(null, new ASMifier(), new PrintWriter( + System.out)), flags); } // ------------------------------------------------------------------------ @@ -152,14 +156,9 @@ public class ASMifier extends Printer { // ------------------------------------------------------------------------ @Override - public void visit( - final int version, - final int access, - final String name, - final String signature, - final String superName, - final String[] interfaces) - { + public void visit(final int version, final int access, final String name, + final String signature, final String superName, + final String[] interfaces) { String simpleName; int n = name.lastIndexOf('/'); if (n == -1) { @@ -170,8 +169,8 @@ public class ASMifier extends Printer { simpleName = name.substring(n + 1); } text.add("import java.util.*;\n"); - text.add("import org.objectweb.asm.*;\n"); - text.add("import org.objectweb.asm.attrs.*;\n"); + text.add("import scala.tools.asm.*;\n"); + text.add("import scala.tools.asm.attrs.*;\n"); text.add("public class " + simpleName + "Dump implements Opcodes {\n\n"); text.add("public static byte[] dump () throws Exception {\n\n"); text.add("ClassWriter cw = new ClassWriter(0);\n"); @@ -182,30 +181,30 @@ public class ASMifier extends Printer { buf.setLength(0); buf.append("cw.visit("); switch (version) { - case Opcodes.V1_1: - buf.append("V1_1"); - break; - case Opcodes.V1_2: - buf.append("V1_2"); - break; - case Opcodes.V1_3: - buf.append("V1_3"); - break; - case Opcodes.V1_4: - buf.append("V1_4"); - break; - case Opcodes.V1_5: - buf.append("V1_5"); - break; - case Opcodes.V1_6: - buf.append("V1_6"); - break; - case Opcodes.V1_7: - buf.append("V1_7"); - break; - default: - buf.append(version); - break; + case Opcodes.V1_1: + buf.append("V1_1"); + break; + case Opcodes.V1_2: + buf.append("V1_2"); + break; + case Opcodes.V1_3: + buf.append("V1_3"); + break; + case Opcodes.V1_4: + buf.append("V1_4"); + break; + case Opcodes.V1_5: + buf.append("V1_5"); + break; + case Opcodes.V1_6: + buf.append("V1_6"); + break; + case Opcodes.V1_7: + buf.append("V1_7"); + break; + default: + buf.append(version); + break; } buf.append(", "); appendAccess(access | ACCESS_CLASS); @@ -242,11 +241,8 @@ public class ASMifier extends Printer { } @Override - public void visitOuterClass( - final String owner, - final String name, - final String desc) - { + public void visitOuterClass(final String owner, final String name, + final String desc) { buf.setLength(0); 
buf.append("cw.visitOuterClass("); appendConstant(owner); @@ -259,10 +255,8 @@ public class ASMifier extends Printer { } @Override - public ASMifier visitClassAnnotation( - final String desc, - final boolean visible) - { + public ASMifier visitClassAnnotation(final String desc, + final boolean visible) { return visitAnnotation(desc, visible); } @@ -272,12 +266,8 @@ public class ASMifier extends Printer { } @Override - public void visitInnerClass( - final String name, - final String outerName, - final String innerName, - final int access) - { + public void visitInnerClass(final String name, final String outerName, + final String innerName, final int access) { buf.setLength(0); buf.append("cw.visitInnerClass("); appendConstant(name); @@ -292,13 +282,8 @@ public class ASMifier extends Printer { } @Override - public ASMifier visitField( - final int access, - final String name, - final String desc, - final String signature, - final Object value) - { + public ASMifier visitField(final int access, final String name, + final String desc, final String signature, final Object value) { buf.setLength(0); buf.append("{\n"); buf.append("fv = cw.visitField("); @@ -320,13 +305,8 @@ public class ASMifier extends Printer { } @Override - public ASMifier visitMethod( - final int access, - final String name, - final String desc, - final String signature, - final String[] exceptions) - { + public ASMifier visitMethod(final int access, final String name, + final String desc, final String signature, final String[] exceptions) { buf.setLength(0); buf.append("{\n"); buf.append("mv = cw.visitMethod("); @@ -380,11 +360,8 @@ public class ASMifier extends Printer { } @Override - public void visitEnum( - final String name, - final String desc, - final String value) - { + public void visitEnum(final String name, final String desc, + final String value) { buf.setLength(0); buf.append("av").append(id).append(".visitEnum("); appendConstant(buf, name); @@ -397,10 +374,7 @@ public class ASMifier extends Printer { } @Override - public ASMifier visitAnnotation( - final String name, - final String desc) - { + public ASMifier visitAnnotation(final String name, final String desc) { buf.setLength(0); buf.append("{\n"); buf.append("AnnotationVisitor av").append(id + 1).append(" = av"); @@ -443,10 +417,8 @@ public class ASMifier extends Printer { // ------------------------------------------------------------------------ @Override - public ASMifier visitFieldAnnotation( - final String desc, - final boolean visible) - { + public ASMifier visitFieldAnnotation(final String desc, + final boolean visible) { return visitAnnotation(desc, visible); } @@ -469,9 +441,7 @@ public class ASMifier extends Printer { @Override public ASMifier visitAnnotationDefault() { buf.setLength(0); - buf.append("{\n") - .append("av0 = ") - .append(name) + buf.append("{\n").append("av0 = ").append(name) .append(".visitAnnotationDefault();\n"); text.add(buf.toString()); ASMifier a = createASMifier("av", 0); @@ -481,23 +451,17 @@ public class ASMifier extends Printer { } @Override - public ASMifier visitMethodAnnotation( - final String desc, - final boolean visible) - { + public ASMifier visitMethodAnnotation(final String desc, + final boolean visible) { return visitAnnotation(desc, visible); } @Override - public ASMifier visitParameterAnnotation( - final int parameter, - final String desc, - final boolean visible) - { + public ASMifier visitParameterAnnotation(final int parameter, + final String desc, final boolean visible) { buf.setLength(0); - buf.append("{\n") 
- .append("av0 = ").append(name).append(".visitParameterAnnotation(") - .append(parameter) + buf.append("{\n").append("av0 = ").append(name) + .append(".visitParameterAnnotation(").append(parameter) .append(", "); appendConstant(desc); buf.append(", ").append(visible).append(");\n"); @@ -519,52 +483,47 @@ public class ASMifier extends Printer { } @Override - public void visitFrame( - final int type, - final int nLocal, - final Object[] local, - final int nStack, - final Object[] stack) - { + public void visitFrame(final int type, final int nLocal, + final Object[] local, final int nStack, final Object[] stack) { buf.setLength(0); switch (type) { - case Opcodes.F_NEW: - case Opcodes.F_FULL: - declareFrameTypes(nLocal, local); - declareFrameTypes(nStack, stack); - if (type == Opcodes.F_NEW) { - buf.append(name).append(".visitFrame(Opcodes.F_NEW, "); - } else { - buf.append(name).append(".visitFrame(Opcodes.F_FULL, "); - } - buf.append(nLocal).append(", new Object[] {"); - appendFrameTypes(nLocal, local); - buf.append("}, ").append(nStack).append(", new Object[] {"); - appendFrameTypes(nStack, stack); - buf.append('}'); - break; - case Opcodes.F_APPEND: - declareFrameTypes(nLocal, local); - buf.append(name).append(".visitFrame(Opcodes.F_APPEND,") - .append(nLocal) - .append(", new Object[] {"); - appendFrameTypes(nLocal, local); - buf.append("}, 0, null"); - break; - case Opcodes.F_CHOP: - buf.append(name).append(".visitFrame(Opcodes.F_CHOP,") - .append(nLocal) - .append(", null, 0, null"); - break; - case Opcodes.F_SAME: - buf.append(name).append(".visitFrame(Opcodes.F_SAME, 0, null, 0, null"); - break; - case Opcodes.F_SAME1: - declareFrameTypes(1, stack); - buf.append(name).append(".visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {"); - appendFrameTypes(1, stack); - buf.append('}'); - break; + case Opcodes.F_NEW: + case Opcodes.F_FULL: + declareFrameTypes(nLocal, local); + declareFrameTypes(nStack, stack); + if (type == Opcodes.F_NEW) { + buf.append(name).append(".visitFrame(Opcodes.F_NEW, "); + } else { + buf.append(name).append(".visitFrame(Opcodes.F_FULL, "); + } + buf.append(nLocal).append(", new Object[] {"); + appendFrameTypes(nLocal, local); + buf.append("}, ").append(nStack).append(", new Object[] {"); + appendFrameTypes(nStack, stack); + buf.append('}'); + break; + case Opcodes.F_APPEND: + declareFrameTypes(nLocal, local); + buf.append(name).append(".visitFrame(Opcodes.F_APPEND,") + .append(nLocal).append(", new Object[] {"); + appendFrameTypes(nLocal, local); + buf.append("}, 0, null"); + break; + case Opcodes.F_CHOP: + buf.append(name).append(".visitFrame(Opcodes.F_CHOP,") + .append(nLocal).append(", null, 0, null"); + break; + case Opcodes.F_SAME: + buf.append(name).append( + ".visitFrame(Opcodes.F_SAME, 0, null, 0, null"); + break; + case Opcodes.F_SAME1: + declareFrameTypes(1, stack); + buf.append(name).append( + ".visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {"); + appendFrameTypes(1, stack); + buf.append('}'); + break; } buf.append(");\n"); text.add(buf.toString()); @@ -573,7 +532,8 @@ public class ASMifier extends Printer { @Override public void visitInsn(final int opcode) { buf.setLength(0); - buf.append(name).append(".visitInsn(").append(OPCODES[opcode]).append(");\n"); + buf.append(name).append(".visitInsn(").append(OPCODES[opcode]) + .append(");\n"); text.add(buf.toString()); } @@ -584,43 +544,35 @@ public class ASMifier extends Printer { .append(".visitIntInsn(") .append(OPCODES[opcode]) .append(", ") - .append(opcode == Opcodes.NEWARRAY - ? 
TYPES[operand] - : Integer.toString(operand)) - .append(");\n"); + .append(opcode == Opcodes.NEWARRAY ? TYPES[operand] : Integer + .toString(operand)).append(");\n"); text.add(buf.toString()); } @Override public void visitVarInsn(final int opcode, final int var) { buf.setLength(0); - buf.append(name) - .append(".visitVarInsn(") - .append(OPCODES[opcode]) - .append(", ") - .append(var) - .append(");\n"); + buf.append(name).append(".visitVarInsn(").append(OPCODES[opcode]) + .append(", ").append(var).append(");\n"); text.add(buf.toString()); } @Override public void visitTypeInsn(final int opcode, final String type) { buf.setLength(0); - buf.append(name).append(".visitTypeInsn(").append(OPCODES[opcode]).append(", "); + buf.append(name).append(".visitTypeInsn(").append(OPCODES[opcode]) + .append(", "); appendConstant(type); buf.append(");\n"); text.add(buf.toString()); } @Override - public void visitFieldInsn( - final int opcode, - final String owner, - final String name, - final String desc) - { + public void visitFieldInsn(final int opcode, final String owner, + final String name, final String desc) { buf.setLength(0); - buf.append(this.name).append(".visitFieldInsn(").append(OPCODES[opcode]).append(", "); + buf.append(this.name).append(".visitFieldInsn(") + .append(OPCODES[opcode]).append(", "); appendConstant(owner); buf.append(", "); appendConstant(name); @@ -631,14 +583,11 @@ public class ASMifier extends Printer { } @Override - public void visitMethodInsn( - final int opcode, - final String owner, - final String name, - final String desc) - { + public void visitMethodInsn(final int opcode, final String owner, + final String name, final String desc) { buf.setLength(0); - buf.append(this.name).append(".visitMethodInsn(").append(OPCODES[opcode]).append(", "); + buf.append(this.name).append(".visitMethodInsn(") + .append(OPCODES[opcode]).append(", "); appendConstant(owner); buf.append(", "); appendConstant(name); @@ -649,12 +598,8 @@ public class ASMifier extends Printer { } @Override - public void visitInvokeDynamicInsn( - String name, - String desc, - Handle bsm, - Object... bsmArgs) - { + public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, + Object... bsmArgs) { buf.setLength(0); buf.append(this.name).append(".visitInvokeDynamicInsn("); appendConstant(name); @@ -677,7 +622,8 @@ public class ASMifier extends Printer { public void visitJumpInsn(final int opcode, final Label label) { buf.setLength(0); declareLabel(label); - buf.append(name).append(".visitJumpInsn(").append(OPCODES[opcode]).append(", "); + buf.append(name).append(".visitJumpInsn(").append(OPCODES[opcode]) + .append(", "); appendLabel(label); buf.append(");\n"); text.add(buf.toString()); @@ -705,34 +651,22 @@ public class ASMifier extends Printer { @Override public void visitIincInsn(final int var, final int increment) { buf.setLength(0); - buf.append(name) - .append(".visitIincInsn(") - .append(var) - .append(", ") - .append(increment) - .append(");\n"); + buf.append(name).append(".visitIincInsn(").append(var).append(", ") + .append(increment).append(");\n"); text.add(buf.toString()); } @Override - public void visitTableSwitchInsn( - final int min, - final int max, - final Label dflt, - final Label... labels) - { + public void visitTableSwitchInsn(final int min, final int max, + final Label dflt, final Label... 
labels) { buf.setLength(0); for (int i = 0; i < labels.length; ++i) { declareLabel(labels[i]); } declareLabel(dflt); - buf.append(name) - .append(".visitTableSwitchInsn(") - .append(min) - .append(", ") - .append(max) - .append(", "); + buf.append(name).append(".visitTableSwitchInsn(").append(min) + .append(", ").append(max).append(", "); appendLabel(dflt); buf.append(", new Label[] {"); for (int i = 0; i < labels.length; ++i) { @@ -744,11 +678,8 @@ public class ASMifier extends Printer { } @Override - public void visitLookupSwitchInsn( - final Label dflt, - final int[] keys, - final Label[] labels) - { + public void visitLookupSwitchInsn(final Label dflt, final int[] keys, + final Label[] labels) { buf.setLength(0); for (int i = 0; i < labels.length; ++i) { declareLabel(labels[i]); @@ -780,12 +711,8 @@ public class ASMifier extends Printer { } @Override - public void visitTryCatchBlock( - final Label start, - final Label end, - final Label handler, - final String type) - { + public void visitTryCatchBlock(final Label start, final Label end, + final Label handler, final String type) { buf.setLength(0); declareLabel(start); declareLabel(end); @@ -803,14 +730,9 @@ public class ASMifier extends Printer { } @Override - public void visitLocalVariable( - final String name, - final String desc, - final String signature, - final Label start, - final Label end, - final int index) - { + public void visitLocalVariable(final String name, final String desc, + final String signature, final Label start, final Label end, + final int index) { buf.setLength(0); buf.append(this.name).append(".visitLocalVariable("); appendConstant(name); @@ -838,12 +760,8 @@ public class ASMifier extends Printer { @Override public void visitMaxs(final int maxStack, final int maxLocals) { buf.setLength(0); - buf.append(name) - .append(".visitMaxs(") - .append(maxStack) - .append(", ") - .append(maxLocals) - .append(");\n"); + buf.append(name).append(".visitMaxs(").append(maxStack).append(", ") + .append(maxLocals).append(");\n"); text.add(buf.toString()); } @@ -858,14 +776,9 @@ public class ASMifier extends Printer { // Common methods // ------------------------------------------------------------------------ - public ASMifier visitAnnotation( - final String desc, - final boolean visible) - { + public ASMifier visitAnnotation(final String desc, final boolean visible) { buf.setLength(0); - buf.append("{\n") - .append("av0 = ") - .append(name) + buf.append("{\n").append("av0 = ").append(name) .append(".visitAnnotation("); appendConstant(desc); buf.append(", ").append(visible).append(");\n"); @@ -895,15 +808,16 @@ public class ASMifier extends Printer { // Utility methods // ------------------------------------------------------------------------ - protected ASMifier createASMifier(final String name, final int id) { + protected ASMifier createASMifier(final String name, final int id) { return new ASMifier(Opcodes.ASM4, name, id); } /** - * Appends a string representation of the given access modifiers to {@link - * #buf buf}. + * Appends a string representation of the given access modifiers to + * {@link #buf buf}. * - * @param access some access modifiers. + * @param access + * some access modifiers. 
*/ void appendAccess(final int access) { boolean first = true; @@ -945,8 +859,7 @@ public class ASMifier extends Printer { first = false; } if ((access & Opcodes.ACC_VOLATILE) != 0 - && (access & ACCESS_FIELD) != 0) - { + && (access & ACCESS_FIELD) != 0) { if (!first) { buf.append(" + "); } @@ -954,8 +867,7 @@ public class ASMifier extends Printer { first = false; } if ((access & Opcodes.ACC_BRIDGE) != 0 && (access & ACCESS_CLASS) == 0 - && (access & ACCESS_FIELD) == 0) - { + && (access & ACCESS_FIELD) == 0) { if (!first) { buf.append(" + "); } @@ -963,8 +875,7 @@ public class ASMifier extends Printer { first = false; } if ((access & Opcodes.ACC_VARARGS) != 0 && (access & ACCESS_CLASS) == 0 - && (access & ACCESS_FIELD) == 0) - { + && (access & ACCESS_FIELD) == 0) { if (!first) { buf.append(" + "); } @@ -972,8 +883,7 @@ public class ASMifier extends Printer { first = false; } if ((access & Opcodes.ACC_TRANSIENT) != 0 - && (access & ACCESS_FIELD) != 0) - { + && (access & ACCESS_FIELD) != 0) { if (!first) { buf.append(" + "); } @@ -981,8 +891,7 @@ public class ASMifier extends Printer { first = false; } if ((access & Opcodes.ACC_NATIVE) != 0 && (access & ACCESS_CLASS) == 0 - && (access & ACCESS_FIELD) == 0) - { + && (access & ACCESS_FIELD) == 0) { if (!first) { buf.append(" + "); } @@ -991,8 +900,7 @@ public class ASMifier extends Printer { } if ((access & Opcodes.ACC_ENUM) != 0 && ((access & ACCESS_CLASS) != 0 - || (access & ACCESS_FIELD) != 0 || (access & ACCESS_INNER) != 0)) - { + || (access & ACCESS_FIELD) != 0 || (access & ACCESS_INNER) != 0)) { if (!first) { buf.append(" + "); } @@ -1000,8 +908,7 @@ public class ASMifier extends Printer { first = false; } if ((access & Opcodes.ACC_ANNOTATION) != 0 - && ((access & ACCESS_CLASS) != 0 || (access & ACCESS_INNER) != 0)) - { + && ((access & ACCESS_CLASS) != 0 || (access & ACCESS_INNER) != 0)) { if (!first) { buf.append(" + "); } @@ -1052,8 +959,9 @@ public class ASMifier extends Printer { * Appends a string representation of the given constant to the given * buffer. * - * @param cst an {@link Integer}, {@link Float}, {@link Long}, - * {@link Double} or {@link String} object. May be null. + * @param cst + * an {@link Integer}, {@link Float}, {@link Long}, + * {@link Double} or {@link String} object. May be null. */ protected void appendConstant(final Object cst) { appendConstant(buf, cst); @@ -1063,9 +971,11 @@ public class ASMifier extends Printer { * Appends a string representation of the given constant to the given * buffer. * - * @param buf a string buffer. - * @param cst an {@link Integer}, {@link Float}, {@link Long}, - * {@link Double} or {@link String} object. May be null. + * @param buf + * a string buffer. + * @param cst + * an {@link Integer}, {@link Float}, {@link Long}, + * {@link Double} or {@link String} object. May be null. */ static void appendConstant(final StringBuffer buf, final Object cst) { if (cst == null) { @@ -1079,14 +989,16 @@ public class ASMifier extends Printer { } else if (cst instanceof Handle) { buf.append("new Handle("); Handle h = (Handle) cst; - buf.append("Opcodes.").append(HANDLE_TAG[h.getTag()]).append(", \""); + buf.append("Opcodes.").append(HANDLE_TAG[h.getTag()]) + .append(", \""); buf.append(h.getOwner()).append("\", \""); buf.append(h.getName()).append("\", \""); buf.append(h.getDesc()).append("\")"); } else if (cst instanceof Byte) { buf.append("new Byte((byte)").append(cst).append(')'); } else if (cst instanceof Boolean) { - buf.append(((Boolean) cst).booleanValue() ? 
"Boolean.TRUE" : "Boolean.FALSE"); + buf.append(((Boolean) cst).booleanValue() ? "Boolean.TRUE" + : "Boolean.FALSE"); } else if (cst instanceof Short) { buf.append("new Short((short)").append(cst).append(')'); } else if (cst instanceof Character) { @@ -1125,8 +1037,7 @@ public class ASMifier extends Printer { char[] v = (char[]) cst; buf.append("new char[] {"); for (int i = 0; i < v.length; i++) { - buf.append(i == 0 ? "" : ",") - .append("(char)") + buf.append(i == 0 ? "" : ",").append("(char)") .append((int) v[i]); } buf.append('}'); @@ -1178,27 +1089,27 @@ public class ASMifier extends Printer { appendConstant(o[i]); } else if (o[i] instanceof Integer) { switch (((Integer) o[i]).intValue()) { - case 0: - buf.append("Opcodes.TOP"); - break; - case 1: - buf.append("Opcodes.INTEGER"); - break; - case 2: - buf.append("Opcodes.FLOAT"); - break; - case 3: - buf.append("Opcodes.DOUBLE"); - break; - case 4: - buf.append("Opcodes.LONG"); - break; - case 5: - buf.append("Opcodes.NULL"); - break; - case 6: - buf.append("Opcodes.UNINITIALIZED_THIS"); - break; + case 0: + buf.append("Opcodes.TOP"); + break; + case 1: + buf.append("Opcodes.INTEGER"); + break; + case 2: + buf.append("Opcodes.FLOAT"); + break; + case 3: + buf.append("Opcodes.DOUBLE"); + break; + case 4: + buf.append("Opcodes.LONG"); + break; + case 5: + buf.append("Opcodes.NULL"); + break; + case 6: + buf.append("Opcodes.UNINITIALIZED_THIS"); + break; } } else { appendLabel((Label) o[i]); @@ -1211,7 +1122,8 @@ public class ASMifier extends Printer { * declaration is of the form "Label lXXX = new Label();". Does nothing if * the given label has already been declared. * - * @param l a label. + * @param l + * a label. */ protected void declareLabel(final Label l) { if (labelNames == null) { @@ -1227,10 +1139,11 @@ public class ASMifier extends Printer { /** * Appends the name of the given label to {@link #buf buf}. The given label - * must already have a name. One way to ensure this is to always - * call {@link #declareLabel declared} before calling this method. + * must already have a name. One way to ensure this is to always call + * {@link #declareLabel declared} before calling this method. * - * @param l a label. + * @param l + * a label. 
*/ protected void appendLabel(final Label l) { buf.append(labelNames.get(l)); diff --git a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java index 8030c14f2e..f00a8f04a2 100644 --- a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java +++ b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java @@ -65,8 +65,7 @@ public class CheckAnnotationAdapter extends AnnotationVisitor { || value instanceof byte[] || value instanceof boolean[] || value instanceof char[] || value instanceof short[] || value instanceof int[] || value instanceof long[] - || value instanceof float[] || value instanceof double[])) - { + || value instanceof float[] || value instanceof double[])) { throw new IllegalArgumentException("Invalid annotation value"); } if (value instanceof Type) { @@ -81,11 +80,8 @@ public class CheckAnnotationAdapter extends AnnotationVisitor { } @Override - public void visitEnum( - final String name, - final String desc, - final String value) - { + public void visitEnum(final String name, final String desc, + final String value) { checkEnd(); checkName(name); CheckMethodAdapter.checkDesc(desc, false); @@ -98,15 +94,12 @@ public class CheckAnnotationAdapter extends AnnotationVisitor { } @Override - public AnnotationVisitor visitAnnotation( - final String name, - final String desc) - { + public AnnotationVisitor visitAnnotation(final String name, + final String desc) { checkEnd(); checkName(name); CheckMethodAdapter.checkDesc(desc, false); - return new CheckAnnotationAdapter(av == null - ? null + return new CheckAnnotationAdapter(av == null ? null : av.visitAnnotation(name, desc)); } @@ -114,8 +107,7 @@ public class CheckAnnotationAdapter extends AnnotationVisitor { public AnnotationVisitor visitArray(final String name) { checkEnd(); checkName(name); - return new CheckAnnotationAdapter(av == null - ? null + return new CheckAnnotationAdapter(av == null ? null : av.visitArray(name), false); } @@ -130,13 +122,15 @@ public class CheckAnnotationAdapter extends AnnotationVisitor { private void checkEnd() { if (end) { - throw new IllegalStateException("Cannot call a visit method after visitEnd has been called"); + throw new IllegalStateException( + "Cannot call a visit method after visitEnd has been called"); } } private void checkName(final String name) { if (named && name == null) { - throw new IllegalArgumentException("Annotation value name must not be null"); + throw new IllegalArgumentException( + "Annotation value name must not be null"); } } } diff --git a/src/asm/scala/tools/asm/util/CheckClassAdapter.java b/src/asm/scala/tools/asm/util/CheckClassAdapter.java index a455322531..0bfa143a95 100644 --- a/src/asm/scala/tools/asm/util/CheckClassAdapter.java +++ b/src/asm/scala/tools/asm/util/CheckClassAdapter.java @@ -59,10 +59,10 @@ import scala.tools.asm.tree.analysis.SimpleVerifier; * only on its arguments, but does not check the sequence * of method calls. For example, the invalid sequence * visitField(ACC_PUBLIC, "i", "I", null) visitField(ACC_PUBLIC, - * "i", "D", null) - * will not be detected by this class adapter. + * "i", "D", null) will not be detected by this class adapter. * - *

    CheckClassAdapter can be also used to verify bytecode + *

    + * CheckClassAdapter can be also used to verify bytecode * transformations in order to make sure transformed bytecode is sane. For * example: * @@ -80,19 +80,20 @@ import scala.tools.asm.tree.analysis.SimpleVerifier; *

    * * Above code runs transformed bytecode through the - * CheckClassAdapter. It won't be exactly the same verification - * as JVM does, but it runs data flow analysis for the code of each method and + * CheckClassAdapter. It won't be exactly the same verification as + * JVM does, but it runs data flow analysis for the code of each method and * checks that expectations are met for each method instruction. * - *

    If method bytecode has errors, assertion text will show the erroneous + *

    + * If method bytecode has errors, assertion text will show the erroneous * instruction number and dump of the failed method with information about * locals and stack slot for each instruction. For example (format is - * insnNumber locals : stack): * *

    - * org.objectweb.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
    - *   at org.objectweb.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
    - *   at org.objectweb.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
    + * scala.tools.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
    + *   at scala.tools.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
    + *   at scala.tools.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
      * ...
      * remove()V
      * 00000 LinkedBlockingQueue$Itr . . . . . . . .  :
    @@ -114,8 +115,9 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
      * initialized. You can also see that at the beginning of the method (code
      * inserted by the transformation) variable 2 is initialized.
      *
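The per-instruction dump explained above is produced by the static verify entry points of this class, whose reformatted signatures appear further down in this file's hunks. A minimal sketch of invoking the verifier programmatically; pkg.Hello is a placeholder class name:

import java.io.PrintWriter;

import scala.tools.asm.ClassReader;
import scala.tools.asm.util.CheckClassAdapter;

public class VerifyExample {
    public static void main(String[] args) throws Exception {
        ClassReader cr = new ClassReader("pkg.Hello");
        // Runs a SimpleVerifier-based analysis over every method and prints
        // a dump like the one above whenever an instruction is rejected
        // (pass true to dump each method even when no error is found).
        CheckClassAdapter.verify(cr, false, new PrintWriter(System.out));
    }
}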
    - * 

    Note that when used like that, CheckClassAdapter.verify() - * can trigger additional class loading, because it is using + *

    + * Note that when used like that, CheckClassAdapter.verify() can + * trigger additional class loading, because it is using * SimpleVerifier. * * @author Eric Bruneton @@ -159,13 +161,15 @@ public class CheckClassAdapter extends ClassVisitor { private boolean checkDataFlow; /** - * Checks a given class.

    Usage: CheckClassAdapter <binary - * class name or class file name> + * Checks a given class. + *

    + * Usage: CheckClassAdapter <binary class name or class file name> * - * @param args the command line arguments. + * @param args + * the command line arguments. * - * @throws Exception if the class cannot be found, or if an IO exception - * occurs. + * @throws Exception + * if the class cannot be found, or if an IO exception occurs. */ public static void main(final String[] args) throws Exception { if (args.length != 1) { @@ -187,27 +191,26 @@ public class CheckClassAdapter extends ClassVisitor { /** * Checks a given class. * - * @param cr a ClassReader that contains bytecode for the - * analysis. - * @param loader a ClassLoader which will be used to load - * referenced classes. This is useful if you are verifiying multiple - * interdependent classes. - * @param dump true if bytecode should be printed out not only when errors - * are found. - * @param pw write where results going to be printed + * @param cr + * a ClassReader that contains bytecode for the + * analysis. + * @param loader + * a ClassLoader which will be used to load + * referenced classes. This is useful if you are verifiying + * multiple interdependent classes. + * @param dump + * true if bytecode should be printed out not only when errors + * are found. + * @param pw + * write where results going to be printed */ - public static void verify( - final ClassReader cr, - final ClassLoader loader, - final boolean dump, - final PrintWriter pw) - { + public static void verify(final ClassReader cr, final ClassLoader loader, + final boolean dump, final PrintWriter pw) { ClassNode cn = new ClassNode(); cr.accept(new CheckClassAdapter(cn, false), ClassReader.SKIP_DEBUG); - Type syperType = cn.superName == null - ? null - : Type.getObjectType(cn.superName); + Type syperType = cn.superName == null ? null : Type + .getObjectType(cn.superName); List methods = cn.methods; List interfaces = new ArrayList(); @@ -217,9 +220,8 @@ public class CheckClassAdapter extends ClassVisitor { for (int i = 0; i < methods.size(); ++i) { MethodNode method = methods.get(i); - SimpleVerifier verifier = new SimpleVerifier(Type.getObjectType(cn.name), - syperType, - interfaces, + SimpleVerifier verifier = new SimpleVerifier( + Type.getObjectType(cn.name), syperType, interfaces, (cn.access & Opcodes.ACC_INTERFACE) != 0); Analyzer a = new Analyzer(verifier); if (loader != null) { @@ -241,25 +243,22 @@ public class CheckClassAdapter extends ClassVisitor { /** * Checks a given class * - * @param cr a ClassReader that contains bytecode for the - * analysis. - * @param dump true if bytecode should be printed out not only when errors - * are found. - * @param pw write where results going to be printed + * @param cr + * a ClassReader that contains bytecode for the + * analysis. + * @param dump + * true if bytecode should be printed out not only when errors + * are found. + * @param pw + * write where results going to be printed */ - public static void verify( - final ClassReader cr, - final boolean dump, - final PrintWriter pw) - { + public static void verify(final ClassReader cr, final boolean dump, + final PrintWriter pw) { verify(cr, null, dump, pw); } - static void printAnalyzerResult( - MethodNode method, - Analyzer a, - final PrintWriter pw) - { + static void printAnalyzerResult(MethodNode method, Analyzer a, + final PrintWriter pw) { Frame[] frames = a.getFrames(); Textifier t = new Textifier(); TraceMethodVisitor mv = new TraceMethodVisitor(t); @@ -310,7 +309,8 @@ public class CheckClassAdapter extends ClassVisitor { * this constructor. 
Instead, they must use the * {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version. * - * @param cv the class visitor to which this adapter must delegate calls. + * @param cv + * the class visitor to which this adapter must delegate calls. */ public CheckClassAdapter(final ClassVisitor cv) { this(cv, true); @@ -321,33 +321,34 @@ public class CheckClassAdapter extends ClassVisitor { * this constructor. Instead, they must use the * {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version. * - * @param cv the class visitor to which this adapter must delegate calls. - * @param checkDataFlow true to perform basic data flow checks, or - * false to not perform any data flow check (see - * {@link CheckMethodAdapter}). This option requires valid maxLocals - * and maxStack values. + * @param cv + * the class visitor to which this adapter must delegate calls. + * @param checkDataFlow + * true to perform basic data flow checks, or + * false to not perform any data flow check (see + * {@link CheckMethodAdapter}). This option requires valid + * maxLocals and maxStack values. */ - public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow) - { + public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow) { this(Opcodes.ASM4, cv, checkDataFlow); } /** * Constructs a new {@link CheckClassAdapter}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. - * @param cv the class visitor to which this adapter must delegate calls. - * @param checkDataFlow true to perform basic data flow checks, or - * false to not perform any data flow check (see - * {@link CheckMethodAdapter}). This option requires valid maxLocals - * and maxStack values. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. + * @param cv + * the class visitor to which this adapter must delegate calls. + * @param checkDataFlow + * true to perform basic data flow checks, or + * false to not perform any data flow check (see + * {@link CheckMethodAdapter}). This option requires valid + * maxLocals and maxStack values. 
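The constructor documented above is normally used to wrap another visitor so that every visit call is validated before being forwarded. A small sketch that puts it in front of a ClassWriter from the same repackaged ASM, with the data flow checks enabled; classBytes is assumed to hold a valid class file:

import scala.tools.asm.ClassReader;
import scala.tools.asm.ClassWriter;
import scala.tools.asm.util.CheckClassAdapter;

public class CheckedCopyExample {
    // Copies a class through a CheckClassAdapter: malformed visit calls
    // (invalid access flags, bad descriptors, wrong call order, ...) fail
    // fast with IllegalArgumentException/IllegalStateException instead of
    // silently producing a broken class file.
    static byte[] checkedCopy(byte[] classBytes) {
        ClassReader cr = new ClassReader(classBytes);
        ClassWriter cw = new ClassWriter(0);
        cr.accept(new CheckClassAdapter(cw, true), 0);
        return cw.toByteArray();
    }
}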
*/ - protected CheckClassAdapter( - final int api, - final ClassVisitor cv, - final boolean checkDataFlow) - { + protected CheckClassAdapter(final int api, final ClassVisitor cv, + final boolean checkDataFlow) { super(api, cv); this.labels = new HashMap(); this.checkDataFlow = checkDataFlow; @@ -358,14 +359,9 @@ public class CheckClassAdapter extends ClassVisitor { // ------------------------------------------------------------------------ @Override - public void visit( - final int version, - final int access, - final String name, - final String signature, - final String superName, - final String[] interfaces) - { + public void visit(final int version, final int access, final String name, + final String signature, final String superName, + final String[] interfaces) { if (start) { throw new IllegalStateException("visit must be called only once"); } @@ -375,24 +371,25 @@ public class CheckClassAdapter extends ClassVisitor { + Opcodes.ACC_SUPER + Opcodes.ACC_INTERFACE + Opcodes.ACC_ABSTRACT + Opcodes.ACC_SYNTHETIC + Opcodes.ACC_ANNOTATION + Opcodes.ACC_ENUM - + Opcodes.ACC_DEPRECATED - + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE + + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE if (name == null || !name.endsWith("package-info")) { CheckMethodAdapter.checkInternalName(name, "class name"); } if ("java/lang/Object".equals(name)) { if (superName != null) { - throw new IllegalArgumentException("The super class name of the Object class must be 'null'"); + throw new IllegalArgumentException( + "The super class name of the Object class must be 'null'"); } } else { CheckMethodAdapter.checkInternalName(superName, "super class name"); } if (signature != null) { - CheckMethodAdapter.checkClassSignature(signature); + checkClassSignature(signature); } if ((access & Opcodes.ACC_INTERFACE) != 0) { if (!"java/lang/Object".equals(superName)) { - throw new IllegalArgumentException("The super class name of interfaces must be 'java/lang/Object'"); + throw new IllegalArgumentException( + "The super class name of interfaces must be 'java/lang/Object'"); } } if (interfaces != null) { @@ -409,21 +406,20 @@ public class CheckClassAdapter extends ClassVisitor { public void visitSource(final String file, final String debug) { checkState(); if (source) { - throw new IllegalStateException("visitSource can be called only once."); + throw new IllegalStateException( + "visitSource can be called only once."); } source = true; super.visitSource(file, debug); } @Override - public void visitOuterClass( - final String owner, - final String name, - final String desc) - { + public void visitOuterClass(final String owner, final String name, + final String desc) { checkState(); if (outer) { - throw new IllegalStateException("visitOuterClass can be called only once."); + throw new IllegalStateException( + "visitOuterClass can be called only once."); } outer = true; if (owner == null) { @@ -436,12 +432,8 @@ public class CheckClassAdapter extends ClassVisitor { } @Override - public void visitInnerClass( - final String name, - final String outerName, - final String innerName, - final int access) - { + public void visitInnerClass(final String name, final String outerName, + final String innerName, final int access) { checkState(); CheckMethodAdapter.checkInternalName(name, "class name"); if (outerName != null) { @@ -459,52 +451,44 @@ public class CheckClassAdapter extends ClassVisitor { } @Override - public FieldVisitor visitField( - final int access, - final String name, - final String desc, - final 
String signature, - final Object value) - { + public FieldVisitor visitField(final int access, final String name, + final String desc, final String signature, final Object value) { checkState(); checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE + Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC + Opcodes.ACC_FINAL + Opcodes.ACC_VOLATILE + Opcodes.ACC_TRANSIENT + Opcodes.ACC_SYNTHETIC - + Opcodes.ACC_ENUM + Opcodes.ACC_DEPRECATED - + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE + + Opcodes.ACC_ENUM + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE CheckMethodAdapter.checkUnqualifiedName(version, name, "field name"); CheckMethodAdapter.checkDesc(desc, false); if (signature != null) { - CheckMethodAdapter.checkFieldSignature(signature); + checkFieldSignature(signature); } if (value != null) { CheckMethodAdapter.checkConstant(value); } - FieldVisitor av = super.visitField(access, name, desc, signature, value); + FieldVisitor av = super + .visitField(access, name, desc, signature, value); return new CheckFieldAdapter(av); } @Override - public MethodVisitor visitMethod( - final int access, - final String name, - final String desc, - final String signature, - final String[] exceptions) - { + public MethodVisitor visitMethod(final int access, final String name, + final String desc, final String signature, final String[] exceptions) { checkState(); checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE + Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC + Opcodes.ACC_FINAL + Opcodes.ACC_SYNCHRONIZED + Opcodes.ACC_BRIDGE + Opcodes.ACC_VARARGS + Opcodes.ACC_NATIVE + Opcodes.ACC_ABSTRACT + Opcodes.ACC_STRICT - + Opcodes.ACC_SYNTHETIC + Opcodes.ACC_DEPRECATED - + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE - CheckMethodAdapter.checkMethodIdentifier(version, name, "method name"); + + Opcodes.ACC_SYNTHETIC + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE + if (!"".equals(name) && !"".equals(name)) { + CheckMethodAdapter.checkMethodIdentifier(version, name, + "method name"); + } CheckMethodAdapter.checkMethodDesc(desc); if (signature != null) { - CheckMethodAdapter.checkMethodSignature(signature); + checkMethodSignature(signature); } if (exceptions != null) { for (int i = 0; i < exceptions.length; ++i) { @@ -514,27 +498,19 @@ public class CheckClassAdapter extends ClassVisitor { } CheckMethodAdapter cma; if (checkDataFlow) { - cma = new CheckMethodAdapter(access, - name, - desc, - super.visitMethod(access, name, desc, signature, exceptions), - labels); + cma = new CheckMethodAdapter(access, name, desc, super.visitMethod( + access, name, desc, signature, exceptions), labels); } else { - cma = new CheckMethodAdapter(super.visitMethod(access, - name, - desc, - signature, - exceptions), labels); + cma = new CheckMethodAdapter(super.visitMethod(access, name, desc, + signature, exceptions), labels); } cma.version = version; return cma; } @Override - public AnnotationVisitor visitAnnotation( - final String desc, - final boolean visible) - { + public AnnotationVisitor visitAnnotation(final String desc, + final boolean visible) { checkState(); CheckMethodAdapter.checkDesc(desc, false); return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible)); @@ -544,7 +520,8 @@ public class CheckClassAdapter extends ClassVisitor { public void visitAttribute(final Attribute attr) { checkState(); if (attr == null) { - throw new IllegalArgumentException("Invalid attribute (must not be null)"); + throw new IllegalArgumentException( + "Invalid attribute 
(must not be null)"); } super.visitAttribute(attr); } @@ -566,10 +543,12 @@ public class CheckClassAdapter extends ClassVisitor { */ private void checkState() { if (!start) { - throw new IllegalStateException("Cannot visit member before visit has been called."); + throw new IllegalStateException( + "Cannot visit member before visit has been called."); } if (end) { - throw new IllegalStateException("Cannot visit member after visitEnd has been called."); + throw new IllegalStateException( + "Cannot visit member after visitEnd has been called."); } } @@ -578,8 +557,10 @@ public class CheckClassAdapter extends ClassVisitor { * method also checks that mutually incompatible flags are not set * simultaneously. * - * @param access the access flags to be checked - * @param possibleAccess the valid access flags. + * @param access + * the access flags to be checked + * @param possibleAccess + * the valid access flags. */ static void checkAccess(final int access, final int possibleAccess) { if ((access & ~possibleAccess) != 0) { @@ -590,14 +571,336 @@ public class CheckClassAdapter extends ClassVisitor { int pri = (access & Opcodes.ACC_PRIVATE) == 0 ? 0 : 1; int pro = (access & Opcodes.ACC_PROTECTED) == 0 ? 0 : 1; if (pub + pri + pro > 1) { - throw new IllegalArgumentException("public private and protected are mutually exclusive: " - + access); + throw new IllegalArgumentException( + "public private and protected are mutually exclusive: " + + access); } int fin = (access & Opcodes.ACC_FINAL) == 0 ? 0 : 1; int abs = (access & Opcodes.ACC_ABSTRACT) == 0 ? 0 : 1; if (fin + abs > 1) { - throw new IllegalArgumentException("final and abstract are mutually exclusive: " - + access); + throw new IllegalArgumentException( + "final and abstract are mutually exclusive: " + access); + } + } + + /** + * Checks a class signature. + * + * @param signature + * a string containing the signature that must be checked. + */ + public static void checkClassSignature(final String signature) { + // ClassSignature: + // FormalTypeParameters? ClassTypeSignature ClassTypeSignature* + + int pos = 0; + if (getChar(signature, 0) == '<') { + pos = checkFormalTypeParameters(signature, pos); + } + pos = checkClassTypeSignature(signature, pos); + while (getChar(signature, pos) == 'L') { + pos = checkClassTypeSignature(signature, pos); + } + if (pos != signature.length()) { + throw new IllegalArgumentException(signature + ": error at index " + + pos); + } + } + + /** + * Checks a method signature. + * + * @param signature + * a string containing the signature that must be checked. + */ + public static void checkMethodSignature(final String signature) { + // MethodTypeSignature: + // FormalTypeParameters? 
( TypeSignature* ) ( TypeSignature | V ) ( + // ^ClassTypeSignature | ^TypeVariableSignature )* + + int pos = 0; + if (getChar(signature, 0) == '<') { + pos = checkFormalTypeParameters(signature, pos); + } + pos = checkChar('(', signature, pos); + while ("ZCBSIFJDL[T".indexOf(getChar(signature, pos)) != -1) { + pos = checkTypeSignature(signature, pos); + } + pos = checkChar(')', signature, pos); + if (getChar(signature, pos) == 'V') { + ++pos; + } else { + pos = checkTypeSignature(signature, pos); + } + while (getChar(signature, pos) == '^') { + ++pos; + if (getChar(signature, pos) == 'L') { + pos = checkClassTypeSignature(signature, pos); + } else { + pos = checkTypeVariableSignature(signature, pos); + } + } + if (pos != signature.length()) { + throw new IllegalArgumentException(signature + ": error at index " + + pos); + } + } + + /** + * Checks a field signature. + * + * @param signature + * a string containing the signature that must be checked. + */ + public static void checkFieldSignature(final String signature) { + int pos = checkFieldTypeSignature(signature, 0); + if (pos != signature.length()) { + throw new IllegalArgumentException(signature + ": error at index " + + pos); + } + } + + /** + * Checks the formal type parameters of a class or method signature. + * + * @param signature + * a string containing the signature that must be checked. + * @param pos + * index of first character to be checked. + * @return the index of the first character after the checked part. + */ + private static int checkFormalTypeParameters(final String signature, int pos) { + // FormalTypeParameters: + // < FormalTypeParameter+ > + + pos = checkChar('<', signature, pos); + pos = checkFormalTypeParameter(signature, pos); + while (getChar(signature, pos) != '>') { + pos = checkFormalTypeParameter(signature, pos); + } + return pos + 1; + } + + /** + * Checks a formal type parameter of a class or method signature. + * + * @param signature + * a string containing the signature that must be checked. + * @param pos + * index of first character to be checked. + * @return the index of the first character after the checked part. + */ + private static int checkFormalTypeParameter(final String signature, int pos) { + // FormalTypeParameter: + // Identifier : FieldTypeSignature? (: FieldTypeSignature)* + + pos = checkIdentifier(signature, pos); + pos = checkChar(':', signature, pos); + if ("L[T".indexOf(getChar(signature, pos)) != -1) { + pos = checkFieldTypeSignature(signature, pos); + } + while (getChar(signature, pos) == ':') { + pos = checkFieldTypeSignature(signature, pos + 1); + } + return pos; + } + + /** + * Checks a field type signature. + * + * @param signature + * a string containing the signature that must be checked. + * @param pos + * index of first character to be checked. + * @return the index of the first character after the checked part. + */ + private static int checkFieldTypeSignature(final String signature, int pos) { + // FieldTypeSignature: + // ClassTypeSignature | ArrayTypeSignature | TypeVariableSignature + // + // ArrayTypeSignature: + // [ TypeSignature + + switch (getChar(signature, pos)) { + case 'L': + return checkClassTypeSignature(signature, pos); + case '[': + return checkTypeSignature(signature, pos + 1); + default: + return checkTypeVariableSignature(signature, pos); } } + + /** + * Checks a class type signature. + * + * @param signature + * a string containing the signature that must be checked. + * @param pos + * index of first character to be checked. 
+ * @return the index of the first character after the checked part. + */ + private static int checkClassTypeSignature(final String signature, int pos) { + // ClassTypeSignature: + // L Identifier ( / Identifier )* TypeArguments? ( . Identifier + // TypeArguments? )* ; + + pos = checkChar('L', signature, pos); + pos = checkIdentifier(signature, pos); + while (getChar(signature, pos) == '/') { + pos = checkIdentifier(signature, pos + 1); + } + if (getChar(signature, pos) == '<') { + pos = checkTypeArguments(signature, pos); + } + while (getChar(signature, pos) == '.') { + pos = checkIdentifier(signature, pos + 1); + if (getChar(signature, pos) == '<') { + pos = checkTypeArguments(signature, pos); + } + } + return checkChar(';', signature, pos); + } + + /** + * Checks the type arguments in a class type signature. + * + * @param signature + * a string containing the signature that must be checked. + * @param pos + * index of first character to be checked. + * @return the index of the first character after the checked part. + */ + private static int checkTypeArguments(final String signature, int pos) { + // TypeArguments: + // < TypeArgument+ > + + pos = checkChar('<', signature, pos); + pos = checkTypeArgument(signature, pos); + while (getChar(signature, pos) != '>') { + pos = checkTypeArgument(signature, pos); + } + return pos + 1; + } + + /** + * Checks a type argument in a class type signature. + * + * @param signature + * a string containing the signature that must be checked. + * @param pos + * index of first character to be checked. + * @return the index of the first character after the checked part. + */ + private static int checkTypeArgument(final String signature, int pos) { + // TypeArgument: + // * | ( ( + | - )? FieldTypeSignature ) + + char c = getChar(signature, pos); + if (c == '*') { + return pos + 1; + } else if (c == '+' || c == '-') { + pos++; + } + return checkFieldTypeSignature(signature, pos); + } + + /** + * Checks a type variable signature. + * + * @param signature + * a string containing the signature that must be checked. + * @param pos + * index of first character to be checked. + * @return the index of the first character after the checked part. + */ + private static int checkTypeVariableSignature(final String signature, + int pos) { + // TypeVariableSignature: + // T Identifier ; + + pos = checkChar('T', signature, pos); + pos = checkIdentifier(signature, pos); + return checkChar(';', signature, pos); + } + + /** + * Checks a type signature. + * + * @param signature + * a string containing the signature that must be checked. + * @param pos + * index of first character to be checked. + * @return the index of the first character after the checked part. + */ + private static int checkTypeSignature(final String signature, int pos) { + // TypeSignature: + // Z | C | B | S | I | F | J | D | FieldTypeSignature + + switch (getChar(signature, pos)) { + case 'Z': + case 'C': + case 'B': + case 'S': + case 'I': + case 'F': + case 'J': + case 'D': + return pos + 1; + default: + return checkFieldTypeSignature(signature, pos); + } + } + + /** + * Checks an identifier. + * + * @param signature + * a string containing the signature that must be checked. + * @param pos + * index of first character to be checked. + * @return the index of the first character after the checked part. 
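The grammar comments above follow the JVMS generic-signature grammar that these new public checkers walk recursively. A few illustrative calls; the signature strings below are ordinary examples, not taken from this patch, and a malformed string makes the checker throw with the offending index:

import scala.tools.asm.util.CheckClassAdapter;

public class SignatureCheckExample {
    public static void main(String[] args) {
        // class C<T> extends Object implements Comparable<T>
        CheckClassAdapter.checkClassSignature(
            "<T:Ljava/lang/Object;>Ljava/lang/Object;Ljava/lang/Comparable<TT;>;");
        // <T> java.util.List<T> m(T) throws java.io.IOException
        CheckClassAdapter.checkMethodSignature(
            "<T:Ljava/lang/Object;>(TT;)Ljava/util/List<TT;>;^Ljava/io/IOException;");
        // field of type java.util.List<String>
        CheckClassAdapter.checkFieldSignature("Ljava/util/List<Ljava/lang/String;>;");
        try {
            CheckClassAdapter.checkClassSignature("Ljava/lang/Object"); // missing ';'
        } catch (IllegalArgumentException expected) {
            // the checkers report the index of the first invalid character
        }
    }
}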
+ */ + private static int checkIdentifier(final String signature, int pos) { + if (!Character.isJavaIdentifierStart(getChar(signature, pos))) { + throw new IllegalArgumentException(signature + + ": identifier expected at index " + pos); + } + ++pos; + while (Character.isJavaIdentifierPart(getChar(signature, pos))) { + ++pos; + } + return pos; + } + + /** + * Checks a single character. + * + * @param signature + * a string containing the signature that must be checked. + * @param pos + * index of first character to be checked. + * @return the index of the first character after the checked part. + */ + private static int checkChar(final char c, final String signature, int pos) { + if (getChar(signature, pos) == c) { + return pos + 1; + } + throw new IllegalArgumentException(signature + ": '" + c + + "' expected at index " + pos); + } + + /** + * Returns the signature car at the given index. + * + * @param signature + * a signature. + * @param pos + * an index in signature. + * @return the character at the given index, or 0 if there is no such + * character. + */ + private static char getChar(final String signature, int pos) { + return pos < signature.length() ? signature.charAt(pos) : (char) 0; + } } diff --git a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java index bdcbe14b16..4657605936 100644 --- a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java +++ b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java @@ -46,7 +46,8 @@ public class CheckFieldAdapter extends FieldVisitor { * this constructor. Instead, they must use the * {@link #CheckFieldAdapter(int, FieldVisitor)} version. * - * @param fv the field visitor to which this adapter must delegate calls. + * @param fv + * the field visitor to which this adapter must delegate calls. */ public CheckFieldAdapter(final FieldVisitor fv) { this(Opcodes.ASM4, fv); @@ -55,19 +56,19 @@ public class CheckFieldAdapter extends FieldVisitor { /** * Constructs a new {@link CheckFieldAdapter}. * - * @param api the ASM API version implemented by this visitor. Must be one - * of {@link Opcodes#ASM4}. - * @param fv the field visitor to which this adapter must delegate calls. + * @param api + * the ASM API version implemented by this visitor. Must be one + * of {@link Opcodes#ASM4}. + * @param fv + * the field visitor to which this adapter must delegate calls. 
*/ protected CheckFieldAdapter(final int api, final FieldVisitor fv) { super(api, fv); } @Override - public AnnotationVisitor visitAnnotation( - final String desc, - final boolean visible) - { + public AnnotationVisitor visitAnnotation(final String desc, + final boolean visible) { checkEnd(); CheckMethodAdapter.checkDesc(desc, false); return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible)); @@ -77,7 +78,8 @@ public class CheckFieldAdapter extends FieldVisitor { public void visitAttribute(final Attribute attr) { checkEnd(); if (attr == null) { - throw new IllegalArgumentException("Invalid attribute (must not be null)"); + throw new IllegalArgumentException( + "Invalid attribute (must not be null)"); } super.visitAttribute(attr); } @@ -91,7 +93,8 @@ public class CheckFieldAdapter extends FieldVisitor { private void checkEnd() { if (end) { - throw new IllegalStateException("Cannot call a visit method after visitEnd has been called"); + throw new IllegalStateException( + "Cannot call a visit method after visitEnd has been called"); } } } diff --git a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java index 7549765421..9da01c9d6e 100644 --- a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java +++ b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java @@ -58,7 +58,7 @@ import scala.tools.asm.tree.analysis.BasicVerifier; * arguments - such as the fact that the given opcode is correct for a given * visit method. This adapter can also perform some basic data flow checks (more * precisely those that can be performed without the full class hierarchy - see - * {@link org.objectweb.asm.tree.analysis.BasicVerifier}). For instance in a + * {@link scala.tools.asm.tree.analysis.BasicVerifier}). For instance in a * method whose signature is void m (), the invalid instruction * IRETURN, or the invalid sequence IADD L2I will be detected if the data flow * checks are enabled. These checks are enabled by using the @@ -74,6 +74,11 @@ public class CheckMethodAdapter extends MethodVisitor { */ public int version; + /** + * The access flags of the method. + */ + private int access; + /** * true if the visitCode method has been called. */ @@ -106,6 +111,21 @@ public class CheckMethodAdapter extends MethodVisitor { */ private Set

    - * Usage: Textifier [-debug] <binary class name or class - * file name > + * Prints a disassembled view of the given class to the standard output. + *

    + * Usage: Textifier [-debug] <binary class name or class file name > * - * @param args the command line arguments. + * @param args + * the command line arguments. * - * @throws Exception if the class cannot be found, or if an IO exception - * occurs. + * @throws Exception + * if the class cannot be found, or if an IO exception occurs. */ public static void main(final String[] args) throws Exception { int i = 0; @@ -182,21 +184,20 @@ public class Textifier extends Printer { } } if (!ok) { - System.err.println("Prints a disassembled view of the given class."); + System.err + .println("Prints a disassembled view of the given class."); System.err.println("Usage: Textifier [-debug] " + ""); return; } ClassReader cr; if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1 - || args[i].indexOf('/') > -1) - { + || args[i].indexOf('/') > -1) { cr = new ClassReader(new FileInputStream(args[i])); } else { cr = new ClassReader(args[i]); } - cr.accept(new TraceClassVisitor(new PrintWriter(System.out)), - flags); + cr.accept(new TraceClassVisitor(new PrintWriter(System.out)), flags); } // ------------------------------------------------------------------------ @@ -204,38 +205,27 @@ public class Textifier extends Printer { // ------------------------------------------------------------------------ @Override - public void visit( - final int version, - final int access, - final String name, - final String signature, - final String superName, - final String[] interfaces) - { + public void visit(final int version, final int access, final String name, + final String signature, final String superName, + final String[] interfaces) { int major = version & 0xFFFF; int minor = version >>> 16; buf.setLength(0); - buf.append("// class version ") - .append(major) - .append('.') - .append(minor) - .append(" (") - .append(version) - .append(")\n"); + buf.append("// class version ").append(major).append('.').append(minor) + .append(" (").append(version).append(")\n"); if ((access & Opcodes.ACC_DEPRECATED) != 0) { buf.append("// DEPRECATED\n"); } - buf.append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n'); + buf.append("// access flags 0x") + .append(Integer.toHexString(access).toUpperCase()).append('\n'); appendDescriptor(CLASS_SIGNATURE, signature); if (signature != null) { TraceSignatureVisitor sv = new TraceSignatureVisitor(access); SignatureReader r = new SignatureReader(signature); r.accept(sv); - buf.append("// declaration: ") - .append(name) - .append(sv.getDeclaration()) - .append('\n'); + buf.append("// declaration: ").append(name) + .append(sv.getDeclaration()).append('\n'); } appendAccess(access & ~Opcodes.ACC_SUPER); @@ -269,15 +259,11 @@ public class Textifier extends Printer { public void visitSource(final String file, final String debug) { buf.setLength(0); if (file != null) { - buf.append(tab) - .append("// compiled from: ") - .append(file) + buf.append(tab).append("// compiled from: ").append(file) .append('\n'); } if (debug != null) { - buf.append(tab) - .append("// debug info: ") - .append(debug) + buf.append(tab).append("// debug info: ").append(debug) .append('\n'); } if (buf.length() > 0) { @@ -286,11 +272,8 @@ public class Textifier extends Printer { } @Override - public void visitOuterClass( - final String owner, - final String name, - final String desc) - { + public void visitOuterClass(final String owner, final String name, + final String desc) { buf.setLength(0); buf.append(tab).append("OUTERCLASS "); appendDescriptor(INTERNAL_NAME, owner); @@ 
-304,10 +287,8 @@ public class Textifier extends Printer { } @Override - public Textifier visitClassAnnotation( - final String desc, - final boolean visible) - { + public Textifier visitClassAnnotation(final String desc, + final boolean visible) { text.add("\n"); return visitAnnotation(desc, visible); } @@ -319,15 +300,13 @@ public class Textifier extends Printer { } @Override - public void visitInnerClass( - final String name, - final String outerName, - final String innerName, - final int access) - { + public void visitInnerClass(final String name, final String outerName, + final String innerName, final int access) { buf.setLength(0); buf.append(tab).append("// access flags 0x"); - buf.append(Integer.toHexString(access & ~Opcodes.ACC_SUPER).toUpperCase()).append('\n'); + buf.append( + Integer.toHexString(access & ~Opcodes.ACC_SUPER).toUpperCase()) + .append('\n'); buf.append(tab); appendAccess(access); buf.append("INNERCLASS "); @@ -341,19 +320,15 @@ public class Textifier extends Printer { } @Override - public Textifier visitField( - final int access, - final String name, - final String desc, - final String signature, - final Object value) - { + public Textifier visitField(final int access, final String name, + final String desc, final String signature, final Object value) { buf.setLength(0); buf.append('\n'); if ((access & Opcodes.ACC_DEPRECATED) != 0) { buf.append(tab).append("// DEPRECATED\n"); } - buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n'); + buf.append(tab).append("// access flags 0x") + .append(Integer.toHexString(access).toUpperCase()).append('\n'); if (signature != null) { buf.append(tab); appendDescriptor(FIELD_SIGNATURE, signature); @@ -361,10 +336,8 @@ public class Textifier extends Printer { TraceSignatureVisitor sv = new TraceSignatureVisitor(0); SignatureReader r = new SignatureReader(signature); r.acceptType(sv); - buf.append(tab) - .append("// declaration: ") - .append(sv.getDeclaration()) - .append('\n'); + buf.append(tab).append("// declaration: ") + .append(sv.getDeclaration()).append('\n'); } buf.append(tab); @@ -390,19 +363,15 @@ public class Textifier extends Printer { } @Override - public Textifier visitMethod( - final int access, - final String name, - final String desc, - final String signature, - final String[] exceptions) - { + public Textifier visitMethod(final int access, final String name, + final String desc, final String signature, final String[] exceptions) { buf.setLength(0); buf.append('\n'); if ((access & Opcodes.ACC_DEPRECATED) != 0) { buf.append(tab).append("// DEPRECATED\n"); } - buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n'); + buf.append(tab).append("// access flags 0x") + .append(Integer.toHexString(access).toUpperCase()).append('\n'); if (signature != null) { buf.append(tab); @@ -415,12 +384,8 @@ public class Textifier extends Printer { String genericReturn = v.getReturnType(); String genericExceptions = v.getExceptions(); - buf.append(tab) - .append("// declaration: ") - .append(genericReturn) - .append(' ') - .append(name) - .append(genericDecl); + buf.append(tab).append("// declaration: ").append(genericReturn) + .append(' ').append(name).append(genericDecl); if (genericExceptions != null) { buf.append(" throws ").append(genericExceptions); } @@ -593,11 +558,8 @@ public class Textifier extends Printer { } @Override - public void visitEnum( - final String name, - final String desc, - final String value) - { + public 
void visitEnum(final String name, final String desc, + final String value) { buf.setLength(0); appendComa(valueNumber++); if (name != null) { @@ -609,10 +571,7 @@ public class Textifier extends Printer { } @Override - public Textifier visitAnnotation( - final String name, - final String desc) - { + public Textifier visitAnnotation(final String name, final String desc) { buf.setLength(0); appendComa(valueNumber++); if (name != null) { @@ -629,9 +588,7 @@ public class Textifier extends Printer { } @Override - public Textifier visitArray( - final String name) - { + public Textifier visitArray(final String name) { buf.setLength(0); appendComa(valueNumber++); if (name != null) { @@ -654,10 +611,8 @@ public class Textifier extends Printer { // ------------------------------------------------------------------------ @Override - public Textifier visitFieldAnnotation( - final String desc, - final boolean visible) - { + public Textifier visitFieldAnnotation(final String desc, + final boolean visible) { return visitAnnotation(desc, visible); } @@ -684,19 +639,14 @@ public class Textifier extends Printer { } @Override - public Textifier visitMethodAnnotation( - final String desc, - final boolean visible) - { + public Textifier visitMethodAnnotation(final String desc, + final boolean visible) { return visitAnnotation(desc, visible); } @Override - public Textifier visitParameterAnnotation( - final int parameter, - final String desc, - final boolean visible) - { + public Textifier visitParameterAnnotation(final int parameter, + final String desc, final boolean visible) { buf.setLength(0); buf.append(tab2).append('@'); appendDescriptor(FIELD_DESCRIPTOR, desc); @@ -730,40 +680,35 @@ public class Textifier extends Printer { } @Override - public void visitFrame( - final int type, - final int nLocal, - final Object[] local, - final int nStack, - final Object[] stack) - { + public void visitFrame(final int type, final int nLocal, + final Object[] local, final int nStack, final Object[] stack) { buf.setLength(0); buf.append(ltab); buf.append("FRAME "); switch (type) { - case Opcodes.F_NEW: - case Opcodes.F_FULL: - buf.append("FULL ["); - appendFrameTypes(nLocal, local); - buf.append("] ["); - appendFrameTypes(nStack, stack); - buf.append(']'); - break; - case Opcodes.F_APPEND: - buf.append("APPEND ["); - appendFrameTypes(nLocal, local); - buf.append(']'); - break; - case Opcodes.F_CHOP: - buf.append("CHOP ").append(nLocal); - break; - case Opcodes.F_SAME: - buf.append("SAME"); - break; - case Opcodes.F_SAME1: - buf.append("SAME1 "); - appendFrameTypes(1, stack); - break; + case Opcodes.F_NEW: + case Opcodes.F_FULL: + buf.append("FULL ["); + appendFrameTypes(nLocal, local); + buf.append("] ["); + appendFrameTypes(nStack, stack); + buf.append(']'); + break; + case Opcodes.F_APPEND: + buf.append("APPEND ["); + appendFrameTypes(nLocal, local); + buf.append(']'); + break; + case Opcodes.F_CHOP: + buf.append("CHOP ").append(nLocal); + break; + case Opcodes.F_SAME: + buf.append("SAME"); + break; + case Opcodes.F_SAME1: + buf.append("SAME1 "); + appendFrameTypes(1, stack); + break; } buf.append('\n'); text.add(buf.toString()); @@ -782,20 +727,15 @@ public class Textifier extends Printer { buf.append(tab2) .append(OPCODES[opcode]) .append(' ') - .append(opcode == Opcodes.NEWARRAY - ? TYPES[operand] - : Integer.toString(operand)) - .append('\n'); + .append(opcode == Opcodes.NEWARRAY ? 
TYPES[operand] : Integer + .toString(operand)).append('\n'); text.add(buf.toString()); } @Override public void visitVarInsn(final int opcode, final int var) { buf.setLength(0); - buf.append(tab2) - .append(OPCODES[opcode]) - .append(' ') - .append(var) + buf.append(tab2).append(OPCODES[opcode]).append(' ').append(var) .append('\n'); text.add(buf.toString()); } @@ -810,12 +750,8 @@ public class Textifier extends Printer { } @Override - public void visitFieldInsn( - final int opcode, - final String owner, - final String name, - final String desc) - { + public void visitFieldInsn(final int opcode, final String owner, + final String name, final String desc) { buf.setLength(0); buf.append(tab2).append(OPCODES[opcode]).append(' '); appendDescriptor(INTERNAL_NAME, owner); @@ -826,12 +762,8 @@ public class Textifier extends Printer { } @Override - public void visitMethodInsn( - final int opcode, - final String owner, - final String name, - final String desc) - { + public void visitMethodInsn(final int opcode, final String owner, + final String name, final String desc) { buf.setLength(0); buf.append(tab2).append(OPCODES[opcode]).append(' '); appendDescriptor(INTERNAL_NAME, owner); @@ -842,12 +774,8 @@ public class Textifier extends Printer { } @Override - public void visitInvokeDynamicInsn( - String name, - String desc, - Handle bsm, - Object... bsmArgs) - { + public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, + Object... bsmArgs) { buf.setLength(0); buf.append(tab2).append("INVOKEDYNAMIC").append(' '); buf.append(name); @@ -855,11 +783,11 @@ public class Textifier extends Printer { buf.append(" ["); appendHandle(bsm); buf.append(tab3).append("// arguments:"); - if(bsmArgs.length == 0) { + if (bsmArgs.length == 0) { buf.append(" none"); } else { buf.append('\n').append(tab3); - for(int i = 0; i < bsmArgs.length; i++) { + for (int i = 0; i < bsmArgs.length; i++) { Object cst = bsmArgs[i]; if (cst instanceof String) { Printer.appendString(buf, (String) cst); @@ -915,22 +843,14 @@ public class Textifier extends Printer { @Override public void visitIincInsn(final int var, final int increment) { buf.setLength(0); - buf.append(tab2) - .append("IINC ") - .append(var) - .append(' ') - .append(increment) - .append('\n'); + buf.append(tab2).append("IINC ").append(var).append(' ') + .append(increment).append('\n'); text.add(buf.toString()); } @Override - public void visitTableSwitchInsn( - final int min, - final int max, - final Label dflt, - final Label... labels) - { + public void visitTableSwitchInsn(final int min, final int max, + final Label dflt, final Label... 
labels) { buf.setLength(0); buf.append(tab2).append("TABLESWITCH\n"); for (int i = 0; i < labels.length; ++i) { @@ -945,11 +865,8 @@ public class Textifier extends Printer { } @Override - public void visitLookupSwitchInsn( - final Label dflt, - final int[] keys, - final Label[] labels) - { + public void visitLookupSwitchInsn(final Label dflt, final int[] keys, + final Label[] labels) { buf.setLength(0); buf.append(tab2).append("LOOKUPSWITCH\n"); for (int i = 0; i < labels.length; ++i) { @@ -973,12 +890,8 @@ public class Textifier extends Printer { } @Override - public void visitTryCatchBlock( - final Label start, - final Label end, - final Label handler, - final String type) - { + public void visitTryCatchBlock(final Label start, final Label end, + final Label handler, final String type) { buf.setLength(0); buf.append(tab2).append("TRYCATCHBLOCK "); appendLabel(start); @@ -993,14 +906,9 @@ public class Textifier extends Printer { } @Override - public void visitLocalVariable( - final String name, - final String desc, - final String signature, - final Label start, - final Label end, - final int index) - { + public void visitLocalVariable(final String name, final String desc, + final String signature, final Label start, final Label end, + final int index) { buf.setLength(0); buf.append(tab2).append("LOCALVARIABLE ").append(name).append(' '); appendDescriptor(FIELD_DESCRIPTOR, desc); @@ -1017,10 +925,8 @@ public class Textifier extends Printer { TraceSignatureVisitor sv = new TraceSignatureVisitor(0); SignatureReader r = new SignatureReader(signature); r.acceptType(sv); - buf.append(tab2) - .append("// declaration: ") - .append(sv.getDeclaration()) - .append('\n'); + buf.append(tab2).append("// declaration: ") + .append(sv.getDeclaration()).append('\n'); } text.add(buf.toString()); } @@ -1056,14 +962,13 @@ public class Textifier extends Printer { /** * Prints a disassembled view of the given annotation. * - * @param desc the class descriptor of the annotation class. - * @param visible true if the annotation is visible at runtime. + * @param desc + * the class descriptor of the annotation class. + * @param visible + * true if the annotation is visible at runtime. * @return a visitor to visit the annotation values. */ - public Textifier visitAnnotation( - final String desc, - final boolean visible) - { + public Textifier visitAnnotation(final String desc, final boolean visible) { buf.setLength(0); buf.append(tab).append('@'); appendDescriptor(FIELD_DESCRIPTOR, desc); @@ -1078,7 +983,8 @@ public class Textifier extends Printer { /** * Prints a disassembled view of the given attribute. * - * @param attr an attribute. + * @param attr + * an attribute. */ public void visitAttribute(final Attribute attr) { buf.setLength(0); @@ -1111,15 +1017,16 @@ public class Textifier extends Printer { * Appends an internal name, a type descriptor or a type signature to * {@link #buf buf}. * - * @param type indicates if desc is an internal name, a field descriptor, a - * method descriptor, a class signature, ... - * @param desc an internal name, type descriptor, or type signature. May be - * null. + * @param type + * indicates if desc is an internal name, a field descriptor, a + * method descriptor, a class signature, ... + * @param desc + * an internal name, type descriptor, or type signature. May be + * null. 
*/ protected void appendDescriptor(final int type, final String desc) { if (type == CLASS_SIGNATURE || type == FIELD_SIGNATURE - || type == METHOD_SIGNATURE) - { + || type == METHOD_SIGNATURE) { if (desc != null) { buf.append("// signature ").append(desc).append('\n'); } @@ -1132,7 +1039,8 @@ public class Textifier extends Printer { * Appends the name of the given label to {@link #buf buf}. Creates a new * label name if the given label does not yet have one. * - * @param l a label. + * @param l + * a label. */ protected void appendLabel(final Label l) { if (labelNames == null) { @@ -1149,40 +1057,42 @@ public class Textifier extends Printer { /** * Appends the information about the given handle to {@link #buf buf}. * - * @param h a handle, non null. + * @param h + * a handle, non null. */ protected void appendHandle(final Handle h) { buf.append('\n').append(tab3); int tag = h.getTag(); - buf.append("// handle kind 0x").append(Integer.toHexString(tag)).append(" : "); + buf.append("// handle kind 0x").append(Integer.toHexString(tag)) + .append(" : "); switch (tag) { - case Opcodes.H_GETFIELD: - buf.append("GETFIELD"); - break; - case Opcodes.H_GETSTATIC: - buf.append("GETSTATIC"); - break; - case Opcodes.H_PUTFIELD: - buf.append("PUTFIELD"); - break; - case Opcodes.H_PUTSTATIC: - buf.append("PUTSTATIC"); - break; - case Opcodes.H_INVOKEINTERFACE: - buf.append("INVOKEINTERFACE"); - break; - case Opcodes.H_INVOKESPECIAL: - buf.append("INVOKESPECIAL"); - break; - case Opcodes.H_INVOKESTATIC: - buf.append("INVOKESTATIC"); - break; - case Opcodes.H_INVOKEVIRTUAL: - buf.append("INVOKEVIRTUAL"); - break; - case Opcodes.H_NEWINVOKESPECIAL: - buf.append("NEWINVOKESPECIAL"); - break; + case Opcodes.H_GETFIELD: + buf.append("GETFIELD"); + break; + case Opcodes.H_GETSTATIC: + buf.append("GETSTATIC"); + break; + case Opcodes.H_PUTFIELD: + buf.append("PUTFIELD"); + break; + case Opcodes.H_PUTSTATIC: + buf.append("PUTSTATIC"); + break; + case Opcodes.H_INVOKEINTERFACE: + buf.append("INVOKEINTERFACE"); + break; + case Opcodes.H_INVOKESPECIAL: + buf.append("INVOKESPECIAL"); + break; + case Opcodes.H_INVOKESTATIC: + buf.append("INVOKESTATIC"); + break; + case Opcodes.H_INVOKEVIRTUAL: + buf.append("INVOKEVIRTUAL"); + break; + case Opcodes.H_NEWINVOKESPECIAL: + buf.append("NEWINVOKESPECIAL"); + break; } buf.append('\n'); buf.append(tab3); @@ -1195,10 +1105,11 @@ public class Textifier extends Printer { } /** - * Appends a string representation of the given access modifiers to {@link - * #buf buf}. + * Appends a string representation of the given access modifiers to + * {@link #buf buf}. * - * @param access some access modifiers. + * @param access + * some access modifiers. 
*/ private void appendAccess(final int access) { if ((access & Opcodes.ACC_PUBLIC) != 0) { @@ -1231,6 +1142,9 @@ public class Textifier extends Printer { if ((access & Opcodes.ACC_STRICT) != 0) { buf.append("strictfp "); } + if ((access & Opcodes.ACC_SYNTHETIC) != 0) { + buf.append("synthetic "); + } if ((access & Opcodes.ACC_ENUM) != 0) { buf.append("enum "); } @@ -1256,27 +1170,27 @@ public class Textifier extends Printer { } } else if (o[i] instanceof Integer) { switch (((Integer) o[i]).intValue()) { - case 0: - appendDescriptor(FIELD_DESCRIPTOR, "T"); - break; - case 1: - appendDescriptor(FIELD_DESCRIPTOR, "I"); - break; - case 2: - appendDescriptor(FIELD_DESCRIPTOR, "F"); - break; - case 3: - appendDescriptor(FIELD_DESCRIPTOR, "D"); - break; - case 4: - appendDescriptor(FIELD_DESCRIPTOR, "J"); - break; - case 5: - appendDescriptor(FIELD_DESCRIPTOR, "N"); - break; - case 6: - appendDescriptor(FIELD_DESCRIPTOR, "U"); - break; + case 0: + appendDescriptor(FIELD_DESCRIPTOR, "T"); + break; + case 1: + appendDescriptor(FIELD_DESCRIPTOR, "I"); + break; + case 2: + appendDescriptor(FIELD_DESCRIPTOR, "F"); + break; + case 3: + appendDescriptor(FIELD_DESCRIPTOR, "D"); + break; + case 4: + appendDescriptor(FIELD_DESCRIPTOR, "J"); + break; + case 5: + appendDescriptor(FIELD_DESCRIPTOR, "N"); + break; + case 6: + appendDescriptor(FIELD_DESCRIPTOR, "U"); + break; } } else { appendLabel((Label) o[i]); diff --git a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java index f112609031..33e7cf0b26 100644 --- a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java +++ b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java @@ -58,33 +58,26 @@ public final class TraceAnnotationVisitor extends AnnotationVisitor { } @Override - public void visitEnum( - final String name, - final String desc, - final String value) - { + public void visitEnum(final String name, final String desc, + final String value) { p.visitEnum(name, desc, value); super.visitEnum(name, desc, value); } @Override - public AnnotationVisitor visitAnnotation( - final String name, - final String desc) - { + public AnnotationVisitor visitAnnotation(final String name, + final String desc) { Printer p = this.p.visitAnnotation(name, desc); - AnnotationVisitor av = this.av == null - ? null - : this.av.visitAnnotation(name, desc); + AnnotationVisitor av = this.av == null ? null : this.av + .visitAnnotation(name, desc); return new TraceAnnotationVisitor(av, p); } @Override public AnnotationVisitor visitArray(final String name) { Printer p = this.p.visitArray(name); - AnnotationVisitor av = this.av == null - ? null - : this.av.visitArray(name); + AnnotationVisitor av = this.av == null ? null : this.av + .visitArray(name); return new TraceAnnotationVisitor(av, p); } diff --git a/src/asm/scala/tools/asm/util/TraceClassVisitor.java b/src/asm/scala/tools/asm/util/TraceClassVisitor.java index bb830b71ce..ff7a017482 100644 --- a/src/asm/scala/tools/asm/util/TraceClassVisitor.java +++ b/src/asm/scala/tools/asm/util/TraceClassVisitor.java @@ -42,30 +42,41 @@ import scala.tools.asm.Opcodes; * A {@link ClassVisitor} that prints the classes it visits with a * {@link Printer}. This class visitor can be used in the middle of a class * visitor chain to trace the class that is visited at a given point in this - * chain. This may be useful for debugging purposes.

<p> The trace printed when
- * visiting the <tt>Hello</tt> class is the following: <p> <blockquote>
- *
- * <pre> // class version 49.0 (49) // access flags 0x21 public class Hello {
- *
+ * chain. This may be useful for debugging purposes.
+ * <p>
+ * The trace printed when visiting the <tt>Hello</tt> class is the following:
+ * <p>
+ * <blockquote>
+ *
+ * <pre>
+ * // class version 49.0 (49) // access flags 0x21 public class Hello {
+ *
  * // compiled from: Hello.java
- *
+ *
  * // access flags 0x1 public <init> ()V ALOAD 0 INVOKESPECIAL
  * java/lang/Object <init> ()V RETURN MAXSTACK = 1 MAXLOCALS = 1
- *
+ *
  * // access flags 0x9 public static main ([Ljava/lang/String;)V GETSTATIC
  * java/lang/System out Ljava/io/PrintStream; LDC "hello"
  * INVOKEVIRTUAL java/io/PrintStream println (Ljava/lang/String;)V RETURN
- * MAXSTACK = 2 MAXLOCALS = 1 } </pre>
- *
- * </blockquote> where <tt>Hello</tt> is defined by:
- *
- * <pre> public class Hello {
- *
- * public static void main(String[] args) {
- * System.out.println("hello"); } } </pre>
+ * MAXSTACK = 2 MAXLOCALS = 1 }
+ * </pre>
+ *
+ * </blockquote> where <tt>Hello</tt> is defined by:
+ *
+ * <blockquote>
+ *
+ * <pre>
+ * public class Hello {
+ *
+ *     public static void main(String[] args) {
+ *         System.out.println("hello");
+ *     }
+ * }
+ * </pre>
+ *
+ * </blockquote>
    - * + * * @author Eric Bruneton * @author Eugene Kuleshov */ @@ -83,8 +94,9 @@ public final class TraceClassVisitor extends ClassVisitor { /** * Constructs a new {@link TraceClassVisitor}. - * - * @param pw the print writer to be used to print the class. + * + * @param pw + * the print writer to be used to print the class. */ public TraceClassVisitor(final PrintWriter pw) { this(null, pw); @@ -92,10 +104,12 @@ public final class TraceClassVisitor extends ClassVisitor { /** * Constructs a new {@link TraceClassVisitor}. - * - * @param cv the {@link ClassVisitor} to which this visitor delegates calls. - * May be null. - * @param pw the print writer to be used to print the class. + * + * @param cv + * the {@link ClassVisitor} to which this visitor delegates + * calls. May be null. + * @param pw + * the print writer to be used to print the class. */ public TraceClassVisitor(final ClassVisitor cv, final PrintWriter pw) { this(cv, new Textifier(), pw); @@ -103,33 +117,28 @@ public final class TraceClassVisitor extends ClassVisitor { /** * Constructs a new {@link TraceClassVisitor}. - * - * @param cv the {@link ClassVisitor} to which this visitor delegates calls. - * May be null. - * @param p the object that actually converts visit events into text. - * @param pw the print writer to be used to print the class. May be null if - * you simply want to use the result via - * {@link Printer#getText()}, instead of printing it. + * + * @param cv + * the {@link ClassVisitor} to which this visitor delegates + * calls. May be null. + * @param p + * the object that actually converts visit events into text. + * @param pw + * the print writer to be used to print the class. May be null if + * you simply want to use the result via + * {@link Printer#getText()}, instead of printing it. */ - public TraceClassVisitor( - final ClassVisitor cv, - final Printer p, - final PrintWriter pw) - { + public TraceClassVisitor(final ClassVisitor cv, final Printer p, + final PrintWriter pw) { super(Opcodes.ASM4, cv); this.pw = pw; this.p = p; } @Override - public void visit( - final int version, - final int access, - final String name, - final String signature, - final String superName, - final String[] interfaces) - { + public void visit(final int version, final int access, final String name, + final String signature, final String superName, + final String[] interfaces) { p.visit(version, access, name, signature, superName, interfaces); super.visit(version, access, name, signature, superName, interfaces); } @@ -141,20 +150,15 @@ public final class TraceClassVisitor extends ClassVisitor { } @Override - public void visitOuterClass( - final String owner, - final String name, - final String desc) - { + public void visitOuterClass(final String owner, final String name, + final String desc) { p.visitOuterClass(owner, name, desc); super.visitOuterClass(owner, name, desc); } @Override - public AnnotationVisitor visitAnnotation( - final String desc, - final boolean visible) - { + public AnnotationVisitor visitAnnotation(final String desc, + final boolean visible) { Printer p = this.p.visitClassAnnotation(desc, visible); AnnotationVisitor av = cv == null ? 
null : cv.visitAnnotation(desc, visible); @@ -168,55 +172,28 @@ public final class TraceClassVisitor extends ClassVisitor { } @Override - public void visitInnerClass( - final String name, - final String outerName, - final String innerName, - final int access) - { + public void visitInnerClass(final String name, final String outerName, + final String innerName, final int access) { p.visitInnerClass(name, outerName, innerName, access); super.visitInnerClass(name, outerName, innerName, access); } @Override - public FieldVisitor visitField( - final int access, - final String name, - final String desc, - final String signature, - final Object value) - { - Printer p = this.p.visitField(access, - name, - desc, - signature, - value); - FieldVisitor fv = cv == null ? null : cv.visitField(access, - name, - desc, - signature, - value); + public FieldVisitor visitField(final int access, final String name, + final String desc, final String signature, final Object value) { + Printer p = this.p.visitField(access, name, desc, signature, value); + FieldVisitor fv = cv == null ? null : cv.visitField(access, name, desc, + signature, value); return new TraceFieldVisitor(fv, p); } @Override - public MethodVisitor visitMethod( - final int access, - final String name, - final String desc, - final String signature, - final String[] exceptions) - { - Printer p = this.p.visitMethod(access, - name, - desc, - signature, - exceptions); - MethodVisitor mv = cv == null ? null : cv.visitMethod(access, - name, - desc, - signature, + public MethodVisitor visitMethod(final int access, final String name, + final String desc, final String signature, final String[] exceptions) { + Printer p = this.p.visitMethod(access, name, desc, signature, exceptions); + MethodVisitor mv = cv == null ? null : cv.visitMethod(access, name, + desc, signature, exceptions); return new TraceMethodVisitor(mv, p); } diff --git a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java index f537e83be1..9547a70008 100644 --- a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java +++ b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java @@ -37,7 +37,7 @@ import scala.tools.asm.Opcodes; /** * A {@link FieldVisitor} that prints the fields it visits with a * {@link Printer}. - * + * * @author Eric Bruneton */ public final class TraceFieldVisitor extends FieldVisitor { @@ -52,12 +52,10 @@ public final class TraceFieldVisitor extends FieldVisitor { super(Opcodes.ASM4, fv); this.p = p; } - + @Override - public AnnotationVisitor visitAnnotation( - final String desc, - final boolean visible) - { + public AnnotationVisitor visitAnnotation(final String desc, + final boolean visible) { Printer p = this.p.visitFieldAnnotation(desc, visible); AnnotationVisitor av = fv == null ? null : fv.visitAnnotation(desc, visible); diff --git a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java index 9aabf2079e..9034567c8f 100644 --- a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java +++ b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java @@ -56,10 +56,8 @@ public final class TraceMethodVisitor extends MethodVisitor { } @Override - public AnnotationVisitor visitAnnotation( - final String desc, - final boolean visible) - { + public AnnotationVisitor visitAnnotation(final String desc, + final boolean visible) { Printer p = this.p.visitMethodAnnotation(desc, visible); AnnotationVisitor av = mv == null ? 
null : mv.visitAnnotation(desc, visible); @@ -80,17 +78,11 @@ public final class TraceMethodVisitor extends MethodVisitor { } @Override - public AnnotationVisitor visitParameterAnnotation( - final int parameter, - final String desc, - final boolean visible) - { - Printer p = this.p.visitParameterAnnotation(parameter, - desc, - visible); - AnnotationVisitor av = mv == null - ? null - : mv.visitParameterAnnotation(parameter, desc, visible); + public AnnotationVisitor visitParameterAnnotation(final int parameter, + final String desc, final boolean visible) { + Printer p = this.p.visitParameterAnnotation(parameter, desc, visible); + AnnotationVisitor av = mv == null ? null : mv.visitParameterAnnotation( + parameter, desc, visible); return new TraceAnnotationVisitor(av, p); } @@ -101,13 +93,8 @@ public final class TraceMethodVisitor extends MethodVisitor { } @Override - public void visitFrame( - final int type, - final int nLocal, - final Object[] local, - final int nStack, - final Object[] stack) - { + public void visitFrame(final int type, final int nLocal, + final Object[] local, final int nStack, final Object[] stack) { p.visitFrame(type, nLocal, local, nStack, stack); super.visitFrame(type, nLocal, local, nStack, stack); } @@ -137,34 +124,22 @@ public final class TraceMethodVisitor extends MethodVisitor { } @Override - public void visitFieldInsn( - final int opcode, - final String owner, - final String name, - final String desc) - { + public void visitFieldInsn(final int opcode, final String owner, + final String name, final String desc) { p.visitFieldInsn(opcode, owner, name, desc); super.visitFieldInsn(opcode, owner, name, desc); } @Override - public void visitMethodInsn( - final int opcode, - final String owner, - final String name, - final String desc) - { + public void visitMethodInsn(final int opcode, final String owner, + final String name, final String desc) { p.visitMethodInsn(opcode, owner, name, desc); super.visitMethodInsn(opcode, owner, name, desc); } @Override - public void visitInvokeDynamicInsn( - String name, - String desc, - Handle bsm, - Object... bsmArgs) - { + public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, + Object... bsmArgs) { p.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs); super.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs); } @@ -194,22 +169,15 @@ public final class TraceMethodVisitor extends MethodVisitor { } @Override - public void visitTableSwitchInsn( - final int min, - final int max, - final Label dflt, - final Label... labels) - { + public void visitTableSwitchInsn(final int min, final int max, + final Label dflt, final Label... 
labels) { p.visitTableSwitchInsn(min, max, dflt, labels); super.visitTableSwitchInsn(min, max, dflt, labels); } @Override - public void visitLookupSwitchInsn( - final Label dflt, - final int[] keys, - final Label[] labels) - { + public void visitLookupSwitchInsn(final Label dflt, final int[] keys, + final Label[] labels) { p.visitLookupSwitchInsn(dflt, keys, labels); super.visitLookupSwitchInsn(dflt, keys, labels); } @@ -221,25 +189,16 @@ public final class TraceMethodVisitor extends MethodVisitor { } @Override - public void visitTryCatchBlock( - final Label start, - final Label end, - final Label handler, - final String type) - { + public void visitTryCatchBlock(final Label start, final Label end, + final Label handler, final String type) { p.visitTryCatchBlock(start, end, handler, type); super.visitTryCatchBlock(start, end, handler, type); } @Override - public void visitLocalVariable( - final String name, - final String desc, - final String signature, - final Label start, - final Label end, - final int index) - { + public void visitLocalVariable(final String name, final String desc, + final String signature, final Label start, final Label end, + final int index) { p.visitLocalVariable(name, desc, signature, start, end, index); super.visitLocalVariable(name, desc, signature, start, end, index); } diff --git a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java index a37b759811..1e23c7ef1a 100644 --- a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java +++ b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java @@ -117,8 +117,7 @@ public final class TraceSignatureVisitor extends SignatureVisitor { @Override public SignatureVisitor visitInterface() { - separator = seenInterface ? ", " : isInterface - ? " extends " + separator = seenInterface ? ", " : isInterface ? " extends " : " implements "; seenInterface = true; startType(); @@ -165,34 +164,34 @@ public final class TraceSignatureVisitor extends SignatureVisitor { @Override public void visitBaseType(final char descriptor) { switch (descriptor) { - case 'V': - declaration.append("void"); - break; - case 'B': - declaration.append("byte"); - break; - case 'J': - declaration.append("long"); - break; - case 'Z': - declaration.append("boolean"); - break; - case 'I': - declaration.append("int"); - break; - case 'S': - declaration.append("short"); - break; - case 'C': - declaration.append("char"); - break; - case 'F': - declaration.append("float"); - break; - // case 'D': - default: - declaration.append("double"); - break; + case 'V': + declaration.append("void"); + break; + case 'B': + declaration.append("byte"); + break; + case 'J': + declaration.append("long"); + break; + case 'Z': + declaration.append("boolean"); + break; + case 'I': + declaration.append("int"); + break; + case 'S': + declaration.append("short"); + break; + case 'C': + declaration.append("char"); + break; + case 'F': + declaration.append("float"); + break; + // case 'D': + default: + declaration.append("double"); + break; } endType(); } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 1aa80d5c5b..d2e641cbf9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -888,10 +888,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters { // Run the signature parser to catch bogus signatures. 
val isValidSignature = wrap { // Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser) - import scala.tools.asm.util.SignatureChecker - if (sym.isMethod) { SignatureChecker checkMethodSignature sig } // requires asm-util.jar - else if (sym.isTerm) { SignatureChecker checkFieldSignature sig } - else { SignatureChecker checkClassSignature sig } + import scala.tools.asm.util.CheckClassAdapter + if (sym.isMethod) { CheckClassAdapter checkMethodSignature sig } // requires asm-util.jar + else if (sym.isTerm) { CheckClassAdapter checkFieldSignature sig } + else { CheckClassAdapter checkClassSignature sig } } if(!isValidSignature) { -- cgit v1.2.3 From 426744441c22fa3153b7192bead46f8b244c4f12 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 25 Nov 2012 18:29:16 -0800 Subject: Fix for SerialVersionUID instability. Can hardly believe this has been broken for a decade or so, but there it is - see test case. Four classes attempt to set their SerialVersionUID to 13. One succeeds. No warnings or errors. The output before this patch (for me anyway - your random numbers may differ) was: 860336111422349646 13 8409527228024057943 -7852527872932878365 There was already code in place for rejecting annotations with non-constant args when constant args are required, but that check is only performed on ClassfileAnnotations, and SerialVersionUID was a StaticAnnotation. Maybe people don't reach for ClassfileAnnotation because of this giant warning which I see no way to suppress: warning: Implementation restriction: subclassing Classfile does not make your annotation visible at runtime. If that is what you want, you must write the annotation class in Java. Why did I change the name of the field from uid to value? If you don't use the name 'value', you have to name the argument every time you use it, even if it's the only parameter. I didn't relish breaking every usage of scala's @SerialVersionUID in the known universe. --- src/library/scala/SerialVersionUID.scala | 2 +- test/files/neg/serialversionuid-not-const.check | 10 ++++++++++ test/files/neg/serialversionuid-not-const.scala | 16 ++++++++++++++++ 3 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/serialversionuid-not-const.check create mode 100644 test/files/neg/serialversionuid-not-const.scala (limited to 'src') diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala index 1f7d047060..77094f0bbf 100644 --- a/src/library/scala/SerialVersionUID.scala +++ b/src/library/scala/SerialVersionUID.scala @@ -12,4 +12,4 @@ package scala * Annotation for specifying the `static SerialVersionUID` field * of a serializable class. 
*/ -class SerialVersionUID(uid: Long) extends scala.annotation.StaticAnnotation +class SerialVersionUID(value: Long) extends scala.annotation.ClassfileAnnotation diff --git a/test/files/neg/serialversionuid-not-const.check b/test/files/neg/serialversionuid-not-const.check new file mode 100644 index 0000000000..9c383d97ad --- /dev/null +++ b/test/files/neg/serialversionuid-not-const.check @@ -0,0 +1,10 @@ +serialversionuid-not-const.scala:1: error: annotation argument needs to be a constant; found: 13L.toLong +@SerialVersionUID(13l.toLong) class C1 extends Serializable + ^ +serialversionuid-not-const.scala:3: error: annotation argument needs to be a constant; found: 13.asInstanceOf[Long] +@SerialVersionUID(13.asInstanceOf[Long]) class C3 extends Serializable + ^ +serialversionuid-not-const.scala:4: error: annotation argument needs to be a constant; found: Test.bippy +@SerialVersionUID(Test.bippy) class C4 extends Serializable + ^ +three errors found diff --git a/test/files/neg/serialversionuid-not-const.scala b/test/files/neg/serialversionuid-not-const.scala new file mode 100644 index 0000000000..f0e3ef4e7e --- /dev/null +++ b/test/files/neg/serialversionuid-not-const.scala @@ -0,0 +1,16 @@ +@SerialVersionUID(13l.toLong) class C1 extends Serializable +@SerialVersionUID(13l) class C2 extends Serializable +@SerialVersionUID(13.asInstanceOf[Long]) class C3 extends Serializable +@SerialVersionUID(Test.bippy) class C4 extends Serializable + +object Test { + val bippy = 13L + + def show(c: Class[_]) = println(java.io.ObjectStreamClass.lookup(c).getSerialVersionUID) + def main(args: Array[String]): Unit = { + show(classOf[C1]) + show(classOf[C2]) + show(classOf[C3]) + show(classOf[C4]) + } +} -- cgit v1.2.3 From a854529c93b42141da07aba055d83192b99d4899 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 25 Nov 2012 23:25:14 -0800 Subject: Eliminate some one-arg asserts. The only thing more fun than debugging non-deterministic scaladoc crashes unrelated to one's change is doing so with all one-argument asserts. --- src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index 04f95455a5..9fb5806c3d 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -233,8 +233,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { * exists, but should not be documented (either it's not included in the source or it's not visible) */ class NoDocTemplateImpl(sym: Symbol, inTpl: TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with NoDocTemplate { - assert(modelFinished) - assert(!(noDocTemplatesCache isDefinedAt sym)) + assert(modelFinished, this) + assert(!(noDocTemplatesCache isDefinedAt sym), (sym, noDocTemplatesCache(sym))) noDocTemplatesCache += (sym -> this) def isDocTemplate = false } @@ -269,7 +269,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { * All ancestors of the template and all non-package members. 
*/ abstract class DocTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberTemplateImpl(sym, inTpl) with DocTemplateEntity { - assert(!modelFinished) + assert(!modelFinished, (sym, inTpl)) assert(!(docTemplatesCache isDefinedAt sym), sym) docTemplatesCache += (sym -> this) @@ -620,7 +620,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { */ def createTemplate(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = { // don't call this after the model finished! - assert(!modelFinished) + assert(!modelFinished, (aSym, inTpl)) def createRootPackageComment: Option[Comment] = if(settings.docRootContent.isDefault) None @@ -636,7 +636,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } def createDocTemplate(bSym: Symbol, inTpl: DocTemplateImpl): DocTemplateImpl = { - assert(!modelFinished) // only created BEFORE the model is finished + assert(!modelFinished, (bSym, inTpl)) // only created BEFORE the model is finished if (bSym.isAliasType && bSym != AnyRefClass) new DocTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { override def isAliasType = true } else if (bSym.isAbstractType) -- cgit v1.2.3 From 5573281a24bc57dc75d3e9efa85ff720eeb39d10 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 25 Nov 2012 23:25:41 -0800 Subject: Account for existence of scala's ClassfileAnnotation. Apparently this thing is not real well tested, as the scaladoc code was written as if it does not exist. --- .../scala/tools/nsc/doc/model/ModelFactory.scala | 26 +++++++++++++--------- 1 file changed, 15 insertions(+), 11 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index 9fb5806c3d..acdc3e6797 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -842,24 +842,28 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { lazy val annotationClass = makeTemplate(annot.symbol) val arguments = { // lazy - def noParams = annot.args map { _ => None } + def annotArgs = annot.args match { + case Nil => annot.assocs collect { case (_, LiteralAnnotArg(const)) => Literal(const) } + case xs => xs + } + def noParams = annotArgs map (_ => None) + val params: List[Option[ValueParam]] = annotationClass match { case aClass: DocTemplateEntity with Class => (aClass.primaryConstructor map { _.valueParams.head }) match { case Some(vps) => vps map { Some(_) } - case None => noParams + case _ => noParams } case _ => noParams } - assert(params.length == annot.args.length) - (params zip annot.args) flatMap { case (param, arg) => - makeTree(arg) match { - case Some(tree) => - Some(new ValueArgument { - def parameter = param - def value = tree - }) - case None => None + assert(params.length == annotArgs.length, (params, annotArgs)) + + params zip annotArgs flatMap { case (param, arg) => + makeTree(arg) map { tree => + new ValueArgument { + def parameter = param + def value = tree + } } } } -- cgit v1.2.3 From 1426d9cecf1b6123d0dffe44a8ab0dbf88a29707 Mon Sep 17 00:00:00 2001 From: Roberto Tyley Date: Tue, 27 Nov 2012 22:51:01 +0000 Subject: Add convenience attribute operator to NodeSeq Compared to the current method of reading the string text of an attribute: (x \ "@bar").text ...the new operator removes the need for a pair of parenthesis and shortens the overall expression by 7 chars : x \@ "bar" Discussion on scala-internals: 
https://groups.google.com/d/topic/scala-internals/BZ-tfbebDqE/discussion --- src/library/scala/xml/NodeSeq.scala | 5 +++++ test/files/jvm/xmlattr.scala | 7 +++++++ 2 files changed, 12 insertions(+) (limited to 'src') diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala index decf60dad7..d2efc947b1 100644 --- a/src/library/scala/xml/NodeSeq.scala +++ b/src/library/scala/xml/NodeSeq.scala @@ -145,6 +145,11 @@ abstract class NodeSeq extends AbstractSeq[Node] with immutable.Seq[Node] with S } } + /** Convenience method which returns string text of the named attribute. Use: + * - `that \@ "foo"` to get the string text of attribute `"foo"`; + */ + def \@(attributeName: String): String = (this \ ("@" + attributeName)).text + override def toString(): String = theSeq.mkString def text: String = (this map (_.text)).mkString diff --git a/test/files/jvm/xmlattr.scala b/test/files/jvm/xmlattr.scala index d214642eb6..6423268ba7 100644 --- a/test/files/jvm/xmlattr.scala +++ b/test/files/jvm/xmlattr.scala @@ -6,6 +6,7 @@ object Test { UnprefixedAttributeTest() AttributeWithOptionTest() AttributeOutputTest() + AttributeOperatorTest() } object UnprefixedAttributeTest { @@ -60,4 +61,10 @@ object Test { } } + object AttributeOperatorTest { + def apply() { + val xml = + assert(xml \@ "bar" == "apple") + } + } } -- cgit v1.2.3 From 2857d43a5a217d45f879878740081d4b91c1b2d8 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 2 Nov 2012 22:00:03 -0700 Subject: Javap for Java 7 (Fixes SI-4936) Add support for reflective invocation of javap under jdk7, using the quasi-javax.tools API. Under -Ygen-javap, warn if you can't. Since JAVA_HOME is used to locate tools.jar, the script is updated to convert it for cygwin. Update Javap for simpl repl. Also, reduce clutter of JavaxTools as suggested. JavapTool7 evades repl truncating if enabled; the truncating PrintWriter is not line-oriented but string-oriented. 
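For reference, the reflective hookup described above amounts to roughly the sketch below. The JavapTask constructor shape is the one JavapTool7 uses in this patch; the wrapper object and method names (JavapSketch, disassemble) are illustrative only and not part of the change.

    import java.io.{ StringWriter, Writer }
    import javax.tools.{ DiagnosticCollector, DiagnosticListener, JavaFileManager, JavaFileObject }

    object JavapSketch {
      // Load com.sun.tools.javap.JavapTask reflectively (JDK 7+) and run it once.
      def disassemble(options: java.util.List[String], classes: java.util.List[String],
                      fileManager: JavaFileManager): String = {
        val out       = new StringWriter
        val reporter  = new DiagnosticCollector[JavaFileObject]
        val taskClass = Class.forName("com.sun.tools.javap.JavapTask")
        val ctor = taskClass.getConstructor(
          classOf[Writer], classOf[JavaFileManager], classOf[DiagnosticListener[_]],
          classOf[java.lang.Iterable[String]], classOf[java.lang.Iterable[String]])
        val task = ctor.newInstance(out, fileManager, reporter, options, classes).asInstanceOf[AnyRef]
        taskClass.getMethod("call").invoke(task)   // JavapTask.call() answers true on success
        out.toString
      }
    }

The JavaFileManager argument is what lets the tool read class bytes that may exist only in memory, which is why the patch wraps them in a ForwardingJavaFileManager rather than handing javap a file path.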
--- .../scala/tools/ant/templates/tool-unix.tmpl | 3 + .../tools/nsc/backend/jvm/BytecodeWriters.scala | 31 +- .../scala/tools/nsc/backend/jvm/GenASM.scala | 7 +- .../scala/tools/nsc/interpreter/ILoop.scala | 2 - src/compiler/scala/tools/util/Javap.scala | 320 ++++++++++++++++----- 5 files changed, 283 insertions(+), 80 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl index f1c6c52785..84ccaba749 100644 --- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl +++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl @@ -102,6 +102,9 @@ if [[ -n "$cygwin" ]]; then format=windows fi SCALA_HOME="$(cygpath --$format "$SCALA_HOME")" + if [[ -n "$JAVA_HOME" ]]; then + JAVA_HOME="$(cygpath --$format "$JAVA_HOME")" + fi TOOL_CLASSPATH="$(cygpath --path --$format "$TOOL_CLASSPATH")" elif [[ -n "$mingw" ]]; then SCALA_HOME="$(cmd //c echo "$SCALA_HOME")" diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala index 8c8950d295..941ccd9a2d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala @@ -9,7 +9,7 @@ package backend.jvm import java.io.{ DataOutputStream, FileOutputStream, OutputStream, File => JFile } import scala.tools.nsc.io._ import scala.tools.nsc.util.ScalaClassLoader -import scala.tools.util.JavapClass +import scala.tools.util.{ Javap, JavapClass } import java.util.jar.Attributes.Name import scala.language.postfixOps @@ -59,27 +59,32 @@ trait BytecodeWriters { override def close() = writer.close() } + /** To be mixed-in with the BytecodeWriter that generates + * the class file to be disassembled. 
+ */ trait JavapBytecodeWriter extends BytecodeWriter { val baseDir = Directory(settings.Ygenjavap.value).createDirectory() - - def emitJavap(bytes: Array[Byte], javapFile: io.File) { - val pw = javapFile.printWriter() - val javap = new JavapClass(ScalaClassLoader.appLoader, pw) { - override def findBytes(path: String): Array[Byte] = bytes - } - - try javap(Seq("-verbose", "dummy")) foreach (_.show()) - finally pw.close() + val cl = ScalaClassLoader.appLoader + + def emitJavap(classFile: AbstractFile, javapFile: File) { + val pw = javapFile.printWriter() + try { + val javap = new JavapClass(cl, pw) { + override def findBytes(path: String): Array[Byte] = classFile.toByteArray + } + javap(Seq("-verbose", "-protected", classFile.name)) foreach (_.show()) + } finally pw.close() } abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) { super.writeClass(label, jclassName, jclassBytes, sym) - val bytes = getFile(sym, jclassName, ".class").toByteArray + val classFile = getFile(sym, jclassName, ".class") val segments = jclassName.split("[./]") val javapFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "javap" toFile; - javapFile.parent.createDirectory() - emitJavap(bytes, javapFile) + + if (Javap.isAvailable(cl)) emitJavap(classFile, javapFile) + else warning("No javap on classpath, skipping javap output.") } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index d2e641cbf9..cd391aa498 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -126,13 +126,18 @@ abstract class GenASM extends SubComponent with BytecodeWriters { new DirectToJarfileWriter(f.file) case _ => + import scala.tools.util.Javap if (settings.Ygenjavap.isDefault) { if(settings.Ydumpclasses.isDefault) new ClassBytecodeWriter { } else new ClassBytecodeWriter with DumpBytecodeWriter { } } - else new ClassBytecodeWriter with JavapBytecodeWriter { } + else if (Javap.isAvailable()) new ClassBytecodeWriter with JavapBytecodeWriter { } + else { + warning("No javap on classpath, skipping javap output.") + new ClassBytecodeWriter { } + } // TODO A ScalapBytecodeWriter could take asm.util.Textifier as starting point. // Three areas where javap ouput is less than ideal (e.g. when comparing versions of the same classfile) are: diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index 612a90f3ea..3af3c44cb6 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -308,8 +308,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) private def javapCommand(line: String): Result = { if (javap == null) ":javap unavailable, no tools.jar at %s. 
Set JDK_HOME.".format(jdkHome) - else if (javaVersion startsWith "1.7") - ":javap not yet working with java 1.7" else if (line == "") ":javap [-lcsvp] [path1 path2 ...]" else diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala index 381dbd1d87..6f5f4f6ed4 100644 --- a/src/compiler/scala/tools/util/Javap.scala +++ b/src/compiler/scala/tools/util/Javap.scala @@ -6,12 +6,21 @@ package scala.tools package util +import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable } import scala.tools.nsc.util.ScalaClassLoader -import java.io.{ InputStream, PrintWriter, ByteArrayInputStream } +import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, InputStream, + PrintWriter, Writer } +import java.util.{ Locale } +import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener, + ForwardingJavaFileManager, JavaFileManager, JavaFileObject, + SimpleJavaFileObject, StandardLocation } import scala.tools.nsc.io.File -import Javap._ +import scala.util.{ Properties, Try, Success, Failure } +import scala.collection.JavaConverters import scala.language.reflectiveCalls +import Javap._ + trait Javap { def loader: ScalaClassLoader def printWriter: PrintWriter @@ -33,46 +42,29 @@ class JavapClass( val printWriter: PrintWriter = new PrintWriter(System.out, true) ) extends Javap { - lazy val parser = new JpOptions - - val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull - val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull - private def failed = (EnvClass eq null) || (PrinterClass eq null) - - val PrinterCtr = ( - if (failed) null - else PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass) - ) - - def findBytes(path: String): Array[Byte] = - tryFile(path) getOrElse tryClass(path) + lazy val tool = JavapTool(loader, printWriter) + /** Run the tool. Option args start with "-". + * The default options are "-protected -verbose". + * Byte data for filename args is retrieved with findBytes. + * If the filename does not end with ".class", javap will + * insert a banner of the form: + * `Binary file dummy contains simple.Complex`. + */ def apply(args: Seq[String]): List[JpResult] = { - if (failed) Nil - else args.toList filterNot (_ startsWith "-") map { path => - val bytes = findBytes(path) - if (bytes.isEmpty) new JpError("Could not find class bytes for '%s'".format(path)) - else new JpSuccess(newPrinter(new ByteArrayInputStream(bytes), newEnv(args))) - } + val (optional, claases) = args partition (_ startsWith "-") + val options = if (optional.nonEmpty) optional else JavapTool.DefaultOptions + tool(options)(claases map (claas => claas -> bytesFor(claas))) } - def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter = - if (failed) null - else PrinterCtr.newInstance(in, printWriter, env) - - def newEnv(opts: Seq[String]): FakeEnvironment = { - lazy val env: FakeEnvironment = EnvClass.newInstance() - - if (failed) null - else parser(opts) foreach { case (name, value) => - val field = EnvClass getDeclaredField name - field setAccessible true - field.set(env, value.asInstanceOf[AnyRef]) - } - - env + private def bytesFor(path: String) = Try { + val bytes = findBytes(path) + if (bytes.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '${path}'") + else bytes } + def findBytes(path: String): Array[Byte] = tryFile(path) getOrElse tryClass(path) + /** Assume the string is a path and try to find the classfile * it represents. 
*/ @@ -87,44 +79,179 @@ class JavapClass( /** Assume the string is a fully qualified class name and try to * find the class object it represents. */ - def tryClass(path: String): Array[Byte] = { - val extName = ( - if (path endsWith ".class") (path dropRight 6).replace('/', '.') - else path - ) - loader.classBytes(extName) + def tryClass(path: String): Array[Byte] = loader classBytes { + if (path endsWith ".class") (path dropRight 6).replace('/', '.') + else path } } -object Javap { - val Env = "sun.tools.javap.JavapEnvironment" - val Printer = "sun.tools.javap.JavapPrinter" +abstract class JavapTool { + type ByteAry = Array[Byte] + type Input = Pair[String, Try[ByteAry]] + def apply(options: Seq[String])(inputs: Seq[Input]): List[JpResult] + // Since the tool is loaded by reflection, check for catastrophic failure. + protected def failed: Boolean + implicit protected class Failer[A](a: =>A) { + def orFailed[B >: A](b: =>B) = if (failed) b else a + } + protected def noToolError = new JpError(s"No javap tool available: ${getClass.getName} failed to initialize.") +} - def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = - cl.tryToInitializeClass[AnyRef](Env).isDefined +class JavapTool6(loader: ScalaClassLoader, printWriter: PrintWriter) extends JavapTool { + import JavapTool._ + val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull + val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull + override protected def failed = (EnvClass eq null) || (PrinterClass eq null) - // "documentation" - type FakeEnvironment = AnyRef - type FakePrinter = AnyRef + val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass) orFailed null + def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter = + PrinterCtr.newInstance(in, printWriter, env) orFailed null + def showable(fp: FakePrinter) = new Showable { + def show() = fp.asInstanceOf[{ def print(): Unit }].print() + } - sealed trait JpResult { - type ResultType - def isError: Boolean - def value: ResultType - def show(): Unit + lazy val parser = new JpOptions + def newEnv(opts: Seq[String]): FakeEnvironment = { + def result = { + val env: FakeEnvironment = EnvClass.newInstance() + parser(opts) foreach { case (name, value) => + val field = EnvClass getDeclaredField name + field setAccessible true + field.set(env, value.asInstanceOf[AnyRef]) + } + env + } + result orFailed null } - class JpError(msg: String) extends JpResult { - type ResultType = String - def isError = true - def value = msg - def show() = println(msg) + + override def apply(options: Seq[String])(inputs: Seq[Input]): List[JpResult] = + (inputs map { + case (_, Success(ba)) => new JpSuccess(showable(newPrinter(new ByteArrayInputStream(ba), newEnv(options)))) + case (_, Failure(e)) => new JpError(e.toString) + }).toList orFailed List(noToolError) +} + +class JavapTool7(loader: ScalaClassLoader, printWriter: PrintWriter) extends JavapTool { + + import JavapTool._ + type Task = { + def call(): Boolean // true = ok + //def run(args: Array[String]): Int // all args + //def handleOptions(args: Array[String]): Unit // options, then run() or call() } - class JpSuccess(val value: AnyRef) extends JpResult { - type ResultType = AnyRef - def isError = false - def show() = value.asInstanceOf[{ def print(): Unit }].print() + // result of Task.run + //object TaskResult extends Enumeration { + // val Ok, Error, CmdErr, SysErr, Abnormal = Value + //} + val TaskClaas = 
loader.tryToInitializeClass[Task](JavapTool.Tool).orNull + override protected def failed = TaskClaas eq null + + val TaskCtor = TaskClaas.getConstructor( + classOf[Writer], + classOf[JavaFileManager], + classOf[DiagnosticListener[_]], + classOf[JIterable[String]], + classOf[JIterable[String]] + ) orFailed null + + val reporter = new DiagnosticCollector[JavaFileObject] + + // DisassemblerTool.getStandardFileManager(reporter,locale,charset) + val defaultFileManager: JavaFileManager = + (loader.tryToLoadClass[JavaFileManager]("com.sun.tools.javap.JavapFileManager").get getMethod ( + "create", + classOf[DiagnosticListener[_]], + classOf[PrintWriter] + ) invoke (null, reporter, new PrintWriter(System.err, true))).asInstanceOf[JavaFileManager] orFailed null + + // manages named arrays of bytes, which might have failed to load + class JavapFileManager(val managed: Seq[Input])(delegate: JavaFileManager = defaultFileManager) + extends ForwardingJavaFileManager[JavaFileManager](delegate) { + import JavaFileObject.Kind + import Kind._ + import StandardLocation._ + import JavaFileManager.Location + import java.net.URI + def uri(name: String): URI = new URI(name) // new URI("jfo:" + name) + + def inputNamed(name: String): Try[ByteAry] = (managed find (_._1 == name)).get._2 + def managedFile(name: String, kind: Kind) = kind match { + case CLASS => fileObjectForInput(name, inputNamed(name), kind) + case _ => null + } + // todo: just wrap it as scala abstractfile and adapt it uniformly + def fileObjectForInput(name: String, bytes: Try[ByteAry], kind: Kind): JavaFileObject = + new SimpleJavaFileObject(uri(name), kind) { + override def openInputStream(): InputStream = new ByteArrayInputStream(bytes.get) + // if non-null, ClassWriter wrongly requires scheme non-null + override def toUri: URI = null + override def getName: String = name + // suppress + override def getLastModified: Long = -1L + } + override def getJavaFileForInput(location: Location, className: String, kind: Kind): JavaFileObject = + location match { + case CLASS_PATH => managedFile(className, kind) + case _ => null + } + override def hasLocation(location: Location): Boolean = + location match { + case CLASS_PATH => true + case _ => false + } + } + val writer = new CharArrayWriter + def fileManager(inputs: Seq[Input]) = new JavapFileManager(inputs)() + def showable(): Showable = { + val written = { + writer.flush() + val w = writer.toString + writer.reset() + w + } + val msgs = { + import Properties.lineSeparator + val m = reporter.messages + if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator) + else "" + } + new Showable { + def show() = { + val mw = msgs + written + printWriter.write(mw, 0, mw.length) // ReplStrippingWriter clips on write(String) if truncating + printWriter.flush() + } + } + } + // eventually, use the tool interface + def task(options: Seq[String], claases: Seq[String], inputs: Seq[Input]): Task = { + //ServiceLoader.load(classOf[javax.tools.DisassemblerTool]). 
+ //getTask(writer, fileManager, reporter, options.asJava, claases.asJava) + import JavaConverters.asJavaIterableConverter + TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, claases.asJava) + .orFailed (throw new IllegalStateException) } + // a result per input + override def apply(options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { + case (claas, Success(ba)) => + if (task(options, Seq(claas), inputs).call()) new JpSuccess(showable()) + else new JpError(reporter.messages mkString ",") + case (_, Failure(e)) => new JpError(e.toString) + }).toList orFailed List(noToolError) +} + +object JavapTool { + // >= 1.7 + val Tool = "com.sun.tools.javap.JavapTask" + // < 1.7 + val Env = "sun.tools.javap.JavapEnvironment" + val Printer = "sun.tools.javap.JavapPrinter" + // "documentation" + type FakeEnvironment = AnyRef + type FakePrinter = AnyRef + + // support JavapEnvironment class JpOptions { private object Access { final val PRIVATE = 0 @@ -162,4 +289,69 @@ object Javap { } } } + + val DefaultOptions = List("-protected", "-verbose") + + def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = Seq(Env, Tool) exists (cn => hasClass(cl, cn)) + + private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined + + private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool) + + def apply(cl: ScalaClassLoader, pw: PrintWriter) = + if (isTaskable(cl)) new JavapTool7(cl, pw) else new JavapTool6(cl, pw) + + /** A richer [[javax.tools.DiagnosticCollector]]. */ + implicit class JavaReporter(val c: DiagnosticCollector[JavaFileObject]) extends AnyVal { + import scala.collection.JavaConverters.iterableAsScalaIterableConverter + /** All diagnostics in the collector. */ + def diagnostics: Iterable[Diagnostic[_ <: JavaFileObject]] = c.getDiagnostics.asScala + /** All diagnostic messages. + * @param locale Locale for diagnostic messages, null by default. + */ + def messages(implicit locale: Locale = null) = (diagnostics map (_ getMessage locale)).toList + /* + import Diagnostic.Kind.ERROR + private def isErr(d: Diagnostic[_]) = d.getKind == ERROR + /** Count the errors. */ + def errorCount: Int = diagnostics count isErr + /** Error diagnostics in the collector. */ + def errors = (diagnostics filter isErr).toList + */ + } +} + +object Javap { + + def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapTool.isAvailable(cl) + + def apply(path: String): Unit = apply(Seq(path)) + def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show()) + + trait Showable { + def show(): Unit + } + + sealed trait JpResult { + type ResultType + def isError: Boolean + def value: ResultType + def show(): Unit + // todo + // def header(): String + // def fields(): List[String] + // def methods(): List[String] + // def signatures(): List[String] + } + class JpError(msg: String) extends JpResult { + type ResultType = String + def isError = true + def value = msg + def show() = println(msg) // makes sense for :javap, less for -Ygen-javap + } + class JpSuccess(val value: Showable) extends JpResult { + type ResultType = AnyRef + def isError = false + def show() = value.show() // output to tool's PrintWriter + } } -- cgit v1.2.3 From 47245f547f55df1feff9add1e8cd73edd8d0b154 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 16:23:11 +0100 Subject: Remove Name -> TermName implicit. And simplify the name implicits. 
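Concretely, call sites that used to lean on the removed implicit now spell the conversion out; the pattern repeated throughout the diff below looks like this (illustrative excerpt, where name has static type Name):

    // before: an implicit Name => TermName conversion made this compile
    ValDef(mods, name, tpt, rhs)
    // after: the intended term-ness is explicit at the call site
    ValDef(mods, name.toTermName, tpt, rhs)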
--- .../scala/reflect/reify/codegen/GenSymbols.scala | 2 +- .../scala/reflect/reify/phases/Reshape.scala | 4 ++-- src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 2 +- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 4 ++-- .../scala/tools/nsc/ast/parser/Parsers.scala | 18 +++++++++--------- .../tools/nsc/interpreter/MemberHandlers.scala | 2 +- .../scala/tools/nsc/javac/JavaParsers.scala | 12 ++++++------ .../nsc/symtab/classfile/ClassfileParser.scala | 12 ++++++------ .../tools/nsc/symtab/classfile/ICodeReader.scala | 4 ++-- .../scala/tools/nsc/symtab/clr/TypeParser.scala | 3 +-- .../scala/tools/nsc/transform/Constructors.scala | 8 ++++---- .../tools/nsc/transform/ExtensionMethods.scala | 2 +- .../scala/tools/nsc/transform/LambdaLift.scala | 6 +++--- .../scala/tools/nsc/transform/LazyVals.scala | 18 +++++++++--------- src/compiler/scala/tools/nsc/transform/Mixin.scala | 10 +++++----- .../scala/tools/nsc/transform/SpecializeTypes.scala | 10 +++++----- .../scala/tools/nsc/transform/UnCurry.scala | 2 +- .../scala/tools/nsc/typechecker/Macros.scala | 2 +- .../tools/nsc/typechecker/MethodSynthesis.scala | 8 +++++--- .../scala/tools/nsc/typechecker/Namers.scala | 6 +++--- .../tools/nsc/typechecker/TypeDiagnostics.scala | 2 +- .../scala/tools/reflect/ToolBoxFactory.scala | 4 ++-- .../tools/selectivecps/SelectiveCPSTransform.scala | 2 +- src/reflect/scala/reflect/internal/Names.scala | 21 +++++---------------- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- 25 files changed, 78 insertions(+), 88 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala index 39103b801e..a8a066cd8a 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala @@ -143,7 +143,7 @@ trait GenSymbols { val reification = reificode(sym) import reification.{name, binding} val tree = reification.tree updateAttachment ReifyBindingAttachment(binding) - state.symtab += (sym, name, tree) + state.symtab += (sym, name.toTermName, tree) } fromSymtab } diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index f31c3d4755..75384ddce1 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -254,7 +254,7 @@ trait Reshape { val flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD) val mods1 = Modifiers(flags1, privateWithin0, annotations0) setPositions mods0.positions val mods2 = toPreTyperModifiers(mods1, ddef.symbol) - ValDef(mods2, name1, tpt0, extractRhs(rhs0)) + ValDef(mods2, name1.toTermName, tpt0, extractRhs(rhs0)) } private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = { @@ -293,7 +293,7 @@ trait Reshape { } val mods2 = toPreTyperModifiers(mods1, vdef.symbol) val name1 = nme.dropLocalSuffix(name) - val vdef1 = ValDef(mods2, name1, tpt, rhs) + val vdef1 = ValDef(mods2, name1.toTermName, tpt, rhs) if (reifyDebug) println("resetting visibility of field: %s => %s".format(vdef, vdef1)) Some(vdef1) // no copyAttrs here, because new ValDef and old symbols are now out of sync case ddef: DefDef if !ddef.mods.isLazy => diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index e8bc932bf5..3129748e9f 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -181,7 +181,7 @@ 
trait TreeDSL { self: VODDStart => type ResultTreeType = ValDef - def mkTree(rhs: Tree): ValDef = ValDef(mods, name, tpt, rhs) + def mkTree(rhs: Tree): ValDef = ValDef(mods, name.toTermName, tpt, rhs) } trait DefCreator { self: VODDStart => diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 1adcf46958..9e98e9b0e8 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -74,7 +74,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { val extraFlags = if (inClass) PrivateLocal | SYNTHETIC else 0 val mval = ( - accessor.owner.newVariable(nme.moduleVarName(accessor.name), accessor.pos.focus, MODULEVAR | extraFlags) + accessor.owner.newVariable(nme.moduleVarName(accessor.name.toTermName), accessor.pos.focus, MODULEVAR | extraFlags) setInfo accessor.tpe.finalResultType addAnnotation VolatileAttr ) @@ -210,7 +210,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { */ private def mkPackedValDef(expr: Tree, owner: Symbol, name: Name): (ValDef, () => Ident) = { val packedType = typer.packedType(expr, owner) - val sym = owner.newValue(name, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType + val sym = owner.newValue(name.toTermName, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType (ValDef(sym, expr), () => Ident(sym) setPos sym.pos.focus setType expr.tpe) } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index f430f1fc34..8cb5ccf19b 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -701,10 +701,10 @@ self => tree match { case Ident(name) => removeAsPlaceholder(name) - makeParam(name, TypeTree() setPos o2p(tree.pos.endOrPoint)) + makeParam(name.toTermName, TypeTree() setPos o2p(tree.pos.endOrPoint)) case Typed(Ident(name), tpe) if tpe.isType => // get the ident! 
removeAsPlaceholder(name) - makeParam(name, tpe) + makeParam(name.toTermName, tpe) case _ => syntaxError(tree.pos, "not a legal formal parameter", false) makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.endOrPoint)) @@ -783,7 +783,7 @@ self => val rPos = top.pos val end = if (rPos.isDefined) rPos.endOrPoint else opPos.endOrPoint top = atPos(start, opinfo.offset, end) { - makeBinop(isExpr, opinfo.operand, opinfo.operator, top, opPos) + makeBinop(isExpr, opinfo.operand, opinfo.operator.toTermName, top, opPos) } } top @@ -1306,7 +1306,7 @@ self => val cond = condExpr() newLinesOpt() val body = expr() - makeWhile(lname, cond, body) + makeWhile(lname.toTermName, cond, body) } } parseWhile @@ -1318,7 +1318,7 @@ self => if (isStatSep) in.nextToken() accept(WHILE) val cond = condExpr() - makeDoWhile(lname, body, cond) + makeDoWhile(lname.toTermName, body, cond) } } parseDo @@ -1477,7 +1477,7 @@ self => def prefixExpr(): Tree = { if (isUnaryOp) { atPos(in.offset) { - val name = nme.toUnaryName(rawIdent()) + val name = nme.toUnaryName(rawIdent().toTermName) if (name == nme.UNARY_- && isNumericLit) simpleExprRest(atPos(in.offset)(literal(isNegated = true)), canApply = true) else @@ -1515,7 +1515,7 @@ self => val pname = freshName("x$") in.nextToken() val id = atPos(start) (Ident(pname)) - val param = atPos(id.pos.focus){ makeSyntheticParam(pname) } + val param = atPos(id.pos.focus){ makeSyntheticParam(pname.toTermName) } placeholderParams = param :: placeholderParams id case LPAREN => @@ -2136,7 +2136,7 @@ self => expr() } else EmptyTree atPos(start, if (name == nme.ERROR) start else nameOffset) { - ValDef((mods | implicitmod | bynamemod) withAnnotations annots, name, tpt, default) + ValDef((mods | implicitmod | bynamemod) withAnnotations annots, name.toTermName, tpt, default) } } def paramClause(): List[ValDef] = { @@ -2696,7 +2696,7 @@ self => atPos(start, if (name == nme.ERROR) start else nameOffset) { val mods1 = if (in.token == SUBTYPE) mods | Flags.DEFERRED else mods val template = templateOpt(mods1, name, NoMods, Nil, tstart) - ModuleDef(mods1, name, template) + ModuleDef(mods1, name.toTermName, template) } } diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala index 381dfeb261..a48c2a4b67 100644 --- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -149,7 +149,7 @@ trait MemberHandlers { } class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) { - override def definesTerm = Some(name) + override def definesTerm = Some(name.toTermName) override def definesValue = true override def resultExtractionCode(req: Request) = codegenln("defined module ", name) diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 73cbeaa6c4..f4c8cf991d 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -125,7 +125,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def makeSyntheticParam(count: Int, tpt: Tree): ValDef = makeParam(nme.syntheticParamName(count), tpt) def makeParam(name: String, tpt: Tree): ValDef = - makeParam(newTypeName(name), tpt) + makeParam(name: TermName, tpt) def makeParam(name: TermName, tpt: Tree): ValDef = ValDef(Modifiers(Flags.JAVA | Flags.PARAM), name, tpt, EmptyTree) @@ -448,7 +448,7 @@ trait JavaParsers extends 
ast.parser.ParsersCommon with JavaScanners { AppliedTypeTree(scalaDot(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME), List(t)) } } - varDecl(in.currentPos, Modifiers(Flags.JAVA | Flags.PARAM), t, ident()) + varDecl(in.currentPos, Modifiers(Flags.JAVA | Flags.PARAM), t, ident().toTermName) } def optThrows() { @@ -542,7 +542,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { * these potential definitions are real or not. */ def fieldDecls(pos: Position, mods: Modifiers, tpt: Tree, name: Name): List[Tree] = { - val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name)) + val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name.toTermName)) val maybe = new ListBuffer[Tree] // potential variable definitions. while (in.token == COMMA) { in.nextToken @@ -550,10 +550,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val name = ident() if (in.token == ASSIGN || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition buf ++= maybe - buf += varDecl(in.currentPos, mods, tpt.duplicate, name) + buf += varDecl(in.currentPos, mods, tpt.duplicate, name.toTermName) maybe.clear() } else if (in.token == COMMA) { // ... if there's a comma after the ident, it could be a real vardef or not. - maybe += varDecl(in.currentPos, mods, tpt.duplicate, name) + maybe += varDecl(in.currentPos, mods, tpt.duplicate, name.toTermName) } else { // ... if there's something else we were still in the initializer of the // previous var def; skip to next comma or semicolon. skipTo(COMMA, SEMI) @@ -830,7 +830,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { // The STABLE flag is to signal to namer that this was read from a // java enum, and so should be given a Constant type (thereby making // it usable in annotations.) 
- ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name, enumType, blankExpr) + ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name.toTermName, enumType, blankExpr) } } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 6d213af2b6..cb58111b51 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -252,8 +252,8 @@ abstract class ClassfileParser { } else { log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe) f = tpe match { - case MethodType(_, _) => owner.newMethod(name, owner.pos) - case _ => owner.newVariable(name, owner.pos) + case MethodType(_, _) => owner.newMethod(name.toTermName, owner.pos) + case _ => owner.newVariable(name.toTermName, owner.pos) } f setInfo tpe log("created fake member " + f.fullName) @@ -282,7 +282,7 @@ abstract class ClassfileParser { if (in.buf(start).toInt != CONSTANT_NAMEANDTYPE) errorBadTag(start) val name = getName(in.getChar(start + 1).toInt) // create a dummy symbol for method types - val dummySym = ownerTpe.typeSymbol.newMethod(name, ownerTpe.typeSymbol.pos) + val dummySym = ownerTpe.typeSymbol.newMethod(name.toTermName, ownerTpe.typeSymbol.pos) var tpe = getType(dummySym, in.getChar(start + 3).toInt) // fix the return type, which is blindly set to the class currently parsed @@ -457,7 +457,7 @@ abstract class ClassfileParser { ss = name.subName(start, end) sym = owner.info.decls lookup ss if (sym == NoSymbol) { - sym = owner.newPackage(ss) setInfo completer + sym = owner.newPackage(ss.toTermName) setInfo completer sym.moduleClass setInfo completer owner.info.decls enter sym } @@ -604,7 +604,7 @@ abstract class ClassfileParser { } else { val name = pool.getName(in.nextChar) val info = pool.getType(in.nextChar) - val sym = getOwner(jflags).newValue(name, NoPosition, sflags) + val sym = getOwner(jflags).newValue(name.toTermName, NoPosition, sflags) val isEnum = (jflags & JAVA_ACC_ENUM) != 0 sym setInfo { @@ -639,7 +639,7 @@ abstract class ClassfileParser { in.skip(4); skipAttributes() } else { val name = pool.getName(in.nextChar) - val sym = getOwner(jflags).newMethod(name, NoPosition, sflags) + val sym = getOwner(jflags).newMethod(name.toTermName, NoPosition, sflags) var info = pool.getType(sym, (in.nextChar)) if (name == nme.CONSTRUCTOR) info match { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index b5459ec773..79b08bcabf 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -80,7 +80,7 @@ abstract class ICodeReader extends ClassfileParser { val jflags = in.nextChar val name = pool getName in.nextChar val owner = getOwner(jflags) - val dummySym = owner.newMethod(name, owner.pos, toScalaMethodFlags(jflags)) + val dummySym = owner.newMethod(name.toTermName, owner.pos, toScalaMethodFlags(jflags)) try { val ch = in.nextChar @@ -94,7 +94,7 @@ abstract class ICodeReader extends ClassfileParser { if (sym == NoSymbol) sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe) if (sym == NoSymbol) { - sym = if (field) owner.newValue(name, owner.pos, toScalaFieldFlags(jflags)) else dummySym + sym = if (field) owner.newValue(name.toTermName, owner.pos, toScalaFieldFlags(jflags)) 
else dummySym sym setInfoAndEnter tpe log(s"ICodeReader could not locate ${name.decode} in $owner. Created ${sym.defString}.") } diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala index f0e49ce500..8992ad1f8b 100644 --- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala @@ -608,8 +608,7 @@ abstract class TypeParser { if(method.IsSpecialName) { val paramsArity = method.GetParameters().size // handle operator overload, otherwise handle as any static method - val operName = operatorOverload(name, paramsArity) - if (operName.isDefined) { return operName.get; } + operatorOverload(name, paramsArity) foreach (x => return x.toTermName) } return newTermName(name); } diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 323c7c7261..534a140684 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -60,7 +60,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { // The constructor parameter corresponding to an accessor def parameter(acc: Symbol): Symbol = - parameterNamed(nme.getterName(acc.originalName)) + parameterNamed(nme.getterName(acc.originalName.toTermName)) // The constructor parameter with given name. This means the parameter // has given name, or starts with given name, and continues with a `$` afterwards. @@ -281,7 +281,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { specializedStats find { case Assign(sel @ Select(This(_), _), rhs) => ( (sel.symbol hasFlag SPECIALIZED) - && (nme.unspecializedName(nme.localToGetter(sel.symbol.name)) == nme.localToGetter(sym.name)) + && (nme.unspecializedName(nme.localToGetter(sel.symbol.name.toTermName)) == nme.localToGetter(sym.name.toTermName)) ) case _ => false } @@ -399,7 +399,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { def addGetter(sym: Symbol): Symbol = { val getr = addAccessor( - sym, nme.getterName(sym.name), getterFlags(sym.flags)) + sym, nme.getterName(sym.name.toTermName), getterFlags(sym.flags)) getr setInfo MethodType(List(), sym.tpe) defBuf += localTyper.typedPos(sym.pos)(DefDef(getr, Select(This(clazz), sym))) getr @@ -408,7 +408,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { def addSetter(sym: Symbol): Symbol = { sym setFlag MUTABLE val setr = addAccessor( - sym, nme.getterToSetter(nme.getterName(sym.name)), setterFlags(sym.flags)) + sym, nme.getterToSetter(nme.getterName(sym.name.toTermName)), setterFlags(sym.flags)) setr setInfo MethodType(setr.newSyntheticValueParams(List(sym.tpe)), UnitClass.tpe) defBuf += localTyper.typed { //util.trace("adding setter def for "+setr) { diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 521d732664..717c4b627b 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -184,7 +184,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { def makeExtensionMethodSymbol = { val extensionName = extensionNames(origMeth).head val extensionMeth = ( - companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL) + companion.moduleClass.newMethod(extensionName.toTermName, 
origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL) setAnnotations origMeth.annotations ) companion.info.decls.enter(extensionMeth) diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index 4a23e65ad2..0198f959e3 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -247,8 +247,8 @@ abstract class LambdaLift extends InfoTransform { // SI-5652 If the lifted symbol is accessed from an inner class, it will be made public. (where?) // Generating a a unique name, mangled with the enclosing class name, avoids a VerifyError // in the case that a sub-class happens to lifts out a method with the *same* name. - val name = freshen(sym.name + nme.NAME_JOIN_STRING) - if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name, sym.enclClass) + val name = freshen("" + sym.name + nme.NAME_JOIN_STRING) + if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name.toTermName, sym.enclClass) else name } } @@ -290,7 +290,7 @@ abstract class LambdaLift extends InfoTransform { proxies(owner) = for (fv <- freeValues.toList) yield { val proxyName = proxyNames.getOrElse(fv, fv.name) - val proxy = owner.newValue(proxyName, owner.pos, newFlags) setInfo fv.info + val proxy = owner.newValue(proxyName.toTermName, owner.pos, newFlags) setInfo fv.info if (owner.isClass) owner.info.decls enter proxy proxy } diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala index 481228fb3d..e6c9afb042 100644 --- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala +++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala @@ -68,7 +68,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD curTree = tree tree match { - + case Block(_, _) => val block1 = super.transform(tree) val Block(stats, expr) = block1 @@ -79,7 +79,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD List(stat) }) treeCopy.Block(block1, stats1, expr) - + case DefDef(_, _, _, _, _, rhs) => atOwner(tree.symbol) { val (res, slowPathDef) = if (!sym.owner.isClass && sym.isLazy) { val enclosingClassOrDummyOrMethod = { @@ -100,9 +100,9 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD val (rhs1, sDef) = mkLazyDef(enclosingClassOrDummyOrMethod, transform(rhs), idx, sym) sym.resetFlag((if (lazyUnit(sym)) 0 else LAZY) | ACCESSOR) (rhs1, sDef) - } else + } else (transform(rhs), EmptyTree) - + val ddef1 = deriveDefDef(tree)(_ => if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res) if (slowPathDef != EmptyTree) Block(slowPathDef, ddef1) else ddef1 } @@ -189,10 +189,10 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD case _ => prependStats(bmps, rhs) } } - + def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree], retVal: Tree): Tree = { - val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, STABLE | PRIVATE) + val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, STABLE | PRIVATE) defSym setInfo MethodType(List(), lzyVal.tpe.resultType) defSym.owner = lzyVal.owner debuglog(s"crete slow compute path $defSym with owner ${defSym.owner} for lazy val $lzyVal") @@ -201,8 +201,8 @@ abstract 
class LazyVals extends Transform with TypingTransformers with ast.TreeD val rhs: Tree = (gen.mkSynchronizedCheck(clazz, cond, syncBody, stats)).changeOwner(currentOwner -> defSym) DEF(defSym).mkTree(addBitmapDefs(lzyVal, BLOCK(rhs, retVal))) setSymbol defSym } - - + + def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree], retVal: Tree): (Tree, Tree) = { val slowPathDef: Tree = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal) @@ -221,7 +221,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD * Similarly as for normal lazy val members (see Mixin), the result will be a tree of the form * { if ((bitmap&n & MASK) == 0) this.l$compute() * else l$ - * + * * def l$compute() = { synchronized(enclosing_class_or_dummy) { * if ((bitmap$n & MASK) == 0) { * l$ = diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index ac1cdd1f46..f78a5133a6 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -207,14 +207,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { // println("creating new getter for "+ field +" : "+ field.info +" at "+ field.locationString+(field hasFlag MUTABLE)) val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED | ( if (field.isMutable) 0 else STABLE ) // TODO preserve pre-erasure info? - clazz.newMethod(nme.getterName(field.name), field.pos, newFlags) setInfo MethodType(Nil, field.info) + clazz.newMethod(nme.getterName(field.name.toTermName), field.pos, newFlags) setInfo MethodType(Nil, field.info) } /** Create a new setter. Setters are never private or local. They are * always accessors and deferred. */ def newSetter(field: Symbol): Symbol = { //println("creating new setter for "+field+field.locationString+(field hasFlag MUTABLE)) - val setterName = nme.getterToSetter(nme.getterName(field.name)) + val setterName = nme.getterToSetter(nme.getterName(field.name.toTermName)) val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED val setter = clazz.newMethod(setterName, field.pos, newFlags) // TODO preserve pre-erasure info? 
@@ -315,7 +315,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { // carries over the current entry in the type history) val sym = enteringErasure { // so we have a type history entry before erasure - clazz.newValue(nme.getterToLocal(mixinMember.name), mixinMember.pos).setInfo(mixinMember.tpe.resultType) + clazz.newValue(nme.getterToLocal(mixinMember.name.toTermName), mixinMember.pos).setInfo(mixinMember.tpe.resultType) } sym updateInfo mixinMember.tpe.resultType // info at current phase @@ -711,7 +711,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { */ def bitmapFor(clazz0: Symbol, offset: Int, field: Symbol): Symbol = { val category = bitmapCategory(field) - val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field)) + val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field)).toTermName val sym = clazz0.info.decl(bitmapName) assert(!sym.isOverloaded, sym) @@ -775,7 +775,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Symbol = { - val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, PRIVATE) + val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, PRIVATE) val params = defSym newSyntheticValueParams args.map(_.symbol.tpe) defSym setInfoAndEnter MethodType(params, lzyVal.tpe.resultType) val rhs: Tree = (gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats)).changeOwner(currentOwner -> defSym) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 2574644727..4e4c1b98ac 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -323,11 +323,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { */ private def specializedName(name: Name, types1: List[Type], types2: List[Type]): TermName = { if (nme.INITIALIZER == name || (types1.isEmpty && types2.isEmpty)) - name + name.toTermName else if (nme.isSetterName(name)) - nme.getterToSetter(specializedName(nme.setterToGetter(name), types1, types2)) + nme.getterToSetter(specializedName(nme.setterToGetter(name.toTermName), types1, types2)) else if (nme.isLocalName(name)) - nme.getterToLocal(specializedName(nme.localToGetter(name), types1, types2)) + nme.getterToLocal(specializedName(nme.localToGetter(name.toTermName), types1, types2)) else { val (base, cs, ms) = nme.splitSpecializedName(name) newTermName(base.toString + "$" @@ -689,7 +689,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def mkAccessor(field: Symbol, name: Name) = { val newFlags = (SPECIALIZED | m.getter(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR) // we rely on the super class to initialize param accessors - val sym = sClass.newMethod(name, field.pos, newFlags) + val sym = sClass.newMethod(name.toTermName, field.pos, newFlags) info(sym) = SpecializedAccessor(field) sym } @@ -708,7 +708,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { // debuglog("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name)) if (nme.isLocalName(m.name)) { - val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name)) setInfo 
MethodType(Nil, specVal.info) + val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name.toTermName)) setInfo MethodType(Nil, specVal.info) val origGetter = overrideIn(sClass, m.getter(clazz)) info(origGetter) = Forward(specGetter) enterMember(specGetter) diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 4f889a1d86..ebe30b5aac 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -835,7 +835,7 @@ abstract class UnCurry extends InfoTransform } // create the symbol - val forwsym = currentClass.newMethod(dd.name, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype + val forwsym = currentClass.newMethod(dd.name.toTermName, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype // create the tree val forwtree = theTyper.typedPos(dd.pos) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 4b534b0d2e..2e265172c4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -324,7 +324,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { def sigma(tpe: Type): Type = SigmaTypeMap(tpe) def makeParam(name: Name, pos: Position, tpe: Type, flags: Long) = - macroDef.newValueParameter(name, pos, flags) setInfo tpe + macroDef.newValueParameter(name.toTermName, pos, flags) setInfo tpe def implType(isType: Boolean, origTpe: Type): Type = { def tsym = if (isType) WeakTagClass else ExprClass def targ = origTpe.typeArgs.headOption getOrElse NoType diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 3ac5d388d3..ee128c0e57 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -60,7 +60,7 @@ trait MethodSynthesis { overrideFlag | SYNTHETIC } def newMethodFlags(method: Symbol) = { - val overrideFlag = if (isOverride(method.name)) OVERRIDE else 0L + val overrideFlag = if (isOverride(method.name.toTermName)) OVERRIDE else 0L (method.flags | overrideFlag | SYNTHETIC) & ~DEFERRED } @@ -68,11 +68,13 @@ trait MethodSynthesis { localTyper typed ValOrDefDef(method, f(method)) private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = { - val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name)) + val name1 = name.toTermName + val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1)) finishMethod(m setInfoAndEnter info, f) } private def createInternal(name: Name, f: Symbol => Tree, infoFn: Symbol => Type): Tree = { - val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name)) + val name1 = name.toTermName + val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1)) finishMethod(m setInfoAndEnter infoFn(m), f) } private def cloneInternal(original: Symbol, f: Symbol => Tree, name: Name): Tree = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 79fc0e0081..538e2d1d76 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -300,11 +300,11 @@ trait Namers extends MethodSynthesis { case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => owner.newConstructor(pos, 
flags) case DefDef(_, _, _, _, _, _) => owner.newMethod(name.toTermName, pos, flags) case ClassDef(_, _, _, _) => owner.newClassSymbol(name.toTypeName, pos, flags) - case ModuleDef(_, _, _) => owner.newModule(name, pos, flags) + case ModuleDef(_, _, _) => owner.newModule(name.toTermName, pos, flags) case PackageDef(pid, _) => createPackageSymbol(pos, pid) case ValDef(_, _, _, _) => - if (isParameter) owner.newValueParameter(name, pos, flags) - else owner.newValue(name, pos, flags) + if (isParameter) owner.newValueParameter(name.toTermName, pos, flags) + else owner.newValue(name.toTermName, pos, flags) } } private def createFieldSymbol(tree: ValDef): TermSymbol = diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 19f0b56e94..3bb6ae53dc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -480,7 +480,7 @@ trait TypeDiagnostics { && (m.isPrivate || m.isLocal) && !targets(m) && !(m.name == nme.WILDCARD) // e.g. val _ = foo - && !ignoreNames(m.name) // serialization methods + && !ignoreNames(m.name.toTermName) // serialization methods && !isConstantType(m.info.resultType) // subject to constant inlining && !treeTypes.exists(_ contains m) // e.g. val a = new Foo ; new a.Bar ) diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index b1d343cee9..42cdfcdd49 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -148,13 +148,13 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => unwrapped = new Transformer { override def transform(tree: Tree): Tree = tree match { - case Ident(name) if invertedIndex contains name => + case Ident(name: TermName) if invertedIndex contains name => Ident(invertedIndex(name)) setType tree.tpe case _ => super.transform(tree) } }.transform(unwrapped) - new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name)))).traverse(unwrapped) + new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name.toTermName)))).traverse(unwrapped) unwrapped = if (expr0.isTerm) unwrapped else unwrapFromTerm(unwrapped) unwrapped } diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala index f16cfb10f8..801c328177 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala @@ -345,7 +345,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with val ctxSym = currentOwner.newValue(newTermName("" + vd.symbol.name + cpsNames.shiftSuffix)).setInfo(rhs1.tpe) val ctxDef = localTyper.typed(ValDef(ctxSym, rhs1)) def ctxRef = localTyper.typed(Ident(ctxSym)) - val argSym = currentOwner.newValue(vd.symbol.name).setInfo(tpe) + val argSym = currentOwner.newValue(vd.symbol.name.toTermName).setInfo(tpe) val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member(cpsNames.getTrivialValue)))) val switchExpr = localTyper.typedPos(vd.symbol.pos) { val body2 = mkBlock(bodyStms, bodyExpr).duplicate // dup before typing! 
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 333651162e..cea9215ae2 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -10,22 +10,7 @@ import scala.io.Codec import java.security.MessageDigest import scala.language.implicitConversions -trait LowPriorityNames { - self: Names => - - implicit def nameToNameOps(name: Name): NameOps[Name] = new NameOps[Name](name) -} - -/** The class Names ... - * - * @author Martin Odersky - * @version 1.0, 05/02/2005 - */ -trait Names extends api.Names with LowPriorityNames { - implicit def promoteTermNamesAsNecessary(name: Name): TermName = name.toTermName - -// Operations ------------------------------------------------------------- - +trait Names extends api.Names { private final val HASH_SIZE = 0x8000 private final val HASH_MASK = 0x7FFF private final val NAME_SIZE = 0x20000 @@ -399,9 +384,13 @@ trait Names extends api.Names with LowPriorityNames { def debugString = { val s = decode ; if (isTypeName) s + "!" else s } } + implicit def AnyNameOps(name: Name): NameOps[Name] = new NameOps(name) implicit def TermNameOps(name: TermName): NameOps[TermName] = new NameOps(name) implicit def TypeNameOps(name: TypeName): NameOps[TypeName] = new NameOps(name) + /** FIXME: This is a good example of something which is pure "value class" but cannot + * reap the benefits because an (unused) $outer pointer so it is not single-field. + */ final class NameOps[T <: Name](name: T) { def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name def dropRight(n: Int): T = name.subName(0, name.length - n).asInstanceOf[T] diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index ad7dbe5ef1..ebe1762190 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -248,7 +248,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def newModuleAndClassSymbol(name: Name, pos: Position, flags0: FlagSet): (ModuleSymbol, ClassSymbol) = { val flags = flags0 | MODULE - val m = newModuleSymbol(name, pos, flags) + val m = newModuleSymbol(name.toTermName, pos, flags) val c = newModuleClass(name.toTypeName, pos, flags & ModuleToClassFlags) connectModuleToClass(m, c) (m, c) -- cgit v1.2.3 From 81e68f9abdbb77b8188375aad6568d4eac51d248 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 2 Dec 2012 13:09:39 -0800 Subject: Javap for repl output. The enabling fix is in AbstractFileClassLoader: the javap tool uses a ScalaClassLoader to pick up tools.jar, but delegation to the parent AbstractFileCL was broken; minimal support for findResource was added. Other minor tweaks to output: retain the weird "Binary file... contains" message as documentation; the message is not output for :javap $line3/$read$$iw$$iw$Foo.class, for example. No option handling by javap-7 except to catch IllegalArg, so that combined arg style is not yet supported (:javap -pv). 
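For reference, the shape of the enabling fix is the usual trick for exposing in-memory data
through the URL-based resource API: override findResource and hand back a URL whose stream
handler reads straight from the backing file, so that parent delegation via getResource works
for children of this loader. A stripped-down sketch of that pattern (VirtualFile and the
memfile: scheme are illustrative stand-ins, not the actual AbstractFile API):

    import java.io.InputStream
    import java.net.{ URL, URLConnection, URLStreamHandler }

    // Hypothetical handle to an in-memory file.
    trait VirtualFile { def path: String; def input: InputStream }

    class VirtualFileClassLoader(lookup: String => Option[VirtualFile], parent: ClassLoader)
        extends ClassLoader(parent) {
      // ClassLoader.getResource asks the parent first and then falls back to findResource,
      // so a loader that only overrides getResourceAsStream is invisible to children that
      // resolve resources by URL; returning a URL here restores that delegation.
      override def findResource(name: String): URL = lookup(name) match {
        case None => null
        case Some(file) =>
          new URL(null, "memfile:" + file.path, new URLStreamHandler {
            override def openConnection(url: URL): URLConnection = new URLConnection(url) {
              override def connect() { }
              override def getInputStream = file.input
            }
          })
      }
    }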
--- .../nsc/interpreter/AbstractFileClassLoader.scala | 17 +++- src/compiler/scala/tools/util/Javap.scala | 90 +++++++++++++--------- 2 files changed, 71 insertions(+), 36 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala index fcb485defd..c6e880ea0d 100644 --- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala @@ -7,7 +7,7 @@ package interpreter import scala.tools.nsc.io.AbstractFile import util.ScalaClassLoader -import java.net.URL +import java.net.{ URL, URLConnection, URLStreamHandler } import scala.collection.{ mutable, immutable } /** @@ -55,10 +55,25 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) return file } + // parent delegation in JCL uses getResource; so either add parent.getResAsStream + // or implement findResource, which we do here as a study in scarlet (my complexion + // after looking at CLs and URLs) + override def findResource(name: String): URL = findAbstractFile(name) match { + case null => null + case file => new URL(null, "repldir:" + file.path, new URLStreamHandler { + override def openConnection(url: URL): URLConnection = new URLConnection(url) { + override def connect() { } + override def getInputStream = file.input + } + }) + } + + // this inverts delegation order: super.getResAsStr calls parent.getRes if we fail override def getResourceAsStream(name: String) = findAbstractFile(name) match { case null => super.getResourceAsStream(name) case file => file.input } + // ScalaClassLoader.classBytes uses getResAsStream, so we'll try again before delegating override def classBytes(name: String): Array[Byte] = findAbstractFile(name) match { case null => super.classBytes(name) case file => file.toByteArray diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala index 6f5f4f6ed4..89c5969087 100644 --- a/src/compiler/scala/tools/util/Javap.scala +++ b/src/compiler/scala/tools/util/Javap.scala @@ -17,6 +17,7 @@ import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener, import scala.tools.nsc.io.File import scala.util.{ Properties, Try, Success, Failure } import scala.collection.JavaConverters +import scala.collection.generic.Clearable import scala.language.reflectiveCalls import Javap._ @@ -54,7 +55,8 @@ class JavapClass( def apply(args: Seq[String]): List[JpResult] = { val (optional, claases) = args partition (_ startsWith "-") val options = if (optional.nonEmpty) optional else JavapTool.DefaultOptions - tool(options)(claases map (claas => claas -> bytesFor(claas))) + if (claases.nonEmpty) tool(options)(claases map (claas => claas -> bytesFor(claas))) + else List(JpResult(":javap [-lcsvp] [path1 path2 ...]")) } private def bytesFor(path: String) = Try { @@ -69,12 +71,10 @@ class JavapClass( * it represents. */ def tryFile(path: String): Option[Array[Byte]] = { - val file = File( + val file = if (path.endsWith(".class")) path else path.replace('.', '/') + ".class" - ) - if (!file.exists) None - else try Some(file.toByteArray) catch { case x: Exception => None } + (Try (File(file)) filter (_.exists) map (_.toByteArray)).toOption } /** Assume the string is a fully qualified class name and try to * find the class object it represents. 
@@ -126,8 +126,8 @@ class JavapTool6(loader: ScalaClassLoader, printWriter: PrintWriter) extends Jav override def apply(options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { - case (_, Success(ba)) => new JpSuccess(showable(newPrinter(new ByteArrayInputStream(ba), newEnv(options)))) - case (_, Failure(e)) => new JpError(e.toString) + case (_, Success(ba)) => JpResult(showable(newPrinter(new ByteArrayInputStream(ba), newEnv(options)))) + case (_, Failure(e)) => JpResult(e.toString) }).toList orFailed List(noToolError) } @@ -154,7 +154,30 @@ class JavapTool7(loader: ScalaClassLoader, printWriter: PrintWriter) extends Jav classOf[JIterable[String]] ) orFailed null - val reporter = new DiagnosticCollector[JavaFileObject] + class JavaReporter extends DiagnosticListener[JavaFileObject] with Clearable { + import scala.collection.mutable.{ ArrayBuffer, SynchronizedBuffer } + type D = Diagnostic[_ <: JavaFileObject] + val diagnostics = new ArrayBuffer[D] with SynchronizedBuffer[D] + override def report(d: Diagnostic[_ <: JavaFileObject]) { + diagnostics += d + } + override def clear() = diagnostics.clear() + /** All diagnostic messages. + * @param locale Locale for diagnostic messages, null by default. + */ + def messages(implicit locale: Locale = null) = (diagnostics map (_ getMessage locale)).toList + + def reportable: String = { + import Properties.lineSeparator + //val container = "Binary file .* contains .*".r + //val m = messages filter (_ match { case container() => false case _ => true }) + val m = messages + clear() + if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator) + else "" + } + } + val reporter = new JavaReporter // DisassemblerTool.getStandardFileManager(reporter,locale,charset) val defaultFileManager: JavaFileManager = @@ -209,12 +232,7 @@ class JavapTool7(loader: ScalaClassLoader, printWriter: PrintWriter) extends Jav writer.reset() w } - val msgs = { - import Properties.lineSeparator - val m = reporter.messages - if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator) - else "" - } + val msgs = reporter.reportable new Showable { def show() = { val mw = msgs + written @@ -232,11 +250,23 @@ class JavapTool7(loader: ScalaClassLoader, printWriter: PrintWriter) extends Jav .orFailed (throw new IllegalStateException) } // a result per input + private def apply1(options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] = + Try { + task(options, Seq(claas), inputs).call() + } map { + case true => JpResult(showable()) + case _ => JpResult(reporter.reportable) + } recoverWith { + case e: java.lang.reflect.InvocationTargetException => e.getCause match { + case t: IllegalArgumentException => Success(JpResult(t.getMessage)) // bad option + case x => Failure(x) + } + } lastly { + reporter.clear + } override def apply(options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { - case (claas, Success(ba)) => - if (task(options, Seq(claas), inputs).call()) new JpSuccess(showable()) - else new JpError(reporter.messages mkString ",") - case (_, Failure(e)) => new JpError(e.toString) + case (claas, Success(_)) => apply1(options, claas, inputs).get + case (_, Failure(e)) => JpResult(e.toString) }).toList orFailed List(noToolError) } @@ -301,23 +331,9 @@ object JavapTool { def apply(cl: ScalaClassLoader, pw: PrintWriter) = if (isTaskable(cl)) new JavapTool7(cl, pw) else new JavapTool6(cl, pw) - /** A richer [[javax.tools.DiagnosticCollector]]. 
*/ - implicit class JavaReporter(val c: DiagnosticCollector[JavaFileObject]) extends AnyVal { - import scala.collection.JavaConverters.iterableAsScalaIterableConverter - /** All diagnostics in the collector. */ - def diagnostics: Iterable[Diagnostic[_ <: JavaFileObject]] = c.getDiagnostics.asScala - /** All diagnostic messages. - * @param locale Locale for diagnostic messages, null by default. - */ - def messages(implicit locale: Locale = null) = (diagnostics map (_ getMessage locale)).toList - /* - import Diagnostic.Kind.ERROR - private def isErr(d: Diagnostic[_]) = d.getKind == ERROR - /** Count the errors. */ - def errorCount: Int = diagnostics count isErr - /** Error diagnostics in the collector. */ - def errors = (diagnostics filter isErr).toList - */ + implicit class Lastly[A](val t: Try[A]) extends AnyVal { + private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t } + def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _) } } @@ -343,6 +359,10 @@ object Javap { // def methods(): List[String] // def signatures(): List[String] } + object JpResult { + def apply(msg: String) = new JpError(msg) + def apply(res: Showable) = new JpSuccess(res) + } class JpError(msg: String) extends JpResult { type ResultType = String def isError = true -- cgit v1.2.3 From de6649439e39c1f656644ac090fc49a7a8f02340 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 1 Dec 2012 12:38:10 +0100 Subject: Remove TermName -> String implicit. These implicits were crutches going back to a much Stringier time. Of course "with great type safety comes great verbosity" and no doubt this could be cleaned up significantly further. At least the underpinnings are consistent now - the only implicits involving name should be String -> TypeName and String -> TermName. 
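The discipline is easy to state with a toy model: only the String => Name directions survive,
so anything that needs a String out of a Name has to say so explicitly ("" + name,
name.toString, decode), which is what the call sites patched below now do. A self-contained
sketch under that assumption (the case classes are illustrative stand-ins, not the real Names
hierarchy):

    import scala.language.implicitConversions

    object NameConversionSketch {
      final case class TermName(str: String) { override def toString = str }
      final case class TypeName(str: String) { override def toString = str }

      // The two conversions that remain: from String into a Name.
      implicit def stringToTermName(s: String): TermName = TermName(s)
      implicit def stringToTypeName(s: String): TypeName = TypeName(s)

      def newMethodSymbol(name: TermName): String = "method " + name

      def main(args: Array[String]): Unit = {
        // Promoting a literal still reads naturally at call sites.
        println(newMethodSymbol("apply"))

        // With no Name => String implicit, mixed concatenation must be explicit.
        val prefix = "extension$"
        val name   = TermName("plus")
        println(newMethodSymbol(prefix + name.toString))
      }
    }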
--- .../scala/reflect/reify/codegen/GenSymbols.scala | 6 ++-- .../scala/reflect/reify/codegen/GenUtils.scala | 36 +++++++++------------- .../scala/reflect/reify/utils/NodePrinters.scala | 6 ++-- .../scala/tools/nsc/backend/icode/GenICode.scala | 2 +- .../scala/tools/nsc/backend/jvm/GenASM.scala | 8 ++--- .../scala/tools/nsc/backend/jvm/GenAndroid.scala | 8 ++--- .../scala/tools/nsc/interactive/Global.scala | 4 +-- .../scala/tools/nsc/transform/CleanUp.scala | 2 +- .../tools/nsc/typechecker/ContextErrors.scala | 2 +- .../plugin/scala/tools/selectivecps/CPSUtils.scala | 2 +- src/partest/scala/tools/partest/CompilerTest.scala | 2 +- .../scala/reflect/internal/Definitions.scala | 8 ++--- src/reflect/scala/reflect/internal/Mirrors.scala | 13 ++++---- src/reflect/scala/reflect/internal/StdNames.scala | 12 +++----- .../scala/reflect/runtime/JavaMirrors.scala | 2 +- 15 files changed, 52 insertions(+), 61 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala index a8a066cd8a..731aab93b8 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala @@ -98,7 +98,7 @@ trait GenSymbols { def reifyFreeTerm(binding: Tree): Tree = reifyIntoSymtab(binding.symbol) { sym => if (reifyDebug) println("Free term" + (if (sym.isCapturedVariable) " (captured)" else "") + ": " + sym + "(" + sym.accurateKindString + ")") - val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else "")) + val name = newTermName("" + nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else "")) if (sym.isCapturedVariable) { assert(binding.isInstanceOf[Ident], showRaw(binding)) val capturedBinding = referenceCapturedVariable(sym) @@ -112,14 +112,14 @@ trait GenSymbols { reifyIntoSymtab(binding.symbol) { sym => if (reifyDebug) println("Free type: %s (%s)".format(sym, sym.accurateKindString)) state.reificationIsConcrete = false - val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name) + val name: TermName = nme.REIFY_FREE_PREFIX append sym.name Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym)))) } def reifySymDef(sym: Symbol): Tree = reifyIntoSymtab(sym) { sym => if (reifyDebug) println("Sym def: %s (%s)".format(sym, sym.accurateKindString)) - val name = newTermName(nme.REIFY_SYMDEF_PREFIX + sym.name) + val name: TermName = nme.REIFY_SYMDEF_PREFIX append sym.name def reifiedOwner = if (sym.owner.isLocatable) reify(sym.owner) else reifySymDef(sym.owner) Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(sym.isClass))) } diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala index e2275f79ff..c684f16325 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala @@ -30,41 +30,35 @@ trait GenUtils { def call(fname: String, args: Tree*): Tree = Apply(termPath(fname), args.toList) - def mirrorSelect(name: String): Tree = - termPath(nme.UNIVERSE_PREFIX + name) + def mirrorSelect(name: String): Tree = termPath(nme.UNIVERSE_PREFIX + name) + def mirrorSelect(name: TermName): Tree = mirrorSelect(name.toString) - def 
mirrorBuildSelect(name: String): Tree = - termPath(nme.UNIVERSE_BUILD_PREFIX + name) + def mirrorBuildSelect(name: TermName): Tree = + termPath("" + nme.UNIVERSE_BUILD_PREFIX + name) - def mirrorMirrorSelect(name: String): Tree = - termPath(nme.MIRROR_PREFIX + name) + def mirrorMirrorSelect(name: TermName): Tree = + termPath("" + nme.MIRROR_PREFIX + name) def mirrorCall(name: TermName, args: Tree*): Tree = - call("" + (nme.UNIVERSE_PREFIX append name), args: _*) - - def mirrorCall(name: String, args: Tree*): Tree = - call(nme.UNIVERSE_PREFIX + name, args: _*) + call("" + nme.UNIVERSE_PREFIX + name, args: _*) def mirrorBuildCall(name: TermName, args: Tree*): Tree = - call("" + (nme.UNIVERSE_BUILD_PREFIX append name), args: _*) - - def mirrorBuildCall(name: String, args: Tree*): Tree = - call(nme.UNIVERSE_BUILD_PREFIX + name, args: _*) + call("" + nme.UNIVERSE_BUILD_PREFIX + name, args: _*) def mirrorMirrorCall(name: TermName, args: Tree*): Tree = - call("" + (nme.MIRROR_PREFIX append name), args: _*) - - def mirrorMirrorCall(name: String, args: Tree*): Tree = - call(nme.MIRROR_PREFIX + name, args: _*) + call("" + nme.MIRROR_PREFIX + name, args: _*) def mirrorFactoryCall(value: Product, args: Tree*): Tree = mirrorFactoryCall(value.productPrefix, args: _*) - def mirrorFactoryCall(prefix: String, args: Tree*): Tree = - mirrorCall(prefix, args: _*) + def mirrorFactoryCall(prefix: TermName, args: Tree*): Tree = + mirrorCall("" + prefix, args: _*) + + def scalaFactoryCall(name: TermName, args: Tree*): Tree = + call(s"scala.$name.apply", args: _*) def scalaFactoryCall(name: String, args: Tree*): Tree = - call("scala." + name + ".apply", args: _*) + scalaFactoryCall(name: TermName, args: _*) def mkList(args: List[Tree]): Tree = scalaFactoryCall("collection.immutable.List", args: _*) diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala index 97ec479a6c..86e50e0a68 100644 --- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala +++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala @@ -75,10 +75,10 @@ trait NodePrinters { printout += universe.trim if (mirrorIsUsed) printout += mirror.replace("Mirror[", "scala.reflect.api.Mirror[").trim val imports = scala.collection.mutable.ListBuffer[String](); - imports += nme.UNIVERSE_SHORT + imports += nme.UNIVERSE_SHORT.toString // if (buildIsUsed) imports += nme.build - if (mirrorIsUsed) imports += nme.MIRROR_SHORT - if (flagsAreUsed) imports += nme.Flag + if (mirrorIsUsed) imports += nme.MIRROR_SHORT.toString + if (flagsAreUsed) imports += nme.Flag.toString printout += s"""import ${imports map (_ + "._") mkString ", "}""" val name = if (isExpr) "tree" else "tpe" diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index f07c331fb0..fb9e68a3a1 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1565,7 +1565,7 @@ abstract class GenICode extends SubComponent { */ def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Unit = { def getTempLocal = ctx.method.lookupLocal(nme.EQEQ_LOCAL_VAR) getOrElse { - ctx.makeLocal(l.pos, AnyRefClass.tpe, nme.EQEQ_LOCAL_VAR) + ctx.makeLocal(l.pos, AnyRefClass.tpe, nme.EQEQ_LOCAL_VAR.toString) } /** True if the equality comparison is between values that require the use of the rich equality diff --git 
a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index d2e641cbf9..6edd089e75 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -1198,7 +1198,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { /* Typestate: should be called before emitting fields (because it adds an IField to the current IClass). */ def addCreatorCode(block: BasicBlock) { val fieldSymbol = ( - clasz.symbol.newValue(newTermName(androidFieldName), NoPosition, Flags.STATIC | Flags.FINAL) + clasz.symbol.newValue(androidFieldName, NoPosition, Flags.STATIC | Flags.FINAL) setInfo AndroidCreatorClass.tpe ) val methodSymbol = definitions.getMember(clasz.symbol.companionModule, androidFieldName) @@ -1213,7 +1213,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { jclass.visitField( PublicStaticFinal, - androidFieldName, + androidFieldName.toString, tdesc_creator, null, // no java-generic-signature null // no initial value @@ -1233,7 +1233,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { clinit.visitMethodInsn( asm.Opcodes.INVOKEVIRTUAL, moduleName, - androidFieldName, + androidFieldName.toString, asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*) ) @@ -1241,7 +1241,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { clinit.visitFieldInsn( asm.Opcodes.PUTSTATIC, thisName, - androidFieldName, + androidFieldName.toString, tdesc_creator ) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala index 72b7e35408..92bca19213 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala @@ -35,7 +35,7 @@ trait GenAndroid { def addCreatorCode(codegen: BytecodeGenerator, block: BasicBlock) { import codegen._ val fieldSymbol = ( - clasz.symbol.newValue(newTermName(fieldName), NoPosition, Flags.STATIC | Flags.FINAL) + clasz.symbol.newValue(fieldName, NoPosition, Flags.STATIC | Flags.FINAL) setInfo AndroidCreatorClass.tpe ) val methodSymbol = definitions.getMember(clasz.symbol.companionModule, fieldName) @@ -48,15 +48,15 @@ trait GenAndroid { import codegen._ val creatorType = javaType(AndroidCreatorClass) jclass.addNewField(PublicStaticFinal, - fieldName, + fieldName.toString, creatorType) val moduleName = javaName(clasz.symbol)+"$" clinit.emitGETSTATIC(moduleName, nme.MODULE_INSTANCE_FIELD.toString, new JObjectType(moduleName)) - clinit.emitINVOKEVIRTUAL(moduleName, fieldName, + clinit.emitINVOKEVIRTUAL(moduleName, fieldName.toString, new JMethodType(creatorType, Array())) - clinit.emitPUTSTATIC(jclass.getName(), fieldName, creatorType) + clinit.emitPUTSTATIC(jclass.getName(), fieldName.toString, creatorType) } } diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index e4bff1e192..9716c75215 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -810,8 +810,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") respond(response) { scopeMembers(pos) } } - private val Dollar = newTermName("$") - private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] { override def default(key: Name) = Set() @@ -827,7 +825,7 @@ class Global(settings: Settings, _reporter: Reporter, 
projectName: String = "") def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) { if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) { add(sym.accessed, pre, implicitlyAdded)(toMember) - } else if (!sym.name.decodedName.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) { + } else if (!sym.name.decodedName.containsName("$") && !sym.isSynthetic && sym.hasRawInfo) { val symtpe = pre.memberType(sym) onTypeError ErrorType matching(sym, symtpe, this(sym.name)) match { case Some(m) => diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 847ca574a9..3af9c6d74d 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -120,7 +120,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL { } def addStaticMethodToClass(forBody: (Symbol, Symbol) => Tree): Symbol = { - val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName), ad.pos, STATIC | SYNTHETIC) + val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName.toString), ad.pos, STATIC | SYNTHETIC) val params = methSym.newSyntheticValueParams(List(ClassClass.tpe)) methSym setInfoAndEnter MethodType(params, MethodClass.tpe) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index bfc9f08553..7482e76f4e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -511,7 +511,7 @@ trait ContextErrors { NormalTypeError(tree, fun.tpe+" does not take parameters") // Dynamic - def DynamicVarArgUnsupported(tree: Tree, name: String) = + def DynamicVarArgUnsupported(tree: Tree, name: Name) = issueNormalTypeError(tree, name+ " does not support passing a vararg parameter") def DynamicRewriteError(tree: Tree, err: AbsTypeError) = { diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala index c591030bce..4924e056af 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala @@ -35,7 +35,7 @@ trait CPSUtils { lazy val MarkerCPSAdaptMinus = rootMirror.getRequiredClass("scala.util.continuations.cpsMinus") lazy val Context = rootMirror.getRequiredClass("scala.util.continuations.ControlContext") - lazy val ModCPS = rootMirror.getRequiredPackage("scala.util.continuations") + lazy val ModCPS = rootMirror.getPackage("scala.util.continuations") lazy val MethShiftUnit = definitions.getMember(ModCPS, cpsNames.shiftUnit) lazy val MethShiftUnit0 = definitions.getMember(ModCPS, cpsNames.shiftUnit0) diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala index bb0732dcc6..7495f97efd 100644 --- a/src/partest/scala/tools/partest/CompilerTest.scala +++ b/src/partest/scala/tools/partest/CompilerTest.scala @@ -49,7 +49,7 @@ abstract class CompilerTest extends DirectTest { } class SymsInPackage(pkgName: String) { - def pkg = rootMirror.getRequiredPackage(pkgName) + def pkg = rootMirror.getPackage(pkgName) def classes = allMembers(pkg) filter (_.isClass) def modules = allMembers(pkg) filter (_.isModule) def symbols = classes ++ terms filterNot (_ eq NoSymbol) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala 
b/src/reflect/scala/reflect/internal/Definitions.scala index 9a846179b9..37c61735d6 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -14,7 +14,7 @@ import scala.reflect.api.{Universe => ApiUniverse} trait Definitions extends api.StandardDefinitions { self: SymbolTable => - import rootMirror.{getModule, getClassByName, getRequiredClass, getRequiredModule, getRequiredPackage, getClassIfDefined, getModuleIfDefined, getPackageObject, getPackageObjectIfDefined, requiredClass, requiredModule} + import rootMirror.{getModule, getPackage, getClassByName, getRequiredClass, getRequiredModule, getClassIfDefined, getModuleIfDefined, getPackageObject, getPackageObjectIfDefined, requiredClass, requiredModule} object definitions extends DefinitionsClass @@ -169,11 +169,11 @@ trait Definitions extends api.StandardDefinitions { // It becomes tricky to create dedicated objects for other symbols because // of initialization order issues. - lazy val JavaLangPackage = getRequiredPackage(sn.JavaLang) + lazy val JavaLangPackage = getPackage("java.lang") lazy val JavaLangPackageClass = JavaLangPackage.moduleClass.asClass - lazy val ScalaPackage = getRequiredPackage(nme.scala_) + lazy val ScalaPackage = getPackage("scala") lazy val ScalaPackageClass = ScalaPackage.moduleClass.asClass - lazy val RuntimePackage = getRequiredPackage("scala.runtime") + lazy val RuntimePackage = getPackage("scala.runtime") lazy val RuntimePackageClass = RuntimePackage.moduleClass.asClass // convenient one-argument parameter lists diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index 80aa06d020..6e76a7afb3 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -172,14 +172,15 @@ trait Mirrors extends api.Mirrors { case _ => MissingRequirementError.notFound("package " + fullname) } - def getPackage(fullname: Name): ModuleSymbol = + def getPackage(fullname: TermName): ModuleSymbol = ensurePackageSymbol(fullname.toString, getModuleOrClass(fullname), allowModules = true) - def getRequiredPackage(fullname: String): ModuleSymbol = + @deprecated("Use getPackage", "2.11.0") def getRequiredPackage(fullname: String): ModuleSymbol = getPackage(newTermNameCached(fullname)) - def getPackageObject(fullname: String): ModuleSymbol = - (getPackage(newTermName(fullname)).info member nme.PACKAGE) match { + def getPackageObject(fullname: String): ModuleSymbol = getPackageObject(newTermName(fullname)) + def getPackageObject(fullname: TermName): ModuleSymbol = + (getPackage(fullname).info member nme.PACKAGE) match { case x: ModuleSymbol => x case _ => MissingRequirementError.notFound("package object " + fullname) } @@ -187,8 +188,8 @@ trait Mirrors extends api.Mirrors { def getPackageObjectIfDefined(fullname: String): Symbol = getPackageObjectIfDefined(newTermNameCached(fullname)) - def getPackageObjectIfDefined(fullname: Name): Symbol = - wrapMissing(getPackageObject(fullname.toTermName)) + def getPackageObjectIfDefined(fullname: TermName): Symbol = + wrapMissing(getPackageObject(fullname)) override def staticPackage(fullname: String): ModuleSymbol = ensurePackageSymbol(fullname.toString, getModuleOrClass(newTermNameCached(fullname)), allowModules = false) diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index a5810c9c83..a1e8ada302 100644 --- 
a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -18,8 +18,6 @@ trait StdNames { def encode(str: String): TermName = newTermNameCached(NameTransformer.encode(str)) - implicit def lowerTermNames(n: TermName): String = n.toString - /** Tensions: would like the keywords to be the very first names entered into the names * storage so their ids count from 0, which simplifies the parser. Switched to abstract * classes to avoid all the indirection which is generated with implementation-containing @@ -37,11 +35,7 @@ trait StdNames { kws = kws + result result } - def result: Set[TermName] = { - val result = kws - kws = null - result - } + def result: Set[TermName] = try kws finally kws = null } private final object compactify extends (String => String) { @@ -201,6 +195,8 @@ trait StdNames { } abstract class TypeNames extends Keywords with TypeNamesApi { + override type NameType = TypeName + protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name) final val BYNAME_PARAM_CLASS_NAME: NameType = "" @@ -262,6 +258,8 @@ trait StdNames { } abstract class TermNames extends Keywords with TermNamesApi { + override type NameType = TermName + protected implicit def createNameType(name: String): TermName = newTermNameCached(name) /** Base strings from which synthetic names are derived. */ diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index d264cc06b4..8819cd058d 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -466,7 +466,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni staticSingletonInstance(classLoader, symbol.fullName) else if (outer == null) staticSingletonInstance(classToJava(symbol.moduleClass.asClass)) - else innerSingletonInstance(outer, symbol.name) + else innerSingletonInstance(outer, symbol.name.toString) } override def toString = s"module mirror for ${symbol.fullName} (bound to $outer)" } -- cgit v1.2.3 From ff9cfd9eb7f47be69d302f73de08a00303249a0d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 3 Dec 2012 18:34:18 +0100 Subject: Don't return unimportables from importedSymbol. Hardening against the symptom of SI-6745, which yielded: wat.scala:4: error: too many arguments for constructor Predef: ()object Predef def this() = this(0) ^ The fix for the underlying problem in that bug has been targetted at branch 2.10.x. --- .../scala/tools/nsc/typechecker/Contexts.scala | 9 ++++++++- test/files/run/t6745-2.scala | 22 ++++++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t6745-2.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 1af61d31ec..c0d2f44c7b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -996,7 +996,14 @@ trait Contexts { self: Analyzer => if (settings.lint.value && selectors.nonEmpty && result != NoSymbol && pos != NoPosition) recordUsage(current, result) - result + // Harden against the fallout from bugs like SI-6745 + // + // [JZ] I considered issuing a devWarning and moving the + // check inside the above loop, as I believe that + // this always represents a mistake on the part of + // the caller. 
+ if (definitions isImportable result) result + else NoSymbol } private def selectorString(s: ImportSelector): String = { if (s.name == nme.WILDCARD && s.rename == null) "_" diff --git a/test/files/run/t6745-2.scala b/test/files/run/t6745-2.scala new file mode 100644 index 0000000000..31ecd42bd1 --- /dev/null +++ b/test/files/run/t6745-2.scala @@ -0,0 +1,22 @@ +import scala.tools.nsc._ +import scala.tools.partest.CompilerTest +import scala.collection.{ mutable, immutable, generic } + +object Test extends CompilerTest { + import global._ + import rootMirror._ + import definitions._ + import global.analyzer.{Context, ImportInfo} + + override def code = """ +package context { +} + """ + + def check(source: String, unit: global.CompilationUnit) = { + val context: Context = global.analyzer.rootContext(unit) + val importInfo: ImportInfo = context.imports.head // Predef._ + val importedSym = importInfo.importedSymbol(nme.CONSTRUCTOR) + assert(importedSym == NoSymbol, importedSym) // was "constructor Predef" + } +} -- cgit v1.2.3 From 06844ee821ae500c7485498cc2054e3cf765623f Mon Sep 17 00:00:00 2001 From: James Iry Date: Wed, 5 Dec 2012 12:43:34 -0800 Subject: SI-6769 Removes GenJVM backend Get rid of GenJVM and everything that refers to it. Also get rid of GenAndroid since it's dead code that refers to GenJVM. --- META-INF/MANIFEST.MF | 3 - README.rst | 1 - build.detach.xml | 1 - build.examples.xml | 12 - build.xml | 67 +- lib/fjbg.jar.desired.sha1 | 1 - project/Build.scala | 10 +- project/Layers.scala | 13 +- project/Packaging.scala | 2 +- project/Testing.scala | 2 +- src/compiler/scala/tools/ant/Scalac.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 9 +- .../scala/tools/nsc/backend/JavaPlatform.scala | 6 +- .../scala/tools/nsc/backend/icode/GenICode.scala | 2 +- .../scala/tools/nsc/backend/jvm/GenASM.scala | 2 +- .../scala/tools/nsc/backend/jvm/GenAndroid.scala | 62 - .../scala/tools/nsc/backend/jvm/GenJVM.scala | 1950 -------------------- .../scala/tools/nsc/backend/jvm/GenJVMUtil.scala | 141 -- .../tools/nsc/settings/StandardScalaSettings.scala | 2 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 2 +- src/eclipse/README.md | 2 +- src/eclipse/fjbg/.classpath | 7 - src/eclipse/fjbg/.project | 30 - src/eclipse/scala-compiler/.classpath | 1 - src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java | 195 -- src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java | 35 - src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java | 62 - src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java | 84 - src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java | 101 - src/fjbg/ch/epfl/lamp/fjbg/JClass.java | 420 ----- src/fjbg/ch/epfl/lamp/fjbg/JCode.java | 1308 ------------- src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java | 125 -- src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java | 377 ---- src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java | 771 -------- .../ch/epfl/lamp/fjbg/JConstantValueAttribute.java | 69 - .../epfl/lamp/fjbg/JEnclosingMethodAttribute.java | 83 - .../ch/epfl/lamp/fjbg/JExceptionsAttribute.java | 90 - src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java | 667 ------- src/fjbg/ch/epfl/lamp/fjbg/JField.java | 62 - src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java | 138 -- .../ch/epfl/lamp/fjbg/JInnerClassesAttribute.java | 201 -- src/fjbg/ch/epfl/lamp/fjbg/JLabel.java | 30 - .../epfl/lamp/fjbg/JLineNumberTableAttribute.java | 121 -- src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java | 42 - .../lamp/fjbg/JLocalVariableTableAttribute.java | 167 -- src/fjbg/ch/epfl/lamp/fjbg/JMember.java | 109 -- src/fjbg/ch/epfl/lamp/fjbg/JMethod.java | 199 -- 
src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java | 87 - src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java | 65 - src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java | 1267 ------------- src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java | 77 - src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java | 19 - .../ch/epfl/lamp/fjbg/JSourceFileAttribute.java | 69 - .../ch/epfl/lamp/fjbg/JStackMapTableAttribute.java | 282 --- src/fjbg/ch/epfl/lamp/fjbg/JType.java | 316 ---- src/fjbg/ch/epfl/lamp/fjbg/Main.java | 131 -- src/fjbg/ch/epfl/lamp/util/ByteArray.java | 145 -- src/intellij/compiler.iml.SAMPLE | 1 - src/intellij/fjbg.iml.SAMPLE | 12 - src/intellij/scala-lang.ipr.SAMPLE | 2 - src/intellij/test.iml.SAMPLE | 1 - .../tools/partest/nest/ConsoleFileManager.scala | 5 - .../tools/partest/nest/ReflectiveRunner.scala | 4 +- test/disabled/presentation/akka.flags | 4 +- test/disabled/presentation/simple-tests.opts | 4 +- test/files/ant/imported.xml | 5 - tools/buildcp | 2 +- tools/strapcp | 3 +- 68 files changed, 29 insertions(+), 10258 deletions(-) delete mode 100644 lib/fjbg.jar.desired.sha1 delete mode 100644 src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala delete mode 100644 src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala delete mode 100644 src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala delete mode 100644 src/eclipse/fjbg/.classpath delete mode 100644 src/eclipse/fjbg/.project delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JClass.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JCode.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JField.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JLabel.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JMember.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JMethod.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/JType.java delete mode 100644 src/fjbg/ch/epfl/lamp/fjbg/Main.java delete mode 100644 src/fjbg/ch/epfl/lamp/util/ByteArray.java delete mode 
100644 src/intellij/fjbg.iml.SAMPLE (limited to 'src') diff --git a/META-INF/MANIFEST.MF b/META-INF/MANIFEST.MF index 28a70d2879..53043cd99f 100644 --- a/META-INF/MANIFEST.MF +++ b/META-INF/MANIFEST.MF @@ -7,7 +7,6 @@ Eclipse-LazyStart: true Bundle-ClassPath: ., bin, - lib/fjbg.jar, lib/jline.jar, lib/msil.jar Export-Package: @@ -50,8 +49,6 @@ Export-Package: ch.epfl.lamp.compiler.msil, ch.epfl.lamp.compiler.msil.emit, ch.epfl.lamp.compiler.msil.util, - ch.epfl.lamp.fjbg, - ch.epfl.lamp.util Require-Bundle: org.apache.ant, org.scala-ide.scala.library diff --git a/README.rst b/README.rst index 72c4b6028b..7a1ed1dcf4 100644 --- a/README.rst +++ b/README.rst @@ -18,7 +18,6 @@ build script or user-created if needed. This is not a complete listing. :: +--dist/ The destination folder for Scala distributions. +--docs/ Documentation and sample code. +--lib/ Pre-compiled libraries for the build. - | +--fjbg.jar The Java byte-code generation library. | +--scala-compiler.jar The stable reference ('starr') compiler jar | +--scala-library.jar The stable reference ('starr') library jar | +--scala-library-src.jar A snapshot of the source used to build starr. diff --git a/build.detach.xml b/build.detach.xml index 132c812a26..7845d59e1e 100644 --- a/build.detach.xml +++ b/build.detach.xml @@ -72,7 +72,6 @@ QUICK BUILD (QUICK) - diff --git a/build.examples.xml b/build.examples.xml index 62210d5ece..b105604afc 100644 --- a/build.examples.xml +++ b/build.examples.xml @@ -28,8 +28,6 @@ PROPERTIES - - @@ -81,15 +79,6 @@ INITIALISATION /> - - - - - - @@ -111,7 +100,6 @@ INITIALISATION - diff --git a/build.xml b/build.xml index 3cfbd454e2..77dcdbd259 100644 --- a/build.xml +++ b/build.xml @@ -461,7 +461,6 @@ INITIALISATION - @@ -592,58 +591,11 @@ LOCAL DEPENDENCY (FORKJOIN) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + @@ -790,7 +742,6 @@ LOCAL REFERENCE BUILD (LOCKER) - @@ -809,7 +760,6 @@ LOCAL REFERENCE BUILD (LOCKER) - @@ -845,7 +795,6 @@ LOCAL REFERENCE BUILD (LOCKER) - @@ -855,7 +804,6 @@ LOCAL REFERENCE BUILD (LOCKER) - @@ -953,7 +901,6 @@ PACKED LOCKER BUILD (PALO) - @@ -1185,7 +1132,6 @@ QUICK BUILD (QUICK) - @@ -1203,7 +1149,6 @@ QUICK BUILD (QUICK) - @@ -1254,7 +1199,6 @@ QUICK BUILD (QUICK) - @@ -1376,7 +1320,6 @@ QUICK BUILD (QUICK) - @@ -1406,7 +1349,6 @@ QUICK BUILD (QUICK) - @@ -1533,7 +1475,6 @@ PACKED QUICK BUILD (PACK) - @@ -1967,7 +1908,6 @@ BOOTSTRAPPING BUILD (STRAP) - @@ -1985,7 +1925,6 @@ BOOTSTRAPPING BUILD (STRAP) - @@ -2036,7 +1975,6 @@ BOOTSTRAPPING BUILD (STRAP) - @@ -2168,7 +2106,7 @@ BOOTSTRAPPING BUILD (STRAP) @@ -2829,7 +2767,6 @@ STABLE REFERENCE (STARR) - diff --git a/lib/fjbg.jar.desired.sha1 b/lib/fjbg.jar.desired.sha1 deleted file mode 100644 index 6f3ccc77bd..0000000000 --- a/lib/fjbg.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -8acc87f222210b4a5eb2675477602fc1759e7684 *fjbg.jar diff --git a/project/Build.scala b/project/Build.scala index a50a572d54..ed03028fcc 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -24,7 +24,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { ) // Collections of projects to run 'compile' on. - lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, forkjoin, fjbg) + lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, forkjoin) // Collection of projects to 'package' and 'publish' together. 
lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, actors, continuationsPlugin, jline, scalap) lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite) @@ -82,7 +82,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { makeExplodedDist <<= (makeExplodedDist in scaladist).identity, // Note: We override unmanagedSources so that ~ compile will look at all these sources, then run our aggregated compile... unmanagedSourceDirectories in Compile <<= baseDirectory apply (_ / "src") apply { dir => - Seq("library/scala","actors","compiler","fjbg","swing","continuations/library","forkjoin") map (dir / _) + Seq("library/scala","actors","compiler","swing","continuations/library","forkjoin") map (dir / _) }, // TODO - Make exported products == makeDist so we can use this when creating a *real* distribution. commands += Release.pushStarr @@ -132,8 +132,6 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { // Jline nested project. Compile this sucker once and be done. lazy val jline = Project("jline", file("src/jline")) - // Fast Java Bytecode Generator (nested in every scala-compiler.jar) - lazy val fjbg = Project("fjbg", file(".")) settings(settingOverrides : _*) // Our wrapped version of msil. lazy val asm = Project("asm", file(".")) settings(settingOverrides : _*) // Forkjoin backport @@ -283,7 +281,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { // -------------------------------------------------------------- // Real Compiler Artifact // -------------------------------------------------------------- - lazy val packageScalaBinTask = Seq(quickComp, fjbg, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping) + lazy val packageScalaBinTask = Seq(quickComp, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping) lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq( name := "scala-compiler", crossPaths := false, @@ -331,6 +329,6 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { lazy val documentation = ( Project("documentation", file(".")) settings (documentationSettings: _*) - dependsOn(quickLib, quickComp, actors, fjbg, forkjoin, swing, continuationsLibrary) + dependsOn(quickLib, quickComp, actors, forkjoin, swing, continuationsLibrary) ) } diff --git a/project/Layers.scala b/project/Layers.scala index 35cc79c130..009129efcf 100644 --- a/project/Layers.scala +++ b/project/Layers.scala @@ -13,8 +13,6 @@ trait Layers extends Build { def jline: Project /** Reference to forkjoin library */ def forkjoin: Project - /** Reference to Fast-Java-Bytecode-Generator library */ - def fjbg: Project /** Reference to the ASM wrapped project. */ def asm: Project /** A setting that adds some external dependencies. */ @@ -23,7 +21,7 @@ trait Layers extends Build { def aaa_root: Project /** Creates a reference Scala version that can be used to build other projects. This takes in the raw - * library, compiler and fjbg libraries as well as a string representing the layer name (used for compiling the compile-interface). + * library, compiler as well as a string representing the layer name (used for compiling the compile-interface). 
*/ def makeScalaReference(layer: String, library: Project, reflect: Project, compiler: Project) = scalaInstance <<= (appConfiguration in library, @@ -31,10 +29,9 @@ trait Layers extends Build { (exportedProducts in library in Compile), (exportedProducts in reflect in Compile), (exportedProducts in compiler in Compile), - (exportedProducts in fjbg in Compile), (fullClasspath in jline in Runtime), (exportedProducts in asm in Runtime)) map { - (app, version: String, lib: Classpath, reflect: Classpath, comp: Classpath, fjbg: Classpath, jline: Classpath, asm: Classpath) => + (app, version: String, lib: Classpath, reflect: Classpath, comp: Classpath, jline: Classpath, asm: Classpath) => val launcher = app.provider.scalaProvider.launcher (lib,comp) match { case (Seq(libraryJar), Seq(compilerJar)) => @@ -43,14 +40,14 @@ trait Layers extends Build { libraryJar.data, compilerJar.data, launcher, - ((fjbg.files ++ jline.files ++ asm.files ++ reflect.files):_*)) + ((jline.files ++ asm.files ++ reflect.files):_*)) case _ => error("Cannot build a ScalaReference with more than one classpath element") } } /** Creates a "layer" of Scala compilation. That is, this will build the next version of Scala from a previous version. * Returns the library project and compiler project from the next layer. - * Note: The library and compiler are not *complete* in the sense that they are missing things like "actors" and "fjbg". + * Note: The library and compiler are not *complete* in the sense that they are missing things like "actors". */ def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]], autoLock: Boolean = false) : (Project, Project, Project) = { val autoLockSettings: Seq[Setting[_]] = @@ -108,7 +105,7 @@ trait Layers extends Build { dirs.descendentsExcept( ("*.xml" | "*.html" | "*.gif" | "*.png" | "*.js" | "*.css" | "*.tmpl" | "*.swf" | "*.properties" | "*.txt"),"*.scala").get }, // TODO - Use depends on *and* SBT's magic dependency mechanisms... - unmanagedClasspath in Compile <<= Seq(forkjoin, library, reflect, fjbg, jline, asm).map(exportedProducts in Compile in _).join.map(_.flatten), + unmanagedClasspath in Compile <<= Seq(forkjoin, library, reflect, jline, asm).map(exportedProducts in Compile in _).join.map(_.flatten), externalDeps, referenceScala ) diff --git a/project/Packaging.scala b/project/Packaging.scala index eb4e69f99e..6cb51a10a6 100644 --- a/project/Packaging.scala +++ b/project/Packaging.scala @@ -19,7 +19,7 @@ trait Packaging { self: ScalaBuild.type => genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false), binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"), // Configure the classpath this way to avoid having .jar files and previous layers on the classpath. 
- fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,fjbg,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq), + fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq), fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline), genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true), runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"), diff --git a/project/Testing.scala b/project/Testing.scala index 5de72116a3..de63a66164 100644 --- a/project/Testing.scala +++ b/project/Testing.scala @@ -34,7 +34,7 @@ trait Testing { self: ScalaBuild.type => val continuationsTestsuite = ( Project("continuations-testsuite", file(".")) settings (continuationsTestsuiteSettings:_*) - dependsOn (partest, scalaLibrary, scalaCompiler, fjbg) + dependsOn (partest, scalaLibrary, scalaCompiler) ) } diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala index cf3b5f949b..8f507d924a 100644 --- a/src/compiler/scala/tools/ant/Scalac.scala +++ b/src/compiler/scala/tools/ant/Scalac.scala @@ -100,7 +100,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared { /** Defines valid values for the `target` property. */ object Target extends PermissibleValue { - val values = List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil") + val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7", "msil") } /** Defines valid values for the `deprecation` and `unchecked` properties. */ diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 397e6c42d7..15379c08b8 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -24,7 +24,7 @@ import typechecker._ import transform._ import backend.icode.{ ICodes, GenICode, ICodeCheckers } import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform } -import backend.jvm.{GenJVM, GenASM} +import backend.jvm.GenASM import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination } import backend.icode.analysis._ import scala.language.postfixOps @@ -594,13 +594,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val runsRightAfter = None } with DeadCodeElimination - // phaseName = "jvm", FJBG-based version - object genJVM extends { - val global: Global.this.type = Global.this - val runsAfter = List("dce") - val runsRightAfter = None - } with GenJVM - // phaseName = "jvm", ASM-based version object genASM extends { val global: Global.this.type = Global.this diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index fd4366baf1..5cc4404ca1 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -42,13 +42,9 @@ trait JavaPlatform extends Platform { if (settings.make.isDefault) Nil else List(dependencyAnalysis) - private def classEmitPhase = - if (settings.target.value == "jvm-1.5-fjbg") genJVM - else genASM - def platformPhases = List( flatten, // get rid of inner classes - classEmitPhase // generate .class files + genASM // generate .class files ) ++ depAnalysisPhase lazy val externalEquals = 
getDecl(BoxesRunTimeClass, nme.equals_) diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index f07c331fb0..0dc49ed993 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -925,7 +925,7 @@ abstract class GenICode extends SubComponent { val cm = CALL_METHOD(sym, invokeStyle) /** In a couple cases, squirrel away a little extra information in the - * CALL_METHOD for use by GenJVM. + * CALL_METHOD for use by GenASM. */ fun match { case Select(qual, _) => diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index d2e641cbf9..c182e098ba 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -190,7 +190,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters { /* don't javaNameCache.clear() because that causes the following tests to fail: * test/files/run/macro-repl-dontexpand.scala * test/files/jvm/interpreter.scala - * TODO but why? what use could javaNameCache possibly see once GenJVM is over? + * TODO but why? what use could javaNameCache possibly see once GenASM is over? */ /* TODO After emitting all class files (e.g., in a separate compiler phase) ASM can perform bytecode verification: diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala deleted file mode 100644 index 72b7e35408..0000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Stephane Micheloud - */ - - -package scala.tools.nsc -package backend.jvm - -import ch.epfl.lamp.fjbg._ -import symtab.Flags - -trait GenAndroid { - self: GenJVM => - - import global._ - import icodes._ - import opcodes._ - - /** From the reference documentation of the Android SDK: - * The `Parcelable` interface identifies classes whose instances can be - * written to and restored from a `Parcel`. Classes implementing the - * `Parcelable` interface must also have a static field called `CREATOR`, - * which is an object implementing the `Parcelable.Creator` interface. 
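To make the Android contract described above concrete, a hedged sketch of a Parcelable written in Scala, assuming only the public android.os API (the class and member names are illustrative, not taken from the compiler sources). As the deleted addCreatorCode shows, the backend mirrored a companion-object member named CREATOR into the static CREATOR field that the Android framework expects on the class itself.

    import android.os.{Parcel, Parcelable}

    class Point(val x: Int, val y: Int) extends Parcelable {
      def describeContents(): Int = 0
      def writeToParcel(dest: Parcel, flags: Int): Unit = { dest.writeInt(x); dest.writeInt(y) }
    }

    object Point {
      // GenAndroid copied this companion member into Point's static CREATOR field
      val CREATOR: Parcelable.Creator[Point] = new Parcelable.Creator[Point] {
        def createFromParcel(in: Parcel): Point = new Point(in.readInt(), in.readInt())
        def newArray(size: Int): Array[Point] = new Array[Point](size)
      }
    }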
- */ - private val fieldName = newTermName("CREATOR") - - private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable") - private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator") - - def isAndroidParcelableClass(sym: Symbol) = - (AndroidParcelableInterface != NoSymbol) && - (sym.parentSymbols contains AndroidParcelableInterface) - - def addCreatorCode(codegen: BytecodeGenerator, block: BasicBlock) { - import codegen._ - val fieldSymbol = ( - clasz.symbol.newValue(newTermName(fieldName), NoPosition, Flags.STATIC | Flags.FINAL) - setInfo AndroidCreatorClass.tpe - ) - val methodSymbol = definitions.getMember(clasz.symbol.companionModule, fieldName) - clasz addField new IField(fieldSymbol) - block emit CALL_METHOD(methodSymbol, Static(false)) - block emit STORE_FIELD(fieldSymbol, true) - } - - def legacyAddCreatorCode(codegen: BytecodeGenerator, clinit: JExtendedCode) { - import codegen._ - val creatorType = javaType(AndroidCreatorClass) - jclass.addNewField(PublicStaticFinal, - fieldName, - creatorType) - val moduleName = javaName(clasz.symbol)+"$" - clinit.emitGETSTATIC(moduleName, - nme.MODULE_INSTANCE_FIELD.toString, - new JObjectType(moduleName)) - clinit.emitINVOKEVIRTUAL(moduleName, fieldName, - new JMethodType(creatorType, Array())) - clinit.emitPUTSTATIC(jclass.getName(), fieldName, creatorType) - } - -} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala deleted file mode 100644 index e1484d1f97..0000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ /dev/null @@ -1,1950 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Iulian Dragos - */ - -package scala.tools.nsc -package backend.jvm - -import java.nio.ByteBuffer -import scala.collection.{ mutable, immutable } -import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer } -import scala.tools.nsc.symtab._ -import scala.reflect.internal.util.{ SourceFile, NoSourceFile } -import scala.reflect.internal.ClassfileConstants._ -import ch.epfl.lamp.fjbg._ -import JAccessFlags._ -import JObjectType.{ JAVA_LANG_STRING, JAVA_LANG_OBJECT } -import scala.language.postfixOps - -/** This class ... - * - * @author Iulian Dragos - * @version 1.0 - * - */ -abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with BytecodeWriters { - import global._ - import icodes._ - import icodes.opcodes._ - import definitions._ - - val phaseName = "jvm" - - /** Create a new phase */ - override def newPhase(p: Phase): Phase = new JvmPhase(p) - - /** JVM code generation phase - */ - class JvmPhase(prev: Phase) extends ICodePhase(prev) { - def name = phaseName - override def erasedTypes = true - def apply(cls: IClass) = sys.error("no implementation") - - def isJavaEntryPoint(clasz: IClass) = { - val sym = clasz.symbol - def fail(msg: String, pos: Position = sym.pos) = { - clasz.cunit.warning(sym.pos, - sym.name + " has a main method with parameter type Array[String], but " + sym.fullName('.') + " will not be a runnable program.\n" + - " Reason: " + msg - // TODO: make this next claim true, if possible - // by generating valid main methods as static in module classes - // not sure what the jvm allows here - // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead." 
- ) - false - } - def failNoForwarder(msg: String) = { - fail(msg + ", which means no static forwarder can be generated.\n") - } - val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil - val hasApproximate = possibles exists { m => - m.info match { - case MethodType(p :: Nil, _) => p.tpe.typeSymbol == ArrayClass - case _ => false - } - } - // At this point it's a module with a main-looking method, so either - // succeed or warn that it isn't. - hasApproximate && { - // Before erasure so we can identify generic mains. - enteringErasure { - val companion = sym.linkedClassOfClass - - if (hasJavaMainMethod(companion)) - failNoForwarder("companion contains its own main method") - else if (companion.tpe.member(nme.main) != NoSymbol) - // this is only because forwarders aren't smart enough yet - failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)") - else if (companion.isTrait) - failNoForwarder("companion is a trait") - // Now either succeeed, or issue some additional warnings for things which look like - // attempts to be java main methods. - else possibles exists { m => - m.info match { - case PolyType(_, _) => - fail("main methods cannot be generic.") - case MethodType(params, res) => - if (res.typeSymbol :: params exists (_.isAbstractType)) - fail("main methods cannot refer to type parameters or abstract types.", m.pos) - else - isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos) - case tp => - fail("don't know what this is: " + tp, m.pos) - } - } - } - } - } - - override def run() { - // we reinstantiate the bytecode generator at each run, to allow the GC - // to collect everything - if (settings.debug.value) - inform("[running phase " + name + " on icode]") - - if (settings.Xdce.value) - for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) { - log(s"Optimizer eliminated ${sym.fullNameString}") - icodes.classes -= sym - } - - // For predictably ordered error messages. - val sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName) - val entryPoints = sortedClasses filter isJavaEntryPoint - - val bytecodeWriter = settings.outputDirs.getSingleOutput match { - case Some(f) if f hasExtension "jar" => - // If no main class was specified, see if there's only one - // entry point among the classes going into the jar. 
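Two tiny examples of my own (not from the test suite) showing what the entry-point check above accepts and what it merely warns about:

    object GoodMain {                          // a module with no companion class
      def main(args: Array[String]): Unit =    // exact signature (Array[String])Unit
        println(args.mkString(" "))
    }

    object GenericMain {                       // compiles, but warned: "main methods cannot be generic."
      def main[T](args: Array[String]): Unit = ()
    }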
- if (settings.mainClass.isDefault) { - entryPoints map (_.symbol fullName '.') match { - case Nil => - log("No Main-Class designated or discovered.") - case name :: Nil => - log("Unique entry point: setting Main-Class to " + name) - settings.mainClass.value = name - case names => - log("No Main-Class due to multiple entry points:\n " + names.mkString("\n ")) - } - } - else log("Main-Class was specified: " + settings.mainClass.value) - - new DirectToJarfileWriter(f.file) - - case _ => - if (settings.Ygenjavap.isDefault) { - if(settings.Ydumpclasses.isDefault) - new ClassBytecodeWriter { } - else - new ClassBytecodeWriter with DumpBytecodeWriter { } - } - else new ClassBytecodeWriter with JavapBytecodeWriter { } - } - - val codeGenerator = new BytecodeGenerator(bytecodeWriter) - debuglog("Created new bytecode generator for " + classes.size + " classes.") - - sortedClasses foreach { c => - try codeGenerator.genClass(c) - catch { - case e: JCode.CodeSizeTooBigException => - log("Skipped class %s because it has methods that are too long.".format(c)) - } - } - - bytecodeWriter.close() - classes.clear() - } - } - - var pickledBytes = 0 // statistics - - /** - * Java bytecode generator. - * - */ - class BytecodeGenerator(bytecodeWriter: BytecodeWriter) extends BytecodeUtil { - def this() = this(new ClassBytecodeWriter { }) - def debugLevel = settings.debuginfo.indexOfChoice - import bytecodeWriter.writeClass - - val MIN_SWITCH_DENSITY = 0.7 - val INNER_CLASSES_FLAGS = - (ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_INTERFACE | ACC_ABSTRACT) - - val PublicStatic = ACC_PUBLIC | ACC_STATIC - val PublicStaticFinal = ACC_PUBLIC | ACC_STATIC | ACC_FINAL - - val StringBuilderClassName = javaName(definitions.StringBuilderClass) - val BoxesRunTime = "scala.runtime.BoxesRunTime" - - val StringBuilderType = new JObjectType(StringBuilderClassName) // TODO use ASMType.getObjectType - val toStringType = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY) // TODO use ASMType.getMethodType - val arrayCloneType = new JMethodType(JAVA_LANG_OBJECT, JType.EMPTY_ARRAY) - - // Scala attributes - val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo") - - final val ExcludedForwarderFlags = { - import Flags._ - // Should include DEFERRED but this breaks findMember. 
- ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags ) - } - - // Additional interface parents based on annotations and other cues - def newParentForAttr(attr: Symbol): Option[Symbol] = attr match { - case CloneableAttr => Some(JavaCloneableClass) - case RemoteAttr => Some(RemoteInterfaceClass) - case _ => None - } - - val versionPickle = { - val vp = new PickleBuffer(new Array[Byte](16), -1, 0) - assert(vp.writeIndex == 0, vp) - vp writeNat PickleFormat.MajorVersion - vp writeNat PickleFormat.MinorVersion - vp writeNat 0 - vp - } - - private def helperBoxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = { - val boxedType = definitions.boxedClass(kind.toType.typeSymbol) - val mtype = new JMethodType(javaType(boxedType), Array(javaType(kind))) - - Pair("boxTo" + boxedType.decodedName, mtype) - } - - private val jBoxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map( - BOOL -> helperBoxTo(BOOL) , - BYTE -> helperBoxTo(BYTE) , - CHAR -> helperBoxTo(CHAR) , - SHORT -> helperBoxTo(SHORT) , - INT -> helperBoxTo(INT) , - LONG -> helperBoxTo(LONG) , - FLOAT -> helperBoxTo(FLOAT) , - DOUBLE -> helperBoxTo(DOUBLE) - ) - - private def helperUnboxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = { - val mtype = new JMethodType(javaType(kind), Array(JAVA_LANG_OBJECT)) - val mname = "unboxTo" + kind.toType.typeSymbol.decodedName - - Pair(mname, mtype) - } - - private val jUnboxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map( - BOOL -> helperUnboxTo(BOOL) , - BYTE -> helperUnboxTo(BYTE) , - CHAR -> helperUnboxTo(CHAR) , - SHORT -> helperUnboxTo(SHORT) , - INT -> helperUnboxTo(INT) , - LONG -> helperUnboxTo(LONG) , - FLOAT -> helperUnboxTo(FLOAT) , - DOUBLE -> helperUnboxTo(DOUBLE) - ) - - var clasz: IClass = _ - var method: IMethod = _ - var jclass: JClass = _ - var jmethod: JMethod = _ - - def isParcelableClass = isAndroidParcelableClass(clasz.symbol) - def isRemoteClass = clasz.symbol hasAnnotation RemoteAttr - def serialVUID = clasz.symbol getAnnotation SerialVersionUIDAttr collect { - case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue - } - - val fjbgContext = new FJBGContext(49, 0) - val emitVars = debugLevel >= 3 - - // bug had phase with wrong name; leaving enabled for brief pseudo deprecation - private val checkSignatures = ( - (settings.check containsName phaseName) - || (settings.check.value contains "genjvm") && { - global.warning("This option will be removed: please use -Ycheck:%s, not -Ycheck:genjvm." format phaseName) - true - } - ) - - /** For given symbol return a symbol corresponding to a class that should be declared as inner class. - * - * For example: - * class A { - * class B - * object C - * } - * - * then method will return NoSymbol for A, the same symbol for A.B (corresponding to A$B class) and A$C$ symbol - * for A.C. - */ - private def innerClassSymbolFor(s: Symbol): Symbol = - if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol - - override def javaName(sym: Symbol): String = { // TODO Miguel says: check whether a single pass over `icodes.classes` can populate `innerClassBuffer` faster. - /** - * Checks if given symbol corresponds to inner class/object and add it to innerClassBuffer - * - * Note: This method is called recursively thus making sure that we add complete chain - * of inner class all until root class. - */ - def collectInnerClass(s: Symbol): Unit = { - // TODO: some enteringFlatten { ... 
} which accounts for - // being nested in parameterized classes (if we're going to selectively flatten.) - val x = innerClassSymbolFor(s) - if(x ne NoSymbol) { - assert(x.isClass, "not an inner-class symbol") - val isInner = !x.rawowner.isPackageClass - if (isInner) { - innerClassBuffer += x - collectInnerClass(x.rawowner) - } - } - } - collectInnerClass(sym) - - super.javaName(sym) - } - - /** Write a class to disk, adding the Scala signature (pickled type - * information) and inner classes. - * - * @param jclass The FJBG class, where code was emitted - * @param sym The corresponding symbol, used for looking up pickled information - */ - def emitClass(jclass: JClass, sym: Symbol) { - addInnerClasses(jclass) - writeClass("" + sym.name, jclass.getName(), toByteArray(jclass), sym) - } - - /** Returns the ScalaSignature annotation if it must be added to this class, - * none otherwise; furthermore, it adds to `jclass` the ScalaSig marker - * attribute (marking that a scala signature annotation is present) or the - * Scala marker attribute (marking that the signature for this class is in - * another file). The annotation that is returned by this method must be - * added to the class' annotations list when generating them. - * - * @param jclass The class file that is being readied. - * @param sym The symbol for which the signature has been entered in - * the symData map. This is different than the symbol - * that is being generated in the case of a mirror class. - * @return An option that is: - * - defined and contains an annotation info of the - * ScalaSignature type, instantiated with the pickle - * signature for sym (a ScalaSig marker attribute has - * been written); - * - undefined if the jclass/sym couple must not contain a - * signature (a Scala marker attribute has been written). - */ - def scalaSignatureAddingMarker(jclass: JClass, sym: Symbol): Option[AnnotationInfo] = - currentRun.symData get sym match { - case Some(pickle) if !nme.isModuleName(newTermName(jclass.getName)) => - val scalaAttr = - fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaSignatureATTR.toString, - versionPickle.bytes, versionPickle.writeIndex) - jclass addAttribute scalaAttr - val scalaAnnot = { - val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex)) - AnnotationInfo(sigBytes.sigAnnot, Nil, List((nme.bytes, sigBytes))) - } - pickledBytes += pickle.writeIndex - currentRun.symData -= sym - currentRun.symData -= sym.companionSymbol - Some(scalaAnnot) - case _ => - val markerAttr = - fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaATTR.toString, new Array[Byte](0), 0) - jclass addAttribute markerAttr - None - } - - private var innerClassBuffer = mutable.LinkedHashSet[Symbol]() - - /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents. - * This is important on Android because there is otherwise an interface explosion. 
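A small worked example (hypothetical, not from the original sources) of the pruning described above: when both a trait and one of its subtraits end up among the candidate parent interfaces, only the most derived one needs to be written to the class file.

    trait T1
    trait T2 extends T1

    class C extends T2 with T1
    // candidate interfaces for C: {T1, T2}
    // T2 already implements T1, so minimizeInterfaces keeps only {T2}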
- */ - private def minimizeInterfaces(interfaces: List[Symbol]): List[Symbol] = { - var rest = interfaces - var leaves = List.empty[Symbol] - while(!rest.isEmpty) { - val candidate = rest.head - val nonLeaf = leaves exists { lsym => lsym isSubClass candidate } - if(!nonLeaf) { - leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym }) - } - rest = rest.tail - } - - leaves - } - - def genClass(c: IClass) { - clasz = c - innerClassBuffer.clear() - - val name = javaName(c.symbol) - - val ps = c.symbol.info.parents - - val superClass: Symbol = if(ps.isEmpty) ObjectClass else ps.head.typeSymbol; - - val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses; - val superInterfaces = superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol)) distinct - - val ifaces = - if(superInterfaces.isEmpty) JClass.NO_INTERFACES - else mkArray(minimizeInterfaces(superInterfaces) map javaName) - - jclass = fjbgContext.JClass(javaFlags(c.symbol), - name, - javaName(superClass), - ifaces, - c.cunit.source.toString) - - if (isStaticModule(c.symbol) || serialVUID != None || isParcelableClass) { - if (isStaticModule(c.symbol)) - addModuleInstanceField - addStaticInit(jclass, c.lookupStaticCtor) - - if (isTopLevelModule(c.symbol)) { - if (c.symbol.companionClass == NoSymbol) - generateMirrorClass(c.symbol, c.cunit.source) - else - log("No mirror class for module with linked class: " + - c.symbol.fullName) - } - } - else { - c.lookupStaticCtor foreach (constructor => addStaticInit(jclass, Some(constructor))) - - // it must be a top level class (name contains no $s) - def isCandidateForForwarders(sym: Symbol): Boolean = - exitingPickler { - !(sym.name.toString contains '$') && sym.hasModuleFlag && !sym.isImplClass && !sym.isNestedClass - } - - // At some point this started throwing lots of exceptions as a compile was finishing. - // error: java.lang.AssertionError: - // assertion failed: List(object package$CompositeThrowable, object package$CompositeThrowable) - // ...is the one I've seen repeatedly. Suppressing. 
- val lmoc = ( - try c.symbol.companionModule - catch { case x: AssertionError => - Console.println("Suppressing failed assert: " + x) - NoSymbol - } - ) - // add static forwarders if there are no name conflicts; see bugs #363 and #1735 - if (lmoc != NoSymbol && !c.symbol.isInterface) { - if (isCandidateForForwarders(lmoc) && !settings.noForwarders.value) { - log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc)) - addForwarders(jclass, lmoc.moduleClass) - } - } - } - - clasz.fields foreach genField - clasz.methods foreach genMethod - - val ssa = scalaSignatureAddingMarker(jclass, c.symbol) - addGenericSignature(jclass, c.symbol, c.symbol.owner) - addAnnotations(jclass, c.symbol.annotations ++ ssa) - addEnclosingMethodAttribute(jclass, c.symbol) - emitClass(jclass, c.symbol) - - if (c.symbol hasAnnotation BeanInfoAttr) - genBeanInfoClass(c) - } - - private def addEnclosingMethodAttribute(jclass: JClass, clazz: Symbol) { - val sym = clazz.originalEnclosingMethod - if (sym.isMethod) { - debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass)) - jclass addAttribute fjbgContext.JEnclosingMethodAttribute( - jclass, - javaName(sym.enclClass), - javaName(sym), - javaType(sym) - ) - } else if (clazz.isAnonymousClass) { - val enclClass = clazz.rawowner - assert(enclClass.isClass, enclClass) - val sym = enclClass.primaryConstructor - if (sym == NoSymbol) - log("Ran out of room looking for an enclosing method for %s: no constructor here.".format( - enclClass, clazz) - ) - else { - debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass)) - jclass addAttribute fjbgContext.JEnclosingMethodAttribute( - jclass, - javaName(enclClass), - javaName(sym), - javaType(sym).asInstanceOf[JMethodType] - ) - } - } - } - - private def toByteArray(jc: JClass): Array[Byte] = { - val bos = new java.io.ByteArrayOutputStream() - val dos = new java.io.DataOutputStream(bos) - jc.writeTo(dos) - dos.close() - bos.toByteArray - } - - /** - * Generate a bean info class that describes the given class. 
- * - * @author Ross Judson (ross.judson@soletta.com) - */ - def genBeanInfoClass(c: IClass) { - val beanInfoClass = fjbgContext.JClass(javaFlags(c.symbol), - javaName(c.symbol) + "BeanInfo", - "scala/beans/ScalaBeanInfo", - JClass.NO_INTERFACES, - c.cunit.source.toString) - - var fieldList = List[String]() - for (f <- clasz.fields if f.symbol.hasGetter; - g = f.symbol.getter(c.symbol); - s = f.symbol.setter(c.symbol); - if g.isPublic && !(f.symbol.name startsWith "$")) // inserting $outer breaks the bean - fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList - val methodList = - for (m <- clasz.methods - if !m.symbol.isConstructor && - m.symbol.isPublic && - !(m.symbol.name startsWith "$") && - !m.symbol.isGetter && - !m.symbol.isSetter) yield javaName(m.symbol) - - val constructor = beanInfoClass.addNewMethod(ACC_PUBLIC, "", JType.VOID, new Array[JType](0), new Array[String](0)) - val jcode = constructor.getCode().asInstanceOf[JExtendedCode] - val strKind = new JObjectType(javaName(StringClass)) - val stringArrayKind = new JArrayType(strKind) - val conType = new JMethodType(JType.VOID, Array(javaType(ClassClass), stringArrayKind, stringArrayKind)) - - def push(lst:Seq[String]) { - var fi = 0 - for (f <- lst) { - jcode.emitDUP() - jcode emitPUSH fi - if (f != null) - jcode emitPUSH f - else - jcode.emitACONST_NULL() - jcode emitASTORE strKind - fi += 1 - } - } - - jcode.emitALOAD_0() - // push the class - jcode emitPUSH javaType(c.symbol).asInstanceOf[JReferenceType] - - // push the string array of field information - jcode emitPUSH fieldList.length - jcode emitANEWARRAY strKind - push(fieldList) - - // push the string array of method information - jcode emitPUSH methodList.length - jcode emitANEWARRAY strKind - push(methodList) - - // invoke the superclass constructor, which will do the - // necessary java reflection and create Method objects. - jcode.emitINVOKESPECIAL("scala/beans/ScalaBeanInfo", "", conType) - jcode.emitRETURN() - - // write the bean information class file. 
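A short usage sketch of the feature this generator serves (my own example): annotating a class with scala.beans.BeanInfo asks the backend to emit an auxiliary <ClassName>BeanInfo class extending scala.beans.ScalaBeanInfo that lists the bean properties and public methods, so Java bean-introspection tools can see them.

    import scala.beans.{BeanInfo, BeanProperty}

    @BeanInfo
    class Person(@BeanProperty var name: String, @BeanProperty var age: Int)
    // the annotation above causes a PersonBeanInfo class (extending scala.beans.ScalaBeanInfo)
    // to be generated alongside Person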
- writeClass("BeanInfo ", beanInfoClass.getName(), toByteArray(beanInfoClass), c.symbol) - } - - /** Add the given 'throws' attributes to jmethod */ - def addExceptionsAttribute(jmethod: JMethod, excs: List[AnnotationInfo]) { - if (excs.isEmpty) return - - val cpool = jmethod.getConstantPool - val buf: ByteBuffer = ByteBuffer.allocate(512) - var nattr = 0 - - // put some random value; the actual number is determined at the end - buf putShort 0xbaba.toShort - - for (ThrownException(exc) <- excs.distinct) { - buf.putShort( - cpool.addClass( - javaName(exc)).shortValue) - nattr += 1 - } - - assert(nattr > 0, nattr) - buf.putShort(0, nattr.toShort) - addAttribute(jmethod, tpnme.ExceptionsATTR, buf) - } - - /** Whether an annotation should be emitted as a Java annotation - * .initialize: if 'annot' is read from pickle, atp might be un-initialized - */ - private def shouldEmitAnnotation(annot: AnnotationInfo) = - annot.symbol.initialize.isJavaDefined && - annot.matches(ClassfileAnnotationClass) && - annot.args.isEmpty - - private def emitJavaAnnotations(cpool: JConstantPool, buf: ByteBuffer, annotations: List[AnnotationInfo]): Int = { - def emitArgument(arg: ClassfileAnnotArg): Unit = arg match { - case LiteralAnnotArg(const) => - const.tag match { - case BooleanTag => - buf put 'Z'.toByte - buf putShort cpool.addInteger(if(const.booleanValue) 1 else 0).toShort - case ByteTag => - buf put 'B'.toByte - buf putShort cpool.addInteger(const.byteValue).toShort - case ShortTag => - buf put 'S'.toByte - buf putShort cpool.addInteger(const.shortValue).toShort - case CharTag => - buf put 'C'.toByte - buf putShort cpool.addInteger(const.charValue).toShort - case IntTag => - buf put 'I'.toByte - buf putShort cpool.addInteger(const.intValue).toShort - case LongTag => - buf put 'J'.toByte - buf putShort cpool.addLong(const.longValue).toShort - case FloatTag => - buf put 'F'.toByte - buf putShort cpool.addFloat(const.floatValue).toShort - case DoubleTag => - buf put 'D'.toByte - buf putShort cpool.addDouble(const.doubleValue).toShort - case StringTag => - buf put 's'.toByte - buf putShort cpool.addUtf8(const.stringValue).toShort - case ClazzTag => - buf put 'c'.toByte - buf putShort cpool.addUtf8(javaType(const.typeValue).getSignature()).toShort - case EnumTag => - buf put 'e'.toByte - buf putShort cpool.addUtf8(javaType(const.tpe).getSignature()).toShort - buf putShort cpool.addUtf8(const.symbolValue.name.toString).toShort - } - - case sb@ScalaSigBytes(bytes) if !sb.isLong => - buf put 's'.toByte - buf putShort cpool.addUtf8(sb.encodedBytes).toShort - - case sb@ScalaSigBytes(bytes) if sb.isLong => - buf put '['.toByte - val stringCount = (sb.encodedBytes.length / 65534) + 1 - buf putShort stringCount.toShort - for (i <- 0 until stringCount) { - buf put 's'.toByte - val j = i * 65535 - val string = sb.encodedBytes.slice(j, j + 65535) - buf putShort cpool.addUtf8(string).toShort - } - - case ArrayAnnotArg(args) => - buf put '['.toByte - buf putShort args.length.toShort - args foreach emitArgument - - case NestedAnnotArg(annInfo) => - buf put '@'.toByte - emitAnnotation(annInfo) - } - - def emitAnnotation(annotInfo: AnnotationInfo) { - val AnnotationInfo(typ, args, assocs) = annotInfo - val jtype = javaType(typ) - buf putShort cpool.addUtf8(jtype.getSignature()).toShort - assert(args.isEmpty, args) - buf putShort assocs.length.toShort - for ((name, value) <- assocs) { - buf putShort cpool.addUtf8(name.toString).toShort - emitArgument(value) - } - } - - var nannots = 0 - val pos = buf.position() - - // put some 
random value; the actual number of annotations is determined at the end - buf putShort 0xbaba.toShort - - for (annot <- annotations if shouldEmitAnnotation(annot)) { - nannots += 1 - emitAnnotation(annot) - } - - // save the number of annotations - buf.putShort(pos, nannots.toShort) - nannots - } - - // @M don't generate java generics sigs for (members of) implementation - // classes, as they are monomorphic (TODO: ok?) - private def needsGenericSignature(sym: Symbol) = !( - // PP: This condition used to include sym.hasExpandedName, but this leads - // to the total loss of generic information if a private member is - // accessed from a closure: both the field and the accessor were generated - // without it. This is particularly bad because the availability of - // generic information could disappear as a consequence of a seemingly - // unrelated change. - settings.Ynogenericsig.value - || sym.isArtifact - || sym.isLiftedMethod - || sym.isBridge - || (sym.ownerChain exists (_.isImplClass)) - ) - def addGenericSignature(jmember: JMember, sym: Symbol, owner: Symbol) { - if (needsGenericSignature(sym)) { - val memberTpe = enteringErasure(owner.thisType.memberInfo(sym)) - - erasure.javaSig(sym, memberTpe) foreach { sig => - // This seems useful enough in the general case. - log(sig) - if (checkSignatures) { - val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe)) - val bytecodeTpe = owner.thisType.memberInfo(sym) - if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) { - clasz.cunit.warning(sym.pos, - """|compiler bug: created generic signature for %s in %s that does not conform to its erasure - |signature: %s - |original type: %s - |normalized type: %s - |erasure type: %s - |if this is reproducible, please report bug at https://issues.scala-lang.org/ - """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe)) - return - } - } - val index = jmember.getConstantPool.addUtf8(sig).toShort - if (settings.verbose.value && settings.debug.value) - enteringErasure(println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index)) - - val buf = ByteBuffer.allocate(2) - buf putShort index - addAttribute(jmember, tpnme.SignatureATTR, buf) - } - } - } - - def addAnnotations(jmember: JMember, annotations: List[AnnotationInfo]) { - if (annotations exists (_ matches definitions.DeprecatedAttr)) { - val attr = jmember.getContext().JOtherAttribute( - jmember.getJClass(), jmember, tpnme.DeprecatedATTR.toString, - new Array[Byte](0), 0) - jmember addAttribute attr - } - - val toEmit = annotations filter shouldEmitAnnotation - if (toEmit.isEmpty) return - - val buf: ByteBuffer = ByteBuffer.allocate(2048) - emitJavaAnnotations(jmember.getConstantPool, buf, toEmit) - addAttribute(jmember, tpnme.RuntimeAnnotationATTR, buf) - } - - def addParamAnnotations(jmethod: JMethod, pannotss: List[List[AnnotationInfo]]) { - val annotations = pannotss map (_ filter shouldEmitAnnotation) - if (annotations forall (_.isEmpty)) return - - val buf: ByteBuffer = ByteBuffer.allocate(2048) - - // number of parameters - buf.put(annotations.length.toByte) - for (annots <- annotations) - emitJavaAnnotations(jmethod.getConstantPool, buf, annots) - - addAttribute(jmethod, tpnme.RuntimeParamAnnotationATTR, buf) - } - - def addAttribute(jmember: JMember, name: Name, buf: ByteBuffer) { - if (buf.position() < 2) - return - - val length = buf.position() - val arr = buf.array().slice(0, length) - - val attr = 
jmember.getContext().JOtherAttribute(jmember.getJClass(), - jmember, - name.toString, - arr, - length) - jmember addAttribute attr - } - - def addInnerClasses(jclass: JClass) { - /** The outer name for this inner class. Note that it returns null - * when the inner class should not get an index in the constant pool. - * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS. - */ - def outerName(innerSym: Symbol): String = { - if (innerSym.originalEnclosingMethod != NoSymbol) - null - else { - val outerName = javaName(innerSym.rawowner) - if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName)) - else outerName - } - } - - def innerName(innerSym: Symbol): String = - if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction) - null - else - innerSym.rawname + innerSym.moduleSuffix - - // add inner classes which might not have been referenced yet - exitingErasure { - for (sym <- List(clasz.symbol, clasz.symbol.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass) - innerClassBuffer += m - } - - val allInners = innerClassBuffer.toList - if (allInners.nonEmpty) { - debuglog(clasz.symbol.fullName('.') + " contains " + allInners.size + " inner classes.") - val innerClassesAttr = jclass.getInnerClasses() - // sort them so inner classes succeed their enclosing class - // to satisfy the Eclipse Java compiler - for (innerSym <- allInners sortBy (_.name.length)) { - val flags = { - val staticFlag = if (innerSym.rawowner.hasModuleFlag) ACC_STATIC else 0 - (javaFlags(innerSym) | staticFlag) & INNER_CLASSES_FLAGS - } - val jname = javaName(innerSym) - val oname = outerName(innerSym) - val iname = innerName(innerSym) - - // Mimicking javap inner class output - debuglog( - if (oname == null || iname == null) "//class " + jname - else "//%s=class %s of class %s".format(iname, jname, oname) - ) - - innerClassesAttr.addEntry(jname, oname, iname, flags) - } - } - } - - def genField(f: IField) { - debuglog("Adding field: " + f.symbol.fullName) - - val jfield = jclass.addNewField( - javaFieldFlags(f.symbol), - javaName(f.symbol), - javaType(f.symbol.tpe) - ) - - addGenericSignature(jfield, f.symbol, clasz.symbol) - addAnnotations(jfield, f.symbol.annotations) - } - - def genMethod(m: IMethod) { - if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return - - debuglog("Generating method " + m.symbol.fullName) - method = m - endPC.clear - computeLocalVarsIndex(m) - - var resTpe = javaType(m.symbol.tpe.resultType) - if (m.symbol.isClassConstructor) - resTpe = JType.VOID - - var flags = javaFlags(m.symbol) - if (jclass.isInterface) - flags |= ACC_ABSTRACT - - if (m.symbol.isStrictFP) - flags |= ACC_STRICT - - // native methods of objects are generated in mirror classes - if (method.native) - flags |= ACC_NATIVE - - jmethod = jclass.addNewMethod(flags, - javaName(m.symbol), - resTpe, - mkArray(m.params map (p => javaType(p.kind))), - mkArray(m.params map (p => javaName(p.sym)))) - - addRemoteException(jmethod, m.symbol) - - if (!jmethod.isAbstract() && !method.native) { - val jcode = jmethod.getCode().asInstanceOf[JExtendedCode] - - // add a fake local for debugging purposes - if (emitVars && isClosureApply(method.symbol)) { - val outerField = clasz.symbol.info.decl(nme.OUTER_LOCAL) - if (outerField != NoSymbol) { - log("Adding fake local to represent outer 'this' for closure " + clasz) - val _this = new Local( - method.symbol.newVariable(nme.FAKE_LOCAL_THIS), toTypeKind(outerField.tpe), false) - m.locals = m.locals 
::: List(_this) - computeLocalVarsIndex(m) // since we added a new local, we need to recompute indexes - - jcode.emitALOAD_0() - jcode.emitGETFIELD(javaName(clasz.symbol), - javaName(outerField), - javaType(outerField)) - jcode.emitSTORE(indexOf(_this), javaType(_this.kind)) - } - } - - for (local <- m.locals if ! m.params.contains(local)) { - debuglog("add local var: " + local) - jmethod.addNewLocalVariable(javaType(local.kind), javaName(local.sym)) - } - - genCode(m) - if (emitVars) - genLocalVariableTable(m, jcode) - } - - addGenericSignature(jmethod, m.symbol, clasz.symbol) - val (excs, others) = m.symbol.annotations partition (_.symbol == ThrowsClass) - addExceptionsAttribute(jmethod, excs) - addAnnotations(jmethod, others) - addParamAnnotations(jmethod, m.params.map(_.sym.annotations)) - - // check for code size - try jmethod.freeze() - catch { - case e: JCode.CodeSizeTooBigException => - clasz.cunit.error(m.symbol.pos, "Code size exceeds JVM limits: %d".format(e.codeSize)) - throw e - } - } - - /** Adds a @remote annotation, actual use unknown. - */ - private def addRemoteException(jmethod: JMethod, meth: Symbol) { - val needsAnnotation = ( - (isRemoteClass || (meth hasAnnotation RemoteAttr) && jmethod.isPublic) - && !(meth.throwsAnnotations contains RemoteExceptionClass) - ) - if (needsAnnotation) { - val c = Constant(RemoteExceptionClass.tpe) - val arg = Literal(c) setType c.tpe - meth.addAnnotation(ThrowsClass, arg) - } - } - - private def isClosureApply(sym: Symbol): Boolean = { - (sym.name == nme.apply) && - sym.owner.isSynthetic && - sym.owner.tpe.parents.exists { t => - val TypeRef(_, sym, _) = t - FunctionClass contains sym - } - } - - def addModuleInstanceField() { - jclass.addNewField(PublicStaticFinal, - nme.MODULE_INSTANCE_FIELD.toString, - jclass.getType()) - } - - def addStaticInit(cls: JClass, mopt: Option[IMethod]) { - val clinitMethod = cls.addNewMethod(PublicStatic, - "", - JType.VOID, - JType.EMPTY_ARRAY, - new Array[String](0)) - val clinit = clinitMethod.getCode().asInstanceOf[JExtendedCode] - - mopt match { - case Some(m) => - val oldLastBlock = m.lastBlock - val lastBlock = m.newBlock() - oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock)) - - if (isStaticModule(clasz.symbol)) { - // call object's private ctor from static ctor - lastBlock emit NEW(REFERENCE(m.symbol.enclClass)) - lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true)) - } - - // add serialVUID code - serialVUID foreach { value => - import Flags._, definitions._ - val fieldName = "serialVersionUID" - val fieldSymbol = clasz.symbol.newValue(newTermName(fieldName), NoPosition, STATIC | FINAL) setInfo LongClass.tpe - clasz addField new IField(fieldSymbol) - lastBlock emit CONSTANT(Constant(value)) - lastBlock emit STORE_FIELD(fieldSymbol, true) - } - - if (isParcelableClass) - addCreatorCode(BytecodeGenerator.this, lastBlock) - - lastBlock emit RETURN(UNIT) - lastBlock.close - - method = m - jmethod = clinitMethod - genCode(m) - case None => - legacyStaticInitializer(cls, clinit) - } - } - - private def legacyStaticInitializer(cls: JClass, clinit: JExtendedCode) { - if (isStaticModule(clasz.symbol)) { - clinit emitNEW cls.getName() - clinit.emitINVOKESPECIAL(cls.getName(), - JMethod.INSTANCE_CONSTRUCTOR_NAME, - JMethodType.ARGLESS_VOID_FUNCTION) - } - - serialVUID foreach { value => - val fieldName = "serialVersionUID" - jclass.addNewField(PublicStaticFinal, fieldName, JType.LONG) - clinit emitPUSH value - clinit.emitPUSH(value) - 
clinit.emitPUTSTATIC(jclass.getName(), fieldName, JType.LONG) - } - - if (isParcelableClass) - legacyAddCreatorCode(BytecodeGenerator.this, clinit) - - clinit.emitRETURN() - } - - /** Add a forwarder for method m */ - def addForwarder(jclass: JClass, module: Symbol, m: Symbol) { - val moduleName = javaName(module) - val methodInfo = module.thisType.memberInfo(m) - val paramJavaTypes = methodInfo.paramTypes map javaType - val paramNames = 0 until paramJavaTypes.length map ("x_" + _) - // TODO: evaluate the other flags we might be dropping on the floor here. - val flags = PublicStatic | ( - if (m.isVarargsMethod) ACC_VARARGS else 0 - ) - - /** Forwarders must not be marked final, as the JVM will not allow - * redefinition of a final static method, and we don't know what classes - * might be subclassing the companion class. See SI-4827. - */ - val mirrorMethod = jclass.addNewMethod( - flags, - javaName(m), - javaType(methodInfo.resultType), - mkArray(paramJavaTypes), - mkArray(paramNames)) - val mirrorCode = mirrorMethod.getCode().asInstanceOf[JExtendedCode] - mirrorCode.emitGETSTATIC(moduleName, - nme.MODULE_INSTANCE_FIELD.toString, - new JObjectType(moduleName)) - - var i = 0 - var index = 0 - val argTypes = mirrorMethod.getArgumentTypes() - while (i < argTypes.length) { - mirrorCode.emitLOAD(index, argTypes(i)) - index += argTypes(i).getSize() - i += 1 - } - - mirrorCode.emitINVOKEVIRTUAL(moduleName, mirrorMethod.getName, javaType(m).asInstanceOf[JMethodType]) - mirrorCode emitRETURN mirrorMethod.getReturnType() - - addRemoteException(mirrorMethod, m) - // only add generic signature if the method is concrete; bug #1745 - if (!m.isDeferred) - addGenericSignature(mirrorMethod, m, module) - - val (throws, others) = m.annotations partition (_.symbol == ThrowsClass) - addExceptionsAttribute(mirrorMethod, throws) - addAnnotations(mirrorMethod, others) - addParamAnnotations(mirrorMethod, m.info.params.map(_.annotations)) - } - - /** Add forwarders for all methods defined in `module` that don't conflict - * with methods in the companion class of `module`. A conflict arises when - * a method with the same name is defined both in a class and its companion - * object: method signature is not taken into account. - */ - def addForwarders(jclass: JClass, moduleClass: Symbol) { - assert(moduleClass.isModuleClass, moduleClass) - debuglog("Dumping mirror class for object: " + moduleClass) - - val className = jclass.getName - val linkedClass = moduleClass.companionClass - lazy val conflictingNames: Set[Name] = { - linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name } toSet - } - debuglog("Potentially conflicting names for forwarders: " + conflictingNames) - - for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) { - if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor) - debuglog("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass)) - else if (conflictingNames(m.name)) - log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name)) - else { - log("Adding static forwarder for '%s' from %s to '%s'".format(m, className, moduleClass)) - addForwarder(jclass, moduleClass, m) - } - } - } - - /** Generate a mirror class for a top-level module. A mirror class is a class - * containing only static methods that forward to the corresponding method - * on the MODULE instance of the given Scala object. 
It will only be - * generated if there is no companion class: if there is, an attempt will - * instead be made to add the forwarder methods to the companion class. - */ - def generateMirrorClass(clasz: Symbol, sourceFile: SourceFile) { - import JAccessFlags._ - /* We need to save inner classes buffer and create a new one to make sure - * that we do confuse inner classes of the class we mirror with inner - * classes of the class we are mirroring. These two sets can be different - * as seen in this case: - * - * class A { - * class B - * def b: B = new B - * } - * object C extends A - * - * Here mirror class of C has a static forwarder for (inherited) method `b` - * therefore it refers to class `B` and needs InnerClasses entry. However, - * the real class for `C` (named `C$`) is empty and does not refer to `B` - * thus does not need InnerClasses entry it. - * - * NOTE: This logic has been refactored in GenASM and everything is - * implemented in a much cleaner way by having two separate buffers. - */ - val savedInnerClasses = innerClassBuffer - innerClassBuffer = mutable.LinkedHashSet[Symbol]() - val moduleName = javaName(clasz) // + "$" - val mirrorName = moduleName.substring(0, moduleName.length() - 1) - val mirrorClass = fjbgContext.JClass(ACC_SUPER | ACC_PUBLIC | ACC_FINAL, - mirrorName, - JAVA_LANG_OBJECT.getName, - JClass.NO_INTERFACES, - "" + sourceFile) - - log("Dumping mirror class for '%s'".format(mirrorClass.getName)) - addForwarders(mirrorClass, clasz) - val ssa = scalaSignatureAddingMarker(mirrorClass, clasz.companionSymbol) - addAnnotations(mirrorClass, clasz.annotations ++ ssa) - emitClass(mirrorClass, clasz) - innerClassBuffer = savedInnerClasses - } - - var linearization: List[BasicBlock] = Nil - var isModuleInitialized = false - - def genCode(m: IMethod) { - val jcode = jmethod.getCode.asInstanceOf[JExtendedCode] - - def makeLabels(bs: List[BasicBlock]) = { - debuglog("Making labels for: " + method) - - mutable.HashMap(bs map (_ -> jcode.newLabel) : _*) - } - - isModuleInitialized = false - - linearization = linearizer.linearize(m) - val labels = makeLabels(linearization) - - var nextBlock: BasicBlock = linearization.head - - def genBlocks(l: List[BasicBlock]): Unit = l match { - case Nil => () - case x :: Nil => nextBlock = null; genBlock(x) - case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys) - } - - /** Generate exception handlers for the current method. */ - def genExceptionHandlers() { - - /** Return a list of pairs of intervals where the handler is active. - * The intervals in the list have to be inclusive in the beginning and - * exclusive in the end: [start, end). - */ - def ranges(e: ExceptionHandler): List[(Int, Int)] = { - var covered = e.covered - var ranges: List[(Int, Int)] = Nil - var start = -1 - var end = -1 - - linearization foreach { b => - if (! (covered contains b) ) { - if (start >= 0) { // we're inside a handler range - end = labels(b).getAnchor() - ranges ::= ((start, end)) - start = -1 - } - } else { - if (start < 0) // we're not inside a handler range - start = labels(b).getAnchor() - - end = endPC(b) - covered -= b - } - } - - /* Add the last interval. Note that since the intervals are - * open-ended to the right, we have to give a number past the actual - * code! 
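The interval computation performed by `ranges` can be sketched standalone: walk the linearized blocks in order and turn every maximal run of blocks covered by the handler into a half-open [start, end) range of program counters. The Block type below is a simplified stand-in, not the compiler's own.

    final case class Block(startPC: Int, endPC: Int, covered: Boolean)

    def handlerRanges(linearized: List[Block]): List[(Int, Int)] = {
      var ranges: List[(Int, Int)] = Nil
      var start = -1
      for (b <- linearized) {
        if (b.covered) {
          if (start < 0) start = b.startPC          // entering a covered run
        } else if (start >= 0) {
          ranges = (start, b.startPC) :: ranges     // close the run at the next block's start
          start = -1
        }
      }
      if (start >= 0)                               // the last run extends to the end of the code
        ranges = (start, linearized.last.endPC) :: ranges
      ranges.reverse
    }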
- */ - if (start >= 0) { - ranges ::= ((start, jcode.getPC())) - } - - if (!covered.isEmpty) - debuglog("Some covered blocks were not found in method: " + method + - " covered: " + covered + " not in " + linearization) - ranges - } - - for (e <- this.method.exh ; p <- ranges(e).sortBy(_._1)) { - if (p._1 < p._2) { - debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method + - " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls); - val cls = if (e.cls == NoSymbol || e.cls == ThrowableClass) null - else javaName(e.cls) - jcode.addExceptionHandler(p._1, p._2, - labels(e.startBlock).getAnchor(), - cls) - } else - log("Empty exception range: " + p) - } - } - - def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = { - target.isPublic || target.isProtected && { - (site.enclClass isSubClass target.enclClass) || - (site.enclosingPackage == target.privateWithin) - } - } - - def genCallMethod(call: CALL_METHOD) { - val CALL_METHOD(method, style) = call - val siteSymbol = clasz.symbol - val hostSymbol = call.hostClass - val methodOwner = method.owner - // info calls so that types are up to date; erasure may add lateINTERFACE to traits - hostSymbol.info ; methodOwner.info - - def isInterfaceCall(sym: Symbol) = ( - sym.isInterface && methodOwner != ObjectClass - || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass) - ) - // whether to reference the type of the receiver or - // the type of the method owner (if not an interface!) - val useMethodOwner = ( - style != Dynamic - || !isInterfaceCall(hostSymbol) && isAccessibleFrom(methodOwner, siteSymbol) - || hostSymbol.isBottomClass - ) - val receiver = if (useMethodOwner) methodOwner else hostSymbol - val jowner = javaName(receiver) - val jname = javaName(method) - val jtype = javaType(method).asInstanceOf[JMethodType] - - def dbg(invoke: String) { - debuglog("%s %s %s.%s:%s".format(invoke, receiver.accessString, jowner, jname, jtype)) - } - - def initModule() { - // we initialize the MODULE$ field immediately after the super ctor - if (isStaticModule(siteSymbol) && !isModuleInitialized && - jmethod.getName() == JMethod.INSTANCE_CONSTRUCTOR_NAME && - jname == JMethod.INSTANCE_CONSTRUCTOR_NAME) { - isModuleInitialized = true - jcode.emitALOAD_0() - jcode.emitPUTSTATIC(jclass.getName(), - nme.MODULE_INSTANCE_FIELD.toString, - jclass.getType()) - } - } - - style match { - case Static(true) => dbg("invokespecial"); jcode.emitINVOKESPECIAL(jowner, jname, jtype) - case Static(false) => dbg("invokestatic"); jcode.emitINVOKESTATIC(jowner, jname, jtype) - case Dynamic if isInterfaceCall(receiver) => dbg("invokinterface"); jcode.emitINVOKEINTERFACE(jowner, jname, jtype) - case Dynamic => dbg("invokevirtual"); jcode.emitINVOKEVIRTUAL(jowner, jname, jtype) - case SuperCall(_) => - dbg("invokespecial") - jcode.emitINVOKESPECIAL(jowner, jname, jtype) - initModule() - } - } - - def genBlock(b: BasicBlock) { - labels(b).anchorToNext() - - debuglog("Generating code for block: " + b + " at pc: " + labels(b).getAnchor()) - var lastMappedPC = 0 - var lastLineNr = 0 - var crtPC = 0 - - /** local variables whose scope appears in this block. 
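As a reference for the invoke-instruction selection in genCallMethod above, here is a purely illustrative snippet (all names invented) annotated with the instruction the old backend would emit for each call shape.

    class Animal { def sound(): String = "generic" }
    trait Named  { def name(): String }
    object Registry { def lookup(id: Int): String = "animal-" + id }

    class Dog extends Animal with Named {
      override def sound(): String = "woof"
      def name(): String = "rex"
      def describe(): String = {
        val label = Registry.lookup(1)       // getstatic Registry$.MODULE$; invokevirtual lookup
        val self: Named = this
        label + " " + self.name() +          // invokeinterface Named.name
          " says " + super.sound()           // invokespecial Animal.sound (SuperCall)
      }
    }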
*/ - val varsInBlock: mutable.Set[Local] = new mutable.HashSet - val lastInstr = b.lastInstruction - - for (instr <- b) { - instr match { - case THIS(clasz) => jcode.emitALOAD_0() - - case CONSTANT(const) => genConstant(jcode, const) - - case LOAD_ARRAY_ITEM(kind) => - if(kind.isRefOrArrayType) { jcode.emitAALOAD() } - else { - (kind: @unchecked) match { - case UNIT => throw new IllegalArgumentException("invalid type for aload " + kind) - case BOOL | BYTE => jcode.emitBALOAD() - case SHORT => jcode.emitSALOAD() - case CHAR => jcode.emitCALOAD() - case INT => jcode.emitIALOAD() - case LONG => jcode.emitLALOAD() - case FLOAT => jcode.emitFALOAD() - case DOUBLE => jcode.emitDALOAD() - } - } - - case LOAD_LOCAL(local) => jcode.emitLOAD(indexOf(local), javaType(local.kind)) - - case lf @ LOAD_FIELD(field, isStatic) => - val owner = javaName(lf.hostClass) - debuglog("LOAD_FIELD with owner: " + owner + - " flags: " + field.owner.flagString) - val fieldJName = javaName(field) - val fieldJType = javaType(field) - if (isStatic) jcode.emitGETSTATIC(owner, fieldJName, fieldJType) - else jcode.emitGETFIELD( owner, fieldJName, fieldJType) - - case LOAD_MODULE(module) => - // assert(module.isModule, "Expected module: " + module) - debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString); - if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString) - jcode.emitALOAD_0() - else - jcode.emitGETSTATIC(javaName(module) /* + "$" */ , - nme.MODULE_INSTANCE_FIELD.toString, - javaType(module)) - - case STORE_ARRAY_ITEM(kind) => - if(kind.isRefOrArrayType) { jcode.emitAASTORE() } - else { - (kind: @unchecked) match { - case UNIT => throw new IllegalArgumentException("invalid type for astore " + kind) - case BOOL | BYTE => jcode.emitBASTORE() - case SHORT => jcode.emitSASTORE() - case CHAR => jcode.emitCASTORE() - case INT => jcode.emitIASTORE() - case LONG => jcode.emitLASTORE() - case FLOAT => jcode.emitFASTORE() - case DOUBLE => jcode.emitDASTORE() - } - } - - case STORE_LOCAL(local) => - jcode.emitSTORE(indexOf(local), javaType(local.kind)) - - case STORE_THIS(_) => - // this only works for impl classes because the self parameter comes first - // in the method signature. If that changes, this code has to be revisited. 
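The LOAD_MODULE case above distinguishes references to a module from inside its own class (plain ALOAD_0, except in readResolve) from references anywhere else (GETSTATIC of the MODULE$ field). An invented example:

    object Clock {
      def now(): Long = System.currentTimeMillis()
      def tick(): Long = now()          // inside Clock$: the module reference is just ALOAD_0
    }

    def stamp(): Long = Clock.now()     // elsewhere: GETSTATIC Clock$.MODULE$, then the call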
- jcode.emitASTORE_0() - - case STORE_FIELD(field, isStatic) => - val owner = javaName(field.owner) - val fieldJName = javaName(field) - val fieldJType = javaType(field) - if (isStatic) jcode.emitPUTSTATIC(owner, fieldJName, fieldJType) - else jcode.emitPUTFIELD( owner, fieldJName, fieldJType) - - case CALL_PRIMITIVE(primitive) => genPrimitive(primitive, instr.pos) - - /** Special handling to access native Array.clone() */ - case call @ CALL_METHOD(definitions.Array_clone, Dynamic) => - val target: String = javaType(call.targetTypeKind).getSignature() - jcode.emitINVOKEVIRTUAL(target, "clone", arrayCloneType) - - case call @ CALL_METHOD(method, style) => genCallMethod(call) - - case BOX(kind) => - val Pair(mname, mtype) = jBoxTo(kind) - jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype) - - case UNBOX(kind) => - val Pair(mname, mtype) = jUnboxTo(kind) - jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype) - - case NEW(REFERENCE(cls)) => - val className = javaName(cls) - jcode emitNEW className - - case CREATE_ARRAY(elem, 1) => - if(elem.isRefOrArrayType) { jcode emitANEWARRAY javaType(elem).asInstanceOf[JReferenceType] } - else { jcode emitNEWARRAY javaType(elem) } - - case CREATE_ARRAY(elem, dims) => - jcode.emitMULTIANEWARRAY(javaType(ArrayN(elem, dims)).asInstanceOf[JReferenceType], dims) - - case IS_INSTANCE(tpe) => - tpe match { - case REFERENCE(cls) => jcode emitINSTANCEOF new JObjectType(javaName(cls)) - case ARRAY(elem) => jcode emitINSTANCEOF new JArrayType(javaType(elem)) - case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe) - } - - case CHECK_CAST(tpe) => - tpe match { - case REFERENCE(cls) => if (cls != ObjectClass) { jcode emitCHECKCAST new JObjectType(javaName(cls)) } // No need to checkcast for Objects - case ARRAY(elem) => jcode emitCHECKCAST new JArrayType(javaType(elem)) - case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe) - } - - case SWITCH(tags, branches) => - val tagArray = new Array[Array[Int]](tags.length) - var caze = tags - var i = 0 - - while (i < tagArray.length) { - tagArray(i) = new Array[Int](caze.head.length) - caze.head.copyToArray(tagArray(i), 0) - i += 1 - caze = caze.tail - } - val branchArray = jcode.newLabels(tagArray.length) - i = 0 - while (i < branchArray.length) { - branchArray(i) = labels(branches(i)) - i += 1 - } - debuglog("Emitting SWITCH:\ntags: " + tags + "\nbranches: " + branches) - jcode.emitSWITCH(tagArray, - branchArray, - labels(branches.last), - MIN_SWITCH_DENSITY) - () - - case JUMP(whereto) => - if (nextBlock != whereto) - jcode.emitGOTO_maybe_W(labels(whereto), false) // default to short jumps - - case CJUMP(success, failure, cond, kind) => - if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT - if (nextBlock == success) { - jcode.emitIF_ICMP(conds(cond.negate()), labels(failure)) - // .. and fall through to success label - } else { - jcode.emitIF_ICMP(conds(cond), labels(success)) - if (nextBlock != failure) - jcode.emitGOTO_maybe_W(labels(failure), false) - } - } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_) - if (nextBlock == success) { - jcode.emitIF_ACMP(conds(cond.negate()), labels(failure)) - // .. 
and fall through to success label - } else { - jcode.emitIF_ACMP(conds(cond), labels(success)) - if (nextBlock != failure) - jcode.emitGOTO_maybe_W(labels(failure), false) - } - } else { - (kind: @unchecked) match { - case LONG => jcode.emitLCMP() - case FLOAT => - if (cond == LT || cond == LE) jcode.emitFCMPG() - else jcode.emitFCMPL() - case DOUBLE => - if (cond == LT || cond == LE) jcode.emitDCMPG() - else jcode.emitDCMPL() - } - if (nextBlock == success) { - jcode.emitIF(conds(cond.negate()), labels(failure)) - // .. and fall through to success label - } else { - jcode.emitIF(conds(cond), labels(success)); - if (nextBlock != failure) - jcode.emitGOTO_maybe_W(labels(failure), false) - } - } - - case CZJUMP(success, failure, cond, kind) => - if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT - if (nextBlock == success) { - jcode.emitIF(conds(cond.negate()), labels(failure)) - } else { - jcode.emitIF(conds(cond), labels(success)) - if (nextBlock != failure) - jcode.emitGOTO_maybe_W(labels(failure), false) - } - } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_) - val Success = success - val Failure = failure - (cond, nextBlock) match { - case (EQ, Success) => jcode emitIFNONNULL labels(failure) - case (NE, Failure) => jcode emitIFNONNULL labels(success) - case (EQ, Failure) => jcode emitIFNULL labels(success) - case (NE, Success) => jcode emitIFNULL labels(failure) - case (EQ, _) => - jcode emitIFNULL labels(success) - jcode.emitGOTO_maybe_W(labels(failure), false) - case (NE, _) => - jcode emitIFNONNULL labels(success) - jcode.emitGOTO_maybe_W(labels(failure), false) - case _ => - } - } else { - (kind: @unchecked) match { - case LONG => - jcode.emitLCONST_0() - jcode.emitLCMP() - case FLOAT => - jcode.emitFCONST_0() - if (cond == LT || cond == LE) jcode.emitFCMPG() - else jcode.emitFCMPL() - case DOUBLE => - jcode.emitDCONST_0() - if (cond == LT || cond == LE) jcode.emitDCMPG() - else jcode.emitDCMPL() - } - if (nextBlock == success) { - jcode.emitIF(conds(cond.negate()), labels(failure)) - } else { - jcode.emitIF(conds(cond), labels(success)) - if (nextBlock != failure) - jcode.emitGOTO_maybe_W(labels(failure), false) - } - } - - case RETURN(kind) => jcode emitRETURN javaType(kind) - - case THROW(_) => jcode.emitATHROW() - - case DROP(kind) => - if(kind.isWideType) jcode.emitPOP2() - else jcode.emitPOP() - - case DUP(kind) => - if(kind.isWideType) jcode.emitDUP2() - else jcode.emitDUP() - - case MONITOR_ENTER() => jcode.emitMONITORENTER() - - case MONITOR_EXIT() => jcode.emitMONITOREXIT() - - case SCOPE_ENTER(lv) => - varsInBlock += lv - lv.start = jcode.getPC() - - case SCOPE_EXIT(lv) => - if (varsInBlock(lv)) { - lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges - varsInBlock -= lv - } - else if (b.varsInScope(lv)) { - lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges - b.varsInScope -= lv - } - else dumpMethodAndAbort(method, "Illegal local var nesting") - - case LOAD_EXCEPTION(_) => - () - } - - crtPC = jcode.getPC() - - // assert(instr.pos.source.isEmpty || instr.pos.source.get == (clasz.cunit.source), "sources don't match") - // val crtLine = instr.pos.line.get(lastLineNr); - - val crtLine = try { - if (instr.pos == NoPosition) lastLineNr else (instr.pos).line // check NoPosition to avoid costly exception - } catch { - case _: UnsupportedOperationException => - log("Warning: wrong position in: " + method) - lastLineNr - } - - if (instr eq lastInstr) { endPC(b) = jcode.getPC() } - - //System.err.println("CRTLINE: " + instr.pos + " " + - // /* (if 
(instr.pos < clasz.cunit.source.content.length) clasz.cunit.source.content(instr.pos) else '*') + */ " " + crtLine); - - if (crtPC > lastMappedPC) { - jcode.completeLineNumber(lastMappedPC, crtPC, crtLine) - lastMappedPC = crtPC - lastLineNr = crtLine - } - } - - // local vars that survived this basic block - for (lv <- varsInBlock) { - lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges - } - for (lv <- b.varsInScope) { - lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges - } - } - - def genPrimitive(primitive: Primitive, pos: Position) { - primitive match { - case Negation(kind) => - if(kind.isIntSizedType) { jcode.emitINEG() } - else { - kind match { - case LONG => jcode.emitLNEG() - case FLOAT => jcode.emitFNEG() - case DOUBLE => jcode.emitDNEG() - case _ => abort("Impossible to negate a " + kind) - } - } - - case Arithmetic(op, kind) => - op match { - case ADD => - if(kind.isIntSizedType) { jcode.emitIADD() } - else { - (kind: @unchecked) match { - case LONG => jcode.emitLADD() - case FLOAT => jcode.emitFADD() - case DOUBLE => jcode.emitDADD() - } - } - - case SUB => - if(kind.isIntSizedType) { jcode.emitISUB() } - else { - (kind: @unchecked) match { - case LONG => jcode.emitLSUB() - case FLOAT => jcode.emitFSUB() - case DOUBLE => jcode.emitDSUB() - } - } - - case MUL => - if(kind.isIntSizedType) { jcode.emitIMUL() } - else { - (kind: @unchecked) match { - case LONG => jcode.emitLMUL() - case FLOAT => jcode.emitFMUL() - case DOUBLE => jcode.emitDMUL() - } - } - - case DIV => - if(kind.isIntSizedType) { jcode.emitIDIV() } - else { - (kind: @unchecked) match { - case LONG => jcode.emitLDIV() - case FLOAT => jcode.emitFDIV() - case DOUBLE => jcode.emitDDIV() - } - } - - case REM => - if(kind.isIntSizedType) { jcode.emitIREM() } - else { - (kind: @unchecked) match { - case LONG => jcode.emitLREM() - case FLOAT => jcode.emitFREM() - case DOUBLE => jcode.emitDREM() - } - } - - case NOT => - if(kind.isIntSizedType) { - jcode.emitPUSH(-1) - jcode.emitIXOR() - } else if(kind == LONG) { - jcode.emitPUSH(-1l) - jcode.emitLXOR() - } else { - abort("Impossible to negate an " + kind) - } - - case _ => - abort("Unknown arithmetic primitive " + primitive) - } - - case Logical(op, kind) => ((op, kind): @unchecked) match { - case (AND, LONG) => jcode.emitLAND() - case (AND, INT) => jcode.emitIAND() - case (AND, _) => - jcode.emitIAND() - if (kind != BOOL) - jcode.emitT2T(javaType(INT), javaType(kind)); - - case (OR, LONG) => jcode.emitLOR() - case (OR, INT) => jcode.emitIOR() - case (OR, _) => - jcode.emitIOR() - if (kind != BOOL) - jcode.emitT2T(javaType(INT), javaType(kind)); - - case (XOR, LONG) => jcode.emitLXOR() - case (XOR, INT) => jcode.emitIXOR() - case (XOR, _) => - jcode.emitIXOR() - if (kind != BOOL) - jcode.emitT2T(javaType(INT), javaType(kind)); - } - - case Shift(op, kind) => ((op, kind): @unchecked) match { - case (LSL, LONG) => jcode.emitLSHL() - case (LSL, INT) => jcode.emitISHL() - case (LSL, _) => - jcode.emitISHL() - jcode.emitT2T(javaType(INT), javaType(kind)) - - case (ASR, LONG) => jcode.emitLSHR() - case (ASR, INT) => jcode.emitISHR() - case (ASR, _) => - jcode.emitISHR() - jcode.emitT2T(javaType(INT), javaType(kind)) - - case (LSR, LONG) => jcode.emitLUSHR() - case (LSR, INT) => jcode.emitIUSHR() - case (LSR, _) => - jcode.emitIUSHR() - jcode.emitT2T(javaType(INT), javaType(kind)) - } - - case Comparison(op, kind) => ((op, kind): @unchecked) match { - case (CMP, LONG) => jcode.emitLCMP() - case (CMPL, FLOAT) => jcode.emitFCMPL() - case (CMPG, FLOAT) => 
jcode.emitFCMPG() - case (CMPL, DOUBLE) => jcode.emitDCMPL() - case (CMPG, DOUBLE) => jcode.emitDCMPL() - } - - case Conversion(src, dst) => - debuglog("Converting from: " + src + " to: " + dst) - if (dst == BOOL) { - println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line) - } else - jcode.emitT2T(javaType(src), javaType(dst)) - - case ArrayLength(_) => - jcode.emitARRAYLENGTH() - - case StartConcat => - jcode emitNEW StringBuilderClassName - jcode.emitDUP() - jcode.emitINVOKESPECIAL(StringBuilderClassName, - JMethod.INSTANCE_CONSTRUCTOR_NAME, - JMethodType.ARGLESS_VOID_FUNCTION) - - case StringConcat(el) => - val jtype = el match { - case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT - case _ => javaType(el) - } - jcode.emitINVOKEVIRTUAL(StringBuilderClassName, - "append", - new JMethodType(StringBuilderType, - Array(jtype))) - case EndConcat => - jcode.emitINVOKEVIRTUAL(StringBuilderClassName, - "toString", - toStringType) - - case _ => - abort("Unimplemented primitive " + primitive) - } - } - - // genCode starts here - genBlocks(linearization) - - if (this.method.exh != Nil) - genExceptionHandlers; - } - - - /** Emit a Local variable table for debugging purposes. - * Synthetic locals are skipped. All variables are method-scoped. - */ - private def genLocalVariableTable(m: IMethod, jcode: JCode) { - val vars = m.locals filterNot (_.sym.isArtifact) - if (vars.isEmpty) return - - val pool = jclass.getConstantPool - val pc = jcode.getPC() - var anonCounter = 0 - var entries = 0 - vars.foreach { lv => - lv.ranges = mergeEntries(lv.ranges.reverse); - entries += lv.ranges.length - } - if (!jmethod.isStatic()) entries += 1 - - val lvTab = ByteBuffer.allocate(2 + 10 * entries) - def emitEntry(name: String, signature: String, idx: Short, start: Short, end: Short) { - lvTab putShort start - lvTab putShort end - lvTab putShort pool.addUtf8(name).toShort - lvTab putShort pool.addUtf8(signature).toShort - lvTab putShort idx - } - - lvTab.putShort(entries.toShort) - - if (!jmethod.isStatic()) { - emitEntry("this", jclass.getType().getSignature(), 0, 0.toShort, pc.toShort) - } - - for (lv <- vars) { - val name = if (javaName(lv.sym) eq null) { - anonCounter += 1 - "" - } else javaName(lv.sym) - - val index = indexOf(lv).toShort - val tpe = javaType(lv.kind).getSignature() - for ((start, end) <- lv.ranges) { - emitEntry(name, tpe, index, start.toShort, (end - start).toShort) - } - } - val attr = - fjbgContext.JOtherAttribute(jclass, - jcode, - tpnme.LocalVariableTableATTR.toString, - lvTab.array()) - jcode addAttribute attr - } - - - /** For each basic block, the first PC address following it. */ - val endPC = new mutable.HashMap[BasicBlock, Int] - - ////////////////////// local vars /////////////////////// - - def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1 - - def indexOf(local: Local): Int = { - assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ") - local.index - } - - /** - * Compute the indexes of each local variable of the given - * method. 
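The FCMPG/FCMPL (and DCMPG/DCMPL) choice made for floating-point branches earlier in genBlock follows the JVM's NaN convention; a small standalone model of the two instructions (illustration only, not compiler code) shows why LT/LE pick the G variant and GT/GE the L variant.

    def fcmpl(a: Float, b: Float): Int =
      if (a.isNaN || b.isNaN) -1 else if (a < b) -1 else if (a > b) 1 else 0

    def fcmpg(a: Float, b: Float): Int =
      if (a.isNaN || b.isNaN) 1 else if (a < b) -1 else if (a > b) 1 else 0

    // For a test like `x < y` the generator emits fcmpg + iflt: a NaN operand
    // yields +1, the branch is not taken, and the comparison is false, as the
    // language requires. Symmetrically, `>`/`>=` use the L variant so that NaN
    // again makes the branch fail.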
*Does not assume the parameters come first!* - */ - def computeLocalVarsIndex(m: IMethod) { - var idx = if (m.symbol.isStaticMember) 0 else 1; - - for (l <- m.params) { - debuglog("Index value for " + l + "{" + l.## + "}: " + idx) - l.index = idx - idx += sizeOf(l.kind) - } - - for (l <- m.locals if !(m.params contains l)) { - debuglog("Index value for " + l + "{" + l.## + "}: " + idx) - l.index = idx - idx += sizeOf(l.kind) - } - } - - ////////////////////// Utilities //////////////////////// - - /** Merge adjacent ranges. */ - private def mergeEntries(ranges: List[(Int, Int)]): List[(Int, Int)] = - (ranges.foldLeft(Nil: List[(Int, Int)]) { (collapsed: List[(Int, Int)], p: (Int, Int)) => (collapsed, p) match { - case (Nil, _) => List(p) - case ((s1, e1) :: rest, (s2, e2)) if (e1 == s2) => (s1, e2) :: rest - case _ => p :: collapsed - }}).reverse - } - - private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _) - - /** - * Return the Java modifiers for the given symbol. - * Java modifiers for classes: - * - public, abstract, final, strictfp (not used) - * for interfaces: - * - the same as for classes, without 'final' - * for fields: - * - public, private (*) - * - static, final - * for methods: - * - the same as for fields, plus: - * - abstract, synchronized (not used), strictfp (not used), native (not used) - * - * (*) protected cannot be used, since inner classes 'see' protected members, - * and they would fail verification after lifted. - */ - def javaFlags(sym: Symbol): Int = { - // constructors of module classes should be private - // PP: why are they only being marked private at this stage and not earlier? - val privateFlag = - sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner)) - - // Final: the only fields which can receive ACC_FINAL are eager vals. - // Neither vars nor lazy vals can, because: - // - // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3 - // "Another problem is that the specification allows aggressive - // optimization of final fields. Within a thread, it is permissible to - // reorder reads of a final field with those modifications of a final - // field that do not take place in the constructor." - // - // A var or lazy val which is marked final still has meaning to the - // scala compiler. The word final is heavily overloaded unfortunately; - // for us it means "not overridable". At present you can't override - // vars regardless; this may change. - // - // The logic does not check .isFinal (which checks flags for the FINAL flag, - // and includes symbols marked lateFINAL) instead inspecting rawflags so - // we can exclude lateFINAL. Such symbols are eligible for inlining, but to - // avoid breaking proxy software which depends on subclassing, we do not - // emit ACC_FINAL. - // Nested objects won't receive ACC_FINAL in order to allow for their overriding. - - val finalFlag = ( - (((sym.rawflags & Flags.FINAL) != 0) || isTopLevelModule(sym)) - && !sym.enclClass.isInterface - && !sym.isClassConstructor - && !sym.isMutable // lazy vals and vars both - ) - - // Primitives are "abstract final" to prohibit instantiation - // without having to provide any implementations, but that is an - // illegal combination of modifiers at the bytecode level so - // suppress final if abstract if present. 
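computeLocalVarsIndex relies on the JVM slot model: slot 0 of an instance method holds `this`, and long/double values occupy two consecutive slots (sizeOf above). A simplified standalone version of the same bookkeeping, with stand-in types:

    sealed trait Kind { def slots: Int }
    case object Word     extends Kind { def slots = 1 }   // int, float, reference, ...
    case object TwoWords extends Kind { def slots = 2 }   // long, double

    def assignSlots(isStatic: Boolean, params: List[Kind], locals: List[Kind]): List[Int] = {
      var idx = if (isStatic) 0 else 1                    // slot 0 is `this` for instance methods
      (params ::: locals).map { k => val slot = idx; idx += k.slots; slot }
    }

    // assignSlots(isStatic = false, params = List(Word, TwoWords), locals = List(Word))
    //   == List(1, 2, 4)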
- mkFlags( - if (privateFlag) ACC_PRIVATE else ACC_PUBLIC, - if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0, - if (sym.isInterface) ACC_INTERFACE else 0, - if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0, - if (sym.isStaticMember) ACC_STATIC else 0, - if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0, - if (sym.isArtifact) ACC_SYNTHETIC else 0, - if (sym.isClass && !sym.isInterface) ACC_SUPER else 0, - if (sym.isVarargsMethod) ACC_VARARGS else 0, - if (sym.hasFlag(Flags.SYNCHRONIZED)) JAVA_ACC_SYNCHRONIZED else 0 - ) - } - def javaFieldFlags(sym: Symbol) = ( - javaFlags(sym) | mkFlags( - if (sym hasAnnotation TransientAttr) ACC_TRANSIENT else 0, - if (sym hasAnnotation VolatileAttr) ACC_VOLATILE else 0, - if (sym.isMutable) 0 else ACC_FINAL - ) - ) - - def isTopLevelModule(sym: Symbol): Boolean = - exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass } - - def isStaticModule(sym: Symbol): Boolean = { - sym.isModuleClass && !sym.isImplClass && !sym.isLifted - } - -} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala deleted file mode 100644 index 613f8f893e..0000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala +++ /dev/null @@ -1,141 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Iulian Dragos - */ - -package scala.tools.nsc -package backend.jvm - -import scala.collection.{ mutable, immutable } -import ch.epfl.lamp.fjbg._ - -trait GenJVMUtil { - self: GenJVM => - - import global._ - import icodes._ - import definitions._ - - /** Map from type kinds to the Java reference types. It is used for - * loading class constants. @see Predef.classOf. - */ - val classLiteral = immutable.Map[TypeKind, JObjectType]( - UNIT -> new JObjectType("java.lang.Void"), - BOOL -> new JObjectType("java.lang.Boolean"), - BYTE -> new JObjectType("java.lang.Byte"), - SHORT -> new JObjectType("java.lang.Short"), - CHAR -> new JObjectType("java.lang.Character"), - INT -> new JObjectType("java.lang.Integer"), - LONG -> new JObjectType("java.lang.Long"), - FLOAT -> new JObjectType("java.lang.Float"), - DOUBLE -> new JObjectType("java.lang.Double") - ) - - // Don't put this in per run caches. - private val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List( - NothingClass -> binarynme.RuntimeNothing, - RuntimeNothingClass -> binarynme.RuntimeNothing, - NullClass -> binarynme.RuntimeNull, - RuntimeNullClass -> binarynme.RuntimeNull - ) - - /** This trait may be used by tools who need access to - * utility methods like javaName and javaType. (for instance, - * the Eclipse plugin uses it). - */ - trait BytecodeUtil { - - val conds = immutable.Map[TestOp, Int]( - EQ -> JExtendedCode.COND_EQ, - NE -> JExtendedCode.COND_NE, - LT -> JExtendedCode.COND_LT, - GT -> JExtendedCode.COND_GT, - LE -> JExtendedCode.COND_LE, - GE -> JExtendedCode.COND_GE - ) - - /** Specialized array conversion to prevent calling - * java.lang.reflect.Array.newInstance via TraversableOnce.toArray - */ - - def mkArray(xs: Traversable[JType]): Array[JType] = { val a = new Array[JType](xs.size); xs.copyToArray(a); a } - def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a } - - /** Return the a name of this symbol that can be used on the Java - * platform. It removes spaces from names. 
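mkFlags just ORs together the selected access bits; the numeric values are the standard classfile access flags (they reappear verbatim in JAccessFlags.java further down in this patch). A tiny self-contained illustration:

    val ACC_PUBLIC = 0x0001
    val ACC_STATIC = 0x0008
    val ACC_FINAL  = 0x0010

    def mkFlags(args: Int*): Int = args.foldLeft(0)(_ | _)

    // mkFlags(ACC_PUBLIC, ACC_STATIC, ACC_FINAL) == 0x0019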
- * - * Special handling: - * scala.Nothing erases to scala.runtime.Nothing$ - * scala.Null erases to scala.runtime.Null$ - * - * This is needed because they are not real classes, and they mean - * 'abrupt termination upon evaluation of that expression' or null respectively. - * This handling is done already in GenICode, but here we need to remove - * references from method signatures to these types, because such classes can - * not exist in the classpath: the type checker will be very confused. - */ - def javaName(sym: Symbol): String = - javaNameCache.getOrElseUpdate(sym, { - if (sym.isClass || (sym.isModule && !sym.isMethod)) - sym.javaBinaryName - else - sym.javaSimpleName - }).toString - - def javaType(t: TypeKind): JType = (t: @unchecked) match { - case UNIT => JType.VOID - case BOOL => JType.BOOLEAN - case BYTE => JType.BYTE - case SHORT => JType.SHORT - case CHAR => JType.CHAR - case INT => JType.INT - case LONG => JType.LONG - case FLOAT => JType.FLOAT - case DOUBLE => JType.DOUBLE - case REFERENCE(cls) => new JObjectType(javaName(cls)) - case ARRAY(elem) => new JArrayType(javaType(elem)) - } - - def javaType(t: Type): JType = javaType(toTypeKind(t)) - - def javaType(s: Symbol): JType = - if (s.isMethod) - new JMethodType( - if (s.isClassConstructor) JType.VOID else javaType(s.tpe.resultType), - mkArray(s.tpe.paramTypes map javaType) - ) - else - javaType(s.tpe) - - protected def genConstant(jcode: JExtendedCode, const: Constant) { - const.tag match { - case UnitTag => () - case BooleanTag => jcode emitPUSH const.booleanValue - case ByteTag => jcode emitPUSH const.byteValue - case ShortTag => jcode emitPUSH const.shortValue - case CharTag => jcode emitPUSH const.charValue - case IntTag => jcode emitPUSH const.intValue - case LongTag => jcode emitPUSH const.longValue - case FloatTag => jcode emitPUSH const.floatValue - case DoubleTag => jcode emitPUSH const.doubleValue - case StringTag => jcode emitPUSH const.stringValue - case NullTag => jcode.emitACONST_NULL() - case ClazzTag => - val kind = toTypeKind(const.typeValue) - val toPush = - if (kind.isValueType) classLiteral(kind) - else javaType(kind).asInstanceOf[JReferenceType] - - jcode emitPUSH toPush - - case EnumTag => - val sym = const.symbolValue - jcode.emitGETSTATIC(javaName(sym.owner), - javaName(sym), - javaType(sym.tpe.underlying)) - case _ => - abort("Unknown constant value: " + const) - } - } - } -} diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index 98ef74aee3..3babd08a31 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -41,7 +41,7 @@ trait StandardScalaSettings { val optimise: BooleanSetting // depends on post hook which mutates other settings val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.") val target = ChoiceSetting ("-target", "target", "Target platform for object files. 
All JVM 1.5 targets are deprecated.", - List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil"), + List("jvm-1.5", "jvm-1.6", "jvm-1.7", "msil"), "jvm-1.6") val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.") val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.") diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index ac1cdd1f46..1d1e93dd34 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -513,7 +513,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * - create a new method definition that also has a `self` parameter * (which comes first) Iuli: this position is assumed by tail call elimination * on a different receiver. Storing a new 'this' assumes it is located at - * index 0 in the local variable table. See 'STORE_THIS' and GenJVM/GenMSIL. + * index 0 in the local variable table. See 'STORE_THIS' and GenASM/GenMSIL. * - Map implementation class types in type-apply's to their interfaces * - Remove all fields in implementation classes */ diff --git a/src/eclipse/README.md b/src/eclipse/README.md index 39a3f457a0..d135f99418 100644 --- a/src/eclipse/README.md +++ b/src/eclipse/README.md @@ -44,7 +44,7 @@ If you want to go back to normal (for instance, to commit your changes to projec DETAILS ======= -The compiler project depends on the library, reflect, asm and fjbg projects. The +The compiler project depends on the library, reflect, and asm projects. The builder will take care of the correct ordering, and changes in one project will be picked up by the dependent projects. diff --git a/src/eclipse/fjbg/.classpath b/src/eclipse/fjbg/.classpath deleted file mode 100644 index 3e2f55f48a..0000000000 --- a/src/eclipse/fjbg/.classpath +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/src/eclipse/fjbg/.project b/src/eclipse/fjbg/.project deleted file mode 100644 index 8acea9f5fe..0000000000 --- a/src/eclipse/fjbg/.project +++ /dev/null @@ -1,30 +0,0 @@ - - - fjbg - - - - - - org.scala-ide.sdt.core.scalabuilder - - - - - - org.scala-ide.sdt.core.scalanature - org.eclipse.jdt.core.javanature - - - - fjbg - 2 - SCALA_BASEDIR/src/fjbg - - - libs-classes-fjbg - 2 - SCALA_BASEDIR/build/libs/classes/fjbg - - - diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath index 40a4ed9996..e6af46c68f 100644 --- a/src/eclipse/scala-compiler/.classpath +++ b/src/eclipse/scala-compiler/.classpath @@ -3,7 +3,6 @@ - diff --git a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java b/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java deleted file mode 100644 index 9856dc7311..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java +++ /dev/null @@ -1,195 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.IOException; - -/** - * Context in which FJBG executes. Used both as a factory for most - * FJBG classes and as a repository for other factories. 
- * - * @author Michel Schinz - * @version 1.0 - */ - -public class FJBGContext { - /** Class file major version */ - final int MAJOR_VERSION; - - /** Class file minor version */ - final int MINOR_VERSION; - - public FJBGContext() { - this(45, 3); - } - - public FJBGContext(int major, int minor) { - MAJOR_VERSION = major; - MINOR_VERSION = minor; - } - - // Factory methods - ////////////////////////////////////////////////////////////////////// - - public JClass JClass(int accessFlags, - String name, - String superclassName, - String[] interfaceNames, - String sourceFileName) { - return new JClass(this, - accessFlags, - name, - superclassName, - interfaceNames, - sourceFileName); - } - - public JClass JClass(DataInputStream stream) - throws IOException { - return new JClass(this, stream); - } - - public JConstantPool JConstantPool() { - return new JConstantPool(this); - } - - public JConstantPool JConstantPool(DataInputStream stream) - throws IOException { - return new JConstantPool(this, stream); - } - - public JField JField(JClass owner, - int accessFlags, - String name, - JType type) { - return new JField(this, - owner, - accessFlags, - name, - type); - } - - public JField JField(JClass owner, DataInputStream stream) - throws IOException { - return new JField(this, owner, stream); - } - - public JMethod JMethod(JClass owner, - int accessFlags, - String name, - JType returnType, - JType[] argTypes, - String[] argNames) { - return new JMethod(this, - owner, - accessFlags, - name, - returnType, - argTypes, - argNames); - } - - public JMethod JMethod(JClass owner, - int accessFlags, - String name, - JMethodType type, - String[] argNames) { - return JMethod(owner, - accessFlags, - name, - type.getReturnType(), - type.getArgumentTypes(), - argNames); - } - - public JMethod JMethod(JClass owner, DataInputStream stream) - throws IOException { - return new JMethod(this, owner, stream); - } - - public JLocalVariable JLocalVariable(JMethod owner, - JType type, - String name, - int index) { - return new JLocalVariable(this, owner, type, name, index); - } - - public JCode JCode(JClass clazz, JMethod owner) { - return new JExtendedCode(this, clazz, owner); - } - - public JCode JCode(JClass clazz, JMethod owner, DataInputStream stream) - throws IOException { - return new JCode(this, clazz, owner, stream); - } - - public JAttributeFactory JAttributeFactory() { - return new JAttributeFactory(this); - } - - // Attributes - public JCodeAttribute JCodeAttribute(JClass clazz, JMethod owner) { - return new JCodeAttribute(this, clazz, owner); - } - - public JEnclosingMethodAttribute JEnclosingMethodAttribute(JClass clazz, - String className, - String methodName, - JType methodType) { - return new JEnclosingMethodAttribute(this, clazz, className, methodName, methodType); - } - - public JExceptionsAttribute JExceptionsAttribute(JClass clazz, - JMethod owner) { - return new JExceptionsAttribute(this, clazz, owner); - } - - public JLineNumberTableAttribute JLineNumberTableAttribute(JClass clazz, - JCode owner) { - return new JLineNumberTableAttribute(this, clazz, owner); - } - - public JLocalVariableTableAttribute JLocalVariableTableAttribute(JClass clazz, - JCode owner) { - return new JLocalVariableTableAttribute(this, clazz, owner); - } - - public JOtherAttribute JOtherAttribute(JClass clazz, - Object owner, - String name, - byte[] contents, - int length) { - return new JOtherAttribute(this, clazz, owner, name, contents, length); - } - - public JOtherAttribute JOtherAttribute(JClass clazz, - Object owner, - 
String name, - byte[] contents) { - return JOtherAttribute(clazz, owner, name, contents, contents.length); - } - - public JSourceFileAttribute JSourceFileAttribute(JClass clazz, - String sourceFileName) { - return new JSourceFileAttribute(this, clazz, sourceFileName); - } - - public JStackMapTableAttribute JStackMapTableAttribute(JClass clazz, - JCode owner) { - return new JStackMapTableAttribute(this, clazz, owner); - } - - /// Repository - ////////////////////////////////////////////////////////////////////// - - protected JAttributeFactory jAttributeFactory = null; - public JAttributeFactory getJAttributeFactory() { - if (jAttributeFactory == null) - jAttributeFactory = JAttributeFactory(); - return jAttributeFactory; - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java b/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java deleted file mode 100644 index 01d8cc9a7e..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java +++ /dev/null @@ -1,35 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -/** - * Definition of access flags for fields, methods and classes. - * - * @author Michel Schinz - * @version 1.0 - */ - -public interface JAccessFlags { - public static int ACC_PUBLIC = 0x0001; - public static int ACC_PRIVATE = 0x0002; - public static int ACC_PROTECTED = 0x0004; - public static int ACC_STATIC = 0x0008; - public static int ACC_FINAL = 0x0010; - public static int ACC_SUPER = 0x0020; - public static int ACC_VOLATILE = 0x0040; - public static int ACC_TRANSIENT = 0x0080; - public static int ACC_NATIVE = 0x0100; - public static int ACC_INTERFACE = 0x0200; - public static int ACC_ABSTRACT = 0x0400; - public static int ACC_STRICT = 0x0800; - public static int ACC_SYNTHETIC = 0x1000; - public static int ACC_ANNOTATION= 0x2000; - public static int ACC_ENUM = 0x4000; - - // 1.5 specifics - public static int ACC_BRIDGE = 0x0040; - public static int ACC_VARARGS = 0x0080; -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java b/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java deleted file mode 100644 index 61a04523ca..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java +++ /dev/null @@ -1,62 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -/** - * Types for Java arrays. 
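For readers unfamiliar with the library being deleted, here is a rough usage sketch of the FJBG factory API, pieced together from the removed sources in this patch; the class and file names are invented, and the exact name format expected by JClass (dotted vs. internal) is an assumption here rather than something this patch states.

    import ch.epfl.lamp.fjbg._
    import JAccessFlags._

    val context = new FJBGContext()          // defaults to classfile version 45.3
    val cls = context.JClass(
      ACC_PUBLIC | ACC_SUPER | ACC_FINAL,
      "demo.Empty",
      "java.lang.Object",
      JClass.NO_INTERFACES,
      "Empty.scala")
    cls.writeTo("demo/Empty.class")          // writeTo is defined in the JClass sources below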
- * - * @author Michel Schinz - * @version 1.0 - */ - -public class JArrayType extends JReferenceType { - protected final JType elementType; - protected String signature = null; - - public JArrayType(JType elementType) { - this.elementType = elementType; - } - - public int getSize() { return 1; } - - public String getSignature() { - if (signature == null) - signature = "[" + elementType.getSignature(); - return signature; - } - - public String getDescriptor() { - return getSignature(); - } - - public int getTag() { return T_ARRAY; } - - public JType getElementType() { return elementType; } - - public String toString() { - return elementType.toString() + "[]"; - } - - public boolean isArrayType() { return true; } - - public boolean isCompatibleWith(JType other) { - if (other instanceof JObjectType) - return (JObjectType)other == JObjectType.JAVA_LANG_OBJECT; - else if (other instanceof JArrayType) - return elementType.isCompatibleWith(((JArrayType)other).elementType); - else return other == JType.REFERENCE; - } - - public static JArrayType BOOLEAN = new JArrayType(JType.BOOLEAN); - public static JArrayType BYTE = new JArrayType(JType.BYTE); - public static JArrayType CHAR = new JArrayType(JType.CHAR); - public static JArrayType SHORT = new JArrayType(JType.SHORT); - public static JArrayType INT = new JArrayType(JType.INT); - public static JArrayType FLOAT = new JArrayType(JType.FLOAT); - public static JArrayType LONG = new JArrayType(JType.LONG); - public static JArrayType DOUBLE = new JArrayType(JType.DOUBLE); - public static JArrayType REFERENCE = new JArrayType(JType.REFERENCE); -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java deleted file mode 100644 index 6a825beb18..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java +++ /dev/null @@ -1,84 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.*; - -/** - * Abstract superclass for attributes which can be attached to various - * parts of a class file. - * - * Attributes are used for classes (section 4.2), fields (section 4.6), - * methods (section 4.7) and the Code attribute (section 4.8.3). - * See sections 4.2 and later of the JVM specification. - * - * @author Michel Schinz - * @version 1.0 - */ - -public abstract class JAttribute { - protected final int nameIdx; - - static public void writeTo(List/**/ attrs, DataOutputStream stream) - throws IOException { - stream.writeShort(attrs.size()); - Iterator attrsIt = attrs.iterator(); - while (attrsIt.hasNext()) { - JAttribute attr = (JAttribute)attrsIt.next(); - attr.writeTo(stream); - } - } - - static public List/**/ readFrom(FJBGContext context, - JClass clazz, - Object owner, - DataInputStream stream) - throws IOException { - JAttributeFactory factory = context.getJAttributeFactory(); - int count = stream.readShort(); - ArrayList list = new ArrayList(count); - for (int i = 0; i < count; ++i) - list.add(factory.newInstance(clazz, owner, stream)); - return list; - } - - public JAttribute(FJBGContext context, JClass clazz) { - this.nameIdx = clazz.getConstantPool().addUtf8(getName()); - } - - public JAttribute(FJBGContext context, JClass clazz, String name) { - this.nameIdx = clazz.getConstantPool().addUtf8(name); - } - - abstract public String getName(); - - /** - * Write the attribute to a stream. 
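The "[" + elementSignature scheme implemented by JArrayType above mirrors the JVM descriptor grammar for arrays; plain reflection shows the same bracket nesting (Class.getName uses dotted class names):

    println(classOf[Array[Int]].getName)            // "[I"
    println(classOf[Array[Array[String]]].getName)  // "[[Ljava.lang.String;"
    // JArrayType.INT.getSignature() would produce the same "[I" form.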
- */ - public void writeTo(DataOutputStream stream) throws IOException { - int contentsSize = getSize(); - - stream.writeShort(nameIdx); - stream.writeInt(contentsSize); - int streamSizeBefore = stream.size(); - writeContentsTo(stream); - int streamSizeDiff = stream.size() - streamSizeBefore; - - assert contentsSize == streamSizeDiff - : "invalid size for attribute " + getName() - + " given: " + contentsSize - + " actual: " + streamSizeDiff; - } - - // Note: it is not legal to add data to the constant pool during - // the execution of any of the following two methods. - protected abstract int getSize(); - protected abstract void writeContentsTo(DataOutputStream stream) - throws IOException; -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java deleted file mode 100644 index 33cdce2760..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java +++ /dev/null @@ -1,101 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.IOException; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.util.HashMap; - -/** - * Extensible factory to build subclasses of JAttribute based on an - * attribute name. - * - * @author Michel Schinz - * @version 1.0 - */ - -public class JAttributeFactory { - protected FJBGContext context; - protected HashMap/**/ constructors = new HashMap(); - - protected final static Class[] CONSTRUCTOR_ARGS = new Class[] { - FJBGContext.class, - JClass.class, - Object.class, - String.class, - int.class, - DataInputStream.class - }; - - protected final static Constructor defaultDefaultConstructor; - static { - try { - defaultDefaultConstructor = - JOtherAttribute.class.getConstructor(CONSTRUCTOR_ARGS); - } catch (NoSuchMethodException e) { - throw new RuntimeException(e); - } - } - - protected final Constructor defaultConstructor; - - public JAttributeFactory(FJBGContext context, - Constructor defaultConstructor) { - this.context = context; - this.defaultConstructor = defaultConstructor; - registerClass("Code", JCodeAttribute.class); - registerClass("ConstantValue", JConstantValueAttribute.class); - registerClass("EnclosingMethod", JEnclosingMethodAttribute.class); - registerClass("Exceptions", JExceptionsAttribute.class); - registerClass("InnerClasses", JInnerClassesAttribute.class); - registerClass("LineNumberTable", JLineNumberTableAttribute.class); - registerClass("LocalVariableTable", JLocalVariableTableAttribute.class); - registerClass("SourceFile", JSourceFileAttribute.class); - registerClass("StackMapTable", JStackMapTableAttribute.class); - } - - public JAttributeFactory(FJBGContext context) { - this(context, defaultDefaultConstructor); - } - - public void registerClass(String attributeName, - Class clazz) { - if (! 
JAttribute.class.isAssignableFrom(clazz)) - throw new IllegalArgumentException("Not a subclass of JAttribute: " - + clazz); - - try { - Constructor constr = clazz.getConstructor(CONSTRUCTOR_ARGS); - constructors.put(attributeName, constr); - } catch (NoSuchMethodException e) { - throw new IllegalArgumentException("No appropriate constructor for " - + clazz); - } - } - - public JAttribute newInstance(JClass clazz, - Object owner, - DataInputStream stream) - throws IOException { - String name = clazz.getConstantPool().lookupUtf8(stream.readShort()); - Integer size = new Integer(stream.readInt()); - Constructor constr = (Constructor)constructors.get(name); - if (constr == null) constr = defaultConstructor; - - Object[] args = new Object[] { context, clazz, owner, name, size, stream }; - try { - return (JAttribute)constr.newInstance(args); - } catch (InstantiationException e) { - throw new RuntimeException(e); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } catch (InvocationTargetException e) { - throw new RuntimeException(e); - } - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java b/src/fjbg/ch/epfl/lamp/fjbg/JClass.java deleted file mode 100644 index bb1538ec23..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java +++ /dev/null @@ -1,420 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.util.*; -import java.io.*; - -/** - * Representation of a Java class. - * - * @author Michel Schinz, Stephane Micheloud - * @version 1.1 - */ -public class JClass extends JMember { - - /** Magic number for Java class files. */ - public final static int MAGIC_NUMBER = 0xCAFEBABE; - - protected final JAttributeFactory attributeFactory; - - protected final String superclassName; - protected final String[] interfaceNames; - protected final String sourceFileName; - protected final JConstantPool pool; - - public final static String[] NO_INTERFACES = new String[0]; - - protected final LinkedList/**/ methods = new LinkedList(); - protected final LinkedList/**/ fields = new LinkedList(); - - protected JInnerClassesAttribute innerClasses; - - protected int major; - protected int minor; - - /** - * Creates a new class with its access flags, name, superclass name, - * interfaces names and source file name initialized to a given value. - * The constructor also initializes the pool and adds a sourceFileName - * attribute to the class. - * @param accessFlags the int representing the access flags of the class. - * @param name the string representing the name of the class. - * @param superclassName the string representing the name of the class' - * superclass. - * @param interfaceNames the list of strings representing the names of the - * interfaces implemented by the class. - * @param sourceFileName name of the file from which the class was compiled. 
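JAttribute.writeTo above fixes the on-disk layout of every attribute: a u2 name index into the constant pool, a u4 byte length, then the raw contents. The same layout, written standalone with a plain DataOutputStream and no FJBG types:

    import java.io.{ByteArrayOutputStream, DataOutputStream}

    def writeAttribute(nameIndex: Int, contents: Array[Byte]): Array[Byte] = {
      val bytes = new ByteArrayOutputStream()
      val out   = new DataOutputStream(bytes)
      out.writeShort(nameIndex)      // u2 attribute_name_index
      out.writeInt(contents.length)  // u4 attribute_length
      out.write(contents)            // attribute_length bytes of payload
      out.flush()
      bytes.toByteArray
    }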
- */ - protected JClass(FJBGContext context, - int accessFlags, - String name, - String superclassName, - String[] interfaceNames, - String sourceFileName) { - super(context, accessFlags, name); - this.attributeFactory = context.getJAttributeFactory(); - - this.major = context.MAJOR_VERSION; - this.minor = context.MINOR_VERSION; - - this.superclassName = superclassName; - this.interfaceNames = interfaceNames; - this.sourceFileName = sourceFileName; - this.pool = context.JConstantPool(); - if (sourceFileName != null) - addAttribute(context.JSourceFileAttribute(this, sourceFileName)); - } - - protected JClass(FJBGContext context, DataInputStream stream) - throws IOException { - super(context); - this.attributeFactory = context.getJAttributeFactory(); - - int magic = stream.readInt(); - if (magic != MAGIC_NUMBER) - throw new IllegalArgumentException("invalid magic number: "+magic); - - minor = stream.readShort(); - major = stream.readShort(); - pool = context.JConstantPool(stream); - accessFlags = stream.readShort(); - - // This class, super class and interfaces - name = pool.lookupClass(stream.readShort()); - superclassName = pool.lookupClass(stream.readShort()); - interfaceNames = new String[stream.readShort()]; - for (int i = 0; i < interfaceNames.length; ++i) - interfaceNames[i] = pool.lookupClass(stream.readShort()); - - // Fields, methods and attributes - int fieldsCount = stream.readShort(); - for (int i = 0; i < fieldsCount; ++i) - addField(context.JField(this, stream)); - - int methodsCount = stream.readShort(); - for (int i = 0; i < methodsCount; ++i) - addMethod(context.JMethod(this, stream)); - - String fileName = null; - int attributesCount = stream.readShort(); - for (int i = 0; i < attributesCount; ++i) { - JAttribute attr = attributeFactory.newInstance(this, this, stream); - if (attr instanceof JSourceFileAttribute) - fileName = ((JSourceFileAttribute)attr).getFileName(); - else if (attr instanceof JInnerClassesAttribute) - innerClasses = (JInnerClassesAttribute)attr; - addAttribute(attr); - } - sourceFileName = fileName; - } - - /** - * Gets the name of the class' superclass. - * @return The string representing the name of the class' superclass. - */ - public String getSuperclassName() { return superclassName; } - - /** - * Gets the names of the interfaces implemented by the class. - * @return The array containing the string representations of the - * names of the interfaces implemented by the class. - */ - public String[] getInterfaceNames() { return interfaceNames; } - - /** - * Gets the source file name of this class. - * @return The string representing the source file name of this class. - */ - public String getSourceFileName() { return sourceFileName; } - - /** - * Gets the type of the objects that are instances of the class. - * @return The type of the instances of the class. 
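The reading constructor above starts by consuming the fixed classfile preamble: the 0xCAFEBABE magic number, then the minor and major version. A standalone check of that preamble on any .class file (file name illustrative):

    import java.io.{DataInputStream, FileInputStream}

    def classVersion(path: String): (Int, Int) = {
      val in = new DataInputStream(new FileInputStream(path))
      try {
        require(in.readInt() == 0xCAFEBABE, "not a class file")
        val minor = in.readUnsignedShort()
        val major = in.readUnsignedShort()
        (major, minor)
      } finally in.close()
    }

    // classVersion("Empty.class")  // e.g. (50, 0) for a class targeting JVM 1.6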
- */ - public JType getType() { return new JObjectType(name); } - - public JClass getJClass() { return this; } - - public boolean isPublic() { - return (accessFlags & JAccessFlags.ACC_PUBLIC) != 0; - } - - public boolean isPrivate() { - return (accessFlags & JAccessFlags.ACC_PRIVATE) != 0; - } - - public boolean isProtected() { - return (accessFlags & JAccessFlags.ACC_PROTECTED) != 0; - } - - public boolean isStatic() { - return (accessFlags & JAccessFlags.ACC_STATIC) != 0; - } - - public boolean isFinal() { - return (accessFlags & JAccessFlags.ACC_FINAL) != 0; - } - - public boolean isAbstract() { - return (accessFlags & JAccessFlags.ACC_ABSTRACT) != 0; - } - - /** - * Gets the version number of the class. - * @param major The int representing the major part of the version number - * of the class. - * @param minor The int representing the minor part of the version number - * of the class. - */ - public void setVersion(int major, int minor) { - assert !frozen; - this.major = major; - this.minor = minor; - } - - /** - * Gets the major part of the number describing the version of the class. - * @return The int representing the major part of the version number of - * the class. - */ - public int getMajorVersion() { return major; } - - /** - * Gets the minor part of the number describing the version of the class. - * @return The int representing the minor part of the version number of - * the class. - */ - public int getMinorVersion() { return minor; } - - /** - * Gets the constant pool of the class. - * @return The constant pool of the class. - */ - public JConstantPool getConstantPool() { return pool; } - - public JInnerClassesAttribute getInnerClasses() { - if (innerClasses == null) { - innerClasses = new JInnerClassesAttribute(context, this); - addAttribute(innerClasses); - } - return innerClasses; - } - - /** - * Decides if the class is an interface. - * @return The boolean representing if the class is an interface or not. - */ - public boolean isInterface() { - return (accessFlags & JAccessFlags.ACC_INTERFACE) != 0; - } - - public void addField(JField field) { - assert !frozen; - fields.add(field); - } - - /** - * Create and add a new field to the class. - */ - public JField addNewField(int accessFlags, String name, JType type) { - assert !frozen; - JField f = context.JField(this, accessFlags, name, type); - addField(f); - return f; - } - - protected void addMethod(JMethod method) { - assert !frozen; - methods.add(method); - } - - /** - * Create and add a new method to the class. - */ - public JMethod addNewMethod(int accessFlags, - String name, - JType returnType, - JType[] argTypes, - String[] argNames) { - assert !frozen; - JMethod m = context.JMethod(this, - accessFlags, - name, - returnType, - argTypes, - argNames); - addMethod(m); - return m; - } - - /** - * Remove a previously-added method. This makes no attempt at - * minimising the constant pool by removing all constants which - * were used only by this method. - */ - public void removeMethod(JMethod m) { - assert !frozen; - methods.remove(m); - } - - public JField[] getFields() { - return (JField[])fields.toArray(new JField[fields.size()]); - } - - public JMethod[] getMethods() { - return (JMethod[])methods.toArray(new JMethod[methods.size()]); - } - - /** - * Freeze the contents of this class so that it can be written to - * a file. - */ - public void freeze() { - assert !frozen; - frozen = true; - } - - /** - * Writes the contents of the class to a file referenced by its name. 
- * @param fileName The name of the file in which the class must be written. - */ - public void writeTo(String fileName) throws IOException { - writeTo(new File(fileName)); - } - - /** - * Writes the contents of the class to a file. - * @param file The file in which the class must be written. - */ - public void writeTo(File file) throws IOException { - File parent = file.getParentFile(); - if (parent != null && !parent.isDirectory()) - if (!parent.mkdirs()) - throw new IOException("cannot create directory " + parent); - - FileOutputStream fStream = new FileOutputStream(file); - BufferedOutputStream bStream = new BufferedOutputStream(fStream); - DataOutputStream dStream = new DataOutputStream(bStream); - writeTo(dStream); - dStream.close(); - bStream.close(); - fStream.close(); - } - - /** - * Writes the contents of the class to a data stream. - * @param stream The data stream in which the class must be written. - */ - public void writeTo(DataOutputStream stream) throws IOException { - if (!frozen) freeze(); - - int thisClassIdx = pool.addClass(name); - int superClassIdx = pool.addClass(superclassName); - int[] interfacesIdx = new int[interfaceNames.length]; - - for (int i = 0; i < interfaceNames.length; ++i) - interfacesIdx[i] = pool.addClass(interfaceNames[i]); - - pool.freeze(); - - // Magic number. - stream.writeInt(MAGIC_NUMBER); - // Version - stream.writeShort(minor); - stream.writeShort(major); - // Constant pool - pool.writeTo(stream); - // Access flags - stream.writeShort(accessFlags); - - // This class, super class and interfaces - stream.writeShort(thisClassIdx); - stream.writeShort(superClassIdx); - stream.writeShort(interfacesIdx.length); - for (int i = 0; i < interfacesIdx.length; ++i) - stream.writeShort(interfacesIdx[i]); - - // Fields and methods - stream.writeShort(fields.size()); - Iterator fieldsIt = fields.iterator(); - while (fieldsIt.hasNext()) - ((JField)fieldsIt.next()).writeTo(stream); - - stream.writeShort(methods.size()); - Iterator methodsIt = methods.iterator(); - while (methodsIt.hasNext()) - ((JMethod)methodsIt.next()).writeTo(stream); - - // Attributes - JAttribute.writeTo(attributes, stream); - } - - // Follows javap output format for ClassFile. 
- /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(); - if (sourceFileName != null) { - buf.append("Compiled from \""); - buf.append(sourceFileName); - buf.append("\"\n"); - } - buf.append(getMemberName()); - buf.append(toExternalName(getName())); - if (!isInterface()) { - buf.append(" extends "); - buf.append(toExternalName(getSuperclassName())); - } - if (interfaceNames.length > 0) { - if (isInterface()) buf.append(" extends "); - else buf.append(" implements "); - for (int i = 0; i < interfaceNames.length; ++i) { - if (i > 0) buf.append(","); - buf.append(toExternalName(interfaceNames[i])); - } - } - buf.append("\n"); - Iterator attrsIt = attributes.iterator(); - while (attrsIt.hasNext()) { - JAttribute attr = (JAttribute)attrsIt.next(); - buf.append(attr); - } - buf.append(" minor version: "); - buf.append(minor); - buf.append("\n major version: "); - buf.append(major); - buf.append("\n"); - buf.append(pool); - buf.append("\n{\n"); - JField[] jfields = getFields(); - for (int i = 0; i < jfields.length; ++i) { - if (i > 0) buf.append("\n"); - buf.append(jfields[i]); - } - buf.append("\n"); - JMethod[] jmethods = getMethods(); - for (int i = 0; i < jmethods.length; ++i) { - if (i > 0) buf.append("\n"); - buf.append(jmethods[i]); - } - buf.append("\n}\n"); - return buf.toString(); - } - - private String getMemberName() { - StringBuffer buf = new StringBuffer(); - if (isPublic()) buf.append("public "); - else if (isProtected()) buf.append("protected "); - else if (isPrivate()) buf.append("private "); - if (isInterface()) - buf.append("interface "); - else { - if (isAbstract()) buf.append("abstract "); - else if (isFinal()) buf.append("final "); - buf.append("class "); - } - return buf.toString(); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JCode.java deleted file mode 100644 index ab6934ab30..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java +++ /dev/null @@ -1,1308 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.*; - -import ch.epfl.lamp.util.ByteArray; - -/** - * List of instructions, to which Java byte-code instructions can be added. 
- * - * @author Michel Schinz, Thomas Friedli - * @version 1.0 - */ - -public class JCode { - protected boolean frozen = false; - - public static int MAX_CODE_SIZE = 65535; - - protected final FJBGContext context; - protected final JMethod owner; - - protected final ByteArray codeArray; - - protected final LinkedList/**/ exceptionHandlers = - new LinkedList(); - - protected final JConstantPool pool; - - protected final ArrayList/**/ offsetToPatch = - new ArrayList(); - - protected static int UNKNOWN_STACK_SIZE = Integer.MIN_VALUE; - protected int maxStackSize = UNKNOWN_STACK_SIZE; - protected int[] stackProduction = null; - protected int[] stackSizes; - - protected JCode(FJBGContext context, JClass clazz, JMethod owner) { - this.context = context; - this.pool = clazz.getConstantPool(); - this.owner = owner; - this.codeArray = new ByteArray(); - } - - protected JCode(FJBGContext context, - JClass clazz, - JMethod owner, - DataInputStream stream) - throws IOException { - this.context = context; - this.pool = clazz.getConstantPool(); - this.owner = owner; - owner.setCode(this); - int size = stream.readInt(); - if (size > MAX_CODE_SIZE) // section 4.10 - throw new Error("code size must be less than " + MAX_CODE_SIZE + ": " + size); - this.codeArray = new ByteArray(stream, size); - } - - /** - * Gets the program counter, which is defined as the address of the - * next instruction. - * @return The int representing the value of the program counter - */ - public int getPC() { - return codeArray.getSize(); - } - - /** - * Gets the size of the code - * @return The number of bytes of the code - */ - public int getSize() { - return codeArray.getSize(); - } - - /** - * Gets the method to which the code belongs - * @return The method to which the code belongs - */ - public JMethod getOwner() { - return owner; - } - - // Stack size - public int getMaxStackSize() { - if (maxStackSize == UNKNOWN_STACK_SIZE) - maxStackSize = computeMaxStackSize(); - return maxStackSize; - } - - // Freezing - ////////////////////////////////////////////////////////////////////// - - public static class CodeSizeTooBigException extends OffsetTooBigException { - public int codeSize; - - public CodeSizeTooBigException(int size) { - codeSize = size; - } - } - - public void freeze() throws OffsetTooBigException { - assert !frozen; - - if (getSize() > MAX_CODE_SIZE) throw new CodeSizeTooBigException(getSize()); - - patchAllOffset(); - codeArray.freeze(); - frozen = true; - } - - // Attributes - ////////////////////////////////////////////////////////////////////// - - protected final LinkedList/**/ attributes = new LinkedList(); - - public void addAttribute(JAttribute attr) { - attributes.add(attr); - } - - public List/**/ getAttributes() { - return attributes; - } - - // Emitting code - ////////////////////////////////////////////////////////////////////// - - public void emit(JOpcode opcode) { - setStackProduction(getPC(), opcode); - codeArray.addU1(opcode.code); - } - - public void emitNOP() { emit(JOpcode.NOP); } - - // Constant loading. 
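The constant-loading emitters listed next each map to one fixed opcode, so callers normally choose among ICONST_*, BIPUSH, SIPUSH and LDC by value range. A small illustrative helper (not part of FJBG) under that assumption:

    // Illustrative helper, not part of FJBG: choose the cheapest encoding for an int constant.
    static void pushInt(JCode code, int v) {
        if (v >= -1 && v <= 5) {
            switch (v) {
                case -1: code.emitICONST_M1(); break;
                case 0:  code.emitICONST_0();  break;
                case 1:  code.emitICONST_1();  break;
                case 2:  code.emitICONST_2();  break;
                case 3:  code.emitICONST_3();  break;
                case 4:  code.emitICONST_4();  break;
                default: code.emitICONST_5();  break;
            }
        } else if (v >= Byte.MIN_VALUE && v <= Byte.MAX_VALUE) {
            code.emitBIPUSH(v);          // one-byte immediate operand
        } else if (v >= Short.MIN_VALUE && v <= Short.MAX_VALUE) {
            code.emitSIPUSH(v);          // two-byte immediate operand
        } else {
            code.emitLDC(v);             // constant pool entry; emitLDC_W covers indices above 255
        }
    }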
- public void emitACONST_NULL() { emit(JOpcode.ACONST_NULL); } - public void emitICONST_M1() { emit(JOpcode.ICONST_M1); } - public void emitICONST_0() { emit(JOpcode.ICONST_0); } - public void emitICONST_1() { emit(JOpcode.ICONST_1); } - public void emitICONST_2() { emit(JOpcode.ICONST_2); } - public void emitICONST_3() { emit(JOpcode.ICONST_3); } - public void emitICONST_4() { emit(JOpcode.ICONST_4); } - public void emitICONST_5() { emit(JOpcode.ICONST_5); } - public void emitLCONST_0() { emit(JOpcode.LCONST_0); } - public void emitLCONST_1() { emit(JOpcode.LCONST_1); } - public void emitFCONST_0() { emit(JOpcode.FCONST_0); } - public void emitFCONST_1() { emit(JOpcode.FCONST_1); } - public void emitFCONST_2() { emit(JOpcode.FCONST_2); } - public void emitDCONST_0() { emit(JOpcode.DCONST_0); } - public void emitDCONST_1() { emit(JOpcode.DCONST_1); } - - public void emitBIPUSH(int b) { emitU1(JOpcode.BIPUSH, b); } - public void emitSIPUSH(int s) { emitU2(JOpcode.SIPUSH, s); } - public void emitLDC(int value) { - emitU1(JOpcode.LDC, pool.addInteger(value)); - } - public void emitLDC(float value) { - emitU1(JOpcode.LDC, pool.addFloat(value)); - } - public void emitLDC(String value) { - emitU1(JOpcode.LDC, pool.addString(value)); - } - public void emitLDC_W(int value) { - emitU1(JOpcode.LDC_W, pool.addInteger(value)); - } - public void emitLDC_W(float value) { - emitU1(JOpcode.LDC_W, pool.addFloat(value)); - } - public void emitLDC_W(String value) { - emitU1(JOpcode.LDC_W, pool.addString(value)); - } - public void emitLDC2_W(long value) { - emitU2(JOpcode.LDC2_W, pool.addLong(value)); - } - public void emitLDC2_W(double value) { - emitU2(JOpcode.LDC2_W, pool.addDouble(value)); - } - - // Loading variables. - public void emitILOAD(int index) { emitU1(JOpcode.ILOAD, index); } - public void emitLLOAD(int index) { emitU1(JOpcode.LLOAD, index); } - public void emitFLOAD(int index) { emitU1(JOpcode.FLOAD, index); } - public void emitDLOAD(int index) { emitU1(JOpcode.DLOAD, index); } - public void emitALOAD(int index) { emitU1(JOpcode.ALOAD, index); } - - public void emitILOAD_0() { emit(JOpcode.ILOAD_0); } - public void emitILOAD_1() { emit(JOpcode.ILOAD_1); } - public void emitILOAD_2() { emit(JOpcode.ILOAD_2); } - public void emitILOAD_3() { emit(JOpcode.ILOAD_3); } - public void emitLLOAD_0() { emit(JOpcode.LLOAD_0); } - public void emitLLOAD_1() { emit(JOpcode.LLOAD_1); } - public void emitLLOAD_2() { emit(JOpcode.LLOAD_2); } - public void emitLLOAD_3() { emit(JOpcode.LLOAD_3); } - public void emitFLOAD_0() { emit(JOpcode.FLOAD_0); } - public void emitFLOAD_1() { emit(JOpcode.FLOAD_1); } - public void emitFLOAD_2() { emit(JOpcode.FLOAD_2); } - public void emitFLOAD_3() { emit(JOpcode.FLOAD_3); } - public void emitDLOAD_0() { emit(JOpcode.DLOAD_0); } - public void emitDLOAD_1() { emit(JOpcode.DLOAD_1); } - public void emitDLOAD_2() { emit(JOpcode.DLOAD_2); } - public void emitDLOAD_3() { emit(JOpcode.DLOAD_3); } - public void emitALOAD_0() { emit(JOpcode.ALOAD_0); } - public void emitALOAD_1() { emit(JOpcode.ALOAD_1); } - public void emitALOAD_2() { emit(JOpcode.ALOAD_2); } - public void emitALOAD_3() { emit(JOpcode.ALOAD_3); } - - public void emitIALOAD() { emit(JOpcode.IALOAD); } - public void emitLALOAD() { emit(JOpcode.LALOAD); } - public void emitFALOAD() { emit(JOpcode.FALOAD); } - public void emitDALOAD() { emit(JOpcode.DALOAD); } - public void emitAALOAD() { emit(JOpcode.AALOAD); } - public void emitBALOAD() { emit(JOpcode.BALOAD); } - public void emitCALOAD() { emit(JOpcode.CALOAD); } - 
public void emitSALOAD() { emit(JOpcode.SALOAD); } - - // Storing variables. - public void emitISTORE(int index) { emitU1(JOpcode.ISTORE, index); } - public void emitLSTORE(int index) { emitU1(JOpcode.LSTORE, index); } - public void emitFSTORE(int index) { emitU1(JOpcode.FSTORE, index); } - public void emitDSTORE(int index) { emitU1(JOpcode.DSTORE, index); } - public void emitASTORE(int index) { emitU1(JOpcode.ASTORE, index); } - - public void emitISTORE_0() { emit(JOpcode.ISTORE_0); } - public void emitISTORE_1() { emit(JOpcode.ISTORE_1); } - public void emitISTORE_2() { emit(JOpcode.ISTORE_2); } - public void emitISTORE_3() { emit(JOpcode.ISTORE_3); } - public void emitLSTORE_0() { emit(JOpcode.LSTORE_0); } - public void emitLSTORE_1() { emit(JOpcode.LSTORE_1); } - public void emitLSTORE_2() { emit(JOpcode.LSTORE_2); } - public void emitLSTORE_3() { emit(JOpcode.LSTORE_3); } - public void emitFSTORE_0() { emit(JOpcode.FSTORE_0); } - public void emitFSTORE_1() { emit(JOpcode.FSTORE_1); } - public void emitFSTORE_2() { emit(JOpcode.FSTORE_2); } - public void emitFSTORE_3() { emit(JOpcode.FSTORE_3); } - public void emitDSTORE_0() { emit(JOpcode.DSTORE_0); } - public void emitDSTORE_1() { emit(JOpcode.DSTORE_1); } - public void emitDSTORE_2() { emit(JOpcode.DSTORE_2); } - public void emitDSTORE_3() { emit(JOpcode.DSTORE_3); } - public void emitASTORE_0() { emit(JOpcode.ASTORE_0); } - public void emitASTORE_1() { emit(JOpcode.ASTORE_1); } - public void emitASTORE_2() { emit(JOpcode.ASTORE_2); } - public void emitASTORE_3() { emit(JOpcode.ASTORE_3); } - - public void emitIASTORE() { emit(JOpcode.IASTORE); } - public void emitLASTORE() { emit(JOpcode.LASTORE); } - public void emitFASTORE() { emit(JOpcode.FASTORE); } - public void emitDASTORE() { emit(JOpcode.DASTORE); } - public void emitAASTORE() { emit(JOpcode.AASTORE); } - public void emitBASTORE() { emit(JOpcode.BASTORE); } - public void emitCASTORE() { emit(JOpcode.CASTORE); } - public void emitSASTORE() { emit(JOpcode.SASTORE); } - - // Stack manipulation. - public void emitPOP() { emit(JOpcode.POP); } - public void emitPOP2() { emit(JOpcode.POP2); } - public void emitDUP() { emit(JOpcode.DUP); } - public void emitDUP_X1() { emit(JOpcode.DUP_X1); } - public void emitDUP_X2() { emit(JOpcode.DUP_X2); } - public void emitDUP2() { emit(JOpcode.DUP2); } - public void emitDUP2_X1() { emit(JOpcode.DUP2_X1); } - public void emitDUP2_X2() { emit(JOpcode.DUP2_X2); } - public void emitSWAP() { emit(JOpcode.SWAP); } - - // Artithmetic and logic operations. 
- public void emitIADD() { emit(JOpcode.IADD); } - public void emitLADD() { emit(JOpcode.LADD); } - public void emitFADD() { emit(JOpcode.FADD); } - public void emitDADD() { emit(JOpcode.DADD); } - - public void emitISUB() { emit(JOpcode.ISUB); } - public void emitLSUB() { emit(JOpcode.LSUB); } - public void emitFSUB() { emit(JOpcode.FSUB); } - public void emitDSUB() { emit(JOpcode.DSUB); } - - public void emitIMUL() { emit(JOpcode.IMUL); } - public void emitLMUL() { emit(JOpcode.LMUL); } - public void emitFMUL() { emit(JOpcode.FMUL); } - public void emitDMUL() { emit(JOpcode.DMUL); } - - public void emitIDIV() { emit(JOpcode.IDIV); } - public void emitLDIV() { emit(JOpcode.LDIV); } - public void emitFDIV() { emit(JOpcode.FDIV); } - public void emitDDIV() { emit(JOpcode.DDIV); } - - public void emitIREM() { emit(JOpcode.IREM); } - public void emitLREM() { emit(JOpcode.LREM); } - public void emitFREM() { emit(JOpcode.FREM); } - public void emitDREM() { emit(JOpcode.DREM); } - - public void emitINEG() { emit(JOpcode.INEG); } - public void emitLNEG() { emit(JOpcode.LNEG); } - public void emitFNEG() { emit(JOpcode.FNEG); } - public void emitDNEG() { emit(JOpcode.DNEG); } - - public void emitISHL() { emit(JOpcode.ISHL); } - public void emitLSHL() { emit(JOpcode.LSHL); } - - public void emitISHR() { emit(JOpcode.ISHR); } - public void emitLSHR() { emit(JOpcode.LSHR); } - - public void emitIUSHR() { emit(JOpcode.IUSHR); } - public void emitLUSHR() { emit(JOpcode.LUSHR); } - - public void emitIAND() { emit(JOpcode.IAND); } - public void emitLAND() { emit(JOpcode.LAND); } - - public void emitIOR() { emit(JOpcode.IOR); } - public void emitLOR() { emit(JOpcode.LOR); } - - public void emitIXOR() { emit(JOpcode.IXOR); } - public void emitLXOR() { emit(JOpcode.LXOR); } - - public void emitIINC(int index, int increment) { - emitU1U1(JOpcode.IINC, index, increment); - } - - // (Numeric) type conversions. - public void emitI2L() { emit(JOpcode.I2L); } - public void emitI2F() { emit(JOpcode.I2F); } - public void emitI2D() { emit(JOpcode.I2D); } - public void emitL2I() { emit(JOpcode.L2I); } - public void emitL2F() { emit(JOpcode.L2F); } - public void emitL2D() { emit(JOpcode.L2D); } - public void emitF2I() { emit(JOpcode.F2I); } - public void emitF2L() { emit(JOpcode.F2L); } - public void emitF2D() { emit(JOpcode.F2D); } - public void emitD2I() { emit(JOpcode.D2I); } - public void emitD2L() { emit(JOpcode.D2L); } - public void emitD2F() { emit(JOpcode.D2F); } - public void emitI2B() { emit(JOpcode.I2B); } - public void emitI2C() { emit(JOpcode.I2C); } - public void emitI2S() { emit(JOpcode.I2S); } - - // Comparisons and tests. 
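Tying the load, arithmetic and return emitters above together, a minimal sketch of a complete method body (placed here, before the comparison and branch emitters that follow):

    // Sketch: body of `static int plusOne(int x)`, emitted with the JCode emitters above.
    static void emitPlusOne(JCode code) {
        code.emitILOAD_0();     // push argument x (local slot 0 of a static method)
        code.emitICONST_1();    // push the constant 1
        code.emitIADD();        // x + 1
        code.emitIRETURN();     // return the int on top of the stack
        // getMaxStackSize() reports 2 for this body
    }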
- public void emitLCMP() { emit(JOpcode.LCMP); } - public void emitFCMPL() { emit(JOpcode.FCMPL); } - public void emitFCMPG() { emit(JOpcode.FCMPG); } - public void emitDCMPL() { emit(JOpcode.DCMPL); } - public void emitDCMPG() { emit(JOpcode.DCMPG); } - - protected void emitGenericIF(JOpcode opcode, Label label) - throws OffsetTooBigException { - emitU2(opcode, label.getOffset16(getPC() + 1, getPC())); - } - - public void emitIFEQ(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IFEQ, label); - } - public void emitIFEQ(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IFEQ, targetPC - getPC()); - } - public void emitIFEQ() { - emitU2(JOpcode.IFEQ, 0); - } - - public void emitIFNE(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IFNE, label); - } - public void emitIFNE(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IFNE, targetPC - getPC()); - } - public void emitIFNE() { - emitU2(JOpcode.IFNE, 0); - } - - public void emitIFLT(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IFLT, label); - } - public void emitIFLT(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IFLT, targetPC - getPC()); - } - public void emitIFLT() { - emitU2(JOpcode.IFLT, 0); - } - - public void emitIFGE(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IFGE, label); - } - public void emitIFGE(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IFGE, targetPC - getPC()); - } - public void emitIFGE() { - emitU2(JOpcode.IFGE, 0); - } - - public void emitIFGT(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IFGT, label); - } - public void emitIFGT(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IFGT, targetPC - getPC()); - } - public void emitIFGT() { - emitU2(JOpcode.IFGT, 0); - } - - public void emitIFLE(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IFLE, label); - } - public void emitIFLE(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IFLE, targetPC - getPC()); - } - public void emitIFLE() { - emitU2(JOpcode.IFLE, 0); - } - - public void emitIF_ICMPEQ(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IF_ICMPEQ, label); - } - public void emitIF_ICMPEQ(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IF_ICMPEQ, targetPC - getPC()); - } - public void emitIF_ICMPEQ() { - emitU2(JOpcode.IF_ICMPEQ, 0); - } - - public void emitIF_ICMPNE(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IF_ICMPNE, label); - } - public void emitIF_ICMPNE(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IF_ICMPNE, targetPC - getPC()); - } - public void emitIF_ICMPNE() { - emitU2(JOpcode.IF_ICMPNE, 0); - } - - public void emitIF_ICMPLT(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IF_ICMPLT, label); - } - public void emitIF_ICMPLT(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IF_ICMPLT, targetPC - getPC()); - } - public void emitIF_ICMPLT() { - emitU2(JOpcode.IF_ICMPLT, 0); - } - - public void emitIF_ICMPGE(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IF_ICMPGE, label); - } - public void emitIF_ICMPGE(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IF_ICMPGE, targetPC - getPC()); - } - public void emitIF_ICMPGE() { - emitU2(JOpcode.IF_ICMPGE, 0); - } - - public void emitIF_ICMPGT(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IF_ICMPGT, label); - } - public void emitIF_ICMPGT(int targetPC) throws 
OffsetTooBigException { - emitU2(JOpcode.IF_ICMPGT, targetPC - getPC()); - } - public void emitIF_ICMPGT() { - emitU2(JOpcode.IF_ICMPGT, 0); - } - - public void emitIF_ICMPLE(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IF_ICMPLE, label); - } - public void emitIF_ICMPLE(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IF_ICMPLE, targetPC - getPC()); - } - public void emitIF_ICMPLE() { - emitU2(JOpcode.IF_ICMPLE, 0); - } - - public void emitIF_ACMPEQ(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IF_ACMPEQ, label); - } - public void emitIF_ACMPEQ(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IF_ACMPEQ, targetPC - getPC()); - } - public void emitIF_ACMPEQ() { - emitU2(JOpcode.IF_ACMPEQ, 0); - } - - public void emitIF_ACMPNE(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IF_ACMPNE, label); - } - public void emitIF_ACMPNE(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IF_ACMPNE, targetPC - getPC()); - } - public void emitIF_ACMPNE() { - emitU2(JOpcode.IF_ACMPNE, 0); - } - - public void emitIFNULL(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IFNULL, label); - } - public void emitIFNULL(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IFNULL, targetPC - getPC()); - } - public void emitIFNULL() { - emitU2(JOpcode.IFNULL, 0); - } - - public void emitIFNONNULL(Label label) throws OffsetTooBigException { - emitGenericIF(JOpcode.IFNONNULL, label); - } - public void emitIFNONNULL(int targetPC) throws OffsetTooBigException { - emitU2(JOpcode.IFNONNULL, targetPC - getPC()); - } - public void emitIFNONNULL() { - emitU2(JOpcode.IFNONNULL, 0); - } - - public void emitGOTO(Label label) throws OffsetTooBigException { - emitU2(JOpcode.GOTO, label.getOffset16(getPC() + 1, getPC())); - } - public void emitGOTO(int targetPC) throws OffsetTooBigException { - int offset = targetPC - getPC(); - checkOffset16(offset); - emitU2(JOpcode.GOTO, offset); - } - public void emitGOTO() { - emitU2(JOpcode.GOTO, 0); - } - - public void emitGOTO_W(Label label) { - emitU4(JOpcode.GOTO_W, label.getOffset32(getPC() + 1, getPC())); - } - public void emitGOTO_W(int targetPC) { - emitU4(JOpcode.GOTO_W, targetPC - getPC()); - } - public void emitGOTO_W() { - emitU4(JOpcode.GOTO_W, 0); - } - - public void emitJSR(Label label) throws OffsetTooBigException { - emitU2(JOpcode.JSR, label.getOffset16(getPC() + 1, getPC())); - } - public void emitJSR(int targetPC) { - emitU2(JOpcode.JSR, targetPC - getPC()); - } - public void emitJSR() { - emitU2(JOpcode.JSR, 0); - } - - public void emitJSR_W(Label label) { - emitU4(JOpcode.JSR_W, label.getOffset32(getPC() + 1, getPC())); - } - public void emitJSR_W(int targetPC) { - emitU4(JOpcode.JSR_W, targetPC - getPC()); - } - public void emitJSR_W() { - emitU4(JOpcode.JSR_W, 0); - } - - /* - public void emitRET(Label label) throws OffsetTooBigException { - emitU2(JOpcode.RET, label.getOffset16(getPC() + 1, getPC())); - } - public void emitRET(int targetPC) { - emitU1(JOpcode.RET, targetPC); - } - public void emitRET() { - emitU1(JOpcode.RET, 0); - } - */ - - public void emitRET(int index) { - emitU1(JOpcode.RET, index); - } - - public void emitRET(JLocalVariable var) { - emitRET(var.getIndex()); - } - - public void emitTABLESWITCH(int[] keys, - Label[] branches, - Label defaultBranch) { - assert keys.length == branches.length; - - int low = keys[0], high = keys[keys.length - 1]; - int instrPC = getPC(); - - setStackProduction(instrPC, JOpcode.TABLESWITCH); - 
codeArray.addU1(JOpcode.cTABLESWITCH); - while (getPC() % 4 != 0) codeArray.addU1(0); - - codeArray.addU4(defaultBranch.getOffset32(getPC(), instrPC)); - codeArray.addU4(low); - codeArray.addU4(high); - for (int i = 0; i < branches.length; i++) { - assert keys[i] == low + i; - codeArray.addU4(branches[i].getOffset32(getPC(), instrPC)); - } - } - - public void emitLOOKUPSWITCH(int[] keys, - Label[] branches, - Label defaultBranch) { - assert keys.length == branches.length; - - int instrPC = getPC(); - setStackProduction(getPC(), JOpcode.LOOKUPSWITCH); - codeArray.addU1(JOpcode.cLOOKUPSWITCH); - while (getPC() % 4 != 0) codeArray.addU1(0); - - codeArray.addU4(defaultBranch.getOffset32(getPC(), instrPC)); - codeArray.addU4(branches.length); - for (int i = 0; i < branches.length; i++) { - codeArray.addU4(keys[i]); - codeArray.addU4(branches[i].getOffset32(getPC(), instrPC)); - } - } - - public void emitIRETURN() { emit(JOpcode.IRETURN); } - public void emitLRETURN() { emit(JOpcode.LRETURN); } - public void emitFRETURN() { emit(JOpcode.FRETURN); } - public void emitDRETURN() { emit(JOpcode.DRETURN); } - public void emitARETURN() { emit(JOpcode.ARETURN); } - public void emitRETURN() { emit(JOpcode.RETURN); } - - // Field access - public void emitGETSTATIC(String className, String name, JType type) { - setStackProduction(getPC(), type.getSize()); - int index = pool.addFieldRef(className, name, type.getSignature()); - emitU2(JOpcode.GETSTATIC, index); - } - public void emitPUTSTATIC(String className, String name, JType type) { - setStackProduction(getPC(), -type.getSize()); - int index = pool.addFieldRef(className, name, type.getSignature()); - emitU2(JOpcode.PUTSTATIC, index); - } - public void emitGETFIELD(String className, String name, JType type) { - setStackProduction(getPC(), type.getSize() - 1); - int index = pool.addFieldRef(className, name, type.getSignature()); - emitU2(JOpcode.GETFIELD, index); - } - public void emitPUTFIELD(String className, String name, JType type) { - setStackProduction(getPC(), -(type.getSize() + 1)); - int index = pool.addFieldRef(className, name, type.getSignature()); - emitU2(JOpcode.PUTFIELD, index); - } - - // Method invocation - public void emitINVOKEVIRTUAL(String className, - String name, - JMethodType type) { - setStackProduction(getPC(), type.getProducedStack() - 1); - int index = - pool.addClassMethodRef(className, name, type.getSignature()); - emitU2(JOpcode.INVOKEVIRTUAL, index); - } - public void emitINVOKESPECIAL(String className, - String name, - JMethodType type) { - setStackProduction(getPC(), type.getProducedStack() - 1); - int index = - pool.addClassMethodRef(className, name, type.getSignature()); - emitU2(JOpcode.INVOKESPECIAL, index); - } - public void emitINVOKESTATIC(String className, - String name, - JMethodType type) { - setStackProduction(getPC(), type.getProducedStack()); - int index = - pool.addClassMethodRef(className, name, type.getSignature()); - emitU2(JOpcode.INVOKESTATIC, index); - } - public void emitINVOKEINTERFACE(String className, - String name, - JMethodType type) { - setStackProduction(getPC(), type.getProducedStack() - 1); - int index = - pool.addInterfaceMethodRef(className, name, type.getSignature()); - emitU2U1U1(JOpcode.INVOKEINTERFACE, index, type.getArgsSize() + 1, 0); - } - - // Object creation - public void emitNEW(String className) { - emitU2(JOpcode.NEW, pool.addClass(className)); - } - public void emitNEWARRAY(JType elemType) { - emitU1(JOpcode.NEWARRAY, elemType.getTag()); - } - public void 
emitANEWARRAY(JReferenceType elemType) { - emitU2(JOpcode.ANEWARRAY, pool.addDescriptor(elemType)); - } - public void emitMULTIANEWARRAY(JReferenceType elemType, int dimensions) { - setStackProduction(getPC(), -dimensions + 1); - emitU2U1(JOpcode.MULTIANEWARRAY, - pool.addDescriptor(elemType), - dimensions); - } - public void emitARRAYLENGTH() { emit(JOpcode.ARRAYLENGTH); } - - // Exception throwing - public void emitATHROW() { emit(JOpcode.ATHROW); } - - // Dynamic typing - public void emitCHECKCAST(JReferenceType type) { - emitU2(JOpcode.CHECKCAST, pool.addDescriptor(type)); - } - public void emitINSTANCEOF(JReferenceType type) { - emitU2(JOpcode.INSTANCEOF, pool.addDescriptor(type)); - } - - // Monitors - public void emitMONITORENTER() { emit(JOpcode.MONITORENTER); } - public void emitMONITOREXIT() { emit(JOpcode.MONITOREXIT); } - - // Wide variants - // FIXME setStackProd. will here raise an exception - public void emitWIDE(JOpcode opcode, int index) { - assert (opcode.code == JOpcode.cILOAD) - || (opcode.code == JOpcode.cLLOAD) - || (opcode.code == JOpcode.cFLOAD) - || (opcode.code == JOpcode.cDLOAD) - || (opcode.code == JOpcode.cALOAD) - || (opcode.code == JOpcode.cISTORE) - || (opcode.code == JOpcode.cLSTORE) - || (opcode.code == JOpcode.cFSTORE) - || (opcode.code == JOpcode.cDSTORE) - || (opcode.code == JOpcode.cASTORE) - || (opcode.code == JOpcode.cRET) - : "invalide opcode for WIDE: " + opcode; - - setStackProduction(getPC(), opcode); - codeArray.addU1(JOpcode.WIDE.code); - codeArray.addU1(opcode.code); - codeArray.addU2(index); - } - public void emitWIDE(JOpcode opcode, int index, int constant) { - assert opcode.code == JOpcode.cIINC - : "invalid opcode for WIDE: " + opcode; - - setStackProduction(getPC(), opcode); - codeArray.addU1(JOpcode.cWIDE); - codeArray.addU1(opcode.code); - codeArray.addU2(index); - codeArray.addU2(constant); - } - - protected void emitU1(JOpcode opcode, int i1) { - setStackProduction(getPC(), opcode); - codeArray.addU1(opcode.code); - codeArray.addU1(i1); - } - - protected void emitU1U1(JOpcode opcode, int i1, int i2) { - setStackProduction(getPC(), opcode); - codeArray.addU1(opcode.code); - codeArray.addU1(i1); - codeArray.addU1(i2); - } - - protected void emitU2(JOpcode opcode, int i1) { - setStackProduction(getPC(), opcode); - codeArray.addU1(opcode.code); - codeArray.addU2(i1); - } - - protected void emitU2U1(JOpcode opcode, int i1, int i2) { - setStackProduction(getPC(), opcode); - codeArray.addU1(opcode.code); - codeArray.addU2(i1); - codeArray.addU1(i2); - } - - protected void emitU2U1U1(JOpcode opcode, int i1, int i2, int i3) { - setStackProduction(getPC(), opcode); - codeArray.addU1(opcode.code); - codeArray.addU2(i1); - codeArray.addU1(i2); - codeArray.addU1(i3); - } - - protected void emitU4(JOpcode opcode, int i1) { - setStackProduction(getPC(), opcode); - codeArray.addU1(opcode.code); - codeArray.addU4(i1); - } - - protected int getU1(int sourcePos) { - return codeArray.getU1(sourcePos); - } - - protected int getU2(int sourcePos) { - return codeArray.getU2(sourcePos); - } - - protected int getU4(int sourcePos) { - return codeArray.getU4(sourcePos); - } - - protected int getS1(int sourcePos) { - return codeArray.getS1(sourcePos); - } - - protected int getS2(int sourcePos) { - return codeArray.getS2(sourcePos); - } - - protected int getS4(int sourcePos) { - return codeArray.getS4(sourcePos); - } - - // Stack size computation - ////////////////////////////////////////////////////////////////////// - - protected int getStackProduction(int pc) { 
- if (stackProduction == null || pc >= stackProduction.length) - return UNKNOWN_STACK_SIZE; - else - return stackProduction[pc]; - } - - protected void setStackProduction(int pc, int production) { - if (stackProduction == null) { - stackProduction = new int[256]; - Arrays.fill(stackProduction, UNKNOWN_STACK_SIZE); - } else { - while (pc >= stackProduction.length) { - int[] newStackProduction = new int[stackProduction.length * 2]; - System.arraycopy(stackProduction, 0, - newStackProduction, 0, - stackProduction.length); - Arrays.fill(newStackProduction, - stackProduction.length, - newStackProduction.length, - UNKNOWN_STACK_SIZE); - stackProduction = newStackProduction; - } - } - stackProduction[pc] = production; - } - - protected void setStackProduction(int pc, JOpcode opcode) { - // TODO we should instead check whether the opcode has known - // stack consumption/production. - if (getStackProduction(pc) == UNKNOWN_STACK_SIZE) -// && opcode.hasKnownProducedDataSize() -// && opcode.hasKnownConsumedDataSize()) - setStackProduction(pc, - opcode.getProducedDataSize() - - opcode.getConsumedDataSize()); - } - - protected int computeMaxStackSize() { - if (stackSizes == null) { - stackSizes = new int[getSize()]; - Arrays.fill(stackSizes, UNKNOWN_STACK_SIZE); - stackSizes[0] = 0; - } - int size = computeMaxStackSize(0, 0, 0); - - // compute stack sizes for exception handlers too - ExceptionHandler exh = null; - for (Iterator it = exceptionHandlers.iterator(); - it.hasNext();) { - exh = (ExceptionHandler)it.next(); - int exhSize = computeMaxStackSize(exh.getHandlerPC(), 1, 1); - if (size < exhSize) - size = exhSize; - } - - return size; - } - - protected int computeMaxStackSize(int pc, int stackSize, int maxStackSize) { - JCodeIterator iterator = new JCodeIterator(this, pc); - for (;;) { - int successors = iterator.getSuccessorCount(); - if (successors == 0) - return maxStackSize; - else { - assert stackProduction[iterator.getPC()] != UNKNOWN_STACK_SIZE - : "unknown stack production, pc=" + iterator.getPC() - + " in method " + owner.getName(); - stackSize += stackProduction[iterator.getPC()]; - if (stackSize > maxStackSize) - maxStackSize = stackSize; - int nextPC = -1; - for (int i = 0; i < successors; ++i) { - int succPC = iterator.getSuccessorPC(i); - assert succPC >= 0 && succPC < stackSizes.length - : iterator.getPC() + ": invalid pc: " + succPC - + " op: " + iterator.getOpcode(); - if (stackSizes[succPC] == UNKNOWN_STACK_SIZE) { - stackSizes[succPC] = stackSize; - if (nextPC == -1) - nextPC = succPC; - else - maxStackSize = computeMaxStackSize(succPC, - stackSize, - maxStackSize); - } - } - if (nextPC == -1) - return maxStackSize; - else - iterator.moveTo(nextPC); - } - } - } - - // Labels - ////////////////////////////////////////////////////////////////////// - - public static class OffsetTooBigException extends Exception { - public OffsetTooBigException() { super(); } - public OffsetTooBigException(String message) { super(message); } - } - - protected void checkOffset16(int offset) throws OffsetTooBigException { - if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE) - throw new OffsetTooBigException("offset too big to fit" - + " in 16 bits: " + offset); - } - - public class Label { - protected boolean anchored = false; - protected int targetPC = 0; - - public void anchorToNext() { - assert !anchored; - this.targetPC = getPC(); - anchored = true; - } - - public int getAnchor() { - assert anchored; - return targetPC; - } - - protected int getOffset16(int pc, int instrPC) - throws 
OffsetTooBigException { - if (anchored) { - int offset = targetPC - instrPC; - checkOffset16(offset); - return offset; - } else { - recordOffsetToPatch(pc, 16, instrPC, this); - return 0; - } - } - - protected int getOffset32(int pc, int instrPC) { - if (anchored) - return targetPC - instrPC; - else { - recordOffsetToPatch(pc, 32, instrPC, this); - return 0; - } - } - } - - public Label newLabel() { - return new Label(); - } - - public Label[] newLabels(int count) { - Label[] labels = new Label[count]; - for (int i = 0; i < labels.length; ++i) - labels[i] = newLabel(); - return labels; - } - - protected static class OffsetToPatch { - public final int pc; - public final int size; - public final int instrPC; - public final Label label; - - public OffsetToPatch(int pc, int size, int instrPC, Label label) { - this.pc = pc; - this.size = size; - this.instrPC = instrPC; - this.label = label; - } - } - - protected void recordOffsetToPatch(int offsetPC, - int size, - int instrPC, - Label label) { - offsetToPatch.add(new OffsetToPatch(offsetPC, size, instrPC, label)); - } - - protected void patchAllOffset() throws OffsetTooBigException { - Iterator offsetIt = offsetToPatch.iterator(); - while (offsetIt.hasNext()) { - OffsetToPatch offset = (OffsetToPatch)offsetIt.next(); - int offsetValue = offset.label.getAnchor() - offset.instrPC; - if (offset.size == 16) { - checkOffset16(offsetValue); - codeArray.putU2(offset.pc, offsetValue); - } else - codeArray.putU4(offset.pc, offsetValue); - } - } - - // Exception handling - ////////////////////////////////////////////////////////////////////// - - public class ExceptionHandler { - protected int startPC, endPC, handlerPC; - protected final String catchType; - protected final int catchTypeIndex; - - public void setStartPC(int pc) { - this.startPC = pc; - } - - public int getStartPC() { - return this.startPC; - } - - public void setEndPC(int pc) { - this.endPC = pc; - } - - public int getEndPC() { - return this.endPC; - } - - public void setHandlerPC(int pc) { - this.handlerPC = pc; - } - - public int getHandlerPC() { - return this.handlerPC; - } - - public ExceptionHandler(String catchType) { - this(0, 0, 0, catchType); - } - - public ExceptionHandler(int startPC, - int endPC, - int handlerPC, - String catchType) { - this.startPC = startPC; - this.endPC = endPC; - this.handlerPC = handlerPC; - this.catchType = catchType; - this.catchTypeIndex = (catchType == null - ? 0 - : pool.addClass(catchType)); - } - - public ExceptionHandler(DataInputStream stream) throws IOException { - this.startPC = stream.readShort(); - this.endPC = stream.readShort(); - this.handlerPC = stream.readShort(); - this.catchTypeIndex = stream.readShort(); - this.catchType = (catchTypeIndex == 0 - ? null - : pool.lookupClass(catchTypeIndex)); - } - - public void writeTo(DataOutputStream stream) throws IOException { - stream.writeShort(startPC); - stream.writeShort(endPC); - stream.writeShort(handlerPC); - stream.writeShort(catchTypeIndex); - } - - // Follows javap output format for exception handlers. 
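The Label machinery above makes forward branches convenient: a not-yet-anchored label records where its offset must be written, and freeze() patches it once the target is known. A hedged sketch:

    // Sketch: a forward branch resolved via newLabel()/anchorToNext() above.
    // Emits the body of `static int isZero(int x)`: returns 1 if x == 0, else 0.
    static void emitIsZero(JCode code) throws JCode.OffsetTooBigException {
        JCode.Label notZero = code.newLabel();
        code.emitILOAD_0();
        code.emitIFNE(notZero);     // target unknown: a 16-bit offset is recorded for patching
        code.emitICONST_1();
        code.emitIRETURN();
        notZero.anchorToNext();     // the next instruction becomes the branch target
        code.emitICONST_0();
        code.emitIRETURN();
        // code.freeze() later fills in the recorded offset (patchAllOffset)
    }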
- /*@Override*/public String toString() { - StringBuffer buf = new StringBuffer(" "); - if (startPC < 10) buf.append(" "); - buf.append(startPC); - buf.append(" "); - if (endPC < 10) buf.append(" "); - buf.append(endPC); - buf.append(" "); - buf.append(handlerPC); - buf.append(" "); - if (catchType != null) { - buf.append("Class "); - buf.append(catchType); - } - else - buf.append("any"); - return buf.toString(); - } - - } - - public void addExceptionHandler(ExceptionHandler handler) { - assert !frozen; - exceptionHandlers.add(handler); - } - - public void addExceptionHandler(int startPC, - int endPC, - int handlerPC, - String catchType) { - addExceptionHandler(new ExceptionHandler(startPC, - endPC, - handlerPC, - catchType)); - } - - public void addFinallyHandler(int startPC, int endPC, int handlerPC) { - assert !frozen; - addExceptionHandler(startPC, endPC, handlerPC, null); - } - - public List/**/ getExceptionHandlers() { - return exceptionHandlers; - } - - // Line numbers - ////////////////////////////////////////////////////////////////////// - - protected int[] lineNumbers = null; - protected void ensureLineNumberCapacity(int endPC) { - assert !frozen; - if (lineNumbers == null) { - lineNumbers = new int[endPC]; - addAttribute(context.JLineNumberTableAttribute(owner.getOwner(), - this)); - } else if (lineNumbers.length < endPC) { - int[] newLN = new int[Math.max(endPC, lineNumbers.length * 2)]; - System.arraycopy(lineNumbers, 0, newLN, 0, lineNumbers.length); - lineNumbers = newLN; - } - } - - /** - * Set all line numbers in the interval [startPC, endPC) to - * line, overwriting existing line numbers. - */ - public void setLineNumber(int startPC, int endPC, int line) { - ensureLineNumberCapacity(endPC); - Arrays.fill(lineNumbers, startPC, endPC, line); - } - - public void setLineNumber(int instrPC, int line) { - setLineNumber(instrPC, instrPC + 1, line); - } - - /** Sets all non-filled line numbers in the interval [startPC, endPC) - * to 'line'. - */ - public void completeLineNumber(int startPC, int endPC, int line) { - ensureLineNumberCapacity(endPC); - for (int pc = startPC; pc < endPC; ++pc) - if (lineNumbers[pc] == 0) lineNumbers[pc] = line; - } - - public int[] getLineNumbers() { - assert frozen; - if (lineNumbers == null) return new int[0]; - else if (lineNumbers.length == getPC()) return lineNumbers; - else { - int[] trimmedLN = new int[getPC()]; - System.arraycopy(lineNumbers, 0, - trimmedLN, 0, - Math.min(lineNumbers.length, trimmedLN.length)); - return trimmedLN; - } - } - - // Output - ////////////////////////////////////////////////////////////////////// - - public void writeTo(DataOutputStream stream) throws IOException { - assert frozen; - stream.writeInt(getSize()); - codeArray.writeTo(stream); - } - - // Follows javap output format for opcodes. 
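The line-number API above (setLineNumber/completeLineNumber) is typically called as instructions are emitted; its first use also attaches the LineNumberTable attribute to the code. A small sketch:

    // Sketch: map the bytecode of one statement to its source line.
    static void emitReturnThis(JCode code, int sourceLine) {
        int startPC = code.getPC();
        code.emitALOAD_0();                                       // push `this`
        code.emitARETURN();
        code.setLineNumber(startPC, code.getPC(), sourceLine);    // covers [startPC, current PC)
    }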
- /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(); - JOpcode opcode = null; - int pc = 0, addr = 0; - while (pc < codeArray.getSize()) { - buf.append("\n "); - buf.append(pc); - buf.append(":\t"); - opcode = JOpcode.OPCODES[codeArray.getU1(pc)]; - buf.append(decode(opcode, pc)); - if (opcode.code == JOpcode.cTABLESWITCH || - opcode.code == JOpcode.cLOOKUPSWITCH) { - addr = ((pc / 4 + 1) + 1) * 4; // U4 aligned data - int low = codeArray.getU4(addr); - int high = codeArray.getU4(addr+4); - pc = addr + (2/*low+high*/ + (high - low + 1)/*targets*/) * 4; - } else - pc += opcode.getSize(); - } - if (exceptionHandlers.size() > 0) { - buf.append("\n Exception table:\n from to target type\n"); - Iterator it = exceptionHandlers.iterator(); - while (it.hasNext()) { - ExceptionHandler exh = (ExceptionHandler)it.next(); - buf.append(exh); - buf.append("\n"); - } - } - return buf.toString(); - } - - private String decode(JOpcode opcode, int pc) { - String ownerClassName = owner.getOwner().getName(); - int data, data2; - StringBuilder buf = new StringBuilder(); - buf.append(opcode.name.toLowerCase()); - switch (opcode.code) { - case JOpcode.cALOAD: case JOpcode.cASTORE: case JOpcode.cBIPUSH: - case JOpcode.cDLOAD: case JOpcode.cDSTORE: - case JOpcode.cFLOAD: case JOpcode.cFSTORE: - case JOpcode.cILOAD: case JOpcode.cISTORE: - case JOpcode.cLLOAD: case JOpcode.cLSTORE: - data = codeArray.getU1(pc+1); - buf.append("\t"); - buf.append(data); - break; - case JOpcode.cLDC: - data = codeArray.getU1(pc+1); - buf.append("\t#"); - buf.append(data); - buf.append("; "); - buf.append(pool.lookupEntry(data).toComment(ownerClassName)); - break; - case JOpcode.cNEWARRAY: - data = codeArray.getU1(pc+1); - buf.append(" "); - buf.append(JType.tagToString(data)); - break; - case JOpcode.cIINC: - data = codeArray.getU1(pc+1); - data2 = codeArray.getU1(pc+2); - buf.append("\t"); - buf.append(data); - buf.append(", "); - buf.append(data2); - break; - case JOpcode.cSIPUSH: - data = codeArray.getU2(pc+1); - buf.append("\t"); - buf.append(data); - break; - case JOpcode.cANEWARRAY: case JOpcode.cCHECKCAST: - case JOpcode.cGETFIELD: case JOpcode.cGETSTATIC: - case JOpcode.cINSTANCEOF: - case JOpcode.cINVOKESPECIAL: case JOpcode.cINVOKESTATIC: - case JOpcode.cINVOKEVIRTUAL: - case JOpcode.cLDC_W: case JOpcode.cLDC2_W: case JOpcode.cNEW: - case JOpcode.cPUTFIELD: case JOpcode.cPUTSTATIC: - data = codeArray.getU2(pc+1); - buf.append("\t#"); - buf.append(data); - buf.append("; "); - buf.append(pool.lookupEntry(data).toComment(ownerClassName)); - break; - case JOpcode.cIF_ACMPEQ: case JOpcode.cIF_ACMPNE: - case JOpcode.cIFEQ: case JOpcode.cIFGE: case JOpcode.cIFGT: - case JOpcode.cIFLE: case JOpcode.cIFLT: case JOpcode.cIFNE: - case JOpcode.cIFNONNULL: case JOpcode.cIFNULL: - case JOpcode.cIF_ICMPEQ: case JOpcode.cIF_ICMPGE: - case JOpcode.cIF_ICMPGT: case JOpcode.cIF_ICMPLE: - case JOpcode.cIF_ICMPLT: case JOpcode.cIF_ICMPNE: - data = codeArray.getU2(pc+1); // maybe S2 offset - buf.append("\t"); - buf.append(pc+data); - break; - case JOpcode.cGOTO: - data = codeArray.getS2(pc+1); // always S2 offset - buf.append("\t"); - buf.append(pc+data); - break; - case JOpcode.cINVOKEINTERFACE: - data = codeArray.getU2(pc+1); - data2 = codeArray.getU1(pc+3); - buf.append("\t#"); - buf.append(data); - buf.append(", "); - buf.append(data2); - buf.append("; "); - buf.append(pool.lookupEntry(data).toComment(ownerClassName)); - break; - case JOpcode.cTABLESWITCH: - buf.append("{ //"); - int addr = ((pc / 4 + 1) + 1) * 
4; // U4 aligned data - int low = codeArray.getU4(addr); - int high = codeArray.getU4(addr+4); - buf.append(low); - buf.append(" to "); - buf.append(high); - for (int i = low; i <= high; ++i) { - buf.append("\n\t\t"); - buf.append(i); - buf.append(": "); - buf.append(pc+codeArray.getU4(addr+(i-1)*4)); - buf.append(";"); - } - buf.append("\n\t\tdefault: "); - buf.append(pc+codeArray.getU4(addr-4)); - buf.append(" }"); - default: - } - return buf.toString(); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java deleted file mode 100644 index 9f3fcf8c6a..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java +++ /dev/null @@ -1,125 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.Iterator; -import java.util.List; - -/** - * Code attribute, containing code of methods. - * - * A Code attribute contains the JVM instructions and auxiliary information - * for a single method, instance initialization method, or class or interface - * initialization method. See section 4.8.3 of the JVM specification. - * - * @author Michel Schinz, Stephane Micheloud - * @version 1.1 - */ - -public class JCodeAttribute extends JAttribute { - protected final JCode code; - protected final JMethod owner; - protected static int UNKNOWN_STACK_SIZE = Integer.MIN_VALUE; - protected final int maxStackSize; - protected final int maxLocals; - - public JCodeAttribute(FJBGContext context, JClass clazz, JMethod owner) { - super(context, clazz); - this.owner = owner; - - this.maxStackSize = UNKNOWN_STACK_SIZE; - this.maxLocals = 0; // unknown - this.code = owner.getCode(); - - assert clazz == owner.getOwner(); - } - - public JCodeAttribute(FJBGContext context, - JClass clazz, - Object owner, - String name, - int size, - DataInputStream stream) - throws IOException { - super(context, clazz, name); - this.owner = (JMethod)owner; - - this.maxStackSize = stream.readShort(); - this.maxLocals = stream.readShort(); - this.code = context.JCode(clazz, (JMethod)owner, stream); - - int handlersCount = stream.readShort(); - for (int i = 0; i < handlersCount; ++i) - code.addExceptionHandler(code.new ExceptionHandler(stream)); - List/**/ attributes = - JAttribute.readFrom(context, clazz, code, stream); - Iterator attrIt = attributes.iterator(); - while (attrIt.hasNext()) - code.addAttribute((JAttribute)attrIt.next()); - - assert name.equals(getName()); - } - - public String getName() { return "Code"; } - - // Follows javap output format for Code attribute. 
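For reference, the body that the stream constructor above reads back, and that getSize()/writeContentsTo() below serialize, follows the standard Code attribute layout; a small sketch of the same size arithmetic:

    // Sketch of the Code attribute body size, mirroring getSize() below.
    static int codeAttributeBodySize(int codeBytes, int handlerCount, int nestedAttributeBytes) {
        return 2                          // u2 max_stack
             + 2                          // u2 max_locals
             + 4 + codeBytes              // u4 code_length + the bytecode itself
             + 2 + 8 * handlerCount       // u2 exception_table_length + 8 bytes per handler entry
             + 2 + nestedAttributeBytes;  // u2 attributes_count + nested attributes (incl. their 6-byte headers)
    }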
- /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(" Code:"); - buf.append("\n Stack="); - buf.append(maxStackSize); - buf.append(", Locals="); - buf.append(maxLocals); - buf.append(", Args_size="); - buf.append(owner.getArgsSize()); - buf.append(code); - buf.append("\n"); - Iterator it = code.getAttributes().iterator(); - while (it.hasNext()) { - JAttribute attr = (JAttribute)it.next(); - buf.append(attr); - buf.append("\n"); - } - return buf.toString(); - } - - protected int getSize() { - int handlersNum = code.getExceptionHandlers().size(); - - int attrsSize = 0; - Iterator attrsIt = code.getAttributes().iterator(); - while (attrsIt.hasNext()) { - JAttribute attr = (JAttribute)attrsIt.next(); - attrsSize += attr.getSize() + 6; - } - - return 2 // max stack - + 2 // max locals - + 4 // code size - + code.getSize() // code - + 2 // exception table size - + 8 * handlersNum // exception table - + 2 // attributes count - + attrsSize; // attributes - } - - protected void writeContentsTo(DataOutputStream stream) throws IOException { - List/**/ handlers = code.getExceptionHandlers(); - - stream.writeShort(code.getMaxStackSize()); - stream.writeShort(owner.getMaxLocals()); - - code.writeTo(stream); - - stream.writeShort(handlers.size()); - Iterator handlerIt = handlers.iterator(); - while (handlerIt.hasNext()) - ((JCode.ExceptionHandler)handlerIt.next()).writeTo(stream); - JAttribute.writeTo(code.getAttributes(), stream); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java deleted file mode 100644 index d09dfd19a4..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java +++ /dev/null @@ -1,377 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import ch.epfl.lamp.util.ByteArray; - -/** - * Iterator used to examine the contents of an instruction list. - * - * @author Michel Schinz, Thomas Friedli - * @version 1.0 - */ - -public class JCodeIterator { - protected final JCode code; - protected final JConstantPool pool; - protected final ByteArray codeArray; - - protected int pc; - protected JOpcode opcode; - - /** - * Creates a new code iterator with its instruction list - * and its pc initialized to a given value. - */ - public JCodeIterator(JCode code, int pc) { - this.code = code; - this.pool = code.getOwner().getOwner().getConstantPool(); - this.codeArray = code.codeArray; - this.pc = pc; - setOpcode(); - } - - public JCodeIterator(JCode code) { - this(code, 0); - } - - /** - * Get the current program counter. - * @return The current program counter. - */ - public int getPC() { return pc; } - - /** - * Searches the type of the instruction positionned at the - * current address and updates the current instruction. - */ - protected void setOpcode() { - // TODO : check if the current pc is the beginning - // of an instruction - opcode = isValid() ? JOpcode.OPCODES[codeArray.getU1(pc)] : null; - } - - /** - * Returns the opcode of the current instruction. - * @return The opcode of the current instruction. - */ - public JOpcode getOpcode() { - return opcode; - } - - /** - * Updates the program counter to an given value. - * @param pc The new value of the program counter. - */ - public void moveTo(int pc) { - this.pc = pc; - setOpcode(); - } - - /** - * Check the validity of the iterator. - * @return true iff the iterator points to a valid address. 
- */ - public boolean isValid() { - return pc < codeArray.getSize(); - } - - /** - * Updates the current instruction with the next one in the - * sense of their position in the code. - */ - public void moveToNext() { - moveTo(pc + getInstructionSize()); - } - - /** - * Updates the current instruction with a specific successor - * of it. - * @param succ The index of the wanted successor in the list of - * the successors of the current instruction. - */ - public void moveToSuccessor(int succ) { - moveTo(getSuccessorPC(succ)); - } - - /** - * Updates the current instruction with the one positionned - * at a given index relatively to the actual program counter - * @param offset The relative position of the instruction - * compared with the position of the current one - */ - public void moveRelatively(int offset) { - moveTo(pc + offset); - } - - /** - * Returns the size in bytes of the current instruction. - * @return The size in bytes of the current instruction. - */ - public int getInstructionSize() { - if (opcode.size != JOpcode.UNKNOWN) { - return opcode.size; - } else if (opcode == JOpcode.TABLESWITCH) { - int lowOffset = 1 + pad4(pc + 1) + 4; - int low = codeArray.getS4(pc + lowOffset); - int high = codeArray.getS4(pc + lowOffset + 4); - return lowOffset + 8 + 4 * (high - low + 1); - } else if (opcode == JOpcode.LOOKUPSWITCH) { - int npairsOffset = 1 + pad4(pc + 1) + 4; - int npairs = codeArray.getS4(pc + npairsOffset); - return npairsOffset + 4 + 8 * npairs; - } else if (opcode == JOpcode.WIDE) { - if (codeArray.getU1(pc + 1) == JOpcode.cIINC) - return 6; - else - return 4; - } else - throw new Error("Unknown size for instruction " + opcode); - } - - /** - * Returns the number of successors of the current instruction. - * @return The number of successors of the current instruction. - */ - public int getSuccessorCount() { - if (opcode.successorCount != JOpcode.UNKNOWN) { - return opcode.successorCount; - } else if (opcode == JOpcode.TABLESWITCH) { - int lowPos = pc + 1 + pad4(pc + 1) + 4; - return 1 // default case - + codeArray.getS4(lowPos + 4) // value of HIGH field - - codeArray.getS4(lowPos) + 1; // value of LOW field - } else if (opcode == JOpcode.LOOKUPSWITCH) { - int npairsPos = pc + 1 + pad4(pc + 1) + 4; - return 1 + codeArray.getS4(npairsPos); - } else - throw new Error("Unknown successors for instruction " + opcode); - } - - /** - * Returns the address of the successor of the current instruction - * given its index in the list of successors of the current - * instruction. - * @param index The index of the wanted successor in the list of - * the successors of the current instruction. - * @return The address of the specific successor. 
- */ - public int getSuccessorPC(int index) { - assert (index >= 0) && (index < getSuccessorCount()) : index; - - switch (opcode.jumpKind) { - case JOpcode.JMP_NEXT: - return pc + getInstructionSize(); - case JOpcode.JMP_ALWAYS_S2_OFFSET: - return pc + codeArray.getS2(pc + 1); - case JOpcode.JMP_ALWAYS_S4_OFFSET: - return pc + codeArray.getS4(pc + 1); - case JOpcode.JMP_MAYBE_S2_OFFSET: - if (index == 0) - return pc + getInstructionSize(); - else - return pc + codeArray.getS2(pc + 1); - case JOpcode.JMP_TABLE: { - int defaultPos = pc + 1 + pad4(pc + 1); - if (index == 0) - return pc + codeArray.getS4(defaultPos); - else - return pc + codeArray.getS4(defaultPos + 3*4 + 4 * (index - 1)); - } - case JOpcode.JMP_LOOKUP: { - int defaultPos = pc + 1 + pad4(pc + 1); - if (index == 0) - return pc + codeArray.getS4(defaultPos); - else - return pc + codeArray.getS4(defaultPos + 2*4 + 4 + 8 * (index - 1)); - } - default: - throw new Error(); - } - } - - /** - * Returns the total size of data words put on the stack by the current - * instruction. - * @return The total size of data words put on the stack by the current - * instruction. - */ - public int getProducedDataSize() { - if (opcode.getProducedDataTypes() == JOpcode.UNKNOWN_TYPE) { - switch (opcode.code) { - case JOpcode.cLDC: case JOpcode.cLDC_W: case JOpcode.cBALOAD: - return 1; - case JOpcode.cLDC2_W: case JOpcode.cDUP: case JOpcode.cSWAP: - return 2; - case JOpcode.cDUP_X1: - return 3; - case JOpcode.cDUP_X2: case JOpcode.cDUP2: - return 4; - case JOpcode.cDUP2_X1: - return 5; - case JOpcode.cDUP2_X2: - return 6; - case JOpcode.cGETSTATIC: case JOpcode.cGETFIELD: { - JConstantPool.FieldOrMethodRefEntry entry = - (JConstantPool.FieldOrMethodRefEntry) - pool.lookupEntry(codeArray.getU2(pc + 1)); - return JType.parseSignature(entry.getSignature()).getSize(); - } - case JOpcode.cWIDE : { - int op = codeArray.getU1(pc + 1); - if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD) { - JOpcode opcode2 = JOpcode.OPCODES[op]; - return JType.getTotalSize(opcode2.getProducedDataTypes()); - } else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE) - return 0; - else return 0; // (IINC) - } - default : - throw new Error(opcode.toString()); - } - } else - return JType.getTotalSize(opcode.getProducedDataTypes()); - } - - /** - * Returns the total size of data words taken from the stack by the current - * instruction. - * @return The total size of data words taken from the stack by the current - * instruction. 
- */ - public int getConsumedDataSize() { - if (opcode.getConsumedDataTypes() != JOpcode.UNKNOWN_TYPE) - return JType.getTotalSize(opcode.getConsumedDataTypes()); - else { - switch (opcode.code) { - case JOpcode.cPOP: case JOpcode.cDUP: - return 1; - case JOpcode.cPOP2: case JOpcode.cSWAP: - case JOpcode.cDUP_X1: case JOpcode.cDUP2: - return 2; - case JOpcode.cDUP_X2: case JOpcode.cDUP2_X1: - return 3; - case JOpcode.cDUP2_X2: - return 4; - case JOpcode.cPUTSTATIC: case JOpcode.cPUTFIELD: { - JConstantPool.FieldOrMethodRefEntry entry = - (JConstantPool.FieldOrMethodRefEntry) - pool.lookupEntry(codeArray.getU2(pc + 1)); - return JType.parseSignature(entry.getSignature()).getSize(); - } - case JOpcode.cINVOKEVIRTUAL: case JOpcode.cINVOKESPECIAL: - case JOpcode.cINVOKESTATIC: case JOpcode.cINVOKEINTERFACE : { - JConstantPool.FieldOrMethodRefEntry entry = - (JConstantPool.FieldOrMethodRefEntry) - pool.lookupEntry(codeArray.getU2(pc + 1)); - JMethodType tp = (JMethodType) - JType.parseSignature(entry.getSignature()); - return tp.getArgsSize() - + (opcode == JOpcode.INVOKESTATIC ? 0 : 1); - } - case JOpcode.cWIDE : { - int op = codeArray.getU1(pc + 1); - if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD) - return 0; - else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE) { - JOpcode opcode2 = JOpcode.OPCODES[op]; - return JType.getTotalSize(opcode2.getConsumedDataTypes()); - } else - return 0; // (IINC) - } - case JOpcode.cMULTIANEWARRAY : - return codeArray.getU1(pc + 3); - default: - throw new Error(opcode.toString()); - } - } - } - - /** - * Returns the number of data types put on the stack by the current - * instruction. - * @return The number of data types put on the stack by the current - * instruction. - */ - public int getProducedDataTypesNumber() { - if (opcode.getProducedDataTypes() != JOpcode.UNKNOWN_TYPE) - return opcode.getProducedDataTypes().length; - else { - switch (opcode.code) { - case JOpcode.cLDC: case JOpcode.cLDC_W: case JOpcode.cLDC2_W: - case JOpcode.cBALOAD: case JOpcode.cGETSTATIC: - case JOpcode.cGETFIELD: - return 1; - case JOpcode.cDUP: case JOpcode.cSWAP: - return 2; - case JOpcode.cDUP_X1: - return 3; - case JOpcode.cWIDE: { - int op = codeArray.getU1(pc + 1); - if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD) - return 1; - else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE) - return 0; - else - return 0; // (IINC) - } - default: - throw new Error("JOpcode implementation error"); - } - } - } - - /** - * Returns the number of data types taken from the stack by the current - * instruction. - * @return The number of data types taken from the stack by the current - * instruction. 
- */ -// public int getConsumedDataTypesNumber() { -// if (opcode.getConsumedDataTypes() == JOpcode.UNKNOWN_TYPE) { -// switch (opcode.code) { -// case 87 : return 1; // POP -// case 88 : return 2; // POP2 -// case 89 : return 1; // DUP -// case 90 : return 2; // DUP_X1 -// case 91 : // DUP_X2 -// case 92 : // DUP2 -// case 93 : // DUP2_X1 -// case 94 : // DUP2_X2 -// throw new UnsupportedOperationException("Opcode " + opcode.name -// + " has a stack-dependant" -// + " data types consumption"); -// case 95 : return 2; // SWAP -// case 179 : return 1; // PUTSTATIC -// case 181 : return 1; // PUTFIELD -// case 182 : // INVOKEVIRTUAL -// case 183 : // INVOKESPECIAL -// case 185 : // INVOKEINTERFACE -// s = epool.getClassMethodRef(codeArray.getU2(pc + 1)).split(" ")[3]; -// return ((JMethodType)JType.parseSignature(s)).argTypes.length + 1; -// case 184 : // INVOKESTATIC -// s = epool.getClassMethodRef(codeArray.getU2(pc + 1)).split(" ")[3]; -// return ((JMethodType)JType.parseSignature(s)).argTypes.length; -// case 196 : // WIDE -// int op = codeArray.getU1(pc + 1); -// if (op >= 21 && op <= 25) return 0; // (xLOAD) -// else if (op >= 54 && op <= 58) // (xSTORE) -// return JOpcode.OPCODES[op].getConsumedDataTypes().length; -// else return 0; // (IINC) -// case 197 : return codeArray.getU1(pc + 3); // MULTIANEWARRAY -// default : throw new Error("JOpcode implementation error"); -// } -// } else return opcode.getConsumedDataTypes().length; -// } - - - // Return the number between 0 and 3 which, if added to the given - // value, would yield a multiple of 4. - protected int[] padding = { 0, 3, 2, 1 }; - protected int pad4(int value) { - return padding[value % 4]; - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java deleted file mode 100644 index 9867e01b25..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java +++ /dev/null @@ -1,771 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.HashMap; - -/** - * Constant pool, holding constants for a Java class file. 
- * - * @author Michel Schinz - * @version 2.0 - */ - -public class JConstantPool { - protected boolean frozen = false; - - protected HashMap/**/ entryToIndex = new HashMap(); - protected Entry[] indexToEntry; - protected int currIndex; - - public static final short CONSTANT_Utf8 = 1; - public static final short CONSTANT_Integer = 3; - public static final short CONSTANT_Float = 4; - public static final short CONSTANT_Long = 5; - public static final short CONSTANT_Double = 6; - public static final short CONSTANT_Class = 7; - public static final short CONSTANT_String = 8; - public static final short CONSTANT_Fieldref = 9; - public static final short CONSTANT_Methodref = 10; - public static final short CONSTANT_InterfaceMethodref = 11; - public static final short CONSTANT_NameAndType = 12; - - protected JConstantPool(FJBGContext context) { - indexToEntry = new Entry[8]; - currIndex = 1; - } - - protected JConstantPool(FJBGContext context, DataInputStream stream) - throws IOException { - int count = stream.readShort(); - indexToEntry = new EntryIndex[count]; - - currIndex = 1; - while (currIndex < count) { - EntryIndex e; - int tag = stream.readByte(); - - switch (tag) { - case CONSTANT_Utf8: - e = new Utf8Entry(stream); - // no duplicates - entryToIndex.put(e, new Integer(currIndex)); - break; - case CONSTANT_Integer: - e = new IntegerEntry(stream); - break; - case CONSTANT_Float: - e = new FloatEntry(stream); - break; - case CONSTANT_Long: - e = new LongEntry(stream); - break; - case CONSTANT_Double: - e = new DoubleEntry(stream); - break; - case CONSTANT_Class: - e = new DescriptorEntryIndex(stream); - break; - case CONSTANT_String: - e = new StringEntryIndex(stream); - break; - case CONSTANT_Fieldref: - case CONSTANT_Methodref: - case CONSTANT_InterfaceMethodref: - e = new FieldOrMethodRefEntryIndex(tag, stream); - break; - case CONSTANT_NameAndType: - e = new NameAndTypeEntryIndex(stream); - break; - default: - throw new IllegalArgumentException("unknown entry in pool: " + tag); - } - indexToEntry[currIndex] = e; - currIndex += e.getSize(); - } - } - - public void freeze() { frozen = true; } - - /** - * Returns a string representing the type of an entry - * knowing its tag - * @param tag The tag representing the type of the - * constant pool entry - */ - public String getEntryType(int tag) { - switch (tag) { - case CONSTANT_Utf8 : return "Utf8"; - case CONSTANT_Integer : return "Integer"; - case CONSTANT_Float : return "Float"; - case CONSTANT_Long : return "Long"; - case CONSTANT_Double : return "Double"; - case CONSTANT_Class : return "Class"; - case CONSTANT_String : return "String"; - case CONSTANT_Fieldref : return "Field"; - case CONSTANT_Methodref : return "Method"; - case CONSTANT_InterfaceMethodref : return "InterfaceMethod"; - case CONSTANT_NameAndType : return "NameAndType"; - default : throw new Error("invalid constant pool tag : " + tag); - } - } - - public int addClass(String className) { - return addDescriptor(className.replace('.', '/')); - } - - public int addDescriptor(JReferenceType type) { - return addDescriptor(type.getDescriptor()); - } - - protected int addDescriptor(String name) { - return addEntry(new DescriptorEntryValue(name)); - } - - public int addClassMethodRef(String className, - String methodName, - String signature) { - return addMethodRef(true, className, methodName, signature); - } - - public int addInterfaceMethodRef(String className, - String methodName, - String signature) { - return addMethodRef(false, className, methodName, signature); - } - - 
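All of the pool's add* methods (addClass, addInteger, addClassMethodRef, and so on) ultimately delegate to addEntry, shown a little further down, which interns entries through the entryToIndex map -- adding the same constant twice returns the same index -- and advances currIndex by the entry's size, so long and double constants occupy two pool slots. The following standalone sketch only models that behaviour; it is not FJBG code, and every name in it is invented for the example.

import java.util.HashMap;
import java.util.Map;

// Illustrative model of the interning behaviour described above -- not FJBG's
// JConstantPool. Entries are deduplicated through an entry->index map, index 0
// is never used, and "wide" constants (long/double) consume two slots.
public class PoolModel {
    private final Map<Object, Integer> entryToIndex = new HashMap<>();
    private int currIndex = 1;                          // class-file pool indices start at 1

    private int add(Object entry, int size) {
        Integer existing = entryToIndex.get(entry);
        if (existing != null) return existing;          // same constant -> same index
        int index = currIndex;
        currIndex += size;                              // wide entries take two slots
        entryToIndex.put(entry, index);
        return index;
    }

    public int addUtf8(String value)  { return add(value, 1); }
    public int addInteger(int value)  { return add(value, 1); }
    public int addLong(long value)    { return add(value, 2); }

    public static void main(String[] args) {
        PoolModel pool = new PoolModel();
        System.out.println(pool.addUtf8("java/lang/Object"));  // 1
        System.out.println(pool.addLong(42L));                 // 2 (also occupies slot 3)
        System.out.println(pool.addInteger(7));                // 4
        System.out.println(pool.addUtf8("java/lang/Object"));  // 1 again -- deduplicated
    }
}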
public int addMethodRef(boolean isClass, - String className, - String methodName, - String signature) { - return addEntry(new FieldOrMethodRefEntryValue(isClass - ? CONSTANT_Methodref - : CONSTANT_InterfaceMethodref, - className, - methodName, - signature)); - } - - public int addFieldRef(String className, - String fieldName, - String signature) { - return addEntry(new FieldOrMethodRefEntryValue(CONSTANT_Fieldref, - className, - fieldName, - signature)); - } - - public int addInteger(int value) { - return addEntry(new IntegerEntry(value)); - } - - public int addFloat(float value) { - return addEntry(new FloatEntry(value)); - } - - public int addLong(long value) { - return addEntry(new LongEntry(value)); - } - - public int addDouble(double value) { - return addEntry(new DoubleEntry(value)); - } - - public int addString(String value) { - return addEntry(new StringEntryValue(value)); - } - - public int addNameAndType(String name, String descriptor) { - return addEntry(new NameAndTypeEntryValue(name, descriptor)); - } - - public int addUtf8(String value) { - return addEntry(new Utf8Entry(value)); - } - - public int addUtf8(byte[] value) { - return addEntry(new Utf8Entry(value)); - } - - protected int addEntry(EntryValue e) { - assert !frozen; - Integer idx = (Integer)entryToIndex.get(e); - if (idx != null) - return idx.intValue(); - - e.addChildren(); - - int index = currIndex; - currIndex += e.getSize(); - - entryToIndex.put(e, new Integer(index)); - if (index >= indexToEntry.length) { - Entry[] newI2E = new Entry[indexToEntry.length * 2]; - System.arraycopy(indexToEntry, 0, newI2E, 0, indexToEntry.length); - indexToEntry = newI2E; - } - indexToEntry[index] = e; - return index; - } - - /// Lookup methods - ////////////////////////////////////////////////////////////////////// - - public Entry lookupEntry(int index) { - assert index > 0 && index < currIndex - : "invalid index: " + index; - assert indexToEntry[index] != null - : "invalid index (null contents): " + index; - return indexToEntry[index]; - } - - public String lookupClass(int index) { - DescriptorEntry entry = (DescriptorEntry)lookupEntry(index); - return entry.getValue(); - } - - public String lookupNameAndType(int index) { - NameAndTypeEntry entry = (NameAndTypeEntry)lookupEntry(index); - return entry.getName()+":"+entry.getDescriptor(); - } - - public String lookupUtf8(int index) { - Utf8Entry entry = (Utf8Entry)lookupEntry(index); - return entry.getValue(); - } - - /// Output - ////////////////////////////////////////////////////////////////////// - - public void writeTo(DataOutputStream stream) throws IOException { - if (! frozen) freeze(); - - stream.writeShort(currIndex); - for (int i = 0; i < currIndex; ++i) { - Entry entry = indexToEntry[i]; - if (entry != null) { - stream.writeByte(entry.getTag()); - entry.writeContentsTo(stream); - } - } - } - - // Follows javap output format for constant pool. 
- /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(" Constant pool:"); - for (int i = 0; i < currIndex; ++i) { - Entry entry = indexToEntry[i]; - if (entry != null) { - if (i > 0) buf.append("\n"); - buf.append("const #"); - buf.append(i); - buf.append(" = "); - buf.append(entry); - } - } - buf.append("\n"); - return buf.toString(); - } - - /// Classes for the various kinds of entries - ////////////////////////////////////////////////////////////////////// - - public interface Entry { - public int getTag(); - - int getSize(); - void writeContentsTo(DataOutputStream stream) throws IOException; - String toComment(String ownerClassName); - } - - protected interface EntryValue extends Entry { - abstract void addChildren(); - } - - protected interface EntryIndex extends Entry { - abstract void fetchChildren(); - } - - abstract protected class ChildlessEntry implements EntryValue, EntryIndex { - public void addChildren() {} - public void fetchChildren() {} - } - - public class IntegerEntry extends ChildlessEntry implements Entry { - private final int value; - public IntegerEntry(int value) { this.value = value; } - public IntegerEntry(DataInputStream stream) throws IOException { - this(stream.readInt()); - } - - public int hashCode() { return value; } - public boolean equals(Object o) { - return o instanceof IntegerEntry && ((IntegerEntry)o).value == value; - } - - public int getTag() { return CONSTANT_Integer; } - public int getValue() { return value; } - - public int getSize() { return 1; } - public void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeInt(value); - } - /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer("int\t"); - buf.append(getValue()); - buf.append(";"); - return buf.toString(); - } - public String toComment(String ownerClassname) { - return "//int "+getValue(); - } - } - - public class FloatEntry extends ChildlessEntry implements Entry { - private final float value; - public FloatEntry(float value) { this.value = value; } - public FloatEntry(DataInputStream stream) throws IOException { - this(stream.readFloat()); - } - - public int hashCode() { return (int)value; } - public boolean equals(Object o) { - return o instanceof FloatEntry && ((FloatEntry)o).value == value; - } - - public int getTag() { return CONSTANT_Float; } - public float getValue() { return value; } - - public int getSize() { return 1; } - public void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeFloat(value); - } - /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer("float\t"); - buf.append(getValue()); - buf.append("f"); - return buf.toString(); - } - public String toComment(String ownerClassname) { - return "//float "+getValue()+"f"; - } - } - - public class LongEntry extends ChildlessEntry implements Entry { - private final long value; - public LongEntry(long value) { this.value = value; } - public LongEntry(DataInputStream stream) throws IOException { - this(stream.readLong()); - } - - public int hashCode() { return (int)value; } - public boolean equals(Object o) { - return o instanceof LongEntry && ((LongEntry)o).value == value; - } - - public int getTag() { return CONSTANT_Long; } - public long getValue() { return value; } - - public int getSize() { return 2; } - public void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeLong(value); - } - /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer("long\t"); - 
buf.append(getValue()); - buf.append("l;"); - return buf.toString(); - } - public String toComment(String ownerClassname) { - return "//long "+getValue()+"l"; - } - } - - public class DoubleEntry extends ChildlessEntry implements Entry { - private final double value; - public DoubleEntry(double value) { this.value = value; } - public DoubleEntry(DataInputStream stream) throws IOException { - this(stream.readDouble()); - } - - public int hashCode() { return (int)value; } - public boolean equals(Object o) { - return o instanceof DoubleEntry && ((DoubleEntry)o).value == value; - } - - public int getTag() { return CONSTANT_Double; } - public double getValue() { return value; } - - public int getSize() { return 2; } - public void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeDouble(value); - } - /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer("double\t"); - buf.append(getValue()); - return buf.toString(); - } - public String toComment(String ownerClassname) { - return "//double "+getValue(); - } - } - - public class Utf8Entry extends ChildlessEntry implements Entry { - private final String value; - private final byte[] bytes; - public Utf8Entry(String value) { - this.value = value.intern(); - this.bytes = null; - } - public Utf8Entry(DataInputStream stream) throws IOException { - this(stream.readUTF()); - } - public Utf8Entry(byte[] bytes) { - this.bytes = bytes; - this.value = null; - } - - public int hashCode() { - if (bytes != null) return bytes.hashCode(); - return value.hashCode(); - } - public boolean equals(Object o) { - boolean isEqual = o instanceof Utf8Entry; - if (bytes != null) { - isEqual = isEqual && ((Utf8Entry)o).bytes == bytes; - } - else { - isEqual = isEqual && ((Utf8Entry)o).value == value; - } - return isEqual; - } - - public int getTag() { return CONSTANT_Utf8; } - public String getValue() { return value; } - public byte[] getBytes() { return bytes; } - - public int getSize() { return 1; } - public void writeContentsTo(DataOutputStream stream) throws IOException { - if (bytes != null) { - if (bytes.length > 65535) { - throw new IOException("String literal of length " + bytes.length + " does not fit in Classfile"); - } - stream.writeShort(bytes.length); - stream.write(bytes); - } - else - stream.writeUTF(value); - } - // Follows javap output format for Utf8 pool entries. - public String toString() { return "Asciz\t"+escaped(getValue())+";"; } - public String toComment(String ownerClassname) { - return "//Asciz "+escaped(getValue()); - } - private String escaped(String s) { - return s.replace("\n", "\\n"); - } - } - - abstract public class StringEntry implements Entry { - protected String value; - protected int valueIndex; - - public int hashCode() { - assert value != null; - return value.hashCode(); - } - public boolean equals(Object o) { - return o instanceof StringEntry && ((StringEntry)o).value == value; - } - - public int getTag() { return CONSTANT_String; } - public String getValue() { return value; } - - public int getSize() { return 1; } - public void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeShort(valueIndex); - } - // Follows javap output format for String pool entries. 
- public String toString() { - return "String\t#"+valueIndex+";\t// "+escaped(getValue()); - } - public String toComment(String ownerClassname) { - return "//String "+escaped(getValue()); - } - private String escaped(String s) { - return s.replace("\n", "\\n"); - } - } - - public class StringEntryValue extends StringEntry implements EntryValue { - public StringEntryValue(String value) { - this.value = value.intern(); - } - public void addChildren() { - valueIndex = addUtf8(value); - } - } - - public class StringEntryIndex extends StringEntry implements EntryIndex { - public StringEntryIndex(int valueIndex) { - this.valueIndex = valueIndex; - } - public StringEntryIndex(DataInputStream stream) throws IOException { - this(stream.readShort()); - } - public String getValue() { - if (value == null) fetchChildren(); - return super.getValue(); - } - public void fetchChildren() { - value = lookupUtf8(valueIndex); - } - } - - abstract public class DescriptorEntry implements Entry { - protected String name; - protected int nameIndex; - - public int hashCode() { - assert name != null; - return name.hashCode(); - } - public boolean equals(Object o) { - return o instanceof DescriptorEntry && ((DescriptorEntry)o).name == name; - } - - public int getTag() { return CONSTANT_Class; } - public String getValue() { return name; } - - public int getSize() { return 1; } - public void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeShort(nameIndex); - } - // Follows javap output format for class pool entries. - public String toString() { - StringBuffer buf = new StringBuffer("class\t#"); - buf.append(nameIndex); - buf.append(";\t// "); - buf.append(getClassName()); - return buf.toString(); - } - public String toComment(String ownerClassname) { - return "//class "+getClassName(); - } - private String getClassName() { - StringBuffer buf = new StringBuffer(); - String value = getValue(); - if (value.startsWith("[")) buf.append("\""); - buf.append(value); - if (value.startsWith("[")) buf.append("\""); - return buf.toString(); - } - } - - protected class DescriptorEntryValue - extends DescriptorEntry - implements EntryValue { - public DescriptorEntryValue(String name) { this.name = name.intern(); } - public void addChildren() { - nameIndex = addUtf8(name); - } - } - - protected class DescriptorEntryIndex - extends DescriptorEntry - implements EntryIndex { - public DescriptorEntryIndex(int nameIndex) { this.nameIndex = nameIndex; } - public DescriptorEntryIndex(DataInputStream stream) throws IOException { - this(stream.readShort()); - } - public String getValue() { - if (name == null) fetchChildren(); - return super.getValue(); - } - public void fetchChildren() { - name = lookupUtf8(nameIndex); - } - } - - abstract public class FieldOrMethodRefEntry implements Entry { - private final int tag; - protected String className, thingName, signature; - protected int classIndex, nameAndTypeIndex; - - public FieldOrMethodRefEntry(int tag) { - assert tag == CONSTANT_Fieldref - || tag == CONSTANT_Methodref - || tag == CONSTANT_InterfaceMethodref; - - this.tag = tag; - } - - public int hashCode() { - return tag - + className.hashCode() - + thingName.hashCode() - + signature.hashCode(); - } - public boolean equals(Object o) { - return o instanceof FieldOrMethodRefEntry - && ((FieldOrMethodRefEntry)o).tag == tag - && ((FieldOrMethodRefEntry)o).className == className - && ((FieldOrMethodRefEntry)o).thingName == thingName - && ((FieldOrMethodRefEntry)o).signature == signature; - } - - public int getTag() { 
return tag; } - public String getClassName() { return className; } - public String getFieldOrMethodName() { return thingName; } - public String getSignature() { return signature; } - - public int getSize() { return 1; } - public void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeShort(classIndex); - stream.writeShort(nameAndTypeIndex); - } - // Follows javap output format for field/method pool entries. - public String toString() { - return getEntryType(tag)+"\t#"+classIndex+".#"+nameAndTypeIndex+ - ";\t// "+getName("")+":"+signature; - } - public String toComment(String ownerClassName) { - return "//"+getEntryType(tag)+" "+getName(ownerClassName)+":"+signature; - } - private String getName(String ownerClassName) { - String name = getFieldOrMethodName(); - if (JMethod.INSTANCE_CONSTRUCTOR_NAME.equals(name)) - name = "\""+name+"\""; - if (!getClassName().equals(ownerClassName)) - name = getClassName()+"."+name; - return name; - } - } - - protected class FieldOrMethodRefEntryValue - extends FieldOrMethodRefEntry - implements EntryValue { - public FieldOrMethodRefEntryValue(int tag, - String className, - String thingName, - String signature) { - super(tag); - this.className = className.intern(); - this.thingName = thingName.intern(); - this.signature = signature.intern(); - } - - public void addChildren() { - classIndex = addClass(className); - nameAndTypeIndex = addNameAndType(thingName, signature); - } - } - - protected class FieldOrMethodRefEntryIndex - extends FieldOrMethodRefEntry - implements EntryIndex { - public FieldOrMethodRefEntryIndex(int tag, - int classIndex, - int nameAndTypeIndex) { - super(tag); - this.classIndex = classIndex; - this.nameAndTypeIndex = nameAndTypeIndex; - } - public FieldOrMethodRefEntryIndex(int tag, DataInputStream stream) - throws IOException { - this(tag, stream.readShort(), stream.readShort()); - } - public String getClassName() { - if (className == null) fetchChildren(); - return super.getClassName(); - } - public String getFieldOrMethodName() { - if (thingName == null) fetchChildren(); - return super.getFieldOrMethodName(); - } - public String getSignature() { - if (signature == null) fetchChildren(); - return super.getSignature(); - } - public void fetchChildren() { - className = lookupClass(classIndex); - NameAndTypeEntry nat = (NameAndTypeEntry)lookupEntry(nameAndTypeIndex); - thingName = nat.getName(); - signature = nat.getDescriptor(); - } - } - - abstract public class NameAndTypeEntry implements Entry { - protected String name, descriptor; - protected int nameIndex, descriptorIndex; - - public int hashCode() { return name.hashCode() + descriptor.hashCode(); } - public boolean equals(Object o) { - return o instanceof NameAndTypeEntry - && ((NameAndTypeEntry)o).name == name - && ((NameAndTypeEntry)o).descriptor == descriptor; - } - - public int getTag() { return CONSTANT_NameAndType; } - public String getName() { return name; } - public String getDescriptor() { return descriptor; } - - public int getSize() { return 1; } - public void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeShort(nameIndex); - stream.writeShort(descriptorIndex); - } - // Follows javap output format for name/type pool entries. 
- public String toString() { - String natName = getName(); - if (JMethod.INSTANCE_CONSTRUCTOR_NAME.equals(natName)) - natName = "\""+natName+"\""; - return "NameAndType\t#"+nameIndex+":#"+descriptorIndex+ - ";// "+natName+":"+getDescriptor(); - } - public String toComment(String ownerClassname) { return ""; } - } - - protected class NameAndTypeEntryValue - extends NameAndTypeEntry - implements EntryValue { - public NameAndTypeEntryValue(String name, String descriptor) { - this.name = name.intern(); - this.descriptor = descriptor.intern(); - } - public void addChildren() { - nameIndex = addUtf8(name); - descriptorIndex = addUtf8(descriptor); - } - } - - protected class NameAndTypeEntryIndex - extends NameAndTypeEntry - implements EntryIndex { - public NameAndTypeEntryIndex(int nameIndex, int descriptorIndex) { - this.nameIndex = nameIndex; - this.descriptorIndex = descriptorIndex; - } - public NameAndTypeEntryIndex(DataInputStream stream) throws IOException { - this(stream.readShort(), stream.readShort()); - } - public String getName() { - if (name == null) fetchChildren(); - return super.getName(); - } - public String getDescriptor() { - if (descriptor == null) fetchChildren(); - return super.getDescriptor(); - } - public void fetchChildren() { - name = lookupUtf8(nameIndex); - descriptor = lookupUtf8(descriptorIndex); - } - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java deleted file mode 100644 index 6ee05e43c7..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java +++ /dev/null @@ -1,69 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -/** - * ConstantValue attribute representing the value of a constant field. - * - * There can be no more than one ConstantValue attribute in the attributes - * table of a given field_info structure.. See section 4.8.2 of the JVM - * specification. - * - * @author Stephane Micheloud - * @version 1.0 - */ - -public class JConstantValueAttribute extends JAttribute { - /** Constant pool of the current classfile. */ - private JConstantPool pool; - - protected int constantValueIndex; - - public JConstantValueAttribute(FJBGContext context, - JClass clazz, - JField field) { - super(context, clazz); - this.pool = clazz.pool; - - assert field.getOwner() == clazz; - } - - public JConstantValueAttribute(FJBGContext context, - JClass clazz, - Object owner, // JField - String name, - int size, - DataInputStream stream) - throws IOException { - super(context, clazz, name); - this.pool = clazz.pool; - - this.constantValueIndex = stream.readShort(); - - assert name.equals(getName()); - } - - public String getName() { return "ConstantValue"; } - - // Follows javap output format for ConstantValue attribute. 
- /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(" Constant value: "); - buf.append(pool.lookupEntry(constantValueIndex)); - return buf.toString(); - } - - protected int getSize() { - return 2; // Short.SIZE - } - - protected void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeShort(constantValueIndex); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java deleted file mode 100644 index f663f00ae1..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java +++ /dev/null @@ -1,83 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -/** - * EclosingMethod attribute - - * A class must have an EnclosingMethod attribute if and only if it is a - * local class or an anonymous class. A class may have no more than one - * EnclosingMethod attribute. See section 4.8.6 of the JVM specification. - * - * @author Michel Schinz - * @version 1.0 - */ - -public class JEnclosingMethodAttribute extends JAttribute { - /** Constant pool of the current classfile. */ - private JConstantPool pool; - - protected final int classIdx; - protected final int nameAndTypeIdx; - - public JEnclosingMethodAttribute(FJBGContext context, - JClass clazz, - String className, - String methodName, - JType methodType) { - super(context, clazz); - this.pool = clazz.pool; - - this.classIdx = pool.addClass(className); - this.nameAndTypeIdx = pool.addNameAndType(methodName, methodType.getSignature()); - } - - public JEnclosingMethodAttribute(FJBGContext context, - JClass clazz, - Object owner, - String name, - int size, - DataInputStream stream) - throws IOException { - super(context, clazz, name); - this.pool = clazz.pool; - - this.classIdx = stream.readShort(); - this.nameAndTypeIdx = stream.readShort(); - - assert name.equals(getName()); - } - - public String getName() { return "EnclosingMethod"; } - - // Follows javap output format for EnclosingMethod attribute. - /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(" EnclosingMethod:"); - buf.append("\n #"); - buf.append(classIdx); - if (nameAndTypeIdx != 0) { - buf.append(" of #"); - buf.append(nameAndTypeIdx); - } - buf.append(";\t// "); - buf.append(pool.lookupEntry(classIdx)); - buf.append("\n"); - return buf.toString(); - } - - protected int getSize() { - return 4; // 2 * Short.SIZE - } - - protected void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeShort(classIdx); - stream.writeShort(nameAndTypeIdx); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java deleted file mode 100644 index b91d0f2e93..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java +++ /dev/null @@ -1,90 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -/** - * Exceptions attribute - - * This table is used by compilers to indicate which Exceptions a method - * is declared to throw. See section 2.6.4 of the JVM specification. 
- * - * @author Stephane Micheloud - * @version 1.0 - */ - -public class JExceptionsAttribute extends JAttribute { - /** Constant pool of the current classfile. */ - private JConstantPool pool; - - protected int[] indexTable; - protected int count; - - public JExceptionsAttribute(FJBGContext context, - JClass clazz, - JMethod owner) { - super(context, clazz); - this.pool = clazz.pool; - - this.count = 0; - this.indexTable = new int[8]; // some size > count - - assert clazz == owner.getOwner(); - } - - public JExceptionsAttribute(FJBGContext context, - JClass clazz, - Object owner, //JMethod - String name, - int size, - DataInputStream stream) - throws IOException { - super(context, clazz, name); - this.pool = clazz.pool; - - this.count = stream.readShort(); - this.indexTable = new int[count]; - for (int i = 0; i < count; ++i) - indexTable[i] = stream.readShort(); - - assert name.equals(getName()); - } - - public void addEntry(int classIndex) { - if (count >= indexTable.length) { - int[] newIT = new int[indexTable.length * 2]; - System.arraycopy(indexTable, 0, newIT, 0, indexTable.length); - indexTable = newIT; - } - indexTable[count++] = classIndex; - } - - public String getName() { return "Exceptions"; } - - // Follows javap output format for Exceptions attribute. - /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(" Exceptions: "); - for (int i = 0; i < indexTable.length; ++i) { - buf.append("\n throws "); - buf.append(JClass.toExternalName(pool.lookupClass(indexTable[i]))); - } - buf.append("\n"); - return buf.toString(); - } - - protected int getSize() { - return 2 + indexTable.length * 2; - } - - protected void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeShort(count); - for (int i = 0; i < count; ++i) - stream.writeShort(indexTable[i]); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java deleted file mode 100644 index d82db8289f..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java +++ /dev/null @@ -1,667 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -/** - * Extended list of instructions, providing pseudo-instructions which - * are easier to use than the standard ones. 
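A concrete instance of such a pseudo-instruction is emitPUSH(int), defined a few methods below: it picks the cheapest opcode for a given constant (ICONST_*, BIPUSH, SIPUSH, or an LDC of a constant-pool entry) instead of leaving that choice to the caller. The sketch that follows only reproduces the selection logic for illustration; it is not FJBG code, and all names in it are invented.

// Standalone illustration of the opcode choice made by emitPUSH(int) below.
public class PushSelectionSketch {
    static String opcodeFor(int value) {
        if (value >= -1 && value <= 5)
            return "ICONST_" + (value == -1 ? "M1" : String.valueOf(value));
        if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE)
            return "BIPUSH";                     // one-byte immediate
        if (value >= Short.MIN_VALUE && value <= Short.MAX_VALUE)
            return "SIPUSH";                     // two-byte immediate
        return "LDC";                            // constant-pool entry, LDC or LDC_W
    }

    public static void main(String[] args) {
        System.out.println(opcodeFor(3));        // ICONST_3
        System.out.println(opcodeFor(100));      // BIPUSH
        System.out.println(opcodeFor(1000));     // SIPUSH
        System.out.println(opcodeFor(100000));   // LDC
    }
}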
- * - * @author Michel Schinz, Thomas Friedli - * @version 1.0 - */ - -public class JExtendedCode extends JCode { - public final static int COND_EQ = 0; - public final static int COND_NE = 1; - public final static int COND_LT = 2; - public final static int COND_GE = 3; - public final static int COND_GT = 4; - public final static int COND_LE = 5; - - private final JOpcode[] forbidden = new JOpcode[0]; - private final JOpcode[] nothingToDo = new JOpcode[0]; - - private final JOpcode[][][] typeConversions = { - { - /* T_BOOLEAN -> T_BOOLEAN */ nothingToDo, - /* T_BOOLEAN -> T_CHAR */ forbidden, - /* T_BOOLEAN -> T_FLOAT */ forbidden, - /* T_BOOLEAN -> T_DOUBLE */ forbidden, - /* T_BOOLEAN -> T_BYTE */ forbidden, - /* T_BOOLEAN -> T_SHORT */ forbidden, - /* T_BOOLEAN -> T_INT */ forbidden, - /* T_BOOLEAN -> T_LONG */ forbidden - }, - { - /* T_CHAR -> T_BOOLEAN */ forbidden, - /* T_CHAR -> T_CHAR */ nothingToDo, - /* T_CHAR -> T_FLOAT */ {JOpcode.I2F}, - /* T_CHAR -> T_DOUBLE */ {JOpcode.I2D}, - /* T_CHAR -> T_BYTE */ {JOpcode.I2B}, - /* T_CHAR -> T_SHORT */ {JOpcode.I2S}, - /* T_CHAR -> T_INT */ nothingToDo, - /* T_CHAR -> T_LONG */ {JOpcode.I2L} - }, - { - /* T_FLOAT -> T_BOOLEAN */ forbidden, - /* T_FLOAT -> T_CHAR */ {JOpcode.F2I, JOpcode.I2C}, - /* T_FLOAT -> T_FLOAT */ nothingToDo, - /* T_FLOAT -> T_DOUBLE */ {JOpcode.F2D}, - /* T_FLOAT -> T_BYTE */ {JOpcode.F2I, JOpcode.I2B}, - /* T_FLOAT -> T_SHORT */ {JOpcode.F2I, JOpcode.I2S}, - /* T_FLOAT -> T_INT */ {JOpcode.F2I}, - /* T_FLOAT -> T_LONG */ {JOpcode.F2L} - }, - { - /* T_DOUBLE -> T_BOOLEAN */ forbidden, - /* T_DOUBLE -> T_CHAR */ {JOpcode.D2I, JOpcode.I2C}, - /* T_DOUBLE -> T_FLOAT */ {JOpcode.D2F}, - /* T_DOUBLE -> T_DOUBLE */ nothingToDo, - /* T_DOUBLE -> T_BYTE */ {JOpcode.D2I, JOpcode.I2B}, - /* T_DOUBLE -> T_SHORT */ {JOpcode.D2I, JOpcode.I2S}, - /* T_DOUBLE -> T_INT */ {JOpcode.D2I}, - /* T_DOUBLE -> T_LONG */ {JOpcode.D2L} - }, - { - /* T_BYTE -> T_BOOLEAN */ forbidden, - /* T_BYTE -> T_CHAR */ {JOpcode.I2C}, - /* T_BYTE -> T_FLOAT */ {JOpcode.I2F}, - /* T_BYTE -> T_DOUBLE */ {JOpcode.I2D}, - /* T_BYTE -> T_BYTE */ nothingToDo, - /* T_BYTE -> T_SHORT */ nothingToDo, - /* T_BYTE -> T_INT */ nothingToDo, - /* T_BYTE -> T_LONG */ {JOpcode.I2L} - }, - { - /* T_SHORT -> T_BOOLEAN */ forbidden, - /* T_SHORT -> T_CHAR */ {JOpcode.I2C}, - /* T_SHORT -> T_FLOAT */ {JOpcode.I2F}, - /* T_SHORT -> T_DOUBLE */ {JOpcode.I2D}, - /* T_SHORT -> T_BYTE */ {JOpcode.I2B}, - /* T_SHORT -> T_SHORT */ nothingToDo, - /* T_SHORT -> T_INT */ nothingToDo, - /* T_SHORT -> T_LONG */ {JOpcode.I2L} - }, - { - /* T_INT -> T_BOOLEAN */ forbidden, - /* T_INT -> T_CHAR */ {JOpcode.I2C}, - /* T_INT -> T_FLOAT */ {JOpcode.I2F}, - /* T_INT -> T_DOUBLE */ {JOpcode.I2D}, - /* T_INT -> T_BYTE */ {JOpcode.I2B}, - /* T_INT -> T_SHORT */ {JOpcode.I2S}, - /* T_INT -> T_INT */ nothingToDo, - /* T_INT -> T_LONG */ {JOpcode.I2L} - }, - { - /* T_LONG -> T_BOOLEAN */ forbidden, - /* T_LONG -> T_CHAR */ {JOpcode.L2I, JOpcode.I2C}, - /* T_LONG -> T_FLOAT */ {JOpcode.L2F}, - /* T_LONG -> T_DOUBLE */ {JOpcode.L2D}, - /* T_LONG -> T_BYTE */ {JOpcode.L2I, JOpcode.I2B}, - /* T_LONG -> T_SHORT */ {JOpcode.L2I, JOpcode.I2S}, - /* T_LONG -> T_INT */ {JOpcode.L2I}, - /* T_LONG -> T_LONG */ nothingToDo - } - }; - - public JExtendedCode(FJBGContext context, - JClass clazz, - JMethod owner) { - super(context, clazz, owner); - } - - public void emitPUSH(boolean value) { emitPUSH(value ? 
1 : 0); } - public void emitPUSH(Boolean value) { emitPUSH(value.booleanValue()); } - - public void emitPUSH(byte value) { - switch (value) { - case -1: emitICONST_M1(); break; - case 0: emitICONST_0(); break; - case 1: emitICONST_1(); break; - case 2: emitICONST_2(); break; - case 3: emitICONST_3(); break; - case 4: emitICONST_4(); break; - case 5: emitICONST_5(); break; - default: - emitBIPUSH(value); - } - } - public void emitPUSH(Byte value) { emitPUSH(value.byteValue()); } - - public void emitPUSH(short value) { - switch (value) { - case -1: emitICONST_M1(); break; - case 0: emitICONST_0(); break; - case 1: emitICONST_1(); break; - case 2: emitICONST_2(); break; - case 3: emitICONST_3(); break; - case 4: emitICONST_4(); break; - case 5: emitICONST_5(); break; - default: - if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE) - emitBIPUSH((byte)value); - else - emitSIPUSH(value); - } - } - public void emitPUSH(Short value) { emitPUSH(value.shortValue()); } - - // TODO check that we do the right thing here - public void emitPUSH(char value) { emitPUSH((int)value); } - public void emitPUSH(Character value) { emitPUSH(value.charValue()); } - - public void emitPUSH(int value) { - switch (value) { - case -1: emitICONST_M1(); break; - case 0: emitICONST_0(); break; - case 1: emitICONST_1(); break; - case 2: emitICONST_2(); break; - case 3: emitICONST_3(); break; - case 4: emitICONST_4(); break; - case 5: emitICONST_5(); break; - default: - if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE) - emitBIPUSH((byte)value); - else if (value >= Short.MIN_VALUE && value <= Short.MAX_VALUE) - emitSIPUSH((short)value); - else - emitPUSH_index(pool.addInteger(value)); - break; - } - } - public void emitPUSH(Integer value) { emitPUSH(value.intValue()); } - - public void emitPUSH(long value) { - if (value == 0L) - emitLCONST_0(); - else if (value == 1L) - emitLCONST_1(); - else - emitLDC2_W(value); - } - public void emitPUSH(Long value) { emitPUSH(value.longValue()); } - - private static final Float ZEROF = Float.valueOf(0f); - private static final Float ONEF = Float.valueOf(1f); - private static final Float TWOF = Float.valueOf(2f); - public void emitPUSH(Float value) { - if (ZEROF.equals(value)) - emitFCONST_0(); - else if (ONEF.equals(value)) - emitFCONST_1(); - else if (TWOF.equals(value)) - emitFCONST_2(); - else - emitPUSH_index(pool.addFloat(value.floatValue())); - } - public void emitPUSH(float value) { emitPUSH(Float.valueOf(value)); } - - private static final Double ZEROD = Double.valueOf(0d); - private static final Double ONED = Double.valueOf(1d); - public void emitPUSH(Double value) { - if (ZEROD.equals(value)) - emitDCONST_0(); - else if (ONED.equals(value)) - emitDCONST_1(); - else - emitLDC2_W(value.doubleValue()); - } - public void emitPUSH(double value) { emitPUSH(Double.valueOf(value)); } - - public void emitPUSH(String s) { - emitPUSH_index(pool.addString(s)); - } - - /** Pushes a class literal on the stack */ - public void emitPUSH(JReferenceType type) { - assert owner.owner.major >= 49; - emitPUSH_index(pool.addClass(type.getDescriptor())); - } - - protected void emitPUSH_index(int index) { - if (index <= 0xFF) - emitU1(JOpcode.LDC, index); - else - emitU2(JOpcode.LDC_W, index); - } - - public void emitLOAD(int index, JType type) { - JOpcode opcode; - - switch (type.getTag()) { - case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR: - case JType.T_SHORT: case JType.T_INT: - switch (index) { - case 0: emitILOAD_0(); return; - case 1: emitILOAD_1(); return; - case 2: 
emitILOAD_2(); return; - case 3: emitILOAD_3(); return; - default: opcode = JOpcode.ILOAD; - } break; - case JType.T_FLOAT: - switch (index) { - case 0: emitFLOAD_0(); return; - case 1: emitFLOAD_1(); return; - case 2: emitFLOAD_2(); return; - case 3: emitFLOAD_3(); return; - default: opcode = JOpcode.FLOAD; - } break; - case JType.T_LONG: - switch (index) { - case 0: emitLLOAD_0(); return; - case 1: emitLLOAD_1(); return; - case 2: emitLLOAD_2(); return; - case 3: emitLLOAD_3(); return; - default: opcode = JOpcode.LLOAD; - } break; - case JType.T_DOUBLE: - switch (index) { - case 0: emitDLOAD_0(); return; - case 1: emitDLOAD_1(); return; - case 2: emitDLOAD_2(); return; - case 3: emitDLOAD_3(); return; - default: opcode = JOpcode.DLOAD; - } break; - case JType.T_ARRAY: case JType.T_OBJECT: - switch (index) { - case 0: emitALOAD_0(); return; - case 1: emitALOAD_1(); return; - case 2: emitALOAD_2(); return; - case 3: emitALOAD_3(); return; - default: opcode = JOpcode.ALOAD; - } break; - default: - throw new IllegalArgumentException("invalid type for load "+type); - } - - if (index > 0xFF) - emitWIDE(opcode, index); - else - emitU1(opcode, index); - } - public void emitLOAD(JLocalVariable var) { - emitLOAD(var.index, var.type); - } - - public void emitSTORE(int index, JType type) { - JOpcode opcode; - - switch (type.getTag()) { - case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR: - case JType.T_SHORT: case JType.T_INT: - switch (index) { - case 0: emitISTORE_0(); return; - case 1: emitISTORE_1(); return; - case 2: emitISTORE_2(); return; - case 3: emitISTORE_3(); return; - default: opcode = JOpcode.ISTORE; - } break; - case JType.T_FLOAT: - switch (index) { - case 0: emitFSTORE_0(); return; - case 1: emitFSTORE_1(); return; - case 2: emitFSTORE_2(); return; - case 3: emitFSTORE_3(); return; - default: opcode = JOpcode.FSTORE; - } break; - case JType.T_LONG: - switch (index) { - case 0: emitLSTORE_0(); return; - case 1: emitLSTORE_1(); return; - case 2: emitLSTORE_2(); return; - case 3: emitLSTORE_3(); return; - default: opcode = JOpcode.LSTORE; - } break; - case JType.T_DOUBLE: - switch (index) { - case 0: emitDSTORE_0(); return; - case 1: emitDSTORE_1(); return; - case 2: emitDSTORE_2(); return; - case 3: emitDSTORE_3(); return; - default: opcode = JOpcode.DSTORE; - } break; - case JType.T_ARRAY: case JType.T_OBJECT: case JType.T_ADDRESS: - switch (index) { - case 0: emitASTORE_0(); return; - case 1: emitASTORE_1(); return; - case 2: emitASTORE_2(); return; - case 3: emitASTORE_3(); return; - default: opcode = JOpcode.ASTORE; - } break; - default: - throw new IllegalArgumentException("invalid type for store "+type); - } - - if (index > 0xFF) - emitWIDE(opcode, index); - else - emitU1(opcode, index); - } - public void emitSTORE(JLocalVariable var) { - emitSTORE(var.index, var.type); - } - - public void emitALOAD(JType type) { - switch (type.getTag()) { - case JType.T_BOOLEAN: - case JType.T_BYTE: - emitBALOAD(); - break; - case JType.T_CHAR: - emitCALOAD(); - break; - case JType.T_SHORT: - emitSALOAD(); - break; - case JType.T_INT: - emitIALOAD(); - break; - case JType.T_FLOAT: - emitFALOAD(); - break; - case JType.T_LONG: - emitLALOAD(); - break; - case JType.T_DOUBLE: - emitDALOAD(); - break; - case JType.T_ARRAY: - case JType.T_OBJECT: - emitAALOAD(); - break; - default: - throw new IllegalArgumentException("invalid type for aload " + type); - } - } - - public void emitASTORE(JType type) { - switch (type.getTag()) { - case JType.T_BOOLEAN: - case JType.T_BYTE: - emitBASTORE(); - 
break; - case JType.T_CHAR: - emitCASTORE(); - break; - case JType.T_SHORT: - emitSASTORE(); - break; - case JType.T_INT: - emitIASTORE(); - break; - case JType.T_FLOAT: - emitFASTORE(); - break; - case JType.T_LONG: - emitLASTORE(); - break; - case JType.T_DOUBLE: - emitDASTORE(); - break; - case JType.T_ARRAY: - case JType.T_OBJECT: - emitAASTORE(); - break; - default: - throw new IllegalArgumentException("invalid type for astore " + type); - } - } - - public void emitRETURN(JType type) { - if (type.isValueType()) { - switch (type.getTag()) { - case JType.T_BOOLEAN: - case JType.T_BYTE: - case JType.T_CHAR: - case JType.T_SHORT: - case JType.T_INT: - emitIRETURN(); - break; - case JType.T_FLOAT: - emitFRETURN(); - break; - case JType.T_LONG: - emitLRETURN(); - break; - case JType.T_DOUBLE: - emitDRETURN(); - break; - } - } else if (type.isArrayType() || type.isObjectType()) - emitARETURN(); - else if (type == JType.VOID) - emitRETURN(); - else - throw new IllegalArgumentException("invalid type for RETURN " + type); - } - - public void emitADD(JType type) { - switch (type.getTag()) { - case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR: - case JType.T_SHORT: case JType.T_INT: - emitIADD(); break; - case JType.T_FLOAT: - emitFADD(); break; - case JType.T_LONG: - emitLADD(); break; - case JType.T_DOUBLE: - emitDADD(); break; - } - } - - /** - * Emits a basic type conversion instruction choosen according to the - * types given in parameter. - * - * @param fromType The type of the value to be cast into another type. - * @param toType The type the value will be cast into. - */ - public void emitT2T(JType fromType, JType toType) { - assert fromType.getTag() >= JType.T_BOOLEAN - && fromType.getTag() <= JType.T_LONG - && toType.getTag() >= JType.T_BOOLEAN - && toType.getTag() <= JType.T_LONG; - - JOpcode[] conv = typeConversions[fromType.getTag() - 4][toType.getTag() - 4]; - if (conv == forbidden) { - throw new Error("inconvertible types : " + fromType.toString() - + " -> " + toType.toString()); - } else if (conv != nothingToDo) { - for (int i = 0; i < conv.length; i++) { - emit(conv[i]); - } - } - } - - public void emitIF(int cond, Label label) throws OffsetTooBigException { - assert cond >= COND_EQ && cond <= COND_LE; - emitU2(JOpcode.OPCODES[153 + cond], label.getOffset16(getPC() + 1, getPC())); - } - public void emitIF(int cond, int targetPC) throws OffsetTooBigException { - int offset = targetPC - getPC(); - emitU2(JOpcode.OPCODES[153 + cond], offset); - } - public void emitIF(int cond) throws OffsetTooBigException { - emitIF(cond, 0); - } - - public void emitIF_ICMP(int cond, Label label) throws OffsetTooBigException { - assert cond >= COND_EQ && cond <= COND_LE; - emitU2(JOpcode.OPCODES[159 + cond], label.getOffset16(getPC() + 1, getPC())); - } - public void emitIF_ICMP(int cond, int targetPC) throws OffsetTooBigException { - int offset = targetPC - getPC(); - emitU2(JOpcode.OPCODES[159 + cond], offset); - } - public void emitIF_ICMP(int cond) throws OffsetTooBigException { - emitIF_ICMP(cond, 0); - } - - public void emitIF_ACMP(int cond, Label label) throws OffsetTooBigException { - assert cond == COND_EQ || cond == COND_NE; - emitU2(JOpcode.OPCODES[165 + cond], label.getOffset16(getPC() + 1, getPC())); - } - public void emitIF_ACMP(int cond, int targetPC) throws OffsetTooBigException { - int offset = targetPC - getPC(); - emitU2(JOpcode.OPCODES[165 + cond], offset); - } - public void emitIF_ACMP(int cond) throws OffsetTooBigException { - emitIF_ACMP(cond, 0); - } - - public void 
emitGOTO_maybe_W(Label label, boolean defaultToWide) { - if (label.anchored) - emitGOTO_maybe_W(label.targetPC); - else { - if (defaultToWide) - emitGOTO_W(label); - else { - try { - emitGOTO(label); - } catch (OffsetTooBigException e) { - throw new Error(e); - } - } - } - } - - public void emitGOTO_maybe_W(int targetPC) { - int offset = targetPC - (getPC() + 1); - if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE) - emitGOTO_W(targetPC); - else { - try { - emitGOTO(targetPC); - } catch (OffsetTooBigException e) { - throw new Error(e); - } - } - } - - /** - * Emits a switch instruction choosen according to the caracteristics - * of the given list of keys and a default maxRate. - * - * @param keySets The array of all keys that must be compared to the - * value on stack. - * @param branches The labels representing the jump addresses linked - * with the corresponding keys. - * @param defaultBranch The label representing the default branch - * address. - */ - public void emitSWITCH(int[][] keySets, - Label[] branches, - Label defaultBranch, - double minDensity) { - assert keySets.length == branches.length; - - int flatSize = 0; - for (int i = 0; i < keySets.length; ++i) - flatSize += keySets[i].length; - - int[] flatKeys = new int[flatSize]; - Label[] flatBranches = new Label[flatSize]; - int flatI = 0; - for (int i = 0; i < keySets.length; ++i) { - Label branch = branches[i]; - int[] keys = keySets[i]; - for (int j = 0; j < keys.length; ++j) { - flatKeys[flatI] = keys[j]; - flatBranches[flatI] = branch; - } - ++flatI; - } - assert flatI == flatSize; - emitSWITCH(flatKeys, flatBranches, defaultBranch, minDensity); - } - - /** - * Emits a switch instruction choosen according to the caracteristics - * of the given list of keys and a given maxRate. - * - * @param keys The array of all keys that must be compared to the - * value on stack. - * @param branches The labels representing the jump addresses linked - * with the corresponding keys. - * @param defaultBranch The label representing the default branch - * address. - * @param minDensity The minimum density to use for TABLESWITCH. - */ - public void emitSWITCH(int[] keys, - Label[] branches, - Label defaultBranch, - double minDensity) { - assert keys.length == branches.length; - - //The special case for empty keys. It makes sense to allow - //empty keys and generate LOOKUPSWITCH with defaultBranch - //only. This is exactly what javac does for switch statement - //that has only a default case. - if (keys.length == 0) { - emitLOOKUPSWITCH(keys, branches, defaultBranch); - return; - } - //the rest of the code assumes that keys.length > 0 - - // sorting the tables - // FIXME use quicksort - for (int i = 1; i < keys.length; i++) { - for (int j = 1; j <= keys.length - i; j++) { - if (keys[j] < keys[j - 1]) { - int tmp = keys[j]; - keys[j] = keys[j - 1]; - keys[j - 1] = tmp; - - Label tmp_l = branches[j]; - branches[j] = branches[j - 1]; - branches[j - 1] = tmp_l; - } - } - } - - int keyMin = keys[0], keyMax = keys[keys.length - 1]; - /** Calculate in long to guard against overflow. */ - long keyRange = (long)keyMax - keyMin + 1; - if ((double)keys.length / (double)keyRange >= minDensity) { - // Keys are dense enough, use a table in which holes are - // filled with defaultBranch. 
- int[] newKeys = new int[(int)keyRange]; - Label[] newBranches = new Label[(int)keyRange]; - int oldPos = 0; - for (int i = 0; i < keyRange; ++i) { - int key = keyMin + i; - newKeys[i] = key; - if (keys[oldPos] == key) { - newBranches[i] = branches[oldPos]; - ++oldPos; - } else - newBranches[i] = defaultBranch; - } - assert oldPos == keys.length; - emitTABLESWITCH(newKeys, newBranches, defaultBranch); - } else - emitLOOKUPSWITCH(keys, branches, defaultBranch); - } - - /** - * Emits a method invocation instruction choosen according to - * the caracteristics of the given method. - * - * @param method The method to be invoked. - */ - public void emitINVOKE(JMethod method) { - String mName = method.getName(); - String cName = method.getOwner().getName(); - JMethodType mType = (JMethodType)method.getType(); - if (method.isStatic()) - emitINVOKESTATIC(cName, mName, mType); - else if (method.getOwner().isInterface()) - emitINVOKEINTERFACE(cName, mName, mType); - else - emitINVOKEVIRTUAL(cName, mName, mType); - } - -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JField.java b/src/fjbg/ch/epfl/lamp/fjbg/JField.java deleted file mode 100644 index 29d826ba99..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JField.java +++ /dev/null @@ -1,62 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.IOException; - -/** - * Java class field. - * - * @author Michel Schinz - * @version 1.0 - */ - -public class JField extends JFieldOrMethod { - - protected JField(FJBGContext context, - JClass owner, - int accessFlags, - String name, - JType type) { - super(context, owner, accessFlags, name, type); - } - - protected JField(FJBGContext context, - JClass owner, - DataInputStream stream) - throws IOException { - super(context, owner, stream); - } - - // Follows javap output format for fields. - /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(flagsToString()); - buf.append(toExternalName(getType())); - buf.append(" "); - buf.append(getName()); - buf.append(";\n"); - java.util.Iterator attrsIt = attributes.iterator(); - while (attrsIt.hasNext()) { - JAttribute attrs = (JAttribute)attrsIt.next(); - buf.append(attrs); - } - return buf.toString(); - } - - private String flagsToString() { - StringBuffer buf = new StringBuffer(); - if (isPublic()) buf.append("public "); - else if (isProtected()) buf.append("protected "); - else if (isPrivate()) buf.append("private "); - if (isStatic()) buf.append("static "); - else if (isTransient()) buf.append("transient "); - else if (isVolatile()) buf.append("volatile "); - if (isAbstract()) buf.append("abstract "); - else if (isFinal()) buf.append("final "); - return buf.toString(); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java deleted file mode 100644 index 794c0f13b5..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java +++ /dev/null @@ -1,138 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -/** - * Abstract superclass for a Java field or method. - * - * No two methods of fields in one class file may have the same name and - * descriptor. See sections 4.6 and 4.7 of the JVM specification. 
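The is*() accessors defined in JFieldOrMethod below (isPublic(), isStatic(), isFinal(), and so on) all test a single bit of the accessFlags mask. The sketch below shows that pattern in isolation; it is not FJBG code, and the flag constants used are the standard JVM values (0x0001 ACC_PUBLIC, 0x0002 ACC_PRIVATE, 0x0008 ACC_STATIC, 0x0010 ACC_FINAL).

// Standalone illustration of testing individual access-flag bits, as the
// is*() accessors below do against the JAccessFlags constants.
public class AccessFlagsSketch {
    static final int ACC_PUBLIC  = 0x0001;
    static final int ACC_PRIVATE = 0x0002;
    static final int ACC_STATIC  = 0x0008;
    static final int ACC_FINAL   = 0x0010;

    public static void main(String[] args) {
        int flags = ACC_PUBLIC | ACC_STATIC | ACC_FINAL;   // e.g. a "public static final" field
        System.out.println((flags & ACC_PUBLIC)  != 0);    // true
        System.out.println((flags & ACC_FINAL)   != 0);    // true
        System.out.println((flags & ACC_PRIVATE) != 0);    // false
    }
}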
- * - * @author Michel Schinz - * @version 1.0 - */ - -abstract public class JFieldOrMethod extends JMember { - - protected final JClass owner; - protected final JType type; - - protected final int nameIndex, signatureIndex; - - protected JFieldOrMethod(FJBGContext context, - JClass owner, - int accessFlags, - String name, - JType type) { - super(context, accessFlags, name); - this.owner = owner; - this.type = type; - - nameIndex = owner.pool.addUtf8(name); - signatureIndex = owner.pool.addUtf8(type.getSignature()); - } - - protected JFieldOrMethod(FJBGContext context, - JClass owner, - DataInputStream stream) - throws IOException { - super(context); - this.owner = owner; - this.accessFlags = stream.readShort(); - this.nameIndex = stream.readShort(); - this.name = owner.pool.lookupUtf8(nameIndex); - this.signatureIndex = stream.readShort(); - this.type = JType.parseSignature(owner.pool.lookupUtf8(signatureIndex)); - this.attributes.addAll(JAttribute.readFrom(context, owner, this, stream)); - } - - public void freeze() throws JCode.OffsetTooBigException { - assert !frozen; - frozen = true; - } - - public JClass getOwner() { return owner; } - - public JType getType() { return type; } - - public JClass getJClass() { return owner; } - - public boolean isPublic() { - return (accessFlags & JAccessFlags.ACC_PUBLIC) != 0; - } - - public boolean isPrivate() { - return (accessFlags & JAccessFlags.ACC_PRIVATE) != 0; - } - - public boolean isProtected() { - return (accessFlags & JAccessFlags.ACC_PROTECTED) != 0; - } - - public boolean isStatic() { - return (accessFlags & JAccessFlags.ACC_STATIC) != 0; - } - - public boolean isFinal() { - return (accessFlags & JAccessFlags.ACC_FINAL) != 0; - } - - public boolean isSuper() { - return (accessFlags & JAccessFlags.ACC_SUPER) != 0; - } - - public boolean isVolatile() { - return (accessFlags & JAccessFlags.ACC_VOLATILE) != 0; - } - - public boolean isTransient() { - return (accessFlags & JAccessFlags.ACC_TRANSIENT) != 0; - } - - public boolean isNative() { - return (accessFlags & JAccessFlags.ACC_NATIVE) != 0; - } - - public boolean isInterface() { - return (accessFlags & JAccessFlags.ACC_INTERFACE) != 0; - } - - public boolean isAbstract() { - return (accessFlags & JAccessFlags.ACC_ABSTRACT) != 0; - } - - public boolean isStrict() { - return (accessFlags & JAccessFlags.ACC_STRICT) != 0; - } - - // 1.5 specifics - public boolean isBridge() { - return (accessFlags & JAccessFlags.ACC_BRIDGE) != 0; - } - - public boolean hasVarargs() { - return (accessFlags & JAccessFlags.ACC_VARARGS) != 0; - } - - public void writeTo(DataOutputStream stream) throws IOException { - if (! frozen) { - try { - freeze(); - } - catch (JCode.OffsetTooBigException e) { - throw new Error(e); - } - } - stream.writeShort(accessFlags); - stream.writeShort(nameIndex); - stream.writeShort(signatureIndex); - JAttribute.writeTo(getAttributes(), stream); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java deleted file mode 100644 index 1c1ced500d..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java +++ /dev/null @@ -1,201 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.Map; - -/** - * InnerClasses attribute. 
- * - * The ClassFile structure of a class/interface C must have exactly one - * InnerClasses attribute in its attributes table if the constant pool of C - * contains a CONSTANT_Class_info entry which represents a class or interface - * that is not a member of a package. See section 4.8.5 of the JVM Specification. - * - * @author Iulian Dragos, Stephane Micheloud - * @version 1.1 - */ -public class JInnerClassesAttribute extends JAttribute { - /** Constant pool of the current classfile. */ - private JConstantPool pool; - - /** InnerClass entries */ - private Map/**/ entries = new LinkedHashMap(); - - public JInnerClassesAttribute(FJBGContext context, JClass clazz) { - super(context, clazz); - this.pool = clazz.pool; - } - - public JInnerClassesAttribute(FJBGContext context, - JClass clazz, - Object owner, - String name, - int size, - DataInputStream stream) - throws IOException { - super(context, clazz, name); - this.pool = clazz.pool; - - String inner = null; - int count = stream.readShort(); - for (int i = 0; i < count; ++i) { - int innerIdx = stream.readShort(); - int outerIdx = stream.readShort(); - int nameIdx = stream.readShort(); - int flags = stream.readShort(); - inner = pool.lookupClass(innerIdx); - entries.put(inner, new Entry(innerIdx, outerIdx, nameIdx, flags)); - } - - assert name.equals(getName()); - } - - public void addEntry(String inner, String outer, String name, int flags) { - int innerIdx = pool.addClass(inner); - int outerIdx = 0; - if (outer != null) outerIdx = pool.addClass(outer); - int nameIdx = 0; - if (name != null) nameIdx = pool.addUtf8(name); - - Entry e = new Entry(innerIdx, outerIdx, nameIdx, flags); - - if (entries.containsKey(inner)) { - Entry other = (Entry) entries.get(inner); - assert other.outerInfo == e.outerInfo && other.originalName == e.originalName && other.innerFlags == e.innerFlags - : inner + " already declared as " + other; - } else - entries.put(inner, e); - } - - public String getName() { return "InnerClasses"; } - - // Follows javap output format for the InnerClass attribute. - /*@Override*/ public String toString() { - // Here we intentionally use "InnerClass" as javap :-( - StringBuffer buf = new StringBuffer(" InnerClass: "); - for (Iterator it = entries.values().iterator(); it.hasNext(); ) { - Entry e = (Entry)it.next(); - buf.append("\n "); - buf.append(e.innerFlagsToString()); - buf.append("#"); - if (e.originalName != 0) { - buf.append(e.originalName); - buf.append("= #"); - } - buf.append(e.innerInfo); - if (e.outerInfo != 0) { - buf.append(" of #"); - buf.append(e.outerInfo); - } - buf.append("; //"); - if (e.originalName != 0) { - buf.append(pool.lookupUtf8(e.originalName)); - buf.append("="); - } - buf.append("class "); - buf.append(pool.lookupClass(e.innerInfo)); - if (e.outerInfo != 0) { - buf.append(" of class "); - buf.append(pool.lookupClass(e.outerInfo)); - } - } - buf.append("\n"); - return buf.toString(); - } - - protected int getSize() { - return 2 + entries.size() * 8; - } - - protected void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeShort(entries.size()); - for (Iterator it = entries.values().iterator(); it.hasNext(); ) { - Entry e = (Entry)it.next(); - stream.writeShort(e.innerInfo); - stream.writeShort(e.outerInfo); - stream.writeShort(e.originalName); - stream.writeShort(e.innerFlags); - } - } - - /** An entry in the InnerClasses attribute, as defined by the JVM Spec. */ - private class Entry { - /** CONSTANT_Class_info index in the pool for the inner class (mangled). 
*/ - int innerInfo; - - /** CONSTANT_Class_info index in the pool for the outer class (mangled). */ - int outerInfo; - - /** CONSTANT_Utf8_info index in the pool for the original name of the inner class. */ - int originalName; - - /** Short int for modifier flags. */ - int innerFlags; - - public Entry(int iI, int oI, int oN, int f) { - this.innerInfo = iI; - this.outerInfo = oI; - this.originalName = oN; - this.innerFlags = f; - } - - public Entry(String innerClass, String outerClass, String name, int flags) { - this(pool.addClass(innerClass), pool.addClass(outerClass), pool.addUtf8(name), flags); - } - - /** Two entries are equal if they refer to the same inner class. - * innerInfo represents a unique name (mangled). - */ - public boolean equals(Object other) { - if (other instanceof Entry) { - Entry otherEntry = (Entry) other; - return otherEntry.innerInfo == this.innerInfo; - } - return false; - } - - public String innerFlagsToString() { - StringBuffer buf = new StringBuffer(); - if (isPublic()) buf.append("public "); - else if (isProtected()) buf.append("protected "); - else if (isPrivate()) buf.append("private "); - //if (isStatic()) buf.append("static "); // as javap - if (isAbstract()) buf.append("abstract "); - else if (isFinal()) buf.append("final "); - return buf.toString(); - } - - private boolean isPublic() { - return (innerFlags & JAccessFlags.ACC_PUBLIC) != 0; - } - - private boolean isPrivate() { - return (innerFlags & JAccessFlags.ACC_PRIVATE) != 0; - } - - private boolean isProtected() { - return (innerFlags & JAccessFlags.ACC_PROTECTED) != 0; - } - - private boolean isStatic() { - return (innerFlags & JAccessFlags.ACC_STATIC) != 0; - } - - private boolean isFinal() { - return (innerFlags & JAccessFlags.ACC_FINAL) != 0; - } - - private boolean isAbstract() { - return (innerFlags & JAccessFlags.ACC_ABSTRACT) != 0; - } - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java b/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java deleted file mode 100644 index 96f3b4ebef..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java +++ /dev/null @@ -1,30 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -/** - * Labels which can be attached to instructions. - * - * @author Michel Schinz - * @version 1.0 - */ - -public class JLabel { - public final static int UNDEFINED_ANCHOR = -1; - protected int anchor = UNDEFINED_ANCHOR; - - public boolean isAnchored() { return anchor != UNDEFINED_ANCHOR; } - - public int getAnchor() { - assert isAnchored(); - return anchor; - } - - public void setAnchor(int anchor) { - assert !isAnchored(); - this.anchor = anchor; - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java deleted file mode 100644 index f8c09b8ef8..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java +++ /dev/null @@ -1,121 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -/** - * Attribute storing correspondance between instructions and source - * line numbers. 
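The encode() helper further down compresses the per-pc line-number array by keeping a (start_pc, line_number) pair only where the source line actually changes. The standalone sketch below reproduces just that compression step for illustration; it is not FJBG code, and all names in it are invented.

import java.util.ArrayList;
import java.util.List;

// Standalone illustration of the compression performed by encode() below:
// lineNumbers[pc] == 0 means "no information"; a pair is kept only when the
// source line changes.
public class LineNumberEncodeSketch {
    static List<int[]> encode(int[] lineNumbers) {
        List<int[]> pairs = new ArrayList<>();
        int prevLine = 0;
        for (int pc = 0; pc < lineNumbers.length; pc++) {
            int line = lineNumbers[pc];
            if (line != 0 && line != prevLine) {
                pairs.add(new int[] { pc, line });
                prevLine = line;
            }
        }
        return pairs;
    }

    public static void main(String[] args) {
        // pcs 0-2 belong to source line 10, pcs 3-4 to line 11
        for (int[] pair : encode(new int[] { 10, 10, 10, 11, 11 }))
            System.out.println("pc " + pair[0] + " -> line " + pair[1]);
        // prints: pc 0 -> line 10, then pc 3 -> line 11
    }
}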
- * - * @author Michel Schinz - * @version 1.0 - */ - -public class JLineNumberTableAttribute extends JAttribute { - protected final JCode code; - - public JLineNumberTableAttribute(FJBGContext context, - JClass clazz, - JCode owner) { - super(context, clazz); - this.code = owner; - - assert owner.getOwner().getOwner() == clazz; - } - - public JLineNumberTableAttribute(FJBGContext context, - JClass clazz, - Object owner, - String name, - int size, - DataInputStream stream) - throws IOException { - super(context, clazz, name); - this.code = (JCode)owner; - - int[] mapping = new int[code.getSize()]; - - int count = stream.readShort(); - for (int i = 0; i < count; ++i) { - int startPC = stream.readShort(); - int lineNum = stream.readShort(); - mapping[startPC] = lineNum; - } - - // Avoids duplication of LineNumberTable attribute - // (see method ensureLineNumberCapacity in class JCode). - assert code.lineNumbers == null; - code.lineNumbers = new int[0]; - - int lineNum = 0; - for (int pc = 0; pc < mapping.length; ++pc) { - if (mapping[pc] != 0) lineNum = mapping[pc]; - if (lineNum != 0) code.setLineNumber(pc, lineNum); - } - - assert name.equals(getName()); - } - - public String getName() { return "LineNumberTable"; } - - // Follows javap output format for LineNumberTable attribute. - /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(" LineNumberTable: "); - int[] encoding = encode(); - for (int i = 0; i < encoding.length/2; ++i) { - buf.append("\n line "); - buf.append(encoding[i * 2 + 1]); - buf.append(": "); - buf.append(encoding[i * 2]); - } - buf.append("\n"); - return buf.toString(); - } - - protected int[] encoding; - protected int[] encode() { - if (encoding == null) { - int[] lineNumbers = code.getLineNumbers(); - int[] preEncoding = new int[lineNumbers.length * 2]; - int prevLineNum = 0; - - int i = 0; - for (int pc = 0; pc < lineNumbers.length; ++pc) { - int lineNum = lineNumbers[pc]; - if (lineNum != 0 & lineNum != prevLineNum) { - preEncoding[i++] = pc; - preEncoding[i++] = lineNum; - prevLineNum = lineNum; - } - } - if (i == preEncoding.length) - encoding = preEncoding; - else { - encoding = new int[i]; - System.arraycopy(preEncoding, 0, encoding, 0, i); - } - } - return encoding; - } - - protected int getSize() { - int[] encoding = encode(); - return 2 + encoding.length * 2; - } - - protected void writeContentsTo(DataOutputStream stream) throws IOException { - int[] encoding = encode(); - int entries = encoding.length / 2; - stream.writeShort(entries); - for (int i = 0; i < entries; ++i) { - stream.writeShort(encoding[i * 2]); - stream.writeShort(encoding[i * 2 + 1]); - } - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java deleted file mode 100644 index af7980656f..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java +++ /dev/null @@ -1,42 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -/** - * Representation of a local variable or method argument. 
- * - * @author Michel Schinz - * @version 1.0 - */ - -public class JLocalVariable { - protected final JMethod owner; - protected final JType type; - protected final String name; - protected final int index; - - protected JLocalVariable(FJBGContext context, - JMethod owner, - JType type, - String name, - int index) { - this.owner = owner; - this.type = type; - this.name = name; - this.index = index; - - assert index < 0xFFFF : "index too big for local variable: " + index; - } - - public JMethod getOwner() { return owner; } - public int getIndex() { return index; } - public String getName() { return name; } - public JType getType() { return type; } - - /*@Override*/ public String toString() { - return "0\t"+type.getSize()+"\t"+index+"\t"+name+"\t"+type; - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java deleted file mode 100644 index b277cc71c0..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java +++ /dev/null @@ -1,167 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.Iterator; -import java.util.LinkedList; - -import ch.epfl.lamp.fjbg.JConstantPool.*; - -/** - * Attribute storing local variables. - * - * @author Stephane Micheloud - * @version 1.0 - */ - -public class JLocalVariableTableAttribute extends JAttribute { - /** Constant pool of the current classfile. */ - private JConstantPool pool; - - protected final LinkedList/**/ entries = new LinkedList(); - protected int localVariableIndex = 0; - - public JLocalVariableTableAttribute(FJBGContext context, - JClass clazz, - JCode code) { - super(context, clazz); - this.pool = clazz.pool; - - assert code.getOwner().getOwner() == clazz; - } - - public JLocalVariableTableAttribute(FJBGContext context, - JClass clazz, - Object owner, - String name, - int size, - DataInputStream stream) - throws IOException { - super(context, clazz, name); - this.pool = clazz.pool; - - int count = stream.readShort(); - for (int i = 0; i < count; ++i) { - int startPc = stream.readShort(); - int length = stream.readShort(); - int nameIndex = stream.readShort(); - int descIndex = stream.readShort(); - int index = stream.readShort(); - addEntry(startPc, length, nameIndex, descIndex, index); - } - - assert name.equals(getName()); - } - - public void addEntry(int startPc, int length, int nameIndex, - int descIndex, int index) { - entries.add(new Entry(startPc, length, nameIndex, descIndex, index)); - } - - public void addEntry(int startPc, int length, String name, - String desc, int index) { - Entry e = new Entry(startPc, length, name, desc, index); - Entry other = getEntry(index); - if (other != null) { - assert other.nameIndex == e.nameIndex && other.descIndex == e.descIndex - : e + " already declared as " + other; - } else - entries.add(e); - } - - public void addEntry(int startPc, int length, String name, String desc) { - entries.add(new Entry(startPc, length, name, desc)); - } - - public String getName() { return "LocalVariableTable"; } - - // Follows javap output format for LocalVariableTable attribute. 
- /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(" LocalVariableTable: "); - buf.append("\n Start Length Slot Name Signature"); - for (Iterator it = entries.iterator(); it.hasNext(); ) { - buf.append("\n "); - Entry e = (Entry)it.next(); - Utf8Entry name = (Utf8Entry)pool.lookupEntry(e.nameIndex); - Utf8Entry sig = (Utf8Entry)pool.lookupEntry(e.descIndex); - buf.append(e.startPc); - buf.append(" "); - buf.append(e.length); - buf.append(" "); - buf.append(e.index); - buf.append(" "); - buf.append(name.getValue()); - buf.append(" "); - buf.append(sig.getValue()); - } - buf.append("\n"); - return buf.toString(); - } - - public int getMaxLocals() { - return localVariableIndex; - } - - public int getSize() { - return 2 + entries.size() * 10; - } - - protected void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeShort(entries.size()); - for (Iterator it = entries.iterator(); it.hasNext(); ) { - Entry e = (Entry)it.next(); - stream.writeShort(e.startPc); - stream.writeShort(e.length); - stream.writeShort(e.nameIndex); - stream.writeShort(e.descIndex); - stream.writeShort(e.index); - } - } - - private Entry getEntry(int index) { - Entry e = null; - try { e = (Entry)entries.get(index); } catch (Exception ex) {} - return e; - } - - private class Entry { - int startPc; - int length; - int nameIndex; - int descIndex; - int index; - - public Entry(int startPc, int length, int nameIndex, int descIndex, int index) { - this.startPc = startPc; - this.length = length; - this.nameIndex = nameIndex; - this.descIndex = descIndex; - this.index = index; - localVariableIndex += length; - } - - public Entry(int startPc, int length, String name, String desc, int index) { - this(startPc, length, pool.addUtf8(name), pool.addUtf8(desc), index); - } - - public Entry(int startPc, int length, String name, String desc) { - this(startPc, length, pool.addUtf8(name), pool.addUtf8(desc), localVariableIndex); - } - - /** Two entries are equal if they refer to the same index. - */ - public boolean equals(Object other) { - if (other instanceof Entry) { - Entry otherEntry = (Entry) other; - return otherEntry.index == this.index; - } - return false; - } - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java b/src/fjbg/ch/epfl/lamp/fjbg/JMember.java deleted file mode 100644 index 6356cc874d..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java +++ /dev/null @@ -1,109 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; - -/** - * Abstract superclass for a Java class, field or method. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ - -abstract public class JMember { - - protected boolean frozen = false; - - protected final FJBGContext context; - - protected String name; - - protected int accessFlags; - - protected final List/**/ attributes = new LinkedList(); - - protected JMember(FJBGContext context) { this.context = context; } - - protected JMember(FJBGContext context, int accessFlags, String name) { - this(context); - this.name = name; - this.accessFlags = accessFlags; - } - - /** - * Gets the access flags of the class. - * @return The int representing the access flags of the class. - */ - public int getAccessFlags() { return accessFlags; } - - /** - * Gets the name of the member. - * @return The string representing the name of the member. 
- */ - public String getName() { return name; } - - /** - * Gets the type of the objects that are instances of the class. - * @return The type of the instances of the class. - */ - public abstract JType getType(); - - /** - * Gets the class corresponding to/owning this member - * @return The class owning this member or the class itself. - */ - public abstract JClass getJClass(); - - /** - * Gets the constant pool of the class. - * @return The constant pool of the class. - */ - public JConstantPool getConstantPool() { return getJClass().getConstantPool(); } - - public FJBGContext getContext() { return context; } - - /** - * Adds an attribute to the class. - * @param attr The attribute to be added. - */ - public void addAttribute(JAttribute attr) { - assert !frozen; - attributes.add(attr); - } - - /** - * Gets the list of all attributes of the class. - * @return The list of the attributes of the class representation. - */ - public List/**/ getAttributes() { - return attributes; - } - - /** - * Get the attribute with the given name, or null if it doesn't - * exist. - */ - public JAttribute getAttribute(String name) { - Iterator attrIt = getAttributes().iterator(); - while (attrIt.hasNext()) { - JAttribute attr = (JAttribute)attrIt.next(); - if (attr.getName().equals(name)) - return attr; - } - return null; - } - - protected static String toExternalName(String name) { - return name.replace('/', '.'); - } - - protected static String toExternalName(JType tpe) { - return tpe.toString().replace(':', '.'); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java deleted file mode 100644 index 01d58a45c7..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java +++ /dev/null @@ -1,199 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.IOException; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; - -/** - * Representation of a Java method. 
- * - * @author Michel Schinz - * @version 1.0 - */ - -public class JMethod extends JFieldOrMethod { - public final static String CLASS_CONSTRUCTOR_NAME = ""; - public final static String INSTANCE_CONSTRUCTOR_NAME = ""; - - protected /*final*/ JCode code; - protected final String[] argNames; - - protected final LinkedList/**/ localVariables = - new LinkedList(); - protected int localVariableIndex = 0; - - - protected JMethod(FJBGContext context, - JClass owner, - int accessFlags, - String name, - JType returnType, - JType[] argTypes, - String[] argNames) { - super(context, - owner, - accessFlags, - name, - new JMethodType(returnType, argTypes)); - this.argNames = argNames; - - assert argTypes.length == argNames.length; - - if (isAbstract() || isNative()) { - code = null; - } else { - code = context.JCode(owner, this); - addAttribute(context.JCodeAttribute(owner, this)); - - if (!isStatic()) - addNewLocalVariable(owner.getType(), "this"); - - for (int i = 0; i < argTypes.length; ++i) - addNewLocalVariable(argTypes[i], argNames[i]); - } - } - - protected JMethod(FJBGContext context, - JClass owner, - DataInputStream stream) - throws IOException { - super(context, owner, stream); - - assert isAbstract() || isNative() || code != null; - - int n = 0; - if (code != null) { - for (Iterator it = code.getAttributes().iterator(); it.hasNext(); ) { - JAttribute attr = (JAttribute)it.next(); - if (attr instanceof JLocalVariableTableAttribute) - n = ((JLocalVariableTableAttribute)attr).getMaxLocals(); - } - } - this.localVariableIndex = n; - - - JType[] argTypes = ((JMethodType)getType()).getArgumentTypes(); - argNames = new String[argTypes.length]; // TODO get from attribute - for (int i = 0; i < argNames.length; ++i) - argNames[i] = "v"+i; - } - - public void freeze() throws JCode.OffsetTooBigException { - if (code != null) code.freeze(); - super.freeze(); - } - - public JType getReturnType() { - return ((JMethodType)type).getReturnType(); - } - - public JType[] getArgumentTypes() { - return ((JMethodType)type).getArgumentTypes(); - } - - public int getArgsSize() { - int size = ((JMethodType)type).getArgsSize(); - if (!isStatic()) size += 1; // for this - return size; - } - - public String[] getArgumentNames() { - return argNames; - } - - public JCode getCode() { - assert !isAbstract(); - return code; - } - - // Invoked by the JCode constructor - protected void setCode(JCode code) { - assert null == this.code; - this.code = code; - } - - public JCodeIterator codeIterator() { - return new JCodeIterator(code); - } - - // Local variables - // FIXME : find a better management method for local variables - public JLocalVariable addNewLocalVariable(JType type, String name) { - assert !frozen; - JLocalVariable var = - context.JLocalVariable(this, type, name, localVariableIndex); - localVariableIndex += type.getSize(); - localVariables.add(var); - return var; - } - - public JLocalVariable getLocalVariable(int index) { - for (int i = 0; i < localVariables.size(); i++) { - if (((JLocalVariable)localVariables.get(i)).index == index) - return (JLocalVariable)localVariables.get(i); - } - return null; - } - - public JLocalVariable[] getLocalVariables() { - return (JLocalVariable[])localVariables - .toArray(new JLocalVariable[localVariables.size()]); - } - - - public int getMaxLocals() { - return localVariableIndex; - } - - // Follows javap output format for methods. 
- /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(flagsToString()); - String name = getName(); - if (CLASS_CONSTRUCTOR_NAME.equals(name)) - buf.append("{}"); - else { - if (INSTANCE_CONSTRUCTOR_NAME.equals(name)) - name = getOwner().getName(); - else { - buf.append(toExternalName(getReturnType())); - buf.append(" "); - } - buf.append(toExternalName(name)); - buf.append("("); - JType[] ts = getArgumentTypes(); - for (int i = 0; i < ts.length; ++i) { - if (i > 0) buf.append(", "); - buf.append(toExternalName(ts[i])); - } - buf.append(")"); - } - buf.append(";\n"); - Iterator it = attributes.iterator(); - while(it.hasNext()) { - JAttribute attr = (JAttribute)it.next(); - buf.append(attr); - } - return buf.toString(); - } - - private String flagsToString() { - StringBuffer buf = new StringBuffer(); - if (isPublic()) buf.append("public "); - else if (isProtected()) buf.append("protected "); - else if (isPrivate()) buf.append("private "); - if (isBridge()) buf.append(" "); - if (hasVarargs()) buf.append(" "); - if (isStatic()) buf.append("static "); - else if (isNative()) buf.append("native "); - if (isAbstract()) buf.append("abstract "); - else if (isFinal()) buf.append("final "); - return buf.toString(); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java deleted file mode 100644 index cd3d71fd9c..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java +++ /dev/null @@ -1,87 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -/** - * Type for Java methods. These types do not really exist in Java, but - * are provided here because they are useful in several places. - * - * @author Michel Schinz - * @version 1.0 - */ - -public class JMethodType extends JType { - protected final JType returnType; - protected final JType[] argTypes; - protected String signature = null; - - public final static JMethodType ARGLESS_VOID_FUNCTION = - new JMethodType(JType.VOID, JType.EMPTY_ARRAY); - - public JMethodType(JType returnType, JType[] argTypes) { - this.returnType = returnType; - this.argTypes = argTypes; - } - - public JType getReturnType() { return returnType; } - public JType[] getArgumentTypes() { return argTypes; } - - public int getSize() { - throw new UnsupportedOperationException(); - } - - public String getSignature() { - if (signature == null) { - StringBuffer buf = new StringBuffer(); - buf.append('('); - for (int i = 0; i < argTypes.length; ++i) - buf.append(argTypes[i].getSignature()); - buf.append(')'); - buf.append(returnType.getSignature()); - signature = buf.toString(); - } - return signature; - } - - public int getTag() { return T_UNKNOWN; } - - public String toString() { - StringBuffer buf = new StringBuffer(); - buf.append('('); - for (int i = 0; i < argTypes.length; ++i) - buf.append(argTypes[i].toString()); - buf.append(')'); - buf.append(returnType.toString()); - return buf.toString(); - } - - public int getArgsSize() { - int size = 0; - for (int i = 0; i < argTypes.length; ++i) - size += argTypes[i].getSize(); - return size; - } - - public int getProducedStack() { - return returnType.getSize() - getArgsSize(); - } - - public boolean isCompatibleWith(JType other) { - return false; - } - public boolean equals(Object o) { - if (o instanceof JMethodType) - return ((JMethodType)o).getSignature().equals(this.getSignature()); - else - return false; - } - public int hashCode() { - if (signature == null) - 
return 0; - else - return signature.hashCode(); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java b/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java deleted file mode 100644 index 06db5b115a..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java +++ /dev/null @@ -1,65 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -/** - * Types for Java objects. - * - * @author Michel Schinz - * @version 1.0 - */ - -public class JObjectType extends JReferenceType { - protected final String name; - protected String signature = null; - - public final static JObjectType JAVA_LANG_OBJECT = - new JObjectType("java.lang.Object"); - public final static JObjectType JAVA_LANG_STRING = - new JObjectType("java.lang.String"); - public final static JObjectType CLONEABLE = - new JObjectType("Cloneable"); - public final static JObjectType JAVA_IO_SERIALIZABLE = - new JObjectType("java.io.Serializable"); - - public JObjectType(String name) { - this.name = name; - } - - public int getSize() { return 1; } - - public String getName() { return name; } - - public String getSignature() { - if (signature == null) - signature = "L" + name.replace('.','/') + ";"; - return signature; - } - - public String getDescriptor() { - return name.replace('.','/'); - } - - public int getTag() { return T_OBJECT; } - - public String toString() { return name; } - - public boolean isObjectType() { return true; } - - public boolean isCompatibleWith(JType other) { - return other instanceof JObjectType - || other == JType.REFERENCE; - } - public boolean equals(Object o) { - if (o instanceof JObjectType) - return ((JObjectType)o).getSignature().equals(this.getSignature()); - else - return false; - } - public int hashCode() { - return name.hashCode(); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java b/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java deleted file mode 100644 index cc68681a96..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java +++ /dev/null @@ -1,1267 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -/** - * Definition of opcodes for the JVM. - * - * @author Michel Schinz, Thomas Friedli - * @version 1.0 - */ - -public class JOpcode { - public final String name; - public final int code; - - // The following attributes can be (statically) unknown for some - // instructions, and are therefore not public. To know their value, - // functions have to be used (see JCodeIterator). 
- protected final int size; - protected final JType[] producedDataTypes; - protected final JType[] consumedDataTypes; - protected final int jumpKind; - protected final int successorCount; - - protected final static int UNKNOWN = Integer.MIN_VALUE; - - protected final static int JMP_NONE = 0; - protected final static int JMP_NEXT = 1; - protected final static int JMP_ALWAYS_S2_OFFSET = 2; - protected final static int JMP_ALWAYS_S4_OFFSET = 3; - protected final static int JMP_MAYBE_S2_OFFSET = 4; - protected final static int JMP_TABLE = 5; - protected final static int JMP_LOOKUP = 6; - - protected final static JType[] NO_DATA = new JType[0]; - - protected final static JType[] INT_TYPE = - new JType[] { JType.INT }; - protected final static JType[] FLOAT_TYPE = - new JType[] { JType.FLOAT }; - protected final static JType[] LONG_TYPE = - new JType[] { JType.LONG }; - protected final static JType[] DOUBLE_TYPE = - new JType[] { JType.DOUBLE }; - protected final static JType[] OBJECT_REF_TYPE = - new JType[] { JObjectType.JAVA_LANG_OBJECT }; - protected final static JType[] ARRAY_REF_TYPE = - new JType[] { new JArrayType(JType.VOID) }; - protected final static JType[] REFERENCE_TYPE = - new JType[] { JType.REFERENCE }; - protected final static JType[] ADDRESS_TYPE = - new JType[] { JType.ADDRESS }; - protected final static JType[] UNKNOWN_TYPE = - new JType[] { JType.UNKNOWN }; - - /// Instruction codes - public final static int cNOP = 0; - public final static int cACONST_NULL = 1; - public final static int cICONST_M1 = 2; - public final static int cICONST_0 = 3; - public final static int cICONST_1 = 4; - public final static int cICONST_2 = 5; - public final static int cICONST_3 = 6; - public final static int cICONST_4 = 7; - public final static int cICONST_5 = 8; - public final static int cLCONST_0 = 9; - public final static int cLCONST_1 = 10; - public final static int cFCONST_0 = 11; - public final static int cFCONST_1 = 12; - public final static int cFCONST_2 = 13; - public final static int cDCONST_0 = 14; - public final static int cDCONST_1 = 15; - public final static int cBIPUSH = 16; - public final static int cSIPUSH = 17; - public final static int cLDC = 18; - public final static int cLDC_W = 19; - public final static int cLDC2_W = 20; - public final static int cILOAD = 21; - public final static int cLLOAD = 22; - public final static int cFLOAD = 23; - public final static int cDLOAD = 24; - public final static int cALOAD = 25; - public final static int cILOAD_0 = 26; - public final static int cILOAD_1 = 27; - public final static int cILOAD_2 = 28; - public final static int cILOAD_3 = 29; - public final static int cLLOAD_0 = 30; - public final static int cLLOAD_1 = 31; - public final static int cLLOAD_2 = 32; - public final static int cLLOAD_3 = 33; - public final static int cFLOAD_0 = 34; - public final static int cFLOAD_1 = 35; - public final static int cFLOAD_2 = 36; - public final static int cFLOAD_3 = 37; - public final static int cDLOAD_0 = 38; - public final static int cDLOAD_1 = 39; - public final static int cDLOAD_2 = 40; - public final static int cDLOAD_3 = 41; - public final static int cALOAD_0 = 42; - public final static int cALOAD_1 = 43; - public final static int cALOAD_2 = 44; - public final static int cALOAD_3 = 45; - public final static int cIALOAD = 46; - public final static int cLALOAD = 47; - public final static int cFALOAD = 48; - public final static int cDALOAD = 49; - public final static int cAALOAD = 50; - public final static int cBALOAD = 51; - public final 
static int cCALOAD = 52; - public final static int cSALOAD = 53; - public final static int cISTORE = 54; - public final static int cLSTORE = 55; - public final static int cFSTORE = 56; - public final static int cDSTORE = 57; - public final static int cASTORE = 58; - public final static int cISTORE_0 = 59; - public final static int cISTORE_1 = 60; - public final static int cISTORE_2 = 61; - public final static int cISTORE_3 = 62; - public final static int cLSTORE_0 = 63; - public final static int cLSTORE_1 = 64; - public final static int cLSTORE_2 = 65; - public final static int cLSTORE_3 = 66; - public final static int cFSTORE_0 = 67; - public final static int cFSTORE_1 = 68; - public final static int cFSTORE_2 = 69; - public final static int cFSTORE_3 = 70; - public final static int cDSTORE_0 = 71; - public final static int cDSTORE_1 = 72; - public final static int cDSTORE_2 = 73; - public final static int cDSTORE_3 = 74; - public final static int cASTORE_0 = 75; - public final static int cASTORE_1 = 76; - public final static int cASTORE_2 = 77; - public final static int cASTORE_3 = 78; - public final static int cIASTORE = 79; - public final static int cLASTORE = 80; - public final static int cFASTORE = 81; - public final static int cDASTORE = 82; - public final static int cAASTORE = 83; - public final static int cBASTORE = 84; - public final static int cCASTORE = 85; - public final static int cSASTORE = 86; - public final static int cPOP = 87; - public final static int cPOP2 = 88; - public final static int cDUP = 89; - public final static int cDUP_X1 = 90; - public final static int cDUP_X2 = 91; - public final static int cDUP2 = 92; - public final static int cDUP2_X1 = 93; - public final static int cDUP2_X2 = 94; - public final static int cSWAP = 95; - public final static int cIADD = 96; - public final static int cLADD = 97; - public final static int cFADD = 98; - public final static int cDADD = 99; - public final static int cISUB = 100; - public final static int cLSUB = 101; - public final static int cFSUB = 102; - public final static int cDSUB = 103; - public final static int cIMUL = 104; - public final static int cLMUL = 105; - public final static int cFMUL = 106; - public final static int cDMUL = 107; - public final static int cIDIV = 108; - public final static int cLDIV = 109; - public final static int cFDIV = 110; - public final static int cDDIV = 111; - public final static int cIREM = 112; - public final static int cLREM = 113; - public final static int cFREM = 114; - public final static int cDREM = 115; - public final static int cINEG = 116; - public final static int cLNEG = 117; - public final static int cFNEG = 118; - public final static int cDNEG = 119; - public final static int cISHL = 120; - public final static int cLSHL = 121; - public final static int cISHR = 122; - public final static int cLSHR = 123; - public final static int cIUSHR = 124; - public final static int cLUSHR = 125; - public final static int cIAND = 126; - public final static int cLAND = 127; - public final static int cIOR = 128; - public final static int cLOR = 129; - public final static int cIXOR = 130; - public final static int cLXOR = 131; - public final static int cIINC = 132; - public final static int cI2L = 133; - public final static int cI2F = 134; - public final static int cI2D = 135; - public final static int cL2I = 136; - public final static int cL2F = 137; - public final static int cL2D = 138; - public final static int cF2I = 139; - public final static int cF2L = 140; - public final static int 
cF2D = 141; - public final static int cD2I = 142; - public final static int cD2L = 143; - public final static int cD2F = 144; - public final static int cI2B = 145; - public final static int cI2C = 146; - public final static int cI2S = 147; - public final static int cLCMP = 148; - public final static int cFCMPL = 149; - public final static int cFCMPG = 150; - public final static int cDCMPL = 151; - public final static int cDCMPG = 152; - public final static int cIFEQ = 153; - public final static int cIFNE = 154; - public final static int cIFLT = 155; - public final static int cIFGE = 156; - public final static int cIFGT = 157; - public final static int cIFLE = 158; - public final static int cIF_ICMPEQ = 159; - public final static int cIF_ICMPNE = 160; - public final static int cIF_ICMPLT = 161; - public final static int cIF_ICMPGE = 162; - public final static int cIF_ICMPGT = 163; - public final static int cIF_ICMPLE = 164; - public final static int cIF_ACMPEQ = 165; - public final static int cIF_ACMPNE = 166; - public final static int cGOTO = 167; - public final static int cJSR = 168; - public final static int cRET = 169; - public final static int cTABLESWITCH = 170; - public final static int cLOOKUPSWITCH = 171; - public final static int cIRETURN = 172; - public final static int cLRETURN = 173; - public final static int cFRETURN = 174; - public final static int cDRETURN = 175; - public final static int cARETURN = 176; - public final static int cRETURN = 177; - public final static int cGETSTATIC = 178; - public final static int cPUTSTATIC = 179; - public final static int cGETFIELD = 180; - public final static int cPUTFIELD = 181; - public final static int cINVOKEVIRTUAL = 182; - public final static int cINVOKESPECIAL = 183; - public final static int cINVOKESTATIC = 184; - public final static int cINVOKEINTERFACE = 185; - public final static int cNEW = 187; - public final static int cNEWARRAY = 188; - public final static int cANEWARRAY = 189; - public final static int cARRAYLENGTH = 190; - public final static int cATHROW = 191; - public final static int cCHECKCAST = 192; - public final static int cINSTANCEOF = 193; - public final static int cMONITORENTER = 194; - public final static int cMONITOREXIT = 195; - public final static int cWIDE = 196; - public final static int cMULTIANEWARRAY = 197; - public final static int cIFNULL = 198; - public final static int cIFNONNULL = 199; - public final static int cGOTO_W = 200; - public final static int cJSR_W = 201; - - // Objects representing instructions - public final static JOpcode NOP = - new JOpcode("NOP", cNOP, 1, NO_DATA, NO_DATA, JMP_NEXT); - public final static JOpcode ACONST_NULL = new JOpcode("ACONST_NULL", - cACONST_NULL, - 1, - REFERENCE_TYPE, - NO_DATA, - JMP_NEXT); - public final static JOpcode ICONST_M1 = - new JOpcode("ICONST_M1", cICONST_M1, 1, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ICONST_0 = - new JOpcode("ICONST_0", cICONST_0, 1, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ICONST_1 = - new JOpcode("ICONST_1", cICONST_1, 1, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ICONST_2 = - new JOpcode("ICONST_2", cICONST_2, 1, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ICONST_3 = - new JOpcode("ICONST_3", cICONST_3, 1, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ICONST_4 = - new JOpcode("ICONST_4", cICONST_4, 1, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ICONST_5 = - new JOpcode("ICONST_5", cICONST_5, 1, INT_TYPE, NO_DATA, JMP_NEXT); - 
public final static JOpcode LCONST_0 = - new JOpcode("LCONST_0", cLCONST_0, 1, LONG_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode LCONST_1 = - new JOpcode("LCONST_1", cLCONST_1, 1, LONG_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode FCONST_0 = - new JOpcode("FCONST_0", cFCONST_0, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode FCONST_1 = - new JOpcode("FCONST_1", cFCONST_1, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode FCONST_2 = - new JOpcode("FCONST_2", cFCONST_2, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode DCONST_0 = - new JOpcode("DCONST_0", cDCONST_0, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode DCONST_1 = - new JOpcode("DCONST_1", cDCONST_1, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode BIPUSH = - new JOpcode("BIPUSH", cBIPUSH, 2, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode SIPUSH = - new JOpcode("SIPUSH", cSIPUSH, 3, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode LDC = - new JOpcode("LDC", cLDC, 2, UNKNOWN_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode LDC_W = - new JOpcode("LDC_W", cLDC_W, 3, UNKNOWN_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode LDC2_W = - new JOpcode("LDC2_W", cLDC2_W, 3, UNKNOWN_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ILOAD = - new JOpcode("ILOAD", cILOAD, 2, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode LLOAD = - new JOpcode("LLOAD", cLLOAD, 2, LONG_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode FLOAD = - new JOpcode("FLOAD", cFLOAD, 2, FLOAT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode DLOAD = - new JOpcode("DLOAD", cDLOAD, 2, DOUBLE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ALOAD = - new JOpcode("ALOAD", cALOAD, 2, REFERENCE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ILOAD_0 = - new JOpcode("ILOAD_0", cILOAD_0, 1, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ILOAD_1 = - new JOpcode("ILOAD_1", cILOAD_1, 1, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ILOAD_2 = - new JOpcode("ILOAD_2", cILOAD_2, 1, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ILOAD_3 = - new JOpcode("ILOAD_3", cILOAD_3, 1, INT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode LLOAD_0 = - new JOpcode("LLOAD_0", cLLOAD_0, 1, LONG_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode LLOAD_1 = - new JOpcode("LLOAD_1", cLLOAD_1, 1, LONG_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode LLOAD_2 = - new JOpcode("LLOAD_2", cLLOAD_2, 1, LONG_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode LLOAD_3 = - new JOpcode("LLOAD_3", cLLOAD_3, 1, LONG_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode FLOAD_0 = - new JOpcode("FLOAD_0", cFLOAD_0, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode FLOAD_1 = - new JOpcode("FLOAD_1", cFLOAD_1, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode FLOAD_2 = - new JOpcode("FLOAD_2", cFLOAD_2, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode FLOAD_3 = - new JOpcode("FLOAD_3", cFLOAD_3, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode DLOAD_0 = - new JOpcode("DLOAD_0", cDLOAD_0, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode DLOAD_1 = - new JOpcode("DLOAD_1", cDLOAD_1, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode DLOAD_2 = - new JOpcode("DLOAD_2", cDLOAD_2, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode DLOAD_3 = - new JOpcode("DLOAD_3", cDLOAD_3, 1, 
DOUBLE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ALOAD_0 = - new JOpcode("ALOAD_0", cALOAD_0, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ALOAD_1 = - new JOpcode("ALOAD_1", cALOAD_1, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ALOAD_2 = - new JOpcode("ALOAD_2", cALOAD_2, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode ALOAD_3 = - new JOpcode("ALOAD_3", cALOAD_3, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode IALOAD = - new JOpcode("IALOAD", - cIALOAD, - 1, - INT_TYPE, - new JType[] {JType.INT, JArrayType.INT}, - JMP_NEXT); - public final static JOpcode LALOAD = - new JOpcode("LALOAD", - cLALOAD, - 1, - LONG_TYPE, - new JType[] {JType.INT, JArrayType.LONG}, - JMP_NEXT); - public final static JOpcode FALOAD = - new JOpcode("FALOAD", - cFALOAD, - 1, - FLOAT_TYPE, - new JType[] {JType.INT, JArrayType.FLOAT}, - JMP_NEXT); - public final static JOpcode DALOAD = - new JOpcode("DALOAD", - cDALOAD, - 1, - DOUBLE_TYPE, - new JType[] {JType.INT, JArrayType.DOUBLE}, - JMP_NEXT); - public final static JOpcode AALOAD = - new JOpcode("AALOAD", - cAALOAD, - 1, - REFERENCE_TYPE, - new JType[] {JType.INT, JArrayType.REFERENCE}, - JMP_NEXT); - public final static JOpcode BALOAD = - new JOpcode("BALOAD", - cBALOAD, - 1, - INT_TYPE, - new JType[] {JType.INT, new JArrayType(JType.UNKNOWN)}, - JMP_NEXT); - public final static JOpcode CALOAD = - new JOpcode("CALOAD", - cCALOAD, - 1, - INT_TYPE, - new JType[] {JType.INT, JArrayType.CHAR}, - JMP_NEXT); - public final static JOpcode SALOAD = - new JOpcode("SALOAD", - cSALOAD, - 1, - INT_TYPE, - new JType[] {JType.INT, JArrayType.SHORT}, - JMP_NEXT); - public final static JOpcode ISTORE = - new JOpcode("ISTORE", cISTORE, 2, NO_DATA, INT_TYPE, JMP_NEXT); - public final static JOpcode LSTORE = - new JOpcode("LSTORE", cLSTORE, 2, NO_DATA, LONG_TYPE, JMP_NEXT); - public final static JOpcode FSTORE = - new JOpcode("FSTORE", cFSTORE, 2, NO_DATA, FLOAT_TYPE, JMP_NEXT); - public final static JOpcode DSTORE = - new JOpcode("DSTORE", cDSTORE, 2, NO_DATA, DOUBLE_TYPE, JMP_NEXT); - public final static JOpcode ASTORE = - new JOpcode("ASTORE", cASTORE, 2, NO_DATA, REFERENCE_TYPE, JMP_NEXT); - public final static JOpcode ISTORE_0 = - new JOpcode("ISTORE_0", cISTORE_0, 1, NO_DATA, INT_TYPE, JMP_NEXT); - public final static JOpcode ISTORE_1 = - new JOpcode("ISTORE_1", cISTORE_1, 1, NO_DATA, INT_TYPE, JMP_NEXT); - public final static JOpcode ISTORE_2 = - new JOpcode("ISTORE_2", cISTORE_2, 1, NO_DATA, INT_TYPE, JMP_NEXT); - public final static JOpcode ISTORE_3 = - new JOpcode("ISTORE_3", cISTORE_3, 1, NO_DATA, INT_TYPE, JMP_NEXT); - public final static JOpcode LSTORE_0 = - new JOpcode("LSTORE_0", cLSTORE_0, 1, NO_DATA, LONG_TYPE, JMP_NEXT); - public final static JOpcode LSTORE_1 = - new JOpcode("LSTORE_1", cLSTORE_1, 1, NO_DATA, LONG_TYPE, JMP_NEXT); - public final static JOpcode LSTORE_2 = - new JOpcode("LSTORE_2", cLSTORE_2, 1, NO_DATA, LONG_TYPE, JMP_NEXT); - public final static JOpcode LSTORE_3 = - new JOpcode("LSTORE_3", cLSTORE_3, 1, NO_DATA, LONG_TYPE, JMP_NEXT); - public final static JOpcode FSTORE_0 = - new JOpcode("FSTORE_0", cFSTORE_0, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT); - public final static JOpcode FSTORE_1 = - new JOpcode("FSTORE_1", cFSTORE_1, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT); - public final static JOpcode FSTORE_2 = - new JOpcode("FSTORE_2", cFSTORE_2, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT); - public final static JOpcode FSTORE_3 = - new JOpcode("FSTORE_3", cFSTORE_3, 
1, NO_DATA, FLOAT_TYPE, JMP_NEXT); - public final static JOpcode DSTORE_0 = - new JOpcode("DSTORE_0", cDSTORE_0, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT); - public final static JOpcode DSTORE_1 = - new JOpcode("DSTORE_1", cDSTORE_1, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT); - public final static JOpcode DSTORE_2 = - new JOpcode("DSTORE_2", cDSTORE_2, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT); - public final static JOpcode DSTORE_3 = - new JOpcode("DSTORE_3", cDSTORE_3, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT); - public final static JOpcode ASTORE_0 = new JOpcode("ASTORE_0", - cASTORE_0, - 1, - NO_DATA, - REFERENCE_TYPE, - JMP_NEXT); - public final static JOpcode ASTORE_1 = new JOpcode("ASTORE_1", - cASTORE_1, - 1, - NO_DATA, - REFERENCE_TYPE, - JMP_NEXT); - public final static JOpcode ASTORE_2 = new JOpcode("ASTORE_2", - cASTORE_2, - 1, - NO_DATA, - REFERENCE_TYPE, - JMP_NEXT); - public final static JOpcode ASTORE_3 = new JOpcode("ASTORE_3", - cASTORE_3, - 1, - NO_DATA, - REFERENCE_TYPE, - JMP_NEXT); - public final static JOpcode IASTORE = - new JOpcode("IASTORE", - cIASTORE, - 1, - NO_DATA, - new JType[] { JType.INT, - JType.INT, - JArrayType.INT}, - JMP_NEXT); - public final static JOpcode LASTORE = - new JOpcode("LASTORE", - cLASTORE, - 1, - NO_DATA, - new JType[] { JType.LONG, - JType.INT, - JArrayType.LONG}, - JMP_NEXT); - public final static JOpcode FASTORE = - new JOpcode("FASTORE", - cFASTORE, - 1, - NO_DATA, - new JType[] { JType.FLOAT, - JType.INT, - JArrayType.FLOAT}, - JMP_NEXT); - public final static JOpcode DASTORE = - new JOpcode("DASTORE", - cDASTORE, - 1, - NO_DATA, - new JType[] { JType.DOUBLE, - JType.INT, - JArrayType.DOUBLE}, - JMP_NEXT); - public final static JOpcode AASTORE = - new JOpcode("AASTORE", - cAASTORE, - 1, - NO_DATA, - new JType[] { JType.REFERENCE, - JType.INT, - JArrayType.REFERENCE}, - JMP_NEXT); - public final static JOpcode BASTORE = - new JOpcode("BASTORE", - cBASTORE, - 1, - NO_DATA, - new JType[] { JType.INT, - JType.INT, - new JArrayType(JType.UNKNOWN)}, - JMP_NEXT); - public final static JOpcode CASTORE = - new JOpcode("CASTORE", - cCASTORE, - 1, - NO_DATA, - new JType[] { JType.INT, - JType.INT, - JArrayType.CHAR}, - JMP_NEXT); - public final static JOpcode SASTORE = - new JOpcode("SASTORE", - cSASTORE, - 1, - NO_DATA, - new JType[] { JType.INT, - JType.INT, - JArrayType.SHORT}, - JMP_NEXT); - public final static JOpcode POP = - new JOpcode("POP", cPOP, 1, NO_DATA, UNKNOWN_TYPE, JMP_NEXT); - public final static JOpcode POP2 = - new JOpcode("POP2", cPOP2, 1, NO_DATA, UNKNOWN_TYPE, JMP_NEXT); - public final static JOpcode DUP = - new JOpcode("DUP", cDUP, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT); - public final static JOpcode DUP_X1 = new JOpcode("DUP_X1", - cDUP_X1, - 1, - UNKNOWN_TYPE, - UNKNOWN_TYPE, - JMP_NEXT); - public final static JOpcode DUP_X2 = new JOpcode("DUP_X2", - cDUP_X2, - 1, - UNKNOWN_TYPE, - UNKNOWN_TYPE, - JMP_NEXT); - public final static JOpcode DUP2 = - new JOpcode("DUP2", cDUP2, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT); - public final static JOpcode DUP2_X1 = new JOpcode("DUP2_X1", - cDUP2_X1, - 1, - UNKNOWN_TYPE, - UNKNOWN_TYPE, - JMP_NEXT); - public final static JOpcode DUP2_X2 = new JOpcode("DUP2_X2", - cDUP2_X2, - 1, - UNKNOWN_TYPE, - UNKNOWN_TYPE, - JMP_NEXT); - public final static JOpcode SWAP = - new JOpcode("SWAP", cSWAP, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT); - public final static JOpcode IADD = - new JOpcode("IADD", - cIADD, - 1, - INT_TYPE, - new JType[] { JType.INT, JType.INT }, - JMP_NEXT); - public final static JOpcode LADD = - new 
JOpcode("LADD", - cLADD, - 1, - LONG_TYPE, - new JType[] { JType.LONG, JType.LONG }, - JMP_NEXT); - public final static JOpcode FADD = - new JOpcode("FADD", - cFADD, - 1, - FLOAT_TYPE, - new JType[] { JType.FLOAT, JType.FLOAT }, - JMP_NEXT); - public final static JOpcode DADD = - new JOpcode("DADD", - cDADD, - 1, - DOUBLE_TYPE, - new JType[] { JType.DOUBLE, JType.DOUBLE }, - JMP_NEXT); - public final static JOpcode ISUB = - new JOpcode("ISUB", - cISUB, - 1, - INT_TYPE, - new JType[] {JType.INT, JType.INT }, - JMP_NEXT); - public final static JOpcode LSUB = - new JOpcode("LSUB", - cLSUB, - 1, - LONG_TYPE, - new JType[] { JType.LONG, JType.LONG }, - JMP_NEXT); - public final static JOpcode FSUB = - new JOpcode("FSUB", - cFSUB, - 1, - FLOAT_TYPE, - new JType[] { JType.FLOAT, JType.FLOAT }, - JMP_NEXT); - public final static JOpcode DSUB = - new JOpcode("DSUB", - cDSUB, - 1, - DOUBLE_TYPE, - new JType[] { JType.DOUBLE, JType.DOUBLE }, - JMP_NEXT); - public final static JOpcode IMUL = - new JOpcode("IMUL", - cIMUL, - 1, - INT_TYPE, - new JType[] {JType.INT, JType.INT }, - JMP_NEXT); - public final static JOpcode LMUL = - new JOpcode("LMUL", - cLMUL, - 1, - LONG_TYPE, - new JType[] { JType.LONG, JType.LONG }, - JMP_NEXT); - public final static JOpcode FMUL = - new JOpcode("FMUL", - cFMUL, - 1, - FLOAT_TYPE, - new JType[] { JType.FLOAT, JType.FLOAT }, - JMP_NEXT); - public final static JOpcode DMUL = - new JOpcode("DMUL", - cDMUL, - 1, - DOUBLE_TYPE, - new JType[] { JType.DOUBLE, JType.DOUBLE }, - JMP_NEXT); - public final static JOpcode IDIV = - new JOpcode("IDIV", - cIDIV, - 1, - INT_TYPE, - new JType[] {JType.INT, JType.INT }, - JMP_NEXT); - public final static JOpcode LDIV = - new JOpcode("LDIV", - cLDIV, - 1, - LONG_TYPE, - new JType[] { JType.LONG, JType.LONG }, - JMP_NEXT); - public final static JOpcode FDIV = - new JOpcode("FDIV", - cFDIV, - 1, - FLOAT_TYPE, - new JType[] { JType.FLOAT, JType.FLOAT }, - JMP_NEXT); - public final static JOpcode DDIV = - new JOpcode("DDIV", - cDDIV, - 1, - DOUBLE_TYPE, - new JType[] { JType.DOUBLE, JType.DOUBLE }, - JMP_NEXT); - public final static JOpcode IREM = - new JOpcode("IREM", - cIREM, - 1, - INT_TYPE, - new JType[] {JType.INT, JType.INT }, - JMP_NEXT); - public final static JOpcode LREM = - new JOpcode("LREM", - cLREM, - 1, - LONG_TYPE, - new JType[] { JType.LONG, JType.LONG }, - JMP_NEXT); - public final static JOpcode FREM = - new JOpcode("FREM", - cFREM, - 1, - FLOAT_TYPE, - new JType[] { JType.FLOAT, JType.FLOAT }, - JMP_NEXT); - public final static JOpcode DREM = - new JOpcode("DREM", - cDREM, - 1, - DOUBLE_TYPE, - new JType[] { JType.DOUBLE, JType.DOUBLE }, - JMP_NEXT); - public final static JOpcode INEG = - new JOpcode("INEG", cINEG, 1, INT_TYPE, INT_TYPE, JMP_NEXT); - public final static JOpcode LNEG = - new JOpcode("LNEG", cLNEG, 1, LONG_TYPE, LONG_TYPE, JMP_NEXT); - public final static JOpcode FNEG = - new JOpcode("FNEG", cFNEG, 1, FLOAT_TYPE, FLOAT_TYPE, JMP_NEXT); - public final static JOpcode DNEG = - new JOpcode("DNEG", cDNEG, 1, DOUBLE_TYPE, DOUBLE_TYPE, JMP_NEXT); - public final static JOpcode ISHL = - new JOpcode("ISHL", cISHL, - 1, - INT_TYPE, - new JType[] { JType.INT, JType.INT }, - JMP_NEXT); - public final static JOpcode LSHL = - new JOpcode("LSHL", - cLSHL, - 1, - LONG_TYPE, - new JType [] { JType.INT, JType.LONG }, - JMP_NEXT); - public final static JOpcode ISHR = - new JOpcode("ISHR", - cISHR, - 1, - INT_TYPE, - new JType[] { JType.INT, JType.INT }, - JMP_NEXT); - public final static JOpcode LSHR = - new JOpcode("LSHR", - 
cLSHR, - 1, - LONG_TYPE, - new JType[] { JType.INT, JType.LONG }, - JMP_NEXT); - public final static JOpcode IUSHR = - new JOpcode("IUSHR", - cIUSHR, - 1, - INT_TYPE, - new JType[] { JType.INT, JType.INT }, - JMP_NEXT); - public final static JOpcode LUSHR = - new JOpcode("LUSHR", - cLUSHR, - 1, - LONG_TYPE, - new JType[] { JType.INT, JType.LONG }, - JMP_NEXT); - public final static JOpcode IAND = - new JOpcode("IAND", - cIAND, - 1, - INT_TYPE, - new JType[] { JType.INT, JType.INT }, - JMP_NEXT); - public final static JOpcode LAND = - new JOpcode("LAND", - cLAND, - 1, - LONG_TYPE, - new JType[] { JType.LONG, JType.LONG }, - JMP_NEXT); - public final static JOpcode IOR = - new JOpcode("IOR", - cIOR, - 1, - INT_TYPE, - new JType[] { JType.INT, JType.INT }, - JMP_NEXT); - public final static JOpcode LOR = - new JOpcode("LOR", - cLOR, - 1, - LONG_TYPE, - new JType[] { JType.LONG, JType.LONG }, - JMP_NEXT); - public final static JOpcode IXOR = - new JOpcode("IXOR", - cIXOR, - 1, - INT_TYPE, - new JType[] { JType.INT, JType.INT }, - JMP_NEXT); - public final static JOpcode LXOR = - new JOpcode("LXOR", - cLXOR, - 1, - LONG_TYPE, - new JType[] { JType.LONG, JType.LONG }, - JMP_NEXT); - public final static JOpcode IINC = - new JOpcode("IINC", cIINC, 3, NO_DATA, NO_DATA, JMP_NEXT); - public final static JOpcode I2L = - new JOpcode("I2L", cI2L, 1, LONG_TYPE, INT_TYPE, JMP_NEXT); - public final static JOpcode I2F = - new JOpcode("I2F", cI2F, 1, FLOAT_TYPE, INT_TYPE, JMP_NEXT); - public final static JOpcode I2D = - new JOpcode("I2D", cI2D, 1, DOUBLE_TYPE, INT_TYPE, JMP_NEXT); - public final static JOpcode L2I = - new JOpcode("L2I", cL2I, 1, INT_TYPE, LONG_TYPE, JMP_NEXT); - public final static JOpcode L2F = - new JOpcode("L2F", cL2F, 1, FLOAT_TYPE, LONG_TYPE, JMP_NEXT); - public final static JOpcode L2D = - new JOpcode("L2D", cL2D, 1, DOUBLE_TYPE, LONG_TYPE, JMP_NEXT); - public final static JOpcode F2I = - new JOpcode("F2I", cF2I, 1, INT_TYPE, FLOAT_TYPE, JMP_NEXT); - public final static JOpcode F2L = - new JOpcode("F2L", cF2L, 1, LONG_TYPE, FLOAT_TYPE, JMP_NEXT); - public final static JOpcode F2D = - new JOpcode("F2D", cF2D, 1, DOUBLE_TYPE, FLOAT_TYPE, JMP_NEXT); - public final static JOpcode D2I = - new JOpcode("D2I", cD2I, 1, INT_TYPE, DOUBLE_TYPE, JMP_NEXT); - public final static JOpcode D2L = - new JOpcode("D2L", cD2L, 1, LONG_TYPE, DOUBLE_TYPE, JMP_NEXT); - public final static JOpcode D2F = - new JOpcode("D2F", cD2F, 1, FLOAT_TYPE, DOUBLE_TYPE, JMP_NEXT); - public final static JOpcode I2B = - new JOpcode("I2B", cI2B, 1, INT_TYPE, INT_TYPE, JMP_NEXT); - public final static JOpcode I2C = - new JOpcode("I2C", cI2C, 1, INT_TYPE, INT_TYPE, JMP_NEXT); - public final static JOpcode I2S = - new JOpcode("I2S", cI2S, 1, INT_TYPE, INT_TYPE, JMP_NEXT); - public final static JOpcode LCMP = - new JOpcode("LCMP", - cLCMP, - 1, - INT_TYPE, - new JType[] { JType.LONG, JType.LONG }, - JMP_NEXT); - public final static JOpcode FCMPL = - new JOpcode("FCMPL", - cFCMPL, - 1, - INT_TYPE, - new JType[] { JType.FLOAT, JType.FLOAT }, - JMP_NEXT); - public final static JOpcode FCMPG = - new JOpcode("FCMPG", - cFCMPG, - 1, - INT_TYPE, - new JType[] { JType.FLOAT, JType.FLOAT }, - JMP_NEXT); - public final static JOpcode DCMPL = - new JOpcode("DCMPL", - cDCMPL, - 1, - INT_TYPE, - new JType[] { JType.LONG, JType.LONG }, - JMP_NEXT); - public final static JOpcode DCMPG = - new JOpcode("DCMPG", - cDCMPG, - 1, - INT_TYPE, - new JType[] { JType.DOUBLE, JType.DOUBLE }, - JMP_NEXT); - public final static JOpcode IFEQ = - new 
JOpcode("IFEQ", cIFEQ, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET); - public final static JOpcode IFNE = - new JOpcode("IFNE", cIFNE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET); - public final static JOpcode IFLT = - new JOpcode("IFLT", cIFLT, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET); - public final static JOpcode IFGE = - new JOpcode("IFGE", cIFGE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET); - public final static JOpcode IFGT = - new JOpcode("IFGT", cIFGT, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET); - public final static JOpcode IFLE = - new JOpcode("IFLE", cIFLE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET); - public final static JOpcode IF_ICMPEQ = - new JOpcode("IF_ICMPEQ", - cIF_ICMPEQ, - 3, - NO_DATA, - new JType[] { JType.INT, JType.INT }, - JMP_MAYBE_S2_OFFSET); - public final static JOpcode IF_ICMPNE = - new JOpcode("IF_ICMPNE", - cIF_ICMPNE, - 3, - NO_DATA, - new JType[] { JType.INT, JType.INT }, - JMP_MAYBE_S2_OFFSET); - public final static JOpcode IF_ICMPLT = - new JOpcode("IF_ICMPLT", - cIF_ICMPLT, - 3, - NO_DATA, - new JType[] { JType.INT, JType.INT }, - JMP_MAYBE_S2_OFFSET); - public final static JOpcode IF_ICMPGE = - new JOpcode("IF_ICMPGE", - cIF_ICMPGE, - 3, - NO_DATA, - new JType[] { JType.INT, JType.INT }, - JMP_MAYBE_S2_OFFSET); - public final static JOpcode IF_ICMPGT = - new JOpcode("IF_ICMPGT", - cIF_ICMPGT, - 3, - NO_DATA, - new JType[] { JType.INT, JType.INT }, - JMP_MAYBE_S2_OFFSET); - public final static JOpcode IF_ICMPLE = - new JOpcode("IF_ICMPLE", - cIF_ICMPLE, - 3, - NO_DATA, - new JType[] { JType.INT, JType.INT }, - JMP_MAYBE_S2_OFFSET); - public final static JOpcode IF_ACMPEQ = - new JOpcode("IF_ACMPEQ", - cIF_ACMPEQ, - 3, - NO_DATA, - new JType[] { JType.REFERENCE, JType.REFERENCE }, - JMP_MAYBE_S2_OFFSET); - public final static JOpcode IF_ACMPNE = - new JOpcode("IF_ACMPNE", - cIF_ACMPNE, - 3, - NO_DATA, - new JType[] { JType.REFERENCE, JType.REFERENCE }, - JMP_MAYBE_S2_OFFSET); - public final static JOpcode GOTO = - new JOpcode("GOTO", cGOTO, 3, NO_DATA, NO_DATA, JMP_ALWAYS_S2_OFFSET); - public final static JOpcode JSR = - new JOpcode("JSR", cJSR, 3, ADDRESS_TYPE, NO_DATA, JMP_ALWAYS_S2_OFFSET); - public final static JOpcode RET = - new JOpcode("RET", cRET, 2, NO_DATA, NO_DATA, JMP_NONE); - public final static JOpcode TABLESWITCH = new JOpcode("TABLESWITCH", - cTABLESWITCH, - UNKNOWN, - NO_DATA, - INT_TYPE, - JMP_TABLE); - public final static JOpcode LOOKUPSWITCH = new JOpcode("LOOKUPSWITCH", - cLOOKUPSWITCH, - UNKNOWN, - NO_DATA, - INT_TYPE, - JMP_LOOKUP); - public final static JOpcode IRETURN = - new JOpcode("IRETURN", cIRETURN, 1, NO_DATA, INT_TYPE, JMP_NONE); - public final static JOpcode LRETURN = - new JOpcode("LRETURN", cLRETURN, 1, NO_DATA, LONG_TYPE, JMP_NONE); - public final static JOpcode FRETURN = - new JOpcode("FRETURN", cFRETURN, 1, NO_DATA, FLOAT_TYPE, JMP_NONE); - public final static JOpcode DRETURN = - new JOpcode("DRETURN", cDRETURN, 1, NO_DATA, DOUBLE_TYPE, JMP_NONE); - public final static JOpcode ARETURN = new JOpcode("ARETURN", - cARETURN, - 1, - NO_DATA, - OBJECT_REF_TYPE, - JMP_NONE); - public final static JOpcode RETURN = - new JOpcode("RETURN", cRETURN, 1, NO_DATA, NO_DATA, JMP_NONE); - public final static JOpcode GETSTATIC = new JOpcode("GETSTATIC", - cGETSTATIC, - 3, - UNKNOWN_TYPE, - NO_DATA, - JMP_NEXT); - public final static JOpcode PUTSTATIC = new JOpcode("PUTSTATIC", - cPUTSTATIC, - 3, - NO_DATA, - UNKNOWN_TYPE, - JMP_NEXT); - public final static JOpcode GETFIELD = new JOpcode("GETFIELD", - cGETFIELD, - 3, - UNKNOWN_TYPE, - 
OBJECT_REF_TYPE, - JMP_NEXT); - public final static JOpcode PUTFIELD = - new JOpcode("PUTFIELD", cPUTFIELD, 3, NO_DATA, UNKNOWN_TYPE, JMP_NEXT); - public final static JOpcode INVOKEVIRTUAL = new JOpcode("INVOKEVIRTUAL", - cINVOKEVIRTUAL, - 3, - NO_DATA, - UNKNOWN_TYPE, - JMP_NEXT); - public final static JOpcode INVOKESPECIAL = new JOpcode("INVOKESPECIAL", - cINVOKESPECIAL, - 3, - NO_DATA, - UNKNOWN_TYPE, - JMP_NEXT); - public final static JOpcode INVOKESTATIC = new JOpcode("INVOKESTATIC", - cINVOKESTATIC, - 3, - NO_DATA, - UNKNOWN_TYPE, - JMP_NEXT); - public final static JOpcode INVOKEINTERFACE = - new JOpcode("INVOKEINTERFACE", - cINVOKEINTERFACE, - 5, - NO_DATA, - UNKNOWN_TYPE, - JMP_NEXT); - public final static JOpcode NEW = - new JOpcode("NEW", cNEW, 3, OBJECT_REF_TYPE, NO_DATA, JMP_NEXT); - public final static JOpcode NEWARRAY = - new JOpcode("NEWARRAY", - cNEWARRAY, - 2, - ARRAY_REF_TYPE, - INT_TYPE, - JMP_NEXT); - public final static JOpcode ANEWARRAY = - new JOpcode("ANEWARRAY", - cANEWARRAY, - 3, - ARRAY_REF_TYPE, - INT_TYPE, - JMP_NEXT); - public final static JOpcode ARRAYLENGTH = new JOpcode("ARRAYLENGTH", - cARRAYLENGTH, - 1, - INT_TYPE, - ARRAY_REF_TYPE, - JMP_NEXT); - public final static JOpcode ATHROW = new JOpcode("ATHROW", - cATHROW, - 1, - OBJECT_REF_TYPE, - OBJECT_REF_TYPE, - JMP_NONE); - public final static JOpcode CHECKCAST = new JOpcode("CHECKCAST", - cCHECKCAST, - 3, - OBJECT_REF_TYPE, - OBJECT_REF_TYPE, - JMP_NEXT); - public final static JOpcode INSTANCEOF = new JOpcode("INSTANCEOF", - cINSTANCEOF, - 3, - INT_TYPE, - OBJECT_REF_TYPE, - JMP_NEXT); - public final static JOpcode MONITORENTER = new JOpcode("MONITORENTER", - cMONITORENTER, - 1, - NO_DATA, - OBJECT_REF_TYPE, - JMP_NEXT); - public final static JOpcode MONITOREXIT = new JOpcode("MONITOREXIT", - cMONITOREXIT, - 1, - NO_DATA, - OBJECT_REF_TYPE, - JMP_NEXT); - public final static JOpcode WIDE = new JOpcode("WIDE", - cWIDE, - UNKNOWN, - UNKNOWN_TYPE, - UNKNOWN_TYPE, - JMP_NEXT); - public final static JOpcode MULTIANEWARRAY = new JOpcode("MULTIANEWARRAY", - cMULTIANEWARRAY, - 4, - ARRAY_REF_TYPE, - UNKNOWN_TYPE, - JMP_NEXT); - public final static JOpcode IFNULL = new JOpcode("IFNULL", - cIFNULL, - 3, - NO_DATA, - REFERENCE_TYPE, - JMP_MAYBE_S2_OFFSET); - public final static JOpcode IFNONNULL = new JOpcode("IFNONNULL", - cIFNONNULL, - 3, - NO_DATA, - REFERENCE_TYPE, - JMP_MAYBE_S2_OFFSET); - public final static JOpcode GOTO_W = new JOpcode("GOTO_W", - cGOTO_W, - 5, - NO_DATA, - NO_DATA, - JMP_ALWAYS_S4_OFFSET); - public final static JOpcode JSR_W = - new JOpcode("JSR_W", cJSR_W, 5, ADDRESS_TYPE, NO_DATA, JMP_NEXT); - - public final static JOpcode[] OPCODES = { - NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, - ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, - LCONST_1, FCONST_0, FCONST_1, FCONST_2, DCONST_0, - DCONST_1, BIPUSH, SIPUSH, LDC, LDC_W, - LDC2_W, ILOAD, LLOAD, FLOAD, DLOAD, - ALOAD, ILOAD_0, ILOAD_1, ILOAD_2, ILOAD_3, - LLOAD_0, LLOAD_1, LLOAD_2, LLOAD_3, FLOAD_0, - FLOAD_1, FLOAD_2, FLOAD_3, DLOAD_0, DLOAD_1, - DLOAD_2, DLOAD_3, ALOAD_0, ALOAD_1, ALOAD_2, - ALOAD_3, IALOAD, LALOAD, FALOAD, DALOAD, - AALOAD, BALOAD, CALOAD, SALOAD, ISTORE, - LSTORE, FSTORE, DSTORE, ASTORE, ISTORE_0, - ISTORE_1, ISTORE_2, ISTORE_3, LSTORE_0, LSTORE_1, - LSTORE_2, LSTORE_3, FSTORE_0, FSTORE_1, FSTORE_2, - FSTORE_3, DSTORE_0, DSTORE_1, DSTORE_2, DSTORE_3, - ASTORE_0, ASTORE_1, ASTORE_2, ASTORE_3, IASTORE, - LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, - CASTORE, SASTORE, POP, POP2, DUP, - DUP_X1, DUP_X2, DUP2, 
DUP2_X1, DUP2_X2, - SWAP, IADD, LADD, FADD, DADD, - ISUB, LSUB, FSUB, DSUB, IMUL, - LMUL, FMUL, DMUL, IDIV, LDIV, - FDIV, DDIV, IREM, LREM, FREM, - DREM, INEG, LNEG, FNEG, DNEG, - ISHL, LSHL, ISHR, LSHR, IUSHR, - LUSHR, IAND, LAND, IOR, LOR, - IXOR, LXOR, IINC, I2L, I2F, - I2D, L2I, L2F, L2D, F2I, - F2L, F2D, D2I, D2L, D2F, - I2B, I2C, I2S, LCMP, FCMPL, - FCMPG, DCMPL, DCMPG, IFEQ, IFNE, - IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, - IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, - IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, RET, - TABLESWITCH, LOOKUPSWITCH, IRETURN, LRETURN, FRETURN, - DRETURN, ARETURN, RETURN, GETSTATIC, PUTSTATIC, - GETFIELD, PUTFIELD, INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC, - INVOKEINTERFACE, null, NEW, NEWARRAY, ANEWARRAY, - ARRAYLENGTH, ATHROW, CHECKCAST, INSTANCEOF, MONITORENTER, - MONITOREXIT, WIDE, MULTIANEWARRAY, IFNULL, IFNONNULL, - GOTO_W, JSR_W - }; - - protected JOpcode(String name, - int code, - int size, - JType[] producedDataTypes, - JType[] consumedDataTypes, - int jumpKind) { - this.name = name; - this.code = code; - this.size = size; - this.producedDataTypes = producedDataTypes; - this.consumedDataTypes = consumedDataTypes; - this.jumpKind = jumpKind; - switch (jumpKind) { - case JMP_NONE: successorCount = 0; break; - case JMP_NEXT: successorCount = 1; break; - case JMP_ALWAYS_S2_OFFSET: successorCount = 1; break; - case JMP_ALWAYS_S4_OFFSET: successorCount = 1; break; - case JMP_MAYBE_S2_OFFSET: successorCount = 2; break; - case JMP_TABLE: successorCount = UNKNOWN; break; - case JMP_LOOKUP: successorCount = UNKNOWN; break; - default: successorCount = UNKNOWN; break; - } - } - - public String toString() { return name; } - protected int getSize() { return size; } - protected JType[] getProducedDataTypes() { return producedDataTypes; } - protected JType[] getConsumedDataTypes() { return consumedDataTypes; } - - protected int getProducedDataSize() { - if (producedDataTypes != UNKNOWN_TYPE) - return JType.getTotalSize(producedDataTypes); - else { - switch (code) { - case cLDC: case cLDC_W: case cBALOAD: - return 1; - case cLDC2_W: case cDUP: case cSWAP: - return 2; - case cDUP_X1: - return 3; - case cDUP_X2: case cDUP2: - return 4; - case cDUP2_X1: - return 5; - case cDUP2_X2: - return 6; - default: - throw new Error(this.toString()); - } - } - } - - protected int getConsumedDataSize() { - if (consumedDataTypes != UNKNOWN_TYPE) - return JType.getTotalSize(consumedDataTypes); - else { - switch (code) { - case cPOP: case cDUP: - return 1; - case cPOP2: case cDUP_X1: case cDUP2: case cSWAP: - return 2; - case cDUP_X2: case cDUP2_X1: - return 3; - case cDUP2_X2: - return 4; - default: - throw new Error(this.toString()); - } - } - } - - protected int getProducedDataTypesNumber() { - if (producedDataTypes != UNKNOWN_TYPE) - return producedDataTypes.length; - else { - switch (code) { - case cLDC: case cLDC_W: case cLDC2_W: case cBALOAD: - case cGETSTATIC: case cGETFIELD: - return 1; - case cDUP: case cSWAP: - return 2; - case cDUP_X2: case cDUP2: case cDUP2_X1: case cDUP2_X2: - return 2; - case cDUP_X1: - return 3; - default: - throw new Error(this.toString()); - } - } - } - - protected int getConsumedDataTypesNumber() { - if (consumedDataTypes != UNKNOWN_TYPE) - return consumedDataTypes.length; - else { - switch (code) { - case cPOP: case cDUP: case cPUTSTATIC: - return 1; - case cPUTFIELD: case cDUP_X1: case cDUP_X2: - case cDUP2: case cDUP2_X1: case cPOP2: case cSWAP: - return 2; - default: - throw new Error(this.toString()); - } - } - } -} diff --git 
a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java deleted file mode 100644 index 50aa9d3636..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java +++ /dev/null @@ -1,77 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -/** - * Attributes which are unknown to the JVM (or at least to this library). - * - * @author Michel Schinz - * @version 1.0 - */ - -public class JOtherAttribute extends JAttribute { - protected final String name; - protected final byte[] contents; - protected final int length; - - public JOtherAttribute(FJBGContext context, - JClass clazz, - Object owner, - String name, - byte[] contents, - int length) { - super(context, clazz, name); - this.name = name; - this.contents = contents; - this.length = length; - } - - public JOtherAttribute(FJBGContext context, - JClass clazz, - Object owner, - String name, - int size, - DataInputStream stream) - throws IOException { - super(context, clazz, name); - this.name = name; - this.contents = new byte[size]; - this.length = size; - - stream.read(contents, 0, length); - } - - public String getName() { return name; } - - // Follows javap output format for user-defined attributes. - /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(" "); - buf.append(name); - buf.append(": length = 0x"); - buf.append(Integer.toHexString(length).toUpperCase()); - for (int i = 0; i < length; ++i) { - if (i % 16 == 0) buf.append("\n "); - buf.append(hexString(contents[i])); - buf.append(" "); - } - buf.append("\n"); - return buf.toString(); - } - - protected int getSize() { return length; } - - protected void writeContentsTo(DataOutputStream stream) throws IOException { - stream.write(contents, 0, length); - } - - private static final String hexString(int i) { - return ((0 <= i && i < 16) ? "0" : "")+Integer.toHexString(i).toUpperCase(); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java b/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java deleted file mode 100644 index 73d1026c04..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java +++ /dev/null @@ -1,19 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -/** - * Types for Java references, i.e. arrays and objects. - * - * @author Michel Schinz - * @version 1.0 - */ - -abstract public class JReferenceType extends JType { - public boolean isReferenceType() { return true; } - - abstract public String getDescriptor(); -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java deleted file mode 100644 index 3a17cb2c44..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java +++ /dev/null @@ -1,69 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -/** - * Sourcefile attribute, which can be attached to class files to - * associate them with their source file. - * - * There can be no more than one SourceFile attribute in the attributes table - * of a given ClassFile structure. See section 4.8.9 of the JVM specification. 
- * - * @author Michel Schinz - * @version 1.0 - */ - -public class JSourceFileAttribute extends JAttribute { - protected final String sourceFileName; - protected final int sourceFileIndex; - - public JSourceFileAttribute(FJBGContext context, - JClass clazz, - String sourceFileName) { - super(context, clazz); - this.sourceFileName = sourceFileName; - this.sourceFileIndex = clazz.getConstantPool().addUtf8(sourceFileName); - } - - public JSourceFileAttribute(FJBGContext context, - JClass clazz, - Object owner, - String name, - int size, - DataInputStream stream) - throws IOException { - super(context, clazz, name); - - this.sourceFileIndex = stream.readShort(); - this.sourceFileName = clazz.getConstantPool().lookupUtf8(sourceFileIndex); - - assert name.equals(getName()); - } - - public String getName() { return "SourceFile"; } - - public String getFileName() { return sourceFileName; } - - // Follows javap output format for SourceFile attribute. - /*@Override*/ public String toString() { - StringBuffer buf = new StringBuffer(" SourceFile: \""); - buf.append(sourceFileName); - buf.append("\"\n"); - return buf.toString(); - } - - protected int getSize() { - return 2; // Short.SIZE - } - - protected void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeShort(sourceFileIndex); - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java deleted file mode 100644 index 72a5484d40..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java +++ /dev/null @@ -1,282 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -/** - * - * @author Stephane Micheloud - * @version 1.0 - */ - -public class JStackMapTableAttribute extends JAttribute { - /** Constant pool of the current classfile. */ - private JConstantPool pool; - - /** StackMapTable entries */ - protected final List/**/ entries = new ArrayList(); - protected int entriesSize = 0; - protected boolean usesU2; - - public JStackMapTableAttribute(FJBGContext context, - JClass clazz, - JCode code) { - super(context, clazz); - this.pool = clazz.pool; - - assert code.getOwner().getOwner() == clazz; - } - - public JStackMapTableAttribute(FJBGContext context, - JClass clazz, - Object owner, - String name, - int size, - DataInputStream stream) - throws IOException { - super(context, clazz, name); - this.pool = clazz.pool; - - int count = stream.readShort(); - this.usesU2 = count < 65536; - for (int i = 0; i < count; ++i) - this.entries.add(new Frame(stream)); - this.entriesSize = computeSize(); - - assert name.equals(getName()); - } - - public String getName() { return "StackMapTable"; } - - // Follows javap output format for StackMapTable attribute. 
- /*@Override*/ public String toString() { - Frame frame = null; - StringBuffer buf = new StringBuffer(" StackMapTable: number_of_entries = "); - buf.append(entries.size()); - Iterator it = entries.iterator(); - while (it.hasNext()) { - frame = (Frame)it.next(); - buf.append("\n frame_type = "); - buf.append(frame.tag); - buf.append(" /* "); - buf.append(getFrameType(frame.tag)); - buf.append(" */"); - if (frame.offsetDelta != -1) - buf.append("\n offset_delta = "+frame.offsetDelta); - if (frame.locals != null) - appendTypeInfoArray(buf, "locals", frame.locals); - if (frame.stackItems != null) - appendTypeInfoArray(buf, "stack", frame.stackItems); - } - buf.append("\n"); - return buf.toString(); - } - - protected int getSize() { - return entriesSize; - } - - protected void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeShort(entriesSize); - Iterator it = entries.iterator(); - while (it.hasNext()) { - Frame frame = (Frame)it.next(); - frame.writeContentsTo(stream); - } - } - - private class TypeInfo { - final int tag; - final int poolIndexOrOffset; // tag == 7 => poolIndex, tag = 8 => offset - private int bytes; - TypeInfo(DataInputStream stream) throws IOException { - int size = 1; - this.tag = stream.readByte(); - if (tag == 7) { // ITEM_Object; // 7 - poolIndexOrOffset = stream.readShort(); - size += 2; - } else if (tag == 8) { // ITEM_Uninitialized // 8 - poolIndexOrOffset = (usesU2) ? stream.readShort() : stream.readInt(); - size += (usesU2) ? 2 : 4; - } else - poolIndexOrOffset = -1; - this.bytes += size; - } - int getSize() { return bytes; } - void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeByte(tag); - if (tag == 7) { // ITEM_Object; // 7 - stream.writeShort(poolIndexOrOffset); - } else if (tag == 8) { // ITEM_Uninitialized // 8 - if (usesU2) stream.writeShort(poolIndexOrOffset); - else stream.writeInt(poolIndexOrOffset); - } - } - /*@Override*/ public String toString() { - switch (tag) { - case 0: // ITEM_Top - return ""; - case 1: // ITEM_Integer - return "int"; - case 2: // ITEM_Float - return "float"; - case 3: // ITEM_Double - return "double"; - case 4: // ITEM_Long - return "long"; - case 5: // ITEM_Null - return "null"; - case 6: // ITEM_UninializedThis - return "this"; - case 7: // ITEM_Object - String name = pool.lookupClass(poolIndexOrOffset); - if (name.startsWith("[")) name = "\""+name+"\""; - return "class "+name; - case 8: // ITEM_Uninitialized - return ""; - default: - return String.valueOf(tag); - } - } - } - - private class Frame { - final int tag; - int offsetDelta = -1; - TypeInfo[] stackItems = null; - TypeInfo[] locals = null; - private int bytes; - Frame(DataInputStream stream) throws IOException { - // The stack_map_frame structure consists of a one-byte tag - // followed by zero or more bytes. - this.tag = stream.readUnsignedByte(); - if (tag < 64) { // SAME; // 0-63 - //done - } else if (tag < 128) { // SAME_LOCALS_1_STACK_ITEM; // 64-127 - this.offsetDelta = tag - 64; - readStackItems(stream, 1); - } else if (tag < 248) { // reserved for future use. 
- assert false : "Tags in the range [128-247] are reserved for future use."; - } else if (tag < 251) { // CHOP; // 248-250 - int k = 251 - tag; - readOffsetDelta(stream); - } else if (tag == 251) { // SAME_FRAME_EXTENDED - readOffsetDelta(stream); - } else if (tag < 255) { // APPEND; // 252-254 - readOffsetDelta(stream); - readLocals(stream, tag - 251); - } else { // FULL_FRAME; // 255 - readOffsetDelta(stream); - readLocals(stream); - readStackItems(stream); - } - } - int getSize() { return bytes; } - void readOffsetDelta(DataInputStream stream) throws IOException { - this.offsetDelta = (usesU2) ? stream.readShort() : stream.readInt(); - this.bytes += (usesU2) ? 2 : 4; - } - int getOffsetDelta() { return offsetDelta; } - void readStackItems(DataInputStream stream, int k) throws IOException { - this.stackItems = new TypeInfo[k]; - for (int i = 0; i < k; ++i) { - stackItems[i] = new TypeInfo(stream); - this.bytes += stackItems[i].getSize(); - } - } - void readStackItems(DataInputStream stream) throws IOException { - int k = (usesU2) ? stream.readShort() : stream.readInt(); - this.bytes += (usesU2) ? 2 : 4; - readStackItems(stream, k); - } - void readLocals(DataInputStream stream, int k) throws IOException { - this.locals = new TypeInfo[k]; - for (int i = 0; i < k; ++i) { - locals[i] = new TypeInfo(stream); - this.bytes += locals[i].getSize(); - } - } - void readLocals(DataInputStream stream) throws IOException { - int k = (usesU2) ? stream.readShort() : stream.readInt(); - this.bytes += (usesU2) ? 2 : 4; - readLocals(stream, k); - } - void writeContentsTo(DataOutputStream stream) throws IOException { - stream.writeByte(tag); - if (tag < 64) { - //done - } else if (tag < 128) { // SAME; // 0-63 - assert stackItems.length == 1; - stackItems[0].writeContentsTo(stream); - } else if (tag < 248) { - assert false : "Tags in the range [128-247] are reserved for future use."; - } else if (tag < 251) { - if (usesU2) stream.writeShort(offsetDelta); - else stream.writeInt(offsetDelta); - } else if (tag == 251) { - if (usesU2) stream.writeShort(offsetDelta); - else stream.writeInt(offsetDelta); - } else if (tag < 255) { // APPEND; // 252-254 - if (usesU2) stream.writeShort(offsetDelta); - else stream.writeInt(offsetDelta); - for (int i = 0; i < locals.length; ++i) - locals[i].writeContentsTo(stream); - } else { - if (usesU2) stream.writeShort(offsetDelta); - else stream.writeInt(offsetDelta); - for (int i = 0; i < locals.length; ++i) - locals[i].writeContentsTo(stream); - for (int i = 0; i < stackItems.length; ++i) - stackItems[i].writeContentsTo(stream); - } - } - } - - private int computeSize() { - int size = (usesU2) ? 
2 : 4; // number of frames - Iterator it = entries.iterator(); - while (it.hasNext()) { - Frame frame = (Frame)it.next(); - size += frame.getSize(); - } - return size; - } - - private static final String getFrameType(int tag) { - if (tag < 64) return "same"; - else if (tag < 128) return "same locals 1 stack item"; - else if (tag < 248) return ""; - else if (tag < 251) return "chop"; - else if (tag == 251) return "same frame extended"; - else if (tag < 255) return "append"; - else return "full frame"; - } - - private static StringBuffer appendTypeInfoArray(StringBuffer buf, - String s, TypeInfo[] a) { - buf.append("\n "); - buf.append(s); - buf.append(" = "); - if (a.length > 0) { - buf.append("[ "); - for (int i = 0; i < a.length; ++i) { - if (i > 0) buf.append(", "); - buf.append(a[i]); - } - buf.append(" ]"); - } - else - buf.append("[]"); - return buf; - } - -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JType.java b/src/fjbg/ch/epfl/lamp/fjbg/JType.java deleted file mode 100644 index 298a2b0565..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/JType.java +++ /dev/null @@ -1,316 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.IOException; -import java.io.StringReader; -import java.util.ArrayList; - -/** - * Representation of Java types. - * - * @version 1.0 - * @author Michel Schinz - */ - -abstract public class JType { - abstract public int getSize(); - abstract public String getSignature(); - abstract public int getTag(); - abstract public String toString(); - abstract public boolean isCompatibleWith(JType other); - - public boolean isValueType() { return false; } - public boolean isObjectType() { return false; } - public boolean isArrayType() { return false; } - public boolean isReferenceType() { return false; } - - // Tags for types. Taken from BCEL. - public static final int T_BOOLEAN = 4; - public static final int T_CHAR = 5; - public static final int T_FLOAT = 6; - public static final int T_DOUBLE = 7; - public static final int T_BYTE = 8; - public static final int T_SHORT = 9; - public static final int T_INT = 10; - public static final int T_LONG = 11; - public static final int T_VOID = 12; // Non-standard - public static final int T_ARRAY = 13; - public static final int T_OBJECT = 14; - public static final int T_UNKNOWN = 15; - public static final int T_ADDRESS = 16; - - public static final int T_REFERENCE = 17; // type compatible with references - - public static final JType[] EMPTY_ARRAY = new JType[0]; - - protected static JType parseSig(StringReader s) throws IOException { - int nextChar = s.read(); - if (nextChar == -1) throw new IllegalArgumentException(); - - switch ((char)nextChar) { - case 'V' : return VOID; - case 'Z' : return BOOLEAN; - case 'B' : return BYTE; - case 'C' : return CHAR; - case 'S' : return SHORT; - case 'I' : return INT; - case 'F' : return FLOAT; - case 'J' : return LONG; - case 'D' : return DOUBLE; - case 'L': { - StringBuffer className = new StringBuffer(); - for (;;) { - nextChar = s.read(); - if (nextChar == -1 || nextChar == ';') break; - className.append(nextChar == '/' ? 
':' : ((char)nextChar)); - } - if (nextChar != ';') throw new IllegalArgumentException(); - return new JObjectType(className.toString()); - } - case '[': { - JType elemType = parseSig(s); - return new JArrayType(elemType); - } - case '(': { - ArrayList argTps = new ArrayList(); - for (;;) { - s.mark(1); - nextChar = s.read(); - if (nextChar == -1 || nextChar == ')') break; - s.reset(); - argTps.add(parseSig(s)); - } - if (nextChar != ')') throw new IllegalArgumentException("a"); - JType[] argTpsA = (JType[])argTps.toArray(new JType[argTps.size()]); - JType returnType = parseSig(s); - return new JMethodType(returnType, argTpsA); - } - default: - throw new IllegalArgumentException(); - } - } - - /** - * A signature is a string representing the generic type of a field or - * method, or generic type information for a class declaration. - * See section 4.4.4 of the JVM specification. - */ - public static JType parseSignature(String signature) { - try { - StringReader sigReader = new StringReader(signature); - JType parsed = parseSig(sigReader); - if (sigReader.read() != -1) - throw new IllegalArgumentException(); - return parsed; - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("invalid signature " + signature); - } catch (IOException e) { - throw new Error(e); - } - } - - public static int getTotalSize(JType[] types) { - int size = 0; - for (int i = 0; i < types.length; ++i) - size += types[i].getSize(); - return size; - } - - protected JType() {} - - public static JType VOID = new JType() { - public int getSize() { return 0; } - public String getSignature() { return "V"; } - public int getTag() { return T_VOID; } - public String toString() { return "void"; } - public boolean isCompatibleWith(JType other) { - throw new UnsupportedOperationException("type VOID is no real " - + "data type therefore " - + "cannot be assigned to " - + other.toString()); - } - }; - - public static JType BOOLEAN = new JType() { - public int getSize() { return 1; } - public String getSignature() { return "Z"; } - public int getTag() { return T_BOOLEAN; } - public String toString() { return "boolean"; } - public boolean isValueType() { return true; } - public boolean isCompatibleWith(JType other) { - return other == BOOLEAN - || other == INT - || other == BYTE - || other == CHAR - || other == SHORT; - } - }; - - public static JType BYTE = new JType() { - public int getSize() { return 1; } - public String getSignature() { return "B"; } - public int getTag() { return T_BYTE; } - public String toString() { return "byte"; } - public boolean isValueType() { return true; } - public boolean isCompatibleWith(JType other) { - return other == BOOLEAN - || other == INT - || other == BYTE - || other == CHAR - || other == SHORT; - } - }; - - public static JType CHAR = new JType() { - public int getSize() { return 1; } - public String getSignature() { return "C"; } - public int getTag() { return T_CHAR; } - public String toString() { return "char"; } - public boolean isValueType() { return true; } - public boolean isCompatibleWith(JType other) { - return other == BOOLEAN - || other == INT - || other == BYTE - || other == CHAR - || other == SHORT; - } - }; - - public static JType SHORT = new JType() { - public int getSize() { return 1; } - public String getSignature() { return "S"; } - public int getTag() { return T_SHORT; } - public String toString() { return "short"; } - public boolean isValueType() { return true; } - public boolean isCompatibleWith(JType other) { - return other == BOOLEAN - || other 
== INT - || other == BYTE - || other == CHAR - || other == SHORT; - } - }; - - public static JType INT = new JType() { - public int getSize() { return 1; } - public String getSignature() { return "I"; } - public int getTag() { return T_INT; } - public String toString() { return "int"; } - public boolean isValueType() { return true; } - public boolean isCompatibleWith(JType other) { - return other == BOOLEAN - || other == INT - || other == BYTE - || other == CHAR - || other == SHORT; - } - }; - - public static JType FLOAT = new JType() { - public int getSize() { return 1; } - public String getSignature() { return "F"; } - public int getTag() { return T_FLOAT; } - public String toString() { return "float"; } - public boolean isValueType() { return true; } - public boolean isCompatibleWith(JType other) { - return other == FLOAT; - } - }; - - public static JType LONG = new JType() { - public int getSize() { return 2; } - public String getSignature() { return "J"; } - public int getTag() { return T_LONG; } - public String toString() { return "long"; } - public boolean isValueType() { return true; } - public boolean isCompatibleWith(JType other) { - return other == LONG; - } - }; - - public static JType DOUBLE = new JType() { - public int getSize() { return 2; } - public String getSignature() { return "D"; } - public int getTag() { return T_DOUBLE; } - public String toString() { return "double"; } - public boolean isValueType() { return true; } - public boolean isCompatibleWith(JType other) { - return other == DOUBLE; - } - }; - - public static JType REFERENCE = new JType() { - public int getSize() { return 1; } - public String getSignature() { - throw new UnsupportedOperationException("type REFERENCE is no real " - + "data type and therefore " - + "has no signature"); - } - public int getTag() { return T_REFERENCE; } - public String toString() { return "<reference>"; } - public boolean isCompatibleWith(JType other) { - throw new UnsupportedOperationException("type REFERENCE is no real " - + "data type and therefore " - + "cannot be assigned to " - + other.toString()); - } - }; - - public static JType ADDRESS = new JType() { - public int getSize() { return 1; } - public String getSignature() { - throw new UnsupportedOperationException("type ADDRESS is no usable " - + "data type and therefore " - + "has no signature"); - } - public int getTag() { return T_ADDRESS; } - public String toString() { return "<address>"; }
- public boolean isCompatibleWith(JType other) { - return other == ADDRESS; - } - }; - - public static JType UNKNOWN = new JType() { - public int getSize() { - throw new UnsupportedOperationException("type UNKNOWN is no real " - + "data type and therefore " - + "has no size"); - } - public String getSignature() { - throw new UnsupportedOperationException("type UNKNOWN is no real " - + "data type and therefore " - + "has no signature"); - } - public int getTag() { return T_UNKNOWN; } - public String toString() { return "<unknown>"; } - public boolean isCompatibleWith(JType other) { - throw new UnsupportedOperationException("type UNKNOWN is no real " - + "data type and therefore " - + "cannot be assigned to " - + other.toString()); - } - }; - - protected static String tagToString(int tag) { - switch (tag) { - case T_BOOLEAN : return "boolean"; - case T_CHAR : return "char"; - case T_FLOAT : return "float"; - case T_DOUBLE : return "double"; - case T_BYTE : return "byte"; - case T_SHORT : return "short"; - case T_INT : return "int"; - case T_LONG : return "long"; - case T_VOID : return "void"; // Non-standard - case T_ARRAY : return "[]"; - case T_OBJECT : return "Object"; - case T_UNKNOWN : return "<unknown>"; - case T_ADDRESS : return "
    "; - default: return String.valueOf(tag); - } - } -} diff --git a/src/fjbg/ch/epfl/lamp/fjbg/Main.java b/src/fjbg/ch/epfl/lamp/fjbg/Main.java deleted file mode 100644 index 810ee7c400..0000000000 --- a/src/fjbg/ch/epfl/lamp/fjbg/Main.java +++ /dev/null @@ -1,131 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.fjbg; - -import java.io.DataInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.InputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.jar.JarFile; -import java.util.zip.ZipEntry; - -/** - * Main program entry to execute the FJBG reader from the command line. - * - * The reader prints out the decoded data in the same output format as - * javap, the Java bytecode disassembler of the Sun J2SE SDK. - * - * @author Stephane Micheloud - * @version 1.1 - */ - -public class Main { - private static final String PRODUCT_STRING = "Fast Java Bytecode Generator"; - private static final String VERSION_STRING = "version 1.1"; - - private static final int ACTION_USAGE = 0; - private static final int ACTION_DONE = 1; - private static final int ACTION_PROCEED = 2; - - private static String classPath = "."; - private static String[] classNames = null; - - public static void main(String[] args) { - switch (parseArgs(args)) { - case ACTION_USAGE: printUsage(); break; - case ACTION_PROCEED: processClasses(); break; - default: - } - } - - private static void processClasses() { - FJBGContext fjbgContext = new FJBGContext(49, 0); - if (classNames.length > 0) - try { - for (int i = 0; i < classNames.length; ++i) - processClass(fjbgContext, classNames[i]); - } catch (IOException e) { - System.err.println(e.getMessage()); - } - else - System.err.println( - "No classes were specified on the command line. 
Try -help."); - } - - private static void processClass(FJBGContext fjbgContext, String className) - throws IOException { - InputStream in = getInputStream(className); - JClass jclass = fjbgContext.JClass(new DataInputStream(in)); - System.out.println(jclass); - in.close(); - } - - private static InputStream getInputStream(String className) throws IOException { - String name = null; - String[] paths = classPath.split(File.pathSeparator); - for (int i = 0; i < paths.length; ++i) { - File parent = new File(paths[i]); - if (parent.isDirectory()) { - name = className.replace('.', File.separatorChar)+".class"; - File f = new File(parent, name); - if (f.isFile()) return new FileInputStream(f); - } else if (paths[i].endsWith(".jar")) { - JarFile f = new JarFile(parent); - name = className.replace('.', '/')+".class"; - ZipEntry e = f.getEntry(name); - if (e != null) return f.getInputStream(e); - } - } - throw new IOException("ERROR:Could not find "+className); - } - - private static int parseArgs(String[] args) { - ArrayList/**/ classes = new ArrayList(); - String arg = null; - int action = ACTION_USAGE; - int i = 0, n = args.length; - while (i < n) { - arg = args[i]; - if (arg.equals("-classpath") && (i+1) < n) { - classPath = args[i+1]; i += 2; - } else if (arg.equals("-cp") && (i+1) < n) { - classPath = args[i+1]; i += 2; - } else if (arg.equals("-help")) { - i = n+1; - //} else if (arg.equals("-v")) { - // verbose = true; i += 1; - } else if (arg.equals("-version")) { - System.err.println(PRODUCT_STRING+" "+VERSION_STRING); - action = ACTION_DONE; i = n+1; - } else if (arg.startsWith("-")) { - System.err.println("invalid flag: "+arg); - i = n+1; - } else { - classes.add(arg); i += 1; - } - } - if (i == n && i > 0) { - classNames = (String[])classes.toArray(new String[classes.size()]); - action = ACTION_PROCEED; - } - return action; - } - - private static void printUsage() { - System.out.println("Usage: fjbg "); - System.out.println(); - System.out.println("where possible options include:"); - System.out.println(" -cp Specify where to find user class files"); - System.out.println(" -classpath Specify where to find user class files"); - System.out.println(" -help Print a synopsis of standard options"); - System.out.println(" -version Version information"); - System.out.println(); - System.exit(1); - } -} - diff --git a/src/fjbg/ch/epfl/lamp/util/ByteArray.java b/src/fjbg/ch/epfl/lamp/util/ByteArray.java deleted file mode 100644 index b852e1ac1f..0000000000 --- a/src/fjbg/ch/epfl/lamp/util/ByteArray.java +++ /dev/null @@ -1,145 +0,0 @@ -/* FJBG -- Fast Java Bytecode Generator - * Copyright 2002-2013 LAMP/EPFL - * @author Michel Schinz - */ - -package ch.epfl.lamp.util; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -/** - * Array of bytes. - * - * @author Michel Schinz - * @version 1.0 - */ - -public class ByteArray { - protected final static int BYTE_BLOCK_BITS = 8; - protected final static int BYTE_BLOCK_SIZE = 1 << BYTE_BLOCK_BITS; - protected final static int BYTE_BLOCK_MASK = BYTE_BLOCK_SIZE - 1; - - protected byte[][] data = new byte[][] { new byte[BYTE_BLOCK_SIZE] }; - protected int pos = 0; // The next free position. 
- - protected boolean frozen = false; - - public ByteArray() { } - - public ByteArray(InputStream stream, int size) throws IOException { - pos = size; - for (int i = 0; size > 0; ++i) { - int sizeToRead = Math.min(BYTE_BLOCK_SIZE, size); - stream.read(data[i], 0, sizeToRead); - - size -= sizeToRead; - if (size > 0) addNewBlock(); - } - } - - public void freeze() { frozen = true; } - - public int nextBytePosition() { - return pos; - } - - public int getSize() { - return pos; - } - - protected void addNewBlock() { - int nextBlockPos = pos >>> BYTE_BLOCK_BITS; - if (nextBlockPos == data.length) { - byte[][] newData = new byte[data.length * 2][]; - System.arraycopy(data, 0, newData, 0, data.length); - data = newData; - } - assert data[nextBlockPos] == null : pos + " " + nextBlockPos; - data[nextBlockPos] = new byte[BYTE_BLOCK_SIZE]; - } - - protected void addByte(int b) { - assert !frozen; - - if ((pos & BYTE_BLOCK_MASK) == 0 && pos > 0) - addNewBlock(); - int currPos = pos++; - data[currPos >>> BYTE_BLOCK_BITS][currPos & BYTE_BLOCK_MASK] = (byte)b; - } - - public void addU1(int i) { - assert i <= 0xFF : i; - addByte(i); - } - - public void addU2(int i) { - assert i <= 0xFFFF : i; - - addByte(i >>> 8); - addByte(i & 0xFF); - } - - public void addU4(int i) { - addByte(i >>> 24); - addByte((i >>> 16) & 0xFF); - addByte((i >>> 8) & 0xFF); - addByte(i & 0xFF); - } - - public void putByte(int targetPos, int b) { - assert !frozen; - assert targetPos < pos : targetPos + " >= " + pos; - - data[targetPos >>> BYTE_BLOCK_BITS][targetPos & BYTE_BLOCK_MASK] = (byte)b; - } - - public void putU2(int targetPos, int i) { - assert i < 0xFFFF : i; - putByte(targetPos, i >>> 8); - putByte(targetPos + 1, i & 0xFF); - } - - public void putU4(int targetPos, int i) { - putByte(targetPos, i >>> 24); - putByte(targetPos + 1, (i >>> 16) & 0xFF); - putByte(targetPos + 2, (i >>> 8) & 0xFF); - putByte(targetPos + 3, i & 0xFF); - } - - public int getU1(int sourcePos) { - assert sourcePos < pos : sourcePos + " >= " + pos; - return data[sourcePos >>> BYTE_BLOCK_BITS][sourcePos & BYTE_BLOCK_MASK] & 0xFF; - } - - public int getU2(int sourcePos) { - return (getU1(sourcePos) << 8) | getU1(sourcePos + 1); - } - - public int getU4(int sourcePos) { - return (getU2(sourcePos) << 16) | getU2(sourcePos + 2); - } - - public int getS1(int sourcePos) { - assert sourcePos < pos : sourcePos + " >= " + pos; - return data[sourcePos >>> BYTE_BLOCK_BITS][sourcePos & BYTE_BLOCK_MASK]; - } - - public int getS2(int sourcePos) { - return (getS1(sourcePos) << 8) | getU1(sourcePos + 1); - } - - public int getS4(int sourcePos) { - return (getS2(sourcePos) << 16) | getU2(sourcePos + 2); - } - - public void writeTo(OutputStream stream) throws IOException { - if (!frozen) freeze(); - - for (int i = 0; i < data.length && data[i] != null; ++i) { - int len = Math.min(BYTE_BLOCK_SIZE, pos - (i << BYTE_BLOCK_BITS)); - stream.write(data[i], 0, len); - } - } -} diff --git a/src/intellij/compiler.iml.SAMPLE b/src/intellij/compiler.iml.SAMPLE index 696c347b7b..a3ac93cc77 100644 --- a/src/intellij/compiler.iml.SAMPLE +++ b/src/intellij/compiler.iml.SAMPLE @@ -20,7 +20,6 @@ - diff --git a/src/intellij/fjbg.iml.SAMPLE b/src/intellij/fjbg.iml.SAMPLE deleted file mode 100644 index 03eca69246..0000000000 --- a/src/intellij/fjbg.iml.SAMPLE +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE index 37307c2029..a532d8c5c8 100644 --- a/src/intellij/scala-lang.ipr.SAMPLE +++ 
b/src/intellij/scala-lang.ipr.SAMPLE @@ -198,7 +198,6 @@ - @@ -230,7 +229,6 @@ - diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE index 112fec428f..424872ccb6 100644 --- a/src/intellij/test.iml.SAMPLE +++ b/src/intellij/test.iml.SAMPLE @@ -12,7 +12,6 @@ - diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala index 7000e8280b..0ec3f60bf5 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala @@ -84,7 +84,6 @@ class ConsoleFileManager extends FileManager { latestReflectFile = testClassesDir / "reflect" latestCompFile = testClassesDir / "compiler" latestPartestFile = testClassesDir / "partest" - latestFjbgFile = testParent / "lib" / "fjbg.jar" } else if (testBuild.isDefined) { val dir = Path(testBuild.get) @@ -94,7 +93,6 @@ class ConsoleFileManager extends FileManager { latestReflectFile = dir / "lib/scala-reflect.jar" latestCompFile = dir / "lib/scala-compiler.jar" latestPartestFile = dir / "lib/scala-partest.jar" - latestFjbgFile = testParent / "lib" / "fjbg.jar" } else { def setupQuick() { @@ -152,8 +150,6 @@ class ConsoleFileManager extends FileManager { // run setup based on most recent time pairs(pairs.keys max)() - - latestFjbgFile = prefixFile("lib/fjbg.jar") } LATEST_LIB = latestLibFile.getAbsolutePath @@ -174,7 +170,6 @@ class ConsoleFileManager extends FileManager { var latestReflectFile: File = _ var latestCompFile: File = _ var latestPartestFile: File = _ - var latestFjbgFile: File = _ def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile var testClassesDir: Directory = _ // initialize above fields diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala index 4b0ed1f82a..d3a40718c6 100644 --- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala +++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala @@ -51,9 +51,9 @@ class ReflectiveRunner { new ConsoleFileManager import fileManager. - { latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile } + { latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestScalapFile, latestActorsFile } val files = - Array(latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile) map (x => io.File(x)) + Array(latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestScalapFile, latestActorsFile) map (x => io.File(x)) val sepUrls = files map (_.toURL) var sepLoader = new URLClassLoader(sepUrls, null) diff --git a/test/disabled/presentation/akka.flags b/test/disabled/presentation/akka.flags index 56d026a62d..9bf2878f62 100644 --- a/test/disabled/presentation/akka.flags +++ b/test/disabled/presentation/akka.flags @@ -12,7 +12,7 @@ # running partest from. Run it from the root scala checkout for these files to resolve correctly # (by default when running 'ant test', or 'test/partest'). Paths use Unix separators, the test # framework translates them to the platform dependent representation. 
-# -bootclasspath lib/scala-compiler.jar:lib/scala-library.jar:lib/fjbg.jar +# -bootclasspath lib/scala-compiler.jar:lib/scala-library.jar # the following line would test using the quick compiler -# -bootclasspath build/quick/classes/compiler:build/quick/classes/library:lib/fjbg.jar +# -bootclasspath build/quick/classes/compiler:build/quick/classes/library diff --git a/test/disabled/presentation/simple-tests.opts b/test/disabled/presentation/simple-tests.opts index 8529bbf1a0..d651316984 100644 --- a/test/disabled/presentation/simple-tests.opts +++ b/test/disabled/presentation/simple-tests.opts @@ -12,7 +12,7 @@ # running partest from. Run it from the root scala checkout for these files to resolve correctly # (by default when running 'ant test', or 'test/partest'). Paths use Unix separators, the test # framework translates them to the platform dependent representation. --bootclasspath lib/scala-compiler.jar:lib/scala-library.jar:lib/fjbg.jar +-bootclasspath lib/scala-compiler.jar:lib/scala-library.jar # the following line would test using the quick compiler -# -bootclasspath build/quick/classes/compiler:build/quick/classes/library:lib/fjbg.jar +# -bootclasspath build/quick/classes/compiler:build/quick/classes/library diff --git a/test/files/ant/imported.xml b/test/files/ant/imported.xml index 5a4dfc319b..182c80aadf 100644 --- a/test/files/ant/imported.xml +++ b/test/files/ant/imported.xml @@ -56,7 +56,6 @@ INITIALISATION - @@ -67,7 +66,6 @@ INITIALISATION - @@ -78,7 +76,6 @@ INITIALISATION - @@ -89,7 +86,6 @@ INITIALISATION - @@ -98,7 +94,6 @@ INITIALISATION - diff --git a/tools/buildcp b/tools/buildcp index 766ab81f90..792d8d60b0 100755 --- a/tools/buildcp +++ b/tools/buildcp @@ -8,4 +8,4 @@ lib=$($dir/abspath $dir/../lib) build=$($dir/abspath $dir/../build) cp=$($dir/cpof $build/$1/classes):$build/asm/classes -echo $cp:$lib/fjbg.jar:$lib/msil.jar:$lib/forkjoin.jar:$lib/jline.jar:$lib/extra/'*' +echo $cp:$lib/msil.jar:$lib/forkjoin.jar:$lib/jline.jar:$lib/extra/'*' diff --git a/tools/strapcp b/tools/strapcp index 6a46b4e1c8..6a4044ae24 100755 --- a/tools/strapcp +++ b/tools/strapcp @@ -6,7 +6,6 @@ strap="$dir/../build/strap/classes" [[ -d $strap ]] || { echo "Error: no directory at $strap"; exit 1; } cp=$($dir/cpof $strap) -fjbg=$($dir/abspath $dir/../lib/fjbg.jar) asm=$($dir/abspath $dir/../build/asm/classes) -echo $cp:$fjbg:$asm +echo $cp:$asm -- cgit v1.2.3 From 96fa31d0a3cf7ff401f9197cd0e12acd296e55b1 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 5 Dec 2012 12:49:21 -0800 Subject: Expunged the .net backend. It lives on in a branch born from this commit's parent. It's abrupt; no attempt is made to offer a "smooth transition" for the serious msil userbase, population zero. If anyone feels very strongly that such a transition is necessary, I will be happy to talk you into feeling differently. 
--- build.detach.xml | 7 +- build.examples.xml | 14 +- build.xml | 98 - lib/msil.jar.desired.sha1 | 1 - project/Build.scala | 30 +- project/Layers.scala | 11 +- src/compiler/scala/tools/ant/Scalac.scala | 15 +- .../scala/tools/ant/sabbus/ScalacFork.scala | 5 +- src/compiler/scala/tools/ant/sabbus/TaskArgs.scala | 2 - src/compiler/scala/tools/nsc/Global.scala | 16 +- src/compiler/scala/tools/nsc/Main.scala | 7 +- src/compiler/scala/tools/nsc/Properties.scala | 3 - src/compiler/scala/tools/nsc/ScalaDoc.scala | 9 +- .../scala/tools/nsc/backend/MSILPlatform.scala | 69 - .../nsc/backend/icode/ExceptionHandlers.scala | 3 - .../scala/tools/nsc/backend/icode/GenICode.scala | 262 +-- .../tools/nsc/backend/icode/Linearizers.scala | 138 -- .../scala/tools/nsc/backend/icode/Opcodes.scala | 69 - .../scala/tools/nsc/backend/icode/TypeKinds.scala | 7 - .../scala/tools/nsc/backend/msil/GenMSIL.scala | 2244 -------------------- src/compiler/scala/tools/nsc/io/MsilFile.scala | 15 - .../scala/tools/nsc/settings/ScalaSettings.scala | 4 - .../tools/nsc/settings/StandardScalaSettings.scala | 2 +- .../scala/tools/nsc/symtab/SymbolLoaders.scala | 19 +- .../scala/tools/nsc/symtab/clr/CLRTypes.scala | 132 -- .../scala/tools/nsc/symtab/clr/TypeParser.scala | 849 -------- .../scala/tools/nsc/transform/CleanUp.scala | 29 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 2 +- .../scala/tools/nsc/transform/TailCalls.scala | 3 +- .../scala/tools/nsc/transform/UnCurry.scala | 38 +- .../scala/tools/nsc/typechecker/Macros.scala | 3 - .../tools/nsc/typechecker/MethodSynthesis.scala | 6 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 5 +- .../scala/tools/nsc/typechecker/Typers.scala | 46 +- src/compiler/scala/tools/nsc/util/ClassPath.scala | 3 +- .../scala/tools/nsc/util/MsilClassPath.scala | 166 -- src/intellij/compiler.iml.SAMPLE | 1 - src/intellij/msil.iml.SAMPLE | 24 - src/intellij/scala-lang.ipr.SAMPLE | 1 - src/intellij/test.iml.SAMPLE | 1 - src/manual/scala/man1/scalac.scala | 24 - src/msil/ch/epfl/lamp/compiler/msil/Assembly.java | 253 --- .../ch/epfl/lamp/compiler/msil/AssemblyName.java | 96 - src/msil/ch/epfl/lamp/compiler/msil/Attribute.java | 654 ------ .../ch/epfl/lamp/compiler/msil/BindingFlags.java | 169 -- .../lamp/compiler/msil/CallingConventions.java | 75 - .../epfl/lamp/compiler/msil/ConstructedType.java | 48 - .../epfl/lamp/compiler/msil/ConstructorInfo.java | 54 - .../compiler/msil/CustomAttributeProvider.java | 82 - .../ch/epfl/lamp/compiler/msil/CustomModifier.java | 45 - .../epfl/lamp/compiler/msil/EventAttributes.java | 32 - src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java | 58 - .../epfl/lamp/compiler/msil/FieldAttributes.java | 119 -- src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java | 141 -- .../compiler/msil/GenericParamAndConstraints.java | 40 - .../lamp/compiler/msil/HasCustomModifiers.java | 9 - .../compiler/msil/ICustomAttributeProvider.java | 57 - .../ch/epfl/lamp/compiler/msil/MemberInfo.java | 47 - .../ch/epfl/lamp/compiler/msil/MemberTypes.java | 81 - .../epfl/lamp/compiler/msil/MethodAttributes.java | 158 -- .../ch/epfl/lamp/compiler/msil/MethodBase.java | 198 -- .../lamp/compiler/msil/MethodImplAttributes.java | 116 - .../ch/epfl/lamp/compiler/msil/MethodInfo.java | 69 - src/msil/ch/epfl/lamp/compiler/msil/Module.java | 155 -- .../ch/epfl/lamp/compiler/msil/PEAssembly.java | 69 - src/msil/ch/epfl/lamp/compiler/msil/PEFile.java | 941 -------- src/msil/ch/epfl/lamp/compiler/msil/PEModule.java | 456 ---- src/msil/ch/epfl/lamp/compiler/msil/PEType.java | 419 ---- 
.../lamp/compiler/msil/ParameterAttributes.java | 72 - .../ch/epfl/lamp/compiler/msil/ParameterInfo.java | 76 - .../ch/epfl/lamp/compiler/msil/PrimitiveType.java | 62 - .../lamp/compiler/msil/PropertyAttributes.java | 45 - .../ch/epfl/lamp/compiler/msil/PropertyInfo.java | 104 - src/msil/ch/epfl/lamp/compiler/msil/Type.java | 1142 ---------- .../ch/epfl/lamp/compiler/msil/TypeAttributes.java | 190 -- src/msil/ch/epfl/lamp/compiler/msil/Version.java | 71 - .../lamp/compiler/msil/emit/AssemblyBuilder.scala | 122 -- .../compiler/msil/emit/ConstructorBuilder.scala | 64 - .../lamp/compiler/msil/emit/FieldBuilder.scala | 60 - .../msil/emit/ICustomAttributeSetter.scala | 18 - .../epfl/lamp/compiler/msil/emit/ILGenerator.scala | 539 ----- .../lamp/compiler/msil/emit/ILPrinterVisitor.scala | 860 -------- .../ch/epfl/lamp/compiler/msil/emit/Label.scala | 147 -- .../lamp/compiler/msil/emit/LocalBuilder.scala | 44 - .../lamp/compiler/msil/emit/MethodBuilder.scala | 70 - .../lamp/compiler/msil/emit/ModuleBuilder.scala | 136 -- .../msil/emit/MultipleFilesILPrinterVisitor.scala | 137 -- .../ch/epfl/lamp/compiler/msil/emit/OpCode.scala | 1948 ----------------- .../ch/epfl/lamp/compiler/msil/emit/OpCodes.scala | 1205 ----------- .../lamp/compiler/msil/emit/ParameterBuilder.scala | 44 - .../msil/emit/SingleFileILPrinterVisitor.scala | 93 - .../epfl/lamp/compiler/msil/emit/TypeBuilder.scala | 261 --- .../epfl/lamp/compiler/msil/emit/Visitable.scala | 24 - .../ch/epfl/lamp/compiler/msil/emit/Visitor.scala | 58 - .../compiler/msil/tests/CustomAttributesTest.java | 31 - .../lamp/compiler/msil/tests/JavaTypeTest.java | 18 - .../epfl/lamp/compiler/msil/tests/MembersTest.java | 100 - .../epfl/lamp/compiler/msil/tests/TableDump.java | 311 --- .../ch/epfl/lamp/compiler/msil/tests/Test.java | 92 - .../epfl/lamp/compiler/msil/util/PECustomMod.java | 23 - .../ch/epfl/lamp/compiler/msil/util/PESection.java | 57 - .../ch/epfl/lamp/compiler/msil/util/PEStream.java | 199 -- .../ch/epfl/lamp/compiler/msil/util/Signature.java | 129 -- .../ch/epfl/lamp/compiler/msil/util/Table.java | 1859 ---------------- .../scala/reflect/internal/Definitions.scala | 54 +- src/reflect/scala/reflect/internal/StdNames.scala | 81 +- .../scala/reflect/internal/SymbolTable.scala | 6 - test/attic/files/cli/test1/Main.check.scalac | 6 +- test/attic/files/cli/test2/Main.check.scalac | 6 +- test/attic/files/cli/test3/Main.check.scalac | 6 +- tools/buildcp | 2 +- 111 files changed, 83 insertions(+), 19513 deletions(-) delete mode 100644 lib/msil.jar.desired.sha1 delete mode 100644 src/compiler/scala/tools/nsc/backend/MSILPlatform.scala delete mode 100644 src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala delete mode 100644 src/compiler/scala/tools/nsc/io/MsilFile.scala delete mode 100644 src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala delete mode 100644 src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala delete mode 100644 src/compiler/scala/tools/nsc/util/MsilClassPath.scala delete mode 100644 src/intellij/msil.iml.SAMPLE delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/Assembly.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/Attribute.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java 
delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/Module.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/PEFile.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/PEModule.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/PEType.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/Type.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/Version.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala delete mode 100644 
src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java delete mode 100644 src/msil/ch/epfl/lamp/compiler/msil/util/Table.java (limited to 'src') diff --git a/build.detach.xml b/build.detach.xml index 132c812a26..b13528f462 100644 --- a/build.detach.xml +++ b/build.detach.xml @@ -11,7 +11,7 @@ SuperSabbus for Scala detach plugin. - + @@ -73,7 +73,6 @@ QUICK BUILD (QUICK) - @@ -120,9 +119,9 @@ QUICK BUILD (QUICK) - + - + - - @@ -59,7 +57,7 @@ INITIALISATION - - - - - - @@ -112,7 +101,6 @@ INITIALISATION - diff --git a/build.xml b/build.xml index a828d71a44..214887f946 100644 --- a/build.xml +++ b/build.xml @@ -203,7 +203,6 @@ PROPERTIES - @@ -463,7 +462,6 @@ INITIALISATION - @@ -669,9 +667,6 @@ LOCAL REFERENCE BUILD (LOCKER) - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -1163,33 +1120,6 @@ QUICK BUILD (QUICK) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/msil.jar.desired.sha1 b/lib/msil.jar.desired.sha1 deleted file mode 100644 index 9396b273ab..0000000000 --- a/lib/msil.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -d48cb950ceded82a5e0ffae8ef2c68d0923ed00c *msil.jar diff --git a/project/Build.scala b/project/Build.scala index a50a572d54..80a3fb42f1 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -11,7 +11,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { override lazy val settings = super.settings ++ Versions.settings ++ Seq( autoScalaLibrary := false, resolvers += Resolver.url( - "Typesafe nightlies", + "Typesafe nightlies", url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/") )(Resolver.ivyStylePatterns), resolvers ++= Seq( @@ -21,14 +21,14 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { organization := "org.scala-lang", version <<= Versions.mavenVersion, pomExtra := epflPomExtra - ) + ) // Collections of projects to run 'compile' on. lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, forkjoin, fjbg) // Collection of projects to 'package' and 'publish' together. lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, actors, continuationsPlugin, jline, scalap) lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite) - + private def epflPomExtra = ( 2002 @@ -47,7 +47,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { ) - + // Settings used to make sure publishing goes smoothly. 
def publishSettings: Seq[Setting[_]] = Seq( ivyScala ~= ((is: Option[IvyScala]) => is.map(_.copy(checkExplicit = false))), @@ -91,7 +91,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { lazy val aaa_root = Project("scala", file(".")) settings(projectSettings: _*) settings(ShaResolve.settings: _*) // External dependencies used for various projects - lazy val externalDeps: Setting[_] = libraryDependencies <<= (sbtVersion)(v => + lazy val externalDeps: Setting[_] = libraryDependencies <<= (sbtVersion)(v => Seq( "org.apache.ant" % "ant" % "1.8.2", "org.scala-sbt" % "compiler-interface" % v % "provided" @@ -134,7 +134,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { lazy val jline = Project("jline", file("src/jline")) // Fast Java Bytecode Generator (nested in every scala-compiler.jar) lazy val fjbg = Project("fjbg", file(".")) settings(settingOverrides : _*) - // Our wrapped version of msil. + // Our wrapped version of asm. lazy val asm = Project("asm", file(".")) settings(settingOverrides : _*) // Forkjoin backport lazy val forkjoin = Project("forkjoin", file(".")) settings(settingOverrides : _*) @@ -175,9 +175,9 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { // -------------------------------------------------------------- // Projects dependent on layered compilation (quick) // -------------------------------------------------------------- - def addCheaterDependency(projectName: String): Setting[_] = - pomPostProcess <<= (version, organization, pomPostProcess) apply { (v,o,k) => - val dependency: scala.xml.Node = + def addCheaterDependency(projectName: String): Setting[_] = + pomPostProcess <<= (version, organization, pomPostProcess) apply { (v,o,k) => + val dependency: scala.xml.Node = {o} {projectName} @@ -193,10 +193,10 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { case n: scala.xml.Elem if n.label == "dependencies" => n } isEmpty) // TODO - Keep namespace on project... - k andThen { + k andThen { case n @ { nested@_*} if hasDependencies(n) => {nested}{dependency} - case { nested@_*} => + case { nested@_*} => { nested map fixDependencies } } } @@ -205,7 +205,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { lazy val dependentProjectSettings = settingOverrides ++ Seq(quickScalaInstance, quickScalaLibraryDependency, addCheaterDependency("scala-library")) lazy val actors = Project("scala-actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided") lazy val swing = Project("scala-swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided") - // This project will generate man pages (in man1 and html) for scala. + // This project will generate man pages (in man1 and html) for scala. 
lazy val manmakerSettings: Seq[Setting[_]] = dependentProjectSettings :+ externalDeps lazy val manmaker = Project("manual", file(".")) settings(manmakerSettings:_*) @@ -234,7 +234,7 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { lazy val continuationsPlugin = Project("continuations-plugin", file(".")) settings(continuationsPluginSettings:_*) lazy val continuationsLibrarySettings = dependentProjectSettings ++ Seq( scalaSource in Compile <<= baseDirectory(_ / "src/continuations/library/"), - scalacOptions in Compile <++= (exportedProducts in Compile in continuationsPlugin) map { + scalacOptions in Compile <++= (exportedProducts in Compile in continuationsPlugin) map { case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath) } ) @@ -297,11 +297,11 @@ object ScalaBuild extends Build with Layers with Packaging with Testing { lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaReflect) lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaReflect, scalaCompiler) - + // -------------------------------------------------------------- // Generating Documentation. // -------------------------------------------------------------- - + // TODO - Migrate this into the dist project. // Scaladocs lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq( diff --git a/project/Layers.scala b/project/Layers.scala index 35cc79c130..259e460a52 100644 --- a/project/Layers.scala +++ b/project/Layers.scala @@ -47,19 +47,19 @@ trait Layers extends Build { case _ => error("Cannot build a ScalaReference with more than one classpath element") } } - + /** Creates a "layer" of Scala compilation. That is, this will build the next version of Scala from a previous version. * Returns the library project and compiler project from the next layer. * Note: The library and compiler are not *complete* in the sense that they are missing things like "actors" and "fjbg". */ def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]], autoLock: Boolean = false) : (Project, Project, Project) = { - val autoLockSettings: Seq[Setting[_]] = - if(autoLock) Seq(compile in Compile <<= (compile in Compile, lock) apply { (c, l) => + val autoLockSettings: Seq[Setting[_]] = + if(autoLock) Seq(compile in Compile <<= (compile in Compile, lock) apply { (c, l) => c flatMapR { cResult => val result = Result.tryValue(cResult) l mapR { tx => result } } - }) + }) else Seq.empty @@ -69,7 +69,7 @@ trait Layers extends Build { unmanagedClasspath in Compile <<= (exportedProducts in forkjoin in Compile).identity, managedClasspath in Compile := Seq(), scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "library"), - resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "library"), + resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "library"), defaultExcludes in unmanagedResources := ("*.scala" | "*.java" | "*.disabled"), // TODO - Allow other scalac option settings. 
scalacOptions in Compile <++= (scalaSource in Compile) map (src => Seq("-sourcepath", src.getAbsolutePath)), @@ -96,7 +96,6 @@ trait Layers extends Build { version := layer, scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "compiler"), resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "compiler"), - unmanagedSourceDirectories in Compile <+= (baseDirectory) apply (_ / "src" / "msil"), defaultExcludes := ("tests"), defaultExcludes in unmanagedResources := "*.scala", resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("compiler.properties"), diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala index cf3b5f949b..e6671ba093 100644 --- a/src/compiler/scala/tools/ant/Scalac.scala +++ b/src/compiler/scala/tools/ant/Scalac.scala @@ -56,8 +56,6 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} * - `usejavacp`, * - `failonerror`, * - `scalacdebugging`, - * - `assemname`, - * - `assemrefs`. * * It also takes the following parameters as nested elements: * - `src` (for `srcdir`), @@ -100,7 +98,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared { /** Defines valid values for the `target` property. */ object Target extends PermissibleValue { - val values = List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil") + val values = List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7") } /** Defines valid values for the `deprecation` and `unchecked` properties. */ @@ -170,11 +168,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared { /** Indicates whether compilation errors will fail the build; defaults to true. */ protected var failonerror: Boolean = true - // Name of the output assembly (only relevant with -target:msil) - protected var assemname: Option[String] = None - // List of assemblies referenced by the program (only relevant with -target:msil) - protected var assemrefs: Option[String] = None - /** Prints out the files being compiled by the scalac ant task * (not only the number of files). */ protected var scalacDebugging: Boolean = false @@ -421,9 +414,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared { * @param input The specified flag */ def setScalacdebugging(input: Boolean) { scalacDebugging = input } - def setAssemname(input: String) { assemname = Some(input) } - def setAssemrefs(input: String) { assemrefs = Some(input) } - /** Sets the `compilerarg` as a nested compilerarg Ant parameter. * @return A compiler argument to be configured. 
*/ def createCompilerArg(): ImplementationSpecificArgument = { @@ -616,9 +606,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared { if (!unchecked.isEmpty) settings.unchecked.value = unchecked.get if (!usejavacp.isEmpty) settings.usejavacp.value = usejavacp.get - if (!assemname.isEmpty) settings.assemname.value = assemname.get - if (!assemrefs.isEmpty) settings.assemrefs.value = assemrefs.get - val jvmargs = scalacCompilerArgs.getArgs filter (_ startsWith "-J") if (!jvmargs.isEmpty) settings.jvmargs.value = jvmargs.toList val defines = scalacCompilerArgs.getArgs filter (_ startsWith "-D") diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala index 9cdf484080..d5545fe76a 100644 --- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala +++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala @@ -80,7 +80,7 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs { private def createMapper() = { val mapper = new GlobPatternMapper() - val extension = if (isMSIL) "*.msil" else "*.class" + val extension = "*.class" mapper setTo extension mapper setFrom "*.scala" @@ -104,9 +104,6 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs { sourcePath foreach (settings.sourcepath = _) settings.extraParams = extraArgsFlat - if (isMSIL) - settings.sourcedir = sourceDir - val mapper = createMapper() val includedFiles: Array[File] = diff --git a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala index 6bb1aaa306..b061bcf7fb 100644 --- a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala +++ b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala @@ -98,6 +98,4 @@ trait TaskArgs extends CompilationPathProperty { val parts = a.getParts if(parts eq null) Seq[String]() else parts.toSeq } - - def isMSIL = compTarget exists (_ == "msil") } diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 397e6c42d7..654dc92a9e 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -23,7 +23,7 @@ import ast.parser._ import typechecker._ import transform._ import backend.icode.{ ICodes, GenICode, ICodeCheckers } -import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform } +import backend.{ ScalaPrimitives, Platform, JavaPlatform } import backend.jvm.{GenJVM, GenASM} import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination } import backend.icode.analysis._ @@ -77,8 +77,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) type ThisPlatform = Platform { val global: Global.this.type } lazy val platform: ThisPlatform = - if (forMSIL) new { val global: Global.this.type = Global.this } with MSILPlatform - else new { val global: Global.this.type = Global.this } with JavaPlatform + new { val global: Global.this.type = Global.this } with JavaPlatform type PlatformClassPath = ClassPath[platform.BinaryRepr] type OptClassPath = Option[PlatformClassPath] @@ -620,7 +619,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) object terminal extends { val global: Global.this.type = Global.this val phaseName = "terminal" - val runsAfter = List("jvm", "msil") + val runsAfter = List("jvm") val runsRightAfter = None } with SubComponent { private var cache: Option[GlobalPhase] = None @@ -1314,7 +1313,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val 
closelimPhase = phaseNamed("closelim") val dcePhase = phaseNamed("dce") // val jvmPhase = phaseNamed("jvm") - // val msilPhase = phaseNamed("msil") def runIsAt(ph: Phase) = globalPhase.id == ph.id def runIsAtOptimiz = { @@ -1675,14 +1673,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } }) } - // In order to not outright break code which overrides onlyPresentation (like sbt 0.7.5.RC0) - // I restored and deprecated it. That would be enough to avoid the compilation - // failure, but the override wouldn't accomplish anything. So now forInteractive - // and forScaladoc default to onlyPresentation, which is the same as defaulting - // to false except in old code. The downside is that this leaves us calling a - // deprecated method: but I see no simple way out, so I leave it for now. - // def forJVM = settings.target.value startsWith "jvm" - override def forMSIL = settings.target.value startsWith "msil" def forInteractive = false def forScaladoc = false def createJavadoc = false diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala index 5a3ea56f67..a4b22b0e11 100644 --- a/src/compiler/scala/tools/nsc/Main.scala +++ b/src/compiler/scala/tools/nsc/Main.scala @@ -9,7 +9,6 @@ import java.io.File import File.pathSeparator import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager } import scala.tools.nsc.io.AbstractFile -import Properties.msilLibPath /** The main class for NSC, a compiler for the programming * language Scala. @@ -60,11 +59,7 @@ object Main extends Driver with EvalLoop { } false } - else { - if (settings.target.value == "msil") - msilLibPath foreach (x => settings.assemrefs.value += (pathSeparator + x)) - true - } + else true override def newCompiler(): Global = if (settings.Yrangepos.value) new Global(settings, reporter) with interactive.RangePositions diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index 570d5572d6..feb4ded2f2 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -16,9 +16,6 @@ object Properties extends scala.util.PropertiesTrait { def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ") def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ") - // settings based on system properties - def msilLibPath = propOrNone("msil.libpath") - // derived values def isEmacsShell = propOrEmpty("env.emacs") != "" } diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala index ba434bc797..14b76b53b3 100644 --- a/src/compiler/scala/tools/nsc/ScalaDoc.scala +++ b/src/compiler/scala/tools/nsc/ScalaDoc.scala @@ -10,7 +10,6 @@ import java.io.File.pathSeparator import scala.tools.nsc.doc.DocFactory import scala.tools.nsc.reporters.ConsoleReporter import scala.reflect.internal.util.FakePos -import Properties.msilLibPath /** The main class for scaladoc, a front-end for the Scala compiler * that generates documentation from source files. 
@@ -42,12 +41,8 @@ class ScalaDoc { reporter.warning(null, "Phases are restricted when using Scaladoc") else if (docSettings.help.value || !hasFiles) reporter.echo(command.usageMsg) - else try { - if (docSettings.target.value == "msil") - msilLibPath foreach (x => docSettings.assemrefs.value += (pathSeparator + x)) - - new DocFactory(reporter, docSettings) document command.files - } + else + try { new DocFactory(reporter, docSettings) document command.files } catch { case ex @ FatalError(msg) => if (docSettings.debug.value) ex.printStackTrace() diff --git a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala deleted file mode 100644 index 4493685b52..0000000000 --- a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala +++ /dev/null @@ -1,69 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package backend - -import ch.epfl.lamp.compiler.{ msil => msillib } -import util.{ ClassPath, MsilClassPath } -import msil.GenMSIL -import io.{ AbstractFile, MsilFile } - -trait MSILPlatform extends Platform { - import global._ - import definitions.{ ComparatorClass, BoxedNumberClass, getMember } - - type BinaryRepr = MsilFile - - if (settings.verbose.value) - inform("[AssemRefs = " + settings.assemrefs.value + "]") - - // phaseName = "msil" - object genMSIL extends { - val global: MSILPlatform.this.global.type = MSILPlatform.this.global - val runsAfter = List[String]("dce") - val runsRightAfter = None - } with GenMSIL - - lazy val classPath = MsilClassPath.fromSettings(settings) - def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]]) - // See discussion in JavaPlatForm for why we need a cast here. - - /** Update classpath with a substituted subentry */ - def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) = - throw new UnsupportedOperationException("classpath invalidations not supported on MSIL") - - def platformPhases = List( - genMSIL // generate .msil files - ) - - lazy val externalEquals = getMember(ComparatorClass.companionModule, nme.equals_) - def isMaybeBoxed(sym: Symbol) = sym isNonBottomSubClass BoxedNumberClass - - def newClassLoader(bin: MsilFile): loaders.SymbolLoader = new loaders.MsilFileLoader(bin) - - /** - * Tells whether a class should be loaded and entered into the package - * scope. On .NET, this method returns `false` for all synthetic classes - * (anonymous classes, implementation classes, module classes), their - * symtab is encoded in the pickle of another class. - */ - def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean = { - if (cls.binary.isDefined) { - val typ = cls.binary.get.msilType - if (typ.IsDefined(loaders.clrTypes.SCALA_SYMTAB_ATTR, false)) { - val attrs = typ.GetCustomAttributes(loaders.clrTypes.SCALA_SYMTAB_ATTR, false) - assert(attrs.length == 1, attrs.length) - val a = attrs(0).asInstanceOf[msillib.Attribute] - // symtab_constr takes a byte array argument (the pickle), i.e. typ has a pickle. - // otherwise, symtab_default_constr was used, which marks typ as scala-synthetic. 
- a.getConstructor() == loaders.clrTypes.SYMTAB_CONSTR - } else true // always load non-scala types - } else true // always load source - } - - def needCompile(bin: MsilFile, src: AbstractFile) = - false // always use compiled file on .net -} diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala index 7c2961778f..a872e9cd00 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala @@ -27,9 +27,6 @@ trait ExceptionHandlers { private var _startBlock: BasicBlock = _; var finalizer: Finalizer = _; - /** Needed for the MSIL backend. */ - var resultKind: TypeKind = _; - def setStartBlock(b: BasicBlock) = { _startBlock = b; b.exceptionHandlerStart = true diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index f07c331fb0..603e1209a5 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -164,10 +164,7 @@ abstract class GenICode extends SubComponent { private def genStat(tree: Tree, ctx: Context): Context = tree match { case Assign(lhs @ Select(_, _), rhs) => val isStatic = lhs.symbol.isStaticMember - var ctx1 = if (isStatic) ctx - else if (forMSIL && msil_IsValuetypeInstField(lhs.symbol)) - msil_genLoadQualifierAddress(lhs, ctx) - else genLoadQualifier(lhs, ctx) + var ctx1 = if (isStatic) ctx else genLoadQualifier(lhs, ctx) ctx1 = genLoad(rhs, ctx1, toTypeKind(lhs.symbol.info)) ctx1.bb.emit(STORE_FIELD(lhs.symbol, isStatic), tree.pos) @@ -301,9 +298,6 @@ abstract class GenICode extends SubComponent { val Apply(fun, args) = tree val monitor = ctx.makeLocal(tree.pos, ObjectClass.tpe, "monitor") var monitorResult: Local = null - - // if the synchronized block returns a result, store it in a local variable. 
just leaving - // it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks) val argTpe = args.head.tpe val hasResult = expectedType != UNIT if (hasResult) @@ -461,131 +455,6 @@ abstract class GenICode extends SubComponent { ) } - /** - * forMSIL - */ - private def msil_IsValuetypeInstMethod(msym: Symbol) = ( - loaders.clrTypes.methods get msym exists (mMSIL => - mMSIL.IsInstance && mMSIL.DeclaringType.IsValueType - ) - ) - private def msil_IsValuetypeInstField(fsym: Symbol) = ( - loaders.clrTypes.fields get fsym exists (fMSIL => - !fMSIL.IsStatic && fMSIL.DeclaringType.IsValueType - ) - ) - - /** - * forMSIL: Adds a local var, the emitted code requires one more slot on the stack as on entry - */ - private def msil_genLoadZeroOfNonEnumValuetype(ctx: Context, kind: TypeKind, pos: Position, leaveAddressOnStackInstead: Boolean) { - val REFERENCE(clssym) = kind - assert(loaders.clrTypes.isNonEnumValuetype(clssym), clssym) - val local = ctx.makeLocal(pos, clssym.tpe, "tmp") - ctx.method.addLocal(local) - ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(local), pos) - ctx.bb.emit(CIL_INITOBJ(kind), pos) - val instr = if (leaveAddressOnStackInstead) - CIL_LOAD_LOCAL_ADDRESS(local) - else - LOAD_LOCAL(local) - ctx.bb.emit(instr, pos) - } - - /** - * forMSIL - */ - private def msil_genLoadAddressOf(tree: Tree, ctx: Context, expectedType: TypeKind, butRawValueIsAlsoGoodEnough: Boolean): Context = { - var generatedType = expectedType - var addressTaken = false - debuglog("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos)) - - var resCtx: Context = tree match { - - // emits CIL_LOAD_FIELD_ADDRESS - case Select(qualifier, selector) if (!tree.symbol.isModule) => - addressTaken = true - val sym = tree.symbol - generatedType = toTypeKind(sym.info) - - if (sym.isStaticMember) { - ctx.bb.emit(CIL_LOAD_FIELD_ADDRESS(sym, true), tree.pos) - ctx - } else { - val ctx1 = genLoadQualifier(tree, ctx) - ctx1.bb.emit(CIL_LOAD_FIELD_ADDRESS(sym, false), tree.pos) - ctx1 - } - - // emits CIL_LOAD_LOCAL_ADDRESS - case Ident(name) if (!tree.symbol.isPackage && !tree.symbol.isModule)=> - addressTaken = true - val sym = tree.symbol - try { - val Some(l) = ctx.method.lookupLocal(sym) - ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(l), tree.pos) - generatedType = l.kind // actually, should be "V&" but the callsite is aware of this - } catch { - case ex: MatchError => - abort("symbol " + sym + " does not exist in " + ctx.method) - } - ctx - - // emits CIL_LOAD_ARRAY_ITEM_ADDRESS - case Apply(fun, args) => - if (isPrimitive(fun.symbol)) { - val sym = tree.symbol - val Select(receiver, _) = fun - val code = scalaPrimitives.getPrimitive(sym, receiver.tpe) - - if (isArrayOp(code)) { - val arrayObj = receiver - val k = toTypeKind(arrayObj.tpe) - val ARRAY(elementType) = k - if (scalaPrimitives.isArrayGet(code)) { - var ctx1 = genLoad(arrayObj, ctx, k) - // load argument on stack - debugassert(args.length == 1, "Too many arguments for array get operation: " + tree) - ctx1 = genLoad(args.head, ctx1, INT) - generatedType = elementType // actually "managed pointer to element type" but the callsite is aware of this - ctx1.bb.emit(CIL_LOAD_ARRAY_ITEM_ADDRESS(elementType), tree.pos) - addressTaken = true - ctx1 - } else null - } else null - } else null - - case This(qual) => - /* TODO: this case handler is a placeholder for the time when Level 2 support for valuetypes is in place, - in particular when invoking other methods on this where this is a valuetype value (boxed or not). 
- As receiver, a managed pointer is expected, and a plain ldarg.0 achieves just that. */ - addressTaken = true - genLoad(tree, ctx, expectedType) - - case _ => - null /* A method returning ByRef won't pass peverify, so I guess this case handler is dead code. - Even if it's not, the code below to handler !addressTaken below. */ - } - - if (!addressTaken) { - resCtx = genLoad(tree, ctx, expectedType) - if (!butRawValueIsAlsoGoodEnough) { - // raw value on stack (must be an intermediate result, e.g. returned by method call), take address - addressTaken = true - val boxType = expectedType // toTypeKind(expectedType /* TODO FIXME */) - resCtx.bb.emit(BOX(boxType), tree.pos) - resCtx.bb.emit(CIL_UNBOX(boxType), tree.pos) - } - } - - // emit conversion - if (generatedType != expectedType) - abort("Unexpected tree in msil_genLoadAddressOf: " + tree + " at: " + tree.pos) - - resCtx - } - - /** * Generate code for trees that produce values on the stack * @@ -808,31 +677,15 @@ abstract class GenICode extends SubComponent { debugassert(ctor.owner == cls, "Symbol " + ctor.owner.fullName + " is different than " + tpt) - val ctx2 = if (forMSIL && loaders.clrTypes.isNonEnumValuetype(cls)) { - /* parameterful constructors are the only possible custom constructors, - a default constructor can't be defined for valuetypes, CLR dixit */ - val isDefaultConstructor = args.isEmpty - if (isDefaultConstructor) { - msil_genLoadZeroOfNonEnumValuetype(ctx, rt, tree.pos, leaveAddressOnStackInstead = false) - ctx - } else { - val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx) - ctx1.bb.emit(CIL_NEWOBJ(ctor), tree.pos) - ctx1 - } - } else { - val nw = NEW(rt) - ctx.bb.emit(nw, tree.pos) - ctx.bb.emit(DUP(generatedType)) - val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx) - - val init = CALL_METHOD(ctor, Static(true)) - nw.init = init - ctx1.bb.emit(init, tree.pos) - ctx1 - } - ctx2 + val nw = NEW(rt) + ctx.bb.emit(nw, tree.pos) + ctx.bb.emit(DUP(generatedType)) + val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx) + val init = CALL_METHOD(ctor, Static(true)) + nw.init = init + ctx1.bb.emit(init, tree.pos) + ctx1 case _ => abort("Cannot instantiate " + tpt + " of kind: " + generatedType) } @@ -866,12 +719,6 @@ abstract class GenICode extends SubComponent { ctx1.bb.emit(UNBOX(boxType), expr.pos) ctx1 - case Apply(fun @ _, List(expr)) if (forMSIL && loaders.clrTypes.isAddressOf(fun.symbol)) => - debuglog("ADDRESSOF : " + fun.symbol.fullName); - val ctx1 = msil_genLoadAddressOf(expr, ctx, toTypeKind(expr.tpe), butRawValueIsAlsoGoodEnough = false) - generatedType = toTypeKind(fun.symbol.tpe.resultType) - ctx1 - case app @ Apply(fun, args) => def genLoadApply6 = { val sym = fun.symbol @@ -913,14 +760,7 @@ abstract class GenICode extends SubComponent { else Dynamic - var ctx1 = - if (invokeStyle.hasInstance) { - if (forMSIL && !(invokeStyle.isInstanceOf[SuperCall]) && msil_IsValuetypeInstMethod(sym)) - msil_genLoadQualifierAddress(fun, ctx) - else - genLoadQualifier(fun, ctx) - } else ctx - + var ctx1 = if (invokeStyle.hasInstance) genLoadQualifier(fun, ctx) else ctx ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1) val cm = CALL_METHOD(sym, invokeStyle) @@ -956,7 +796,6 @@ abstract class GenICode extends SubComponent { genLoadApply6 case ApplyDynamic(qual, args) => - assert(!forMSIL, tree) // TODO - this is where we'd catch dynamic applies for invokedynamic. 
sys.error("No invokedynamic support yet.") // val ctx1 = genLoad(qual, ctx, ObjectReference) @@ -1189,15 +1028,6 @@ abstract class GenICode extends SubComponent { abort("Unknown qualifier " + tree) } - /** forMSIL */ - private def msil_genLoadQualifierAddress(tree: Tree, ctx: Context): Context = - tree match { - case Select(qualifier, _) => - msil_genLoadAddressOf(qualifier, ctx, toTypeKind(qualifier.tpe), butRawValueIsAlsoGoodEnough = false) - case _ => - abort("Unknown qualifier " + tree) - } - /** * Generate code that loads args into label parameters. */ @@ -1385,7 +1215,7 @@ abstract class GenICode extends SubComponent { def genStringConcat(tree: Tree, ctx: Context): Context = { liftStringConcat(tree) match { // Optimization for expressions of the form "" + x. We can avoid the StringBuilder. - case List(Literal(Constant("")), arg) if !forMSIL => + case List(Literal(Constant("")), arg) => debuglog("Rewriting \"\" + x as String.valueOf(x) for: " + arg) val ctx1 = genLoad(arg, ctx, ObjectReference) ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(false)), arg.pos) @@ -1988,10 +1818,9 @@ abstract class GenICode extends SubComponent { * 'covered' by this exception handler (in addition to the * previously active handlers). */ - private def newExceptionHandler(cls: Symbol, resultKind: TypeKind, pos: Position): ExceptionHandler = { + private def newExceptionHandler(cls: Symbol, pos: Position): ExceptionHandler = { handlerCount += 1 val exh = new ExceptionHandler(method, newTermNameCached("" + handlerCount), cls, pos) - exh.resultKind = resultKind method.addHandler(exh) handlers = exh :: handlers debuglog("added handler: " + exh); @@ -2051,7 +1880,7 @@ abstract class GenICode extends SubComponent { def Try(body: Context => Context, handlers: List[(Symbol, TypeKind, Context => Context)], finalizer: Tree, - tree: Tree) = if (forMSIL) TryMsil(body, handlers, finalizer, tree) else { + tree: Tree) = { val outerCtx = this.dup // context for generating exception handlers, covered by finalizer val finalizerCtx = this.dup // context for generating finalizer handler @@ -2083,7 +1912,7 @@ abstract class GenICode extends SubComponent { if (finalizer != EmptyTree) { - val exh = outerCtx.newExceptionHandler(NoSymbol, toTypeKind(finalizer.tpe), finalizer.pos) // finalizer covers exception handlers + val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers this.addActiveHandler(exh) // .. and body aswell val ctx = finalizerCtx.enterExceptionHandler(exh) val exception = ctx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc") @@ -2098,7 +1927,7 @@ abstract class GenICode extends SubComponent { } for ((sym, kind, handler) <- handlers) { - val exh = this.newExceptionHandler(sym, kind, tree.pos) + val exh = this.newExceptionHandler(sym, tree.pos) var ctx1 = outerCtx.enterExceptionHandler(exh) ctx1.addFinalizer(finalizer, finalizerCtx) loadException(ctx1, exh, tree.pos) @@ -2122,67 +1951,6 @@ abstract class GenICode extends SubComponent { afterCtx } - - - /** try-catch-finally blocks are actually simpler to emit in MSIL, because there - * is support for `finally` in bytecode. - * - * A - * try { .. } catch { .. } finally { .. } - * block is de-sugared into - * try { try { ..} catch { .. } } finally { .. } - * - * In ICode `finally` block is represented exactly the same as an exception handler, - * but with `NoSymbol` as the exception class. The covered blocks are all blocks of - * the `try { .. } catch { .. }`. 
- * - * Also, TryMsil does not enter any Finalizers into the `cleanups`, because the - * CLI takes care of running the finalizer when seeing a `leave` statement inside - * a try / catch. - */ - def TryMsil(body: Context => Context, - handlers: List[(Symbol, TypeKind, (Context => Context))], - finalizer: Tree, - tree: Tree) = { - - val outerCtx = this.dup // context for generating exception handlers, covered by finalizer - val finalizerCtx = this.dup // context for generating finalizer handler - val afterCtx = outerCtx.newBlock - - if (finalizer != EmptyTree) { - // finalizer is covers try and all catch blocks, i.e. - // try { try { .. } catch { ..} } finally { .. } - val exh = outerCtx.newExceptionHandler(NoSymbol, UNIT, tree.pos) - this.addActiveHandler(exh) - val ctx = finalizerCtx.enterExceptionHandler(exh) - loadException(ctx, exh, tree.pos) - val ctx1 = genLoad(finalizer, ctx, UNIT) - // need jump for the ICode to be valid. MSIL backend will emit `Endfinally` instead. - ctx1.bb.closeWith(JUMP(afterCtx.bb)) - finalizerCtx.endHandler() - } - - for (handler <- handlers) { - val exh = this.newExceptionHandler(handler._1, handler._2, tree.pos) - var ctx1 = outerCtx.enterExceptionHandler(exh) - loadException(ctx1, exh, tree.pos) - ctx1 = handler._3(ctx1) - // msil backend will emit `Leave` to jump out of a handler - ctx1.bb.closeWith(JUMP(afterCtx.bb)) - outerCtx.endHandler() - } - - val bodyCtx = this.newBlock - - val finalCtx = body(bodyCtx) - - outerCtx.bb.closeWith(JUMP(bodyCtx.bb)) - - // msil backend will emit `Leave` to jump out of a try-block - finalCtx.bb.closeWith(JUMP(afterCtx.bb)) - - afterCtx - } } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala index b8a98955c9..80477f0c6e 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala @@ -197,142 +197,4 @@ trait Linearizers { def linearize(m: IMethod): List[BasicBlock] = m.blocks def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = sys.error("not implemented") } - - /** The MSIL linearizer is used only for methods with at least one exception handler. - * It makes sure that all the blocks belonging to a `try`, `catch` or `finally` block - * are emitted in an order that allows the lexical nesting of try-catch-finally, just - * like in the source code. - */ - class MSILLinearizer extends Linearizer { - /** The MSIL linearizer first calls a NormalLInearizer. This is because the ILGenerator checks - * the stack size before emitting instructions. For instance, to emit a `store`, there needs - * to be some value on the stack. This can blow up in situations like this: - * ... - * jump 3 - * 4: store_local 0 - * jump 5 - * 3: load_value - * jump 4 - * 5: ... - * here, 3 must be scheduled first. - * - * The NormalLinearizer also removes dead blocks (blocks without predecessor). 
This is important - * in the following example: - * try { throw new Exception } - * catch { case e => throw e } - * which adds a dead block containing just a "throw" (which, again, would blow up code generation - * because of the stack size; there's no value on the stack when emitting that `throw`) - */ - val normalLinearizer = new NormalLinearizer() - - def linearize(m: IMethod): List[BasicBlock] = { - - val handlersByCovered = m.exh.groupBy(_.covered) - - // number of basic blocks covered by the entire try-catch expression - def size(covered: scala.collection.immutable.Set[BasicBlock]) = { - val hs = handlersByCovered(covered) - covered.size + (hs :\ 0)((h, s) => h.blocks.length + s) - } - - val tryBlocks = handlersByCovered.keys.toList sortBy size - var result = normalLinearizer.linearize(m) - val frozen = mutable.HashSet[BasicBlock](result.head) - - for (tryBlock <- tryBlocks) { - result = groupBlocks(m, result, handlersByCovered(tryBlock), frozen) - } - result - } - - /** @param handlers a list of handlers covering the same blocks (same try, multiple catches) - * @param frozen blocks can't be moved (fist block of a method, blocks directly following a try-catch) - */ - def groupBlocks(method: IMethod, blocks: List[BasicBlock], handlers: List[ExceptionHandler], frozen: mutable.HashSet[BasicBlock]) = { - assert(blocks.head == method.startBlock, method) - - // blocks before the try, and blocks for the try - val beforeAndTry = new ListBuffer[BasicBlock]() - // blocks for the handlers - val catches = handlers map (_ => new ListBuffer[BasicBlock]()) - // blocks to be put at the end - val after = new ListBuffer[BasicBlock]() - - var beforeTry = true - val head = handlers.head - - for (b <- blocks) { - if (head covers b) { - beforeTry = false - beforeAndTry += b - } else { - val handlerIndex = handlers.indexWhere(_.blocks.contains(b)) - if (handlerIndex >= 0) { - catches(handlerIndex) += b - } else if (beforeTry) { - beforeAndTry += b - } else { - after += b - } - } - } - - // reorder the blocks in "catches" so that the "firstBlock" is actually first - (catches, handlers).zipped foreach { (lb, handler) => - lb -= handler.startBlock - handler.startBlock +=: lb - } - - // The first block emitted after a try-catch must be the one that the try / catch - // blocks jump to (because in msil, these jumps cannot be emitted manually) - var firstAfter: Option[BasicBlock] = None - - // Find the (hopefully) unique successor, look at the try and all catch blocks - var blks = head.covered.toList :: handlers.map(_.blocks) - while (firstAfter.isEmpty && !blks.isEmpty) { - val b = blks.head - blks = blks.tail - - val leaving = leavingBlocks(b) - // no leaving blocks when the try or catch ends with THROW or RET - if (!leaving.isEmpty) { - assert(leaving.size <= 1, leaving) - firstAfter = Some(leaving.head) - } - } - if (firstAfter.isDefined) { - val b = firstAfter.get - if (frozen(b)) { - assert(after contains b, b +", "+ method) - } else { - frozen += b - if (beforeAndTry contains b) { - beforeAndTry -= b - } else { - assert(after contains b, after) - after -= b - } - b +=: after - } - } - - for (lb <- catches) { beforeAndTry ++= lb } - beforeAndTry ++= after - beforeAndTry.toList - } - - /** Returns all direct successors of `blocks` wich are not part - * that list, i.e. successors outside the `blocks` list. 
- */ - private def leavingBlocks(blocks: List[BasicBlock]) = { - val res = new mutable.HashSet[BasicBlock]() - for (b <- blocks; s <- b.directSuccessors; if (!blocks.contains(s))) - res += s - res - } - - def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = { - sys.error("not implemented") - } - } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala index eaa742a1da..137e2b556f 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala @@ -734,74 +734,5 @@ trait Opcodes { self: ICodes => case class SuperCall(mix: Name) extends InvokeStyle { override def toString(): String = { "super(" + mix + ")" } } - - - // CLR backend - - case class CIL_LOAD_LOCAL_ADDRESS(local: Local) extends Instruction { - /** Returns a string representation of this instruction */ - override def toString(): String = "CIL_LOAD_LOCAL_ADDRESS "+local //+isArgument?" (argument)":""; - - override def consumed = 0 - override def produced = 1 - - override def producedTypes = msil_mgdptr(local.kind) :: Nil - - override def category = localsCat - } - - case class CIL_LOAD_FIELD_ADDRESS(field: Symbol, isStatic: Boolean) extends Instruction { - /** Returns a string representation of this instruction */ - override def toString(): String = - "CIL_LOAD_FIELD_ADDRESS " + (if (isStatic) field.fullName else field.toString) - - override def consumed = if (isStatic) 0 else 1 - override def produced = 1 - - override def consumedTypes = if (isStatic) Nil else REFERENCE(field.owner) :: Nil; - override def producedTypes = msil_mgdptr(REFERENCE(field.owner)) :: Nil; - - override def category = fldsCat - } - - case class CIL_LOAD_ARRAY_ITEM_ADDRESS(kind: TypeKind) extends Instruction { - /** Returns a string representation of this instruction */ - override def toString(): String = "CIL_LOAD_ARRAY_ITEM_ADDRESS (" + kind + ")" - - override def consumed = 2 - override def produced = 1 - - override def consumedTypes = ARRAY(kind) :: INT :: Nil - override def producedTypes = msil_mgdptr(kind) :: Nil - - override def category = arraysCat - } - - case class CIL_UNBOX(valueType: TypeKind) extends Instruction { - override def toString(): String = "CIL_UNBOX " + valueType - override def consumed = 1 - override def consumedTypes = ObjectReferenceList // actually consumes a 'boxed valueType' - override def produced = 1 - override def producedTypes = msil_mgdptr(valueType) :: Nil - override def category = objsCat - } - - case class CIL_INITOBJ(valueType: TypeKind) extends Instruction { - override def toString(): String = "CIL_INITOBJ " + valueType - override def consumed = 1 - override def consumedTypes = ObjectReferenceList // actually consumes a managed pointer - override def produced = 0 - override def category = objsCat - } - - case class CIL_NEWOBJ(method: Symbol) extends Instruction { - override def toString(): String = "CIL_NEWOBJ " + hostClass.fullName + method.fullName - val hostClass: Symbol = method.owner; - override def consumed = method.tpe.paramTypes.length - override def consumedTypes = method.tpe.paramTypes map toTypeKind - override def produced = 1 - override def producedTypes = toTypeKind(method.tpe.resultType) :: Nil - override def category = objsCat - } } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index 0990cfba6f..84f5fe2678 100644 --- 
a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -427,11 +427,4 @@ trait TypeKinds { self: ICodes => primitiveTypeMap.getOrElse(sym, newReference(sym)) private def primitiveOrClassType(sym: Symbol, targs: List[Type]) = primitiveTypeMap.getOrElse(sym, arrayOrClassType(sym, targs)) - - def msil_mgdptr(tk: TypeKind): TypeKind = (tk: @unchecked) match { - case REFERENCE(cls) => REFERENCE(loaders.clrTypes.mdgptrcls4clssym(cls)) - // TODO have ready class-symbols for the by-ref versions of built-in valuetypes - case _ => abort("cannot obtain a managed pointer for " + tk) - } - } diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala deleted file mode 100644 index 2fb6550239..0000000000 --- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala +++ /dev/null @@ -1,2244 +0,0 @@ -/* NSC -- new scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Nikolay Mihaylov - */ - - -package scala.tools.nsc -package backend.msil - -import java.io.{File, IOException} -import scala.collection.{ mutable, immutable } -import scala.tools.nsc.symtab._ - -import ch.epfl.lamp.compiler.msil.{Type => MsilType, _} -import ch.epfl.lamp.compiler.msil.emit._ -import ch.epfl.lamp.compiler.msil.util.PECustomMod -import scala.language.postfixOps - -abstract class GenMSIL extends SubComponent { - import global._ - import loaders.clrTypes - import clrTypes.{types, constructors, methods, fields} - import icodes._ - import icodes.opcodes._ - - /** Create a new phase */ - override def newPhase(p: Phase) = new MsilPhase(p) - - val phaseName = "msil" - /** MSIL code generation phase - */ - class MsilPhase(prev: Phase) extends GlobalPhase(prev) { - def name = phaseName - override def newFlags = phaseNewFlags - - override def erasedTypes = true - - override def run() { - if (settings.debug.value) inform("[running phase " + name + " on icode]") - - val codeGenerator = new BytecodeGenerator - - //classes is ICodes.classes, a HashMap[Symbol, IClass] - classes.values foreach codeGenerator.findEntryPoint - if( settings.Xshowcls.isSetByUser && (codeGenerator.entryPoint == null) ) { // TODO introduce dedicated setting instead - val entryclass = settings.Xshowcls.value.toString - warning("Couldn't find entry class " + entryclass) - } - - codeGenerator.initAssembly - - val classesSorted = classes.values.toList.sortBy(c => c.symbol.id) // simplifies comparing cross-compiler vs. .exe output - classesSorted foreach codeGenerator.createTypeBuilder - classesSorted foreach codeGenerator.createClassMembers - - try { - classesSorted foreach codeGenerator.genClass - } finally { - codeGenerator.writeAssembly - } - } - - override def apply(unit: CompilationUnit) { - abort("MSIL works on icode classes, not on compilation units!") - } - } - - /** - * MSIL bytecode generator. 
- * - */ - class BytecodeGenerator { - - val MODULE_INSTANCE_NAME = "MODULE$" - - import clrTypes.{VOID => MVOID, BOOLEAN => MBOOL, BYTE => MBYTE, SHORT => MSHORT, - CHAR => MCHAR, INT => MINT, LONG => MLONG, FLOAT => MFLOAT, - DOUBLE => MDOUBLE, OBJECT => MOBJECT, STRING => MSTRING, - STRING_ARRAY => MSTRING_ARRAY, - SYMTAB_CONSTR => SYMTAB_ATTRIBUTE_CONSTRUCTOR, - SYMTAB_DEFAULT_CONSTR => SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR} - - val EXCEPTION = clrTypes.getType("System.Exception") - val MEMBERWISE_CLONE = MOBJECT.GetMethod("MemberwiseClone", MsilType.EmptyTypes) - - val MMONITOR = clrTypes.getType("System.Threading.Monitor") - val MMONITOR_ENTER = MMONITOR.GetMethod("Enter", Array(MOBJECT)) - val MMONITOR_EXIT = MMONITOR.GetMethod("Exit", Array(MOBJECT)) - - val MSTRING_BUILDER = clrTypes.getType("System.Text.StringBuilder") - val MSTRING_BUILDER_CONSTR = MSTRING_BUILDER.GetConstructor(MsilType.EmptyTypes) - val MSTRING_BUILDER_TOSTRING = MSTRING_BUILDER.GetMethod("ToString", - MsilType.EmptyTypes) - - val TYPE_FROM_HANDLE = - clrTypes.getType("System.Type").GetMethod("GetTypeFromHandle", Array(clrTypes.getType("System.RuntimeTypeHandle"))) - - val INT_PTR = clrTypes.getType("System.IntPtr") - - val SystemConvert = clrTypes.getType("System.Convert") - - val objParam = Array(MOBJECT) - - val toBool: MethodInfo = SystemConvert.GetMethod("ToBoolean", objParam) // see comment in emitUnbox - val toSByte: MethodInfo = SystemConvert.GetMethod("ToSByte", objParam) - val toShort: MethodInfo = SystemConvert.GetMethod("ToInt16", objParam) - val toChar: MethodInfo = SystemConvert.GetMethod("ToChar", objParam) - val toInt: MethodInfo = SystemConvert.GetMethod("ToInt32", objParam) - val toLong: MethodInfo = SystemConvert.GetMethod("ToInt64", objParam) - val toFloat: MethodInfo = SystemConvert.GetMethod("ToSingle", objParam) - val toDouble: MethodInfo = SystemConvert.GetMethod("ToDouble", objParam) - - //val boxedUnit: FieldInfo = msilType(definitions.BoxedUnitModule.info).GetField("UNIT") - val boxedUnit: FieldInfo = fields(definitions.BoxedUnit_UNIT) - - // Scala attributes - // symtab.Definitions -> object (singleton..) 
- val CloneableAttr = definitions.CloneableAttr.tpe - val TransientAtt = definitions.TransientAttr.tpe - // remoting: the architectures are too different, no mapping (no portable code - // possible) - - // java instance methods that are mapped to static methods in .net - // these will need to be called with OpCodes.Call (not Callvirt) - val dynToStatMapped = mutable.HashSet[Symbol]() - - initMappings() - - /** Create the mappings between java and .net classes and methods */ - private def initMappings() { - mapType(definitions.AnyClass, MOBJECT) - mapType(definitions.AnyRefClass, MOBJECT) - //mapType(definitions.NullClass, clrTypes.getType("scala.AllRef$")) - //mapType(definitions.NothingClass, clrTypes.getType("scala.All$")) - // FIXME: for some reason the upper two lines map to null - mapType(definitions.NullClass, EXCEPTION) - mapType(definitions.NothingClass, EXCEPTION) - - mapType(definitions.BooleanClass, MBOOL) - mapType(definitions.ByteClass, MBYTE) - mapType(definitions.ShortClass, MSHORT) - mapType(definitions.CharClass, MCHAR) - mapType(definitions.IntClass, MINT) - mapType(definitions.LongClass, MLONG) - mapType(definitions.FloatClass, MFLOAT) - mapType(definitions.DoubleClass, MDOUBLE) - } - - var clasz: IClass = _ - var method: IMethod = _ - - var massembly: AssemblyBuilder = _ - var mmodule: ModuleBuilder = _ - var mcode: ILGenerator = _ - - var assemName: String = _ - var firstSourceName = "" - var outDir: File = _ - var srcPath: File = _ - var moduleName: String = _ - - def initAssembly() { - - assemName = settings.assemname.value - - if (assemName == "") { - if (entryPoint != null) { - assemName = msilName(entryPoint.enclClass) - // remove the $ at the end (from module-name) - assemName = assemName.substring(0, assemName.length() - 1) - } else { - // assuming filename of first source file - assert(firstSourceName.endsWith(".scala"), firstSourceName) - assemName = firstSourceName.substring(0, firstSourceName.length() - 6) - } - } else { - if (assemName.endsWith(".msil")) - assemName = assemName.substring(0, assemName.length()-5) - if (assemName.endsWith(".il")) - assemName = assemName.substring(0, assemName.length()-3) - val f: File = new File(assemName) - assemName = f.getName() - } - - outDir = new File(settings.outdir.value) - - srcPath = new File(settings.sourcedir.value) - - val assemblyName = new AssemblyName() - assemblyName.Name = assemName - massembly = AssemblyBuilderFactory.DefineDynamicAssembly(assemblyName) - - moduleName = assemName // + (if (entryPoint == null) ".dll" else ".exe") - // filename here: .dll or .exe (in both parameters), second: give absolute-path - mmodule = massembly.DefineDynamicModule(moduleName, - new File(outDir, moduleName).getAbsolutePath()) - assert (mmodule != null) - } - - - /** - * Form of the custom Attribute parameter (Ecma-335.pdf) - * - p. 163 for CustomAttrib Form, - * - p. 164 for FixedArg Form (Array and Element) (if array or not is known!) - * !! least significant byte first if values longer than one byte !! 
- * - * 1: Prolog (unsigned int16, value 0x0001) -> symtab[0] = 0x01, symtab[1] = 0x00 - * 2: FixedArgs (directly the data, get number and types from related constructor) - * 2.1: length of the array (unsigned int32, 4 bytes, least significant first) - * 2.2: the byte array data - * 3: NumNamed (unsigned int16, number of named fields and properties, 0x0000) - */ - def addSymtabAttribute(sym: Symbol, tBuilder: TypeBuilder) { - def addMarker() { - val markerSymtab = new Array[Byte](4) - markerSymtab(0) = 1.toByte - tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR, markerSymtab) - } - - // both conditions are needed (why exactly..?) - if (tBuilder.Name.endsWith("$") || sym.isModuleClass) { - addMarker() - } else { - currentRun.symData.get(sym) match { - case Some(pickle) => - var size = pickle.writeIndex - val symtab = new Array[Byte](size + 8) - symtab(0) = 1.toByte - for (i <- 2 until 6) { - symtab(i) = (size & 0xff).toByte - size = size >> 8 - } - java.lang.System.arraycopy(pickle.bytes, 0, symtab, 6, pickle.writeIndex) - - tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_CONSTRUCTOR, symtab) - - currentRun.symData -= sym - currentRun.symData -= sym.companionSymbol - - case _ => - addMarker() - } - } - } - - /** - * Mutates `member` adding CLR attributes (if any) based on sym.annotations. - * Please notice that CLR custom modifiers are a different beast (see customModifiers below) - * and thus shouldn't be added by this method. - */ - def addAttributes(member: ICustomAttributeSetter, annotations: List[AnnotationInfo]) { - // val attributes = annotations.map(_.atp.typeSymbol).collect { - // case definitions.TransientAttr => null // TODO this is just an example - // } - return // TODO: implement at some point - } - - /** - * What's a CLR custom modifier? Intro available as source comments in compiler.msil.CustomModifier. - * It's basically a marker associated with a location (think of FieldInfo, ParameterInfo, and PropertyInfo) - * and thus that marker (be it optional or required) becomes part of the signature of that location. - * Some annotations will become CLR attributes (see addAttributes above), others custom modifiers (this method). - */ - def customModifiers(annotations: List[AnnotationInfo]): Array[CustomModifier] = { - annotations.map(_.atp.typeSymbol).collect { - case definitions.VolatileAttr => new CustomModifier(true, CustomModifier.VolatileMarker) - } toArray - } - - - - /* - debuglog("creating annotations: " + annotations + " for member : " + member) - for (annot@ AnnotationInfo(typ, annArgs, nvPairs) <- annotations ; - if annot.isConstant) - //!typ.typeSymbol.isJavaDefined - { -// assert(consts.length <= 1, -// "too many constant arguments for annotations; "+consts.toString()) - - // Problem / TODO having the symbol of the annotations type would be nicer - // (i hope that type.typeSymbol is the same as the one in types2create) - // AND: this will crash if the annotations Type is already compiled (-> not a typeBuilder) - // when this is solved, types2create will be the same as icodes.classes, thus superfluous - val annType: TypeBuilder = getType(typ.typeSymbol).asInstanceOf[TypeBuilder] -// val annType: MsilType = getType(typ.typeSymbol) - - // Problem / TODO: i have no idea which constructor is used. This - // information should be available in AnnotationInfo. 
- annType.CreateType() // else, GetConstructors can't be used - val constr: ConstructorInfo = annType.GetConstructors()(0) - // prevent a second call of CreateType, only needed because there's no - // other way than GetConstructors()(0) to get the constructor, if there's - // no constructor symbol available. - - val args: Array[Byte] = - getAttributeArgs( - annArgs map (_.constant.get), - (for((n,v) <- nvPairs) yield (n, v.constant.get))) - member.SetCustomAttribute(constr, args) - } - } */ - -/* def getAttributeArgs(consts: List[Constant], nvPairs: List[(Name, Constant)]): Array[Byte] = { - val buf = ByteBuffer.allocate(2048) // FIXME: this may be not enough! - buf.order(java.nio.ByteOrder.LITTLE_ENDIAN) - buf.putShort(1.toShort) // signature - - def emitSerString(str: String) = { - // this is wrong, it has to be the length of the UTF-8 byte array, which - // may be longer (see clr-book on page 302) -// val length: Int = str.length - val strBytes: Array[Byte] = try { - str.getBytes("UTF-8") - } catch { - case _: Error => abort("could not get byte-array for string: " + str) - } - val length: Int = strBytes.length //this length is stored big-endian - if (length < 128) - buf.put(length.toByte) - else if (length < (1<<14)) { - buf.put(((length >> 8) | 0x80).toByte) // the bits 14 and 15 of length are '0' - buf.put((length | 0xff).toByte) - } else if (length < (1 << 29)) { - buf.put(((length >> 24) | 0xc0).toByte) - buf.put(((length >> 16) & 0xff).toByte) - buf.put(((length >> 8) & 0xff).toByte) - buf.put(((length ) & 0xff).toByte) - } else - abort("string too long for attribute parameter: " + length) - buf.put(strBytes) - } - - def emitConst(const: Constant): Unit = const.tag match { - case BooleanTag => buf.put((if (const.booleanValue) 1 else 0).toByte) - case ByteTag => buf.put(const.byteValue) - case ShortTag => buf.putShort(const.shortValue) - case CharTag => buf.putChar(const.charValue) - case IntTag => buf.putInt(const.intValue) - case LongTag => buf.putLong(const.longValue) - case FloatTag => buf.putFloat(const.floatValue) - case DoubleTag => buf.putDouble(const.doubleValue) - case StringTag => - val str: String = const.stringValue - if (str == null) { - buf.put(0xff.toByte) - } else { - emitSerString(str) - } - case ArrayTag => - val arr: Array[Constant] = const.arrayValue - if (arr == null) { - buf.putInt(0xffffffff) - } else { - buf.putInt(arr.length) - arr.foreach(emitConst) - } - - // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag, ArrayTag ??? - - case _ => abort("could not handle attribute argument: " + const) - } - - consts foreach emitConst - buf.putShort(nvPairs.length.toShort) - def emitNamedArg(nvPair: (Name, Constant)) { - // the named argument is a property of the attribute (it can't be a field, since - // all fields in scala are private) - buf.put(0x54.toByte) - - def emitType(c: Constant) = c.tag match { // type of the constant, Ecma-335.pdf, page 151 - case BooleanTag => buf.put(0x02.toByte) - case ByteTag => buf.put(0x05.toByte) - case ShortTag => buf.put(0x06.toByte) - case CharTag => buf.put(0x07.toByte) - case IntTag => buf.put(0x08.toByte) - case LongTag => buf.put(0x0a.toByte) - case FloatTag => buf.put(0x0c.toByte) - case DoubleTag => buf.put(0x0d.toByte) - case StringTag => buf.put(0x0e.toByte) - - // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag ??? 
- - // ArrayTag falls in here - case _ => abort("could not handle attribute argument: " + c) - } - - val cnst: Constant = nvPair._2 - if (cnst.tag == ArrayTag) { - buf.put(0x1d.toByte) - emitType(cnst.arrayValue(0)) // FIXME: will crash if array length = 0 - } else if (cnst.tag == EnumTag) { - buf.put(0x55.toByte) - // TODO: put a SerString (don't know what exactly, names of the enums somehow..) - } else { - buf.put(0x51.toByte) - emitType(cnst) - } - - emitSerString(nvPair._1.toString) - emitConst(nvPair._2) - } - - val length = buf.position() - buf.array().slice(0, length) - } */ - - def writeAssembly() { - if (entryPoint != null) { - assert(entryPoint.enclClass.isModuleClass, entryPoint.enclClass) - val mainMethod = methods(entryPoint) - val stringArrayTypes: Array[MsilType] = Array(MSTRING_ARRAY) - val globalMain = mmodule.DefineGlobalMethod( - "Main", MethodAttributes.Public | MethodAttributes.Static, - MVOID, stringArrayTypes) - globalMain.DefineParameter(0, ParameterAttributes.None, "args") - massembly.SetEntryPoint(globalMain) - val code = globalMain.GetILGenerator() - val moduleField = getModuleInstanceField(entryPoint.enclClass) - code.Emit(OpCodes.Ldsfld, moduleField) - code.Emit(OpCodes.Ldarg_0) - code.Emit(OpCodes.Callvirt, mainMethod) - code.Emit(OpCodes.Ret) - } - createTypes() - var outDirName: String = null - try { - if (settings.Ygenjavap.isDefault) { // we reuse the JVM-sounding setting because it's conceptually similar - outDirName = outDir.getPath() - massembly.Save(outDirName + "\\" + assemName + ".msil") /* use SingleFileILPrinterVisitor */ - } else { - outDirName = srcPath.getPath() - massembly.Save(settings.Ygenjavap.value, outDirName) /* use MultipleFilesILPrinterVisitor */ - } - } catch { - case e:IOException => abort("Could not write to " + outDirName + ": " + e.getMessage()) - } - } - - private def createTypes() { - for (sym <- classes.keys) { - val iclass = classes(sym) - val tBuilder = types(sym).asInstanceOf[TypeBuilder] - - debuglog("Calling CreatType for " + sym + ", " + tBuilder.toString) - - tBuilder.CreateType() - tBuilder.setSourceFilepath(iclass.cunit.source.file.path) - } - } - - private[GenMSIL] def ilasmFileName(iclass: IClass) : String = { - // method.sourceFile contains just the filename - iclass.cunit.source.file.toString.replace("\\", "\\\\") - } - - private[GenMSIL] def genClass(iclass: IClass) { - val sym = iclass.symbol - debuglog("Generating class " + sym + " flags: " + sym.flagString) - clasz = iclass - - val tBuilder = getType(sym).asInstanceOf[TypeBuilder] - if (isCloneable(sym)) { - // FIXME: why there's no nme.clone_ ? 
- // "Clone": if the code is non-portable, "Clone" is defined, not "clone" - // TODO: improve condition (should override AnyRef.clone) - if (iclass.methods.forall(m => { - !((m.symbol.name.toString != "clone" || m.symbol.name.toString != "Clone") && - m.symbol.tpe.paramTypes.length != 0) - })) { - debuglog("auto-generating cloneable method for " + sym) - val attrs: Short = (MethodAttributes.Public | MethodAttributes.Virtual | - MethodAttributes.HideBySig).toShort - val cloneMethod = tBuilder.DefineMethod("Clone", attrs, MOBJECT, - MsilType.EmptyTypes) - val clCode = cloneMethod.GetILGenerator() - clCode.Emit(OpCodes.Ldarg_0) - clCode.Emit(OpCodes.Call, MEMBERWISE_CLONE) - clCode.Emit(OpCodes.Ret) - } - } - - val line = sym.pos.line - tBuilder.setPosition(line, ilasmFileName(iclass)) - - if (isTopLevelModule(sym)) { - if (sym.companionClass == NoSymbol) - generateMirrorClass(sym) - else - log("No mirror class for module with linked class: " + - sym.fullName) - } - - addSymtabAttribute(sym, tBuilder) - addAttributes(tBuilder, sym.annotations) - - if (iclass.symbol != definitions.ArrayClass) - iclass.methods foreach genMethod - - } //genClass - - - private def genMethod(m: IMethod) { - debuglog("Generating method " + m.symbol + " flags: " + m.symbol.flagString + - " owner: " + m.symbol.owner) - method = m - localBuilders.clear - computeLocalVarsIndex(m) - - if (m.symbol.isClassConstructor) { - mcode = constructors(m.symbol).asInstanceOf[ConstructorBuilder].GetILGenerator() - } else { - val mBuilder = methods(m.symbol).asInstanceOf[MethodBuilder] - if (!mBuilder.IsAbstract()) - try { - mcode = mBuilder.GetILGenerator() - } catch { - case e: Exception => - java.lang.System.out.println("m.symbol = " + m.symbol.flagString + " " + m.symbol) - java.lang.System.out.println("m.symbol.owner = " + m.symbol.owner.flagString + " " + m.symbol.owner) - java.lang.System.out.println("mBuilder = " + mBuilder) - java.lang.System.out.println("mBuilder.DeclaringType = " + - TypeAttributes.toString(mBuilder.DeclaringType.Attributes) + - "::" + mBuilder.DeclaringType) - throw e - } - else - mcode = null - } - - if (mcode != null) { - for (local <- m.locals ; if !(m.params contains local)) { - debuglog("add local var: " + local + ", of kind " + local.kind) - val t: MsilType = msilType(local.kind) - val localBuilder = mcode.DeclareLocal(t) - localBuilder.SetLocalSymInfo(msilName(local.sym)) - localBuilders(local) = localBuilder - } - genCode(m) - } - - } - - /** Special linearizer for methods with at least one exception handler. This - * linearizer brings all basic blocks in the right order so that nested - * try-catch and try-finally blocks can be emitted. - */ - val msilLinearizer = new MSILLinearizer() - - val labels = mutable.HashMap[BasicBlock, Label]() - - /* when emitting .line, it's enough to include the full filename just once per method, thus reducing filesize. - * this scheme relies on the fact that the entry block is emitted first. 
*/ - var dbFilenameSeen = false - - def genCode(m: IMethod) { - - def makeLabels(blocks: List[BasicBlock]) = { - debuglog("Making labels for: " + method) - for (bb <- blocks) labels(bb) = mcode.DefineLabel() - } - - labels.clear - - var linearization = if(!m.exh.isEmpty) msilLinearizer.linearize(m) - else linearizer.linearize(m) - - if (!m.exh.isEmpty) - linearization = computeExceptionMaps(linearization, m) - - makeLabels(linearization) - - // debug val blocksInM = m.code.blocks.toList.sortBy(bb => bb.label) - // debug val blocksInL = linearization.sortBy(bb => bb.label) - // debug val MButNotL = (blocksInM.toSet) diff (blocksInL.toSet) // if non-empty, a jump to B fails to find a label for B (case CJUMP, case CZJUMP) - // debug if(!MButNotL.isEmpty) { } - - dbFilenameSeen = false - genBlocks(linearization) - - // RETURN inside exception blocks are replaced by Leave. The target of the - // leave is a `Ret` outside any exception block (generated here). - if (handlerReturnMethod == m) { - mcode.MarkLabel(handlerReturnLabel) - if (handlerReturnKind != UNIT) - mcode.Emit(OpCodes.Ldloc, handlerReturnLocal) - mcode.Emit(OpCodes.Ret) - } - - beginExBlock.clear() - beginCatchBlock.clear() - endExBlock.clear() - endFinallyLabels.clear() - } - - def genBlocks(blocks: List[BasicBlock], previous: BasicBlock = null) { - blocks match { - case Nil => () - case x :: Nil => genBlock(x, prev = previous, next = null) - case x :: y :: ys => genBlock(x, prev = previous, next = y); genBlocks(y :: ys, previous = x) - } - } - - // the try blocks starting at a certain BasicBlock - val beginExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]() - - // the catch blocks starting / endling at a certain BasicBlock - val beginCatchBlock = mutable.HashMap[BasicBlock, ExceptionHandler]() - val endExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]() - - /** When emitting the code (genBlock), the number of currently active try / catch - * blocks. When seeing a `RETURN` inside a try / catch, we need to - * - store the result in a local (if it's not UNIT) - * - emit `Leave handlerReturnLabel` instead of the Return - * - emit code at the end: load the local and return its value - */ - val currentHandlers = new mutable.Stack[ExceptionHandler] - // The IMethod the Local/Label/Kind below belong to - var handlerReturnMethod: IMethod = _ - // Stores the result when returning inside an exception block - var handlerReturnLocal: LocalBuilder = _ - // Label for a return instruction outside any exception block - var handlerReturnLabel: Label = _ - // The result kind. - var handlerReturnKind: TypeKind = _ - def returnFromHandler(kind: TypeKind): (LocalBuilder, Label) = { - if (handlerReturnMethod != method) { - handlerReturnMethod = method - if (kind != UNIT) { - handlerReturnLocal = mcode.DeclareLocal(msilType(kind)) - handlerReturnLocal.SetLocalSymInfo("$handlerReturn") - } - handlerReturnLabel = mcode.DefineLabel() - handlerReturnKind = kind - } - (handlerReturnLocal, handlerReturnLabel) - } - - /** For try/catch nested inside a finally, we can't use `Leave OutsideFinally`, the - * Leave target has to be inside the finally (and it has to be the `endfinally` instruction). - * So for every finalizer, we have a label which marks the place of the `endfinally`, - * nested try/catch blocks will leave there. 
- */ - val endFinallyLabels = mutable.HashMap[ExceptionHandler, Label]() - - /** Computes which blocks are the beginning / end of a try or catch block */ - private def computeExceptionMaps(blocks: List[BasicBlock], m: IMethod): List[BasicBlock] = { - val visitedBlocks = new mutable.HashSet[BasicBlock]() - - // handlers which have not been introduced so far - var openHandlers = m.exh - - - /** Example - * try { - * try { - * // *1* - * } catch { - * case h1 => - * } - * } catch { - * case h2 => - * case h3 => - * try { - * - * } catch { - * case h4 => // *2* - * case h5 => - * } - * } - */ - - // Stack of nested try blocks. Each bloc has a List of ExceptionHandler (multiple - // catch statements). Example *1*: Stack(List(h2, h3), List(h1)) - val currentTryHandlers = new mutable.Stack[List[ExceptionHandler]]() - - // Stack of nested catch blocks. The head of the list is the current catch block. The - // tail is all following catch blocks. Example *2*: Stack(List(h3), List(h4, h5)) - val currentCatchHandlers = new mutable.Stack[List[ExceptionHandler]]() - - for (b <- blocks) { - - // are we past the current catch blocks? - def endHandlers(): List[ExceptionHandler] = { - var res: List[ExceptionHandler] = Nil - if (!currentCatchHandlers.isEmpty) { - val handler = currentCatchHandlers.top.head - if (!handler.blocks.contains(b)) { - // all blocks of the handler are either visited, or not part of the linearization (i.e. dead) - assert(handler.blocks.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)), - "Bad linearization of basic blocks inside catch. Found block not part of the handler\n"+ - b.fullString +"\nwhile in catch-part of\n"+ handler) - - val rest = currentCatchHandlers.pop.tail - if (rest.isEmpty) { - // all catch blocks of that exception handler are covered - res = handler :: endHandlers() - } else { - // there are more catch blocks for that try (handlers covering the same) - currentCatchHandlers.push(rest) - beginCatchBlock(b) = rest.head - } - } - } - res - } - val end = endHandlers() - if (!end.isEmpty) endExBlock(b) = end - - // are we past the current try block? - if (!currentTryHandlers.isEmpty) { - val handler = currentTryHandlers.top.head - if (!handler.covers(b)) { - // all of the covered blocks are visited, or not part of the linearization - assert(handler.covered.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)), - "Bad linearization of basic blocks inside try. Found non-covered block\n"+ - b.fullString +"\nwhile in try-part of\n"+ handler) - - assert(handler.startBlock == b, - "Bad linearization of basic blocks. The entry block of a catch does not directly follow the try\n"+ - b.fullString +"\n"+ handler) - - val handlers = currentTryHandlers.pop - currentCatchHandlers.push(handlers) - beginCatchBlock(b) = handler - } - } - - // are there try blocks starting at b? - val (newHandlers, stillOpen) = openHandlers.partition(_.covers(b)) - openHandlers = stillOpen - - val newHandlersBySize = newHandlers.groupBy(_.covered.size) - // big handlers first, smaller ones are nested inside the try of the big one - // (checked by the assertions below) - val sizes = newHandlersBySize.keys.toList.sortWith(_ > _) - - val beginHandlers = new mutable.ListBuffer[ExceptionHandler] - for (s <- sizes) { - val sHandlers = newHandlersBySize(s) - for (h <- sHandlers) { - assert(h.covered == sHandlers.head.covered, - "bad nesting of exception handlers. 
same size, but not covering same blocks\n"+ - h +"\n"+ sHandlers.head) - assert(h.resultKind == sHandlers.head.resultKind, - "bad nesting of exception handlers. same size, but the same resultKind\n"+ - h +"\n"+ sHandlers.head) - } - for (bigger <- beginHandlers; h <- sHandlers) { - assert(h.covered.subsetOf(bigger.covered), - "bad nesting of exception handlers. try blocks of smaller handler are not nested in bigger one.\n"+ - h +"\n"+ bigger) - assert(h.blocks.toSet.subsetOf(bigger.covered), - "bad nesting of exception handlers. catch blocks of smaller handler are not nested in bigger one.\n"+ - h +"\n"+ bigger) - } - beginHandlers += sHandlers.head - currentTryHandlers.push(sHandlers) - } - beginExBlock(b) = beginHandlers.toList - visitedBlocks += b - } - - // if there handlers left (i.e. handlers covering nothing, or a - // non-existent (dead) block), remove their catch-blocks. - val liveBlocks = if (openHandlers.isEmpty) blocks else { - blocks.filter(b => openHandlers.forall(h => !h.blocks.contains(b))) - } - - /** There might be open handlers, but no more blocks. happens when try/catch end - * with `throw` or `return` - * def foo() { try { .. throw } catch { _ => .. throw } } - * - * In this case we need some code after the catch block for the auto-generated - * `leave` instruction. So we're adding a (dead) `throw new Exception`. - */ - val rest = currentCatchHandlers.map(handlers => { - assert(handlers.length == 1, handlers) - handlers.head - }).toList - - if (rest.isEmpty) { - liveBlocks - } else { - val b = m.code.newBlock - b.emit(Seq( - NEW(REFERENCE(definitions.ThrowableClass)), - DUP(REFERENCE(definitions.ObjectClass)), - CALL_METHOD(definitions.ThrowableClass.primaryConstructor, Static(true)), - THROW(definitions.ThrowableClass) - )) - b.close - endExBlock(b) = rest - liveBlocks ::: List(b) - } - } - - /** - * @param block the BasicBlock to emit code for - * @param next the following BasicBlock, `null` if `block` is the last one - */ - def genBlock(block: BasicBlock, prev: BasicBlock, next: BasicBlock) { - - def loadLocalOrAddress(local: Local, msg : String , loadAddr : Boolean) { - debuglog(msg + " for " + local) - val isArg = local.arg - val i = local.index - if (isArg) - loadArg(mcode, loadAddr)(i) - else - loadLocal(i, local, mcode, loadAddr) - } - - def loadFieldOrAddress(field: Symbol, isStatic: Boolean, msg: String, loadAddr : Boolean) { - debuglog(msg + " with owner: " + field.owner + - " flags: " + field.owner.flagString) - val fieldInfo = fields.get(field) match { - case Some(fInfo) => fInfo - case None => - val fInfo = getType(field.owner).GetField(msilName(field)) - fields(field) = fInfo - fInfo - } - if (fieldInfo.IsVolatile) { - mcode.Emit(OpCodes.Volatile) - } - if (!fieldInfo.IsLiteral) { - if (loadAddr) { - mcode.Emit(if (isStatic) OpCodes.Ldsflda else OpCodes.Ldflda, fieldInfo) - } else { - mcode.Emit(if (isStatic) OpCodes.Ldsfld else OpCodes.Ldfld, fieldInfo) - } - } else { - assert(!loadAddr, "can't take AddressOf a literal field (not even with readonly. prefix) because no memory was allocated to such field ...") - // TODO the above can be overcome by loading the value, boxing, and finally unboxing. An address to a copy of the raw value will be on the stack. - /* We perform `field inlining' as required by CLR. - * Emit as for a CONSTANT ICode stmt, with the twist that the constant value is available - * as a java.lang.Object and its .NET type allows constant initialization in CLR, i.e. 
that type - * is one of I1, I2, I4, I8, R4, R8, CHAR, BOOLEAN, STRING, or CLASS (in this last case, - * only accepting nullref as value). See Table 9-1 in Lidin's book on ILAsm. */ - val value = fieldInfo.getValue() - if (value == null) { - mcode.Emit(OpCodes.Ldnull) - } else { - val typ = if (fieldInfo.FieldType.IsEnum) fieldInfo.FieldType.getUnderlyingType - else fieldInfo.FieldType - if (typ == clrTypes.STRING) { - mcode.Emit(OpCodes.Ldstr, value.asInstanceOf[String]) - } else if (typ == clrTypes.BOOLEAN) { - mcode.Emit(if (value.asInstanceOf[Boolean]) OpCodes.Ldc_I4_1 - else OpCodes.Ldc_I4_0) - } else if (typ == clrTypes.BYTE || typ == clrTypes.UBYTE) { - loadI4(value.asInstanceOf[Byte], mcode) - } else if (typ == clrTypes.SHORT || typ == clrTypes.USHORT) { - loadI4(value.asInstanceOf[Int], mcode) - } else if (typ == clrTypes.CHAR) { - loadI4(value.asInstanceOf[Char], mcode) - } else if (typ == clrTypes.INT || typ == clrTypes.UINT) { - loadI4(value.asInstanceOf[Int], mcode) - } else if (typ == clrTypes.LONG || typ == clrTypes.ULONG) { - mcode.Emit(OpCodes.Ldc_I8, value.asInstanceOf[Long]) - } else if (typ == clrTypes.FLOAT) { - mcode.Emit(OpCodes.Ldc_R4, value.asInstanceOf[Float]) - } else if (typ == clrTypes.DOUBLE) { - mcode.Emit(OpCodes.Ldc_R8, value.asInstanceOf[Double]) - } else { - /* TODO one more case is described in Partition II, 16.2: bytearray(...) */ - abort("Unknown type for static literal field: " + fieldInfo) - } - } - } - } - - /** Creating objects works differently on .NET. On the JVM - * - NEW(type) => reference on Stack - * - DUP, load arguments, CALL_METHOD(constructor) - * - * On .NET, the NEW and DUP are ignored, but we emit a special method call - * - load arguments - * - NewObj(constructor) => reference on stack - * - * This variable tells whether the previous instruction was a NEW, - * we expect a DUP which is not emitted. */ - var previousWasNEW = false - - var lastLineNr: Int = 0 - var lastPos: Position = NoPosition - - - // EndExceptionBlock must happen before MarkLabel because it adds the - // Leave instruction. Otherwise, labels(block) points to the Leave - // (inside the catch) instead of the instruction afterwards. 
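
The scheme above — NEW only sets a flag, the DUP that must follow it is swallowed, and the
constructor call is emitted as a single Newobj once the arguments are on the stack — can be
sketched in isolation. The instruction names below (New, Dup, LoadArg, CallCtor) and the
string output are invented for illustration; this is a simplified model, not the compiler's
ICode or ILGenerator API:

object NewObjSketch {
  sealed trait Instr
  case class New(cls: String)       extends Instr  // JVM-style: push an uninitialised reference
  case object Dup                   extends Instr
  case class LoadArg(i: Int)        extends Instr
  case class CallCtor(cls: String)  extends Instr

  /** Translate a JVM-style `NEW c; DUP; <args>; CALL <init>` sequence into CIL-like text:
   *  NEW emits nothing, the matching DUP is dropped, and the constructor call becomes a
   *  single `newobj` after the arguments have been pushed.
   */
  def emitCil(instrs: List[Instr]): List[String] = {
    var previousWasNew = false
    val out = List.newBuilder[String]
    instrs foreach {
      case New(_)                => previousWasNew = true   // emit nothing yet
      case Dup if previousWasNew => previousWasNew = false  // swallow the DUP
      case Dup                   => out += "dup"
      case LoadArg(i)            => out += s"ldarg $i"
      case CallCtor(c)           => out += s"newobj instance void $c::.ctor(...)"
    }
    out.result()
  }
}

// e.g. emitCil(List(New("A"), Dup, LoadArg(1), CallCtor("A")))
//      == List("ldarg 1", "newobj instance void A::.ctor(...)")
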
- for (handlers <- endExBlock.get(block); exh <- handlers) { - currentHandlers.pop() - for (l <- endFinallyLabels.get(exh)) - mcode.MarkLabel(l) - mcode.EndExceptionBlock() - } - - mcode.MarkLabel(labels(block)) - debuglog("Generating code for block: " + block) - - for (handler <- beginCatchBlock.get(block)) { - if (!currentHandlers.isEmpty && currentHandlers.top.covered == handler.covered) { - currentHandlers.pop() - currentHandlers.push(handler) - } - if (handler.cls == NoSymbol) { - // `finally` blocks are represented the same as `catch`, but with no catch-type - mcode.BeginFinallyBlock() - } else { - val t = getType(handler.cls) - mcode.BeginCatchBlock(t) - } - } - for (handlers <- beginExBlock.get(block); exh <- handlers) { - currentHandlers.push(exh) - mcode.BeginExceptionBlock() - } - - for (instr <- block) { - try { - val currentLineNr = instr.pos.line - val skip = if(instr.pos.isRange) instr.pos.sameRange(lastPos) else (currentLineNr == lastLineNr); - if(!skip || !dbFilenameSeen) { - val fileName = if(dbFilenameSeen) "" else {dbFilenameSeen = true; ilasmFileName(clasz)}; - if(instr.pos.isRange) { - val startLine = instr.pos.focusStart.line - val endLine = instr.pos.focusEnd.line - val startCol = instr.pos.focusStart.column - val endCol = instr.pos.focusEnd.column - mcode.setPosition(startLine, endLine, startCol, endCol, fileName) - } else { - mcode.setPosition(instr.pos.line, fileName) - } - lastLineNr = currentLineNr - lastPos = instr.pos - } - } catch { case _: UnsupportedOperationException => () } - - if (previousWasNEW) - assert(instr.isInstanceOf[DUP], block) - - instr match { - case THIS(clasz) => - mcode.Emit(OpCodes.Ldarg_0) - - case CONSTANT(const) => - const.tag match { - case UnitTag => () - case BooleanTag => mcode.Emit(if (const.booleanValue) OpCodes.Ldc_I4_1 - else OpCodes.Ldc_I4_0) - case ByteTag => loadI4(const.byteValue, mcode) - case ShortTag => loadI4(const.shortValue, mcode) - case CharTag => loadI4(const.charValue, mcode) - case IntTag => loadI4(const.intValue, mcode) - case LongTag => mcode.Emit(OpCodes.Ldc_I8, const.longValue) - case FloatTag => mcode.Emit(OpCodes.Ldc_R4, const.floatValue) - case DoubleTag => mcode.Emit(OpCodes.Ldc_R8, const.doubleValue) - case StringTag => mcode.Emit(OpCodes.Ldstr, const.stringValue) - case NullTag => mcode.Emit(OpCodes.Ldnull) - case ClazzTag => - mcode.Emit(OpCodes.Ldtoken, msilType(const.typeValue)) - mcode.Emit(OpCodes.Call, TYPE_FROM_HANDLE) - case _ => abort("Unknown constant value: " + const) - } - - case LOAD_ARRAY_ITEM(kind) => - (kind: @unchecked) match { - case BOOL => mcode.Emit(OpCodes.Ldelem_I1) - case BYTE => mcode.Emit(OpCodes.Ldelem_I1) // I1 for System.SByte, i.e. 
a scala.Byte - case SHORT => mcode.Emit(OpCodes.Ldelem_I2) - case CHAR => mcode.Emit(OpCodes.Ldelem_U2) - case INT => mcode.Emit(OpCodes.Ldelem_I4) - case LONG => mcode.Emit(OpCodes.Ldelem_I8) - case FLOAT => mcode.Emit(OpCodes.Ldelem_R4) - case DOUBLE => mcode.Emit(OpCodes.Ldelem_R8) - case REFERENCE(cls) => mcode.Emit(OpCodes.Ldelem_Ref) - case ARRAY(elem) => mcode.Emit(OpCodes.Ldelem_Ref) - - // case UNIT is not possible: an Array[Unit] will be an - // Array[scala.runtime.BoxedUnit] (-> case REFERENCE) - } - - case LOAD_LOCAL(local) => loadLocalOrAddress(local, "load_local", false) - - case CIL_LOAD_LOCAL_ADDRESS(local) => loadLocalOrAddress(local, "cil_load_local_address", true) - - case LOAD_FIELD(field, isStatic) => loadFieldOrAddress(field, isStatic, "load_field", false) - - case CIL_LOAD_FIELD_ADDRESS(field, isStatic) => loadFieldOrAddress(field, isStatic, "cil_load_field_address", true) - - case CIL_LOAD_ARRAY_ITEM_ADDRESS(kind) => mcode.Emit(OpCodes.Ldelema, msilType(kind)) - - case CIL_NEWOBJ(msym) => - assert(msym.isClassConstructor) - val constructorInfo: ConstructorInfo = getConstructor(msym) - mcode.Emit(OpCodes.Newobj, constructorInfo) - - case LOAD_MODULE(module) => - debuglog("Generating LOAD_MODULE for: " + showsym(module)) - mcode.Emit(OpCodes.Ldsfld, getModuleInstanceField(module)) - - case STORE_ARRAY_ITEM(kind) => - (kind: @unchecked) match { - case BOOL => mcode.Emit(OpCodes.Stelem_I1) - case BYTE => mcode.Emit(OpCodes.Stelem_I1) - case SHORT => mcode.Emit(OpCodes.Stelem_I2) - case CHAR => mcode.Emit(OpCodes.Stelem_I2) - case INT => mcode.Emit(OpCodes.Stelem_I4) - case LONG => mcode.Emit(OpCodes.Stelem_I8) - case FLOAT => mcode.Emit(OpCodes.Stelem_R4) - case DOUBLE => mcode.Emit(OpCodes.Stelem_R8) - case REFERENCE(cls) => mcode.Emit(OpCodes.Stelem_Ref) - case ARRAY(elem) => mcode.Emit(OpCodes.Stelem_Ref) // @TODO: test this! (occurs when calling a Array[Object]* vararg param method) - - // case UNIT not possible (see comment at LOAD_ARRAY_ITEM) - } - - case STORE_LOCAL(local) => - val isArg = local.arg - val i = local.index - debuglog("store_local for " + local + ", index " + i) - - // there are some locals defined by the compiler that - // are isArg and are need to be stored. - if (isArg) { - if (i >= -128 && i <= 127) - mcode.Emit(OpCodes.Starg_S, i) - else - mcode.Emit(OpCodes.Starg, i) - } else { - i match { - case 0 => mcode.Emit(OpCodes.Stloc_0) - case 1 => mcode.Emit(OpCodes.Stloc_1) - case 2 => mcode.Emit(OpCodes.Stloc_2) - case 3 => mcode.Emit(OpCodes.Stloc_3) - case _ => - if (i >= -128 && i <= 127) - mcode.Emit(OpCodes.Stloc_S, localBuilders(local)) - else - mcode.Emit(OpCodes.Stloc, localBuilders(local)) - } - } - - case STORE_THIS(_) => - // this only works for impl classes because the self parameter comes first - // in the method signature. If that changes, this code has to be revisited. - mcode.Emit(OpCodes.Starg_S, 0) - - case STORE_FIELD(field, isStatic) => - val fieldInfo = fields.get(field) match { - case Some(fInfo) => fInfo - case None => - val fInfo = getType(field.owner).GetField(msilName(field)) - fields(field) = fInfo - fInfo - } - mcode.Emit(if (isStatic) OpCodes.Stsfld else OpCodes.Stfld, fieldInfo) - - case CALL_PRIMITIVE(primitive) => - genPrimitive(primitive, instr.pos) - - case CALL_METHOD(msym, style) => - if (msym.isClassConstructor) { - val constructorInfo: ConstructorInfo = getConstructor(msym) - (style: @unchecked) match { - // normal constructor calls are Static.. 
- case Static(_) => - if (method.symbol.isClassConstructor && method.symbol.owner == msym.owner) - // we're generating a constructor (method: IMethod is a constructor), and we're - // calling another constructor of the same class. - - // @LUC TODO: this can probably break, namely when having: class A { def this() { new A() } } - // instead, we should instruct the CALL_METHOD with additional information, know whether it's - // an instance creation constructor call or not. - mcode.Emit(OpCodes.Call, constructorInfo) - else - mcode.Emit(OpCodes.Newobj, constructorInfo) - case SuperCall(_) => - mcode.Emit(OpCodes.Call, constructorInfo) - if (isStaticModule(clasz.symbol) && - notInitializedModules.contains(clasz.symbol) && - method.symbol.isClassConstructor) - { - notInitializedModules -= clasz.symbol - mcode.Emit(OpCodes.Ldarg_0) - mcode.Emit(OpCodes.Stsfld, getModuleInstanceField(clasz.symbol)) - } - } - - } else { - - var doEmit = true - getTypeOpt(msym.owner) match { - case Some(typ) if (typ.IsEnum) => { - def negBool() = { - mcode.Emit(OpCodes.Ldc_I4_0) - mcode.Emit(OpCodes.Ceq) - } - doEmit = false - val name = msym.name - if (name eq nme.EQ) { mcode.Emit(OpCodes.Ceq) } - else if (name eq nme.NE) { mcode.Emit(OpCodes.Ceq); negBool } - else if (name eq nme.LT) { mcode.Emit(OpCodes.Clt) } - else if (name eq nme.LE) { mcode.Emit(OpCodes.Cgt); negBool } - else if (name eq nme.GT) { mcode.Emit(OpCodes.Cgt) } - else if (name eq nme.GE) { mcode.Emit(OpCodes.Clt); negBool } - else if (name eq nme.OR) { mcode.Emit(OpCodes.Or) } - else if (name eq nme.AND) { mcode.Emit(OpCodes.And) } - else if (name eq nme.XOR) { mcode.Emit(OpCodes.Xor) } - else - doEmit = true - } - case _ => () - } - - // method: implicit view(FunctionX[PType0, PType1, ...,PTypeN, ResType]):DelegateType - val (isDelegateView, paramType, resType) = enteringTyper { - msym.tpe match { - case MethodType(params, resultType) - if (params.length == 1 && msym.name == nme.view_) => - val paramType = params(0).tpe - val isDel = definitions.isCorrespondingDelegate(resultType, paramType) - (isDel, paramType, resultType) - case _ => (false, null, null) - } - } - if (doEmit && isDelegateView) { - doEmit = false - createDelegateCaller(paramType, resType) - } - - if (doEmit && - (msym.name == nme.PLUS || msym.name == nme.MINUS) - && clrTypes.isDelegateType(msilType(msym.owner.tpe))) - { - doEmit = false - val methodInfo: MethodInfo = getMethod(msym) - // call it as a static method, even if the compiler (symbol) thinks it's virtual - mcode.Emit(OpCodes.Call, methodInfo) - mcode.Emit(OpCodes.Castclass, msilType(msym.owner.tpe)) - } - - if (doEmit && definitions.Delegate_scalaCallers.contains(msym)) { - doEmit = false - val methodSym: Symbol = definitions.Delegate_scalaCallerTargets(msym) - val delegateType: Type = msym.tpe match { - case MethodType(_, retType) => retType - case _ => abort("not a method type: " + msym.tpe) - } - val methodInfo: MethodInfo = getMethod(methodSym) - val delegCtor = msilType(delegateType).GetConstructor(Array(MOBJECT, INT_PTR)) - if (methodSym.isStatic) { - mcode.Emit(OpCodes.Ldftn, methodInfo) - } else { - mcode.Emit(OpCodes.Dup) - mcode.Emit(OpCodes.Ldvirtftn, methodInfo) - } - mcode.Emit(OpCodes.Newobj, delegCtor) - } - - if (doEmit) { - val methodInfo: MethodInfo = getMethod(msym) - (style: @unchecked) match { - case SuperCall(_) => - mcode.Emit(OpCodes.Call, methodInfo) - case Dynamic => - // methodInfo.DeclaringType is null for global methods - val isValuetypeMethod = (methodInfo.DeclaringType ne null) && 
(methodInfo.DeclaringType.IsValueType) - val isValuetypeVirtualMethod = isValuetypeMethod && (methodInfo.IsVirtual) - if (dynToStatMapped(msym)) { - mcode.Emit(OpCodes.Call, methodInfo) - } else if (isValuetypeVirtualMethod) { - mcode.Emit(OpCodes.Constrained, methodInfo.DeclaringType) - mcode.Emit(OpCodes.Callvirt, methodInfo) - } else if (isValuetypeMethod) { - // otherwise error "Callvirt on a value type method" ensues - mcode.Emit(OpCodes.Call, methodInfo) - } else { - mcode.Emit(OpCodes.Callvirt, methodInfo) - } - case Static(_) => - if(methodInfo.IsVirtual && !mcode.Ldarg0WasJustEmitted) { - mcode.Emit(OpCodes.Callvirt, methodInfo) - } else mcode.Emit(OpCodes.Call, methodInfo) - } - } - } - - case BOX(boxType) => - emitBox(mcode, boxType) - - case UNBOX(boxType) => - emitUnbox(mcode, boxType) - - case CIL_UNBOX(boxType) => - mcode.Emit(OpCodes.Unbox, msilType(boxType)) - - case CIL_INITOBJ(valueType) => - mcode.Emit(OpCodes.Initobj, msilType(valueType)) - - case NEW(REFERENCE(cls)) => - // the next instruction must be a DUP, see comment on `var previousWasNEW` - previousWasNEW = true - - // works also for arrays and reference-types - case CREATE_ARRAY(elem, dims) => - // TODO: handle multi dimensional arrays - assert(dims == 1, "Can't handle multi dimensional arrays") - mcode.Emit(OpCodes.Newarr, msilType(elem)) - - // works for arrays and reference-types - case IS_INSTANCE(tpe) => - mcode.Emit(OpCodes.Isinst, msilType(tpe)) - mcode.Emit(OpCodes.Ldnull) - mcode.Emit(OpCodes.Ceq) - mcode.Emit(OpCodes.Ldc_I4_0) - mcode.Emit(OpCodes.Ceq) - - // works for arrays and reference-types - // part from the scala reference: "S <: T does not imply - // Array[S] <: Array[T] in Scala. However, it is possible - // to cast an array of S to an array of T if such a cast - // is permitted in the host environment." - case CHECK_CAST(tpknd) => - val tMSIL = msilType(tpknd) - mcode.Emit(OpCodes.Castclass, tMSIL) - - // no SWITCH is generated when there's - // - a default case ("case _ => ...") in the matching expr - // - OR is used ("case 1 | 2 => ...") - case SWITCH(tags, branches) => - // tags is List[List[Int]]; a list of integers for every label. 
- // if the int on stack is 4, and 4 is in the second list => jump - // to second label - // branches is List[BasicBlock] - // the labels to jump to (the last one is the default one) - - val switchLocal = mcode.DeclareLocal(MINT) - // several switch variables will appear with the same name in the - // assembly code, but this makes no truble - switchLocal.SetLocalSymInfo("$switch_var") - - mcode.Emit(OpCodes.Stloc, switchLocal) - var i = 0 - for (l <- tags) { - val targetLabel = labels(branches(i)) - for (i <- l) { - mcode.Emit(OpCodes.Ldloc, switchLocal) - loadI4(i, mcode) - mcode.Emit(OpCodes.Beq, targetLabel) - } - i += 1 - } - val defaultTarget = labels(branches(i)) - if (next != branches(i)) - mcode.Emit(OpCodes.Br, defaultTarget) - - case JUMP(whereto) => - val (leaveHandler, leaveFinally, lfTarget) = leavesHandler(block, whereto) - if (leaveHandler) { - if (leaveFinally) { - if (lfTarget.isDefined) mcode.Emit(OpCodes.Leave, lfTarget.get) - else mcode.Emit(OpCodes.Endfinally) - } else - mcode.Emit(OpCodes.Leave, labels(whereto)) - } else if (next != whereto) - mcode.Emit(OpCodes.Br, labels(whereto)) - - case CJUMP(success, failure, cond, kind) => - // cond is TestOp (see Primitives.scala), and can take - // values EQ, NE, LT, GE LE, GT - // kind is TypeKind - val isFloat = kind == FLOAT || kind == DOUBLE - val emit = (c: TestOp, l: Label) => emitBr(c, l, isFloat) - emitCondBr(block, cond, success, failure, next, emit) - - case CZJUMP(success, failure, cond, kind) => - emitCondBr(block, cond, success, failure, next, emitBrBool(_, _)) - - case RETURN(kind) => - if (currentHandlers.isEmpty) - mcode.Emit(OpCodes.Ret) - else { - val (local, label) = returnFromHandler(kind) - if (kind != UNIT) - mcode.Emit(OpCodes.Stloc, local) - mcode.Emit(OpCodes.Leave, label) - } - - case THROW(_) => - mcode.Emit(OpCodes.Throw) - - case DROP(kind) => - mcode.Emit(OpCodes.Pop) - - case DUP(kind) => - // see comment on `var previousWasNEW` - if (!previousWasNEW) - mcode.Emit(OpCodes.Dup) - else - previousWasNEW = false - - case MONITOR_ENTER() => - mcode.Emit(OpCodes.Call, MMONITOR_ENTER) - - case MONITOR_EXIT() => - mcode.Emit(OpCodes.Call, MMONITOR_EXIT) - - case SCOPE_ENTER(_) | SCOPE_EXIT(_) | LOAD_EXCEPTION(_) => - () - } - - } // end for (instr <- b) { .. } - } // end genBlock - - def genPrimitive(primitive: Primitive, pos: Position) { - primitive match { - case Negation(kind) => - kind match { - // CHECK: is ist possible to get this for BOOL? in this case, verify. - case BOOL | BYTE | CHAR | SHORT | INT | LONG | FLOAT | DOUBLE => - mcode.Emit(OpCodes.Neg) - - case _ => abort("Impossible to negate a " + kind) - } - - case Arithmetic(op, kind) => - op match { - case ADD => mcode.Emit(OpCodes.Add) - case SUB => mcode.Emit(OpCodes.Sub) - case MUL => mcode.Emit(OpCodes.Mul) - case DIV => mcode.Emit(OpCodes.Div) - case REM => mcode.Emit(OpCodes.Rem) - case NOT => mcode.Emit(OpCodes.Not) //bitwise complement (one's complement) - case _ => abort("Unknown arithmetic primitive " + primitive ) - } - - case Logical(op, kind) => op match { - case AND => mcode.Emit(OpCodes.And) - case OR => mcode.Emit(OpCodes.Or) - case XOR => mcode.Emit(OpCodes.Xor) - } - - case Shift(op, kind) => op match { - case LSL => mcode.Emit(OpCodes.Shl) - case ASR => mcode.Emit(OpCodes.Shr) - case LSR => mcode.Emit(OpCodes.Shr_Un) - } - - case Conversion(src, dst) => - debuglog("Converting from: " + src + " to: " + dst) - - dst match { - case BYTE => mcode.Emit(OpCodes.Conv_I1) // I1 for System.SByte, i.e. 
a scala.Byte - case SHORT => mcode.Emit(OpCodes.Conv_I2) - case CHAR => mcode.Emit(OpCodes.Conv_U2) - case INT => mcode.Emit(OpCodes.Conv_I4) - case LONG => mcode.Emit(OpCodes.Conv_I8) - case FLOAT => mcode.Emit(OpCodes.Conv_R4) - case DOUBLE => mcode.Emit(OpCodes.Conv_R8) - case _ => - Console.println("Illegal conversion at: " + clasz + - " at: " + pos.source + ":" + pos.line) - } - - case ArrayLength(_) => - mcode.Emit(OpCodes.Ldlen) - - case StartConcat => - mcode.Emit(OpCodes.Newobj, MSTRING_BUILDER_CONSTR) - - - case StringConcat(el) => - val elemType : MsilType = el match { - case REFERENCE(_) | ARRAY(_) => MOBJECT - case _ => msilType(el) - } - - val argTypes:Array[MsilType] = Array(elemType) - val stringBuilderAppend = MSTRING_BUILDER.GetMethod("Append", argTypes ) - mcode.Emit(OpCodes.Callvirt, stringBuilderAppend) - - case EndConcat => - mcode.Emit(OpCodes.Callvirt, MSTRING_BUILDER_TOSTRING) - - case _ => - abort("Unimplemented primitive " + primitive) - } - } // end genPrimitive - - - ////////////////////// loading /////////////////////// - - def loadI4(value: Int, code: ILGenerator): Unit = value match { - case -1 => code.Emit(OpCodes.Ldc_I4_M1) - case 0 => code.Emit(OpCodes.Ldc_I4_0) - case 1 => code.Emit(OpCodes.Ldc_I4_1) - case 2 => code.Emit(OpCodes.Ldc_I4_2) - case 3 => code.Emit(OpCodes.Ldc_I4_3) - case 4 => code.Emit(OpCodes.Ldc_I4_4) - case 5 => code.Emit(OpCodes.Ldc_I4_5) - case 6 => code.Emit(OpCodes.Ldc_I4_6) - case 7 => code.Emit(OpCodes.Ldc_I4_7) - case 8 => code.Emit(OpCodes.Ldc_I4_8) - case _ => - if (value >= -128 && value <= 127) - code.Emit(OpCodes.Ldc_I4_S, value) - else - code.Emit(OpCodes.Ldc_I4, value) - } - - def loadArg(code: ILGenerator, loadAddr: Boolean)(i: Int) = - if (loadAddr) { - if (i >= -128 && i <= 127) - code.Emit(OpCodes.Ldarga_S, i) - else - code.Emit(OpCodes.Ldarga, i) - } else { - i match { - case 0 => code.Emit(OpCodes.Ldarg_0) - case 1 => code.Emit(OpCodes.Ldarg_1) - case 2 => code.Emit(OpCodes.Ldarg_2) - case 3 => code.Emit(OpCodes.Ldarg_3) - case _ => - if (i >= -128 && i <= 127) - code.Emit(OpCodes.Ldarg_S, i) - else - code.Emit(OpCodes.Ldarg, i) - } - } - - def loadLocal(i: Int, local: Local, code: ILGenerator, loadAddr: Boolean) = - if (loadAddr) { - if (i >= -128 && i <= 127) - code.Emit(OpCodes.Ldloca_S, localBuilders(local)) - else - code.Emit(OpCodes.Ldloca, localBuilders(local)) - } else { - i match { - case 0 => code.Emit(OpCodes.Ldloc_0) - case 1 => code.Emit(OpCodes.Ldloc_1) - case 2 => code.Emit(OpCodes.Ldloc_2) - case 3 => code.Emit(OpCodes.Ldloc_3) - case _ => - if (i >= -128 && i <= 127) - code.Emit(OpCodes.Ldloc_S, localBuilders(local)) - else - code.Emit(OpCodes.Ldloc, localBuilders(local)) - } - } - - ////////////////////// branches /////////////////////// - - /** Returns a Triple (Boolean, Boolean, Option[Label]) - * - whether the jump leaves some exception block (try / catch / finally) - * - whether it leaves a finally handler (finally block, but not it's try / catch) - * - a label where to jump for leaving the finally handler - * . None to leave directly using `endfinally` - * . Some(label) to emit `leave label` (for try / catch inside a finally handler) - */ - def leavesHandler(from: BasicBlock, to: BasicBlock): (Boolean, Boolean, Option[Label]) = - if (currentHandlers.isEmpty) (false, false, None) - else { - val h = currentHandlers.head - val leaveHead = { h.covers(from) != h.covers(to) || - h.blocks.contains(from) != h.blocks.contains(to) } - if (leaveHead) { - // we leave the innermost exception block. 
- // find out if we also leave som e `finally` handler - currentHandlers.find(e => { - e.cls == NoSymbol && e.blocks.contains(from) != e.blocks.contains(to) - }) match { - case Some(finallyHandler) => - if (h == finallyHandler) { - // the finally handler is the innermost, so we can emit `endfinally` directly - (true, true, None) - } else { - // we need to `Leave` to the `endfinally` of the next outer finally handler - val l = endFinallyLabels.getOrElseUpdate(finallyHandler, mcode.DefineLabel()) - (true, true, Some(l)) - } - case None => - (true, false, None) - } - } else (false, false, None) - } - - def emitCondBr(block: BasicBlock, cond: TestOp, success: BasicBlock, failure: BasicBlock, - next: BasicBlock, emitBrFun: (TestOp, Label) => Unit) { - val (sLeaveHandler, sLeaveFinally, slfTarget) = leavesHandler(block, success) - val (fLeaveHandler, fLeaveFinally, flfTarget) = leavesHandler(block, failure) - - if (sLeaveHandler || fLeaveHandler) { - val sLabelOpt = if (sLeaveHandler) { - val leaveSLabel = mcode.DefineLabel() - emitBrFun(cond, leaveSLabel) - Some(leaveSLabel) - } else { - emitBrFun(cond, labels(success)) - None - } - - if (fLeaveHandler) { - if (fLeaveFinally) { - if (flfTarget.isDefined) mcode.Emit(OpCodes.Leave, flfTarget.get) - else mcode.Emit(OpCodes.Endfinally) - } else - mcode.Emit(OpCodes.Leave, labels(failure)) - } else - mcode.Emit(OpCodes.Br, labels(failure)) - - sLabelOpt.map(l => { - mcode.MarkLabel(l) - if (sLeaveFinally) { - if (slfTarget.isDefined) mcode.Emit(OpCodes.Leave, slfTarget.get) - else mcode.Emit(OpCodes.Endfinally) - } else - mcode.Emit(OpCodes.Leave, labels(success)) - }) - } else { - if (next == success) { - emitBrFun(cond.negate, labels(failure)) - } else { - emitBrFun(cond, labels(success)) - if (next != failure) { - mcode.Emit(OpCodes.Br, labels(failure)) - } - } - } - } - - def emitBr(condition: TestOp, dest: Label, isFloat: Boolean) { - condition match { - case EQ => mcode.Emit(OpCodes.Beq, dest) - case NE => mcode.Emit(OpCodes.Bne_Un, dest) - case LT => mcode.Emit(if (isFloat) OpCodes.Blt_Un else OpCodes.Blt, dest) - case GE => mcode.Emit(if (isFloat) OpCodes.Bge_Un else OpCodes.Bge, dest) - case LE => mcode.Emit(if (isFloat) OpCodes.Ble_Un else OpCodes.Ble, dest) - case GT => mcode.Emit(if (isFloat) OpCodes.Bgt_Un else OpCodes.Bgt, dest) - } - } - - def emitBrBool(cond: TestOp, dest: Label) { - (cond: @unchecked) match { - // EQ -> Brfalse, NE -> Brtrue; this is because we come from - // a CZJUMP. If the value on the stack is 0 (e.g. a boolean - // method returned false), and we are in the case EQ, then - // we need to emit Brfalse (EQ Zero means false). vice versa - case EQ => mcode.Emit(OpCodes.Brfalse, dest) - case NE => mcode.Emit(OpCodes.Brtrue, dest) - } - } - - ////////////////////// local vars /////////////////////// - - /** - * Compute the indexes of each local variable of the given - * method. - */ - def computeLocalVarsIndex(m: IMethod) { - var idx = if (m.symbol.isStaticMember) 0 else 1 - - val params = m.params - for (l <- params) { - debuglog("Index value for parameter " + l + ": " + idx) - l.index = idx - idx += 1 // sizeOf(l.kind) - } - - val locvars = m.locals filterNot (params contains) - idx = 0 - - for (l <- locvars) { - debuglog("Index value for local variable " + l + ": " + idx) - l.index = idx - idx += 1 // sizeOf(l.kind) - } - - } - - ////////////////////// Utilities //////////////////////// - - /** Return the a name of this symbol that can be used on the .NET - * platform. It removes spaces from names. 
- * - * Special handling: scala.All and scala.AllRef are 'erased' to - * scala.All$ and scala.AllRef$. This is needed because they are - * not real classes, and they mean 'abrupt termination upon evaluation - * of that expression' or 'null' respectively. This handling is - * done already in GenICode, but here we need to remove references - * from method signatures to these types, because such classes can - * not exist in the classpath: the type checker will be very confused. - */ - def msilName(sym: Symbol): String = { - val suffix = sym.moduleSuffix - // Flags.JAVA: "symbol was not defined by a scala-class" (java, or .net-class) - - if (sym == definitions.NothingClass) - return "scala.runtime.Nothing$" - else if (sym == definitions.NullClass) - return "scala.runtime.Null$" - - (if (sym.isClass || (sym.isModule && !sym.isMethod)) { - if (sym.isNestedClass) sym.simpleName - else sym.fullName - } else - sym.simpleName.toString.trim()) + suffix - } - - - ////////////////////// flags /////////////////////// - - def msilTypeFlags(sym: Symbol): Int = { - var mf: Int = TypeAttributes.AutoLayout | TypeAttributes.AnsiClass - - if(sym.isNestedClass) { - mf = mf | (if (sym hasFlag Flags.PRIVATE) TypeAttributes.NestedPrivate else TypeAttributes.NestedPublic) - } else { - mf = mf | (if (sym hasFlag Flags.PRIVATE) TypeAttributes.NotPublic else TypeAttributes.Public) - } - mf = mf | (if (sym hasFlag Flags.ABSTRACT) TypeAttributes.Abstract else 0) - mf = mf | (if (sym.isTrait && !sym.isImplClass) TypeAttributes.Interface else TypeAttributes.Class) - mf = mf | (if (sym isFinal) TypeAttributes.Sealed else 0) - mf - // static: not possible (or?) - } - - def msilMethodFlags(sym: Symbol): Short = { - var mf: Int = MethodAttributes.HideBySig | - (if (sym hasFlag Flags.PRIVATE) MethodAttributes.Private - else MethodAttributes.Public) - - if (!sym.isClassConstructor) { - if (sym.isStaticMember) - mf = mf | FieldAttributes.Static // coincidentally, same value as for MethodAttributes.Static ... 
- else { - mf = mf | MethodAttributes.Virtual - if (sym.isFinal && !getType(sym.owner).IsInterface) - mf = mf | MethodAttributes.Final - if (sym.isDeferred || getType(sym.owner).IsInterface) - mf = mf | MethodAttributes.Abstract - } - } - - if (sym.isStaticMember) { - mf = mf | MethodAttributes.Static - } - - // constructors of module classes should be private - if (sym.isPrimaryConstructor && isTopLevelModule(sym.owner)) { - mf |= MethodAttributes.Private - mf &= ~(MethodAttributes.Public) - } - - mf.toShort - } - - def msilFieldFlags(sym: Symbol): Short = { - var mf: Int = - if (sym hasFlag Flags.PRIVATE) FieldAttributes.Private - else if (sym hasFlag Flags.PROTECTED) FieldAttributes.FamORAssem - else FieldAttributes.Public - - if (sym hasFlag Flags.FINAL) - mf = mf | FieldAttributes.InitOnly - - if (sym.isStaticMember) - mf = mf | FieldAttributes.Static - - // TRANSIENT: "not serialized", VOLATILE: doesn't exist on .net - // TODO: add this annotation also if the class has the custom attribute - // System.NotSerializedAttribute - sym.annotations.foreach( a => a match { - case AnnotationInfo(TransientAtt, _, _) => - mf = mf | FieldAttributes.NotSerialized - case _ => () - }) - - mf.toShort - } - - ////////////////////// builders, types /////////////////////// - - var entryPoint: Symbol = _ - - val notInitializedModules = mutable.HashSet[Symbol]() - - // TODO: create fields also in def createType, and not in genClass, - // add a getField method (it only works as it is because fields never - // accessed from outside a class) - - val localBuilders = mutable.HashMap[Local, LocalBuilder]() - - private[GenMSIL] def findEntryPoint(cls: IClass) { - - def isEntryPoint(sym: Symbol):Boolean = { - if (isStaticModule(sym.owner) && msilName(sym) == "main") - if (sym.tpe.paramTypes.length == 1) { - toTypeKind(sym.tpe.paramTypes(0)) match { - case ARRAY(elem) => - if (elem.toType.typeSymbol == definitions.StringClass) { - return true - } - case _ => () - } - } - false - } - - if((entryPoint == null) && settings.Xshowcls.isSetByUser) { // TODO introduce dedicated setting instead - val entryclass = settings.Xshowcls.value.toString - val cfn = cls.symbol.fullName - if(cfn == entryclass) { - for (m <- cls.methods; if isEntryPoint(m.symbol)) { entryPoint = m.symbol } - if(entryPoint == null) { warning("Couldn't find main method in class " + cfn) } - } - } - - if (firstSourceName == "") - if (cls.symbol.sourceFile != null) // is null for nested classes - firstSourceName = cls.symbol.sourceFile.name - } - - // ##################################################################### - // get and create types - - private def msilType(t: TypeKind): MsilType = (t: @unchecked) match { - case UNIT => MVOID - case BOOL => MBOOL - case BYTE => MBYTE - case SHORT => MSHORT - case CHAR => MCHAR - case INT => MINT - case LONG => MLONG - case FLOAT => MFLOAT - case DOUBLE => MDOUBLE - case REFERENCE(cls) => getType(cls) - case ARRAY(elem) => - msilType(elem) match { - // For type builders, cannot call "clrTypes.mkArrayType" because this looks up - // the type "tp" in the assembly (not in the HashMap "types" of the backend). - // This can fail for nested types because the builders are not complete yet. 
- case tb: TypeBuilder => tb.MakeArrayType() - case tp: MsilType => clrTypes.mkArrayType(tp) - } - } - - private def msilType(tpe: Type): MsilType = msilType(toTypeKind(tpe)) - - private def msilParamTypes(sym: Symbol): Array[MsilType] = { - sym.tpe.paramTypes.map(msilType).toArray - } - - def getType(sym: Symbol) = getTypeOpt(sym).getOrElse(abort(showsym(sym))) - - /** - * Get an MSIL type from a symbol. First look in the clrTypes.types map, then - * lookup the name using clrTypes.getType - */ - def getTypeOpt(sym: Symbol): Option[MsilType] = { - val tmp = types.get(sym) - tmp match { - case typ @ Some(_) => typ - case None => - def typeString(sym: Symbol): String = { - val s = if (sym.isNestedClass) typeString(sym.owner) +"+"+ sym.simpleName - else sym.fullName - if (sym.isModuleClass && !sym.isTrait) s + "$" else s - } - val name = typeString(sym) - val typ = clrTypes.getType(name) - if (typ == null) - None - else { - types(sym) = typ - Some(typ) - } - } - } - - def mapType(sym: Symbol, mType: MsilType) { - assert(mType != null, showsym(sym)) - types(sym) = mType - } - - def createTypeBuilder(iclass: IClass) { - /** - * First look in the clrTypes.types map, if that fails check if it's a class being compiled, otherwise - * lookup by name (clrTypes.getType calls the static method msil.Type.GetType(fullname)). - */ - def msilTypeFromSym(sym: Symbol): MsilType = { - types.get(sym).getOrElse { - classes.get(sym) match { - case Some(iclass) => - msilTypeBuilderFromSym(sym) - case None => - getType(sym) - } - } - } - - def msilTypeBuilderFromSym(sym: Symbol): TypeBuilder = { - if(!(types.contains(sym) && types(sym).isInstanceOf[TypeBuilder])){ - val iclass = classes(sym) - assert(iclass != null) - createTypeBuilder(iclass) - } - types(sym).asInstanceOf[TypeBuilder] - } - - val sym = iclass.symbol - if (types.contains(sym) && types(sym).isInstanceOf[TypeBuilder]) - return - - def isInterface(s: Symbol) = s.isTrait && !s.isImplClass - val parents: List[Type] = - if (sym.info.parents.isEmpty) List(definitions.ObjectClass.tpe) - else sym.info.parents.distinct - - val superType : MsilType = if (isInterface(sym)) null else msilTypeFromSym(parents.head.typeSymbol) - debuglog("super type: " + parents(0).typeSymbol + ", msil type: " + superType) - - val interfaces: Array[MsilType] = - parents.tail.map(p => msilTypeFromSym(p.typeSymbol)).toArray - if (parents.length > 1) { - if (settings.debug.value) { - log("interfaces:") - for (i <- 0.until(interfaces.length)) { - log(" type: " + parents(i + 1).typeSymbol + ", msil type: " + interfaces(i)) - } - } - } - - val tBuilder = if (sym.isNestedClass) { - val ownerT = msilTypeBuilderFromSym(sym.owner).asInstanceOf[TypeBuilder] - ownerT.DefineNestedType(msilName(sym), msilTypeFlags(sym), superType, interfaces) - } else { - mmodule.DefineType(msilName(sym), msilTypeFlags(sym), superType, interfaces) - } - mapType(sym, tBuilder) - } // createTypeBuilder - - def createClassMembers(iclass: IClass) { - try { - createClassMembers0(iclass) - } - catch { - case e: Throwable => - java.lang.System.err.println(showsym(iclass.symbol)) - java.lang.System.err.println("with methods = " + iclass.methods) - throw e - } - } - - def createClassMembers0(iclass: IClass) { - - val mtype = getType(iclass.symbol).asInstanceOf[TypeBuilder] - - for (ifield <- iclass.fields) { - val sym = ifield.symbol - debuglog("Adding field: " + sym.fullName) - - val attributes = msilFieldFlags(sym) - val fieldTypeWithCustomMods = - new PECustomMod(msilType(sym.tpe), - 
customModifiers(sym.annotations)) - val fBuilder = mtype.DefineField(msilName(sym), - fieldTypeWithCustomMods, - attributes) - fields(sym) = fBuilder - addAttributes(fBuilder, sym.annotations) - } // all iclass.fields iterated over - - if (isStaticModule(iclass.symbol)) { - val sc = iclass.lookupStaticCtor - if (sc.isDefined) { - val m = sc.get - val oldLastBlock = m.lastBlock - val lastBlock = m.newBlock() - oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock)) - // call object's private ctor from static ctor - lastBlock.emit(CIL_NEWOBJ(iclass.symbol.primaryConstructor)) - lastBlock.emit(DROP(toTypeKind(iclass.symbol.tpe))) - lastBlock emit RETURN(UNIT) - lastBlock.close - } - } - - if (iclass.symbol != definitions.ArrayClass) { - for (m: IMethod <- iclass.methods) { - val sym = m.symbol - debuglog("Creating MethodBuilder for " + sym.flagString + " " + - sym.owner.fullName + "::" + sym.name) - - val ownerType = getType(sym.enclClass).asInstanceOf[TypeBuilder] - assert(mtype == ownerType, "mtype = " + mtype + "; ownerType = " + ownerType) - val paramTypes = msilParamTypes(sym) - val attr = msilMethodFlags(sym) - - if (m.symbol.isClassConstructor) { - val constr = - ownerType.DefineConstructor(attr, CallingConventions.Standard, paramTypes) - for (i <- 0.until(paramTypes.length)) { - constr.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym)) - } - mapConstructor(sym, constr) - addAttributes(constr, sym.annotations) - } else { - val resType = msilType(m.returnType) - val method = - ownerType.DefineMethod(msilName(sym), attr, resType, paramTypes) - for (i <- 0.until(paramTypes.length)) { - method.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym)) - } - if (!methods.contains(sym)) - mapMethod(sym, method) - addAttributes(method, sym.annotations) - debuglog("\t created MethodBuilder " + method) - } - } - } // method builders created for non-array iclass - - if (isStaticModule(iclass.symbol)) { - addModuleInstanceField(iclass.symbol) - notInitializedModules += iclass.symbol - if (iclass.lookupStaticCtor.isEmpty) { - addStaticInit(iclass.symbol) - } - } - - } // createClassMembers0 - - private def isTopLevelModule(sym: Symbol): Boolean = - enteringRefchecks { - sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass - } - - // if the module is lifted it does not need to be initialized in - // its static constructor, and the MODULE$ field is not required. - // the outer class will care about it. - private def isStaticModule(sym: Symbol): Boolean = { - // .net inner classes: removed '!sym.hasFlag(Flags.LIFTED)', added - // 'sym.isStatic'. -> no longer compatible without skipping flatten! 
- sym.isModuleClass && sym.isStatic && !sym.isImplClass - } - - private def isCloneable(sym: Symbol): Boolean = { - !sym.annotations.forall( a => a match { - case AnnotationInfo(CloneableAttr, _, _) => false - case _ => true - }) - } - - private def addModuleInstanceField(sym: Symbol) { - debuglog("Adding Module-Instance Field for " + showsym(sym)) - val tBuilder = getType(sym).asInstanceOf[TypeBuilder] - val fb = tBuilder.DefineField(MODULE_INSTANCE_NAME, - tBuilder, - (FieldAttributes.Public | - //FieldAttributes.InitOnly | - FieldAttributes.Static).toShort) - fields(sym) = fb - } - - - // the symbol may be a object-symbol (module-symbol), or a module-class-symbol - private def getModuleInstanceField(sym: Symbol): FieldInfo = { - assert(sym.isModule || sym.isModuleClass, "Expected module: " + showsym(sym)) - - // when called by LOAD_MODULE, the corresponding type maybe doesn't - // exist yet -> make a getType - val moduleClassSym = if (sym.isModule) sym.moduleClass else sym - - // TODO: get module field for modules not defined in the - // source currently compiling (e.g. Console) - - fields get moduleClassSym match { - case Some(sym) => sym - case None => - //val mclass = types(moduleClassSym) - val nameInMetadata = nestingAwareFullClassname(moduleClassSym) - val mClass = clrTypes.getType(nameInMetadata) - val mfield = mClass.GetField("MODULE$") - assert(mfield ne null, "module not found " + showsym(moduleClassSym)) - fields(moduleClassSym) = mfield - mfield - } - - //fields(moduleClassSym) - } - - def nestingAwareFullClassname(csym: Symbol) : String = { - val suffix = csym.moduleSuffix - val res = if (csym.isNestedClass) - nestingAwareFullClassname(csym.owner) + "+" + csym.encodedName - else - csym.fullName - res + suffix - } - - /** Adds a static initializer which creates an instance of the module - * class (calls the primary constructor). A special primary constructor - * will be generated (notInitializedModules) which stores the new instance - * in the MODULE$ field right after the super call. - */ - private def addStaticInit(sym: Symbol) { - val tBuilder = getType(sym).asInstanceOf[TypeBuilder] - - val staticInit = tBuilder.DefineConstructor( - (MethodAttributes.Static | MethodAttributes.Public).toShort, - CallingConventions.Standard, - MsilType.EmptyTypes) - - val sicode = staticInit.GetILGenerator() - - val instanceConstructor = constructors(sym.primaryConstructor) - - // there are no constructor parameters. assuming the constructor takes no parameter - // is fine: we call (in the static constructor) the constructor of the module class, - // which takes no arguments - an object definition cannot take constructor arguments. - sicode.Emit(OpCodes.Newobj, instanceConstructor) - // the stsfld is done in the instance constructor, just after the super call. 
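
The initialisation order described above — the generated static constructor does nothing but
new up the module instance, and the instance constructor stores itself into MODULE$ right
after its super call — can be transliterated into plain Scala to make that ordering visible.
FooModule, instance and current are invented names; this is only an approximation of the
generated shape, not the literal backend output:

class FooModule private () {
  // corresponds to the `stsfld MODULE$` emitted just after the super call
  FooModule.instance = this
  def greet: String = "hello from the module"
}

object FooModule {
  // plays the role of the generated MODULE$ field
  private var instance: FooModule = _
  // plays the role of the generated static constructor: create the instance, drop the reference
  new FooModule()
  def current: FooModule = instance
}

// Touching FooModule.current forces the object initializer, which runs `new FooModule()`;
// the constructor publishes itself into `instance` before any member is used.
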
- sicode.Emit(OpCodes.Pop) - - sicode.Emit(OpCodes.Ret) - } - - private def generateMirrorClass(sym: Symbol) { - assert(sym.isModuleClass, "Can't generate Mirror-Class for the Non-Module class " + sym) - debuglog("Dumping mirror class for object: " + sym) - val moduleName = msilName(sym) - val mirrorName = moduleName.substring(0, moduleName.length() - 1) - val mirrorTypeBuilder = mmodule.DefineType(mirrorName, - TypeAttributes.Class | - TypeAttributes.Public | - TypeAttributes.Sealed, - MOBJECT, - MsilType.EmptyTypes) - - val iclass = classes(sym) - - for (m <- sym.tpe.nonPrivateMembers - if m.owner != definitions.ObjectClass && !m.isProtected && - m.isMethod && !m.isClassConstructor && !m.isStaticMember && !m.isCase && - !m.isDeferred) - { - debuglog(" Mirroring method: " + m) - val paramTypes = msilParamTypes(m) - val paramNames: Array[String] = new Array[String](paramTypes.length) - for (i <- 0 until paramTypes.length) - paramNames(i) = "x_" + i - - // CHECK: verify if getMethodName is better than msilName - val mirrorMethod = mirrorTypeBuilder.DefineMethod(msilName(m), - (MethodAttributes.Public | - MethodAttributes.Static).toShort, - msilType(m.tpe.resultType), - paramTypes) - - var i = 0 - while (i < paramTypes.length) { - mirrorMethod.DefineParameter(i, ParameterAttributes.None, paramNames(i)) - i += 1 - } - - val mirrorCode = mirrorMethod.GetILGenerator() - mirrorCode.Emit(OpCodes.Ldsfld, getModuleInstanceField(sym)) - val mInfo = getMethod(m) - for (paramidx <- 0.until(paramTypes.length)) { - val mInfoParams = mInfo.GetParameters - val loadAddr = mInfoParams(paramidx).ParameterType.IsByRef - loadArg(mirrorCode, loadAddr)(paramidx) - } - - mirrorCode.Emit(OpCodes.Callvirt, getMethod(m)) - mirrorCode.Emit(OpCodes.Ret) - } - - addSymtabAttribute(sym.sourceModule, mirrorTypeBuilder) - - mirrorTypeBuilder.CreateType() - mirrorTypeBuilder.setSourceFilepath(iclass.cunit.source.file.path) - } - - - // ##################################################################### - // delegate callers - - var delegateCallers: TypeBuilder = _ - var nbDelegateCallers: Int = 0 - - private def initDelegateCallers() = { - delegateCallers = mmodule.DefineType("$DelegateCallers", TypeAttributes.Public | - TypeAttributes.Sealed) - } - - private def createDelegateCaller(functionType: Type, delegateType: Type) = { - if (delegateCallers == null) - initDelegateCallers() - // create a field an store the function-object - val mFunctionType: MsilType = msilType(functionType) - val anonfunField: FieldBuilder = delegateCallers.DefineField( - "$anonfunField$$" + nbDelegateCallers, mFunctionType, - (FieldAttributes.InitOnly | FieldAttributes.Public | FieldAttributes.Static).toShort) - mcode.Emit(OpCodes.Stsfld, anonfunField) - - - // create the static caller method and the delegate object - val (params, returnType) = delegateType.member(nme.apply).tpe match { - case MethodType(delParams, delReturn) => (delParams, delReturn) - case _ => abort("not a delegate type: " + delegateType) - } - val caller: MethodBuilder = delegateCallers.DefineMethod( - "$delegateCaller$$" + nbDelegateCallers, - (MethodAttributes.Final | MethodAttributes.Public | MethodAttributes.Static).toShort, - msilType(returnType), (params map (_.tpe)).map(msilType).toArray) - for (i <- 0 until params.length) - caller.DefineParameter(i, ParameterAttributes.None, "arg" + i) // FIXME: use name of parameter symbol - val delegCtor = msilType(delegateType).GetConstructor(Array(MOBJECT, INT_PTR)) - mcode.Emit(OpCodes.Ldnull) - mcode.Emit(OpCodes.Ldftn, 
caller) - mcode.Emit(OpCodes.Newobj, delegCtor) - - - // create the static caller method body - val functionApply: MethodInfo = getMethod(functionType.member(nme.apply)) - val dcode: ILGenerator = caller.GetILGenerator() - dcode.Emit(OpCodes.Ldsfld, anonfunField) - for (i <- 0 until params.length) { - loadArg(dcode, false /* TODO confirm whether passing actual as-is to formal is correct wrt the ByRef attribute of the param */)(i) - emitBox(dcode, toTypeKind(params(i).tpe)) - } - dcode.Emit(OpCodes.Callvirt, functionApply) - emitUnbox(dcode, toTypeKind(returnType)) - dcode.Emit(OpCodes.Ret) - - nbDelegateCallers = nbDelegateCallers + 1 - - } //def createDelegateCaller - - def emitBox(code: ILGenerator, boxType: TypeKind) = (boxType: @unchecked) match { - // doesn't make sense, unit as parameter.. - case UNIT => code.Emit(OpCodes.Ldsfld, boxedUnit) - case BOOL | BYTE | SHORT | CHAR | INT | LONG | FLOAT | DOUBLE => - code.Emit(OpCodes.Box, msilType(boxType)) - case REFERENCE(cls) if clrTypes.isValueType(cls) => - code.Emit(OpCodes.Box, (msilType(boxType))) - case REFERENCE(_) | ARRAY(_) => - warning("Tried to BOX a non-valuetype.") - () - } - - def emitUnbox(code: ILGenerator, boxType: TypeKind) = (boxType: @unchecked) match { - case UNIT => code.Emit(OpCodes.Pop) - /* (1) it's essential to keep the code emitted here (as of now plain calls to System.Convert.ToBlaBla methods) - behaviorally.equiv.wrt. BoxesRunTime.unboxToBlaBla methods - (case null: that's easy, case boxed: track changes to unboxBlaBla) - (2) See also: asInstanceOf to cast from Any to number, - tracked in http://lampsvn.epfl.ch/trac/scala/ticket/4437 */ - case BOOL => code.Emit(OpCodes.Call, toBool) - case BYTE => code.Emit(OpCodes.Call, toSByte) - case SHORT => code.Emit(OpCodes.Call, toShort) - case CHAR => code.Emit(OpCodes.Call, toChar) - case INT => code.Emit(OpCodes.Call, toInt) - case LONG => code.Emit(OpCodes.Call, toLong) - case FLOAT => code.Emit(OpCodes.Call, toFloat) - case DOUBLE => code.Emit(OpCodes.Call, toDouble) - case REFERENCE(cls) if clrTypes.isValueType(cls) => - code.Emit(OpCodes.Unbox, msilType(boxType)) - code.Emit(OpCodes.Ldobj, msilType(boxType)) - case REFERENCE(_) | ARRAY(_) => - warning("Tried to UNBOX a non-valuetype.") - () - } - - // ##################################################################### - // get and create methods / constructors - - def getConstructor(sym: Symbol): ConstructorInfo = constructors.get(sym) match { - case Some(constr) => constr - case None => - val mClass = getType(sym.owner) - val constr = mClass.GetConstructor(msilParamTypes(sym)) - if (constr eq null) { - java.lang.System.out.println("Cannot find constructor " + sym.owner + "::" + sym.name) - java.lang.System.out.println("scope = " + sym.owner.tpe.decls) - abort(sym.fullName) - } - else { - mapConstructor(sym, constr) - constr - } - } - - def mapConstructor(sym: Symbol, cInfo: ConstructorInfo) = { - constructors(sym) = cInfo - } - - private def getMethod(sym: Symbol): MethodInfo = { - - methods.get(sym) match { - case Some(method) => method - case None => - val mClass = getType(sym.owner) - try { - val method = mClass.GetMethod(msilName(sym), msilParamTypes(sym), - msilType(sym.tpe.resultType)) - if (method eq null) { - java.lang.System.out.println("Cannot find method " + sym.owner + "::" + msilName(sym)) - java.lang.System.out.println("scope = " + sym.owner.tpe.decls) - abort(sym.fullName) - } - else { - mapMethod(sym, method) - method - } - } - catch { - case e: Exception => - Console.println("While looking up 
" + mClass + "::" + sym.nameString) - Console.println("\t" + showsym(sym)) - throw e - } - } - } - - /* - * add a mapping between sym and mInfo - */ - private def mapMethod(sym: Symbol, mInfo: MethodInfo) { - assert (mInfo != null, mInfo) - methods(sym) = mInfo - } - - private def showsym(sym: Symbol): String = (sym.toString + - "\n symbol = " + sym.flagString + " " + sym + - "\n owner = " + sym.owner.flagString + " " + sym.owner - ) - - } // class BytecodeGenerator - -} // class GenMSIL diff --git a/src/compiler/scala/tools/nsc/io/MsilFile.scala b/src/compiler/scala/tools/nsc/io/MsilFile.scala deleted file mode 100644 index bda13a5ed0..0000000000 --- a/src/compiler/scala/tools/nsc/io/MsilFile.scala +++ /dev/null @@ -1,15 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package io - -import ch.epfl.lamp.compiler.msil.{ Type => MsilType } - -/** This class wraps an MsilType. It exists only so - * ClassPath can treat all of JVM/MSIL/bin/src files - * uniformly, as AbstractFiles. - */ -class MsilFile(val msilType: MsilType) extends VirtualFile(msilType.FullName, msilType.Namespace) { } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index cf6579a696..8f964cf9e1 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -69,10 +69,6 @@ trait ScalaSettings extends AbsScalaSettings * -X "Advanced" settings */ val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options.") - val assemname = StringSetting ("-Xassem-name", "file", "(Requires -target:msil) Name of the output assembly.", "").dependsOn(target, "msil") - val assemrefs = StringSetting ("-Xassem-path", "path", "(Requires -target:msil) List of assemblies referenced by the program.", ".").dependsOn(target, "msil") - val assemextdirs = StringSetting ("-Xassem-extdirs", "dirs", "(Requires -target:msil) List of directories containing assemblies. default:lib", Defaults.scalaLibDir.path).dependsOn(target, "msil") - val sourcedir = StringSetting ("-Xsourcedir", "directory", "(Requires -target:msil) Mirror source folder structure in output directory.", ".").dependsOn(target, "msil") val checkInit = BooleanSetting ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.") val developer = BooleanSetting ("-Xdev", "Indicates user is a developer - issue warnings about anything which seems amiss") val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions or assumptions.") diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index 98ef74aee3..6e5ac4f409 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -41,7 +41,7 @@ trait StandardScalaSettings { val optimise: BooleanSetting // depends on post hook which mutates other settings val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.") val target = ChoiceSetting ("-target", "target", "Target platform for object files. 
All JVM 1.5 targets are deprecated.", - List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil"), + List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7"), "jvm-1.6") val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.") val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.") diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 19502f0d7e..bbff03f67f 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -12,7 +12,7 @@ import scala.tools.nsc.util.{ ClassPath } import classfile.ClassfileParser import scala.reflect.internal.MissingRequirementError import scala.reflect.internal.util.Statistics -import scala.tools.nsc.io.{ AbstractFile, MsilFile } +import scala.tools.nsc.io.{ AbstractFile } /** This class ... * @@ -152,7 +152,7 @@ abstract class SymbolLoaders { def sourcefile: Option[AbstractFile] = None /** - * Description of the resource (ClassPath, AbstractFile, MsilFile) + * Description of the resource (ClassPath, AbstractFile) * being processed by this loader */ protected def description: String @@ -265,16 +265,6 @@ abstract class SymbolLoaders { override def sourcefile: Option[AbstractFile] = classfileParser.srcfile } - class MsilFileLoader(msilFile: MsilFile) extends SymbolLoader with FlagAssigningCompleter { - private def typ = msilFile.msilType - private object typeParser extends clr.TypeParser { - val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global - } - - protected def description = "MsilFile "+ typ.FullName + ", assembly "+ typ.Assembly.FullName - protected def doComplete(root: Symbol) { typeParser.parse(typ, root) } - } - class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter { protected def description = "source file "+ srcfile.toString override def fromSource = true @@ -287,11 +277,6 @@ abstract class SymbolLoaders { protected def doComplete(root: Symbol) { root.sourceModule.initialize } } - object clrTypes extends clr.CLRTypes { - val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global - if (global.forMSIL) init() - } - /** used from classfile parser to avoid cyclies */ var parentsLevel = 0 var pendingLoadActions: List[() => Unit] = Nil diff --git a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala b/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala deleted file mode 100644 index 624db027f1..0000000000 --- a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala +++ /dev/null @@ -1,132 +0,0 @@ -/* NSC -- new scala compiler - * Copyright 2004-2013 LAMP/EPFL - */ - - -package scala.tools.nsc -package symtab -package clr - -import ch.epfl.lamp.compiler.msil._ -import scala.collection.{ mutable, immutable } - -/** - * Collects all types from all reference assemblies. 
- */ -abstract class CLRTypes { - - val global: Global - import global.Symbol - - //########################################################################## - - var BYTE: Type = _ - var UBYTE: Type = _ - var SHORT: Type = _ - var USHORT: Type = _ - var CHAR: Type = _ - var INT: Type = _ - var UINT: Type = _ - var LONG: Type = _ - var ULONG: Type = _ - var FLOAT: Type = _ - var DOUBLE: Type = _ - var BOOLEAN: Type = _ - var VOID: Type = _ - var ENUM: Type = _ - var DELEGATE: Type = _ - - var OBJECT: Type = _ - var STRING: Type = _ - var STRING_ARRAY: Type = _ - - var VALUE_TYPE: Type = _ - - var SCALA_SYMTAB_ATTR: Type = _ - var SYMTAB_CONSTR: ConstructorInfo = _ - var SYMTAB_DEFAULT_CONSTR: ConstructorInfo = _ - - var DELEGATE_COMBINE: MethodInfo = _ - var DELEGATE_REMOVE: MethodInfo = _ - - val types: mutable.Map[Symbol,Type] = new mutable.HashMap - val constructors: mutable.Map[Symbol,ConstructorInfo] = new mutable.HashMap - val methods: mutable.Map[Symbol,MethodInfo] = new mutable.HashMap - val fields: mutable.Map[Symbol, FieldInfo] = new mutable.HashMap - val sym2type: mutable.Map[Type,Symbol] = new mutable.HashMap - val addressOfViews = new mutable.HashSet[Symbol] - val mdgptrcls4clssym: mutable.Map[ /*cls*/ Symbol, /*cls*/ Symbol] = new mutable.HashMap - - def isAddressOf(msym : Symbol) = addressOfViews.contains(msym) - - def isNonEnumValuetype(cls: Symbol) = { - val msilTOpt = types.get(cls) - val res = msilTOpt.isDefined && { - val msilT = msilTOpt.get - msilT.IsValueType && !msilT.IsEnum - } - res - } - - def isValueType(cls: Symbol): Boolean = { - val opt = types.get(cls) - opt.isDefined && opt.get.IsValueType - } - - def init() = try { // initialize - // the MsilClasspath (nsc/util/Classpath.scala) initializes the msil-library by calling - // Assembly.LoadFrom("mscorlib.dll"), so this type should be found - Type.initMSCORLIB(getTypeSafe("System.String").Assembly) - - BYTE = getTypeSafe("System.SByte") - UBYTE = getTypeSafe("System.Byte") - CHAR = getTypeSafe("System.Char") - SHORT = getTypeSafe("System.Int16") - USHORT = getTypeSafe("System.UInt16") - INT = getTypeSafe("System.Int32") - UINT = getTypeSafe("System.UInt32") - LONG = getTypeSafe("System.Int64") - ULONG = getTypeSafe("System.UInt64") - FLOAT = getTypeSafe("System.Single") - DOUBLE = getTypeSafe("System.Double") - BOOLEAN = getTypeSafe("System.Boolean") - VOID = getTypeSafe("System.Void") - ENUM = getTypeSafe("System.Enum") - DELEGATE = getTypeSafe("System.MulticastDelegate") - - OBJECT = getTypeSafe("System.Object") - STRING = getTypeSafe("System.String") - STRING_ARRAY = getTypeSafe("System.String[]") - VALUE_TYPE = getTypeSafe("System.ValueType") - - SCALA_SYMTAB_ATTR = getTypeSafe("scala.runtime.SymtabAttribute") - val bytearray: Array[Type] = Array(Type.GetType("System.Byte[]")) - SYMTAB_CONSTR = SCALA_SYMTAB_ATTR.GetConstructor(bytearray) - SYMTAB_DEFAULT_CONSTR = SCALA_SYMTAB_ATTR.GetConstructor(Type.EmptyTypes) - - val delegate: Type = getTypeSafe("System.Delegate") - val dargs: Array[Type] = Array(delegate, delegate) - DELEGATE_COMBINE = delegate.GetMethod("Combine", dargs) - DELEGATE_REMOVE = delegate.GetMethod("Remove", dargs) - } - catch { - case e: RuntimeException => - Console.println(e.getMessage) - throw e - } - - //########################################################################## - // type mapping and lookup - - def getType(name: String): Type = Type.GetType(name) - - def getTypeSafe(name: String): Type = { - val t = Type.GetType(name) - assert(t != null, name) - t - } - - def 
mkArrayType(elemType: Type): Type = getType(elemType.FullName + "[]") - - def isDelegateType(t: Type): Boolean = { t.BaseType() == DELEGATE } -} // CLRTypes diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala deleted file mode 100644 index f0e49ce500..0000000000 --- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala +++ /dev/null @@ -1,849 +0,0 @@ -/* NSC -- new scala compiler - * Copyright 2004-2013 LAMP/EPFL - */ - -package scala.tools.nsc -package symtab -package clr - -import java.io.IOException -import io.MsilFile -import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute, _} -import scala.collection.{ mutable, immutable } -import scala.reflect.internal.pickling.UnPickler -import ch.epfl.lamp.compiler.msil.Type.TMVarUsage - -/** - * @author Nikolay Mihaylov - */ -abstract class TypeParser { - - val global: Global - - import global._ - import loaders.clrTypes - - //########################################################################## - - private var clazz: Symbol = _ - private var instanceDefs: Scope = _ // was members - private var staticModule: Symbol = _ // was staticsClass - private var staticDefs: Scope = _ // was statics - - protected def statics: Symbol = staticModule.moduleClass - - protected var busy: Boolean = false // lock to detect recursive reads - - private object unpickler extends UnPickler { - val global: TypeParser.this.global.type = TypeParser.this.global - } - - def parse(typ: MSILType, root: Symbol) { - - def handleError(e: Throwable) = { - if (settings.debug.value) e.printStackTrace() //debug - throw new IOException("type '" + typ.FullName + "' is broken\n(" + e.getMessage() + ")") - } - assert(!busy) - busy = true - - if (root.isModule) { - this.clazz = root.companionClass - this.staticModule = root - } else { - this.clazz = root - this.staticModule = root.companionModule - } - try { - parseClass(typ) - } catch { - case e: FatalError => handleError(e) - case e: RuntimeException => handleError(e) - } - busy = false - } - - class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter { - override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") } - } - - /* the names `classTParams` and `newTParams` stem from the forJVM version (ClassfileParser.sigToType()) - * but there are differences that should be kept in mind. - * forMSIL, a nested class knows nothing about any type-params in the nesting class, - * therefore newTParams is redundant (other than for recording lexical order), - * it always contains the same elements as classTParams.value */ - val classTParams = scala.collection.mutable.Map[Int,Symbol]() // TODO should this be a stack? (i.e., is it possible for >1 invocation to getCLRType on the same TypeParser instance be active ) - val newTParams = new scala.collection.mutable.ListBuffer[Symbol]() - val methodTParams = scala.collection.mutable.Map[Int,Symbol]() - - private def sig2typeBounds(tvarCILDef: GenericParamAndConstraints): Type = { - val ts = new scala.collection.mutable.ListBuffer[Type] - for (cnstrnt <- tvarCILDef.Constraints) { - ts += getCLRType(cnstrnt) // TODO we're definitely not at or after erasure, no need to call objToAny, right? - } - TypeBounds.upper(intersectionType(ts.toList, clazz)) - // TODO variance??? 
- } - - private def createViewFromTo(viewSuffix : String, fromTpe : Type, toTpe : Type, - addToboxMethodMap : Boolean, isAddressOf : Boolean) : Symbol = { - val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? shouldn't be final instead? - val viewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(List(fromTpe)), toTpe) - val vmsym = createMethod(nme.view_ + viewSuffix, flags, viewMethodType, null, true); - // !!! this used to mutate a mutable map in definitions, but that map became - // immutable and this kept "working" with a no-op. So now it's commented out - // since I retired the deprecated code which allowed for that bug. - // - // if (addToboxMethodMap) definitions.boxMethod(clazz) = vmsym - - if (isAddressOf) clrTypes.addressOfViews += vmsym - vmsym - } - - private def createDefaultConstructor(typ: MSILType) { - val attrs = MethodAttributes.Public | MethodAttributes.RTSpecialName | MethodAttributes.SpecialName // TODO instance - val declType= typ - val method = new ConstructorInfo(declType, attrs, Array[MSILType]()) - val flags = Flags.JAVA - val owner = clazz - val methodSym = owner.newMethod(nme.CONSTRUCTOR, NoPosition, flags) - val rettype = clazz.tpe - val mtype = methodType(Array[MSILType](), rettype); - val mInfo = mtype(methodSym) - methodSym.setInfo(mInfo) - instanceDefs.enter(methodSym); - clrTypes.constructors(methodSym) = method - } - - private def parseClass(typ: MSILType) { - - { - val t4c = clrTypes.types.get(clazz) - assert(t4c == None || t4c == Some(typ)) - } - clrTypes.types(clazz) = typ - - { - val c4t = clrTypes.sym2type.get(typ) - assert(c4t == None || c4t == Some(clazz)) - } - clrTypes.sym2type(typ) = clazz - - if (typ.IsDefined(clrTypes.SCALA_SYMTAB_ATTR, false)) { - val attrs = typ.GetCustomAttributes(clrTypes.SCALA_SYMTAB_ATTR, false); - assert (attrs.length == 1, attrs.length); - val a = attrs(0).asInstanceOf[MSILAttribute]; - assert (a.getConstructor() == clrTypes.SYMTAB_CONSTR); - val symtab = a.getConstructorArguments()(0).asInstanceOf[Array[Byte]] - unpickler.unpickle(symtab, 0, clazz, staticModule, typ.FullName); - val mClass = clrTypes.getType(typ.FullName + "$"); - if (mClass != null) { - clrTypes.types(statics) = mClass; - val moduleInstance = mClass.GetField("MODULE$"); - assert (moduleInstance != null, mClass); - clrTypes.fields(statics) = moduleInstance; - } - return - } - val flags = translateAttributes(typ) - - var clazzBoxed : Symbol = NoSymbol - var clazzMgdPtr : Symbol = NoSymbol - - val canBeTakenAddressOf = (typ.IsValueType || typ.IsEnum) && (typ.FullName != "System.Enum") - - if(canBeTakenAddressOf) { - clazzBoxed = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("Boxed")) - clazzMgdPtr = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("MgdPtr")) - clrTypes.mdgptrcls4clssym(clazz) = clazzMgdPtr - /* adding typMgdPtr to clrTypes.sym2type should happen early (before metadata for supertypes is parsed, - before metadata for members are parsed) so that clazzMgdPtr can be found by getClRType. 
*/ - val typMgdPtr = MSILType.mkByRef(typ) - clrTypes.types(clazzMgdPtr) = typMgdPtr - clrTypes.sym2type(typMgdPtr) = clazzMgdPtr - /* clazzMgdPtr but not clazzBoxed is mapped by clrTypes.types into an msil.Type instance, - because there's no metadata-level representation for a "boxed valuetype" */ - val instanceDefsMgdPtr = newScope - val classInfoMgdPtr = ClassInfoType(definitions.anyvalparam, instanceDefsMgdPtr, clazzMgdPtr) - clazzMgdPtr.setFlag(flags) - clazzMgdPtr.setInfo(classInfoMgdPtr) - } - -/* START CLR generics (snippet 1) */ - // first pass - for (tvarCILDef <- typ.getSortedTVars() ) { - val tpname = newTypeName(tvarCILDef.Name.replaceAll("!", "")) // TODO are really all type-params named in all assemblies out there? (NO) - val tpsym = clazz.newTypeParameter(tpname) - classTParams.put(tvarCILDef.Number, tpsym) - newTParams += tpsym - // TODO wouldn't the following also be needed later, i.e. during getCLRType - tpsym.setInfo(definitions.AnyClass.tpe) - } - // second pass - for (tvarCILDef <- typ.getSortedTVars() ) { - val tpsym = classTParams(tvarCILDef.Number) - tpsym.setInfo(sig2typeBounds(tvarCILDef)) // we never skip bounds unlike in forJVM - } -/* END CLR generics (snippet 1) */ - val ownTypeParams = newTParams.toList -/* START CLR generics (snippet 2) */ - if (!ownTypeParams.isEmpty) { - clazz.setInfo(new TypeParamsType(ownTypeParams)) - if(typ.IsValueType && !typ.IsEnum) { - clazzBoxed.setInfo(new TypeParamsType(ownTypeParams)) - } - } -/* END CLR generics (snippet 2) */ - instanceDefs = newScope - staticDefs = newScope - - val classInfoAsInMetadata = { - val ifaces: Array[MSILType] = typ.getInterfaces() - val superType = if (typ.BaseType() != null) getCLRType(typ.BaseType()) - else if (typ.IsInterface()) definitions.ObjectClass.tpe - else definitions.AnyClass.tpe; // this branch activates for System.Object only. 
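As a reading aid for the supertype choice made just below, a tiny standalone model of the three cases (hypothetical names, not compiler API), assuming only the rule stated in the surrounding comment:

// Simplified restatement of the parent-selection rule used when importing a
// CLR type: declared base type if present, Object for interfaces without one,
// Any otherwise (which only System.Object reaches).
sealed trait ParentChoice
case object AnyParent extends ParentChoice
case object ObjectParent extends ParentChoice
final case class DeclaredBase(name: String) extends ParentChoice

def chooseParent(baseType: Option[String], isInterface: Boolean): ParentChoice =
  baseType match {
    case Some(b)             => DeclaredBase(b)
    case None if isInterface => ObjectParent
    case None                => AnyParent
  }

// e.g. chooseParent(None, isInterface = false) == AnyParent  (System.Object)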
- // parents (i.e., base type and interfaces) - val parents = new scala.collection.mutable.ListBuffer[Type]() - parents += superType - for (iface <- ifaces) { - parents += getCLRType(iface) // here the variance doesn't matter - } - // methods, properties, events, fields are entered in a moment - if (canBeTakenAddressOf) { - val instanceDefsBoxed = newScope - ClassInfoType(parents.toList, instanceDefsBoxed, clazzBoxed) - } else - ClassInfoType(parents.toList, instanceDefs, clazz) - } - - val staticInfo = ClassInfoType(List(), staticDefs, statics) - - clazz.setFlag(flags) - - if (canBeTakenAddressOf) { - clazzBoxed.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata - else genPolyType(ownTypeParams, classInfoAsInMetadata) ) - clazzBoxed.setFlag(flags) - val rawValueInfoType = ClassInfoType(definitions.anyvalparam, instanceDefs, clazz) - clazz.setInfo( if (ownTypeParams.isEmpty) rawValueInfoType - else genPolyType(ownTypeParams, rawValueInfoType) ) - } else { - clazz.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata - else genPolyType(ownTypeParams, classInfoAsInMetadata) ) - } - - // TODO I don't remember if statics.setInfo and staticModule.setInfo should also know about type params - statics.setFlag(Flags.JAVA) - statics.setInfo(staticInfo) - staticModule.setFlag(Flags.JAVA) - staticModule.setInfo(statics.tpe) - - - if (canBeTakenAddressOf) { - // implicit conversions are owned by staticModule.moduleClass - createViewFromTo("2Boxed", clazz.tpe, clazzBoxed.tpe, addToboxMethodMap = true, isAddressOf = false) - // createViewFromTo("2Object", clazz.tpe, definitions.ObjectClass.tpe, addToboxMethodMap = true, isAddressOf = false) - createViewFromTo("2MgdPtr", clazz.tpe, clazzMgdPtr.tpe, addToboxMethodMap = false, isAddressOf = true) - // a return can't have type managed-pointer, thus a dereference-conversion is not needed - // similarly, a method can't declare as return type "boxed valuetype" - if (!typ.IsEnum) { - // a synthetic default constructor for raw-type allows `new X' syntax - createDefaultConstructor(typ) - } - } - - // import nested types - for (ntype <- typ.getNestedTypes() if !(ntype.IsNestedPrivate || ntype.IsNestedAssembly || ntype.IsNestedFamANDAssem) - || ntype.IsInterface /* TODO why shouldn't nested ifaces be type-parsed too? */ ) - { - val loader = new loaders.MsilFileLoader(new MsilFile(ntype)) - val nclazz = statics.newClass(ntype.Name) - val nmodule = statics.newModule(ntype.Name) - nclazz.setInfo(loader) - nmodule.setInfo(loader) - staticDefs.enter(nclazz) - staticDefs.enter(nmodule) - - assert(nclazz.companionModule == nmodule, nmodule) - assert(nmodule.companionClass == nclazz, nclazz) - } - - val fields = typ.getFields() - for (field <- fields - if !(field.IsPrivate() || field.IsAssembly() || field.IsFamilyAndAssembly) - if (getCLRType(field.FieldType) != null) - ) { - assert (!field.FieldType.IsPointer && !field.FieldType.IsByRef, "CLR requirement") - val flags = translateAttributes(field); - val name = newTermName(field.Name); - val fieldType = - if (field.IsLiteral && !field.FieldType.IsEnum && isDefinedAtgetConstant(getCLRType(field.FieldType))) - ConstantType(getConstant(getCLRType(field.FieldType), field.getValue)) - else - getCLRType(field.FieldType) - val owner = if (field.IsStatic()) statics else clazz; - val sym = owner.newValue(name, NoPosition, flags).setInfo(fieldType); - // TODO: set private within!!! 
-> look at typechecker/Namers.scala - (if (field.IsStatic()) staticDefs else instanceDefs).enter(sym); - clrTypes.fields(sym) = field; - } - - for (constr <- typ.getConstructors() if !constr.IsStatic() && !constr.IsPrivate() && - !constr.IsAssembly() && !constr.IsFamilyAndAssembly() && !constr.HasPtrParamOrRetType()) - createMethod(constr); - - // initially also contains getters and setters of properties. - val methodsSet = new mutable.HashSet[MethodInfo](); - methodsSet ++= typ.getMethods(); - - for (prop <- typ.getProperties) { - val propType: Type = getCLSType(prop.PropertyType); - if (propType != null) { - val getter: MethodInfo = prop.GetGetMethod(true); - val setter: MethodInfo = prop.GetSetMethod(true); - var gparamsLength: Int = -1; - if (!(getter == null || getter.IsPrivate || getter.IsAssembly - || getter.IsFamilyAndAssembly || getter.HasPtrParamOrRetType)) - { - assert(prop.PropertyType == getter.ReturnType); - val gparams: Array[ParameterInfo] = getter.GetParameters(); - gparamsLength = gparams.length; - val name: TermName = if (gparamsLength == 0) prop.Name else nme.apply; - val flags = translateAttributes(getter); - val owner: Symbol = if (getter.IsStatic) statics else clazz; - val methodSym = owner.newMethod(name, NoPosition, flags) - val mtype: Type = if (gparamsLength == 0) NullaryMethodType(propType) // .NET properties can't be polymorphic - else methodType(getter, getter.ReturnType)(methodSym) - methodSym.setInfo(mtype); - methodSym.setFlag(Flags.ACCESSOR); - (if (getter.IsStatic) staticDefs else instanceDefs).enter(methodSym) - clrTypes.methods(methodSym) = getter; - methodsSet -= getter; - } - if (!(setter == null || setter.IsPrivate || setter.IsAssembly - || setter.IsFamilyAndAssembly || setter.HasPtrParamOrRetType)) - { - val sparams: Array[ParameterInfo] = setter.GetParameters() - if(getter != null) - assert(getter.IsStatic == setter.IsStatic); - assert(setter.ReturnType == clrTypes.VOID); - if(getter != null) - assert(sparams.length == gparamsLength + 1, "" + getter + "; " + setter); - - val name: TermName = if (gparamsLength == 0) nme.getterToSetter(prop.Name) - else nme.update; - val flags = translateAttributes(setter); - val mtype = methodType(setter, definitions.UnitClass.tpe); - val owner: Symbol = if (setter.IsStatic) statics else clazz; - val methodSym = owner.newMethod(name, NoPosition, flags) - methodSym.setInfo(mtype(methodSym)) - methodSym.setFlag(Flags.ACCESSOR); - (if (setter.IsStatic) staticDefs else instanceDefs).enter(methodSym); - clrTypes.methods(methodSym) = setter; - methodsSet -= setter; - } - } - } - -/* for (event <- typ.GetEvents) { - // adding += and -= methods to add delegates to an event. 
- // raising the event ist not possible from outside the class (this is so - // generally in .net world) - val adder: MethodInfo = event.GetAddMethod(); - val remover: MethodInfo = event.GetRemoveMethod(); - if (!(adder == null || adder.IsPrivate || adder.IsAssembly - || adder.IsFamilyAndAssembly)) - { - assert(adder.ReturnType == clrTypes.VOID); - assert(adder.GetParameters().map(_.ParameterType).toList == List(event.EventHandlerType)); - val name = encode("+="); - val flags = translateAttributes(adder); - val mtype: Type = methodType(adder, adder.ReturnType); - createMethod(name, flags, mtype, adder, adder.IsStatic) - methodsSet -= adder; - } - if (!(remover == null || remover.IsPrivate || remover.IsAssembly - || remover.IsFamilyAndAssembly)) - { - assert(remover.ReturnType == clrTypes.VOID); - assert(remover.GetParameters().map(_.ParameterType).toList == List(event.EventHandlerType)); - val name = encode("-="); - val flags = translateAttributes(remover); - val mtype: Type = methodType(remover, remover.ReturnType); - createMethod(name, flags, mtype, remover, remover.IsStatic) - methodsSet -= remover; - } - } */ - -/* Adds view amounting to syntax sugar for a CLR implicit overload. - The long-form syntax can also be supported if "methodsSet -= method" (last statement) is removed. - - /* remember, there's typ.getMethods and type.GetMethods */ - for (method <- typ.getMethods) - if(!method.HasPtrParamOrRetType && - method.IsPublic && method.IsStatic && method.IsSpecialName && - method.Name == "op_Implicit") { - // create a view: typ => method's return type - val viewRetType: Type = getCLRType(method.ReturnType) - val viewParamTypes: List[Type] = method.GetParameters().map(_.ParameterType).map(getCLSType).toList; - /* The spec says "The operator method shall be defined as a static method on either the operand or return type." - * We don't consider the declaring type for the purposes of definitions.functionType, - * instead we regard op_Implicit's argument type and return type as defining the view's signature. - */ - if (viewRetType != null && !viewParamTypes.contains(null)) { - /* The check above applies e.g. to System.Decimal that has a conversion from UInt16, a non-CLS type, whose CLS-mapping returns null */ - val funType: Type = definitions.functionType(viewParamTypes, viewRetType); - val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? shouldn't be final instead? 
- val viewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(viewParamTypes), funType) - val vmsym = createMethod(nme.view_, flags, viewMethodType, method, true); - methodsSet -= method; - } - } -*/ - - for (method <- methodsSet.iterator) - if (!method.IsPrivate() && !method.IsAssembly() && !method.IsFamilyAndAssembly() - && !method.HasPtrParamOrRetType) - createMethod(method); - - // Create methods and views for delegate support - if (clrTypes.isDelegateType(typ)) { - createDelegateView(typ) - createDelegateChainers(typ) - } - - // for enumerations introduce comparison and bitwise logical operations; - // the backend will recognize them and replace them with comparison or - // bitwise logical operations on the primitive underlying type - - if (typ.IsEnum) { - val ENUM_CMP_NAMES = List(nme.EQ, nme.NE, nme.LT, nme.LE, nme.GT, nme.GE); - val ENUM_BIT_LOG_NAMES = List(nme.OR, nme.AND, nme.XOR); - - val flags = Flags.JAVA | Flags.FINAL - for (cmpName <- ENUM_CMP_NAMES) { - val enumCmp = clazz.newMethod(cmpName) - val enumCmpType = JavaMethodType(enumCmp.newSyntheticValueParams(List(clazz.tpe)), definitions.BooleanClass.tpe) - enumCmp.setFlag(flags).setInfo(enumCmpType) - instanceDefs.enter(enumCmp) - } - - for (bitLogName <- ENUM_BIT_LOG_NAMES) { - val enumBitLog = clazz.newMethod(bitLogName) - val enumBitLogType = JavaMethodType(enumBitLog.newSyntheticValueParams(List(clazz.tpe)), clazz.tpe /* was classInfo, infinite typer */) - enumBitLog.setFlag(flags).setInfo(enumBitLogType) - instanceDefs.enter(enumBitLog) - } - } - - } // parseClass - - private def populateMethodTParams(method: MethodBase, methodSym: MethodSymbol) : List[Symbol] = { - if(!method.IsGeneric) Nil - else { - methodTParams.clear - val newMethodTParams = new scala.collection.mutable.ListBuffer[Symbol]() - - // first pass - for (mvarCILDef <- method.getSortedMVars() ) { - val mtpname = newTypeName(mvarCILDef.Name.replaceAll("!", "")) // TODO are really all method-level-type-params named in all assemblies out there? (NO) - val mtpsym = methodSym.newTypeParameter(mtpname) - methodTParams.put(mvarCILDef.Number, mtpsym) - newMethodTParams += mtpsym - // TODO wouldn't the following also be needed later, i.e. 
during getCLRType - mtpsym.setInfo(definitions.AnyClass.tpe) - } - // second pass - for (mvarCILDef <- method.getSortedMVars() ) { - val mtpsym = methodTParams(mvarCILDef.Number) - mtpsym.setInfo(sig2typeBounds(mvarCILDef)) // we never skip bounds unlike in forJVM - } - - newMethodTParams.toList - } - } - - private def createMethod(method: MethodBase) { - - val flags = translateAttributes(method); - val owner = if (method.IsStatic()) statics else clazz; - val methodSym = owner.newMethod(getName(method), NoPosition, flags) - /* START CLR generics (snippet 3) */ - val newMethodTParams = populateMethodTParams(method, methodSym) - /* END CLR generics (snippet 3) */ - - val rettype = if (method.IsConstructor()) clazz.tpe - else getCLSType(method.asInstanceOf[MethodInfo].ReturnType); - if (rettype == null) return; - val mtype = methodType(method, rettype); - if (mtype == null) return; -/* START CLR generics (snippet 4) */ - val mInfo = if (method.IsGeneric) genPolyType(newMethodTParams, mtype(methodSym)) - else mtype(methodSym) -/* END CLR generics (snippet 4) */ -/* START CLR non-generics (snippet 4) - val mInfo = mtype(methodSym) - END CLR non-generics (snippet 4) */ - methodSym.setInfo(mInfo) - (if (method.IsStatic()) staticDefs else instanceDefs).enter(methodSym); - if (method.IsConstructor()) - clrTypes.constructors(methodSym) = method.asInstanceOf[ConstructorInfo] - else clrTypes.methods(methodSym) = method.asInstanceOf[MethodInfo]; - } - - private def createMethod(name: TermName, flags: Long, args: Array[MSILType], retType: MSILType, method: MethodInfo, statik: Boolean): Symbol = { - val mtype = methodType(args, getCLSType(retType)) - assert(mtype != null) - createMethod(name, flags, mtype, method, statik) - } - - private def createMethod(name: TermName, flags: Long, mtype: Symbol => Type, method: MethodInfo, statik: Boolean): Symbol = { - val methodSym: Symbol = (if (statik) statics else clazz).newMethod(name) - methodSym.setFlag(flags).setInfo(mtype(methodSym)) - (if (statik) staticDefs else instanceDefs).enter(methodSym) - if (method != null) - clrTypes.methods(methodSym) = method - methodSym - } - - private def createDelegateView(typ: MSILType) = { - val invoke: MethodInfo = typ.GetMember("Invoke")(0).asInstanceOf[MethodInfo]; - val invokeRetType: Type = getCLRType(invoke.ReturnType); - val invokeParamTypes: List[Type] =invoke.GetParameters().map(_.ParameterType).map(getCLSType).toList; - val funType: Type = definitions.functionType(invokeParamTypes, invokeRetType); - - val typClrType: Type = getCLRType(typ); - val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? 
think not needed - - // create the forward view: delegate => function - val delegateParamTypes: List[Type] = List(typClrType); - // not ImplicitMethodType, this is for methods with implicit parameters (not implicit methods) - val forwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(delegateParamTypes), funType) - createMethod(nme.view_, flags, forwardViewMethodType, null, true); - - // create the backward view: function => delegate - val functionParamTypes: List[Type] = List(funType); - val backwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(functionParamTypes), typClrType) - createMethod(nme.view_, flags, backwardViewMethodType, null, true); - } - - private def createDelegateChainers(typ: MSILType) = { - val flags: Long = Flags.JAVA | Flags.FINAL - val args: Array[MSILType] = Array(typ) - - var s = createMethod(encode("+="), flags, args, clrTypes.VOID, clrTypes.DELEGATE_COMBINE, false); - s = createMethod(encode("-="), flags, args, clrTypes.VOID, clrTypes.DELEGATE_REMOVE, false); - - s = createMethod(nme.PLUS, flags, args, typ, clrTypes.DELEGATE_COMBINE, false); - s = createMethod(nme.MINUS, flags, args, typ, clrTypes.DELEGATE_REMOVE, false); - } - - private def getName(method: MethodBase): TermName = { - - def operatorOverload(name : String, paramsArity : Int) : Option[Name] = paramsArity match { - case 1 => name match { - // PartitionI.10.3.1 - case "op_Decrement" => Some(encode("--")) - case "op_Increment" => Some(encode("++")) - case "op_UnaryNegation" => Some(nme.UNARY_-) - case "op_UnaryPlus" => Some(nme.UNARY_+) - case "op_LogicalNot" => Some(nme.UNARY_!) - case "op_OnesComplement" => Some(nme.UNARY_~) - /* op_True and op_False have no operator symbol assigned, - Other methods that will have to be written in full are: - op_AddressOf & (unary) - op_PointerDereference * (unary) */ - case _ => None - } - case 2 => name match { - // PartitionI.10.3.2 - case "op_Addition" => Some(nme.ADD) - case "op_Subtraction" => Some(nme.SUB) - case "op_Multiply" => Some(nme.MUL) - case "op_Division" => Some(nme.DIV) - case "op_Modulus" => Some(nme.MOD) - case "op_ExclusiveOr" => Some(nme.XOR) - case "op_BitwiseAnd" => Some(nme.AND) - case "op_BitwiseOr" => Some(nme.OR) - case "op_LogicalAnd" => Some(nme.ZAND) - case "op_LogicalOr" => Some(nme.ZOR) - case "op_LeftShift" => Some(nme.LSL) - case "op_RightShift" => Some(nme.ASR) - case "op_Equality" => Some(nme.EQ) - case "op_GreaterThan" => Some(nme.GT) - case "op_LessThan" => Some(nme.LT) - case "op_Inequality" => Some(nme.NE) - case "op_GreaterThanOrEqual" => Some(nme.GE) - case "op_LessThanOrEqual" => Some(nme.LE) - - /* op_MemberSelection is reserved in Scala */ - - /* The standard does not assign operator symbols to op_Assign , op_SignedRightShift , op_UnsignedRightShift , - * and op_UnsignedRightShiftAssignment so those names will be used instead to invoke those methods. */ - - /* - The remaining binary operators are not overloaded in C# and are therefore not in widespread use. They have to be written in full. 
- - op_RightShiftAssignment >>= - op_MultiplicationAssignment *= - op_PointerToMemberSelection ->* - op_SubtractionAssignment -= - op_ExclusiveOrAssignment ^= - op_LeftShiftAssignment <<= - op_ModulusAssignment %= - op_AdditionAssignment += - op_BitwiseAndAssignment &= - op_BitwiseOrAssignment |= - op_Comma , - op_DivisionAssignment /= - */ - case _ => None - } - case _ => None - } - - if (method.IsConstructor()) return nme.CONSTRUCTOR; - val name = method.Name; - if (method.IsStatic()) { - if(method.IsSpecialName) { - val paramsArity = method.GetParameters().size - // handle operator overload, otherwise handle as any static method - val operName = operatorOverload(name, paramsArity) - if (operName.isDefined) { return operName.get; } - } - return newTermName(name); - } - val params = method.GetParameters(); - name match { - case "GetHashCode" if (params.length == 0) => nme.hashCode_; - case "ToString" if (params.length == 0) => nme.toString_; - case "Finalize" if (params.length == 0) => nme.finalize_; - case "Equals" if (params.length == 1 && params(0).ParameterType == clrTypes.OBJECT) => - nme.equals_; - case "Invoke" if (clrTypes.isDelegateType(method.DeclaringType)) => nme.apply; - case _ => newTermName(name); - } - } - - //########################################################################## - - private def methodType(method: MethodBase, rettype: MSILType): Symbol => Type = { - val rtype = getCLSType(rettype); - if (rtype == null) null else methodType(method, rtype); - } - - /** Return a method type for the given method. */ - private def methodType(method: MethodBase, rettype: Type): Symbol => Type = - methodType(method.GetParameters().map(_.ParameterType), rettype); - - /** Return a method type for the provided argument types and return type. 
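To make the special-name translation above concrete, here is a small standalone sketch (a hypothetical helper, not the parser's actual getName) mapping a few of the ECMA-335 binary operator names to the Scala operators they were surfaced as:

// CLR methods flagged IsSpecialName carry names like "op_Addition"; the
// importer exposed them under the matching Scala operator where one exists.
def binaryOperatorName(clrName: String): Option[String] = clrName match {
  case "op_Addition"           => Some("+")
  case "op_Subtraction"        => Some("-")
  case "op_Multiply"           => Some("*")
  case "op_Division"           => Some("/")
  case "op_Modulus"            => Some("%")
  case "op_Equality"           => Some("==")
  case "op_Inequality"         => Some("!=")
  case "op_LessThan"           => Some("<")
  case "op_GreaterThanOrEqual" => Some(">=")
  case _                       => None // e.g. op_Assign keeps its full name
}

// binaryOperatorName("op_Addition") == Some("+")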
*/ - private def methodType(argtypes: Array[MSILType], rettype: Type): Symbol => Type = { - def paramType(typ: MSILType): Type = - if (typ eq clrTypes.OBJECT) definitions.AnyClass.tpe // TODO a hack to compile scalalib, should be definitions.AnyRefClass.tpe - else getCLSType(typ); - val ptypes = argtypes.map(paramType).toList; - if (ptypes.contains(null)) null - else method => JavaMethodType(method.newSyntheticValueParams(ptypes), rettype); - } - - //########################################################################## - - private def getClassType(typ: MSILType): Type = { - assert(typ != null); - val res = rootMirror.getClassByName(typ.FullName.replace('+', '.') : TypeName).tpe; - //if (res.isError()) - // global.reporter.error("unknown class reference " + type.FullName); - res - } - - private def getCLSType(typ: MSILType): Type = { // getCLS returns non-null for types GenMSIL can handle, be they CLS-compliant or not - if (typ.IsTMVarUsage()) - /* START CLR generics (snippet 5) */ - getCLRType(typ) - /* END CLR generics (snippet 5) */ - /* START CLR non-generics (snippet 5) - null - END CLR non-generics (snippet 5) */ - else if ( /* TODO hack if UBYE, uncommented, "ambiguous reference to overloaded definition" ensues, for example for System.Math.Max(x, y) */ - typ == clrTypes.USHORT || typ == clrTypes.UINT || typ == clrTypes.ULONG - /* || typ == clrTypes.UBYTE */ - || typ.IsNotPublic() || typ.IsNestedPrivate() - || typ.IsNestedAssembly() || typ.IsNestedFamANDAssem() - || typ.IsPointer() - || (typ.IsArray() && getCLRType(typ.GetElementType()) == null) /* TODO hack: getCLR instead of getCLS */ - || (typ.IsByRef() && !typ.GetElementType().CanBeTakenAddressOf())) - null - else - getCLRType(typ) - } - - private def getCLRTypeIfPrimitiveNullOtherwise(typ: MSILType): Type = - if (typ == clrTypes.OBJECT) - definitions.ObjectClass.tpe; - else if (typ == clrTypes.VALUE_TYPE) - definitions.AnyValClass.tpe - else if (typ == clrTypes.STRING) - definitions.StringClass.tpe; - else if (typ == clrTypes.VOID) - definitions.UnitClass.tpe - else if (typ == clrTypes.BOOLEAN) - definitions.BooleanClass.tpe - else if (typ == clrTypes.CHAR) - definitions.CharClass.tpe - else if ((typ == clrTypes.BYTE) || (typ == clrTypes.UBYTE)) // TODO U... is a hack to compile scalalib - definitions.ByteClass.tpe - else if ((typ == clrTypes.SHORT) || (typ == clrTypes.SHORT)) // TODO U... is a hack to compile scalalib - definitions.ShortClass.tpe - else if ((typ == clrTypes.INT) || (typ == clrTypes.UINT)) // TODO U... is a hack to compile scalalib - definitions.IntClass.tpe - else if ((typ == clrTypes.LONG) || (typ == clrTypes.LONG)) // TODO U... 
is a hack to compile scalalib - definitions.LongClass.tpe - else if (typ == clrTypes.FLOAT) - definitions.FloatClass.tpe - else if (typ == clrTypes.DOUBLE) - definitions.DoubleClass.tpe - else null - - - private def getCLRType(tMSIL: MSILType): Type = { - var res = getCLRTypeIfPrimitiveNullOtherwise(tMSIL) - if (res != null) res - else if (tMSIL.isInstanceOf[ConstructedType]) { - val ct = tMSIL.asInstanceOf[ConstructedType] - /* START CLR generics (snippet 6) */ - val cttpArgs = ct.typeArgs.map(tmsil => getCLRType(tmsil)).toList - appliedType(getCLRType(ct.instantiatedType), cttpArgs) - /* END CLR generics (snippet 6) */ - /* START CLR non-generics (snippet 6) - getCLRType(ct.instantiatedType) - END CLR non-generics (snippet 6) */ - } else if (tMSIL.isInstanceOf[TMVarUsage]) { - /* START CLR generics (snippet 7) */ - val tVarUsage = tMSIL.asInstanceOf[TMVarUsage] - val tVarNumber = tVarUsage.Number - if (tVarUsage.isTVar) classTParams(tVarNumber).typeConstructor // shouldn't fail, just return definitions.AnyClass.tpe at worst - else methodTParams(tVarNumber).typeConstructor // shouldn't fail, just return definitions.AnyClass.tpe at worst - /* END CLR generics (snippet 7) */ - /* START CLR non-generics (snippet 7) - null // definitions.ObjectClass.tpe - END CLR non-generics (snippet 7) */ - } else if (tMSIL.IsArray()) { - var elemtp = getCLRType(tMSIL.GetElementType()) - // cut&pasted from ClassfileParser - // make unbounded Array[T] where T is a type variable into Array[T with Object] - // (this is necessary because such arrays have a representation which is incompatible - // with arrays of primitive types). - // TODO does that incompatibility also apply to .NET? - if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe)) - elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe)) - appliedType(definitions.ArrayClass.tpe, List(elemtp)) - } else { - res = clrTypes.sym2type.get(tMSIL) match { - case Some(sym) => sym.tpe - case None => if (tMSIL.IsByRef && tMSIL.GetElementType.IsValueType) { - val addressed = getCLRType(tMSIL.GetElementType) - val clasym = addressed.typeSymbolDirect // TODO should be .typeSymbol? - clasym.info.load(clasym) - val secondAttempt = clrTypes.sym2type.get(tMSIL) - secondAttempt match { case Some(sym) => sym.tpe - case None => null - } - } else getClassType(tMSIL) - } - if (res == null) - null // TODO new RuntimeException() - else res - } - } - - // the values are Java-Box-Classes (e.g. 
Integer, Boolean, Character) - // java.lang.Number to get the value (if a number, not for boolean, character) - // see ch.epfl.lamp.compiler.msil.util.PEStream.java - def getConstant(constType: Type, value: Object): Constant = { - val typeClass = constType.typeSymbol - if (typeClass == definitions.BooleanClass) - Constant(value.asInstanceOf[java.lang.Boolean].booleanValue) - else if (typeClass == definitions.ByteClass) - Constant(value.asInstanceOf[java.lang.Number].byteValue) - else if (typeClass == definitions.ShortClass) - Constant(value.asInstanceOf[java.lang.Number].shortValue) - else if (typeClass == definitions.CharClass) - Constant(value.asInstanceOf[java.lang.Character].charValue) - else if (typeClass == definitions.IntClass) - Constant(value.asInstanceOf[java.lang.Number].intValue) - else if (typeClass == definitions.LongClass) - Constant(value.asInstanceOf[java.lang.Number].longValue) - else if (typeClass == definitions.FloatClass) - Constant(value.asInstanceOf[java.lang.Number].floatValue) - else if (typeClass == definitions.DoubleClass) - Constant(value.asInstanceOf[java.lang.Number].doubleValue) - else if (typeClass == definitions.StringClass) - Constant(value.asInstanceOf[java.lang.String]) - else - abort("illegal value: " + value + ", class-symbol: " + typeClass) - } - - def isDefinedAtgetConstant(constType: Type): Boolean = { - val typeClass = constType.typeSymbol - if ( (typeClass == definitions.BooleanClass) - || (typeClass == definitions.ByteClass) - || (typeClass == definitions.ShortClass) - || (typeClass == definitions.CharClass) - || (typeClass == definitions.IntClass) - || (typeClass == definitions.LongClass) - || (typeClass == definitions.FloatClass) - || (typeClass == definitions.DoubleClass) - || (typeClass == definitions.StringClass) - ) - true - else - false - } - - private def translateAttributes(typ: MSILType): Long = { - var flags: Long = Flags.JAVA; - if (typ.IsNotPublic() || typ.IsNestedPrivate() - || typ.IsNestedAssembly() || typ.IsNestedFamANDAssem()) - flags = flags | Flags.PRIVATE; - else if (typ.IsNestedFamily() || typ.IsNestedFamORAssem()) - flags = flags | Flags.PROTECTED; - if (typ.IsAbstract()) - flags = flags | Flags.ABSTRACT; - if (typ.IsSealed()) - flags = flags | Flags.FINAL; - if (typ.IsInterface()) - flags = flags | Flags.INTERFACE | Flags.TRAIT | Flags.ABSTRACT; - - flags - } - - private def translateAttributes(field: FieldInfo): Long = { - var flags: Long = Flags.JAVA; - if (field.IsPrivate() || field.IsAssembly() || field.IsFamilyAndAssembly()) - flags = flags | Flags.PRIVATE; - else if (field.IsFamily() || field.IsFamilyOrAssembly()) - flags = flags | Flags.PROTECTED; - if (field.IsInitOnly() || field.IsLiteral()) - flags = flags | Flags.FINAL; - else - flags = flags | Flags.MUTABLE; - if (field.IsStatic) - flags = flags | Flags.STATIC - - flags - } - - private def translateAttributes(method: MethodBase): Long = { - var flags: Long = Flags.JAVA; - if (method.IsPrivate() || method.IsAssembly() || method.IsFamilyAndAssembly()) - flags = flags | Flags.PRIVATE; - else if (method.IsFamily() || method.IsFamilyOrAssembly()) - flags = flags | Flags.PROTECTED; - if (method.IsAbstract()) - flags = flags | Flags.DEFERRED; - if (method.IsStatic) - flags = flags | Flags.STATIC - - flags - } -} diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 847ca574a9..029eeab3e0 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ 
b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -69,12 +69,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL { case "mono-cache" => MONO_CACHE case "poly-cache" => POLY_CACHE } - - def shouldRewriteTry(tree: Try) = { - val sym = tree.tpe.typeSymbol - forMSIL && (sym != UnitClass) && (sym != NothingClass) - } - private def typedWithPos(pos: Position)(tree: Tree) = localTyper.typedPos(pos)(tree) @@ -543,10 +537,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL { * constructor. */ case Template(parents, self, body) => localTyper = typer.atOwner(tree, currentClass) - if (forMSIL) savingStatics( transformTemplate(tree) ) - else transformTemplate(tree) + transformTemplate(tree) - case Literal(c) if (c.tag == ClazzTag) && !forMSIL=> + case Literal(c) if c.tag == ClazzTag => val tpe = c.typeValue typedWithPos(tree.pos) { if (isPrimitiveValueClass(tpe.typeSymbol)) { @@ -559,24 +552,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL { else tree } - /* MSIL requires that the stack is empty at the end of a try-block. - * Hence, we here rewrite all try blocks with a result != {Unit, All} such that they - * store their result in a local variable. The catch blocks are adjusted as well. - * The try tree is subsituted by a block whose result expression is read of that variable. */ - case theTry @ Try(block, catches, finalizer) if shouldRewriteTry(theTry) => - def transformTry = { - val tpe = theTry.tpe.widen - val tempVar = currentOwner.newVariable(mkTerm(nme.EXCEPTION_RESULT_PREFIX), theTry.pos).setInfo(tpe) - def assignBlock(rhs: Tree) = super.transform(BLOCK(Ident(tempVar) === transform(rhs))) - - val newBlock = assignBlock(block) - val newCatches = for (CaseDef(pattern, guard, body) <- catches) yield - (CASE(super.transform(pattern)) IF (super.transform(guard))) ==> assignBlock(body) - val newTry = Try(newBlock, newCatches, super.transform(finalizer)) - - typedWithPos(theTry.pos)(BLOCK(VAL(tempVar) === EmptyTree, newTry, Ident(tempVar))) - } - transformTry /* * This transformation should identify Scala symbol invocations in the tree and replace them * with references to a static member. Also, whenever a class has at least a single symbol invocation diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index ac1cdd1f46..135660ed27 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -513,7 +513,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * - create a new method definition that also has a `self` parameter * (which comes first) Iuli: this position is assumed by tail call elimination * on a different receiver. Storing a new 'this' assumes it is located at - * index 0 in the local variable table. See 'STORE_THIS' and GenJVM/GenMSIL. + * index 0 in the local variable table. See 'STORE_THIS' and GenASM. 
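A source-level picture (hypothetical example code, not the transformer's tree-building logic) of the try rewriting that the removed shouldRewriteTry case performed for the MSIL backend, where a value-producing try had its result routed through a local variable so the evaluation stack was empty at the end of the block:

// Before the rewrite: the try expression's value is used directly.
def headOrDefault(xs: List[Int]): Int =
  try xs.head
  catch { case _: NoSuchElementException => -1 }

// After the (now removed) MSIL-only rewrite, expressed as source:
def headOrDefaultRewritten(xs: List[Int]): Int = {
  var exceptionResult: Int = 0                     // fresh temp, default value
  try { exceptionResult = xs.head }                // body stores its result
  catch { case _: NoSuchElementException => exceptionResult = -1 } // catches adjusted too
  exceptionResult                                  // block result reads the temp
}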
* - Map implementation class types in type-apply's to their interfaces * - Remove all fields in implementation classes */ diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index 6ab99eaec6..7cad2b3986 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -149,7 +149,7 @@ abstract class TailCalls extends Transform { def enclosingType = method.enclClass.typeOfThis def isEligible = method.isEffectivelyFinal // @tailrec annotation indicates mandatory transformation - def isMandatory = method.hasAnnotation(TailrecClass) && !forMSIL + def isMandatory = method.hasAnnotation(TailrecClass) def isTransformed = isEligible && accessed(label) def tailrecFailure() = unit.error(failPos, "could not optimize @tailrec annotated " + method + ": " + failReason) @@ -229,7 +229,6 @@ abstract class TailCalls extends Transform { } else if (!matchesTypeArgs) failHere("it is called recursively with different type arguments") else if (receiver == EmptyTree) rewriteTailCall(This(currentClass)) - else if (forMSIL) fail("it cannot be optimized on MSIL") else if (!receiverIsSame) failHere("it changes type of 'this' on a polymorphic recursive call") else rewriteTailCall(receiver) } diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 8ae9490dbe..65b9eb79b3 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -61,24 +61,6 @@ abstract class UnCurry extends InfoTransform // uncurry and uncurryType expand type aliases - /** Traverse tree omitting local method definitions. - * If a `return` is encountered, set `returnFound` to true. - * Used for MSIL only. - */ - private object lookForReturns extends Traverser { - var returnFound = false - override def traverse(tree: Tree): Unit = tree match { - case Return(_) => returnFound = true - case DefDef(_, _, _, _, _, _) => ; - case _ => super.traverse(tree) - } - def found(tree: Tree) = { - returnFound = false - traverse(tree) - returnFound - } - } - class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { private var needTryLift = false private var inPattern = false @@ -537,13 +519,6 @@ abstract class UnCurry extends InfoTransform finally needTryLift = saved } - /** A try or synchronized needs to be lifted anyway for MSIL if it contains - * return statements. These are disallowed in the CLR. By lifting - * such returns will be converted to throws. 
- */ - def shouldBeLiftedAnyway(tree: Tree) = false && // buggy, see #1981 - forMSIL && lookForReturns.found(tree) - /** Transform tree `t` to { def f = t; f } where `f` is a fresh name */ def liftTree(tree: Tree) = { @@ -618,13 +593,10 @@ abstract class UnCurry extends InfoTransform treeCopy.UnApply(tree, fn1, args1) case Apply(fn, args) => - if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head)) - transform(treeCopy.Apply(tree, fn, List(liftTree(args.head)))) - else - withNeedLift(true) { - val formals = fn.tpe.paramTypes - treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals))) - } + withNeedLift(true) { + val formals = fn.tpe.paramTypes + treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals))) + } case Assign(_: RefTree, _) => withNeedLift(true) { super.transform(tree) } @@ -643,7 +615,7 @@ abstract class UnCurry extends InfoTransform super.transform(tree) case Try(block, catches, finalizer) => - if (needTryLift || shouldBeLiftedAnyway(tree)) transform(liftTree(tree)) + if (needTryLift) transform(liftTree(tree)) else super.transform(tree) case CaseDef(pat, guard, body) => diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index e40d978e6d..f595aa7dc7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -470,9 +470,6 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { * Is also capable of detecting REPL and reusing its classloader. */ lazy val macroClassloader: ClassLoader = { - if (global.forMSIL) - throw new UnsupportedOperationException("Scala reflection not available on this platform") - val classpath = global.classPath.asURLs macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) val loader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader) diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 3ac5d388d3..8e2a1fc5dc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -201,8 +201,7 @@ trait MethodSynthesis { ) def beanAccessors(vd: ValDef): List[DerivedFromValDef] = { val setter = if (vd.mods.isMutable) List(BeanSetter(vd)) else Nil - if (forMSIL) Nil - else if (vd.symbol hasAnnotation BeanPropertyAttr) + if (vd.symbol hasAnnotation BeanPropertyAttr) BeanGetter(vd) :: setter else if (vd.symbol hasAnnotation BooleanBeanPropertyAttr) BooleanBeanGetter(vd) :: setter @@ -521,9 +520,6 @@ trait MethodSynthesis { } protected def enterBeans(tree: ValDef) { - if (forMSIL) - return - val ValDef(mods, name, _, _) = tree val beans = beanAccessorsFromNames(tree) if (beans.nonEmpty) { diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 0ae225ccee..16680c3f13 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1116,8 +1116,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s) def isScalaNumber(s: Symbol) = s isSubClass ScalaNumberClass - // test is behind a 
platform guard - def isJavaNumber(s: Symbol) = !forMSIL && (s isSubClass JavaNumberClass) + def isJavaNumber(s: Symbol) = s isSubClass JavaNumberClass // includes java.lang.Number if appropriate [SI-5779] def isAnyNumber(s: Symbol) = isScalaNumber(s) || isJavaNumber(s) def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s) @@ -1160,7 +1159,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans nonSensiblyNeq() } else if (isNumeric(receiver)) { - if (!isNumeric(actual) && !forMSIL) + if (!isNumeric(actual)) if (isUnit(actual) || isBoolean(actual) || !isMaybeValue(actual)) // 5 == "abc" nonSensiblyNeq() } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index e534e36a0d..8d606a8fd5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1130,13 +1130,6 @@ trait Typers extends Modes with Adaptations with Tags { // (14); the condition prevents chains of views debuglog("inferring view from " + tree.tpe + " to " + pt) val coercion = inferView(tree, tree.tpe, pt, true) - // convert forward views of delegate types into closures wrapped around - // the delegate's apply method (the "Invoke" method, which was translated into apply) - if (forMSIL && coercion != null && isCorrespondingDelegate(tree.tpe, pt)) { - val meth: Symbol = tree.tpe.member(nme.apply) - debuglog("replacing forward delegate view with: " + meth + ":" + meth.tpe) - return typed(Select(tree, meth), mode, pt) - } if (coercion != EmptyTree) { def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe if (settings.logImplicitConv.value) @@ -3037,26 +3030,6 @@ trait Typers extends Modes with Adaptations with Tags { case _ => tp } - // Replace the Delegate-Chainer methods += and -= with corresponding - // + and - calls, which are translated in the code generator into - // Combine and Remove - if (forMSIL) { - fun match { - case Select(qual, name) => - if (isSubType(qual.tpe, DelegateClass.tpe) - && (name == encode("+=") || name == encode("-="))) { - val n = if (name == encode("+=")) nme.PLUS else nme.MINUS - val f = Select(qual, n) - // the compiler thinks, the PLUS method takes only one argument, - // but he thinks it's an instance method -> still two ref's on the stack - // -> translated by backend - val rhs = treeCopy.Apply(tree, f, args) - return typed(Assign(qual, rhs)) - } - case _ => () - } - } - /** * This is translating uses of List() into Nil. 
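To illustrate what the removed delegate-chainer rule in Typers did, a rough source-level analogue (hypothetical example using plain Scala function values, since CLR delegate types are not available here): a call d += h on a delegate-typed var was retyped as the assignment d = d + h, which the backend then emitted as Delegate.Combine.

// Analogue with an ordinary function-typed var instead of a CLR delegate.
object DelegateChainSketch {
  def combine(a: Int => Unit, b: Int => Unit): Int => Unit = x => { a(x); b(x) }

  def main(args: Array[String]): Unit = {
    var handler: Int => Unit = _ => ()
    val log: Int => Unit = x => println(s"got $x")
    // the removed rule rewrote  handler += log  into something like:
    handler = combine(handler, log)
    handler(1) // prints "got 1"
  }
}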
This is less * than ideal from a consistency standpoint, but it shouldn't be @@ -4162,24 +4135,7 @@ trait Typers extends Modes with Adaptations with Tags { else adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType)) case MethodType(formals, _) => if (isFunctionType(pt)) expr1 - else expr1 match { - case Select(qual, name) if (forMSIL && - pt != WildcardType && - pt != ErrorType && - isSubType(pt, DelegateClass.tpe)) => - val scalaCaller = newScalaCaller(pt) - addScalaCallerInfo(scalaCaller, expr1.symbol) - val n: Name = scalaCaller.name - val del = Ident(DelegateClass) setType DelegateClass.tpe - val f = Select(del, n) - //val f1 = TypeApply(f, List(Ident(pt.symbol) setType pt)) - val args: List[Tree] = if(expr1.symbol.isStatic) List(Literal(Constant(null))) - else List(qual) // where the scala-method is located - val rhs = Apply(f, args) - typed(rhs) - case _ => - adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType)) - } + else adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType)) case ErrorType => expr1 case _ => diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index ee66801c45..cbf6ef69d7 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -184,8 +184,7 @@ abstract class ClassPath[T] { def sourcepaths: IndexedSeq[AbstractFile] /** - * Represents classes which can be loaded with a ClassfileLoader/MsilFileLoader - * and / or a SourcefileLoader. + * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader. */ case class ClassRep(binary: Option[T], source: Option[AbstractFile]) { def name: String = binary match { diff --git a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala deleted file mode 100644 index 2f209c550d..0000000000 --- a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala +++ /dev/null @@ -1,166 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2006-2013 LAMP/EPFL - * @author Martin Odersky - */ - -// $Id$ - -package scala.tools.nsc -package util - -import scala.util.Sorting -import scala.collection.mutable -import scala.tools.nsc.io.{ AbstractFile, MsilFile } -import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Assembly } -import ClassPath.{ ClassPathContext, isTraitImplementation } - -/** Keeping the MSIL classpath code in its own file is important to make sure - * we don't accidentally introduce a dependency on msil.jar in the jvm. - */ - -object MsilClassPath { - def collectTypes(assemFile: AbstractFile) = { - var res: Array[MSILType] = MSILType.EmptyTypes - val assem = Assembly.LoadFrom(assemFile.path) - if (assem != null) { - // DeclaringType == null: true for non-inner classes - res = assem.GetTypes() filter (_.DeclaringType == null) - Sorting.stableSort(res, (t1: MSILType, t2: MSILType) => (t1.FullName compareTo t2.FullName) < 0) - } - res - } - - /** On the java side this logic is in PathResolver, but as I'm not really - * up to folding MSIL into that, I am encapsulating it here. 
- */ - def fromSettings(settings: Settings): MsilClassPath = { - val context = - if (settings.inline.value) new MsilContext - else new MsilContext { override def isValidName(name: String) = !isTraitImplementation(name) } - - import settings._ - new MsilClassPath(assemextdirs.value, assemrefs.value, sourcepath.value, context) - } - - class MsilContext extends ClassPathContext[MsilFile] { - def toBinaryName(rep: MsilFile) = rep.msilType.Name - def newClassPath(assemFile: AbstractFile) = new AssemblyClassPath(MsilClassPath collectTypes assemFile, "", this) - } - - private def assembleEntries(ext: String, user: String, source: String, context: MsilContext): List[ClassPath[MsilFile]] = { - import ClassPath._ - val etr = new mutable.ListBuffer[ClassPath[MsilFile]] - val names = new mutable.HashSet[String] - - // 1. Assemblies from -Xassem-extdirs - for (dirName <- expandPath(ext, expandStar = false)) { - val dir = AbstractFile.getDirectory(dirName) - if (dir ne null) { - for (file <- dir) { - val name = file.name.toLowerCase - if (name.endsWith(".dll") || name.endsWith(".exe")) { - names += name - etr += context.newClassPath(file) - } - } - } - } - - // 2. Assemblies from -Xassem-path - for (fileName <- expandPath(user, expandStar = false)) { - val file = AbstractFile.getFile(fileName) - if (file ne null) { - val name = file.name.toLowerCase - if (name.endsWith(".dll") || name.endsWith(".exe")) { - names += name - etr += context.newClassPath(file) - } - } - } - - def check(n: String) { - if (!names.contains(n)) - throw new AssertionError("Cannot find assembly "+ n + - ". Use -Xassem-extdirs or -Xassem-path to specify its location") - } - check("mscorlib.dll") - check("scalaruntime.dll") - - // 3. Source path - for (dirName <- expandPath(source, expandStar = false)) { - val file = AbstractFile.getDirectory(dirName) - if (file ne null) etr += new SourcePath[MsilFile](file, context) - } - - etr.toList - } -} -import MsilClassPath._ - -/** - * A assembly file (dll / exe) containing classes and namespaces - */ -class AssemblyClassPath(types: Array[MSILType], namespace: String, val context: MsilContext) extends ClassPath[MsilFile] { - def name = { - val i = namespace.lastIndexOf('.') - if (i < 0) namespace - else namespace drop (i + 1) - } - def asURLs = List(new java.net.URL(name)) - def asClasspathString = sys.error("Unknown") // I don't know what if anything makes sense here? - - private lazy val first: Int = { - var m = 0 - var n = types.length - 1 - while (m < n) { - val l = (m + n) / 2 - val res = types(l).FullName.compareTo(namespace) - if (res < 0) m = l + 1 - else n = l - } - if (types(m).FullName.startsWith(namespace)) m else types.length - } - - lazy val classes = { - val cls = new mutable.ListBuffer[ClassRep] - var i = first - while (i < types.length && types(i).Namespace.startsWith(namespace)) { - // CLRTypes used to exclude java.lang.Object and java.lang.String (no idea why..) - if (types(i).Namespace == namespace) - cls += ClassRep(Some(new MsilFile(types(i))), None) - i += 1 - } - cls.toIndexedSeq - } - - lazy val packages = { - val nsSet = new mutable.HashSet[String] - var i = first - while (i < types.length && types(i).Namespace.startsWith(namespace)) { - val subns = types(i).Namespace - if (subns.length > namespace.length) { - // example: namespace = "System", subns = "System.Reflection.Emit" - // => find second "." and "System.Reflection" to nsSet. 
- val end = subns.indexOf('.', namespace.length + 1) - nsSet += (if (end < 0) subns - else subns.substring(0, end)) - } - i += 1 - } - val xs = for (ns <- nsSet.toList) - yield new AssemblyClassPath(types, ns, context) - - xs.toIndexedSeq - } - - val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq() - - override def toString() = "assembly classpath "+ namespace -} - -/** - * The classpath when compiling with target:msil. Binary files are represented as - * MSILType values. - */ -class MsilClassPath(ext: String, user: String, source: String, context: MsilContext) -extends MergedClassPath[MsilFile](MsilClassPath.assembleEntries(ext, user, source, context), context) { } diff --git a/src/intellij/compiler.iml.SAMPLE b/src/intellij/compiler.iml.SAMPLE index 696c347b7b..0fcc9cbc16 100644 --- a/src/intellij/compiler.iml.SAMPLE +++ b/src/intellij/compiler.iml.SAMPLE @@ -21,7 +21,6 @@ - diff --git a/src/intellij/msil.iml.SAMPLE b/src/intellij/msil.iml.SAMPLE deleted file mode 100644 index 56f794785f..0000000000 --- a/src/intellij/msil.iml.SAMPLE +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - - - - - - - - - - - - diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE index 37307c2029..130a676508 100644 --- a/src/intellij/scala-lang.ipr.SAMPLE +++ b/src/intellij/scala-lang.ipr.SAMPLE @@ -202,7 +202,6 @@ - diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE index 112fec428f..668ddcc356 100644 --- a/src/intellij/test.iml.SAMPLE +++ b/src/intellij/test.iml.SAMPLE @@ -14,7 +14,6 @@ - diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala index 13b1fd58e0..1c0c7c4a96 100644 --- a/src/manual/scala/man1/scalac.scala +++ b/src/manual/scala/man1/scalac.scala @@ -145,13 +145,6 @@ object scalac extends Command { Definition( CmdOption("sourcepath", Argument("path")), "Specify location(s) of source files."), - Definition( - CmdOptionBound("target:", Argument("target")), - SeqPara( - "Specify which backend to use (" & Mono("jvm-1.5," & - "msil") & ").", - "The default value is " & Mono("\"jvm-1.5\"") & " (was " & - Mono("\"jvm-1.4\"") & " up to Scala version 2.6.1).")), Definition( CmdOption("toolcp", Argument("path")), "Add to the runner classpath."), @@ -181,19 +174,6 @@ object scalac extends Command { Section("Advanced Options", DefinitionList( - Definition( - CmdOption("Xassem-extdirs", Argument("dirs")), - "(Requires " & Mono("-target:msil") & - ") List of directories containing assemblies." 
& - " default:" & Mono("lib") & "."), - Definition( - CmdOption("Xassem-name", Argument("file")), - "(Requires " & Mono("-target:msil") & - ") Name of the output assembly."), - Definition( - CmdOption("Xassem-path", Argument("path")), - "(Requires " & Mono("-target:msil") & - ") List of assemblies referenced by the program."), Definition( CmdOption("Xcheck-null"), "Warn upon selection of nullable reference"), @@ -289,10 +269,6 @@ object scalac extends Command { Definition( CmdOption("Xsource-reader", Argument("classname")), "Specify a custom method for reading source files."), - Definition( - CmdOption("Xsourcedir", Argument("path")), - "(Requires " & Mono("-target:msil") & - ") Mirror source folder structure in output directory.."), Definition( CmdOption("Xverify"), "Verify generic signatures in generated bytecode."), diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java b/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java deleted file mode 100644 index 59bbeee3a4..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import ch.epfl.lamp.compiler.msil.util.Table; -import ch.epfl.lamp.compiler.msil.util.Table.AssemblyDef; -import ch.epfl.lamp.compiler.msil.util.Table.ModuleDef; - -import java.util.HashMap; -import java.util.Iterator; -import java.io.File; -import java.io.FileNotFoundException; - -/** - * Defines an Assembly, which is a reusable, versionable, and self-describing - * building block of a common language runtime application. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public abstract class Assembly extends CustomAttributeProvider { - - //########################################################################## - // static members - - // all the assemblies - public static final HashMap assemblies = new HashMap(); - - /** Loads an assembly from the specified path. */ - public static Assembly LoadFrom(String assemblyFileName) { - File afile = new File(assemblyFileName); - return LoadFrom(afile.getParentFile(), afile.getName()); - } - - /** Loads an assembly with the given name from the given directory. */ - public static Assembly LoadFrom(File dir, String name) { - File file = null; - PEFile pefile = null; -// try { -// if (dir == null) -// dir = new File("."); -// dir = dir.getCanonicalFile(); -// } catch (java.io.IOException e) {} - - if (name.toUpperCase().endsWith(".EXE") || name.toUpperCase().endsWith(".DLL")) { - file = new File(dir, name); - pefile = getPEFile(file); - name = name.substring(0, name.length() - 4); - } - - File adir = pefile == null ? 
new File(dir, name) : null; - - if (pefile == null) { - file = new File(dir, name + ".dll"); - pefile = getPEFile(file); - } - if (pefile == null) { - file = new File(dir, name + ".DLL"); - pefile = getPEFile(file); - } - if (pefile == null && adir.exists()) { - file = new File(adir, name + ".dll"); - pefile = getPEFile(file); - } - if (pefile == null && adir.exists()) { - file = new File(adir, name + ".DLL"); - pefile = getPEFile(file); - } - - if (pefile == null) { - file = new File(dir, name + ".exe"); - pefile = getPEFile(file); - } - if (pefile == null) { - file = new File(dir, name + ".EXE"); - pefile = getPEFile(file); - } - if (pefile == null && adir.exists()) { - file = new File(adir, name + ".exe"); - pefile = getPEFile(file); - } - if (pefile == null && adir.exists()) { - file = new File(adir, name + ".EXE"); - pefile = getPEFile(file); - } - - if (pefile == null) - throw new RuntimeException("Cannot find assembly " + new File(dir, name)); - return getPEAssembly(pefile); - } - - private static Assembly getPEAssembly(PEFile pefile) { - AssemblyDef assem = pefile.AssemblyDef; - if (assem == null) - throw new RuntimeException("File " + pefile - + " does not contain a manifest"); - assem.readRow(1); - String name = pefile.getString(assem.Name); - Assembly a = (Assembly) assemblies.get(name); - if (a != null) { - return a; - } - - AssemblyName an = new AssemblyName(); - an.Name = pefile.getString(assem.Name); - an.Version = new Version(assem.MajorVersion, assem.MinorVersion, - assem.BuildNumber, assem.RevisionNumber); - an.SetPublicKey(pefile.getBlob(assem.PublicKey)); - return new PEAssembly(pefile, an); - } - - protected static PEFile getPEFile(File f) { - PEFile pefile = null; - try { pefile = new PEFile(f.getAbsolutePath()); } - catch (FileNotFoundException e) {} - catch (RuntimeException e) { - java.lang.System.out.println("swallowed RuntimeException at getPEFile"); - } - return pefile; - } - - //########################################################################## - // public fields - - /** The entry point of this assembly. */ - public MethodInfo EntryPoint; - - /** the display name of the assembly. */ - public final String FullName; - - //########################################################################## - // constructor - - protected Assembly(AssemblyName an, boolean external) { - assemblyName = an; - FullName = an.toString(); - if(external) { - assemblies.put(an.Name, this); - } - //System.out.println("assemblies after adding the current one: " + assemblies); - } - - protected Assembly(AssemblyName an) { - this(an, false); - } - - protected static Assembly getAssembly(String name) { - return (Assembly) assemblies.get(name); - } - - //########################################################################## - // instrumental methods - - /** @return the file from which this assembly was loaded. */ - public File getFile() { - throw new RuntimeException("Not supported"); - } - - /** Gets the specified module in this assembly. Works on filenames. */ - public Module GetModule(String name) { - initModules(); - return (Module)modulesMap.get(name); - } - - /** Get all the modules of the assembly. */ - public Module[] GetModules() { - initModules(); - return (Module[])modulesMap.values(). - toArray(new Module[modulesMap.size()]); - } - - /** Get the corresponding type. 
*/ - public Type GetType(String name) { - initModules(); - Iterator modules = modulesMap.values().iterator(); - Type t = null; - while (t == null && modules.hasNext()) { - t = ((Module)modules.next()).GetType(name); - } - return t; - } - - /** @return an array of all types defined in the assembly. */ - public synchronized Type[] GetTypes() { - if (types != null) - return (Type[])types.clone(); - initModules(); - - Iterator modules = modulesMap.values().iterator(); - Type[] newTypes = ((Module)modules.next()).GetTypes(); - while (modules.hasNext()) { - Module module = (Module)modules.next(); - Type[] mtypes = module.GetTypes(); - Type[] oldTypes = newTypes; - newTypes = new Type[oldTypes.length + mtypes.length]; - System.arraycopy(oldTypes, 0, newTypes, 0, oldTypes.length); - System.arraycopy(mtypes, 0, newTypes, oldTypes.length, mtypes.length); - } - types = newTypes; - return (Type[]) types.clone(); - } - - public AssemblyName GetName() { - return assemblyName; - } - - public String toString() { - return FullName; - } - - //########################################################################## - // protected members - - // the assembly name - protected final AssemblyName assemblyName; - - // all the types exported by the assembly - protected Type[] types = null; - - // the module defined in this assembly (only one right now) - private final HashMap/**/ modulesMap = new HashMap(); - - protected void addType(Type type) { - Type.addType(type); - } - - protected void addModule(String name, Module module) { - modulesMap.put(name, module); - } - - private boolean initModules = true; - protected final void initModules() { - if (initModules) { - loadModules(); - initModules = false; - } - } - - /** used for lazy construction of the Assembly. */ - protected abstract void loadModules(); - - void dumpTypes() { - Type[] types = GetTypes(); - for (int i = 0; i < types.length; i++) - System.out.println(types[i]); - } - - //########################################################################## - -} // class Assembly diff --git a/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java b/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java deleted file mode 100644 index acdcb32e33..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import javax.crypto.Mac; - -import java.security.MessageDigest; - -import ch.epfl.lamp.compiler.msil.util.Table; - -/** - * Fully describes an assembly's unique identity. - * Right now it's only the name - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public class AssemblyName { - - //########################################################################## - // public interface - - /** The simple, unencrypted name of the assembly. */ - public String Name; - - /** - * Gets or sets the major, minor, revision, and build numbers - * of the assembly. - */ - public Version Version; - - /** - * Gets a strong name consisting of a public key, a given name, - * and version parts. - */ - public byte[] GetPublicKeyToken() { - return publicKeyToken == null ? null : (byte[]) publicKeyToken.clone(); - } - - /** - * Sets a strong name consisting of a public key, a given name, - * and version parts. - */ - public void SetPublicKeyToken(byte[] key) { - this.publicKeyToken = key.length == 0 ? null : (byte[]) key.clone(); - } - - /** - * Returns the public key identifying the originator of the assembly. 
- */ - public byte[] GetPublicKey() { - return publicKey == null ? null : (byte[]) publicKey.clone(); - } - - /** - * Sets the public key identifying the originator of the assembly. - */ - public void SetPublicKey(byte[] key) { - if (key.length > 0) { - this.publicKey = (byte[]) key.clone(); - byte[] hash = sha.digest(key); - byte[] keyToken = new byte[8]; - for (int i = 0; i < keyToken.length; i++) - keyToken[i] = hash[hash.length - 1 - i]; - this.publicKeyToken = keyToken; - //System.out.println("Pubic key and key token of assembly " + this + ":"); - //System.out.println("\tPublic key = " + Table.bytes2hex(key)); - //System.out.println("\tKey token = " + Table.bytes2hex(keyToken)); - } - } - - public String toString() { - return Name + ", Version=" + Version; - } - - //########################################################################## - - private byte[] publicKeyToken; - - private byte[] publicKey; - - private static final MessageDigest sha; - static { - MessageDigest md = null; - try { - md = MessageDigest.getInstance("SHA"); - } catch (java.security.NoSuchAlgorithmException e) {} - sha = md; - } - - //########################################################################## - -} // class AssemblyName diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java b/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java deleted file mode 100644 index 0f2c4e6764..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java +++ /dev/null @@ -1,654 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import ch.epfl.lamp.compiler.msil.util.Signature; - -import java.util.Map; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Iterator; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.io.UnsupportedEncodingException; - -/** - * Describes custom attribute instances. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public class Attribute { - - //########################################################################## - - private final ConstructorInfo constr; - - private final byte[] value; - - Attribute(ConstructorInfo constr, byte[] value) { - assert constr != null; - this.constr = constr; - assert value != null : constr.toString(); - this.value = value; - } - - //########################################################################## - // public interface - - /** @return the type (class) of the attribute. */ - public Type GetType() { return constr.DeclaringType; } - - /** @return the constructor of this attribute. */ - public ConstructorInfo getConstructor() { - return constr; - } - - /** @return the Blob with serialized constructor & named arguments. */ - public byte[] getValue() { - byte[] value = new byte[this.value.length]; - System.arraycopy(this.value, 0, value, 0, value.length); - return value; - } - - /**@return an array with the arguments to the attribute's constructor. */ - public Object[] getConstructorArguments() { - parseBlob(); - Object[] cas = new Object[constrArgs.length]; - System.arraycopy(constrArgs, 0, cas, 0, cas.length); - return cas; - } - - /** @return the named argument with the given name. */ - public NamedArgument getNamedArgument(String name) { - return (NamedArgument)namedArgs.get(name); - } - - /** @return an array of all named arguments for this attribute. 
*/ - public NamedArgument[] getNamedArguments() { - NamedArgument[] nargs = - (NamedArgument[])namedArgs.values().toArray(NamedArgument.EMPTY); - return nargs; - } - - /** @return a string representation of this attribute. */ - public String toString() { - parseBlob(); - ParameterInfo[] params = constr.GetParameters(); - assert params.length == constrArgs.length : this.constr; - StringBuffer str = new StringBuffer(); - str.append('['); - str.append(constr.DeclaringType.FullName); - str.append('('); - for (int i = 0; i < constrArgs.length; i++) { - if (i > 0) - str.append(", "); - Type t = params[i].ParameterType; - if (t.IsEnum()) { - str.append('('); - str.append(t.FullName); - str.append(')'); - } - formatValue(str, constrArgs[i]); - } - NamedArgument[] nargs = getNamedArguments(); - for (int i = 0; i < nargs.length; i++) { - str.append(", ").append(nargs[i]); - } - str.append(")]"); - return str.toString(); - } - - //######################################################################### - - private static final Map type2id = new HashMap(); - private static final Map id2type = new HashMap(); - static { - map("Boolean", Signature.ELEMENT_TYPE_BOOLEAN); - map("Char", Signature.ELEMENT_TYPE_CHAR); - map("SByte", Signature.ELEMENT_TYPE_I1); - map("Byte", Signature.ELEMENT_TYPE_U1); - map("Int16", Signature.ELEMENT_TYPE_I2); - map("UInt16", Signature.ELEMENT_TYPE_U2); - map("Int32", Signature.ELEMENT_TYPE_I4); - map("UInt32", Signature.ELEMENT_TYPE_U4); - map("Int64", Signature.ELEMENT_TYPE_I8); - map("UInt64", Signature.ELEMENT_TYPE_U8); - map("Single", Signature.ELEMENT_TYPE_R4); - map("Double", Signature.ELEMENT_TYPE_R8); - map("String", Signature.ELEMENT_TYPE_STRING); - map("Type", Signature.X_ELEMENT_TYPE_TYPE); - map("Object", Signature.ELEMENT_TYPE_OBJECT); - } - private static void map(String type, int id) { - Type t = Type.GetType("System." + type); - assert type != null : type + " -> " + id; - Integer i = new Integer(id); - type2id.put(t, i); - id2type.put(i, t); - } - private static int getTypeId(Type type) { - Integer id = (Integer)type2id.get(type); - assert id != null : type; - return id.intValue(); - } - - private Object[] constrArgs; - private Map namedArgs; - private ByteBuffer buf; - - private void parseBlob() { - try { parseBlob0(); } - catch (RuntimeException e) { - throw new RuntimeException(PEFile.bytes2hex(value), e); - } - } - - private void parseBlob0() { - if (buf != null) - return; - buf = ByteBuffer.wrap(value); // Sec. 23.3 in Partition II of CLR Spec. 
- buf.order(ByteOrder.LITTLE_ENDIAN); - - short sig = buf.getShort(); // Prolog - assert sig == 1 : PEFile.bytes2hex(value); - ParameterInfo[] params = constr.GetParameters(); - constrArgs = new Object[params.length]; - for (int i = 0; i < params.length; i++) { - constrArgs[i] = parseFixedArg(params[i].ParameterType); // FixedArg - } - - int ncount = buf.getShort(); // NumNamed - namedArgs = new LinkedHashMap(); - for (int i = 0; i < ncount; i++) { - int designator = buf.get(); // designator one of 0x53 (FIELD) or 0x54 (PROPERTY) - assert designator == Signature.X_ELEMENT_KIND_FIELD - || designator == Signature.X_ELEMENT_KIND_PROPERTY - : "0x" + PEFile.byte2hex(designator); - Type type = parseFieldOrPropTypeInNamedArg(); // FieldOrPropType - String name = parseString(); // FieldOrPropName - Object value = parseFixedArg(type); // FixedArg - NamedArgument narg = - new NamedArgument(designator, name, type, value); - namedArgs.put(name, narg); - } - } - - private Object parseFixedArg(Type type) { - if (type.IsArray()) - return parseArray(type.GetElementType()); - else - return parseElem(type); - } - - /* indicates whether the "simple" case (the other is "enum") of the first row - in the Elem production should be taken. */ - private boolean isSimpleElem(Type type) { - if(!type2id.containsKey(type)) return false; - int id = getTypeId(type); - switch(id){ - case Signature.ELEMENT_TYPE_STRING: - case Signature.X_ELEMENT_TYPE_TYPE: - case Signature.ELEMENT_TYPE_OBJECT: - return false; - default: - return true; - } - } - - /* indicates whether the second row in the Elem production - should be taken (and more specifically, "string" case within that row). */ - private boolean isStringElem(Type type) { - if(!type2id.containsKey(type)) return false; - int id = getTypeId(type); - return id == Signature.ELEMENT_TYPE_STRING; - } - - /* indicates whether the second row in the Elem production - should be taken (and more specifically, "type" case within that row). */ - private boolean isTypeElem(Type type) { - if(!type2id.containsKey(type)) return false; - int id = getTypeId(type); - return id == Signature.X_ELEMENT_TYPE_TYPE; - } - - /* indicates whether the third row in the Elem production - should be taken (and more specifically, "boxed" case within that row). */ - private boolean isSystemObject(Type type) { - if(!type2id.containsKey(type)) return false; - int id = getTypeId(type); - return id == Signature.ELEMENT_TYPE_OBJECT; - } - - private Object parseElem(Type type) { - // simple or enum - if (isSimpleElem(type)) return parseVal(getTypeId(type)); - if (type.IsEnum()) return parseVal(getTypeId(type.getUnderlyingType())); - // string or type - if (isStringElem(type)) return parseString(); - if (isTypeElem(type)) return getTypeFromSerString(); - // boxed valuetype, please notice that a "simple" boxed valuetype is preceded by 0x51 - if (isSystemObject(type)) { - Type boxedT = parse0x51(); - if(boxedT.IsEnum()) { - return new BoxedArgument(boxedT, parseVal(getTypeId(boxedT.getUnderlyingType()))); - } else { - return new BoxedArgument(boxedT, parseVal(getTypeId(boxedT))); // TODO dead code? - } - } else { - Type boxedT = parseType(); - return parseVal(getTypeId(boxedT)); - } - } - - /* this does not parse an Elem, but a made-up production (Element). Don't read too much into this method name! */ - private Object parseVal(int id) { - switch (id) { - case Signature.ELEMENT_TYPE_BOOLEAN: - return new Boolean(buf.get() == 0 ? 
false : true); - case Signature.ELEMENT_TYPE_CHAR: - return new Character(buf.getChar()); - case Signature.ELEMENT_TYPE_I1: - case Signature.ELEMENT_TYPE_U1: - return new Byte(buf.get()); // TODO U1 not the same as I1 - case Signature.ELEMENT_TYPE_I2: - case Signature.ELEMENT_TYPE_U2: - return new Short(buf.getShort()); // TODO U2 not the same as I2 - case Signature.ELEMENT_TYPE_I4: - case Signature.ELEMENT_TYPE_U4: - return new Integer(buf.getInt()); // TODO U4 not the same as I4 - case Signature.ELEMENT_TYPE_I8: - case Signature.ELEMENT_TYPE_U8: - return new Long(buf.getLong()); // TODO U8 not the same as I8 - case Signature.ELEMENT_TYPE_R4: - return new Float(buf.getFloat()); - case Signature.ELEMENT_TYPE_R8: - return new Double(buf.getDouble()); - case Signature.X_ELEMENT_TYPE_TYPE: - return getTypeFromSerString(); - case Signature.ELEMENT_TYPE_STRING: - return parseString(); - default: - throw new RuntimeException("Shouldn't have called parseVal with: " + id); - } - } - - private Object parseArray(Type type) { - if (type.IsEnum()) - return parseArray(type.getUnderlyingType()); - return parseArray(getTypeId(type)); - } - - private Object parseArray(int id) { - switch (id) { - case Signature.ELEMENT_TYPE_BOOLEAN: - return parseBooleanArray(); - case Signature.ELEMENT_TYPE_CHAR: - return parseCharArray(); - case Signature.ELEMENT_TYPE_I1: - case Signature.ELEMENT_TYPE_U1: // TODO U1 not the same as I1 - return parseByteArray(); - case Signature.ELEMENT_TYPE_I2: - case Signature.ELEMENT_TYPE_U2: - return parseShortArray(); - case Signature.ELEMENT_TYPE_I4: - case Signature.ELEMENT_TYPE_U4: - return parseIntArray(); - case Signature.ELEMENT_TYPE_I8: - case Signature.ELEMENT_TYPE_U8: - return parseLongArray(); - case Signature.ELEMENT_TYPE_R4: - return parseFloatArray(); - case Signature.ELEMENT_TYPE_R8: - return parseDoubleArray(); - case Signature.ELEMENT_TYPE_STRING: - return parseStringArray(); - case Signature.X_ELEMENT_TYPE_ENUM: - return parseArray(getTypeFromSerString()); - default: - throw new RuntimeException("Unknown type id: " + id); - } - } - - private Type parseType() { // FieldOrPropType, Sec. 23.3 in Partition II of CLR Spec. - int id = buf.get(); - switch (id) { - case Signature.ELEMENT_TYPE_SZARRAY: - Type arrT = Type.mkArray(parseType(), 1); - return arrT; - case Signature.X_ELEMENT_TYPE_ENUM: - String enumName = parseString(); - Type enumT = Type.getType(enumName); - return enumT; - default: - Type t = (Type)id2type.get(new Integer(id)); - assert t != null : PEFile.byte2hex(id); - return t; - } - } - - private Type parse0x51() { - int id = buf.get(); - switch (id) { - case 0x51: - return parse0x51(); - case Signature.ELEMENT_TYPE_SZARRAY: - Type arrT = Type.mkArray(parseType(), 1); - return arrT; - case Signature.X_ELEMENT_TYPE_ENUM: - String enumName = parseString(); - Type enumT = Type.getType(enumName); - return enumT; - default: - Type t = (Type)id2type.get(new Integer(id)); - assert t != null : PEFile.byte2hex(id); - return t; - } - } - - - private Type parseFieldOrPropTypeInNamedArg() { // FieldOrPropType, Sec. 23.3 in Partition II of CLR Spec. - int id = buf.get(); - switch (id) { - case 0x51: - return (Type)(id2type.get(new Integer(Signature.ELEMENT_TYPE_OBJECT))); - // TODO remove case Signature.ELEMENT_TYPE_SZARRAY: - // Type arrT = Type.mkArray(parseType(), 1); - // return arrT; - case Signature.X_ELEMENT_TYPE_ENUM: - String enumName = parseString(); - Type enumT = Type.getType(enumName); // TODO this "lookup" only covers already-loaded assemblies. 
- return enumT; // TODO null as return value (due to the above) spells trouble later. - default: - Type t = (Type)id2type.get(new Integer(id)); - assert t != null : PEFile.byte2hex(id); - return t; - } - } - - private Type getTypeFromSerString() { - String typename = parseString(); - int i = typename.indexOf(','); - /* fully qualified assembly name follows. Just strip it on the assumption that - the assembly is referenced in the externs and the type will be found. */ - String name = (i < 0) ? typename : typename.substring(0, i); - Type t = Type.GetType(name); - if (t == null && i > 0) { - int j = typename.indexOf(',', i + 1); - if (j > 0) { - String assemName = typename.substring(i + 1, j); - try { - Assembly.LoadFrom(assemName); - } catch (Throwable e) { - throw new RuntimeException(typename, e); - } - t = Type.GetType(name); - } - } - assert t != null : typename; - return t; - } - - private boolean[] parseBooleanArray() { - boolean[] arr = new boolean[buf.getInt()]; - for (int i = 0; i < arr.length; i++) - arr[i] = buf.get() == 0 ? false : true; - return arr; - } - - private char[] parseCharArray() { - char[] arr = new char[buf.getInt()]; - for (int i = 0; i < arr.length; i++) - arr[i] = buf.getChar(); - return arr; - } - - private byte[] parseByteArray() { - byte[] arr = new byte[buf.getInt()]; - for (int i = 0; i < arr.length; i++) - arr[i] = buf.get(); - return arr; - } - - private short[] parseShortArray() { - short[] arr = new short[buf.getInt()]; - for (int i = 0; i < arr.length; i++) - arr[i] = buf.getShort(); - return arr; - } - - private int[] parseIntArray() { - int[] arr = new int[buf.getInt()]; - for (int i = 0; i < arr.length; i++) - arr[i] = buf.getInt(); - return arr; - } - - private long[] parseLongArray() { - long[] arr = new long[buf.getInt()]; - for (int i = 0; i < arr.length; i++) - arr[i] = buf.getLong(); - return arr; - } - - private float[] parseFloatArray() { - float[] arr = new float[buf.getInt()]; - for (int i = 0; i < arr.length; i++) - arr[i] = buf.getFloat(); - return arr; - } - - private double[] parseDoubleArray() { - double[] arr = new double[buf.getInt()]; - for (int i = 0; i < arr.length; i++) - arr[i] = buf.getDouble(); - return arr; - } - - private String[] parseStringArray() { - String[] arr = new String[buf.getInt()]; - for (int i = 0; i < arr.length; i++) - arr[i] = parseString(); - return arr; - } - - private String parseString() { // SerString convention - String str = null; - int length = parseLength(); - if (length < 0) - return null; - try { str = new String(value, buf.position(), length, "UTF-8" ); } - catch (UnsupportedEncodingException e) { throw new Error(e); } - buf.position(buf.position() + length); - return str; - } - - private int getByte() { - return (buf.get() + 0x0100) & 0xff; - } - - public int parseLength() { - int length = getByte(); - // check for invalid length format: the first, second or third - // most significant bits should be 0; if all are 1 the length is invalid. - if ((length & 0xe0) == 0xe0) - return -1; - if ((length & 0x80) != 0) { - length = ((length & 0x7f) << 8) | getByte(); - if ((length & 0x4000) != 0) - length = ((length & 0x3fff) << 16) | (getByte()<<8) | getByte(); - } - return length; - } - - //########################################################################## - private static void formatValue(StringBuffer str, Object o) { - Class c = (o == null) ? 
null : o.getClass(); - if (c == null) { - str.append(""); - } else if (c == String.class) { - str.append('"'); - str.append(o); - str.append('"'); - } else if (c == Character.class) { - str.append('\''); - str.append(o); - str.append('\''); - } else if (c == boolean[].class) { - str.append("new boolean[] {"); - boolean[] arr = (boolean[])o; - for (int i = 0; i < arr.length; i++) { - if (i > 0) str.append(", "); - str.append(arr[i]); - } - str.append('}'); - } else if (c == char[].class) { - str.append("new short[] {"); - short[] arr = (short[])o; - for (int i = 0; i < arr.length; i++) { - if (i > 0) str.append(", "); - str.append(arr[i]); - } - str.append('}'); - } else if (c == byte[].class) { - str.append("new byte[] {"); - byte[] arr = (byte[])o; - for (int i = 0; i < arr.length; i++) { - if (i > 0) str.append(", "); - str.append(arr[i]); - } - str.append('}'); - } else if (c == short[].class) { - str.append("new short[] {"); - short[] arr = (short[])o; - for (int i = 0; i < arr.length; i++) { - if (i > 0) str.append(", "); - str.append(arr[i]); - } - str.append('}'); - } else if (c == int[].class) { - str.append("new int[] {"); - int[] arr = (int[])o; - for (int i = 0; i < arr.length; i++) { - if (i > 0) str.append(", "); - str.append(arr[i]); - } - str.append('}'); - } else if (c == long[].class) { - str.append("new long[] {"); - long[] arr = (long[])o; - for (int i = 0; i < arr.length; i++) { - if (i > 0) str.append(", "); - str.append(arr[i]); - } - str.append('}'); - } else if (c == float[].class) { - str.append("new float[] {"); - float[] arr = (float[])o; - for (int i = 0; i < arr.length; i++) { - if (i > 0) str.append(", "); - str.append(arr[i]); - } - str.append('}'); - } else if (c == double[].class) { - str.append("new double[] {"); - double[] arr = (double[])o; - for (int i = 0; i < arr.length; i++) { - if (i > 0) str.append(", "); - str.append(arr[i]); - } - str.append('}'); - } else if (c == String[].class) { - str.append("new String[] {"); - String[] arr = (String[])o; - for (int i = 0; i < arr.length; i++) { - if (i > 0) str.append(", "); - formatValue(str, arr[i]); - } - str.append('}'); - } else if (o instanceof Type) { - str.append("typeof("); - str.append(o); - str.append(")"); - } else - str.append(o); - } - - //########################################################################## - - /** Represents named arguments (assigned outside of the constructor) - * of a custom attribute - */ - public static class NamedArgument { - - /** Designates if the named argument corresponds to a field or property. - * Possible values: - * Signature.X_ELEMENT_KIND_FIELD = 0x53 - * Signature.X_ELEMENT_KIND_PROPERTY = 0x54 - */ - public final int designator; - - /** The name of the field/property. */ - public final String name; - - /** Type of the field/property. */ - public final Type type; - - /** The value for the field/property. */ - public final Object value; - - /** An empty array NamedArgument. */ - public static final NamedArgument[] EMPTY = new NamedArgument[0]; - - public NamedArgument(int designator, String name,Type type,Object value) - { - this.designator = designator; - this.name = name; - this.type = type; - this.value = value; - } - - /** @return true if the named argument specifies a field; - * false otherwise. - */ - public boolean isField() { - return designator == Signature.X_ELEMENT_KIND_FIELD; - } - - /** @return true if the named argument specifies a property; - * false otherwise. 
- */ - public boolean isProperty() { - return designator == Signature.X_ELEMENT_KIND_PROPERTY; - } - - /** @return a string representation of the named argument. */ - public String toString() { - StringBuffer str = new StringBuffer(name); - str.append(" = "); - if (type.IsEnum()) - str.append('(').append(type.FullName).append(')'); - formatValue(str, value); - return str.toString(); - } - } - - //########################################################################## - - public static class BoxedArgument { - public final Type type; - public final Object value; - public BoxedArgument(Type type, Object value) { - this.type = type; this.value = value; - } - public String toString() { - return "(" + type.FullName + ")" + value; - } - } - - //########################################################################## - -} // class Attribute diff --git a/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java b/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java deleted file mode 100644 index cac2319b50..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Specifies flags that control binding and the way in which - * the search for members and types is conducted by reflection. - * - * Note: You must specify Instance or Static along with Public or NonPublic - * or no members will be returned. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public abstract class BindingFlags { - - //########################################################################## - - // disallows extending the class; - private BindingFlags() {} - - /** - * Specifies no binding flag. - */ - public static final int Default = 0x0000; - - /** - * Specifies that the case of the member name should not be considered - * when binding. - */ - public static final int IgnoreCase = 0x0001; - - /** - * Specifies that only members declared at the level of the supplied type's - * hierarchy should be considered. Inherited members are not considered. - */ - public static final int DeclaredOnly = 0x0002; - - /** - * Specifies that instance members are to be included in the search. - */ - public static final int Instance = 0x0004; - - /** - * Specifies that static members are to be included in the search. - */ - public static final int Static = 0x0008; - - /** - * Specifies that public members are to be included in the search. - */ - public static final int Public = 0x0010; - - /** - * Specifies that non-public members are to be included in the search. - */ - public static final int NonPublic = 0x0020; - - /** - * Specifies that static members up the hierarchy should be returned. - * Static members include fields, methods, events, and properties. - * Nested types are not returned. - */ - public static final int FlattenHierarchy = 0x0040; - - /** - * Specifies that a method is to be invoked. This may not be a constructor - * or a type initializer. - */ - public static final int InvokeMethod = 0x0100; - - /** - * Specifies that Reflection should create an instance of - * the specified type. Calls the constructor that matches - * the given arguments. The supplied member name is ignored. - * If the type of lookup is not specified, (Instance | Public) - * will apply. It is not possible to call a type initializer. - */ - public static final int CreateInstance = 0x0200; - - /** - * Specifies that the value of the specified field should be returned. 
- */ - public static final int GetField = 0x0400; - - /** - * Specifies that the value of the specified field should be set. - */ - public static final int SetField = 0x0800; - - /** - * Specifies that the value of the specified property should be returned. - */ - public static final int GetProperty = 0x1000; - - /** - * Specifies that the value of the specified property should be set. - * For COM properties, specifying this binding flag is equivalent to - * specifying PutDispProperty and PutRefDispProperty. - */ - public static final int SetProperty = 0x2000; - - /** - * Specifies that the PROPPUT member on a COM object should be invoked. - * PROPPUT specifies a property-setting function that uses a value. - * Use PutDispProperty if a property has both PROPPUT and PROPPUTREF - * and you need to distinguish which one is called. - */ - public static final int PutDispProperty = 0x4000; - - - /** - * Specifies that the PROPPUTREF member on a COM object should be invoked. - * PROPPUTREF specifies a property-setting function that uses a reference - * instead of a value. Use PutRefDispProperty if a property has both - * PROPPUT and PROPPUTREF and you need to distinguish which one is called. - */ - public static final int PutRefDispProperty = 0x8000; - - /** - * Specifies that types of the supplied arguments must exactly match - * the types of the corresponding formal parameters. Reflection - * throws an exception if the caller supplies a non-null Binder object, - * since that implies that the caller is supplying BindToXXX - * implementations that will pick the appropriate method. - * Reflection models the accessibility rules of the common type system. - * For example, if the caller is in the same assembly, the caller - * does not need special permissions for internal members. Otherwise, - * the caller needs ReflectionPermission. This is consistent with - * lookup of members that are protected, private, and so on. - * The general principle is that ChangeType should perform only - * widening coercions, which never lose data. An example of a - * widening coercion is coercing a value that is a 32-bit signed integer - * to a value that is a 64-bit signed integer. This is distinguished - * from a narrowing coercion, which may lose data. An example of - * a narrowing coercion is coercing a 64-bit signed integer to - * a 32-bit signed integer. - * The default binder ignores this flag, while custom binders can - * implement the semantics of this flag. - */ - public static final int ExactBinding = 0x10000; - - /** - * Used in COM interop to specify that the return value of the member - * can be ignored. - */ - public static final int IgnoreReturn = 0x100000 ; - - /** - * Returns the set of members whose parameter count matches the number - * of supplied arguments. This binding flag is used for methods with - * parameters that have default values and methods with variable arguments - * (varargs). This flag should only be used with Type.InvokeMember. - * Parameters with default values are used only in calls where trailing - * arguments are omitted. They must be the last arguments. - */ - public static final int OptionalParamBinding = 0x40000; - - /** - * Not implemented. 
- */ - public static final int SuppressChangeType = 0x20000; - - //########################################################################## - -} // class BindingFlags diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java b/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java deleted file mode 100644 index 50bf9fb5d5..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - - -/** - * Calling conventions - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public abstract class CallingConventions { - - //######################################################################## - - /** - * Specifies the default calling convention as determined by the - * common language runtime. - */ - public static final short Standard = (short) 0x0001; - - /** - * Specifies the calling convention for methods with variable arguments. - */ - public static final short VarArgs = (short) 0x0002; - - /** - * Specifies that either the Standard or the VarArgs calling - * convention may be used. - */ - public static final short Any = Standard | VarArgs; - - /** - * Specifies an instance or virtual method (not a static method). - * At run-time, the called method is passed a pointer to the target - * object as its first argument (the this pointer). The signature - * stored in metadata does not include the type of this first argument, - * because the method is known and its owner class can be discovered - * from metadata. - */ - public static final short HasThis = (short) 0x0020; - - /** - * Specifies that the signature is a function-pointer signature, - * representing a call to an instance or virtual method (not a static - * method). If ExplicitThis is set, HasThis must also be set. The first - * argument passed to the called method is still a this pointer, but the - * type of the first argument is now unknown. Therefore, a token that - * describes the type (or class) of the this pointer is explicitly stored - * into its metadata signature. - */ - public static final short ExplicitThis = (short) 0x0040; - - //######################################################################## - - private CallingConventions() {} - - public static String toString(int callConv) { - StringBuffer s = new StringBuffer(); - - if ((callConv & HasThis) != 0) { - s.append("instance"); - if ((callConv & ExplicitThis) != 0) - s.append(" explicit"); - } - - return s.toString(); - } - - //########################################################################## - -} // class CallingConventions diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java b/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java deleted file mode 100644 index 8c82cb4876..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java +++ /dev/null @@ -1,48 +0,0 @@ -package ch.epfl.lamp.compiler.msil; - -import java.util.Arrays; - -/* The only reason for ConstructedType to extend Type is complying with existing code - (e.g., caseFieldBuilder in ILPrinterVisitor) expecting a Type. 
- */ -public class ConstructedType extends Type { - - public final Type instantiatedType; - public final Type[] typeArgs; - - public ConstructedType(Type instantiatedType, Type[] typeArgs) { - super(instantiatedType.Module, instantiatedType.Attributes, "", null, null, null, instantiatedType.auxAttr /*AuxAttr.None*/ , null); - this.instantiatedType = instantiatedType; - this.typeArgs = typeArgs; - } - - public String toString() { - String res = instantiatedType.toString() + "["; - for (int i = 0; i < typeArgs.length; i++) { - res = res + typeArgs[i].toString(); - if(i + 1 < typeArgs.length) { - res = res + ", "; - } - } - return res + "]"; - } - - - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - ConstructedType that = (ConstructedType) o; - - if (!instantiatedType.equals(that.instantiatedType)) return false; - if (!Arrays.equals(typeArgs, that.typeArgs)) return false; - - return true; - } - - public int hashCode() { - int result = instantiatedType.hashCode(); - result = 31 * result + Arrays.hashCode(typeArgs); - return result; - } -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java deleted file mode 100644 index 69f5d6d32a..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Discovers the attributes of a class constructor and provides - * access to constructor metadata. - * ConstructorInfo is used to discover the attributes of a constructor - * as well as to invoke a constructor. Objects are created by invoking - * either the GetConstructors or GetConstructor method of a Type object. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public class ConstructorInfo extends MethodBase { - //########################################################################## - - public final int MemberType() { return MemberTypes.Constructor; } - - public final boolean IsConstructor() { return true; } - - protected static final String CTOR = ".ctor"; - protected static final String CCTOR = ".cctor"; - protected static final ConstructorInfo[] EMPTY_ARRAY = new ConstructorInfo[0]; - - protected static String getName(int attrs) { - return (attrs & MethodAttributes.Static) == 0 ? 
CTOR : CCTOR; - } - - /** Public constructors */ - - public ConstructorInfo(Type declType, int attrs, Type[] paramTypes) { - super(getName(attrs), declType, attrs, paramTypes); - assert declType != null : "Owner can't be 'null' for a constructor!"; - } - - public ConstructorInfo(Type declType, int attrs, ParameterInfo[] params) - { - super(getName(attrs), declType, attrs, params); - assert declType != null : "Owner can't be 'null' for a constructor!"; - } - - - public String toString() { - return MethodAttributes.toString(Attributes) + " " + Type.VOID() + - " " + DeclaringType.FullName + "::" + Name + params2String(); - } - - //########################################################################## - -} // class ConstructorInfo diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java b/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java deleted file mode 100644 index 0e58c18114..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import java.util.List; -import java.util.LinkedList; -import java.util.Iterator; - -/** - * @author Nikolay Mihaylov - * @version 1.0 - */ -public abstract class CustomAttributeProvider implements ICustomAttributeProvider { - - //########################################################################## - - protected List/**/ custAttrs; - private static final Object[] EMPTY = new Object[0]; - - //TODO: take inherit into account - public Object[] GetCustomAttributes(boolean inherit) { - initAttributes(null); - return custAttrs.size() == 0 ? EMPTY - : custAttrs.toArray(new Attribute[custAttrs.size()]); - } - - //TODO: take inherit into account - public Object[] GetCustomAttributes(Type attributeType, boolean inherit) { - initAttributes(attributeType); - List tAttrs = null; - if (constrType == attributeType) - tAttrs = custAttrs; - else { - tAttrs = new LinkedList(); - for (Iterator attrs = custAttrs.iterator(); attrs.hasNext(); ) { - Attribute a = (Attribute) attrs.next(); - if (a.GetType() == attributeType) tAttrs.add(a); - } - } - return tAttrs.size() == 0 ? 
EMPTY - : tAttrs.toArray(new Attribute[tAttrs.size()]); - } - - //TODO: take inherit into account - public boolean IsDefined(Type attributeType, boolean inherit) { - initAttributes(attributeType); - if (constrType == attributeType) - return custAttrs.size() > 0; - Iterator attrs = custAttrs.iterator(); - while (attrs.hasNext()) { - if (((Attribute)attrs.next()).GetType() == attributeType) - return true; - } - return false; -// return inherit && (DeclaringClass.BaseType != null) -// && DeclaringClass.BaseType.IsDefined(inherit); - } - - protected void addCustomAttribute(ConstructorInfo constr, byte[] value) { - Attribute attr = new Attribute(constr, value); - assert constrType == null || constrType == attr.GetType(); - if (custAttrs == null) - custAttrs = new LinkedList(); - custAttrs.add(attr); - } - - private void initAttributes(Type atype) { - if (custAttrs != null - && (constrType == null || constrType == atype)) - return; - custAttrs = new LinkedList(); - constrType = atype; - loadCustomAttributes(atype); - } - - protected void loadCustomAttributes(Type atype) {} - - private Type constrType; -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java b/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java deleted file mode 100644 index cf30008c60..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java +++ /dev/null @@ -1,45 +0,0 @@ -package ch.epfl.lamp.compiler.msil; - -/** - * Quoting from the CIL spec, Partition II, Sec. 7.1.1: - * - * Custom modifiers, defined using `modreq` (required modifier) and `modopt` (optional modifier), are - * similar to custom attributes (Sec. 21) except that modifiers are part of a signature rather than being attached to a - * declaration. Each modifer associates a type reference with an item in the signature. - * - */ -public class CustomModifier { - - public boolean isReqd; - public Type marker; - - public CustomModifier(boolean isReqd, Type marker) { - this.isReqd = isReqd; - this.marker = marker; - } - - public String toString() { - String res = (isReqd ? "modreq( " : "modopt( ") + marker.toString() + " )"; - return res; - } - - public static Type[] helperCustomMods(boolean isReqd, CustomModifier[] cmods) { - if(cmods == null) return null; - int count = 0; - for (int idx = 0; idx < cmods.length; idx++) { - if(cmods[idx].isReqd == isReqd) count++; - } - Type[] res = new Type[count]; - int residx = 0; - for (int idx = 0; idx < cmods.length; idx++) { - res[residx] = cmods[idx].marker; - residx++; - } - return res; - } - - public static Type VolatileMarker() { - return Type.GetType("System.Runtime.CompilerServices.IsVolatile"); - } - -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java deleted file mode 100644 index a183993cb9..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Specifies flags that describe the attributes of a an event. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public final class EventAttributes { - - //########################################################################## - - /** Specifies that the event has no attributes. */ - public static final short None = 0x000; - - /** Specifies a reserved flag for CLR use only. 
*/ - public static final short ReservedMask = 0x0400; - - /** Specifies that the event is special in a way described by the name. */ - public static final short SpecialName = 0x0200; - - /** Specifies the the CLR should check name encoding. */ - public static final short RTSpecialName = 0x0400; - - //########################################################################## - -} // class EventAttributes diff --git a/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java deleted file mode 100644 index 3ccba7900b..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - - -/** - * Discovers the attributes of an event - * and provides access to event metadata. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public class EventInfo extends MemberInfo { - - //########################################################################## - - public final int MemberType() { return MemberTypes.Event; } - - /** Attributes associated with the event. */ - public final short Attributes; - - /** The Type object for the underlying event-handler delegate - * associated with this event. - */ - public final Type EventHandlerType; - - public MethodInfo GetAddMethod() { return addMethod; } - - public MethodInfo GetRemoveMethod() { return removeMethod; } - - public String toString() { - return "" + EventHandlerType + " " + Name; - } - - //########################################################################## - - protected static final EventInfo[] EMPTY_ARRAY = new EventInfo[0]; - - protected MethodInfo addMethod; - - protected MethodInfo removeMethod; - - protected EventInfo(String name, Type declType, short attr, - Type handlerType, MethodInfo add, MethodInfo remove) - { - super(name, declType); - Attributes = attr; - EventHandlerType = handlerType; - this.addMethod = add; - this.removeMethod = remove; - } - - //########################################################################## - -} // class EventInfo diff --git a/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java deleted file mode 100644 index d7d1bb3d54..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Specifies flags that describe the attributes of a field. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public final class FieldAttributes { - - //########################################################################## - - /** Specifies the access level of a given field. */ - public static final short FieldAccessMask = 0x0007; - - /** Member not refereneceable. */ - public static final short CompilerControlled = 0x0000; - - /** Field is accessible only by the parent type. */ - public static final short Private = 0x0001; - - /** Field is accessible only by subtypes in this assembly. */ - public static final short FamANDAssem = 0x0002; - - /** Field is accessible throughout the assembly. */ - public static final short Assembly = 0x0003; - - /** Field is accessible only by type and subtypes. */ - public static final short Family = 0x0004; - - /** Field is accessible by subtypes anywhere, - * as well as throughout this assembly. 
*/ - public static final short FamORAssem = 0x0005; - - /** Specifies that the field is accessible by any member - * for whom this scope is visible. */ - public static final short Public = 0x0006; - - //########################################################################## - // - - /** Field represents the defined type, or else it is per-instance. */ - public static final short Static = 0x0010; - - /** Field is initialized only and cannot be written after initialization. */ - public static final short InitOnly = 0x0020; - - /** Value is compile-time constant. */ - public static final short Literal = 0x0040; - - /** Field does not have to be serialized when the type is remoted. */ - public static final short NotSerialized = 0x0080; - - /** Field is special. */ - public static final short SpecialName = 0x0200; - - //########################################################################## - // Interop attributes - - /** Implementation is forwarded through PInvoke */ - public static final short PinvokeImpl = 0x2000; - - - //########################################################################## - // Additional flags - - /** CLI provides 'special' behavior depending upon the name of the field */ - public static final short RTSpecialName = 0x0400; - - /** Field has marshalling information. */ - public static final short HasFieldMarshal = 0x1000; - - /** Field has a default value. */ - public static final short HasDefault = (short)0x8000; - - /** Field has a Relative Virtual Address (RVA). The RVA is the location - * of the method body in the current image, as an address relative - * to the start of the image file in which it is located. */ - public static final short HasFieldRVA = 0x0100; - - //########################################################################## - // - - public static String toString(short attrs) { - StringBuffer str = new StringBuffer(); - switch (attrs & FieldAccessMask) { - case CompilerControlled: str.append("compilercontrolled"); break; - case Private: str.append("private"); break; - case FamANDAssem: str.append("famandassem"); break; - case Assembly: str.append("assembly"); break; - case Family: str.append("family"); break; - case FamORAssem: str.append("famorassem"); break; - case Public: str.append("public"); break; - } - if ((attrs & Static) != 0) str.append(" static"); - if ((attrs & InitOnly) != 0) str.append(" initonly"); - if ((attrs & Literal) != 0) str.append(" literal"); - if ((attrs & NotSerialized) != 0) str.append(" notserialized"); - if ((attrs & SpecialName) != 0) str.append(" specialname"); - if ((attrs & PinvokeImpl) != 0) str.append(""); - if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname"); - if ((attrs & HasFieldMarshal) != 0) str.append(" marshal()"); - //if ((attrs & HasDefault) != 0) str.append(" default(???)"); - return str.toString(); - } - - //########################################################################## - - // makes the class uninstantiable - private FieldAttributes() {} - - //########################################################################## - -} // class FieldAttributes diff --git a/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java deleted file mode 100644 index 536a67e9a8..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import 
ch.epfl.lamp.compiler.msil.util.PECustomMod; - -/** - * Discovers the attributes of a field and provides access to field metadata. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public class FieldInfo extends MemberInfo implements HasCustomModifiers { - - //########################################################################## - // public interface - - public final int MemberType() { return MemberTypes.Field; } - - /** Attributes associated with this field. */ - public final short Attributes; - - /** Type of the field represented by this FieldInfo object. */ - public final Type FieldType; - - /** can be null */ - public final CustomModifier[] cmods; - - protected final Object value; - - public final boolean IsStatic() { - return (Attributes & FieldAttributes.Static) != 0; - } - - public final boolean IsInitOnly() { - return (Attributes & FieldAttributes.InitOnly) != 0; - } - - public final boolean IsLiteral() { - return (Attributes & FieldAttributes.Literal) != 0; - - } - - public final boolean IsPublic() { - return (Attributes & FieldAttributes.FieldAccessMask) - == FieldAttributes.Public; - } - - public final boolean IsPrivate() { - return (Attributes & FieldAttributes.FieldAccessMask) - == FieldAttributes.Private; - } - - public final boolean IsFamily() { - return (Attributes & FieldAttributes.FieldAccessMask) - == FieldAttributes.Family; - } - - public final boolean IsAssembly() { - return (Attributes & FieldAttributes.FieldAccessMask) - == FieldAttributes.Assembly; - } - - public final boolean IsFamilyOrAssembly() { - return (Attributes & FieldAttributes.FieldAccessMask) - == FieldAttributes.FamORAssem; - } - - public final boolean IsFamilyAndAssembly() { - return (Attributes & FieldAttributes.FieldAccessMask) - == FieldAttributes.FamANDAssem; - } - public final boolean IsSpecialName() { - return (Attributes & FieldAttributes.SpecialName) != 0; - } - - public final boolean IsPinvokeImpl() { - return (Attributes & FieldAttributes.PinvokeImpl) != 0; - } - - public final boolean IsNotSerialized() { - return (Attributes & FieldAttributes.NotSerialized) != 0; - } - - private boolean knownVolatile = false; - private boolean cachedVolatile = false; - public final boolean IsVolatile() { - if(knownVolatile) return cachedVolatile; - knownVolatile = true; - if(cmods == null) { - cachedVolatile = false; - return cachedVolatile; - } - for (int idx = 0; idx < cmods.length; idx++) { - if(cmods[idx].marker == CustomModifier.VolatileMarker()) { - cachedVolatile = true; - return cachedVolatile; - } - } - cachedVolatile = false; - return cachedVolatile; - } - - public final Type[] GetOptionalCustomModifiers () { - return CustomModifier.helperCustomMods(false, cmods); - } - - public final Type[] GetRequiredCustomModifiers() { - return CustomModifier.helperCustomMods(true, cmods); - } - - public String toString() { - return FieldAttributes.toString(Attributes) + " " + - FieldType + " " + DeclaringType.FullName + "::" + Name; - } - - //########################################################################## - - protected static final FieldInfo[] EMPTY_ARRAY = new FieldInfo[0]; - - /** Initializes a new instance of the FieldInfo class. 
*/ - protected FieldInfo(String name, Type declType, - int attrs, PECustomMod fieldTypeWithMods, Object value) - { - super(name, declType); - FieldType = fieldTypeWithMods.marked; - cmods = fieldTypeWithMods.cmods; - Attributes = (short) attrs; - this.value = value; - } - - /** - */ - public Object getValue() { return value; } - - //########################################################################## - -} // class FieldInfo diff --git a/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java b/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java deleted file mode 100644 index 6237fbafee..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java +++ /dev/null @@ -1,40 +0,0 @@ -package ch.epfl.lamp.compiler.msil; - -/** - * @author Miguel Garcia - */ -public class GenericParamAndConstraints { - - public GenericParamAndConstraints(int Number, String Name, Type[] Constraints, - boolean isInvariant, boolean isCovariant, boolean isContravariant, - boolean isReferenceType, boolean isValueType, boolean hasDefaultConstructor) { - this.Number = Number; - this.Name = Name; - this.Constraints = Constraints; // TODO representation for the class and new() constraints missing - this.isInvariant = isInvariant; - this.isCovariant = isCovariant; - this.isContravariant = isContravariant; - this.isReferenceType = isReferenceType; - this.isValueType = isValueType; - this.hasDefaultConstructor = hasDefaultConstructor; - - } - - public final int Number; - public final String Name; // can be null - public final Type[] Constraints; // can be empty array - public final boolean isInvariant; // only relevant for TVars, not for an MVar - public final boolean isCovariant; // only relevant for TVars, not for an MVar - public final boolean isContravariant; // only relevant for TVars, not for an MVar - public final boolean isReferenceType; - public final boolean isValueType; - public final boolean hasDefaultConstructor; - - public String toString() { - String res = Name == null ? "" : (Name.equals("") ? "" : Name); - res = res + " <: " + Constraints; - return res; - } - -} - diff --git a/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java b/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java deleted file mode 100644 index 5ead087350..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java +++ /dev/null @@ -1,9 +0,0 @@ -package ch.epfl.lamp.compiler.msil; - -public interface HasCustomModifiers { - - public Type[] GetOptionalCustomModifiers(); - - public Type[] GetRequiredCustomModifiers(); - -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java b/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java deleted file mode 100644 index 927185962c..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Provides custom attributes for reflection objects that support them. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public interface ICustomAttributeProvider { - - //########################################################################## - // interface method definitions - - /** Returns an array of all of the custom attributes - * defined on this member, excluding named attributes, - * or an empty array if there are no custom attributes. 
- * - * @param inherit - When true, look up the hierarchy chain - * for the inherited custom attribute. - * @return - An array of Objects representing custom attributes, - * or an empty array. - */ - public Object[] GetCustomAttributes(boolean inherit); - - - /** Returns an array of custom attributes defined on this member, - * identified by type, or an empty array - * if there are no custom attributes of that type. - * - * @param attributeType - The type of the custom attributes. - * @param inherit - When true, look up the hierarchy chain - * for the inherited custom attribute. - * @return - An array of Objects representing custom attributes, - * or an empty array. - */ - public Object[] GetCustomAttributes(Type attributeType, boolean inherit); - - - /** Indicates whether one or more instance of attributeType - * is defined on this member - * - * @param attributeType - The type of the custom attributes - * @param inherit - When true, look up the hierarchy chain - * for the inherited custom attribute. - * @return - true if the attributeType is defined on this member; - * false otherwise. - */ - public boolean IsDefined(Type attributeType, boolean inherit); - - //########################################################################## - -} // interface ICustomAttributeProvider diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java deleted file mode 100644 index 65ff1b290b..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * The root class of the Reflection hierarchy. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public abstract class MemberInfo extends CustomAttributeProvider { - - //########################################################################## - - /** The name of this member. */ - public final String Name; - - /** - * The class that declares this member. - * Note: if the MemberInfo object is a global member, - * (that is, it was obtained from Module.GetMethods, - * which returns global methods on a module), then DeclaringType - * will be a null reference. - */ - public final Type DeclaringType; - - /** An enumerated value from the MemberTypes class, - * specifying a constructor, event, field, method, - * property, type information, all, or custom. */ - public abstract int MemberType(); - - //########################################################################## - // protected members - - protected static final MemberInfo[] EMPTY_ARRAY = new MemberInfo[0]; - - protected MemberInfo(String name, Type declType) { - Name = name; - DeclaringType = declType; - } - - //######################################################################## - -} // class MemberInfo diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java b/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java deleted file mode 100644 index 5f49ad3323..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Marks each type of member that is defined as a derived class of MemberInfo. 
- * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public final class MemberTypes { - - //########################################################################## - - /** Specifies that the member is a constructor, - * representing a ConstructorInfo member. */ - public static final int Constructor = 0x01; - - - /** Specifies that the member is an event, - * representing an EventInfo member. */ - public static final int Event = 0x02; - - - /** Specifies that the member is a field, - * representing a FieldInfo member. */ - public static final int Field = 0x04; - - - /** Specifies that the member is a method, - * representing a MethodInfo member. */ - public static final int Method = 0x08; - - - /** Specifies that the member is a property, - * representing a PropertyInfo member. - */ - public static final int Property = 0x10; - - /** Specifies that the member is a type, - * representing a TypeInfo member. */ - public static final int TypeInfo = 0x20; - - - /** Specifies that the member is a custom member type. */ - public static final int Custom = 0x40; - - - /** Specifies that the member is a nested type, - * extending MemberInfo. */ - public static final int NestedType = 0x80; - - - /** Specifies all member types. */ - public static final int All = - Constructor | Event | Field | Method | Property | TypeInfo | NestedType; - - - public static String toString(int memberType) { - if ((memberType & Constructor) != 0) return "Constructor"; - if ((memberType & Event) != 0) return "Event"; - if ((memberType & Field) != 0) return "Field"; - if ((memberType & Method) != 0) return "Method"; - if ((memberType & Property) != 0) return "Property"; - if ((memberType & TypeInfo) != 0) return "TypeInfo"; - if ((memberType & Custom) != 0) return "Custom"; - if ((memberType & NestedType) != 0) return "NestedType"; - return "Unknown MemberType: " + memberType; - } - - //########################################################################## - - // makes the class uninstantiable - private MemberTypes() {} - - //########################################################################## - -} // class MemberTypes diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java deleted file mode 100644 index a703c38fb8..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** Specifies flags for method attributes. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public final class MethodAttributes { - - //########################################################################## - // Method access attributes - - /** Bitmask used to retrieve accessibility information. */ - public static final short MemberAccessMask = 0x0007; - - ///** Member not referenceable*/ - //public static final short CompilerConstrolled = 0x0000; - - /** Indicates that the member cannot be referenced. */ - public static final short PrivateScope = 0x0000; - - /** Method is accessible only by the current class. */ - public static final short Private = 0x0001; - - /** Method is accessible to members of this type - * and its derived types that are in this assembly only. */ - public static final short FamANDAssem = 0x0002; - - /** Method is accessible to any class of this assembly. 
*/ - public static final short Assembly = 0x0003; - - /** Method is accessible only to members of this class - * and its derived classes. */ - public static final short Family = 0x0004; - - /** Method is accessible to derived classes anywhere, - * as well as to any class in the assembly. */ - public static final short FamORAssem = 0x0005; - - /** Method is accessible to any object for which this object is in scope. */ - public static final short Public = 0x0006; - - - //########################################################################## - // Flags - - /** Method is defined on the type; otherwise, it is defined per instance. */ - public static final short Static = 0x0010; - - /** Method cannot be overridden. */ - public static final short Final = 0x0020; - - /** Method is virtual. */ - public static final short Virtual = 0x0040; - - /** Method hides by name and signature; otherwise, by name only. */ - public static final short HideBySig = 0x0080; - - - //########################################################################## - // vtable attributes - - /** Bitmask used to retrieve vtable attributes. */ - public static final short VtableLayoutMask = 0x0100; - - /** Method reuses existing slot in the vtable. */ - public static final short ReuseSlot = 0x0000; - - - /** Method always gets a new slot in the vtable. */ - public static final short NewSlot = 0x0100; - - - //########################################################################## - // Flags - - /** Method does not provide implementation. */ - public static final short Abstract = 0x0400; - - /** Method is special. */ - public static final short SpecialName = 0x0800; - - - //########################################################################## - // Interop attributes - - /** Method implementation is forwarded through PInvoke. */ - public static final short PInvokeImpl = 0x2000; - - /** Reserved: shall be zero for conforming implementations. - * Managed method is exported by thunk to unmanaged code. */ - public static final short UnmanagedExport = 0x0008; - - - //########################################################################## - // Additional flags - - /** CLI provides special behavior, depending on the name of the method. */ - public static final short RTSpecialName = 0x1000; - - /** Method has security associated with it. - * Reserved flag for runtime use only. - */ - public static final short HasSecurity = 0x00000040; - - /** - * Indicates that the method calls another method containing security code. - * Reserved flag for runtime use only. - */ - public static final short RequireSecObject = 0x00004000; - - /** Indicates a reserved flag for runtime use only. 
*/ - public static final short ReservedMask = 0x0000; - - - //########################################################################## - - public static String toString(short attrs) { - StringBuffer str = new StringBuffer(accessFlagsToString(attrs)); - if ((attrs & Static) != 0) str.append(" static"); - if ((attrs & Final) != 0) str.append(" final"); - if ((attrs & Virtual) != 0) str.append(" virtual"); - if ((attrs & Abstract) != 0) str.append(" abstract"); - if ((attrs & HideBySig) != 0) str.append(" hidebysig"); - if ((attrs & NewSlot) != 0) str.append(" newslot"); - if ((attrs & SpecialName) != 0) str.append(" specialname"); - if ((attrs & PInvokeImpl) != 0) str.append(" pinvokeimpl(?!?)"); - if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname"); - return str.toString(); - - } - - public static String accessFlagsToString(short attrs) { - switch (attrs & MemberAccessMask) { - case PrivateScope: return "compilercontrolled"; - case Private: return "private"; - case FamANDAssem: return "famandassem"; - case Assembly: return "assembly"; - case Family: return "family"; - case FamORAssem: return "famorassem"; - case Public: return "public"; - default: return "xxx"; - } - } - - //########################################################################## - - // makes the class uninstantiable - private MethodAttributes() {} - - //########################################################################## - -} // class MethodAttributes diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java deleted file mode 100644 index fe6404346e..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import java.util.Iterator; - -/** - * The common superclass of MethodInfo and ConstructorInfo - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public abstract class MethodBase extends MemberInfo { - - //########################################################################## - // public interface - - private java.util.List /* GenericParamAndConstraints */ mVars = new java.util.LinkedList(); - private GenericParamAndConstraints[] sortedMVars = null; - - public void addMVar(GenericParamAndConstraints tvarAndConstraints) { - sortedMVars = null; - mVars.add(tvarAndConstraints); - } - - public GenericParamAndConstraints[] getSortedMVars() { - if(sortedMVars == null) { - sortedMVars = new GenericParamAndConstraints[mVars.size()]; - for (int i = 0; i < sortedMVars.length; i ++){ - Iterator iter = mVars.iterator(); - while(iter.hasNext()) { - GenericParamAndConstraints tvC = (GenericParamAndConstraints)iter.next(); - if(tvC.Number == i) { - sortedMVars[i] = tvC; - } - } - } - } - return sortedMVars; - } - - public final boolean IsGeneric() { - return mVars.size() > 0; - } - - /** The attributes associated with this method/constructor.
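The flag constants above fall into two groups: a multi-bit access level that is extracted with MemberAccessMask and compared for equality, and independent single-bit flags that are tested with a non-zero check; the Is* accessors defined by MethodBase below do exactly this. A minimal sketch of that decoding, using only the constants and helpers shown in this patch (the demo class and the chosen flag combination are hypothetical, not taken from a real assembly):

    import ch.epfl.lamp.compiler.msil.MethodAttributes;

    class MethodAttributesDemo {                                // hypothetical demo class
        public static void main(String[] args) {
            short attrs = (short) (MethodAttributes.Public      // 0x0006, an access level
                                 | MethodAttributes.Static      // 0x0010, a single-bit flag
                                 | MethodAttributes.HideBySig); // 0x0080, a single-bit flag

            // Access level: mask the three low bits, then compare for equality.
            boolean isPublic =
                (attrs & MethodAttributes.MemberAccessMask) == MethodAttributes.Public;

            // Ordinary flags: test for a non-zero result.
            boolean isStatic = (attrs & MethodAttributes.Static) != 0;

            System.out.println(MethodAttributes.toString(attrs)); // "public static hidebysig"
            System.out.println(isPublic + " " + isStatic);        // "true true"
        }
    }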
*/ - public final short Attributes; - - /***/ - public final short CallingConvention; - - public abstract boolean IsConstructor(); - - public final boolean IsAbstract() { - return (Attributes & MethodAttributes.Abstract) != 0; - } - - public final boolean IsFinal() { - return (Attributes& MethodAttributes.Final) != 0; - } - - public final boolean IsVirtual() { - return (Attributes& MethodAttributes.Virtual) != 0; - } - - public final boolean IsInstance() { - return !IsStatic() && !IsVirtual(); - } - - public final boolean IsStatic() { - return (Attributes & MethodAttributes.Static) != 0; - } - - public final boolean IsHideBySig() { - return (Attributes & MethodAttributes.HideBySig) != 0; - } - - public final boolean IsSpecialName() { - return (Attributes & MethodAttributes.SpecialName) != 0; - } - - - public final boolean IsPublic() { - return (Attributes & MethodAttributes.MemberAccessMask) - == MethodAttributes.Public; - } - - public final boolean IsPrivate() { - return (Attributes & MethodAttributes.MemberAccessMask) - == MethodAttributes.Private; - } - - public final boolean IsFamily() { - return (Attributes & MethodAttributes.MemberAccessMask) - == MethodAttributes.Family; - } - - public final boolean IsAssembly() { - return (Attributes & MethodAttributes.MemberAccessMask) - == MethodAttributes.Assembly; - } - - public final boolean IsFamilyOrAssembly() { - return (Attributes & MethodAttributes.MemberAccessMask) - == MethodAttributes.FamORAssem; - } - - public final boolean IsFamilyAndAssembly() { - return (Attributes & MethodAttributes.MemberAccessMask) - == MethodAttributes.FamANDAssem; - } - - public boolean HasPtrParamOrRetType() { - // the override in MethodInfo checks the return type - ParameterInfo[] ps = GetParameters(); - for (int i = 0; i < ps.length; i++) { - Type pT = ps[i].ParameterType; - if(pT.IsPointer()) { - // Type.mkPtr creates a msil.Type for a pointer type - return true; - } - if(pT.IsByRef() && !pT.GetElementType().CanBeTakenAddressOf()) { - /* TODO Cases where GenMSIL (so far) con't emit good bytecode: - the type being taken address of IsArray(), IsGeneric(), or IsTMVarUsage. - For example, System.Enum declares - public static bool TryParse(string value, out TEnum result) where TEnum : struct, new(); - */ - return true; - } - } - return false; - } - - /** Returns the parameters of the method/constructor. */ - public ParameterInfo[] GetParameters() { - return (ParameterInfo[]) params.clone(); - } - - public int GetMethodImplementationFlags() { return implAttributes; } - - //########################################################################## - - /** Method parameters. */ - protected ParameterInfo[] params; - - protected short implAttributes; - - protected MethodBase(String name, Type declType, int attrs, Type[] paramTypes) - { - this(name, declType, attrs); - assert paramTypes != null; - params = new ParameterInfo[paramTypes.length]; - for (int i = 0; i < params.length; i++) - params[i] = new ParameterInfo(null, paramTypes[i], 0, i); - } - - protected MethodBase(String name, Type declType, int attrs, - ParameterInfo[] params) - { - this(name, declType, attrs); - this.params = params; - } - - /** - */ - private MethodBase(String name, Type declType, int attrs) { - super(name, declType); - - Attributes = (short) attrs; - - if (IsConstructor()) { - attrs |= MethodAttributes.SpecialName; - attrs |= MethodAttributes.RTSpecialName; - } - - CallingConvention = (short) (CallingConventions.Standard - | (IsStatic() ? 
(short)0 : CallingConventions.HasThis)); - } - - //########################################################################## - // internal methods - - protected String params2String() { - StringBuffer s = new StringBuffer("("); - for (int i = 0; i < params.length; i++) { - if (i > 0) s.append(", "); - s.append(params[i].ParameterType); - } - s.append(")"); - return s.toString(); - } - - //########################################################################## - -} // class MethodBase diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java deleted file mode 100644 index 8e8d879593..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Method implementation attributes - * @author Nikolay Mihaylov - * @version 1.0 - */ -public abstract class MethodImplAttributes { - - //########################################################################## - - /** - * Specifies flags about code type. 3 - */ - public static final short CodeTypeMask = (short) 0x0003; - - /** - * Specifies that the method implementation is in MSIL. 0 - */ - public static final short IL = (short) 0x0000; - - /** - * Specifies that the method implementation is native. 1 - */ - public static final short Native = (short) 0x0001; - - /** - * This member supports the .NET Framework infrastructure and - * is not intended to be used directly from your code. 2 - */ - public static final short OPTIL = (short) 0x0002; - - /** - * Specifies that the method implementation is provided by the runtime. 3 - */ - public static final short Runtime = (short) 0x0003; - - - - /** - * Specifies whether the code is managed or unmanaged. 4 - */ - public static final short ManagedMask = (short) 0x0004; - - /** - * Specifies that the method implementation is managed, otherwise unmanaged. - */ - public static final short Managed = (short) 0x0000; - - /** - * Specifies that the method implementation is unmanaged, otherwise managed. - */ - public static final short Unmanaged = (short) 0x0004; - - - - /** - * Specifies that the method cannot be inlined. 8 - */ - public static final short NoInlining = (short) 0x0008; - - /** - * Specifies that the method is not defined. 16 - */ - public static final short ForwardRef = (short) 0x0010; - - /** - * Specifies that the method is single-threaded through the body. - * You can also use the C# lock statement or the Visual Basic - * Lock function for this purpose. 32 - */ - public static final short Synchronized = (short) 0x0020; - - /** - * Specifies that the method signature is exported exactly as declared. 128 - */ - public static final short PreserveSig = (short) 0x0080; - - /** - * Specifies an internal call. 4096 - */ - public static final short InternalCall = (short) 0x1000; - - /** - * Specifies a range check value. 
65535 - */ - public static final short MaxMethodImplVal = (short) 0xffff; - - //########################################################################## - - public static String toString(int implAttr) { - StringBuffer s = new StringBuffer(); - switch (implAttr & CodeTypeMask) { - case IL: s.append("cil"); break; - case Native: s.append("native"); break; - case Runtime: s.append("runtime"); break; - } - switch (implAttr & ManagedMask) { - case Managed: s.append(" managed"); break; - case Unmanaged: s.append(" unmanaged"); break; - } - if ((implAttr & NoInlining) != 0) s.append(" noinlining"); - if ((implAttr & ForwardRef) != 0) s.append(" forwardref"); - if ((implAttr & Synchronized) != 0) s.append(" synchronized"); - if ((implAttr & InternalCall) != 0) s.append(" internalcall"); - return s.toString(); - } - - //########################################################################## - -} // class MethodImplAttributes diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java deleted file mode 100644 index a415e7551f..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import java.util.Iterator; - -/** - * Discovers the attributes of a method and provides access to method metadata. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public class MethodInfo extends MethodBase { - - public boolean HasPtrParamOrRetType() { - if(ReturnType.IsByRef() && !(ReturnType.GetElementType().IsValueType())) { - /* A method returning ByRef won't pass peverify, so I guess this is dead code. */ - return true; - } - if(ReturnType.IsPointer()) { - return true; - } - return super.HasPtrParamOrRetType(); - } - - //########################################################################## - // public members - - public final int MemberType() { return MemberTypes.Method; } - - public final boolean IsConstructor() { return false; } - - /** The return type of this method. - */ - public final Type ReturnType; - - //########################################################################## - // protected members - - protected static final MethodInfo[] EMPTY_ARRAY = new MethodInfo[0]; - - /** - * Constructor Initializes a new instance of the MethodInfo class. - */ - protected MethodInfo(String name, Type declType, - int attrs, Type returnType, Type[] paramTypes ) - { - super(name, declType, attrs, paramTypes); - ReturnType = returnType; - } - - protected MethodInfo(String name, Type declType, - int attrs, Type returnType, ParameterInfo[] params ) - { - super(name, declType, attrs, params); - ReturnType = returnType; - } - - public String toString() { - return MethodAttributes.toString(Attributes) + " " + ReturnType + - " " + DeclaringType + "::" + Name + params2String(); - } - - //########################################################################## - -} // class MethodInfo diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Module.java b/src/msil/ch/epfl/lamp/compiler/msil/Module.java deleted file mode 100644 index 8dd5e7119f..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/Module.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import java.util.Map; -import java.util.HashMap; - -/** - * Defines and represents a module. 
Get an instance of ModuleBuilder - * by calling DefineDynamicModule. - * A module is a portable executable file of type .dll or .exe consisting - * of one or more classes and interfaces. There may be multiple namespaces - * contained in a single module, and a namespace may span multiple modules. - * One or more modules deployed as a unit compose an assembly. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public abstract class Module extends CustomAttributeProvider { - - //########################################################################## - // public fields - - /** String representing the name of the module with the path removed. */ - public final String Name; - - /** String representing the fully qualified name and path to this module. */ - public final String FullyQualifiedName; - - /** String representing the name of the module. */ - public String ScopeName; - - /** The Assembly the Module belongs to. */ - public final Assembly Assembly; - - //########################################################################## - // constructor - - protected Module(String name, String filename, - String scopeName, Assembly assembly) - { - this.Name = name; - this.FullyQualifiedName = filename; - this.ScopeName = scopeName; - this.Assembly = assembly; - } - - //########################################################################## - // public methods - - /** Returns the specified class, performing a case-sensitive search. */ - public Type GetType(String name) { - initTypes(); - return (Type) typesMap.get(name); - } - - /** - * @return all the classes defined within this module. - */ - public Type[] GetTypes() { - initTypes(); - return (Type[]) types.clone(); - } - - /** - * @return the global field with the specified name. - */ - public FieldInfo GetField(String name) { - for (int i = 0; i < fields.length; i++) - if (fields[i].Name.equals(name)) - return fields[i]; - return null; - } - - /** - * @return an array of the global fields of the module - */ - public FieldInfo[] GetFields() { - return (FieldInfo[]) fields.clone(); - } - - /** - * @return - the global method with the specified name - */ - public MethodInfo GetMethod(String name) { - for (int i = 0; i < methods.length; i++) - if (methods[i].Name.equals(name)) - return methods[i]; - return null; - } - - /** - * @return - an array of all the global methods defined in this module.
- */ - public MethodInfo[] GetMethods() { - return (MethodInfo[]) methods.clone(); - } - - /** - */ - public String toString() { return Name; } - - //######################################################################## - // protected members - - // all the types defined in this module - protected final Map typesMap = new HashMap(); - - // all the types defined in this module - protected Type[] types; - - // the global fields of the module - protected FieldInfo[] fields = FieldInfo.EMPTY_ARRAY; - - // the global methods of the module - protected MethodInfo[] methods = MethodInfo.EMPTY_ARRAY; - - protected Type addType(Type type) { - addType(type.FullName, type); - Assembly.addType(type); - return type; - } - - protected Type addType(String name, Type type) { - assert type!= null; - typesMap.put(name, type); - return type; - } - - private boolean initTypes = true; - protected final void initTypes() { - if (initTypes) { - loadTypes(); - initTypes = false; - } - } - - protected void loadTypes() {} - - private boolean initGlobals = true; - protected final void initGlobals() { - if (initGlobals) { - loadGlobals(); - initGlobals = false; - } - } - - protected void loadGlobals() {} - - //########################################################################## - -} // class Module diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java b/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java deleted file mode 100644 index a31db16c92..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import ch.epfl.lamp.compiler.msil.util.Table; -import ch.epfl.lamp.compiler.msil.util.Table.*; - -import java.io.File; - -import java.util.Map; -import java.util.HashMap; - -/** Represents an assembly that resides in a real .NET assembly - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -final class PEAssembly extends Assembly { - - private final PEFile pefile; - - private PEModule mainModule; - - public PEAssembly(PEFile pefile, AssemblyName an) { - super(an, true); - this.pefile = pefile; - String name = pefile.ModuleDef(1).getName(); - mainModule = new PEModule(pefile, 1, name, this); - addModule(name, mainModule); - //initModules(); - } - - protected void loadModules() { - File parentDir = pefile.getParentFile(); - FileDef fd = pefile.FileDef; - for (int row = 1; row <= fd.rows; row++) { - fd.readRow(row); - String filename = fd.getName(); - File f = new File(parentDir, filename); - PEFile pe = Assembly.getPEFile(f); - if (pe == null) { - f = new File(filename); - pe = Assembly.getPEFile(f); - if (pe == null) - continue; -// throw new RuntimeException("Cannot find file " + filename + -// " referenced by assembly " + this); - } - String name = pe.ModuleDef(1).getName(); - PEModule module = new PEModule(pe, 1, name, this); - addModule(name, module); - } - } - - public File getFile() { - return pefile.getUnderlyingFile(); - } - - protected void loadCustomAttributes(Type attributeType) { - initModules(); - mainModule.initAttributes(this, 1, Table.AssemblyDef.ID, attributeType); - } - - //########################################################################## - -} // class PEAssembly diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java b/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java deleted file mode 100644 index 3eb22b9985..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java +++ /dev/null @@ -1,941 +0,0 
@@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import ch.epfl.lamp.compiler.msil.util.*; -import ch.epfl.lamp.compiler.msil.util.Table.*; - -import ch.epfl.lamp.compiler.msil.Type; -import ch.epfl.lamp.compiler.msil.Module; - -import java.io.File; -import java.io.RandomAccessFile; -import java.io.PrintStream; -import java.io.IOException; -import java.io.FileNotFoundException; - -import java.nio.ByteBuffer; -import java.nio.channels.FileChannel; -import java.nio.MappedByteBuffer; - -import java.util.Date; - -/** - * A class that represents a .NET PE/COFF image. - * - * @author Nikolay Mihaylov - * @version 1.0 - * @see Standard ECMA-335: Common Language Infrastructure (CLI), 4th edition (June 2006) - */ -public class PEFile { - - //########################################################################## - - public static final int INT_SIZE = 4; - - protected final int PE_SIGNATURE_OFFSET; - protected final int COFF_HEADER_OFFSET; - protected final int PE_HEADER_OFFSET; - - protected final int numOfSections; - protected final int CLI_RVA; - protected final int CLI_Length; - public final int rvaMetadata; - public final int posMetadata; - protected final int numOfStreams; - protected final int optHeaderSize; - - protected final File underlyingFile; - protected final RandomAccessFile file; - protected final MappedByteBuffer buf; - - protected final PESection [] sections; - - public PEStream Meta, Strings, US, Blob, GUID; - - private final Table [] tables = new Table[Table.MAX_NUMBER]; - - public final boolean isDLL; - - protected final int heapSizes; - public final boolean StringIsShort, BlobIsShort, GUIDIsShort; - - protected PEModule pemodule = null; - - //########################################################################## - // PEFile constructor - - private static void fileFormatCheck(boolean cond, String s) { - if (cond) - throw new RuntimeException(s); - } - - /** - */ - public PEFile(String filename) throws FileNotFoundException { - this.underlyingFile = new File(filename); - this.file = new RandomAccessFile(underlyingFile, "r"); - FileChannel fc = file.getChannel(); - MappedByteBuffer bb = null; - try { - bb = fc.map(FileChannel.MapMode.READ_ONLY, 0L, fc.size()); - } catch (IOException e) { throw new RuntimeException(e); } - - /** Ecma 335, 25 File format extensions to PE: - * - * "Unless stated otherwise, all binary values are stored in little-endian format." - */ - - bb.order(java.nio.ByteOrder.LITTLE_ENDIAN); - this.buf = bb; - - /** Ecma 335, 25.2.1 MS-DOS header: - * - * "The PE format starts with an MS-DOS stub of exactly the following 128 bytes to - * be placed at the front of the module." - * - * We are only checking for MZ (Mark Zbikowski) - */ - - seek(0); - fileFormatCheck(readByte() != 0x4d, "Invalid PE file format: " + filename); // 'M' - fileFormatCheck(readByte() != 0x5a, "Invalid PE file format: " + filename); // 'Z' - - /** Ecma 335, 25.2.1 MS-DOS header: - * - * "At offset 0x3c in the DOS header is a 4-byte unsigned integer offset, lfanew, - * to the PE signature (shall be "PE\0\0"), immediately followed by the PE file header." 
- */ - - seek(0x3c); - PE_SIGNATURE_OFFSET = readInt(); - seek(PE_SIGNATURE_OFFSET); - // start of PE signature (a signature that is just 4 bytes long) - fileFormatCheck(readByte() != 0x50, "Invalid PE file format: " + filename); // 'P' - fileFormatCheck(readByte() != 0x45, "Invalid PE file format: " + filename); // 'E' - fileFormatCheck(readByte() != 0x00, "Invalid PE file format: " + filename); // 0 - fileFormatCheck(readByte() != 0x00, "Invalid PE file format: " + filename); // 0 - - //trace("PE signature offset = 0x" + Table.int2hex(PE_SIGNATURE_OFFSET)); - - COFF_HEADER_OFFSET = PE_SIGNATURE_OFFSET + 4; - PE_HEADER_OFFSET = COFF_HEADER_OFFSET + 20; - - seek(COFF_HEADER_OFFSET); - - /* start of PE file header, Sec. 25.2.2 in Partition II */ - skip(2); // Machine (always 0x14c) - numOfSections = readShort(); // Number of sections; indicates size of the Section Table - Date timeStamp = new Date(readInt() * 1000L); - skip(2 * INT_SIZE); // skip Pointer to Symbol Table (always 0) and Number of Symbols (always 0) - optHeaderSize = readShort(); - int characteristics = readShort(); - isDLL = (characteristics & 0x2000) != 0; - - seek(PE_HEADER_OFFSET + 208); // p.157, Partition II - - CLI_RVA = readInt(); // called "Data Directory Table" in Ch. 4 of Expert IL book - CLI_Length = readInt(); - //trace("CLI_RVA = 0x" + Table.int2hex(CLI_RVA)); - //trace("CLI_Length = 0x" + Table.int2hex(CLI_Length)); - - sections = new PESection[numOfSections]; - - seek(PE_HEADER_OFFSET + optHeaderSize); // go to the sections descriptors - - for (int i = 0; i < numOfSections; i++) { - seek(PE_HEADER_OFFSET + optHeaderSize + i * 40); - sections[i] = new PESection(this); - //sections[i].dump(System.out); - } - - seek(fromRVA(CLI_RVA)); - skip(8); - rvaMetadata = readInt(); - posMetadata = fromRVA(rvaMetadata); - //trace("rvaMetadata = 0x" + Table.int2hex(rvaMetadata)); - //trace("posMetadata = 0x" + Table.int2hex(posMetadata)); - - seek(posMetadata); - int magic = readInt(); - //trace("Magic metadata signature = 0x" + Table.int2hex(magic)); - fileFormatCheck(magic != 0x424a5342, "Invalid metadata signature!"); - skip(8); - - int strlength = readInt(); - //trace("version name string length = " + strlength); - skip(strlength); - align(INT_SIZE, posMetadata); - //trace("position of flags = 0x" + Table.int2hex((int)pos())); - skip(2); // ignore the flags - numOfStreams = readShort(); - //trace("Number of metadata streams = " + numOfStreams); - - for (int i = 0; i < numOfStreams; i++) { - PEStream strm = new PEStream(this); - //strm.dump(System.out); - if (strm.name.equals("#~") - || strm.name.equals("#-")) Meta = strm; - if (strm.name.equals("#Strings")) Strings = strm; - if (strm.name.equals("#US")) US = strm; - if (strm.name.equals("#Blob")) Blob = strm; - if (strm.name.equals("#GUID")) GUID = strm; - } - - seek(Meta.offset); - skip(6); - heapSizes = readByte(); - StringIsShort = (heapSizes & 0x01) == 0; - GUIDIsShort = (heapSizes & 0x02) == 0; - BlobIsShort = (heapSizes & 0x04) == 0; - - skip(1); - long tablesMask = readLong(); - long nonStandardTables = tablesMask & ~Table.VALID_TABLES_MASK; - skip(8); //go to the list of number of rows - for (int i = 0; i < tables.length; i++) { - tables[i] = Table.newTable - (this, i, ((tablesMask >> i) & 0x01) != 0 ? 
readInt() : 0); - } - - initIndexSize(); - initTableRefs(); - // populate the tables from the CLI image file - long start = pos(); - for (int i = 0; i < tables.length; i++) - start = tables[i].init(start); - - } // PEFile() - - - public final int[] indexSize = new int[Table.TABLE_SET_LENGTH]; - - private void initIndexSize() { - for (int i = 0; i < Table.TABLE_SET_LENGTH; i++) { - indexSize[i] = 2; - int[] tableSet = Table.TableSet[i]; - int treshold = (65536 >> Table.NoBits[i]); - for (int j = 0; j < tableSet.length; j++) { - if (tableSet[j] >= 0) { - Table t = tables[tableSet[j]]; - if (t.rows >= treshold) { - indexSize[i] = 4; - break; - } - } - } - } - } - - protected void initModule(PEModule module) { - if (pemodule != null) - throw new RuntimeException("File " + this - + " has already been assigned module " - + pemodule + "; new module is " + module); - this.pemodule = module; - } - - //########################################################################## - - public ModuleDef ModuleDef; - public ModuleDef ModuleDef(int i) { - ModuleDef.readRow(i); - return ModuleDef; - } - - public TypeRef TypeRef; - - public TypeDef TypeDef; - public TypeDef TypeDef(int i) { - TypeDef.readRow(i); - return TypeDef; - } - - public FieldTrans FieldTrans; - public FieldTrans FieldTrans(int i) { - FieldTrans.readRow(i); - return FieldTrans; - } - - public FieldDef FieldDef; - public FieldDef FieldDef(int i) { - FieldDef.readRow(i); - return FieldDef; - } - - public MethodTrans MethodTrans; - public MethodTrans MethodTrans(int i) { - MethodTrans.readRow(i); - return MethodTrans; - } - - public MethodDef MethodDef; - public MethodDef MethodDef(int i) { MethodDef.readRow(i); return MethodDef; } - - - public ParamDef ParamDef; - public ParamDef ParamDef(int i) { ParamDef.readRow(i); return ParamDef; } - - public GenericParam GenericParam; - - public GenericParam GenericParam(int i) { - GenericParam.readRow(i); - return GenericParam; - } - - public MethodSpec MethodSpec; - - public MethodSpec MethodSpec(int i) { - MethodSpec.readRow(i); - return MethodSpec; - } - - public GenericParamConstraint GenericParamConstraint; - - public GenericParamConstraint GenericParamConstraint(int i) { - GenericParamConstraint.readRow(i); - return GenericParamConstraint; - } - - public InterfaceImpl InterfaceImpl; - public MemberRef MemberRef; - public Constant Constant; - public CustomAttribute CustomAttribute; - public FieldMarshal FieldMarshal; - public DeclSecurity DeclSecurity; - public ClassLayout ClassLayout; - public FieldLayout FieldLayout; - public StandAloneSig StandAloneSig; - public EventMap EventMap; - public EventDef EventDef; - public PropertyMap PropertyMap; - public PropertyDef PropertyDef; - public MethodSemantics MethodSemantics; - public MethodImpl MethodImpl; - public ModuleRef ModuleRef; - public TypeSpec TypeSpec; - public ImplMap ImplMap; - public FieldRVA FieldRVA; - public AssemblyDef AssemblyDef; - public AssemblyRef AssemblyRef; - public FileDef FileDef; - public ExportedType ExportedType; - public ManifestResource ManifestResource; - public NestedClass NestedClass; - - - private void initTableRefs() { - ModuleDef = (ModuleDef) getTable(Table.ModuleDef.ID); - TypeRef = (TypeRef) getTable(Table.TypeRef.ID); - TypeDef = (TypeDef) getTable(Table.TypeDef.ID); - FieldTrans = (FieldTrans) getTable(Table.FieldTrans.ID); - FieldDef = (FieldDef) getTable(Table.FieldDef.ID); - MethodTrans = (MethodTrans) getTable(Table.MethodTrans.ID); - MethodDef = (MethodDef) getTable(Table.MethodDef.ID); - ParamDef = 
(ParamDef) getTable(Table.ParamDef.ID); - InterfaceImpl = (InterfaceImpl) getTable(Table.InterfaceImpl.ID); - MemberRef = (MemberRef) getTable(Table.MemberRef.ID); - Constant = (Constant) getTable(Table.Constant.ID); - CustomAttribute = (CustomAttribute) getTable(Table.CustomAttribute.ID); - FieldMarshal = (FieldMarshal) getTable(Table.FieldMarshal.ID); - DeclSecurity = (DeclSecurity) getTable(Table.DeclSecurity.ID); - ClassLayout = (ClassLayout) getTable(Table.ClassLayout.ID); - FieldLayout = (FieldLayout) getTable(Table.FieldLayout.ID); - StandAloneSig = (StandAloneSig) getTable(Table.StandAloneSig.ID); - EventMap = (EventMap) getTable(Table.EventMap.ID); - EventDef = (EventDef) getTable(Table.EventDef.ID); - PropertyMap = (PropertyMap) getTable(Table.PropertyMap.ID); - PropertyDef = (PropertyDef) getTable(Table.PropertyDef.ID); - MethodSemantics = (MethodSemantics) getTable(Table.MethodSemantics.ID); - MethodImpl = (MethodImpl) getTable(Table.MethodImpl.ID); - ModuleRef = (ModuleRef) getTable(Table.ModuleRef.ID); - TypeSpec = (TypeSpec) getTable(Table.TypeSpec.ID); - ImplMap = (ImplMap) getTable(Table.ImplMap.ID); - FieldRVA = (FieldRVA) getTable(Table.FieldRVA.ID); - AssemblyDef = (AssemblyDef) getTable(Table.AssemblyDef.ID); - AssemblyRef = (AssemblyRef) getTable(Table.AssemblyRef.ID); - FileDef = (FileDef) getTable(Table.FileDef.ID); - ExportedType = (ExportedType) getTable(Table.ExportedType.ID); - NestedClass = (NestedClass) getTable(Table.NestedClass.ID); - ManifestResource = - (ManifestResource) getTable(Table.ManifestResource.ID); - GenericParam = (GenericParam) getTable(Table.GenericParam.ID); - MethodSpec = (MethodSpec) getTable(Table.MethodSpec.ID); - GenericParamConstraint = (GenericParamConstraint) getTable(Table.GenericParamConstraint.ID); - } - - public static String long2hex(long a) { - StringBuffer str = new StringBuffer("0000000000000000"); - str.append(Long.toHexString(a)); - int l = str.length(); - return str.substring(l - 16, l); - } - - public static String int2hex(int a) { - StringBuffer str = new StringBuffer("00000000"); - str.append(Integer.toHexString(a)); - int l = str.length(); - return str.substring(l - 8, l); - } - - public static String short2hex(int a) { - StringBuffer str = new StringBuffer("0000"); - str.append(Integer.toHexString(a)); - int l = str.length(); - return str.substring(l - 4, l); - } - - public static String byte2hex(int a) { - StringBuffer str = new StringBuffer("00"); - str.append(Integer.toHexString(a)); - int l = str.length(); - return str.substring(l - 2, l); - } - - public static String bytes2hex(byte[] buf) { - StringBuffer str = new StringBuffer(); - for (int i = 0; i < buf.length; i++) { - str.append(byte2hex(buf[i])); - if (i < buf.length - 1) - str.append(" "); - } - return str.toString(); - } - - //########################################################################## - // filename - - public File getUnderlyingFile() { - return underlyingFile; - } - - /** - * @return the absolute path of the file - */ - public String getAbsolutePath() { - return underlyingFile.getAbsolutePath(); - } - - /** - * @return the name of this file - */ - public String getName() { - return underlyingFile.getName(); - } - - /** - * @return - */ - public String getParent() { - return underlyingFile.getParent(); - } - - /** - * @return the file representing the directory the file belongs to - */ - public File getParentFile() { - return underlyingFile.getParentFile(); - } - - public String toString() { - return getAbsolutePath(); - } - - 
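The hex helpers just above left-pad with zeros and keep only the right-most characters, giving fixed-width output for bytes, shorts, ints, and longs. A small hypothetical check (the sample values echo constants that appear elsewhere in PEFile, such as the 'M' of the MS-DOS stub check, the COFF Machine value 0x14c, and the metadata magic):

    import ch.epfl.lamp.compiler.msil.PEFile;

    class HexHelpersDemo {                               // hypothetical demo class
        public static void main(String[] args) {
            System.out.println(PEFile.byte2hex(0x4d));       // "4d"
            System.out.println(PEFile.short2hex(0x14c));     // "014c"
            System.out.println(PEFile.int2hex(0x424a5342));  // "424a5342"
            System.out.println(PEFile.long2hex(0x3c));       // "000000000000003c"
        }
    }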
//########################################################################## - // file pointer manipulation methods - - /** Returns the current position in the file. */ - public int pos() { - return buf.position(); - } - - /** Go to the specified position in the file. */ - public void seek(int pos) { - buf.position(pos); - } - - - /** Align the current position in the file. */ - public void align(int base) { align(base, 0); } - - /** Align the current position in a section starting at offset. */ - public void align(int base, int offset) { - int p = pos() - offset; - seek( offset + ((p % base) == 0 ? p : (p/base + 1) * base)); - } - - /** Computes the position in the file that corresponds to the given RVA. */ - public int fromRVA(int rva) { - int i; - for(i = 0; i < numOfSections; i++) - if(sections[i].virtAddr <= rva && - rva <= (sections[i].virtAddr + sections[i].virtSize)) - return rva - sections[i].virtAddr + sections[i].realAddr; - throw new RuntimeException("RVA 0x" + Integer.toHexString(rva) + - " is not within this file's sections!"); - } - - /** Go to the specified RVA (Relative Virtual Address). */ - public void gotoRVA(int rva) { - seek(fromRVA(rva)); - } - - /** Move forward in the file by the specified number of bytes. */ - public void skip(int n) { - buf.position(buf.position() + n); - } - - /** - * Returns a memory mapped little-endian buffer - * for the specified region of the file. - */ - public MappedByteBuffer mapBuffer(long offset, int size) { - try { - MappedByteBuffer b = file.getChannel() - .map(FileChannel.MapMode.READ_ONLY, offset, size); - b.order(java.nio.ByteOrder.LITTLE_ENDIAN); - return b; - } catch (IOException e) { throw new RuntimeException(e); } - } - - /** Returns a buffer of the given size, starting at the given offset in the file. */ - public ByteBuffer getBuffer(long offset, int size) { - buf.mark(); - buf.position((int)offset); - ByteBuffer bb = buf.slice(); - buf.reset(); - bb.limit(size); - bb.order(java.nio.ByteOrder.LITTLE_ENDIAN); - return bb; - } - - //########################################################################## - // file read methods - - /** - * Read bs.length number of bytes - */ - public void read(byte[] bs) { - buf.get(bs); - } - - /** - * Read 1-byte integer from the current position in the file. - */ - public int readByte() { - return buf.get(); - } - - /** - * Read 2-byte integer from the current position in the file. - */ - public int readShort() { - return buf.getShort(); - } - - /** - * Read 4-byte integer from the current position in the file. - */ - public int readInt() { - return buf.getInt(); - } - - /** - * Read 8-byte integer from the current position in the file. - */ - public long readLong() { - return buf.getLong(); - } - - /** - * @return the size of string indices for this file. - */ - public int getStringIndexSize() { - return StringIsShort ? 2 : 4; - } - - /** - * @return the size of GUID indices for this file. - */ - public int getGUIDIndexSize() { - return GUIDIsShort ? 2 : 4; - } - - /** - * @return the size of Blob indices for this file. - */ - public int getBlobIndexSize() { - return BlobIsShort ? 2 : 4; - } - - /** - * @return the size of the index to tableID for this file. - * @param tableID the ID of the table - */ - public int getTableIndexSize(int tableID) { - return tables[tableID].isShort ?
2 : 4; - } - - /** - * @return the size of the index to a set of tables with the given @param TableSetID - * @param tableSetID the ID of the table set - */ - public int getTableSetIndexSize(int tableSetID) { - return indexSize[tableSetID]; - } - - /** - * Read a String index from the current position in the file. - * @return an index into the String stream - */ - public int readStringIndex() { - return StringIsShort ? readShort() : readInt(); - } - - /** - * Read a GUID index from the current position in the file. - * @return an index in to the GUID stream - */ - public int readGUIDIndex() { - return GUIDIsShort ? readShort() : readInt(); - } - - /** - * Read a Blob index from the current position in the file. - * @return an index into the Blob stream - */ - public int readBlobIndex() { - return BlobIsShort ? readShort() : readInt(); - } - - /** Read an entry interpreted as index into table @param tableID. */ - public int readTableIndex(int tableId) { - return tables[tableId].isShort ? readShort() : readInt(); - } - - /***/ - public int readTableSetIndex(int tableSetId) { - return indexSize[tableSetId] == 2 ? readShort() : readInt(); - } - - /** - * Read a string from the String stream - * @return the string at the given position - * @param pos the position of the string in the String stream - */ - public String getString(int pos) { - String s = Strings.getString(pos); - return s;//.length() == 0 ? null : s; - } - - /** - * Read a string from the US (User Strings) stream - * @return the string at the given position - * @param pos the position of the string in the US stream - */ - public String getUString(int pos) { - return US.getString(pos); - } - - /** - * Read a blob from the Blob Stream - * @return the blob at the given position - * @param pos the position of the blob in the Blob stream - */ - public byte[] getBlob(int pos) { - return Blob.getBlob(pos); - } - - /***/ - public Sig getSignature(int pos) { - //return new Sig(getBlob(pos)); - return Blob.getSignature(pos); - } - - /***/ - public byte[] getGUID(int pos) { - return GUID.getGUID(pos); - } - - /** - * @return the table with the corresponding ID. - */ - public final Table getTable(int tableID) { - return tables[tableID]; - } - - //########################################################################## - - /***/ - void trace(String msg) { - System.out.println("[trace] " + msg); - } - - //########################################################################## - - public Sig newSignature(ByteBuffer buf) { - return new Sig(buf); - } - - /** - */ - public class Sig implements Signature { - - //###################################################################### - // instance members - - protected final ByteBuffer buf; - protected final int pos; - protected final int length; - - public Sig(ByteBuffer buf) { - this.buf = buf; - //int tmpPos = buf.position(); - length = decodeInt(); - this.pos = buf.position(); - } - - public String toString() { - StringBuffer b = new StringBuffer("("); - int savedPos = buf.position(); - reset(); - for (int i = 0; i < length; i++) { - b.append(byte2hex(readByte())); - if (i < length - 1) - b.append(" "); - } - buf.position(savedPos); - return b.append(")").toString(); - } - - public Sig reset() { buf.position(pos); return this; } - - public int pos() { return buf.position() - pos; } - - /** @return the byte at the current position in the signature Blob. 
- * Stay at the same position - */ - public int getByte() { - return (buf.get(buf.position()) + 0x100) & 0xff; - } - - /** @return the byte at the current position in the signature Blob. - * Move to the next byte. - */ - public int readByte() { return (buf.get() + 0x100) & 0xff; } - - /** Skip the current byte if equal to the given value. */ - public void skipByte(int b) { if (b == getByte()) buf.get(); } - - /** Decodes an integer from the signature Blob. - * @return the decoded integer - */ - public int decodeInt() { - int res = readByte(); - if ((res & 0x80) != 0) { - res = ((res & 0x7f) << 8) | readByte(); - if ((res & 0x4000) != 0) - res = ((res & 0x3fff)<<16) | (readByte()<<8) | readByte(); - } - return res; - } - - /** @return - the type encoded at the current position in the signature - * according to 23.2.12 - */ - public Type decodeType() { - try { return decodeType0(); } - catch (RuntimeException e) { - System.out.println("" + pos() + "@" + this); - throw e; - } - } - - public Type decodeType0() { - Type type = null; - int desc = readByte(); - switch (desc) { - case ELEMENT_TYPE_BOOLEAN:type = Type.GetType("System.Boolean"); break; - case ELEMENT_TYPE_CHAR: type = Type.GetType("System.Char"); break; - case ELEMENT_TYPE_I1: type = Type.GetType("System.SByte"); break; - case ELEMENT_TYPE_U1: type = Type.GetType("System.Byte"); break; - case ELEMENT_TYPE_I2: type = Type.GetType("System.Int16"); break; - case ELEMENT_TYPE_U2: type = Type.GetType("System.UInt16"); break; - case ELEMENT_TYPE_I4: type = Type.GetType("System.Int32"); break; - case ELEMENT_TYPE_U4: type = Type.GetType("System.UInt32"); break; - case ELEMENT_TYPE_I8: type = Type.GetType("System.Int64"); break; - case ELEMENT_TYPE_U8: type = Type.GetType("System.UInt64"); break; - case ELEMENT_TYPE_R4: type = Type.GetType("System.Single"); break; - case ELEMENT_TYPE_R8: type = Type.GetType("System.Double"); break; - case ELEMENT_TYPE_OBJECT: type = Type.GetType("System.Object"); break; - case ELEMENT_TYPE_STRING: type = Type.GetType("System.String"); break; - case ELEMENT_TYPE_I: type = Type.GetType("System.IntPtr"); break; - case ELEMENT_TYPE_U: type = Type.GetType("System.UIntPtr"); break; - case ELEMENT_TYPE_PTR: // Followed by token. - if (getByte() == ELEMENT_TYPE_VOID) { - readByte(); - type = Type.mkPtr(Type.GetType("System.Void")); - } else type = Type.mkPtr(decodeType()); - break; - case ELEMENT_TYPE_BYREF: /* although BYREF is not listed in 23.2.12. as possible alternative, this method is also called when parsing the signatures of a method param and a method return, which do allow for BYREF */ - type = Type.mkByRef(decodeType()); - break; - case ELEMENT_TYPE_VALUETYPE: // Followed by TypeDefOrRefEncoded - assert true; - case ELEMENT_TYPE_CLASS: - // Followed by token - type = pemodule.getTypeDefOrRef(decodeInt()); - if (type == null) throw new RuntimeException(); - break; - - case ELEMENT_TYPE_SZARRAY: // Single-dim array with 0 lower bound. - skipCustomMods(); - type = Type.mkArray(decodeType(), 1); - break; - case ELEMENT_TYPE_ARRAY: - // ... ... 
- // ArrayShape defined in 23.2.13 ArrayShape - Type elem = decodeType(); - int rank = decodeInt(); - int numSizes = decodeInt(); - for (int i = 0; i < numSizes; i++) - decodeInt(); // TODO don't ignore - int numLoBounds = decodeInt(); - for (int i = 0; i < numLoBounds; i++) - decodeInt(); // TODO don't ignore - type = Type.mkArray(elem, rank); - break; - - // a grammar production from 23.2.12 Type - // GENERICINST (CLASS | VALUETYPE) TypeDefOrRefEncoded GenArgCount Type* - case ELEMENT_TYPE_GENERICINST: - int b = readByte(); - /*- TODO don't ignore b as done above. Should .NET valuetypes be represented as Scala case classes? */ - Type instantiatedType = pemodule.getTypeDefOrRef(decodeInt()); - int numberOfTypeArgs = decodeInt(); - Type[] typeArgs = new Type[numberOfTypeArgs]; - for (int iarg = 0; iarg < numberOfTypeArgs; iarg++) { - typeArgs[iarg] = decodeType(); - } - type = new ConstructedType(instantiatedType, typeArgs); - break; - - // another grammar production from 23.2.12 Type - // ELEMENT_TYPE_VAR number The number non-terminal following MVAR - // or VAR is an unsigned integer value (compressed). - /* See also duplicate code in PEModule.java */ - case ELEMENT_TYPE_VAR: - int typeArgAsZeroBased = decodeInt(); - type = new Type.TMVarUsage(typeArgAsZeroBased, true); - break; - - // another grammar production from 23.2.12 Type - // ELEMENT_TYPE_MVAR number The number non-terminal following MVAR - // or VAR is an unsigned integer value (compressed). - /* See also duplicate code in PEModule.java */ - case ELEMENT_TYPE_MVAR: - typeArgAsZeroBased = decodeInt(); - type = new Type.TMVarUsage(typeArgAsZeroBased, false); - break; - - case ELEMENT_TYPE_FNPTR: - // Followed MethodDefSig or by MethodRefSig. - case ELEMENT_TYPE_END: - // Marks end of a list - case ELEMENT_TYPE_CMOD_REQD: - // Required modifier : followed by a TypeDef or TypeRef token. - case ELEMENT_TYPE_CMOD_OPT: - // Optional modifier : followed by a TypeDef or TypeRef token. - case ELEMENT_TYPE_INTERNAL: - // Implemented within the CLI. - case ELEMENT_TYPE_MODIFIER: - // Or'd with following element types. - case ELEMENT_TYPE_SENTINEL: - // Sentinel for varargs method signature. - case ELEMENT_TYPE_PINNED: - // Denotes a local variable that points at a pinned object. - default: - throw new RuntimeException(byte2hex(desc) + - "@" + pos() + " in " + this); - - } - if (type == null) throw new RuntimeException(); - return type; - } // decodeType0() - - public PECustomMod decodeFieldType() { - skipByte(FIELD); // 0x06 - CustomModifier[] cmods = getCustomMods(); - Type fieldType = decodeType(); - return new PECustomMod(fieldType, cmods); - } - - /** decodes the return type of a method signature (22.2.11). 
*/ - public Type decodeRetType() { - skipCustomMods(); - switch (getByte()) { - case ELEMENT_TYPE_VOID: - readByte(); - return Type.GetType("System.Void"); - case ELEMENT_TYPE_TYPEDBYREF: - return Type.GetType("System.TypedReference"); - case ELEMENT_TYPE_BYREF: - return decodeType(); - default: - return decodeType(); - } - } - - public Type decodeParamType() { - skipCustomMods(); - switch (getByte()) { - case ELEMENT_TYPE_BYREF: - return decodeType(); - case ELEMENT_TYPE_TYPEDBYREF: - return Type.GetType("System.TypedReference"); - default: - return decodeType(); - } - } - - public void skipCustomMods() { - while (getByte() == ELEMENT_TYPE_CMOD_OPT /* 0x20 */ - || getByte() == ELEMENT_TYPE_CMOD_REQD /* 0x1f */ ) - { - boolean isREQD = (getByte() == ELEMENT_TYPE_CMOD_REQD); // 0x1f - // skip the tag 23.2.7 - readByte(); - // skip the TypeDefOrRefEncoded (23.2.8) - Type ignored = pemodule.getTypeDefOrRef(decodeInt()); - if(isREQD) { - // System.err.println("ELEMENT_TYPE_CMOD_REQD: " + ignored); - // throw new RuntimeException("Reqired CMOD: " + ignored); - } - } - } - - /** - * @see CustomModifier - */ - public CustomModifier[] getCustomMods() { - java.util.List/**/ cmods = new java.util.LinkedList(); - while (getByte() == ELEMENT_TYPE_CMOD_OPT || getByte() == ELEMENT_TYPE_CMOD_REQD) { - boolean isReqd = (getByte() == ELEMENT_TYPE_CMOD_REQD); - readByte(); // tag 23.2.7 - Type t = pemodule.getTypeDefOrRef(decodeInt()); // TypeDefOrRefEncoded (23.2.8) - cmods.add(new CustomModifier(isReqd, t)); - } - CustomModifier[] res = (CustomModifier[])cmods.toArray(new CustomModifier[0]); - return res; - } - - //###################################################################### - - } // class Sig - - //########################################################################## - -} // class PEFile diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java b/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java deleted file mode 100644 index cb8cd8f098..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java +++ /dev/null @@ -1,456 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import ch.epfl.lamp.compiler.msil.PEFile; -import ch.epfl.lamp.compiler.msil.PEFile.Sig; -import ch.epfl.lamp.compiler.msil.util.Signature; -import ch.epfl.lamp.compiler.msil.util.Table; -import ch.epfl.lamp.compiler.msil.util.Table.*; - -import java.nio.ByteBuffer; - -/** Represents a module corresponding to a PE/COFF file - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -final class PEModule extends Module { - - //########################################################################## - - protected final PEFile pefile; - - private final int definingRow; - - private Type[] typeRefs = null; - - protected PEModule(PEFile pefile, int definingRow, String scopeName, - Assembly assem) - { - super(pefile.getName(), pefile.getAbsolutePath(), scopeName, assem); - this.pefile = pefile; - this.definingRow = definingRow; - pefile.initModule(this); - pefile.TypeDef.load(); // load into memory - //loadTypes(); - //pefile.FieldDef.load(); - //pefile.MethodDef.load(); - loadGlobals(); - } - - //########################################################################## - - public Type GetType(String typeName) { - initTypes(); - Object o = typesMap.get(typeName); - if (o == null) { - //System.out.println("PEModule.GetType(): Unable to find type " - // + typeName + " int module " + this); - return null; - } - return o instanceof Type ? 
(Type)o - : getTypeDef(((Integer)o).intValue()); - } - - - /** Load information about the types defined in this module. - */ - protected void loadTypes() { - typeRefs = new Type[pefile.TypeRef.rows]; - final int nbTypes = pefile.TypeDef.rows; - for (int row = 2; row <= nbTypes; row++) { - String name = pefile.TypeDef(row).getFullName(); - typesMap.put(name, new Integer(row)); - } - this.types = new Type[nbTypes - 1]; - for (int row = 2; row <= nbTypes; row++) { - getTypeDef(row); - } - } - - /** Return the type defined at the given row in the TypeDef table. - */ - Type getTypeDef(int row) { - if (this.types[row - 2] != null) - return this.types[row - 2]; - - TypeDef type = pefile.TypeDef(row); - int attrs = type.Flags; - String name = type.getFullName(); - - Type declType = null; - if (TypeAttributes.isNested(attrs)) { - for (int i = 1; i <= pefile.NestedClass.rows; i++) { - pefile.NestedClass.readRow(i); - if (pefile.NestedClass.NestedClass == row) - declType = getTypeDef - (pefile.NestedClass.EnclosingClass); - } - } - Type t = new PEType - (this, attrs, name, declType, Type.AuxAttr.None, pefile, row); - types[row - 2] = t; - addType(t); - int[] tvarIdxes = pefile.GenericParam.getTVarIdxes(row); - // if(tvarIdxes.length > 0) { System.out.println("Type: " + t); } - for(int i = 0; i < tvarIdxes.length; i++) { - GenericParamAndConstraints tvarAndConstraints = getTypeConstraints(tvarIdxes[i]); - // add tvarAndConstraints as i-th TVar in t - t.addTVar(tvarAndConstraints); - } - return t; - } - - public GenericParamAndConstraints getTypeConstraints(int genParamIdx) { - int tvarNumber = pefile.GenericParam(genParamIdx).Number; - // tvarName can be null - String tvarName = pefile.GenericParam.getName(); - boolean isInvariant = pefile.GenericParam.isInvariant(); - boolean isCovariant = pefile.GenericParam.isCovariant(); - boolean isContravariant = pefile.GenericParam.isContravariant(); - boolean isReferenceType = pefile.GenericParam.isReferenceType(); - boolean isValueType = pefile.GenericParam.isValueType(); - boolean hasDefaultConstructor = pefile.GenericParam.hasDefaultConstructor(); - // grab constraints - int[] TypeDefOrRefIdxes = pefile.GenericParamConstraint.getTypeDefOrRefIdxes(genParamIdx); - Type[] tCtrs = new Type[TypeDefOrRefIdxes.length]; - for(int i = 0; i < TypeDefOrRefIdxes.length; i++) { - Type tConstraint = getTypeDefOrRef(TypeDefOrRefIdxes[i]); - tCtrs[i] = tConstraint; - // System.out.println("\t\tConstraint: " + tConstraint); - } - GenericParamAndConstraints res = new GenericParamAndConstraints(tvarNumber, tvarName, tCtrs, - isInvariant, isCovariant, isContravariant, - isReferenceType, isValueType, hasDefaultConstructor); - return res; - } - - /** - * Load the description of the module-global fields and methods - */ - protected void loadGlobals() { - //TODO: - } - - protected void loadCustomAttributes(Type attributeType) { - initAttributes(this, 1, Table.ModuleDef.ID, attributeType); - } - - /** Return the type referenced by the given row in the TypeRef table. - */ - Type getTypeRef(int row) { - return getTypeRef(row, null); - } - - /** Return the type referenced by the given row in the TypeRef table - * only if it resides in the given assembly. - * Used by initCustomAttributes to avoid unnecessary loading - * of referenced assemblies.
- */ - Type getTypeRef(int row, Assembly inAssembly) { - Type type = typeRefs[row - 1]; - if (type != null) - return type; - - Table.TypeRef tr = pefile.TypeRef; - tr.readRow(row); - int tableId = Table.getTableId(Table._ResolutionScope, - tr.ResolutionScope); - int refRow = tr.ResolutionScope >> Table.NoBits[Table._ResolutionScope]; - final String typeName = tr.getFullName(); - pefile.getTable(tableId).readRow(refRow); - switch (tableId) { - case AssemblyRef.ID: - String name = pefile.AssemblyRef.getName(); - if (inAssembly != null && !inAssembly.GetName().Name.equals(name)) - return null; - Assembly assem = getAssembly(name); - type = assem.GetType(typeName); - if (type == null) { - // HACK: the IKVM.OpenJDK.Core assembly is compiled against mscorlib.dll v2.0 - // The MSIL library cannot parse the v2.0 mscorlib because of generics, so we - // use the v1.0 - // However, the java.io.FileDescriptor.FlushFileBuffers method uses a type - // Microsoft.Win32.SafeHandles.SafeFileHandle, which only exists in mscorlib - // v2.0 - // For now, jsut return Object (fine as long as we don't use that method). - Assembly asmb = getAssembly("mscorlib"); - type = asmb.GetType("System.Object"); - //throw new RuntimeException("Failed to locate type " + - //typeName + " in assembly " + assem); - } - break; - case ModuleDef.ID: - assert refRow == 1; - type = this.GetType(typeName); - //assert type != null; - break; - case TypeRef.ID: - Type nestingType = getTypeRef(refRow); - String nestedName = typeName; - type = nestingType.GetNestedType(nestedName); - break; - case ModuleRef.ID: - type = getAssembly(pefile.ModuleRef.getName()).GetType(typeName); - default: - throw new RuntimeException(refRow + "@" + pefile.getTable(tableId).getTableName()/* PEFile.byte2hex(tableId)*/); - } - if (typeRefs[row - 1] != null) - System.out.println("TypeRef[" + PEFile.short2hex(row) + "] " + - "changing type " + typeRefs[row - 1] + - " for type " + type); - typeRefs[row - 1] = type; - assert type != null : "Couldn't find type " + typeName; - return type; - } - - private Assembly getAssembly(String name) { - Assembly assem = Assembly.getAssembly(name); - if (assem != null) - return assem; - java.io.File dir = pefile.getParentFile(); - assem = Assembly.LoadFrom(dir, name); - if (assem != null) - return assem; - try { - dir = pefile.getUnderlyingFile().getCanonicalFile().getParentFile(); - } catch (java.io.IOException e) { - throw new RuntimeException(e); - } - assem = Assembly.LoadFrom(dir, name); - if (assem != null) - return assem; - throw new RuntimeException("Cannot find assembly: " + name); - - } - - /** Return the type corresponding to TypeDefOrRef coded index. - * @param index - TypeDefOrRef coded index according to 23.2.6. 
- */ - public Type getTypeDefOrRef(int index) { - int tableId = Table.getTableId(Table._TypeDefOrRef, index); - int row = index >> Table.NoBits[Table._TypeDefOrRef]; - Type type = null; - switch (tableId) { - case Table.TypeDef.ID: - type = getTypeDef(row); - break; - case Table.TypeRef.ID: - return getTypeRef(row); - case Table.TypeSpec.ID: - Table.TypeSpec ts = pefile.TypeSpec; - ts.readRow(row); - int posInBlobStream = ts.Signature; - byte[] blobArrWithLengthStripped = pefile.Blob.getBlob(posInBlobStream); - byte[] compressedUInt = compressUInt(blobArrWithLengthStripped.length); - byte[] byteArr = new byte[blobArrWithLengthStripped.length + compressedUInt.length]; - System.arraycopy(compressedUInt, 0, byteArr, 0, compressedUInt.length); - System.arraycopy(blobArrWithLengthStripped, 0, byteArr, compressedUInt.length, blobArrWithLengthStripped.length); - ByteBuffer buf = ByteBuffer.wrap(byteArr); - Sig sig = pefile.new Sig(buf); - int desc = sig.readByte(); - - switch (desc) { - - // GENERICINST (CLASS | VALUETYPE) TypeDefOrRefEncodred GenArgCount Type* - case Signature.ELEMENT_TYPE_GENERICINST: // i.e. 0x15 - int b = sig.readByte(); // i.e. (0x12 | 0x11) - /* TODO don't ignore b as done above */ - Type instantiatedType = getTypeDefOrRef(sig.decodeInt()); // TypeDefOrRefEncoded - int numberOfTypeArgs = sig.decodeInt(); // GenArgCount - Type[] typeArgs = new Type[numberOfTypeArgs]; - for (int iarg = 0; iarg < numberOfTypeArgs; iarg++) { - typeArgs[iarg] = sig.decodeType(); // Type* - } - type = new ConstructedType(instantiatedType, typeArgs); - break; - - /* Miguel says: Actually the following grammar rule production is not among those for a TypeSpecBlob - but I've found it in assemblies compiled from C# 3.0. - See also duplicate code in PEFile.java */ - case Signature.ELEMENT_TYPE_VAR: - int typeArgAsZeroBased = sig.decodeInt(); - type = new Type.TMVarUsage(typeArgAsZeroBased, true); - break; - - /* Miguel says: Actually the following grammar rule production is not among those for a TypeSpecBlob - but I've found it in assemblies compiled from C# 3.0. - See also duplicate code in PEFile.java */ - case Signature.ELEMENT_TYPE_MVAR: - typeArgAsZeroBased = sig.decodeInt(); - type = new Type.TMVarUsage(typeArgAsZeroBased, false); - break; - - case Signature.ELEMENT_TYPE_SZARRAY: // Single-dim array with 0 lower bound. - sig.skipCustomMods(); - type = Type.mkArray(sig.decodeType(), 1); - break; - - case Signature.ELEMENT_TYPE_ARRAY: - // ... ... 
- // ArrayShape defined in 23.2.13 ArrayShape - Type elem = sig.decodeType(); - int rank = sig.decodeInt(); - int numSizes = sig.decodeInt(); - for (int i = 0; i < numSizes; i++) - sig.decodeInt(); // TODO don't ignore - int numLoBounds = sig.decodeInt(); - for (int i = 0; i < numLoBounds; i++) - sig.decodeInt(); // TODO don't ignore - type = Type.mkArray(elem, rank); - break; - - default: - // TODO remaining grammar productions in 23.2.14 are for PTR and FNPTR only - throw new RuntimeException("PEModule.getTypeDefOrRef(): TypeSpec"); - } - break; - default: - throw new RuntimeException("PEModule.getTypeDefOrRef(): oops!"); - } - return type; - } - - private byte[] compressUInt(int u) { - // 23.2 in Partition II - // TODO add tests based on the examples in 23.2 in Partition II - // the CCI implementation is WriteCompressedUInt - - /* informal discussion at http://www.cnblogs.com/AndersLiu/archive/2010/02/09/en-compressed-integer-in-metadata.html */ - if (u <= 127 && 0 <= u) { - return new byte[]{(byte) u}; - } else if (u > 127 && u <= (2 ^ 14 - 1)) { - byte loByte = (byte)(u & 0xff); - byte hiByte = (byte)((u >> 8) | 0x80); - byte[] res = new byte[] { hiByte, loByte }; - return res; - } else { - byte b0 = (byte)(u & 0xff); - byte b1 = (byte)((u & 0xff00)>>8); - byte b2 = (byte)((u & 0xff0000)>>16); - byte b3 = (byte)((u >> 24)|0xc0); - byte[] res = new byte[] { b3, b2, b1, b0 }; - return res; - } - } - - /** - * Returns the method defined at the given row of the MethodDef table - * by looking up the type that defines the method. - */ - MethodBase getMethod(int row) { - for (int i = 0; i < types.length; i++) { - PEType type = (PEType)types[i]; - if ((type.methodListBeg <= row) && (row < type.methodListEnd)) { - type.initMethods(); - return type.methoddefs[row - type.methodListBeg]; - } - } - throw new RuntimeException("In module " + this - + ": cannot find type defining method 0x" - + PEFile.int2hex(row)); - } - - /** Returns the member referenced by the given row of the MemberRef table. - */ - protected MemberInfo getMemberRef(int row) { - return getMemberRef(row, null); - } - - /** Returns the member referenced by the given row of the MemberRef table - * if defined in the given assembly. 
- * Used by initCustomAttributes to avoid unnecessary loading of - * referenced assemblies - */ - protected MemberInfo getMemberRef(int row, Assembly inAssembly) { - MemberInfo member = null; - MemberRef mref = pefile.MemberRef; - mref.readRow(row); - int mtbl = Table.getTableId(Table._MemberRefParent, mref.Class); - int mind = Table.getTableIndex(Table._MemberRefParent, mref.Class); - switch (mtbl) { - case TypeRef.ID: - Type type = getTypeRef(mind, inAssembly); - if (type == null) - return null; - Sig sig = mref.getSignature(); - int callconv = sig.readByte(); // should be 0x20 - int paramCount = sig.decodeInt(); - //sig.skipByte(Signature.ELEMENT_TYPE_BYREF); //from MethodDef - Type retType = sig.decodeRetType(); - Type[] paramType = new Type[paramCount]; - for (int i = 0; i < paramCount; i++) - paramType[i] = sig.decodeParamType(); - - String memberName = mref.getName(); - if (memberName.equals(ConstructorInfo.CTOR) || - memberName.equals(ConstructorInfo.CCTOR)) - { - member = type.GetConstructor(paramType); - } else { - member = type.GetMethod(memberName, paramType); - } - assert member != null : type + "::" + memberName; - break; - case ModuleRef.ID: - case MethodDef.ID: - case TypeSpec.ID: - throw new RuntimeException("initCustomAttributes: " - + pefile.getTable(mtbl).getTableName()); - } - return member; - } - - protected void initCustomAttributes(Type attributeType) { - initAttributes(this, definingRow, Table.ModuleDef.ID, attributeType); - } - - // explicitly only package-visible - void initAttributes(CustomAttributeProvider cap, int definingRow, - int sourceTableId, Type attributeType) - { - int parentIndex = Table.encodeIndex(definingRow, - Table._HasCustomAttribute, - sourceTableId); - Table.CustomAttribute attrs = pefile.CustomAttribute; - for (int row = 1; row <= attrs.rows; row++) { - ConstructorInfo attrConstr = null; - attrs.readRow(row); - if (attrs.Parent == parentIndex) { - int tableId = Table.getTableId(Table._CustomAttributeType, - attrs.Type); - int ind = Table.getTableIndex(Table._CustomAttributeType, - attrs.Type); - switch (tableId) { - case MethodDef.ID: - attrConstr = (ConstructorInfo)this.getMethod(ind); - break; - case MemberRef.ID: - //System.out.println(PEFile.short2hex(ind) + "@MemberRef"); - Assembly attrAssem = - attributeType == null ? 
null : attributeType.Assembly(); - MemberInfo mi = this.getMemberRef(ind, attrAssem); - if (mi != null) { - assert mi instanceof ConstructorInfo - : "Expected ConstructorInfo; found " + mi; - attrConstr = (ConstructorInfo)mi; - } - break; - default: - throw new RuntimeException(); - } - if (attrConstr != null - && (attrConstr.DeclaringType == attributeType - || attributeType == null)) - cap.addCustomAttribute(attrConstr, attrs.getValue()); - } - } - } - - //########################################################################## - -} // class PEModule diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEType.java b/src/msil/ch/epfl/lamp/compiler/msil/PEType.java deleted file mode 100644 index 418c6603b3..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/PEType.java +++ /dev/null @@ -1,419 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import ch.epfl.lamp.compiler.msil.PEFile.Sig; - -import ch.epfl.lamp.compiler.msil.util.Table; -import ch.epfl.lamp.compiler.msil.util.Table.*; -import ch.epfl.lamp.compiler.msil.util.Signature; -import ch.epfl.lamp.compiler.msil.util.PECustomMod; - -import java.util.ArrayList; - -/** - * Represents a type from a .NET assembly - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -final class PEType extends Type implements Signature { - - //########################################################################## - - /** The PEFile that holds the description of the type. */ - final PEFile file; - - /** The number of the row in the TypeDef table defining the type. */ - final int definingRow; - - /** The row of the first method in the MethodDef table. */ - final int methodListBeg; - - /** The row of the last method in the MethodDef table + 1. */ - final int methodListEnd; - - /** @param definingRow - the index in the TypeDef table where - * the type description is. - */ - PEType(PEModule module, - int attributes, - String fullName, - Type declType, - int auxAttr, - PEFile file, - int definingRow) - { - super(module, attributes, fullName, null, null, declType, auxAttr); - this.file = file; - this.definingRow = definingRow; - methodListBeg = file.TypeDef(definingRow).MethodList; - methodListEnd = definingRow < file.TypeDef.rows - ? file.TypeDef(definingRow + 1).MethodList - : file.MethodDef.rows + 1; - } - - //########################################################################## - // lazy type construction methods - - protected void loadBaseType() { - TypeDef type = file.TypeDef(definingRow); - baseType = type.Extends == 0 ? null - : ((PEModule)Module).getTypeDefOrRef(type.Extends); - } - - protected void loadFields() { - // the list of the declared fields starts from the - // FieldList index in the TypeDef table up to the smaller of the: - // - the last row of the FieldDef table - // - the start of the next list of fields determined by the - // FieldList index of the next row in the TypeDef table - final ArrayList fields = new ArrayList(); - int fieldListBeg = file.TypeDef(definingRow).FieldList; - int fieldListEnd = file.FieldDef.rows + 1; - if (definingRow < file.TypeDef.rows) - fieldListEnd = file.TypeDef(definingRow + 1).FieldList; - - for (int row = fieldListBeg; row < fieldListEnd; row++) { - int frow = file.FieldTrans.rows == 0 - ? 
row : file.FieldTrans(row).Field; - int attrs = file.FieldDef(frow).Flags; - String name = file.FieldDef.getName(); - //System.out.println("\t-->Loading field: " + name); - Sig sig = file.FieldDef.getSignature(); - PECustomMod pecmod = sig.decodeFieldType(); - Object val = null; - Table.Constant consts = file.Constant; - for (int i = 1; i <= consts.rows; i++) { - consts.readRow(i); - int tableId = Table.getTableId(Table._HasConstant,consts.Parent); - int refRow = consts.Parent >> Table.NoBits[Table._HasConstant]; - if (tableId == Table.FieldDef.ID && refRow == frow) - val = consts.getValue(); - } - FieldInfo field = new PEFieldInfo(row, name, attrs, pecmod, val); - if (field.Name.equals("value__") && field.IsSpecialName()) { - assert underlyingType == null : underlyingType.toString(); - underlyingType = field.FieldType; - } - fields.add(field); - } - this.fields = (FieldInfo[]) - fields.toArray(FieldInfo.EMPTY_ARRAY); - fields.clear(); - } - - protected MethodBase[] methoddefs; - - protected MethodInfo getMethod(int n) { - return (MethodInfo)methoddefs[n - methodListBeg]; - } - - protected void loadMethods() { - methoddefs = new MethodBase[methodListEnd - methodListBeg]; - - final ArrayList methods = new ArrayList(); - final ArrayList constrs = new ArrayList(); - PEModule pemodule = (PEModule) Module; - for (int row = methodListBeg; row < methodListEnd; row++) { - int mrow = file.MethodTrans.rows == 0 - ? row : file.MethodTrans(row).Method; - int attrs = file.MethodDef(mrow).Flags; - String name = file.MethodDef.getName(); - Sig sig = file.MethodDef.getSignature(); - /* we're about to parse a MethodDefSig, defined in Sec. 23.2.1 of Partition II () */ - - int callConv = sig.readByte(); - // TODO decode HASTHIS from high byte of calling convention - // TODO decode EXPLICITTHIS from high byte of calling convention - // TODO handle VARARG calling convention (not CLS but may show up ) - if((callConv & 0x1F) == Signature.GENERIC) { - int genParamCount = sig.decodeInt(); - /* genParamCount is ignored because the method's type params will be obtained below - (see: file.GenericParam.getMVarIdxes(row) ) */ - } - int paramCount = sig.decodeInt(); - Type retType = sig.decodeRetType(); - Type[] paramType = new Type[paramCount]; - for (int i = 0; i < paramCount; i++) - paramType[i] = sig.decodeParamType(); - - ParameterInfo[] params = new ParameterInfo[paramCount]; - int paramListBeg = file.MethodDef.ParamList; - int paramListEnd = paramListBeg + paramCount; - if (paramListEnd > file.ParamDef.rows) { - /* don't try to read param names past ParamDef's row count - Some assembly-writers don't bother to give names for all params. 
*/ - paramListEnd = file.ParamDef.rows + 1; - } - for (int i = paramListBeg; i < paramListEnd; i++) { - int pattr = file.ParamDef(i).Flags; - String paramName = file.ParamDef.getName(); - int seq = file.ParamDef.Sequence; - if (seq == 0) { - //System.out.println("Retval attributes 0x" + - // PEFile.short2hex(pattr)); - } else { - params[seq - 1] = new ParameterInfo(paramName, paramType[seq - 1], pattr, seq - 1); - } - } - for (int i = 0; i < params.length; i++) { - if (params[i] == null) - params[i] = new ParameterInfo(null, paramType[i], 0, 0); - } - MethodBase method = null; - if ((attrs & MethodAttributes.SpecialName) != 0 - && (attrs & MethodAttributes.RTSpecialName) != 0 - && (name.equals(ConstructorInfo.CTOR) - || name.equals(ConstructorInfo.CCTOR))) - { - method = new PEConstructorInfo(row, attrs, params); - } - else { - method = new PEMethodInfo(row, name, attrs, retType, params); - int[] mvarIdxes = file.GenericParam.getMVarIdxes(row); - // if(mvarIdxes.length > 0) { System.out.println("Method: " + method); } - for(int i = 0; i < mvarIdxes.length; i++) { - GenericParamAndConstraints mvarAndConstraints = pemodule.getTypeConstraints(mvarIdxes[i]); - // add mvarAndConstraints as i-th MVar in method - ((PEMethodInfo)method).addMVar(mvarAndConstraints); - } - } - (method.IsConstructor() ? constrs : methods).add(method); - methoddefs[row - methodListBeg] = method; - } - - this.constructors = (ConstructorInfo[]) - constrs.toArray(ConstructorInfo.EMPTY_ARRAY); - this.methods = (MethodInfo[]) - methods.toArray(MethodInfo.EMPTY_ARRAY); - constrs.clear(); methods.clear(); - } - - protected void loadProperties() { - final PropertyMap pmap = file.PropertyMap; - if (pmap == null) { - properties = PropertyInfo.EMPTY_ARRAY; - return; - } - - final PropertyDef pdef = file.PropertyDef; - int propListBeg = -1; - int propListEnd = pdef.rows + 1; - for (int i = 1; i <= pmap.rows; i++) { - pmap.readRow(i); - if (pmap.Parent == this.definingRow) { - propListBeg = pmap.PropertyList; - if (i < pmap.rows) { - pmap.readRow(i + 1); - propListEnd = pmap.PropertyList; - } - break; - } - } - if (propListBeg < 0) { - properties = PropertyInfo.EMPTY_ARRAY; - return; - } - - final ArrayList properties = new ArrayList(); - for (int i = propListBeg; i < propListEnd; i++) { - pdef.readRow(i); - Sig sig = pdef.getSignature(); - int b = sig.readByte(); - b &= ~HASTHIS; - int paramCount = sig.readByte(); - assert b == PROPERTY; - Type propType = sig.decodeType(); - int index = Table.encodeIndex(i, Table._HasSemantics, - Table.PropertyDef.ID); - MethodSemantics msem = file.MethodSemantics; - MethodInfo getter = null, setter = null; - for (int j = 1; j <= msem.rows; j++) { - msem.readRow(j); - if (msem.Association != index) - continue; - if (msem.isGetter()) - getter = getMethod(msem.Method); - else if (msem.isSetter()) - setter = getMethod(msem.Method); - else - System.err.println("PEType.loadProperties(): !?!"); - } - properties.add - (new PEPropertyInfo(i, pdef.getName(), (short)pdef.Flags, - propType, getter, setter)); - } - this.properties = (PropertyInfo[]) properties - .toArray(PropertyInfo.EMPTY_ARRAY); - } - - protected void loadEvents() { - EventMap emap = file.EventMap; - if (emap == null) { - this.events = EventInfo.EMPTY_ARRAY; - return; - } - - final EventDef edef = file.EventDef; - int eventListBeg = -1; - int eventListEnd = edef.rows + 1; - for (int i = 1; i <= emap.rows; i++) { - emap.readRow(i); - if (emap.Parent == this.definingRow) { - eventListBeg = emap.EventList; - if (i < emap.rows) { - 
emap.readRow(i + 1); - eventListEnd = emap.EventList; - } - break; - } - } - if (eventListBeg < 0) { - this.events = EventInfo.EMPTY_ARRAY; - return; - } - - final ArrayList events = new ArrayList(); - final MethodSemantics msem = file.MethodSemantics; - for (int i = eventListBeg; i < eventListEnd; i++) { - edef.readRow(i); - final Type handler = - ((PEModule)Module).getTypeDefOrRef(edef.EventType); - int index = - Table.encodeIndex(i, Table._HasSemantics, Table.EventDef.ID); - MethodInfo add = null, remove = null; - for (int j = 1; j <= msem.rows; j++) { - msem.readRow(j); - if (msem.Association != index) - continue; - if (msem.isAddOn()) - add = getMethod(msem.Method); - else if (msem.isRemoveOn()) - remove = getMethod(msem.Method); - else { - } - } - events.add(new PEEventInfo(i, edef.getName(), - (short)edef.EventFlags, - handler, add, remove)); - } - this.events = (EventInfo[]) events - .toArray(EventInfo.EMPTY_ARRAY); - } - - protected void loadNestedTypes() { - final ArrayList nested = new ArrayList(); - for (int i = 1; i <= file.NestedClass.rows; i++) { - file.NestedClass.readRow(i); - if (file.NestedClass.EnclosingClass == this.definingRow) - nested.add(((PEModule)Module) - .getTypeDef(file.NestedClass.NestedClass)); - } - this.nestedTypes = (Type[]) nested.toArray(Type.EmptyTypes); - } - - protected void loadInterfaces() { - // get the interfaces implemented by this class - interfaces = Type.EmptyTypes; - int index = file.InterfaceImpl.findType(definingRow); - if (index > 0) { - ArrayList ifaces = new ArrayList(); - for (int i = index; i <= file.InterfaceImpl.rows; i++) { - file.InterfaceImpl.readRow(i); - if (file.InterfaceImpl.Class != definingRow) - break; - ifaces.add(((PEModule)Module) - .getTypeDefOrRef(file.InterfaceImpl.Interface)); - } - interfaces = (Type[]) ifaces.toArray(new Type[ifaces.size()]); - } - } - - protected void loadCustomAttributes(Type attributeType) { - initAttributes(this, definingRow, Table.TypeDef.ID, attributeType); - } - - private void initAttributes(CustomAttributeProvider cap, int definingRow, - int sourceTableId, Type attributeType) - { - ((PEModule)this.Module).initAttributes - (cap, definingRow, sourceTableId, attributeType); - } - - //########################################################################## - - private class PEFieldInfo extends FieldInfo { - private final int definingRow; - public PEFieldInfo(int definingRow, String name, - int attrs, PECustomMod pecmod, Object value) - { - super(name, PEType.this, attrs, pecmod, value); - this.definingRow = definingRow; - } - protected void loadCustomAttributes(Type attributeType) { - PEType.this.initAttributes - (this, definingRow, Table.FieldDef.ID, attributeType); - } - } - - private class PEMethodInfo extends MethodInfo { - private final int definingRow; - public PEMethodInfo(int row, String name, - int attrs, Type retType, ParameterInfo[] params) - { - super(name, PEType.this, attrs, retType, params); - this.definingRow = row; - } - protected void loadCustomAttributes(Type attributeType) { - PEType.this.initAttributes - (this, definingRow, Table.MethodDef.ID, attributeType); - } - } - - private class PEConstructorInfo extends ConstructorInfo { - private final int definingRow; - public PEConstructorInfo(int row, int attrs, ParameterInfo[] params) { - super(PEType.this, attrs, params); - this.definingRow = row; - } - protected void loadCustomAttributes(Type attributeType) { - PEType.this.initAttributes - (this, definingRow, Table.MethodDef.ID, attributeType); - } - } - - private class 
PEPropertyInfo extends PropertyInfo { - private final int definingRow; - public PEPropertyInfo(int row, String name, short attrs, Type propType, - MethodInfo getter, MethodInfo setter) - { - super(name, PEType.this, attrs, propType, getter, setter); - this.definingRow = row; - } - protected void loadCustomAttributes(Type attributeType) { - PEType.this.initAttributes - (this, definingRow, Table.PropertyDef.ID, attributeType); - } - } - - private class PEEventInfo extends EventInfo { - private final int definingRow; - public PEEventInfo(int row, String name, short attrs, Type handler, - MethodInfo add, MethodInfo remove) - { - super(name, PEType.this, attrs, handler, add, remove); - this.definingRow = row; - } - protected void loadCustomAttributes(Type attributeType) { - PEType.this.initAttributes - (this, definingRow, Table.EventDef.ID, attributeType); - } - } - - //########################################################################## - -} // class PEType diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java deleted file mode 100644 index d4360363fc..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Defines the attributes that may be associated with a parameter. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public final class ParameterAttributes { - - // just to make the class uninstantiable - private ParameterAttributes() {} - - //########################################################################## - - /** Specifies that there is no parameter attribute. */ - public static final short None = 0x0000; - - /** Specifies that the parameter is an input parameter. */ - public static final short In = 0x0001; - - /** Specifies that the parameter is an output parameter. */ - public static final short Out = 0x0002; - - /** Specifies that the parameter is a locale identifier. */ - public static final short Lcid = 0x0004; - - /** Specifies that the parameter is a return value. */ - public static final short Retval = 0x0008; - - /** Specifies that the parameter is optional. - * Attention: In the specification the value is 0x0004 but - * in mscorlib.dll that it Lcid and Optional is 0x0010 - */ - public static final short Optional = 0x0010; - - /** Specifies that the parameter has a default value. */ - public static final short HasDefault = 0x1000; - - /** Specifies that the parameter has field marshaling information. */ - public static final short HasFieldMarshal = 0x2000; - - /** Reserved. */ - public static final short Reserved3 = 0x4000; - - /** Reserved. */ - public static final short Reserved4 = (short)0x8000; - - /** Specifies that the parameter is reserved. */ - public static final short ReservedMask = (short)0xf000; - - /** Reserved: shall be zero in all conforming implementations. */ - public static final short Unused = (short) 0xcfe0; - - public static final String toString(int attrs) { - StringBuffer s = new StringBuffer(); - if ((attrs & In) != 0) s.append("in "); - if ((attrs & Out) != 0) s.append("out "); - if ((attrs & Optional) != 0) s.append("opt "); - if ((attrs & HasDefault) != 0) s.append("default(???) "); - if ((attrs & HasFieldMarshal) != 0) s.append("marshal(???) 
"); - return s.toString(); - } - - //########################################################################## - -} // class ParameterAttributes diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java deleted file mode 100644 index 877d7aa8a5..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Discovers the attributes of a parameter and provides access to - * parameter metadata. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public class ParameterInfo extends CustomAttributeProvider { - - //########################################################################## - - /** Attributes of the parameter. */ - public final short Attributes; - - /** Name of the parameter. */ - public final String Name; - - /** Type of the parameter. */ - public final Type ParameterType; - - /** Position of the parameter in the parameter list. */ - public final int Position; - - //########################################################################## - - /** Is this an input parameter? */ - public final boolean IsIn() { - return (Attributes & ParameterAttributes.In) != 0; - } - - /** Is this an output parameter? */ - public final boolean IsOut() { - return (Attributes & ParameterAttributes.Out) != 0; - } - - /** Is this an Lcid? */ - public final boolean IsLcid() { - return (Attributes & ParameterAttributes.Lcid) != 0; - } - - /** Is this a return value? */ - public final boolean IsRetval() { - return (Attributes & ParameterAttributes.Retval) != 0; - } - - /** Is this an optional parameter? */ - public final boolean IsOptional() { - return (Attributes & ParameterAttributes.Optional) != 0; - } - - //########################################################################## - // members not part of the public Reflection.ParameterInfo interface - - /** Initializes a new instance of the ParameterInfo class. 
*/ - protected ParameterInfo(String name, Type type, int attr, int pos) { - Name = name; - ParameterType = type; - Attributes = (short)attr; - Position = pos; - } - - public String toString() { - return ParameterAttributes.toString(Attributes) + ParameterType + " " - + Name; - } - - //########################################################################## - -} // class ParameterInfo diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java b/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java deleted file mode 100644 index b19fe29869..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java +++ /dev/null @@ -1,62 +0,0 @@ -package ch.epfl.lamp.compiler.msil; - -import ch.epfl.lamp.compiler.msil.util.PECustomMod; - -public final class PrimitiveType extends Type { - public PrimitiveType(Module module, - int attributes, - String fullName, - Type baseType, - Type[] interfaces, - Type declType, - int auxAttr, - Type elemType) { - super(module, attributes, fullName, - baseType, interfaces, declType, auxAttr, elemType); - clearMembers(); - } - - public void clearMembers() { - fields = FieldInfo.EMPTY_ARRAY; - methods = MethodInfo.EMPTY_ARRAY; - constructors = ConstructorInfo.EMPTY_ARRAY; - events = EventInfo.EMPTY_ARRAY; - - initBaseType(); - initInterfaces(); - - initFields(); - initMethods(); - initEvents(); - initProperties(); - initNestedTypes(); - } - - public FieldInfo addField(String name, int attrs, Type fieldType) { - PECustomMod fieldTypeWithMods = new PECustomMod(fieldType, null); - FieldInfo res = new FieldInfo(name, this, attrs, fieldTypeWithMods, null); - FieldInfo[] ms = new FieldInfo[fields.length + 1]; - System.arraycopy(fields, 0, ms, 0, fields.length); - ms[ms.length - 1] = res; - fields = ms; - return res; - } - - public MethodInfo addMethod(String name, int attrs, Type returnType, Type[] paramTypes) { - MethodInfo res = new MethodInfo(name, this, attrs, returnType, paramTypes); - MethodInfo[] ms = new MethodInfo[methods.length + 1]; - System.arraycopy(methods, 0, ms, 0, methods.length); - ms[ms.length - 1] = res; - return res; - } - - public ConstructorInfo addConstructor(int attrs, Type[] paramTypes) { - ConstructorInfo res = new ConstructorInfo(this, attrs, paramTypes); - ConstructorInfo[] ms = new ConstructorInfo[constructors.length + 1]; - System.arraycopy(constructors, 0, ms, 0, constructors.length); - ms[ms.length - 1] = res; - return res; - } - -} - diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java deleted file mode 100644 index b1bec64aff..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Attributes applicable to properties - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public final class PropertyAttributes { - - // makes the class uninstantiable - private PropertyAttributes() {} - - //########################################################################## - - /** Specifies that the property is special, with the name describing - * how the property is special. - */ - public static final short SpecialName = 0x0200; - - /** Specifies that the metadata internal APIs check the name encoding. - */ - public static final short RTSpecialName = 0x0400; - - /** Specifies that the property has a default value.
- */ - public static final short HasDefault = 0x1000; - - //########################################################################## - - public static String toString(short attrs) { - StringBuffer str = new StringBuffer(); - if ((attrs & SpecialName) != 0) str.append("specialname "); - if ((attrs & RTSpecialName) != 0) str.append("rtspecialname "); - return str.toString(); - } - - //########################################################################## - -} // class PropertyAttributes diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java deleted file mode 100644 index 4b7cef8bc1..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Discovers the attributes of a property - * and provides access to property metadata. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public class PropertyInfo extends MemberInfo { - - //########################################################################## - - public final int MemberType() { return MemberTypes.Property; } - - public final short Attributes; - - public final boolean CanRead; - - public final boolean CanWrite; - - public final Type PropertyType; - - /** Returns an array of the public get and set accessors for this property. - */ - public MethodInfo[] GetAccessors() { - return GetAccessors(false); - } - - /** Returns an array of the public or non-public get - * and set accessors for this property. - */ - public MethodInfo[] GetAccessors(boolean nonPublic) { - MethodInfo getter = GetGetMethod(nonPublic); - MethodInfo setter = GetSetMethod(nonPublic); - if (getter == null) - if (setter == null) return MethodInfo.EMPTY_ARRAY; - else return new MethodInfo[]{setter}; - else if (setter == null) return new MethodInfo[] {getter}; - else return new MethodInfo[] {getter, setter}; - } - - /** Returns the public get accessor for this property. - */ - public MethodInfo GetGetMethod() { - return GetGetMethod(false); - } - - /** Returns the public or non-public get accessor for this property. - */ - public MethodInfo GetGetMethod(boolean nonPublic) { - return nonPublic ? getter - : getter == null || getter.IsPublic() ? getter : null; - } - - /** Returns the public set accessor for this property. - */ - public MethodInfo GetSetMethod() { - return GetSetMethod(false); - } - - /** Returns the public or non-public set accessor for this property. - */ - public MethodInfo GetSetMethod(boolean nonPublic) { - return nonPublic ? setter - : setter == null || setter.IsPublic() ? setter : null; - } - - public String toString() { - MethodInfo m = getter != null ? getter : setter; - return MethodAttributes.accessFlagsToString - ((getter != null ? 
getter : setter).Attributes) - + " " + PropertyAttributes.toString(Attributes) - + DeclaringType + "::" + Name; - } - - //########################################################################## - // protected members - - protected static final PropertyInfo[] EMPTY_ARRAY = new PropertyInfo[0]; - - protected MethodInfo getter; - protected MethodInfo setter; - - protected PropertyInfo(String name, Type declType, short attr, - Type propType, MethodInfo getter, MethodInfo setter) - { - super(name, declType); - Attributes = attr; - PropertyType = propType; - this.getter = getter; - this.setter = setter; - CanRead = getter != null; - CanWrite = setter != null; - } - - //########################################################################## - -} // class PropertyInfo diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Type.java b/src/msil/ch/epfl/lamp/compiler/msil/Type.java deleted file mode 100644 index 830632ce45..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/Type.java +++ /dev/null @@ -1,1142 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -import java.util.Map; -import java.util.HashMap; -import java.util.List; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.Arrays; - -/** - * Represents type declarations: class types, interface types, array types, - * value types, and enumeration types. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public abstract class Type extends MemberInfo { - - private java.util.List /* GenericParamAndConstraints */ tVars = new java.util.LinkedList(); - private GenericParamAndConstraints[] sortedTVars = null; - - public void addTVar(GenericParamAndConstraints tvarAndConstraints) { - sortedTVars = null; - tVars.add(tvarAndConstraints); - } - - public GenericParamAndConstraints[] getSortedTVars() { - if(sortedTVars == null) { - sortedTVars = new GenericParamAndConstraints[tVars.size()]; - for (int i = 0; i < sortedTVars.length; i ++){ - Iterator iter = tVars.iterator(); - while(iter.hasNext()) { - GenericParamAndConstraints tvC = (GenericParamAndConstraints)iter.next(); - if(tvC.Number == i) { - sortedTVars[i] = tvC; - } - } - } - } - return sortedTVars; - } - - - //########################################################################## - // public static members - - /** Empty array of type Type. */ - public static final Type[] EmptyTypes = new Type[0]; - - /** Separates names in the namespace of the Type. */ - public static final char Delimiter = '.'; - - //########################################################################## - // public properties - - /** The fully qualified name of the Type. */ - public final String FullName; - - /** The namespace of the Type. */ - public final String Namespace; - - /** The type from which the current Type directly inherits. */ - public final Type BaseType() { - initBaseType(); - return baseType; - } - protected Type baseType; - - /** The attributes associated with the Type. */ - public final int Attributes; - - /** The assembly that the type is declared in. */ - public final Assembly Assembly() { return Module.Assembly; } - - /** The module (the EXE/DLL) in which the current Type is defined. */ - public final Module Module; - - public final int MemberType() { - return DeclaringType == null - ?
MemberTypes.TypeInfo : MemberTypes.NestedType; - } - - //########################################################################## - // internal members - - // Fields declared by this class - protected FieldInfo[] fields; - - // Methods declared by this class - protected MethodInfo[] methods; - - // Constructors of this class - protected ConstructorInfo[] constructors; - - // Properties of the class - protected PropertyInfo[] properties; - - // Events of the class - protected EventInfo[] events; - - // Interfaces implemented by this class - protected Type[] interfaces; - - // Nested types declared by this class - protected Type[] nestedTypes; - - // holds the element type of array, pointer and byref types - private final Type elemType; - - // the underlying type of an enumeration. null if the type is not enum. - protected Type underlyingType; - - protected int auxAttr; - - //########################################################################## - // Map with all the types known so far and operations on it - - private static final Map types = new HashMap(); - - protected static Type getType(String name) { - return (Type) types.get(name); - } - - protected static Type addType(Type t) { - assert(!(t instanceof TMVarUsage)); - assert(!(t instanceof ConstructedType)); - Type oldType = (Type) types.put(t.FullName, t); -// if (oldType != null) -// throw new RuntimeException("The type: [" + t.Assembly + "]" + t -// + " replaces the type: [" + -// oldType.Assembly + "]" + oldType); - return t; - } - - //########################################################################## - - /** The main constructor. */ - protected Type(Module module, - int attr, - String fullName, - Type baseType, - Type[] interfaces, - Type declType, - int auxAttr, - Type elemType) - { - super(fullName.lastIndexOf(Delimiter) < 0 ? fullName : - fullName.substring(fullName.lastIndexOf(Delimiter) + 1, - fullName.length()), - declType); - - Module = module; // null only for TMVarUsage and for PrimitiveType - Attributes = attr; - this.baseType = baseType; - if (DeclaringType == null) { - FullName = fullName; - int i = FullName.lastIndexOf(Delimiter); - Namespace = (i < 0) ? 
"" : FullName.substring(0,i); - } else { - FullName = declType.FullName + "+" + fullName; - Namespace = DeclaringType.Namespace; - } - - this.interfaces = interfaces; - this.elemType = elemType; - this.auxAttr = auxAttr; - } - - public final boolean IsAbstract() { - return (Attributes & TypeAttributes.Abstract) != 0; - - } - public final boolean IsPublic() { - return (Attributes & TypeAttributes.VisibilityMask) - == TypeAttributes.Public; - } - - public final boolean IsNotPublic() { - return (Attributes & TypeAttributes.VisibilityMask) - == TypeAttributes.NotPublic; - } - - public final boolean IsNestedPublic() { - return (Attributes & TypeAttributes.VisibilityMask) - == TypeAttributes.NestedPublic; - } - - public final boolean IsNestedPrivate() { - return (Attributes & TypeAttributes.VisibilityMask) - == TypeAttributes.NestedPrivate; - } - - public final boolean IsNestedFamily() { - return (Attributes & TypeAttributes.VisibilityMask) - == TypeAttributes.NestedFamily; - } - - public final boolean IsNestedAssembly() { - return (Attributes & TypeAttributes.VisibilityMask) - == TypeAttributes.NestedAssembly; - } - - public final boolean IsNestedFamORAssem() { - return (Attributes & TypeAttributes.VisibilityMask) - == TypeAttributes.NestedFamORAssem; - } - - public final boolean IsNestedFamANDAssem() { - return (Attributes & TypeAttributes.VisibilityMask) - == TypeAttributes.NestedFamANDAssem; - } - - public final boolean IsSealed() { - return (Attributes & TypeAttributes.Sealed) != 0; - } - - public final boolean IsSpecialName() { - return (Attributes & TypeAttributes.SpecialName) != 0; - } - - public final boolean IsClass() { - return (Attributes & TypeAttributes.ClassSemanticsMask) - == TypeAttributes.Class; - } - - public final boolean IsInterface(){ - return (Attributes & TypeAttributes.ClassSemanticsMask) - == TypeAttributes.Interface; - } - - public final boolean IsAutoLayout() { - return (Attributes & TypeAttributes.LayoutMask) - == TypeAttributes.AutoLayout; - } - public final boolean IsExplictitLayout() { - return (Attributes & TypeAttributes.LayoutMask) - == TypeAttributes.ExplicitLayout; - } - public final boolean IsLayoutSequential() { - return (Attributes & TypeAttributes.LayoutMask) - == TypeAttributes.SequentialLayout; - } - - public final boolean IsImport() { - return (Attributes & TypeAttributes.Import) != 0; - } - public final boolean IsSerializable() { - return (Attributes & TypeAttributes.Serializable) != 0; - } - - public final boolean IsAnsiClass() { - return (Attributes & TypeAttributes.StringFormatMask) - == TypeAttributes.AnsiClass; - } - - public final boolean IsUnicodeClass() { - return (Attributes & TypeAttributes.StringFormatMask) - == TypeAttributes.UnicodeClass; - } - public final boolean IsAutoClass() { - return (Attributes & TypeAttributes.StringFormatMask) - == TypeAttributes.AutoClass; - } - - public final boolean IsArray() { - return (auxAttr & AuxAttr.Array) != 0; - } - public final boolean IsByRef() { - return (auxAttr & AuxAttr.ByRef) != 0; - } - public final boolean IsPointer() { - return (auxAttr & AuxAttr.Pointer) != 0; - } - public final boolean IsPrimitive() { - return (auxAttr & AuxAttr.Primitive) != 0; - } - public final boolean IsValueType() { - return BaseType() == VALUE_TYPE() || IsEnum(); - } - public final boolean IsEnum() { - return BaseType() == ENUM(); - } - public boolean CanBeTakenAddressOf() { - /* TODO should be overridden in TMVarUsage, - but there's currently no way to bind a TMVarUsage to its GenericParamAndConstraints definition. 
Why? - Because of the way the msil library is organized (e.g., mkArray() returns the same !0[] representation - for all !0[] usages, irrespective of the scope of the !0 type-param) - This in turn is so because without generics there's no harm in using a type-def instance - where a type-ref should go (e.g., the ParameterType of a ParameterInfo nowadays may point to a PEType). - The net effect is that this method (CanBeTakenAddressOf) is conservative, it will answer "no" - for example for !0 where !0 refers to a type-param with the isValuetype constraint set. - The whole thing is ok at this point in time, where generics are not supported at the backend. */ - return IsValueType() && (this != ENUM()); - /* ENUM() is a singleton, i.e. System.Enum is not generic */ - } - - /** IsGeneric, true for a PEType or TypeBuilder (i.e., a type definition) - * containing one or more type params. Not to be called on a reference - * to a constructed type. */ - public final boolean IsGeneric() { - return tVars.size() > 0; - } - - public final boolean HasElementType() { - return IsArray() || IsPointer() || IsByRef(); - } - - public boolean IsTMVarUsage() { - // overridden in TMVarUsage - return false; - } - - public boolean IsNestedType() { - return DeclaringType != null; - } - - public boolean IsDefinitelyInternal() { - if(IsNestedType()) { - return IsNestedPrivate(); - } else { - return IsNotPublic(); - } - } - - //public final boolean IsCOMObject; - //public final boolean IsContextful; - //public final boolean IsMarshalByRef; - - protected Type(Module module, - int attr, - String fullName, - Type baseType, - Type[] interfaces, - Type declType, - int auxAttr) - { - this(module, attr, fullName, baseType, interfaces, - declType, auxAttr, null); - } - - //########################################################################## - - public static final class TMVarUsage extends Type { - - public final int Number; - public final boolean isTVar; - - /** Non-defining reference to either a TVar or an MVar. - * An instance of GenericParamAndConstraints represents a TVar or an MVar definition. */ - public TMVarUsage(int Number, boolean isTVar) { - super(null, 0, ((isTVar ? "!" : "!!") + Number), null, null, null, AuxAttr.None, null); - this.Number = Number; - this.isTVar = isTVar; - } - - public String toString() { - return (isTVar ? "!" : "!!") + Number; - } - - public final boolean IsTMVarUsage() { - return true; - } - - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - TMVarUsage that = (TMVarUsage) o; - - if (Number != that.Number) return false; - if (isTVar != that.isTVar) return false; - - return true; - } - - public int hashCode() { - int result = Number; - result = 31 * result + (isTVar ? 
1 : 0); - return result; - } - } - - protected static final class AuxAttr { - public static final int None = 0x0000; - public static final int Array = 0x0001; - public static final int ByRef = 0x0002; - public static final int Pointer = 0x0008; - public static final int Primitive = 0x0010; - } - - /***/ - public static Type mkArray(Type elemType, int rank) { - StringBuffer arrSig = new StringBuffer("["); - for (int i = 0; i < rank; i++) { - if (i > 0) arrSig.append(','); - } - arrSig.append(']'); - Type array = getType(elemType.FullName + arrSig); - if (array != null) - return array; - array = new PrimitiveType(elemType.Module, - elemType.Attributes - | TypeAttributes.Sealed - | TypeAttributes.Serializable, - elemType.FullName + arrSig, - ARRAY(), EmptyTypes, null, - AuxAttr.Array, elemType); - return addType(array); - } - - /***/ - public static Type mkPtr(Type elemType) { - String name = elemType.FullName + "*"; - Type type = getType(name); - if (type != null) return type; - type = new PrimitiveType(elemType.Module, - elemType.Attributes, - name, null, EmptyTypes, null, - AuxAttr.Pointer, elemType); - return addType(type); - } - - /***/ - public static Type mkByRef(Type elemType) { - String name = elemType.FullName + "&"; - Type type = getType(name); - if (type != null) return type; - type = new PrimitiveType(elemType.Module, - elemType.Attributes, - name, null, EmptyTypes, null, - AuxAttr.ByRef, elemType); - return addType(type); - } - - //########################################################################## - // public methods - - /** - * Return the type with the specified signature parameters. - * For example, the fully qualified name for a class might look like this: - * TopNamespace.SubNameSpace.ContainingClass+NestedClass,MyAssembly - */ - public static Type GetType(String fullName) { - Type type = getType(fullName); - if (type != null) return type; - - // check if it's an array type; TODO: make array type handling more robust - int i = fullName.lastIndexOf('['); - int j = fullName.lastIndexOf(']'); - if (i >= 0) - if (j > i && j == (fullName.length() - 1)) { - String elementTypeName = fullName.substring(0, i); - Type elementType = GetType(elementTypeName); - if (elementType == null) - throw new RuntimeException - ("Unknown element type '" + elementTypeName + - "' for the array type: " + fullName); - int rank = j - i; - for (int k = i + 1; k < j; k++) { - if (fullName.charAt(k) != ',') - throw new RuntimeException - ("Malformed type name: " + fullName); - } - return mkArray(elementType, rank); - } else - throw new RuntimeException("Malformed type name: " + fullName); - - // check if it's a pointer type - if (fullName.charAt(fullName.length() - 1) == '*') - return addType - (mkPtr(GetType(fullName.substring(0, fullName.length()-1)))); - - // check if it's a nested class - i = fullName.lastIndexOf('+'); - if (i > 0) { - if (i == 0 || i == (fullName.length() - 1)) - throw new RuntimeException("malformedTypeName"); - Type enclosing = GetType(fullName.substring(0, i)); - return enclosing == null ? null - : enclosing.GetNestedType(fullName.substring(i + 1)); - } - - //System.out.println("Looking for type: " + fullName + " (" + fullName.length() + ")"); - // try in the assemblies - Iterator assems = ch.epfl.lamp.compiler.msil.Assembly. 
- assemblies.values().iterator(); - while (type == null && assems.hasNext()) { - Assembly assem = ((Assembly) assems.next()); - type = assem.GetType(fullName); - //System.out.println("\tin assemby " + assem + " -> " + type); - } - - Type type2 = getType(fullName); - if (type == type2) return type; - return type == null ? null : addType(type); - } - - /** - * @return the type of the object encompassed or referenced to - * by the current array, pointer or reference type. - */ - public Type GetElementType() { - return elemType; - } - - /** - * @return the type underlying an enumeration type. - */ - public Type getUnderlyingType() { - if (!IsEnum()) return null; - // this would force the loading of the underlying type from the - // the type of the value__ field of the enumeration - initFields(); - return underlyingType; - } - - //########################################################################## - // GetField/s/ - - /** Searches for the field with the specified name. */ - public FieldInfo GetField(String name) { - initFields(); - for (int i = 0; i < fields.length; i++) - if (fields[i].Name.equals(name) && !fields[i].IsPrivate()) - return fields[i]; - return null; - } - - /** - */ - public FieldInfo GetField(String name, int bindingFlags) { - FieldInfo[] fields = this.GetFields(bindingFlags); - for (int i = 0; i < fields.length; i++) - if (name.equals(fields[i].Name)) - return fields[i]; - return null; - } - - /** Gets the fields of the current Type. */ - public FieldInfo[] GetFields() { - return GetFields(BindingFlags.Instance | BindingFlags.Public); - } - - /** - */ - public FieldInfo[] GetFields(int bindingFlags) { - initFields(); - final FieldInfo[] fields = - getAllFields((bindingFlags & BindingFlags.DeclaredOnly) != 0); - final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0; - final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0; - final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0; - final boolean getNonPublic = - (bindingFlags & BindingFlags.NonPublic) != 0; - - int cnt = 0; - for (int i = 0; i < fields.length; i++) { - FieldInfo field = fields[i]; - boolean accessible = (getPublic && field.IsPublic()) - || (getNonPublic && !field.IsPublic()); - if (accessible - // strip off the private fields up the hierarchy - && ((field.DeclaringType == this) - || ((field.DeclaringType != this) && !field.IsPrivate())) - && ((getInstance && !field.IsStatic()) - || ((getStatic && field.IsStatic()) && - (field.DeclaringType == this - || (bindingFlags & BindingFlags.FlattenHierarchy) != 0)) - ) - ) - fields[cnt++] = field; - } - FieldInfo [] resFields = new FieldInfo[cnt]; - System.arraycopy(fields, 0, resFields, 0, cnt); - return resFields; - } - - protected FieldInfo[] getAllFields(boolean declaredOnly) { - initFields(); - FieldInfo [] inherited = BaseType() == null || declaredOnly - ? FieldInfo.EMPTY_ARRAY - : BaseType().getAllFields(declaredOnly); - FieldInfo[] allFields = - new FieldInfo[inherited.length + this.fields.length]; - System.arraycopy(inherited, 0, allFields, 0, inherited.length); - System.arraycopy(this.fields, 0, - allFields, inherited.length, this.fields.length); - return allFields; - } - - //########################################################################## - // GetConstructor/s/ - - /** Searches for a public instance constructor whose parameters - * match the types in the specified array. 
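The array branch of GetType above derives both pieces of information directly from the name: everything before the final '[' is the element type, and the rank is the distance between the brackets, i.e. one more than the number of commas (rank = j - i); the pointer ('*') and nested ('+') cases above are handled analogously. A minimal, self-contained sketch of that rank computation, using a hypothetical helper that is not part of the msil library and only mirrors the parsing shown here:

    object ArrayRank {
      /** Some((elementName, rank)) for names ending in "[", commas, "]";
        * None for non-array names; a malformed suffix raises a RuntimeException,
        * mirroring the behaviour of GetType above. */
      def parse(fullName: String): Option[(String, Int)] = {
        val i = fullName.lastIndexOf('[')
        val j = fullName.lastIndexOf(']')
        if (i < 0) None
        else if (j > i && j == fullName.length - 1) {
          // every character between the brackets must be a comma
          if (fullName.substring(i + 1, j).exists(_ != ','))
            throw new RuntimeException("Malformed type name: " + fullName)
          Some((fullName.substring(0, i), j - i))
        } else
          throw new RuntimeException("Malformed type name: " + fullName)
      }
    }

    // ArrayRank.parse("System.Int32[]")   == Some(("System.Int32", 1))
    // ArrayRank.parse("System.Int32[,,]") == Some(("System.Int32", 3))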
*/ - public ConstructorInfo GetConstructor(Type[] paramTypes) { - initMethods(); - for (int i = 0; i < constructors.length; i++) { - if (equalParameters(constructors[i].GetParameters(), paramTypes)) - return constructors[i]; - } - return null; - } - - /** Returns all public instance constructors defined for the current Type.*/ - public ConstructorInfo[] GetConstructors() { - return GetConstructors(BindingFlags.Instance | BindingFlags.Public); - } - - /***/ - public ConstructorInfo[] GetConstructors(int bindingFlags) { - initMethods(); - final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0; - final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0; - final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0; - final boolean getNonPublic = - (bindingFlags & BindingFlags.NonPublic) != 0; - - ConstructorInfo[] constrs = - new ConstructorInfo[this.constructors.length]; - int cnt = 0; - for (int i = 0; i < this.constructors.length; i++) { - ConstructorInfo constr = this.constructors[i]; - boolean accessible = (getPublic && constr.IsPublic()) - || (getNonPublic && !constr.IsPublic()); - if (accessible - && ((getInstance && !constr.IsStatic()) - || (getStatic && constr.IsStatic()))) - constrs[cnt++] = constr; - } - ConstructorInfo [] resConstrs = new ConstructorInfo[cnt]; - System.arraycopy(constrs, 0, resConstrs, 0, cnt); - return resConstrs; - } - - //########################################################################## - // GetMethod/s/ - - /** Searches for the specified public method whose parameters - * match the specified argument types. */ - public MethodInfo GetMethod(String name, Type[] paramTypes) { - return GetMethod(name, paramTypes, null); - } - - public MethodInfo GetMethod(String name, Type[] paramTypes, Type retType) { - initMethods(); - MethodInfo method = findMethod(methods, name, paramTypes, retType); - if (method != null) - return method; - if (BaseType() != null) { - method = BaseType().GetMethod(name, paramTypes, retType); - if (method != null) - return method; - } -// StringBuffer str = new StringBuffer(name); -// str.append('('); -// for (int i = 0; i < paramTypes.length; i++) { -// if (i > 0) str.append(", "); -// str.append(paramTypes[i]); -// } -// str.append(')'); -// System.out.println("Cannot find method " + str + ":"); -// System.out.println("Methods of class " + this); -// for (int i = 0; i < methods.length; i++) -// System.out.println("\t" + methods[i]); - return null; - } - - /** - */ - protected static MethodInfo findMethod(MethodInfo[] methods, - String name, - Type[] paramTypes, - Type retType) - { - for (int i = 0; i < methods.length; i++) - if (name.equals(methods[i].Name) - && equalParameters(methods[i].GetParameters(), paramTypes) - && (retType == null || methods[i].ReturnType == retType)) - return methods[i]; - return null; - } - - /** - */ - protected static boolean equalParameters(ParameterInfo[] params, - Type[] paramTypes) - { - if (params.length != paramTypes.length) - return false; - for (int i = 0; i < params.length; i++) { -// System.out.println(params[i].ParameterType + " == " + paramTypes[i] -// + " = " + (params[i].ParameterType == paramTypes[i])); - if (params[i].ParameterType != paramTypes[i]) - return false; - } - return true; - } - - /** - */ - public MethodInfo GetMethod(String name, Type[] paramTypes, int bindingFlags) { - MethodInfo[] methods = GetMethods(bindingFlags); - MethodInfo method = findMethod(methods, name, paramTypes, null); - if (method == null) { - StringBuffer str = new 
StringBuffer(name); - str.append('('); - for (int i = 0; i < paramTypes.length; i++) { - if (i > 0) str.append(", "); - str.append(paramTypes[i]); - } - str.append(')'); - System.out.println("Cannot find method " + str + ":"); - System.out.println("Methods of class " + this); - for (int i = 0; i < methods.length; i++) - System.out.println("\t" + methods[i]); - } - return method; - } - - /** Returns all public methods of the current Type. */ - public MethodInfo[] GetMethods() { - return GetMethods(BindingFlags.Instance | BindingFlags.Public); - } - - /** - */ - public MethodInfo[] GetMethods(int bindingFlags) { - initMethods(); - final MethodInfo[] methods = - getAllMethods((bindingFlags & BindingFlags.DeclaredOnly) != 0); - //System.out.println("" + this + ".GetMethods(int) -> " + methods.length); - final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0; - final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0; - final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0; - final boolean getNonPublic = - (bindingFlags & BindingFlags.NonPublic) != 0; - - int cnt = 0; - for (int i = 0; i < methods.length; i++) { - MethodInfo method = methods[i]; - boolean accessible = (getPublic && method.IsPublic()) - || (getNonPublic && !method.IsPublic()); - if (accessible - // strip off the private methods up the hierarchy - && ((method.DeclaringType == this) - || ((method.DeclaringType != this) && !method.IsPrivate())) - && ((getInstance && !method.IsStatic()) - || ((getStatic && method.IsStatic()) && - (method.DeclaringType == this - || (bindingFlags & BindingFlags.FlattenHierarchy) != 0)) - ) - ) - methods[cnt++] = method; - } - MethodInfo [] resMethods = new MethodInfo[cnt]; - System.arraycopy(methods, 0, resMethods, 0, cnt); - return resMethods; - } - - protected MethodInfo[] getAllMethods(boolean declaredOnly) { - initMethods(); - MethodInfo[] inherited = BaseType() == null || declaredOnly - ? MethodInfo.EMPTY_ARRAY - : BaseType().getAllMethods(declaredOnly); - MethodInfo[] allMethods = - new MethodInfo[inherited.length + this.methods.length]; - System.arraycopy(inherited, 0, allMethods, 0, inherited.length); - System.arraycopy(this.methods, 0, - allMethods, inherited.length, this.methods.length); - return allMethods; - } - - //########################################################################## - // GetProperty/ies/ - - /** Returns all public properties of the current Type. - */ - public PropertyInfo[] GetProperties() { - initProperties(); - return (PropertyInfo[]) properties.clone(); - } - - /** Returns the properties of the current class - * that satisfy the binding constrints. - */ - public PropertyInfo[] GetProperties(int bindingFlags) { - initProperties(); - return (PropertyInfo[]) properties.clone(); - } - - /** Returns the public property with the given name. - */ - public PropertyInfo GetProperty(String name) { - initProperties(); - for (int i = 0; i < properties.length; i++) - if (name.equals(properties[i].Name)) - return properties[i]; - return null; - } - - /** Returns the property with the given name - * that satisfies the binding constraints. 
- */ - public PropertyInfo GetProperty(String name, int bindingFlags) { - throw new RuntimeException("Method not implemented yet"); - } - - //########################################################################## - // GetEvent(s) - - public EventInfo[] GetEvents() { - initEvents(); - return (EventInfo[]) events.clone(); - } - - //########################################################################## - // GetNestedType/s/ - - /** Searches for nested type with the specified name. */ - public Type GetNestedType(String name) { - initNestedTypes(); - for (int i = 0; i < nestedTypes.length; i++) - if (nestedTypes[i].Name.equals(name)) - return nestedTypes[i]; - return null; - } - - /** Returns all types nested within the current Type. */ - public Type[] GetNestedTypes() { - initNestedTypes(); - return (Type[]) nestedTypes.clone(); - } - - //########################################################################## - // GetInterface/s/ - - /** Searches for an Interface with the given name implemented by this type - */ - public Type GetInterface(String name) { - return GetInterface(name, false); - } - - /** Searches for the specified interface, - * specifying whether to do a case-sensitive search. - * @param name - the name of the interface to get - * @param ignoreCase true to perform a case-insensitive search for name - * false to perform a case-sensitive search for name - * @return A Type object representing the interface with the specified name, - * implemented or inherited by the current Type, if found; - * otherwise, a null reference - */ - public Type GetInterface(String name, boolean ignoreCase) { - initInterfaces(); - for (int i = 0; i < interfaces.length; i++) { - Type iface = interfaces[i]; - if (ignoreCase) { - if (name.equalsIgnoreCase(iface.Name)) return iface; - if (name.equalsIgnoreCase(iface.FullName)) return iface; - } else { - if (name.equals(iface.Name)) return iface; - if (name.equals(iface.FullName)) return iface; - } - } - return BaseType() == null ? null - : BaseType().GetInterface(name, ignoreCase); - } - - /** Returns the interfaces implemented or inherited by the current Type. */ - public Type[] GetInterfaces() { - initInterfaces(); - if (BaseType() == null) return interfaces; - - Type[] ifaces = interfaces; - int count = 0; - for (int i = 0; i < interfaces.length; i++) { - if (BaseType().GetInterface(interfaces[i].FullName) == null) - ifaces[count++] = ifaces[i]; - } - Type[] baseTypeIfaces = BaseType().GetInterfaces(); - - Type[] res = new Type[baseTypeIfaces.length + count]; - System.arraycopy(baseTypeIfaces, 0, res, 0, baseTypeIfaces.length); - System.arraycopy(ifaces, 0, res, baseTypeIfaces.length, count); - - return res; - } - - - public boolean isSubtypeOf(Type that) { - if (this == that || BaseType() == that || that == OBJECT()) return true; - initInterfaces(); - for (int i = 0; i < interfaces.length; i++) - if (interfaces[i].isSubtypeOf(that)) - return true; - boolean res = BaseType() == null ? 
false : BaseType().isSubtypeOf(that); -// if (!res) { -// System.out.println(dumpType(this) + " not a subtype of " + -// dumpType(that)); -// } - return res; - } - - private static String formatType(Type t) { - if (t == null) return ""; - String cname = t.getClass().getName(); - int k = cname.lastIndexOf("."); - if (k >= 0) - cname = cname.substring(k + 1); - return "[" + t.Assembly().GetName() + "]" + t + - "(" + cname + "#" + Integer.toHexString(t.hashCode()) + ")"; - } - private static String dumpType(Type t) { - StringBuffer str = new StringBuffer(); - str.append(formatType(t) + " : "); - str.append(formatType(t.BaseType())); - Type[] ifaces = t.GetInterfaces(); - for (int i = 0; i < ifaces.length; i++) - str.append(", " + formatType(ifaces[i])); - return str.toString(); - } - - //########################################################################## - // GetMember/s/ - - protected MemberInfo[] members; - - public MemberInfo[] GetMember(String name) { - aggregateMembers(); - List l = new ArrayList(); - for (int i = 0; i < members.length; i++) { - if (name.equals(members[i].Name)) - l.add(members[i]); - } - return (MemberInfo[])l.toArray(MemberInfo.EMPTY_ARRAY); - } - - protected void aggregateMembers() { - if (members != null) - return; - initFields(); - initMethods(); - initProperties(); - initNestedTypes(); - // the List returned by Arrays.asList doesn't support the addAll method - // so we have to wrap it in ArrayList - List l = new ArrayList(Arrays.asList(fields)); - l.addAll(Arrays.asList(constructors)); - l.addAll(Arrays.asList(methods)); - l.addAll(Arrays.asList(properties)); - l.addAll(Arrays.asList(nestedTypes)); - members = (MemberInfo[]) l.toArray(MemberInfo.EMPTY_ARRAY); - } - - //########################################################################## - // non-standard methods that return only members declared in this type - - /** - * Return only the fields declared in this type. - */ - public FieldInfo[] getFields() { - initFields(); - FieldInfo[] fields = new FieldInfo[this.fields.length]; - System.arraycopy(this.fields, 0, fields, 0, fields.length); - return fields; - } - - /** - * Return only the conrtuctors declared in this type. - */ - public ConstructorInfo[] getConstructors() { - initMethods(); - ConstructorInfo[] ctors = new ConstructorInfo[constructors.length]; - System.arraycopy(constructors, 0, ctors, 0, ctors.length); - return ctors; - } - - /** - * Return only the methods declared in this type. - */ - public MethodInfo[] getMethods() { - initMethods(); - MethodInfo[] methods = new MethodInfo[this.methods.length]; - System.arraycopy(this.methods, 0, methods, 0, methods.length); - return methods; - } - - /** - * Return only the properties declared in this type. - */ - public PropertyInfo[] getProperties() { - initProperties(); - PropertyInfo[] props = new PropertyInfo[properties.length]; - System.arraycopy(properties, 0, props, 0, props.length); - return props; - } - - /** - * Return only the interfaces directly implemented by this type. - */ - public Type[] getInterfaces() { - initInterfaces(); - Type[] ifaces = new Type[interfaces.length]; - System.arraycopy(interfaces, 0, ifaces, 0, ifaces.length); - return ifaces; - } - - /** - * Return the types declared in this type. 
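The upper-case accessors (GetFields, GetMethods, ...) walk the base-type chain and filter by BindingFlags, whereas the lower-case accessors below (getFields, getConstructors, ...) return only the members declared in the type itself. A minimal sketch of the difference, assuming `t` is a ch.epfl.lamp.compiler.msil.Type resolved after the relevant assemblies have been loaded; the helper name dumpFields is hypothetical:

    import ch.epfl.lamp.compiler.msil.{ BindingFlags, Type }

    def dumpFields(t: Type): Unit = {
      // inherited members included, filtered by the binding flags
      for (f <- t.GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic))
        println("visible:  " + f.Name)
      // only the fields declared in `t` itself
      for (f <- t.getFields())
        println("declared: " + f.Name)
    }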
- */ - public Type[] getNestedTypes() { - initNestedTypes(); - Type[] nested = new Type[nestedTypes.length]; - System.arraycopy(nestedTypes, 0, nested, 0, nested.length); - return nested; - } - - //########################################################################## - - public String toString() { - return FullName; - } - - //########################################################################## - // lazy type construction members - - private boolean initBaseType = true; - protected final void initBaseType() { - if (initBaseType) { - loadBaseType(); - initBaseType = false; - } - } - protected void loadBaseType() {} - - private boolean initInterfaces = true; - protected void initInterfaces() { - if (initInterfaces) { - loadInterfaces(); - initInterfaces = false; - } - assert interfaces != null : "In type " + this; - } - protected void loadInterfaces() {} - - private boolean initNestedTypes = true; - protected void initNestedTypes() { - if (initNestedTypes) { - loadNestedTypes(); - initNestedTypes = false; - } - assert nestedTypes != null : "In type " + this; - } - protected void loadNestedTypes() {} - - private boolean initFields = true; - protected void initFields() { - if (initFields) { - loadFields(); - initFields = false; - } - assert fields != null : "In type " + this; - } - protected void loadFields() {} - - private boolean initMethods = true; - protected void initMethods() { - if (initMethods) { - loadMethods(); - initMethods = false; - } - assert constructors != null : "In type " + this; - assert methods != null : "In type " + this; - } - protected void loadMethods() {} - - private boolean initProperties = true; - protected void initProperties() { - if (initProperties) { - initMethods(); - loadProperties(); - initProperties = false; - } - assert properties != null : "In type " + this; - } - protected void loadProperties() {} - - private boolean initEvents = true; - protected void initEvents() { - if (initEvents) { - initMethods(); - loadEvents(); - initEvents = false; - } - assert events != null : "In type " + this; - } - protected void loadEvents() {} - - //########################################################################## - - //########################################################################## - // static members - - private static Assembly MSCORLIB; - private static Module MSCORLIB_DLL; - - public static Type OBJECT() { return __OBJECT; } - public static Type STRING() { return __STRING; } - public static Type ARRAY() { return __ARRAY; } - public static Type VOID() { return __VOID; } - public static Type ENUM() { return __ENUM; } - public static Type VALUE_TYPE() { return __VALUE_TYPE; } - - private static Type __OBJECT; - private static Type __STRING; - private static Type __ARRAY; - private static Type __VOID; - private static Type __ENUM; - private static Type __VALUE_TYPE; - - public static void initMSCORLIB(Assembly mscorlib) { - if (MSCORLIB != null) - throw new RuntimeException("mscorlib already initialized"); - MSCORLIB = mscorlib; - MSCORLIB_DLL = MSCORLIB.GetModules()[0]; - - __OBJECT = mscorlib.GetType("System.Object"); - __STRING = mscorlib.GetType("System.String"); - __ARRAY = mscorlib.GetType("System.Array"); - __VOID = mscorlib.GetType("System.Void"); - __ENUM = mscorlib.GetType("System.Enum"); - __VALUE_TYPE = mscorlib.GetType("System.ValueType"); - } - - //########################################################################## - -} // class Type diff --git a/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java 
b/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java deleted file mode 100644 index 8f489fa46f..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - -/** - * Specifies type attributes. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public final class TypeAttributes { - - //########################################################################## - // Visibilty attributes - - /** Bitmask used to retrieve visibility information. */ - public static final int VisibilityMask = 0x00000007; - - /** Class has no public scope. */ - public static final int NotPublic = 0x00000000; - - /** Class has public scope. */ - public static final int Public = 0x00000001; - - /** Class is nested with public visibility. */ - public static final int NestedPublic = 0x00000002; - - /** Class is nested with private visibility. */ - public static final int NestedPrivate = 0x00000003; - - /** Class is nested with family visibility, and is thus accessible - * only by methods within its own type and any subtypes. */ - public static final int NestedFamily = 0x00000004; - - /** Class is nested with assembly visibility, and is thus accessible - * only by methods within its assembly. */ - public static final int NestedAssembly = 0x00000005; - - /** Class is nested with assembly and family visibility, and is thus accessible - * only by methods lying in the intersection of its family and assembly. */ - public static final int NestedFamANDAssem = 0x00000006; - - /** Class is nested with family or assembly visibility, and is thus accessible - * only by methods lying in the union of its family and assembly. */ - public static final int NestedFamORAssem = 0x00000007; - - //########################################################################## - // Class layout attributes - - /** Bitmask used to retrieve class layout information. */ - public static final int LayoutMask = 0x00000018; - - /** Class fields are automatically laid out by the CLR. */ - public static final int AutoLayout = 0x00000000; - - /** Class fields are laid out sequentially, in the order that the fields - * were emitted to the metadata. */ - public static final int SequentialLayout = 0x00000008; - - /** Class fields are laid out at the specified offsets. */ - public static final int ExplicitLayout = 0x00000010; - - //########################################################################## - // Class semantics attributes - - /** Bitmask used to retrieve class semantics information. */ - public static final int ClassSemanticsMask = 0x00000020; - - /** Type is a class. */ - public static final int Class = 0x00000000; - - /** Type is an interface. */ - public static final int Interface = 0x00000020; - - //########################################################################## - // Special semantics in addition to class semantics - - /** Class is abstract. */ - public static final int Abstract = 0x00000080; - - /** Class is cannot be extended. */ - public static final int Sealed = 0x00000100; - - /** Class is special in a way denoted by the name. */ - public static final int SpecialName = 0x00000400; - - //########################################################################## - // Implementation attributes - - /** Class/interface is imported from another module. */ - public static final int Import = 0x00001000; - - /** Class can be serialized. 
*/ - public static final int Serializable = 0x00002000; - - //########################################################################## - // String formatting attributes - - /** Bitmask used to retrieve string information for native interop. */ - public static final int StringFormatMask = 0x00030000; - - /** LPTSTR is interpreted as ANSI. */ - public static final int AnsiClass = 0x00000000; - - /** LPTSTR is interpreted as UNICODE. */ - public static final int UnicodeClass = 0x00010000; - - /** LPTSTR is interpreted automatically. */ - public static final int AutoClass = 0x00020000; - - //########################################################################## - // Class initialization attributes - - /** Initialize the class before first static field access. */ - public static final int BeforeFieldInit = 0x00100000; - - //########################################################################## - // Additional flags - - /** CLI provides 'special' behavior, depending upon the name of the type. */ - public static final int RTSpecialName = 0x00000800; - - /** Type has security associate with it. */ - public static final int HasSecurity = 0x00040000; - - //########################################################################## - - public static String accessModsToString(int attrs) { - switch (attrs & VisibilityMask) { - case NotPublic: return "private"; - case Public: return "public"; - case NestedPublic: return "nested public"; - case NestedPrivate: return "nested private"; - case NestedFamily: return "nested family"; - case NestedAssembly: return "nested assembly"; - case NestedFamANDAssem: return "nested famandassem"; - case NestedFamORAssem: return "nested famorassem"; - default: - throw new RuntimeException(); - } - } - - /** Returns a string representation of the given attributes. 
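Visibility occupies the low three bits (VisibilityMask), layout and class semantics have their own masks, and the remaining attributes are independent bits, so an attribute word is composed with bitwise or and decoded through the masks. A minimal sketch of that, assuming the TypeAttributes class removed by this diff is still on the classpath:

    import ch.epfl.lamp.compiler.msil.TypeAttributes

    object TypeAttributesDemo {
      def main(args: Array[String]): Unit = {
        // attribute word for a public, sealed, serializable class
        val attrs = TypeAttributes.Public | TypeAttributes.Sealed | TypeAttributes.Serializable

        println(TypeAttributes.accessModsToString(attrs)) // public
        println(TypeAttributes.isNested(attrs))           // false
        println(TypeAttributes.toString(attrs))           // public auto ansi sealed serializable
      }
    }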
*/ - public static String toString(int attrs) { - StringBuffer str = new StringBuffer(accessModsToString(attrs)); - switch (attrs & LayoutMask) { - case AutoLayout: str.append(" auto"); break; - case SequentialLayout: str.append(" sequential"); break; - case ExplicitLayout: str.append(" explicit"); break; - } - switch (attrs & StringFormatMask) { - case AnsiClass: str.append(" ansi"); break; - case UnicodeClass: str.append(" unicode"); break; - case AutoClass: str.append(" autochar"); break; - } - if ((attrs & Interface) != 0) str.append(" interface"); - if ((attrs & Abstract) != 0) str.append(" abstract"); - if ((attrs & Sealed) != 0) str.append(" sealed"); - if ((attrs & BeforeFieldInit) != 0) str.append(" beforefieldinit"); - if ((attrs & Serializable) != 0) str.append(" serializable"); - if ((attrs & SpecialName) != 0) str.append(" specialname"); - if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname"); - return str.toString(); - } - - /***/ - public static final boolean isNested(int attrs) { - switch (attrs & VisibilityMask) { - case NestedPublic: - case NestedPrivate: - case NestedFamily: - case NestedAssembly: - case NestedFamANDAssem: - case NestedFamORAssem: - return true; - default: return false; - } - } - - //########################################################################## - - // makes the class uninstantiable - private TypeAttributes() {} - - //########################################################################## - -} // class TypeAttributes diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Version.java b/src/msil/ch/epfl/lamp/compiler/msil/Version.java deleted file mode 100644 index ad4b09b163..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/Version.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * System.Reflection-like API for access to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil; - - -/** - * Represents the version number for a common language runtime assembly - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public final class Version { - - //########################################################################## - // public interface - - /** - * Gets the value of the major component of the version - * number for this instance. - */ - public final int Major; - - /** - * Gets the value of the minor component of the version - * number for this instance. - */ - public final int Minor; - - /** - * Gets the value of the build component of the version - * number for this instance. - */ - public final int Build; - - /** - * Gets the value of the revision component of the version - * number for this instance. - */ - public final int Revision; - - /** - * Initializes a new instance of the Version class. - */ - public Version() { - this(0,0,0,0); - } - - /** - * Initializes a new instance of the Version class with - * the specified major, minor, build, and revision numbers. - */ - public Version(int major, int minor, int build, int revision) { - this.Major = major; - this.Minor = minor; - this.Build = build; - this.Revision = revision; - } - - /** - * Converts the value of this instance to its equivalent String representation - */ - public String toString() { - return "" + Major + "." + Minor + "." + Build + "." 
+ Revision; - } - - //########################################################################## - -} // class Version diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala deleted file mode 100644 index 6bf4c7d1da..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala +++ /dev/null @@ -1,122 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import ch.epfl.lamp.compiler.msil._ -import java.io.IOException - -/** - * Defines and represents a dynamic assembly. - * A dynamic assembly is an assembly that is created using the compiler.msil - * emit APIs. The dynamic modules in the assembly are saved when the dynamic - * assembly is saved using the Save method. To generate an executable, the - * SetEntryPoint method must be called to identify the method that is the - * entry point to the assembly. Assemblies are saved as DLL by default, - * unless SetEntryPoint requests the generation of a console application - * or a Windows-based application. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -class AssemblyBuilder(name: AssemblyName) - extends Assembly(name) - with ICustomAttributeSetter - with Visitable -{ - //########################################################################## - // public methods - - /** - * Defines a dynamic module with the given name that will be saved - * to the specified file. No symbol information is emitted. - */ - def DefineDynamicModule(name: String, fileName: String): ModuleBuilder = { - val module = new ModuleBuilder(name, fileName, "" + null, this) - addModule(name, module) - return module - } - - /** Returns the dynamic module with the specified name. */ - def GetDynamicModule(name: String): ModuleBuilder = { - return GetModule(name).asInstanceOf[ModuleBuilder] - } - - /** Saves this dynamic assembly to disk. */ - @throws(classOf[IOException]) - def Save(fileName: String) { - generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String] - ILPrinterVisitor.printAssembly(this, fileName) - } - - @throws(classOf[IOException]) - def Save(destPath: String, sourceFilesPath: String) { - generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String] - ILPrinterVisitor.printAssembly(this, destPath, sourceFilesPath) - } - - /** Returns the list of generated files from calling Save(). */ - def GetGeneratedFiles(): Array[String] = { - return generatedFiles.toArray // (new Array[String](generatedFiles.size())).asInstanceOf[Array[String]] - } - - /** Sets the entry point for this dynamic assembly. */ - def SetEntryPoint(entryMethod: MethodInfo) { - EntryPoint = entryMethod - } - - /** Sets a custom attribute. 
*/ - def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) { - addCustomAttribute(constr, value) - } - - //########################################################################## - // protected members - - // all extern assemblies used in this assembly builder - protected var externAssemblies = scala.collection.mutable.Set.empty[Assembly] - - // register an extern assembly - protected def registerExternAssembly(assembly: Assembly) { - externAssemblies += assembly - } - - // get all extern Assemblies used in this Assembly Builder - def getExternAssemblies(): Array[Assembly] = { - externAssemblies = scala.collection.mutable.Set[Assembly]() - val iter = Assembly.assemblies.values().iterator - while (iter.hasNext) { - externAssemblies += iter.next.asInstanceOf[Assembly] - } - externAssemblies -= this - return externAssemblies.toArray - } - - def loadModules() {} - - // contains list of generated .msil files after calling Save() - var generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String] - - //########################################################################## - //########################################################################## - - /** the apply method for a visitor */ - @throws(classOf[IOException]) - def apply(v: Visitor) { - v.caseAssemblyBuilder(this) - } - - //########################################################################## -} - -object AssemblyBuilderFactory { - /** - * Defines a dynamic assembly with the specified name. - */ - def DefineDynamicAssembly(name: AssemblyName): AssemblyBuilder = { - //Assembly.reset() - return new AssemblyBuilder(name) - } -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala deleted file mode 100644 index ddd4708ecd..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala +++ /dev/null @@ -1,64 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import ch.epfl.lamp.compiler.msil.ConstructorInfo -import ch.epfl.lamp.compiler.msil.Type -import java.io.IOException - -/** - * Defines and represents a constructor of a dynamic class. - * ConstructorBuilder is used to fully describe a constructor in - * Microsoft intermediate language (MSIL), including the name, attributes, - * signature, and constructor body. It is used in conjunction with the - * TypeBuilder class to create classes at run time. Call DefineConstructor - * to get an instance of ConstructorBuilder. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -class ConstructorBuilder(declType: Type, attrs: Int, paramTypes: Array[Type]) - extends ConstructorInfo(declType, attrs, paramTypes) - with ICustomAttributeSetter - with Visitable -{ - - //########################################################################## - // public interface - - /** Defines a parameter of this constructor. */ - def DefineParameter(pos: Int, attr: Int, name: String): ParameterBuilder = { - val param = new ParameterBuilder(name, params(pos).ParameterType, attr, pos) - params(pos) = param - return param - } - - /** Returns an ILGenerator for this constructor. */ - def GetILGenerator(): ILGenerator = { - return ilGenerator - } - - /** Sets a custom attribute. 
*/ - def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) { - addCustomAttribute(constr, value) - } - - //########################################################################## - - /** The apply method for a visitor. */ - @throws(classOf[IOException]) - def apply(v: Visitor) { - v.caseConstructorBuilder(this) - } - - //########################################################################## - - // the Intermediate Language Generator - // it contains the method's body - protected var ilGenerator: ILGenerator = new ILGenerator(this) - - //########################################################################## -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala deleted file mode 100644 index 7ef9dc7a5b..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import ch.epfl.lamp.compiler.msil.FieldInfo -import ch.epfl.lamp.compiler.msil.Type -import ch.epfl.lamp.compiler.msil.FieldAttributes -import ch.epfl.lamp.compiler.msil.ConstructorInfo - -import ch.epfl.lamp.compiler.msil.util.PECustomMod - -import java.io.IOException - -/** - * Discovers the attributes of a field and provides access to field metadata. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -class FieldBuilder(name: String, declType: Type, attrs: Int, fieldTypeWithMods: PECustomMod) - extends FieldInfo(name, declType, attrs, fieldTypeWithMods, null) - with ICustomAttributeSetter - with Visitable -{ - - //########################################################################## - // public interface - - /** Sets a custom attribute. */ - def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) { - addCustomAttribute(constr, value) - } - - //########################################################################## - - /** the apply method for a visitor */ - @throws(classOf[IOException]) - def apply(v: Visitor) { - v.caseFieldBuilder(this) - } - - //########################################################################## - - protected var defaultValue: Object = _ - - /** Sets the default value of this field. */ - def SetConstant(defaultValue: Object) { - this.defaultValue = defaultValue - } - - /** Specifies the field layout. 
*/ - def SetOffset(iOffset: Int) { - //this.fieldOffset = FieldAttributes.Offset.Value(iOffset) - } - - //########################################################################## -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala deleted file mode 100644 index 5d74d3aa95..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import ch.epfl.lamp.compiler.msil.ConstructorInfo - -/** - * Declares the possibility to set a custom attribute for a member - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -trait ICustomAttributeSetter { - def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala deleted file mode 100644 index 63ecbfd353..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala +++ /dev/null @@ -1,539 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import ch.epfl.lamp.compiler.msil._ -import ch.epfl.lamp.compiler.msil.util.Table -import java.util.Stack -import java.io.IOException -import ILGenerator._ - -/** - * Generates Microsoft intermediate language (MSIL) instructions. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ - final class ILGenerator(_owner: MethodBase) extends Visitable { - - //########################################################################## - // public interface - - /** - * Puts the specified instruction onto the stream of instructions. - */ - def Emit(opcode: OpCode) { - // switch opcode - if (opcode == OpCode.Ret) { - emit(opcode, null, 0) - } else { - emit(opcode, null) - } - } - - /** - * Puts the specified instruction and character argument onto - * the Microsoft intermediate language (MSIL) stream of instructions. - */ - def Emit(opcode: OpCode, arg: Char) { - emit(opcode,new Character(arg)) - } - - /** - * Puts the specified instruction and metadata token for the - * specified constructor onto the Microsoft intermediate language - * (MSIL) stream of instructions. - */ - def Emit(opcode: OpCode, arg: ConstructorInfo) { - assert(arg != null) - // newobj - // pop size is the number of parameters - emit(opcode,arg, OpCode.PUSH_size(opcode.CEE_push) - - arg.GetParameters().length) - } - - /** - * Puts the specified instruction onto the Microsoft intermediate language (MSIL) - * stream followed by the index of the given local variable. - */ - def Emit(opcode: OpCode, arg: LocalBuilder) { - assert(arg != null) - // ldarg | ldarg.s | ldarga - // ldarga.s | ldloc | ldloc.s | ldloca - // ldloca.s | starg | starg.s | stloc - // stloc.s - - // - emit(opcode, arg) - } - - - /** - * Puts the specified instruction and numerical argument onto - * the Microsoft intermediate language (MSIL) stream of instructions. - */ - def Emit(opcode: OpCode, arg: Double) { - // ldc.r4 | ldc.r8 - emit(opcode, new java.lang.Double(arg)) - } - - /** - * Puts the specified instruction and metadata token for the - * specified field onto the Microsoft intermediate language (MSIL) - * stream of instructions. 
- */ - def Emit(opcode: OpCode,arg: FieldInfo) { - assert(arg != null) - // ldfld | ldflda | ldsfld | ldsflda | stfld | stsfld - emit(opcode,arg) - } - - /** - * Puts the specified instruction and numerical argument onto - * the Microsoft intermediate language (MSIL) stream of instructions. - */ - def Emit(opcode: OpCode, arg: Short ) { - emit(opcode, new java.lang.Short(arg)) - } - - /** - * Puts the specified instruction and numerical argument onto - * the Microsoft intermediate language (MSIL) stream of instructions. - */ - def Emit(opcode: OpCode, arg: Int) { - // ldc.i4 | ldc.i4.s | unaligned - emit(opcode, new java.lang.Integer(arg)) - } - - /** - * Puts the specified instruction and numerical argument onto - * the Microsoft intermediate language (MSIL) stream of instructions. - */ - def Emit(opcode: OpCode, arg: Long) { - // ldc.i8 - emit(opcode, new java.lang.Long(arg)) - } - - /** - * Puts the specified instruction onto the Microsoft intermediate - * language (MSIL) stream and leaves space to include a label when - * fixes are done. - */ - def Emit(opcode: OpCode,label: Label) { - assert(label != null) - // beq | beq.s | bge | bge.s | - // bge.un | bge.un.s | bgt | bgt.s | bgt.un | bgt.un.s | - // ble | ble.s | ble.un | ble.un.s | blt | blt.s | - // blt.un | blt.un.s | bne.un | bne.un.s | br | br.s | - // brfalse | brfalse.s | brtrue | brtrue.s | leave | leave.s - - emit(opcode, label) - // is the label initialized ? if true backward jump else forward jump - if (label.isInitialized()) { -// if (arg.stacksize != lastLabel.stacksize) { -// System.err.println("ILGenerator.Emit: Stack depth differs depending on path:"); -// System.err.println("\tmethod = " + owner); -// System.err.println("\tPC = 0x" + Table.short2hex(lastLabel.address)); -// } - //assert arg.stacksize == lastLabel.stacksize; - } - else { - label.setStacksize(lastLabel.getStacksize()) - } - } - - /** - * Puts the specified instruction onto the Microsoft intermediate - * language (MSIL) stream and leaves space to include a label when - * fixes are done. - */ - def Emit(opcode: OpCode, arg: Array[Label] ) { - assert(arg != null) - // switch - - // ::= ( ) - // Examples: - // switch (0x3, -14, Label1) - // switch (5, Label2) - emit(opcode, arg, arg.length) - } - - /** - * Puts the specified instruction onto the Microsoft intermediate - * language (MSIL) stream followed by the metadata token for the - * given method. - */ - def Emit(opcode: OpCode,arg: MethodInfo) { - assert(arg != null) - // call | callvirt | jmp | ldftn | ldvirtftn - // pop size is the number of parameters - // pop 1 more if method is not static ! - // push size is either 0 (void Method) either 1 - assert(arg.ReturnType != null, "No ReturnType: " + arg.DeclaringType + "::" + arg.Name) - - val popush: Int = if (opcode == OpCode.Ldftn || - opcode == OpCode.Ldvirtftn || - opcode == OpCode.Jmp) - { - OpCode.PUSH_size(opcode.CEE_push) - OpCode.POP_size(opcode.CEE_pop) - } else if (opcode == OpCode.Calli || opcode == OpCode.Callvirt) { - (if(arg.ReturnType == VOID) 0 else 1) - arg.GetParameters().length - 1 - } else { - (if(arg.ReturnType == VOID) 0 else 1) - arg.GetParameters().length - } - emit(opcode, arg, popush) - } - - /** - * Puts the specified instruction and numerical argument onto - * the Microsoft intermediate language (MSIL) stream of instructions. 
- */ - def Emit(opcode: OpCode, arg: Float ) { - emit(opcode, new java.lang.Float(arg)) - } - - /** - * Puts the specified instruction onto the Microsoft intermediate - * language (MSIL) stream followed by the metadata token for the - * given string. - */ - def Emit(opcode: OpCode, arg: String ) { - assert(arg != null) - // ldstr - emit(opcode, arg) - } - - /** - * Puts the specified instruction onto the Microsoft intermediate - * language (MSIL) stream followed by the metadata token for the - * given type. - */ - def Emit(opcode: OpCode, arg: Type) { - assert(arg != null) - // box | castclass | cpobj | initobj | isinst | - // ldelema | ldobj | mkrefany | newarr | refanyval | - // sizeof | stobj | unbox - - emit(opcode, arg) - } - - /** - * Puts a call or callvirt instruction onto the Microsoft intermediate - * language (MSIL) stream. - */ - def EmitCall(opcode: OpCode, arg: MethodInfo, - optionalParameterTypes: Array[Type]) { - assert(arg != null) - // pop size is the number of parameters - // push size is either 0 (void Method) either 1 - //System.out.println(arg.ReturnType.Size + " " + arg.GetParameters().length); - emit(opcode, arg, (if(arg.ReturnType == VOID) 0 else 1) - - arg.GetParameters().length) - } - - /** - * Emits the Microsoft intermediate language (MSIL) necessary to - * call WriteLine with the given field. - */ - def EmitWriteLine(arg: FieldInfo) { - // first load field info - // if static use OpCode.Ldsfld - if (arg.IsStatic()) - Emit(OpCodes.Ldsfld, arg) - else - Emit(OpCodes.Ldfld, arg) - // then call System.Console.WriteLine(arg.Type) - val t: Type = Type.GetType("System.Console") - val argsType: Array[Type] = new Array[Type](1) - argsType(0) = arg.FieldType - val m: MethodInfo = t.GetMethod("WriteLine", argsType) - EmitCall(OpCode.Call, m, null) - } - - /** - * Emits the Microsoft intermediate language (MSIL) necessary - * to call WriteLine with the given local variable. - */ - def EmitWriteLine(arg: LocalBuilder) { - // first load local variable - Emit(OpCodes.Ldloc, arg) - // then call System.Console.WriteLine(arg.Type) - val t: Type = Type.GetType("System.Console") - val argsType: Array[Type] = new Array[Type](1) - argsType(0) = arg.LocalType - val m: MethodInfo = t.GetMethod("WriteLine", argsType) - EmitCall(OpCode.Call, m, null) - } - - /** - * Emits the Microsoft intermediate language (MSIL) to call - * WriteLine with a string. - */ - def EmitWriteLine(arg: String) { - // first load string - Emit(OpCode.Ldstr, arg) - // then call System.Console.WriteLine(string) - val t: Type = Type.GetType("System.Console") - val argsType: Array[Type] = new Array[Type](1) - argsType(0) = Type.GetType("System.String") - val m: MethodInfo = t.GetMethod("WriteLine", argsType) - EmitCall(OpCode.Call, m, null) - } - - /** - * Declares a local variable. - */ - def DeclareLocal(localType: Type): LocalBuilder = { - val l: LocalBuilder = new LocalBuilder(locals, localType) - locals = locals + 1 - localList += l - return l - } - - /** - * Returns a new label that can be used as a token for branching. - * In order to set the position of the label within the stream, you - * must call MarkLabel. This is just a token and does not yet represent - * any particular location within the stream. - */ - def DefineLabel():Label = { - new Label.NormalLabel() - } - - /** - * Marks the Microsoft intermediate language (MSIL) stream's - * current position with the given label. 
- */ - def MarkLabel(label: Label) { - label.mergeWith(lastLabel) - /* - label.address = lastLabel.address; - //label.stacksize = lastLabel.stacksize; - if (label.stacksize >= 0) - lastLabel.stacksize = label.stacksize; - */ - } - - /** Begins a lexical scope. */ - def BeginScope() { - emitSpecialLabel(Label.NewScope) - } - - /** Ends a lexical scope. */ - def EndScope() { - emitSpecialLabel(Label.EndScope) - } - - /** - * Begins an exception block for a non-filtered exception. - * The label for the end of the block. This will leave you in the correct - * place to execute finally blocks or to finish the try. - */ - def BeginExceptionBlock() { - emitSpecialLabel(Label.Try) - val endExc: Label = new Label.NormalLabel() // new Label(lastLabel) ??? - excStack.push(Label.Try, endExc) - } - - /** Begins a catch block. */ - def BeginCatchBlock(exceptionType: Type) { - val kind = excStack.peekKind() - if (kind == Label.Kind.Try || - kind == Label.Kind.Catch) { - /* ok */ - } else { - throw new RuntimeException("Catch should follow either a try or catch") - } - val endExc: Label = excStack.popLabel() - Emit(OpCodes.Leave, endExc) - // the CLI automatically provide the exception object on the evaluation stack - // we adjust the stacksize - lastLabel.incStacksize() - excStack.push(Label.Catch, endExc) - emitSpecialLabel(Label.Catch, exceptionType) - } - - /** Ends an exception block. */ - def EndExceptionBlock() { - val kind = excStack.peekKind() - if (kind == Label.Kind.Try) { - throw new RuntimeException("Try block with neither catch nor finally") - } else if (kind == Label.Kind.Catch) { - Emit(OpCodes.Leave, excStack.peekLabel()) - } else if (kind == Label.Kind.Finally) { - Emit(OpCodes.Endfinally) - } - MarkLabel(excStack.popLabel()) - emitSpecialLabel(Label.EndTry) - } - - /** - * Begins a finally block in the Microsoft intermediate language - * (MSIL) instruction stream. - */ - def BeginFinallyBlock() { - val endExc: Label = excStack.popLabel() - Emit(OpCodes.Leave, endExc) - excStack.push(Label.Finally, endExc) - emitSpecialLabel(Label.Finally) - } - - /** - * Emits an instruction to throw an exception. 
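BeginExceptionBlock, BeginCatchBlock, BeginFinallyBlock and EndExceptionBlock keep a stack of (kind, label) pairs, so callers only announce block boundaries and the generator itself emits the matching leave/endfinally instructions and marks the end label. A minimal sketch of the expected call order, assuming `il` was obtained from a MethodBuilder or ConstructorBuilder via GetILGenerator() and that mscorlib has been initialized so Type.GetType can resolve System.Exception; the wrapper emitGuarded is hypothetical:

    import ch.epfl.lamp.compiler.msil.Type
    import ch.epfl.lamp.compiler.msil.emit.ILGenerator

    def emitGuarded(il: ILGenerator): Unit = {
      il.BeginExceptionBlock()                              // pushes (Try, endLabel)
      il.EmitWriteLine("inside the protected region")
      il.BeginCatchBlock(Type.GetType("System.Exception"))  // emits leave, pushes (Catch, endLabel)
      il.EmitWriteLine("caught")
      il.EndExceptionBlock()                                // emits leave, marks endLabel
    }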
- */ - def ThrowException(exceptionType: Type) { - assert(exceptionType != null) - if (!exceptionType.isSubtypeOf(Type.GetType("System.Exception"))) - throw new RuntimeException - (exceptionType + " doesn't extend System.Exception" ) - val ctor: ConstructorInfo = exceptionType.GetConstructor(Type.EmptyTypes) - if (ctor == null) - throw new RuntimeException("Type " + exceptionType - + "doesn't have a default constructor") - Emit(OpCodes.Newobj, ctor) - Emit(OpCodes.Throw) - } - - /** - * sets the line of the source file corresponding to the next instruction - */ - def setPosition(line: Int) { - if (line != 0) lineNums.put(lastLabel, Integer.toString(line)) - } - - def setPosition(line: Int, filename: String) { - if (line != 0) lineNums.put(lastLabel, line + " '" + filename + "'") - } - - def setPosition(startLine: Int, endLine: Int, startCol: Int, endCol: Int, filename: String) { - val lineRange = startLine + "," + endLine - val colRange = startCol + "," + endCol - lineNums.put(lastLabel, lineRange + ":" + colRange + " '" + filename + "'") - } - - def getLocals(): Array[LocalBuilder] = localList.toArray - - def getLabelIterator() = labelList.iterator - - def getOpcodeIterator() = opcodeList.iterator - - def getArgumentIterator() = argumentList.iterator - - //########################################################################## - // private implementation details - - - - // the local variable list - private final val localList = scala.collection.mutable.ArrayBuffer.empty[LocalBuilder] - - // the label list, the opcode list and the opcode argument list - // labelList is an array of Label - // opcodeList is an array of OpCode - // argumentList is an array of Object (null if no argument) - private final val labelList = scala.collection.mutable.ArrayBuffer.empty[Label] - private final val opcodeList = scala.collection.mutable.ArrayBuffer.empty[OpCode] - private final val argumentList = scala.collection.mutable.ArrayBuffer.empty[Object] - - // the program counter (pc) - // also called the stream's current position - private var pc: Int = 0 - - // last label - private var lastLabel: Label = new Label.NormalLabel(pc,0) - - // the maximum size of stack - private var maxstack: Int = 0 - - // the number of the locals - private var locals: Int = 0 - - // stack of label for exception mechanism - private val excStack: ExceptionStack = new ExceptionStack() - - // the method info owner of this ILGenerator - var owner: MethodBase = _owner - - val lineNums = scala.collection.mutable.Map.empty[Label, String] - - - def getMaxStacksize(): Int = { this.maxstack } - - // private emit with Object Argument - private def emit(opcode: OpCode, arg: Object) { - emit(opcode, arg, opcode.CEE_popush) - } - - // private emit with Object Argument and override POPUSH - private def emit(opcode: OpCode, arg: Object, overridePOPUSH: Int) { - // add label, opcode and argument - labelList += lastLabel - opcodeList += opcode - argumentList += arg - // compute new lastLabel (next label) - val stackSize: Int = lastLabel.getStacksize() + overridePOPUSH - if (stackSize < 0) { - val msg = "ILGenerator.emit(): Stack underflow in method: " + owner - scala.Console.println(msg) - // throw new RuntimeException(msg) - } - if (stackSize > maxstack) - maxstack = stackSize - var address: Int = lastLabel.getAddress() + opcode.CEE_length - if (opcode.CEE_opcode == OpCode.CEE_SWITCH) { - address = address + 4*arg.asInstanceOf[Array[Label]].length - } - lastLabel = new Label.NormalLabel(address, stackSize) - pc = pc + 1 - } - - def 
Ldarg0WasJustEmitted() : Boolean = { - if(opcodeList.isEmpty) - return false - val lastEmitted = opcodeList(opcodeList.size - 1) - lastEmitted eq OpCode.Ldarg_0 - } - - private def emitSpecialLabel(l: Label) { - emitSpecialLabel(l, null) - } - private def emitSpecialLabel(l: Label, catchType: Type) { - labelList += l - opcodeList += null - argumentList += catchType - } - - //########################################################################## - // - @throws(classOf[IOException]) - def apply(v: Visitor) { - v.caseILGenerator(this) - } - - //########################################################################## -} // class ILGenerator - - -object ILGenerator { - - val VOID: Type = Type.GetType("System.Void") - val NO_LABEL: String = "" - - private final class ExceptionStack { - private val labels = new scala.collection.mutable.Stack[Label]() - private val kinds = new scala.collection.mutable.Stack[Label]() - def ExceptionStack() {} - def pop() { labels.pop; kinds.pop } - def push(kind: Label, label: Label) { - kinds.push(kind); labels.push(label) - } - def peekKind(): Label.Kind = kinds.top.getKind - def peekLabel(): Label = labels.top - def popLabel(): Label = { kinds.pop(); labels.pop() } - } - -} - diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala deleted file mode 100644 index 413b08ddd8..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala +++ /dev/null @@ -1,860 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies in MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import java.io.File -import java.io.FileWriter -import java.io.BufferedWriter -import java.io.PrintWriter -import java.io.IOException -import java.util.Comparator - -import ch.epfl.lamp.compiler.msil._ -import ch.epfl.lamp.compiler.msil.util.Table - -/** - * The MSIL printer Visitor. It prints a complete - * assembly in a single or multiple files. Then this file can be compiled by ilasm. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -abstract class ILPrinterVisitor extends Visitor { - - import ILPrinterVisitor._ - import OpCode._ - - //########################################################################## - - protected final val assemblyNameComparator = - new scala.math.Ordering[Assembly]() { - override def compare(o1: Assembly, o2: Assembly): Int = { - val a1 = o1.asInstanceOf[Assembly] - val a2 = o2.asInstanceOf[Assembly] - return a1.GetName().Name.compareTo(a2.GetName().Name) - } - } - - // the output file writer - protected var out: PrintWriter = null - - // the left margin - private var lmargin = 0 - - // indicate a newline - private var newline = true - - // print types without or with members? 
- protected var nomembers: Boolean = false - - // external assemblies - protected var as: Array[Assembly] = null - - private def align() { - if (newline) - padding = lmargin - printPadding() - newline = false - } - private def indent() { - lmargin += TAB - } - private def undent() { - lmargin -= TAB - assert(lmargin >= 0) - } - - private var padding = 0 - private def pad(n: Int) { - assert(n >= 0, "negative padding: " + n) - padding += n - } - private def printPadding() { - if (padding <= 0) - return - while (padding > SPACES_LEN) { - out.print(SPACES) - padding -= SPACES_LEN - } - out.print(SPACES.substring(0, padding)) - padding = 0 - } - - // methods to print code - protected def print(s: String) { align(); out.print(s)} - protected def print(o: Object) { align(); out.print(o) } - protected def print(c: Char) { align(); out.print(c) } - protected def print(`val`: Int) { align(); out.print(`val`)} - protected def print(`val`: Long){ align(); out.print(`val`)} - protected def println() { out.println(); newline = true; padding = 0 } - protected def println(c: Char) { print(c); println() } - protected def println(i: Int) { print(i); println() } - protected def println(l: Long) { print(l); println() } - protected def println(s: String){ print(s); println() } - protected def println(o: Object){ print(o); println() } - protected def printName(name: String) { - val ch = name.charAt(0) - //if (Character.isLetter(ch) && Character.isLowerCase(ch)) { - if ((ch != '.') && (ch != '!')) { - print('\''); print(name); print('\'') - } else - print(name) - } - - protected def printAssemblyBoilerplate() { - // print all the external assemblies - for (j <- 0 until as.length) { - printAssemblySignature(as(j), true) - } - // print assembly declaration - printAssemblySignature(currAssembly, false) - } - - // the entrypoint method - protected var entryPoint: MethodInfo = null - - // current opcode argument - protected var argument: Object = null - - /***/ - @throws(classOf[IOException]) - protected def print(vAble: Visitable) { - if (vAble != null) - vAble.apply(this) - } - - /** - * Visit an AssemblyBuilder - */ - @throws(classOf[IOException]) - def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder) - - protected var currentModule: Module = null - /** - * Visit a ModuleBuilder - */ - @throws(classOf[IOException]) - def caseModuleBuilder(module: ModuleBuilder) - - protected var currentType: Type = null - - def printTypeParams(sortedTVars : Array[GenericParamAndConstraints]) { - - def constraintFlags(tVar : GenericParamAndConstraints) = { - val varianceDirective = (if (tVar.isCovariant) "+ " else (if (tVar.isContravariant) "- " else "")) - val typeKindDirective = (if (tVar.isReferenceType) "class " else (if (tVar.isValueType) "valuetype " else "")) - val dfltConstrDirective = (if (tVar.hasDefaultConstructor) ".ctor " else "") - varianceDirective + typeKindDirective + dfltConstrDirective - } - - def tparamName(tVar : GenericParamAndConstraints) = { - /* TODO Type-params in referenced assemblies may lack a name (those in a TypeBuilder or MethodBuilder shouldn't). - Given that we need not list (in ilasm syntax) the original type-params' names when - providing type arguments to it, the only type-param-names we'll serialize into a .msil file - are those for type-params in a TypeBuilder or MethodBuilder. Still, more details on this - appear in Sec. 
4.5 "Faulty metadata in XMLReaderFactory" of - http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/Libs4Lib.pdf - - To avoid name clashes when choosing a param name, - first collect all existing tparam-names from a type (and its nested types). - Not that those names are needed (ordinal positions can be used instead) - but will look better when disassembling with ildasm. */ - assert(tVar.Name != null) - tVar.Name - } - - if(sortedTVars.length == 0) { return } - print('<') - val lastIdx = sortedTVars.length - 1 - for (it <- 0 until sortedTVars.length) { - val tVar = sortedTVars(it) - print(constraintFlags(tVar)) - if(tVar.Constraints.length > 0) { - print('(') - for (ic <- 0 until tVar.Constraints.length) { - val cnstrt = tVar.Constraints(ic) - printReference(cnstrt) - if (ic < lastIdx) { print(", ") } - } - print(')') - } - print(" " + tparamName(tVar)) - if (it < lastIdx) { print(", ") } - } - print('>') - } - - /** - * Visit a TypeBuilder - */ - @throws(classOf[IOException]) - def caseTypeBuilder(`type`: TypeBuilder) { - currentType = `type` - if (!`type`.Namespace.equals("") && `type`.DeclaringType == null) { - print(".namespace \'" ); print(`type`.Namespace); println("\'") - println("{"); indent() - } - print(".class ") - // ::= - // * - // [extends ] - // [implements [, ]*] - print(TypeAttributes.toString(`type`.Attributes)) - print(" \'"); print(`type`.Name); print("\'") - printTypeParams(`type`.getSortedTVars()) - if (`type`.BaseType() != null) { - println() - print(" extends ") - printReference(`type`.BaseType()) - } - val ifaces: Array[Type] = `type`.getInterfaces() - if (ifaces.length > 0) { - println() - print(" implements ") - for (i <- 0 until ifaces.length) { - if (i > 0) { - println(",") - print(" ") - } - printReference(ifaces(i)) - } - } - println() - println("{") - indent() - if (!nomembers && `type`.sourceFilename != null) - println(".line " + `type`.sourceLine - + " '" + `type`.sourceFilename + "'") - if (!nomembers) { - printAttributes(`type`) - } - // print nested classes - val nested = `type`.nestedTypeBuilders.iterator - while(nested.hasNext) - print(nested.next().asInstanceOf[TypeBuilder]) - - // print each field - val fields = `type`.fieldBuilders.iterator - while(fields.hasNext) - print(fields.next().asInstanceOf[FieldBuilder]) - - // print each constructor - val constrs = `type`.constructorBuilders.iterator - while (constrs.hasNext) - print(constrs.next().asInstanceOf[ConstructorBuilder]) - - // print each method - val methods = `type`.methodBuilders.iterator - while (methods.hasNext) { - val method = methods.next().asInstanceOf[MethodBuilder] - assert(method.DeclaringType == `type`) - print(method) - } - - undent(); println("}") - if (!`type`.Namespace.equals("") && `type`.DeclaringType == null) { - undent(); println("}") - } - currentType = null - } - - /** - * Visit a FieldBuilder - */ - @throws(classOf[IOException]) - def caseFieldBuilder(field: FieldBuilder) { - if (nomembers) return - // [[int32]] * [= | at ] - print(".field ") - print(FieldAttributes.toString(field.Attributes)) - print(" "); printSignature(field.FieldType, field.cmods) - print(" \'"); print(field.Name); print("\'") - if (field.IsLiteral()) { - print(" = ") - val value = field.getValue() - if (value == null) { - print("nullref") - } else if (value.isInstanceOf[String]) { - print(msilString(value.asInstanceOf[String])) - } else if (value.isInstanceOf[Boolean]) { - print("bool (") - print(if((value.asInstanceOf[Boolean]).booleanValue()) { "true" } else { "false" }) - print(")") - } else 
if (value.isInstanceOf[Byte]) { - print("int8 (") - print(value) - print(")") - } else if (value.isInstanceOf[java.lang.Short]) { - print("int16 (") - print(value) - print(")") - } else if (value.isInstanceOf[Character]) { - print("char (") - print((value.asInstanceOf[Character]).charValue()) - print(")") - } else if (value.isInstanceOf[Integer]) { - print("int32 (") - print((value.asInstanceOf[Integer]).intValue()) - print(")") - } else if (value.isInstanceOf[Long]) { - print("int64 (") - print((value.asInstanceOf[Long]).longValue()) - print(")") - } else if (value.isInstanceOf[Float]) { - print(msilSyntaxFloat(value.asInstanceOf[Float])) - } else if (value.isInstanceOf[Double]) { - print(msilSyntaxDouble(value.asInstanceOf[Double])) - } else { - throw new Error("ILPrinterVisitor: Illegal default value: " - + value.getClass()) - } - } - println() - printAttributes(field) - } - - def msilSyntaxFloat(valFlo: java.lang.Float) : String = { - // !!! check if encoding is correct - val bits = java.lang.Float.floatToRawIntBits(valFlo.floatValue()) - /* see p. 170 in Lidin's book Expert .NET 2.0 IL Assembler */ - /* Note: no value is equal to Nan, including NaN. Thus, x == Float.NaN always evaluates to false. */ - val res = if (valFlo.isNaN) "0xFFC00000 /* NaN */ " /* TODO this is 'quiet NaN, http://www.savrola.com/resources/NaN.html , what's the difference with a 'signaling NaN'?? */ - else if (java.lang.Float.NEGATIVE_INFINITY == valFlo.floatValue) "0xFF800000 /* NEGATIVE_INFINITY */ " - else if (java.lang.Float.POSITIVE_INFINITY == valFlo.floatValue) "0x7F800000 /* POSITIVE_INFINITY */ " - else bits - "float32 (" + res + ")" - } - - def msilSyntaxDouble(valDou: java.lang.Double) : String = { - // !!! check if encoding is correct - val bits = java.lang.Double.doubleToRawLongBits(valDou.doubleValue()) - /* see p. 170 in Lidin's book Expert .NET 2.0 IL Assembler */ - /* Note: no value is equal to Nan, including NaN. Thus, x == Double.NaN always evaluates to false. */ - val res = if (valDou.isNaN) "0xffffffffffffffff /* NaN */ " /* TODO this is 'quiet NaN, http://www.savrola.com/resources/NaN.html , what's the difference with a 'signaling NaN'?? */ - else if (java.lang.Double.NEGATIVE_INFINITY == valDou.doubleValue) "0xfff0000000000000 /* NEGATIVE_INFINITY */ " - else if (java.lang.Double.POSITIVE_INFINITY == valDou.doubleValue) "0x7ff0000000000000 /* POSITIVE_INFINITY */ " - else bits - // float64(float64(...)) != float64(...) 
- "float64 (" + res + ")" - } - - /** - * Visit a ConstructorBuilder - */ - @throws(classOf[IOException]) - def caseConstructorBuilder(constr: ConstructorBuilder) { - if (nomembers) return - print(".method "); printHeader(constr, VOID) - println(); println("{"); indent() - printAttributes(constr) - try { - print(constr.GetILGenerator()) - } catch { - case e : RuntimeException => { - System.err.println("In method " + constr) - e.printStackTrace() - } - } - undent(); println("}") - } - - /** - * Visit a MethodBuilder - */ - @throws(classOf[IOException]) - def caseMethodBuilder(method: MethodBuilder) { - if (nomembers) return - print(".method "); printHeader(method, method.ReturnType) - if (method.IsAbstract() - || (method.DeclaringType != null - && method.DeclaringType.IsInterface() - && !method.IsStatic())) - { - println(" {"); indent() - printAttributes(method) - undent(); println("}") - } else { - println(); println("{"); indent() - printAttributes(method) - if (method == entryPoint) - println(".entrypoint") - try { - print(method.GetILGenerator()) - } catch { - case e: RuntimeException => - System.err.println("In method " + method) - e.printStackTrace() - } - undent(); println("}") - } - } - - /** - * Visit a ParameterBuilder - */ - @throws(classOf[IOException]) - def caseParameterBuilder(param: ParameterBuilder) { - print(ParameterAttributes.toString(param.Attributes)) - printSignature(param.ParameterType) - //print(' ') print(marshal) - print(' '); printName(param.Name) - } - - var locals: Array[LocalBuilder] = null - /** - * Visit an ILGenerator - */ - @throws(classOf[IOException]) - def caseILGenerator(code: ILGenerator) { - // print maxstack - println(".maxstack " + code.getMaxStacksize()) - // get the local variables - locals = code.getLocals() - if (locals.length > 0) { - println(".locals init (") - indent() - for (i <- 0 until locals.length) { - if (i > 0) println(",") - print(locals(i)) - } // end while - undent() - println(")") - } - // get 3 iterators for the 3 lists - val itL = code.getLabelIterator() - val itO = code.getOpcodeIterator() - val itA = code.getArgumentIterator() - // iterate over each opcode - while (itO.hasNext) { - // first print label - val label = itL.next - val oOpt = code.lineNums.get(label) - if (oOpt.isDefined) { - println(".line " + oOpt.get) - } - argument = itA.next.asInstanceOf[Object] - printLabel(label) - val o2 = itO.next - if (o2 != null) { - print(" ") - print(o2.asInstanceOf[OpCode]) - } - println() - } // end while - } - - /** - * visit an OpCode - */ - @throws(classOf[IOException]) - def caseOpCode(opCode: OpCode) { - val opString = opCode.toString() - print(opString) - pad(14 - opString.length()) - - // switch opcode - if (opCode == OpCode.Ldstr) { - print(msilString(argument.toString())) - } else if(opCode == OpCode.Switch) { - // switch ( ) - print("(") - val targets = argument.asInstanceOf[Array[Label]] - val m = targets.length - for (i <- 0 until m) { - if (i != 0) print(", ") - print(targets(i)) - } // end for - print(")") - } else if(opCode == OpCode.Call || opCode == OpCode.Callvirt || opCode == OpCode.Jmp || opCode == OpCode.Ldftn || opCode == OpCode.Ldvirtftn) { - // call | callvirt | jmp | ldftn | ldvirtftn - // [ :: ] - printSignature(argument.asInstanceOf[MethodBase]) - } else if (opCode == OpCode.Newobj) { - printSignature(argument.asInstanceOf[ConstructorInfo]) - // ldfld | ldflda | ldsfld | ldsflda | stfld | stsfld - } else if (opCode == OpCode.Ldfld || opCode == OpCode.Ldflda || opCode == OpCode.Ldsfld || opCode == 
OpCode.Ldsflda || opCode == OpCode.Stfld || opCode == OpCode.Stsfld) { - printSignature(argument.asInstanceOf[FieldInfo]) - } else if (opCode == OpCode.Castclass || opCode == OpCode.Isinst || opCode == OpCode.Ldobj || opCode == OpCode.Newarr) { - printSignature(argument.asInstanceOf[Type]) - } else if (opCode == OpCode.Box || opCode == OpCode.Unbox || opCode == OpCode.Ldtoken || opCode == OpCode.Initobj) { - printReference(argument.asInstanceOf[Type]) - } else if (opCode == OpCode.Ldloc || opCode == OpCode.Ldloc_S || opCode == OpCode.Ldloca || opCode == OpCode.Ldloca_S || opCode == OpCode.Stloc || opCode == OpCode.Stloc_S) { - val loc = argument.asInstanceOf[LocalBuilder] - print(loc.slot); print("\t// "); printSignature(loc.LocalType) - print(" \'"); print(loc.name); print("\'") - //print("'") print(((LocalBuilder)argument).name) print("'") - } else if (opCode == OpCode.Ldloc_0 || opCode == OpCode.Ldloc_1 || opCode == OpCode.Ldloc_2 || opCode == OpCode.Ldloc_3 ) { - val loc = locals(opCode.CEE_opcode - OpCode.CEE_LDLOC_0) - print("\t// "); printSignature(loc.LocalType) - print(" \'"); print(loc.name); print("\'") - } else if (opCode == OpCode.Stloc_0 || opCode == OpCode.Stloc_1 || opCode == OpCode.Stloc_2 || opCode == OpCode.Stloc_3 ) { - val loc = locals(opCode.CEE_opcode - OpCode.CEE_STLOC_0) - print("\t// "); printSignature(loc.LocalType) - print(" \'"); print(loc.name); print("\'") - } else if (opCode == OpCode.Readonly) { - // nothing to do - } else if (opCode == OpCode.Constrained) { - printReference(argument.asInstanceOf[Type]) - } else if (opCode == OpCode.Ldelema) { - printReference(argument.asInstanceOf[Type]) - } else { - // by default print toString argument if any - if (argument != null) { - val strArgument = java.lang.String.valueOf(argument) - if ( argument.isInstanceOf[java.lang.Float] - && ( strArgument.equals("NaN") - || strArgument.equals("-Infinity") - || strArgument.equals("Infinity"))) - print(msilSyntaxFloat(argument.asInstanceOf[java.lang.Float])) - else if ( argument.isInstanceOf[java.lang.Double] - && ( strArgument.equals("NaN") - || strArgument.equals("-Infinity") - || strArgument.equals("Infinity"))) - print(msilSyntaxDouble(argument.asInstanceOf[java.lang.Double])) - else print(strArgument) - } - - } // end switch - } - - /** - * Visit a Label - */ - def printLabel(label: Label) { - val kind = label.getKind() - if (kind == Label.Kind.Normal) { - print(label+ ": ") - } else if (kind == Label.Kind.NewScope) { - print("{"); indent() - } else if (kind == Label.Kind.EndScope) { - undent(); print("}") - } else if (kind == Label.Kind.Try) { - print(".try {"); indent() - } else if (kind == Label.Kind.Catch) { - undent() - println("}") - print("catch ") - printReference(argument.asInstanceOf[Type]) - print(" {") - indent() - } else if (kind == Label.Kind.Filter) { - undent() - println("}") - print("filter {") - indent() - } else if (kind == Label.Kind.EndFilter) { - print("endfilter") - undent() - println("}") - } else if (kind == Label.Kind.Finally) { - undent() - println("}") - print("finally {") - indent() - } else if (kind == Label.Kind.EndTry) { - undent() - print("}") - } - } - - /** - * Visit a LocalBuilder - */ - @throws(classOf[IOException]) - def caseLocalBuilder(localBuilder: LocalBuilder) { - // print type - printSignature(localBuilder.LocalType) - // space - print(" \'") - // print name - print(localBuilder.name) - print("\'") - } - - - //########################################################################## - - def printAssemblySignature(assem: 
Assembly, extern: Boolean) { - print(".assembly ") - if (extern) - print("extern ") - val an = assem.GetName() - printName(an.Name); println() - println("{") - if (!extern) - printAttributes(assem) - val v = an.Version - if (v != null) { - print(" .ver "); print(v.Major); print(':'); print(v.Minor) - print(':'); print(v.Build); print(':') - print(v.Revision); println() - } - var key = an.GetPublicKeyToken() - if (key != null) { - print(" .publickeytoken = ("); print(PEFile.bytes2hex(key)) - println(")") - } else { - key = an.GetPublicKey() - if (key != null) { - print(" .publickey = ("); print(PEFile.bytes2hex(key)) - println(")") - } - } - println("}") - } - - - def printSignature(field: FieldInfo) { - printSignature(field.FieldType, field.cmods) - //print(' ') print(owner) - print(' ') - //if (field.IsStatic && field.DeclaringType != currentType) { - printReference(field.DeclaringType) - print("::") - //} - printName(field.Name) - } - - // print method head - @throws(classOf[IOException]) - def printHeader(method: MethodBase, returnType: Type) { - print(MethodAttributes.toString(method.Attributes)) - print(' '); print(CallingConventions.toString(method.CallingConvention)) - print(' '); printSignature(returnType) - //print(' ') print(marshal) - print(' '); printName(method.Name) - if(method.isInstanceOf[MethodInfo]) { - val mthdInfo = method.asInstanceOf[MethodInfo] - printTypeParams(mthdInfo.getSortedMVars()) - } - val params = method.GetParameters() - print('(') - for (i <- 0 until params.length) { - if (i > 0) print(", ") - print(params(i).asInstanceOf[ParameterBuilder]) - } - print(") ") - - print(MethodImplAttributes - .toString(method.GetMethodImplementationFlags())) - } - - - def printSignature(method: MethodBase) { - var returnType: Type = null - if (method.isInstanceOf[MethodInfo]) - returnType = (method.asInstanceOf[MethodInfo]).ReturnType - else if (method.isInstanceOf[ConstructorInfo]) - returnType = VOID - else - throw new RuntimeException() - - val s = CallingConventions.toString(method.CallingConvention) - print(s) - if (s.length() > 0) print(' ') - printSignature(returnType) - //print(' ') print(owner) - print(' '); printReference(method.DeclaringType) - print("::"); printName(method.Name) - - val params = method.GetParameters() - print("(") - for (i <- 0 until params.length) { - if (i > 0) print(", ") - printSignature(params(i).ParameterType) - } - print(")") - } - - def printSignature(marked: Type, cmods: Array[CustomModifier]) { - printSignature(marked) - if( (cmods != null) && !cmods.isEmpty ) { - print(" ") - for(cm <- cmods) { - print(if (cm.isReqd) "modreq( " else "modopt( ") - printReference(cm.marker) - print(" ) ") - } - } - } - - def printSignature(`type`: Type) { - val sigOpt = primitive.get(`type`) - if (sigOpt.isDefined) { - print(sigOpt.get) - return - } - if (`type`.HasElementType()) { - printSignature(`type`.GetElementType()) - if (`type`.IsArray()) - print("[]") - else if (`type`.IsPointer()) - print('*') - else if (`type`.IsByRef()) - print('&') - } else { - val preref = if (`type`.isInstanceOf[Type.TMVarUsage]) "" - else if(`type`.IsValueType()) "valuetype " - else "class " - print(preref) - printReference(`type`) - } - } - - def printReference(`type`: Type) { - if (`type`.Module != null) { // i.e. 
not PrimitiveType and not TMVarUsage - if (`type`.Assembly() != currentModule.Assembly) { - print('['); print(`type`.Assembly().GetName().Name); print("]") - } else if (`type`.Module != currentModule) { - print("[.module "); print(`type`.Module.Name); print("]") - } - } - printTypeName(`type`) - } - - def printTypeName(`type`: Type) { - if (`type`.isInstanceOf[ConstructedType]) { - val ct = `type`.asInstanceOf[ConstructedType] - printTypeName(ct.instantiatedType) - print("<") - var i = 0 - while (i < ct.typeArgs.length) { - val ta = ct.typeArgs(i) - val sigOpt = primitive.get(ta) - if (sigOpt.isDefined) print(sigOpt.get) - else printTypeName(ta); /* should be printSignature, but don't want `class` or `valuetype` - appearing before a type param usage. */ - i = i + 1; - if (i < ct.typeArgs.length) { - print(", ") - } - } - print(">") - } else if (`type`.DeclaringType != null) { - printTypeName(`type`.DeclaringType) - print('/') - printName(`type`.Name) - } else { - printName(`type`.FullName) - } - } - - def printAttributes(icap: ICustomAttributeProvider) { - val attrs = icap.GetCustomAttributes(false) - for (i <- 0 until attrs.length) { - print(".custom ") - printSignature((attrs(i).asInstanceOf[Attribute]).getConstructor()) - print(" = (") - print(PEFile.bytes2hex((attrs(i).asInstanceOf[Attribute]).getValue())) - println(")") - } - } - - //########################################################################## - -} // class ILPrinterVisitor - -object ILPrinterVisitor { - final val VOID: Type = Type.GetType("System.Void") - protected final val TAB = 4 - - protected final val SPACES = " " - protected final val SPACES_LEN = SPACES.length() - - def hasControlChars(str: String): Boolean = { - for(i <- 0 until str.length()) { - val ch = str.charAt(i) - ch match { - case '\b' => - case '\t' => - case '\n' => - case '\f' => - case '\r' => - case _ => if(Character.isISOControl(ch)) return true - } - } - return false - } - - final val EMPTY: String = "" - def msilString(s: String): String = { - if (hasControlChars(s)) { - try { - return "bytearray (" + PEFile.bytes2hex(s.getBytes("UTF-16LE")) + ")" - } catch { - case e : java.io.UnsupportedEncodingException => throw new RuntimeException(e) - } - } - val str = new StringBuffer(s) - var ss = EMPTY - var i = 0 - while(i < str.length()) { - ss = EMPTY - val c = str.charAt(i) - c match { - case '\b' => ss = "\\b" - case '\t' => ss = "\\t" - case '\n' => ss = "\\n" - case '\f' => ss = "\\f" - case '\r' => ss = "\\r" - case '\"' => ss = "\\\"" - case '\'' => ss = "\\\'" - case '\\' => ss = "\\\\" - case _ => if (Character.isISOControl(c)) - ss = "\\u" + PEFile.int2hex(Character.getNumericValue(c)) - } - if (ss != EMPTY) { - str.replace(i, i + 1, ss) - i = i + ss.length() - 1 - } - i = i + 1 - } - return "\"" + str.toString() + "\"" - } - - /** - * the main printer method - */ - @throws(classOf[IOException]) - def printAssembly(assemblyBuilder: AssemblyBuilder, fileName: String) { - assemblyBuilder.apply(new SingleFileILPrinterVisitor(fileName)) - } - - @throws(classOf[IOException]) - def printAssembly(assemblyBuilder: AssemblyBuilder, destPath: String, sourceFilesPath: String) { - assemblyBuilder.apply(new MultipleFilesILPrinterVisitor(destPath, sourceFilesPath)) - } - - /** The current assembly */ - var currAssembly: Assembly = _ - - final var primitive = scala.collection.mutable.Map.empty[Type, String] - def addPrimitive(name: String, sig: String) { - val `type` = - Type.GetType(name) - assert(`type` != null, "Cannot lookup primitive type " + `type`) - 
primitive.put(`type`, sig) - } - - addPrimitive("System.Object", "object") - addPrimitive("System.String", "string") - addPrimitive("System.Void", "void") - addPrimitive("System.Boolean", "bool") - addPrimitive("System.Char", "char") - addPrimitive("System.SByte", "int8") - addPrimitive("System.Byte", "unsigned int8") - addPrimitive("System.Int16", "int16") - addPrimitive("System.UInt16", "unsigned int16") - addPrimitive("System.Int32", "int32") - addPrimitive("System.UInt32", "unsigned int32") - addPrimitive("System.Int64", "int64") - addPrimitive("System.UInt64", "unsigned int64") - addPrimitive("System.IntPtr", "native int") - addPrimitive("System.UIntPtr", "unsigned native int") - addPrimitive("System.Single", "float32") - addPrimitive("System.Double", "float64") - addPrimitive("System.TypedReference", "typedref") -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala deleted file mode 100644 index a80ea72323..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala +++ /dev/null @@ -1,147 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import ch.epfl.lamp.compiler.msil.Type - -/** - * Represents a label in the instruction stream. Label is used in conjunction - * with the ILGenerator class. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -abstract class Label protected { - import Label._ - def isInitialized(): Boolean - def getKind(): Kind - def getAddress(): Int - def getStacksize(): Int - def setStacksize(stacksize: Int): Unit - def incStacksize(): Unit - def mergeWith(that: Label): Unit -} - -object Label { - final val DUMMY: Int = -((1<<31)-1) - - //########################################################################## - - final class NormalLabel(_address: Int, _stacksize: Int) extends Label { - - //########################################################################## - // protected constructors - - //the position of the label - private var address: Int = _address - - //the stacksize at the label - private var stacksize: Int = _stacksize - - def this() { - this(-1, DUMMY) - } - - def this(that: NormalLabel) { - this(that.getAddress(), that.getStacksize()) - } - - //########################################################################## - // instrumental methods only used by ILGenerator - - def isInitialized() = (getAddress() != -1) || (stacksize != DUMMY) - - def getAddress() = address - - def getStacksize() = stacksize - - def setStacksize(stacksize: Int) { - assert(stacksize >= 0) - this.stacksize = stacksize - } - - def incStacksize() { - stacksize = stacksize + 1 - } - - def getKind(): Kind = Kind.Normal - - def mergeWith(that: Label) { - //assert address < 0 : "this.address = " + address + " that.address = " + that.address - address = that.getAddress() - - // assert stacksize == that.stacksize - // : "this.stacksize = " + stacksize + " that.stacksize = " - // + that.stacksize - // stacksize = that.stacksize - val ss: Int = math.max(stacksize, that.getStacksize()) - stacksize = ss - that.setStacksize(ss) - } - - //########################################################################## - // - - /** - * the toString Method return the label name - * it's "IL" + address - */ - override def toString(): String = { - var pad: String = "" - if (address < 16) pad = "000" - else if (address < 256) pad = "00" - else if (address < 4096) pad = "0" - return "IL_" + pad + Integer.toHexString(address) - 
} - - def getString(): String = { - val name = super.toString() - val i: Int = name.lastIndexOf('.') - return name.substring(i+1, name.length()) - } - } - - //######################################################################## - // Special Labels - - final class SpecialLabel(kind: Label.Kind) extends Label { - def isInitialized() = true - def getAddress(): Int = { throw new RuntimeException("" + kind) } - def getStacksize(): Int = { throw new RuntimeException("" + kind) } - def setStacksize(stacksize: Int) { throw new RuntimeException("" + kind) } - def incStacksize() { throw new RuntimeException("" + kind) } - def getKind(): Kind = kind - def mergeWith(that: Label) { throw new RuntimeException("" + kind) } - override def toString() = s"Label($kind)" - } - - final val NewScope: Label = new SpecialLabel(Kind.NewScope) - final val EndScope: Label = new SpecialLabel(Kind.EndScope) - final val Try: Label = new SpecialLabel(Kind.Try) - final val Catch: Label = new SpecialLabel(Kind.Catch) - final val Filter: Label = new SpecialLabel(Kind.Filter) - final val EndFilter: Label = new SpecialLabel(Kind.EndFilter) - final val Finally: Label = new SpecialLabel(Kind.Finally) - final val EndTry: Label = new SpecialLabel(Kind.EndTry) - - final class Kind() {} - - final object Kind { - final val Normal: Kind = new Kind() - - final val NewScope: Kind = new Kind() - final val EndScope: Kind = new Kind() - - final val Try: Kind = new Kind() - final val Catch: Kind = new Kind() - final val Filter: Kind = new Kind() - final val EndFilter: Kind = new Kind() - final val Finally: Kind = new Kind() - final val EndTry: Kind = new Kind() - } - - //########################################################################## -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala deleted file mode 100644 index 73bca4639f..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala +++ /dev/null @@ -1,44 +0,0 @@ -/** - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import ch.epfl.lamp.compiler.msil.Type - -/** - * Represents a local variable within a method or constructor. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -class LocalBuilder(_slot : Int, localType : Type) extends Visitable { - - /** - * the type of the local variable. - */ - var LocalType : Type = localType - - // the name of the local variable - var name : String = "L_" + slot - - // the slot occupied by this local in the corresponding ILGenerator - var slot : Int = _slot - - /** - * Sets the name of this local variable. 
- */ - def SetLocalSymInfo(name : String) { - this.name = name - } - - override def toString() : String = name - - /** - * the apply method for a visitor - */ - def apply(v : Visitor) { - v.caseLocalBuilder(this) - } -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala deleted file mode 100644 index 237d8fd728..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala +++ /dev/null @@ -1,70 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import ch.epfl.lamp.compiler.msil.MethodInfo -import ch.epfl.lamp.compiler.msil.ParameterInfo -import ch.epfl.lamp.compiler.msil.Type -import ch.epfl.lamp.compiler.msil.ConstructorInfo -import java.io.IOException - -/** - * Defines and represents a method of a dynamic class. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -class MethodBuilder(name: String, declType: Type, attrs: Int, returnType: Type, paramTypes: Array[Type]) - extends MethodInfo(name, declType, attrs, returnType, paramTypes) - with ICustomAttributeSetter - with Visitable -{ - - //########################################################################## - // public interface - - /** Defines a parameter of this method. TODO: Parameters are indexed staring - * from number 1 for the first parameter - */ - def DefineParameter(pos: Int, attr: Int, name: String): ParameterBuilder = { - val param = new ParameterBuilder(name, params(pos).ParameterType, attr, pos) - params(pos) = param - return param - } - - /** Returns an ILGenerator for this method. */ - def GetILGenerator(): ILGenerator = { - if (ilGenerator == null) - throw new RuntimeException - ("No code generator available for this method: " + this) - return ilGenerator - } - - /** Sets a custom attribute. */ - def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) { - addCustomAttribute(constr, value) - } - - //########################################################################## - - /** The apply method for a visitor. */ - @throws(classOf[IOException]) - def apply(v: Visitor) { - v.caseMethodBuilder(this) - } - - //########################################################################## - - // the Intermediate Language Generator - // it contains the method's body - protected final val ilGenerator : ILGenerator = - if (DeclaringType == null // global method - || !DeclaringType.IsInterface()) - new ILGenerator(this) - else null - - //########################################################################## -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala deleted file mode 100644 index 2319d5ca27..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala +++ /dev/null @@ -1,136 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import ch.epfl.lamp.compiler.msil._ -import java.io.IOException - -/** - * Defines and represents a module. 
Get an instance of ModuleBuilder - * by calling DefineDynamicModule - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -class ModuleBuilder(name: String, fullname: String, scopeName: String, assembly: Assembly) - extends Module(name, fullname, scopeName, assembly) - with ICustomAttributeSetter - with Visitable -{ - - //########################################################################## - // public interface - - /** - * Complete the global function definitions for this dynamic module. - * This method should be called when the user is done with defining - * all of the global functions within this dynamic module. After calling - * this function, no more new global functions or new global data are - * allowed. - */ - def CreateGlobalFunctions() { - if (globalsCreated) - throw new RuntimeException("Global functions are already created") - this.fields = fieldBuilders.toArray // (fields).asInstanceOf[Array[FieldInfo]] - this.methods = methodBuilders.toArray // (methods).asInstanceOf[Array[MethodInfo]] - globalsCreated = true - } - - /** - * Constructs a TypeBuilder for a type with the specified name - */ - def DefineType(typeName: String): TypeBuilder = { - return DefineType(typeName, 0, null, Type.EmptyTypes) - } - - /** - * Constructs a TypeBuilder for a type with the specified name - * and specified attributes - */ - def DefineType(typeName: String, attributes: Int): TypeBuilder = { - return DefineType(typeName, attributes, null, Type.EmptyTypes) - } - - /** - * Constructs a TypeBuilder given type name, its attributes, - * and the type that the defined type extends. - */ - def DefineType(typeName: String, attributes: Int, - baseType: Type): TypeBuilder = { - return DefineType(typeName, attributes, baseType, Type.EmptyTypes) - } - - /** - * Constructs a TypeBuilder given the Full specification of a type, - * Given the type name, attributes, the type that the defined type - * extends, and the interfaces that the defined type implements. - */ - def DefineType(typeName: String, - attributes: Int, - baseType: Type, - interfaces: Array[Type]): TypeBuilder = - { - val t: Type = GetType(typeName) // Module.GetType(String) - if (t != null) - throw new RuntimeException - ("Type [" + Assembly + "]" + typeName + "' already exists!") - val `type` = - new TypeBuilder(this, attributes, typeName, baseType, interfaces, null) - addType(`type`) - return `type` - } - - /** - * Defines a global method given its name, attributes, return type, and - * parameter types. - */ - def DefineGlobalMethod(name: String, attributes: Int, - returnType: Type, paramTypes: Array[Type]): MethodBuilder = - { - val method = - new MethodBuilder(name, null, attributes, returnType, paramTypes) - methodBuilders += method - return method - } - - - override def GetTypes(): Array[Type] = { - val res = scala.collection.mutable.ArrayBuffer.empty[Type] - val iter = typesMap.values().iterator - while (iter.hasNext) { - res += iter.next.asInstanceOf[Type] - } - return res.toArray - } - - /** Sets a custom attribute. 
*/ - def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) { - addCustomAttribute(constr, value) - } - - //########################################################################## - // internal members - - var globalsCreated = false - protected var fieldBuilders = scala.collection.mutable.ArrayBuffer.empty[FieldInfo] - protected var methodBuilders = scala.collection.mutable.ArrayBuffer.empty[MethodInfo] - - override def addType(t: Type): Type = { - return super.addType(t) - } - - //########################################################################## - - /** - * the apply method for a visitor - */ - @throws(classOf[IOException]) - def apply(v: Visitor) { - v.caseModuleBuilder(this) - } - - //########################################################################## -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala deleted file mode 100644 index bbbbf40508..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala +++ /dev/null @@ -1,137 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies in MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import java.io.File -import java.io.FileWriter -import java.io.BufferedWriter -import java.io.PrintWriter -import java.io.IOException -import java.util.Iterator -import java.util.Arrays - -import ch.epfl.lamp.compiler.msil._ -import ch.epfl.lamp.compiler.msil.emit -import ch.epfl.lamp.compiler.msil.util.Table - -/** - * The MSIL printer Visitor. It prints a complete - * assembly into separate files. Then these files can be compiled by ilasm. - * - * @author Nikolay Mihaylov - * @author Daniel Lorch - * @version 1.0 - */ -final class MultipleFilesILPrinterVisitor(destPath: String, sourceFilesPath: String) extends ILPrinterVisitor { - /** - * Visit an AssemblyBuilder - */ - @throws(classOf[IOException]) - def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder) { - ILPrinterVisitor.currAssembly = assemblyBuilder - - // first get the entryPoint - this.entryPoint = assemblyBuilder.EntryPoint - - // all external assemblies - as = assemblyBuilder.getExternAssemblies() - scala.util.Sorting.quickSort(as)(assemblyNameComparator) // Arrays.sort(as, assemblyNameComparator) - - // print each module - val m: Array[Module] = assemblyBuilder.GetModules() - nomembers = true - for(i <- 0 until m.length) { - print(m(i).asInstanceOf[ModuleBuilder]) - } - - nomembers = false - for(i <- 0 until m.length) { - print(m(i).asInstanceOf[ModuleBuilder]) - } - ILPrinterVisitor.currAssembly = null - } - - /** - * Visit a ModuleBuilder - */ - @throws(classOf[IOException]) - def caseModuleBuilder(module: ModuleBuilder) { - val assemblyBuilder = ILPrinterVisitor.currAssembly.asInstanceOf[AssemblyBuilder] - - // print module declaration - currentModule = module - - // global methods typically contain the main method - if (!module.globalsCreated) - module.CreateGlobalFunctions() - - val m: Array[MethodInfo] = module.GetMethods() - - // "Types" contain all the classes - val t: Array[Type] = module.GetTypes() - for(i <- 0 until t.length) { - val tBuilder = t(i).asInstanceOf[TypeBuilder] - val sourceFilename = tBuilder.sourceFilename - val sourceFilepath = new File(tBuilder.sourceFilepath).getCanonicalPath - val sourcePath = new File(sourceFilesPath).getCanonicalPath - var append = false - - if(!sourceFilepath.startsWith(sourcePath)) { - throw new IOException("Source file " + 
sourceFilename + " must lie inside sourcepath " + sourcePath) - } - - assert(sourceFilepath.endsWith(".scala"), "Source file doesn't end with .scala") - val relativeFilename = sourceFilepath.substring(sourcePath.length, sourceFilepath.length() - 6) + ".msil" - val fileName = new File(destPath, relativeFilename) - if(assemblyBuilder.generatedFiles.contains(fileName.getPath)) { - append = true - } else { - fileName.getParentFile().mkdirs() - assemblyBuilder.generatedFiles += (fileName.getPath) - } - - out = new PrintWriter(new BufferedWriter(new FileWriter(fileName, append))) - // only write assembly boilerplate and class prototypes - if (!append && nomembers) { - printAssemblyBoilerplate() - - print(".module \'"); print(module.Name); println("\'") - printAttributes(module) - } - - print(t(i).asInstanceOf[TypeBuilder]) - out.close() - } - - // now write the global methods (typically contains the "main" method) - if(!nomembers) { - val globalMethods: File = new File(destPath, ILPrinterVisitor.currAssembly.GetName().Name + ".msil") - val append = assemblyBuilder.generatedFiles.contains(globalMethods.getPath) - - out = new PrintWriter(new BufferedWriter(new FileWriter(globalMethods, append))) - - // make sure we're the first in the list (ilasm uses the first file name to guess the output file name) - assemblyBuilder.generatedFiles.insert(0, globalMethods.getPath) - - // if this file hasn't been created by one of the classes, write boilerplate - if(!append) { - printAssemblyBoilerplate() - - print(".module \'"); print(module.Name); println("\'") - printAttributes(module) - } - - for(i <- 0 until m.length) { - print(m(i).asInstanceOf[MethodBuilder]) - } - - out.close() - } - - currentModule = null - } - -} // class MultipleFilesILPrinterVisitor diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala deleted file mode 100644 index b0c26884af..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala +++ /dev/null @@ -1,1948 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import java.io.IOException - -/** Describes a Microsoft intermediate language (MSIL) instruction. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -class OpCode extends Visitable { - import OpCode._ - - /** The Operation Code of Microsoft intermediate language (MSIL) instruction. */ - var CEE_opcode : Int = _ - - /** The name of the Microsoft intermediate language (MSIL) instruction. */ - var CEE_string: String = _ - - /** The type of Microsoft intermediate language (MSIL) instruction. */ - var CEE_code: Short = _ - - /** How the Microsoft intermediate language (MSIL) instruction pops the stack. */ - var CEE_pop: Byte = _ - - /** How the Microsoft intermediate language (MSIL) instruction pushes operand onto the stack. */ - var CEE_push: Byte = _ - - /** Describes the type of flow control. */ - var CEE_flow: Byte = _ - - /** ????? 
*/ - var CEE_inline: Byte = _ - - var CEE_length: Byte = _ - - var CEE_popush: Byte = _ - - /** - * the apply method for a visitor - */ - @throws(classOf[IOException]) - def apply(v: Visitor) { - v.caseOpCode(this) - } - - protected def length(): Byte = { - val code = OpCode.length(CEE_code) - val inline = OpCode.INLINE_length(CEE_inline) - return if(inline < 0) { -1 } else { (code + inline).toByte } - } - - protected def popush(): Byte = { - val pop = OpCode.POP_size(CEE_pop) - val push = OpCode.PUSH_size(CEE_push) - return if(pop < 0 || push < 0) { OpCode.POPUSH_SPECIAL } else { (push - pop).toByte } - } - - override def toString(): String = { - return CEE_string - } -} - -object OpCode { - - //######################################################################## - // Common Execution Environment opcodes - - final val CEE_NOP : Int = 0x0000 - final val CEE_BREAK : Int = 0x0001 - final val CEE_LDARG_0 : Int = 0x0002 - final val CEE_LDARG_1 : Int = 0x0003 - final val CEE_LDARG_2 : Int = 0x0004 - final val CEE_LDARG_3 : Int = 0x0005 - final val CEE_LDLOC_0 : Int = 0x0006 - final val CEE_LDLOC_1 : Int = 0x0007 - final val CEE_LDLOC_2 : Int = 0x0008 - final val CEE_LDLOC_3 : Int = 0x0009 - final val CEE_STLOC_0 : Int = 0x000A - final val CEE_STLOC_1 : Int = 0x000B - final val CEE_STLOC_2 : Int = 0x000C - final val CEE_STLOC_3 : Int = 0x000D - final val CEE_LDARG_S : Int = 0x000E - final val CEE_LDARGA_S : Int = 0x000F - final val CEE_STARG_S : Int = 0x0010 - final val CEE_LDLOC_S : Int = 0x0011 - final val CEE_LDLOCA_S : Int = 0x0012 - final val CEE_STLOC_S : Int = 0x0013 - final val CEE_LDNULL : Int = 0x0014 - final val CEE_LDC_I4_M1 : Int = 0x0015 - final val CEE_LDC_I4_0 : Int = 0x0016 - final val CEE_LDC_I4_1 : Int = 0x0017 - final val CEE_LDC_I4_2 : Int = 0x0018 - final val CEE_LDC_I4_3 : Int = 0x0019 - final val CEE_LDC_I4_4 : Int = 0x001A - final val CEE_LDC_I4_5 : Int = 0x001B - final val CEE_LDC_I4_6 : Int = 0x001C - final val CEE_LDC_I4_7 : Int = 0x001D - final val CEE_LDC_I4_8 : Int = 0x001E - final val CEE_LDC_I4_S : Int = 0x001F - final val CEE_LDC_I4 : Int = 0x0020 - final val CEE_LDC_I8 : Int = 0x0021 - final val CEE_LDC_R4 : Int = 0x0022 - final val CEE_LDC_R8 : Int = 0x0023 - final val CEE_UNUSED49 : Int = 0x0024 - final val CEE_DUP : Int = 0x0025 - final val CEE_POP : Int = 0x0026 - final val CEE_JMP : Int = 0x0027 - final val CEE_CALL : Int = 0x0028 - final val CEE_CALLI : Int = 0x0029 - final val CEE_RET : Int = 0x002A - final val CEE_BR_S : Int = 0x002B - final val CEE_BRFALSE_S : Int = 0x002C - final val CEE_BRTRUE_S : Int = 0x002D - final val CEE_BEQ_S : Int = 0x002E - final val CEE_BGE_S : Int = 0x002F - final val CEE_BGT_S : Int = 0x0030 - final val CEE_BLE_S : Int = 0x0031 - final val CEE_BLT_S : Int = 0x0032 - final val CEE_BNE_UN_S : Int = 0x0033 - final val CEE_BGE_UN_S : Int = 0x0034 - final val CEE_BGT_UN_S : Int = 0x0035 - final val CEE_BLE_UN_S : Int = 0x0036 - final val CEE_BLT_UN_S : Int = 0x0037 - final val CEE_BR : Int = 0x0038 - final val CEE_BRFALSE : Int = 0x0039 - final val CEE_BRTRUE : Int = 0x003A - final val CEE_BEQ : Int = 0x003B - final val CEE_BGE : Int = 0x003C - final val CEE_BGT : Int = 0x003D - final val CEE_BLE : Int = 0x003E - final val CEE_BLT : Int = 0x003F - final val CEE_BNE_UN : Int = 0x0040 - final val CEE_BGE_UN : Int = 0x0041 - final val CEE_BGT_UN : Int = 0x0042 - final val CEE_BLE_UN : Int = 0x0043 - final val CEE_BLT_UN : Int = 0x0044 - final val CEE_SWITCH : Int = 0x0045 - final val CEE_LDIND_I1 : Int = 0x0046 - final val 
CEE_LDIND_U1 : Int = 0x0047 - final val CEE_LDIND_I2 : Int = 0x0048 - final val CEE_LDIND_U2 : Int = 0x0049 - final val CEE_LDIND_I4 : Int = 0x004A - final val CEE_LDIND_U4 : Int = 0x004B - final val CEE_LDIND_I8 : Int = 0x004C - final val CEE_LDIND_I : Int = 0x004D - final val CEE_LDIND_R4 : Int = 0x004E - final val CEE_LDIND_R8 : Int = 0x004F - final val CEE_LDIND_REF : Int = 0x0050 - final val CEE_STIND_REF : Int = 0x0051 - final val CEE_STIND_I1 : Int = 0x0052 - final val CEE_STIND_I2 : Int = 0x0053 - final val CEE_STIND_I4 : Int = 0x0054 - final val CEE_STIND_I8 : Int = 0x0055 - final val CEE_STIND_R4 : Int = 0x0056 - final val CEE_STIND_R8 : Int = 0x0057 - final val CEE_ADD : Int = 0x0058 - final val CEE_SUB : Int = 0x0059 - final val CEE_MUL : Int = 0x005A - final val CEE_DIV : Int = 0x005B - final val CEE_DIV_UN : Int = 0x005C - final val CEE_REM : Int = 0x005D - final val CEE_REM_UN : Int = 0x005E - final val CEE_AND : Int = 0x005F - final val CEE_OR : Int = 0x0060 - final val CEE_XOR : Int = 0x0061 - final val CEE_SHL : Int = 0x0062 - final val CEE_SHR : Int = 0x0063 - final val CEE_SHR_UN : Int = 0x0064 - final val CEE_NEG : Int = 0x0065 - final val CEE_NOT : Int = 0x0066 - final val CEE_CONV_I1 : Int = 0x0067 - final val CEE_CONV_I2 : Int = 0x0068 - final val CEE_CONV_I4 : Int = 0x0069 - final val CEE_CONV_I8 : Int = 0x006A - final val CEE_CONV_R4 : Int = 0x006B - final val CEE_CONV_R8 : Int = 0x006C - final val CEE_CONV_U4 : Int = 0x006D - final val CEE_CONV_U8 : Int = 0x006E - final val CEE_CALLVIRT : Int = 0x006F - final val CEE_CPOBJ : Int = 0x0070 - final val CEE_LDOBJ : Int = 0x0071 - final val CEE_LDSTR : Int = 0x0072 - final val CEE_NEWOBJ : Int = 0x0073 - final val CEE_CASTCLASS : Int = 0x0074 - final val CEE_ISINST : Int = 0x0075 - final val CEE_CONV_R_UN : Int = 0x0076 - final val CEE_UNUSED58 : Int = 0x0077 - final val CEE_UNUSED1 : Int = 0x0078 - final val CEE_UNBOX : Int = 0x0079 - final val CEE_THROW : Int = 0x007A - final val CEE_LDFLD : Int = 0x007B - final val CEE_LDFLDA : Int = 0x007C - final val CEE_STFLD : Int = 0x007D - final val CEE_LDSFLD : Int = 0x007E - final val CEE_LDSFLDA : Int = 0x007F - final val CEE_STSFLD : Int = 0x0080 - final val CEE_STOBJ : Int = 0x0081 - final val CEE_CONV_OVF_I1_UN : Int = 0x0082 - final val CEE_CONV_OVF_I2_UN : Int = 0x0083 - final val CEE_CONV_OVF_I4_UN : Int = 0x0084 - final val CEE_CONV_OVF_I8_UN : Int = 0x0085 - final val CEE_CONV_OVF_U1_UN : Int = 0x0086 - final val CEE_CONV_OVF_U2_UN : Int = 0x0087 - final val CEE_CONV_OVF_U4_UN : Int = 0x0088 - final val CEE_CONV_OVF_U8_UN : Int = 0x0089 - final val CEE_CONV_OVF_I_UN : Int = 0x008A - final val CEE_CONV_OVF_U_UN : Int = 0x008B - final val CEE_BOX : Int = 0x008C - final val CEE_NEWARR : Int = 0x008D - final val CEE_LDLEN : Int = 0x008E - final val CEE_LDELEMA : Int = 0x008F - final val CEE_LDELEM_I1 : Int = 0x0090 - final val CEE_LDELEM_U1 : Int = 0x0091 - final val CEE_LDELEM_I2 : Int = 0x0092 - final val CEE_LDELEM_U2 : Int = 0x0093 - final val CEE_LDELEM_I4 : Int = 0x0094 - final val CEE_LDELEM_U4 : Int = 0x0095 - final val CEE_LDELEM_I8 : Int = 0x0096 - final val CEE_LDELEM_I : Int = 0x0097 - final val CEE_LDELEM_R4 : Int = 0x0098 - final val CEE_LDELEM_R8 : Int = 0x0099 - final val CEE_LDELEM_REF : Int = 0x009A - final val CEE_STELEM_I : Int = 0x009B - final val CEE_STELEM_I1 : Int = 0x009C - final val CEE_STELEM_I2 : Int = 0x009D - final val CEE_STELEM_I4 : Int = 0x009E - final val CEE_STELEM_I8 : Int = 0x009F - final val CEE_STELEM_R4 : Int = 0x00A0 - final val 
CEE_STELEM_R8 : Int = 0x00A1 - final val CEE_STELEM_REF : Int = 0x00A2 - final val CEE_UNUSED2 : Int = 0x00A3 - final val CEE_UNUSED3 : Int = 0x00A4 - final val CEE_UNUSED4 : Int = 0x00A5 - final val CEE_UNUSED5 : Int = 0x00A6 - final val CEE_UNUSED6 : Int = 0x00A7 - final val CEE_UNUSED7 : Int = 0x00A8 - final val CEE_UNUSED8 : Int = 0x00A9 - final val CEE_UNUSED9 : Int = 0x00AA - final val CEE_UNUSED10 : Int = 0x00AB - final val CEE_UNUSED11 : Int = 0x00AC - final val CEE_UNUSED12 : Int = 0x00AD - final val CEE_UNUSED13 : Int = 0x00AE - final val CEE_UNUSED14 : Int = 0x00AF - final val CEE_UNUSED15 : Int = 0x00B0 - final val CEE_UNUSED16 : Int = 0x00B1 - final val CEE_UNUSED17 : Int = 0x00B2 - final val CEE_CONV_OVF_I1 : Int = 0x00B3 - final val CEE_CONV_OVF_U1 : Int = 0x00B4 - final val CEE_CONV_OVF_I2 : Int = 0x00B5 - final val CEE_CONV_OVF_U2 : Int = 0x00B6 - final val CEE_CONV_OVF_I4 : Int = 0x00B7 - final val CEE_CONV_OVF_U4 : Int = 0x00B8 - final val CEE_CONV_OVF_I8 : Int = 0x00B9 - final val CEE_CONV_OVF_U8 : Int = 0x00BA - final val CEE_UNUSED50 : Int = 0x00BB - final val CEE_UNUSED18 : Int = 0x00BC - final val CEE_UNUSED19 : Int = 0x00BD - final val CEE_UNUSED20 : Int = 0x00BE - final val CEE_UNUSED21 : Int = 0x00BF - final val CEE_UNUSED22 : Int = 0x00C0 - final val CEE_UNUSED23 : Int = 0x00C1 - final val CEE_REFANYVAL : Int = 0x00C2 - final val CEE_CKFINITE : Int = 0x00C3 - final val CEE_UNUSED24 : Int = 0x00C4 - final val CEE_UNUSED25 : Int = 0x00C5 - final val CEE_MKREFANY : Int = 0x00C6 - final val CEE_UNUSED59 : Int = 0x00C7 - final val CEE_UNUSED60 : Int = 0x00C8 - final val CEE_UNUSED61 : Int = 0x00C9 - final val CEE_UNUSED62 : Int = 0x00CA - final val CEE_UNUSED63 : Int = 0x00CB - final val CEE_UNUSED64 : Int = 0x00CC - final val CEE_UNUSED65 : Int = 0x00CD - final val CEE_UNUSED66 : Int = 0x00CE - final val CEE_UNUSED67 : Int = 0x00CF - final val CEE_LDTOKEN : Int = 0x00D0 - final val CEE_CONV_U2 : Int = 0x00D1 - final val CEE_CONV_U1 : Int = 0x00D2 - final val CEE_CONV_I : Int = 0x00D3 - final val CEE_CONV_OVF_I : Int = 0x00D4 - final val CEE_CONV_OVF_U : Int = 0x00D5 - final val CEE_ADD_OVF : Int = 0x00D6 - final val CEE_ADD_OVF_UN : Int = 0x00D7 - final val CEE_MUL_OVF : Int = 0x00D8 - final val CEE_MUL_OVF_UN : Int = 0x00D9 - final val CEE_SUB_OVF : Int = 0x00DA - final val CEE_SUB_OVF_UN : Int = 0x00DB - final val CEE_ENDFINALLY : Int = 0x00DC - final val CEE_LEAVE : Int = 0x00DD - final val CEE_LEAVE_S : Int = 0x00DE - final val CEE_STIND_I : Int = 0x00DF - final val CEE_CONV_U : Int = 0x00E0 - final val CEE_UNUSED26 : Int = 0x00E1 - final val CEE_UNUSED27 : Int = 0x00E2 - final val CEE_UNUSED28 : Int = 0x00E3 - final val CEE_UNUSED29 : Int = 0x00E4 - final val CEE_UNUSED30 : Int = 0x00E5 - final val CEE_UNUSED31 : Int = 0x00E6 - final val CEE_UNUSED32 : Int = 0x00E7 - final val CEE_UNUSED33 : Int = 0x00E8 - final val CEE_UNUSED34 : Int = 0x00E9 - final val CEE_UNUSED35 : Int = 0x00EA - final val CEE_UNUSED36 : Int = 0x00EB - final val CEE_UNUSED37 : Int = 0x00EC - final val CEE_UNUSED38 : Int = 0x00ED - final val CEE_UNUSED39 : Int = 0x00EE - final val CEE_UNUSED40 : Int = 0x00EF - final val CEE_UNUSED41 : Int = 0x00F0 - final val CEE_UNUSED42 : Int = 0x00F1 - final val CEE_UNUSED43 : Int = 0x00F2 - final val CEE_UNUSED44 : Int = 0x00F3 - final val CEE_UNUSED45 : Int = 0x00F4 - final val CEE_UNUSED46 : Int = 0x00F5 - final val CEE_UNUSED47 : Int = 0x00F6 - final val CEE_UNUSED48 : Int = 0x00F7 - final val CEE_PREFIX7 : Int = 0x00F8 - final val CEE_PREFIX6 : Int 
= 0x00F9 - final val CEE_PREFIX5 : Int = 0x00FA - final val CEE_PREFIX4 : Int = 0x00FB - final val CEE_PREFIX3 : Int = 0x00FC - final val CEE_PREFIX2 : Int = 0x00FD - final val CEE_PREFIX1 : Int = 0x00FE - final val CEE_PREFIXREF : Int = 0x00FF - - final val CEE_ARGLIST : Int = 0x0100 - final val CEE_CEQ : Int = 0x0101 - final val CEE_CGT : Int = 0x0102 - final val CEE_CGT_UN : Int = 0x0103 - final val CEE_CLT : Int = 0x0104 - final val CEE_CLT_UN : Int = 0x0105 - final val CEE_LDFTN : Int = 0x0106 - final val CEE_LDVIRTFTN : Int = 0x0107 - final val CEE_UNUSED56 : Int = 0x0108 - final val CEE_LDARG : Int = 0x0109 - final val CEE_LDARGA : Int = 0x010A - final val CEE_STARG : Int = 0x010B - final val CEE_LDLOC : Int = 0x010C - final val CEE_LDLOCA : Int = 0x010D - final val CEE_STLOC : Int = 0x010E - final val CEE_LOCALLOC : Int = 0x010F - final val CEE_UNUSED57 : Int = 0x0110 - final val CEE_ENDFILTER : Int = 0x0111 - final val CEE_UNALIGNED : Int = 0x0112 - final val CEE_VOLATILE : Int = 0x0113 - final val CEE_TAILCALL : Int = 0x0114 - final val CEE_INITOBJ : Int = 0x0115 - final val CEE_CONSTRAINED : Int = 0xFE16 - final val CEE_READONLY : Int = 0xFE1E - final val CEE_UNUSED68 : Int = 0x0116 - final val CEE_CPBLK : Int = 0x0117 - final val CEE_INITBLK : Int = 0x0118 - final val CEE_UNUSED69 : Int = 0x0119 - final val CEE_RETHROW : Int = 0x011A - final val CEE_UNUSED51 : Int = 0x011B - final val CEE_SIZEOF : Int = 0x011C - final val CEE_REFANYTYPE : Int = 0x011D - final val CEE_UNUSED52 : Int = 0x011E - final val CEE_UNUSED53 : Int = 0x011F - final val CEE_UNUSED54 : Int = 0x0120 - final val CEE_UNUSED55 : Int = 0x0121 - final val CEE_UNUSED70 : Int = 0x0122 - - final val CEE_ILLEGAL : Int = 0x0140 - final val CEE_MACRO_END : Int = 0x0141 - - final val CEE_BRNULL : Int = 0x0180 // CEE_BRFALSE - final val CEE_BRNULL_S : Int = 0x0181 // CEE_BRFALSE_S - final val CEE_BRZERO : Int = 0x0182 // CEE_BRFALSE - final val CEE_BRZERO_S : Int = 0x0183 // CEE_BRFALSE_S - final val CEE_BRINST : Int = 0x0184 // CEE_BRTRUE - final val CEE_BRINST_S : Int = 0x0185 // CEE_BRTRUE_S - final val CEE_LDIND_U8 : Int = 0x0186 // CEE_LDIND_I8 - final val CEE_LDELEM_U8 : Int = 0x0187 // CEE_LDELEM_I8 - final val CEE_LDC_I4_M1x : Int = 0x0188 // CEE_LDC_I4_M1 - final val CEE_ENDFAULT : Int = 0x0189 // CEE_ENDFINALLY - - final val CEE_BRNONZERO : Int = 0x01C0 // CEE_BRTRUE - final val CEE_BRNONZERO_S : Int = 0x01C1 // CEE_BRTRUE_S - - final val CEE_BRNOT : Int = 0x01C2 - final val CEE_BRNOT_S : Int = 0x01C3 - final val CEE_NOCODE : Int = 0x01C4 - - final val CEE_count : Int = 0x0200 - - - //######################################################################## - // Opcode's amount and type of poped data - - final val POP_NONE : Byte = 0x00 - final val POP_1 : Byte = 0x01 - final val POP_1_1 : Byte = 0x02 - final val POP_I : Byte = 0x03 - final val POP_I_1 : Byte = 0x04 - final val POP_I_I : Byte = 0x05 - final val POP_I_I8 : Byte = 0x06 - final val POP_I_R4 : Byte = 0x07 - final val POP_I_R8 : Byte = 0x08 - final val POP_I_I_I : Byte = 0x09 - final val POP_REF : Byte = 0x0A - final val POP_REF_1 : Byte = 0x0B - final val POP_REF_I : Byte = 0x0C - final val POP_REF_I_I : Byte = 0x0D - final val POP_REF_I_I8 : Byte = 0x0E - final val POP_REF_I_R4 : Byte = 0x0F - final val POP_REF_I_R8 : Byte = 0x10 - final val POP_REF_I_REF : Byte = 0x11 - final val POP_SPECIAL : Byte = 0x12 - final val POP_count : Int = 0x13 - final val POP_size : Array[Byte] = new Array[Byte](POP_count) - - POP_size(POP_NONE) = 0 - POP_size(POP_1) 
= 1 - POP_size(POP_1_1) = 2 - POP_size(POP_I) = 1 - POP_size(POP_I_1) = 2 - POP_size(POP_I_I) = 2 - POP_size(POP_I_I8) = 2 - POP_size(POP_I_R4) = 2 - POP_size(POP_I_R8) = 2 - POP_size(POP_I_I_I) = 3 - POP_size(POP_REF) = 1 - POP_size(POP_REF_1) = 2 - POP_size(POP_REF_I) = 2 - POP_size(POP_REF_I_I) = 3 - POP_size(POP_REF_I_I8) = 3 - POP_size(POP_REF_I_R4) = 3 - POP_size(POP_REF_I_R8) = 3 - POP_size(POP_REF_I_REF) = 3 - POP_size(POP_SPECIAL) = -1 - - //######################################################################## - // Opcode's amount and type of pushed data - - final val PUSH_NONE : Byte = 0x00 - final val PUSH_1 : Byte = 0x01 - final val PUSH_1_1 : Byte = 0x02 - final val PUSH_I : Byte = 0x03 - final val PUSH_I8 : Byte = 0x04 - final val PUSH_R4 : Byte = 0x05 - final val PUSH_R8 : Byte = 0x06 - final val PUSH_REF : Byte = 0x07 - final val PUSH_SPECIAL : Byte = 0x08 - final val PUSH_count : Int = 0x09 - final val PUSH_size : Array[Byte] = new Array[Byte](PUSH_count) - - PUSH_size(PUSH_NONE) = 0 - PUSH_size(PUSH_1) = 1 - PUSH_size(PUSH_1_1) = 2 - PUSH_size(PUSH_I) = 1 - PUSH_size(PUSH_I8) = 1 - PUSH_size(PUSH_R4) = 1 - PUSH_size(PUSH_R8) = 1 - PUSH_size(PUSH_REF) = 1 - PUSH_size(PUSH_SPECIAL) = -1 - - //######################################################################## - // Opcode's amount of moved data - - final val POPUSH_SPECIAL : Byte = -128 - - //######################################################################## - // Opcode's inline argument types - - final val INLINE_NONE : Byte = 0x00 - final val INLINE_VARIABLE_S : Byte = 0x01 - final val INLINE_TARGET_S : Byte = 0x02 - final val INLINE_I_S : Byte = 0x03 - final val INLINE_VARIABLE : Byte = 0x04 - final val INLINE_TARGET : Byte = 0x05 - final val INLINE_I : Byte = 0x06 - final val INLINE_I8 : Byte = 0x07 - final val INLINE_R : Byte = 0x08 - final val INLINE_R8 : Byte = 0x09 - final val INLINE_STRING : Byte = 0x0A - final val INLINE_TYPE : Byte = 0x0B - final val INLINE_FIELD : Byte = 0x0C - final val INLINE_METHOD : Byte = 0x0D - final val INLINE_SIGNATURE : Byte = 0x0E - final val INLINE_TOKEN : Byte = 0x0F - final val INLINE_SWITCH : Byte = 0x10 - final val INLINE_count : Int = 0x11 - final val INLINE_length : Array[Byte] = new Array[Byte](INLINE_count) - - INLINE_length(INLINE_NONE) = 0 - INLINE_length(INLINE_VARIABLE_S) = 1 - INLINE_length(INLINE_TARGET_S) = 1 - INLINE_length(INLINE_I_S) = 1 - INLINE_length(INLINE_VARIABLE) = 2 - INLINE_length(INLINE_TARGET) = 4 - INLINE_length(INLINE_I) = 4 - INLINE_length(INLINE_I8) = 8 - INLINE_length(INLINE_R) = 4 - INLINE_length(INLINE_R8) = 8 - INLINE_length(INLINE_STRING) = 4 - INLINE_length(INLINE_TYPE) = 4 - INLINE_length(INLINE_FIELD) = 4 - INLINE_length(INLINE_METHOD) = 4 - INLINE_length(INLINE_SIGNATURE) = 4 - INLINE_length(INLINE_SWITCH) = 4 - INLINE_length(INLINE_TOKEN) = 4 - - //######################################################################## - // Opcode's control flow implications - - final val FLOW_META : Byte = 0x00 - final val FLOW_NEXT : Byte = 0x01 - final val FLOW_BRANCH : Byte = 0x02 - final val FLOW_COND_BRANCH : Byte = 0x03 - final val FLOW_BREAK : Byte = 0x04 - final val FLOW_CALL : Byte = 0x05 - final val FLOW_RETURN : Byte = 0x06 - final val FLOW_THROW : Byte = 0x07 - final val FLOW_count : Int = 0x08 - - //######################################################################## - // Init methods for Opcode - - def opcode(that: OpCode, opcode: Int, string: String, code: Int, - pop: Byte, push: Byte, inline: Byte, flow: Byte) { - 
that.CEE_opcode = opcode - that.CEE_string = string - that.CEE_code = code.toShort - that.CEE_pop = pop - that.CEE_push = push - that.CEE_inline = inline - that.CEE_flow = flow - that.CEE_length = that.length() - that.CEE_popush = that.popush() - } - - def length(code: Int): Byte = { - if ((code & 0xFFFFFF00) == 0xFFFFFF00) return 1 - if ((code & 0xFFFFFF00) == 0xFFFFFE00) return 2 - return 0 - } - - //######################################################################## - // case OpCode - - /** - * Adds two values and pushes the result onto the evaluation stack. - */ - final val Add = new OpCode() - opcode(Add, CEE_ADD, "add", 0xFFFFFF58, POP_1_1, PUSH_1, INLINE_NONE, FLOW_NEXT) - - /** - * Fills space if bytecodes are patched. No meaningful operation is performed - * although a processing cycle can be consumed. - */ - final val Nop = new OpCode() - opcode(Nop, CEE_NOP, "nop", 0xFFFFFF00, POP_NONE, PUSH_NONE, INLINE_NONE , FLOW_NEXT) - - /** - * Signals the Common Language Infrastructure (CLI) to inform the debugger that - * a break point has been tripped. - */ - final val Break = new OpCode() - opcode(Break, CEE_BREAK, "break" , 0xFFFFFF01, POP_NONE, PUSH_NONE , INLINE_NONE , FLOW_BREAK) - - /** - * Loads the argument at index 0 onto the evaluation stack. - */ - final val Ldarg_0 = new OpCode() - opcode(Ldarg_0, CEE_LDARG_0 , "ldarg.0" , 0xFFFFFF02, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT) - - /** - * Loads the argument at index 1 onto the evaluation stack. - */ - final val Ldarg_1 = new OpCode() - opcode(Ldarg_1, CEE_LDARG_1 , "ldarg.1" , 0xFFFFFF03, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT) - - /** - * Loads the argument at index 2 onto the evaluation stack. - */ - final val Ldarg_2 = new OpCode() - opcode(Ldarg_2, CEE_LDARG_2 , "ldarg.2" , 0xFFFFFF04, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT) - - /** - * Loads the argument at index 3 onto the evaluation stack. - */ - final val Ldarg_3 = new OpCode() - opcode(Ldarg_3, CEE_LDARG_3 , "ldarg.3" , 0xFFFFFF05, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT) - - /** - * Loads the local variable at index 0 onto the evaluation stack. - */ - final val Ldloc_0 = new OpCode() - opcode(Ldloc_0, CEE_LDLOC_0 , "ldloc.0" , 0xFFFFFF06, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT) - - /** - * Loads the local variable at index 1 onto the evaluation stack. - */ - final val Ldloc_1 = new OpCode() - opcode(Ldloc_1, CEE_LDLOC_1 , "ldloc.1" , 0xFFFFFF07, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT) - - /** - * Loads the local variable at index 2 onto the evaluation stack. - */ - final val Ldloc_2 = new OpCode() - opcode(Ldloc_2, CEE_LDLOC_2 , "ldloc.2" , 0xFFFFFF08, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT) - - /** - * Loads the local variable at index 3 onto the evaluation stack. - */ - final val Ldloc_3 = new OpCode() - opcode(Ldloc_3, CEE_LDLOC_3 , "ldloc.3" , 0xFFFFFF09, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT) - - /** - * Pops the current value from the top of the evaluation stack and - * stores it in a the local variable list at index 0. - */ - final val Stloc_0 = new OpCode() - opcode(Stloc_0, CEE_STLOC_0 , "stloc.0" , 0xFFFFFF0A, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT) - - /** - * Pops the current value from the top of the evaluation stack and - * stores it in a the local variable list at index 1. 
- */ - final val Stloc_1 = new OpCode() - opcode(Stloc_1, CEE_STLOC_1 , "stloc.1" , 0xFFFFFF0B, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT) - - /** - * Pops the current value from the top of the evaluation stack and - * stores it in a the local variable list at index 2. - */ - final val Stloc_2 = new OpCode() - opcode(Stloc_2, CEE_STLOC_2 , "stloc.2" , 0xFFFFFF0C, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT) - - /** - * Pops the current value from the top of the evaluation stack and - * stores it in a the local variable list at index 3. - */ - final val Stloc_3 = new OpCode() - opcode(Stloc_3, CEE_STLOC_3 , "stloc.3" , 0xFFFFFF0D, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT) - - /** - * Loads the argument (referenced by a specified short form index) - * onto the evaluation stack. - */ - final val Ldarg_S = new OpCode() - opcode(Ldarg_S, CEE_LDARG_S , "ldarg.s" , 0xFFFFFF0E, POP_NONE, PUSH_1 , INLINE_VARIABLE_S, FLOW_NEXT) - - /** - * Load an argument address, in short form, onto the evaluation stack. - */ - final val Ldarga_S = new OpCode() - opcode(Ldarga_S, CEE_LDARGA_S , "ldarga.s" , 0xFFFFFF0F, POP_NONE, PUSH_I , INLINE_VARIABLE_S, FLOW_NEXT) - - /** - * Loads the local variable at a specific index onto the evaluation stack, - * short form. - */ - final val Ldloc_S = new OpCode() - opcode(Ldloc_S, CEE_LDLOC_S , "ldloc.s" , 0xFFFFFF11, POP_NONE, PUSH_1 , INLINE_VARIABLE_S, FLOW_NEXT) - - /** - * Loads the address of the local variable at a specific index onto - * the evaluation stack, short form. - */ - final val Ldloca_S = new OpCode() - opcode(Ldloca_S, CEE_LDLOCA_S , "ldloca.s" , 0xFFFFFF12, POP_NONE, PUSH_I , INLINE_VARIABLE_S, FLOW_NEXT) - - /** - * Stores the value on top of the evaluation stack in the argument slot - * at a specified index, short form. - */ - final val Starg_S = new OpCode() - opcode(Starg_S, CEE_STARG_S , "starg.s" , 0xFFFFFF10, POP_1 , PUSH_NONE , INLINE_VARIABLE_S, FLOW_NEXT) - - /** - * Pops the current value from the top of the evaluation stack and stores it - * in a the local variable list at index (short form). - */ - final val Stloc_S = new OpCode() - opcode(Stloc_S, CEE_STLOC_S , "stloc.s" , 0xFFFFFF13, POP_1 , PUSH_NONE, INLINE_VARIABLE_S, FLOW_NEXT) - - /** - * Pushes a null reference (type O) onto the evaluation stack. - */ - final val Ldnull = new OpCode() - opcode(Ldnull, CEE_LDNULL , "ldnull" , 0xFFFFFF14, POP_NONE, PUSH_REF , INLINE_NONE, FLOW_NEXT) - - /** - * Pushes the integer value of -1 onto the evaluation stack as an int32. - */ - final val Ldc_I4_M1 = new OpCode() - opcode(Ldc_I4_M1, CEE_LDC_I4_M1, "ldc.i4.m1", 0xFFFFFF15, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Pushes the integer value of 0 onto the evaluation stack as an int32. - */ - final val Ldc_I4_0 = new OpCode() - opcode(Ldc_I4_0, CEE_LDC_I4_0 , "ldc.i4.0" , 0xFFFFFF16, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Pushes the integer value of 1 onto the evaluation stack as an int32. - */ - final val Ldc_I4_1 = new OpCode() - opcode(Ldc_I4_1, CEE_LDC_I4_1 , "ldc.i4.1" , 0xFFFFFF17, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Pushes the integer value of 2 onto the evaluation stack as an int32. - */ - final val Ldc_I4_2 = new OpCode() - opcode(Ldc_I4_2, CEE_LDC_I4_2 , "ldc.i4.2" , 0xFFFFFF18, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Pushes the integer value of 3 onto the evaluation stack as an int32. 
- */ - final val Ldc_I4_3 = new OpCode() - opcode(Ldc_I4_3, CEE_LDC_I4_3 , "ldc.i4.3" , 0xFFFFFF19, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Pushes the integer value of 4 onto the evaluation stack as an int32. - */ - final val Ldc_I4_4 = new OpCode() - opcode(Ldc_I4_4, CEE_LDC_I4_4 , "ldc.i4.4" , 0xFFFFFF1A, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Pushes the integer value of 5 onto the evaluation stack as an int32. - */ - final val Ldc_I4_5 = new OpCode() - opcode(Ldc_I4_5, CEE_LDC_I4_5 , "ldc.i4.5" , 0xFFFFFF1B, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Pushes the integer value of 6 onto the evaluation stack as an int32. - */ - final val Ldc_I4_6 = new OpCode() - opcode(Ldc_I4_6, CEE_LDC_I4_6 , "ldc.i4.6", 0xFFFFFF1C, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Pushes the integer value of 7 onto the evaluation stack as an int32. - */ - final val Ldc_I4_7 = new OpCode() - opcode(Ldc_I4_7, CEE_LDC_I4_7 , "ldc.i4.7", 0xFFFFFF1D, POP_NONE , PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Pushes the integer value of 8 onto the evaluation stack as an int32. - */ - final val Ldc_I4_8 = new OpCode() - opcode(Ldc_I4_8, CEE_LDC_I4_8 , "ldc.i4.8", 0xFFFFFF1E, POP_NONE , PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Pushes the supplied int8 value onto the evaluation stack as an int32, short form. - */ - final val Ldc_I4_S = new OpCode() - opcode(Ldc_I4_S, CEE_LDC_I4_S , "ldc.i4.s", 0xFFFFFF1F, POP_NONE , PUSH_I, INLINE_I_S, FLOW_NEXT) - - /** - * Pushes a supplied value of type int32 onto the evaluation stack as an int32. - */ - final val Ldc_I4 = new OpCode() - opcode(Ldc_I4, CEE_LDC_I4, "ldc.i4" , 0xFFFFFF20, POP_NONE , PUSH_I, INLINE_I , FLOW_NEXT) - - /** - * Pushes a supplied value of type int64 onto the evaluation stack as an int64. - */ - final val Ldc_I8 = new OpCode() - opcode(Ldc_I8, CEE_LDC_I8, "ldc.i8" , 0xFFFFFF21, POP_NONE , PUSH_I8, INLINE_I8 , FLOW_NEXT) - - /** - * Pushes a supplied value of type float32 onto the evaluation stack as type F (float). - */ - final val Ldc_R4 = new OpCode() - opcode(Ldc_R4, CEE_LDC_R4, "ldc.r4" , 0xFFFFFF22, POP_NONE , PUSH_R4, INLINE_R , FLOW_NEXT) - - /** - * Pushes a supplied value of type float64 onto the evaluation stack as type F (float). - */ - final val Ldc_R8 = new OpCode() - opcode(Ldc_R8, CEE_LDC_R8, "ldc.r8" , 0xFFFFFF23, POP_NONE , PUSH_R8, INLINE_R8 , FLOW_NEXT) - - /** - * Copies the current topmost value on the evaluation stack, and then pushes the copy - * onto the evaluation stack. - */ - final val Dup = new OpCode() - opcode(Dup, CEE_DUP , "dup" , 0xFFFFFF25, POP_1 , PUSH_1_1 , INLINE_NONE , FLOW_NEXT) - - /** - * Removes the value currently on top of the evaluation stack. - */ - final val Pop = new OpCode() - opcode(Pop, CEE_POP , "pop" , 0xFFFFFF26, POP_1 , PUSH_NONE , INLINE_NONE , FLOW_NEXT) - - /** - * Exits current method and jumps to specified method. - */ - final val Jmp = new OpCode() - opcode(Jmp, CEE_JMP , "jmp" , 0xFFFFFF27, POP_NONE , PUSH_NONE , INLINE_METHOD, FLOW_CALL) - - /** - * Calls the method indicated by the passed method descriptor. - */ - final val Call = new OpCode() - opcode(Call, CEE_CALL , "call" , 0xFFFFFF28, POP_SPECIAL, PUSH_SPECIAL, INLINE_METHOD , FLOW_CALL) - - /** - * constrained prefix - */ - final val Constrained = new OpCode() -opcode(Constrained, CEE_CONSTRAINED , "constrained." , 0xFFFFFE16, POP_NONE, PUSH_NONE, INLINE_NONE , FLOW_NEXT) - - /** - * readonly prefix - */ - final val Readonly = new OpCode() -opcode(Readonly, CEE_READONLY , "readonly." 
, 0xFFFFFE1E, POP_NONE, PUSH_NONE, INLINE_NONE , FLOW_NEXT) - - /** - * Calls the method indicated on the evaluation stack (as a pointer to an entry point) - * with arguments described by a calling convention. - */ - final val Calli = new OpCode() - opcode(Calli, CEE_CALLI, "calli" , 0xFFFFFF29, POP_SPECIAL, PUSH_SPECIAL, INLINE_SIGNATURE , FLOW_CALL) - - /** - * Returns from the current method, pushing a return value (if present) from the caller's - * evaluation stack onto the callee's evaluation stack. - */ - final val Ret = new OpCode() - opcode(Ret, CEE_RET , "ret" , 0xFFFFFF2A, POP_SPECIAL, PUSH_NONE, INLINE_NONE , FLOW_RETURN) - - /** - * Unconditionally transfers control to a target instruction (short form). - */ - final val Br_S = new OpCode() - opcode(Br_S, CEE_BR_S , "br.s" , 0xFFFFFF2B, POP_NONE, PUSH_NONE, INLINE_TARGET_S , FLOW_BRANCH) - - /** - * Transfers control to a target instruction if value is false, a null reference, or zero. - */ - final val Brfalse_S = new OpCode() - opcode(Brfalse_S, CEE_BRFALSE_S,"brfalse.s", 0xFFFFFF2C, POP_I, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction (short form) if value is true, not null, or non-zero. - */ - final val Brtrue_S = new OpCode() - opcode(Brtrue_S, CEE_BRTRUE_S , "brtrue.s", 0xFFFFFF2D, POP_I, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction (short form) if two values are equal. - */ - final val Beq_S = new OpCode() - opcode(Beq_S, CEE_BEQ_S, "beq.s", 0xFFFFFF2E, POP_1_1 , PUSH_NONE, INLINE_TARGET_S , FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction (short form) if the first value is greater than - * or equal to the second value. - */ - final val Bge_S = new OpCode() - opcode(Bge_S, CEE_BGE_S, "bge.s", 0xFFFFFF2F, POP_1_1 , PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction (short form) if the first value is greater than - * the second value. - */ - final val Bgt_S = new OpCode() - opcode(Bgt_S, CEE_BGT_S, "bgt.s" , 0xFFFFFF30, POP_1_1 , PUSH_NONE, INLINE_TARGET_S , FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction (short form) if the first value is less than - * or equal to the second value. - */ - final val Ble_S = new OpCode() - opcode(Ble_S, CEE_BLE_S, "ble.s" , 0xFFFFFF31, POP_1_1 , PUSH_NONE, INLINE_TARGET_S , FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction (short form) if the first value is less than - * the second value. - */ - final val Blt_S = new OpCode() - opcode(Blt_S, CEE_BLT_S, "blt.s", 0xFFFFFF32, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction (short form) when two unsigned integer values - * or unordered float values are not equal. - */ - final val Bne_Un_S = new OpCode() - opcode(Bne_Un_S, CEE_BNE_UN_S, "bne.un.s", 0xFFFFFF33, POP_1_1 , PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction (short form) if the first value is greather - * than the second value, when comparing unsigned integer values or unordered float values. - */ - final val Bge_Un_S = new OpCode() - opcode(Bge_Un_S, CEE_BGE_UN_S, "bge.un.s", 0xFFFFFF34, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction (short form) if the first value is greater than - * the second value, when comparing unsigned integer values or unordered float values. 
- */ - final val Bgt_Un_S = new OpCode() - opcode(Bgt_Un_S, CEE_BGT_UN_S, "bgt.un.s", 0xFFFFFF35, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction (short form) if the first value is less than - * or equal to the second value, when comparing unsigned integer values or unordered float values. - */ - final val Ble_Un_S = new OpCode() - opcode(Ble_Un_S, CEE_BLE_UN_S , "ble.un.s", 0xFFFFFF36, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction (short form) if the first value is less than - * the second value, when comparing unsigned integer values or unordered float values. - */ - final val Blt_Un_S = new OpCode() - opcode(Blt_Un_S, CEE_BLT_UN_S, "blt.un.s", 0xFFFFFF37, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH) - - /** - * Unconditionally transfers control to a target instruction. - */ - final val Br = new OpCode() - opcode(Br, CEE_BR , "br" , 0xFFFFFF38, POP_NONE, PUSH_NONE, INLINE_TARGET, FLOW_BRANCH) - - /** - * Transfers control to a target instruction if value is false, a null reference - * (Nothing in Visual Basic), or zero. - */ - final val Brfalse = new OpCode() - opcode(Brfalse, CEE_BRFALSE, "brfalse", 0xFFFFFF39, POP_I, PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction if value is true, not null, or non-zero. - */ - final val Brtrue = new OpCode() - opcode(Brtrue, CEE_BRTRUE , "brtrue", 0xFFFFFF3A, POP_I , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction if two values are equal. - */ - final val Beq = new OpCode() - opcode(Beq, CEE_BEQ, "beq", 0xFFFFFF3B, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction if the first value is greater than or - * equal to the second value. - */ - final val Bge = new OpCode() - opcode(Bge, CEE_BGE, "bge", 0xFFFFFF3C, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction if the first value is greater than the second value. - */ - final val Bgt = new OpCode() - opcode(Bgt, CEE_BGT, "bgt", 0xFFFFFF3D, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction if the first value is less than or equal - * to the second value. - */ - final val Ble = new OpCode() - opcode(Ble, CEE_BLE, "ble", 0xFFFFFF3E, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction if the first value is less than the second value. - */ - final val Blt = new OpCode() - opcode(Blt, CEE_BLT, "blt", 0xFFFFFF3F, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction when two unsigned integer values or - * unordered float values are not equal. - */ - final val Bne_Un = new OpCode() - opcode(Bne_Un, CEE_BNE_UN , "bne.un", 0xFFFFFF40, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction if the first value is greather than - * the second value, when comparing unsigned integer values or unordered float values. - */ - final val Bge_Un = new OpCode() - opcode(Bge_Un, CEE_BGE_UN , "bge.un", 0xFFFFFF41, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction if the first value is greater than the - * second value, when comparing unsigned integer values or unordered float values. 
- */ - final val Bgt_Un = new OpCode() - opcode(Bgt_Un, CEE_BGT_UN , "bgt.un", 0xFFFFFF42, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction if the first value is less than or equal to - * the second value, when comparing unsigned integer values or unordered float values. - */ - final val Ble_Un = new OpCode() - opcode(Ble_Un, CEE_BLE_UN , "ble.un" , 0xFFFFFF43, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Transfers control to a target instruction if the first value is less than the second value, - * when comparing unsigned integer values or unordered float values. - */ - final val Blt_Un = new OpCode() - opcode(Blt_Un, CEE_BLT_UN , "blt.un", 0xFFFFFF44, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH) - - /** - * Implements a jump table. - */ - final val Switch = new OpCode() - opcode(Switch, CEE_SWITCH , "switch", 0xFFFFFF45, POP_I , PUSH_NONE, INLINE_SWITCH, FLOW_COND_BRANCH) - - /** - * Loads a value of type int8 as an int32 onto the evaluation stack indirectly. - */ - final val Ldind_I1 = new OpCode() - opcode(Ldind_I1, CEE_LDIND_I1 , "ldind.i1" , 0xFFFFFF46, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Loads a value of type int16 as an int32 onto the evaluation stack indirectly. - */ - final val Ldind_I2 = new OpCode() - opcode(Ldind_I2, CEE_LDIND_I2 , "ldind.i2" , 0xFFFFFF48, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Loads a value of type int32 as an int32 onto the evaluation stack indirectly. - */ - final val Ldind_I4 = new OpCode() - opcode(Ldind_I4, CEE_LDIND_I4 , "ldind.i4" , 0xFFFFFF4A, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Loads a value of type int64 as an int64 onto the evaluation stack indirectly. - */ - final val Ldind_I8 = new OpCode() - opcode(Ldind_I8, CEE_LDIND_I8 , "ldind.i8" , 0xFFFFFF4C, POP_I , PUSH_I8 , INLINE_NONE, FLOW_NEXT) - - /** - * Loads a value of type natural int as a natural int onto the evaluation stack indirectly. - */ - final val Ldind_I = new OpCode() - opcode(Ldind_I, CEE_LDIND_I , "ldind.i" , 0xFFFFFF4D, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Loads a value of type float32 as a type F (float) onto the evaluation stack indirectly. - */ - final val Ldind_R4 = new OpCode() - opcode(Ldind_R4, CEE_LDIND_R4 , "ldind.r4" , 0xFFFFFF4E, POP_I , PUSH_R4 , INLINE_NONE, FLOW_NEXT) - - /** - * Loads a value of type float64 as a type F (float) onto the evaluation stack indirectly. - */ - final val Ldind_R8 = new OpCode() - opcode(Ldind_R8, CEE_LDIND_R8 , "ldind.r8" , 0xFFFFFF4F, POP_I , PUSH_R8 , INLINE_NONE, FLOW_NEXT) - - /** - * Loads an object reference as a type O (object reference) onto the evaluation stack indirectly. - */ - final val Ldind_Ref = new OpCode() - opcode(Ldind_Ref, CEE_LDIND_REF, "ldind.ref", 0xFFFFFF50, POP_I , PUSH_REF, INLINE_NONE, FLOW_NEXT) - - /** - * Loads a value of type unsigned int8 as an int32 onto the evaluation stack indirectly. - */ - final val Ldind_U1 = new OpCode() - opcode(Ldind_U1, CEE_LDIND_U1 , "ldind.u1" , 0xFFFFFF47, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Loads a value of type unsigned int16 as an int32 onto the evaluation stack indirectly. - */ - final val Ldind_U2 = new OpCode() - opcode(Ldind_U2, CEE_LDIND_U2 , "ldind.u2" , 0xFFFFFF49, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Loads a value of type unsigned int32 as an int32 onto the evaluation stack indirectly. 
- */ - final val Ldind_U4 = new OpCode() - opcode(Ldind_U4, CEE_LDIND_U4 , "ldind.u4" , 0xFFFFFF4B, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Stores a object reference value at a supplied address. - */ - final val Stind_Ref = new OpCode() - opcode(Stind_Ref, CEE_STIND_REF, "stind.ref", 0xFFFFFF51, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Stores a value of type int8 at a supplied address. - */ - final val Stind_I1 = new OpCode() - opcode(Stind_I1, CEE_STIND_I1 , "stind.i1", 0xFFFFFF52, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Stores a value of type int16 at a supplied address. - */ - final val Stind_I2 = new OpCode() - opcode(Stind_I2, CEE_STIND_I2 , "stind.i2", 0xFFFFFF53, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Stores a value of type int32 at a supplied address. - */ - final val Stind_I4 = new OpCode() - opcode(Stind_I4, CEE_STIND_I4 , "stind.i4", 0xFFFFFF54, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Stores a value of type int64 at a supplied address. - */ - final val Stind_I8 = new OpCode() - opcode(Stind_I8, CEE_STIND_I8 , "stind.i8", 0xFFFFFF55, POP_I_I8, PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Stores a value of type float32 at a supplied address. - */ - final val Stind_R4 = new OpCode() - opcode(Stind_R4, CEE_STIND_R4 , "stind.r4", 0xFFFFFF56, POP_I_R4, PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Stores a value of type float64 at a supplied address. - */ - final val Stind_R8 = new OpCode() - opcode(Stind_R8, CEE_STIND_R8 , "stind.r8", 0xFFFFFF57, POP_I_R8, PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Subtracts one value from another and pushes the result onto the evaluation stack. - */ - final val Sub = new OpCode() - opcode(Sub, CEE_SUB, "sub" , 0xFFFFFF59, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Multiplies two values and pushes the result on the evaluation stack. - */ - final val Mul = new OpCode() - opcode(Mul, CEE_MUL, "mul" , 0xFFFFFF5A, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Divides two values and pushes the result as a floating-point (type F) or - * quotient (type int32) onto the evaluation stack. - */ - final val Div = new OpCode() - opcode(Div, CEE_DIV, "div" , 0xFFFFFF5B, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Divides two unsigned integer values and pushes the result (int32) onto the evaluation stack. - */ - final val Div_Un = new OpCode() - opcode(Div_Un, CEE_DIV_UN, "div.un" , 0xFFFFFF5C, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Divides two values and pushes the remainder onto the evaluation stack. - */ - final val Rem = new OpCode() - opcode(Rem, CEE_REM , "rem" , 0xFFFFFF5D, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Divides two unsigned values and pushes the remainder onto the evaluation stack. - */ - final val Rem_Un = new OpCode() - opcode(Rem_Un, CEE_REM_UN, "rem.un" , 0xFFFFFF5E, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Computes the bitwise AND of two values and pushes the result onto the evaluation stack. - */ - final val And = new OpCode() - opcode(And, CEE_AND, "and" , 0xFFFFFF5F, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Compute the bitwise complement of the two integer values on top of the stack and - * pushes the result onto the evaluation stack. 
- */ - final val Or = new OpCode() - opcode(Or, CEE_OR , "or" , 0xFFFFFF60, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Computes the bitwise XOR of the top two values on the evaluation stack, - * pushing the result onto the evaluation stack. - */ - final val Xor = new OpCode() - opcode(Xor, CEE_XOR, "xor" , 0xFFFFFF61, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Shifts an integer value to the left (in zeroes) by a specified number of bits, - * pushing the result onto the evaluation stack. - */ - final val Shl = new OpCode() - opcode(Shl, CEE_SHL, "shl" , 0xFFFFFF62, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Shifts an integer value (in sign) to the right by a specified number of bits, - * pushing the result onto the evaluation stack. - */ - final val Shr = new OpCode() - opcode(Shr, CEE_SHR, "shr" , 0xFFFFFF63, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Shifts an unsigned integer value (in zeroes) to the right by a specified number of bits, - * pushing the result onto the evaluation stack. - */ - final val Shr_Un = new OpCode() - opcode(Shr_Un, CEE_SHR_UN, "shr.un" , 0xFFFFFF64, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Negates a value and pushes the result onto the evaluation stack. - */ - final val Neg = new OpCode() - opcode(Neg, CEE_NEG , "neg" , 0xFFFFFF65, POP_1 , PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Computes the bitwise complement of the integer value on top of the stack and pushes - * the result onto the evaluation stack as the same type. - */ - final val Not = new OpCode() - opcode(Not, CEE_NOT , "not" , 0xFFFFFF66, POP_1 , PUSH_1 , INLINE_NONE, FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to int8, then extends (pads) it to int32. - */ - final val Conv_I1 = new OpCode() - opcode(Conv_I1, CEE_CONV_I1, "conv.i1", 0xFFFFFF67, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to int16, then extends (pads) it to int32. - */ - final val Conv_I2 = new OpCode() - opcode(Conv_I2, CEE_CONV_I2, "conv.i2", 0xFFFFFF68, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to int32. - */ - final val Conv_I4 = new OpCode() - opcode(Conv_I4, CEE_CONV_I4, "conv.i4", 0xFFFFFF69, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to int64. - */ - final val Conv_I8 = new OpCode() - opcode(Conv_I8, CEE_CONV_I8, "conv.i8", 0xFFFFFF6A, POP_1 , PUSH_I8, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to float32. - */ - final val Conv_R4 = new OpCode() - opcode(Conv_R4, CEE_CONV_R4, "conv.r4", 0xFFFFFF6B, POP_1 , PUSH_R4, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to float64. - */ - final val Conv_R8 = new OpCode() - opcode(Conv_R8, CEE_CONV_R8, "conv.r8", 0xFFFFFF6C, POP_1 , PUSH_R8, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to unsigned int32, and extends it to int32. - */ - final val Conv_U4 = new OpCode() - opcode(Conv_U4, CEE_CONV_U4, "conv.u4", 0xFFFFFF6D, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to unsigned int64, and extends it to int64. - */ - final val Conv_U8 = new OpCode() - opcode(Conv_U8, CEE_CONV_U8, "conv.u8", 0xFFFFFF6E, POP_1 , PUSH_I8, INLINE_NONE, FLOW_NEXT) - - /** - * Calls a late-bound method on an object, pushing the return value onto the evaluation stack. 
- */ - final val Callvirt = new OpCode() - opcode(Callvirt, CEE_CALLVIRT, "callvirt", 0xFFFFFF6F,POP_SPECIAL,PUSH_SPECIAL,INLINE_METHOD,FLOW_CALL) - - /** - * Copies the value type located at the address of an object (type &, * or natural int) - * to the address of the destination object (type &, * or natural int). - */ - final val Cpobj = new OpCode() - opcode(Cpobj, CEE_CPOBJ , "cpobj" , 0xFFFFFF70, POP_I_I , PUSH_NONE, INLINE_TYPE , FLOW_NEXT) - - /** - * Copies the value type object pointed to by an address to the top of the evaluation stack. - */ - final val Ldobj = new OpCode() - opcode(Ldobj, CEE_LDOBJ , "ldobj" , 0xFFFFFF71, POP_I , PUSH_1 , INLINE_TYPE , FLOW_NEXT) - - /** - * Pushes a new object reference to a string literal stored in the metadata. - */ - final val Ldstr = new OpCode() - opcode(Ldstr, CEE_LDSTR , "ldstr" , 0xFFFFFF72, POP_NONE , PUSH_REF , INLINE_STRING, FLOW_NEXT) - - /** - * Creates a new object or a new instance of a value type, pushing an object reference - * (type O) onto the evaluation stack. - */ - final val Newobj = new OpCode() - opcode(Newobj, CEE_NEWOBJ, "newobj", 0xFFFFFF73, POP_SPECIAL , PUSH_REF , INLINE_METHOD, FLOW_CALL) - - /** - * Attempts to cast an object passed by reference to the specified class. - */ - final val Castclass = new OpCode() - opcode(Castclass, CEE_CASTCLASS, "castclass", 0xFFFFFF74, POP_REF , PUSH_REF , INLINE_TYPE , FLOW_NEXT) - - /** - * Tests whether an object reference (type O) is an instance of a particular class. - */ - final val Isinst = new OpCode() - opcode(Isinst, CEE_ISINST , "isinst" , 0xFFFFFF75, POP_REF , PUSH_I , INLINE_TYPE , FLOW_NEXT) - - /** - * Converts the unsigned integer value on top of the evaluation stack to float32. - */ - final val Conv_R_Un = new OpCode() - opcode(Conv_R_Un, CEE_CONV_R_UN, "conv.r.un", 0xFFFFFF76, POP_1 , PUSH_R8 , INLINE_NONE , FLOW_NEXT) - - /** - * Converts the boxed representation of a value type to its unboxed form. - */ - final val Unbox = new OpCode() - opcode(Unbox, CEE_UNBOX , "unbox" , 0xFFFFFF79, POP_REF , PUSH_I , INLINE_TYPE , FLOW_NEXT) - - /** - * Throws the exception object currently on the evaluation stack. - */ - final val Throw = new OpCode() - opcode(Throw, CEE_THROW , "throw" , 0xFFFFFF7A, POP_REF , PUSH_NONE, INLINE_NONE , FLOW_THROW) - - /** - * Finds the value of a field in the object whose reference is currently - * on the evaluation stack. - */ - final val Ldfld = new OpCode() - opcode(Ldfld, CEE_LDFLD , "ldfld" , 0xFFFFFF7B, POP_REF , PUSH_1 , INLINE_FIELD , FLOW_NEXT) - - /** - * Finds the address of a field in the object whose reference is currently - * on the evaluation stack. - */ - final val Ldflda = new OpCode() - opcode(Ldflda, CEE_LDFLDA , "ldflda" , 0xFFFFFF7C, POP_REF , PUSH_I , INLINE_FIELD , FLOW_NEXT) - - /** - * Pushes the value of a static field onto the evaluation stack. - */ - final val Ldsfld = new OpCode() - opcode(Ldsfld, CEE_LDSFLD , "ldsfld" , 0xFFFFFF7E, POP_NONE , PUSH_1 , INLINE_FIELD , FLOW_NEXT) - - /** - * Pushes the address of a static field onto the evaluation stack. - */ - final val Ldsflda = new OpCode() - opcode(Ldsflda, CEE_LDSFLDA, "ldsflda", 0xFFFFFF7F, POP_NONE , PUSH_I , INLINE_FIELD , FLOW_NEXT) - - /** - * Replaces the value stored in the field of an object reference or pointer with a new value. - */ - final val Stfld = new OpCode() - opcode(Stfld, CEE_STFLD , "stfld" , 0xFFFFFF7D, POP_REF_1, PUSH_NONE, INLINE_FIELD , FLOW_NEXT) - - /** - * Replaces the value of a static field with a value from the evaluation stack. 
- */ - final val Stsfld = new OpCode() - opcode(Stsfld, CEE_STSFLD , "stsfld" , 0xFFFFFF80, POP_1 , PUSH_NONE, INLINE_FIELD , FLOW_NEXT) - - /** - * Copies a value of a specified type from the evaluation stack into a supplied memory address. - */ - final val Stobj = new OpCode() - opcode(Stobj, CEE_STOBJ , "stobj" , 0xFFFFFF81, POP_I_1, PUSH_NONE, INLINE_TYPE , FLOW_NEXT) - - /** - * Converts the unsigned value on top of the evaluation stack to signed int8 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_I1_Un = new OpCode() - opcode(Conv_Ovf_I1_Un, CEE_CONV_OVF_I1_UN, "conv.ovf.i1.un", 0xFFFFFF82, POP_1,PUSH_I,INLINE_NONE, FLOW_NEXT) - - /** - * Converts the unsigned value on top of the evaluation stack to signed int16 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_I2_Un = new OpCode() - opcode(Conv_Ovf_I2_Un, CEE_CONV_OVF_I2_UN, "conv.ovf.i2.un", 0xFFFFFF83,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the unsigned value on top of the evaluation stack to signed int32, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_I4_Un = new OpCode() - opcode(Conv_Ovf_I4_Un, CEE_CONV_OVF_I4_UN, "conv.ovf.i4.un", 0xFFFFFF84,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the unsigned value on top of the evaluation stack to signed int64, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_I8_Un = new OpCode() - opcode(Conv_Ovf_I8_Un, CEE_CONV_OVF_I8_UN, "conv.ovf.i8.un", 0xFFFFFF85,POP_1,PUSH_I8, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the unsigned value on top of the evaluation stack to signed natural int, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_I_Un = new OpCode() - opcode(Conv_Ovf_I_Un, CEE_CONV_OVF_I_UN , "conv.ovf.i.un" , 0xFFFFFF8A,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the unsigned value on top of the evaluation stack to unsigned int8 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_U1_Un = new OpCode() - opcode(Conv_Ovf_U1_Un, CEE_CONV_OVF_U1_UN, "conv.ovf.u1.un", 0xFFFFFF86,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the unsigned value on top of the evaluation stack to unsigned int16 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_U2_Un = new OpCode() - opcode(Conv_Ovf_U2_Un, CEE_CONV_OVF_U2_UN, "conv.ovf.u2.un", 0xFFFFFF87,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the unsigned value on top of the evaluation stack to unsigned int32, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_U4_Un = new OpCode() - opcode(Conv_Ovf_U4_Un, CEE_CONV_OVF_U4_UN, "conv.ovf.u4.un", 0xFFFFFF88,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the unsigned value on top of the evaluation stack to unsigned int64, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_U8_Un = new OpCode() - opcode(Conv_Ovf_U8_Un, CEE_CONV_OVF_U8_UN, "conv.ovf.u8.un", 0xFFFFFF89,POP_1,PUSH_I8, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the unsigned value on top of the evaluation stack to unsigned natural int, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_U_Un = new OpCode() - opcode(Conv_Ovf_U_Un, CEE_CONV_OVF_U_UN , "conv.ovf.u.un" , 0xFFFFFF8B,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Converts a value type to an object reference (type O). 
- */ - final val Box = new OpCode() - opcode(Box, CEE_BOX , "box" , 0xFFFFFF8C, POP_1 , PUSH_REF , INLINE_TYPE , FLOW_NEXT) - - /** - * Pushes an object reference to a new zero-based, one-dimensional array whose elements - * are of a specific type onto the evaluation stack. - */ - final val Newarr = new OpCode() - opcode(Newarr, CEE_NEWARR, "newarr" , 0xFFFFFF8D, POP_I , PUSH_REF , INLINE_TYPE , FLOW_NEXT) - - /** - * Pushes the number of elements of a zero-based, one-dimensional array - * onto the evaluation stack. - */ - final val Ldlen = new OpCode() - opcode(Ldlen, CEE_LDLEN, "ldlen", 0xFFFFFF8E, POP_REF, PUSH_I,INLINE_NONE , FLOW_NEXT) - - /** - * Loads the address of the array element at a specified array index onto - * the top of the evaluation stack as type & (managed pointer). - */ - final val Ldelema = new OpCode() - opcode(Ldelema, CEE_LDELEMA, "ldelema" , 0xFFFFFF8F, POP_REF_I, PUSH_I, INLINE_TYPE , FLOW_NEXT) - - /** - * Loads the element with type natural int at a specified array index onto the top - * of the evaluation stack as a natural int. - */ - final val Ldelem_I = new OpCode() - opcode(Ldelem_I, CEE_LDELEM_I, "ldelem.i" , 0xFFFFFF97, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Loads the element with type int8 at a specified array index onto the top of the - * evaluation stack as an int32. - */ - final val Ldelem_I1 = new OpCode() - opcode(Ldelem_I1, CEE_LDELEM_I1, "ldelem.i1" , 0xFFFFFF90, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Loads the element with type int16 at a specified array index onto the top of - * the evaluation stack as an int32. - */ - final val Ldelem_I2 = new OpCode() - opcode(Ldelem_I2, CEE_LDELEM_I2, "ldelem.i2" , 0xFFFFFF92, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Loads the element with type int32 at a specified array index onto the top of the - * evaluation stack as an int32. - */ - final val Ldelem_I4 = new OpCode() - opcode(Ldelem_I4, CEE_LDELEM_I4, "ldelem.i4" , 0xFFFFFF94, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Loads the element with type int64 at a specified array index onto the top of the - * evaluation stack as an int64. - */ - final val Ldelem_I8 = new OpCode() - opcode(Ldelem_I8, CEE_LDELEM_I8, "ldelem.i8" , 0xFFFFFF96, POP_REF_I, PUSH_I8, INLINE_NONE , FLOW_NEXT) - - /** - * Loads the element with type float32 at a specified array index onto the top of the - * evaluation stack as type F (float) - */ - final val Ldelem_R4 = new OpCode() - opcode(Ldelem_R4, CEE_LDELEM_R4, "ldelem.r4" , 0xFFFFFF98, POP_REF_I, PUSH_R4, INLINE_NONE , FLOW_NEXT) - - /** - * Loads the element with type float64 at a specified array index onto the top of the - * evaluation stack as type F (float) . - */ - final val Ldelem_R8 = new OpCode() - opcode(Ldelem_R8, CEE_LDELEM_R8, "ldelem.r8" , 0xFFFFFF99, POP_REF_I, PUSH_R8, INLINE_NONE , FLOW_NEXT) - - /** - * Loads the element containing an object reference at a specified array index onto - * the top of the evaluation stack as type O (object reference). - */ - final val Ldelem_Ref = new OpCode() - opcode(Ldelem_Ref, CEE_LDELEM_REF, "ldelem.ref", 0xFFFFFF9A, POP_REF_I, PUSH_REF, INLINE_NONE , FLOW_NEXT) - - /** - * Loads the element with type unsigned int8 at a specified array index onto the top - * of the evaluation stack as an int32. 
- */ - final val Ldelem_U1 = new OpCode() - opcode(Ldelem_U1, CEE_LDELEM_U1, "ldelem.u1" , 0xFFFFFF91, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Loads the element with type unsigned int16 at a specified array index onto the top - * of the evaluation stack as an int32. - */ - final val Ldelem_U2 = new OpCode() - opcode(Ldelem_U2, CEE_LDELEM_U2, "ldelem.u2" , 0xFFFFFF93, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Loads the element with type unsigned int32 at a specified array index onto the top - * of the evaluation stack as an int32. - */ - final val Ldelem_U4 = new OpCode() - opcode(Ldelem_U4, CEE_LDELEM_U4, "ldelem.u4" , 0xFFFFFF95, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Replaces the array element at a given index with the natural int value on - * the evaluation stack. - */ - final val Stelem_I = new OpCode() - opcode(Stelem_I, CEE_STELEM_I, "stelem.i", 0xFFFFFF9B, POP_REF_I_I, PUSH_NONE, INLINE_NONE , FLOW_NEXT) - - /** - * Replaces the array element at a given index with the int8 value on the evaluation stack. - */ - final val Stelem_I1 = new OpCode() - opcode(Stelem_I1, CEE_STELEM_I1, "stelem.i1", 0xFFFFFF9C, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Replaces the array element at a given index with the int16 value on the evaluation stack. - */ - final val Stelem_I2 = new OpCode() - opcode(Stelem_I2, CEE_STELEM_I2, "stelem.i2", 0xFFFFFF9D, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Replaces the array element at a given index with the int32 value on the evaluation stack. - */ - final val Stelem_I4 = new OpCode() - opcode(Stelem_I4, CEE_STELEM_I4, "stelem.i4", 0xFFFFFF9E, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Replaces the array element at a given index with the int64 value on the evaluation stack. - */ - final val Stelem_I8 = new OpCode() - opcode(Stelem_I8, CEE_STELEM_I8,"stelem.i8", 0xFFFFFF9F, POP_REF_I_I8, PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Replaces the array element at a given index with the float32 value on the evaluation stack. - */ - final val Stelem_R4 = new OpCode() - opcode(Stelem_R4, CEE_STELEM_R4,"stelem.r4", 0xFFFFFFA0, POP_REF_I_R4, PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Replaces the array element at a given index with the float64 value on the evaluation stack. - */ - final val Stelem_R8 = new OpCode() - opcode(Stelem_R8, CEE_STELEM_R8,"stelem.r8", 0xFFFFFFA1, POP_REF_I_R8, PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Replaces the array element at a given index with the object ref value (type O) - * on the evaluation stack. - */ - final val Stelem_Ref = new OpCode() - opcode(Stelem_Ref, CEE_STELEM_REF,"stelem.ref",0xFFFFFFA2,POP_REF_I_REF,PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the signed value on top of the evaluation stack to signed int8 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_I1 = new OpCode() - opcode(Conv_Ovf_I1, CEE_CONV_OVF_I1, "conv.ovf.i1", 0xFFFFFFB3, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT) - - /** - * Converts the signed value on top of the evaluation stack to signed int16 and - * extending it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_I2 = new OpCode() - opcode(Conv_Ovf_I2, CEE_CONV_OVF_I2, "conv.ovf.i2", 0xFFFFFFB5, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT) - - /** - * Converts the signed value on top of the evaluation stack to signed int32, - * throwing OverflowException on overflow. 
- */ - final val Conv_Ovf_I4 = new OpCode() - opcode(Conv_Ovf_I4, CEE_CONV_OVF_I4, "conv.ovf.i4", 0xFFFFFFB7, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT) - - /** - * Converts the signed value on top of the evaluation stack to signed int64, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_I8 = new OpCode() - opcode(Conv_Ovf_I8, CEE_CONV_OVF_I8, "conv.ovf.i8", 0xFFFFFFB9, POP_1, PUSH_I8, INLINE_NONE , FLOW_NEXT) - - /** - * Converts the signed value on top of the evaluation stack to unsigned int8 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_U1 = new OpCode() - opcode(Conv_Ovf_U1, CEE_CONV_OVF_U1, "conv.ovf.u1", 0xFFFFFFB4, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT) - - /** - * Converts the signed value on top of the evaluation stack to unsigned int16 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_U2 = new OpCode() - opcode(Conv_Ovf_U2, CEE_CONV_OVF_U2, "conv.ovf.u2", 0xFFFFFFB6, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT) - - /** - * Converts the signed value on top of the evaluation stack to unsigned int32, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_U4 = new OpCode() - opcode(Conv_Ovf_U4, CEE_CONV_OVF_U4, "conv.ovf.u4", 0xFFFFFFB8, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT) - - /** - * Converts the signed value on top of the evaluation stack to unsigned int64, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_U8 = new OpCode() - opcode(Conv_Ovf_U8, CEE_CONV_OVF_U8, "conv.ovf.u8", 0xFFFFFFBA, POP_1, PUSH_I8, INLINE_NONE , FLOW_NEXT) - - /** - * Retrieves the address (type &) embedded in a typed reference. - */ - final val Refanyval = new OpCode() - opcode(Refanyval, CEE_REFANYVAL, "refanyval", 0xFFFFFFC2, POP_1, PUSH_I , INLINE_TYPE , FLOW_NEXT) - - /** - * Retrieves the type token embedded in a typed reference . - */ - final val Refanytype = new OpCode() - opcode(Refanytype, CEE_REFANYTYPE, "refanytype", 0xFFFFFE1D, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Throws ArithmeticException if value is not a finite number. - */ - final val Ckfinite = new OpCode() - opcode(Ckfinite, CEE_CKFINITE, "ckfinite" , 0xFFFFFFC3, POP_1, PUSH_R8 , INLINE_NONE , FLOW_NEXT) - - /** - * Pushes a typed reference to an instance of a specific type onto the evaluation stack. - */ - final val Mkrefany = new OpCode() - opcode(Mkrefany, CEE_MKREFANY, "mkrefany" , 0xFFFFFFC6, POP_I, PUSH_1 , INLINE_TYPE , FLOW_NEXT) - - /** - * Converts a metadata token to its runtime representation, pushing it onto the evaluation stack. - */ - final val Ldtoken = new OpCode() - opcode(Ldtoken, CEE_LDTOKEN , "ldtoken" , 0xFFFFFFD0, POP_NONE, PUSH_I, INLINE_TOKEN , FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to unsigned int8, and extends it to int32. - */ - final val Conv_U1 = new OpCode() - opcode(Conv_U1, CEE_CONV_U1 , "conv.u1" , 0xFFFFFFD2, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to unsigned int16, and extends it to int32. - */ - final val Conv_U2 = new OpCode() - opcode(Conv_U2, CEE_CONV_U2 , "conv.u2" , 0xFFFFFFD1, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to natural int. 
- */ - final val Conv_I = new OpCode() - opcode(Conv_I, CEE_CONV_I , "conv.i" , 0xFFFFFFD3, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Converts the signed value on top of the evaluation stack to signed natural int, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_I = new OpCode() - opcode(Conv_Ovf_I, CEE_CONV_OVF_I , "conv.ovf.i", 0xFFFFFFD4, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Converts the signed value on top of the evaluation stack to unsigned natural int, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_U = new OpCode() - opcode(Conv_Ovf_U, CEE_CONV_OVF_U , "conv.ovf.u", 0xFFFFFFD5, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT) - - /** - * Adds two integers, performs an overflow check, and pushes the result - * onto the evaluation stack. - */ - final val Add_Ovf = new OpCode() - opcode(Add_Ovf, CEE_ADD_OVF , "add.ovf" , 0xFFFFFFD6, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT) - - /** - * Adds two unsigned integer values, performs an overflow check, and pushes the result - * onto the evaluation stack. - */ - final val Add_Ovf_Un = new OpCode() - opcode(Add_Ovf_Un, CEE_ADD_OVF_UN , "add.ovf.un", 0xFFFFFFD7, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT) - - /** - * Multiplies two integer values, performs an overflow check, and pushes the result - * onto the evaluation stack. - */ - final val Mul_Ovf = new OpCode() - opcode(Mul_Ovf, CEE_MUL_OVF , "mul.ovf" , 0xFFFFFFD8, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT) - - /** - * Multiplies two unsigned integer values , performs an overflow check , - * and pushes the result onto the evaluation stack. - */ - final val Mul_Ovf_Un = new OpCode() - opcode(Mul_Ovf_Un, CEE_MUL_OVF_UN , "mul.ovf.un", 0xFFFFFFD9, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT) - - /** - * Subtracts one integer value from another, performs an overflow check, - * and pushes the result onto the evaluation stack. - */ - final val Sub_Ovf = new OpCode() - opcode(Sub_Ovf, CEE_SUB_OVF , "sub.ovf" , 0xFFFFFFDA, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT) - - /** - * Subtracts one unsigned integer value from another, performs an overflow check, - * and pushes the result onto the evaluation stack. - */ - final val Sub_Ovf_Un = new OpCode() - opcode(Sub_Ovf_Un, CEE_SUB_OVF_UN, "sub.ovf.un", 0xFFFFFFDB, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT) - - /** - * Transfers control from the fault or finally clause of an exception block back to - * the Common Language Infrastructure (CLI) exception handler. - */ - final val Endfinally = new OpCode() - opcode(Endfinally, CEE_ENDFINALLY, "endfinally", 0xFFFFFFDC, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_RETURN) - - /** - * Exits a protected region of code, unconditionally tranferring control - * to a specific target instruction. - */ - final val Leave = new OpCode() - opcode(Leave, CEE_LEAVE, "leave", 0xFFFFFFDD, POP_NONE, PUSH_NONE, INLINE_TARGET, FLOW_BRANCH) - - /** - * Exits a protected region of code, unconditionally tranferring control - * to a target instruction (short form). - */ - final val Leave_S = new OpCode() - opcode(Leave_S, CEE_LEAVE_S, "leave.s", 0xFFFFFFDE, POP_NONE, PUSH_NONE, INLINE_TARGET_S, FLOW_BRANCH) - - /** - * Stores a value of type natural int at a supplied address. - */ - final val Stind_I = new OpCode() - opcode(Stind_I, CEE_STIND_I, "stind.i", 0xFFFFFFDF, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Converts the value on top of the evaluation stack to unsigned natural int, - * and extends it to natural int. 
- */ - final val Conv_U = new OpCode() - opcode(Conv_U, CEE_CONV_U, "conv.u", 0xFFFFFFE0, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Returns an unmanaged pointer to the argument list of the current method. - */ - final val Arglist = new OpCode() - opcode(Arglist, CEE_ARGLIST, "arglist" , 0xFFFFFE00, POP_NONE, PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Compares two values. If they are equal, the integer value 1 (int32) is pushed - * onto the evaluation stack otherwise 0 (int32) is pushed onto the evaluation stack. - */ - final val Ceq = new OpCode() - opcode(Ceq, CEE_CEQ, "ceq", 0xFFFFFE01, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Compares two values. If the first value is greater than the second, - * the integer value 1 (int32) is pushed onto the evaluation stack - * otherwise 0 (int32) is pushed onto the evaluation stack. - */ - final val Cgt = new OpCode() - opcode(Cgt, CEE_CGT, "cgt", 0xFFFFFE02, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Compares two unsigned or unordered values. If the first value is greater than - * the second, the integer value 1 (int32) is pushed onto the evaluation stack - * otherwise 0 (int32) is pushed onto the evaluation stack. - */ - final val Cgt_Un = new OpCode() - opcode(Cgt_Un, CEE_CGT_UN, "cgt.un", 0xFFFFFE03, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Compares two values. If the first value is less than the second, - * the integer value 1 (int32) is pushed onto the evaluation stack - * otherwise 0 (int32) is pushed onto the evaluation stack. - */ - final val Clt = new OpCode() - opcode(Clt, CEE_CLT, "clt" , 0xFFFFFE04, POP_1_1 , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Compares the unsigned or unordered values value1 and value2. If value1 is - * less than value2, then the integer value 1 (int32) is pushed onto the - * evaluation stack otherwise 0 (int32) is pushed onto the evaluation stack. - */ - final val Clt_Un = new OpCode() - opcode(Clt_Un, CEE_CLT_UN , "clt.un" , 0xFFFFFE05, POP_1_1 , PUSH_I , INLINE_NONE, FLOW_NEXT) - - /** - * Pushes an unmanaged pointer (type natural int) to the native code implementing - * a specific method onto the evaluation stack. - */ - final val Ldftn = new OpCode() - opcode(Ldftn, CEE_LDFTN , "ldftn" , 0xFFFFFE06, POP_NONE, PUSH_I , INLINE_METHOD, FLOW_NEXT) - - /** - * Pushes an unmanaged pointer (type natural int) to the native code implementing - * a particular virtual method associated with a specified object onto the evaluation stack. - */ - final val Ldvirtftn = new OpCode() - opcode(Ldvirtftn, CEE_LDVIRTFTN, "ldvirtftn", 0xFFFFFE07, POP_REF , PUSH_I , INLINE_METHOD, FLOW_NEXT) - - /** - * Loads an argument (referenced by a specified index value) onto the stack. - */ - final val Ldarg = new OpCode() - opcode(Ldarg, CEE_LDARG , "ldarg" , 0xFFFFFE09, POP_NONE, PUSH_1 , INLINE_VARIABLE , FLOW_NEXT) - - /** - * Load an argument address onto the evaluation stack. - */ - final val Ldarga = new OpCode() - opcode(Ldarga, CEE_LDARGA , "ldarga", 0xFFFFFE0A, POP_NONE, PUSH_I, INLINE_VARIABLE , FLOW_NEXT) - - /** - * Loads the local variable at a specific index onto the evaluation stack. - */ - final val Ldloc = new OpCode() - opcode(Ldloc, CEE_LDLOC, "ldloc", 0xFFFFFE0C, POP_NONE, PUSH_1 , INLINE_VARIABLE , FLOW_NEXT) - - /** - * Loads the address of the local variable at a specific index onto the evaluation stack. 
- */ - final val Ldloca = new OpCode() - opcode(Ldloca, CEE_LDLOCA, "ldloca", 0xFFFFFE0D, POP_NONE, PUSH_I, INLINE_VARIABLE , FLOW_NEXT) - - /** - * Stores the value on top of the evaluation stack in the argument slot at a specified index. - */ - final val Starg = new OpCode() - opcode(Starg, CEE_STARG, "starg", 0xFFFFFE0B, POP_1 , PUSH_NONE, INLINE_VARIABLE , FLOW_NEXT) - - /** - * Pops the current value from the top of the evaluation stack and stores it in a - * the local variable list at a specified index. - */ - final val Stloc = new OpCode() - opcode(Stloc, CEE_STLOC, "stloc", 0xFFFFFE0E, POP_1 , PUSH_NONE, INLINE_VARIABLE , FLOW_NEXT) - - /** - * Allocates a certain number of bytes from the local dynamic memory pool and pushes the - * address (a transient pointer, type *) of the first allocated Byte onto the evaluation stack. - */ - final val Localloc = new OpCode() - opcode(Localloc, CEE_LOCALLOC, "localloc" , 0xFFFFFE0F, POP_I, PUSH_I, INLINE_NONE, FLOW_NEXT) - - /** - * Transfers control from the filter clause of an exception back to the - * Common Language Infrastructure (CLI) exception handler. - */ - final val Endfilter = new OpCode() - opcode(Endfilter, CEE_ENDFILTER, "endfilter" , 0xFFFFFE11, POP_I , PUSH_NONE, INLINE_NONE, FLOW_RETURN) - - /** - * Indicates that an address currently atop the evaluation stack might not be aligned - * to the natural size of the immediately following ldind, stind, ldfld, stfld, ldobj, - * stobj, initblk, or cpblk instruction. - */ - final val Unaligned = new OpCode() - opcode(Unaligned, CEE_UNALIGNED, "unaligned.", 0xFFFFFE12, POP_NONE, PUSH_NONE, INLINE_I_S , FLOW_META) - - /** - * Specifies that an address currently atop the evaluation stack might be volatile, - * and the results of reading that location cannot be cached or that multiple stores - * to that location cannot be suppressed. - */ - final val Volatile = new OpCode() - opcode(Volatile, CEE_VOLATILE, "volatile." , 0xFFFFFE13, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_META) - - /** - * Performs a postfixed method call instruction such that the current method's stack - * frame is removed before the actual call instruction is executed. - */ - final val Tailcall = new OpCode() - opcode(Tailcall, CEE_TAILCALL, "tail." , 0xFFFFFE14, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_META) - - /** - * Initializes all the fields of the object at a specific address to a null reference - * or a 0 of the appropriate primitive type. - */ - final val Initobj = new OpCode() - opcode(Initobj, CEE_INITOBJ , "initobj" , 0xFFFFFE15, POP_I , PUSH_NONE, INLINE_TYPE, FLOW_NEXT) - - /** - * Copies a specified number bytes from a source address to a destination address . - */ - final val Cpblk = new OpCode() - opcode(Cpblk, CEE_CPBLK , "cpblk" , 0xFFFFFE17, POP_I_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Initializes a specified block of memory at a specific address to a given size - * and initial value. - */ - final val Initblk = new OpCode() - opcode(Initblk, CEE_INITBLK , "initblk" , 0xFFFFFE18, POP_I_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT) - - /** - * Rethrows the current exception. - */ - final val Rethrow = new OpCode() - opcode(Rethrow, CEE_RETHROW , "rethrow", 0xFFFFFE1A, POP_NONE , PUSH_NONE, INLINE_NONE, FLOW_THROW) - - /** - * Pushes the size, in bytes, of a supplied value type onto the evaluation stack. 
- */ - final val Sizeof = new OpCode() - opcode(Sizeof, CEE_SIZEOF, "sizeof", 0xFFFFFE1C, POP_NONE , PUSH_I , INLINE_TYPE, FLOW_NEXT) - - - - //########################################################################## -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala deleted file mode 100644 index 80e4267436..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala +++ /dev/null @@ -1,1205 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - - -/** - * Provides field representations of the Microsoft Intermediate Language (MSIL) - * instructions for emission by the ILGenerator class members (such as Emit). - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -object OpCodes { - - //########################################################################## - - /** - * Adds two values and pushes the result onto the evaluation stack. - */ - final val Add = OpCode.Add - - /** - * Fills space if bytecodes are patched. No meaningful operation is performed - * although a processing cycle can be consumed. - */ - final val Nop = OpCode.Nop - - /** - * Signals the Common Language Infrastructure (CLI) to inform the debugger that - * a break point has been tripped. - */ - final val Break = OpCode.Break - - /** - * Loads the argument at index 0 onto the evaluation stack. - */ - final val Ldarg_0 = OpCode.Ldarg_0 - - /** - * Loads the argument at index 1 onto the evaluation stack. - */ - final val Ldarg_1 = OpCode.Ldarg_1 - - /** - * Loads the argument at index 2 onto the evaluation stack. - */ - final val Ldarg_2 = OpCode.Ldarg_2 - - /** - * Loads the argument at index 3 onto the evaluation stack. - */ - final val Ldarg_3 = OpCode.Ldarg_3 - - /** - * Loads the local variable at index 0 onto the evaluation stack. - */ - final val Ldloc_0 = OpCode.Ldloc_0 - - /** - * Loads the local variable at index 1 onto the evaluation stack. - */ - final val Ldloc_1 = OpCode.Ldloc_1 - - /** - * Loads the local variable at index 2 onto the evaluation stack. - */ - final val Ldloc_2 = OpCode.Ldloc_2 - - /** - * Loads the local variable at index 3 onto the evaluation stack. - */ - final val Ldloc_3 = OpCode.Ldloc_3 - - /** - * Pops the current value from the top of the evaluation stack and - * stores it in a the local variable list at index 0. - */ - final val Stloc_0 = OpCode.Stloc_0 - - /** - * Pops the current value from the top of the evaluation stack and - * stores it in a the local variable list at index 1. - */ - final val Stloc_1 = OpCode.Stloc_1 - - /** - * Pops the current value from the top of the evaluation stack and - * stores it in a the local variable list at index 2. - */ - final val Stloc_2 = OpCode.Stloc_2 - - /** - * Pops the current value from the top of the evaluation stack and - * stores it in a the local variable list at index 3. - */ - final val Stloc_3 = OpCode.Stloc_3 - - /** - * Loads the argument (referenced by a specified short form index) - * onto the evaluation stack. - */ - final val Ldarg_S = OpCode.Ldarg_S - - /** - * Load an argument address, in short form, onto the evaluation stack. - */ - final val Ldarga_S = OpCode.Ldarga_S - - /** - * Loads the local variable at a specific index onto the evaluation stack, - * short form. - */ - final val Ldloc_S = OpCode.Ldloc_S - - /** - * Loads the address of the local variable at a specific index onto - * the evaluation stack, short form. 
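// A minimal usage sketch (not from the patched sources) of how the OpCodes aliases are
// meant to be consumed, assuming an ILGenerator that exposes an Emit(opcode: OpCode)
// member as the class comment above suggests; the emit API itself is not part of this patch.
import ch.epfl.lamp.compiler.msil.emit.{ILGenerator, OpCodes}

object EmitSketch {
  def emitReturnOne(il: ILGenerator) {
    il.Emit(OpCodes.Ldc_I4_1)   // push the int32 constant 1 onto the evaluation stack
    il.Emit(OpCodes.Ret)        // return it to the caller
  }
}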
- */ - final val Ldloca_S = OpCode.Ldloca_S - - /** - * Stores the value on top of the evaluation stack in the argument slot - * at a specified index, short form. - */ - final val Starg_S = OpCode.Starg_S - - /** - * Pops the current value from the top of the evaluation stack and stores it - * in a the local variable list at index (short form). - */ - final val Stloc_S = OpCode.Stloc_S - - /** - * Pushes a null reference (type O) onto the evaluation stack. - */ - final val Ldnull = OpCode.Ldnull - - /** - * Pushes the integer value of -1 onto the evaluation stack as an int32. - */ - final val Ldc_I4_M1 = OpCode.Ldc_I4_M1 - - /** - * Pushes the integer value of 0 onto the evaluation stack as an int32. - */ - final val Ldc_I4_0 = OpCode.Ldc_I4_0 - - /** - * Pushes the integer value of 1 onto the evaluation stack as an int32. - */ - final val Ldc_I4_1 = OpCode.Ldc_I4_1 - - /** - * Pushes the integer value of 2 onto the evaluation stack as an int32. - */ - final val Ldc_I4_2 = OpCode.Ldc_I4_2 - - /** - * Pushes the integer value of 3 onto the evaluation stack as an int32. - */ - final val Ldc_I4_3 = OpCode.Ldc_I4_3 - - /** - * Pushes the integer value of 4 onto the evaluation stack as an int32. - */ - final val Ldc_I4_4 = OpCode.Ldc_I4_4 - - /** - * Pushes the integer value of 5 onto the evaluation stack as an int32. - */ - final val Ldc_I4_5 = OpCode.Ldc_I4_5 - - /** - * Pushes the integer value of 6 onto the evaluation stack as an int32. - */ - final val Ldc_I4_6 = OpCode.Ldc_I4_6 - - /** - * Pushes the integer value of 7 onto the evaluation stack as an int32. - */ - final val Ldc_I4_7 = OpCode.Ldc_I4_7 - - /** - * Pushes the integer value of 8 onto the evaluation stack as an int32. - */ - final val Ldc_I4_8 = OpCode.Ldc_I4_8 - - /** - * Pushes the supplied int8 value onto the evaluation stack as an int32, short form. - */ - final val Ldc_I4_S = OpCode.Ldc_I4_S - - /** - * Pushes a supplied value of type int32 onto the evaluation stack as an int32. - */ - final val Ldc_I4 = OpCode.Ldc_I4 - - /** - * Pushes a supplied value of type int64 onto the evaluation stack as an int64. - */ - final val Ldc_I8 = OpCode.Ldc_I8 - - /** - * Pushes a supplied value of type float32 onto the evaluation stack as type F (float). - */ - final val Ldc_R4 = OpCode.Ldc_R4 - - /** - * Pushes a supplied value of type float64 onto the evaluation stack as type F (float). - */ - final val Ldc_R8 = OpCode.Ldc_R8 - - /** - * Copies the current topmost value on the evaluation stack, and then pushes the copy - * onto the evaluation stack. - */ - final val Dup = OpCode.Dup - - /** - * Removes the value currently on top of the evaluation stack. - */ - final val Pop = OpCode.Pop - - /** - * Exits current method and jumps to specified method. - */ - final val Jmp = OpCode.Jmp - - /** - * Calls the method indicated by the passed method descriptor. - */ - final val Call = OpCode.Call - - /** - * constrained. prefix - */ - final val Constrained = OpCode.Constrained - - /** - * readonly. prefix - */ - final val Readonly = OpCode.Readonly - - /** - * Calls the method indicated on the evaluation stack (as a pointer to an entry point) - * with arguments described by a calling convention. - */ - final val Calli = OpCode.Calli - - /** - * Returns from the current method, pushing a return value (if present) from the caller's - * evaluation stack onto the callee's evaluation stack. - */ - final val Ret = OpCode.Ret - - /** - * Unconditionally transfers control to a target instruction (short form). 
- */ - final val Br_S = OpCode.Br_S - - /** - * Transfers control to a target instruction if value is false, a null reference, or zero. - */ - final val Brfalse_S = OpCode.Brfalse_S - - /** - * Transfers control to a target instruction (short form) if value is true, not null, or non-zero. - */ - final val Brtrue_S = OpCode.Brtrue_S - - /** - * Transfers control to a target instruction (short form) if two values are equal. - */ - final val Beq_S = OpCode.Beq_S - - /** - * Transfers control to a target instruction (short form) if the first value is greater than - * or equal to the second value. - */ - final val Bge_S = OpCode.Bge_S - - /** - * Transfers control to a target instruction (short form) if the first value is greater than - * the second value. - */ - final val Bgt_S = OpCode.Bgt_S - - /** - * Transfers control to a target instruction (short form) if the first value is less than - * or equal to the second value. - */ - final val Ble_S = OpCode.Ble_S - - /** - * Transfers control to a target instruction (short form) if the first value is less than - * the second value. - */ - final val Blt_S = OpCode.Blt_S - - /** - * Transfers control to a target instruction (short form) when two unsigned integer values - * or unordered float values are not equal. - */ - final val Bne_Un_S = OpCode.Bne_Un_S - - /** - * Transfers control to a target instruction (short form) if the first value is greather - * than the second value, when comparing unsigned integer values or unordered float values. - */ - final val Bge_Un_S = OpCode.Bge_Un_S - - /** - * Transfers control to a target instruction (short form) if the first value is greater than - * the second value, when comparing unsigned integer values or unordered float values. - */ - final val Bgt_Un_S = OpCode.Bgt_Un_S - - /** - * Transfers control to a target instruction (short form) if the first value is less than - * or equal to the second value, when comparing unsigned integer values or unordered float values. - */ - final val Ble_Un_S = OpCode.Ble_Un_S - - /** - * Transfers control to a target instruction (short form) if the first value is less than - * the second value, when comparing unsigned integer values or unordered float values. - */ - final val Blt_Un_S = OpCode.Blt_Un_S - - /** - * Unconditionally transfers control to a target instruction. - */ - final val Br = OpCode.Br - - /** - * Transfers control to a target instruction if value is false, a null reference - * (Nothing in Visual Basic), or zero. - */ - final val Brfalse = OpCode.Brfalse - - /** - * Transfers control to a target instruction if value is true, not null, or non-zero. - */ - final val Brtrue = OpCode.Brtrue - - /** - * Transfers control to a target instruction if two values are equal. - */ - final val Beq = OpCode.Beq - - /** - * Transfers control to a target instruction if the first value is greater than or - * equal to the second value. - */ - final val Bge = OpCode.Bge - - /** - * Transfers control to a target instruction if the first value is greater than the second value. - */ - final val Bgt = OpCode.Bgt - - /** - * Transfers control to a target instruction if the first value is less than or equal - * to the second value. - */ - final val Ble = OpCode.Ble - - /** - * Transfers control to a target instruction if the first value is less than the second value. - */ - final val Blt = OpCode.Blt - - /** - * Transfers control to a target instruction when two unsigned integer values or - * unordered float values are not equal. 
- */ - final val Bne_Un = OpCode.Bne_Un - - /** - * Transfers control to a target instruction if the first value is greather than - * the second value, when comparing unsigned integer values or unordered float values. - */ - final val Bge_Un = OpCode.Bge_Un - - /** - * Transfers control to a target instruction if the first value is greater than the - * second value, when comparing unsigned integer values or unordered float values. - */ - final val Bgt_Un = OpCode.Bgt_Un - - /** - * Transfers control to a target instruction if the first value is less than or equal to - * the second value, when comparing unsigned integer values or unordered float values. - */ - final val Ble_Un = OpCode.Ble_Un - - /** - * Transfers control to a target instruction if the first value is less than the second value, - * when comparing unsigned integer values or unordered float values. - */ - final val Blt_Un = OpCode.Blt_Un - - /** - * Implements a jump table. - */ - final val Switch = OpCode.Switch - - /** - * Loads a value of type int8 as an int32 onto the evaluation stack indirectly. - */ - final val Ldind_I1 = OpCode.Ldind_I1 - - /** - * Loads a value of type int16 as an int32 onto the evaluation stack indirectly. - */ - final val Ldind_I2 = OpCode.Ldind_I2 - - /** - * Loads a value of type int32 as an int32 onto the evaluation stack indirectly. - */ - final val Ldind_I4 = OpCode.Ldind_I4 - - /** - * Loads a value of type int64 as an int64 onto the evaluation stack indirectly. - */ - final val Ldind_I8 = OpCode.Ldind_I8 - - /** - * Loads a value of type natural int as a natural int onto the evaluation stack indirectly. - */ - final val Ldind_I = OpCode.Ldind_I - - /** - * Loads a value of type float32 as a type F (float) onto the evaluation stack indirectly. - */ - final val Ldind_R4 = OpCode.Ldind_R4 - - /** - * Loads a value of type float64 as a type F (float) onto the evaluation stack indirectly. - */ - final val Ldind_R8 = OpCode.Ldind_R8 - - /** - * Loads an object reference as a type O (object reference) onto the evaluation stack indirectly. - */ - final val Ldind_Ref = OpCode.Ldind_Ref - - /** - * Loads a value of type unsigned int8 as an int32 onto the evaluation stack indirectly. - */ - final val Ldind_U1 = OpCode.Ldind_U1 - - /** - * Loads a value of type unsigned int16 as an int32 onto the evaluation stack indirectly. - */ - final val Ldind_U2 = OpCode.Ldind_U2 - - /** - * Loads a value of type unsigned int32 as an int32 onto the evaluation stack indirectly. - */ - final val Ldind_U4 = OpCode.Ldind_U4 - - /** - * Stores a object reference value at a supplied address. - */ - final val Stind_Ref = OpCode.Stind_Ref - - /** - * Stores a value of type int8 at a supplied address. - */ - final val Stind_I1 = OpCode.Stind_I1 - - /** - * Stores a value of type int16 at a supplied address. - */ - final val Stind_I2 = OpCode.Stind_I2 - - /** - * Stores a value of type int32 at a supplied address. - */ - final val Stind_I4 = OpCode.Stind_I4 - - /** - * Stores a value of type int64 at a supplied address. - */ - final val Stind_I8 = OpCode.Stind_I8 - - /** - * Stores a value of type float32 at a supplied address. - */ - final val Stind_R4 = OpCode.Stind_R4 - - /** - * Stores a value of type float64 at a supplied address. - */ - final val Stind_R8 = OpCode.Stind_R8 - - /** - * Subtracts one value from another and pushes the result onto the evaluation stack. - */ - final val Sub = OpCode.Sub - - /** - * Multiplies two values and pushes the result on the evaluation stack. 
- */ - final val Mul = OpCode.Mul - - /** - * Divides two values and pushes the result as a floating-point (type F) or - * quotient (type int32) onto the evaluation stack. - */ - final val Div = OpCode.Div - - /** - * Divides two unsigned integer values and pushes the result (int32) onto the evaluation stack. - */ - final val Div_Un = OpCode.Div_Un - - /** - * Divides two values and pushes the remainder onto the evaluation stack. - */ - final val Rem = OpCode.Rem - - /** - * Divides two unsigned values and pushes the remainder onto the evaluation stack. - */ - final val Rem_Un = OpCode.Rem_Un - - /** - * Computes the bitwise AND of two values and pushes the result onto the evaluation stack. - */ - final val And = OpCode.And - - /** - * Compute the bitwise complement of the two integer values on top of the stack and - * pushes the result onto the evaluation stack. - */ - final val Or = OpCode.Or - - /** - * Computes the bitwise XOR of the top two values on the evaluation stack, - * pushing the result onto the evaluation stack. - */ - final val Xor = OpCode.Xor - - /** - * Shifts an integer value to the left (in zeroes) by a specified number of bits, - * pushing the result onto the evaluation stack. - */ - final val Shl = OpCode.Shl - - /** - * Shifts an integer value (in sign) to the right by a specified number of bits, - * pushing the result onto the evaluation stack. - */ - final val Shr = OpCode.Shr - - /** - * Shifts an unsigned integer value (in zeroes) to the right by a specified number of bits, - * pushing the result onto the evaluation stack. - */ - final val Shr_Un = OpCode.Shr_Un - - /** - * Negates a value and pushes the result onto the evaluation stack. - */ - final val Neg = OpCode.Neg - - /** - * Computes the bitwise complement of the integer value on top of the stack and pushes - * the result onto the evaluation stack as the same type. - */ - final val Not = OpCode.Not - - /** - * Converts the value on top of the evaluation stack to int8, then extends (pads) it to int32. - */ - final val Conv_I1 = OpCode.Conv_I1 - - /** - * Converts the value on top of the evaluation stack to int16, then extends (pads) it to int32. - */ - final val Conv_I2 = OpCode.Conv_I2 - - /** - * Converts the value on top of the evaluation stack to int32. - */ - final val Conv_I4 = OpCode.Conv_I4 - - /** - * Converts the value on top of the evaluation stack to int64. - */ - final val Conv_I8 = OpCode.Conv_I8 - - /** - * Converts the value on top of the evaluation stack to float32. - */ - final val Conv_R4 = OpCode.Conv_R4 - - /** - * Converts the value on top of the evaluation stack to float64. - */ - final val Conv_R8 = OpCode.Conv_R8 - - /** - * Converts the value on top of the evaluation stack to unsigned int32, and extends it to int32. - */ - final val Conv_U4 = OpCode.Conv_U4 - - /** - * Converts the value on top of the evaluation stack to unsigned int64, and extends it to int64. - */ - final val Conv_U8 = OpCode.Conv_U8 - - /** - * Calls a late-bound method on an object, pushing the return value onto the evaluation stack. - */ - final val Callvirt = OpCode.Callvirt - - /** - * Copies the value type located at the address of an object (type &, * or natural int) - * to the address of the destination object (type &, * or natural int). - */ - final val Cpobj = OpCode.Cpobj - - /** - * Copies the value type object pointed to by an address to the top of the evaluation stack. - */ - final val Ldobj = OpCode.Ldobj - - /** - * Pushes a new object reference to a string literal stored in the metadata. 
- */ - final val Ldstr = OpCode.Ldstr - - /** - * Creates a new object or a new instance of a value type, pushing an object reference - * (type O) onto the evaluation stack. - */ - final val Newobj = OpCode.Newobj - - /** - * Attempts to cast an object passed by reference to the specified class. - */ - final val Castclass = OpCode.Castclass - - /** - * Tests whether an object reference (type O) is an instance of a particular class. - */ - final val Isinst = OpCode.Isinst - - /** - * Converts the unsigned integer value on top of the evaluation stack to float32. - */ - final val Conv_R_Un = OpCode.Conv_R_Un - - /** - * Converts the boxed representation of a value type to its unboxed form. - */ - final val Unbox = OpCode.Unbox - - /** - * Throws the exception object currently on the evaluation stack. - */ - final val Throw = OpCode.Throw - - /** - * Finds the value of a field in the object whose reference is currently - * on the evaluation stack. - */ - final val Ldfld = OpCode.Ldfld - - /** - * Finds the address of a field in the object whose reference is currently - * on the evaluation stack. - */ - final val Ldflda = OpCode.Ldflda - - /** - * Pushes the value of a static field onto the evaluation stack. - */ - final val Ldsfld = OpCode.Ldsfld - - /** - * Pushes the address of a static field onto the evaluation stack. - */ - final val Ldsflda = OpCode.Ldsflda - - /** - * Replaces the value stored in the field of an object reference or pointer with a new value. - */ - final val Stfld = OpCode.Stfld - - /** - * Replaces the value of a static field with a value from the evaluation stack. - */ - final val Stsfld = OpCode.Stsfld - - /** - * Copies a value of a specified type from the evaluation stack into a supplied memory address. - */ - final val Stobj = OpCode.Stobj - - /** - * Converts the unsigned value on top of the evaluation stack to signed int8 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_I1_Un = OpCode.Conv_Ovf_I1_Un - - /** - * Converts the unsigned value on top of the evaluation stack to signed int16 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_I2_Un = OpCode.Conv_Ovf_I2_Un - - /** - * Converts the unsigned value on top of the evaluation stack to signed int32, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_I4_Un = OpCode.Conv_Ovf_I4_Un - - /** - * Converts the unsigned value on top of the evaluation stack to signed int64, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_I8_Un = OpCode.Conv_Ovf_I8_Un - - /** - * Converts the unsigned value on top of the evaluation stack to signed natural int, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_I_Un = OpCode.Conv_Ovf_I_Un - - /** - * Converts the unsigned value on top of the evaluation stack to unsigned int8 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_U1_Un = OpCode.Conv_Ovf_U1_Un - - /** - * Converts the unsigned value on top of the evaluation stack to unsigned int16 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_U2_Un = OpCode.Conv_Ovf_U2_Un - - /** - * Converts the unsigned value on top of the evaluation stack to unsigned int32, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_U4_Un = OpCode.Conv_Ovf_U4_Un - - /** - * Converts the unsigned value on top of the evaluation stack to unsigned int64, - * throwing OverflowException on overflow. 
- */ - final val Conv_Ovf_U8_Un = OpCode.Conv_Ovf_U8_Un - - /** - * Converts the unsigned value on top of the evaluation stack to unsigned natural int, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_U_Un = OpCode.Conv_Ovf_U_Un - - /** - * Converts a value type to an object reference (type O). - */ - final val Box = OpCode.Box - - /** - * Pushes an object reference to a new zero-based, one-dimensional array whose elements - * are of a specific type onto the evaluation stack. - */ - final val Newarr = OpCode.Newarr - - /** - * Pushes the number of elements of a zero-based, one-dimensional array - * onto the evaluation stack. - */ - final val Ldlen = OpCode.Ldlen - - /** - * Loads the address of the array element at a specified array index onto - * the top of the evaluation stack as type & (managed pointer). - */ - final val Ldelema = OpCode.Ldelema - - /** - * Loads the element with type natural int at a specified array index onto the top - * of the evaluation stack as a natural int. - */ - final val Ldelem_I = OpCode.Ldelem_I - - /** - * Loads the element with type int8 at a specified array index onto the top of the - * evaluation stack as an int32. - */ - final val Ldelem_I1 = OpCode.Ldelem_I1 - - /** - * Loads the element with type int16 at a specified array index onto the top of - * the evaluation stack as an int32. - */ - final val Ldelem_I2 = OpCode.Ldelem_I2 - - /** - * Loads the element with type int32 at a specified array index onto the top of the - * evaluation stack as an int32. - */ - final val Ldelem_I4 = OpCode.Ldelem_I4 - - /** - * Loads the element with type int64 at a specified array index onto the top of the - * evaluation stack as an int64. - */ - final val Ldelem_I8 = OpCode.Ldelem_I8 - - /** - * Loads the element with type float32 at a specified array index onto the top of the - * evaluation stack as type F (float) - */ - final val Ldelem_R4 = OpCode.Ldelem_R4 - - /** - * Loads the element with type float64 at a specified array index onto the top of the - * evaluation stack as type F (float) . - */ - final val Ldelem_R8 = OpCode.Ldelem_R8 - - /** - * Loads the element containing an object reference at a specified array index onto - * the top of the evaluation stack as type O (object reference). - */ - final val Ldelem_Ref = OpCode.Ldelem_Ref - - /** - * Loads the element with type unsigned int8 at a specified array index onto the top - * of the evaluation stack as an int32. - */ - final val Ldelem_U1 = OpCode.Ldelem_U1 - - /** - * Loads the element with type unsigned int16 at a specified array index onto the top - * of the evaluation stack as an int32. - */ - final val Ldelem_U2 = OpCode.Ldelem_U2 - - /** - * Loads the element with type unsigned int32 at a specified array index onto the top - * of the evaluation stack as an int32. - */ - final val Ldelem_U4 = OpCode.Ldelem_U4 - - /** - * Replaces the array element at a given index with the natural int value on - * the evaluation stack. - */ - final val Stelem_I = OpCode.Stelem_I - - /** - * Replaces the array element at a given index with the int8 value on the evaluation stack. - */ - final val Stelem_I1 = OpCode.Stelem_I1 - - /** - * Replaces the array element at a given index with the int16 value on the evaluation stack. - */ - final val Stelem_I2 = OpCode.Stelem_I2 - - /** - * Replaces the array element at a given index with the int32 value on the evaluation stack. 
- */ - final val Stelem_I4 = OpCode.Stelem_I4 - - /** - * Replaces the array element at a given index with the int64 value on the evaluation stack. - */ - final val Stelem_I8 = OpCode.Stelem_I8 - - /** - * Replaces the array element at a given index with the float32 value on the evaluation stack. - */ - final val Stelem_R4 = OpCode.Stelem_R4 - - /** - * Replaces the array element at a given index with the float64 value on the evaluation stack. - */ - final val Stelem_R8 = OpCode.Stelem_R8 - - /** - * Replaces the array element at a given index with the object ref value (type O) - * on the evaluation stack. - */ - final val Stelem_Ref = OpCode.Stelem_Ref - - /** - * Converts the signed value on top of the evaluation stack to signed int8 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_I1 = OpCode.Conv_Ovf_I1 - - /** - * Converts the signed value on top of the evaluation stack to signed int16 and - * extending it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_I2 = OpCode.Conv_Ovf_I2 - - /** - * Converts the signed value on top of the evaluation stack to signed int32, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_I4 = OpCode.Conv_Ovf_I4 - - /** - * Converts the signed value on top of the evaluation stack to signed int64, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_I8 = OpCode.Conv_Ovf_I8 - - /** - * Converts the signed value on top of the evaluation stack to unsigned int8 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_U1 = OpCode.Conv_Ovf_U1 - - /** - * Converts the signed value on top of the evaluation stack to unsigned int16 and - * extends it to int32, throwing OverflowException on overflow. - */ - final val Conv_Ovf_U2 = OpCode.Conv_Ovf_U2 - - /** - * Converts the signed value on top of the evaluation stack to unsigned int32, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_U4 = OpCode.Conv_Ovf_U4 - - /** - * Converts the signed value on top of the evaluation stack to unsigned int64, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_U8 = OpCode.Conv_Ovf_U8 - - /** - * Retrieves the address (type &) embedded in a typed reference. - */ - final val Refanyval = OpCode.Refanyval - - /** - * Retrieves the type token embedded in a typed reference . - */ - final val Refanytype = OpCode.Refanytype - - /** - * Throws ArithmeticException if value is not a finite number. - */ - final val Ckfinite = OpCode.Ckfinite - - /** - * Pushes a typed reference to an instance of a specific type onto the evaluation stack. - */ - final val Mkrefany = OpCode.Mkrefany - - /** - * Converts a metadata token to its runtime representation, pushing it onto the evaluation stack. - */ - final val Ldtoken = OpCode.Ldtoken - - /** - * Converts the value on top of the evaluation stack to unsigned int8, and extends it to int32. - */ - final val Conv_U1 = OpCode.Conv_U1 - - /** - * Converts the value on top of the evaluation stack to unsigned int16, and extends it to int32. - */ - final val Conv_U2 = OpCode.Conv_U2 - - /** - * Converts the value on top of the evaluation stack to natural int. - */ - final val Conv_I = OpCode.Conv_I - - /** - * Converts the signed value on top of the evaluation stack to signed natural int, - * throwing OverflowException on overflow. 
- */ - final val Conv_Ovf_I = OpCode.Conv_Ovf_I - - /** - * Converts the signed value on top of the evaluation stack to unsigned natural int, - * throwing OverflowException on overflow. - */ - final val Conv_Ovf_U = OpCode.Conv_Ovf_U - - /** - * Adds two integers, performs an overflow check, and pushes the result - * onto the evaluation stack. - */ - final val Add_Ovf = OpCode.Add_Ovf - - /** - * Adds two unsigned integer values, performs an overflow check, and pushes the result - * onto the evaluation stack. - */ - final val Add_Ovf_Un = OpCode.Add_Ovf_Un - - /** - * Multiplies two integer values, performs an overflow check, and pushes the result - * onto the evaluation stack. - */ - final val Mul_Ovf = OpCode.Mul_Ovf - - /** - * Multiplies two unsigned integer values , performs an overflow check , - * and pushes the result onto the evaluation stack. - */ - final val Mul_Ovf_Un = OpCode.Mul_Ovf_Un - - /** - * Subtracts one integer value from another, performs an overflow check, - * and pushes the result onto the evaluation stack. - */ - final val Sub_Ovf = OpCode.Sub_Ovf - - /** - * Subtracts one unsigned integer value from another, performs an overflow check, - * and pushes the result onto the evaluation stack. - */ - final val Sub_Ovf_Un = OpCode.Sub_Ovf_Un - - /** - * Transfers control from the fault or finally clause of an exception block back to - * the Common Language Infrastructure (CLI) exception handler. - */ - final val Endfinally = OpCode.Endfinally - - /** - * Exits a protected region of code, unconditionally tranferring control - * to a specific target instruction. - */ - final val Leave = OpCode.Leave - - /** - * Exits a protected region of code, unconditionally tranferring control - * to a target instruction (short form). - */ - final val Leave_S = OpCode.Leave_S - - /** - * Stores a value of type natural int at a supplied address. - */ - final val Stind_I = OpCode.Stind_I - - /** - * Converts the value on top of the evaluation stack to unsigned natural int, - * and extends it to natural int. - */ - final val Conv_U = OpCode.Conv_U - - /** - * Returns an unmanaged pointer to the argument list of the current method. - */ - final val Arglist = OpCode.Arglist - - /** - * Compares two values. If they are equal, the integer value 1 (int32) is pushed - * onto the evaluation stack otherwise 0 (int32) is pushed onto the evaluation stack. - */ - final val Ceq = OpCode.Ceq - - /** - * Compares two values. If the first value is greater than the second, - * the integer value 1 (int32) is pushed onto the evaluation stack - * otherwise 0 (int32) is pushed onto the evaluation stack. - */ - final val Cgt = OpCode.Cgt - - /** - * Compares two unsigned or unordered values. If the first value is greater than - * the second, the integer value 1 (int32) is pushed onto the evaluation stack - * otherwise 0 (int32) is pushed onto the evaluation stack. - */ - final val Cgt_Un = OpCode.Cgt_Un - - /** - * Compares two values. If the first value is less than the second, - * the integer value 1 (int32) is pushed onto the evaluation stack - * otherwise 0 (int32) is pushed onto the evaluation stack. - */ - final val Clt = OpCode.Clt - - /** - * Compares the unsigned or unordered values value1 and value2. If value1 is - * less than value2, then the integer value 1 (int32) is pushed onto the - * evaluation stack otherwise 0 (int32) is pushed onto the evaluation stack. 
- */ - final val Clt_Un = OpCode.Clt_Un - - /** - * Pushes an unmanaged pointer (type natural int) to the native code implementing - * a specific method onto the evaluation stack. - */ - final val Ldftn = OpCode.Ldftn - - /** - * Pushes an unmanaged pointer (type natural int) to the native code implementing - * a particular virtual method associated with a specified object onto the evaluation stack. - */ - final val Ldvirtftn = OpCode.Ldvirtftn - - /** - * Loads an argument (referenced by a specified index value) onto the stack. - */ - final val Ldarg = OpCode.Ldarg - - /** - * Load an argument address onto the evaluation stack. - */ - final val Ldarga = OpCode.Ldarga - - /** - * Loads the local variable at a specific index onto the evaluation stack. - */ - final val Ldloc = OpCode.Ldloc - - /** - * Loads the address of the local variable at a specific index onto the evaluation stack. - */ - final val Ldloca = OpCode.Ldloca - - /** - * Stores the value on top of the evaluation stack in the argument slot at a specified index. - */ - final val Starg = OpCode.Starg - - /** - * Pops the current value from the top of the evaluation stack and stores it in a - * the local variable list at a specified index. - */ - final val Stloc = OpCode.Stloc - - /** - * Allocates a certain number of bytes from the local dynamic memory pool and pushes the - * address (a transient pointer, type *) of the first allocated Byte onto the evaluation stack. - */ - final val Localloc = OpCode.Localloc - - /** - * Transfers control from the filter clause of an exception back to the - * Common Language Infrastructure (CLI) exception handler. - */ - final val Endfilter = OpCode.Endfilter - - /** - * Indicates that an address currently atop the evaluation stack might not be aligned - * to the natural size of the immediately following ldind, stind, ldfld, stfld, ldobj, - * stobj, initblk, or cpblk instruction. - */ - final val Unaligned = OpCode.Unaligned - - /** - * Specifies that an address currently atop the evaluation stack might be volatile, - * and the results of reading that location cannot be cached or that multiple stores - * to that location cannot be suppressed. - */ - final val Volatile = OpCode.Volatile - - /** - * Performs a postfixed method call instruction such that the current method's stack - * frame is removed before the actual call instruction is executed. - */ - final val Tailcall = OpCode.Tailcall - - /** - * Initializes all the fields of the object at a specific address to a null reference - * or a 0 of the appropriate primitive type. - */ - final val Initobj = OpCode.Initobj - - /** - * Copies a specified number bytes from a source address to a destination address . - */ - final val Cpblk = OpCode.Cpblk - - /** - * Initializes a specified block of memory at a specific address to a given size - * and initial value. - */ - final val Initblk = OpCode.Initblk - - /** - * Rethrows the current exception. - */ - final val Rethrow = OpCode.Rethrow - - /** - * Pushes the size, in bytes, of a supplied value type onto the evaluation stack. 
- */ - final val Sizeof = OpCode.Sizeof - - //########################################################################## -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala deleted file mode 100644 index 8f9d81a8b0..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import ch.epfl.lamp.compiler.msil.Type -import ch.epfl.lamp.compiler.msil.ConstructorInfo -import ch.epfl.lamp.compiler.msil.ParameterInfo -import java.io.IOException - -/** - * Creates or associates parameter information. - * Parameter attributes need to consistent with the method signature. - * If you specify Out attributes for a parameter, you should ensure that - * the type of that method parameter is a ByRef type - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -class ParameterBuilder(name: String, tpe: Type, attr: Int, pos: Int) - extends ParameterInfo(name, tpe, attr, pos) - with ICustomAttributeSetter - with Visitable -{ - - //########################################################################## - - /** Sets a custom attribute. */ - def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) { - addCustomAttribute(constr, value) - } - - //########################################################################## - - /** The apply method for a visitor */ - @throws(classOf[IOException]) - def apply(v: Visitor) { - v.caseParameterBuilder(this) - } - - //########################################################################## -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala deleted file mode 100644 index 50e9f45373..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala +++ /dev/null @@ -1,93 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies in MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import java.io.FileWriter -import java.io.BufferedWriter -import java.io.PrintWriter -import java.io.IOException -import java.util.Iterator -import java.util.HashMap -import java.util.Arrays - -import ch.epfl.lamp.compiler.msil._ -import ch.epfl.lamp.compiler.msil.emit -import ch.epfl.lamp.compiler.msil.util.Table - -/** - * The MSIL printer Visitor. It prints a complete - * assembly in a single file that can be compiled by ilasm. 
- * - * @author Nikolay Mihaylov - * @author Daniel Lorch - * @version 1.0 - */ -final class SingleFileILPrinterVisitor(_fileName: String) extends ILPrinterVisitor { - var fileName: String = _fileName - - out = new PrintWriter(new BufferedWriter(new FileWriter(fileName))) - - /** - * Visit an AssemblyBuilder - */ - @throws(classOf[IOException]) - def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder) { - ILPrinterVisitor.currAssembly = assemblyBuilder - - // first get the entryPoint - this.entryPoint = assemblyBuilder.EntryPoint - - // all external assemblies - as = assemblyBuilder.getExternAssemblies() - scala.util.Sorting.quickSort(as)(assemblyNameComparator) // Arrays.sort(as, assemblyNameComparator) - - assemblyBuilder.generatedFiles += fileName - printAssemblyBoilerplate() - - // print each module - val m: Array[Module] = assemblyBuilder.GetModules() - nomembers = true - for(i <- 0 until m.length) { - print(m(i).asInstanceOf[ModuleBuilder]) - } - - nomembers = false - for(i <- 0 until m.length) { - print(m(i).asInstanceOf[ModuleBuilder]) - } - // close out file - out.close() - ILPrinterVisitor.currAssembly = null - } - - /** - * Visit a ModuleBuilder - */ - @throws(classOf[IOException]) - def caseModuleBuilder(module: ModuleBuilder) { - // print module declaration - currentModule = module - if (nomembers) { - print(".module \'"); print(module.Name); println("\'") - printAttributes(module) - } - - if (!module.globalsCreated) - module.CreateGlobalFunctions() - - val m: Array[MethodInfo] = module.GetMethods() - for(i <- 0 until m.length) { - print(m(i).asInstanceOf[MethodBuilder]) - } - - val t: Array[Type] = module.GetTypes() - for(i <- 0 until t.length) { - print(t(i).asInstanceOf[TypeBuilder]) - } - currentModule = null - } - -} // class SingleFileILPrinterVisitor diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala deleted file mode 100644 index 0b0b16da65..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala +++ /dev/null @@ -1,261 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import ch.epfl.lamp.compiler.msil._ - -import ch.epfl.lamp.compiler.msil.util.PECustomMod - -import java.io.IOException - -/** - * Defines and creates new instances of classes during runtime. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -class TypeBuilder (module: Module, attributes: Int, fullName: String, baseType: Type, interfaces: Array[Type], declType: Type) - extends Type(module, attributes, fullName, baseType, interfaces, declType, 0) - with ICustomAttributeSetter - with Visitable -{ - import TypeBuilder._ - - //########################################################################## - // public members - - /** 'Bakes' the type. 
*/ - def CreateType(): Type = { - fields = fieldBuilders.toArray // (new Array[FieldInfo](fieldBuilders.size())).asInstanceOf[Array[FieldInfo]] - methods = methodBuilders.toArray // (new Array[MethodInfo](methodBuilders.size())).asInstanceOf[Array[MethodInfo]] - constructors = constructorBuilders.toArray // (new Array[ConstructorInfo](constructorBuilders.size())).asInstanceOf[Array[ConstructorInfo]] - nestedTypes = nestedTypeBuilders.toArray // (new Array[Type](nestedTypeBuilders.size())).asInstanceOf[Array[Type]] - - raw = false - if (DeclaringType == null) - Module.asInstanceOf[ModuleBuilder].addType(this) - return this - } - - /** - * Adds a new field to the class, with the given name, attributes and field type. The location has no custom mods. - */ - def DefineField(name: String, fieldType: Type, attrs: Short): FieldBuilder = { - val fieldTypeWithCustomMods = new PECustomMod(fieldType, null) - DefineField(name, fieldTypeWithCustomMods, attrs) - } - - /** - * Adds a new field to the class, with the given name, attributes and (field type , custom mods) combination. - */ - def DefineField(name: String, fieldTypeWithMods: PECustomMod, attrs: Short): FieldBuilder = { - val field: FieldBuilder = new FieldBuilder(name, this, attrs, fieldTypeWithMods) - fieldBuilders += field - return field - } - - /** - * Adds a new method to the class, with the given name and - * method signature. - */ - def DefineMethod(name: String, attrs: Short, returnType: Type, paramTypes: Array[Type]): MethodBuilder = { - val method = new MethodBuilder(name, this, attrs, returnType, paramTypes) - val methods = methodBuilders.iterator - while(methods.hasNext) { - val m = methods.next().asInstanceOf[MethodInfo] - if (methodsEqual(m, method)) { - throw new RuntimeException("["+ Assembly() + "] Method has already been defined: " + m) - } - } - methodBuilders += method - return method - } - - /** - * Adds a new constructor to the class, with the given attributes - * and signature. - */ - def DefineConstructor(attrs: Short, callingConvention: Short, paramTypes: Array[Type]): ConstructorBuilder = { - val constr = new ConstructorBuilder(this, attrs, paramTypes) - val iter = constructorBuilders.iterator - while(iter.hasNext) { - val c = iter.next().asInstanceOf[ConstructorInfo] - if (constructorsEqual(c, constr)) { - throw new RuntimeException("["+ Assembly() + "] Constructor has already been defined: " + c) - } - } - constructorBuilders += constr - return constr - } - - /** - * Defines a nested type given its name. - */ - def DefineNestedType(name: String, attributes: Int, baseType: Type, interfaces: Array[Type]): TypeBuilder = { - val nested = nestedTypeBuilders.iterator - while(nested.hasNext) { - val nt = nested.next - if (nt.Name.equals(name)) { - val message = "Nested type " + name + " has already been defined: " + nt - throw new RuntimeException(message) - } - } - val t = new TypeBuilder(Module, attributes, name, baseType, interfaces, this) - nestedTypeBuilders += t - return t - } - - /** Get the field with the corresponding name. */ - override def GetField(name: String): FieldInfo = { - testRaw(name) - return super.GetField(name) - } - - /** Get all fields of the current Type. */ - override def GetFields(): Array[FieldInfo] = { - testRaw("") - return super.GetFields() - } - - /** - * Searches for a public instance constructor whose parameters - * match the types in the specified array. 
- */ - override def GetConstructor(params: Array[Type]): ConstructorInfo = { - testRaw(".ctor" + types2String(params)) - return super.GetConstructor(params) - } - - /** - * Returns all the public constructors defined for the current Type. - */ - override def GetConstructors(): Array[ConstructorInfo] = { - testRaw("") - return super.GetConstructors() - } - - /** - * Searches for the specified public method whose parameters - * match the specified argument types. - */ - override def GetMethod(name: String, params: Array[Type]): MethodInfo = { - testRaw(name + types2String(params)) - return super.GetMethod(name, params) - } - - /** Returns all the public methods of the current Type. */ - override def GetMethods(): Array[MethodInfo] = { - testRaw("") - return super.GetMethods() - } - - /** Searches for the nested type with the specified name. */ - override def GetNestedType(name: String): Type = { - testRaw(name) - super.GetNestedType(name) - } - - /** Returns all the types nested within the current Type. */ - override def GetNestedTypes(): Array[Type] = { - testRaw("") - super.GetNestedTypes() - } - - /** Returns a Type object that represents a one-dimensional array of the current type */ - def MakeArrayType(): Type = { - Type.mkArray(this, 1) - } - - /** Sets a custom attribute. */ - def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) { - addCustomAttribute(constr, value) - } - - def setPosition(sourceLine: Int, sourceFilename: String) { - this.sourceLine = sourceLine - this.sourceFilename = sourceFilename - } - - def setSourceFilepath(sourceFilepath: String) { - this.sourceFilepath = sourceFilepath - } - - //########################################################################## - // protected members - - var sourceLine: Int = _ - var sourceFilename: String = _ - var sourceFilepath: String = _ - - var fieldBuilders = scala.collection.mutable.ArrayBuffer.empty[FieldBuilder] - var methodBuilders = scala.collection.mutable.ArrayBuffer.empty[MethodBuilder] - var constructorBuilders = scala.collection.mutable.ArrayBuffer.empty[ConstructorBuilder] - var nestedTypeBuilders = scala.collection.mutable.ArrayBuffer.empty[TypeBuilder] - - // shows if the type is 'raw', i.e. still subject to changes - private var raw = true - - // throws an exception if the type is 'raw', - // i.e. not finalized by call to CreateType - protected def testRaw(member: String) { - if (raw) - throw new RuntimeException("Not supported for TypeBuilder before CreateType(): " + - FullName + "::" + member) - } - - //########################################################################## - // public members not part of the Reflection.Emit.TypeBuilder interface. - - /** The apply method for a visitor. 
*/ - @throws(classOf[IOException]) - def apply(v: Visitor) { - v.caseTypeBuilder(this) - } - - //########################################################################## - -} // class TypeBuilder - -object TypeBuilder { - def types2String(types: Array[Type]): String = { - val s = new StringBuffer("(") - for(i <- 0 until types.length) { - if (i > 0) s.append(", ") - s.append(types(i)) - } - s.append(")") - return s.toString() - } - - def methodsEqual(m1: MethodInfo, m2: MethodInfo): Boolean = { - if (!m1.Name.equals(m2.Name)) - return false - if (m1.ReturnType != m2.ReturnType) - return false - val p1 = m1.GetParameters() - val p2 = m2.GetParameters() - if (p1.length != p2.length) - return false - for(i <- 0 until p1.length) - if (p1(i).ParameterType != p2(i).ParameterType) - return false - return true - } - - def constructorsEqual(c1: ConstructorInfo, c2: ConstructorInfo): Boolean = { - if (c1.IsStatic != c2.IsStatic) - return false - val p1 = c1.GetParameters() - val p2 = c2.GetParameters() - if (p1.length != p2.length) - return false - for(i <- 0 until p1.length) - if (p1(i).ParameterType != p2(i).ParameterType) - return false - return true -} - -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala deleted file mode 100644 index 28ec801dd4..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import java.io.IOException - -/** - * The Visitable interface - */ -trait Visitable { - - //########################################################################## - - /** - * the visitable method to apply a visitor - */ - @throws(classOf[IOException]) - def apply(v: Visitor): Unit - - //########################################################################## -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala deleted file mode 100644 index d4b84cdd4e..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* - * System.Reflection.Emit-like API for writing .NET assemblies to MSIL - */ - - -package ch.epfl.lamp.compiler.msil.emit - -import java.io.IOException - -/** - * The Visitor interface to walk through the MSIL code Builder hierarchy. 
- */ -trait Visitor { - - //########################################################################## - - /** Visit an AssemblyBuilder */ - @throws(classOf[IOException]) - def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder): Unit - - /** Visit a ModuleBuilder */ - @throws(classOf[IOException]) - def caseModuleBuilder(moduleBuilder: ModuleBuilder): Unit - - /** Visit a TypeBuilder */ - @throws(classOf[IOException]) - def caseTypeBuilder(typeBuilder: TypeBuilder): Unit - - /** Visit a FieldBuilder */ - @throws(classOf[IOException]) - def caseFieldBuilder(fieldBuilder: FieldBuilder): Unit - - /** Visit a ConstructorBuilder */ - @throws(classOf[IOException]) - def caseConstructorBuilder(constructorBuilder: ConstructorBuilder): Unit - - /** Visit a MethodBuilder */ - @throws(classOf[IOException]) - def caseMethodBuilder(methodBuilder: MethodBuilder): Unit - - /** Visit a ParameterBuilder */ - @throws(classOf[IOException]) - def caseParameterBuilder(parameterBuilder: ParameterBuilder): Unit - - /** Visit an ILGenerator */ - @throws(classOf[IOException]) - def caseILGenerator(iLGenerator: ILGenerator): Unit - - /** Visit an OpCode */ - @throws(classOf[IOException]) - def caseOpCode(opCode: OpCode): Unit - - /** Visit a LocalBuilder */ - @throws(classOf[IOException]) - def caseLocalBuilder(localBuilder: LocalBuilder): Unit - - //########################################################################## -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java deleted file mode 100644 index 9a6e28a545..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java +++ /dev/null @@ -1,31 +0,0 @@ - -package ch.epfl.lamp.compiler.msil.tests; - -import ch.epfl.lamp.compiler.msil.*; -import ch.epfl.lamp.compiler.msil.util.Table; - -import java.io.PrintStream; - -public class CustomAttributesTest { - public static void main(String[] args) { - if (args.length < 1) { - System.err.println("You must supply a filename!"); - System.exit(1); - } - - Assembly assem = Assembly.LoadFrom(args[0]); - Type.initMSCORLIB(assem); - - testCustomAttributes(); - } - - public static void testCustomAttributes() { - Object[] attrs = Type.GetType("System.ObsoleteAttribute") - .GetCustomAttributes(false); - assert attrs != null; - for (int i = 0; i < attrs.length; i++) { - System.out.println("\t" + attrs[i]); - } - } - -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java deleted file mode 100644 index 96ec1bfeea..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java +++ /dev/null @@ -1,18 +0,0 @@ - -package ch.epfl.lamp.compiler.msil.tests; - -import ch.epfl.lamp.compiler.msil.*; -import ch.epfl.lamp.compiler.msil.util.VJSAssembly; - -public class JavaTypeTest { - - public static void main(String[] args) { - if (args.length < 1) { - System.err.println("usage: java test.JavaTypeTest classname"); - System.exit(1); - } - - Type type = VJSAssembly.VJSLIB.GetType(args[0]); - MembersTest.dumpType(System.out, type); - } -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java deleted file mode 100644 index 37a5c6ea90..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java +++ /dev/null @@ -1,100 +0,0 @@ - -package ch.epfl.lamp.compiler.msil.tests; - -import ch.epfl.lamp.compiler.msil.*; -import 
ch.epfl.lamp.compiler.msil.util.Table; - -import java.io.PrintStream; - -public class MembersTest { - - public static void main(String[] args) { - if (args.length < 1) { - System.err.println - ("usage: java test.MembersTest assembly [classname]"); - System.exit(1); - } - - Assembly mscorlib = Assembly.LoadFrom("mscorlib.dll"); - Type.initMSCORLIB(mscorlib); - Assembly assem = Assembly.LoadFrom(args[0]); - if (args.length > 1) { - Type type = assem.GetType(args[1]); - if (type != null) - dumpMember(System.out, type); - else System.err.println("Cannot find type " + args[1] - + " in " + assem); - } else { - Type[] types = assem.GetTypes(); - System.out.println("Number of types in assembly " + assem - + " -> " + types.length); - dumpCustomAttributes(System.out, "assembly: ", assem); - Module[] modules = assem.GetModules(); - for (int i = 0; i < modules.length; i++) { - dumpCustomAttributes(System.out, "module " + modules[i] + ": ", - modules[i]); - } - dumpMembers(System.out, types); - } - } - - public static final void dumpMember(PrintStream out, MemberInfo member) { - try { - if (member.MemberType() == MemberTypes.TypeInfo - || member.MemberType() == MemberTypes.NestedType) { - Type type = (Type)member; - dumpCustomAttributes(out, "", type); - out.print(TypeAttributes.accessModsToString(type.Attributes)); - out.print(type.IsInterface() ? " interface " : " class "); - out.print(type); - if (type.BaseType() != null) - out.println(" extends " + type.BaseType()); - Type[] ifaces = type.GetInterfaces(); - if (ifaces.length > 0) { - out.print("\timplements "); - for (int i = 0; i < ifaces.length; i++) { - out.print(ifaces[i]); - if (i < (ifaces.length - 1)) - out.print(", "); - } - out.println(); - } - out.println("{"); - int all = BindingFlags.Public | BindingFlags.DeclaredOnly// | BindingFlags.NonPublic - | BindingFlags.Instance | BindingFlags.Static; - dumpMembers(out, type.GetNestedTypes()); - dumpMembers(out, type.GetFields(all)); - dumpMembers(out, type.GetConstructors(all)); - dumpMembers(out, type.GetMethods(all)); - dumpMembers(out, type.GetProperties(all)); - dumpMembers(out, type.GetEvents()); - out.println("}"); - } else { - dumpCustomAttributes(out, "", member); - out.print(MemberTypes.toString(member.MemberType())); - out.print(": "); out.print(member); - out.println(); - } - } catch (Throwable e) { - String message = MemberTypes.toString(member.MemberType()) - + ": " + member; - throw new RuntimeException(message, e); - } - } - - public static void dumpCustomAttributes(PrintStream out, - String prefix, - ICustomAttributeProvider att) - { - Object[] attrs = att.GetCustomAttributes(false); - for (int j = 0; j < attrs.length; j++) - out.println(prefix + attrs[j]); - } - - public static void dumpMembers(PrintStream out, MemberInfo[] members) { - for (int i = 0; i < members.length; i++) { - dumpMember(out, members[i]); - } - } - -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java deleted file mode 100644 index 1df389b011..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java +++ /dev/null @@ -1,311 +0,0 @@ - -package ch.epfl.lamp.compiler.msil.tests; - -import ch.epfl.lamp.compiler.msil.PEFile; -import ch.epfl.lamp.compiler.msil.util.Table; -import ch.epfl.lamp.compiler.msil.util.Table.*; - -import java.io.PrintStream; -import java.io.FileNotFoundException; - -public class TableDump extends PEFile { - - //########################################################################## - - public 
TableDump(String filename) throws FileNotFoundException { - super(filename); - } - - /***/ - public void dump(PrintStream out) { - out.println("CLI RVA: " + CLI_RVA); - out.println("Optional header size: " + optHeaderSize); - out.println("Number of sections: " + numOfSections); - out.println(); - - for (int i = 0; i < sections.length; i++) { - sections[i].dump(out); - out.println(); - } - - out.println("MetaData Offset: 0x" + Integer.toHexString(posMetadata)); - out.println("Number of streams: " + numOfStreams); - - out.println("#~ stream"); Meta.dump(out); out.println(); - out.println("#Strings stream"); Strings.dump(out); out.println(); - if (US != null) { - out.println("#US stream"); US.dump(out); out.println(); - } - out.println("#GUID stream"); GUID.dump(out); out.println(); - out.println("#Blob stream"); Blob.dump(out); out.println(); - - out.println("Heap Sizes IndexedSeq = 0x0" + Integer.toHexString(heapSizes)); - out.println(); - - for(int i = 0; i < Table.MAX_NUMBER; i++) - if(getTable(i).rows > 0) { - dump(out, getTable(i)); - out.println(); - } - - } - - /** Dumps the contents of this table. */ - public void dump(PrintStream out, Table table) { - out.println("Table:" + " ID = 0x" + byte2hex(table.id)); - out.println("\tname = " + table.getTableName()); - out.println("\trows = " + table.rows); - //out.println("\tStart pos in file = 0x" + Long.toHexString(table.start)); - for (int i = 1; i <= table.rows; i++) - dumpRow(out, table, i); - } - - public void dumpIndex(PrintStream out, int tableSetId, int index) { - int tableId = Table.getTableId(tableSetId, index); - int row = Table.getTableIndex(tableSetId, index); - out.print(getTable(tableId).getTableName()); - out.print('['); - out.print(getTable(tableId).isShort ? short2hex(row) : int2hex(row)); - out.print(']'); - } - - public void dumpRow(PrintStream out, Table table, int row) { - table.readRow(row); - out.print(table.getTableName()); - out.print("[" + short2hex(row) + "]: "); - dumpRow(out, table); - out.println(); - } - - /** Prints the current content of the fields of the class. 
*/ - public void dumpRow(PrintStream out, Table table) { - if (table instanceof ModuleDef) { - ModuleDef t = (ModuleDef)table; - out.print("Generation = 0x" + short2hex(t.Generation)); - out.print("; Name = " + getString(t.Name)); - //out.print("; Mvid = (" + bytes2hex(getGUID(Mvid)) + ")"); - } else if (table instanceof TypeRef) { - TypeRef t = (TypeRef)table; - out.print("FullName = " + t.getFullName()); - out.print("; ResolutionScope = 0x" + int2hex(t.ResolutionScope)); - } else if (table instanceof TypeDef) { - TypeDef t = (TypeDef)table; - out.print("Flags = 0x"); out.print(int2hex(t.Flags)); - out.print("; FullName = "); out.print(t.getFullName()); - out.print("; Extends = "); - dumpIndex(out, Table._TypeDefOrRef, t.Extends); - out.print("; FieldList = "); out.print(t.FieldList); - out.print("; MethodList = "); out.print(t.MethodList); - } else if (table instanceof FieldTrans) { - FieldTrans t = (FieldTrans)table; - out.print("Field = "); out.print(t.Field); - } else if (table instanceof FieldDef) { - FieldDef t = (FieldDef)table; - out.print("Flags = 0x" + short2hex(t.Flags)); - out.print("; Name = " + t.getName()); - out.print("; Signature = (" + - bytes2hex(getBlob(t.Signature)) + ")"); - } else if (table instanceof MethodTrans) { - MethodTrans t = (MethodTrans)table; - out.print("Method = "); out.print(t.Method); - } else if (table instanceof MethodDef) { - MethodDef t = (MethodDef)table; - out.print("Flags = 0x" + short2hex(t.Flags)); - out.print("; Name = " + t.getName()); - out.print("; ParamList = " + t.ParamList); - out.print("; Signature = (" + - bytes2hex(getBlob(t.Signature)) + ")"); - } else if (table instanceof ParamDef) { - ParamDef t = (ParamDef)table; - out.print("Flags = 0x" + short2hex(t.Flags)); - out.print("; Name = " + t.getName()); - out.print("; Sequence = " + t.Sequence); - } else if (table instanceof InterfaceImpl) { - InterfaceImpl t = (InterfaceImpl)table; - out.print("Class = 0x" + short2hex(t.Class));// + " (ref to: "); - //TypeDef td = (TypeDef) getTable(TypeDef.ID); - //td.readRow(Class); - //td.dumpRow(out); - out.print("; Interface = 0x" + short2hex(t.Interface)); - } else if (table instanceof MemberRef) { - MemberRef t = (MemberRef)table; - out.print("Name = " + t.getName()); - out.print("; Signature = (" + - bytes2hex(getBlob(t.Signature)) + ")"); - out.print("; Class = " + t.Class); - } else if (table instanceof Constant) { - Constant t = (Constant)table; - out.print("Parent = "); dumpIndex(out, Table._HasConstant, t.Parent); - out.print("; Type = 0x" + byte2hex(t.Type)); - out.print("; Value = (" + bytes2hex(getBlob(t.Value))); - out.print("); Value = " + t.getValue()); - } else if (table instanceof CustomAttribute) { - CustomAttribute t = (CustomAttribute)table; - //out.print("Parent = 0x" + int2hex(t.Parent)); - out.print("Parent = "); - dumpIndex(out, Table._HasCustomAttribute, t.Parent); - //out.print("; Type = 0x" + short2hex(t.Type)); - out.print("; Type = "); - dumpIndex(out, Table._CustomAttributeType, t.Type); - out.print("; Value = (" + bytes2hex(t.getValue()) + ")"); - } else if (table instanceof FieldMarshal) { - FieldMarshal t = (FieldMarshal)table; - out.print("NativeType = ("); - out.print(bytes2hex(getBlob(t.NativeType)) + ")"); - } else if (table instanceof DeclSecurity) { - DeclSecurity t = (DeclSecurity)table; - out.print("Action = 0x" + short2hex(t.Action)); - out.print("; PermissionSet = (" + - bytes2hex(getBlob(t.PermissionSet)) + ")"); - } else if (table instanceof ClassLayout) { - ClassLayout t = (ClassLayout)table; - 
out.print("PackingSize = 0x" + short2hex(t.PackingSize)); - out.print("; ClassSize = 0x" + int2hex(t.ClassSize)); - out.print(": Parent = " + t.Parent + " (ref to: "); - dumpRow(out, this.TypeDef(t.Parent)); - out.print(")"); - } else if (table instanceof FieldLayout) { - FieldLayout t = (FieldLayout)table; - out.print("Offset = 0x" + int2hex(t.Offset)); - out.print("; Field = (ref to: "); - dumpRow(out, this.FieldDef(t.Field)); - out.print(")"); - } else if (table instanceof StandAloneSig) { - StandAloneSig t = (StandAloneSig)table; - out.print("StandAloneSig: Signature = (" + - bytes2hex(getBlob(t.Signature)) + ")"); - } else if (table instanceof EventMap) { - EventMap t = (EventMap)table; - out.print("Parent = 0x" + int2hex(t.Parent) + " (ref to: "); - dumpRow(out, this.TypeDef(t.Parent)); - out.print("); EventList = 0x"); out.print(int2hex(t.EventList)); - } else if (table instanceof EventDef) { - EventDef t = (EventDef)table; - out.print("EventFlags = 0x" + short2hex(t.EventFlags)); - out.print("; Name = " + t.getName()); - out.print("; EventType = 0x" + int2hex(t.EventType)); - } else if (table instanceof PropertyMap) { - PropertyMap t = (PropertyMap)table; - out.print("Parent = " + t.Parent + " (ref to: "); - dumpRow(out, this.TypeDef(t.Parent)); - out.print(")"); - } else if (table instanceof PropertyDef) { - PropertyDef t = (PropertyDef)table; - out.print("Flags = 0x" + short2hex(t.Flags)); - out.print("; Name = " + t.getName()); - out.print("; Type = (" + bytes2hex(getBlob(t.Type)) + ")"); - } else if (table instanceof MethodSemantics) { - MethodSemantics t = (MethodSemantics)table; - out.print("Semantics = 0x" + short2hex(t.Semantics)); - out.print("; Method = 0x" + int2hex(t.Method) + " (ref to: "); - dumpRow(out, this.MethodDef(t.Method)); - out.print("); Association = 0x" + int2hex(t.Association)); - } else if (table instanceof MethodImpl) { - MethodImpl t = (MethodImpl)table; - out.print("Class = (ref to: "); - dumpRow(out, this.TypeDef(t.Class)); - out.print(")"); - } else if (table instanceof ModuleRef) { - ModuleRef t = (ModuleRef)table; - out.print("Name = " + t.getName()); - } else if (table instanceof TypeSpec) { - TypeSpec t = (TypeSpec)table; - out.print("Signature = (" + - bytes2hex(getBlob(t.Signature)) + ")"); - } else if (table instanceof ImplMap) { - ImplMap t = (ImplMap)table; - out.print("ImportName = " + getString(t.ImportName)); - } else if (table instanceof FieldRVA) { - FieldRVA t = (FieldRVA)table; - out.print("RVA = 0x" + int2hex(t.RVA)); - out.print("; Field = (ref to: "); - dumpRow(out, this.FieldDef(t.Field)); - out.print(")"); - } else if (table instanceof AssemblyDef) { - AssemblyDef t = (AssemblyDef)table; - out.print("Flags = 0x" + int2hex(t.Flags)); - out.print(" ; Name = " + getString(t.Name)); - out.print("; Culture = " + getString(t.Culture)); - out.print(" ; Version = " + t.MajorVersion + "."); - out.print(t.MinorVersion + "." + t.BuildNumber); - out.print("." 
+ t.RevisionNumber); - out.print("; HashAlgId = 0x" + int2hex(t.HashAlgId)); - out.print("; PublicKey = ("); - out.print(bytes2hex(getBlob(t.PublicKey)) + ")"); - } else if (table instanceof AssemblyProcessor) { - AssemblyProcessor t = (AssemblyProcessor)table; - out.print("Processor = 0x" + int2hex(t.Processor)); - } else if (table instanceof AssemblyOS) { - AssemblyOS t = (AssemblyOS)table; - out.print("!?!"); - } else if (table instanceof AssemblyRef) { - AssemblyRef t = (AssemblyRef)table; - out.print("Flags = 0x" + int2hex(t.Flags)); - out.print("; Name = " + getString(t.Name)); - out.print("; Culture = " + getString(t.Culture)); - out.print("; Version = " + t.MajorVersion + "." + t.MinorVersion); - out.print("." + t.BuildNumber + "." + t.RevisionNumber); - out.print("; PublicKeyOrToken = (" + - bytes2hex(getBlob(t.PublicKeyOrToken)) + ")"); - out.print("; HashValue = (" + - bytes2hex(getBlob(t.HashValue)) + ")"); - } else if (table instanceof AssemblyRefProcessor) { - AssemblyRefProcessor t = (AssemblyRefProcessor)table; - out.print("!?!"); - } else if (table instanceof AssemblyRefOS) { - AssemblyRefOS t = (AssemblyRefOS)table; - out.print("!?!"); - } else if (table instanceof FileDef) { - FileDef t = (FileDef)table; - out.print("Flags = 0x" + int2hex(t.Flags)); - out.print("; Name = " + t.getName()); - out.print("; HashValue = (" + bytes2hex(getBlob(t.HashValue)) +")"); - } else if (table instanceof ExportedType) { - ExportedType t = (ExportedType)table; - out.print("FullName = " + t.getFullName()); - } else if (table instanceof ManifestResource) { - ManifestResource t = (ManifestResource)table; - out.print("Name = " + getString(t.Name)); - out.print("; Flags = 0x" + int2hex(t.Flags)); - } else if (table instanceof NestedClass) { - NestedClass t = (NestedClass)table; - out.print(this.TypeDef(t.EnclosingClass).getFullName()); - out.print("/"); - out.print(this.TypeDef(t.NestedClass).getFullName()); - } else - throw new RuntimeException("Unknown table " + table.getClass()); - } - - //########################################################################## - - public static void main(String[] args) { - if (args.length < 1) { - System.err.println("You must supply a filename!"); - System.exit(1); - } - - TableDump file = null; - try { - file = new TableDump(args[0]); - } catch (FileNotFoundException e) { e.printStackTrace(); } - - if (args.length > 1) { - nextarg: - for (int i = 1; i < args.length; i++) { - String name = args[i]; - for (int tableId = 0; tableId < Table.MAX_NUMBER; tableId++) { - Table table = file.getTable(tableId); - if ((table.rows > 0) && name.equals(table.getTableName())) { - file.dump(System.out, table); - System.out.println(); - continue nextarg; - } - } - System.err.println("No such table: " + name); - } - } else - file.dump(System.out); - } - - //########################################################################## -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java deleted file mode 100644 index 2c5946a734..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java +++ /dev/null @@ -1,92 +0,0 @@ - -package test; - -import ch.epfl.lamp.compiler.msil.*; -import ch.epfl.lamp.compiler.msil.util.Table; - -import java.io.PrintStream; - -public class Test { - public static void main(String[] args) { - if (args.length < 1) { - System.err.println("You must supply a filename!"); - System.exit(1); - } - - Assembly assem = Assembly.LoadFrom(args[0]); - Type.initMSCORLIB(assem); - - 
//"System.Collections.ArrayList" - if (args.length >= 2) { - Type t = Type.GetType(args[1]); - dumpType(System.out, t); - } else { - dumpAssembly(assem); - } - } - - - public static void dumpAssembly(Assembly assem) { - Module[] modules = assem.GetModules(); -// System.out.println("Modules in assembly " + assem + -// " (" + modules.length + ")"); -// for (int i = 0; i < modules.length; i++) { -// System.out.println("\t" + modules[i]); -// } - - Type[] types = modules[0].GetTypes(); -// System.out.println("Types in assembly " + assem + -// " (" + types.length + ")"); - for (int i = 0; i < types.length; i++) { - System.out.println("#" + i + " -> " + types[i]); - types[i].completeType(); - } - } - - public static final void dumpType(PrintStream out, Type type) { - out.println("Type = " + type); - out.println("Name = " + type.Name); - out.println("Namespace = " + type.Namespace); - out.println("FullName = " + type.FullName); - out.println("Attributes = " + TypeAttributes.toString(type.Attributes)); - out.println("BaseType = " + type.BaseType); - Type[] ifaces = type.GetInterfaces(); - if (ifaces != null) { - for (int i = 0; i < ifaces.length; i++) - out.println("\timplements " + ifaces[i]); - } - out.println("Assembly = " + type.Assembly); - out.println("Module = " + type.Module); - out.println("DeclaringType = " + type.DeclaringType); - out.println("IsInterface = " + type.IsInterface); - out.println("IsAbstract = " + type.IsAbstract); - - FieldInfo[] fields = type.GetFields(BindingFlags.Instance - | BindingFlags.Static - | BindingFlags.NonPublic); - out.println("\nFields (" + fields.length + "):"); - for (int i = 0; i < fields.length; i++) { - out.println("\t" + fields[i]); - out.println("\t\tDeclaringType = " + fields[i].DeclaringType); - out.println("\t\tReflectedType = " + fields[i].ReflectedType); - } - - ConstructorInfo[] constrs = type.GetConstructors(); - out.println("\nConstructors (" + constrs.length + "):"); - for (int i = 0; i < constrs.length; i++) { - out.println("\t" + constrs[i]); - } - -// MethodInfo[] methods = type.GetMethods(BindingFlags.Instance -// | BindingFlags.Static -// | BindingFlags.Public -// | BindingFlags.NonPublic); - MethodInfo[] methods = type.GetMethods(); - out.println("\nMethods (" + methods.length + "):"); - for (int i = 0; i < methods.length; i++) { - out.println("\t" + methods[i]); - out.println("\t\tDeclaringType = " + methods[i].DeclaringType); - out.println("\t\tReflectedType = " + methods[i].ReflectedType); - } - } -} diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java deleted file mode 100644 index 56519e8487..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java +++ /dev/null @@ -1,23 +0,0 @@ -package ch.epfl.lamp.compiler.msil.util; - -import ch.epfl.lamp.compiler.msil.Type; -import ch.epfl.lamp.compiler.msil.CustomModifier; - -/** - * A PECustomMod holds the info parsed from metadata per the CustomMod production in Sec. 23.2.7, Partition II. - * */ -public final class PECustomMod { - - public final Type marked; - public final CustomModifier[] cmods; - - /** Terminology: - the CustomModifier(s) are markers, - and the msil.Type is a type marked by those markers. 
*/ - public PECustomMod(Type marked, CustomModifier[] cmods) { - this.marked = marked; - this.cmods = cmods; - } - -} - diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java deleted file mode 100644 index 454a94e55c..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * System.Reflection-like API for acces to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil.util; - -import ch.epfl.lamp.compiler.msil.PEFile; - -import java.io.PrintStream; - -/** Describes a section from a PE/COFF file - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public final class PESection { - - private final PEFile file; - private final long sectionStart; - - public final String name; - public final int virtAddr; - public final int virtSize; - public final int realAddr; - public final int realSize; - public final int flags; - - private static final byte[] buf = new byte[8]; - - public PESection(PEFile file) { - this.file = file; - sectionStart = file.pos(); - file.read(buf); - int i; - for(i = 7; (i >= 0) && (0 == buf[i]); i--); - name = new String(buf, 0, i + 1); - virtSize = file.readInt(); - virtAddr = file.readInt(); - realSize = file.readInt(); - realAddr = file.readInt(); - file.skip(3 * PEFile.INT_SIZE); - flags = file.readInt(); - } - - - public void dump(PrintStream out) { - out.println("Section name: " + name + - " (name.length=" + name.length() + ")"); - out.println("Virtual Address: 0x" + PEFile.int2hex(virtAddr)); - out.println("Virtual Size: 0x" + PEFile.int2hex(virtSize)); - out.println("Real Address: 0x" + PEFile.int2hex(realAddr)); - out.println("Real Size: 0x" + PEFile.int2hex(realSize)); - out.println("Flags: 0x" + PEFile.int2hex(flags)); - } - -} // class PESection diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java deleted file mode 100644 index 649d9e74f2..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * System.Reflection-like API for acces to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil.util; - -import ch.epfl.lamp.compiler.msil.PEFile; -import ch.epfl.lamp.compiler.msil.PEFile.Sig; - -import java.io.PrintStream; -import java.io.IOException; - -import java.nio.ByteBuffer; -import java.nio.channels.FileChannel; - -/** - * Implements support for CLI streams within a PE file. - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public final class PEStream implements Signature { - - //########################################################################## - // Members - - /** The name of the stream. */ - public final String name; - - /** The offset of the stream from the beginning of the file. */ - public final int offset; - - /** The size of the stream in bytes; shall be multiple of 4. */ - public final int size; - - private final PEFile file; - - private final ByteBuffer buffer; - - //########################################################################## - - /** The PEStream class constructor. 
- * @param file - the PEFile to which this stream belongs - */ - public PEStream(PEFile file) { - this.file = file; - offset = file.fromRVA(file.rvaMetadata + file.readInt()); - size = file.readInt(); - buffer = file.getBuffer(offset, size); - - int i = 0; - byte [] _buf = new byte [16]; - do { - _buf[i] = (byte) file.readByte(); - i++; - } while(0 != _buf[i-1]); - name = new String(_buf, 0, i - 1); - - file.align(PEFile.INT_SIZE, file.posMetadata); - //assert size % 4 == 0; - } - - /** Move to the specified position in the stream. */ - private void seek(int pos) { - try { - buffer.position(pos); - } catch (IllegalArgumentException e) { - System.err.println("\nSeek failed in file " + file - + " for position " + pos - + " of stream " + name + " (" + buffer + ")"); - throw e; - } - } - - /** Return a string from the specified position in the stream. */ - public String getString(int pos) { - seek(pos); - buffer.mark(); - int i; - for (i = 0; getByte() != 0; i++); - byte[] buf = new byte[i]; - buffer.reset(); // go back to the marked position - buffer.get(buf); - try { - return new String(buf, "UTF-8"); - } catch (java.io.UnsupportedEncodingException e) { - throw new RuntimeException(e); - } - } - - /** Read a byte from the stream. */ - public int getByte() { - return (buffer.get() + 0x0100) & 0xff; - } - - /** Return the GUID at the given position in the stream. */ - public byte[] getGUID(int pos) { - seek(pos); - byte[] buf = new byte[32]; // 128-bit GUID - try { - buffer.get(buf); - } catch (Exception e) { - System.err.println(); - System.err.println("PEStream.getBlob(): Exception for pos = " + - pos + " and buf.length = " + buf.length); - System.err.println("\tbuffer = " + buffer); - e.printStackTrace(); - throw new RuntimeException(); - } - return buf; - } - - public int readLength() { - int length = getByte(); - if ((length & 0x80) != 0) { - length = ((length & 0x7f) << 8) | getByte(); - if ((length & 0x4000) != 0) - length = ((length & 0x3fff) << 16) | (getByte()<<8) | getByte(); - } - return length; - } - - /** Return a blob from the specified position in the stream. */ - public byte[] getBlob(int pos) { - seek(pos); - // the length indicates the number of bytes - // AFTER the encoded size of the blob - int length = readLength(); - byte[] buf = new byte[length]; - buffer.get(buf); - return buf; - } - - /***/ - public Sig getSignature(int pos) { - seek(pos); - return file.newSignature(buffer); - } - - /** - */ - public Object getConstant(int type, int pos) { - Object val = null; - seek(pos); - int length = readLength(); // skip over the blob length field - switch (type) { - case ELEMENT_TYPE_BOOLEAN: - assert length == 1; - return buffer.get() == 0 ? 
Boolean.FALSE : Boolean.TRUE; - case ELEMENT_TYPE_CHAR: - assert length == 2 : "length == " + length; - return new Character(buffer.getChar()); - case ELEMENT_TYPE_I1: - case ELEMENT_TYPE_U1: // TODO U1 not the same as I1 - assert length == 1; - return new Byte(buffer.get()); - case ELEMENT_TYPE_I2: - case ELEMENT_TYPE_U2: - assert length == 2; - return new Short(buffer.getShort()); - case ELEMENT_TYPE_I4: - case ELEMENT_TYPE_U4: - assert length == 4; - return new Integer(buffer.getInt()); - case ELEMENT_TYPE_I8: - case ELEMENT_TYPE_U8: - assert length == 8; - return new Long(buffer.getLong()); - case ELEMENT_TYPE_R4: - assert length == 4; - return new Float(buffer.getFloat()); - case ELEMENT_TYPE_R8: - assert length == 8; - return new Double(buffer.getDouble()); - case ELEMENT_TYPE_STRING: -// length /= 2; -// char[] chars = new char[length]; -// for (int i = 0; i < length; i++) -// chars[i] = buffer.getChar(); -// val = new String(chars); - try { - return new String(getBlob(pos), "UTF-16LE"); - } catch(java.io.UnsupportedEncodingException e) { - throw new RuntimeException(e); - } - default: throw new RuntimeException("Illegal constant type: " + type); - } - } - - public void dump(PrintStream out) { - out.println("Stream name: " + name + " (length " + - name.length() + " characters)"); - out.println("Stream offset: 0x" + PEFile.int2hex(offset)); - out.println("Stream size: 0x" + PEFile.int2hex(size)); - } - - //########################################################################## -} // class PEStream diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java b/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java deleted file mode 100644 index d5dc0ff32c..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * System.Reflection-like API for acces to .NET assemblies (DLL & EXE) - */ - - -package ch.epfl.lamp.compiler.msil.util; - -import ch.epfl.lamp.compiler.msil.Type; - -/** - * Signatures - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public interface Signature { - - //########################################################################## - - /** Marks end of a list. */ - public static final int ELEMENT_TYPE_END = 0x00; - /** void */ - public static final int ELEMENT_TYPE_VOID = 0x01; - /** boolean */ - public static final int ELEMENT_TYPE_BOOLEAN = 0x02; - /** char */ - public static final int ELEMENT_TYPE_CHAR = 0x03; - /** signed byte */ - public static final int ELEMENT_TYPE_I1 = 0x04; - /** byte */ - public static final int ELEMENT_TYPE_U1 = 0x05; - /** short */ - public static final int ELEMENT_TYPE_I2 = 0x06; - /** unsigned short */ - public static final int ELEMENT_TYPE_U2 = 0x07; - /** int */ - public static final int ELEMENT_TYPE_I4 = 0x08; - /** unsigned int */ - public static final int ELEMENT_TYPE_U4 = 0x09; - /** long */ - public static final int ELEMENT_TYPE_I8 = 0x0a; - /** unsigned long */ - public static final int ELEMENT_TYPE_U8 = 0x0b; - /** float */ - public static final int ELEMENT_TYPE_R4 = 0x0c; - /** double */ - public static final int ELEMENT_TYPE_R8 = 0x0d; - /** string */ - public static final int ELEMENT_TYPE_STRING = 0x0e; - /** Followed by token. */ - public static final int ELEMENT_TYPE_PTR = 0x0f; - /** Followed by token. 
*/ - public static final int ELEMENT_TYPE_BYREF = 0x10; - /** Followed by token */ - public static final int ELEMENT_TYPE_VALUETYPE = 0x11; - /** Followed by token */ - public static final int ELEMENT_TYPE_CLASS = 0x12; - - public static final int ELEMENT_TYPE_VAR = 0x13; - - /** - * ... ... - */ - public static final int ELEMENT_TYPE_ARRAY = 0x14; - - public static final int ELEMENT_TYPE_GENERICINST = 0x15; - /***/ - public static final int ELEMENT_TYPE_TYPEDBYREF = 0x16; - /** System.IntPtr */ - public static final int ELEMENT_TYPE_I = 0x18; - /** System.UIntPtr */ - public static final int ELEMENT_TYPE_U = 0x19; - /** Followed by full method signature. */ - public static final int ELEMENT_TYPE_FNPTR = 0x1b; - /** System.Object. */ - public static final int ELEMENT_TYPE_OBJECT = 0x1c; - /** Single-dim array with 0 lower bound. */ - public static final int ELEMENT_TYPE_SZARRAY = 0x1d; - - public static final int ELEMENT_TYPE_MVAR = 0x1e; - - /** Required modifier : followed by a TypeDef or TypeRef token. */ - public static final int ELEMENT_TYPE_CMOD_REQD = 0x1f; - /** Optional modifier : followed by a TypeDef or TypeRef token. */ - public static final int ELEMENT_TYPE_CMOD_OPT = 0x20; - /** Implemented within the CLI. */ - public static final int ELEMENT_TYPE_INTERNAL = 0x21; - /** Or'd with following element types. */ - public static final int ELEMENT_TYPE_MODIFIER = 0x40; - /** Sentinel for varargs method signature. */ - public static final int ELEMENT_TYPE_SENTINEL = 0x41; - /**Denotes a local variable that points at a pinned object. */ - public static final int ELEMENT_TYPE_PINNED = 0x45; - - //########################################################################## - // signature designators - - public static final int HASTHIS = 0x20; - public static final int EXPLICITTHIS = 0x40; - public static final int DEFAULT = 0x00; - public static final int VARARG = 0x05; - public static final int GENERIC = 0x10; - public static final int SENTINEL = 0x41; - public static final int C = 0x01; - public static final int STDCALL = 0x02; - public static final int THISCALL = 0x03; - public static final int FASTCALL = 0x04; - public static final int FIELD = 0x06; - public static final int PROPERTY = 0x08; - public static final int LOCAL_SIG = 0x07; - - //########################################################################## - // extra IDs used in the serialization format of named arguments - // to custom attributes. Reverse-engineered from compiled C# example - - /** What follows is a string with the full name of the type. */ - public static final int X_ELEMENT_TYPE_TYPE = 0x50; - - /** What follows is a string with the full name of the enumeration type*/ - public static final int X_ELEMENT_TYPE_ENUM = 0x55; - - /** The named argument specifies a field. */ - public static final int X_ELEMENT_KIND_FIELD = 0x53; - - /** The named argument specifies a property. 
*/ - public static final int X_ELEMENT_KIND_PROPERTY = 0x54; - - //########################################################################## -} // interface Signature diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java b/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java deleted file mode 100644 index 1f43b8c2fa..0000000000 --- a/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java +++ /dev/null @@ -1,1859 +0,0 @@ -/* - * System.Reflection-like API for acces to .NET Assemblies - */ - - -package ch.epfl.lamp.compiler.msil.util; - -import ch.epfl.lamp.compiler.msil.PEFile; -import ch.epfl.lamp.compiler.msil.PEFile.Sig; - -import java.io.PrintStream; -import java.nio.ByteBuffer; -import java.nio.MappedByteBuffer; - -/** - * Represents a table in a .NET assembly - * - * @author Nikolay Mihaylov - * @version 1.0 - */ -public abstract class Table { - - //########################################################################## - - public static final int MAX_NUMBER = 64; - - public static final long VALID_TABLES_MASK = 0x03ff3fb7ff57L; - - //########################################################################## - // fields and methods for handling predefined sets of tables - - public static final int TABLE_SET_LENGTH = 13; - - public static final int _TypeDefOrRef = 0; - public static final int _HasConstant = 1; - public static final int _HasCustomAttribute = 2; - public static final int _HasFieldMarshal = 3; - public static final int _HasDeclSecurity = 4; - public static final int _MemberRefParent = 5; - public static final int _HasSemantics = 6; - public static final int _MethodDefOrRef = 7; - public static final int _MemberForwarded = 8; - public static final int _Implementation = 9; - public static final int _CustomAttributeType = 10; - public static final int _ResolutionScope = 11; - public static final int _TypeOrMethodDef = 12; - - - public static final int[][] TableSet = new int[TABLE_SET_LENGTH][]; - - static { - TableSet[_TypeDefOrRef] = - new int[] {TypeDef.ID, TypeRef.ID, TypeSpec.ID}; - TableSet[_HasConstant] = - new int[] {FieldDef.ID, ParamDef.ID, PropertyDef.ID}; - TableSet[_HasCustomAttribute] = - new int[] {MethodDef.ID, FieldDef.ID, TypeRef.ID, TypeDef.ID, - ParamDef.ID, InterfaceImpl.ID, MemberRef.ID, ModuleDef.ID, - -1, PropertyDef.ID, EventDef.ID, -1, ModuleRef.ID, - TypeSpec.ID, AssemblyDef.ID, AssemblyRef.ID, - FileDef.ID, ExportedType.ID, ManifestResource.ID}; - TableSet[_HasFieldMarshal] = - new int[] {FieldDef.ID, ParamDef.ID}; - TableSet[_HasDeclSecurity] = - new int[] {TypeDef.ID, MethodDef.ID, AssemblyDef.ID}; - TableSet[_MemberRefParent] = - new int[] {-1, TypeRef.ID, ModuleRef.ID, MethodDef.ID, TypeSpec.ID}; - TableSet[_HasSemantics] = - new int[] {EventDef.ID, PropertyDef.ID}; - TableSet[_MethodDefOrRef] = - new int[] {MethodDef.ID, MemberRef.ID}; - TableSet[_MemberForwarded] = - new int[] {FieldDef.ID, MethodDef.ID}; - TableSet[_Implementation] = - new int[] {FileDef.ID, AssemblyRef.ID, ExportedType.ID}; - TableSet[_CustomAttributeType] = - new int[] {-1, -1, MethodDef.ID, MemberRef.ID, -1}; - TableSet[_ResolutionScope] = - new int[] {ModuleDef.ID, ModuleRef.ID, AssemblyRef.ID, TypeRef.ID}; - TableSet[_TypeOrMethodDef] = - new int[]{TypeDef.ID, MethodDef.ID}; - } - - public static final int[] NoBits = - new int[]{2, 2, 5, 1, 2, 3, 1, 1, 1, 2, 3, 2, 1}; - - public static int getMask(int tableSetId) { - return (1 << NoBits[tableSetId]) - 1; - } - - public static int getTableId(int tableSet, int index) { - return TableSet[tableSet][index & 
getMask(tableSet)]; - } - - public static int getTableIndex(int tableSet, int index) { - return index >> NoBits[tableSet]; - } - - public static int encodeIndex(int index, int tableSetId, int tableId) { - int[] tableSet = TableSet[tableSetId]; - for (int i = 0; i < tableSet.length; i++) { - if (tableSet[i] == tableId) - return (index << NoBits[tableSetId]) | i; - } - throw new RuntimeException("Cannot find table #" + tableId + - " in table set #" + tableSetId); - } - - //########################################################################## - - private static final String [] tableName = { - "Module", "TypeRef", "TypeDef", " FieldTrans", - "Field", "MethodTrans", "Method", "", - "Param", "InterfaceImpl", "MemberRef", "Constant", - "CustomAttribute", "FieldMarshal", "DeclSecurity","ClassLayout", - "FieldLayout", "StandAloneSig", "EventMap", "", - "Event", "PropertyMap", "", "Property", - "MethodSemantics", "MethodImpl", "ModuleRef", "TypeSpec", - "ImplMap", "FieldRVA", "", "", - "Assembly", "AssemblyProcessor","AssemblyOS", "AssemblyRef", - "AssemblyRefProcessor","AssemblyRefOS", "File", "ExportedType", - "ManifestResource", "NestedClass", "GenericParam", "MethodSpec", - "GenericParamConstraint", "", "", "", - "", "", "", "", - "", "", "", "",//0x30-0x37 - "", "", "", "", - "", "", "", "" //0x37-0x3f - }; - - /** Creates a table with the given id and number of rows. - */ - public static Table newTable(PEFile file, int id, int rows) { - Table table = null; - switch(id) { - case ModuleDef.ID: table = new ModuleDef(file, rows); break; - case TypeRef.ID: table = new TypeRef(file, rows); break; - case TypeDef.ID: table = new TypeDef(file, rows); break; - case FieldTrans.ID: table = new FieldTrans(file, rows); break; - case FieldDef.ID: table = new FieldDef(file, rows); break; - case MethodTrans.ID: table = new MethodTrans(file, rows); break; - case MethodDef.ID: table = new MethodDef(file, rows); break; - case ParamDef.ID: table = new ParamDef(file, rows); break; - case InterfaceImpl.ID: table = new InterfaceImpl(file, rows); break; - case MemberRef.ID: table = new MemberRef(file, rows); break; - case Constant.ID: table = new Constant(file, rows); break; - case CustomAttribute.ID: table = new CustomAttribute(file, rows); break; - case FieldMarshal.ID: table = new FieldMarshal(file, rows); break; - case DeclSecurity.ID: table = new DeclSecurity(file, rows); break; - case ClassLayout.ID: table = new ClassLayout(file, rows); break; - case FieldLayout.ID: table = new FieldLayout(file, rows); break; - case StandAloneSig.ID: table = new StandAloneSig(file, rows); break; - case EventMap.ID: table = new EventMap(file, rows); break; - case EventDef.ID: table = new EventDef(file, rows); break; - case PropertyMap.ID: table = new PropertyMap(file, rows); break; - case PropertyDef.ID: table = new PropertyDef(file, rows); break; - case MethodSemantics.ID: table = new MethodSemantics(file, rows); break; - case MethodImpl.ID: table = new MethodImpl(file, rows); break; - case ModuleRef.ID: table = new ModuleRef(file, rows); break; - case TypeSpec.ID: table = new TypeSpec(file, rows); break; - case ImplMap.ID: table = new ImplMap(file, rows); break; - case FieldRVA.ID: table = new FieldRVA(file, rows); break; - case AssemblyDef.ID: table = new AssemblyDef(file, rows); break; - case AssemblyProcessor.ID: table = new AssemblyProcessor(file, rows); break; - case AssemblyOS.ID: table = new AssemblyOS(file, rows); break; - case AssemblyRef.ID: table = new AssemblyRef(file, rows); break; - case 
AssemblyRefProcessor.ID: - table = new AssemblyRefProcessor(file, rows); break; - case AssemblyRefOS.ID: table = new AssemblyRefOS(file, rows); break; - case FileDef.ID: table = new FileDef(file, rows); break; - case ExportedType.ID: table = new ExportedType(file, rows); break; - case ManifestResource.ID: table = new ManifestResource(file, rows); break; - case NestedClass.ID: table = new NestedClass(file, rows); break; - case GenericParam.ID: - table = new GenericParam(file, rows); - break; - case MethodSpec.ID: - table = new MethodSpec(file, rows); - break; - case GenericParamConstraint.ID: - table = new GenericParamConstraint(file, rows); - break; - default: - table = new Empty(id); - } -// System.out.println("created table " + table.getName() + " with " -// + table.rows + " rows"); - return table; - } - - - //########################################################################## - // public fields - - /** Number of rows in the table. */ - public final int rows; - - /** Table ID as specified in Partition II. */ - public final int id; - - /** The file to which the table belongs. */ - protected final PEFile file; - - /** Memory mapped buffer wrapping the table. */ - protected ByteBuffer buffer; - - /** - * specified wheter a new memory-mapped byte buffer should be created - * for this table. - */ - protected boolean newMapping = false; - - /** Tells wheter the table is indexed by 2-byte (short) integer - * or by 4-byte integer. */ - public final boolean isShort; - - private int rowSize = -1; - - // the starting position of the table relative to the beginning of the file - private long start = -1; - - // the number of the row who can be accessed via the fields of the table - private int currentRow = 0; - - //########################################################################## - - protected Table(PEFile file, int id, int rows) { - this.file = file; - this.id = id; - this.rows = rows;//file.readInt(); - this.isShort = rows < (1 << 16); -// assert ((1L << id) & VALID_TABLES_MASK) != 0 -// : "Table does not have a vaid ID: " + byte2hex(id); - } - - /** - * Additional table initialization. - * @return the starting position of the next table in the stream. - */ - public final long init(long start) { - if (rows < 1) - return start; - if (this.start == -1) - this.start = start; - else throw new RuntimeException - ("Cannot re-initialize table \'" + getTableName() + "\'"); - rowSize = getRowSize(); - int size = rows * rowSize(); - buffer = this.newMapping ? file.mapBuffer(start, size) - : file.getBuffer(start, size); - return start + size; - } - - - public final String getTableName() { - return 0 <= id && id < MAX_NUMBER ? tableName[id] : ""; - } - - /** - * @return the size of the row in bytes - */ - public final int rowSize() { - return rowSize; - } - - /** - * if the underlying buffer is memory-mapped, load its contents into memory - */ - public void load() { - if (buffer instanceof MappedByteBuffer) - ((MappedByteBuffer)buffer).load(); - } - - /***/ - public final int readByte() { - return (buffer.get() + 0x100) & 0xff; - } - - /***/ - public final int readShort() { - return (buffer.getShort() + 0x10000) & 0xffff; - } - - /***/ - public final int readInt() { - return buffer.getInt(); - } - - /***/ - public final int readStringIndex() { - return file.StringIsShort ? readShort() : readInt(); - } - - /***/ - public final int readBlobIndex() { - return file.BlobIsShort ? readShort() : readInt(); - } - - /***/ - public final int readGUIDIndex() { - return file.GUIDIsShort ? 
readShort() : readInt(); - } - - /***/ - public final int readTableIndex(int tableId) { - return file.getTable(tableId).isShort ? readShort() : readInt(); - } - - /***/ - public final int readTableSetIndex(int tableSetId) { - return file.indexSize[tableSetId] == 2 ? readShort() : readInt(); - } - - /** Read the specified row and populate the fields of the instance. */ - public final void readRow(int row) { - seekRow(row); - int lastSeek = buffer.position(); - populateFields(); - int rowSizeRead = (int) (buffer.position() - lastSeek); - if (rowSizeRead != rowSize()) - throw new RuntimeException("Table ID=0x" + PEFile.byte2hex(id) + - ": read row size = " + rowSizeRead + - "; expected row size = " + rowSize()); - currentRow = row; - } - - /** Seeks in the file the position of the specified row. */ - protected final void seekRow(int row) { - assert row > 0 && row <= rows - : "Index " + row + " is not within the table with #rows = " + rows; - buffer.position((row - 1)* rowSize()); - } - - public final int currentRow() { return currentRow; } - - public final void nextRow() { readRow(currentRow() + 1); } - - //########################################################################## - // abstract members - - /** Assigns values to the fields of the class. */ - protected abstract void populateFields(); - - /** Returns the size of a row in bytes. */ - protected abstract int getRowSize(); - - //########################################################################## - // a table with 0 rows - - private static final class Empty extends Table { - public Empty(int id) { - super(null, id, 0); - } - protected int getRowSize() { return 0; } - protected void populateFields() { - throw new RuntimeException("Table 0x" + PEFile.byte2hex(id)); - } - } - - //########################################################################## - // table Module; ID=0x00; p115, 21.27 - - public static final class ModuleDef extends Table { - public static final int ID = 0x00; - - /** 2-byte value; reserved - shall be 0. */ - public int Generation; - - /** Index into #String. */ - public int Name; - - /** Index into #GUID; used to distinguish between - * two version of the same module. */ - public int Mvid; - - /** Index into #GUID; reserved - shall be 0. */ - public int EncId; - - /** Index into #GUID; reseved - shall be 0. */ - public int EncBaseId; - - public ModuleDef(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Generation = readShort(); - Name = readStringIndex(); - Mvid = readGUIDIndex(); - EncId = readGUIDIndex(); - EncBaseId = readGUIDIndex(); - } - - protected int getRowSize() { - return 2 + file.getStringIndexSize() + 3*file.getGUIDIndexSize(); - } - - public String getName() { - return file.getString(Name); - } - - } // class ModuleDef - - //########################################################################## - // table TypeRef; ID=0x01; p125, 21.35 - - public static final class TypeRef extends Table { - public static final int ID = 0x1; - - /** A ResolutionScope coded index. */ - public int ResolutionScope; - - /** Index into #String. */ - public int Name; - - /** Index into #String. 
*/ - public int Namespace; - - public TypeRef(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - ResolutionScope = readTableSetIndex(_ResolutionScope); - Name = readStringIndex(); - Namespace = readStringIndex(); - } - - protected int getRowSize() { - return file.getTableSetIndexSize(_ResolutionScope) + - 2 * file.getStringIndexSize(); - } - - public String getFullName() { - String namespace = file.getString(Namespace); - return namespace.length() == 0 ? file.getString(Name) - : namespace + "." + file.getString(Name); - } - - } // class TypeRef - - //########################################################################## - // table TypeDef; ID=0x02; p120, 21.34 - - public static final class TypeDef extends Table { - public static final int ID = 0x02; - - /** 4-byte bitmask of type TypeAttributes (22.1.14). */ - public int Flags; - - /** Index into #String. */ - public int Name; - - /** Index into #String. */ - public int Namespace; - - /** TypeDefOrRef coded index. */ - public int Extends; - - /** Index into Field table. - */ - public int FieldList; - - /** Index into Method table. */ - public int MethodList; - - - public TypeDef(PEFile file, int rows) { - super(file, ID, rows); - this.newMapping = true; - } - - public String getFullName() { - String namespace = file.getString(Namespace); - return namespace.length() == 0 ? file.getString(Name) - : namespace + "." + file.getString(Name); - } - - protected void populateFields() { - Flags = readInt(); - Name = readStringIndex(); - Namespace = readStringIndex(); - Extends = readTableSetIndex(_TypeDefOrRef); - FieldList = readTableIndex(FieldDef.ID); - MethodList = readTableIndex(MethodDef.ID); - } - - protected int getRowSize() { - return 4 + 2*file.getStringIndexSize() + - file.getTableSetIndexSize(_TypeDefOrRef) + - file.getTableIndexSize(FieldDef.ID) + - file.getTableIndexSize(MethodDef.ID); - } - - } // class TypeDef - - //########################################################################## - // Table FieldTrans; ID=0x03; undocumented - - /** - * Undocumented table. Appears to be used for translating the Field entry - * in the TypeDef(0x02) table into the real entry in the Fields(0x06) table - */ - public static final class FieldTrans extends Table { - public static final int ID = 0x03; - - public int Field; - - public FieldTrans(PEFile file, int rows) { - super(file, ID, rows); - newMapping = true; - } - - protected void populateFields() { - Field = readTableIndex(FieldDef.ID); - } - - protected int getRowSize() { - return file.getTableIndexSize(FieldDef.ID); - } - - } - - //########################################################################## - // table Field; ID=0x04; p102, 21.15 - - public static final class FieldDef extends Table { - public static final int ID = 0x04; - - /** 2-byte bitmask of type FieldAttributes (22.1.5). */ - public int Flags; - - /** Index into #String. */ - public int Name; - - /** Index into #Blob. 
*/ - public int Signature; - - public FieldDef(PEFile file, int rows) { - super(file, ID, rows); - newMapping = true; - } - - protected void populateFields() { - Flags = readShort(); - Name = readStringIndex(); - Signature = readBlobIndex(); - } - - protected int getRowSize() { - return 2 + file.getStringIndexSize() + file.getBlobIndexSize(); - } - - public String getName() { return file.getString(Name); } - - public Sig getSignature() { return file.getSignature(Signature); } - - } //class FieldDef - - //########################################################################## - // Table MethodTrans; ID=0x05; undocumented - - /** - * Undocumented table. Appears to be used for translating the Method entry - * in the TypeDef(0x02) table into the real entry in the Methods(0x06) table - */ - public static final class MethodTrans extends Table { - public static final int ID = 0x05; - - public int Method; - - public MethodTrans(PEFile file, int rows) { - super(file, ID, rows); - newMapping = true; - } - - protected void populateFields() { - Method = readTableIndex(FieldDef.ID); - } - - protected int getRowSize() { - return file.getTableIndexSize(MethodDef.ID); - } - - } - - //########################################################################## - // table MethodDef; ID=0x06; p110, 21.24 - - public static final class MethodDef extends Table { - public static final int ID = 0x06; - - /** 4-byte constant. */ - public int RVA; - - /** 2-byte bitmask of type MethodImplAttributes (22.1.10). */ - public int ImplFlags; - - /** 2-byte bitmask of type MethodAttributes (22.1.9). */ - public int Flags; - - /** Index into #String. */ - public int Name; - - /** Index into #Blob. */ - public int Signature; - - /** Index into Param Table. */ - public int ParamList; - - public MethodDef(PEFile file, int rows) { - super(file, ID, rows); - newMapping = true; - } - - protected void populateFields() { - RVA = readInt(); - ImplFlags = readShort(); - Flags = readShort(); - Name = readStringIndex(); - Signature = readBlobIndex(); - ParamList = readTableIndex(ParamDef.ID); - } - - protected int getRowSize() { - return 8 + file.getStringIndexSize() + file.getBlobIndexSize() + - file.getTableIndexSize(ParamDef.ID); - } - - public String getName() { return file.getString(Name); } - - public Sig getSignature() { return file.getSignature(Signature); } - } // class Method - - //########################################################################## - // table Param; ID=0x08; p116, 21.30 - - public static final class ParamDef extends Table { - public static final int ID = 0x08; - - /** 2-byte bitmask of type ParamAttributes (22.1.12). */ - public int Flags; - - /** 2-byte constant. */ - public int Sequence; - - /** Index into #String. */ - public int Name; - - public ParamDef(PEFile file, int rows) { - super(file, ID, rows); - newMapping = true; - } - - protected void populateFields() { - Flags = readShort(); - Sequence = readShort(); - Name = readStringIndex(); - } - - protected int getRowSize() { return 4 + file.getStringIndexSize(); } - - public String getName() { return file.getString(Name); } - - } // class Param - - //########################################################################## - // table InterfaceImpl, ID=0x09; p107, 21.21 - - public static final class InterfaceImpl extends Table { - public static final int ID = 0x09; - - /** Index into TypeDef table. */ - public int Class; - - /** Index into TypeDefOrRef table set. 
*/ - public int Interface; - - public InterfaceImpl(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Class = readTableIndex(TypeDef.ID); - Interface = readTableSetIndex(_TypeDefOrRef); - } - - protected int getRowSize() { - return file.getTableIndexSize(TypeDef.ID) + - file.getTableSetIndexSize(_TypeDefOrRef); - } - - /** finds the index of the first entry - * @param targetIndex - index in the TypeDef table - the type to look for - * @return the index of the first interface for the given type; - * 0 if the type doesn't implement any interfaces - */ - - // binary search implementation -// public int findType(int targetIndex) { -// int l = 1, h = rows; -// int classIndex; -// while (l <= h) { -// int mid = (l + h) / 2; -// seekRow(mid); -// classIndex = readTableIndex(TypeDef.ID); -// if (targetIndex <= classIndex) h = mid - 1; -// else l = mid + 1; -// } -// return (targetIndex == classIndex) ? h : 0; -// } - - //linear search implementation - public int findType(int targetIndex) { - for (int i = 1; i <= rows; i++) { - seekRow(i); - if (targetIndex == readTableIndex(TypeDef.ID)) - return i; - } - return 0; - } - - } // class InterfaceImpl - - //########################################################################## - // table MemberRef; ID=0x0a; p109, 21.23 - - public static final class MemberRef extends Table { - public static final int ID = 0x0a; - - /** Index into MemberRefParent table set. */ - public int Class; - - /** Index into #String. */ - public int Name; - - /** Index into #Blob. */ - public int Signature; - - public MemberRef(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Class = readTableSetIndex(_MemberRefParent); - Name = readStringIndex(); - Signature = readBlobIndex(); - } - - protected int getRowSize() { - return file.getTableSetIndexSize(_MemberRefParent) + - file.getStringIndexSize() + file.getBlobIndexSize(); - } - - public String getName() { - return file.getString(Name); - } - - public Sig getSignature() { - return file.getSignature(Signature); - } - - } // class MemberRef - - //########################################################################## - // table Constant; ID=0x0b; p95, 21.9 - - public static final class Constant extends Table { - public static final int ID = 0x0b; - - /** 1-byte constant followed by 1-byte padding 0 (see 22.1.15). */ - public int Type; - - /** Index into HasConst table set. */ - public int Parent; - - /** Index into #Blob. */ - public int Value; - - public Constant(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Type = readShort(); - Parent = readTableSetIndex(_HasConstant); - Value = readBlobIndex(); - } - - protected int getRowSize() { - return 2 + file.getTableSetIndexSize(_HasConstant) + - file.getBlobIndexSize(); - } - - public Object getValue() { - if (Type == Signature.ELEMENT_TYPE_CLASS) - return null; - return file.Blob.getConstant(Type, Value); - } - - - } // class Constant - - //########################################################################## - // table CustomAttribute; ID=0x0c; p95, 21.10 - - public static final class CustomAttribute extends Table { - public static final int ID = 0x0c; - - /** Index into any metadata table, except the CustomAttribute itself; - * more precisely - index into HasCustomAttribute table set. - */ - public int Parent; - - /** Index into the CustomAttributeType table set. */ - public int Type; - - /** Index into #Blob. 
*/ - public int Value; - - public CustomAttribute(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Parent = readTableSetIndex(_HasCustomAttribute); - Type = readTableSetIndex(_CustomAttributeType); - Value = readBlobIndex(); - } - - protected int getRowSize() { - return file.getTableSetIndexSize(_HasCustomAttribute) + - file.getTableSetIndexSize(_CustomAttributeType) + - file.getBlobIndexSize(); - } - - public byte[] getValue() { - return Value == 0 ? null : file.getBlob(Value); - } - } // class CustomAttribute - - //########################################################################## - // table FieldMarshal; ID=0x0d; p105, 21.17 - - public static final class FieldMarshal extends Table { - public static final int ID = 0x0d; - - /** Index into HasFieldMarshal table set. */ - public int Parent; - - /** Index into #Blob. */ - public int NativeType; - - public FieldMarshal(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Parent = readTableSetIndex(_HasFieldMarshal); - NativeType = readBlobIndex(); - } - - protected int getRowSize() { - return file.getTableSetIndexSize(_HasFieldMarshal) + - file.getBlobIndexSize(); - } - - } // class FieldMarshal - - //########################################################################## - // table DeclSecurity; ID=0x0e; p97, 21.11 - - public static final class DeclSecurity extends Table { - public static final int ID = 0x0e; - - /** 2-byte value. */ - public int Action; - - /** Index into HasDeclSecurity table set. */ - public int Parent; - - /** Index into #Blob. */ - public int PermissionSet; - - public DeclSecurity(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Action = readShort(); - Parent = readTableSetIndex(_HasDeclSecurity); - PermissionSet = readBlobIndex(); - } - - protected int getRowSize() { - return 2 + file.getTableSetIndexSize(_HasDeclSecurity) + - file.getBlobIndexSize(); - } - - } // class DeclSecurity - - //########################################################################## - // table ClassLayout; ID=0x0f, p92, 21.8 - - public static final class ClassLayout extends Table { - public static final int ID = 0x0f; - - /** 2-byte constant. */ - public int PackingSize; - - /** 4-byte constant. */ - public int ClassSize; - - /** Index into TypeDef table. */ - public int Parent; - - public ClassLayout(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - PackingSize = readShort(); - ClassSize = readInt(); - Parent = readTableIndex(TypeDef.ID); - } - - protected int getRowSize() { - return 6 + file.getTableIndexSize(TypeDef.ID); - } - - } // class ClassLayout - - //########################################################################## - // table FieldLayout; ID=0x10; p104, 21.16 - - public static final class FieldLayout extends Table { - public static final int ID = 0x10; - - /** 4-byte constant. */ - public int Offset; - - /** Index into the Field table. 
*/ - public int Field; - - public FieldLayout(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Offset = readInt(); - Field = readTableIndex(FieldDef.ID); - } - - protected int getRowSize() { - return 4 + file.getTableIndexSize(FieldDef.ID); - } - - } // class FieldLayout - - //########################################################################## - // table StandAloneSig; ID=0x11; p119, 21.33 - - public static final class StandAloneSig extends Table { - public static final int ID = 0x11; - - /** Index into #Blob. */ - public int Signature; - - public StandAloneSig(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Signature = readBlobIndex(); - } - - protected int getRowSize() { return file.getBlobIndexSize(); } - - } // class StandAloneSig - - //########################################################################## - // table EventMap; ID=0x12; p99, 21.12 - - public static final class EventMap extends Table { - public static final int ID = 0x12; - - /** Index into the TypeDef table. */ - public int Parent; - - /** Index into the Event table. */ - public int EventList; - - public EventMap(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Parent = readTableIndex(TypeDef.ID); - EventList = readTableIndex(EventDef.ID); - } - - protected int getRowSize() { - return file.getTableIndexSize(TypeDef.ID) + - file.getTableIndexSize(EventDef.ID); - } - - } // class EventMap - - //########################################################################## - // table Event; ID=0x14; p99, 21.13 - - public static final class EventDef extends Table { - public static final int ID = 0x14; - - /** 2-byte bitmask of type EventAttribute (22.1.4). */ - public int EventFlags; - - /** Index into #String. */ - public int Name; - - /** Index into TypeDefOrRef table set. [This corresponds to the Type - * of the event; it is not the Type that owns the event] - */ - public int EventType; - - public EventDef(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - EventFlags = readShort(); - Name = readStringIndex(); - EventType = readTableSetIndex(_TypeDefOrRef); - } - - protected int getRowSize() { - return 2 + file.getStringIndexSize() + - file.getTableSetIndexSize(_TypeDefOrRef); - } - - public String getName() { return file.getString(Name); } - - } // class EventDef - - //########################################################################## - // table PropertyMap; ID=0x15; p119, 21.32 - - public static final class PropertyMap extends Table { - public static final int ID = 0x15; - - /** Index into the TypeDef table. */ - public int Parent; - - /** Index into the Property table. */ - public int PropertyList; - - public PropertyMap(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Parent = readTableIndex(TypeDef.ID); - PropertyList = readTableIndex(PropertyDef.ID); - } - - protected int getRowSize() { - return file.getTableIndexSize(TypeDef.ID) + - file.getTableIndexSize(PropertyDef.ID); - } - - } // class PropertyMap - - //########################################################################## - // table Property; ID=0x17; p117, 21.31 - - public static final class PropertyDef extends Table { - public static final int ID = 0x17; - - /** 2-byte bitmask of type PropertyAttributes (22.1.13). */ - public int Flags; - - /** Index into #String. */ - public int Name; - - /** Index into #Blob. 
(Indexes the signature in the #Blob) */ - public int Type; - - public PropertyDef(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Flags = readShort(); - Name = readStringIndex(); - Type = readBlobIndex(); - } - - protected int getRowSize() { - return 2 + file.getStringIndexSize() + - file.getBlobIndexSize(); - } - - public String getName() { return file.getString(Name); } - - public Sig getSignature() { return file.getSignature(Type); } - - } // class PropertyDef - - //########################################################################## - // table MethodSemantics; ID=0x18; p114, 21.26 - - public static final class MethodSemantics extends Table { - public static final int ID = 0x18; - - /** 2-byte bitmaks of type MethodSemanticsAttribute (22.1.11). */ - public int Semantics; - - /** Index into the Method table. */ - public int Method; - - /** Index into Event or Property table (HasSemantics table set). */ - public int Association; - - public MethodSemantics(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Semantics = readShort(); - Method = readTableIndex(MethodDef.ID); - Association = readTableSetIndex(_HasSemantics); - } - - protected int getRowSize() { - return 2 + file.getTableIndexSize(MethodDef.ID) + - file.getTableSetIndexSize(_HasSemantics); - } - - public boolean isGetter() { return (Semantics & Getter) != 0; } - public boolean isSetter() { return (Semantics & Setter) != 0; } - public boolean isOther() { return (Semantics & Other) != 0; } - public boolean isAddOn() { return (Semantics & AddOn) != 0; } - public boolean isRemoveOn() { return (Semantics & RemoveOn) != 0; } - public boolean isFire() { return (Semantics & Fire) != 0; } - - private static final short Setter = (short)0x0001; - private static final short Getter = (short)0x0002; - private static final short Other = (short)0x0004; - private static final short AddOn = (short)0x0008; - private static final short RemoveOn = (short)0x0010; - private static final short Fire = (short)0x0020; - - } // class MethodSemantics - - - //########################################################################## - // table MethodImpl; ID=0x19; p113, 21.25 - - public static final class MethodImpl extends Table { - public static final int ID = 0x19; - - /** Index into the TypeDef table. */ - public int Class; - - /** Index into MethodDefOrRef table set. */ - public int MethodBody; - - /** Index into MethodDefOrRef table set. */ - public int MethodDeclaration; - - public MethodImpl(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Class = readTableIndex(TypeDef.ID); - MethodBody = readTableSetIndex(_MethodDefOrRef); - MethodDeclaration = readTableSetIndex(_MethodDefOrRef); - } - - protected int getRowSize() { - return file.getTableIndexSize(TypeDef.ID) + - 2 * file.getTableSetIndexSize(_MethodDefOrRef); - } - - } // class MethodImpl - - //########################################################################## - // table ModuleRef; ID=0x1a; p116, 21.28 - - public static final class ModuleRef extends Table { - public static final int ID = 0x1a; - - /** Index into #String. 
*/ - public int Name; - - public ModuleRef(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Name = readStringIndex(); - } - - protected int getRowSize() { return file.getStringIndexSize(); } - - public String getName() { return file.getString(Name); } - - } // class ModuleRef - - //########################################################################## - // table TypeSpec; ID=0x1b; p126, 21.36 - - public static final class TypeSpec extends Table { - public static final int ID = 0x1b; - - /** Index into #Blob, where the blob is formatted - * as specified in 22.2.15 - */ - public int Signature; - - public TypeSpec(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Signature = readBlobIndex(); - } - - protected int getRowSize() { return file.getBlobIndexSize(); } - - public Sig getSignature() { return file.getSignature(Signature); } - } // class TypeSpec - - //########################################################################## - // table ImplMap; ID=0x1c; p107, 21.20 - - public static final class ImplMap extends Table { - public static final int ID = 0x1c; - - /** 2-byte bitmask of type PInvokeAttributes (22.1.7). */ - public int MappingFlags; - - /** Index into MemberForwarded table set. */ - public int MemberForwarded; - - /** Index into #String. */ - public int ImportName; - - /** Index into the ModuleRef table. */ - public int ImportScope; - - public ImplMap(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - MappingFlags = readShort(); - MemberForwarded = readTableSetIndex(_MemberForwarded); - ImportName = readStringIndex(); - ImportScope = readTableIndex(ModuleRef.ID); - } - - protected int getRowSize() { - return 2 + file.getTableSetIndexSize(_MemberForwarded) + - file.getStringIndexSize() + - file.getTableIndexSize(ModuleRef.ID); - } - - } // class ImplMap - - //########################################################################## - // table FieldRVA; ID=0x1d; p106, 21.18 - - public static final class FieldRVA extends Table { - public static final int ID = 0x1d; - - /** 4-byte constant. */ - public int RVA; - - /** Index into the Field table. 
*/ - public int Field; - - public FieldRVA(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - RVA = readInt(); - Field = readTableIndex(Table.FieldDef.ID); - } - - protected int getRowSize() { - return 4 + file.getTableIndexSize(FieldDef.ID); - } - - } - - //########################################################################## - // table Assembly; ID=0x20; p90, 21.2 - - public static final class AssemblyDef extends Table { - public static final int ID = 0x20; - - /** 4-byte constatnt of type AssemblyHashAlgorithm, clause 22.1.1 */ - public int HashAlgId; - - /** 2-byte constant */ - public int MajorVersion; - - /** 2-byte constant */ - public int MinorVersion; - - /** 2-byte constant */ - public int BuildNumber; - - /** 2-byte constant */ - public int RevisionNumber; - - /** 4-byte constant */ - public int Flags; - - /** index into #Blob */ - public int PublicKey; - - /** index into #String */ - public int Name; - - /** index into #String */ - public int Culture; - - public AssemblyDef(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - HashAlgId = readInt(); - MajorVersion = readShort(); - MinorVersion = readShort(); - BuildNumber = readShort(); - RevisionNumber = readShort(); - Flags = readInt(); - PublicKey = readBlobIndex(); - Name = readStringIndex(); - Culture = readStringIndex(); - } - - protected int getRowSize() { - return 16 + file.getBlobIndexSize() + 2*file.getStringIndexSize(); - } - - } // class AssemblyDef - - //########################################################################## - // table AssemblyProcessor; ID=0x21; p91, 21.4 - - public static final class AssemblyProcessor extends Table { - public static final int ID = 0x21; - - /** 4-byte constant. */ - public int Processor; - - public AssemblyProcessor(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Processor = readInt(); - } - - protected int getRowSize() { return 4; } - - } - - //########################################################################## - // table AssemblyOS; ID = 0x22; p90, 21.3 - - public static final class AssemblyOS extends Table { - public static final int ID = 0x22; - - /** 4-byte constant. */ - public int OSPlatformID; - - /** 4-byte constant. */ - public int OSMajorVersion; - - /** 4-byte constant. */ - public int OSMinorVersion; - - public AssemblyOS(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - OSPlatformID = readInt(); - OSMajorVersion = readInt(); - OSMinorVersion = readInt(); - } - - protected int getRowSize() { return 12; } - - } - - //########################################################################## - // table AssemblyRef; ID = 0x23; pp91, 21.5 - - public static final class AssemblyRef extends Table { - public static final int ID = 0x23; - - /** 2-byte constant. */ - public int MajorVersion; - - /** 2-byte constant. */ - public int MinorVersion; - - /** 2-byte constant. */ - public int BuildNumber; - - /** 2-byte constant. */ - public int RevisionNumber; - - /** 4-byte bitmask of type AssemblyFlags (22.1.2). */ - public int Flags; - - /** index into #Blob. */ - public int PublicKeyOrToken; - - /** index into #String. */ - public int Name; - - /** index into #String. */ - public int Culture; - - /** index into #Blob. 
*/ - public int HashValue; - - public AssemblyRef(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - MajorVersion = readShort(); - MinorVersion = readShort(); - BuildNumber = readShort(); - RevisionNumber = readShort(); - Flags = readInt(); - PublicKeyOrToken = readBlobIndex(); - Name = readStringIndex(); - Culture = readStringIndex(); - HashValue = readBlobIndex(); - } - - protected int getRowSize() { - return 12 + 2*file.getBlobIndexSize() + 2*file.getStringIndexSize(); - } - - public String getName() { return file.getString(Name); } - } - - //########################################################################## - // table AssemblyRefProcessor; ID=0x24; p92, 21.7 - - public static final class AssemblyRefProcessor extends Table { - public static final int ID = 0x24; - - /** 4-byte constant. */ - public int Processor; - - /** Index into the AssemblyRef table. */ - public int AssemblyRef; - - public AssemblyRefProcessor(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Processor = readInt(); - AssemblyRef = readTableIndex(Table.AssemblyRef.ID); - } - - protected int getRowSize() { - return 4 + file.getTableIndexSize(Table.AssemblyRef.ID); - } - - } // class AssemblyRefProcessor - - //########################################################################## - // table AssemblyRefOS; ID=0x25; p92, 21.6 - - public static final class AssemblyRefOS extends Table { - public static final int ID = 0x25; - - /** 4-byte constant. */ - public int OSPlatformId; - - /** 4-byte constant. */ - public int OSMajorVersion; - - /** 4-byte constant. */ - public int OSMinorVersion; - - /** Index into the AssemblyRef table. */ - public int AssemblyRef; - - public AssemblyRefOS(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - OSPlatformId = readInt(); - OSMajorVersion = readInt(); - OSMinorVersion = readInt(); - AssemblyRef = readTableIndex(Table.AssemblyRef.ID); - } - - protected int getRowSize() { - return 12 + file.getTableIndexSize(Table.AssemblyRef.ID); - } - - } // class AssemblyRefOS - - //########################################################################## - // table File; ID=0x26; p106, 21.19 - - public static final class FileDef extends Table { - public static final int ID = 0x26; - - /** 4-byte bitmask of type FileAttributes (22.1.6). */ - public int Flags; - - /** Index into #String. */ - public int Name; - - /** Index into #Blob. */ - public int HashValue; - - public FileDef(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Flags = readInt(); - Name = readStringIndex(); - HashValue = readBlobIndex(); - } - - protected int getRowSize() { - return 4 + file.getStringIndexSize() + file.getBlobIndexSize(); - } - - public String getName() { - return file.getString(Name); - } - - } // class FileDef - - //########################################################################## - // table ExportedType; ID=0x27; p100, 21.14 - - public static final class ExportedType extends Table { - public static final int ID = 0x27; - - /** 4-byte bitmask of type TypeAttribute (22.1.6). */ - public int Flags; - - /** 4-byte index into a TypeDef table of - * another module in this assembly. - */ - public int TypeDefId; - - /** Index into #String. */ - public int TypeName; - - /** Index into #Stream. 
*/ - public int TypeNamespace; - - /** Index into one of two tables as follows: - * - 'File' table, where that entry says which module - * in the current assembly holds the TypeDef - * - 'ExportedType' table, where that entry is - * the enclosing Type of the current nested Type - */ - public int Implementation; - - public ExportedType(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Flags = readInt(); - TypeDefId = readInt(); - TypeName = readStringIndex(); - TypeNamespace = readStringIndex(); - Implementation = readTableSetIndex(_Implementation); - } - - protected int getRowSize() { - return 8 + 2*file.getStringIndexSize() + - file.getTableSetIndexSize(_Implementation); - } - - public String getFullName() { - String namespace = file.getString(TypeNamespace); - return namespace.length() == 0 ? file.getString(TypeName) - : namespace + "." + file.getString(TypeName); - } - - } // class ExportedType - - //########################################################################## - // table ManifestResource; ID=0x28; p108, 21.22 - - public static final class ManifestResource extends Table { - public static final int ID = 0x28; - - /** 4-byte constant. */ - public int Offset; - - /** 4-byte bitmask of type ManifestResourceAttributes (22.1.8). */ - public int Flags; - - /** Index into #String. */ - public int Name; - - /** Index into the Implementation table set. */ - public int Implementation; - - public ManifestResource(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - Offset = readInt(); - Flags = readInt(); - Name = readStringIndex(); - Implementation = readTableSetIndex(_Implementation); - } - - protected int getRowSize() { - return 8 + file.getStringIndexSize() + - file.getTableSetIndexSize(_Implementation); - } - - } // class ManifestResource - - //########################################################################## - // table NestedClass; ID=0x29; p116, 21.29 - - public static final class NestedClass extends Table { - public static final int ID = 0x29; - - /** Index into the TypeDef table. */ - public int NestedClass; - - /** Index into the TypeDef table. 
*/ - public int EnclosingClass; - - public NestedClass(PEFile file, int rows) { super(file, ID, rows); } - - protected void populateFields() { - NestedClass = readTableIndex(TypeDef.ID); - EnclosingClass = readTableIndex(TypeDef.ID); - } - - protected int getRowSize() { - return 2 * file.getTableIndexSize(TypeDef.ID); - } - - } // class NestedClass - - //########################################################################## - // table GenericParam; ID=0x2a; p137, 22.20 - - public static final class GenericParam extends Table { - public static final int ID = 0x2a; - - public int Number; - public int Flags; - public int Owner; // a TypeOrMethodDef (Sec 24.2.6) coded index - public int Name; // a non-null index into the String heap - - private java.util.Map /*>*/ GenericParamIdxesForMethodDefIdx = - new java.util.HashMap(); - private java.util.Map /*>*/ GenericParamIdxesForTypeDefIdx = - new java.util.HashMap(); - - private void addToMap(int key, int value, java.util.Map IdxesForIdx) { - java.util.Set /**/ bucket = (java.util.Set)IdxesForIdx.get(Integer.valueOf(key)); - if(bucket == null) { - bucket = new java.util.HashSet(); - IdxesForIdx.put(Integer.valueOf(key), bucket); - } - bucket.add(Integer.valueOf(value)); - } - - /** Indexes of rows in the GenericParam table representing type parameters defined by the type given by - * its row index TypeDefIdx (in the TypeDef table). - * No need to position the current record before invoking this method. */ - public int[] getTVarIdxes(int TypeDefIdx) { - if(!mapsPopulated) { - initMaps(); - } - java.util.Set bucket = (java.util.Set)GenericParamIdxesForTypeDefIdx.get(Integer.valueOf(TypeDefIdx)); - if(bucket == null) { - bucket = java.util.Collections.EMPTY_SET; - } - int[] res = new int[bucket.size()]; - java.util.Iterator /**/ it = bucket.iterator(); - for(int i = 0; i < bucket.size(); i++) { - res[i] = ((Integer)it.next()).intValue(); - } - return res; - } - - /** Indexes of rows in the GenericParam table representing type parameters defined by the method given by - * its row index MethodDefIdx (in the MethodDef table) - * No need to position the current record before invoking this method. 
*/ - public int[] getMVarIdxes(int MethodDefIdx) { - if(!mapsPopulated) { - initMaps(); - } - java.util.Set bucket = (java.util.Set)GenericParamIdxesForMethodDefIdx.get(Integer.valueOf(MethodDefIdx)); - if(bucket == null) { - bucket = java.util.Collections.EMPTY_SET; - } - int[] res = new int[bucket.size()]; - java.util.Iterator /**/ it = bucket.iterator(); - for(int i = 0; i < bucket.size(); i++) { - res[i] = ((Integer)it.next()).intValue(); - } - return res; - } - - private boolean mapsPopulated = false; - - private void initMaps() { - mapsPopulated = true; - for (int currentParamRow = 1; currentParamRow <= rows; currentParamRow++) { - int currentOwner = file.GenericParam(currentParamRow).Owner; - int targetTableId = Table.getTableId(Table._TypeOrMethodDef, currentOwner); - int targetRow = currentOwner >> Table.NoBits[Table._TypeOrMethodDef]; - if(targetTableId == TypeDef.ID){ - addToMap(targetRow, currentParamRow, GenericParamIdxesForTypeDefIdx); - } else if(targetTableId == MethodDef.ID) { - addToMap(targetRow, currentParamRow, GenericParamIdxesForMethodDefIdx); - } else { - throw new RuntimeException(); - } - } - } - - public GenericParam(PEFile file, int rows) { - super(file, ID, rows); - this.newMapping = true; - } - - protected void populateFields() { - Number = readShort(); - Flags = readShort(); - Owner = readTableSetIndex(_TypeOrMethodDef); - Name = readStringIndex(); - } - - /** This method assumes populateFields() has been just called to set Flags for the current record */ - public boolean isInvariant() { - /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes tributes] */ - return (Flags & 0x0003) == 0; - } - - /** This method assumes populateFields() has been just called to set Flags for the current record */ - public boolean isCovariant() { - /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes tributes] */ - return (Flags & 0x0003) == 1; - } - - /** This method assumes populateFields() has been just called to set Flags for the current record */ - public boolean isContravariant() { - /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes tributes] */ - return (Flags & 0x0003) == 2; - } - - /** This method assumes populateFields() has been just called to set Flags for the current record */ - public boolean isReferenceType() { - /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes tributes] */ - return (Flags & 0x001C) == 4; - } - - /** This method assumes populateFields() has been just called to set Flags for the current record */ - public boolean isValueType() { - /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes tributes] */ - return (Flags & 0x001C) == 8; - } - - /** This method assumes populateFields() has been just called to set Flags for the current record */ - public boolean hasDefaultConstructor() { - /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes tributes] */ - return (Flags & 0x001C) == 0x0010; - } - - protected int getRowSize() { - return 2 + 2 + file.getTableSetIndexSize(_TypeOrMethodDef) + file.getStringIndexSize(); - /* Columns: - Number (2 bytes), - Flags (2 bytes), - Owner (coded token of type TypeOrMethodDef), - Name (offset in the #Strings stream). 
- */ - } - - public String getName() { - return file.getString(Name); - } - - } // class GenericParam - - - //########################################################################## - // table GenericParamConstraint; ID=0x2c; p139, 22.20 - - public static final class GenericParamConstraint extends Table { - public static final int ID = 0x2c; - - public int Owner; // an index into the GenericParam table - public int Constraint; // a TypeDefOrRef (Sec 24.2.6) coded index - - public GenericParamConstraint(PEFile file, int rows) { - super(file, ID, rows); - this.newMapping = true; - } - - protected void populateFields() { - Owner = readTableIndex(GenericParam.ID); - Constraint = readTableSetIndex(_TypeDefOrRef); - } - - protected int getRowSize() { - return file.getTableIndexSize(GenericParam.ID) + file.getTableSetIndexSize(_TypeDefOrRef); - /* Columns: - Owner (RID in the GenericParam table), - Constraint (coded token of type TypeDefOrRef). - */ - } - - private boolean mapPopulated = false; - - /** Indexes of rows (in the TypeDef, TypeRef, or TypeSpec tables) denoting the base class (if any) - * and interfaces (if any) that the generic parameter (of TVar or MVar kind) should support, where - * that generic parameter is represented by its index into the GenericParam table. */ - public int[] getTypeDefOrRefIdxes(int genParamIdx) { - if(!mapPopulated) { - initMap(); - } - java.util.Set bucket = (java.util.Set)TypeDefOrRefIdxesForGenParamIdx.get(Integer.valueOf(genParamIdx)); - if(bucket == null) { - bucket = java.util.Collections.EMPTY_SET; - } - int[] res = new int[bucket.size()]; - java.util.Iterator /**/ it = bucket.iterator(); - for(int i = 0; i < bucket.size(); i++) { - res[i] = ((Integer)it.next()).intValue(); - } - return res; - } - - - private void initMap() { - mapPopulated = true; - for (int currentConstraintRow = 1; currentConstraintRow <= rows; currentConstraintRow++) { - int targetGenericParam = file.GenericParamConstraint(currentConstraintRow).Owner; - int value = file.GenericParamConstraint.Constraint; - addToMap(targetGenericParam, value); - } - } - - private java.util.Map /*>*/ TypeDefOrRefIdxesForGenParamIdx = - new java.util.HashMap(); - - private void addToMap(int key, int value) { - java.util.Set /**/ bucket = (java.util.Set)TypeDefOrRefIdxesForGenParamIdx.get(Integer.valueOf(key)); - if(bucket == null) { - bucket = new java.util.HashSet(); - TypeDefOrRefIdxesForGenParamIdx.put(Integer.valueOf(key), bucket); - } - bucket.add(Integer.valueOf(value)); - } - - } // class GenericParamConstraint - - //########################################################################## - // table MethodSpec; ID=0x2b; p149, in Sec. 22.29 of Partition II - - public static final class MethodSpec extends Table { - public static final int ID = 0x2b; - - /* an index into the MethodDef or MemberRef table, specifying which generic method this row is an instantiation of. - A MethodDefOrRef (Sec. 24.2.6) coded index */ - public int Method; - - /* an index into the Blob heap (Sec. 
23.2.15), holding the signature of this instantiation */ - public int Instantiation; - - public MethodSpec(PEFile file, int rows) { - super(file, ID, rows); - this.newMapping = true; - } - - protected void populateFields() { - Method = readTableSetIndex(_MethodDefOrRef); - Instantiation = readBlobIndex(); - } - - protected int getRowSize() { - return file.getTableSetIndexSize(_MethodDefOrRef) + file.getBlobIndexSize(); - } - - - } // class MethodSpec - //########################################################################## - -} // class Table diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 9a846179b9..e74dd0c043 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -684,9 +684,7 @@ trait Definitions extends api.StandardDefinitions { def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg) def seqType(arg: Type) = appliedType(SeqClass, arg) - def ClassType(arg: Type) = - if (phase.erasedTypes || forMSIL) ClassClass.tpe - else appliedType(ClassClass, arg) + def ClassType(arg: Type) = if (phase.erasedTypes) ClassClass.tpe else appliedType(ClassClass, arg) def EnumType(sym: Symbol) = // given (in java): "class A { enum E { VAL1 } }" @@ -704,34 +702,6 @@ trait Definitions extends api.StandardDefinitions { def classExistentialType(clazz: Symbol): Type = newExistentialType(clazz.typeParams, clazz.tpe_*) - // - // .NET backend - // - - lazy val ComparatorClass = getRequiredClass("scala.runtime.Comparator") - // System.MulticastDelegate - lazy val DelegateClass: ClassSymbol = getClassByName(sn.Delegate) - var Delegate_scalaCallers: List[Symbol] = List() // Syncnote: No protection necessary yet as only for .NET where reflection is not supported. - // Symbol -> (Symbol, Type): scalaCaller -> (scalaMethodSym, DelegateType) - // var Delegate_scalaCallerInfos: HashMap[Symbol, (Symbol, Type)] = _ - lazy val Delegate_scalaCallerTargets: mutable.HashMap[Symbol, Symbol] = mutable.HashMap() - - def isCorrespondingDelegate(delegateType: Type, functionType: Type): Boolean = { - isSubType(delegateType, DelegateClass.tpe) && - (delegateType.member(nme.apply).tpe match { - case MethodType(delegateParams, delegateReturn) => - isFunctionType(functionType) && - (functionType.normalize match { - case TypeRef(_, _, args) => - (delegateParams.map(pt => { - if (pt.tpe == AnyClass.tpe) definitions.ObjectClass.tpe else pt}) - ::: List(delegateReturn)) == args - case _ => false - }) - case _ => false - }) - } - // members of class scala.Any lazy val Any_== = enterNewMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL) lazy val Any_!= = enterNewMethod(AnyClass, nme.NE, anyparam, booltype, FINAL) @@ -1164,27 +1134,5 @@ trait Definitions extends api.StandardDefinitions { val _ = symbolsNotPresentInBytecode isInitialized = true } //init - - var nbScalaCallers: Int = 0 - def newScalaCaller(delegateType: Type): MethodSymbol = { - assert(forMSIL, "scalaCallers can only be created if target is .NET") - // object: reference to object on which to call (scala-)method - val paramTypes: List[Type] = List(ObjectClass.tpe) - val name = newTermName("$scalaCaller$$" + nbScalaCallers) - // tparam => resultType, which is the resultType of PolyType, i.e. 
the result type after applying the - // type parameter =-> a MethodType in this case - // TODO: set type bounds manually (-> MulticastDelegate), see newTypeParam - val newCaller = enterNewMethod(DelegateClass, name, paramTypes, delegateType, FINAL | STATIC) - // val newCaller = newPolyMethod(DelegateClass, name, - // tparam => MethodType(paramTypes, tparam.typeConstructor)) setFlag (FINAL | STATIC) - Delegate_scalaCallers = Delegate_scalaCallers ::: List(newCaller) - nbScalaCallers += 1 - newCaller - } - - def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol) { - assert(Delegate_scalaCallers contains scalaCaller) - Delegate_scalaCallerTargets += (scalaCaller -> methSym) - } } } diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index a5810c9c83..7dd4a14a46 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -586,7 +586,7 @@ trait StdNames { val canEqual_ : NameType = "canEqual" val checkInitialized: NameType = "checkInitialized" val classOf: NameType = "classOf" - val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure + val clone_ : NameType = "clone" val conforms: NameType = "conforms" val copy: NameType = "copy" val currentMirror: NameType = "currentMirror" @@ -600,20 +600,20 @@ trait StdNames { val equalsNumChar : NameType = "equalsNumChar" val equalsNumNum : NameType = "equalsNumNum" val equalsNumObject : NameType = "equalsNumObject" - val equals_ : NameType = if (forMSIL) "Equals" else "equals" + val equals_ : NameType = "equals" val error: NameType = "error" val ex: NameType = "ex" val experimental: NameType = "experimental" val f: NameType = "f" val false_ : NameType = "false" val filter: NameType = "filter" - val finalize_ : NameType = if (forMSIL) "Finalize" else "finalize" + val finalize_ : NameType = "finalize" val find_ : NameType = "find" val flagsFromBits : NameType = "flagsFromBits" val flatMap: NameType = "flatMap" val foreach: NameType = "foreach" val get: NameType = "get" - val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode" + val hashCode_ : NameType = "hashCode" val hash_ : NameType = "hash" val implicitly: NameType = "implicitly" val in: NameType = "in" @@ -677,7 +677,7 @@ trait StdNames { val thisPrefix : NameType = "thisPrefix" val toArray: NameType = "toArray" val toObjectArray : NameType = "toObjectArray" - val toString_ : NameType = if (forMSIL) "ToString" else "toString" + val toString_ : NameType = "toString" val toTypeConstructor: NameType = "toTypeConstructor" val tpe : NameType = "tpe" val tree : NameType = "tree" @@ -931,33 +931,6 @@ trait StdNames { @deprecated("Use a method in tpnme", "2.10.0") def interfaceName(implname: Name): TypeName = tpnme.interfaceName(implname) } - abstract class SymbolNames { - protected val stringToTermName = null - protected val stringToTypeName = null - protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s) - - val BoxedBoolean : TypeName - val BoxedCharacter : TypeName - val BoxedNumber : TypeName - val Delegate : TypeName - val IOOBException : TypeName // IndexOutOfBoundsException - val InvTargetException : TypeName // InvocationTargetException - val MethodAsObject : TypeName - val NPException : TypeName // NullPointerException - val Object : TypeName - val Throwable : TypeName - val ValueType : TypeName - - val GetCause : TermName - val GetClass : TermName - val GetClassLoader : TermName - 
val GetMethod : TermName - val Invoke : TermName - val JavaLang : TermName - - val Boxed: immutable.Map[TypeName, TypeName] - } - class JavaKeywords { private val kw = new KeywordSetBuilder @@ -1015,7 +988,11 @@ trait StdNames { final val keywords = kw.result } - private abstract class JavaNames extends SymbolNames { + sealed abstract class SymbolNames { + protected val stringToTermName = null + protected val stringToTypeName = null + protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s) + final val BoxedBoolean: TypeName = "java.lang.Boolean" final val BoxedByte: TypeName = "java.lang.Byte" final val BoxedCharacter: TypeName = "java.lang.Character" @@ -1025,14 +1002,12 @@ trait StdNames { final val BoxedLong: TypeName = "java.lang.Long" final val BoxedNumber: TypeName = "java.lang.Number" final val BoxedShort: TypeName = "java.lang.Short" - final val Delegate: TypeName = tpnme.NO_NAME final val IOOBException: TypeName = "java.lang.IndexOutOfBoundsException" final val InvTargetException: TypeName = "java.lang.reflect.InvocationTargetException" final val MethodAsObject: TypeName = "java.lang.reflect.Method" final val NPException: TypeName = "java.lang.NullPointerException" final val Object: TypeName = "java.lang.Object" final val Throwable: TypeName = "java.lang.Throwable" - final val ValueType: TypeName = tpnme.NO_NAME final val GetCause: TermName = newTermName("getCause") final val GetClass: TermName = newTermName("getClass") @@ -1053,39 +1028,5 @@ trait StdNames { ) } - private class MSILNames extends SymbolNames { - final val BoxedBoolean: TypeName = "System.IConvertible" - final val BoxedCharacter: TypeName = "System.IConvertible" - final val BoxedNumber: TypeName = "System.IConvertible" - final val Delegate: TypeName = "System.MulticastDelegate" - final val IOOBException: TypeName = "System.IndexOutOfRangeException" - final val InvTargetException: TypeName = "System.Reflection.TargetInvocationException" - final val MethodAsObject: TypeName = "System.Reflection.MethodInfo" - final val NPException: TypeName = "System.NullReferenceException" - final val Object: TypeName = "System.Object" - final val Throwable: TypeName = "System.Exception" - final val ValueType: TypeName = "System.ValueType" - - final val GetCause: TermName = newTermName("InnerException") /* System.Reflection.TargetInvocationException.InnerException */ - final val GetClass: TermName = newTermName("GetType") - final lazy val GetClassLoader: TermName = throw new UnsupportedOperationException("Scala reflection is not supported on this platform"); - final val GetMethod: TermName = newTermName("GetMethod") - final val Invoke: TermName = newTermName("Invoke") - final val JavaLang: TermName = newTermName("System") - - val Boxed = immutable.Map[TypeName, TypeName]( - tpnme.Boolean -> "System.Boolean", - tpnme.Byte -> "System.SByte", // a scala.Byte is signed and a System.SByte too (unlike a System.Byte) - tpnme.Char -> "System.Char", - tpnme.Short -> "System.Int16", - tpnme.Int -> "System.Int32", - tpnme.Long -> "System.Int64", - tpnme.Float -> "System.Single", - tpnme.Double -> "System.Double" - ) - } - - lazy val sn: SymbolNames = - if (forMSIL) new MSILNames - else new JavaNames { } + lazy val sn: SymbolNames = new SymbolNames { } } diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 1298fc17ed..540338dca7 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ 
b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -117,12 +117,6 @@ abstract class SymbolTable extends macros.Universe @elidable(elidable.WARNING) def assertCorrectThread() {} - /** Are we compiling for Java SE? */ - // def forJVM: Boolean - - /** Are we compiling for .NET? */ - def forMSIL: Boolean = false - /** A last effort if symbol in a select . is not found. * This is overridden by the reflection compiler to make up a package * when it makes sense (i.e. is a package and is a term name). diff --git a/test/attic/files/cli/test1/Main.check.scalac b/test/attic/files/cli/test1/Main.check.scalac index 8465810d0b..5561cc1ead 100644 --- a/test/attic/files/cli/test1/Main.check.scalac +++ b/test/attic/files/cli/test1/Main.check.scalac @@ -1,7 +1,7 @@ scalac error: bad option: '-dd' scalac -help gives more information Usage: scalac -where possible options include: +where possible options include: -doc Generate documentation -g: Generate debugging info (none,source,line,vars,notc) -nowarn Generate no warnings @@ -15,10 +15,8 @@ where possible options include: -encoding Specify character encoding used by source files -windowtitle Specify window title of generated HTML documentation -documenttitle Specify document title of generated HTML documentation - -target: Specify which backend to use (jvm-1.5,msil) + -target: Specify which backend to use (jvm-1.5) -migrate Assist in migrating from Scala version 1.0 - -o Name of the output assembly (only relevant with -target:msil) - -r List of assemblies referenced by the program (only relevant with -target:msil) -debug Output debugging messages -deprecation enable detailed deprecation warnings -unchecked enable detailed unchecked warnings diff --git a/test/attic/files/cli/test2/Main.check.scalac b/test/attic/files/cli/test2/Main.check.scalac index 8465810d0b..5561cc1ead 100644 --- a/test/attic/files/cli/test2/Main.check.scalac +++ b/test/attic/files/cli/test2/Main.check.scalac @@ -1,7 +1,7 @@ scalac error: bad option: '-dd' scalac -help gives more information Usage: scalac -where possible options include: +where possible options include: -doc Generate documentation -g: Generate debugging info (none,source,line,vars,notc) -nowarn Generate no warnings @@ -15,10 +15,8 @@ where possible options include: -encoding Specify character encoding used by source files -windowtitle Specify window title of generated HTML documentation -documenttitle Specify document title of generated HTML documentation - -target: Specify which backend to use (jvm-1.5,msil) + -target: Specify which backend to use (jvm-1.5) -migrate Assist in migrating from Scala version 1.0 - -o Name of the output assembly (only relevant with -target:msil) - -r List of assemblies referenced by the program (only relevant with -target:msil) -debug Output debugging messages -deprecation enable detailed deprecation warnings -unchecked enable detailed unchecked warnings diff --git a/test/attic/files/cli/test3/Main.check.scalac b/test/attic/files/cli/test3/Main.check.scalac index 8465810d0b..5561cc1ead 100644 --- a/test/attic/files/cli/test3/Main.check.scalac +++ b/test/attic/files/cli/test3/Main.check.scalac @@ -1,7 +1,7 @@ scalac error: bad option: '-dd' scalac -help gives more information Usage: scalac -where possible options include: +where possible options include: -doc Generate documentation -g: Generate debugging info (none,source,line,vars,notc) -nowarn Generate no warnings @@ -15,10 +15,8 @@ where possible options include: -encoding Specify character encoding used by source files -windowtitle 
Specify window title of generated HTML documentation -documenttitle Specify document title of generated HTML documentation - -target: Specify which backend to use (jvm-1.5,msil) + -target: Specify which backend to use (jvm-1.5) -migrate Assist in migrating from Scala version 1.0 - -o Name of the output assembly (only relevant with -target:msil) - -r List of assemblies referenced by the program (only relevant with -target:msil) -debug Output debugging messages -deprecation enable detailed deprecation warnings -unchecked enable detailed unchecked warnings diff --git a/tools/buildcp b/tools/buildcp index 766ab81f90..3027dec2e3 100755 --- a/tools/buildcp +++ b/tools/buildcp @@ -8,4 +8,4 @@ lib=$($dir/abspath $dir/../lib) build=$($dir/abspath $dir/../build) cp=$($dir/cpof $build/$1/classes):$build/asm/classes -echo $cp:$lib/fjbg.jar:$lib/msil.jar:$lib/forkjoin.jar:$lib/jline.jar:$lib/extra/'*' +echo $cp:$lib/fjbg.jar:$lib/forkjoin.jar:$lib/jline.jar:$lib/extra/'*' -- cgit v1.2.3 From 0433ca4fc8c1ad0d0733b2fdccc6352904a5a531 Mon Sep 17 00:00:00 2001 From: Andriy Polishchuk Date: Thu, 6 Dec 2012 11:59:05 +0200 Subject: SI-5841 reification of renamed imports Reification of renamed imports is done by catching Selects with name != their tree.symbol.name, replacing this name with tree.symbol.name, and then doing reifyProduct in case of renamed terms and reifyBoundType (inner) in case of renamed types. --- .../scala/reflect/reify/codegen/GenTrees.scala | 7 +++++++ .../scala/reflect/reify/utils/Extractors.scala | 2 ++ test/files/run/reify_renamed_term_basic.check | 1 + test/files/run/reify_renamed_term_basic.scala | 20 ++++++++++++++++++ .../run/reify_renamed_term_local_to_reifee.check | 1 + .../run/reify_renamed_term_local_to_reifee.scala | 20 ++++++++++++++++++ .../run/reify_renamed_term_overloaded_method.check | 1 + .../run/reify_renamed_term_overloaded_method.scala | 17 +++++++++++++++ test/files/run/reify_renamed_term_si5841.check | 1 + test/files/run/reify_renamed_term_si5841.scala | 7 +++++++ test/files/run/reify_renamed_type_basic.check | 1 + test/files/run/reify_renamed_type_basic.scala | 16 +++++++++++++++ .../run/reify_renamed_type_local_to_reifee.check | 1 + .../run/reify_renamed_type_local_to_reifee.scala | 24 ++++++++++++++++++++++ test/files/run/reify_renamed_type_spliceable.check | 1 + test/files/run/reify_renamed_type_spliceable.scala | 21 +++++++++++++++++++ 16 files changed, 141 insertions(+) create mode 100644 test/files/run/reify_renamed_term_basic.check create mode 100644 test/files/run/reify_renamed_term_basic.scala create mode 100644 test/files/run/reify_renamed_term_local_to_reifee.check create mode 100644 test/files/run/reify_renamed_term_local_to_reifee.scala create mode 100644 test/files/run/reify_renamed_term_overloaded_method.check create mode 100644 test/files/run/reify_renamed_term_overloaded_method.scala create mode 100644 test/files/run/reify_renamed_term_si5841.check create mode 100644 test/files/run/reify_renamed_term_si5841.scala create mode 100644 test/files/run/reify_renamed_type_basic.check create mode 100644 test/files/run/reify_renamed_type_basic.scala create mode 100644 test/files/run/reify_renamed_type_local_to_reifee.check create mode 100644 test/files/run/reify_renamed_type_local_to_reifee.scala create mode 100644 test/files/run/reify_renamed_type_spliceable.check create mode 100644 test/files/run/reify_renamed_type_spliceable.scala (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala 
b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala index 86ad23cd15..e671124d4c 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala @@ -144,6 +144,11 @@ trait GenTrees { } case tree @ Ident(_) if tree.symbol.isLocalToReifee => mirrorCall(nme.Ident, reify(tree.name)) + case Select(qual, name) => + if (tree.symbol != NoSymbol && tree.symbol.name != name) + reifyProduct(Select(qual, tree.symbol.name)) + else + reifyProduct(tree) case _ => throw new Error("internal error: %s (%s, %s) is not supported".format(tree, tree.productPrefix, tree.getClass)) } @@ -193,6 +198,8 @@ trait GenTrees { } tree match { + case Select(qual, name) if (name != tree.symbol.name) => + reifyBoundType(Select(qual, tree.symbol.name)) case Select(_, _) => reifyBoundType(tree) case SelectFromTypeTree(_, _) => diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala index 50bd309b52..254cb02ee6 100644 --- a/src/compiler/scala/reflect/reify/utils/Extractors.scala +++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala @@ -251,6 +251,8 @@ trait Extractors { object BoundTerm { def unapply(tree: Tree): Option[Tree] = tree match { + case Select(_, name) if name.isTermName => + Some(tree) case Ident(name) if name.isTermName => Some(tree) case This(_) => diff --git a/test/files/run/reify_renamed_term_basic.check b/test/files/run/reify_renamed_term_basic.check new file mode 100644 index 0000000000..e78f94fffd --- /dev/null +++ b/test/files/run/reify_renamed_term_basic.check @@ -0,0 +1 @@ +((),(),()) diff --git a/test/files/run/reify_renamed_term_basic.scala b/test/files/run/reify_renamed_term_basic.scala new file mode 100644 index 0000000000..cd76def395 --- /dev/null +++ b/test/files/run/reify_renamed_term_basic.scala @@ -0,0 +1,20 @@ +import scala.reflect.runtime.universe._ +import scala.tools.reflect.Eval + +object A { + object B { + val c = () + } +} + +object Test extends App { + import A.{B => X} + import A.B.{c => y} + import X.{c => z} + + val expr = reify ( + X.c, y, z + ) + + println(expr.eval) +} \ No newline at end of file diff --git a/test/files/run/reify_renamed_term_local_to_reifee.check b/test/files/run/reify_renamed_term_local_to_reifee.check new file mode 100644 index 0000000000..e78f94fffd --- /dev/null +++ b/test/files/run/reify_renamed_term_local_to_reifee.check @@ -0,0 +1 @@ +((),(),()) diff --git a/test/files/run/reify_renamed_term_local_to_reifee.scala b/test/files/run/reify_renamed_term_local_to_reifee.scala new file mode 100644 index 0000000000..1860316a5b --- /dev/null +++ b/test/files/run/reify_renamed_term_local_to_reifee.scala @@ -0,0 +1,20 @@ +import scala.reflect.runtime.universe._ +import scala.tools.reflect.Eval + +object A { + object B { + val c = () + } +} + +object Test extends App { + val expr = reify { + import A.{B => X} + import A.B.{c => y} + import X.{c => z} + + (X.c, y, z) + } + + println(expr.eval) +} \ No newline at end of file diff --git a/test/files/run/reify_renamed_term_overloaded_method.check b/test/files/run/reify_renamed_term_overloaded_method.check new file mode 100644 index 0000000000..48082f72f0 --- /dev/null +++ b/test/files/run/reify_renamed_term_overloaded_method.check @@ -0,0 +1 @@ +12 diff --git a/test/files/run/reify_renamed_term_overloaded_method.scala b/test/files/run/reify_renamed_term_overloaded_method.scala new file mode 100644 index 0000000000..3ef442d203 --- /dev/null +++ 
b/test/files/run/reify_renamed_term_overloaded_method.scala @@ -0,0 +1,17 @@ +import scala.reflect.runtime.universe._ +import scala.tools.reflect.Eval + +object O { + def show(i: Int) = i.toString + def show(s: String) = s +} + +object Test extends App { + import O.{show => s} + + val expr = reify { + s("1") + s(2) + } + + println(expr.eval) +} \ No newline at end of file diff --git a/test/files/run/reify_renamed_term_si5841.check b/test/files/run/reify_renamed_term_si5841.check new file mode 100644 index 0000000000..6031277b76 --- /dev/null +++ b/test/files/run/reify_renamed_term_si5841.check @@ -0,0 +1 @@ +class scala.reflect.runtime.JavaUniverse diff --git a/test/files/run/reify_renamed_term_si5841.scala b/test/files/run/reify_renamed_term_si5841.scala new file mode 100644 index 0000000000..ef18d650bf --- /dev/null +++ b/test/files/run/reify_renamed_term_si5841.scala @@ -0,0 +1,7 @@ +import scala.reflect.runtime.universe._ +import scala.reflect.runtime.{universe => ru} +import scala.tools.reflect.Eval + +object Test extends App { + println(reify{ru}.eval.getClass) +} \ No newline at end of file diff --git a/test/files/run/reify_renamed_type_basic.check b/test/files/run/reify_renamed_type_basic.check new file mode 100644 index 0000000000..6a452c185a --- /dev/null +++ b/test/files/run/reify_renamed_type_basic.check @@ -0,0 +1 @@ +() diff --git a/test/files/run/reify_renamed_type_basic.scala b/test/files/run/reify_renamed_type_basic.scala new file mode 100644 index 0000000000..23729e5c54 --- /dev/null +++ b/test/files/run/reify_renamed_type_basic.scala @@ -0,0 +1,16 @@ +import scala.reflect.runtime.universe._ +import scala.tools.reflect.Eval + +object O { + type A = Unit +} + +object Test extends App { + import O.{A => X} + + def expr = reify { + val a: X = () + } + + println(expr.eval) +} \ No newline at end of file diff --git a/test/files/run/reify_renamed_type_local_to_reifee.check b/test/files/run/reify_renamed_type_local_to_reifee.check new file mode 100644 index 0000000000..6a452c185a --- /dev/null +++ b/test/files/run/reify_renamed_type_local_to_reifee.check @@ -0,0 +1 @@ +() diff --git a/test/files/run/reify_renamed_type_local_to_reifee.scala b/test/files/run/reify_renamed_type_local_to_reifee.scala new file mode 100644 index 0000000000..ed1bad239e --- /dev/null +++ b/test/files/run/reify_renamed_type_local_to_reifee.scala @@ -0,0 +1,24 @@ +import scala.reflect.runtime.universe._ +import scala.tools.reflect.Eval + +object O { + type A = Unit +} + +object Test extends App { + val expr = reify { + import O.{A => X} + + val a: X = () + + object P { + type B = Unit + } + + import P.{B => Y} + + val b: Y = () + } + + println(expr.eval) +} \ No newline at end of file diff --git a/test/files/run/reify_renamed_type_spliceable.check b/test/files/run/reify_renamed_type_spliceable.check new file mode 100644 index 0000000000..6a452c185a --- /dev/null +++ b/test/files/run/reify_renamed_type_spliceable.check @@ -0,0 +1 @@ +() diff --git a/test/files/run/reify_renamed_type_spliceable.scala b/test/files/run/reify_renamed_type_spliceable.scala new file mode 100644 index 0000000000..9c2cff5199 --- /dev/null +++ b/test/files/run/reify_renamed_type_spliceable.scala @@ -0,0 +1,21 @@ +import scala.reflect.runtime.universe._ +import scala.tools.reflect.Eval + +abstract class C { + type T >: Null +} + +object Test extends App { + def foo(c: C) = { + import c.{T => U} + reify { + val x: U = null + } + } + + val expr = foo(new C { + type T = AnyRef + }) + + println(expr.eval) +} \ No newline at end of file 
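For context, the new tests above all reduce to reifying code that refers to a member through a renamed import. A minimal sketch of the behaviour this commit enables is shown here; the object and value names are made up for illustration, and the shape deliberately mirrors reify_renamed_term_overloaded_method.scala above rather than adding anything new:

import scala.reflect.runtime.universe._
import scala.tools.reflect.Eval

object Host {
  def show(i: Int) = i.toString   // hypothetical helpers, as in the test above
  def show(s: String) = s
}

object RenamedImportDemo extends App {
  import Host.{show => sh}        // renamed import of an overloaded method

  // Previously the reifier kept the renamed name `sh` in the generated Select,
  // so the reified tree referred to a member that does not exist on Host.
  // With this commit the Select is rebuilt using the symbol's original name
  // `show`, so the tree typechecks and evaluates outside the importing scope.
  val expr = reify { sh("1") + sh(2) }

  println(expr.eval)              // expected to print "12"
}

This is only a sketch of the intended usage under the assumptions stated above, not part of the patch itself; the authoritative behaviour is given by the test files and the GenTrees.scala change in this commit.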
-- cgit v1.2.3 From ca1e7ec1c291160e6f89162dd13eaf567700a0eb Mon Sep 17 00:00:00 2001 From: Andriy Polishchuk Date: Thu, 6 Dec 2012 12:01:34 +0200 Subject: Cleanups of reifyBoundTerm and reifyBoundType Cases in reifyBoundTerm are merged by constructors; conditions in reifyBoundType are linearized; also, in latter, or-patterns are used to merge some cases; and some minor stuff not worth mentioning. --- .../scala/reflect/reify/codegen/GenTrees.scala | 131 +++++++++++---------- 1 file changed, 69 insertions(+), 62 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala index e671124d4c..ff0b3d7d10 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala @@ -117,95 +117,102 @@ trait GenTrees { // unlike in `reifyBoundType` we can skip checking for `tpe` being local or not local w.r.t the reifee // a single check for a symbol of the bound term should be enough // that's because only Idents and Thises can be bound terms, and they cannot host complex types - private def reifyBoundTerm(tree: Tree): Tree = tree match { - case tree @ This(_) if tree.symbol == NoSymbol => - throw new Error("unexpected: bound term that doesn't have a symbol: " + showRaw(tree)) - case tree @ This(_) if tree.symbol.isClass && !tree.symbol.isModuleClass && !tree.symbol.isLocalToReifee => - val sym = tree.symbol - if (reifyDebug) println("This for %s, reified as freeVar".format(sym)) - if (reifyDebug) println("Free: " + sym) - mirrorBuildCall(nme.Ident, reifyFreeTerm(This(sym))) - case tree @ This(_) if !tree.symbol.isLocalToReifee => - if (reifyDebug) println("This for %s, reified as This".format(tree.symbol)) - mirrorBuildCall(nme.This, reify(tree.symbol)) - case tree @ This(_) if tree.symbol.isLocalToReifee => - mirrorCall(nme.This, reify(tree.qual)) - case tree @ Ident(_) if tree.symbol == NoSymbol => - // this sometimes happens, e.g. for binds that don't have a body - // or for untyped code generated during previous phases - // (see a comment in Reifiers about the latter, starting with "why do we resetAllAttrs?") - mirrorCall(nme.Ident, reify(tree.name)) - case tree @ Ident(_) if !tree.symbol.isLocalToReifee => - if (tree.symbol.isVariable && tree.symbol.owner.isTerm) { - captureVariable(tree.symbol) // Note order dependency: captureVariable needs to come before reification here. 
- mirrorCall(nme.Select, mirrorBuildCall(nme.Ident, reify(tree.symbol)), reify(nme.elem)) - } else { - mirrorBuildCall(nme.Ident, reify(tree.symbol)) - } - case tree @ Ident(_) if tree.symbol.isLocalToReifee => - mirrorCall(nme.Ident, reify(tree.name)) - case Select(qual, name) => - if (tree.symbol != NoSymbol && tree.symbol.name != name) - reifyProduct(Select(qual, tree.symbol.name)) - else - reifyProduct(tree) - case _ => - throw new Error("internal error: %s (%s, %s) is not supported".format(tree, tree.productPrefix, tree.getClass)) + private def reifyBoundTerm(tree: Tree): Tree = { + val sym = tree.symbol + + tree match { + case This(qual) => + assert(sym != NoSymbol, "unexpected: bound term that doesn't have a symbol: " + showRaw(tree)) + if (sym.isLocalToReifee) + mirrorCall(nme.This, reify(qual)) + else if (sym.isClass && !sym.isModuleClass) { + if (reifyDebug) println("This for %s, reified as freeVar".format(sym)) + if (reifyDebug) println("Free: " + sym) + mirrorBuildCall(nme.Ident, reifyFreeTerm(This(sym))) + } + else { + if (reifyDebug) println("This for %s, reified as This".format(sym)) + mirrorBuildCall(nme.This, reify(sym)) + } + + case Ident(name) => + if (sym == NoSymbol) { + // this sometimes happens, e.g. for binds that don't have a body + // or for untyped code generated during previous phases + // (see a comment in Reifiers about the latter, starting with "why do we resetAllAttrs?") + mirrorCall(nme.Ident, reify(name)) + } + else if (!sym.isLocalToReifee) { + if (sym.isVariable && sym.owner.isTerm) { + captureVariable(sym) // Note order dependency: captureVariable needs to come before reification here. + mirrorCall(nme.Select, mirrorBuildCall(nme.Ident, reify(sym)), reify(nme.elem)) + } + else mirrorBuildCall(nme.Ident, reify(sym)) + } + else mirrorCall(nme.Ident, reify(name)) + + case Select(qual, name) => + if (sym == NoSymbol || sym.name == name) + reifyProduct(tree) + else + reifyProduct(Select(qual, sym.name)) + + case _ => + throw new Error("internal error: %s (%s, %s) is not supported".format(tree, tree.productPrefix, tree.getClass)) + } } private def reifyBoundType(tree: Tree): Tree = { + val sym = tree.symbol + val tpe = tree.tpe + def reifyBoundType(tree: Tree): Tree = { - if (tree.tpe == null) - throw new Error("unexpected: bound type that doesn't have a tpe: " + showRaw(tree)) + assert(tpe != null, "unexpected: bound type that doesn't have a tpe: " + showRaw(tree)) // if a symbol or a type of the scrutinee are local to reifee // (e.g. point to a locally declared class or to a path-dependent thingie that depends on a local variable) // then we can reify the scrutinee as a symless AST and that will definitely be hygienic // why? 
because then typechecking of a scrutinee doesn't depend on the environment external to the quasiquote // otherwise we need to reify the corresponding type - if (tree.symbol.isLocalToReifee || tree.tpe.isLocalToReifee) + if (sym.isLocalToReifee || tpe.isLocalToReifee) reifyProduct(tree) else { - val sym = tree.symbol - val tpe = tree.tpe if (reifyDebug) println("reifying bound type %s (underlying type is %s)".format(sym, tpe)) if (tpe.isSpliceable) { val spliced = spliceType(tpe) + if (spliced == EmptyTree) { if (reifyDebug) println("splicing failed: reify as is") mirrorBuildCall(nme.TypeTree, reify(tpe)) - } else { - spliced match { - case TypeRefToFreeType(freeType) => - if (reifyDebug) println("splicing returned a free type: " + freeType) - Ident(freeType) - case _ => - if (reifyDebug) println("splicing succeeded: " + spliced) - mirrorBuildCall(nme.TypeTree, spliced) - } } - } else { - if (sym.isLocatable) { - if (reifyDebug) println("tpe is locatable: reify as Ident(%s)".format(sym)) - mirrorBuildCall(nme.Ident, reify(sym)) - } else { - if (reifyDebug) println("tpe is not locatable: reify as TypeTree(%s)".format(tpe)) - mirrorBuildCall(nme.TypeTree, reify(tpe)) + else spliced match { + case TypeRefToFreeType(freeType) => + if (reifyDebug) println("splicing returned a free type: " + freeType) + Ident(freeType) + case _ => + if (reifyDebug) println("splicing succeeded: " + spliced) + mirrorBuildCall(nme.TypeTree, spliced) } } + else if (sym.isLocatable) { + if (reifyDebug) println("tpe is locatable: reify as Ident(%s)".format(sym)) + mirrorBuildCall(nme.Ident, reify(sym)) + } + else { + if (reifyDebug) println("tpe is not locatable: reify as TypeTree(%s)".format(tpe)) + mirrorBuildCall(nme.TypeTree, reify(tpe)) + } } } tree match { - case Select(qual, name) if (name != tree.symbol.name) => - reifyBoundType(Select(qual, tree.symbol.name)) - case Select(_, _) => - reifyBoundType(tree) - case SelectFromTypeTree(_, _) => - reifyBoundType(tree) - case Ident(_) => + case Select(qual, name) if name != sym.name => + reifyBoundType(Select(qual, sym.name)) + + case Select(_, _) | SelectFromTypeTree(_, _) | Ident(_) => reifyBoundType(tree) + case _ => throw new Error("internal error: %s (%s, %s) is not supported".format(tree, tree.productPrefix, tree.getClass)) } -- cgit v1.2.3 From c35751be33de590d8fbe669cbcb6f3de0d2c711f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 6 Dec 2012 12:24:42 -0800 Subject: Misc touchup after purging msil/fjbg/genjvm. 
--- META-INF/MANIFEST.MF | 3 +++ project/Testing.scala | 8 +++---- .../tools/nsc/settings/StandardScalaSettings.scala | 2 +- src/eclipse/partest/.classpath | 1 - src/eclipse/scala-compiler/.classpath | 1 - src/eclipse/scalap/.classpath | 1 - test/pending/jvm/cf-attributes.scala | 26 +++++++++++----------- 7 files changed, 21 insertions(+), 21 deletions(-) (limited to 'src') diff --git a/META-INF/MANIFEST.MF b/META-INF/MANIFEST.MF index 53043cd99f..28a70d2879 100644 --- a/META-INF/MANIFEST.MF +++ b/META-INF/MANIFEST.MF @@ -7,6 +7,7 @@ Eclipse-LazyStart: true Bundle-ClassPath: ., bin, + lib/fjbg.jar, lib/jline.jar, lib/msil.jar Export-Package: @@ -49,6 +50,8 @@ Export-Package: ch.epfl.lamp.compiler.msil, ch.epfl.lamp.compiler.msil.emit, ch.epfl.lamp.compiler.msil.util, + ch.epfl.lamp.fjbg, + ch.epfl.lamp.util Require-Bundle: org.apache.ant, org.scala-ide.scala.library diff --git a/project/Testing.scala b/project/Testing.scala index de63a66164..5b4135a31a 100644 --- a/project/Testing.scala +++ b/project/Testing.scala @@ -17,7 +17,7 @@ trait Testing { self: ScalaBuild.type => autoScalaLibrary := false ) lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq( - scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map { + scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map { case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath) }, partestDirs <<= baseDirectory apply { bd => @@ -27,13 +27,13 @@ trait Testing { self: ScalaBuild.type => } ) val testsuite = ( - Project("testsuite", file(".")) + Project("testsuite", file(".")) settings (testsuiteSettings:_*) - dependsOn (scalaLibrary, scalaCompiler, fjbg, partest, scalacheck) + dependsOn (scalaLibrary, scalaCompiler, partest, scalacheck) ) val continuationsTestsuite = ( Project("continuations-testsuite", file(".")) - settings (continuationsTestsuiteSettings:_*) + settings (continuationsTestsuiteSettings:_*) dependsOn (partest, scalaLibrary, scalaCompiler) ) diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index ed27c1f1c8..9338d9e5b5 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -41,7 +41,7 @@ trait StandardScalaSettings { val optimise: BooleanSetting // depends on post hook which mutates other settings val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.") val target = ChoiceSetting ("-target", "target", "Target platform for object files. 
All JVM 1.5 targets are deprecated.", - List("jvm-1.5", "jvm-1.6", "jvm-1.7")) + List("jvm-1.5", "jvm-1.6", "jvm-1.7"), "jvm-1.6") val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.") val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.") val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.") diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath index b14e465aa6..39a2c67f7c 100644 --- a/src/eclipse/partest/.classpath +++ b/src/eclipse/partest/.classpath @@ -8,7 +8,6 @@ - diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath index e6af46c68f..3d851de53a 100644 --- a/src/eclipse/scala-compiler/.classpath +++ b/src/eclipse/scala-compiler/.classpath @@ -7,6 +7,5 @@ - diff --git a/src/eclipse/scalap/.classpath b/src/eclipse/scalap/.classpath index 2b44ad19b2..3863097a65 100644 --- a/src/eclipse/scalap/.classpath +++ b/src/eclipse/scalap/.classpath @@ -7,6 +7,5 @@ - diff --git a/test/pending/jvm/cf-attributes.scala b/test/pending/jvm/cf-attributes.scala index 9e0e9d95de..f4964b63b1 100644 --- a/test/pending/jvm/cf-attributes.scala +++ b/test/pending/jvm/cf-attributes.scala @@ -52,14 +52,14 @@ object anonymousFunctions { } object anonymousClasses { - //InnerClass: + //InnerClass: // public abstract #_= #_ of #_; //Foo=class anonymousClasses$Foo of class anonymousClasses$ // public abstract #_= #_ of #_; //Foo$class=class anonymousClasses$Foo$class of class anonymousClasses$ trait Foo { def foo() { println("foo"); } override def toString = getClass.getName } - //InnerClass: + //InnerClass: // public final #_; //class anonymousClasses$$anon$1 of class anonymousClasses$ val x = new Foo() { override def foo() { println("foo (overriden)"); } @@ -88,16 +88,16 @@ trait Test1 { trait Test2 { @throws(classOf[Exception]) - def printInnerClasses(cls: Class[_]) { - import java.io._, ch.epfl.lamp.fjbg._ - val fjbgContext = new FJBGContext(49, 0) - val outDir = System.getProperty("partest.output", "cf-attributes.obj") - val fileName = outDir+File.separator+cls.getName+".class" - val in = new DataInputStream(new FileInputStream(fileName)) - val jclass = fjbgContext.JClass(in) - println(jclass.getInnerClasses) - in.close() - } + // def printInnerClasses(cls: Class[_]) { + // import java.io._, ch.epfl.lamp.fjbg._ + // val fjbgContext = new FJBGContext(49, 0) + // val outDir = System.getProperty("partest.output", "cf-attributes.obj") + // val fileName = outDir+File.separator+cls.getName+".class" + // val in = new DataInputStream(new FileInputStream(fileName)) + // val jclass = fjbgContext.JClass(in) + // println(jclass.getInnerClasses) + // in.close() + // } def printClass(name: String) { try { printClass(Class.forName(name)) } catch { case e: Exception => println(e) } @@ -105,7 +105,7 @@ trait Test2 { def printClass(cls: Class[_]) { println("\n[[ "+cls.getName+" ]]"); try { printInnerClasses(cls) } - catch { case e: Exception => println(e) } + catch { case e: Exception => println(e) } } } -- cgit v1.2.3 From 1fa4ad083d7dc55ce498389d935b38c5b974b793 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 4 Dec 2012 01:39:46 -0800 Subject: Restore unmangling but add -raw; massage options to support tool args like -raw and combined -pv, and not pass through arbitrary options. If class arg fails, try it as a live term and use its enclosing class, to support "def m = 7; javap m". 
That might be more fun with filtering (to see only m). This saves having to know the line and iw's. There are no doubt more fun features for another rainy day. And it will rain again someday. --- .../scala/tools/nsc/interpreter/ILoop.scala | 12 +- .../scala/tools/nsc/interpreter/IMain.scala | 8 ++ .../scala/tools/nsc/interpreter/ISettings.scala | 11 +- .../scala/tools/nsc/interpreter/Naming.scala | 12 +- src/compiler/scala/tools/util/Javap.scala | 140 ++++++++++++++++----- 5 files changed, 148 insertions(+), 35 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index 3af3c44cb6..351b1b8a68 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -283,8 +283,16 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) val intp = ILoop.this.intp import intp._ - new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp)) { - override def tryClass(path: String) = super.tryClass(translatePath(path) getOrElse path) + new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp), Some(isettings)) { + override def tryClass(path: String) = { + val claas = super.tryClass(translatePath(path) getOrElse path) + if (!claas.isEmpty) claas + // take path as a Name in scope and find its enclosing class + else translateEnclosingClass(path) match { + case Some(encl) => super.tryClass(encl) + case _ => claas + } + } } } private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap()) diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 3f49e782b0..91e909b1f1 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -312,6 +312,14 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends case _ => Some(flatPath(sym)) } } + def translateEnclosingClass(n: String) = { + def enclosingClass(s: Symbol): Symbol = + if (s == NoSymbol || s.isClass) s else enclosingClass(s.owner) + enclosingClass(symbolOfTerm(n)) match { + case NoSymbol => None + case c => Some(flatPath(c)) + } + } private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(replOutput.dir, parent) { /** Overridden here to try translating a simple name to the generated diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala index d114ca2359..a2fbbc7fb1 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala @@ -25,7 +25,7 @@ class ISettings(intp: IMain) { var maxAutoprintCompletion = 250 /** String unwrapping can be disabled if it is causing issues. - * Settings this to false means you will see Strings like "$iw.$iw.". + * Setting this to false means you will see Strings like "$iw.$iw.". */ var unwrapStrings = true @@ -51,4 +51,13 @@ class ISettings(intp: IMain) { | ISettings { | %s | }""".stripMargin.format(allSettingsString) + + /** Disable and finally restore String unwrapping for an operation. 
+ */ + def withoutUnwrapping[A](op: => A): A = { + val saved = this.unwrapStrings + this.unwrapStrings = false + try op + finally this.unwrapStrings = saved + } } diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/compiler/scala/tools/nsc/interpreter/Naming.scala index 41ddf23de4..57f3675ada 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Naming.scala @@ -6,6 +6,8 @@ package scala.tools.nsc package interpreter +import scala.util.Properties.lineSeparator + /** This is for name logic which is independent of the compiler (notice there's no Global.) * That includes at least generating, metaquoting, mangling, and unmangling. */ @@ -18,8 +20,14 @@ trait Naming { // for ansi codes. val binaryChars = cleaned count (ch => ch < 32 && !ch.isWhitespace && ch != ESC) // Lots of binary chars - translate all supposed whitespace into spaces - if (binaryChars > 5) - cleaned map (ch => if (ch.isWhitespace) ' ' else if (ch < 32) '?' else ch) + // except supposed line endings, otherwise scrubbed lines run together + if (binaryChars > 5) // more than one can count while holding a hamburger + cleaned map { + case c if lineSeparator contains c => c + case c if c.isWhitespace => ' ' + case c if c < 32 => '?' + case c => c + } // Not lots - preserve whitespace and ESC else cleaned map (ch => if (ch.isWhitespace || ch == ESC) ch else if (ch < 32) '?' else ch) diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala index 89c5969087..69e9d53b38 100644 --- a/src/compiler/scala/tools/util/Javap.scala +++ b/src/compiler/scala/tools/util/Javap.scala @@ -8,6 +8,7 @@ package util import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable } import scala.tools.nsc.util.ScalaClassLoader +import scala.tools.nsc.interpreter.ISettings import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, InputStream, PrintWriter, Writer } import java.util.{ Locale } @@ -15,12 +16,15 @@ import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener, ForwardingJavaFileManager, JavaFileManager, JavaFileObject, SimpleJavaFileObject, StandardLocation } import scala.tools.nsc.io.File -import scala.util.{ Properties, Try, Success, Failure } +import scala.io.Source +import scala.util.{ Try, Success, Failure } +import scala.util.Properties.lineSeparator import scala.collection.JavaConverters import scala.collection.generic.Clearable import scala.language.reflectiveCalls import Javap._ +import JavapTool.ToolArgs trait Javap { def loader: ScalaClassLoader @@ -40,23 +44,27 @@ object NoJavap extends Javap { class JavapClass( val loader: ScalaClassLoader = ScalaClassLoader.appLoader, - val printWriter: PrintWriter = new PrintWriter(System.out, true) + val printWriter: PrintWriter = new PrintWriter(System.out, true), + isettings: Option[ISettings] = None ) extends Javap { - lazy val tool = JavapTool(loader, printWriter) + lazy val tool = JavapTool(loader, printWriter, isettings) /** Run the tool. Option args start with "-". * The default options are "-protected -verbose". * Byte data for filename args is retrieved with findBytes. - * If the filename does not end with ".class", javap will - * insert a banner of the form: - * `Binary file dummy contains simple.Complex`. 
*/ def apply(args: Seq[String]): List[JpResult] = { - val (optional, claases) = args partition (_ startsWith "-") - val options = if (optional.nonEmpty) optional else JavapTool.DefaultOptions - if (claases.nonEmpty) tool(options)(claases map (claas => claas -> bytesFor(claas))) - else List(JpResult(":javap [-lcsvp] [path1 path2 ...]")) + val (options, claases) = args partition (_ startsWith "-") + val (flags, upgraded) = upgrade(options) + if (flags.help || claases.isEmpty) List(JpResult(JavapTool.helper(printWriter))) + else tool(flags.raw, upgraded)(claases map (claas => claas -> bytesFor(claas))) + } + + /** Cull our tool options. */ + private def upgrade(options: Seq[String]): (ToolArgs, Seq[String]) = ToolArgs fromArgs options match { + case (t,s) if s.nonEmpty => (t,s) + case (t,s) => (t, JavapTool.DefaultOptions) } private def bytesFor(path: String) = Try { @@ -88,7 +96,7 @@ class JavapClass( abstract class JavapTool { type ByteAry = Array[Byte] type Input = Pair[String, Try[ByteAry]] - def apply(options: Seq[String])(inputs: Seq[Input]): List[JpResult] + def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] // Since the tool is loaded by reflection, check for catastrophic failure. protected def failed: Boolean implicit protected class Failer[A](a: =>A) { @@ -124,14 +132,14 @@ class JavapTool6(loader: ScalaClassLoader, printWriter: PrintWriter) extends Jav result orFailed null } - override def apply(options: Seq[String])(inputs: Seq[Input]): List[JpResult] = + override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { case (_, Success(ba)) => JpResult(showable(newPrinter(new ByteArrayInputStream(ba), newEnv(options)))) case (_, Failure(e)) => JpResult(e.toString) }).toList orFailed List(noToolError) } -class JavapTool7(loader: ScalaClassLoader, printWriter: PrintWriter) extends JavapTool { +class JavapTool7(loader: ScalaClassLoader, printWriter: PrintWriter, isettings: Option[ISettings]) extends JavapTool { import JavapTool._ type Task = { @@ -167,11 +175,11 @@ class JavapTool7(loader: ScalaClassLoader, printWriter: PrintWriter) extends Jav */ def messages(implicit locale: Locale = null) = (diagnostics map (_ getMessage locale)).toList - def reportable: String = { - import Properties.lineSeparator - //val container = "Binary file .* contains .*".r - //val m = messages filter (_ match { case container() => false case _ => true }) - val m = messages + def reportable(raw: Boolean): String = { + // don't filter this message if raw, since the names are likely to differ + val container = "Binary file .* contains .*".r + val m = if (raw) messages + else messages filter (_ match { case container() => false case _ => true }) clear() if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator) else "" @@ -225,18 +233,23 @@ class JavapTool7(loader: ScalaClassLoader, printWriter: PrintWriter) extends Jav } val writer = new CharArrayWriter def fileManager(inputs: Seq[Input]) = new JavapFileManager(inputs)() - def showable(): Showable = { + def showable(raw: Boolean): Showable = { val written = { writer.flush() val w = writer.toString writer.reset() w } - val msgs = reporter.reportable + val msgs = reporter.reportable(raw) new Showable { - def show() = { - val mw = msgs + written - printWriter.write(mw, 0, mw.length) // ReplStrippingWriter clips on write(String) if truncating + val mw = msgs + written + // ReplStrippingWriter clips and scrubs on write(String) + // circumvent it by write(mw, 0, mw.length) or wrap it in 
withoutUnwrapping + def show() = + if (raw && isettings.isDefined) isettings.get withoutUnwrapping { writeLines() } + else writeLines() + private def writeLines() { + for (line <- Source.fromString(mw).getLines) printWriter write line+lineSeparator printWriter.flush() } } @@ -250,12 +263,12 @@ class JavapTool7(loader: ScalaClassLoader, printWriter: PrintWriter) extends Jav .orFailed (throw new IllegalStateException) } // a result per input - private def apply1(options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] = + private def applyOne(raw: Boolean, options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] = Try { task(options, Seq(claas), inputs).call() } map { - case true => JpResult(showable()) - case _ => JpResult(reporter.reportable) + case true => JpResult(showable(raw)) + case _ => JpResult(reporter.reportable(raw)) } recoverWith { case e: java.lang.reflect.InvocationTargetException => e.getCause match { case t: IllegalArgumentException => Success(JpResult(t.getMessage)) // bad option @@ -264,8 +277,8 @@ class JavapTool7(loader: ScalaClassLoader, printWriter: PrintWriter) extends Jav } lastly { reporter.clear } - override def apply(options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { - case (claas, Success(_)) => apply1(options, claas, inputs).get + override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { + case (claas, Success(_)) => applyOne(raw, options, claas, inputs).get case (_, Failure(e)) => JpResult(e.toString) }).toList orFailed List(noToolError) } @@ -320,6 +333,73 @@ object JavapTool { } } + case class ToolArgs(raw: Boolean = false, help: Boolean = false) + + object ToolArgs { + def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) { + case ((t,others), s) => s match { + case "-help" => (t copy (help=true), others) + case "-raw" => (t copy (raw=true), others) + case _ => (t, others :+ s) + } + } + } + + val helps = List( + "usage" -> ":javap [opts] [path or class or -]...", + "-help" -> "Prints this help message", + "-raw" -> "Don't unmangle REPL names", + "-verbose/-v" -> "Stack size, number of locals, method args", + "-private/-p" -> "Private classes and members", + "-package" -> "Package-private classes and members", + "-protected" -> "Protected classes and members", + "-public" -> "Public classes and members", + "-l" -> "Line and local variable tables", + "-c" -> "Disassembled code", + "-s" -> "Internal type signatures", + "-sysinfo" -> "System info of class", + "-constants" -> "Static final constants" + ) + + // match prefixes and unpack opts, or -help on failure + def massage(arg: String): Seq[String] = { + require(arg startsWith "-") + // arg matches opt "-foo/-f" if prefix of -foo or exactly -f + val r = """(-[^/]*)(/(-.))?""".r + def maybe(opt: String, s: String): Option[String] = opt match { + //case r(lf,_,sf) if (lf startsWith s) || (s == sf) => Some(lf) + // disambiguate by preferring short form + case r(lf,_,sf) if s == sf => Some(sf) + case r(lf,_,sf) if lf startsWith s => Some(lf) + case _ => None + } + def candidates(s: String) = (helps map (h => maybe(h._1, s))).flatten + // one candidate or one single-char candidate + def uniqueOf(maybes: Seq[String]) = { + def single(s: String) = s.length == 2 + if (maybes.length == 1) maybes + else if ((maybes count single) == 1) maybes filter single + else Nil + } + // each optchar must decode to exactly one option + def unpacked(s: String): 
Try[Seq[String]] = { + val ones = (s drop 1) map { c => + val maybes = uniqueOf(candidates(s"-$c")) + if (maybes.length == 1) Some(maybes.head) else None + } + Try(ones) filter (_ forall (_.isDefined)) map (_.flatten) + //if (ones exists (_.isEmpty)) Nil else ones.flatten + } + val res = uniqueOf(candidates(arg)) + if (res.nonEmpty) res + else (unpacked(arg) + getOrElse (Seq("-help"))) // or else someone needs help + } + + def helper(pw: PrintWriter) = new Showable { + def show() = helps foreach (p => pw write "%-12.12s%s%n".format(p._1,p._2)) + } + val DefaultOptions = List("-protected", "-verbose") def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = Seq(Env, Tool) exists (cn => hasClass(cl, cn)) @@ -328,8 +408,8 @@ object JavapTool { private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool) - def apply(cl: ScalaClassLoader, pw: PrintWriter) = - if (isTaskable(cl)) new JavapTool7(cl, pw) else new JavapTool6(cl, pw) + def apply(cl: ScalaClassLoader, pw: PrintWriter, is: Option[ISettings]) = + if (isTaskable(cl)) new JavapTool7(cl, pw, is) else new JavapTool6(cl, pw) implicit class Lastly[A](val t: Try[A]) extends AnyVal { private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t } -- cgit v1.2.3 From 593a7605d4a157ff59700741542a3581025f67e7 Mon Sep 17 00:00:00 2001 From: James Iry Date: Mon, 3 Dec 2012 09:23:59 -0800 Subject: Small cleanup work done during my initial visits to the code. These are just a series of small cleanups I did while reading the code base during my first few days. None are actual functionality bugs and none warrant a full blown bug. Now that I'm moving on to doing real work small stuff like this will likely be swept up in other bugs, so this commit just captures that initial pass of stuff. --- src/compiler/scala/tools/nsc/CompileServer.scala | 5 +---- .../scala/tools/nsc/OfflineCompilerCommand.scala | 4 ++-- src/compiler/scala/tools/nsc/transform/Mixin.scala | 16 ++++++++-------- src/compiler/scala/tools/nsc/util/SimpleTracer.scala | 2 +- 4 files changed, 12 insertions(+), 15 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index f79990d526..e4c250a3d9 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -56,9 +56,6 @@ class StandardCompileServer extends SocketServer { (totalMemory - freeMemory).toDouble / maxMemory.toDouble > MaxCharge } - protected def newOfflineCompilerCommand(arguments: List[String], settings: FscSettings): OfflineCompilerCommand = - new OfflineCompilerCommand(arguments, settings) - /** Problematically, Settings are only considered equal if every setting * is exactly equal. In fsc this immediately breaks down because the randomly * chosen temporary outdirs differ between client and server. 
Among other @@ -91,7 +88,7 @@ class StandardCompileServer extends SocketServer { val args = input.split("\0", -1).toList val newSettings = new FscSettings(fscError) this.verbose = newSettings.verbose.value - val command = newOfflineCompilerCommand(args, newSettings) + val command = new OfflineCompilerCommand(args, newSettings) info("Settings after normalizing paths: " + newSettings) printMemoryStats() diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala index 8a3c531ff0..2f4975e681 100644 --- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala @@ -26,8 +26,8 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext // instead of whatever it's supposed to be doing. val baseDirectory = { val pwd = System.getenv("PWD") - if (pwd != null && !isWin) Directory(pwd) - else Directory.Current getOrElse Directory("/") + if (pwd == null || isWin) Directory.Current getOrElse Directory("/") + else Directory(pwd) } currentDir.value = baseDirectory.path } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index f78a5133a6..ca69d52798 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -381,20 +381,20 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { if (clazz.isImplClass) { clazz setFlag lateMODULE var sourceModule = clazz.owner.info.decls.lookup(sym.name.toTermName) - if (sourceModule != NoSymbol) { - sourceModule setPos sym.pos - if (sourceModule.flags != MODULE) { - log("!!! Directly setting sourceModule flags from %s to MODULE".format(sourceModule.flagString)) - sourceModule.flags = MODULE - } - } - else { + if (sourceModule == NoSymbol) { sourceModule = ( clazz.owner.newModuleSymbol(sym.name.toTermName, sym.pos, MODULE) setModuleClass sym.asInstanceOf[ClassSymbol] ) clazz.owner.info.decls enter sourceModule } + else { + sourceModule setPos sym.pos + if (sourceModule.flags != MODULE) { + log("!!! Directly setting sourceModule flags from %s to MODULE".format(sourceModule.flagString)) + sourceModule.flags = MODULE + } + } sourceModule setInfo sym.tpe // Companion module isn't visible for anonymous class at this point anyway assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass, diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala index a33af1754d..6997dbd402 100644 --- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala +++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala @@ -6,7 +6,7 @@ package util import java.io.PrintStream /** A simple tracer - * @param out: The print stream where trace info shoul be sent + * @param out: The print stream where trace info should be sent * @param enabled: A condition that must be true for trace info to be produced. */ class SimpleTracer(out: PrintStream, enabled: Boolean = true) { -- cgit v1.2.3 From 2567da8390a4e09710a19f662bfcf81b58b3ec10 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 7 Dec 2012 02:04:37 -0800 Subject: Support "javap -" for lastvar, move JavapTool into JavapClass. The notation "javap -" is actually pretty handy, in particular as a shorthand for javap resN. JavapClass knows if it has an intp, for purposes of names and for output; so the special tryClass impl is refactored to the tool; the special handling for "-" as well. 
It just uses IMain for withoutUnwrapping, so that method is unadded from ISettings. (The tryClass/tryFile template methods are handy but maybe passing a func to JavapClass.apply would be cleaner; similar for writer output and whatever magic it enables.) --- .../scala/tools/nsc/interpreter/ILoop.scala | 19 +- .../scala/tools/nsc/interpreter/ISettings.scala | 9 - src/compiler/scala/tools/util/Javap.scala | 619 +++++++++++---------- 3 files changed, 321 insertions(+), 326 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index 351b1b8a68..b2af53574f 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -279,22 +279,9 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } } - protected def newJavap() = { - val intp = ILoop.this.intp - import intp._ - - new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp), Some(isettings)) { - override def tryClass(path: String) = { - val claas = super.tryClass(translatePath(path) getOrElse path) - if (!claas.isEmpty) claas - // take path as a Name in scope and find its enclosing class - else translateEnclosingClass(path) match { - case Some(encl) => super.tryClass(encl) - case _ => claas - } - } - } - } + protected def newJavap() = + JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp), Some(intp)) + private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap()) // Still todo: modules. diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala index a2fbbc7fb1..9541d08db1 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala @@ -51,13 +51,4 @@ class ISettings(intp: IMain) { | ISettings { | %s | }""".stripMargin.format(allSettingsString) - - /** Disable and finally restore String unwrapping for an operation. - */ - def withoutUnwrapping[A](op: => A): A = { - val saved = this.unwrapStrings - this.unwrapStrings = false - try op - finally this.unwrapStrings = saved - } } diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala index 69e9d53b38..269cfa5dd4 100644 --- a/src/compiler/scala/tools/util/Javap.scala +++ b/src/compiler/scala/tools/util/Javap.scala @@ -8,7 +8,7 @@ package util import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable } import scala.tools.nsc.util.ScalaClassLoader -import scala.tools.nsc.interpreter.ISettings +import scala.tools.nsc.interpreter.IMain import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, InputStream, PrintWriter, Writer } import java.util.{ Locale } @@ -24,7 +24,6 @@ import scala.collection.generic.Clearable import scala.language.reflectiveCalls import Javap._ -import JavapTool.ToolArgs trait Javap { def loader: ScalaClassLoader @@ -43,19 +42,20 @@ object NoJavap extends Javap { } class JavapClass( - val loader: ScalaClassLoader = ScalaClassLoader.appLoader, - val printWriter: PrintWriter = new PrintWriter(System.out, true), - isettings: Option[ISettings] = None + val loader: ScalaClassLoader, + val printWriter: PrintWriter, + intp: Option[IMain] = None ) extends Javap { + import JavapTool.ToolArgs - lazy val tool = JavapTool(loader, printWriter, isettings) + lazy val tool = JavapTool() /** Run the tool. Option args start with "-". 
* The default options are "-protected -verbose". * Byte data for filename args is retrieved with findBytes. */ def apply(args: Seq[String]): List[JpResult] = { - val (options, claases) = args partition (_ startsWith "-") + val (options, claases) = args partition (s => (s startsWith "-") && s.length > 1) val (flags, upgraded) = upgrade(options) if (flags.help || claases.isEmpty) List(JpResult(JavapTool.helper(printWriter))) else tool(flags.raw, upgraded)(claases map (claas => claas -> bytesFor(claas))) @@ -68,7 +68,8 @@ class JavapClass( } private def bytesFor(path: String) = Try { - val bytes = findBytes(path) + def last = intp.get.mostRecentVar // fail if no intp + val bytes = findBytes(if (path == "-") last else path) if (bytes.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '${path}'") else bytes } @@ -87,342 +88,354 @@ class JavapClass( /** Assume the string is a fully qualified class name and try to * find the class object it represents. */ - def tryClass(path: String): Array[Byte] = loader classBytes { - if (path endsWith ".class") (path dropRight 6).replace('/', '.') - else path + def tryClass(path: String): Array[Byte] = { + def pathology(p: String) = + if (p endsWith ".class") (p dropRight 6).replace('/', '.') + else p + def load(name: String) = loader classBytes pathology(name) + // if repl, translate the name to something replish + if (intp.isDefined) { + val claas = load(intp.get.translatePath(path) getOrElse path) + if (!claas.isEmpty) claas + // take path as a Name in scope and find its enclosing class + else intp.get.translateEnclosingClass(path) match { + case Some(encl) => load(encl) + case _ => claas // empty + } + } else load(path) } -} -abstract class JavapTool { - type ByteAry = Array[Byte] - type Input = Pair[String, Try[ByteAry]] - def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] - // Since the tool is loaded by reflection, check for catastrophic failure. - protected def failed: Boolean - implicit protected class Failer[A](a: =>A) { - def orFailed[B >: A](b: =>B) = if (failed) b else a + abstract class JavapTool { + type ByteAry = Array[Byte] + type Input = Pair[String, Try[ByteAry]] + def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] + // Since the tool is loaded by reflection, check for catastrophic failure. 
+ protected def failed: Boolean + implicit protected class Failer[A](a: =>A) { + def orFailed[B >: A](b: =>B) = if (failed) b else a + } + protected def noToolError = new JpError(s"No javap tool available: ${getClass.getName} failed to initialize.") } - protected def noToolError = new JpError(s"No javap tool available: ${getClass.getName} failed to initialize.") -} -class JavapTool6(loader: ScalaClassLoader, printWriter: PrintWriter) extends JavapTool { - import JavapTool._ - val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull - val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull - override protected def failed = (EnvClass eq null) || (PrinterClass eq null) - - val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass) orFailed null - def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter = - PrinterCtr.newInstance(in, printWriter, env) orFailed null - def showable(fp: FakePrinter) = new Showable { - def show() = fp.asInstanceOf[{ def print(): Unit }].print() - } + class JavapTool6 extends JavapTool { + import JavapTool._ + val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull + val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull + override protected def failed = (EnvClass eq null) || (PrinterClass eq null) + + val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass) orFailed null + def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter = + PrinterCtr.newInstance(in, printWriter, env) orFailed null + def showable(fp: FakePrinter) = new Showable { + def show() = fp.asInstanceOf[{ def print(): Unit }].print() + } - lazy val parser = new JpOptions - def newEnv(opts: Seq[String]): FakeEnvironment = { - def result = { - val env: FakeEnvironment = EnvClass.newInstance() - parser(opts) foreach { case (name, value) => - val field = EnvClass getDeclaredField name - field setAccessible true - field.set(env, value.asInstanceOf[AnyRef]) + lazy val parser = new JpOptions + def newEnv(opts: Seq[String]): FakeEnvironment = { + def result = { + val env: FakeEnvironment = EnvClass.newInstance() + parser(opts) foreach { case (name, value) => + val field = EnvClass getDeclaredField name + field setAccessible true + field.set(env, value.asInstanceOf[AnyRef]) + } + env } - env + result orFailed null } - result orFailed null - } - override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = - (inputs map { - case (_, Success(ba)) => JpResult(showable(newPrinter(new ByteArrayInputStream(ba), newEnv(options)))) - case (_, Failure(e)) => JpResult(e.toString) - }).toList orFailed List(noToolError) -} + override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = + (inputs map { + case (_, Success(ba)) => JpResult(showable(newPrinter(new ByteArrayInputStream(ba), newEnv(options)))) + case (_, Failure(e)) => JpResult(e.toString) + }).toList orFailed List(noToolError) + } -class JavapTool7(loader: ScalaClassLoader, printWriter: PrintWriter, isettings: Option[ISettings]) extends JavapTool { + class JavapTool7 extends JavapTool { - import JavapTool._ - type Task = { - def call(): Boolean // true = ok - //def run(args: Array[String]): Int // all args - //def handleOptions(args: Array[String]): Unit // options, then run() or call() - } - // result of Task.run - //object TaskResult extends Enumeration { - // val Ok, Error, CmdErr, SysErr, Abnormal = Value - //} - val 
TaskClaas = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull - override protected def failed = TaskClaas eq null - - val TaskCtor = TaskClaas.getConstructor( - classOf[Writer], - classOf[JavaFileManager], - classOf[DiagnosticListener[_]], - classOf[JIterable[String]], - classOf[JIterable[String]] - ) orFailed null - - class JavaReporter extends DiagnosticListener[JavaFileObject] with Clearable { - import scala.collection.mutable.{ ArrayBuffer, SynchronizedBuffer } - type D = Diagnostic[_ <: JavaFileObject] - val diagnostics = new ArrayBuffer[D] with SynchronizedBuffer[D] - override def report(d: Diagnostic[_ <: JavaFileObject]) { - diagnostics += d - } - override def clear() = diagnostics.clear() - /** All diagnostic messages. - * @param locale Locale for diagnostic messages, null by default. - */ - def messages(implicit locale: Locale = null) = (diagnostics map (_ getMessage locale)).toList - - def reportable(raw: Boolean): String = { - // don't filter this message if raw, since the names are likely to differ - val container = "Binary file .* contains .*".r - val m = if (raw) messages - else messages filter (_ match { case container() => false case _ => true }) - clear() - if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator) - else "" + import JavapTool._ + type Task = { + def call(): Boolean // true = ok + //def run(args: Array[String]): Int // all args + //def handleOptions(args: Array[String]): Unit // options, then run() or call() } - } - val reporter = new JavaReporter - - // DisassemblerTool.getStandardFileManager(reporter,locale,charset) - val defaultFileManager: JavaFileManager = - (loader.tryToLoadClass[JavaFileManager]("com.sun.tools.javap.JavapFileManager").get getMethod ( - "create", + // result of Task.run + //object TaskResult extends Enumeration { + // val Ok, Error, CmdErr, SysErr, Abnormal = Value + //} + val TaskClaas = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull + override protected def failed = TaskClaas eq null + + val TaskCtor = TaskClaas.getConstructor( + classOf[Writer], + classOf[JavaFileManager], classOf[DiagnosticListener[_]], - classOf[PrintWriter] - ) invoke (null, reporter, new PrintWriter(System.err, true))).asInstanceOf[JavaFileManager] orFailed null - - // manages named arrays of bytes, which might have failed to load - class JavapFileManager(val managed: Seq[Input])(delegate: JavaFileManager = defaultFileManager) - extends ForwardingJavaFileManager[JavaFileManager](delegate) { - import JavaFileObject.Kind - import Kind._ - import StandardLocation._ - import JavaFileManager.Location - import java.net.URI - def uri(name: String): URI = new URI(name) // new URI("jfo:" + name) - - def inputNamed(name: String): Try[ByteAry] = (managed find (_._1 == name)).get._2 - def managedFile(name: String, kind: Kind) = kind match { - case CLASS => fileObjectForInput(name, inputNamed(name), kind) - case _ => null - } - // todo: just wrap it as scala abstractfile and adapt it uniformly - def fileObjectForInput(name: String, bytes: Try[ByteAry], kind: Kind): JavaFileObject = - new SimpleJavaFileObject(uri(name), kind) { - override def openInputStream(): InputStream = new ByteArrayInputStream(bytes.get) - // if non-null, ClassWriter wrongly requires scheme non-null - override def toUri: URI = null - override def getName: String = name - // suppress - override def getLastModified: Long = -1L + classOf[JIterable[String]], + classOf[JIterable[String]] + ) orFailed null + + class JavaReporter extends DiagnosticListener[JavaFileObject] with Clearable 
{ + import scala.collection.mutable.{ ArrayBuffer, SynchronizedBuffer } + type D = Diagnostic[_ <: JavaFileObject] + val diagnostics = new ArrayBuffer[D] with SynchronizedBuffer[D] + override def report(d: Diagnostic[_ <: JavaFileObject]) { + diagnostics += d } - override def getJavaFileForInput(location: Location, className: String, kind: Kind): JavaFileObject = - location match { - case CLASS_PATH => managedFile(className, kind) - case _ => null + override def clear() = diagnostics.clear() + /** All diagnostic messages. + * @param locale Locale for diagnostic messages, null by default. + */ + def messages(implicit locale: Locale = null) = (diagnostics map (_ getMessage locale)).toList + + def reportable(raw: Boolean): String = { + // don't filter this message if raw, since the names are likely to differ + val container = "Binary file .* contains .*".r + val m = if (raw) messages + else messages filter (_ match { case container() => false case _ => true }) + clear() + if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator) + else "" } - override def hasLocation(location: Location): Boolean = - location match { - case CLASS_PATH => true - case _ => false - } - } - val writer = new CharArrayWriter - def fileManager(inputs: Seq[Input]) = new JavapFileManager(inputs)() - def showable(raw: Boolean): Showable = { - val written = { - writer.flush() - val w = writer.toString - writer.reset() - w } - val msgs = reporter.reportable(raw) - new Showable { - val mw = msgs + written - // ReplStrippingWriter clips and scrubs on write(String) - // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping - def show() = - if (raw && isettings.isDefined) isettings.get withoutUnwrapping { writeLines() } - else writeLines() - private def writeLines() { - for (line <- Source.fromString(mw).getLines) printWriter write line+lineSeparator - printWriter.flush() + val reporter = new JavaReporter + + // DisassemblerTool.getStandardFileManager(reporter,locale,charset) + val defaultFileManager: JavaFileManager = + (loader.tryToLoadClass[JavaFileManager]("com.sun.tools.javap.JavapFileManager").get getMethod ( + "create", + classOf[DiagnosticListener[_]], + classOf[PrintWriter] + ) invoke (null, reporter, new PrintWriter(System.err, true))).asInstanceOf[JavaFileManager] orFailed null + + // manages named arrays of bytes, which might have failed to load + class JavapFileManager(val managed: Seq[Input])(delegate: JavaFileManager = defaultFileManager) + extends ForwardingJavaFileManager[JavaFileManager](delegate) { + import JavaFileObject.Kind + import Kind._ + import StandardLocation._ + import JavaFileManager.Location + import java.net.URI + def uri(name: String): URI = new URI(name) // new URI("jfo:" + name) + + def inputNamed(name: String): Try[ByteAry] = (managed find (_._1 == name)).get._2 + def managedFile(name: String, kind: Kind) = kind match { + case CLASS => fileObjectForInput(name, inputNamed(name), kind) + case _ => null } + // todo: just wrap it as scala abstractfile and adapt it uniformly + def fileObjectForInput(name: String, bytes: Try[ByteAry], kind: Kind): JavaFileObject = + new SimpleJavaFileObject(uri(name), kind) { + override def openInputStream(): InputStream = new ByteArrayInputStream(bytes.get) + // if non-null, ClassWriter wrongly requires scheme non-null + override def toUri: URI = null + override def getName: String = name + // suppress + override def getLastModified: Long = -1L + } + override def getJavaFileForInput(location: Location, className: String, kind: Kind): 
JavaFileObject = + location match { + case CLASS_PATH => managedFile(className, kind) + case _ => null + } + override def hasLocation(location: Location): Boolean = + location match { + case CLASS_PATH => true + case _ => false + } } - } - // eventually, use the tool interface - def task(options: Seq[String], claases: Seq[String], inputs: Seq[Input]): Task = { - //ServiceLoader.load(classOf[javax.tools.DisassemblerTool]). - //getTask(writer, fileManager, reporter, options.asJava, claases.asJava) - import JavaConverters.asJavaIterableConverter - TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, claases.asJava) - .orFailed (throw new IllegalStateException) - } - // a result per input - private def applyOne(raw: Boolean, options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] = - Try { - task(options, Seq(claas), inputs).call() - } map { - case true => JpResult(showable(raw)) - case _ => JpResult(reporter.reportable(raw)) - } recoverWith { - case e: java.lang.reflect.InvocationTargetException => e.getCause match { - case t: IllegalArgumentException => Success(JpResult(t.getMessage)) // bad option - case x => Failure(x) + val writer = new CharArrayWriter + def fileManager(inputs: Seq[Input]) = new JavapFileManager(inputs)() + def showable(raw: Boolean): Showable = { + val written = { + writer.flush() + val w = writer.toString + writer.reset() + w + } + val msgs = reporter.reportable(raw) + new Showable { + val mw = msgs + written + // ReplStrippingWriter clips and scrubs on write(String) + // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping + def show() = + if (raw && intp.isDefined) intp.get withoutUnwrapping { writeLines() } + else writeLines() + private def writeLines() { + for (line <- Source.fromString(mw).getLines) printWriter write line+lineSeparator + printWriter.flush() + } } - } lastly { - reporter.clear - } - override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { - case (claas, Success(_)) => applyOne(raw, options, claas, inputs).get - case (_, Failure(e)) => JpResult(e.toString) - }).toList orFailed List(noToolError) -} - -object JavapTool { - // >= 1.7 - val Tool = "com.sun.tools.javap.JavapTask" - - // < 1.7 - val Env = "sun.tools.javap.JavapEnvironment" - val Printer = "sun.tools.javap.JavapPrinter" - // "documentation" - type FakeEnvironment = AnyRef - type FakePrinter = AnyRef - - // support JavapEnvironment - class JpOptions { - private object Access { - final val PRIVATE = 0 - final val PROTECTED = 1 - final val PACKAGE = 2 - final val PUBLIC = 3 } - private val envActionMap: Map[String, (String, Any)] = { - val map = Map( - "-l" -> (("showLineAndLocal", true)), - "-c" -> (("showDisassembled", true)), - "-s" -> (("showInternalSigs", true)), - "-verbose" -> (("showVerbose", true)), - "-private" -> (("showAccess", Access.PRIVATE)), - "-package" -> (("showAccess", Access.PACKAGE)), - "-protected" -> (("showAccess", Access.PROTECTED)), - "-public" -> (("showAccess", Access.PUBLIC)), - "-all" -> (("showallAttr", true)) - ) - map ++ List( - "-v" -> map("-verbose"), - "-p" -> map("-private") - ) + // eventually, use the tool interface + def task(options: Seq[String], claases: Seq[String], inputs: Seq[Input]): Task = { + //ServiceLoader.load(classOf[javax.tools.DisassemblerTool]). 
+ //getTask(writer, fileManager, reporter, options.asJava, claases.asJava) + import JavaConverters.asJavaIterableConverter + TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, claases.asJava) + .orFailed (throw new IllegalStateException) } - def apply(opts: Seq[String]): Seq[(String, Any)] = { - opts flatMap { opt => - envActionMap get opt match { - case Some(pair) => List(pair) - case _ => - val charOpts = opt.tail.toSeq map ("-" + _) - if (charOpts forall (envActionMap contains _)) - charOpts map envActionMap - else Nil + // a result per input + private def applyOne(raw: Boolean, options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] = + Try { + task(options, Seq(claas), inputs).call() + } map { + case true => JpResult(showable(raw)) + case _ => JpResult(reporter.reportable(raw)) + } recoverWith { + case e: java.lang.reflect.InvocationTargetException => e.getCause match { + case t: IllegalArgumentException => Success(JpResult(t.getMessage)) // bad option + case x => Failure(x) } + } lastly { + reporter.clear } - } + override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { + case (claas, Success(_)) => applyOne(raw, options, claas, inputs).get + case (_, Failure(e)) => JpResult(e.toString) + }).toList orFailed List(noToolError) } - case class ToolArgs(raw: Boolean = false, help: Boolean = false) - - object ToolArgs { - def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) { - case ((t,others), s) => s match { - case "-help" => (t copy (help=true), others) - case "-raw" => (t copy (raw=true), others) - case _ => (t, others :+ s) + object JavapTool { + // >= 1.7 + val Tool = "com.sun.tools.javap.JavapTask" + + // < 1.7 + val Env = "sun.tools.javap.JavapEnvironment" + val Printer = "sun.tools.javap.JavapPrinter" + // "documentation" + type FakeEnvironment = AnyRef + type FakePrinter = AnyRef + + // support JavapEnvironment + class JpOptions { + private object Access { + final val PRIVATE = 0 + final val PROTECTED = 1 + final val PACKAGE = 2 + final val PUBLIC = 3 + } + private val envActionMap: Map[String, (String, Any)] = { + val map = Map( + "-l" -> (("showLineAndLocal", true)), + "-c" -> (("showDisassembled", true)), + "-s" -> (("showInternalSigs", true)), + "-verbose" -> (("showVerbose", true)), + "-private" -> (("showAccess", Access.PRIVATE)), + "-package" -> (("showAccess", Access.PACKAGE)), + "-protected" -> (("showAccess", Access.PROTECTED)), + "-public" -> (("showAccess", Access.PUBLIC)), + "-all" -> (("showallAttr", true)) + ) + map ++ List( + "-v" -> map("-verbose"), + "-p" -> map("-private") + ) + } + def apply(opts: Seq[String]): Seq[(String, Any)] = { + opts flatMap { opt => + envActionMap get opt match { + case Some(pair) => List(pair) + case _ => + val charOpts = opt.tail.toSeq map ("-" + _) + if (charOpts forall (envActionMap contains _)) + charOpts map envActionMap + else Nil + } + } } } - } - val helps = List( - "usage" -> ":javap [opts] [path or class or -]...", - "-help" -> "Prints this help message", - "-raw" -> "Don't unmangle REPL names", - "-verbose/-v" -> "Stack size, number of locals, method args", - "-private/-p" -> "Private classes and members", - "-package" -> "Package-private classes and members", - "-protected" -> "Protected classes and members", - "-public" -> "Public classes and members", - "-l" -> "Line and local variable tables", - "-c" -> "Disassembled code", - "-s" -> "Internal type signatures", - 
"-sysinfo" -> "System info of class", - "-constants" -> "Static final constants" - ) - - // match prefixes and unpack opts, or -help on failure - def massage(arg: String): Seq[String] = { - require(arg startsWith "-") - // arg matches opt "-foo/-f" if prefix of -foo or exactly -f - val r = """(-[^/]*)(/(-.))?""".r - def maybe(opt: String, s: String): Option[String] = opt match { - //case r(lf,_,sf) if (lf startsWith s) || (s == sf) => Some(lf) - // disambiguate by preferring short form - case r(lf,_,sf) if s == sf => Some(sf) - case r(lf,_,sf) if lf startsWith s => Some(lf) - case _ => None - } - def candidates(s: String) = (helps map (h => maybe(h._1, s))).flatten - // one candidate or one single-char candidate - def uniqueOf(maybes: Seq[String]) = { - def single(s: String) = s.length == 2 - if (maybes.length == 1) maybes - else if ((maybes count single) == 1) maybes filter single - else Nil - } - // each optchar must decode to exactly one option - def unpacked(s: String): Try[Seq[String]] = { - val ones = (s drop 1) map { c => - val maybes = uniqueOf(candidates(s"-$c")) - if (maybes.length == 1) Some(maybes.head) else None + case class ToolArgs(raw: Boolean = false, help: Boolean = false) + + object ToolArgs { + def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) { + case ((t,others), s) => s match { + case "-help" => (t copy (help=true), others) + case "-raw" => (t copy (raw=true), others) + case _ => (t, others :+ s) + } } - Try(ones) filter (_ forall (_.isDefined)) map (_.flatten) - //if (ones exists (_.isEmpty)) Nil else ones.flatten } - val res = uniqueOf(candidates(arg)) - if (res.nonEmpty) res - else (unpacked(arg) - getOrElse (Seq("-help"))) // or else someone needs help - } - def helper(pw: PrintWriter) = new Showable { - def show() = helps foreach (p => pw write "%-12.12s%s%n".format(p._1,p._2)) - } + val helps = List( + "usage" -> ":javap [opts] [path or class or -]...", + "-help" -> "Prints this help message", + "-raw" -> "Don't unmangle REPL names", + "-verbose/-v" -> "Stack size, number of locals, method args", + "-private/-p" -> "Private classes and members", + "-package" -> "Package-private classes and members", + "-protected" -> "Protected classes and members", + "-public" -> "Public classes and members", + "-l" -> "Line and local variable tables", + "-c" -> "Disassembled code", + "-s" -> "Internal type signatures", + "-sysinfo" -> "System info of class", + "-constants" -> "Static final constants" + ) + + // match prefixes and unpack opts, or -help on failure + def massage(arg: String): Seq[String] = { + require(arg startsWith "-") + // arg matches opt "-foo/-f" if prefix of -foo or exactly -f + val r = """(-[^/]*)(/(-.))?""".r + def maybe(opt: String, s: String): Option[String] = opt match { + // disambiguate by preferring short form + case r(lf,_,sf) if s == sf => Some(sf) + case r(lf,_,sf) if lf startsWith s => Some(lf) + case _ => None + } + def candidates(s: String) = (helps map (h => maybe(h._1, s))).flatten + // one candidate or one single-char candidate + def uniqueOf(maybes: Seq[String]) = { + def single(s: String) = s.length == 2 + if (maybes.length == 1) maybes + else if ((maybes count single) == 1) maybes filter single + else Nil + } + // each optchar must decode to exactly one option + def unpacked(s: String): Try[Seq[String]] = { + val ones = (s drop 1) map { c => + val maybes = uniqueOf(candidates(s"-$c")) + if (maybes.length == 1) Some(maybes.head) else None + } + Try(ones) filter (_ forall 
(_.isDefined)) map (_.flatten) + } + val res = uniqueOf(candidates(arg)) + if (res.nonEmpty) res + else (unpacked(arg) + getOrElse (Seq("-help"))) // or else someone needs help + } - val DefaultOptions = List("-protected", "-verbose") + def helper(pw: PrintWriter) = new Showable { + def show() = helps foreach (p => pw write "%-12.12s%s%n".format(p._1,p._2)) + } - def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = Seq(Env, Tool) exists (cn => hasClass(cl, cn)) + val DefaultOptions = List("-protected", "-verbose") - private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined + def isAvailable = Seq(Env, Tool) exists (cn => hasClass(loader, cn)) - private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool) + private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined - def apply(cl: ScalaClassLoader, pw: PrintWriter, is: Option[ISettings]) = - if (isTaskable(cl)) new JavapTool7(cl, pw, is) else new JavapTool6(cl, pw) + private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool) - implicit class Lastly[A](val t: Try[A]) extends AnyVal { - private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t } - def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _) + def apply() = if (isTaskable(loader)) new JavapTool7 else new JavapTool6 } } +object JavapClass { + def apply( + loader: ScalaClassLoader = ScalaClassLoader.appLoader, + printWriter: PrintWriter = new PrintWriter(System.out, true), + intp: Option[IMain] = None + ) = new JavapClass(loader, printWriter, intp) +} + object Javap { - def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapTool.isAvailable(cl) + def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapClass(cl).JavapTool.isAvailable def apply(path: String): Unit = apply(Seq(path)) - def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show()) + def apply(args: Seq[String]): Unit = JavapClass() apply args foreach (_.show()) trait Showable { def show(): Unit @@ -454,4 +467,8 @@ object Javap { def isError = false def show() = value.show() // output to tool's PrintWriter } + implicit class Lastly[A](val t: Try[A]) extends AnyVal { + private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t } + def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _) + } } -- cgit v1.2.3 From a9d2568b36c517b4691d2b5fcb60e6a8e30be3a5 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 10 Dec 2012 14:11:28 -0800 Subject: Fix for SI-6595, lost modifiers in early defs. Saw this by accident; the trees created for early defs would wholesale replace the modifiers with PRESUPER rather than combining them. FINAL was lost that way, as would be any other modifiers which might be valid there. 
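For readers who have not met them, "early defs" are the field initializers written in an "extends { ... } with" block; they run before the superclass constructor. The sketch below is adapted from the test case this patch adds (test/files/pos/t6595.scala, shown further down) and only restates the symptom; it is illustrative, not part of the change itself.

    class Foo extends {
      // An early definition. Before this fix the compiler rebuilt the ValDef with
      // Modifiers(PRESUPER) alone, dropping FINAL, so b0 no longer received the
      // constant type Int(5).
      final val b0 = 5
    } with AnyRef {
      // With FINAL preserved, b0 is a constant and can back a constant pattern,
      // which is what the @switch-based test in the patch verifies.
      def f(p: Int) = p match {
        case `b0` => 1
        case _    => 0
      }
    }
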
--- src/compiler/scala/tools/nsc/ast/Trees.scala | 2 +- test/files/pos/t6595.flags | 1 + test/files/pos/t6595.scala | 18 ++++++++++++++++++ 3 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t6595.flags create mode 100644 test/files/pos/t6595.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index e796258967..f1556495ec 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -104,7 +104,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => rhs = EmptyTree ) } - val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = Modifiers(PRESUPER)) } + val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) } val constrs = { if (constrMods hasFlag TRAIT) { diff --git a/test/files/pos/t6595.flags b/test/files/pos/t6595.flags new file mode 100644 index 0000000000..85d8eb2ba2 --- /dev/null +++ b/test/files/pos/t6595.flags @@ -0,0 +1 @@ +-Xfatal-warnings diff --git a/test/files/pos/t6595.scala b/test/files/pos/t6595.scala new file mode 100644 index 0000000000..437c0bcf05 --- /dev/null +++ b/test/files/pos/t6595.scala @@ -0,0 +1,18 @@ +import scala.annotation.switch + +class Foo extends { + final val b0 = 5 +} with AnyRef { + final val b1 = 10 + + // Using the @switch annotation as a means of testing that the + // type inferred for b0 is Int(5) and not Int. Only in the former + // case can a switch be generated. + def f(p: Int) = (p: @switch) match { + case `b0` => 1 + case `b1` => 2 + case 15 => 3 + case 20 => 4 + case _ => 5 + } +} -- cgit v1.2.3 From b26f12d4b116799e8860ddfd27ad398bc0c80b6a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 9 Dec 2012 19:40:29 -0800 Subject: Cleanup in module var creation. When all the logic in a method is for symbol creation, and then at the last minute it throws on a hastily zipped ValDef, it's really not a tree generation method, it's a symbol creation method. Eliminated redundancy and overgeneralization; marked some bits for further de-duplication. Did my best with my limited archeological skills to document what is supposed to be happening in eliminateModuleDefs. 
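As context for the documentation this patch adds to eliminateModuleDefs (see the RefChecks hunk below): an inner object that is not statically reachable is rewritten into a private module var plus an accessor that creates the instance on first access. The hand-written sketch below only approximates the shape of that expansion; names such as Foo$module are illustrative, and the synchronization/double-checked-locking details handled in Mixin are omitted.

    class Outer {
      object Foo { def x = 1 }
    }

    // ... is compiled to roughly this shape (sketch only):
    class OuterExpanded {
      class Foo$ { def x = 1 }                 // the module class
      private[this] var Foo$module: Foo$ = null
      def Foo: Foo$ = {
        if (Foo$module eq null)
          Foo$module = new Foo$                // created lazily, on first access
        Foo$module
      }
    }
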
--- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 20 +---- .../scala/tools/nsc/transform/CleanUp.scala | 3 +- .../scala/tools/nsc/transform/Flatten.scala | 2 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 12 +-- .../scala/tools/nsc/transform/UnCurry.scala | 2 +- .../tools/nsc/typechecker/MethodSynthesis.scala | 4 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 92 +++++++++++----------- src/reflect/scala/reflect/internal/Symbols.scala | 12 +++ src/reflect/scala/reflect/internal/TreeGen.scala | 5 -- test/files/run/t0091.check | 1 + test/files/run/t0091.scala | 15 ++-- 11 files changed, 84 insertions(+), 84 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index bec6de46d0..af874ed28c 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -65,28 +65,10 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { // Builds a tree of the form "{ lhs = rhs ; lhs }" def mkAssignAndReturn(lhs: Symbol, rhs: Tree): Tree = { - val lhsRef = mkUnattributedRef(lhs) + def lhsRef = if (lhs.owner.isClass) Select(This(lhs.owner), lhs) else Ident(lhs) Block(Assign(lhsRef, rhs) :: Nil, lhsRef) } - def mkModuleVarDef(accessor: Symbol) = { - val inClass = accessor.owner.isClass - val extraFlags = if (inClass) PrivateLocal | SYNTHETIC else 0 - - val mval = ( - accessor.owner.newVariable(nme.moduleVarName(accessor.name.toTermName), accessor.pos.focus, MODULEVAR | extraFlags) - setInfo accessor.tpe.finalResultType - addAnnotation VolatileAttr - ) - if (inClass) - mval.owner.info.decls enter mval - - ValDef(mval) - } - - def mkModuleAccessDef(accessor: Symbol, msym: Symbol) = - DefDef(accessor, Select(This(msym.owner), msym)) - def newModule(accessor: Symbol, tpe: Type) = { val ps = tpe.typeSymbol.primaryConstructor.info.paramTypes if (ps.isEmpty) New(tpe) diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 765ef39e6b..39460ef004 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -620,9 +620,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL { // create a symbol for the static field val stfieldSym = ( currentClass.newVariable(mkTerm("symbol$"), pos, PRIVATE | STATIC | SYNTHETIC | FINAL) - setInfo SymbolClass.tpe + setInfoAndEnter SymbolClass.tpe ) - currentClass.info.decls enter stfieldSym // create field definition and initialization val stfieldDef = theTyper.typedPos(pos)(VAL(stfieldSym) === rhs) diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index a52dadb134..b2602f47de 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -23,7 +23,7 @@ abstract class Flatten extends InfoTransform { val old = (scope lookupUnshadowedEntries sym.name).toList old foreach (scope unlink _) scope enter sym - log(s"lifted ${sym.fullLocationString}" + ( if (old.isEmpty) "" else " after unlinking $old from scope." )) + log(s"lifted ${sym.fullLocationString}" + ( if (old.isEmpty) "" else s" after unlinking $old from scope." 
)) old } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 39b894fbef..571b3aeefc 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -867,7 +867,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val cond = Apply(Select(moduleVarRef, Object_eq), List(NULL)) mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args) case _ => - abort("Invalid getter " + rhs + " for module in class " + clazz) + abort(s"Invalid getter $rhs for module in $clazz") } def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position, fieldSym: Symbol): Tree = { @@ -1059,11 +1059,13 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) { // add modules - val vdef = gen.mkModuleVarDef(sym) - addDef(position(sym), vdef) + val vsym = sym.owner.newModuleVarSymbol(sym) + addDef(position(sym), ValDef(vsym)) - val rhs = gen.newModule(sym, vdef.symbol.tpe) - val assignAndRet = gen.mkAssignAndReturn(vdef.symbol, rhs) + // !!! TODO - unravel the enormous duplication between this code and + // eliminateModuleDefs in RefChecks. + val rhs = gen.newModule(sym, vsym.tpe) + val assignAndRet = gen.mkAssignAndReturn(vsym, rhs) val attrThis = gen.mkAttributedThis(clazz) val rhs1 = mkInnerClassAccessorDoubleChecked(attrThis, assignAndRet, sym, List()) diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 90ea93d7b2..ccee8242d8 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -640,7 +640,7 @@ abstract class UnCurry extends InfoTransform tree1 } ) - assert(result.tpe != null, result + " tpe is null") + assert(result.tpe != null, result.shortClass + " tpe is null:\n" + result) result setType uncurryTreeType(result.tpe) } diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 18bc95af39..d74d5ecfbe 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -50,7 +50,9 @@ trait MethodSynthesis { class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) { def mkThis = This(clazz) setPos clazz.pos.focus - def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)(Select(mkThis, sym)) + def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)( + if (clazz.isClass) Select(This(clazz), sym) else Ident(sym) + ) private def isOverride(name: TermName) = clazzMember(name).alternatives exists (sym => !sym.isDeferred && (sym.owner != clazz)) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index b2334faa71..396c6acd38 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1251,57 +1251,61 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans finally popLevel() } - /** Eliminate ModuleDefs. - * - A top level object is replaced with their module class. - * - An inner object is transformed into a module var, created on first access. + /** Eliminate ModuleDefs. 
In all cases the ModuleDef (carrying a module symbol) is + * replaced with a ClassDef (carrying the corresponding module class symbol) with additional + * trees created as follows: * - * In both cases, this transformation returns the list of replacement trees: - * - Top level: the module class accessor definition - * - Inner: a class definition, declaration of module var, and module var accessor + * 1) A statically reachable object (either top-level or nested only in objects) receives + * no additional trees. + * 2) An inner object which matches an existing member (e.g. implements an interface) + * receives an accessor DefDef to implement the interface. + * 3) An inner object otherwise receives a private ValDef which declares a module var + * (the field which holds the module class - it has a name like Foo$module) and an + * accessor for that field. The instance is created lazily, on first access. */ - private def eliminateModuleDefs(tree: Tree): List[Tree] = { - val ModuleDef(mods, name, impl) = tree - val sym = tree.symbol - val classSym = sym.moduleClass - val cdef = ClassDef(mods | MODULE, name.toTypeName, Nil, impl) setSymbol classSym setType NoType - - def findOrCreateModuleVar() = localTyper.typedPos(tree.pos) { - // See SI-5012, SI-6712. + private def eliminateModuleDefs(moduleDef: Tree): List[Tree] = exitingRefchecks { + val ModuleDef(mods, name, impl) = moduleDef + val module = moduleDef.symbol + val site = module.owner + val moduleName = module.name.toTermName + // The typer doesn't take kindly to seeing this ClassDef; we have to + // set NoType so it will be ignored. + val cdef = ClassDef(module.moduleClass, impl) setType NoType + + // Create the module var unless the immediate owner is a class and + // the module var already exists there. See SI-5012, SI-6712. + def findOrCreateModuleVar() = { val vsym = ( - if (sym.owner.isTerm) NoSymbol - else sym.enclClass.info.decl(nme.moduleVarName(sym.name.toTermName)) + if (site.isTerm) NoSymbol + else site.info decl nme.moduleVarName(moduleName) ) - // In case we are dealing with local symbol then we already have - // to correct error with forward reference - if (vsym == NoSymbol) gen.mkModuleVarDef(sym) - else ValDef(vsym) + vsym orElse (site newModuleVarSymbol module) } - def createStaticModuleAccessor() = exitingRefchecks { - val method = ( - sym.owner.newMethod(sym.name.toTermName, sym.pos, (sym.flags | STABLE) & ~MODULE) - setInfoAndEnter NullaryMethodType(sym.moduleClass.tpe) - ) - localTyper.typedPos(tree.pos)(gen.mkModuleAccessDef(method, sym)) + def newInnerObject() = { + // Create the module var unless it is already in the module owner's scope. + // The lookup is on module.enclClass and not module.owner lest there be a + // nullary method between us and the class; see SI-5012. 
+ val moduleVar = findOrCreateModuleVar() + val rhs = gen.newModule(module, moduleVar.tpe) + val body = if (site.isTrait) rhs else gen.mkAssignAndReturn(moduleVar, rhs) + val accessor = DefDef(module, body.changeOwner(moduleVar -> module)) + + ValDef(moduleVar) :: accessor :: Nil } - def createInnerModuleAccessor(vdef: Tree) = List( - vdef, - localTyper.typedPos(tree.pos) { - val vsym = vdef.symbol - exitingRefchecks { - val rhs = gen.newModule(sym, vsym.tpe) - val body = if (sym.owner.isTrait) rhs else gen.mkAssignAndReturn(vsym, rhs) - DefDef(sym, body.changeOwner(vsym -> sym)) - } - } - ) - transformTrees(cdef :: { - if (!sym.isStatic) - createInnerModuleAccessor(findOrCreateModuleVar) - else if (sym.isOverridingSymbol) - List(createStaticModuleAccessor()) + def matchingInnerObject() = { + val newFlags = (module.flags | STABLE) & ~MODULE + val newInfo = NullaryMethodType(module.moduleClass.tpe) + val accessor = site.newMethod(moduleName, module.pos, newFlags) setInfoAndEnter newInfo + + DefDef(accessor, Select(This(site), module)) :: Nil + } + val newTrees = cdef :: ( + if (module.isStatic) + if (module.isOverridingSymbol) matchingInnerObject() else Nil else - Nil - }) + newInnerObject() + ) + transformTrees(newTrees map localTyper.typedPos(moduleDef.pos)) } def transformStat(tree: Tree, index: Int): List[Tree] = tree match { diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 1f8658cc86..8e776b8590 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -243,6 +243,18 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def newImport(pos: Position): TermSymbol = newTermSymbol(nme.IMPORT, pos) + def newModuleVarSymbol(accessor: Symbol): TermSymbol = { + val newName = nme.moduleVarName(accessor.name.toTermName) + val newFlags = MODULEVAR | ( if (this.isClass) PrivateLocal | SYNTHETIC else 0 ) + val newInfo = accessor.tpe.finalResultType + val mval = newVariable(newName, accessor.pos.focus, newFlags) addAnnotation VolatileAttr + + if (this.isClass) + mval setInfoAndEnter newInfo + else + mval setInfo newInfo + } + final def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol] diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 072e94e069..f3aa37bd15 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -127,11 +127,6 @@ abstract class TreeGen extends macros.TreeBuilder { if (sym.owner.isClass) mkAttributedRef(sym.owner.thisType, sym) else mkAttributedIdent(sym) - /** Builds an untyped reference to given symbol. 
*/ - def mkUnattributedRef(sym: Symbol): Tree = - if (sym.owner.isClass) Select(This(sym.owner), sym) - else Ident(sym) - /** Replaces tree type with a stable type if possible */ def stabilize(tree: Tree): Tree = { for(tp <- stableTypeFor(tree)) tree.tpe = tp diff --git a/test/files/run/t0091.check b/test/files/run/t0091.check index 7ed6ff82de..fd3c81a4d7 100644 --- a/test/files/run/t0091.check +++ b/test/files/run/t0091.check @@ -1 +1,2 @@ 5 +5 diff --git a/test/files/run/t0091.scala b/test/files/run/t0091.scala index eaddde0dbf..45235eb77b 100644 --- a/test/files/run/t0091.scala +++ b/test/files/run/t0091.scala @@ -4,10 +4,13 @@ object C extends B { object m extends A { def x = 5 } } object Test { - // The type annotation here is necessary, otherwise - // the compiler would reference C$m$ directly. - def o : B = C - def main(argv : Array[String]) : Unit = { - println(o.m.x) - } + // The type annotation here is necessary, otherwise + // the compiler would reference C$m$ directly. + def o1 : B = C + def o2 = C + + def main(argv : Array[String]) : Unit = { + println(o1.m.x) + println(o2.m.x) + } } -- cgit v1.2.3 From 4bc3fa102768e78b194fd6a594f4b87d29e4efbf Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 10 Dec 2012 00:27:21 -0800 Subject: Eliminated some sources of tree sharing. Tracking shared trees led to various perpetrators, the simplest of which are addressed herein. More consideration will be required: we need to approach the problem with sufficient command to assure both that trees are only shared when safe (which might without architectural changes be "never") but also that we do not duplicate definition trees unless it is appropriate. Why do we care about tree sharing? Sometimes, a lot of the time even, you can get away with sharing trees - but that's also why it's responsible for all kinds of trouble. If the compiler would break obviously and immediately then we wouldn't be doing it. The danger of sharing is that one piece of an AST may undergo a transformation or mutation and an unrelated piece of the AST will be partially dragged into the change. The danger has become more urgent with the arrival of macros. The first step in preventing tree sharing mishaps is to reduce the amount the compiler does so whatever is left is a lot easier to see. As a happy accident, it will also fix bugs. --- src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 20 +++++++++++--------- .../scala/tools/nsc/transform/ExtensionMethods.scala | 13 +++++++------ src/compiler/scala/tools/nsc/transform/Mixin.scala | 9 +++++---- 3 files changed, 23 insertions(+), 19 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index 3129748e9f..e3bf562a2c 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -30,19 +30,17 @@ trait TreeDSL { def returning[T](x: T)(f: T => Unit): T = util.returning(x)(f) object LIT extends (Any => Literal) { + def typed(x: Any) = apply(x) setType ConstantType(Constant(x)) def apply(x: Any) = Literal(Constant(x)) def unapply(x: Any) = condOpt(x) { case Literal(Constant(value)) => value } } - // You might think these could all be vals, but empirically I have found that - // at least in the case of UNIT the compiler breaks if you re-use trees. - // However we need stable identifiers to have attractive pattern matching. - // So it's inconsistent until I devise a better way. 
- val TRUE = LIT(true) - val FALSE = LIT(false) - val ZERO = LIT(0) - def NULL = LIT(null) - def UNIT = LIT(()) + // Boring, predictable trees. + def TRUE = LIT typed true + def FALSE = LIT typed false + def ZERO = LIT(0) + def NULL = LIT(null) + def UNIT = LIT(()) // for those preferring boring, predictable lives, without the thrills of tree-sharing // (but with the perk of typed trees) @@ -106,6 +104,10 @@ trait TreeDSL { def DOT(sym: Symbol) = SelectStart(Select(target, sym)) /** Assignment */ + // !!! This method is responsible for some tree sharing, but a diligent + // reviewer pointed out that we shouldn't blindly duplicate these trees + // as there might be DefTrees nested beneath them. It's not entirely + // clear how to proceed, so for now it retains the non-duplicating behavior. def ===(rhs: Tree) = Assign(target, rhs) /** Methods for sequences **/ diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 717c4b627b..77e7e013ab 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -232,12 +232,13 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = super.transformStats(stats, exprOwner) map { - case md @ ModuleDef(_, _, _) if extensionDefs contains md.symbol => - val defns = extensionDefs(md.symbol).toList map (member => - atOwner(md.symbol)(localTyper.typedPos(md.pos.focus)(member)) - ) - extensionDefs -= md.symbol - deriveModuleDef(md)(tmpl => deriveTemplate(tmpl)(_ ++ defns)) + case md @ ModuleDef(_, _, _) => + val extraStats = extensionDefs remove md.symbol match { + case Some(defns) => defns.toList map (defn => atOwner(md.symbol)(localTyper.typedPos(md.pos.focus)(defn.duplicate))) + case _ => Nil + } + if (extraStats.isEmpty) md + else deriveModuleDef(md)(tmpl => deriveTemplate(tmpl)(_ ++ extraStats)) case stat => stat } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 571b3aeefc..4ecc1e01db 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -1035,16 +1035,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } // if class is not a trait add accessor definitions else if (!clazz.isTrait) { + // This needs to be a def to avoid sharing trees + def accessedRef = accessedReference(sym) if (sym.hasAccessorFlag && (!sym.isDeferred || sym.hasFlag(lateDEFERRED))) { // add accessor definitions addDefDef(sym, { - val accessedRef = accessedReference(sym) if (sym.isSetter) { if (isOverriddenSetter(sym)) UNIT else accessedRef match { - case Literal(_) => accessedRef - case _ => - val init = Assign(accessedRef, Ident(sym.firstParam)) + case ref @ Literal(_) => ref + case ref => + val init = Assign(ref, Ident(sym.firstParam)) val getter = sym.getter(clazz) if (!needsInitFlag(getter)) init -- cgit v1.2.3 From e77db05f7306aeb80c46df9c8d76dfd739a97e6a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 10 Dec 2012 00:27:53 -0800 Subject: Normalized TRUE/FALSE trees in the pattern matcher. There's no longer any need to maintain a distinction, as the tree sharing among literals which it is trying to avoid no longer takes place. 
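For context, a small illustrative sketch follows; it is not part of this patch and uses made-up stand-in types, but it shows the hazard that sharing one mutable Literal node creates and that building a fresh tree per use avoids:

    // Hypothetical stand-ins for the compiler's mutable Tree and for TreeDSL's LIT;
    // only the val-versus-def sharing behaviour is the point here.
    object TreeSharingSketch {
      final class Tree(var tpe: String = "<untyped>")
      def LIT(value: Any): Tree = new Tree()

      val sharedTRUE: Tree = LIT(true)   // old style: every use site holds the same node
      def freshTRUE: Tree  = LIT(true)   // new style: each use builds its own node

      def main(args: Array[String]): Unit = {
        val a = sharedTRUE
        val b = sharedTRUE
        a.tpe = "Boolean(true)"          // "typing" one occurrence...
        println(b.tpe)                   // ...changes the other too: prints Boolean(true)

        val c = freshTRUE
        val d = freshTRUE
        c.tpe = "Boolean(true)"
        println(d.tpe)                   // prints <untyped>: the nodes are independent
      }
    }

With a val, mutating the type or position of one occurrence silently leaks into every other occurrence of the "same" literal; a def trades a little allocation for isolation, which is why TreeDSL now exposes TRUE and FALSE as defs.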
--- src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 5 ----- src/compiler/scala/tools/nsc/transform/Erasure.scala | 2 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 2 +- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 6 +++--- src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 10 +++++----- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 4 ++-- 6 files changed, 12 insertions(+), 17 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index e3bf562a2c..1c6bba19b3 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -42,11 +42,6 @@ trait TreeDSL { def NULL = LIT(null) def UNIT = LIT(()) - // for those preferring boring, predictable lives, without the thrills of tree-sharing - // (but with the perk of typed trees) - def TRUE_typed = LIT(true) setType ConstantType(Constant(true)) - def FALSE_typed = LIT(false) setType ConstantType(Constant(false)) - object WILD { def empty = Ident(nme.WILDCARD) def apply(tpe: Type) = Ident(nme.WILDCARD) setType tpe diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index ba799f9186..45bd5cf003 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -522,7 +522,7 @@ abstract class Erasure extends AddInterfaces && !exitingErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?) import CODE._ - val _false = FALSE_typed + val _false = FALSE val pt = member.tpe.resultType lazy val zero = if (_false.tpe <:< pt) _false diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 4ecc1e01db..45ef083b66 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -875,7 +875,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val bitmapSym = bitmapFor(clazz, offset, sym) val kind = bitmapKind(sym) val mask = maskForOffset(offset, sym, kind) - val msg = "Uninitialized field: " + unit.source + ": " + pos.line + val msg = s"Uninitialized field: ${unit.source}: ${pos.line}" val result = IF (mkTest(clazz, mask, bitmapSym, false, kind)) . THEN (retVal) . 
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index ccee8242d8..90ea6c94d8 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -362,11 +362,11 @@ abstract class UnCurry extends InfoTransform val body = bodyForIDA match { case Match(selector, cases) => - if (cases exists treeInfo.isDefaultCase) TRUE_typed + if (cases exists treeInfo.isDefaultCase) TRUE else doSubst(Match(/*gen.mkUnchecked*/(selector), - (cases map (c => deriveCaseDef(c)(x => TRUE_typed))) :+ ( - DEFAULT ==> FALSE_typed))) + (cases map (c => deriveCaseDef(c)(x => TRUE))) :+ ( + DEFAULT ==> FALSE))) } body.changeOwner(fun.symbol -> methSym) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 42c34526d7..ede117f51a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -1146,7 +1146,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL val expectedOuter = expectedTp.prefix match { case ThisType(clazz) => THIS(clazz) case pre if pre != NoType => REF(pre.prefix, pre.termSymbol) - case _ => TRUE_typed // fallback for SI-6183 + case _ => TRUE // fallback for SI-6183 } // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix` @@ -1278,10 +1278,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // one alternative may still generate multiple trees (e.g., an extractor call + equality test) // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers val combinedAlts = altss map (altTreeMakers => - ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(TRUE_typed)))(casegen)) + ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(TRUE)))(casegen)) ) - val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => FALSE_typed)) + val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => FALSE)) codegenAlt.ifThenElseZero(findAltMatcher, substitution(next)) } } @@ -3241,7 +3241,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL } } - private val defaultLabel: Symbol = newSynthCaseLabel("default") + private val defaultLabel: Symbol = newSynthCaseLabel("default") /** Collapse guarded cases that switch on the same constant (the last case may be unguarded). 
* @@ -3682,7 +3682,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree = ifThenElseZero(cond, BLOCK( - condSym === TRUE_typed, + condSym === TRUE, nextBinder === res, next )) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index fd6acc25cc..a1c1b53cce 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2508,7 +2508,7 @@ trait Typers extends Modes with Adaptations with Tags { import CODE._ // need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up - val casesTrue = if (isPartial) cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE_typed)).duplicate.asInstanceOf[CaseDef]) else Nil + val casesTrue = if (isPartial) cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE)).duplicate.asInstanceOf[CaseDef]) else Nil // println("casesTrue "+ casesTrue) def parentsPartial(targs: List[Type]) = addSerializable(appliedType(AbstractPartialFunctionClass.typeConstructor, targs)) @@ -2594,7 +2594,7 @@ trait Typers extends Modes with Adaptations with Tags { methodSym setInfoAndEnter MethodType(paramSyms, BooleanClass.tpe) val match_ = methodBodyTyper.typedMatch(gen.mkUnchecked(selector), casesTrue, mode, BooleanClass.tpe) - val body = methodBodyTyper.virtualizedMatch(match_ updateAttachment DefaultOverrideMatchAttachment(FALSE_typed), mode, BooleanClass.tpe) + val body = methodBodyTyper.virtualizedMatch(match_ updateAttachment DefaultOverrideMatchAttachment(FALSE), mode, BooleanClass.tpe) DefDef(methodSym, body) } -- cgit v1.2.3 From 2d6ce2a10fd6e5627de586d616b8e7cb409d1417 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 12 Dec 2012 16:24:12 -0800 Subject: Removed src/detach. Prying one more stowaway off the side of the train. This isn't used, hasn't been used, and won't be used. 
--- build.detach.xml | 184 --- src/detach/library/scala/remoting/Channel.scala | 190 ---- src/detach/library/scala/remoting/Debug.scala | 27 - .../library/scala/remoting/ServerChannel.scala | 68 -- src/detach/library/scala/remoting/detach.scala | 49 - src/detach/library/scala/runtime/RemoteRef.scala | 182 --- .../library/scala/runtime/remoting/Debug.scala | 85 -- .../scala/runtime/remoting/RegistryDelegate.scala | 192 ---- .../scala/runtime/remoting/RemoteBooleanRef.scala | 51 - .../scala/runtime/remoting/RemoteByteRef.scala | 51 - .../scala/runtime/remoting/RemoteCharRef.scala | 51 - .../scala/runtime/remoting/RemoteDoubleRef.scala | 50 - .../scala/runtime/remoting/RemoteFloatRef.scala | 50 - .../library/scala/runtime/remoting/RemoteGC.scala | 66 -- .../scala/runtime/remoting/RemoteIntRef.scala | 51 - .../scala/runtime/remoting/RemoteLongRef.scala | 51 - .../scala/runtime/remoting/RemoteObjectRef.scala | 51 - .../scala/runtime/remoting/RemoteShortRef.scala | 50 - src/detach/plugin/scala/tools/detach/Detach.scala | 1190 -------------------- .../plugin/scala/tools/detach/DetachPlugin.scala | 41 - src/detach/plugin/scalac-plugin.xml | 4 - test/files/detach-neg/det_bar.check | 4 - test/files/detach-neg/det_bar.scala | 13 - test/files/detach-run/actor-run.check | 5 - test/files/detach-run/actor/Client.scala | 54 - test/files/detach-run/actor/Server.scala | 27 - test/files/detach-run/actor/ServerConsole.scala | 75 -- test/files/detach-run/actor/actor.flags | 1 - test/files/detach-run/actor/actor.scala | 157 --- test/files/detach-run/actor/java.policy | 25 - test/files/detach-run/basic-run.check | 5 - test/files/detach-run/basic/Client.scala | 48 - test/files/detach-run/basic/Server.scala | 22 - test/files/detach-run/basic/ServerConsole.scala | 83 -- test/files/detach-run/basic/basic.flags | 1 - test/files/detach-run/basic/basic.scala | 169 --- test/files/detach-run/basic/java.policy | 26 - 37 files changed, 3449 deletions(-) delete mode 100644 build.detach.xml delete mode 100644 src/detach/library/scala/remoting/Channel.scala delete mode 100644 src/detach/library/scala/remoting/Debug.scala delete mode 100644 src/detach/library/scala/remoting/ServerChannel.scala delete mode 100644 src/detach/library/scala/remoting/detach.scala delete mode 100644 src/detach/library/scala/runtime/RemoteRef.scala delete mode 100644 src/detach/library/scala/runtime/remoting/Debug.scala delete mode 100644 src/detach/library/scala/runtime/remoting/RegistryDelegate.scala delete mode 100644 src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala delete mode 100644 src/detach/library/scala/runtime/remoting/RemoteByteRef.scala delete mode 100644 src/detach/library/scala/runtime/remoting/RemoteCharRef.scala delete mode 100644 src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala delete mode 100644 src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala delete mode 100644 src/detach/library/scala/runtime/remoting/RemoteGC.scala delete mode 100644 src/detach/library/scala/runtime/remoting/RemoteIntRef.scala delete mode 100644 src/detach/library/scala/runtime/remoting/RemoteLongRef.scala delete mode 100644 src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala delete mode 100644 src/detach/library/scala/runtime/remoting/RemoteShortRef.scala delete mode 100644 src/detach/plugin/scala/tools/detach/Detach.scala delete mode 100644 src/detach/plugin/scala/tools/detach/DetachPlugin.scala delete mode 100644 src/detach/plugin/scalac-plugin.xml delete mode 100644 test/files/detach-neg/det_bar.check 
delete mode 100644 test/files/detach-neg/det_bar.scala delete mode 100644 test/files/detach-run/actor-run.check delete mode 100644 test/files/detach-run/actor/Client.scala delete mode 100644 test/files/detach-run/actor/Server.scala delete mode 100644 test/files/detach-run/actor/ServerConsole.scala delete mode 100644 test/files/detach-run/actor/actor.flags delete mode 100644 test/files/detach-run/actor/actor.scala delete mode 100644 test/files/detach-run/actor/java.policy delete mode 100644 test/files/detach-run/basic-run.check delete mode 100644 test/files/detach-run/basic/Client.scala delete mode 100644 test/files/detach-run/basic/Server.scala delete mode 100644 test/files/detach-run/basic/ServerConsole.scala delete mode 100644 test/files/detach-run/basic/basic.flags delete mode 100644 test/files/detach-run/basic/basic.scala delete mode 100644 test/files/detach-run/basic/java.policy (limited to 'src') diff --git a/build.detach.xml b/build.detach.xml deleted file mode 100644 index 03360e36d5..0000000000 --- a/build.detach.xml +++ /dev/null @@ -1,184 +0,0 @@ - - - - - -SuperSabbus for Scala detach plugin. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/src/detach/library/scala/remoting/Channel.scala b/src/detach/library/scala/remoting/Channel.scala deleted file mode 100644 index e60d16c0d5..0000000000 --- a/src/detach/library/scala/remoting/Channel.scala +++ /dev/null @@ -1,190 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: Channel.scala 18365 2009-07-21 11:00:42Z michelou $ - -package scala.remoting - -import java.io._ -import java.net._ -import java.rmi.server.RMIClassLoader - -/**

    - * The class Channel implements (basic) typed channels - * which use Java socket communication and Scala type manifests to - * provide type-safe send/receive operations between a localhost and another - * remote machine by specifying some host and port. - *

    - * - * @author Stephane Micheloud - * @version 1.1 - */ -class Channel protected (socket: Socket) { - - // Create a socket without a timeout - def this(host: String, port: Int) = this(new Socket(host, port)) - - // // Create a socket with a timeout - // val sockaddr: SocketAddress = new InetSocketAddress(addr, port) - // val socket = new Socket() - // // If the timeout occurs, SocketTimeoutException is thrown. - // socket.connect(sockaddr, 2000) // 2 seconds - - /** Returns the local address of this channel. */ - val host = socket.getInetAddress.getHostAddress - - /** Returns the port on which this channel is listening. */ - val port = socket.getLocalPort - - private var cl: ClassLoader = - try { - // requires permission in Java policy file - val codebase = System.getProperty("java.rmi.server.codebase") - if (codebase != null) info("codebase="+codebase) - RMIClassLoader.getClassLoader(codebase) - } - catch { - case e: Exception => - sys.error("Class loader undefined: " + e.getMessage) - null - } - def classLoader: ClassLoader = cl - def classLoader_=(x: ClassLoader) { cl = x } - - info(""+this) - - private class CustomObjectInputStream(in: InputStream) - extends ObjectInputStream(in) { - override def resolveClass(desc: ObjectStreamClass): Class[_] = - if (cl eq null) - super.resolveClass(desc) - else - try { - info("resolve class "+desc.getName) - cl loadClass desc.getName - } - catch { - case e: ClassNotFoundException => - super.resolveClass(desc) - } - } - - // lazy modifier is required! - private lazy val in = - try { - new CustomObjectInputStream(socket.getInputStream) - } - catch { - case e: IOException => - sys.error("Input stream undefined: "+e.getMessage+" ("+this+")") - null - } - private lazy val out = - try { - new ObjectOutputStream(socket.getOutputStream) - } - catch { - case e: IOException => - sys.error("Output stream undefined: "+e.getMessage+" ("+this+")") - null - } - - /** receive<primtype> methods may throw an - * IOException. - */ - def receiveUnit = receive[Unit] - def receiveBoolean = receive[Boolean] - def receiveByte = receive[Byte] - def receiveChar = receive[Char] - def receiveShort = receive[Short] - def receiveInt = receive[Int] - def receiveLong = receive[Long] - def receiveFloat = receive[Float] - def receiveDouble = receive[Double] - def receiveString = receive[String] - - /** receive method may throw either an - * ClassNotFoundException or an IOException. - * - * @throw ChannelException if received value has not - * the expected type. - */ - @throws(classOf[ChannelException]) - def receive[T](implicit expected: scala.reflect.ClassTag[T]): T = { - val found = in.readObject().asInstanceOf[reflect.ClassTag[_]] - info("receive: found="+found+", expected="+expected) - import scala.reflect.ClassTag - val x = found match { - case ClassTag.Unit => () - case ClassTag.Boolean => in.readBoolean() - case ClassTag.Byte => in.readByte() - case ClassTag.Char => in.readChar() - case ClassTag.Short => in.readShort() - case ClassTag.Int => in.readInt() - case ClassTag.Long => in.readLong() - case ClassTag.Float => in.readFloat() - case ClassTag.Double => in.readDouble() - case _ => in.readObject() - } - val res = if (found <:< expected) - x.asInstanceOf[T] - else - throw new ChannelException( - "\n\tfound \""+found+"\"\n\texpected \""+expected+"\"") - info("received "+res+" (available="+in.available+")") - res - } - - /** ? method may throw either an - * ClassNotFoundException or an IOException. 
- */ - def ?[T](implicit t: scala.reflect.ClassTag[T]): T = receive[T](t) - - /** send method may throw an IOException. - */ - def send[T](x: T)(implicit t: scala.reflect.ClassTag[T]) { - out writeObject t - x match { - case x: Unit => // nop - case x: Boolean => out writeBoolean x - case x: Byte => out writeByte x - case x: Char => out writeChar x - case x: Short => out writeShort x - case x: Int => out writeInt x - case x: Long => out writeLong x - case x: Float => out writeFloat x - case x: Double => out writeDouble x - case x => out writeObject x - } - out.flush() - info("sent "+x) - } - - /** ! method may throw an IOException. - */ - def ![T](x: T)(implicit m: scala.reflect.ClassTag[T]) { send(x)(m) } - - def close() { - try { socket.close() } - catch { case e: IOException => } - info(this+" closed") - } - - override def toString: String = socket.toString - - private def info(msg: String) { - runtime.remoting.Debug.info("[Channel] "+msg) - } -} - -/** ChannelException may be thrown by the operation - * receive when the received data has not the expected type. - */ -case class ChannelException(msg: String) extends IOException(msg) - diff --git a/src/detach/library/scala/remoting/Debug.scala b/src/detach/library/scala/remoting/Debug.scala deleted file mode 100644 index 79f2bcedde..0000000000 --- a/src/detach/library/scala/remoting/Debug.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: Debug.scala 17412 2009-03-31 10:08:25Z michelou $ - -package scala.remoting - -/** - * @author Stephane Micheloud - * @version 1.0 - */ -object Debug extends runtime.remoting.Debug { - private val f = new java.text.SimpleDateFormat("HH:mm:ss") - private val c = new java.util.GregorianCalendar - - def getTime: String = f format c.getTime - - def getLocation(obj: AnyRef): String = { - val s = obj.getClass().getClassLoader().toString() - s substring s.indexOf('[') - } -} diff --git a/src/detach/library/scala/remoting/ServerChannel.scala b/src/detach/library/scala/remoting/ServerChannel.scala deleted file mode 100644 index 7828f85a1d..0000000000 --- a/src/detach/library/scala/remoting/ServerChannel.scala +++ /dev/null @@ -1,68 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: ServerChannel.scala 18365 2009-07-21 11:00:42Z michelou $ - -package scala.remoting - -import java.net.{ServerSocket, Socket} - -/**

    - * Creates a server channel and binds its associated socket to the - * specified port number.
    - * Example: - *

    - *  class ComputeChannel(s: Socket) extends Channel(s) {
    - *    def receiveFunc = receive[Int => Int]
    - *  }
    - *  class ComputeServer(p: Int)
    - *  extends AbstractServerChannel[ComputeChannel](p) {
    - *     def newChannel(s: Socket) = new ComputeChannel(s)
    - *  }
    - * - * @author Stephane Micheloud - * @version 1.0 - */ -class ServerChannel(p: Int) extends AbstractServerChannel[Channel](p) { - def newChannel(s: Socket) = new Channel(s) -} - -abstract class AbstractServerChannel[T <: Channel](_port: Int) { - - /** Creates an input channel and binds its associated socket to any - * free port. - */ - def this() = this(0) - - // The maximum queue length for incoming requests to connect is set to 50. - private val serverSocket = new ServerSocket(_port) - - /** Returns the local address of this channel. */ - val host = serverSocket.getInetAddress.getHostAddress - - /** Returns the port on which this channel is listening. */ - val port = serverSocket.getLocalPort - info("Listening on port "+port) - - protected def newChannel(socket: Socket): T - - def accept: T = { - System.gc() // required! - newChannel(serverSocket.accept) - } - - def close() { - try { serverSocket.close() } - catch { case e: java.io.IOException => } - info("Server socket "+host+":"+port+" closed") - } - - protected def info(msg: String) { - runtime.remoting.Debug.info("[ServerChannel] "+msg) - } -} diff --git a/src/detach/library/scala/remoting/detach.scala b/src/detach/library/scala/remoting/detach.scala deleted file mode 100644 index 51a3ac515d..0000000000 --- a/src/detach/library/scala/remoting/detach.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: detach.scala 16901 2009-01-13 15:37:05Z michelou $ - -package scala.remoting - - -/** The detach object is a marker object which informs - * the Scala compiler that arguments whose type is a function type are - * eligible for remote closure generation. 
- * - * @author Stephane Micheloud - * @version 1.0, 13/07/2005 - */ -object detach { - - def apply[R](f: Function0[R]): Function0[R] = f - def apply[T0, R](f: Function1[T0, R]): Function1[T0, R] = f - def apply[T0, T1, R](f: Function2[T0, T1, R]): Function2[T0, T1, R] = f - def apply[T0, T1, T2, R](f: Function3[T0, T1, T2, R]): Function3[T0, T1, T2, R] = f - def apply[T0, T1, T2, T3, R](f: Function4[T0, T1, T2, T3, R]): Function4[T0, T1, T2, T3, R] = f - def apply[T0, T1, T2, T3, T4, R](f: Function5[T0, T1, T2, T3, T4, R]): Function5[T0, T1, T2, T3, T4, R] = f - def apply[T0, T1, T2, T3, T4, T5, R](f: Function6[T0, T1, T2, T3, T4, T5, R]): Function6[T0, T1, T2, T3, T4, T5, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, R](f: Function7[T0, T1, T2, T3, T4, T5, T6, R]): Function7[T0, T1, T2, T3, T4, T5, T6, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, R](f: Function8[T0, T1, T2, T3, T4, T5, T6, T7, R]): Function8[T0, T1, T2, T3, T4, T5, T6, T7, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, R](f: Function9[T0, T1, T2, T3, T4, T5, T6, T7, T8, R]): Function9[T0, T1, T2, T3, T4, T5, T6, T7, T8, R] = f - - // since 2.7.0 - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R](f: Function10[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R]): Function10[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R](f: Function11[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R]): Function11[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R](f: Function12[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R]): Function12[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R](f: Function13[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R]): Function13[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R](f: Function14[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R]): Function14[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R](f: Function15[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R]): Function15[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R](f: Function16[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R]): Function16[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R](f: Function17[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R]): Function17[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R](f: Function18[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R]): Function18[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R](f: Function19[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R]): Function19[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R] = f - def 
apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R](f: Function20[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R]): Function20[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R](f: Function21[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R]): Function21[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R] = f - def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R](f: Function22[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R]): Function22[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R] = f -} - diff --git a/src/detach/library/scala/runtime/RemoteRef.scala b/src/detach/library/scala/runtime/RemoteRef.scala deleted file mode 100644 index e65b22cb71..0000000000 --- a/src/detach/library/scala/runtime/RemoteRef.scala +++ /dev/null @@ -1,182 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RemoteRef.scala 18365 2009-07-21 11:00:42Z michelou $ - -package scala.runtime - -import java.net.{InetAddress, MalformedURLException} -import java.rmi.{NoSuchObjectException, NotBoundException, Remote} -import java.rmi.registry.{LocateRegistry, Registry} -import java.rmi.server.{ExportException, RemoteObject, UnicastRemoteObject} - -import scala.runtime.remoting.{Debug, RemoteGC} - -/** - * - * @author Stephane Micheloud - * @version 1.0 - */ -object RemoteRef { /*extends Thread { - start() - - private class QuitException extends Exception - private var isTerminated = false - - // keeps track of live remote objects - val remoteGC = new RemoteGC - - override def run() { - info("started thread") - try { - while (!isTerminated) { - this.synchronized { - try { - wait(200) - } catch { - case _: InterruptedException => - if (isTerminated) throw new QuitException - } - remoteGC.gc() - if (remoteGC.allClosed) - throw new QuitException - } // synchronized - - } - } catch { - case _: QuitException => - // allow thread to exit - } - } -*/ - try { - val prop = System.getProperty("sun.rmi.dgc.server.gcInterval") - if (prop eq null) - System.setProperty("sun.rmi.dgc.server.gcInterval", "10000") - } - catch { - case e => - error(e.getMessage) - } - - private val host = - try { - val prop = System.getProperty("java.rmi.server.hostname") - if (prop ne null) prop else InetAddress.getLocalHost.getHostAddress - } - catch { - case e => - warning(e.getMessage) - InetAddress.getLocalHost.getHostAddress - } - - private val port = - try { - val prop = System.getProperty("scala.remoting.port") - if (prop ne null) prop.toInt else Registry.REGISTRY_PORT - } - catch { - case e => - warning(e.getMessage) - Registry.REGISTRY_PORT // default port - } - - private val registry = - try { - LocateRegistry.createRegistry(port) - } - catch { - case e => - warning(e.getMessage) - LocateRegistry.getRegistry(host, port) - } - - private val prefix = "//"+host+":"+port+"/" - printDebugInfos - - // Variant 1: rebind/unbind - def bind(name: String, x: Remote): Remote = - 
try { - registry.rebind(prefix+name, x) - info("\""+prefix+name+"\" bound") - val stub = RemoteObject.toStub(x) - //remoteGC.newRef(stub) - stub - } catch { - case e: MalformedURLException => - error(e.getMessage); null - case e: ExportException => - info(""+e); null - case e: Exception => // AlreadyBoundException, etc.. - throw e - } - - def unbind(name: String) = - try { - registry.unbind(prefix+name) - info("\""+name+"\" unbound") - } catch { - case e: java.io.EOFException => - warning(e.getMessage) - case e: NotBoundException => - warning(e.getMessage+" already unbound") - case e: MalformedURLException => - error(e.getMessage) - case e: Exception => - throw e - } -/* - // Variant 2: un-/exportObject - def bind(name: String, x: Remote): Remote = - try { - val ex = UnicastRemoteObject.exportObject(x) - registry.rebind(prefix+name, ex) - info("\""+prefix+name+"\" bound") - //val stub = RemoteObject.toStub(ex) - //remoteGC.newRef(ex) - ex //stub - } catch { - case e: MalformedURLException => - error(e.getMessage); null - case e: ExportException => - info(""+e); null - case e: Exception => // AlreadyBoundException, etc.. - throw e - } - - def unbind(x: Remote) { - try { - UnicastRemoteObject.unexportObject(x, false) - info("\""+x+"\" unbound") - } catch { - case e: java.io.EOFException => - warning(e.getMessage) - case e: NotBoundException => - warning(e.getMessage+" already unbound") - case e: MalformedURLException => - error(e.getMessage) - case e: Exception => - throw e - } - } -*/ - private def info(msg: String) { Debug.info("[RemoteRef] "+msg) } - private def warning(msg: String) { Debug.warning("[RemoteRef] "+msg) } - private def error(msg: String) { Debug.error("[RemoteRef] "+msg) } - - private def printDebugInfos() { - def property(name: String): String = - name+"="+( - try { System.getProperty(name, "") } - catch { case e => warning(e.getMessage); "?" 
}) - info(property("java.rmi.server.hostname")) - info(property("sun.rmi.dgc.server.gcInterval")) - info("registry="+registry) - info("prefix="+prefix) - } -} diff --git a/src/detach/library/scala/runtime/remoting/Debug.scala b/src/detach/library/scala/runtime/remoting/Debug.scala deleted file mode 100644 index 06cdc67997..0000000000 --- a/src/detach/library/scala/runtime/remoting/Debug.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: Debug.scala 17777 2009-05-19 18:16:25Z michelou $ - -package scala.runtime.remoting - -/** - * @author Stephane Micheloud - * @version 1.0 - */ -object Debug extends Debug { - override def info (msg: String) { if (lib) super.info(msg) } - override def verbose(msg: String) { if (lib) super.verbose(msg) } - override def warning(msg: String) { if (lib) super.warning(msg) } - override def error (msg: String) { if (lib) super.error(msg) } -} - -/** - * @author Stephane Micheloud - * @version 1.0 - */ -class Debug(tag: String) { - - def this() = this("") - - object Level extends Enumeration { - type Level = Value - val SILENT, ERROR, WARNING, VERBOSE, INFO = Value - } - - private val level0 = - try { - val prop = System.getProperty("scala.remoting.logLevel") - if (prop ne null) prop.toLowerCase else "" - } - catch { - case e => - Console.err.println(e.getMessage) - "" - } - - import Level._ - protected var (lev, lib) = { - val p = java.util.regex.Pattern.compile("(error|warning|verbose|info)(\\,lib)?(.*)") - val m = p matcher level0 - val (s, b) = - if (m.matches) (m.group(1), m.group(2) ne null) - else ("", false) - s match { - case "error" => (ERROR , b) - case "warning" => (WARNING, b) - case "verbose" => (VERBOSE, b) - case "info" => (INFO , b) - case _ => (SILENT , false) - } - } - - def level = lev - def level_= (lev: Level) = { this.lev = lev } - - private val tag0: String = - if (tag != null & tag.length > 0) tag+" " else "" - - def info(msg: String) { - if (lev >= INFO) Console.println(tag0 + "(info): " + msg) - } - - def verbose(msg: String) { - if (lev >= VERBOSE) Console.println(tag0 + "(verb): " + msg) - } - - def warning(msg: String) { - if (lev >= WARNING) Console.err.println(tag0 + "(warn): " + msg) - } - - def error(msg: String) { - if (lev >= ERROR) Console.err.println(tag0 + "(erro): " + msg) - } -} diff --git a/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala b/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala deleted file mode 100644 index 1105832ef7..0000000000 --- a/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala +++ /dev/null @@ -1,192 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RegistryDelegate.scala 18234 2009-07-07 13:21:57Z michelou $ - -package scala.runtime.remoting - -import java.rmi.{RMISecurityManager, Remote, RemoteException} -import java.rmi.registry.{LocateRegistry, Registry} -import java.rmi.server.UnicastRemoteObject - -/** - *

    - * This class implements the registry delegate concept - * (see http://www.genady.net/rmi/v20/docs/delegate/RegistryDelegate.html) - *

    - *

    - * In order to enforce some level of security, the standard RMI registry - * implementation (e.g. rmiregistry.exe) only allows processes - * on the same host to register objects in the registry (think of a bank - * running a registry on one of its servers, and doesn't want anybody - * modifying it). So, by design, if a process tries to - * bind(String, Remote) an object to a remote registry, - * an exception will be thrown. - *

    - *

    - * However, the design of a distributed system may require remote clients to - * register themselves in a central registry. If such system is deployed in a - * controlled and trusted environment (e.g., a firewalled intranet with tight - * access control), the security risk may be acceptable. - *

    - *

    - * The simplest technical solution to the remote registration problem is to - * have a registry delegate. A registry delegate is an object that serves as - * a proxy for the real registry. The delegate itself usually appears in the - * registry under a well known name. It implements the Registry interface and - * simply delegates all method calls to the appropriate methods of the real - * registry. The delegate is allowed to perform bind and unbind operations - * because it is running on the same host as the registry. - *

    - *

    - * The common scenario for starting a registry and creating the delegate is - * starting a class with the following main(Array[String]) method: - *

    - *
    - *   @throws(classOf[AccessException], classOf[RemoteException], classOf[AlreadyBoundException])
    - *   object namingService {
    - *     def main(args: Array[String]) {
    - *       if (System.getSecurityManager() == null)
    - *         System.setSecurityManager(new RMISecurityManager())
    - *
    - *       val registry = LocateRegistry.createRegistry(REGISTRY_PORT)
    - *       registry.bind(DELEGATE_NAME, new RegistryDelegate());
    - *
    - *       do {
    - *         try {
    - *           Thread.sleep(Long.MAX_VALUE)
    - *         } catch {
    - *           case e: InterruptedException => // do nothing
    - *           case e: Throwable => e.printStackTrace(); sys.exit(1)
    - *         }
    - *       } while (true)
    - *     }
    - *  }
    - *

    - * The common usage scenario looks something like: - *

    - *   Registry remoteRegistry = LocateRegistry.getRegistry("remotehost.mycompany.com");
    - *   Registry delegate = (Registry) remoteRegistry.lookup(DELEGATE_NAME);
    - *   delegate.bind("someName", new SomeRemoteObject());
    - *

    - * The getRegistryDelegate(String) method is a helper method - * that fetches the registry delegate for you. - *

    - *

    - * The main(Array[String]) method of this class will create a - * local registry on the default port, create a registry delegate and bind - * it under the well known name that you chose in the wizard - * (DELEGATE_NAME). - *

    - * - * @author Genady Beryozkin, rmi-info@genady.net - */ - -object RMIDelegate { - /** The name under which the delegate appears in the registry. */ - val DELEGATE_NAME = "foo" - - /** This method retrieves the registry delegate from a registry that is - * running on a remote host. - */ - @throws(classOf[RemoteException]) - def getRegistryDelegate(remoteHost: String): Registry = - getRegistryDelegate(remoteHost, Registry.REGISTRY_PORT) - - /** This method retrieves the registry delegate from a registry that is - * running on a remote host. - */ - @throws(classOf[RemoteException]) - def getRegistryDelegate(remoteHost: String, remotePort: Int): Registry = { - val registry = LocateRegistry.getRegistry(remoteHost, remotePort) - (registry lookup DELEGATE_NAME).asInstanceOf[Registry] - } - - /** A simple way to run a registry and bind a registry delegate. */ - @throws(classOf[RemoteException]) - def main(args: Array[String]) { - var port = Registry.REGISTRY_PORT - - if (args.length > 0) { - if (args(0) equals "-help") { - println("Usage: rmidelegate ") - sys.exit(0) - } - try { - port = args(0).toInt - } catch { - case e: NumberFormatException => - println("Usage: rmidelegate ") - sys.exit(1) - } - val opts = args filter (_ startsWith "-J-D") - for (opt <- opts) { - val x = opt.substring(4) split "=" - if (x.length == 2) System.setProperty(x(0), x(1)) - else System.setProperty(x(0), "") - } - } - - if (System.getSecurityManager() == null) - System.setSecurityManager(new RMISecurityManager() { - override def checkPermission(p: java.security.Permission) {} - }) - - - val registry = LocateRegistry.createRegistry(port) - registry.bind(DELEGATE_NAME, new RegistryDelegate()) - - do { - try { - Thread.sleep(Long.MaxValue) - } catch { - case e: InterruptedException => - // do nothing - case e: Throwable => - e.printStackTrace() - sys.exit(1) - } - } while (true) - } - -} - -/** Create a delegate for a user provided registry instance. The registry is - * assumed to be a local registry, as there is no point in creating a delegate - * for a remote registry. - */ -class RegistryDelegate(reg: Registry) extends UnicastRemoteObject with Registry { - /** The local registry */ - private val localRegistry: Registry = reg - - /** Create a delegate for a local registry that is bound to the default - * local port (1099). - */ - def this() = this(LocateRegistry.getRegistry()) - - /** Create a delegate for a local registry that is bound to a user - * specified port. 
- */ - def this(port: Int) = this(LocateRegistry.getRegistry(port)) - - @throws(classOf[RemoteException]) - def bind(name: String, obj: Remote) { localRegistry.bind(name, obj) } - - @throws(classOf[RemoteException]) - def list(): Array[String] = localRegistry.list() - - @throws(classOf[RemoteException]) - def lookup(name: String): Remote = localRegistry.lookup(name) - - @throws(classOf[RemoteException]) - def rebind(name: String, obj: Remote) { localRegistry.rebind(name, obj) } - - @throws(classOf[RemoteException]) - def unbind(name: String) { localRegistry.unbind(name) } - -} diff --git a/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala b/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala deleted file mode 100644 index ff6c8f6b6c..0000000000 --- a/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RemoteBooleanRef.scala 18398 2009-07-28 14:26:36Z michelou $ - -package scala.runtime.remoting - -import java.rmi.server.{UnicastRemoteObject, Unreferenced} -import scala.runtime.{BooleanRef, RemoteRef} - -/** - * The trait RemoteRemoteBooleanRef provides a remote interface - * for manipulating boolean references. - * - * @author Stephane Micheloud - * @version 1.0 - */ -@remote -trait RemoteBooleanRef { - def elem_=(value: Boolean) - def elem: Boolean -} - -/** - * The class RemoteBooleanRefImpl implements a remote (global) - * boolean reference by inheriting from the class - * UnicastRemoteObject. - * - * In particular, it forwards method invocations to the elem - * accessors of class runtime.BooleanRef and implements the - * java.rmi.server.Unreferenced interface to automatically - * remove the no more referenced binding from the registry. - * - * @author Stephane Micheloud - * @version 1.0 - */ -class RemoteBooleanRefImpl(name: String, x: BooleanRef) -extends UnicastRemoteObject with RemoteBooleanRef with Unreferenced { - def elem_=(value: Boolean) { x.elem = value } - def elem: Boolean = x.elem - override def toString() = x.elem.toString - def unreferenced() { - Debug.info("[RemoteBooleanRefImpl] unreferenced: "+this) - RemoteRef.unbind(name) - } -} diff --git a/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala b/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala deleted file mode 100644 index 335f0d9019..0000000000 --- a/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RemoteByteRef.scala 18398 2009-07-28 14:26:36Z michelou $ - -package scala.runtime.remoting - -import java.rmi.server.{UnicastRemoteObject, Unreferenced} -import scala.runtime.{ByteRef, RemoteRef} - -/** - * The trait RemoteRemoteByteRef provides a remote interface - * for manipulating byte references. - * - * @author Stephane Micheloud - * @version 1.0 - */ -@remote -trait RemoteByteRef { - def elem_=(value: Byte) - def elem: Byte -} - -/** - * The class RemoteByteRefImpl implements a remote (global) - * byte reference by inheriting from the class - * UnicastRemoteObject. 
- * - * In particular, it forwards method invocations to the elem - * accessors of class runtime.ByteRef and implements the - * java.rmi.server.Unreferenced interface to automatically - * remove the no more referenced binding from the registry. - * - * @author Stephane Micheloud - * @version 1.0 - */ -class RemoteByteRefImpl(name: String, x: ByteRef) -extends UnicastRemoteObject with RemoteByteRef with Unreferenced { - def elem_=(value: Byte) { x.elem = value } - def elem: Byte = x.elem - override def toString() = x.elem.toString - def unreferenced() { - Debug.info("[RemoteByteRefImpl] unreferenced: "+this) - RemoteRef.unbind(name) - } -} diff --git a/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala b/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala deleted file mode 100644 index e0f48eb970..0000000000 --- a/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RemoteCharRef.scala 18398 2009-07-28 14:26:36Z michelou $ - -package scala.runtime.remoting - -import java.rmi.server.{UnicastRemoteObject, Unreferenced} -import scala.runtime.{CharRef, RemoteRef} - -/** - * The trait RemoteRemoteCharRef provides a remote interface - * for manipulating character references. - * - * @author Stephane Micheloud - * @version 1.0 - */ -@remote -trait RemoteCharRef { - def elem_=(value: Char) - def elem: Char -} - -/** - * The class RemoteCharRefImpl implements a remote (global) - * character reference by inheriting from the class - * UnicastRemoteObject. - * - * In particular, it forwards method invocations to the elem - * accessors of class runtime.CharRef and implements the - * java.rmi.server.Unreferenced interface to automatically - * remove the no more referenced binding from the registry. - * - * @author Stephane Micheloud - * @version 1.0 - */ -class RemoteCharRefImpl(name: String, x: CharRef) -extends UnicastRemoteObject with RemoteCharRef with Unreferenced { - def elem_=(value: Char) { x.elem = value } - def elem: Char = x.elem - override def toString() = x.elem.toString - def unreferenced() { - Debug.info("[RemoteCharRefImpl] unreferenced: "+this) - RemoteRef.unbind(name) - } -} diff --git a/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala b/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala deleted file mode 100644 index 2e1319595a..0000000000 --- a/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RemoteDoubleRef.scala 18398 2009-07-28 14:26:36Z michelou $ - -package scala.runtime.remoting - -import java.rmi.server.{UnicastRemoteObject, Unreferenced} -import scala.runtime.{DoubleRef, RemoteRef} - -/** - * The trait RemoteRemoteDoubleRef provides.. - * - * @author Stephane Micheloud - * @version 1.0 - */ -@remote -trait RemoteDoubleRef { - def elem_=(value: Double) - def elem: Double -} - -/** - * The class RemoteDoubleRefImpl implements a remote (global) - * double reference by inheriting from the class - * UnicastRemoteObject. 
- * - * In particular, it forwards method invocations to the elem - * accessors of class runtime.DoubleRef and implements the - * java.rmi.server.Unreferenced interface to automatically - * remove the no more referenced binding from the registry. - * - * @author Stephane Micheloud - * @version 1.0 - */ -class RemoteDoubleRefImpl(name: String, x: DoubleRef) -extends UnicastRemoteObject with RemoteDoubleRef with Unreferenced { - def elem_=(value: Double) { x.elem = value } - def elem: Double = x.elem - override def toString() = x.elem.toString - def unreferenced() { - Debug.info("[RemoteDoubleRefImpl] unreferenced: "+this) - RemoteRef.unbind(name) - } -} diff --git a/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala b/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala deleted file mode 100644 index f4e61ea6da..0000000000 --- a/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RemoteFloatRef.scala 18398 2009-07-28 14:26:36Z michelou $ - -package scala.runtime.remoting - -import java.rmi.server.{UnicastRemoteObject, Unreferenced} -import scala.runtime.{FloatRef, RemoteRef} - -/** - * The trait RemoteRemoteFloatRef provides a remote interface - * for manipulating float references. - * - * @author Stephane Micheloud - * @version 1.0 - */ -@remote -trait RemoteFloatRef { - def elem_=(value: Float) - def elem: Float -} - -/** - * The class RemoteFloatRefImpl implements a remote (global) - * float reference by inheriting from the class - * UnicastRemoteObject. - * - * In particular, it forwards method invocations to the elem - * accessors of class runtime.FloatRef and implements the - * java.rmi.server.Unreferenced interface. 
- * - * @author Stephane Micheloud - * @version 1.0 - */ -class RemoteFloatRefImpl(name: String, x: FloatRef) -extends UnicastRemoteObject with RemoteFloatRef with Unreferenced { - def elem_=(value: Float) { x.elem = value } - def elem: Float = x.elem - override def toString() = x.elem.toString - def unreferenced() { - Debug.info("[RemoteIntFloatImpl] unreferenced: "+this) - RemoteRef.unbind(name) - } -} diff --git a/src/detach/library/scala/runtime/remoting/RemoteGC.scala b/src/detach/library/scala/runtime/remoting/RemoteGC.scala deleted file mode 100644 index 393c031bfc..0000000000 --- a/src/detach/library/scala/runtime/remoting/RemoteGC.scala +++ /dev/null @@ -1,66 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RemoteGC.scala 17547 2009-04-21 13:56:28Z michelou $ - -package scala.runtime.remoting - -import java.lang.ref.{Reference, WeakReference, ReferenceQueue} -import java.rmi.{NoSuchObjectException, Remote} -import java.rmi.server.UnicastRemoteObject -import scala.collection.mutable - -/** - * - * @author Stephane Micheloud - * @version 1.0 - */ -// Adapted from scala.actors.ActorGC -private [runtime] class RemoteGC { - - private val refQueue = new ReferenceQueue[Remote] - private val refSet = new mutable.HashSet[Reference[T] forSome { type T <: Remote }] - - private var liveRefs = 0 - - def newRef(a: Remote) = synchronized { - refSet += new WeakReference(a, refQueue) - liveRefs += 1 - info("added object reference \""+a+"\" ("+liveRefs+")") - } - - def gc() = synchronized { - info("GC called ("+liveRefs+")") - // check for unreachable object references - def drain() { - val wr = refQueue.poll - if (wr != null) { - val msg = try { - UnicastRemoteObject.unexportObject(wr.get, true/*force*/) - "removed object reference" - } - catch { - case e: NoSuchObjectException => - "object already unbound" - } - info(msg+" ("+liveRefs+")") - liveRefs -= 1 - refSet -= wr - // continue draining - drain() - } - } - drain() - } - - def allClosed: Boolean = synchronized { - liveRefs <= 0 - } - - private def info(msg: String) { Debug.info("[RemoteGC] "+msg) } -} diff --git a/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala b/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala deleted file mode 100644 index b14403f6ca..0000000000 --- a/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RemoteIntRef.scala 18398 2009-07-28 14:26:36Z michelou $ - -package scala.runtime.remoting - -import java.rmi.server.{UnicastRemoteObject, Unreferenced} -import scala.runtime.{IntRef, RemoteRef} - -/** - * The trait RemoteRemoteIntRef provides a remote interface - * for manipulating integer references. - * - * @author Stephane Micheloud - * @version 1.0 - */ -@remote -trait RemoteIntRef { - def elem_=(value: Int) - def elem: Int -} - -/** - * The class RemoteIntRefImpl implements a remote (global) - * integer reference by inheriting from the class - * UnicastRemoteObject. 
- * - * In particular, it forwards method invocations to the elem - * accessors of class runtime.IntRef and implements the - * java.rmi.server.Unreferenced interface to automatically - * remove the no more referenced binding from the registry. - * - * @author Stephane Micheloud - * @version 1.0 - */ -class RemoteIntRefImpl(name: String, x: IntRef) -extends UnicastRemoteObject with RemoteIntRef with Unreferenced { - def elem_=(value: Int) { x.elem = value } - def elem: Int = x.elem - override def toString() = x.elem.toString - def unreferenced() { - Debug.info("[RemoteIntRefImpl] unreferenced: "+this) - RemoteRef.unbind(name) - } -} diff --git a/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala b/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala deleted file mode 100644 index da83491489..0000000000 --- a/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RemoteLongRef.scala 18398 2009-07-28 14:26:36Z michelou $ - -package scala.runtime.remoting - -import java.rmi.server.{UnicastRemoteObject, Unreferenced} -import scala.runtime.{LongRef, RemoteRef} - -/** - * The trait RemoteRemoteLongRef provides a remote interface - * for manipulating long integer references. - * - * @author Stephane Micheloud - * @version 1.0 - */ -@remote -trait RemoteLongRef { - def elem_=(value: Long) - def elem: Long -} - -/** - * The class RemoteLongRefImpl implements a remote (global) - * long integer reference by inheriting from the class - * UnicastRemoteObject. - * - * In particular, it forwards method invocations to the elem - * accessors of class runtime.LongRef and implements the - * java.rmi.server.Unreferenced interface to automatically - * remove the no more referenced binding from the registry. - * - * @author Stephane Micheloud - * @version 1.0 - */ -class RemoteLongRefImpl(name: String, x: LongRef) -extends UnicastRemoteObject with RemoteLongRef with Unreferenced { - def elem_=(value: Long) { x.elem = value } - def elem: Long = x.elem - override def toString() = x.elem.toString - def unreferenced() { - Debug.info("[RemoteLongRefImpl] unreferenced: "+this) - RemoteRef.unbind(name) - } -} diff --git a/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala b/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala deleted file mode 100644 index 9f27b26114..0000000000 --- a/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RemoteObjectRef.scala 18398 2009-07-28 14:26:36Z michelou $ - -package scala.runtime.remoting - -import java.rmi.server.{UnicastRemoteObject, Unreferenced} -import scala.runtime.{ObjectRef, RemoteRef} - -/** - * The trait RemoteRemoteObjectRef provides a remote interface - * for manipulating object references. - * - * @author Stephane Micheloud - * @version 1.0 - */ -@remote -trait RemoteObjectRef { - def elem_=(value: AnyRef) - def elem: AnyRef -} - -/** - * The class RemoteObjectRefImpl implements a remote (global) - * object reference by inheriting from the class - * UnicastRemoteObject. 
- * - * In particular, it forwards method invocations to the elem - * accessors of class runtime.ObjectRef and implements the - * java.rmi.server.Unreferenced interface to automatically - * remove the no more referenced binding from the registry. - * - * @author Stephane Micheloud - * @version 1.0 - */ -class RemoteObjectRefImpl(name: String, x: ObjectRef) -extends UnicastRemoteObject with RemoteObjectRef with Unreferenced { - def elem_=(value: AnyRef) { x.elem = value } - def elem: AnyRef = x.elem - override def toString() = x.elem.toString - def unreferenced() { - Debug.info("[RemoteObjectRefImpl] unreferenced: "+this) - RemoteRef.unbind(name) - } -} diff --git a/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala b/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala deleted file mode 100644 index 2ced9dbc83..0000000000 --- a/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -// $Id: RemoteShortRef.scala 18398 2009-07-28 14:26:36Z michelou $ - -package scala.runtime.remoting - -import java.rmi.server.{UnicastRemoteObject, Unreferenced} -import scala.runtime.{ShortRef, RemoteRef} - -/** - * The trait RemoteRemoteShortRef provides a remote interface - * for manipulating short integer references. - * - * @author Stephane Micheloud - * @version 1.0 - */ -@remote -trait RemoteShortRef { - def elem_=(value: Short) - def elem: Short -} - -/** - * The class RemoteShortRefImpl implements a remote (global) - * short integer reference by inheriting from the class - * UnicastRemoteObject. - * - * In particular, it forwards method invocations to the elem - * accessors of class runtime.ShortRef and implements the - * java.rmi.server.Unreferenced interface. 
- * - * @author Stephane Micheloud - * @version 1.0 - */ -class RemoteShortRefImpl(name: String, x: ShortRef) -extends UnicastRemoteObject with RemoteShortRef with Unreferenced { - def elem_=(value: Short) { x.elem = value } - def elem: Short = x.elem - override def toString() = x.elem.toString - def unreferenced() { - Debug.info("[RemoteShortRefImpl] unreferenced: "+this) - RemoteRef.unbind(name) - } -} diff --git a/src/detach/plugin/scala/tools/detach/Detach.scala b/src/detach/plugin/scala/tools/detach/Detach.scala deleted file mode 100644 index 499a97b761..0000000000 --- a/src/detach/plugin/scala/tools/detach/Detach.scala +++ /dev/null @@ -1,1190 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Stephane Micheloud - */ - -package scala.tools.detach - -import scala.collection.{ mutable, immutable } -import scala.collection.mutable.ListBuffer -import scala.tools.nsc._ -import scala.tools.nsc.plugins.PluginComponent -import scala.tools.nsc.symtab.Flags._ -import scala.tools.nsc.transform._ - -abstract class Detach extends PluginComponent - with Transform with TypingTransformers { - import global._ - import definitions._ - - /** the following two members override abstract members in Transform */ - val phaseName: String = "detach" - - protected def newTransformer(unit: CompilationUnit): Transformer = - new DetachTransformer(unit) - - // set with the `-P:detach:enable` plugin option (see DetachPlugin) */ - protected[detach] var isEnabled = false - - private class DetachTransformer(unit: CompilationUnit) - extends TypingTransformer(unit) { - private val DEBUG = settings.debug.value - private val PROXY_PREFIX = "proxy$" // local proxy objects - private val PROXY_SUFFIX = "$proxy" // top-level proxy classes - private val DETACH_SUFFIX = "$detach" // detached closures - private val IMPL_SUFFIX = "Impl" // follows Java convention - - private val nme_bind = newTermName("bind") - private val nme_unbind = newTermName("unbind") - private val nme_unreferenced = newTermName("unreferenced") - - private val Functions = FunctionClass.toList // see method isFuncType - - private val RemoteClass = - definitions.getClass("java.rmi.Remote") - - private val UIDClass = - definitions.getClass("java.rmi.server.UID") - - private val UnicastRemoteObjectClass = - definitions.getClass("java.rmi.server.UnicastRemoteObject") - - private val UnreferencedClass = - definitions.getClass("java.rmi.server.Unreferenced") - - private val DetachModule = - definitions.getModule("scala.remoting.detach") - - private val DebugModule = - definitions.getModule("scala.remoting.Debug") - - private val RemoteRefModule = - definitions.getModule("scala.runtime.RemoteRef") - - private val ThreadModule = - definitions.getModule("java.lang.Thread") - - private val UnicastRemoteObjectModule = - definitions.getModule("java.rmi.server.UnicastRemoteObject") - - private val remoteAnnotationInfo = { - val RemoteAttr: Symbol = definitions.getClass("scala.remote") - AnnotationInfo(RemoteAttr.tpe, List(), List()) - } - - private val serializableAnnotationInfo = - AnnotationInfo(requiredClass[scala.annotation.serializable].tpe, List(), List()) -/* - private val throwsAnnotationInfo = { - val RemoteExceptionClass = definitions.getClass("java.rmi.RemoteException") - val ThrowsAttr = definitions.getClass("scala.throws") - AnnotationInfo( - ThrowsAttr.tpe, - List(Literal(Constant(RemoteExceptionClass.tpe))), - List() - ) - } -*/ - // todo: see generation of Java version UID - private def 
serialVersionUIDAnnotationInfo(clazz: Symbol) = { - def genHash(sym: Symbol): Long = { - val sym1 = if (sym.isConstructor) sym.owner else sym - val ts = sym.tpe match { - case MethodType(params, rt) => (params map (_.tpe)) ::: List(rt) - case t => List(t) - } - val hashes = sym1.nameString.hashCode :: - (ts map (_.typeSymbol.nameString.hashCode)) - (0L /: hashes)((acc, h) => acc ^ h) - } - val hashes = for (sym <- clazz.info.decls.toList) yield genHash(sym) - val uid: Long = (0L /: hashes) ((acc, h) => acc * 41 + h) - val serialVersionUIDAttr = definitions.getClass("scala.SerialVersionUID") - AnnotationInfo( - serialVersionUIDAttr.tpe, - List(Literal(Constant(uid))), - List() - ) - } - - private def elems(suffix: String): List[(Symbol, Symbol)] = - for (clazz <- ObjectRefClass :: refClass.valuesIterator.toList) yield { - val name = "scala.runtime.remoting.Remote" + clazz.name + suffix - (clazz, definitions.getClass(name)) - } - private val remoteRefClass = immutable.HashMap(elems(""): _*) - private val remoteRefImpl = immutable.HashMap(elems("Impl"): _*) - - private val proxyInterfaceDefs = new mutable.HashMap[Symbol/*owner*/, ListBuffer[Tree]] - private val detachedClosureApply = new mutable.HashMap[Tree, Apply] - - private type SymSet = mutable.HashSet[Symbol] - private val capturedObjects = new mutable.HashMap[Symbol/*clazz*/, SymSet] - private val capturedFuncs = new mutable.HashMap[Symbol/*clazz*/, SymSet] - private val capturedCallers = new mutable.HashMap[Symbol/*clazz*/, SymSet] - private val capturedThisClass = new mutable.HashMap[Symbol, Symbol] - - private val proxies = new mutable.HashMap[ - Symbol, //clazz - (Symbol, Symbol, mutable.HashMap[Symbol, Symbol]) //iface, impl, accessor map - ] - def toInterface(clazz: Symbol) = proxies(clazz)._1 - private val classdefs = new mutable.HashMap[Symbol/*clazz*/, ClassDef] - // detachedClosure gathers class definitions containing a "detach" apply - private val detachedClosure = new mutable.HashMap[Symbol/*clazz*/, ClassDef] - - /**
    - * The method freeObjTraverser.traverse is invoked
    - * in the method DetachPlugin.transformUnit in order to
    - * gather information about objects referenced inside a detached
    - * closure and which will be accessed remotely through object proxies.
    - *
    - * Object proxies are generated in method mkClosureApply
    - * and their definitions are generated in method genProxy.
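For orientation: the Apply case of this traverser only accepts detach applied to a freshly constructed closure, and reports "detach inapplicable for ..." otherwise, which is exactly what the removed test/files/detach-neg/det_bar test further down in this patch checks. A minimal sketch of the two cases, assuming the scala.remoting.detach entry point that this patch also deletes (class A and its members are made up for illustration):

    import scala.remoting._

    class A(y: Int) {
      // accepted: the argument is a closure constructor (a fresh $anonfun instance)
      val ok = detach((x: Int) => x + y)

      var bar = (x: Int) => x + y
      // rejected at compile time with "detach inapplicable for method bar":
      // `bar` is read back through an accessor, not constructed in place
      // detach(bar)
    }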
    - */ - private val freeObjTraverser = new Traverser { - def symSet(f: mutable.HashMap[Symbol, SymSet], sym: Symbol): SymSet = f.get(sym) match { - case Some(ss) => ss - case None => val ss = new mutable.HashSet[Symbol]; f(sym) = ss; ss - } - def getClosureApply(tree: Tree): Apply = tree match { - case Block(_, expr) => getClosureApply(expr) - case Typed(expr, _) => getClosureApply(expr) - case apply @ Apply(Select(_, _), _) => apply // sel="" or some "f$0" - case Apply(fun, _) => getClosureApply(fun) - case _ => - throw new Error("getClosureApply: unhandled case " + tree) - } - def isFuncType(tp: Type): Boolean = tp match { - case TypeRef(pre, sym, args) => - Functions contains sym.tpe.typeSymbol - case _ => - false - } - def isOuterMember(sym: Symbol): Boolean = - sym.isOuterAccessor || - sym.name.endsWith(nme.OUTER/*, nme.OUTER.length*/) - override def traverse(tree: Tree) { - val sym = tree.symbol - val owner = - if (currentOwner.isModule) currentOwner - else currentOwner.enclClass - tree match { - case cdef @ ClassDef(_, _, _, impl) => - classdefs(sym) = cdef - super.traverse(impl) - if (detachedClosure contains sym) { - detachedClosure(sym) = cdef - symSet(capturedObjects, sym) += capturedThisClass(sym) - } - - case Apply(Select(qual, _), List(arg)) - if (qual.tpe <:< DetachModule.tpe) => - assert(isFuncType(arg.tpe))//debug - val t = getClosureApply(arg) - if (!t.fun.symbol.isConstructor) - unit.error(t.pos, "detach inapplicable for " +t.fun.symbol) - val sym = t.fun.symbol.owner - capturedThisClass(sym) = owner - symSet(capturedFuncs, sym) - detachedClosureApply(tree) = t - classdefs get sym match { - case None => - detachedClosure(sym) = null // set later in case ClassDef - case Some(cdef) => - detachedClosure(sym) = cdef - symSet(capturedObjects, sym) += capturedThisClass(sym) - } - super.traverse(arg) - - case Select(qual @ This(_), name) - if qual.symbol.isModuleClass && !qual.symbol.isPackageClass => - val qsym = qual.symbol - symSet(capturedFuncs, owner) += sym - symSet(capturedObjects, owner) += qsym - - case Select(qual, name) - if (qual.hasSymbolField && - (sym.owner != owner) && - !(sym.ownerChain contains ScalaPackageClass) && - !(sym.owner hasFlag JAVA)) => - val qsym = qual.symbol - symSet(capturedFuncs, owner) += sym - if (qsym.isStaticModule && !qsym.isPackage) { - //println("*****1******* capturedObjects("+owner+") += "+qsym) - symSet(capturedObjects, owner) += qsym - } - else if (!isOuterMember(qsym) && !(qsym isNestedIn owner)) { - //println("*****3******* capturedCallers("+sym+") += "+qsym) - symSet(capturedCallers, sym) += qsym - } - - case _ => - super.traverse(tree) - } - } - } //freeObjTraverser - - private val valueClass = immutable.HashMap( - (for ((sym, ref) <- refClass.toList) yield (ref, sym)): _* - ) + (ObjectRefClass -> ObjectClass) - - private def toValueClass(tp: Type): Type = - if (isRefClass(tp)) valueClass(tp.typeSymbol).tpe - else if (proxies contains tp.typeSymbol) toInterface(tp.typeSymbol).tpe - else tp - - private def isRefClass(tp: Type): Boolean = - (tp ne null) && - ((refClass.valuesIterator contains tp.typeSymbol) || (ObjectRefClass eq tp.typeSymbol)) - - private def isRemoteRefClass(tp: Type): Boolean = - (tp ne null) && (remoteRefClass.valuesIterator contains tp.typeSymbol) - - private def mkRemoteRefClass(tp: Type): Type = { - assert(isRefClass(tp)) - val tp1 = remoteRefClass(tp.typeSymbol) - typeRef(tp1.typeConstructor.prefix, tp1, Nil) // after erasure, no type anymore! 
- } - - class TreeOuterSubstituter(from: List[Symbol], to: List[Symbol]) extends Traverser { - if (DEBUG) - println("\nTreeOuterSubstituter:"+ - "\n\tfrom="+from.mkString(",")+ - "\n\tto="+to.mkString(",")) - val substMap = new mutable.HashMap[Symbol, Symbol] - override def traverse(tree: Tree) { - def subst(from: List[Symbol], to: List[Symbol]) { - if (!from.isEmpty) - if (tree.symbol.tpe == from.head.tpe) { - if (DEBUG) - println("\nTreeOuterSubstituter\n\tsym="+tree.symbol+ - ", tpe="+tree.symbol.tpe+ - "\n\towner="+tree.symbol.owner) - tree.symbol updateInfo to.head.tpe - } - else tree.symbol.tpe match { - case MethodType(params, restp) => - for (p <- params if p.tpe == from.head.tpe) { - p updateInfo to.head.tpe - } - if (restp == from.head.tpe) { - if (DEBUG) - println("\nTreeOuterSubstituter(2)\n\tsym="+tree.symbol+ - ", tpe="+tree.symbol.tpe+ - ", owner="+tree.symbol.owner) - tree.symbol updateInfo MethodType(params, to.head.tpe) - } - case _ => - subst(from.tail, to.tail) - } - } - def isOuter(sym: Symbol): Boolean = - sym.isOuterAccessor || - sym.name.endsWith(nme.OUTER/*, nme.OUTER.length*/) - if (tree.hasSymbolField && isOuter(tree.symbol)) subst(from, to) - super.traverse(tree) - } - } - - // based on class Trees.TreeTypeSubstituter - private class TreeTypeRefSubstituter(clazz: Symbol) extends Traverser { - override def traverse(tree: Tree) { - val sym = tree.symbol - if (tree.hasSymbolField && isRefClass(sym.tpe) && - (sym.owner.enclClass == clazz) && - (sym.isValueParameter || sym.hasFlag(PARAMACCESSOR))) { - sym setInfo mkRemoteRefClass(sym.tpe) - tree.tpe = sym.tpe - } - if (isRefClass(tree.tpe)) - tree.tpe = mkRemoteRefClass(tree.tpe) - super.traverse(tree) - } - override def apply[T <: Tree](tree: T): T = super.apply(tree) - } - - private class TreeOwnerSubstituter(from: Symbol, to: Symbol) extends Traverser { - def substType(sym: Symbol): Type = { - def subst(tpe: Type): Type = tpe match { - case MethodType(params, restp) => - println("TreeOwnerSubstituter[1]: tpe="+tpe+ - ", tpe.typeSymbol="+tpe.typeSymbol+", sym="+sym)//debug - for (p <- params if p.tpe == from.tpe) { - println("TreeOwnerSubstituter[2]: sym="+sym+ - ", sym.owner="+sym.owner+", p.tpe="+p.tpe)//debug - p updateInfo to.tpe - } - MethodType(params, subst(restp)) - case _ => - if (sym.owner == from && tpe == from.tpe) { - println("TreeOwnerSubstituter[3]: sym="+sym+ - ", owner="+sym.owner+", tpe="+tpe)//debug - to.tpe - } else tpe - } - subst(sym.tpe) - } - val map = new mutable.HashMap[Symbol, Symbol] - override def traverse(tree: Tree) { - if (tree.hasSymbolField && tree.symbol != NoSymbol) { - val sym = tree.symbol - if (sym.owner == from) { - val sym1 = map get sym match { - case Some(s) => s - case None => val s = sym.cloneSymbol(to); map(sym) = s; s - } - tree setSymbol sym1 - } - val sym1 = tree.symbol - val tp = substType(sym1) - if (tp != sym1.tpe) { - if (sym1.owner == to) - println("\n%%%%%1%%%%%%% TreeOwnerSubst: tree="+tree+", sym1="+sym1+", sym1.owner="+sym1.owner)//debug - sym1 setInfo tp - tree setSymbol sym1 - } - } - super.traverse(tree) - } - //override def apply[T <: Tree](tree: T): T = super.apply(tree/*.duplicate*/) - } - - private var inConstructorFlag = 0L - - private def isCaptured(clazz: Symbol, sym: Symbol): Boolean = - if (capturedFuncs contains clazz) { - //log("**1** isCaptured: clazz="+clazz+", sym="+sym+", ") - capturedFuncs(clazz) contains sym - } - else { - //log("**2** isCaptured: clazz="+clazz+", sym="+sym) - sym.isMethod && !sym.isConstructor - } - - private class 
TreeAccessorSubstituter(clazz: Symbol, objs: List[Symbol], proxySyms: List[Symbol]) - extends Transformer { - def removeAccessors(tree: Tree): Tree = tree match { - case Apply(fun, _) => - removeAccessors(fun) - case Select(qual, _) if tree.hasSymbolField && tree.symbol.isOuterAccessor => - removeAccessors(qual) - case _ => - tree - } - if (DEBUG) - println("\nTreeAccessorSubstituter: "+ - "\n\tobjs="+objs.mkString(",")+ - "\n\tproxies="+proxySyms.mkString(",")) - override def transform(tree: Tree): Tree = tree match { - // transforms field assignment $outer.i$1.elem=.. - // into setter $outer.i$1_=(..) - case Assign(lhs @ Select(qual1 @ Select(qual, name), name1), rhs) - if qual1.hasSymbolField && !qual1.symbol.isPrivateLocal && - isRemoteRefClass(qual1.tpe) => - if (DEBUG) - println("\nTreeAccessorSubstituter: Assign1\n\tqual1="+qual1+", sel.tpe="+lhs.tpe+ - "\n\tqual1.tpe="+qual1.tpe+", name1="+name1+ - "\n\tqual.tpe="+qual.tpe+", tree.tpe="+tree.tpe)//debug - val iface = toInterface(qual.tpe.typeSymbol) - val sym = iface.tpe.decls lookup nme.getterToSetter(name) - atPos(tree.pos)(Apply( - Select(super.transform(qual), sym) setType lhs.tpe, - List(super.transform(rhs)) - ) setType tree.tpe) - - // transforms local assignment this.x$1.elem=.. - // into setter method this.x$1_=(..) - case Assign(lhs @ Select(qual, name), rhs) - if qual.hasSymbolField && qual.symbol.isPrivateLocal && - isRemoteRefClass(qual.tpe) => - if (DEBUG) - println("\nTreeAccessorSubstituter: Assign2"+ - "\n\tqual="+qual+", qual.tpe="+qual.tpe+ - "\n\tname="+name) - // substitute the 'elem' member of the reference class with - // the corresponding setter method of the remote reference class. - val qual1 = super.transform(qual) - val sym = qual1.tpe.decls lookup nme.getterToSetter(name) - val fun = gen.mkAttributedSelect(qual1, sym) - Apply(fun, List(super.transform(rhs))) setType lhs.tpe - - case Assign(Select(qual, name), rhs) - if qual.hasSymbolField && (objs contains qual.symbol) => - val sym = qual.symbol - val proxy = proxySyms(objs indexOf sym) - if (DEBUG) - println("\nTreeAccessorSubstituter: Assign3"+ - "\n\tqual="+qual+", qual.tpe="+qual.tpe+ - "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+ - "\n\tname="+name)//debug - // substitute the member accessor of the enclosing class with - // the corresponding setter method of the detached interface. - val iface = toInterface(sym) - val substSymbols = new TreeSymSubstituter( - sym.info.decls.toList filter { isCaptured(sym, _) }, - iface.info.decls.toList) - substSymbols(Apply( - Select(Ident(proxy), nme.getterToSetter(name)), - List(super.transform(rhs)))) - - // transforms setter invocation this.i$1_=(..) - // into setter invocation $outer.i$1_=(..) 
- case Apply(Select(qual @ This(_), name), args) - if (objs contains qual.symbol) && nme.isSetterName(name) => - val proxy = proxySyms(objs indexOf qual.symbol) - if (DEBUG) - println("\nTreeAccessorSubstituter: Apply"+ - "\n\tqual="+qual+", qual.tpe="+qual.tpe+ - "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+ - "\n\tname="+name+", decoded="+name.decode) - val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy) - val sym1 = proxy.info.decls lookup name.decode - val fun = gen.mkAttributedSelect(qual1, sym1) - Apply(fun, args map (super.transform(_))) setType tree.tpe - - // transforms access to field this.name$1 - // into invocation of getter method $outer.name$1() - case Select(qual @ This(_), name) - if objs contains qual.symbol => - val proxy = proxySyms(objs indexOf qual.symbol) - if (DEBUG) - println("\nTreeAccessorSubstituter: Select"+ - "\n\tqual="+qual+", qual.tpe="+qual.tpe+ - "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+ - "\n\tname="+name+", decoded="+name.decode) - val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy) - val sym1 = proxy.info.decls lookup nme.originalName(name) //name - gen.mkAttributedSelect(qual1, sym1) - - // transforms field $outer.name$1 into getter method $outer.name$1() - case Select(qual @ Select(_, name1), name) - if qual.hasSymbolField && name1.endsWith(nme.OUTER/*, nme.OUTER.length*/) && - !tree.symbol.isMethod => - if (DEBUG) - println("\nTreeAccessorSubstituter: Select0\n\tqual="+qual+ - ", qual.tpe="+qual.tpe+", name="+name)//debug - val sym = qual.symbol - val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(sym.owner), sym) - val iface = toInterface(qual.tpe.typeSymbol) - val sym1 = iface.tpe.decls lookup name - val fun = gen.mkAttributedSelect(qual1, sym1) - Apply(fun, List()) setType tree.tpe - - case Select(apply @ Apply(fun @ Select(qual, _), _), name) - if fun.symbol.isOuterAccessor => - val tsym = fun.symbol.tpe.resultType.typeSymbol - val funcs = capturedFuncs(clazz).toList filter (sym => - (tsym.ownerChain contains sym.owner) || (tsym isSubClass sym.owner)) - if (DEBUG) - println("\nTreeAccessorSubstituter: Select1\n\tfun="+fun+ - ",\n\tfun.tpe="+fun.tpe+", name="+name+ - ",\n\tfuncs="+funcs)//debug - funcs find (tree.symbol.==) match { - case Some(sym) => - val qual1 = - if (currentOwner.enclClass isNestedIn clazz) apply - else removeAccessors(qual) - val name1 = - (if (tsym isSubClass qual1.tpe.typeSymbol) "" - else tsym.fullName('$')+"$")+sym.name - val iface = toInterface(qual1.tpe.typeSymbol) - val sym1 = iface.tpe.decls lookup name1 - gen.mkAttributedSelect(qual1, sym1) - case None => - super.transform(tree) - } - - // transforms field access $outer.i$1.elem - // into invocation of getter method $outer.i$1() - case Select(qual @ Select(qual1, name1), name) - if qual.hasSymbolField && !qual.symbol.isPrivateLocal && - isRemoteRefClass(qual.tpe) => - if (DEBUG) - println("\nTreeAccessorSubstituter: Select2\n\tqual="+qual+ - "\n\tqual.tpe="+qual.tpe+", tree.tpe="+tree.tpe)//debug - val iface = toInterface(qual.symbol.owner) - val sym1 = iface.tpe.decls lookup name1 - val fun = gen.mkAttributedSelect(qual1, sym1) - Apply(fun, List()) setType tree.tpe - - // transforms local access this.i$1.elem - // into invocation of getter method this.i$1() - case Select(qual, name) - if qual.hasSymbolField && qual.symbol.isPrivateLocal && - isRemoteRefClass(qual.tpe) => - if (DEBUG) - println("\nTreeAccessorSubstituter: Select3\n\tqual="+qual+ - "\n\tqual.tpe="+qual.tpe)//debug - val sym = qual.tpe.decls 
lookup name - val fun = gen.mkAttributedSelect(qual, sym) - Apply(fun, List()) setType tree.tpe - - case Select(qual, name) - if qual.hasSymbolField && (objs contains qual.symbol) => - if (DEBUG) - println("\nTreeAccessorSubstituter: Select4\n\tqual="+qual+ - ", qual.tpe="+qual.tpe+", name="+name)//debug - val sym = qual.symbol - val proxy = proxySyms(objs indexOf sym) - // substitute the accessor of a member of the enclosing class - // with the corresponding accessor of the detached interface - val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy) - val iface = toInterface(sym) - val sym1 = iface.tpe.decls lookup name.decode - gen.mkAttributedSelect(qual1, sym1) - - case _ => - super.transform(tree) - } - def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T] - } // TreeAccessorSubstituter -/* - private class TreeNameSubstituter(from: Name, to: Symbol) extends Transformer { - override def transform(tree: Tree): Tree = tree match { - case Super(qual, mix) if tree.symbol.name == from => - Super(qual, mix) setSymbol to - case This(name) if name == from => - This(to.name) setSymbol to - case _ => - super.transform(tree) - } - def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T] - } -*/ - /**
    - * Given the closure definition (generated by previous phases)
    - *
    -     *    class $anonfun$1 extends Object with Function1 {
    -     *      def this($outer: C, x$1: Int): $anonfun$1 = ..
    -     *      def apply(x: Int): Int = x + this.$outer.x() + this.x$1
    -     *    }
    - *
    - * the method mkClosureDef transforms the above code
    - * to the following:
    - *
    -     *    @serializable
    -     *    class $anonfun$1$detach extends Object with Function1 {
    -     *      def this($outer: C$proxy, x$1: Int): $anonfun$1$detach = ..
    -     *      def apply(x: Int): Int = x + this.$outer.x() + this.x$1
    -     *    }
    - *
    - * In particular, it performs the following operations:
    - *   1) add constructor parameter proxy_n to access the
    - *      proxy of the enclosing class
    - *   2) change reference types in constructor arguments to type
    - *      Remote_type_Ref
    - *   3) change occurrences of the this identifier to
    - *      proxy_n in template code
    - *   4) change reference types of local value definitions associated
    - *      to updated constructor arguments to type Remote_type_Ref
    - * - * @param clazz the symbol of the original closure definition - * @return the typed class definition for the detached closure. - */ - private def mkClosureDef(clazz: Symbol): Tree = { - val cdef = detachedClosure(clazz) - val name = cdef.symbol.name - if (name endsWith DETACH_SUFFIX) - return cdef // closure already detached - - clazz.name = encode(clazz.name.decode + DETACH_SUFFIX) - clazz addAnnotation serialVersionUIDAnnotationInfo(clazz) - clazz addAnnotation serializableAnnotationInfo - - val thiz = capturedThisClass(clazz) - val (List(outer), captured) = - capturedObjects(clazz).toList partition (thiz.==) - - /**
    - * Method updateConstructorParams updates the class
    - * symbol of the detached closure as follows:
    - *   1) it appends the "$detach" suffix to the class name,
    - *   2) it adds the "@serializable" annotation to class attributes,
    - *   3) it adds a parameter symbol for each element of "captured".
    - *
    - * It also updates the signature of the constructor symbol:
    - *   1) it adds a parameter type for each element of "captured",
    - *   2) it changes reference types to remote reference types.
    - */ - def updateConstructorParams(vparams: List[ValDef]): List[Symbol] = { - val hasOuter = !vparams.isEmpty && (vparams.head.symbol.tpe == thiz.tpe) - val ctor = clazz.primaryConstructor - val params = (for (sym <- captured) yield { - val iface = toInterface(sym) - val param = ctor.newValueParameter(ctor.pos, freshProxyName) - .setFlag(SYNTHETIC) - .setInfo(iface.tpe) - param.owner = ctor - param - }) ::: ( - if (hasOuter) Nil - else { - val iface = toInterface(thiz) - val param = ctor.newValueParameter(ctor.pos, nme.OUTER) - .setFlag(SYNTHETIC) - .setInfo(iface.tpe) - param.owner = ctor - List(param) - } - ) - val tp = ctor.tpe match { - case mt @ MethodType(params1, restp) => - val params2 = if (hasOuter) { - val iface = toInterface(params1.head.tpe.typeSymbol) - ctor.newSyntheticValueParam(iface.tpe) :: params1.tail - } - else params1 - for (p <- params2 if isRefClass(p.tpe)) { - p updateInfo mkRemoteRefClass(p.tpe) - } - MethodType(params ::: params2, restp) - case tp => - tp - } - ctor updateInfo tp - params - } //updateConstructorParams - - /** - */ - def updateConstructorDef(ctor: DefDef): (List[Tree], List[Symbol]) = { - val DefDef(mods, name, tparams, List(vparams), tpt, rhs) = ctor - val newparams = updateConstructorParams(vparams) - val vparams0 = newparams map (sym => ValDef(sym) setType sym.tpe) - val ctorDef = treeCopy.DefDef(ctor, mods, name, tparams, List(vparams0 ::: vparams), tpt, rhs) - val accessors = for (sym <- newparams) yield { - val acc = clazz.newValue(sym.pos, sym.name) - .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL) - .setInfo(sym.tpe) - clazz.info.decls enter acc - acc - } - val accDefs = accessors map (sym => ValDef(sym) setType sym.tpe) - (ctorDef :: accDefs, accessors) - } //updateConstructorDef - - val impl = cdef.impl - val (List(ctor: DefDef), body1) = impl.body partition (t => - t.isDef && t.symbol.isPrimaryConstructor) - val (defs, accessors) = updateConstructorDef(ctor) - val impl1 = treeCopy.Template(impl, impl.parents, impl.self, defs ::: body1) - val (from, to) = /*List.unzip*/( - for (obj <- captured ::: List(outer)) - yield (obj, toInterface(obj)) - ) unzip - //val substNames = new TreeNameSubstituter(name, clazz) - val substTypeRefs = new TreeTypeRefSubstituter(clazz) - val substAccs = new TreeAccessorSubstituter(clazz, from, accessors) - val substTypes = new TreeOuterSubstituter(from, to) - val substSyms = new TreeSymSubstituter(from, to) - val t1 = ClassDef(clazz, substSyms(substTypes(substAccs(substTypeRefs(impl1))))) - //println("mkClosureDef: t(untyped)=\n"+nodeToString(t1)) - val t = localTyper typed t1 - detachedClosure(clazz) = t.asInstanceOf[ClassDef] - //println("mkClosureDef: t(typed)=\n"+nodeToString(t)) - t - } //mkClosureDef - - /**
    - * Given a class C with member x
    - * which is (remotely) referenced from inside a detached closure:
    - *
    -     *    class C extends .. {
    -     *      var x: Int
    -     *    }
    - *
    - * the method addProxy generates the following two
    - * proxy definitions (used later in method mkClosureApply
    - * to generate object proxies):
    - *
    -     *    trait C$proxy extends java.rmi.Remote {
    -     *      def x(): Int
    -     *      def x_=(x$1: Int): Unit
    -     *    }
    -     *    class C$proxyImpl
    -     *    extends java.rmi.server.UnicastRemoteObject
    -     *    with C$proxy with java.rmi.server.Unreferenced {
    -     *      def this(x$0: String, x$1: C): C$ProxyImpl = ..
    -     *      def x(): Int = this.x$1.x()
    -     *      def x_=(x$1: Int): Unit = this.x$1.x_=(x$1)
    -     *      def unreferenced(): Unit = RemoteRef.unbind(this.x$0)
    -     *    }
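For reference, a generated pair like the one above is meant to be bound and consumed the way mkProxy and genProxy below wire it up (Variant 1, rebind/unbind). A minimal sketch under those assumptions: C stands in for the hypothetical class of the example, C$proxy / C$proxyImpl stand for the compiler-generated types, and RemoteRef.bind / RemoteRef.unbind are the scala.runtime.RemoteRef helpers removed elsewhere in this patch, with signatures inferred from the generated trees:

    import scala.runtime.RemoteRef

    class C { var x: Int = 0 }   // the remotely referenced class

    object ProxySketch {
      def main(args: Array[String]) {
        val c = new C
        val name = "C/proxy$1"
        // bind the RMI implementation under `name`, then talk to it only
        // through the java.rmi.Remote interface C$proxy
        val proxy = RemoteRef.bind(name, new C$proxyImpl(name, c)).asInstanceOf[C$proxy]
        proxy.x_=(42)            // forwarded to c.x over RMI
        println(proxy.x())       // reads c.x back through the proxy
        RemoteRef.unbind(name)   // what unreferenced() does once no client holds the stub
      }
    }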
    - */ - private def addProxy(closure: Symbol, clazz: Symbol) { - // the Sun RMI compiler crashes with the error message - // "error: An error has occurred in the compiler; ..." with trace - // "sun.tools.java.CompilerError: getInnerClassField" if the - // generated proxy class does not belong to the top-level scope. - val proxyOwner = clazz.toplevelClass.owner //clazz.owner - - if (DEBUG) - println("\nadd proxy for "+clazz+" in "+proxyOwner)//debug - - val (proxyIntf, proxyImpl, proxyMap) = proxies get clazz match { - case Some(proxy) => - proxy - case None => - val iface = - proxyOwner.newClass(clazz.pos, encode(clazz.name.decode + PROXY_SUFFIX)) - iface.sourceFile = clazz.sourceFile - iface setFlag (ABSTRACT | TRAIT | INTERFACE) // Java interface - val iparents = List(ObjectClass.tpe, RemoteClass.tpe) - iface setInfo ClassInfoType(iparents, newScope, iface) - // methods must throw RemoteException - iface addAnnotation remoteAnnotationInfo - - val iclaz = - proxyOwner.newClass(clazz.pos, encode(iface.name.decode + IMPL_SUFFIX)) - iclaz.sourceFile = clazz.sourceFile - iclaz setFlag (SYNTHETIC | FINAL) - // Variant 1: rebind/unbind - val cparents = List(UnicastRemoteObjectClass.tpe, iface.tpe, UnreferencedClass.tpe) - // Variant 2: un-/exportObject - //val cparents = List(ObjectClass.tpe, iface.tpe, UnreferencedClass.tpe) - iclaz setInfo ClassInfoType(cparents, newScope, iclaz) - val proxy = (iface, iclaz, new mutable.HashMap[Symbol, Symbol]) - proxies(clazz) = proxy - proxy - } - - def addAccessors() { - def mkGetter(sym: Symbol, name: String): Symbol = { - val getter = if (sym.isMethod) { - val meth = sym.cloneSymbol(proxyIntf) - meth.name = name - val tsym = meth.tpe.resultType.typeSymbol - if (proxies contains tsym) - meth updateInfo MethodType(List(), toInterface(tsym).tpe) - meth - } - else { - val meth = proxyIntf.newMethod(sym.pos, nme.getterName(sym.originalName)) - meth setFlag ACCESSOR - meth setInfo MethodType(List(), toValueClass(sym.tpe)) - meth - } - getter setFlag ABSTRACT - getter resetFlag FINAL - getter - } - def mkSetter(sym: Symbol): Symbol = { - val setter = proxyIntf.newMethod(sym.pos, nme.getterToSetter(sym.originalName)) - setter setFlag (sym.flags & ~(PRIVATE | LOCAL) | ACCESSOR | lateDEFERRED) - val param = setter.newSyntheticValueParam(toValueClass(sym.tpe)) - setter setInfo MethodType(List(param), UnitClass.tpe) - setter setFlag ABSTRACT - setter resetFlag FINAL - setter - } - def create(owner: Symbol, clazz: Symbol) { - val funcs = capturedFuncs(owner).toList - funcs find (_.isConstructor) match { - case Some(sym) if capturedFuncs contains sym.owner => - create(sym.owner, clazz) - case _ => - } - val newfuncs = funcs filterNot (proxyMap.valuesIterator.toList contains) - val (members, others) = newfuncs partition (clazz isSubClass _.owner) - val outers = others filter (sym => - (clazz isNestedIn sym.owner) && clazz.isClass) - for (sym <- outers) { - val sym1 = mkGetter(sym, sym.fullName('$')) - proxyIntf.info.decls enter sym1 - proxyMap(sym1) = sym - }/* - for (sym <- outers if capturedCallers contains sym; - caller <- capturedCallers(sym)) { - val sym1 = mkGetter(sym, caller.nameString+'$'+sym.nameString) - if (clazz.isAnonymousClass) - println("[2] clazz="+clazz+", sym1="+sym1) - proxyIntf.info.decls enter sym1 - proxyMap(sym1) = sym - }*/ - for (sym <- members if !sym.isConstructor) { - val sym1 = mkGetter(sym, sym.originalName.decode) - proxyIntf.info.decls enter sym1 - proxyMap(sym1) = sym - } - for (sym <- members if isRefClass(sym.tpe)) { - val sym1 = 
mkSetter(sym) - proxyIntf.info.decls enter sym1 - proxyMap(sym1) = sym - } - } - create(closure, clazz) - } - - addAccessors - if (DEBUG) { - val xs = proxyMap.keysIterator.toList - println("\tadded "+proxyIntf+ - "\n\twith "+xs.mkString(", ")+" ["+xs.length+"]") - } - } //addProxy - - def genProxy(clazz: Symbol) { - val (proxyIntf, proxyImpl, proxyMap) = proxies(clazz) - - // generate proxy interface - val ifaceBody = proxyMap.keysIterator.toList map { DefDef(_, EmptyTree) } - val ifaceParents = - proxyIntf.info.parents map (t => TypeTree(t) setPos proxyIntf.pos) - val ifaceTmpl = Template(ifaceParents, emptyValDef, ifaceBody) - val ifaceDef = localTyper typed ClassDef(proxyIntf, ifaceTmpl) - - // generated proxy implementation - // Variant 1: rebind/unbind - val param1 = - proxyImpl.newValueParameter(proxyImpl.pos, freshName("x$")) - .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL) - .setInfo(StringClass.tpe) - proxyImpl.info.decls enter param1 - - val param2 = - proxyImpl.newValueParameter(proxyImpl.pos, freshName("x$")) - .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL) - .setInfo(clazz.tpe) - proxyImpl.info.decls enter param2 - - val unreferenced = - proxyImpl.newMethod(proxyImpl.pos, nme_unreferenced) - .setInfo(MethodType(List(), UnitClass.tpe)) - proxyImpl.info.decls enter unreferenced - - val proxyBody = - DefDef(unreferenced, List(List()), Block( - List(Apply( //stats - Select(gen.mkAttributedRef(DebugModule), "info"), - List(Apply( - Select(Literal(Constant("unreferenced: ")), "$plus"), - // Variant 1: rebind/unbind - List(Select(This(proxyImpl), param1.name)) - // Variant 2: un-/exportObject - //List(This(proxyImpl)) - )) - )), - Apply( //expr - Select(gen.mkAttributedRef(RemoteRefModule), nme_unbind), - // Variant 1: rebind/unbind - List(Select(This(proxyImpl), param1.name)) - // Variant 2: un-/exportObject - //List(This(proxyImpl)) - ) - )) :: ( - for (sym <- proxyIntf.info.decls.toList) yield { - val sym1 = sym.cloneSymbol(proxyImpl) - sym1 resetFlag (ABSTRACT | DEFERRED | lateDEFERRED) - proxyImpl.info.decls enter sym1 - DefDef(sym1, { - val sym2 = proxyMap(sym) - var t = Select(This(proxyImpl), param2) - var outerAcc = - if (sym2.owner isSubClass param2) None - else param2.info.decls.toList find (_.isOuterAccessor) - while (!outerAcc.isEmpty) { - t = Select(t, outerAcc.get) - val outerClass = outerAcc.get.tpe.resultType.typeSymbol - outerAcc = - if (sym2.owner == outerClass) None - else outerClass.info.decls.toList find (_.isOuterAccessor) - } - val sel = Select(t, sym2) - if (sym2.isMethod) { - Apply(sel, sym1.paramss(0) map { Ident(_) }) - } - else if (isRefClass(sym2.tpe)) { - val sel1 = Select(sel, nme.elem) - if (sym1.tpe.paramTypes.length == 0) sel1 - else Assign(sel1, Ident(sym1.paramss(0)(0))) - } - else - sel - }) - }) - val proxyParents = - proxyImpl.info.parents map (t => TypeTree(t) setPos proxyImpl.pos) - val proxyTmpl = Template(proxyParents, - emptyValDef, NoMods, - // Variant 1: rebind/unbind - /*vparamss*/ List(List(ValDef(param1), ValDef(param2))), - // Variant 2: un-/exportObject - ///*vparamss*/ List(List(ValDef(param2))), - /*argss*/ List(List()), proxyBody, NoPosition) - val proxyDef = localTyper typed ClassDef(proxyImpl, proxyTmpl) - - // remember definitions to be added by transformStats - val proxyOwner = proxyIntf.owner - if (! 
(proxyInterfaceDefs contains proxyOwner)) - proxyInterfaceDefs(proxyOwner) = new ListBuffer - proxyInterfaceDefs(proxyOwner) += ifaceDef - proxyInterfaceDefs(proxyOwner) += proxyDef - } //genProxy - - private def freshName(s: String): Name = - unit.fresh.newName(s) - - private def freshProxyName: Name = - unit.fresh.newName(PROXY_PREFIX) - - /**
    - * Given a detached closure applied in some environment consisting
    - * of an enclosing class C and some local variables
    - * x$1 (immutable) and y$1 (mutable):
    - *
    -     *    scala.remoting.detach.apply({
    -     *      (new $anonfun$1(C.this, x$1, y$1): Function1)
    -     *    })
    - *
    - * the above code is transformed to the following block:
    - *
    -     *    {
    -     *      val proxy$1: C$Proxy =
    -     *        RemoteRef.bind("C/proxy$1", new C$ProxyImpl(C.this))
    -     *      val proxy$2: RemoteIntRef =
    -     *        RemoteRef.bind("C/proxy$2", new RemoteIntRefImpl(y$1))
    -     *      (new $anonfun$1detach(proxy$1, x$1, proxy$2): Function1)
    -     *    }
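From the user's point of view, this whole rewrite is triggered by a single detach { ... } call sent across a channel; the removed test/files/detach-run/basic suite exercises it end to end. A trimmed sketch modeled on that suite, assuming the scala.remoting Channel / ServerChannel API deleted by this patch (host, port and object names are placeholders):

    import scala.remoting._

    object MiniServer {
      def main(args: Array[String]) {
        val server = new ServerChannel(8889)
        val client = server.accept
        val f = client.receive[Int => Int]   // arrives with its proxies already bound
        client ! f(2)
        server.close()
      }
    }

    object MiniClient {
      def main(args: Array[String]) {
        val server = new Channel("127.0.0.1", 8889)
        var y = 10
        // y is mutable and captured, so it travels as a RemoteIntRef proxy,
        // exactly like proxy$2 in the block above
        server ! detach((x: Int) => x + y)
        println("result received: " + server.receiveInt)
      }
    }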
    - */ - private def mkClosureApply(tree: Tree): Tree = { - val apply @ Apply(fun, args) = detachedClosureApply(tree) - assert(fun.symbol.isConstructor, fun.symbol+" is not a constructor")//debug - val clazz = apply.tpe.typeSymbol - val thiz = capturedThisClass(clazz) - val cdef = mkClosureDef(clazz) - val uid = localTyper typed { - val sym = currentOwner.newValue(tree.pos, freshName("uid$")) - .setFlag(SYNTHETIC) - .setInfo(StringClass.tpe) - val rhs = Apply(Select( - Apply( - Select(New(TypeTree(UIDClass.tpe)), nme.CONSTRUCTOR), - List() - ), - "toString" - ), List()) - ValDef(sym, rhs) - } - def cast(tree: Tree, tpe: Type): Tree = - Apply( - TypeApply( - Select(tree, Object_asInstanceOf), - List(TypeTree(tpe)) - ), - List() - ) - - def mkProxy(csym: Symbol): ValDef = { - val (iface, proxy, _) = proxies(csym) - val sym = currentOwner.newValue(csym.pos, freshProxyName) - .setFlag(SYNTHETIC) - .setInfo(iface.tpe) - val bind = Select(gen.mkAttributedRef(RemoteRefModule), nme_bind) - val name = Apply( - Select(Literal(Constant(sym.fullName('/')+"$")), String_+), - List(Ident(uid.symbol)) - ) - val thiz = - if (csym.isModule) gen.mkAttributedIdent(csym) - else gen.mkAttributedThis(csym) - val args = List(name, - Apply(Select(New(TypeTree(proxy.tpe)), nme.CONSTRUCTOR), - // Variant 1: rebind/unbind - List(name, thiz))) - // Variant 2: un-/exportObject - //List(thiz))) - val rhs = cast(Apply(bind, args), iface.tpe) - ValDef(sym, rhs) - } - - def mkObjProxies: List[ValDef] = { - val (outer, captured) = - capturedObjects(clazz).toList partition (thiz.==) - (captured ::: outer) map mkProxy - } - - def mkArgProxies: Map[Symbol, ValDef] = { - def retRefs(t: Tree): List[Tree] = t match { - case Apply(fun, args) => - args flatMap retRefs - case id @ Ident(_) => - if (isRefClass(id.tpe)) List(id) else Nil - case Template(_, _, body) => - body flatMap retRefs - case New(tpt) => - retRefs(tpt) - case thiz @ This(_) => - if (isRefClass(thiz.tpe)) List(thiz) else Nil - case _ => - throw new Error("Internal error: " + t.getClass) - } - new immutable.HashMap[Symbol, ValDef] ++ ( - for (variable <- retRefs(apply)) yield { - val param = variable.symbol - assert(isRefClass(param.tpe), param) - val proxy = currentOwner.newValue(param.pos, freshProxyName) - .setFlag(SYNTHETIC) - .setInfo(mkRemoteRefClass(param.tpe)) - val bind = Select(gen.mkAttributedRef(RemoteRefModule), nme_bind) - //val name = Literal(Constant(proxy.fullName('/'))) - val name = Apply( - Select(Literal(Constant(proxy.fullName('/')+"$")), String_+), - List(Ident(uid.symbol)) - ) - val ts = param.tpe.typeSymbol - val args = List(name, - Apply( - Select(New(TypeTree(remoteRefImpl(ts).tpe)), nme.CONSTRUCTOR), - // Variant 1: rebind/unbind - List(name, variable))) - // Variant 2: un-/exportObject - //List(variable))) - val rhs = cast(Apply(bind, args), remoteRefClass(ts).tpe) - (param, ValDef(proxy, rhs)) - } - ) - } //mkArgProxies - - /**
    - * Method mkClosureInstance updates the list of actual
    - * parameters passed to the closure instance.
    - */ - def mkClosureInstance(objProxies: List[ValDef], - argProxies: Map[Symbol, ValDef]): Tree = { - fun.tpe = fun.symbol.tpe - val args0 = objProxies map (tree => Ident(tree.symbol)) - val hasOuter = !args.isEmpty && (args.head.symbol.tpe == thiz.tpe) - val args1 = (if (hasOuter) args.tail else args) map (arg => - argProxies get arg.symbol match { - case Some(t) => Ident(t.symbol) - case None => arg - } - ) - if (DEBUG) - println("\nmkClosureInstance:\n\targs0="+args0+"\n\targs1="+args1) - val t = Typed( - Apply(fun, args0 ::: args1), - //TypeTree(clazz.info.parents.tail.head) //interface (2.7.x) - TypeTree(clazz.info.parents.head) //interface (2.8.x) - ) - localTyper typed t - } //mkClosureInstance - - val objProxies = mkObjProxies - val argProxies = mkArgProxies - val stats = uid :: objProxies ::: argProxies.valuesIterator.toList - val expr = mkClosureInstance(objProxies, argProxies) - localTyper typed Block(stats, expr) - } //mkClosureApply - - override def transform(tree: Tree): Tree = { - def withInConstructorFlag(inConstructorFlag: Long)(f: => Tree): Tree = { - val savedInConstructorFlag = this.inConstructorFlag - this.inConstructorFlag = inConstructorFlag - val t = f - this.inConstructorFlag = savedInConstructorFlag - t - } - if (!isEnabled) return tree - tree match { - case ClassDef(mods, name, tparams, impl) => - val tree1 = super.transform(tree) - if (!reporter.hasErrors && (capturedThisClass contains tree1.symbol)) - mkClosureDef(tree1.symbol) - else - tree1 - - case Apply(Select(_, _), _) => - val tree1 = super.transform(tree) - if (!reporter.hasErrors && (detachedClosureApply contains tree1)) - atPos(tree1.pos)(mkClosureApply(tree1)) - else - tree1 - - case Template(_, _, _) => - withInConstructorFlag(0) { super.transform(tree) } - - case _ => - super.transform(tree) - } - } - - /** Transform statements and add detached definitions to them. */ - override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = { - val stats1 = super.transformStats(stats, exprOwner) - val newDefs = { - val buf = new ListBuffer[Tree] - if (proxyInterfaceDefs contains currentOwner) - buf ++= proxyInterfaceDefs(currentOwner).toList - buf.toList - } - if (newDefs.isEmpty) stats1 else stats1 ::: newDefs - } - - private def genProxies() { - def printDebugInfo() { - println("\ncompilation unit : "+unit) - for ((sym, _) <- detachedClosure) { - println("closure to detach: "+sym+" (owner: "+sym.owner+")") - println("captured this : "+capturedThisClass(sym)) - val objs = capturedObjects get sym match { - case Some(ss) => ss.toList - case None => Nil - } - println("captured objects : "+objs.mkString(", ")+" ["+objs.length+"]") - } - println("\ncalled functions :") - for (sym <- capturedFuncs.keysIterator) { - val xs = capturedFuncs(sym).toList map (s => { - val callers = capturedCallers get s match { - case Some(ss) => "|"+ss.toList.mkString(",") - case None => "" - } - s+"("+s.owner.name+callers+")" - }) - println("\t"+sym+" -> "+xs.mkString(", ")+" ["+xs.length+"]") - } - } - def printDebugInfo2() { - println("\nproxy classes :") - for (sym <- proxies.keysIterator) - println("\t"+sym+"("+sym.tpe+") -> "+proxies(sym)) - } - if (DEBUG) - printDebugInfo - for ((closure, _) <- detachedClosure; - captured <- capturedObjects(closure)) - addProxy(closure, captured) - if (DEBUG) - printDebugInfo2 - for (sym <- proxies.keysIterator) - genProxy(sym) - } //genProxies - - /**
    - * Method transformUnit performs three successive operations:
    - *
    - *   1. it first gathers information about free objects and detached
    - *      closures;
    - *   2. it then adds proxies for free objects;
    - *   3. finally, it transforms detached closures (both definition and
    - *      instantiation).
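Note that none of this runs unless the plugin is explicitly enabled (see isEnabled and DetachPlugin.processOptions below); the removed partest suites switch it on with the flags kept in their *.flags files. A rough command line under that assumption, reusing the plugins path from the deleted actor.flags file:

    scalac -Xpluginsdir ../../../../build/pack/misc/scala-devel/plugins \
           -Xplugin-require:detach -P:detach:enable \
           Server.scala Client.scala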
    - */ - override def transformUnit(unit: CompilationUnit) { - freeObjTraverser.traverse(unit.body) - if (!reporter.hasErrors) genProxies - super.transformUnit(unit) - } - } - -} - diff --git a/src/detach/plugin/scala/tools/detach/DetachPlugin.scala b/src/detach/plugin/scala/tools/detach/DetachPlugin.scala deleted file mode 100644 index c6e18b7abe..0000000000 --- a/src/detach/plugin/scala/tools/detach/DetachPlugin.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Stephane Micheloud - */ - -package scala.tools.detach - -import scala.tools.nsc.{Global, Phase} -import scala.tools.nsc.plugins.{Plugin, PluginComponent} - -class DetachPlugin(val global: Global) extends Plugin { - import global._ - - val name = "detach" - val description = "Perform detaching of remote closures" - - object detach extends { - val global = DetachPlugin.this.global - val runsAfter = List("lambdalift") - override val runsBefore = List("constructors") - } with Detach - - val components = List[PluginComponent](detach) - - def setEnabled(flag: Boolean) { detach.isEnabled = flag } - - override def processOptions(options: List[String], error: String => Unit) = { - var enabled = false - for (option <- options) { - if (option == "enable") { - enabled = true - } else { - error("Option not understood: "+option) - } - } - setEnabled(enabled) - } - - override val optionsHelp: Option[String] = - Some(" -P:detach:enable Enable detaching of remote closures") -} diff --git a/src/detach/plugin/scalac-plugin.xml b/src/detach/plugin/scalac-plugin.xml deleted file mode 100644 index 6c8600e331..0000000000 --- a/src/detach/plugin/scalac-plugin.xml +++ /dev/null @@ -1,4 +0,0 @@ - - detach - scala.tools.detach.DetachPlugin - diff --git a/test/files/detach-neg/det_bar.check b/test/files/detach-neg/det_bar.check deleted file mode 100644 index 70b47581a5..0000000000 --- a/test/files/detach-neg/det_bar.check +++ /dev/null @@ -1,4 +0,0 @@ -det_bar.scala:7: error: detach inapplicable for method bar - detach(bar) - ^ -one error found diff --git a/test/files/detach-neg/det_bar.scala b/test/files/detach-neg/det_bar.scala deleted file mode 100644 index 862afb1d6e..0000000000 --- a/test/files/detach-neg/det_bar.scala +++ /dev/null @@ -1,13 +0,0 @@ -import scala.remoting._ -class A(y: Int) { - var z = 2 - var bar = (x: Int) => x + y + z - def foo(x: Int): Int = x + y + z - bar = (x: Int) => x * y - detach(bar) -} - -object test extends App { - val a = new A(1) - println(a.bar(2)) -} diff --git a/test/files/detach-run/actor-run.check b/test/files/detach-run/actor-run.check deleted file mode 100644 index 9448ddd5fe..0000000000 --- a/test/files/detach-run/actor-run.check +++ /dev/null @@ -1,5 +0,0 @@ -Server.main 8889 -Client.main 127.0.0.1 8889 -yInstVal = 10 -zLocVal = 1000 -result received: 11111 diff --git a/test/files/detach-run/actor/Client.scala b/test/files/detach-run/actor/Client.scala deleted file mode 100644 index 12573e24d3..0000000000 --- a/test/files/detach-run/actor/Client.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* - * @author Stephane Micheloud - */ - -import scala.actors.Actor._, ClientHelper._ -import scala.actors.remote._, RemoteActor._ -import scala.remoting._, Debug._ - -object Foo { - def trace(msg: String) { info("[Foo.trace] "+msg)} -} -object Client { - val yInstVal: Int = 10 - var yInstVar: Int = 99 - object Bar { - def trace(msg: String) { info("[Bar.trace] "+msg) } - } - def main(args: Array[String]) { - init(args) - actor { - val server = select(Node(host, port), 
'Server) - val zLocVal: Int = 1000 - var zLocVar: Int = 9998 - server ! detach( - (x: Int) => { - println("yInstVal = "+yInstVal) - this.trace("yInstVar = "+yInstVar) - Bar.trace("zLocVal = "+zLocVal) - Foo.trace("zLocVar = "+zLocVar) - zLocVar += 2 - System.out.println("zLocVal = "+zLocVal) - Debug.info("zLocVar = "+zLocVar) - x + yInstVal + yInstVar + zLocVal + zLocVar - }) - react { - case result: Int => - println("result received: " + result) - Predef.exit(0) - } - } - } - private def trace(msg: String) { info("[Client.trace] "+msg) } -} - -object ClientHelper { - private var _host = "127.0.0.1" - private var _port = 8888 - def host = _host - def port = _port - def init(args: Array[String]) { - try { _host = args(0) } catch { case _ => } - try { _port = args(1).toInt } catch { case _ => } - } -} diff --git a/test/files/detach-run/actor/Server.scala b/test/files/detach-run/actor/Server.scala deleted file mode 100644 index b56d22f744..0000000000 --- a/test/files/detach-run/actor/Server.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* - * @author Stephane Micheloud - */ - -import scala.actors.Actor._ -import scala.actors.remote.RemoteActor._ - -object Server extends ServerConsole { - private def computation(f: Int => Int): Int = { - //some time-consuming task - f(2) - } - def main(args: Array[String]) { - actor { - classLoader = serverClassLoader - alive(args(0).toInt) - register('Server, self) - loopWhile(isRunning) { - react { - case f: (Int => Int) => - val result = computation(f) - sender ! result - } - } - } - } -} diff --git a/test/files/detach-run/actor/ServerConsole.scala b/test/files/detach-run/actor/ServerConsole.scala deleted file mode 100644 index 8ebd9d4c2e..0000000000 --- a/test/files/detach-run/actor/ServerConsole.scala +++ /dev/null @@ -1,75 +0,0 @@ -/* - * @author Stephane Micheloud - */ - -import java.io.{BufferedReader, InputStreamReader} - -import scala.compat.Platform.currentTime -import scala.remoting.Debug, Debug._ - -trait ServerConsole extends Thread { - private val startTime = currentTime - actors.Debug.level = // e.g. 
3 // info+warning+error - try { System.getProperty("scala.actors.logLevel", "0").toInt } - catch { case e => 0 } - - start() - - val serverClassLoader = { - import java.rmi.server.RMIClassLoader - val codebase = System.getProperty("java.rmi.server.codebase") - info("[ServerConsole] codebase="+codebase) - RMIClassLoader getClassLoader codebase - } - - private var isTerminated = false - - def terminate() { isTerminated = false } - - def isRunning = !isTerminated - - override def run() { - val in = new BufferedReader(new InputStreamReader(System.in)) - var quit = false - while (!quit) { - val args = getArgs(in) - if (args contains "quit") - quit = true - if (args contains "cls") { - println(ERASE_SCREEN) - println(CURSOR_HOME) - } - if (args contains "warning") - Debug.level = Level.WARNING - if (args contains "info") - Debug.level = Level.INFO - if (args contains "silent") - Debug.level = Level.SILENT - } - terminate() - println("Server exited ("+mkTimeString(currentTime - startTime)+")") - sys.exit(0) - } - - protected def trace(msg: String) { - Debug.info("[ServerConsole.trace] "+msg) - } - - private def getArgs(in: BufferedReader): List[String] = { - val input = try { in.readLine() } catch { case _ => null } - if (input != null) (input.trim split "\\s+").toList else Nil - } - - private def mkTimeString(time: Long): String = { - def twoDigits(i: Long) = (if (i < 10) "0" else "")+i - val sec = time / 1000 - val min = sec / 60 - val h = min / 60 - twoDigits(h) +":"+ - twoDigits(min - h * 60)+":"+ - twoDigits(sec - min * 60) - } - - private val ERASE_SCREEN = "\033[2J" - private val CURSOR_HOME = "\033[H" -} diff --git a/test/files/detach-run/actor/actor.flags b/test/files/detach-run/actor/actor.flags deleted file mode 100644 index 55eed8bbcd..0000000000 --- a/test/files/detach-run/actor/actor.flags +++ /dev/null @@ -1 +0,0 @@ --Xpluginsdir ../../../../build/pack/misc/scala-devel/plugins -Xplugin-require:detach -P:detach:enable diff --git a/test/files/detach-run/actor/actor.scala b/test/files/detach-run/actor/actor.scala deleted file mode 100644 index 23a10d6982..0000000000 --- a/test/files/detach-run/actor/actor.scala +++ /dev/null @@ -1,157 +0,0 @@ -/* - * @author Stephane Micheloud - */ - -object Test { - - val name = "actor" - val host = "127.0.0.1" - val port = 8889 - - def main(args: Array[String]) { - setenv() - println("Server.main "+port) - Server.main(Array(port.toString)) - println("Client.main "+host+" "+port) - Client.main(Array(host, port.toString)) - Server.terminate() - } - - private def setenv() { - import Env._ - - // Java properties for server & client - System.setProperty("scala.actors.logLevel", actors_logLevel) - System.setProperty("scala.remoting.logLevel", logLevel) - System.setProperty("java.security.manager", "") - System.setProperty("java.security.policy", policyFile) - // Java properties for server only - System.setProperty("java.rmi.server.codebase", deployUrl) - System.setProperty("java.rmi.server.hostname", host) - System.setProperty("java.rmi.server.useCodebaseOnly", "true") - - // application-specific classes to be deployed and accessed via URL - // (i.e. 
detached closure, proxy interfaces and proxy stubs) - val classNames = List( - "$anonfun$main$1$proxy", - "$anonfun$main$1$proxyImpl_Stub", - "Bar$proxy", - "Bar$proxyImpl_Stub", - "Client$$anonfun$main$1$$anonfun$apply$1$detach", - "Client$proxy", - "Client$proxyImpl_Stub", - "Foo$proxy", - "Foo$proxyImpl_Stub") - - val proxyImplNames = - for (n <- classNames; i = n lastIndexOf "_Stub"; if i > 0) - yield n.substring(0, i) - - generatePolicyFile() - generateRmiStubs(proxyImplNames) - generateJarFile(classNames) - } -} - -object Env { - import java.io._, java.util.jar._ - - val actors_logLevel = "0" - // = "3" // info+warning+error - val logLevel = "silent" - // = "info" // debug user code only - // = "info,lib" // debug user & library code - - // we assume an Apache server is running locally for deployment - private val sep = File.separator - val docPath = System.getProperty("user.home")+sep+"public_html" - val docRoot = "http://127.0.0.1/~"+System.getProperty("user.name") - - private val policyTmpl = - System.getProperty("partest.cwd")+sep+Test.name+sep+"java.policy" - val outPath = System.getProperty("partest.output") - val libPath = System.getProperty("partest.lib") - val policyFile = outPath+sep+"java.policy" - val codebaseDir = outPath+sep+"-" - - assert((new File(docPath)).isDirectory, - "Root directory \""+docPath+"\" not found") - val deployJar = docPath+sep+Test.name+"_deploy.jar" - val deployUrl = docRoot+"/"+Test.name+"_deploy.jar" - - def generatePolicyFile() { - val in = new BufferedReader(new FileReader(policyTmpl)) - val out = new PrintWriter(new BufferedWriter(new FileWriter(policyFile))) - var line = in.readLine() - while (line != null) { - val line1 = line.replaceAll("@PROJECT_LIB_BASE@", codebaseDir) - out.println(line1) - line = in.readLine() - } - in.close() - out.close() - } - - def generateRmiStubs(classNames: List[String]) { - val options = List( - "-v1.2", - "-classpath "+libPath+File.pathSeparator+outPath, - "-d "+outPath) - rmic(options, classNames) - //ls(outPath) - } - - def generateJarFile(classNames: List[String]) { - val out = new JarOutputStream(new FileOutputStream(deployJar)) - classNames foreach (name => try { - val classFile = name+".class" - val in = new FileInputStream(outPath+sep+classFile) - out putNextEntry new JarEntry(classFile) - val buf = new Array[Byte](512) - var len = in read buf - while (len != -1) { - out.write(buf, 0, len) - len = in read buf - } - in.close() - } catch { - case e: FileNotFoundException => println(e) - }) - out.close() - } - - private def ls(path: String) { exec("ls -al "+path) } - - private def rmic(options: List[String], classNames: List[String]) { - val javaHome = scala.util.Properties.javaHome - val jdkHome = - if (javaHome endsWith "jre") javaHome.substring(0, javaHome.length-4) - else javaHome - val rmicExt = if (scala.util.Properties.isWin) ".exe" else "" - val rmicCmd = jdkHome+sep+"bin"+sep+"rmic"+rmicExt - val cmdLine = rmicCmd+options.mkString(" ", " ", "")+ - classNames.mkString(" "," ","") - // println(cmdLine) - exec(cmdLine) - } - - private def exec(command: String) { - val proc = Runtime.getRuntime exec command - proc.waitFor() - val out = new BufferedReader(new InputStreamReader(proc.getInputStream)) - var line = out.readLine() - while (line != null) { - println(line) - line = out.readLine() - } - out.close() - val err = new BufferedReader(new InputStreamReader(proc.getErrorStream)) - line = err.readLine() - while (line != null) { - println(line) - line = err.readLine() - } - err.close() - } -} - diff 
--git a/test/files/detach-run/actor/java.policy b/test/files/detach-run/actor/java.policy deleted file mode 100644 index b305f10b4c..0000000000 --- a/test/files/detach-run/actor/java.policy +++ /dev/null @@ -1,25 +0,0 @@ -// See http://java.sun.com/javase/6/docs/technotes/guides/security/permissions.html -// See http://mindprod.com/jgloss/policyfile.html -// The policy expands ${/} to the correct path or folder delimiter on your host platform. - -// Actions available with SocketPermission: accept, connect, listen, resolve -// 1) The "resolve" action is implied when any of the other actions are present. -// 2) The "listen" action is only meaningful when used with "localhost". - -grant { - permission java.net.SocketPermission "*:80", "connect,accept,listen"; - permission java.net.SocketPermission "*:1024-", "connect,accept,listen"; - permission java.util.PropertyPermission "scala.remoting.logLevel", "read"; - permission java.util.PropertyPermission "scala.remoting.port", "read"; -}; - -grant codeBase "@PROJECT_LIB_BASE@" { - permission java.lang.RuntimePermission "getClassLoader"; - permission java.util.PropertyPermission "java.rmi.server.codebase", "read"; - permission java.util.PropertyPermission "java.rmi.server.hostname", "read"; - permission java.util.PropertyPermission "sun.rmi.dgc.server.gcInterval", "read,write"; -}; - -//grant { -// permission java.security.AllPermission; -//}; diff --git a/test/files/detach-run/basic-run.check b/test/files/detach-run/basic-run.check deleted file mode 100644 index 6463d97497..0000000000 --- a/test/files/detach-run/basic-run.check +++ /dev/null @@ -1,5 +0,0 @@ -Server.main 8889 -> Client.main 127.0.0.1 8889 -yInstVal = 10 -zLocVal = 1000 -result received: 11111 diff --git a/test/files/detach-run/basic/Client.scala b/test/files/detach-run/basic/Client.scala deleted file mode 100644 index f8eddb041d..0000000000 --- a/test/files/detach-run/basic/Client.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* - * @author Stephane Micheloud - */ - -import java.net._, Thread._, ClientHelper._ -import scala.remoting._, Debug._ - -object Foo { - def trace(s: String) { info("[Foo.trace] "+s)} -} -object Client { - val yInstVal: Int = 10 - var yInstVar: Int = 99 - object Bar { - def trace(s: String) { info("[Bar.trace] "+s) } - } - def main(args: Array[String]) { - init(args) - val server = new Channel(host, port) - val zLocVal: Int = 1000 - var zLocVar: Int = 9998 - server ! 
detach( - (x: Int) => { - println("yInstVal = "+yInstVal) - this.trace("yInstVar = "+yInstVar) - Bar.trace("zLocVal = "+zLocVal) - Foo.trace("zLocVar = "+zLocVar) - zLocVar += 2 - System.out.println("zLocVal = "+zLocVal) - Debug.info("zLocVar = "+zLocVar) - x + yInstVal + yInstVar + zLocVal + zLocVar - }) - val result = server.receiveInt - println("result received: " + result) - } - private def trace(s: String) { info("[Client.trace] "+s) } -} - -object ClientHelper { - private var _host = "127.0.0.1" - private var _port = 8888 - def host = _host - def port = _port - def init(args: Array[String]) { - try { _host = args(0) } catch { case _ => } - try { _port = args(1).toInt } catch { case _ => } - } -} diff --git a/test/files/detach-run/basic/Server.scala b/test/files/detach-run/basic/Server.scala deleted file mode 100644 index f8aa02a4ba..0000000000 --- a/test/files/detach-run/basic/Server.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * @author Stephane Micheloud - */ - -import scala.remoting.ServerChannel - -object Server extends ServerConsole { - private def computation(f: Int => Int): Int = { - //some time-consuming task - f(2) - } - def main(args: Array[String]) { - val server = new ServerChannel(args(0).toInt) - loop { - val client = server.accept - val f = client.receive[Int => Int] - val result = computation(f) - client ! result - } - server.close() - } -} diff --git a/test/files/detach-run/basic/ServerConsole.scala b/test/files/detach-run/basic/ServerConsole.scala deleted file mode 100644 index 65b81c0ca1..0000000000 --- a/test/files/detach-run/basic/ServerConsole.scala +++ /dev/null @@ -1,83 +0,0 @@ -/* - * @author Stephane Micheloud - */ - -import java.io._ - -import scala.compat.Platform.currentTime -import scala.remoting.Debug, Debug._ - -trait ServerConsole extends Thread { - private val startTime = currentTime - - start() - - private var isTerminated = false - - def terminate() { isTerminated = true } - - protected def loop(block: => Unit) { - while (!isTerminated) { - try { - block - } - catch { - case e: ObjectStreamException => - trace("Object stream error ("+e.getMessage+")") - case e: EOFException => - trace("Connection lost") - case e: ClassNotFoundException => - trace("Class not found") - case e => - trace("Server error: "+e) - } - } - } - - override def run() { - val in = new BufferedReader(new InputStreamReader(System.in)) - var quit = false - while (!quit) { - val args = getArgs(in) - if (args contains "quit") - quit = true - if (args contains "cls") { - println(ERASE_SCREEN) - println(CURSOR_HOME) - } - if (args contains "warning") - Debug.level = Level.WARNING - if (args contains "info") - Debug.level = Level.INFO - if (args contains "silent") - Debug.level = Level.SILENT - } - terminate() - println("Server exited ("+mkTimeString(currentTime - startTime)+")") - exit(0) - - } - - protected def trace(msg: String) { - Debug.info("[ServerConsole.trace] "+msg) - } - - private def getArgs(in: BufferedReader): List[String] = { - print("> ") - val input = try { in.readLine() } catch { case _ => null } - if (input != null) (input.trim split "\\s+").toList else Nil - } - - private def mkTimeString(time: Long): String = { - def twoDigits(i: Long) = (if (i < 10) "0" else "")+i - val sec = time / 1000 - val min = sec / 60 - val h = min / 60 - twoDigits(h) +":"+ - twoDigits(min - h * 60)+":"+ - twoDigits(sec - min * 60) - } - - private val ERASE_SCREEN = "\033[2J" - private val CURSOR_HOME = "\033[H" -} diff --git a/test/files/detach-run/basic/basic.flags 
b/test/files/detach-run/basic/basic.flags deleted file mode 100644 index 55eed8bbcd..0000000000 --- a/test/files/detach-run/basic/basic.flags +++ /dev/null @@ -1 +0,0 @@ --Xpluginsdir ../../../../build/pack/misc/scala-devel/plugins -Xplugin-require:detach -P:detach:enable diff --git a/test/files/detach-run/basic/basic.scala b/test/files/detach-run/basic/basic.scala deleted file mode 100644 index 4d0fc2d933..0000000000 --- a/test/files/detach-run/basic/basic.scala +++ /dev/null @@ -1,169 +0,0 @@ -/* - * @author Stephane Micheloud - */ - -object Test { - - val name = "basic" - val host = "127.0.0.1" - val port = 8889 - - def main(args: Array[String]) { - setenv() - println("Server.main "+port) - server.start() - println("Client.main "+host+" "+port) - client.start() - server.terminate() - } - - private var server = new ServerThread(port) - private var client = new ClientThread(host, port) - - private class ServerThread(port: Int) extends Runnable { - private var th = new Thread(this) - def start() { th.start(); Thread.sleep(1000) } - def run() { Server.main(Array(port.toString)) } - def terminate() { Server.terminate(); sys.exit(0) } - } - - private class ClientThread(host: String, port: Int) extends Runnable { - private var th = new Thread(this) - def start() { th.start(); th.join() } - def run() { Client.main(Array(host, port.toString)) } - } - - private def setenv() { - import Env._ - - // Java properties for server & client - System.setProperty("scala.remoting.logLevel", logLevel) - System.setProperty("java.security.manager", "") - System.setProperty("java.security.policy", policyFile) - // Java properties for server only - System.setProperty("java.rmi.server.codebase", deployUrl) - System.setProperty("java.rmi.server.hostname", host) - System.setProperty("java.rmi.server.useCodebaseOnly", "true") - - // application-secific classes to be deployed and accessed via URL - // (i.e. 
detached closure, proxy interfaces and proxy stubs) - val classNames = List( - "Bar$proxy", - "Bar$proxyImpl_Stub", - "Client$$anonfun$main$1$detach", - "Client$proxy", - "Client$proxyImpl_Stub", - "Foo$proxy", - "Foo$proxyImpl_Stub") - - val proxyImplNames = - for (n <- classNames; i = n lastIndexOf "_Stub"; if i > 0) - yield n.substring(0, i) - - generatePolicyFile() - generateRmiStubs(proxyImplNames) - generateJarFile(classNames) - } -} - -object Env { - import java.io._, java.util.jar._ - - val actors_logLevel = "0" - // = "3" // info+warning+error - val logLevel = "silent" - // = "info" // debug user code only - // = "info,lib" // debug user & library code - - // we assume an Apache server is running locally for deployment - private val sep = File.separator - val docPath = System.getProperty("user.home")+sep+"public_html" - val docRoot = "http://127.0.0.1/~"+System.getProperty("user.name") - - private val policyTmpl = - System.getProperty("partest.cwd")+sep+Test.name+sep+"java.policy" - val outPath = System.getProperty("partest.output") - val libPath = System.getProperty("partest.lib") - val policyFile = outPath+sep+"java.policy" - val codebaseDir = outPath+sep+"-" - - assert((new File(docPath)).isDirectory, - "Root directory \""+docPath+"\" not found") - val deployJar = docPath+sep+Test.name+"_deploy.jar" - val deployUrl = docRoot+"/"+Test.name+"_deploy.jar" - - def generatePolicyFile() { - val in = new BufferedReader(new FileReader(policyTmpl)) - val out = new PrintWriter(new BufferedWriter(new FileWriter(policyFile))) - var line = in.readLine() - while (line != null) { - val line1 = line.replaceAll("@PROJECT_LIB_BASE@", codebaseDir) - out.println(line1) - line = in.readLine() - } - in.close() - out.close() - } - - def generateRmiStubs(classNames: List[String]) { - val options = List( - "-v1.2", - "-classpath "+libPath+File.pathSeparator+outPath, - "-d "+outPath) - rmic(options, classNames) - //ls(outPath) - } - - def generateJarFile(classNames: List[String]) { - val out = new JarOutputStream(new FileOutputStream(deployJar)) - classNames foreach (name => try { - val classFile = name+".class" - val in = new FileInputStream(outPath+sep+classFile) - out putNextEntry new JarEntry(classFile) - val buf = new Array[Byte](512) - var len = in read buf - while (len != -1) { - out.write(buf, 0, len) - len = in read buf - } - in.close() - } catch { - case e: FileNotFoundException => println(e) - }) - out.close() - } - - private def ls(path: String) { exec("ls -al "+path) } - - private def rmic(options: List[String], classNames: List[String]) { - val javaHome = scala.util.Properties.javaHome - val jdkHome = - if (javaHome endsWith "jre") javaHome.substring(0, javaHome.length-4) - else javaHome - val rmicExt = if (scala.util.Properties.isWin) ".exe" else "" - val rmicCmd = jdkHome+sep+"bin"+sep+"rmic"+rmicExt - val cmdLine = rmicCmd+options.mkString(" ", " ", "")+ - classNames.mkString(" "," ","") - // println(cmdLine) - exec(cmdLine) - } - - private def exec(command: String) { - val proc = Runtime.getRuntime exec command - proc.waitFor() - val out = new BufferedReader(new InputStreamReader(proc.getInputStream)) - var line = out.readLine() - while (line != null) { - println(line) - line = out.readLine() - } - out.close() - val err = new BufferedReader(new InputStreamReader(proc.getErrorStream)) - line = err.readLine() - while (line != null) { - println(line) - line = err.readLine() - } - err.close() - } -} diff --git a/test/files/detach-run/basic/java.policy 
b/test/files/detach-run/basic/java.policy deleted file mode 100644 index 92c1045c3d..0000000000 --- a/test/files/detach-run/basic/java.policy +++ /dev/null @@ -1,26 +0,0 @@ -// See http://java.sun.com/javase/6/docs/technotes/guides/security/permissions.html -// See http://mindprod.com/jgloss/policyfile.html -// The policy expands ${/} to the correct path or folder delimiter on your host platform. - -// Actions available with SocketPermission: accept, connect, listen, resolve -// 1) The "resolve" action is implied when any of the other actions are present. -// 2) The "listen" action is only meaningful when used with "localhost". - -grant { - permission java.net.SocketPermission "*:80", "connect,accept,listen"; - permission java.net.SocketPermission "*:1024-", "connect,accept,listen"; - permission java.util.PropertyPermission "scala.remoting.logLevel", "read"; - permission java.util.PropertyPermission "scala.remoting.port", "read"; -}; - -grant codeBase "@PROJECT_LIB_BASE@" { - permission java.lang.RuntimePermission "getClassLoader"; - permission java.lang.RuntimePermission "createClassLoader"; - permission java.util.PropertyPermission "java.rmi.server.codebase", "read"; - permission java.util.PropertyPermission "java.rmi.server.hostname", "read"; - permission java.util.PropertyPermission "sun.rmi.dgc.server.gcInterval", "read,write"; -}; - -//grant { -// permission java.security.AllPermission; -//}; -- cgit v1.2.3 From b2bec5a1355bd271bed86b071823a64eeafc0618 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Wed, 12 Dec 2012 16:56:22 +0100 Subject: SI-6809 Forbids deprecated case class definitions without parameter list This has been deprecated since at least 2.7.7, so it should be good to go. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 4 ++-- test/files/neg/t5956.check | 21 ++++----------------- test/files/neg/t5956.scala | 4 ++-- .../depmet_implicit_oopsla_session_simpler.scala | 2 +- test/files/pos/infer2-pos.scala | 2 +- test/files/pos/t0301.scala | 2 +- test/files/pos/t344.scala | 4 ++-- test/files/pos/t911.scala | 8 ++++---- test/files/run/caseclasses.scala | 2 +- test/files/run/structural.scala | 2 +- test/files/run/t4415.scala | 2 +- test/scaladoc/resources/Trac4325.scala | 4 ++-- 12 files changed, 22 insertions(+), 35 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 679ef1a0c9..8a53c5836c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2153,8 +2153,8 @@ self => val start = in.offset newLineOptWhenFollowedBy(LPAREN) if (ofCaseClass && in.token != LPAREN) - deprecationWarning(in.lastOffset, "case classes without a parameter list have been deprecated;\n"+ - "use either case objects or case classes with `()' as parameter list.") + syntaxError(in.lastOffset, "case classes without a parameter list are not allowed;\n"+ + "use either case objects or case classes with an explicit `()' as a parameter list.") while (implicitmod == 0 && in.token == LPAREN) { in.nextToken() vds += paramClause() diff --git a/test/files/neg/t5956.check b/test/files/neg/t5956.check index 6641dac97f..f5ae42c799 100644 --- a/test/files/neg/t5956.check +++ b/test/files/neg/t5956.check @@ -1,20 +1,7 @@ -t5956.scala:1: warning: case classes without a parameter list have been deprecated; -use either case objects or case classes with `()' as parameter list. 
-object O { case class C[T]; class C } - ^ -t5956.scala:2: warning: case classes without a parameter list have been deprecated; -use either case objects or case classes with `()' as parameter list. -object T { case class C[T]; case class C } - ^ -t5956.scala:2: warning: case classes without a parameter list have been deprecated; -use either case objects or case classes with `()' as parameter list. -object T { case class C[T]; case class C } - ^ t5956.scala:1: error: C is already defined as case class C -object O { case class C[T]; class C } - ^ +object O { case class C[T](); class C() } + ^ t5956.scala:2: error: C is already defined as case class C -object T { case class C[T]; case class C } - ^ -three warnings found +object T { case class C[T](); case class C() } + ^ two errors found diff --git a/test/files/neg/t5956.scala b/test/files/neg/t5956.scala index d985fa97a4..3cc10f3e19 100644 --- a/test/files/neg/t5956.scala +++ b/test/files/neg/t5956.scala @@ -1,2 +1,2 @@ -object O { case class C[T]; class C } -object T { case class C[T]; case class C } +object O { case class C[T](); class C() } +object T { case class C[T](); case class C() } diff --git a/test/files/pos/depmet_implicit_oopsla_session_simpler.scala b/test/files/pos/depmet_implicit_oopsla_session_simpler.scala index d2986ef56f..7c9af66611 100644 --- a/test/files/pos/depmet_implicit_oopsla_session_simpler.scala +++ b/test/files/pos/depmet_implicit_oopsla_session_simpler.scala @@ -5,7 +5,7 @@ object Sessions { def run(dp: Dual): Unit } - sealed case class Stop extends Session { + sealed case class Stop() extends Session { type Dual = Stop def run(dp: Dual): Unit = {} diff --git a/test/files/pos/infer2-pos.scala b/test/files/pos/infer2-pos.scala index 06d0f5814f..0ed9666f40 100644 --- a/test/files/pos/infer2-pos.scala +++ b/test/files/pos/infer2-pos.scala @@ -1,7 +1,7 @@ package test class Lst[T] case class cons[T](x: T, xs: Lst[T]) extends Lst[T] -case class nil[T] extends Lst[T] +case class nil[T]() extends Lst[T] object test { Console.println(cons(1, nil())) } diff --git a/test/files/pos/t0301.scala b/test/files/pos/t0301.scala index cb68f38062..24b4776010 100644 --- a/test/files/pos/t0301.scala +++ b/test/files/pos/t0301.scala @@ -1,7 +1,7 @@ package fos abstract class Expr -case class Var extends Expr +case class Var() extends Expr object Analyzer { def substitution(expr: Expr, cls: (Var,Var)): Expr = diff --git a/test/files/pos/t344.scala b/test/files/pos/t344.scala index 8a6ad9120d..449a763af7 100644 --- a/test/files/pos/t344.scala +++ b/test/files/pos/t344.scala @@ -1,7 +1,7 @@ object Bug { class A; - case class A1 extends A; - case class A2 extends A; + case class A1() extends A; + case class A2() extends A; def f: A = if (true) A1() diff --git a/test/files/pos/t911.scala b/test/files/pos/t911.scala index 224b14cda3..cfa4f49dc1 100644 --- a/test/files/pos/t911.scala +++ b/test/files/pos/t911.scala @@ -1,6 +1,6 @@ object Test { -def foo : Any = { - case class Foo {} - Foo; -} + def foo: Any = { + case class Foo() {} + Foo; + } } diff --git a/test/files/run/caseclasses.scala b/test/files/run/caseclasses.scala index 5aafea59e3..668c984f3d 100644 --- a/test/files/run/caseclasses.scala +++ b/test/files/run/caseclasses.scala @@ -1,6 +1,6 @@ case class Foo(x: Int)(y: Int) -case class Bar +case class Bar() abstract class Base abstract case class Abs(x: Int) extends Base diff --git a/test/files/run/structural.scala b/test/files/run/structural.scala index 36af8c4bfc..3a703d2cf1 100644 --- a/test/files/run/structural.scala +++ 
b/test/files/run/structural.scala @@ -152,7 +152,7 @@ object test2 { object test3 { - case class Exc extends Exception + case class Exc() extends Exception object Rec { def f = throw Exc() diff --git a/test/files/run/t4415.scala b/test/files/run/t4415.scala index f96031d650..caf1609b9e 100644 --- a/test/files/run/t4415.scala +++ b/test/files/run/t4415.scala @@ -39,7 +39,7 @@ class SecondProperty extends TopProperty class SubclassSecondProperty extends StandardProperty trait MyProp[T] -case class MyPropImpl[T] extends MyProp[T] +case class MyPropImpl[T]() extends MyProp[T] object SubclassMatch { diff --git a/test/scaladoc/resources/Trac4325.scala b/test/scaladoc/resources/Trac4325.scala index ffb968d571..ccc2f1900a 100644 --- a/test/scaladoc/resources/Trac4325.scala +++ b/test/scaladoc/resources/Trac4325.scala @@ -1,5 +1,5 @@ -case class WithSynthetic +case class WithSynthetic() -case class WithObject +case class WithObject() object WithObject -- cgit v1.2.3 From 8aae61180c3edab97c653dbc7096a439f5c3da12 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 15 Dec 2012 16:38:10 -0800 Subject: Deskolemize type skolems before pickling. Lex Spoon noticed what appeared to be duplicate symbols in methods read from classfiles. The duplicates turned out to be type skolems, which because they're not supposed to be pickled in the first place (right?) are unpickled without turning back into skolems. Now pickler is careful to deskolemize before writing anything down. The effort implied by test case is more than can possibly be justified for this obscure compiler corner, but I'll chalk it up to reflection exploration. --- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 35 ++++++++++++++++++-- test/files/run/no-pickle-skolems.check | 1 + test/files/run/no-pickle-skolems/Source_1.scala | 5 +++ test/files/run/no-pickle-skolems/Test_2.scala | 37 ++++++++++++++++++++++ 4 files changed, 76 insertions(+), 2 deletions(-) create mode 100644 test/files/run/no-pickle-skolems.check create mode 100644 test/files/run/no-pickle-skolems/Source_1.scala create mode 100644 test/files/run/no-pickle-skolems/Test_2.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index caa45ea6db..efe7519d5e 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -148,9 +148,34 @@ abstract class Pickler extends SubComponent { true } + /** If the symbol is a type skolem, deskolemize and log it. + * If we fail to deskolemize, in a method like + * trait Trait[+A] { def f[CC[X]] : CC[A] } + * the applied type CC[A] will hold a different CC symbol + * than the type-constructor type-parameter CC. + */ + private def deskolemize(sym: Symbol) = { + if (sym.isTypeSkolem) { + val sym1 = sym.deSkolemize + log({ + val what0 = sym.defString + val what = sym1.defString match { + case `what0` => what0 + case other => what0 + "->" + other + } + val where = sym.enclMethod.fullLocationString + s"deskolemizing $what in $where" + }) + sym1 + } + else sym + } + /** Store symbol in index. If symbol is local, also store everything it references. */ - def putSymbol(sym: Symbol) { + def putSymbol(sym0: Symbol) { + val sym = deskolemize(sym0) + if (putEntry(sym)) { if (isLocal(sym)) { putEntry(sym.name) @@ -503,7 +528,13 @@ abstract class Pickler extends SubComponent { /** Write a reference to object, i.e., the object's number in the map index. 
*/ - private def writeRef(ref: AnyRef) { writeNat(index(ref)) } + private def writeRef(ref0: AnyRef) { + val ref = ref0 match { + case sym: Symbol => deskolemize(sym) + case _ => ref0 + } + writeNat(index(ref)) + } private def writeRefs(refs: List[AnyRef]) { refs foreach writeRef } private def writeRefsWithLength(refs: List[AnyRef]) { writeNat(refs.length) diff --git a/test/files/run/no-pickle-skolems.check b/test/files/run/no-pickle-skolems.check new file mode 100644 index 0000000000..d64066171a --- /dev/null +++ b/test/files/run/no-pickle-skolems.check @@ -0,0 +1 @@ +OK! diff --git a/test/files/run/no-pickle-skolems/Source_1.scala b/test/files/run/no-pickle-skolems/Source_1.scala new file mode 100644 index 0000000000..1b4cbfa788 --- /dev/null +++ b/test/files/run/no-pickle-skolems/Source_1.scala @@ -0,0 +1,5 @@ +package s + +trait Foo { def to[CC[X]](implicit cc: CC[Int]): Unit } + +class Bar extends Foo { def to[CC[X]](implicit cc: CC[Int]): Unit = ??? } diff --git a/test/files/run/no-pickle-skolems/Test_2.scala b/test/files/run/no-pickle-skolems/Test_2.scala new file mode 100644 index 0000000000..90bb4c4f88 --- /dev/null +++ b/test/files/run/no-pickle-skolems/Test_2.scala @@ -0,0 +1,37 @@ +import scala.reflect.runtime.universe._ + +object Test { + /** Collects symbols by the given name, even if they're not + * named CC. + */ + def collectSymbols[T: TypeTag](inMethod: TermName, name: String): List[String] = { + val m = typeOf[T] member inMethod typeSignatureIn typeOf[T] + var buf: List[Symbol] = Nil + var seen: Set[Symbol] = Set() + def id(s: Symbol): Int = s.asInstanceOf[{ def id: Int }].id + + def check(s: Symbol) { + if (!seen(s)) { + seen += s + if (s.name.toString == name) buf ::= s + } + } + def loop(t: Type) { + t match { + case TypeRef(pre, sym, args) => loop(pre) ; check(sym) ; args foreach loop + case PolyType(tparams, restpe) => tparams foreach { tp => check(tp) ; check(tp.owner) ; loop(tp.typeSignature) } ; loop(restpe) + case MethodType(params, restpe) => params foreach { p => check(p) ; loop(p.typeSignature) } ; loop(restpe) + case _ => + } + } + loop(m) + + buf.reverse.distinct map (s => s.name + "#" + id(s)) + } + + def main(args: Array[String]): Unit = { + val syms = collectSymbols[s.Bar]("to", "CC") + assert(syms.size == 1, syms) + println("OK!") + } +} -- cgit v1.2.3 From 1feee89e3c7b8edc7a7e2a724f9d9c3194ddf171 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 29 Nov 2012 11:20:19 +0100 Subject: Integrate isNameInScope in Contexts. This was a partial implementation of symbol lookup used for short-circuiting some expensive tests. After rescuing lookup from typedIdent, it should only be a wrapper around lookupSymbol (as it now is.) --- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 11 +---------- src/reflect/scala/reflect/internal/Scopes.scala | 4 ++-- 2 files changed, 3 insertions(+), 12 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index c0d2f44c7b..a494a4a524 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -436,16 +436,7 @@ trait Contexts { self: Analyzer => case _ => outer.isLocal() } - /** Fast path for some slow checks (ambiguous assignment in Refchecks, and - * existence of __match for MatchTranslation in virtpatmat.) This logic probably - * needs improvement. 
- */ - def isNameInScope(name: Name) = ( - enclosingContextChain exists (ctx => - (ctx.scope.lookupEntry(name) != null) - || (ctx.owner.rawInfo.member(name) != NoSymbol) - ) - ) + def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess // nextOuter determines which context is searched next for implicits // (after `this`, which contributes `newImplicits` below.) In diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 04f1d73360..b1cfaa4774 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -10,8 +10,8 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** An ADT to represent the results of symbol name lookups. */ - sealed trait NameLookup { def symbol: Symbol } - case class LookupSucceeded(qualifier: Tree, symbol: Symbol) extends NameLookup + sealed trait NameLookup { def symbol: Symbol ; def isSuccess = false } + case class LookupSucceeded(qualifier: Tree, symbol: Symbol) extends NameLookup { override def isSuccess = true } case class LookupAmbiguous(msg: String) extends NameLookup { def symbol = NoSymbol } case class LookupInaccessible(symbol: Symbol, msg: String) extends NameLookup case object LookupNotFound extends NameLookup { def symbol = NoSymbol } -- cgit v1.2.3 From 5f1e18b1cd8f76f40bb01d257ae3b81cb70e3e07 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 3 Dec 2012 11:53:17 -0800 Subject: Optimization in SpecializeTypes. Avoid time traveling to find type parameters which will never be there. --- .../scala/tools/nsc/transform/SpecializeTypes.scala | 15 ++++++++++----- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 9 ++------- src/reflect/scala/reflect/internal/Definitions.scala | 15 +++++++++++++++ 3 files changed, 27 insertions(+), 12 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 4e4c1b98ac..e3239313de 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -403,11 +403,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case _ => false }) def specializedTypeVars(tpes: List[Type]): immutable.Set[Symbol] = { - val buf = Set.newBuilder[Symbol] - tpes foreach (tp => buf ++= specializedTypeVars(tp)) - buf.result + if (tpes.isEmpty) immutable.Set.empty else { + val buf = Set.newBuilder[Symbol] + tpes foreach (tp => buf ++= specializedTypeVars(tp)) + buf.result + } } - def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = enteringTyper(specializedTypeVars(sym.info)) + def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = ( + if (definitions.neverHasTypeParameters(sym)) immutable.Set.empty + else enteringTyper(specializedTypeVars(sym.info)) + ) /** Return the set of @specialized type variables mentioned by the given type. 
* It only counts type variables that appear: @@ -436,7 +441,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case AnnotatedType(_, tp, _) => specializedTypeVars(tp) case TypeBounds(lo, hi) => specializedTypeVars(lo :: hi :: Nil) case RefinedType(parents, _) => parents flatMap specializedTypeVars toSet - case _ => Set() + case _ => immutable.Set.empty } /** Returns the type parameter in the specialized class `sClass` that corresponds to type parameter diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a1c1b53cce..703c12038f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2716,13 +2716,8 @@ trait Typers extends Modes with Adaptations with Tags { val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil)) templ.removeAttachment[CompoundTypeTreeOriginalAttachment] templ updateAttachment att.copy(stats = stats1) - for (stat <- stats1 if stat.isDef) { - val member = stat.symbol - if (!(context.owner.ancestors forall - (bc => member.matchingSymbol(bc, context.owner.thisType) == NoSymbol))) { - member setFlag OVERRIDE - } - } + for (stat <- stats1 if stat.isDef && stat.symbol.isOverridingSymbol) + stat.symbol setFlag OVERRIDE } } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 8c048ed7f8..d165f66004 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -686,6 +686,21 @@ trait Definitions extends api.StandardDefinitions { def ClassType(arg: Type) = if (phase.erasedTypes) ClassClass.tpe else appliedType(ClassClass, arg) + /** Can we tell by inspecting the symbol that it will never + * at any phase have type parameters? + */ + def neverHasTypeParameters(sym: Symbol) = sym match { + case _: RefinementClassSymbol => true + case _: ModuleClassSymbol => true + case _: ImplClassSymbol => true + case _ => + ( + sym.isPrimitiveValueClass + || sym.isAnonymousClass + || sym.initialize.isMonomorphicType + ) + } + def EnumType(sym: Symbol) = // given (in java): "class A { enum E { VAL1 } }" // - sym: the symbol of the actual enumeration value (VAL1) -- cgit v1.2.3 From 5b2990c5119ae58d2c9d90d6b4822d906559aeef Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 18 Dec 2012 14:50:55 +0100 Subject: SI-6745 Fix lookup We should only consult the decls of the enclosing class. Members of the self type, enclosing scopes, or imports should not be considered. 
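As an illustration of the rule (a minimal sketch distilled from the tests added in this patch, pos/t6745.scala and neg/t4460a.scala below), a self-invocation in a secondary constructor must bind to a constructor declared by the enclosing class itself, even when the class has a self type:

    class Bar(val i: Int) {
      self: Any with AnyRef =>
      // this(0) must resolve to Bar's primary constructor; in the related
      // negative case (neg/t4460a.scala) such a call was binding to Predef.
      def this() = this(0)
    }
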
--- .../scala/tools/nsc/typechecker/Contexts.scala | 45 ++++++++++++++++------ test/files/neg/t4460a.check | 4 ++ test/files/neg/t4460a.scala | 7 ++++ test/files/neg/t4460b.check | 4 ++ test/files/neg/t4460b.scala | 9 +++++ test/files/neg/t4460c.check | 7 ++++ test/files/neg/t4460c.scala | 7 ++++ test/files/pos/t6745.scala | 4 ++ 8 files changed, 75 insertions(+), 12 deletions(-) create mode 100644 test/files/neg/t4460a.check create mode 100644 test/files/neg/t4460a.scala create mode 100644 test/files/neg/t4460b.check create mode 100644 test/files/neg/t4460b.scala create mode 100644 test/files/neg/t4460c.check create mode 100644 test/files/neg/t4460c.scala create mode 100644 test/files/pos/t6745.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index c0d2f44c7b..01c8030f64 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -802,6 +802,12 @@ trait Contexts { self: Analyzer => case _ => LookupSucceeded(qual, sym) } ) + def finishDefSym(sym: Symbol, pre0: Type): NameLookup = + if (requiresQualifier(sym)) + finish(gen.mkAttributedQualifier(pre0), sym) + else + finish(EmptyTree, sym) + def isPackageOwnedInDifferentUnit(s: Symbol) = ( s.isDefinedInPackage && ( !currentRun.compiles(s) @@ -825,17 +831,36 @@ trait Contexts { self: Analyzer => found1 } + + def lookupInScope(scope: Scope) = + (scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList + + def newOverloaded(owner: Symbol, pre: Type, entries: List[ScopeEntry]) = + logResult(s"!!! lookup overloaded")(owner.newOverloaded(pre, entries map (_.sym))) + + // Constructor lookup should only look in the decls of the enclosing class + // not in the self-type, nor in the enclosing context, nor in imports (SI-4460, SI-6745) + if (name == nme.CONSTRUCTOR) return { + val enclClassSym = cx.enclClass.owner + val scope = cx.enclClass.prefix.baseType(enclClassSym).decls + val constructorSym = lookupInScope(scope) match { + case Nil => NoSymbol + case hd :: Nil => hd.sym + case entries => newOverloaded(enclClassSym, cx.enclClass.prefix, entries) + } + finishDefSym(constructorSym, cx.enclClass.prefix) + } + // cx.scope eq null arises during FixInvalidSyms in Duplicators while (defSym == NoSymbol && (cx ne NoContext) && (cx.scope ne null)) { - pre = cx.enclClass.prefix - val entries = (cx.scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList - defSym = entries match { - case Nil => searchPrefix - case hd :: tl => + pre = cx.enclClass.prefix + defSym = lookupInScope(cx.scope) match { + case Nil => searchPrefix + case entries @ (hd :: tl) => // we have a winner: record the symbol depth symbolDepth = (cx.depth - cx.scope.nestingLevel) + hd.depth if (tl.isEmpty) hd.sym - else logResult(s"!!! lookup overloaded")(cx.owner.newOverloaded(pre, entries map (_.sym))) + else newOverloaded(cx.owner, pre, entries) } if (!defSym.exists) cx = cx.outer // push further outward @@ -873,12 +898,8 @@ trait Contexts { self: Analyzer => } // At this point only one or the other of defSym and impSym might be set. 
- if (defSym.exists) { - if (requiresQualifier(defSym)) - finish(gen.mkAttributedQualifier(pre), defSym) - else - finish(EmptyTree, defSym) - } + if (defSym.exists) + finishDefSym(defSym, pre) else if (impSym.exists) { // We continue walking down the imports as long as the tail is non-empty, which gives us: // imports == imp1 :: imp2 :: _ diff --git a/test/files/neg/t4460a.check b/test/files/neg/t4460a.check new file mode 100644 index 0000000000..b711e7acb1 --- /dev/null +++ b/test/files/neg/t4460a.check @@ -0,0 +1,4 @@ +t4460a.scala:6: error: called constructor's definition must precede calling constructor's definition + def this() = this() // was binding to Predef. !! + ^ +one error found diff --git a/test/files/neg/t4460a.scala b/test/files/neg/t4460a.scala new file mode 100644 index 0000000000..0a7a22178d --- /dev/null +++ b/test/files/neg/t4460a.scala @@ -0,0 +1,7 @@ +trait A + +class B(val x: Int) { + self: A => + + def this() = this() // was binding to Predef. !! +} diff --git a/test/files/neg/t4460b.check b/test/files/neg/t4460b.check new file mode 100644 index 0000000000..f0e703fd10 --- /dev/null +++ b/test/files/neg/t4460b.check @@ -0,0 +1,4 @@ +t4460b.scala:7: error: called constructor's definition must precede calling constructor's definition + def this() = this() // was binding to Predef. !! + ^ +one error found diff --git a/test/files/neg/t4460b.scala b/test/files/neg/t4460b.scala new file mode 100644 index 0000000000..1233017dd4 --- /dev/null +++ b/test/files/neg/t4460b.scala @@ -0,0 +1,9 @@ +trait A + +class Outer() { + class B(val x: Int) { + self: A => + + def this() = this() // was binding to Predef. !! + } +} diff --git a/test/files/neg/t4460c.check b/test/files/neg/t4460c.check new file mode 100644 index 0000000000..4e96711b8b --- /dev/null +++ b/test/files/neg/t4460c.check @@ -0,0 +1,7 @@ +t4460c.scala:4: error: overloaded method constructor B with alternatives: + (a: String)B + (x: Int)B + cannot be applied to () + def this(a: String) = this() + ^ +one error found diff --git a/test/files/neg/t4460c.scala b/test/files/neg/t4460c.scala new file mode 100644 index 0000000000..1ae258508e --- /dev/null +++ b/test/files/neg/t4460c.scala @@ -0,0 +1,7 @@ +class B(val x: Int) { + self: A => + + def this(a: String) = this() +} + +class A() diff --git a/test/files/pos/t6745.scala b/test/files/pos/t6745.scala new file mode 100644 index 0000000000..2ab8e6d39a --- /dev/null +++ b/test/files/pos/t6745.scala @@ -0,0 +1,4 @@ +class Bar(val i: Int) { + self: Any with AnyRef => + def this() = this(0) +} -- cgit v1.2.3 From fadb306fdf3d37284fd29c50aa3956cabe79480d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 26 Sep 2012 11:36:01 -0700 Subject: PluginComponent contributes description to -Xshow-phases. In Global, SubComponent is called a phase descriptor, but it doesn't actually have a description. (Phase itself does.) This fix adds a description to PluginComponent so that plugins can describe what they do in -Xshow-phases. Elliptical descriptions Exploded archives Plugged-in partest Roundup at the Little h! 
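As a usage sketch of the new hook (the plugin name, phase name, runsAfter constraint, and description text below are illustrative, not taken from the test files in this patch), a plugin component simply overrides description so that -Xshow-phases has something meaningful to print for its phase:

    import scala.tools.nsc.{ Global, Phase }
    import scala.tools.nsc.plugins.{ Plugin, PluginComponent }

    class SamplePlugin(val global: Global) extends Plugin {
      import global._

      val name = "sample"
      val description = "a sample plugin"
      val components = List[PluginComponent](Component)

      private object Component extends PluginComponent {
        val global: SamplePlugin.this.global.type = SamplePlugin.this.global
        val runsAfter = List("typer")
        val phaseName = SamplePlugin.this.name

        // The hook added by this commit; it defaults to "" in PluginComponent
        // and is shown in the description column of -Xshow-phases.
        override def description = "sample phase that inspects typed trees"

        def newPhase(prev: Phase) = new SamplePhase(prev)

        class SamplePhase(prev: Phase) extends StdPhase(prev) {
          def apply(unit: CompilationUnit) {
            // no-op; a real component would traverse unit.body here
          }
        }
      }
    }

With the formatting changes to Global.phaseDescriptions below, each description is truncated to fit an 80-column listing unless -verbose is given, in which case full phase names and descriptions are printed.
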
--- src/compiler/scala/tools/nsc/Global.scala | 40 ++++++- .../nsc/backend/opt/InlineExceptionHandlers.scala | 2 +- src/compiler/scala/tools/nsc/io/Jar.scala | 14 +++ src/compiler/scala/tools/nsc/plugins/Plugin.scala | 128 +++++++++++---------- .../scala/tools/nsc/plugins/PluginComponent.scala | 8 +- .../tools/nsc/plugins/PluginDescription.scala | 47 +++----- src/compiler/scala/tools/nsc/plugins/Plugins.scala | 15 ++- .../tools/selectivecps/SelectiveANFTransform.scala | 3 +- .../tools/selectivecps/SelectiveCPSTransform.scala | 2 + .../scala/tools/partest/nest/CompileManager.scala | 43 +++++-- .../tools/partest/nest/ReflectiveRunner.scala | 3 + .../scala/tools/partest/nest/RunnerManager.scala | 17 +-- test/files/neg/t6446-additional.check | 31 +++++ test/files/neg/t6446-additional/ploogin_1.scala | 31 +++++ test/files/neg/t6446-additional/sample_2.flags | 1 + test/files/neg/t6446-additional/sample_2.scala | 6 + test/files/neg/t6446-additional/scalac-plugin.xml | 4 + test/files/neg/t6446-list.check | 1 + test/files/neg/t6446-list/ploogin_1.scala | 31 +++++ test/files/neg/t6446-list/sample_2.flags | 1 + test/files/neg/t6446-list/sample_2.scala | 6 + test/files/neg/t6446-list/scalac-plugin.xml | 4 + test/files/neg/t6446-missing.check | 31 +++++ test/files/neg/t6446-missing/sample_2.flags | 1 + test/files/neg/t6446-missing/sample_2.scala | 6 + test/files/neg/t6446-missing/scalac-plugin.xml | 4 + test/files/neg/t6446-show-phases.check | 30 +++++ test/files/neg/t6446-show-phases.flags | 1 + test/files/neg/t6446-show-phases.scala | 3 + test/files/pos/t4351.check | 1 - test/files/pos/t4351.scala | 20 ---- test/files/run/inline-ex-handlers.scala | 2 +- test/files/run/programmatic-main.check | 60 +++++----- test/files/run/t4351.check | 1 + test/files/run/t4351.scala | 21 ++++ 35 files changed, 442 insertions(+), 177 deletions(-) create mode 100755 test/files/neg/t6446-additional.check create mode 100644 test/files/neg/t6446-additional/ploogin_1.scala create mode 100644 test/files/neg/t6446-additional/sample_2.flags create mode 100644 test/files/neg/t6446-additional/sample_2.scala create mode 100644 test/files/neg/t6446-additional/scalac-plugin.xml create mode 100755 test/files/neg/t6446-list.check create mode 100644 test/files/neg/t6446-list/ploogin_1.scala create mode 100644 test/files/neg/t6446-list/sample_2.flags create mode 100644 test/files/neg/t6446-list/sample_2.scala create mode 100644 test/files/neg/t6446-list/scalac-plugin.xml create mode 100755 test/files/neg/t6446-missing.check create mode 100644 test/files/neg/t6446-missing/sample_2.flags create mode 100644 test/files/neg/t6446-missing/sample_2.scala create mode 100644 test/files/neg/t6446-missing/scalac-plugin.xml create mode 100644 test/files/neg/t6446-show-phases.check create mode 100644 test/files/neg/t6446-show-phases.flags create mode 100644 test/files/neg/t6446-show-phases.scala delete mode 100644 test/files/pos/t4351.check delete mode 100644 test/files/pos/t4351.scala create mode 100644 test/files/run/t4351.check create mode 100644 test/files/run/t4351.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 34d5d10cbf..7ea49a5c86 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -572,7 +572,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val runsRightAfter = None } with Inliners - // phaseName = "inlineExceptionHandlers" + // phaseName = "inlinehandlers" object 
inlineExceptionHandlers extends { val global: Global.this.type = Global.this val runsAfter = List("inliner") @@ -582,7 +582,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // phaseName = "closelim" object closureElimination extends { val global: Global.this.type = Global.this - val runsAfter = List("inlineExceptionHandlers") + val runsAfter = List("inlinehandlers") val runsRightAfter = None } with ClosureElimination @@ -724,13 +724,41 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** A description of the phases that will run */ def phaseDescriptions: String = { - val width = phaseNames map (_.length) max - val fmt = "%" + width + "s %2s %s\n" + val Limit = 16 // phase names should not be absurdly long + val MaxCol = 80 // because some of us edit on green screens + val maxName = (0 /: phaseNames)(_ max _.length) + val width = maxName min Limit + val maxDesc = MaxCol - (width + 6) // descriptions not novels + val fmt = if (settings.verbose.value) s"%${maxName}s %2s %s%n" + else s"%${width}.${width}s %2s %.${maxDesc}s%n" val line1 = fmt.format("phase name", "id", "description") val line2 = fmt.format("----------", "--", "-----------") + + // built-in string precision merely truncates + import java.util.{ Formattable, FormattableFlags, Formatter } + def fmtable(s: String) = new Formattable { + override def formatTo(formatter: Formatter, flags: Int, width: Int, precision: Int) { + val p = elliptically(s, precision) + val w = if (width > 0 && p.length < width) { + import FormattableFlags.LEFT_JUSTIFY + val leftly = (flags & LEFT_JUSTIFY) == LEFT_JUSTIFY + val sb = new StringBuilder + def pad() = 1 to width - p.length foreach (_ => sb.append(' ')) + if (!leftly) pad() + sb.append(p) + if (leftly) pad() + sb.toString + } else p + formatter.out.append(w) + } + } + def elliptically(s: String, max: Int) = + if (max < 0 || s.length <= max) s + else if (max < 4) s.take(max) + else s.take(max - 3) + "..." 
val descs = phaseDescriptors.zipWithIndex map { - case (ph, idx) => fmt.format(ph.phaseName, idx + 1, phasesDescMap(ph)) + case (ph, idx) => fmt.format(fmtable(ph.phaseName), idx + 1, fmtable(phasesDescMap(ph))) } line1 :: line2 :: descs mkString } @@ -1302,7 +1330,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val cleanupPhase = phaseNamed("cleanup") val icodePhase = phaseNamed("icode") val inlinerPhase = phaseNamed("inliner") - val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers") + val inlineExceptionHandlersPhase = phaseNamed("inlinehandlers") val closelimPhase = phaseNamed("closelim") val dcePhase = phaseNamed("dce") // val jvmPhase = phaseNamed("jvm") diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala index c534c2230c..4e65c72b0b 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala @@ -52,7 +52,7 @@ abstract class InlineExceptionHandlers extends SubComponent { import icodes._ import icodes.opcodes._ - val phaseName = "inlineExceptionHandlers" + val phaseName = "inlinehandlers" /** Create a new phase */ override def newPhase(p: Phase) = new InlineExceptionHandlersPhase(p) diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index 49a1ff114f..0dca75dab9 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -47,6 +47,20 @@ class Jar(file: File) extends Iterable[JarEntry] { case _ => Nil } + /** Invoke f with input for named jar entry (or None). */ + def withEntryStream[A](name: String)(f: Option[InputStream] => A) = { + val jarFile = new JarFile(file.jfile) + def apply() = + jarFile getEntry name match { + case null => f(None) + case entry => + val in = Some(jarFile getInputStream entry) + try f(in) + finally in map (_.close()) + } + try apply() finally jarFile.close() + } + def withJarInput[T](f: JarInputStream => T): T = { val in = new JarInputStream(file.inputStream()) try f(in) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 093f8285e1..b0113f7696 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -6,10 +6,14 @@ package scala.tools.nsc package plugins -import io.{ Path, Jar } -import java.net.URLClassLoader -import java.util.jar.JarFile +import scala.tools.nsc.io.{ Jar } +import scala.tools.nsc.util.ScalaClassLoader +import scala.reflect.io.{ Directory, File, Path } +import java.io.InputStream import java.util.zip.ZipException + +import scala.collection.mutable.ListBuffer +import scala.util.{ Try, Success, Failure } import scala.xml.XML /** Information about a plugin loaded from a jar file. @@ -34,11 +38,13 @@ abstract class Plugin { val description: String /** The compiler that this plugin uses. This is normally equated - * to a constructor parameter in the concrete subclass. */ + * to a constructor parameter in the concrete subclass. + */ val global: Global /** Handle any plugin-specific options. The `-P:plugname:` part - * will not be present. */ + * will not be present. 
+ */ def processOptions(options: List[String], error: String => Unit) { if (!options.isEmpty) error("Error: " + name + " has no options") @@ -60,90 +66,86 @@ object Plugin { private val PluginXML = "scalac-plugin.xml" - /** Create a class loader with the specified file plus + /** Create a class loader with the specified locations plus * the loader that loaded the Scala compiler. */ - private def loaderFor(jarfiles: Seq[Path]): ClassLoader = { + private def loaderFor(locations: Seq[Path]): ScalaClassLoader = { val compilerLoader = classOf[Plugin].getClassLoader - val jarurls = jarfiles map (_.toURL) + val urls = locations map (_.toURL) - new URLClassLoader(jarurls.toArray, compilerLoader) + ScalaClassLoader fromURLs (urls, compilerLoader) } - /** Try to load a plugin description from the specified - * file, returning `None` if it does not work. + /** Try to load a plugin description from the specified location. */ - private def loadDescription(jarfile: Path): Option[PluginDescription] = - // XXX Return to this once we have some ARM support - if (!jarfile.exists) None - else try { - val jar = new JarFile(jarfile.jfile) - - try { - jar getEntry PluginXML match { - case null => None - case entry => - val in = jar getInputStream entry - val packXML = XML load in - in.close() - - PluginDescription fromXML packXML - } - } - finally jar.close() - } - catch { - case _: ZipException => None + private def loadDescriptionFromJar(jarp: Path): Try[PluginDescription] = { + // XXX Return to this once we have more ARM support + def read(is: Option[InputStream]) = is match { + case None => throw new RuntimeException(s"Missing $PluginXML in $jarp") + case _ => PluginDescription fromXML (XML load is.get) } + Try(new Jar(jarp.jfile).withEntryStream(PluginXML)(read)) + } + + private def loadDescriptionFromFile(f: Path): Try[PluginDescription] = + Try(XML loadFile f.jfile) map (PluginDescription fromXML _) type AnyClass = Class[_] - /** Loads a plugin class from the named jar file. + /** Use a class loader to load the plugin class. * - * @return `None` if the jar file has no plugin in it or - * if the plugin is badly formed. + * @return `None` on failure */ - def loadFrom(jarfile: Path, loader: ClassLoader): Option[AnyClass] = - loadDescription(jarfile) match { - case None => - println("Warning: could not load descriptor for plugin %s".format(jarfile)) - None - case Some(pdesc) => - try Some(loader loadClass pdesc.classname) catch { - case _: Exception => - println("Warning: class not found for plugin in %s (%s)".format(jarfile, pdesc.classname)) - None - } + def load(pd: PluginDescription, loader: ClassLoader): Try[AnyClass] = { + Try[AnyClass] { + loader loadClass pd.classname + } recoverWith { + case _: Exception => + Failure(new RuntimeException(s"Warning: class not found: ${pd.classname}")) } + } - /** Load all plugins found in the argument list, both in the - * jar files explicitly listed, and in the jar files in the - * directories specified. Skips all plugins in `ignoring`. + /** Load all plugins specified by the arguments. + * Each of `jars` must be a valid plugin archive or exploded archive. + * Each of `dirs` may be a directory containing arbitrary plugin archives. + * Skips all plugins named in `ignoring`. * A single classloader is created and used to load all of them. 
*/ def loadAllFrom( jars: List[Path], dirs: List[Path], - ignoring: List[String]): List[AnyClass] = + ignoring: List[String]): List[Try[AnyClass]] = { - val alljars = (jars ::: (for { - dir <- dirs if dir.isDirectory - entry <- dir.toDirectory.files.toList sortBy (_.name) -// was: if Path.isJarOrZip(entry) - if Jar.isJarOrZip(entry) - pdesc <- loadDescription(entry) - if !(ignoring contains pdesc.name) - } yield entry)).distinct - - val loader = loaderFor(alljars) - (alljars map (loadFrom(_, loader))).flatten + // List[(jar, Success(descriptor))] in dir + def scan(d: Directory) = for { + f <- d.files.toList sortBy (_.name) + if Jar isJarOrZip f + pd = loadDescriptionFromJar(f) + if pd.isSuccess + } yield (f, pd) + // (dir, Try(descriptor)) + def explode(d: Directory) = d -> loadDescriptionFromFile(d / PluginXML) + // (j, Try(descriptor)) + def required(j: Path) = j -> loadDescriptionFromJar(j) + + type Paired = Pair[Path, Try[PluginDescription]] + val included: List[Paired] = (dirs flatMap (_ ifDirectory scan)).flatten + val exploded: List[Paired] = jars flatMap (_ ifDirectory explode) + val explicit: List[Paired] = jars flatMap (_ ifFile required) + def ignored(p: Paired) = p match { + case (path, Success(pd)) => ignoring contains pd.name + case _ => false + } + val (locs, pds) = ((explicit ::: exploded ::: included) filterNot ignored).unzip + + val loader = loaderFor(locs.distinct) + pds filter (_.isSuccess) map (_.get) map (Plugin load (_, loader)) } /** Instantiate a plugin class, given the class and * the compiler it is to be used in. */ def instantiate(clazz: AnyClass, global: Global): Plugin = { - val constructor = clazz getConstructor classOf[Global] - (constructor newInstance global).asInstanceOf[Plugin] + (clazz getConstructor classOf[Global] newInstance global).asInstanceOf[Plugin] } } diff --git a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala index 4d98b2563c..c6e1af7ea4 100644 --- a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala +++ b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala @@ -18,8 +18,12 @@ abstract class PluginComponent extends SubComponent { /** Internal flag to tell external from internal phases */ final override val internal = false - /** Phases supplied by plugins should not have give the runsRightAfter constraint, - * but can override it */ + /** Phases supplied by plugins should not have to supply the + * runsRightAfter constraint, but can override it. + */ val runsRightAfter: Option[String] = None + /** Useful for -Xshow-phases. */ + def description: String = "" + } diff --git a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala index f77123ba11..27693d1a45 100644 --- a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala +++ b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala @@ -13,17 +13,12 @@ import scala.xml.Node * * @author Lex Spoon * @version 1.0, 2007-5-21 + * @param name A short name of the plugin, used to identify it in + * various contexts. The phase defined by the plugin + * should have the same name. + * @param classname The name of the main Plugin class. */ -abstract class PluginDescription { - - /** A short name of the compiler, used to identify it in - * various contexts. The phase defined by the plugin - * should have the same name. 
- */ - val name: String - - /** The name of the main class for the plugin */ - val classname: String +case class PluginDescription(name: String, classname: String) { /** An XML representation of this description. It can be * read back using `PluginDescription.fromXML`. @@ -44,32 +39,24 @@ abstract class PluginDescription { */ object PluginDescription { - def fromXML(xml: Node): Option[PluginDescription] = { - // check the top-level tag - xml match { - case {_*} => () - case _ => return None - } + def fromXML(xml: Node): PluginDescription = { // extract one field def getField(field: String): Option[String] = { val text = (xml \\ field).text.trim if (text == "") None else Some(text) } - - // extract the required fields - val name1 = getField("name") match { - case None => return None - case Some(str) => str + def extracted = { + val name = "name" + val claas = "classname" + val vs = Map(name -> getField(name), claas -> getField(claas)) + if (vs.values exists (_.isEmpty)) fail() + else PluginDescription(name = vs(name).get, classname = vs(claas).get) } - val classname1 = getField("classname") match { - case None => return None - case Some(str) => str + def fail() = throw new RuntimeException("Bad plugin descriptor.") + // check the top-level tag + xml match { + case {_*} => extracted + case _ => fail() } - - Some(new PluginDescription { - val name = name1 - val classname = classname1 - }) } - } diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 736bd826e4..bb7d54d8f6 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -7,7 +7,8 @@ package scala.tools.nsc package plugins -import io.{ File, Path } +import scala.reflect.io.{ File, Path } +import scala.tools.util.PathResolver.Defaults /** Support for run-time loading of compiler plugins. * @@ -25,8 +26,14 @@ trait Plugins { */ protected def loadRoughPluginsList(): List[Plugin] = { val jars = settings.plugin.value map Path.apply - val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map Path.apply - val classes = Plugin.loadAllFrom(jars, dirs, settings.disable.value) + def injectDefault(s: String) = if (s.isEmpty) Defaults.scalaPluginPath else s + val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map injectDefault map Path.apply + val maybes = Plugin.loadAllFrom(jars, dirs, settings.disable.value) + val (goods, errors) = maybes partition (_.isSuccess) + errors foreach (_ recover { + case e: Exception => inform(e.getMessage) + }) + val classes = goods map (_.get) // flatten // Each plugin must only be instantiated once. 
A common pattern // is to register annotation checkers during object construction, so @@ -106,7 +113,7 @@ trait Plugins { * @see phasesSet */ protected def computePluginPhases(): Unit = - phasesSet ++= (plugins flatMap (_.components)) + for (p <- plugins; c <- p.components) addToPhasesSet(c, c.description) /** Summary of the options for all loaded plugins */ def pluginOptionsHelp: String = diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala index f62eebaaa0..36f0253243 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala @@ -17,13 +17,14 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with import definitions._ // standard classes and methods import typer.atOwner // methods to type trees + override def description = "ANF pre-transform for @cps" + /** the following two members override abstract members in Transform */ val phaseName: String = "selectiveanf" protected def newTransformer(unit: CompilationUnit): Transformer = new ANFTransformer(unit) - class ANFTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { implicit val _unit = unit // allow code in CPSUtils.scala to report errors diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala index 801c328177..f61828debc 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala @@ -17,6 +17,8 @@ abstract class SelectiveCPSTransform extends PluginComponent with import definitions._ // standard classes and methods import typer.atOwner // methods to type trees + override def description = "@cps-driven transform of selectiveanf assignments" + /** the following two members override abstract members in Transform */ val phaseName: String = "selectivecps" diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala index 3f005d143e..9a48c5ce2b 100644 --- a/src/partest/scala/tools/partest/nest/CompileManager.scala +++ b/src/partest/scala/tools/partest/nest/CompileManager.scala @@ -9,7 +9,7 @@ package scala.tools.partest package nest import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io } -import scala.tools.nsc.io.{ File => SFile } +import scala.reflect.io.{ Directory, File => SFile, FileOperationException } import scala.tools.nsc.interactive.RangePositions import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter } import scala.tools.nsc.util.{ ClassPath, FakePos } @@ -70,10 +70,27 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler { s } - private def updatePluginPath(options: String): String = { - def absolutize(path: String) = Path(path) match { + implicit class Copier(f: SFile) { + // But what if f is bigger than CHUNK?! + def copyTo(dest: Path) { + dest.toFile writeAll f.slurp + } + } + + // plugin path can be relative to test root, or cwd is out + private def updatePluginPath(options: String, out: Option[File], srcdir: Directory): String = { + val dir = fileManager.testRootDir + def pathOrCwd(p: String) = + if (p == "." 
&& out.isDefined) { + val plugxml = "scalac-plugin.xml" + val pout = Path(out.get) + val pd = (srcdir / plugxml).toFile + if (pd.exists) pd copyTo (pout / plugxml) + pout + } else Path(p) + def absolutize(path: String) = pathOrCwd(path) match { case x if x.isAbsolute => x.path - case x => (fileManager.testRootDir / x).toAbsolute.path + case x => (dir / x).toAbsolute.path } val (opt1, opt2) = (options split "\\s").toList partition (_ startsWith "-Xplugin:") @@ -90,17 +107,21 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler { } val logWriter = new FileWriter(log) + // this api has no notion of srcdir, so fake it + val fstFile = SFile(files(0)) + val srcdir = fstFile.parent + // check whether there is a ".flags" file + def convertFlags(f: SFile) = updatePluginPath(f.slurp(), out, srcdir) val logFile = basename(log.getName) val flagsFileName = "%s.flags" format (logFile.substring(0, logFile.lastIndexOf("-"))) - val argString = (io.File(log).parent / flagsFileName) ifFile (x => updatePluginPath(x.slurp())) getOrElse "" + val argString = (SFile(log).parent / flagsFileName) ifFile (convertFlags) getOrElse "" // slurp local flags (e.g., "A_1.flags") - val fstFile = SFile(files(0)) def isInGroup(num: Int) = fstFile.stripExtension endsWith ("_" + num) val inGroup = (1 to 9) flatMap (group => if (isInGroup(group)) List(group) else List()) val localFlagsList = if (inGroup.nonEmpty) { - val localArgString = (fstFile.parent / (fstFile.stripExtension + ".flags")) ifFile (x => updatePluginPath(x.slurp())) getOrElse "" + val localArgString = (srcdir / (fstFile.stripExtension + ".flags")) ifFile (convertFlags) getOrElse "" localArgString.split(' ').toList.filter(_.length > 0) } else List() @@ -140,8 +161,10 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler { NestUI.verbose("compiling "+toCompile) NestUI.verbose("with classpath: "+global.classPath.toString) NestUI.verbose("and java classpath: "+ propOrEmpty("java.class.path")) - try new global.Run compile toCompile - catch { + try { + if (command.shouldStopWithInfo) logWriter append (command getInfoMessage global) + else new global.Run compile toCompile + } catch { case FatalError(msg) => testRep.error(null, "fatal error: " + msg) return CompilerCrashed @@ -152,7 +175,7 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler { } finally logWriter.close() - if (testRep.hasErrors) CompileFailed + if (testRep.hasErrors || command.shouldStopWithInfo) CompileFailed else CompileSuccess } } diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala index d3a40718c6..3446dd0f72 100644 --- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala +++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala @@ -81,6 +81,9 @@ class ReflectiveRunner { val newClasspath = ClassPath.join(paths: _*) setProp("java.class.path", newClasspath) + + // don't let partest find pluginsdir; in ant build, standard plugin has dedicated test suite + //setProp("scala.home", latestLibFile.parent.parent.path) setProp("scala.home", "") if (isPartestDebug) diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala index f2ce19a950..fbef97dab4 100644 --- a/src/partest/scala/tools/partest/nest/RunnerManager.scala +++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala @@ -344,21 +344,22 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: 
TestRunP * compiler expects and how to implement them. (see SI-1240 for the full story) * * In practice, this happens in 3 steps: - * STEP1: feed all the files to scalac - * it will parse java files and obtain their expected signatures and generate bytecode for scala files - * STEP2: feed the java files to javac - * it will generate the bytecode for the java files and link to the scalac-generated bytecode for scala - * STEP3: only if there are both scala and java files, recompile the scala sources so they link to the correct + * STEP1: Feed all the files to scalac if there are also non-Scala sources. + * It will parse java files and obtain their expected signatures and generate bytecode for scala files + * STEP2: Feed the java files to javac if there are any. + * It will generate the bytecode for the java files and link to the scalac-generated bytecode for scala + * STEP3: (Re-)compile the scala sources so they link to the correct * java signatures, in case the signatures deduced by scalac from the source files were wrong. Since the * bytecode for java is already in place, we only feed the scala files to scalac so it will take the - * java signatures from the existing javac-generated bytecode + * java signatures from the existing javac-generated bytecode. + * Note that no artifacts are deleted before this step. */ List(1, 2, 3).foldLeft(CompileSuccess: CompilationOutcome) { - case (CompileSuccess, 1) if scalaFiles.nonEmpty => + case (CompileSuccess, 1) if scalaFiles.nonEmpty && javaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), allFiles, kind, logFile) case (CompileSuccess, 2) if javaFiles.nonEmpty => javac(outDir, javaFiles, logFile) - case (CompileSuccess, 3) if scalaFiles.nonEmpty && javaFiles.nonEmpty => + case (CompileSuccess, 3) if scalaFiles.nonEmpty => // TODO: Do we actually need this? 
SI-1240 is known to require this, but we don't know if other tests // require it: https://groups.google.com/forum/?fromgroups#!topic/scala-internals/rFDKAcOKciU compileMgr.attemptCompile(Some(outDir), scalaFiles, kind, logFile) diff --git a/test/files/neg/t6446-additional.check b/test/files/neg/t6446-additional.check new file mode 100755 index 0000000000..53dd383941 --- /dev/null +++ b/test/files/neg/t6446-additional.check @@ -0,0 +1,31 @@ + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + patmat 5 translate match expressions +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects + uncurry 10 uncurry, translate function values to anonymous classes + tailcalls 11 replace tail calls by jumps + specialize 12 @specialized-driven class and method specialization + explicitouter 13 this refs to outer pointers, translate patterns + erasure 14 erase types, add interfaces for traits + posterasure 15 clean up erased inline classes + lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs + lambdalift 17 move nested functions to top level + constructors 18 move field definitions into constructors + flatten 19 eliminate inner classes + mixin 20 mixin composition + cleanup 21 platform-specific cleanups, generate reflective calls + icode 22 generate portable intermediate code + inliner 23 optimization: do inlining +inlinehandlers 24 optimization: inline exception handlers + closelim 25 optimization: eliminate uncalled closures + dce 26 optimization: eliminate dead code + jvm 27 generate JVM bytecode + ploogin 28 A sample phase that does so many things it's kind of hard... + terminal 29 The last phase in the compiler chain diff --git a/test/files/neg/t6446-additional/ploogin_1.scala b/test/files/neg/t6446-additional/ploogin_1.scala new file mode 100644 index 0000000000..ed6adfc1cf --- /dev/null +++ b/test/files/neg/t6446-additional/ploogin_1.scala @@ -0,0 +1,31 @@ + +package t6446 + +import scala.tools.nsc.{ Global, Phase } +import scala.tools.nsc.plugins.{ Plugin, PluginComponent } +import scala.reflect.io.Path +import scala.reflect.io.File + +/** A test plugin. */ +class Ploogin(val global: Global) extends Plugin { + import global._ + + val name = "ploogin" + val description = "A sample plugin for testing." + val components = List[PluginComponent](TestComponent) + + private object TestComponent extends PluginComponent { + val global: Ploogin.this.global.type = Ploogin.this.global + //override val runsBefore = List("refchecks") + val runsAfter = List("jvm") + val phaseName = Ploogin.this.name + override def description = "A sample phase that does so many things it's kind of hard to describe briefly." + def newPhase(prev: Phase) = new TestPhase(prev) + class TestPhase(prev: Phase) extends StdPhase(prev) { + override def description = TestComponent.this.description + def apply(unit: CompilationUnit) { + // kewl kode + } + } + } +} diff --git a/test/files/neg/t6446-additional/sample_2.flags b/test/files/neg/t6446-additional/sample_2.flags new file mode 100644 index 0000000000..4d518c2286 --- /dev/null +++ b/test/files/neg/t6446-additional/sample_2.flags @@ -0,0 +1 @@ +-Xplugin:. 
-Xshow-phases diff --git a/test/files/neg/t6446-additional/sample_2.scala b/test/files/neg/t6446-additional/sample_2.scala new file mode 100644 index 0000000000..73cdc64e40 --- /dev/null +++ b/test/files/neg/t6446-additional/sample_2.scala @@ -0,0 +1,6 @@ + +package sample + +// just a sample that is compiled with the sample plugin enabled +object Sample extends App { +} diff --git a/test/files/neg/t6446-additional/scalac-plugin.xml b/test/files/neg/t6446-additional/scalac-plugin.xml new file mode 100644 index 0000000000..e849bb5919 --- /dev/null +++ b/test/files/neg/t6446-additional/scalac-plugin.xml @@ -0,0 +1,4 @@ + +sample-plugin +t6446.Ploogin + diff --git a/test/files/neg/t6446-list.check b/test/files/neg/t6446-list.check new file mode 100755 index 0000000000..fa5c581941 --- /dev/null +++ b/test/files/neg/t6446-list.check @@ -0,0 +1 @@ +ploogin - A sample plugin for testing. diff --git a/test/files/neg/t6446-list/ploogin_1.scala b/test/files/neg/t6446-list/ploogin_1.scala new file mode 100644 index 0000000000..ed6adfc1cf --- /dev/null +++ b/test/files/neg/t6446-list/ploogin_1.scala @@ -0,0 +1,31 @@ + +package t6446 + +import scala.tools.nsc.{ Global, Phase } +import scala.tools.nsc.plugins.{ Plugin, PluginComponent } +import scala.reflect.io.Path +import scala.reflect.io.File + +/** A test plugin. */ +class Ploogin(val global: Global) extends Plugin { + import global._ + + val name = "ploogin" + val description = "A sample plugin for testing." + val components = List[PluginComponent](TestComponent) + + private object TestComponent extends PluginComponent { + val global: Ploogin.this.global.type = Ploogin.this.global + //override val runsBefore = List("refchecks") + val runsAfter = List("jvm") + val phaseName = Ploogin.this.name + override def description = "A sample phase that does so many things it's kind of hard to describe briefly." + def newPhase(prev: Phase) = new TestPhase(prev) + class TestPhase(prev: Phase) extends StdPhase(prev) { + override def description = TestComponent.this.description + def apply(unit: CompilationUnit) { + // kewl kode + } + } + } +} diff --git a/test/files/neg/t6446-list/sample_2.flags b/test/files/neg/t6446-list/sample_2.flags new file mode 100644 index 0000000000..9cb3232964 --- /dev/null +++ b/test/files/neg/t6446-list/sample_2.flags @@ -0,0 +1 @@ +-Xplugin:. 
-Xplugin-list diff --git a/test/files/neg/t6446-list/sample_2.scala b/test/files/neg/t6446-list/sample_2.scala new file mode 100644 index 0000000000..73cdc64e40 --- /dev/null +++ b/test/files/neg/t6446-list/sample_2.scala @@ -0,0 +1,6 @@ + +package sample + +// just a sample that is compiled with the sample plugin enabled +object Sample extends App { +} diff --git a/test/files/neg/t6446-list/scalac-plugin.xml b/test/files/neg/t6446-list/scalac-plugin.xml new file mode 100644 index 0000000000..e849bb5919 --- /dev/null +++ b/test/files/neg/t6446-list/scalac-plugin.xml @@ -0,0 +1,4 @@ + +sample-plugin +t6446.Ploogin + diff --git a/test/files/neg/t6446-missing.check b/test/files/neg/t6446-missing.check new file mode 100755 index 0000000000..f976bf480e --- /dev/null +++ b/test/files/neg/t6446-missing.check @@ -0,0 +1,31 @@ +Warning: class not found: t6446.Ploogin + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + patmat 5 translate match expressions +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects + uncurry 10 uncurry, translate function values to anonymous classes + tailcalls 11 replace tail calls by jumps + specialize 12 @specialized-driven class and method specialization + explicitouter 13 this refs to outer pointers, translate patterns + erasure 14 erase types, add interfaces for traits + posterasure 15 clean up erased inline classes + lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs + lambdalift 17 move nested functions to top level + constructors 18 move field definitions into constructors + flatten 19 eliminate inner classes + mixin 20 mixin composition + cleanup 21 platform-specific cleanups, generate reflective calls + icode 22 generate portable intermediate code + inliner 23 optimization: do inlining +inlinehandlers 24 optimization: inline exception handlers + closelim 25 optimization: eliminate uncalled closures + dce 26 optimization: eliminate dead code + jvm 27 generate JVM bytecode + terminal 28 The last phase in the compiler chain diff --git a/test/files/neg/t6446-missing/sample_2.flags b/test/files/neg/t6446-missing/sample_2.flags new file mode 100644 index 0000000000..4d518c2286 --- /dev/null +++ b/test/files/neg/t6446-missing/sample_2.flags @@ -0,0 +1 @@ +-Xplugin:. 
-Xshow-phases diff --git a/test/files/neg/t6446-missing/sample_2.scala b/test/files/neg/t6446-missing/sample_2.scala new file mode 100644 index 0000000000..73cdc64e40 --- /dev/null +++ b/test/files/neg/t6446-missing/sample_2.scala @@ -0,0 +1,6 @@ + +package sample + +// just a sample that is compiled with the sample plugin enabled +object Sample extends App { +} diff --git a/test/files/neg/t6446-missing/scalac-plugin.xml b/test/files/neg/t6446-missing/scalac-plugin.xml new file mode 100644 index 0000000000..9c34d63f83 --- /dev/null +++ b/test/files/neg/t6446-missing/scalac-plugin.xml @@ -0,0 +1,4 @@ + +missing-plugin +t6446.Ploogin + diff --git a/test/files/neg/t6446-show-phases.check b/test/files/neg/t6446-show-phases.check new file mode 100644 index 0000000000..5bbe43990c --- /dev/null +++ b/test/files/neg/t6446-show-phases.check @@ -0,0 +1,30 @@ + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + patmat 5 translate match expressions +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects + uncurry 10 uncurry, translate function values to anonymous classes + tailcalls 11 replace tail calls by jumps + specialize 12 @specialized-driven class and method specialization + explicitouter 13 this refs to outer pointers, translate patterns + erasure 14 erase types, add interfaces for traits + posterasure 15 clean up erased inline classes + lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs + lambdalift 17 move nested functions to top level + constructors 18 move field definitions into constructors + flatten 19 eliminate inner classes + mixin 20 mixin composition + cleanup 21 platform-specific cleanups, generate reflective calls + icode 22 generate portable intermediate code + inliner 23 optimization: do inlining +inlinehandlers 24 optimization: inline exception handlers + closelim 25 optimization: eliminate uncalled closures + dce 26 optimization: eliminate dead code + jvm 27 generate JVM bytecode + terminal 28 The last phase in the compiler chain diff --git a/test/files/neg/t6446-show-phases.flags b/test/files/neg/t6446-show-phases.flags new file mode 100644 index 0000000000..845666e100 --- /dev/null +++ b/test/files/neg/t6446-show-phases.flags @@ -0,0 +1 @@ +-Xshow-phases diff --git a/test/files/neg/t6446-show-phases.scala b/test/files/neg/t6446-show-phases.scala new file mode 100644 index 0000000000..a9afb042d2 --- /dev/null +++ b/test/files/neg/t6446-show-phases.scala @@ -0,0 +1,3 @@ + +// testing compiler flag output only +object Test extends App diff --git a/test/files/pos/t4351.check b/test/files/pos/t4351.check deleted file mode 100644 index cb5d407e13..0000000000 --- a/test/files/pos/t4351.check +++ /dev/null @@ -1 +0,0 @@ -runtime exception diff --git a/test/files/pos/t4351.scala b/test/files/pos/t4351.scala deleted file mode 100644 index 2d57588793..0000000000 --- a/test/files/pos/t4351.scala +++ /dev/null @@ -1,20 +0,0 @@ -object Test { - def main(args: Array[String]): Unit = { - try new BooleanPropImpl() value - catch { - case e: RuntimeException => println("runtime exception") - } - } -} - -trait Prop[@specialized(Boolean) +T] { - def value: T -} - -class PropImpl[+T] extends Prop[T] { - def 
value: T = scala.sys.error("") -} - -trait BooleanProp extends Prop[Boolean] - -class BooleanPropImpl() extends PropImpl[Boolean] with BooleanProp diff --git a/test/files/run/inline-ex-handlers.scala b/test/files/run/inline-ex-handlers.scala index a96b938e13..33e794b940 100644 --- a/test/files/run/inline-ex-handlers.scala +++ b/test/files/run/inline-ex-handlers.scala @@ -1,7 +1,7 @@ import scala.tools.partest.IcodeTest object Test extends IcodeTest { - override def printIcodeAfterPhase = "inlineExceptionHandlers" + override def printIcodeAfterPhase = "inlinehandlers" } import scala.util.Random._ diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check index bdf76ddce1..d472c569d2 100644 --- a/test/files/run/programmatic-main.check +++ b/test/files/run/programmatic-main.check @@ -1,31 +1,31 @@ - phase name id description - ---------- -- ----------- - parser 1 parse source into ASTs, perform simple desugaring - namer 2 resolve names, attach symbols to named trees - packageobjects 3 load package objects - typer 4 the meat and potatoes: type the trees - patmat 5 translate match expressions - superaccessors 6 add super accessors in traits and nested classes - extmethods 7 add extension methods for inline classes - pickler 8 serialize symbol tables - refchecks 9 reference/override checking, translate nested objects - uncurry 10 uncurry, translate function values to anonymous classes - tailcalls 11 replace tail calls by jumps - specialize 12 @specialized-driven class and method specialization - explicitouter 13 this refs to outer pointers, translate patterns - erasure 14 erase types, add interfaces for traits - posterasure 15 clean up erased inline classes - lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs - lambdalift 17 move nested functions to top level - constructors 18 move field definitions into constructors - flatten 19 eliminate inner classes - mixin 20 mixin composition - cleanup 21 platform-specific cleanups, generate reflective calls - icode 22 generate portable intermediate code - inliner 23 optimization: do inlining -inlineExceptionHandlers 24 optimization: inline exception handlers - closelim 25 optimization: eliminate uncalled closures - dce 26 optimization: eliminate dead code - jvm 27 generate JVM bytecode - terminal 28 The last phase in the compiler chain + phase name id description + ---------- -- ----------- + parser 1 parse source into ASTs, perform simple desugaring + namer 2 resolve names, attach symbols to named trees +packageobjects 3 load package objects + typer 4 the meat and potatoes: type the trees + patmat 5 translate match expressions +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects + uncurry 10 uncurry, translate function values to anonymous classes + tailcalls 11 replace tail calls by jumps + specialize 12 @specialized-driven class and method specialization + explicitouter 13 this refs to outer pointers, translate patterns + erasure 14 erase types, add interfaces for traits + posterasure 15 clean up erased inline classes + lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs + lambdalift 17 move nested functions to top level + constructors 18 move field definitions into constructors + flatten 19 eliminate inner classes + mixin 20 mixin composition + cleanup 21 platform-specific cleanups, generate reflective calls + 
icode 22 generate portable intermediate code + inliner 23 optimization: do inlining +inlinehandlers 24 optimization: inline exception handlers + closelim 25 optimization: eliminate uncalled closures + dce 26 optimization: eliminate dead code + jvm 27 generate JVM bytecode + terminal 28 The last phase in the compiler chain diff --git a/test/files/run/t4351.check b/test/files/run/t4351.check new file mode 100644 index 0000000000..cb5d407e13 --- /dev/null +++ b/test/files/run/t4351.check @@ -0,0 +1 @@ +runtime exception diff --git a/test/files/run/t4351.scala b/test/files/run/t4351.scala new file mode 100644 index 0000000000..d954d748b7 --- /dev/null +++ b/test/files/run/t4351.scala @@ -0,0 +1,21 @@ +object Test { + def main(args: Array[String]): Unit = { + try new BooleanPropImpl().value + catch { + // was: StackOverflowError + case e: RuntimeException => println("runtime exception") + } + } +} + +trait Prop[@specialized(Boolean) +T] { + def value: T +} + +class PropImpl[+T] extends Prop[T] { + def value: T = scala.sys.error("") +} + +trait BooleanProp extends Prop[Boolean] + +class BooleanPropImpl() extends PropImpl[Boolean] with BooleanProp -- cgit v1.2.3 From 9e88ddf82e2d8aa779aab46aec284d5566deac14 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 12 Dec 2012 16:05:24 -0800 Subject: Eliminating var-like setter tpe_= on Tree. Deprecated tpe_= on Tree, which is redundant with and less useful than setType. To provide a small layer of insulation from the direct nulling out of mutable fields used to signal the typer, added def clearType() which is merely tree.tpe = null but is shamefaced about the null and var-settings parts like a respectable method should be. --- src/compiler/scala/tools/nsc/ast/Trees.scala | 7 ++-- .../scala/tools/nsc/backend/icode/GenICode.scala | 4 +-- .../scala/tools/nsc/transform/Erasure.scala | 12 +++---- src/compiler/scala/tools/nsc/transform/Mixin.scala | 17 +++++---- .../tools/nsc/transform/SpecializeTypes.scala | 11 +++--- .../tools/nsc/typechecker/ContextErrors.scala | 4 +-- .../scala/tools/nsc/typechecker/Contexts.scala | 4 +-- .../scala/tools/nsc/typechecker/Duplicators.scala | 33 +++++++---------- .../scala/tools/nsc/typechecker/EtaExpansion.scala | 6 ++-- .../scala/tools/nsc/typechecker/Namers.scala | 3 +- .../scala/tools/nsc/typechecker/TreeCheckers.scala | 9 ++--- .../scala/tools/nsc/typechecker/Typers.scala | 42 +++++++++++----------- .../tools/selectivecps/SelectiveANFTransform.scala | 2 +- .../tools/selectivecps/SelectiveCPSTransform.scala | 6 ++-- src/reflect/scala/reflect/internal/Importers.scala | 2 +- src/reflect/scala/reflect/internal/Symbols.scala | 4 +-- src/reflect/scala/reflect/internal/TreeGen.scala | 6 ++-- src/reflect/scala/reflect/internal/Trees.scala | 11 +++--- src/reflect/scala/reflect/macros/Universe.scala | 4 +-- 19 files changed, 85 insertions(+), 102 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 0e3e2fe644..4b5e23e177 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -343,9 +343,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => if (tpt.original != null) transform(tpt.original) else if (tpt.tpe != null && (tpt.wasEmpty || (tpt.tpe exists (tp => locals contains tp.typeSymbol)))) { - val dupl = tpt.duplicate - dupl.tpe = null - dupl + tpt.duplicate.clearType() } else tree case TypeApply(fn, args) if args map transform exists (_.isEmpty) => @@ -356,8 
+354,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => val dupl = tree.duplicate if (tree.hasSymbolField && (!localOnly || (locals contains tree.symbol)) && !(keepLabels && tree.symbol.isLabel)) dupl.symbol = NoSymbol - dupl.tpe = null - dupl + dupl.clearType() } } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index cc3562079e..2ea26ddaa9 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1643,9 +1643,7 @@ abstract class GenICode extends SubComponent { t match { case t @ Apply(_, args) if sym.isLabel && !boundLabels(sym) => val newSym = getLabel(sym.pos, sym.name) - val tree = Apply(global.gen.mkAttributedRef(newSym), transformTrees(args)) setPos t.pos - tree.tpe = t.tpe - tree + Apply(global.gen.mkAttributedRef(newSym), transformTrees(args)) setPos t.pos setType t.tpe case t @ LabelDef(name, params, rhs) => val newSym = getLabel(t.pos, name) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 45bd5cf003..13d3bb23cb 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -857,8 +857,7 @@ abstract class Erasure extends AddInterfaces alt => alt == first || !(first.tpe looselyMatches alt.tpe) } if (tree.symbol ne sym1) { - tree1.symbol = sym1 - tree1.tpe = sym1.tpe + tree1 setSymbol sym1 setType sym1.tpe } } tree1 @@ -1260,13 +1259,12 @@ abstract class Erasure extends AddInterfaces tree1 setType specialScalaErasure(tree1.tpe) case ArrayValue(elemtpt, trees) => treeCopy.ArrayValue( - tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform) setType null + tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform).clearType() case DefDef(_, _, _, _, tpt, _) => - val result = super.transform(tree1) setType null - tpt.tpe = specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType - result + try super.transform(tree1).clearType() + finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType case _ => - super.transform(tree1) setType null + super.transform(tree1).clearType() } } } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 45ef083b66..0769b67282 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -396,8 +396,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } sourceModule setInfo sym.tpe // Companion module isn't visible for anonymous class at this point anyway - assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass, - clazz + " has no sourceModule: sym = " + sym + " sym.tpe = " + sym.tpe) + assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass, s"$clazz has no sourceModule: $sym ${sym.tpe}") parents1 = List() decls1 = newScopeWith(decls.toList filter isImplementedStatically: _*) } else if (!parents.isEmpty) { @@ -545,12 +544,18 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } tree } + // !!! What is this doing, and why is it only looking for exactly + // one type parameter? 
It would seem to be + // "Map implementation class types in type-apply's to their interfaces" + // from the comment on preTransform, but is there some way we should know + // that impl class types in type applies can only appear in single + // type parameter type constructors? case Apply(tapp @ TypeApply(fn, List(arg)), List()) => if (arg.tpe.typeSymbol.isImplClass) { val ifacetpe = toInterface(arg.tpe) - arg.tpe = ifacetpe - tapp.tpe = MethodType(List(), ifacetpe) - tree.tpe = ifacetpe + arg setType ifacetpe + tapp setType MethodType(Nil, ifacetpe) + tree setType ifacetpe } tree case ValDef(_, _, _, _) if currentOwner.isImplClass => @@ -1129,7 +1134,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { // change every node type that refers to an implementation class to its // corresponding interface, unless the node's symbol is an implementation class. if (tree.tpe.typeSymbol.isImplClass && ((sym eq null) || !sym.isImplClass)) - tree.tpe = toInterface(tree.tpe) + tree modifyType toInterface tree match { case templ @ Template(parents, self, body) => diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 4e4c1b98ac..8e274f7647 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1240,9 +1240,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { class BodyDuplicator(_context: Context) extends super.BodyDuplicator(_context) { override def castType(tree: Tree, pt: Type): Tree = { - // log(" expected type: " + pt) - // log(" tree type: " + tree.tpe) - tree.tpe = if (tree.tpe != null) fixType(tree.tpe) else null + tree modifyType fixType // log(" tree type: " + tree.tpe) val ntree = if (tree.tpe != null && !(tree.tpe <:< pt)) { val casttpe = CastMap(tree.tpe) @@ -1250,8 +1248,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { else if (casttpe <:< CastMap(pt)) gen.mkCast(tree, pt) else tree } else tree - ntree.tpe = null - ntree + + ntree.clearType() } } @@ -1679,8 +1677,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { false) // don't make private fields public val newBody = symSubstituter(body(source).duplicate) - tpt.tpe = tpt.tpe.substSym(oldtparams, newtparams) - + tpt modifyType (_.substSym(oldtparams, newtparams)) copyDefDef(tree)(vparamss = List(newSyms map ValDef), rhs = newBody) } diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 30c12a4286..fe93a0ea76 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -154,7 +154,7 @@ trait ContextErrors { // the found/req types. 
val foundType: Type = req.normalize match { case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass => - val retyped = typed (tree.duplicate setType null) + val retyped = typed (tree.duplicate.clearType()) val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic) if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) found else { @@ -182,7 +182,7 @@ trait ContextErrors { } def ParentTypesError(templ: Template, ex: TypeError) = { - templ.tpe = null + templ.clearType() issueNormalTypeError(templ, ex.getMessage()) setError(templ) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index c0d2f44c7b..9ee0855bc6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -106,8 +106,8 @@ trait Contexts { self: Analyzer => var sc = startContext while (sc != NoContext) { sc.tree match { - case Import(qual, _) => qual.tpe = singleType(qual.symbol.owner.thisType, qual.symbol) - case _ => + case Import(qual, _) => qual setType singleType(qual.symbol.owner.thisType, qual.symbol) + case _ => } sc = sc.outer } diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 9c23b8663c..1c48eeed70 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -210,40 +210,35 @@ abstract class Duplicators extends Analyzer { tree match { case ttree @ TypeTree() => // log("fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol) - ttree.tpe = fixType(ttree.tpe) - ttree + ttree modifyType fixType case Block(stats, res) => debuglog("invalidating block") invalidateAll(stats) invalidate(res) - tree.tpe = null - super.typed(tree, mode, pt) + super.typed(tree.clearType(), mode, pt) case ClassDef(_, _, _, tmpl @ Template(parents, _, stats)) => // log("invalidating classdef " + tree) tmpl.symbol = tree.symbol.newLocalDummy(tree.pos) invalidateAll(stats, tree.symbol) - tree.tpe = null - super.typed(tree, mode, pt) + super.typed(tree.clearType(), mode, pt) case ddef @ DefDef(_, _, _, _, tpt, rhs) => - ddef.tpt.tpe = fixType(ddef.tpt.tpe) - ddef.tpe = null - super.typed(ddef, mode, pt) + ddef.tpt modifyType fixType + super.typed(ddef.clearType(), mode, pt) case vdef @ ValDef(mods, name, tpt, rhs) => // log("vdef fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol + " and " + invalidSyms) //if (mods.hasFlag(Flags.LAZY)) vdef.symbol.resetFlag(Flags.MUTABLE) // Martin to Iulian: lazy vars can now appear because they are no longer boxed; Please check that deleting this statement is OK. - vdef.tpt.tpe = fixType(vdef.tpt.tpe) - vdef.tpe = null - super.typed(vdef, mode, pt) + vdef.tpt modifyType fixType + super.typed(vdef.clearType(), mode, pt) case ldef @ LabelDef(name, params, rhs) => // log("label def: " + ldef) // in case the rhs contains any definitions -- TODO: is this necessary? invalidate(rhs) - ldef.tpe = null + ldef.clearType() // is this LabelDef generated by tailcalls? val isTailLabel = (ldef.params.length >= 1) && (ldef.params.head.name == nme.THIS) @@ -261,27 +256,23 @@ abstract class Duplicators extends Analyzer { val params1 = params map newParam val rhs1 = (new TreeSubstituter(params map (_.symbol), params1) transform rhs) // TODO: duplicate? 
- rhs1.tpe = null - super.typed(treeCopy.LabelDef(tree, name, params1, rhs1), mode, pt) + super.typed(treeCopy.LabelDef(tree, name, params1, rhs1.clearType()), mode, pt) case Bind(name, _) => // log("bind: " + tree) invalidate(tree) - tree.tpe = null - super.typed(tree, mode, pt) + super.typed(tree.clearType(), mode, pt) case Ident(_) if tree.symbol.isLabel => debuglog("Ident to labeldef " + tree + " switched to ") tree.symbol = updateSym(tree.symbol) - tree.tpe = null - super.typed(tree, mode, pt) + super.typed(tree.clearType(), mode, pt) case Ident(_) if (origtreesym ne null) && origtreesym.isLazy => debuglog("Ident to a lazy val " + tree + ", " + tree.symbol + " updated to " + origtreesym) tree.symbol = updateSym(origtreesym) - tree.tpe = null - super.typed(tree, mode, pt) + super.typed(tree.clearType(), mode, pt) case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) => // We use the symbol name instead of the tree name because the symbol diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala index 2806d7b2d9..4fbb788c7b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala +++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala @@ -94,11 +94,11 @@ trait EtaExpansion { self: Analyzer => // with repeated params, there might be more or fewer args than params liftout(arg, byName(i).getOrElse(false)) } - treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null + treeCopy.Apply(tree, liftoutPrefix(fn), newArgs).clearType() case TypeApply(fn, args) => - treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null + treeCopy.TypeApply(tree, liftoutPrefix(fn), args).clearType() case Select(qual, name) => - treeCopy.Select(tree, liftout(qual, false), name) setSymbol NoSymbol setType null + treeCopy.Select(tree, liftout(qual, false), name).clearType() setSymbol NoSymbol case Ident(name) => tree } diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 9a32747c3a..d524c88f43 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1361,8 +1361,7 @@ trait Namers extends MethodSynthesis { transformed(tree) = newImport // copy symbol and type attributes back into old expression // so that the structure builder will find it. 
- expr.symbol = expr1.symbol - expr.tpe = expr1.tpe + expr setSymbol expr1.symbol setType expr1.tpe ImportType(expr1) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index fb95c952d2..260bd87fdf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -192,11 +192,8 @@ abstract class TreeCheckers extends Analyzer { override def typed(tree: Tree, mode: Int, pt: Type): Tree = returning(tree) { case EmptyTree | TypeTree() => () case _ if tree.tpe != null => - tpeOfTree.getOrElseUpdate(tree, { - val saved = tree.tpe - tree.tpe = null - saved - }) + tpeOfTree.getOrElseUpdate(tree, try tree.tpe finally tree.clearType()) + wrap(tree)(super.typed(tree, mode, pt) match { case _: Literal => () case x if x ne tree => treesDiffer(tree, x) @@ -288,7 +285,7 @@ abstract class TreeCheckers extends Analyzer { if (oldtpe =:= tree.tpe) () else typesDiffer(tree, oldtpe, tree.tpe) - tree.tpe = oldtpe + tree setType oldtpe super.traverse(tree) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a1c1b53cce..f243569dc8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -53,7 +53,7 @@ trait Typers extends Modes with Adaptations with Tags { object UnTyper extends Traverser { override def traverse(tree: Tree) = { if (tree.canHaveAttrs) { - tree.tpe = null + tree.clearType() if (tree.hasSymbolField) tree.symbol = NoSymbol } super.traverse(tree) @@ -323,7 +323,7 @@ trait Typers extends Modes with Adaptations with Tags { def checkNonCyclic(defn: Tree, tpt: Tree) { if (!checkNonCyclic(defn.pos, tpt.tpe, defn.symbol)) { - tpt.tpe = ErrorType + tpt setType ErrorType defn.symbol.setInfo(ErrorType) } } @@ -813,7 +813,7 @@ trait Typers extends Modes with Adaptations with Tags { val tree1 = typed(resetAllAttrs(original), mode, WildcardType) // Q: `typed` already calls `addAnnotations` and `adapt`. the only difference here is that // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin. 
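      // For reference, the rewrite applied throughout this commit, shown schematically
      // (hypothetical `tree`, `someType`, `f`; setType/clearType/modifyType are the Tree
      // methods introduced above and return the tree itself, so calls can be chained):
      //   tree.tpe = null                        becomes  tree.clearType()
      //   tree.tpe = someType                    becomes  tree setType someType
      //   tree.tpe = f(tree.tpe)  (when non-null) becomes  tree modifyType f
      //   e.g.  dupl.tpe = null; dupl setSymbol NoSymbol   becomes  dupl.clearType() setSymbol NoSymbol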
- tree1.tpe = addAnnotations(tree1, tree1.tpe) + tree1 setType addAnnotations(tree1, tree1.tpe) if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree) } ) @@ -931,7 +931,7 @@ trait Typers extends Modes with Adaptations with Tags { tree setSymbol overloadedExtractorOfObject tree.tpe match { - case OverloadedType(pre, alts) => tree.tpe = overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe))) + case OverloadedType(pre, alts) => tree setType overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe))) case _ => } val unapply = unapplyMember(extractor.tpe) @@ -1584,7 +1584,7 @@ trait Typers extends Modes with Adaptations with Tags { if (preSuperVals.isEmpty && preSuperStats.nonEmpty) devWarning("Wanted to zip empty presuper val list with " + preSuperStats) else - map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe) + map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt setType ldef.symbol.tpe) if (superCall1 == cunit) EmptyTree else cbody2 case _ => @@ -1604,7 +1604,7 @@ trait Typers extends Modes with Adaptations with Tags { return explode(supersupertpt, supertpt1 :: acc) } } - if (supertpt.tpe.typeSymbol == AnyClass) supertpt.tpe = AnyRefClass.tpe + if (supertpt.tpe.typeSymbol == AnyClass) supertpt setType AnyRefClass.tpe supertpt :: acc } explode(first, Nil) ++ rest @@ -2278,7 +2278,7 @@ trait Typers extends Modes with Adaptations with Tags { if (!nme.isLoopHeaderLabel(ldef.symbol.name) || isPastTyper) { val restpe = ldef.symbol.tpe.resultType val rhs1 = typed(ldef.rhs, restpe) - ldef.params foreach (param => param.tpe = param.symbol.tpe) + ldef.params foreach (param => param setType param.symbol.tpe) deriveLabelDef(ldef)(_ => rhs1) setType restpe } else { @@ -2286,14 +2286,14 @@ trait Typers extends Modes with Adaptations with Tags { val rhs1 = typed(ldef.rhs) val restpe = rhs1.tpe if (restpe == initpe) { // stable result, no need to check again - ldef.params foreach (param => param.tpe = param.symbol.tpe) + ldef.params foreach (param => param setType param.symbol.tpe) treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe } else { context.scope.unlink(ldef.symbol) val sym2 = namer.enterInScope( context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe)) val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe) - ldef.params foreach (param => param.tpe = param.symbol.tpe) + ldef.params foreach (param => param setType param.symbol.tpe) deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe } } @@ -2399,7 +2399,7 @@ trait Typers extends Modes with Adaptations with Tags { val contextWithTypeBounds = context.nextEnclosing(_.tree.isInstanceOf[CaseDef]) if (contextWithTypeBounds.savedTypeBounds.nonEmpty) { - body1.tpe = contextWithTypeBounds restoreTypeBounds body1.tpe + body1 modifyType (contextWithTypeBounds restoreTypeBounds _) // insert a cast if something typechecked under the GADT constraints, // but not in real life (i.e., now that's we've reset the method's type skolems' @@ -3276,7 +3276,7 @@ trait Typers extends Modes with Adaptations with Tags { else None if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) { - //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType) + //Console.println(s"UNAPP: need to typetest, arg: ${arg.tpe} unappType: $unappType") val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree)) val unapplyContext = context.makeNewScope(context.tree, context.owner) freeVars foreach 
unapplyContext.scope.enter @@ -3286,12 +3286,12 @@ trait Typers extends Modes with Adaptations with Tags { // turn any unresolved type variables in freevars into existential skolems val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) - arg.tpe = pattp.substSym(freeVars, skolems) + arg setType pattp.substSym(freeVars, skolems) argDummy setInfo arg.tpe } - // setType null is necessary so that ref will be stabilized; see bug 881 - val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg))) + // clearing the type is necessary so that ref will be stabilized; see bug 881 + val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapp), List(arg))) if (fun1.tpe.isErroneous) duplErrTree else { @@ -3305,7 +3305,7 @@ trait Typers extends Modes with Adaptations with Tags { val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt) // SI-1048 val itype = glb(List(pt1, arg.tpe)) - arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking) + arg setType pt1 // restore type (arg is a dummy tree, just needs to pass typechecking) val unapply = UnApply(fun1, args1) setPos tree.pos setType itype // if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor @@ -4018,7 +4018,7 @@ trait Typers extends Modes with Adaptations with Tags { // Erroneous annotations were already reported in typedAnnotation arg1 // simply drop erroneous annotations else { - ann.tpe = atype + ann setType atype resultingTypeTree(atype) } } else { @@ -4029,7 +4029,7 @@ trait Typers extends Modes with Adaptations with Tags { else { if (ann.tpe == null) { val annotInfo = typedAnnotation(ann, annotMode) - ann.tpe = arg1.tpe.withAnnotation(annotInfo) + ann setType arg1.tpe.withAnnotation(annotInfo) } val atype = ann.tpe Typed(arg1, resultingTypeTree(atype)) setPos tree.pos setType atype @@ -4580,7 +4580,7 @@ trait Typers extends Modes with Adaptations with Tags { NoSymbol } if (phase.erasedTypes && qual.isInstanceOf[Super] && tree.symbol != NoSymbol) - qual.tpe = tree.symbol.owner.tpe + qual setType tree.symbol.owner.tpe if (!reallyExists(sym)) { def handleMissing: Tree = { @@ -5176,7 +5176,7 @@ trait Typers extends Modes with Adaptations with Tags { try { if (context.retyping && (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) { - tree.tpe = null + tree.clearType() if (tree.hasSymbolField) tree.symbol = NoSymbol } @@ -5200,7 +5200,7 @@ trait Typers extends Modes with Adaptations with Tags { tree1 } - tree1.tpe = addAnnotations(tree1, tree1.tpe) + tree1 modifyType (addAnnotations(tree1, _)) val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, tree) if (!alreadyTyped) { @@ -5212,7 +5212,7 @@ trait Typers extends Modes with Adaptations with Tags { result } catch { case ex: TypeError => - tree.tpe = null + tree.clearType() // The only problematic case are (recoverable) cyclic reference errors which can pop up almost anywhere. 
printTyping("caught %s: while typing %s".format(ex, tree)) //DEBUG diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala index f62eebaaa0..5775c662da 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala @@ -128,7 +128,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with def transformPureMatch(tree: Tree, selector: Tree, cases: List[CaseDef]) = { val caseVals = cases map { case cd @ CaseDef(pat, guard, body) => - // if (!hasPlusMarker(body.tpe)) body.tpe = body.tpe withAnnotation newPlusMarker() // TODO: to avoid warning + // if (!hasPlusMarker(body.tpe)) body modifyType (_ withAnnotation newPlusMarker()) // TODO: to avoid warning val bodyVal = transExpr(body, None, ext) // ??? triggers "cps-transformed unexpectedly" warning in transTailValue treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal) } diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala index 801c328177..2a6c1e1967 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala @@ -85,7 +85,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with //gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedIdent(ScalaPackage), //ScalaPackage.tpe.member("util")), ScalaPackage.tpe.member("util").tpe.member("continuations")), MethShiftR) //gen.mkAttributedRef(ModCPS.tpe, MethShiftR) // TODO: correct? - debuglog("funR.tpe = " + funR.tpe) + debuglog("funR.tpe: " + funR.tpe) Apply( TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))), args.map(transform(_)) @@ -97,7 +97,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with debuglog("found shiftUnit: " + tree) atPos(tree.pos) { val funR = gen.mkAttributedRef(MethShiftUnitR) // TODO: correct? - debuglog("funR.tpe = " + funR.tpe) + debuglog("funR.tpe: " + funR.tpe) Apply( TypeApply(funR, List(targs(0), targs(1))).setType(appliedType(funR.tpe, List(targs(0).tpe, targs(1).tpe))), @@ -110,7 +110,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with log("found reify: " + tree) atPos(tree.pos) { val funR = gen.mkAttributedRef(MethReifyR) // TODO: correct? 
- debuglog("funR.tpe = " + funR.tpe) + debuglog("funR.tpe: " + funR.tpe) Apply( TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))), args.map(transform(_)) diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index 29f1c7e1ca..53410b29c5 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ b/src/reflect/scala/reflect/internal/Importers.scala @@ -439,7 +439,7 @@ trait Importers extends api.Importers { self: SymbolTable => if (tt.original != null) mytt.setOriginal(importTree(tt.original)) case _ => if (mytree.hasSymbolField) mytree.symbol = importSymbol(tree.symbol) - mytree.tpe = importType(tree.tpe) + mytree setType importType(tree.tpe) } } }) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 8e776b8590..fd5c3909b8 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2632,8 +2632,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => * * {{{ * tsym is an instance of AbstractTypeSymbol - * tsym.info = TypeBounds(Nothing, Number) - * tsym.tpe = TypeRef(NoPrefix, T, List()) + * tsym.info == TypeBounds(Nothing, Number) + * tsym.tpe == TypeRef(NoPrefix, T, List()) * }}} */ class AbstractTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName) diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index f3aa37bd15..0954432c77 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -128,9 +128,9 @@ abstract class TreeGen extends macros.TreeBuilder { else mkAttributedIdent(sym) /** Replaces tree type with a stable type if possible */ - def stabilize(tree: Tree): Tree = { - for(tp <- stableTypeFor(tree)) tree.tpe = tp - tree + def stabilize(tree: Tree): Tree = stableTypeFor(tree) match { + case Some(tp) => tree setType tp + case _ => tree } /** Computes stable type for a tree if possible */ diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 6dc1ff157c..024e1b30bf 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -24,7 +24,9 @@ trait Trees extends api.Trees { self: SymbolTable => private[this] var rawtpe: Type = _ final def tpe = rawtpe - def tpe_=(t: Type) = rawtpe = t + @deprecated("Use setType", "2.11.0") def tpe_=(t: Type): Unit = setType(t) + + def clearType(): this.type = this setType null def setType(tp: Type): this.type = { rawtpe = tp; this } def defineType(tp: Type): this.type = setType(tp) @@ -1402,7 +1404,7 @@ trait Trees extends api.Trees { self: SymbolTable => class ThisSubstituter(clazz: Symbol, to: => Tree) extends Transformer { val newtpe = to.tpe override def transform(tree: Tree) = { - if (tree.tpe ne null) tree.tpe = tree.tpe.substThis(clazz, newtpe) + tree modifyType (_.substThis(clazz, newtpe)) tree match { case This(_) if tree.symbol == clazz => to case _ => super.transform(tree) @@ -1412,8 +1414,7 @@ trait Trees extends api.Trees { self: SymbolTable => class TypeMapTreeSubstituter(val typeMap: TypeMap) extends Traverser { override def traverse(tree: Tree) { - if (tree.tpe ne null) - tree.tpe = typeMap(tree.tpe) + tree modifyType typeMap if (tree.isDef) tree.symbol modifyInfo typeMap @@ -1445,8 +1446,8 @@ trait Trees extends api.Trees { self: 
SymbolTable => if (tree.symbol == from.head) tree setSymbol to.head else subst(from.tail, to.tail) } + tree modifyType symSubst - if (tree.tpe ne null) tree.tpe = symSubst(tree.tpe) if (tree.hasSymbolField) { subst(from, to) tree match { diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala index 4e76f7c408..31f3192a85 100644 --- a/src/reflect/scala/reflect/macros/Universe.scala +++ b/src/reflect/scala/reflect/macros/Universe.scala @@ -114,7 +114,7 @@ abstract class Universe extends scala.reflect.api.Universe { def setPos(newpos: Position): Tree /** Sets the `tpe` of the tree. Returns `Unit`. */ - def tpe_=(t: Type): Unit + @deprecated("Use setType", "2.11.0") def tpe_=(t: Type): Unit /** Sets the `tpe` of the tree. Returns the tree itself. */ def setType(tp: Type): Tree @@ -238,4 +238,4 @@ abstract class Universe extends scala.reflect.api.Universe { /** The AST that corresponds to this compilation unit. */ def body: Tree } -} \ No newline at end of file +} -- cgit v1.2.3 From 3781cbee8165ff72b5f597e266e8b2f8c95f6cb4 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Fri, 9 Nov 2012 22:22:42 -0800 Subject: Correct whitespace in `ASMTransformer.java`. Let's stick to 2 spaces for indentation (and no tabs). --- .../tools/partest/javaagent/ASMTransformer.java | 54 +++++++++++----------- 1 file changed, 27 insertions(+), 27 deletions(-) (limited to 'src') diff --git a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java index b6bec2f598..7338e2b01b 100644 --- a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java +++ b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java @@ -28,33 +28,33 @@ public class ASMTransformer implements ClassFileTransformer { public byte[] transform(final ClassLoader classLoader, String className, Class classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) { if (shouldTransform(className)) { - ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS) { - // this is copied verbatim from the superclass, - // except that we use the outer class loader - @Override protected String getCommonSuperClass(final String type1, final String type2) { - Class c, d; - try { - c = Class.forName(type1.replace('/', '.'), false, classLoader); - d = Class.forName(type2.replace('/', '.'), false, classLoader); - } catch (Exception e) { - throw new RuntimeException(e.toString()); - } - if (c.isAssignableFrom(d)) { - return type1; - } - if (d.isAssignableFrom(c)) { - return type2; - } - if (c.isInterface() || d.isInterface()) { - return "java/lang/Object"; - } else { - do { - c = c.getSuperclass(); - } while (!c.isAssignableFrom(d)); - return c.getName().replace('.', '/'); - } - } - }; + ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS) { + // this is copied verbatim from the superclass, + // except that we use the outer class loader + @Override protected String getCommonSuperClass(final String type1, final String type2) { + Class c, d; + try { + c = Class.forName(type1.replace('/', '.'), false, classLoader); + d = Class.forName(type2.replace('/', '.'), false, classLoader); + } catch (Exception e) { + throw new RuntimeException(e.toString()); + } + if (c.isAssignableFrom(d)) { + return type1; + } + if (d.isAssignableFrom(c)) { + return type2; + } + if (c.isInterface() || d.isInterface()) { + return "java/lang/Object"; + } else { + do { + c = 
c.getSuperclass(); + } while (!c.isAssignableFrom(d)); + return c.getName().replace('.', '/'); + } + } + }; ProfilerVisitor visitor = new ProfilerVisitor(writer); ClassReader reader = new ClassReader(classfileBuffer); reader.accept(visitor, 0); -- cgit v1.2.3 From ba6a3d6f87c0007196a4a82aaef58a290f87b864 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Fri, 9 Nov 2012 22:29:09 -0800 Subject: Set `canRetransform` flag to `false` in instrumentation. We do not need to retransform classes once they are loaded. All instrumentation byte-code is pushed at loading time. This fixes a problem with Java 7 that was failing to add a transformer because we did not declare retransformation capability in `MANIFEST.MF` file in Java agent jar. Java 6 allowed to add transformer due to a bug. --- src/partest/scala/tools/partest/javaagent/ProfilingAgent.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java index c2e4dc69f4..3b18987040 100644 --- a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java +++ b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java @@ -20,6 +20,6 @@ public class ProfilingAgent { // and the test-case itself won't be loaded yet. We rely here on the fact that ASMTransformer does // not depend on Scala library. In case our assumptions are wrong we can always insert call to // inst.retransformClasses. - inst.addTransformer(new ASMTransformer(), true); + inst.addTransformer(new ASMTransformer(), false); } } -- cgit v1.2.3 From b6dd2d2e14aa88ea42622ac6a6092bba8ddb300a Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Fri, 9 Nov 2012 22:58:47 -0800 Subject: Do not recompute stack frames when instrumenting bytecode. It turns out that we do not need to do that. See comment in `ProfilerVisitor.java`. Also, since recomputing stack frame map was the only reason we needed to implement `getCommonSuperClass` we can now remove its implementation that was causing problems on Java 7 due to a cyclic dependency involving class loader because we would try to load a class we are currently transforming and transformer is triggered just before classloading. //cc @namin who worked on this code with me. 
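For context, a minimal sketch (not part of the patch) of the two ASM recomputation modes this change trades between, assuming the standard org.objectweb.asm API:

    import org.objectweb.asm.ClassWriter;

    class WriterFlagsSketch {
      // COMPUTE_FRAMES recomputes stack map frames, which requires merging types via
      // getCommonSuperClass; its default implementation loads classes reflectively,
      // i.e. exactly the cyclic class-loading hazard described above.
      ClassWriter framesWriter = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
      // COMPUTE_MAXS only recomputes max stack size and local count and never loads
      // classes, so it is safe to use inside a ClassFileTransformer.
      ClassWriter maxsWriter = new ClassWriter(ClassWriter.COMPUTE_MAXS);
    }
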
--- .../tools/partest/javaagent/ASMTransformer.java | 33 ++++++---------------- .../tools/partest/javaagent/ProfilerVisitor.java | 13 +++++++++ 2 files changed, 21 insertions(+), 25 deletions(-) (limited to 'src') diff --git a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java index 7338e2b01b..878c8613d5 100644 --- a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java +++ b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java @@ -26,33 +26,16 @@ public class ASMTransformer implements ClassFileTransformer { className.startsWith("instrumented/")); } - public byte[] transform(final ClassLoader classLoader, String className, Class classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) { + public byte[] transform(final ClassLoader classLoader, final String className, Class classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) { if (shouldTransform(className)) { - ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS) { - // this is copied verbatim from the superclass, - // except that we use the outer class loader + ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) { @Override protected String getCommonSuperClass(final String type1, final String type2) { - Class c, d; - try { - c = Class.forName(type1.replace('/', '.'), false, classLoader); - d = Class.forName(type2.replace('/', '.'), false, classLoader); - } catch (Exception e) { - throw new RuntimeException(e.toString()); - } - if (c.isAssignableFrom(d)) { - return type1; - } - if (d.isAssignableFrom(c)) { - return type2; - } - if (c.isInterface() || d.isInterface()) { - return "java/lang/Object"; - } else { - do { - c = c.getSuperclass(); - } while (!c.isAssignableFrom(d)); - return c.getName().replace('.', '/'); - } + // Since we are not recomputing stack frame map, this should never be called we override this method because + // default implementation uses reflection for implementation and might try to load the class that we are + // currently processing. That leads to weird results like swallowed exceptions and classes being not + // transformed. + throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 + + ") while transforming " + className); } }; ProfilerVisitor visitor = new ProfilerVisitor(writer); diff --git a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java b/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java index ac83f66506..8306327b14 100644 --- a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java +++ b/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java @@ -33,6 +33,19 @@ public class ProfilerVisitor extends ClassVisitor implements Opcodes { // only instrument non-abstract methods if((access & ACC_ABSTRACT) == 0) { assert(className != null); + /* The following instructions do not modify compressed stack frame map so + * we don't need to worry about recalculating stack frame map. Specifically, + * let's quote "ASM 4.0, A Java bytecode engineering library" guide (p. 40): + * + * In order to save space, a compiled method does not contain one frame per + * instruction: in fact it contains only the frames for the instructions + * that correspond to jump targets or exception handlers, or that follow + * unconditional jump instructions. Indeed the other frames can be easily + * and quickly inferred from these ones. 
+ * + * Instructions below are just loading constants and calling a method so according + * to definition above they do not contribute to compressed stack frame map. + */ mv.visitLdcInsn(className); mv.visitLdcInsn(name); mv.visitLdcInsn(desc); -- cgit v1.2.3 From 44f650409a42b5b9292c8851aad39792939b9282 Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Mon, 3 Dec 2012 19:13:08 +0400 Subject: Fix scaladoc typo for isTerm method --- src/reflect/scala/reflect/api/Symbols.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala index b53c700701..a4a4277239 100644 --- a/src/reflect/scala/reflect/api/Symbols.scala +++ b/src/reflect/scala/reflect/api/Symbols.scala @@ -245,7 +245,7 @@ trait Symbols { self: Universe => /** Does this symbol represent the definition of a term? * Note that every symbol is either a term or a type. * So for every symbol `sym` (except for `NoSymbol`), - * either `sym.isTerm` is true or `sym.isTerm` is true. + * either `sym.isTerm` is true or `sym.isType` is true. * * @group Tests */ -- cgit v1.2.3 From cf7b51db3b289d2b1782ffb863912217936dcccb Mon Sep 17 00:00:00 2001 From: Erik Osheim Date: Mon, 17 Dec 2012 23:28:08 -0500 Subject: Fix Iterator#copyToArray (fixes SI-6827). As pointed out in #scala, when using a non-zero start it's possible to get an ArrayIndexOutOfBoundsException due to an incorrect bounds check. This patch fixes this, as well as another potential bounds error, and adds test cases. Incorporates some other suggestions by Som-Snytt to ensure that callers will get useful error messages in cases where the start parameter is wrong (negative or out-of-array-bounds). Review by @som-snytt. --- src/library/scala/collection/Iterator.scala | 6 ++++-- test/files/run/t6827.check | 15 ++++++++++++++ test/files/run/t6827.scala | 31 +++++++++++++++++++++++++++++ 3 files changed, 50 insertions(+), 2 deletions(-) create mode 100644 test/files/run/t6827.check create mode 100644 test/files/run/t6827.scala (limited to 'src') diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index d7dc202fad..cb7d2095bc 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -1109,12 +1109,14 @@ trait Iterator[+A] extends TraversableOnce[A] { * $willNotTerminateInf */ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = { + require(start >= 0 && start < xs.length, s"start $start out of range ${xs.length}") var i = start - val end = start + math.min(len, xs.length) - while (hasNext && i < end) { + val end = start + math.min(len, xs.length - start) + while (i < end && hasNext) { xs(i) = next() i += 1 } + // TODO: return i - start so the caller knows how many values read? } /** Tests if another iterator produces the same values as this one. 
diff --git a/test/files/run/t6827.check b/test/files/run/t6827.check new file mode 100644 index 0000000000..3a3a71c67d --- /dev/null +++ b/test/files/run/t6827.check @@ -0,0 +1,15 @@ +start at -5: java.lang.IllegalArgumentException: requirement failed: start -5 out of range 10 +start at -1: java.lang.IllegalArgumentException: requirement failed: start -1 out of range 10 +start at limit: java.lang.IllegalArgumentException: requirement failed: start 10 out of range 10 +start at limit-1: ok +first 10: ok +read all: ok +test huge len: ok +5 from 5: ok +20 from 5: ok +test len overflow: ok +start beyond limit: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10 +read 0: ok +read -1: ok +invalid read 0: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10 +invalid read -1: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10 diff --git a/test/files/run/t6827.scala b/test/files/run/t6827.scala new file mode 100644 index 0000000000..7e8918e3dc --- /dev/null +++ b/test/files/run/t6827.scala @@ -0,0 +1,31 @@ +object Test extends App { + val ns = (0 until 20) + val arr = new Array[Int](10) + + def tryit(label: String, start: Int, len: Int): Unit = { + val status = try { + val it = ns.toIterator + it.copyToArray(arr, start, len) + "ok" + } catch { + case e: Exception => e.toString + } + println("%s: %s" format (label, status)) + } + + tryit("start at -5", -5, 10) + tryit("start at -1", -1, 10) + tryit("start at limit", 10, 10) + tryit("start at limit-1", 9, 10) + tryit("first 10", 0, 10) + tryit("read all", 0, 20) + tryit("test huge len", 0, Int.MaxValue) + tryit("5 from 5", 5, 10) + tryit("20 from 5", 5, 20) + tryit("test len overflow", 5, Int.MaxValue) + tryit("start beyond limit", 30, 10) + tryit("read 0", 0, 0) + tryit("read -1", 0, -1) + tryit("invalid read 0", 30, 0) + tryit("invalid read -1", 30, -1) +} -- cgit v1.2.3 From 667e0a265fe6e45b0968973de78a7948118dc5ad Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 22 Dec 2012 09:03:07 -0800 Subject: Remove stray debugging output line. I finally reached my "CHECK THREAD ACCESS" limit. --- src/compiler/scala/tools/nsc/interactive/Global.scala | 1 - 1 file changed, 1 deletion(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index a34ebb2b8c..07ffe9e437 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -52,7 +52,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") import log.logreplay debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath) debugLog("classpath: "+classPath) - Console.err.println("\n ======= CHECK THREAD ACCESS compiler build ========\n") private var curTime = System.nanoTime private def timeStep = { -- cgit v1.2.3 From 9c5b207f59e7b16a8a5246082505624c4df214c5 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 17 Nov 2012 14:28:09 -0800 Subject: Rewrote FastTrack for clarity. We can say what we wish to say with more directness and with fewer vars, levels of indirection, public members, and implicit conversions. 
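To make the shape of that rewrite concrete before the diff below, here is a self-contained toy model of the pattern it adopts: a registry built in a single expression from key/handler pairs, where each handler wraps a partial function that can be checked (a "dry run") before it is actually applied. All names are invented; this is an illustration, not compiler code.

  object RegistrySketch {
    // Counterpart of FastTrackEntry: wraps a partial function and exposes a dry-run check.
    final class Entry(pf: PartialFunction[List[Int], Int]) extends (List[Int] => Int) {
      def validate(args: List[Int]): Boolean = pf.isDefinedAt(args)
      def apply(args: List[Int]): Int = pf(args)
    }

    // Counterpart of `make`: builds one key -> entry pair for the registry literal.
    private def make(key: String)(pf: PartialFunction[List[Int], Int]): (String, Entry) =
      key -> new Entry(pf)

    // Counterpart of `fastTrack`: one immutable map built up front, no mutable registry.
    lazy val handlers: Map[String, Entry] = Map(
      make("head") { case x :: _      => x },
      make("sum2") { case a :: b :: _ => a + b }
    )

    def run(key: String, args: List[Int]): Option[Int] =
      handlers.get(key).filter(_.validate(args)).map(_.apply(args))
  }
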
--- .../scala/tools/nsc/typechecker/Macros.scala | 7 +++- src/compiler/scala/tools/nsc/util/package.scala | 2 +- src/compiler/scala/tools/reflect/FastTrack.scala | 48 ++++++++++++---------- src/reflect/scala/reflect/internal/TreeInfo.scala | 2 + 4 files changed, 34 insertions(+), 25 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 4d1ab98fa0..6ed879af14 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -546,6 +546,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { /** Calculate the arguments to pass to a macro implementation when expanding the provided tree. */ case class MacroArgs(c: MacroContext, others: List[Any]) + private def macroArgs(typer: Typer, expandee: Tree): MacroArgs = { val macroDef = expandee.symbol val prefixTree = expandee.collect{ case Select(qual, name) => qual }.headOption.getOrElse(EmptyTree) @@ -574,9 +575,11 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { val preparedArgss: List[List[Any]] = if (fastTrack contains macroDef) { - if (fastTrack(macroDef) validate context) argss + // Take a dry run of the fast track implementation + if (fastTrack(macroDef) validate expandee) argss else typer.TyperErrorGen.MacroPartialApplicationError(expandee) - } else { + } + else { // if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences // consider the following example: // diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala index 792a659ad6..039fec8605 100644 --- a/src/compiler/scala/tools/nsc/util/package.scala +++ b/src/compiler/scala/tools/nsc/util/package.scala @@ -69,7 +69,7 @@ package object util { * (to exclude assert, require, etc.) */ def stackTraceHeadString(ex: Throwable): String = { - val frame = ex.getStackTrace.dropWhile(_.getClassName contains "Predef").head + val frame = ex.getStackTrace.dropWhile(_.getClassName contains "Predef") take 1 mkString "" val msg = ex.getMessage match { case null | "" => "" ; case s => s"""("$s")""" } val clazz = ex.getClass.getName.split('.').last diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala index d35ac43424..ac50324fa9 100644 --- a/src/compiler/scala/tools/reflect/FastTrack.scala +++ b/src/compiler/scala/tools/reflect/FastTrack.scala @@ -2,7 +2,9 @@ package scala.tools package reflect import scala.reflect.reify.Taggers -import scala.tools.nsc.typechecker.{Analyzer, Macros} +import scala.tools.nsc.typechecker.{ Analyzer, Macros } +import scala.reflect.runtime.Macros.currentMirror +import scala.reflect.api.Universe /** Optimizes system macro expansions by hardwiring them directly to their implementations * bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection. 
@@ -12,30 +14,32 @@ trait FastTrack { import global._ import definitions._ - import scala.language.implicitConversions - private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers - private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = new { val c: c0.type = c0 } with MacroImplementations + import treeInfo.Applied + + private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = + new { val c: c0.type = c0 } with Taggers + private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = + new { val c: c0.type = c0 } with MacroImplementations + private def make(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) = + sym -> new FastTrackEntry(pf) - implicit def fastTrackEntry2MacroRuntime(entry: FastTrackEntry): MacroRuntime = args => entry.run(args.c) - type FastTrackExpander = PartialFunction[(MacroContext, Tree), Tree] - case class FastTrackEntry(sym: Symbol, expander: FastTrackExpander) { - def validate(c: MacroContext): Boolean = expander.isDefinedAt((c, c.expandee)) - def run(c: MacroContext): Any = { - val result = expander((c, c.expandee)) - c.Expr[Nothing](result)(c.WeakTypeTag.Nothing) + final class FastTrackEntry(pf: PartialFunction[Applied, MacroContext => Tree]) extends (MacroArgs => Any) { + def validate(tree: Tree) = pf isDefinedAt Applied(tree) + def apply(margs: MacroArgs) = { + val MacroArgs(c, args) = margs + // Macros validated that the pf is defined here - and there's not much we could do if it weren't. + c.Expr[Nothing](pf(Applied(c.expandee))(c))(c.WeakTypeTag.Nothing) } } - lazy val fastTrack: Map[Symbol, FastTrackEntry] = { - var registry = Map[Symbol, FastTrackEntry]() - implicit class BindTo(sym: Symbol) { def bindTo(expander: FastTrackExpander): Unit = if (sym != NoSymbol) registry += sym -> FastTrackEntry(sym, expander) } - materializeClassTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List())) => c.materializeClassTag(tt.tpe) } - materializeWeakTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = false) } - materializeTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = true) } - ApiUniverseReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExpr(c.prefix.tree, EmptyTree, expr) } - ReflectRuntimeCurrentMirror bindTo { case (c, _) => scala.reflect.runtime.Macros.currentMirror(c).tree } - StringContext_f bindTo { case (c, app@Apply(Select(Apply(_, parts), _), args)) => c.macro_StringInterpolation_f(parts, args, app.pos) } - registry - } + /** A map from a set of pre-established macro symbols to their implementations. 
*/ + lazy val fastTrack = Map[Symbol, FastTrackEntry]( + make( materializeClassTag) { case Applied(_, ttag :: Nil, _) => _.materializeClassTag(ttag.tpe) }, + make( materializeWeakTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = false) }, + make( materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) }, + make( ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _) => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }, + make( StringContext_f) { case Applied(Select(Apply(_, ps), _), _, args) => c => c.macro_StringInterpolation_f(ps, args.flatten, c.expandee.pos) }, + make(ReflectRuntimeCurrentMirror) { case _ => c => currentMirror(c).tree } + ) } diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 13b761086c..9614513458 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -621,6 +621,8 @@ abstract class TreeInfo { * For advanced use, call `dissectApplied` explicitly and use its methods instead of pattern matching. */ object Applied { + def apply(tree: Tree): Applied = new Applied(tree) + def unapply(applied: Applied): Option[(Tree, List[Tree], List[List[Tree]])] = Some((applied.core, applied.targs, applied.argss)) -- cgit v1.2.3 From 21c4db241e7e85d338c1e179373266689ce9590a Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Thu, 13 Dec 2012 02:31:54 +0100 Subject: Moves annotationError outside typedAnnotation This refactoring allows everyone from the compiler, e.g. the macro engine which expands annotation-emitting macros, to produce annotation errors. --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 13 ++++++------- src/reflect/scala/reflect/internal/AnnotationInfos.scala | 2 ++ 2 files changed, 8 insertions(+), 7 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 4fd65c18d1..453dfd2f35 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3359,14 +3359,13 @@ trait Typers extends Modes with Adaptations with Tags { * @param annClass the expected annotation class */ def typedAnnotation(ann: Tree, mode: Int = EXPRmode, selfsym: Symbol = NoSymbol, annClass: Symbol = AnnotationClass, requireJava: Boolean = false): AnnotationInfo = { - lazy val annotationError = AnnotationInfo(ErrorType, Nil, Nil) var hasError: Boolean = false val pending = ListBuffer[AbsTypeError]() def reportAnnotationError(err: AbsTypeError) = { pending += err hasError = true - annotationError + ErroneousAnnotation } /** Calling constfold right here is necessary because some trees (negated @@ -3446,12 +3445,12 @@ trait Typers extends Modes with Adaptations with Tags { extract(ann, List()) } - val res = if (fun.isErroneous) annotationError + val res = if (fun.isErroneous) ErroneousAnnotation else { val typedFun @ Select(New(tpt), _) = typed(fun, forFunMode(mode), WildcardType) val annType = tpt.tpe - if (typedFun.isErroneous) annotationError + if (typedFun.isErroneous) ErroneousAnnotation else if (annType.typeSymbol isNonBottomSubClass ClassfileAnnotationClass) { // annotation to be saved as java classfile annotation val isJava = typedFun.symbol.owner.isJavaDefined @@ -3496,7 +3495,7 @@ trait Typers extends Modes with Adaptations with Tags 
{ reportAnnotationError(AnnotationMissingArgError(ann, annType, sym)) } - if (hasError) annotationError + if (hasError) ErroneousAnnotation else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos)) } } else if (requireJava) { @@ -3548,14 +3547,14 @@ trait Typers extends Modes with Adaptations with Tags { if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2) unit.deprecationWarning(ann.pos, "@deprecated now takes two arguments; see the scaladoc.") - if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) annotationError + if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation else annInfo(typedAnn) } } if (hasError) { pending.foreach(ErrorUtils.issueTypeError) - annotationError + ErroneousAnnotation } else res } diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index cfa4bdf44c..7a972c3f1a 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -326,6 +326,8 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil) + object ErroneousAnnotation extends CompleteAnnotationInfo(ErrorType, Nil, Nil) + /** Extracts symbol of thrown exception from AnnotationInfo. * * Supports both “old-style” `@throws(classOf[Exception])` -- cgit v1.2.3 From 2d612c38df4ecc4ad82ae26858de9e34ce66fcee Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Tue, 18 Dec 2012 14:54:09 +0100 Subject: adds Tree.nonEmpty So that trees become consistent with isEmpty on lists and options. --- src/reflect/scala/reflect/api/Trees.scala | 6 ++++++ src/reflect/scala/reflect/internal/Trees.scala | 2 ++ 2 files changed, 8 insertions(+) (limited to 'src') diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index cfa6315797..e4ee048f84 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -89,6 +89,12 @@ trait Trees { self: Universe => */ def isEmpty: Boolean + /** Is this tree one of the empty trees? + * + * @see `isEmpty` + */ + def nonEmpty: Boolean + /** Can this tree carry attributes (i.e. symbols, types or positions)? * Typically the answer is yes, except for the `EmptyTree` null object and * two special singletons: `emptyValDef` and `pendingSuperCall`. diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 9e737528d2..4dad3dd37a 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -39,6 +39,8 @@ trait Trees extends api.Trees { self: SymbolTable => def isDef = false def isEmpty = false + def nonEmpty = !isEmpty + def canHaveAttrs = true /** The canonical way to test if a Tree represents a term. 
-- cgit v1.2.3 From 0271b35f77379dea9194887b08910771a6c0faf8 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Sat, 8 Dec 2012 15:17:54 +0100 Subject: showRaw can now print positions --- src/reflect/scala/reflect/api/Printers.scala | 24 +++++++++++++---------- src/reflect/scala/reflect/internal/Printers.scala | 7 ++++--- 2 files changed, 18 insertions(+), 13 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala index 76df76cdc8..651eaa3333 100644 --- a/src/reflect/scala/reflect/api/Printers.scala +++ b/src/reflect/scala/reflect/api/Printers.scala @@ -143,6 +143,7 @@ trait Printers { self: Universe => protected var printIds = false protected var printKinds = false protected var printMirrors = false + protected var printPositions = false def withTypes: this.type = { printTypes = true; this } def withoutTypes: this.type = { printTypes = false; this } def withIds: this.type = { printIds = true; this } @@ -151,6 +152,8 @@ trait Printers { self: Universe => def withoutKinds: this.type = { printKinds = false; this } def withMirrors: this.type = { printMirrors = true; this } def withoutMirrors: this.type = { printMirrors = false; this } + def withPositions: this.type = { printPositions = true; this } + def withoutPositions: this.type = { printPositions = false; this } } /** @group Printers */ @@ -163,7 +166,7 @@ trait Printers { self: Universe => } /** @group Printers */ - protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String = { + protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String = { val buffer = new StringWriter() val writer = new PrintWriter(buffer) val printer = mkPrinter(writer) @@ -171,23 +174,24 @@ trait Printers { self: Universe => printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds) printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds) printMirrors.value.map(printMirrors => if (printMirrors) printer.withMirrors else printer.withoutMirrors) + printPositions.value.map(printPositions => if (printPositions) printer.withPositions else printer.withoutPositions) printer.print(what) writer.flush() buffer.toString } /** By default trees are printed with `show` - * @group Printers + * @group Printers */ override protected def treeToString(tree: Tree) = show(tree) /** Renders a representation of a reflection artifact - * as desugared Java code. + * as desugared Scala code. * - * @group Printers + * @group Printers */ - def show(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String = - render(any, newTreePrinter(_), printTypes, printIds, printKinds, printMirrors) + def show(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String = + render(any, newTreePrinter(_), printTypes, printIds, printKinds, printMirrors, printPositions) /** Hook to define what `show(...)` means. 
* @group Printers @@ -195,12 +199,12 @@ trait Printers { self: Universe => protected def newTreePrinter(out: PrintWriter): TreePrinter /** Renders internal structure of a reflection artifact as the - * visualization of a Scala syntax tree. + * visualization of a Scala syntax tree. * - * @group Printers + * @group Printers */ - def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String = - render(any, newRawTreePrinter(_), printTypes, printIds, printKinds, printMirrors) + def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String = + render(any, newRawTreePrinter(_), printTypes, printIds, printKinds, printMirrors, printPositions) /** Hook to define what `showRaw(...)` means. * @group Printers diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 2a3525206f..5d4db7ac18 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -67,12 +67,12 @@ trait Printers extends api.Printers { self: SymbolTable => printIds = settings.uniqid.value printKinds = settings.Yshowsymkinds.value printMirrors = false // typically there's no point to print mirrors inside the compiler, as there is only one mirror there - protected def doPrintPositions = settings.Xprintpos.value + printPositions = settings.Xprintpos.value def indent() = indentMargin += indentStep def undent() = indentMargin -= indentStep - def printPosition(tree: Tree) = if (doPrintPositions) print(tree.pos.show) + def printPosition(tree: Tree) = if (printPositions) print(tree.pos.show) def println() { out.println() @@ -389,7 +389,7 @@ trait Printers extends api.Printers { self: SymbolTable => print(x.escapedStringValue) case tt: TypeTree => - if ((tree.tpe eq null) || (doPrintPositions && tt.original != null)) { + if ((tree.tpe eq null) || (printPositions && tt.original != null)) { if (tt.original != null) print("") else print("") } else if ((tree.tpe.typeSymbol ne null) && tree.tpe.typeSymbol.isAnonymousClass) { @@ -550,6 +550,7 @@ trait Printers extends api.Printers { self: SymbolTable => printProduct( tree, preamble = _ => { + if (printPositions) print(tree.pos.show) print(tree.productPrefix) if (printTypes && tree.tpe != null) print(tree.tpe) }, -- cgit v1.2.3 From e5d34d70499504e085ddf957c1c818ffb63f4e8d Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Thu, 13 Dec 2012 00:46:06 +0100 Subject: the scanner is now less eager about deprecations When healing braces it isn't very useful to report deprecation warnings, especially since this process is just simple context-free skimming, which can't know about what positions can accept what identifiers. 
--- src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 11 ++++++++++- test/files/neg/macro-false-deprecation-warning.check | 4 ++++ test/files/neg/macro-false-deprecation-warning.flags | 1 + .../macro-false-deprecation-warning/Impls_Macros_1.scala | 15 +++++++++++++++ 4 files changed, 30 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/macro-false-deprecation-warning.check create mode 100644 test/files/neg/macro-false-deprecation-warning.flags create mode 100644 test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index af7f48988f..3025e4c440 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -104,6 +104,11 @@ trait Scanners extends ScannersCommon { cbuf.append(c) } + /** Determines whether this scanner should emit identifier deprecation warnings, + * e.g. when seeing `macro` or `then`, which are planned to become keywords in future versions of Scala. + */ + protected def emitIdentifierDeprecationWarnings = true + /** Clear buffer and set name and token */ private def finishNamed(idtoken: Int = IDENTIFIER) { name = newTermName(cbuf.toString) @@ -113,7 +118,7 @@ trait Scanners extends ScannersCommon { val idx = name.start - kwOffset if (idx >= 0 && idx < kwArray.length) { token = kwArray(idx) - if (token == IDENTIFIER && allowIdent != name) + if (token == IDENTIFIER && allowIdent != name && emitIdentifierDeprecationWarnings) deprecationWarning(name+" is now a reserved word; usage as an identifier is deprecated") } } @@ -1461,6 +1466,10 @@ trait Scanners extends ScannersCommon { delete(bracePairs) } + // don't emit deprecation warnings about identifiers like `macro` or `then` + // when skimming through the source file trying to heal braces + override def emitIdentifierDeprecationWarnings = false + override def error(offset: Int, msg: String) {} } } diff --git a/test/files/neg/macro-false-deprecation-warning.check b/test/files/neg/macro-false-deprecation-warning.check new file mode 100644 index 0000000000..7d56505ec4 --- /dev/null +++ b/test/files/neg/macro-false-deprecation-warning.check @@ -0,0 +1,4 @@ +Impls_Macros_1.scala:5: error: illegal start of simple expression +} +^ +one error found diff --git a/test/files/neg/macro-false-deprecation-warning.flags b/test/files/neg/macro-false-deprecation-warning.flags new file mode 100644 index 0000000000..cd66464f2f --- /dev/null +++ b/test/files/neg/macro-false-deprecation-warning.flags @@ -0,0 +1 @@ +-language:experimental.macros \ No newline at end of file diff --git a/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala new file mode 100644 index 0000000000..6dc2ea114b --- /dev/null +++ b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala @@ -0,0 +1,15 @@ +import scala.reflect.macros.Context + +object Helper { + def unapplySeq[T](x: List[T]): Option[Seq[T]] = +} + +object Macros { + def impl[T: c.WeakTypeTag](c: Context)(x: c.Expr[List[T]]) = { + c.universe.reify(Helper.unapplySeq(x.splice)) + } + + object UnapplyMacro { + def unapplySeq[T](x: List[T]): Option[Seq[T]] = macro impl[T] + } +} -- cgit v1.2.3 From e5ed594a89f4e468f3a9e754eb75687885908ba3 Mon Sep 17 00:00:00 2001 From: Den Shabalin Date: Sat, 8 Dec 2012 13:47:14 +0100 Subject: Adds extractors for TypeName, TermName and 
Modifiers This change allows to pattern match over type names, term names and modifiers. Otherwise it can be quite painful to match over complex trees as each name or modifiers requires a guard. This pull request also changes the name of default constructor for term and type names i.e. TypeName(s) instead of newTermName(s). This is shorter to type, more consistent with the rest of reflection api and consistent with the way it will be pattern matched later on. --- src/reflect/scala/reflect/api/Names.scala | 30 +++++++++++++- src/reflect/scala/reflect/api/Trees.scala | 8 +++- src/reflect/scala/reflect/internal/Names.scala | 10 +++++ src/reflect/scala/reflect/internal/Trees.scala | 2 +- test/files/scalacheck/ReflectionExtractors.scala | 52 ++++++++++++++++++++++++ 5 files changed, 98 insertions(+), 4 deletions(-) create mode 100644 test/files/scalacheck/ReflectionExtractors.scala (limited to 'src') diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala index 7c12f180a8..8add98d815 100644 --- a/src/reflect/scala/reflect/api/Names.scala +++ b/src/reflect/scala/reflect/api/Names.scala @@ -58,7 +58,7 @@ trait Names { * Can be used for pattern matching, instance tests, serialization and likes. * @group Tags */ -implicit val TypeNameTag: ClassTag[TypeName] + implicit val TypeNameTag: ClassTag[TypeName] /** The abstract type of names representing types. * @group Names @@ -109,10 +109,38 @@ implicit val TypeNameTag: ClassTag[TypeName] /** Create a new term name. * @group Names */ + @deprecated("Use TermName instead", "2.11.0") def newTermName(s: String): TermName /** Creates a new type name. * @group Names */ + @deprecated("Use TypeName instead", "2.11.0") def newTypeName(s: String): TypeName + + /** The constructor/extractor for `TermName` instances. + * @group Extractors + */ + val TermName: TermNameExtractor + + /** An extractor class to create and pattern match with syntax `TermName(s)`. + * @group Extractors + */ + abstract class TermNameExtractor { + def apply(s: String): TermName + def unapply(name: TermName): Option[String] + } + + /** The constructor/extractor for `TypeName` instances. + * @group Extractors + */ + val TypeName: TypeNameExtractor + + /** An extractor class to create and pattern match with syntax `TypeName(s)`. + * @group Extractors + */ + abstract class TypeNameExtractor { + def apply(s: String): TypeName + def unapply(name: TypeName): Option[String] + } } diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index cfa6315797..34be977905 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -3018,15 +3018,19 @@ trait Trees { self: Universe => /** The constructor/extractor for `Modifiers` instances. * @group Traversal */ - val Modifiers: ModifiersCreator + val Modifiers: ModifiersExtractor + + @deprecated("Use ModifiersExtractor instead", "2.11.0") + type ModifiersCreator = ModifiersExtractor /** An extractor class to create and pattern match with syntax `Modifiers(flags, privateWithin, annotations)`. * Modifiers encapsulate flags, visibility annotations and Scala annotations for member definitions. 
* @group Traversal */ - abstract class ModifiersCreator { + abstract class ModifiersExtractor { def apply(): Modifiers = Modifiers(NoFlags, tpnme.EMPTY, List()) def apply(flags: FlagSet, privateWithin: Name, annotations: List[Tree]): Modifiers + def unapply(mods: Modifiers): Option[(FlagSet, Name, List[Tree])] } /** The factory for `Modifiers` instances. diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index cea9215ae2..b60d1e619f 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -463,6 +463,11 @@ trait Names extends api.Names { implicit val TermNameTag = ClassTag[TermName](classOf[TermName]) + object TermName extends TermNameExtractor { + def apply(s: String) = newTermName(s) + def unapply(name: TermName): Option[String] = Some(name.toString) + } + sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) { type ThisNameType = TypeName protected[this] def thisName: TypeName = this @@ -492,4 +497,9 @@ trait Names extends api.Names { } implicit val TypeNameTag = ClassTag[TypeName](classOf[TypeName]) + + object TypeName extends TypeNameExtractor { + def apply(s: String) = newTypeName(s) + def unapply(name: TypeName): Option[String] = Some(name.toString) + } } diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 9e737528d2..9795299342 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -932,7 +932,7 @@ trait Trees extends api.Trees { self: SymbolTable => override def toString = "Modifiers(%s, %s, %s)".format(flagString, annotations mkString ", ", positions) } - object Modifiers extends ModifiersCreator + object Modifiers extends ModifiersExtractor implicit val ModifiersTag = ClassTag[Modifiers](classOf[Modifiers]) diff --git a/test/files/scalacheck/ReflectionExtractors.scala b/test/files/scalacheck/ReflectionExtractors.scala new file mode 100644 index 0000000000..a2615feb3e --- /dev/null +++ b/test/files/scalacheck/ReflectionExtractors.scala @@ -0,0 +1,52 @@ +import org.scalacheck._ +import Prop._ +import Gen._ +import Arbitrary._ + +import scala.reflect.runtime.universe._ +import Flag._ + +object Test extends Properties("reflection extractors") { + + val genFlag = oneOf( + TRAIT, INTERFACE, MUTABLE, MACRO, DEFERRED, ABSTRACT, FINAL, SEALED, + IMPLICIT, LAZY, OVERRIDE, PRIVATE, PROTECTED, LOCAL, CASE, ABSOVERRIDE, + BYNAMEPARAM, PARAM, COVARIANT, CONTRAVARIANT, DEFAULTPARAM, PRESUPER, + DEFAULTINIT + ) + val genModifiers = + for(flag <- genFlag; privateWithin <- genName) + yield Modifiers(flag, privateWithin, Nil) + val genTermName = for(name <- arbitrary[String]) yield TermName(name) + val genTypeName = for(name <- arbitrary[String]) yield TypeName(name) + val genName = oneOf(genTermName, genTypeName) + + implicit val arbTermName: Arbitrary[TermName] = Arbitrary(genTermName) + implicit val arbTypeName: Arbitrary[TypeName] = Arbitrary(genTypeName) + implicit val arbName: Arbitrary[Name] = Arbitrary(genName) + implicit val arbMods: Arbitrary[Modifiers] = Arbitrary(genModifiers) + + property("extract term name") = forAll { (name: TermName) => + val TermName(s) = name + s == name.toString + } + + property("extract type name") = forAll { (name: TypeName) => + val TypeName(s) = name + s == name.toString + } + + property("extract term or type name") = forAll { (name: Name) => + name match { + case TermName(s) => s == 
name.toString + case TypeName(s) => s == name.toString + } + } + + property("extract modifiers") = forAll { (mods: Modifiers) => + val Modifiers(flags, priv, annots) = mods + flags == mods.flags && + priv == mods.privateWithin && + annots == mods.annotations + } +} \ No newline at end of file -- cgit v1.2.3 From c2297185296d68006dc07a86b35ca4ca0ebbdff7 Mon Sep 17 00:00:00 2001 From: Den Shabalin Date: Sat, 8 Dec 2012 19:41:43 +0100 Subject: Changes reifier to use shorter name constructors --- src/compiler/scala/reflect/reify/codegen/GenNames.scala | 2 +- src/reflect/scala/reflect/internal/StdNames.scala | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/codegen/GenNames.scala b/src/compiler/scala/reflect/reify/codegen/GenNames.scala index 7c3c1d1149..4266c6f8d6 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenNames.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenNames.scala @@ -7,7 +7,7 @@ trait GenNames { import global._ def reifyName(name: Name) = { - val factory = if (name.isTypeName) nme.nmeNewTypeName else nme.nmeNewTermName + val factory = if (name.isTypeName) nme.TypeName else nme.TermName mirrorCall(factory, Literal(Constant(name.toString))) } } diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index c5521ae650..10dd2c82aa 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -640,8 +640,6 @@ trait StdNames { val newFreeType: NameType = "newFreeType" val newNestedSymbol: NameType = "newNestedSymbol" val newScopeWith: NameType = "newScopeWith" - val nmeNewTermName: NameType = "newTermName" - val nmeNewTypeName: NameType = "newTypeName" val notifyAll_ : NameType = "notifyAll" val notify_ : NameType = "notify" val null_ : NameType = "null" @@ -672,6 +670,7 @@ trait StdNames { val staticModule : NameType = "staticModule" val staticPackage : NameType = "staticPackage" val synchronized_ : NameType = "synchronized" + val TermName: NameType = "TermName" val this_ : NameType = "this" val thisPrefix : NameType = "thisPrefix" val toArray: NameType = "toArray" @@ -682,6 +681,7 @@ trait StdNames { val tree : NameType = "tree" val true_ : NameType = "true" val typedProductIterator: NameType = "typedProductIterator" + val TypeName: NameType = "TypeName" val typeTagToManifest: NameType = "typeTagToManifest" val unapply: NameType = "unapply" val unapplySeq: NameType = "unapplySeq" -- cgit v1.2.3 From 136bf70d79df1d8a65b98e094bbbc26cb5e3a96b Mon Sep 17 00:00:00 2001 From: Den Shabalin Date: Sat, 8 Dec 2012 19:43:06 +0100 Subject: Changes tree pretty printer to use shorter name constructors --- src/compiler/scala/reflect/reify/utils/NodePrinters.scala | 6 +++--- src/reflect/scala/reflect/internal/Printers.scala | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala index 86e50e0a68..0740f8d0b6 100644 --- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala +++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala @@ -36,8 +36,8 @@ trait NodePrinters { flagsAreUsed = true show(m.group(1).toLong) }) - s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()") - s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => { + s = s.replace("Modifiers(0L, TypeName(\"\"), 
List())", "Modifiers()") + s = """Modifiers\((\d+)[lL], TypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => { val buf = new scala.collection.mutable.ListBuffer[String] val annotations = m.group(3) @@ -46,7 +46,7 @@ trait NodePrinters { val privateWithin = "" + m.group(2) if (buf.nonEmpty || privateWithin != "") - buf.append("newTypeName(\"" + privateWithin + "\")") + buf.append("TypeName(\"" + privateWithin + "\")") val bits = m.group(1) if (buf.nonEmpty || bits != "0L") { diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 2a3525206f..d36d439077 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -672,7 +672,7 @@ trait Printers extends api.Printers { self: SymbolTable => case nme.CONSTRUCTOR => "nme.CONSTRUCTOR" case nme.ROOTPKG => "nme.ROOTPKG" case _ => - val prefix = if (name.isTermName) "newTermName(\"" else "newTypeName(\"" + val prefix = if (name.isTermName) "TermName(\"" else "TypeName(\"" prefix + name.toString + "\")" } -- cgit v1.2.3 From 9f5a021aa718a4c87b87610ef475982760140767 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Sat, 22 Dec 2012 17:41:33 +0100 Subject: renames c.fresh to c.freshName --- src/compiler/scala/reflect/macros/runtime/Names.scala | 13 +++++++++++-- .../scala/tools/reflect/MacroImplementations.scala | 2 +- src/reflect/scala/reflect/macros/Names.scala | 14 ++++++++++++++ 3 files changed, 26 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/macros/runtime/Names.scala b/src/compiler/scala/reflect/macros/runtime/Names.scala index ee9f3a56d3..635e8bcd45 100644 --- a/src/compiler/scala/reflect/macros/runtime/Names.scala +++ b/src/compiler/scala/reflect/macros/runtime/Names.scala @@ -7,11 +7,20 @@ trait Names { lazy val freshNameCreator = callsiteTyper.context.unit.fresh def fresh(): String = - freshNameCreator.newName() + freshName() def fresh(name: String): String = - freshNameCreator.newName(name) + freshName(name) def fresh[NameType <: Name](name: NameType): NameType = + freshName[NameType](name) + + def freshName(): String = + freshNameCreator.newName() + + def freshName(name: String): String = + freshNameCreator.newName(name) + + def freshName[NameType <: Name](name: NameType): NameType = name.mapName(freshNameCreator.newName(_)).asInstanceOf[NameType] } \ No newline at end of file diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala index d7c50504a8..ab967496c4 100644 --- a/src/compiler/scala/tools/reflect/MacroImplementations.scala +++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala @@ -37,7 +37,7 @@ abstract class MacroImplementations { val argsStack = Stack(args : _*) def defval(value: Tree, tpe: Type): Unit = { - val freshName = newTermName(c.fresh("arg$")) + val freshName = newTermName(c.freshName("arg$")) evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos ids += Ident(freshName) } diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala index 8bbaa5f848..7e2ac5e02d 100644 --- a/src/reflect/scala/reflect/macros/Names.scala +++ b/src/reflect/scala/reflect/macros/Names.scala @@ -11,13 +11,27 @@ trait Names { self: Context => /** Creates a unique string. 
*/ + @deprecated("Use freshName instead", "2.11.0") def fresh(): String /** Creates a unique string having a given prefix. */ + @deprecated("Use freshName instead", "2.11.0") def fresh(name: String): String /** Creates a unique name having a given name as a prefix and * having the same flavor (term name or type name) as the given name. */ + @deprecated("Use freshName instead", "2.11.0") def fresh[NameType <: Name](name: NameType): NameType + + /** Creates a unique string. */ + def freshName(): String + + /** Creates a unique string having a given prefix. */ + def freshName(name: String): String + + /** Creates a unique name having a given name as a prefix and + * having the same flavor (term name or type name) as the given name. + */ + def freshName[NameType <: Name](name: NameType): NameType } -- cgit v1.2.3 From 2375e2d878e6c493d9ab3097ef1d13d8641a6209 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Tue, 25 Dec 2012 02:08:21 +0100 Subject: enclosures are now strongly typed and are no longer vals --- .../scala/reflect/macros/runtime/Enclosures.scala | 19 +++++++--- src/reflect/scala/reflect/macros/Enclosures.scala | 44 ++++++++++++++++++---- test/files/run/macro-enclosures.check | 32 ++++++++++++++++ test/files/run/macro-enclosures.flags | 1 + .../run/macro-enclosures/Impls_Macros_1.scala | 14 +++++++ test/files/run/macro-enclosures/Test_2.scala | 11 ++++++ test/files/run/t6394a/Macros_1.scala | 2 +- 7 files changed, 109 insertions(+), 14 deletions(-) create mode 100644 test/files/run/macro-enclosures.check create mode 100644 test/files/run/macro-enclosures.flags create mode 100644 test/files/run/macro-enclosures/Impls_Macros_1.scala create mode 100644 test/files/run/macro-enclosures/Test_2.scala (limited to 'src') diff --git a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala index d9f337b5ba..e8b2961611 100644 --- a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala +++ b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala @@ -1,22 +1,31 @@ package scala.reflect.macros package runtime +import scala.reflect.{ClassTag, classTag} + trait Enclosures { self: Context => import universe._ - private def site = callsiteTyper.context - private def enclTrees = site.enclosingContextChain map (_.tree) - private def enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition) + private lazy val site = callsiteTyper.context + private lazy val enclTrees = site.enclosingContextChain map (_.tree) + private lazy val enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition) + + private def lenientEnclosure[T <: Tree : ClassTag]: Tree = enclTrees collectFirst { case x: T => x } getOrElse EmptyTree + private def strictEnclosure[T <: Tree : ClassTag]: T = enclTrees collectFirst { case x: T => x } getOrElse (throw new EnclosureException(classTag[T].runtimeClass, enclTrees)) // vals are eager to simplify debugging // after all we wouldn't save that much time by making them lazy val macroApplication: Tree = expandee - val enclosingClass: Tree = enclTrees collectFirst { case x: ImplDef => x } getOrElse EmptyTree + def enclosingPackage: PackageDef = strictEnclosure[PackageDef] + val enclosingClass: Tree = lenientEnclosure[ImplDef] + def enclosingImpl: ImplDef = strictEnclosure[ImplDef] + def enclosingTemplate: Template = strictEnclosure[Template] val enclosingImplicits: List[(Type, Tree)] = site.openImplicits val enclosingMacros: List[Context] = this :: 
universe.analyzer.openMacros // include self - val enclosingMethod: Tree = site.enclMethod.tree + val enclosingMethod: Tree = lenientEnclosure[DefDef] + def enclosingDef: DefDef = strictEnclosure[DefDef] val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos val enclosingUnit: CompilationUnit = universe.currentRun.currentUnit val enclosingRun: Run = universe.currentRun diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala index c48656b366..1e366ccbc3 100644 --- a/src/reflect/scala/reflect/macros/Enclosures.scala +++ b/src/reflect/scala/reflect/macros/Enclosures.scala @@ -15,7 +15,7 @@ trait Enclosures { /** The tree that undergoes macro expansion. * Can be useful to get an offset or a range position of the entire tree being processed. */ - val macroApplication: Tree + def macroApplication: Tree /** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only. * Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion. @@ -27,7 +27,7 @@ trait Enclosures { * Unlike `openMacros`, this is a val, which means that it gets initialized when the context is created * and always stays the same regardless of whatever happens during macro expansion. */ - val enclosingMacros: List[Context] + def enclosingMacros: List[Context] /** Types along with corresponding trees for which implicit arguments are currently searched. * Can be useful to get information about an application with an implicit parameter that is materialized during current macro expansion. @@ -35,28 +35,56 @@ trait Enclosures { * Unlike `openImplicits`, this is a val, which means that it gets initialized when the context is created * and always stays the same regardless of whatever happens during macro expansion. */ - val enclosingImplicits: List[(Type, Tree)] + def enclosingImplicits: List[(Type, Tree)] /** Tries to guess a position for the enclosing application. * But that is simple, right? Just dereference ``pos'' of ``macroApplication''? Not really. * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggerd this expansion. * Surprisingly, quite often we can do this by navigation the ``enclosingMacros'' stack. */ - val enclosingPosition: Position + def enclosingPosition: Position /** Tree that corresponds to the enclosing method, or EmptyTree if not applicable. */ - val enclosingMethod: Tree + @deprecated("Use enclosingDef instead, but be wary of changes in semantics", "2.10.1") + def enclosingMethod: Tree /** Tree that corresponds to the enclosing class, or EmptyTree if not applicable. */ - val enclosingClass: Tree + @deprecated("Use enclosingImpl instead, but be wary of changes in semantics", "2.10.1") + def enclosingClass: Tree + + /** Tree that corresponds to the enclosing DefDef tree. + * Throws `EnclosureException` if there's no such enclosing tree. + */ + def enclosingDef: universe.DefDef + + /** Tree that corresponds to the enclosing Template tree. + * Throws `EnclosureException` if there's no such enclosing tree. + */ + def enclosingTemplate: universe.Template + + /** Tree that corresponds to the enclosing ImplDef tree (i.e. either ClassDef or ModuleDef). + * Throws `EnclosureException` if there's no such enclosing tree. + */ + def enclosingImpl: universe.ImplDef + + /** Tree that corresponds to the enclosing PackageDef tree. 
+ * Throws `EnclosureException` if there's no such enclosing tree. + */ + def enclosingPackage: universe.PackageDef /** Compilation unit that contains this macro application. */ - val enclosingUnit: CompilationUnit + def enclosingUnit: CompilationUnit /** Compilation run that contains this macro application. */ - val enclosingRun: Run + def enclosingRun: Run + + /** Indicates than one of the enclosure methods failed to find a tree + * of required type among enclosing trees. + */ + case class EnclosureException(expected: Class[_], enclosingTrees: List[Tree]) + extends Exception(s"Couldn't find a tree of type $expected among enclosing trees $enclosingTrees") } \ No newline at end of file diff --git a/test/files/run/macro-enclosures.check b/test/files/run/macro-enclosures.check new file mode 100644 index 0000000000..36bb67e194 --- /dev/null +++ b/test/files/run/macro-enclosures.check @@ -0,0 +1,32 @@ +enclosingPackage = package test { + object Test extends scala.AnyRef { + def () = { + super.(); + () + }; + def test = Macros.foo + } +} +enclosingClass = object Test extends scala.AnyRef { + def () = { + super.(); + () + }; + def test = Macros.foo +} +enclosingImpl = object Test extends scala.AnyRef { + def () = { + super.(); + () + }; + def test = Macros.foo +} +enclosingTemplate = scala.AnyRef { + def () = { + super.(); + () + }; + def test = Macros.foo +} +enclosingMethod = def test = Macros.foo +enclosingDef = def test = Macros.foo diff --git a/test/files/run/macro-enclosures.flags b/test/files/run/macro-enclosures.flags new file mode 100644 index 0000000000..cd66464f2f --- /dev/null +++ b/test/files/run/macro-enclosures.flags @@ -0,0 +1 @@ +-language:experimental.macros \ No newline at end of file diff --git a/test/files/run/macro-enclosures/Impls_Macros_1.scala b/test/files/run/macro-enclosures/Impls_Macros_1.scala new file mode 100644 index 0000000000..cd54028676 --- /dev/null +++ b/test/files/run/macro-enclosures/Impls_Macros_1.scala @@ -0,0 +1,14 @@ +import scala.reflect.macros.Context + +object Macros { + def impl(c: Context) = c.universe.reify { + println("enclosingPackage = " + c.literal(c.enclosingPackage.toString).splice) + println("enclosingClass = " + c.literal(c.enclosingClass.toString).splice) + println("enclosingImpl = " + c.literal(c.enclosingImpl.toString).splice) + println("enclosingTemplate = " + c.literal(c.enclosingTemplate.toString).splice) + println("enclosingMethod = " + c.literal(c.enclosingMethod.toString).splice) + println("enclosingDef = " + c.literal(c.enclosingDef.toString).splice) + } + + def foo = macro impl +} \ No newline at end of file diff --git a/test/files/run/macro-enclosures/Test_2.scala b/test/files/run/macro-enclosures/Test_2.scala new file mode 100644 index 0000000000..779fe5211e --- /dev/null +++ b/test/files/run/macro-enclosures/Test_2.scala @@ -0,0 +1,11 @@ +object Test extends App { + test.Test.test +} + +package test { + object Test { + def test = { + Macros.foo + } + } +} \ No newline at end of file diff --git a/test/files/run/t6394a/Macros_1.scala b/test/files/run/t6394a/Macros_1.scala index 3d39d3e40a..5aa07e7f41 100644 --- a/test/files/run/t6394a/Macros_1.scala +++ b/test/files/run/t6394a/Macros_1.scala @@ -4,7 +4,7 @@ object Macros { def impl(c:Context): c.Expr[Any] = { import c.universe._ - val selfTree = This(c.enclosingClass.symbol.asModule.moduleClass) + val selfTree = This(c.enclosingImpl.symbol.asModule.moduleClass) c.Expr[AnyRef](selfTree) } -- cgit v1.2.3 From 48cdfefb95ee43ded08688d6c99a8c3a32d47f18 Mon Sep 17 00:00:00 2001 From: 
Eugene Burmako Date: Tue, 25 Dec 2012 14:09:33 +0100 Subject: macro expansions are now auto-duplicated The fix still requires macro developers to be careful about sharing trees by references, because attributed DefTrees will still bring trouble. However this is an improvement, because it doesn't make matters worse and automatically fixes situations similar to one in the test. A much more thorough discussion with a number of open questions left: http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc --- .../scala/tools/nsc/typechecker/Macros.scala | 8 +++--- src/reflect/scala/reflect/internal/Trees.scala | 7 ++++-- test/files/run/macro-duplicate.check | 0 test/files/run/macro-duplicate.flags | 1 + .../files/run/macro-duplicate/Impls_Macros_1.scala | 29 ++++++++++++++++++++++ test/files/run/macro-duplicate/Test_2.scala | 6 +++++ 6 files changed, 46 insertions(+), 5 deletions(-) create mode 100644 test/files/run/macro-duplicate.check create mode 100644 test/files/run/macro-duplicate.flags create mode 100644 test/files/run/macro-duplicate/Impls_Macros_1.scala create mode 100644 test/files/run/macro-duplicate/Test_2.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 4d1ab98fa0..86c59bc671 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -691,9 +691,11 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { var expectedTpe = expandee.tpe if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType - var typechecked = typecheck("macro def return type", expanded, expectedTpe) - typechecked = typecheck("expected type", typechecked, pt) - typechecked + // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc + val expanded0 = duplicateAndKeepPositions(expanded) + val expanded1 = typecheck("macro def return type", expanded0, expectedTpe) + val expanded2 = typecheck("expected type", expanded1, pt) + expanded2 } finally { popMacroContext() } diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 9e737528d2..ae9fca14cb 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -1499,15 +1499,18 @@ trait Trees extends api.Trees { self: SymbolTable => } } - private lazy val duplicator = new Transformer { + private lazy val duplicator = new Duplicator(focusPositions = true) + private class Duplicator(focusPositions: Boolean) extends Transformer { override val treeCopy = newStrictTreeCopier override def transform(t: Tree) = { val t1 = super.transform(t) - if ((t1 ne t) && t1.pos.isRange) t1 setPos t.pos.focus + if ((t1 ne t) && t1.pos.isRange && focusPositions) t1 setPos t.pos.focus t1 } } + def duplicateAndKeepPositions(tree: Tree) = new Duplicator(focusPositions = false) transform tree + // ------ copiers ------------------------------------------- def copyDefDef(tree: Tree)( diff --git a/test/files/run/macro-duplicate.check b/test/files/run/macro-duplicate.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/files/run/macro-duplicate.flags b/test/files/run/macro-duplicate.flags new file mode 100644 index 0000000000..cd66464f2f --- /dev/null +++ b/test/files/run/macro-duplicate.flags @@ -0,0 +1 @@ +-language:experimental.macros \ No newline at end of file diff --git 
a/test/files/run/macro-duplicate/Impls_Macros_1.scala b/test/files/run/macro-duplicate/Impls_Macros_1.scala new file mode 100644 index 0000000000..de81923330 --- /dev/null +++ b/test/files/run/macro-duplicate/Impls_Macros_1.scala @@ -0,0 +1,29 @@ +import scala.reflect.macros.Context + +object Macros { + def impl(c: Context) = { + import c.universe._ + val Expr(Block((cdef: ClassDef) :: Nil, _)) = reify { class C { def x = 2 } } + val cdef1 = + new Transformer { + override def transform(tree: Tree): Tree = tree match { + case Template(_, _, ctor :: defs) => + val defs1 = defs collect { + case ddef @ DefDef(mods, name, tparams, vparamss, tpt, body) => + val future = Select(Select(Select(Ident(newTermName("scala")), newTermName("concurrent")), newTermName("package")), newTermName("future")) + val Future = Select(Select(Ident(newTermName("scala")), newTermName("concurrent")), newTypeName("Future")) + val tpt1 = if (tpt.isEmpty) tpt else AppliedTypeTree(Future, List(tpt)) + val body1 = Apply(future, List(body)) + val name1 = newTermName("async" + name.toString.capitalize) + DefDef(mods, name1, tparams, vparamss, tpt1, body1) + } + Template(Nil, emptyValDef, ctor +: defs ::: defs1) + case _ => + super.transform(tree) + } + } transform cdef + c.Expr[Unit](Block(cdef1 :: Nil, Literal(Constant(())))) + } + + def foo = macro impl +} \ No newline at end of file diff --git a/test/files/run/macro-duplicate/Test_2.scala b/test/files/run/macro-duplicate/Test_2.scala new file mode 100644 index 0000000000..6dbd4382d3 --- /dev/null +++ b/test/files/run/macro-duplicate/Test_2.scala @@ -0,0 +1,6 @@ +import scala.concurrent._ +import ExecutionContext.Implicits.global + +object Test extends App { + Macros.foo +} \ No newline at end of file -- cgit v1.2.3 From 6084d2d948bb92c5153e0e4391c3bf80d2eafe38 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 25 Dec 2012 21:51:18 -0800 Subject: Removed old pattern matcher. 
--- .../scala/tools/nsc/matching/MatchSupport.scala | 115 --- src/compiler/scala/tools/nsc/matching/Matrix.scala | 232 ------ .../scala/tools/nsc/matching/MatrixAdditions.scala | 191 ----- .../tools/nsc/matching/ParallelMatching.scala | 866 --------------------- .../scala/tools/nsc/matching/PatternBindings.scala | 126 --- .../scala/tools/nsc/matching/Patterns.scala | 457 ----------- .../scala/tools/nsc/settings/ScalaSettings.scala | 1 - .../scala/tools/nsc/transform/ExplicitOuter.scala | 82 +- .../scala/tools/nsc/transform/UnCurry.scala | 163 +--- .../scala/tools/nsc/typechecker/Infer.scala | 3 +- .../tools/nsc/typechecker/PatternMatching.scala | 4 +- .../scala/tools/nsc/typechecker/Typers.scala | 19 +- .../internal/settings/MutableSettings.scala | 1 - src/reflect/scala/reflect/runtime/Settings.scala | 1 - test/files/jvm/interpreter.check | 6 +- test/files/jvm/interpreter.scala | 2 +- test/files/neg/pat_unreachable.check | 19 +- test/files/neg/pat_unreachable.flags | 2 +- test/files/neg/t3692-new.check | 11 +- test/files/neg/t3692-new.flags | 2 +- test/files/neg/t3692-old.check | 14 - test/files/neg/t3692-old.flags | 1 - test/files/neg/t3692-old.scala | 19 - test/files/neg/unreachablechar.check | 7 +- test/files/neg/unreachablechar.flags | 2 +- test/files/pos/t1439.flags | 2 +- test/files/run/patmat_unapp_abstype-old.check | 4 - test/files/run/patmat_unapp_abstype-old.flags | 1 - test/files/run/patmat_unapp_abstype-old.scala | 83 -- test/files/run/t3835.scala | 2 +- 30 files changed, 46 insertions(+), 2392 deletions(-) delete mode 100644 src/compiler/scala/tools/nsc/matching/MatchSupport.scala delete mode 100644 src/compiler/scala/tools/nsc/matching/Matrix.scala delete mode 100644 src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala delete mode 100644 src/compiler/scala/tools/nsc/matching/ParallelMatching.scala delete mode 100644 src/compiler/scala/tools/nsc/matching/PatternBindings.scala delete mode 100644 src/compiler/scala/tools/nsc/matching/Patterns.scala delete mode 100644 test/files/neg/t3692-old.check delete mode 100644 test/files/neg/t3692-old.flags delete mode 100644 test/files/neg/t3692-old.scala delete mode 100644 test/files/run/patmat_unapp_abstype-old.check delete mode 100644 test/files/run/patmat_unapp_abstype-old.flags delete mode 100644 test/files/run/patmat_unapp_abstype-old.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala deleted file mode 100644 index 3c26997cfe..0000000000 --- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala +++ /dev/null @@ -1,115 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * Author: Paul Phillips - */ - -package scala.tools.nsc -package matching - -import scala.annotation.elidable -import scala.language.postfixOps - -/** Ancillary bits of ParallelMatching which are better off - * out of the way. 
- */ -trait MatchSupport extends ast.TreeDSL { self: ParallelMatching => - - import global.{ typer => _, _ } - import CODE._ - - /** Debugging support: enable with -Ypmat-debug **/ - private final def trace = settings.Ypmatdebug.value - - def impossible: Nothing = abort("this never happens") - - object Types { - import definitions._ - - val subrangeTypes = Set[Symbol](ByteClass, ShortClass, CharClass, IntClass) - - implicit class RichType(undecodedTpe: Type) { - def tpe = decodedEqualsType(undecodedTpe) - def isAnyRef = tpe <:< AnyRefClass.tpe - - // These tests for final classes can inspect the typeSymbol - private def is(s: Symbol) = tpe.typeSymbol eq s - def isInt = is(IntClass) - def isNothing = is(NothingClass) - } - } - - object Debug { - def treeToString(t: Tree): String = treeInfo.unbind(t) match { - case EmptyTree => "?" - case WILD() => "_" - case Literal(Constant(x)) => "LIT(%s)".format(x) - case Apply(fn, args) => "%s(%s)".format(treeToString(fn), args map treeToString mkString ",") - case Typed(expr, tpt) => "%s: %s".format(treeToString(expr), treeToString(tpt)) - case x => x.toString + " (" + x.getClass + ")" - } - - // Formatting for some error messages - private val NPAD = 15 - def pad(s: String): String = "%%%ds" format (NPAD-1) format s - - // pretty print for debugging - def pp(x: Any): String = pp(x, false) - def pp(x: Any, newlines: Boolean): String = { - val stripStrings = List("""java\.lang\.""", """\$iw\.""") - - def clean(s: String): String = - stripStrings.foldLeft(s)((s, x) => s.replaceAll(x, "")) - - def pplist(xs: List[Any]): String = - if (newlines) (xs map (" " + _ + "\n")).mkString("\n", "", "") - else xs.mkString("(", ", ", ")") - - pp(x match { - case s: String => return clean(s) - case x: Tree => asCompactString(x) - case xs: List[_] => pplist(xs map pp) - case x: Tuple2[_,_] => "%s -> %s".format(pp(x._1), pp(x._2)) - case x => x.toString - }) - } - - @elidable(elidable.FINE) def TRACE(f: String, xs: Any*): Unit = { - if (trace) { - val msg = if (xs.isEmpty) f else f.format(xs map pp: _*) - println(msg) - } - } - @elidable(elidable.FINE) def traceCategory(cat: String, f: String, xs: Any*) = { - if (trace) - TRACE("[" + """%10s""".format(cat) + "] " + f, xs: _*) - } - def tracing[T](s: String)(x: T): T = { - if (trace) - println(("[" + """%10s""".format(s) + "] %s") format pp(x)) - - x - } - private[nsc] def printing[T](fmt: String, xs: Any*)(x: T): T = { - println(fmt.format(xs: _*) + " == " + x) - x - } - private[nsc] def debugging[T](fmt: String, xs: Any*)(x: T): T = { - if (settings.debug.value) printing(fmt, xs: _*)(x) - else x - } - - def indentAll(s: Seq[Any]) = s map (" " + _.toString() + "\n") mkString - } - - /** Drops the 'i'th element of a list. - */ - def dropIndex[T](xs: List[T], n: Int) = { - val (l1, l2) = xs splitAt n - l1 ::: (l2 drop 1) - } - - /** Extract the nth element of a list and return it and the remainder. 
- */ - def extractIndex[T](xs: List[T], n: Int): (T, List[T]) = - (xs(n), dropIndex(xs, n)) -} diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala deleted file mode 100644 index ba966acf34..0000000000 --- a/src/compiler/scala/tools/nsc/matching/Matrix.scala +++ /dev/null @@ -1,232 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * Author: Paul Phillips - */ - -package scala.tools.nsc -package matching - -import transform.ExplicitOuter -import symtab.Flags -import scala.collection.mutable -import scala.language.implicitConversions - -trait Matrix extends MatrixAdditions { - self: ExplicitOuter with ParallelMatching => - - import global.{ typer => _, _ } - import analyzer.Typer - import CODE._ - import Debug._ - import Flags.{ SYNTHETIC, MUTABLE } - - private[matching] val NO_EXHAUSTIVE = Flags.TRANS_FLAG - - /** Translation of match expressions. - * - * `p`: pattern - * `g`: guard - * `bx`: body index - * - * internal representation is (tvars:List[Symbol], rows:List[Row]) - * - * tmp1 tmp_n - * Row( p_11 ... p_1n g_1 b_1 ) + subst - * - * Row( p_m1 ... p_mn g_m b_m ) + subst - * - * Implementation based on the algorithm described in - * - * "A Term Pattern-Match Compiler Inspired by Finite Automata Theory" - * Mikael Pettersson - * ftp://ftp.ida.liu.se/pub/labs/pelab/papers/cc92pmc.ps.gz - * - * @author Burak Emir - */ - - /** "The Mixture Rule" - - {v=pat1, pats1 .. } {q1} - match {.. } {..} - {v=patn, patsn .. } {qn} - - The is the real work-horse of the algorithm. There is some column whose top-most pattern is a - constructor. (Forsimplicity, itisdepicted above asthe left-most column, but anycolumn will do.) - The goal is to build a test state with the variablevand some outgoing arcs (one for each construc- - tor and possibly a default arc). Foreach constructor in the selected column, its arc is defined as - follows: - - Let {i1,...,ij} be the rows-indices of the patterns in the column that match c. Since the pat- - terns are viewed as regular expressions, this will be the indices of the patterns that either - have the same constructor c, or are wildcards. - - Let {pat1,...,patj} be the patterns in the column corresponding to the indices computed - above, and let nbe the arity of the constructor c, i.e. the number of sub-patterns it has. For - eachpati, its n sub-patterns are extracted; if pat i is a wildcard, nwildcards are produced - instead, each tagged with the right path variable. This results in a pattern matrix with n - columns and j rows. This matrix is then appended to the result of selecting, from each col- - umn in the rest of the original matrix, those rows whose indices are in {i1,...,ij}. Finally - the indices are used to select the corresponding final states that go with these rows. Note - that the order of the indices is significant; selected rows do not change their relative orders. - The arc for the constructor c is now defined as (c’,state), where c’ is cwith any - immediate sub-patterns replaced by their path variables (thus c’ is a simple pattern), and - state is the result of recursively applying match to the new matrix and the new sequence - of final states. - - Finally, the possibility for matching failure is considered. If the set of constructors is exhaustive, - then no more arcs are computed. Otherwise, a default arc(_,state)is the last arc. 
If there are - any wildcard patterns in the selected column, then their rows are selected from the rest of the - matrix and the final states, and the state is the result of applying match to the new matrix and - states. Otherwise,the error state is used after its reference count has been incremented. - **/ - - /** Handles all translation of pattern matching. - */ - def handlePattern( - selector: Tree, // tree being matched upon (called scrutinee after this) - cases: List[CaseDef], // list of cases in the match - isChecked: Boolean, // whether exhaustiveness checking is enabled (disabled with @unchecked) - context: MatrixContext): Tree = - { - import context._ - TRACE("handlePattern", "(%s: %s) match { %s cases }", selector, selector.tpe, cases.size) - - val matrixInit: MatrixInit = { - val v = copyVar(selector, isChecked, selector.tpe, "temp") - MatrixInit(List(v), cases, atPos(selector.pos)(MATCHERROR(v.ident))) - } - val matrix = new MatchMatrix(context) { lazy val data = matrixInit } - val mch = typer typed matrix.expansion.toTree - val dfatree = typer typed Block(matrix.data.valDefs, mch) - - // redundancy check - matrix.targets filter (_.unreached) foreach (cs => cunit.error(cs.body.pos, "unreachable code")) - // optimize performs squeezing and resets any remaining NO_EXHAUSTIVE - tracing("handlePattern")(matrix optimize dfatree) - } - - case class MatrixContext( - cunit: CompilationUnit, // current unit - handleOuter: Tree => Tree, // for outer pointer - typer: Typer, // a local typer - owner: Symbol, // the current owner - matchResultType: Type) // the expected result type of the whole match - extends Squeezer - { - private def ifNull[T](x: T, alt: T) = if (x == null) alt else x - - // NO_EXHAUSTIVE communicates there should be no exhaustiveness checking - private def flags(checked: Boolean) = if (checked) Nil else List(NO_EXHAUSTIVE) - - // Recording the symbols of the synthetics we create so we don't go clearing - // anyone else's mutable flags. 
- private val _syntheticSyms = mutable.HashSet[Symbol]() - def clearSyntheticSyms() = { - _syntheticSyms foreach (_ resetFlag (NO_EXHAUSTIVE|MUTABLE)) - debuglog("Cleared NO_EXHAUSTIVE/MUTABLE on " + _syntheticSyms.size + " synthetic symbols.") - _syntheticSyms.clear() - } - def recordSyntheticSym(sym: Symbol): Symbol = { - _syntheticSyms += sym - if (_syntheticSyms.size > 25000) { - cunit.error(owner.pos, "Sanity check failed: over 25000 symbols created for pattern match.") - abort("This is a bug in the pattern matcher.") - } - sym - } - - case class MatrixInit( - roots: List[PatternVar], - cases: List[CaseDef], - default: Tree - ) { - def valDefs = roots map (_.valDef) - override def toString() = "MatrixInit(roots = %s, %d cases)".format(pp(roots), cases.size) - } - - implicit def pvlist2pvgroup(xs: List[PatternVar]): PatternVarGroup = - PatternVarGroup(xs) - - object PatternVarGroup { - def apply(xs: PatternVar*) = new PatternVarGroup(xs.toList) - def apply(xs: List[PatternVar]) = new PatternVarGroup(xs) - } - - val emptyPatternVarGroup = PatternVarGroup() - class PatternVarGroup(val pvs: List[PatternVar]) { - def syms = pvs map (_.sym) - def valDefs = pvs map (_.valDef) - - def extractIndex(index: Int): (PatternVar, PatternVarGroup) = { - val (t, ts) = self.extractIndex(pvs, index) - (t, PatternVarGroup(ts)) - } - - def isEmpty = pvs.isEmpty - def size = pvs.size - def :::(ts: List[PatternVar]) = PatternVarGroup(ts ::: pvs) - - def apply(i: Int) = pvs(i) - def zipWithIndex = pvs.zipWithIndex - def indices = pvs.indices - - override def toString() = pp(pvs) - } - - /** Every temporary variable allocated is put in a PatternVar. - */ - class PatternVar(val lhs: Symbol, val rhs: Tree, val checked: Boolean) { - def sym = lhs - def tpe = lhs.tpe - if (checked) - lhs resetFlag NO_EXHAUSTIVE - else - lhs setFlag NO_EXHAUSTIVE - - // See #1427 for an example of a crash which occurs unless we retype: - // in that instance there is an existential in the pattern. - lazy val ident = typer typed Ident(lhs) - lazy val valDef = typer typedValDef ValDef(lhs, rhs) - - override def toString() = "%s: %s = %s".format(lhs, tpe, rhs) - } - - /** Given a tree, creates a new synthetic variable of the same type - * and assigns the tree to it. - */ - def copyVar( - root: Tree, - checked: Boolean, - _tpe: Type = null, - label: String = "temp"): PatternVar = - { - val tpe = ifNull(_tpe, root.tpe) - val name = cunit.freshTermName(label) - val sym = newVar(root.pos, tpe, flags(checked), name) - - tracing("copy")(new PatternVar(sym, root, checked)) - } - - /** Creates a new synthetic variable of the specified type and - * assigns the result of f(symbol) to it. 
- */ - def createVar(tpe: Type, f: Symbol => Tree, checked: Boolean) = { - val lhs = newVar(owner.pos, tpe, flags(checked)) - val rhs = f(lhs) - - tracing("create")(new PatternVar(lhs, rhs, checked)) - } - - private def newVar( - pos: Position, - tpe: Type, - flags: List[Long], - name: TermName = null): Symbol = - { - val n = if (name == null) cunit.freshTermName("temp") else name - // careful: pos has special meaning - val flagsLong = (SYNTHETIC.toLong /: flags)(_|_) - recordSyntheticSym(owner.newVariable(n, pos, flagsLong) setInfo tpe) - } - } -} diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala deleted file mode 100644 index b1ca6e7b5a..0000000000 --- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala +++ /dev/null @@ -1,191 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * Author: Paul Phillips - */ - -package scala.tools.nsc -package matching - -import transform.ExplicitOuter - -/** Traits which are mixed into MatchMatrix, but separated out as - * (somewhat) independent components to keep them on the sidelines. - */ -trait MatrixAdditions extends ast.TreeDSL { - self: ExplicitOuter with ParallelMatching => - - import global.{ typer => _, _ } - import symtab.Flags - import Debug._ - import treeInfo._ - import definitions.{ isPrimitiveValueClass } - - /** The Squeezer, responsible for all the squeezing. - */ - private[matching] trait Squeezer { - self: MatrixContext => - - private val settings_squeeze = !settings.Ynosqueeze.value - - class RefTraverser(vd: ValDef) extends Traverser { - private val targetSymbol = vd.symbol - private var safeRefs = 0 - private var isSafe = true - - def canDrop = isSafe && safeRefs == 0 - def canInline = isSafe && safeRefs == 1 - - override def traverse(tree: Tree): Unit = tree match { - case t: Ident if t.symbol eq targetSymbol => - // target symbol's owner should match currentOwner - if (targetSymbol.owner == currentOwner) safeRefs += 1 - else isSafe = false - - case LabelDef(_, params, rhs) => - if (params exists (_.symbol eq targetSymbol)) // cannot substitute this one - isSafe = false - - traverse(rhs) - case _ if safeRefs > 1 => () - case _ => - super.traverse(tree) - } - } - - /** Compresses multiple Blocks. */ - private def combineBlocks(stats: List[Tree], expr: Tree): Tree = expr match { - case Block(stats1, expr1) if stats.isEmpty => combineBlocks(stats1, expr1) - case _ => Block(stats, expr) - } - def squeezedBlock(vds: List[Tree], exp: Tree): Tree = - if (settings_squeeze) combineBlocks(Nil, squeezedBlock1(vds, exp)) - else combineBlocks(vds, exp) - - private def squeezedBlock1(vds: List[Tree], exp: Tree): Tree = { - lazy val squeezedTail = squeezedBlock(vds.tail, exp) - def default = squeezedTail match { - case Block(vds2, exp2) => Block(vds.head :: vds2, exp2) - case exp2 => Block(vds.head :: Nil, exp2) - } - - if (vds.isEmpty) exp - else vds.head match { - case vd: ValDef => - val rt = new RefTraverser(vd) - rt.atOwner(owner)(rt traverse squeezedTail) - - if (rt.canDrop) - squeezedTail - else if (isConstantType(vd.symbol.tpe) || rt.canInline) - new TreeSubstituter(List(vd.symbol), List(vd.rhs)) transform squeezedTail - else - default - case _ => default - } - } - } - - /** The Optimizer, responsible for some of the optimizing. 
- */ - private[matching] trait MatchMatrixOptimizer { - self: MatchMatrix => - - import self.context._ - - final def optimize(tree: Tree): Tree = { - // Uses treeInfo extractors rather than looking at trees directly - // because the many Blocks obscure our vision. - object lxtt extends Transformer { - override def transform(tree: Tree): Tree = tree match { - case Block(stats, ld @ LabelDef(_, _, body)) if targets exists (_ shouldInline ld.symbol) => - squeezedBlock(transformStats(stats, currentOwner), body) - case IsIf(cond, IsTrue(), IsFalse()) => - transform(cond) - case IsIf(cond1, IsIf(cond2, thenp, elsep1), elsep2) if elsep1 equalsStructure elsep2 => - transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, elsep2)) - case If(cond1, IsIf(cond2, thenp, Apply(jmp, Nil)), ld: LabelDef) if jmp.symbol eq ld.symbol => - transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, ld)) - case _ => - super.transform(tree) - } - } - try lxtt transform tree - finally clearSyntheticSyms() - } - } - - /** The Exhauster. - */ - private[matching] trait MatrixExhaustiveness { - self: MatchMatrix => - - import self.context._ - - /** Exhaustiveness checking requires looking for sealed classes - * and if found, making sure all children are covered by a pattern. - */ - class ExhaustivenessChecker(rep: Rep, matchPos: Position) { - val Rep(tvars, rows) = rep - - import Flags.{ MUTABLE, ABSTRACT, SEALED } - - private case class Combo(index: Int, sym: Symbol) { } - - /* True if the patterns in 'row' cover the given type symbol combination, and has no guard. */ - private def rowCoversCombo(row: Row, combos: List[Combo]) = - row.guard.isEmpty && combos.forall(c => row.pats(c.index) covers c.sym) - - private def requiresExhaustive(sym: Symbol) = { - (sym.isMutable) && // indicates that have not yet checked exhaustivity - !(sym hasFlag NO_EXHAUSTIVE) && // indicates @unchecked - (sym.tpe.typeSymbol.isSealed) && - !isPrimitiveValueClass(sym.tpe.typeSymbol) // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte - } - - private lazy val inexhaustives: List[List[Combo]] = { - // let's please not get too clever side-effecting the mutable flag. - val toCollect = tvars.zipWithIndex filter { case (pv, i) => requiresExhaustive(pv.sym) } - val collected = toCollect map { case (pv, i) => - // okay, now reset the flag - pv.sym resetFlag MUTABLE - - i -> ( - pv.tpe.typeSymbol.sealedDescendants.toList sortBy (_.sealedSortName) - // symbols which are both sealed and abstract need not be covered themselves, because - // all of their children must be and they cannot otherwise be created. - filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)) - // have to filter out children which cannot match: see ticket #3683 for an example - filter (_.tpe matchesPattern pv.tpe) - ) - } - - val folded = - collected.foldRight(List[List[Combo]]())((c, xs) => { - val (i, syms) = c match { case (i, set) => (i, set.toList) } - xs match { - case Nil => syms map (s => List(Combo(i, s))) - case _ => for (s <- syms ; rest <- xs) yield Combo(i, s) :: rest - } - }) - - folded filterNot (combo => rows exists (r => rowCoversCombo(r, combo))) - } - - private def mkPad(xs: List[Combo], i: Int): String = xs match { - case Nil => pad("*") - case Combo(j, sym) :: rest => if (j == i) pad(sym.name.toString) else mkPad(rest, i) - } - private def mkMissingStr(open: List[Combo]) = - "missing combination %s\n" format tvars.indices.map(mkPad(open, _)).mkString - - /** The only public method. 
*/ - def check = { - def errMsg = (inexhaustives map mkMissingStr).mkString - if (inexhaustives.nonEmpty) - cunit.warning(matchPos, "match is not exhaustive!\n" + errMsg) - - rep - } - } - } -} diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala deleted file mode 100644 index b5e25f3809..0000000000 --- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala +++ /dev/null @@ -1,866 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * Copyright 2007 Google Inc. All Rights Reserved. - * Author: bqe@google.com (Burak Emir) - */ - -package scala.tools.nsc -package matching - -import PartialFunction._ -import scala.collection.{ mutable } -import transform.ExplicitOuter -import mutable.ListBuffer -import scala.language.postfixOps - -trait ParallelMatching extends ast.TreeDSL - with MatchSupport - with Matrix - with Patterns - with PatternBindings -{ - self: ExplicitOuter => - - import global.{ typer => _, _ } - import definitions.{ - IntClass, BooleanClass, SomeClass, OptionClass, - getProductArgs, productProj, Object_eq, Any_asInstanceOf - } - import CODE._ - import Types._ - import Debug._ - - /** Transition **/ - def toPats(xs: List[Tree]): List[Pattern] = xs map Pattern.apply - - /** The umbrella matrix class. **/ - abstract class MatchMatrix(val context: MatrixContext) extends MatchMatrixOptimizer with MatrixExhaustiveness { - import context._ - - def data: MatrixContext#MatrixInit - - lazy val MatrixInit(roots, cases, failTree) = data - lazy val (rows, targets) = expand(roots, cases).unzip - lazy val expansion: Rep = make(roots, rows) - - private val shortCuts = perRunCaches.newMap[Int, Symbol]() - - final def createShortCut(theLabel: Symbol): Int = { - val key = shortCuts.size + 1 - shortCuts(key) = theLabel - -key - } - def createLabelDef(namePrefix: String, body: Tree, params: List[Symbol] = Nil, restpe: Type = matchResultType) = { - val labelName = cunit.freshTermName(namePrefix) - val labelSym = owner.newLabel(labelName, owner.pos) - val labelInfo = MethodType(params, restpe) - - LabelDef(labelSym setInfo labelInfo, params, body setType restpe) - } - - /** This is the recursively focal point for translating the current - * list of pattern variables and a list of pattern match rows into - * a tree suitable for entering erasure. - * - * The first time it is called, the variables are (copies of) the - * original pattern matcher roots, and the rows correspond to the - * original casedefs. - */ - final def make(roots1: PatternVarGroup, rows1: List[Row]): Rep = { - traceCategory("New Match", "%sx%s (%s)", roots1.size, rows1.size, roots1.syms.mkString(", ")) - def classifyPat(opat: Pattern, j: Int): Pattern = opat simplify roots1(j) - - val newRows = rows1 flatMap (_ expandAlternatives classifyPat) - if (rows1.length != newRows.length) make(roots1, newRows) // recursive call if any change - else { - val rep = Rep(roots1, newRows) - new ExhaustivenessChecker(rep, roots.head.sym.pos).check - rep - } - } - - override def toString() = "MatchMatrix(%s) { %s }".format(matchResultType, indentAll(targets)) - - /** - * Encapsulates a symbol being matched on. It is created from a - * PatternVar, which encapsulates the symbol's creation and assignment. - * - * We never match on trees directly - a temporary variable is created - * (in a PatternVar) for any expression being matched on. 
- */ - class Scrutinee(val pv: PatternVar) { - import definitions._ - - // presenting a face of our symbol - def sym = pv.sym - def tpe = sym.tpe - def pos = sym.pos - def id = ID(sym) setPos pos // attributed ident - - def accessors = if (isCaseClass) sym.caseFieldAccessors else Nil - def accessorTypes = accessors map (x => (tpe memberType x).resultType) - - lazy val accessorPatternVars = PatternVarGroup( - for ((accessor, tpe) <- accessors zip accessorTypes) yield - createVar(tpe, _ => fn(id, accessor)) - ) - - private def extraValDefs = if (pv.rhs.isEmpty) Nil else List(pv.valDef) - def allValDefs = extraValDefs ::: accessorPatternVars.valDefs - - // tests - def isDefined = sym ne NoSymbol - def isSubrangeType = subrangeTypes(tpe.typeSymbol) - def isCaseClass = tpe.typeSymbol.isCase - - // sequences - def seqType = tpe.widen baseType SeqClass - def elemType = tpe typeArgs 0 - - private def elemAt(i: Int) = (id DOT (tpe member nme.apply))(LIT(i)) - private def createElemVar(i: Int) = createVar(elemType, _ => elemAt(i)) - private def createSeqVar(drop: Int) = createVar(seqType, _ => id DROP drop) - - def createSequenceVars(count: Int): List[PatternVar] = - (0 to count).toList map (i => if (i < count) createElemVar(i) else createSeqVar(i)) - - // for propagating "unchecked" to synthetic vars - def isChecked = !(sym hasFlag NO_EXHAUSTIVE) - // def flags: List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _) - - // this is probably where this actually belongs - def createVar(tpe: Type, f: Symbol => Tree) = context.createVar(tpe, f, isChecked) - - def castedTo(headType: Type) = - if (tpe =:= headType) this - else new Scrutinee(createVar(headType, lhs => gen.mkAsInstanceOf(id, lhs.tpe))) - - override def toString() = "(%s: %s)".format(id, tpe) - } - - def isPatternSwitch(scrut: Scrutinee, ps: List[Pattern]): Option[PatternSwitch] = { - def isSwitchableConst(x: Pattern) = cond(x) { case x: LiteralPattern if x.isSwitchable => true } - def isSwitchableDefault(x: Pattern) = isSwitchableConst(x) || x.isDefault - - // TODO - scala> (5: Any) match { case 5 => 5 ; case 6 => 7 } - // ... should compile to a switch. It doesn't because the scrut isn't Int/Char, but - // that could be handle in an if/else since every pattern requires an Int. - // More immediately, Byte and Short scruts should also work. - if (!scrut.isSubrangeType) None - else { - val (_lits, others) = ps span isSwitchableConst - val lits = _lits collect { case x: LiteralPattern => x } - - condOpt(others) { - case Nil => new PatternSwitch(scrut, lits, None) - // TODO: This needs to also allow the case that the last is a compatible type pattern. 
- case List(x) if isSwitchableDefault(x) => new PatternSwitch(scrut, lits, Some(x)) - } - } - } - - class PatternSwitch( - scrut: Scrutinee, - override val ps: List[LiteralPattern], - val defaultPattern: Option[Pattern] - ) extends PatternMatch(scrut, ps) { - require(scrut.isSubrangeType && (ps forall (_.isSwitchable))) - } - - case class PatternMatch(scrut: Scrutinee, ps: List[Pattern]) { - def head = ps.head - def tail = ps.tail - // def size = ps.length - - def headType = head.necessaryType - private val dummyCount = if (head.isCaseClass) headType.typeSymbol.caseFieldAccessors.length else 0 - def dummies = emptyPatterns(dummyCount) - - def apply(i: Int): Pattern = ps(i) - def pzip() = ps.zipWithIndex - def pzip[T](others: List[T]) = { - assert(ps.size == others.size, "Internal error: ps = %s, others = %s".format(ps, others)) - ps zip others - } - - // Any unapply - returns Some(true) if a type test is needed before the unapply can - // be called (e.g. def unapply(x: Foo) = { ... } but our scrutinee is type Any.) - object AnyUnapply { - def unapply(x: Pattern): Option[Boolean] = condOpt(x.tree) { - case UnapplyParamType(tpe) => !(scrut.tpe <:< tpe) - } - } - - def mkRule(rest: Rep): RuleApplication = { - tracing("Rule")(head match { - case x if isEquals(x.tree.tpe) => new MixEquals(this, rest) - case x: SequencePattern => new MixSequence(this, rest, x) - case AnyUnapply(false) => new MixUnapply(this, rest) - case _ => - isPatternSwitch(scrut, ps) match { - case Some(x) => new MixLiteralInts(x, rest) - case _ => new MixTypes(this, rest) - } - }) - } - override def toString() = "%s match {%s}".format(scrut, indentAll(ps)) - } // PatternMatch - - /***** Rule Applications *****/ - - sealed abstract class RuleApplication { - def pmatch: PatternMatch - def rest: Rep - def cond: Tree - def success: Tree - def failure: Tree - - lazy val PatternMatch(scrut, patterns) = pmatch - lazy val head = pmatch.head - lazy val codegen: Tree = IF (cond) THEN (success) ELSE (failure) - - def mkFail(xs: List[Row]): Tree = - if (xs.isEmpty) failTree - else remake(xs).toTree - - def remake( - rows: List[Row], - pvgroup: PatternVarGroup = emptyPatternVarGroup, - includeScrut: Boolean = true): Rep = - { - val scrutpvs = if (includeScrut) List(scrut.pv) else Nil - make(pvgroup.pvs ::: scrutpvs ::: rest.tvars, rows) - } - - /** translate outcome of the rule application into code (possible involving recursive application of rewriting) */ - def tree(): Tree - - override def toString = - "Rule/%s (%s =^= %s)".format(getClass.getSimpleName, scrut, head) - } - - /** {case ... if guard => bx} else {guardedRest} */ - /** VariableRule: The top-most rows has only variable (non-constructor) patterns. 
*/ - case class VariableRule(subst: Bindings, guard: Tree, guardedRest: Rep, bx: Int) extends RuleApplication { - def pmatch: PatternMatch = impossible - def rest: Rep = guardedRest - - private lazy val (valDefs, successTree) = targets(bx) applyBindings subst.toMap - lazy val cond = guard - lazy val success = successTree - lazy val failure = guardedRest.toTree - - final def tree(): Tree = - if (bx < 0) REF(shortCuts(-bx)) - else squeezedBlock( - valDefs, - if (cond.isEmpty) success else codegen - ) - - override def toString = "(case %d) {\n Bindings: %s\n\n if (%s) { %s }\n else { %s }\n}".format( - bx, subst, guard, success, guardedRest - ) - } - - class MixLiteralInts(val pmatch: PatternSwitch, val rest: Rep) extends RuleApplication { - val literals = pmatch.ps - val defaultPattern = pmatch.defaultPattern - - private lazy val casted: Tree = - if (!scrut.tpe.isInt) scrut.id DOT nme.toInt else scrut.id - - // creates a row transformer for injecting the default case bindings at a given index - private def addDefaultVars(index: Int): Row => Row = - if (defaultVars.isEmpty) identity - else rebindAll(_, pmatch(index).boundVariables, scrut.sym) - - // add bindings for all the given vs to the given tvar - private def rebindAll(r: Row, vs: Iterable[Symbol], tvar: Symbol) = - r rebind r.subst.add(vs, tvar) - - private def bindVars(Tag: Int, orig: Bindings): Bindings = { - def myBindVars(rest: List[(Int, List[Symbol])], bnd: Bindings): Bindings = rest match { - case Nil => bnd - case (Tag,vs)::xs => myBindVars(xs, bnd.add(vs, scrut.sym)) - case (_, vs)::xs => myBindVars(xs, bnd) - } - myBindVars(varMap, orig) - } - - // bound vars and rows for default pattern (only one row, but a list is easier to use later) - lazy val (defaultVars, defaultRows) = defaultPattern match { - case None => (Nil, Nil) - case Some(p) => (p.boundVariables, List(rebindAll(rest rows literals.size, p.boundVariables, scrut.sym))) - } - - // literalMap is a map from each literal to a list of row indices. - // varMap is a list from each literal to a list of the defined vars. - lazy val (litPairs, varMap) = ( - literals.zipWithIndex map { - case (lit, index) => - val tag = lit.intValue - (tag -> index, tag -> lit.boundVariables) - } unzip - ) - def literalMap = litPairs groupBy (_._1) map { - case (k, vs) => (k, vs map (_._2)) - } - - lazy val cases = - for ((tag, indices) <- literalMap.toList.sortBy(_._1)) yield { - val newRows = indices map (i => addDefaultVars(i)(rest rows i)) - val r = remake(newRows ++ defaultRows, includeScrut = false) - val r2 = make(r.tvars, r.rows map (x => x rebind bindVars(tag, x.subst))) - - CASE(Literal(Constant(tag))) ==> r2.toTree - } - - lazy val defaultTree = remake(defaultRows, includeScrut = false).toTree - def defaultCase = CASE(WILD(IntClass.tpe)) ==> defaultTree - - // cond/success/failure only used if there is exactly one case. - lazy val cond = scrut.id MEMBER_== cases.head.pat - lazy val success = cases.head.body - lazy val failure = defaultTree - - // only one case becomes if/else, otherwise match - def tree() = - if (cases.size == 1) codegen - else casted MATCH (cases :+ defaultCase: _*) - } - - /** mixture rule for unapply pattern - */ - class MixUnapply(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication { - val Pattern(UnApply(unMethod, unArgs)) = head - val Apply(unTarget, _ :: trailing) = unMethod - - object SameUnapplyCall { - def isSame(t: Tree) = isEquivalentTree(unTarget, t) - def unapply(x: Pattern) = /*tracing("SameUnapplyCall (%s vs. 
%s)".format(unTarget, x))*/(x match { - case Pattern(UnApply(Apply(fn, _), args)) if isSame(fn) => Some(args) - case _ => None - }) - } - object SameUnapplyPattern { - def isSame(t: Tree) = isEquivalentTree(unMethod, t) - def apply(x: Pattern) = unapply(x).isDefined - def unapply(x: Pattern) = /*tracing("SameUnapplyPattern (%s vs. %s)".format(unMethod, x))*/(x match { - case Pattern(UnApply(t, _)) if isSame(t) => Some(unArgs) - case _ => None - }) - } - - private lazy val zipped = pmatch pzip rest.rows - - lazy val unapplyResult: PatternVar = - scrut.createVar(unMethod.tpe, Apply(unTarget, scrut.id :: trailing) setType _.tpe) - - lazy val cond: Tree = unapplyResult.tpe.normalize match { - case TypeRef(_, BooleanClass, _) => unapplyResult.ident - case TypeRef(_, SomeClass, _) => TRUE - case _ => NOT(unapplyResult.ident DOT nme.isEmpty) - } - - lazy val failure = - mkFail(zipped.tail filterNot (x => SameUnapplyPattern(x._1)) map { case (pat, r) => r insert pat }) - - private def doSuccess: (List[PatternVar], List[PatternVar], List[Row]) = { - // pattern variable for the unapply result of Some(x).get - def unMethodTypeArg = unMethod.tpe.baseType(OptionClass).typeArgs match { - case Nil => log("No type argument for unapply result! " + unMethod.tpe) ; NoType - case arg :: _ => arg - } - lazy val pv = scrut.createVar(unMethodTypeArg, _ => fn(ID(unapplyResult.lhs), nme.get)) - def tuple = pv.lhs - - // at this point it's Some[T1,T2...] - lazy val tpes = getProductArgs(tuple.tpe) - - // one pattern variable per tuple element - lazy val tuplePVs = - for ((tpe, i) <- tpes.zipWithIndex) yield - scrut.createVar(tpe, _ => fn(ID(tuple), productProj(tuple, i + 1))) - - // the filter prevents infinite unapply recursion - def mkNewRows(sameFilter: (List[Tree]) => List[Tree]) = { - val dum = if (unArgs.length <= 1) unArgs.length else tpes.size - for ((pat, r) <- zipped) yield pat match { - case SameUnapplyCall(xs) => r.insert2(toPats(sameFilter(xs)) :+ NoPattern, pat.boundVariables, scrut.sym) - case _ => r insert (emptyPatterns(dum) :+ pat) - } - } - - // 0 is Boolean, 1 is Option[T], 2+ is Option[(T1,T2,...)] - unArgs.length match { - case 0 => (Nil, Nil, mkNewRows((xs) => Nil)) - case 1 => (List(pv), List(pv), mkNewRows(xs => List(xs.head))) - case _ => (pv :: tuplePVs, tuplePVs, mkNewRows(identity)) - } - } - - lazy val success = { - val (squeezePVs, pvs, rows) = doSuccess - val srep = remake(rows, pvs).toTree - - squeezedBlock(squeezePVs map (_.valDef), srep) - } - - final def tree() = - squeezedBlock(List(handleOuter(unapplyResult.valDef)), codegen) - } - - /** Handle Sequence patterns (including Star patterns.) - * Note: pivot == head, just better typed. - */ - sealed class MixSequence(val pmatch: PatternMatch, val rest: Rep, pivot: SequencePattern) extends RuleApplication { - require(scrut.tpe <:< head.tpe) - - def hasStar = pivot.hasStar - private def pivotLen = pivot.nonStarLength - private def seqDummies = emptyPatterns(pivot.elems.length + 1) - - // Should the given pattern join the expanded pivot in the success matrix? If so, - // this partial function will be defined for the pattern, and the result of the apply - // is the expanded sequence of new patterns. 
- lazy val successMatrixFn = new PartialFunction[Pattern, List[Pattern]] { - private def seqIsDefinedAt(x: SequenceLikePattern) = (hasStar, x.hasStar) match { - case (true, true) => true - case (true, false) => pivotLen <= x.nonStarLength - case (false, true) => pivotLen >= x.nonStarLength - case (false, false) => pivotLen == x.nonStarLength - } - - def isDefinedAt(pat: Pattern) = pat match { - case x: SequenceLikePattern => seqIsDefinedAt(x) - case WildcardPattern() => true - case _ => false - } - - def apply(pat: Pattern): List[Pattern] = pat match { - case x: SequenceLikePattern => - def isSameLength = pivotLen == x.nonStarLength - def rebound = x.nonStarPatterns :+ (x.elemPatterns.last rebindTo WILD(scrut.seqType)) - - (pivot.hasStar, x.hasStar, isSameLength) match { - case (true, true, true) => rebound :+ NoPattern - case (true, true, false) => (seqDummies drop 1) :+ x - case (true, false, true) => x.elemPatterns ++ List(NilPattern, NoPattern) - case (false, true, true) => rebound - case (false, false, true) => x.elemPatterns :+ NoPattern - case _ => seqDummies - } - - case _ => seqDummies - } - } - - // Should the given pattern be in the fail matrix? This is true of any sequences - // as long as the result of the length test on the pivot doesn't make it impossible: - // for instance if neither sequence is right ignoring and they are of different - // lengths, the later one cannot match since its length must be wrong. - def failureMatrixFn(c: Pattern) = (pivot ne c) && (c match { - case x: SequenceLikePattern => - (hasStar, x.hasStar) match { - case (_, true) => true - case (true, false) => pivotLen > x.nonStarLength - case (false, false) => pivotLen != x.nonStarLength - } - case WildcardPattern() => true - case _ => false - }) - - // divide the remaining rows into success/failure branches, expanding subsequences of patterns - val successRows = pmatch pzip rest.rows collect { - case (c, row) if successMatrixFn isDefinedAt c => row insert successMatrixFn(c) - } - val failRows = pmatch pzip rest.rows collect { - case (c, row) if failureMatrixFn(c) => row insert c - } - - // the discrimination test for sequences is a call to lengthCompare. Note that - // this logic must be fully consistent wiith successMatrixFn and failureMatrixFn above: - // any inconsistency will (and frequently has) manifested as pattern matcher crashes. - lazy val cond = { - // the method call symbol - val methodOp: Symbol = head.tpe member nme.lengthCompare - - // the comparison to perform. If the pivot is right ignoring, then a scrutinee sequence - // of >= pivot length could match it; otherwise it must be exactly equal. 
- val compareOp: (Tree, Tree) => Tree = if (hasStar) _ INT_>= _ else _ INT_== _ - - // scrutinee.lengthCompare(pivotLength) [== | >=] 0 - val compareFn: Tree => Tree = (t: Tree) => compareOp((t DOT methodOp)(LIT(pivotLen)), ZERO) - - // wrapping in a null check on the scrutinee - // XXX this needs to use the logic in "def condition" - nullSafe(compareFn, FALSE)(scrut.id) - // condition(head.tpe, scrut.id, head.boundVariables.nonEmpty) - } - lazy val success = { - // one pattern var per sequence element up to elemCount, and one more for the rest of the sequence - lazy val pvs = scrut createSequenceVars pivotLen - - squeezedBlock(pvs map (_.valDef), remake(successRows, pvs, hasStar).toTree) - } - lazy val failure = remake(failRows).toTree - - final def tree(): Tree = codegen - } - - class MixEquals(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication { - private lazy val rhs = - decodedEqualsType(head.tpe) match { - case SingleType(pre, sym) => REF(pre, sym) - case PseudoType(o) => o - } - private lazy val labelDef = - createLabelDef("fail%", remake((rest.rows.tail, pmatch.tail).zipped map (_ insert _)).toTree) - - lazy val cond = handleOuter(rhs MEMBER_== scrut.id) - lazy val successOne = rest.rows.head.insert2(List(NoPattern), head.boundVariables, scrut.sym) - lazy val successTwo = Row(emptyPatterns(1 + rest.tvars.size), NoBinding, EmptyTree, createShortCut(labelDef.symbol)) - lazy val success = remake(List(successOne, successTwo)).toTree - lazy val failure = labelDef - - final def tree() = codegen - override def toString() = "MixEquals(%s == %s)".format(scrut, head) - } - - /** Mixture rule for type tests. - * moreSpecific: more specific patterns - * subsumed: more general patterns (subsuming current), rows index and subpatterns - * remaining: remaining, rows index and pattern - */ - class MixTypes(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication { - case class Yes(bx: Int, moreSpecific: Pattern, subsumed: List[Pattern]) - case class No(bx: Int, remaining: Pattern) - - val (yeses, noes) = { - val _ys = new ListBuffer[Yes] - val _ns = new ListBuffer[No] - - for ((pattern, j) <- pmatch.pzip()) { - // scrutinee, head of pattern group - val (s, p) = (pattern.tpe, head.necessaryType) - - def isEquivalent = head.necessaryType =:= pattern.tpe - def isObjectTest = pattern.isObject && (p =:= pattern.necessaryType) - - def sMatchesP = matches(s, p) - def pMatchesS = matches(p, s) - - def ifEquiv(yes: Pattern): Pattern = if (isEquivalent) yes else pattern - - def passl(p: Pattern = NoPattern, ps: List[Pattern] = pmatch.dummies) = Some(Yes(j, p, ps)) - def passr() = Some( No(j, pattern)) - - def typed(pp: Tree) = passl(ifEquiv(Pattern(pp))) - def subs() = passl(ifEquiv(NoPattern), pattern subpatterns pmatch) - - val (oneY, oneN) = pattern match { - case Pattern(LIT(null)) if !(p =:= s) => (None, passr) // (1) - case x if isObjectTest => (passl(), None) // (2) - case Pattern(Typed(pp, _)) if sMatchesP => (typed(pp), None) // (4) - // The next line used to be this which "fixed" 1697 but introduced - // numerous regressions including #3136. 
- // case Pattern(_: UnApply, _) => (passl(), passr) - case Pattern(_: UnApply) => (None, passr) - case x if !x.isDefault && sMatchesP => (subs(), None) - case x if x.isDefault || pMatchesS => (passl(), passr) - case _ => (None, passr) - } - oneY map (_ys +=) - oneN map (_ns +=) - } - (_ys.toList, _ns.toList) - } - - // val moreSpecific = yeses map (_.moreSpecific) - val subsumed = yeses map (x => (x.bx, x.subsumed)) - val remaining = noes map (x => (x.bx, x.remaining)) - - private def mkZipped = - for (Yes(j, moreSpecific, subsumed) <- yeses) yield - j -> (moreSpecific :: subsumed) - - lazy val casted = scrut castedTo pmatch.headType - lazy val cond = condition(casted.tpe, scrut, head.boundVariables.nonEmpty) - - private def isAnyMoreSpecific = yeses exists (x => !x.moreSpecific.isEmpty) - lazy val (subtests, subtestVars) = - if (isAnyMoreSpecific) (mkZipped, List(casted.pv)) - else (subsumed, Nil) - - lazy val newRows = - for ((j, ps) <- subtests) yield - (rest rows j).insert2(ps, pmatch(j).boundVariables, casted.sym) - - lazy val success = { - val srep = remake(newRows, subtestVars ::: casted.accessorPatternVars, includeScrut = false) - squeezedBlock(casted.allValDefs, srep.toTree) - } - - lazy val failure = - mkFail(remaining map { case (p1, p2) => rest rows p1 insert p2 }) - - final def tree(): Tree = codegen - } - - /*** States, Rows, Etc. ***/ - - case class Row(pats: List[Pattern], subst: Bindings, guard: Tree, bx: Int) { - private def nobindings = subst.get().isEmpty - private def bindstr = if (nobindings) "" else pp(subst) - - /** Extracts the 'i'th pattern. */ - def extractColumn(i: Int) = { - val (x, xs) = extractIndex(pats, i) - (x, copy(pats = xs)) - } - - /** Replaces the 'i'th pattern with the argument. */ - def replaceAt(i: Int, p: Pattern) = { - val newps = (pats take i) ::: p :: (pats drop (i + 1)) - copy(pats = newps) - } - - def insert(h: Pattern) = copy(pats = h :: pats) - def insert(hs: List[Pattern]) = copy(pats = hs ::: pats) // prepends supplied pattern - def rebind(b: Bindings) = copy(subst = b) // substitutes for bindings - - def insert2(hs: List[Pattern], vs: Iterable[Symbol], tvar: Symbol) = - tracing("insert2")(copy(pats = hs ::: pats, subst = subst.add(vs, tvar))) - - // returns this rows with alternatives expanded - def expandAlternatives(classifyPat: (Pattern, Int) => Pattern): List[Row] = { - def isNotAlternative(p: Pattern) = !cond(p.tree) { case _: Alternative => true } - - // classify all the top level patterns - alternatives come back unaltered - val newPats: List[Pattern] = pats.zipWithIndex map classifyPat.tupled - // see if any alternatives were in there - val (ps, others) = newPats span isNotAlternative - // make a new row for each alternative, with it spliced into the original position - if (others.isEmpty) List(copy(pats = ps)) - else extractBindings(others.head) map (x => replaceAt(ps.size, x)) - } - override def toString() = { - val bs = if (nobindings) "" else "\n" + bindstr - "Row(%d)(%s%s)".format(bx, pp(pats), bs) - } - } - abstract class State { - def bx: Int // index into the list of rows - def params: List[Symbol] // bound names to be supplied as arguments to labeldef - def body: Tree // body to execute upon match - def label: Option[LabelDef] // label definition for this state - - // Called with a bindings map when a match is achieved. - // Returns a list of variable declarations based on the labeldef parameters - // and the given substitution, and the body to execute. 
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]): (List[ValDef], Tree) - - final def applyBindings(subst: Map[Symbol, Symbol]): (List[ValDef], Tree) = { - _referenceCount += 1 - applyBindingsImpl(subst) - } - - private var _referenceCount = 0 - def referenceCount = _referenceCount - def unreached = referenceCount == 0 - def shouldInline(sym: Symbol) = referenceCount == 1 && label.exists(_.symbol == sym) - - // Creates a simple Ident if the symbol's type conforms to - // the val definition's type, or a casted Ident if not. - private def newValIdent(lhs: Symbol, rhs: Symbol) = - if (rhs.tpe <:< lhs.tpe) Ident(rhs) - else gen.mkTypeApply(Ident(rhs), Any_asInstanceOf, List(lhs.tpe)) - - protected def newValDefinition(lhs: Symbol, rhs: Symbol) = - typer typedValDef ValDef(lhs, newValIdent(lhs, rhs)) - - protected def newValReference(lhs: Symbol, rhs: Symbol) = - typer typed newValIdent(lhs, rhs) - - protected def valDefsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValDefinition) - protected def identsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValReference) - - protected def mapSubst[T](subst: Map[Symbol, Symbol])(f: (Symbol, Symbol) => T): List[T] = - params flatMap { lhs => - subst get lhs map (rhs => f(lhs, rhs)) orElse { - // This should not happen; the code should be structured so it is - // impossible, but that still lies ahead. - cunit.warning(lhs.pos, "No binding") - None - } - } - - // typer is not able to digest a body of type Nothing being assigned result type Unit - protected def caseResultType = - if (body.tpe.isNothing) body.tpe else matchResultType - } - - case class LiteralState(bx: Int, params: List[Symbol], body: Tree) extends State { - def label = None - - protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) = - (valDefsFor(subst), body.duplicate setType caseResultType) - } - - case class FinalState(bx: Int, params: List[Symbol], body: Tree) extends State { - traceCategory("Final State", "(%s) => %s", paramsString, body) - def label = Some(labelDef) - - private lazy val labelDef = createLabelDef("body%" + bx, body, params, caseResultType) - - protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) = { - val tree = - if (referenceCount > 1) ID(labelDef.symbol) APPLY identsFor(subst) - else labelDef - - (valDefsFor(subst), tree) - } - - private def paramsString = params map (s => s.name + ": " + s.tpe) mkString ", " - override def toString() = pp("(%s) => %s".format(pp(params), body)) - } - - case class Rep(val tvars: PatternVarGroup, val rows: List[Row]) { - lazy val Row(pats, subst, guard, index) = rows.head - lazy val guardedRest = if (guard.isEmpty) Rep(Nil, Nil) else make(tvars, rows.tail) - lazy val (defaults, others) = pats span (_.isDefault) - - /** Cut out the column containing the non-default pattern. */ - class Cut(index: Int) { - /** The first two separate out the 'i'th pattern in each row from the remainder. */ - private val (_column, _rows) = rows map (_ extractColumn index) unzip - - /** Now the 'i'th tvar is separated out and used as a new Scrutinee. */ - private val (_pv, _tvars) = tvars extractIndex index - - /** The non-default pattern (others.head) replaces the column head. */ - private val (_ncol, _nrep) = - (others.head :: _column.tail, make(_tvars, _rows)) - - def mix() = { - val newScrut = new Scrutinee(new PatternVar(_pv.sym, EmptyTree, _pv.checked)) - PatternMatch(newScrut, _ncol) mkRule _nrep - } - } - - /** Converts this to a tree - recursively acquires subreps. 
*/ - final def toTree(): Tree = tracing("toTree")(typer typed applyRule()) - - /** The VariableRule. */ - private def variable() = { - val binding = (defaults map (_.boundVariables) zip tvars.pvs) . - foldLeft(subst)((b, pair) => b.add(pair._1, pair._2.lhs)) - - VariableRule(binding, guard, guardedRest, index) - } - /** The MixtureRule: picks a rewrite rule to apply. */ - private def mixture() = new Cut(defaults.size) mix() - - /** Applying the rule will result in one of: - * - * VariableRule - if all patterns are default patterns - * MixtureRule - if one or more patterns are not default patterns - * Error - no rows remaining - */ - final def applyRule(): Tree = - if (rows.isEmpty) failTree - else if (others.isEmpty) variable.tree() - else mixture.tree() - - def ppn(x: Any) = pp(x, newlines = true) - override def toString() = - if (tvars.isEmpty) "Rep(%d) = %s".format(rows.size, ppn(rows)) - else "Rep(%dx%d)%s%s".format(tvars.size, rows.size, ppn(tvars), ppn(rows)) - } - - /** Expands the patterns recursively. */ - final def expand(roots: List[PatternVar], cases: List[CaseDef]) = tracing("expand") { - for ((CaseDef(pat, guard, body), bx) <- cases.zipWithIndex) yield { - val subtrees = pat match { - case x if roots.length <= 1 => List(x) - case Apply(_, args) => args - case WILD() => emptyTrees(roots.length) - } - val params = pat filter (_.isInstanceOf[Bind]) map (_.symbol) distinct - val row = Row(toPats(subtrees), NoBinding, guard, bx) - val state = body match { - case x: Literal => LiteralState(bx, params, body) - case _ => FinalState(bx, params, body) - } - - row -> state - } - } - - /** returns the condition in "if (cond) k1 else k2" - */ - final def condition(tpe: Type, scrut: Scrutinee, isBound: Boolean): Tree = { - assert(scrut.isDefined) - val cond = handleOuter(condition(tpe, scrut.id, isBound)) - - if (!needsOuterTest(tpe, scrut.tpe, owner)) cond - else addOuterCondition(cond, tpe, scrut.id) - } - - final def condition(tpe: Type, scrutTree: Tree, isBound: Boolean): Tree = { - assert((tpe ne NoType) && (scrutTree.tpe ne NoType)) - def isMatchUnlessNull = scrutTree.tpe <:< tpe && tpe.isAnyRef - def isRef = scrutTree.tpe.isAnyRef - - // See ticket #1503 for the motivation behind checking for a binding. - // The upshot is that it is unsound to assume equality means the right - // type, but if the value doesn't appear on the right hand side of the - // match that's unimportant; so we add an instance check only if there - // is a binding. 
- def bindingWarning() = { - if (isBound && settings.Xmigration28.value) { - cunit.warning(scrutTree.pos, - "A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.") - } - } - - def genEquals(sym: Symbol): Tree = { - val t1: Tree = REF(sym) MEMBER_== scrutTree - - if (isBound) { - bindingWarning() - t1 AND (scrutTree IS tpe.widen) - } - else t1 - } - - typer typed { - tpe match { - case ConstantType(Constant(null)) if isRef => scrutTree OBJ_EQ NULL - case ConstantType(const) => scrutTree MEMBER_== Literal(const) - case SingleType(NoPrefix, sym) => genEquals(sym) - case SingleType(pre, sym) if sym.isStable => genEquals(sym) - case ThisType(sym) if sym.isModule => genEquals(sym) - case _ if isMatchUnlessNull => scrutTree OBJ_NE NULL - case _ => scrutTree IS tpe - } - } - } - - /** adds a test comparing the dynamic outer to the static outer */ - final def addOuterCondition(cond: Tree, tpe2test: Type, scrut: Tree) = { - val TypeRef(prefix, _, _) = tpe2test - val theRef = handleOuter(prefix match { - case NoPrefix => abort("assertion failed: NoPrefix") - case ThisType(clazz) => THIS(clazz) - case pre => REF(pre.prefix, pre.termSymbol) - }) - outerAccessor(tpe2test.typeSymbol) match { - case NoSymbol => ifDebug(cunit.warning(scrut.pos, "no outer acc for " + tpe2test.typeSymbol)) ; cond - case outerAcc => - val casted = gen.mkAsInstanceOf(scrut, tpe2test, any = true, wrapInApply = true) - cond AND ((casted DOT outerAcc)() OBJ_EQ theRef) - } - } - } -} diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala deleted file mode 100644 index c6fa6f6ba0..0000000000 --- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala +++ /dev/null @@ -1,126 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * Author: Paul Phillips - */ - -package scala.tools.nsc -package matching - -import transform.ExplicitOuter -import scala.language.postfixOps - -trait PatternBindings extends ast.TreeDSL -{ - self: ExplicitOuter with ParallelMatching => - - import global.{ typer => _, _ } - import definitions.{ EqualsPatternClass } - import CODE._ - - /** EqualsPattern **/ - def isEquals(tpe: Type) = tpe.typeSymbol == EqualsPatternClass - def mkEqualsRef(tpe: Type) = typeRef(NoPrefix, EqualsPatternClass, List(tpe)) - def decodedEqualsType(tpe: Type) = - if (tpe.typeSymbol == EqualsPatternClass) tpe.typeArgs.head else tpe - - // A subtype test which creates fresh existentials for type - // parameters on the right hand side. - def matches(arg1: Type, arg2: Type) = decodedEqualsType(arg1) matchesPattern decodedEqualsType(arg2) - - // For spotting duplicate unapplies - def isEquivalentTree(t1: Tree, t2: Tree) = (t1.symbol == t2.symbol) && (t1 equalsStructure t2) - - // Reproduce the Bind trees wrapping oldTree around newTree - def moveBindings(oldTree: Tree, newTree: Tree): Tree = oldTree match { - case b @ Bind(x, body) => Bind(b.symbol, moveBindings(body, newTree)) - case _ => newTree - } - - // used as argument to `EqualsPatternClass` - case class PseudoType(o: Tree) extends SimpleTypeProxy { - override def underlying: Type = o.tpe - override def safeToString: String = "PseudoType("+o+")" - } - - // If the given pattern contains alternatives, return it as a list of patterns. - // Makes typed copies of any bindings found so all alternatives point to final state. 
- def extractBindings(p: Pattern): List[Pattern] = - toPats(_extractBindings(p.boundTree, identity)) - - private def _extractBindings(p: Tree, prevBindings: Tree => Tree): List[Tree] = { - def newPrev(b: Bind) = (x: Tree) => treeCopy.Bind(b, b.name, x) setType x.tpe - - p match { - case b @ Bind(_, body) => _extractBindings(body, newPrev(b)) - case Alternative(ps) => ps map prevBindings - } - } - - trait PatternBindingLogic { - self: Pattern => - - // The outermost Bind(x1, Bind(x2, ...)) surrounding the tree. - private var _boundTree: Tree = tree - def boundTree = _boundTree - def setBound(x: Bind): Pattern = { - _boundTree = x - this - } - def boundVariables = strip(boundTree) - - // If a tree has bindings, boundTree looks something like - // Bind(v3, Bind(v2, Bind(v1, tree))) - // This takes the given tree and creates a new pattern - // using the same bindings. - def rebindTo(t: Tree): Pattern = Pattern(moveBindings(boundTree, t)) - - // Wrap this pattern's bindings around (_: Type) - def rebindToType(tpe: Type, ascription: Type = null): Pattern = { - val aType = if (ascription == null) tpe else ascription - rebindTo(Typed(WILD(tpe), TypeTree(aType)) setType tpe) - } - - // Wrap them around _ - def rebindToEmpty(tpe: Type): Pattern = - rebindTo(Typed(EmptyTree, TypeTree(tpe)) setType tpe) - - // Wrap them around a singleton type for an EqualsPattern check. - def rebindToEqualsCheck(): Pattern = - rebindToType(equalsCheck) - - // Like rebindToEqualsCheck, but subtly different. Not trying to be - // mysterious -- I haven't sorted it all out yet. - def rebindToObjectCheck(): Pattern = - rebindToType(mkEqualsRef(sufficientType), sufficientType) - - /** Helpers **/ - private def wrapBindings(vs: List[Symbol], pat: Tree): Tree = vs match { - case Nil => pat - case x :: xs => Bind(x, wrapBindings(xs, pat)) setType pat.tpe - } - private def strip(t: Tree): List[Symbol] = t match { - case b @ Bind(_, pat) => b.symbol :: strip(pat) - case _ => Nil - } - } - - case class Binding(pvar: Symbol, tvar: Symbol) { - override def toString() = pvar.name + " -> " + tvar.name - } - - class Bindings(private val vlist: List[Binding]) { - def get() = vlist - def toMap = vlist map (x => (x.pvar, x.tvar)) toMap - - def add(vs: Iterable[Symbol], tvar: Symbol): Bindings = { - val newBindings = vs.toList map (v => Binding(v, tvar)) - new Bindings(newBindings ++ vlist) - } - - override def toString() = - if (vlist.isEmpty) "" - else vlist.mkString(", ") - } - - val NoBinding: Bindings = new Bindings(Nil) -} diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala deleted file mode 100644 index df536da108..0000000000 --- a/src/compiler/scala/tools/nsc/matching/Patterns.scala +++ /dev/null @@ -1,457 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * Author: Paul Phillips - */ - -package scala.tools.nsc -package matching - -import PartialFunction._ - -/** Patterns are wrappers for Trees with enhanced semantics. 
- * - * @author Paul Phillips - */ - -trait Patterns extends ast.TreeDSL { - self: transform.ExplicitOuter => - - import global.{ typer => _, _ } - import definitions._ - import CODE._ - import Debug._ - import treeInfo.{ unbind, isStar, isVarPattern } - - type PatternMatch = MatchMatrix#PatternMatch - private type PatternVar = MatrixContext#PatternVar - - // Fresh patterns - def emptyPatterns(i: Int): List[Pattern] = List.fill(i)(NoPattern) - def emptyTrees(i: Int): List[Tree] = List.fill(i)(EmptyTree) - - // An empty pattern - def NoPattern = WildcardPattern() - - // The Nil pattern - def NilPattern = Pattern(gen.mkNil) - - // 8.1.1 - case class VariablePattern(tree: Ident) extends NamePattern { - lazy val Ident(name) = tree - require(isVarPattern(tree) && name != nme.WILDCARD) - override def covers(sym: Symbol) = true - override def description = "%s".format(name) - } - - // 8.1.1 (b) - case class WildcardPattern() extends Pattern { - def tree = EmptyTree - override def covers(sym: Symbol) = true - override def isDefault = true - override def description = "_" - } - - // 8.1.2 - case class TypedPattern(tree: Typed) extends Pattern { - lazy val Typed(expr, tpt) = tree - - override def covers(sym: Symbol) = newMatchesPattern(sym, tpt.tpe) - override def sufficientType = tpt.tpe - override def simplify(pv: PatternVar) = Pattern(expr) match { - case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr - case _ => this - } - override def description = "%s: %s".format(Pattern(expr), tpt) - } - - // 8.1.3 - case class LiteralPattern(tree: Literal) extends Pattern { - lazy val Literal(const @ Constant(value)) = tree - - def isSwitchable = cond(const.tag) { case ByteTag | ShortTag | IntTag | CharTag => true } - def intValue = const.intValue - override def description = { - val s = if (value == null) "null" else value.toString - "Lit(%s)".format(s) - } - } - - // 8.1.4 (a) - case class ApplyIdentPattern(tree: Apply) extends ApplyPattern with NamePattern { - // XXX - see bug 3411 for code which violates this assumption - // require (!isVarPattern(fn) && args.isEmpty) - lazy val ident @ Ident(name) = fn - - override def sufficientType = Pattern(ident).equalsCheck - override def simplify(pv: PatternVar) = this.rebindToObjectCheck() - override def description = "Id(%s)".format(name) - } - // 8.1.4 (b) - case class ApplySelectPattern(tree: Apply) extends ApplyPattern with SelectPattern { - require (args.isEmpty) - lazy val Apply(select: Select, _) = tree - - override lazy val sufficientType = qualifier.tpe match { - case t: ThisType => singleType(t, sym) // this.X - case _ => - qualifier match { - case _: Apply => PseudoType(tree) - case _ => singleType(Pattern(qualifier).necessaryType, sym) - } - } - - override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType) - override def simplify(pv: PatternVar) = this.rebindToObjectCheck() - override def description = backticked match { - case Some(s) => "this." + s - case _ => "Sel(%s.%s)".format(Pattern(qualifier), name) - } - - } - // 8.1.4 (c) - case class StableIdPattern(tree: Select) extends SelectPattern { - def select = tree - override def description = "St(%s)".format(printableSegments.mkString(" . ")) - private def printableSegments = - pathSegments filter (x => !x.isEmpty && (x.toString != "$iw")) - } - // 8.1.4 (d) - case class ObjectPattern(tree: Apply) extends ApplyPattern { // NamePattern? 
- require(!fn.isType && isModule) - - override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType) - override def sufficientType = tpe.narrow - override def simplify(pv: PatternVar) = this.rebindToObjectCheck() - override def description = "Obj(%s)".format(fn) - } - // 8.1.4 (e) - case class SimpleIdPattern(tree: Ident) extends NamePattern { - val Ident(name) = tree - override def covers(sym: Symbol) = newMatchesPattern(sym, tpe.narrow) - override def description = "Id(%s)".format(name) - } - - // 8.1.5 - case class ConstructorPattern(tree: Apply) extends ApplyPattern with NamePattern { - require(fn.isType && this.isCaseClass, "tree: " + tree + " fn: " + fn) - def name = tpe.typeSymbol.name - def cleanName = tpe.typeSymbol.decodedName - - private def isColonColon = cleanName == "::" - - override def subpatterns(pm: MatchMatrix#PatternMatch) = - if (pm.head.isCaseClass) toPats(args) - else super.subpatterns(pm) - - override def simplify(pv: PatternVar) = - if (args.isEmpty) this rebindToEmpty tree.tpe - else this - - override def covers(sym: Symbol) = { - debugging("[constructor] Does " + this + " cover " + sym + " ? ") { - sym.tpe.typeSymbol == this.tpe.typeSymbol - } - } - override def description = { - if (isColonColon) "%s :: %s".format(Pattern(args(0)), Pattern(args(1))) - else "%s(%s)".format(name, toPats(args).mkString(", ")) - } - } - // 8.1.6 - case class TuplePattern(tree: Apply) extends ApplyPattern { - override def description = "((%s))".format(args.size, toPats(args).mkString(", ")) - } - - // 8.1.7 / 8.1.8 (unapply and unapplySeq calls) - case class ExtractorPattern(tree: UnApply) extends UnapplyPattern { - private def uaTyped = Typed(tree, TypeTree(arg.tpe)) setType arg.tpe - - override def simplify(pv: PatternVar) = { - if (pv.tpe <:< arg.tpe) this - else this rebindTo uaTyped - } - override def description = "Unapply(%s => %s)".format(necessaryType, resTypesString) - } - - // Special List handling. It was like that when I got here. - case class ListExtractorPattern(tree: UnApply, tpt: Tree, elems: List[Tree]) extends UnapplyPattern with SequenceLikePattern { - // As yet I can't testify this is doing any good relative to using - // tpt.tpe, but it doesn't seem to hurt either. - private lazy val packedType = global.typer.computeType(tpt, tpt.tpe) - private lazy val consRef = appliedType(ConsClass, packedType) - private lazy val listRef = appliedType(ListClass, packedType) - - // Fold a list into a well-typed x :: y :: etc :: tree. 
- private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match { - case t @ Star(_) => moveBindings(hd, WILD(t.tpe)) - case _ => - val dummyMethod = NoSymbol.newTermSymbol(newTermName("matching$dummy")) - val consType = MethodType(dummyMethod newSyntheticValueParams List(packedType, listRef), consRef) - - Apply(TypeTree(consType), List(hd, tl)) setType consRef - } - private def foldedPatterns = elems.foldRight(gen.mkNil)((x, y) => listFolder(x, y)) - override def necessaryType = if (nonStarPatterns.nonEmpty) consRef else listRef - - override def simplify(pv: PatternVar) = { - if (pv.tpe <:< necessaryType) - Pattern(foldedPatterns) - else - this rebindTo (Typed(tree, TypeTree(necessaryType)) setType necessaryType) - } - override def description = "List(%s => %s)".format(packedType, resTypesString) - } - - trait SequenceLikePattern extends Pattern { - def elems: List[Tree] - override def hasStar = elems.nonEmpty && isStar(elems.last) - - def elemPatterns = toPats(elems) - def nonStarElems = if (hasStar) elems.init else elems - def nonStarPatterns = toPats(nonStarElems) - def nonStarLength = nonStarElems.length - } - - // 8.1.8 (b) (literal ArrayValues) - case class SequencePattern(tree: ArrayValue) extends Pattern with SequenceLikePattern { - lazy val ArrayValue(_, elems) = tree - - override def description = "Seq(%s)".format(elemPatterns mkString ", ") - } - - // 8.1.8 (c) - case class StarPattern(tree: Star) extends Pattern { - override def description = "_*" - } - // XXX temporary? - case class ThisPattern(tree: This) extends NamePattern { - lazy val This(name) = tree - override def description = "this" - } - - // 8.1.9 - // InfixPattern ... subsumed by Constructor/Extractor Patterns - - // 8.1.10 - case class AlternativePattern(tree: Alternative) extends Pattern { - private lazy val Alternative(subtrees) = tree - private def alts = toPats(subtrees) - override def description = "Alt(%s)".format(alts mkString " | ") - } - - // 8.1.11 - // XMLPattern ... for now, subsumed by SequencePattern, but if we want - // to make it work right, it probably needs special handling. - - private def abortUnknownTree(tree: Tree) = - abort("Unknown Tree reached pattern matcher: %s/%s".format(tree, tree.getClass)) - - object Pattern { - // a small tree -> pattern cache - private val cache = perRunCaches.newMap[Tree, Pattern]() - - def apply(tree: Tree): Pattern = { - if (cache contains tree) - return cache(tree) - - val p = tree match { - case x: Bind => apply(unbind(tree)) setBound x - case EmptyTree => WildcardPattern() - case Ident(nme.WILDCARD) => WildcardPattern() - case x @ Alternative(ps) => AlternativePattern(x) - case x: Apply => ApplyPattern(x) - case x: Typed => TypedPattern(x) - case x: Literal => LiteralPattern(x) - case x: UnApply => UnapplyPattern(x) - case x: Ident => if (isVarPattern(x)) VariablePattern(x) else SimpleIdPattern(x) - case x: ArrayValue => SequencePattern(x) - case x: Select => StableIdPattern(x) - case x: Star => StarPattern(x) - case x: This => ThisPattern(x) // XXX ? - case _ => abortUnknownTree(tree) - } - cache(tree) = p - - // limiting the trace output - p match { - case WildcardPattern() => p - case _: LiteralPattern => p - case _ => tracing("Pattern")(p) - } - } - // matching on Pattern(...) always skips the bindings. 
- def unapply(other: Any): Option[Tree] = other match { - case x: Tree => unapply(Pattern(x)) - case x: Pattern => Some(x.tree) - case _ => None - } - } - - object UnapplyPattern { - private object UnapplySeq { - def unapply(x: UnApply) = x match { - case UnApply( - Apply(TypeApply(Select(qual, nme.unapplySeq), List(tpt)), _), - List(ArrayValue(_, elems))) => - Some((qual.symbol, tpt, elems)) - case _ => - None - } - } - - def apply(x: UnApply): Pattern = x match { - case UnapplySeq(ListModule, tpt, elems) => - ListExtractorPattern(x, tpt, elems) - case _ => - ExtractorPattern(x) - } - } - - // right now a tree like x @ Apply(fn, Nil) where !fn.isType - // is handled by creating a singleton type: - // - // val stype = Types.singleType(x.tpe.prefix, x.symbol) - // - // and then passing that as a type argument to EqualsPatternClass: - // - // val tpe = typeRef(NoPrefix, EqualsPatternClass, List(stype)) - // - // then creating a Typed pattern and rebinding. - // - // val newpat = Typed(EmptyTree, TypeTree(tpe)) setType tpe) - // - // This is also how Select(qual, name) is handled. - object ApplyPattern { - def apply(x: Apply): Pattern = { - val Apply(fn, args) = x - def isModule = x.symbol.isModule || x.tpe.termSymbol.isModule - - if (fn.isType) { - if (isTupleType(fn.tpe)) TuplePattern(x) - else ConstructorPattern(x) - } - else if (args.isEmpty) { - if (isModule) ObjectPattern(x) - else fn match { - case _: Ident => ApplyIdentPattern(x) - case _: Select => ApplySelectPattern(x) - } - } - else abortUnknownTree(x) - } - } - - /** Some intermediate pattern classes with shared structure **/ - - sealed trait SelectPattern extends NamePattern { - def select: Select - lazy val Select(qualifier, name) = select - def pathSegments = getPathSegments(tree) - def backticked: Option[String] = qualifier match { - case _: This if nme.isVariableName(name) => Some("`%s`".format(name)) - case _ => None - } - override def covers(sym: Symbol) = newMatchesPattern(sym, tree.tpe) - protected def getPathSegments(t: Tree): List[Name] = t match { - case Select(q, name) => name :: getPathSegments(q) - case Apply(f, Nil) => getPathSegments(f) - case _ => Nil - } - } - - sealed trait NamePattern extends Pattern { - def name: Name - override def sufficientType = tpe.narrow - override def simplify(pv: PatternVar) = this.rebindToEqualsCheck() - override def description = name.toString - } - - sealed trait UnapplyPattern extends Pattern { - lazy val UnApply(unfn, args) = tree - lazy val Apply(fn, _) = unfn - lazy val MethodType(List(arg, _*), _) = fn.tpe - - // Covers if the symbol matches the unapply method's argument type, - // and the return type of the unapply is Some. - override def covers(sym: Symbol) = newMatchesPattern(sym, arg.tpe) - override def necessaryType = arg.tpe - - def resTypes = analyzer.unapplyTypeList(unfn.symbol, unfn.tpe, args.length) - def resTypesString = resTypes match { - case Nil => "Boolean" - case xs => xs.mkString(", ") - } - } - - sealed trait ApplyPattern extends Pattern { - lazy val Apply(fn, args) = tree - - override def covers(sym: Symbol) = newMatchesPattern(sym, fn.tpe) - } - - sealed abstract class Pattern extends PatternBindingLogic { - def tree: Tree - - // returns either a simplification of this pattern or identity. - def simplify(pv: PatternVar): Pattern = this - - // Is this a default pattern (untyped "_" or an EmptyTree inserted by the matcher) - def isDefault = false - - // what type must a scrutinee have to have any chance of matching this pattern? 
- def necessaryType = tpe - - // what type could a scrutinee have which would automatically indicate a match? - // (nullness and guards will still be checked.) - def sufficientType = tpe - - // the subpatterns for this pattern (at the moment, that means constructor arguments) - def subpatterns(pm: MatchMatrix#PatternMatch): List[Pattern] = pm.dummies - - // if this pattern should be considered to cover the given symbol - def covers(sym: Symbol): Boolean = newMatchesPattern(sym, sufficientType) - def newMatchesPattern(sym: Symbol, pattp: Type) = { - debugging("[" + kindString + "] Does " + pattp + " cover " + sym + " ? ") { - (sym.isModuleClass && (sym.tpe.typeSymbol eq pattp.typeSymbol)) || - (sym.tpe.baseTypeSeq exists (_ matchesPattern pattp)) - } - } - - def sym = tree.symbol - def tpe = tree.tpe - def isEmpty = tree.isEmpty - - def isModule = sym.isModule || tpe.termSymbol.isModule - def isCaseClass = tpe.typeSymbol.isCase - def isObject = (sym != null) && (sym != NoSymbol) && tpe.prefix.isStable // XXX not entire logic - def hasStar = false - - def equalsCheck = - tracing("equalsCheck")( - if (sym.isValue) singleType(NoPrefix, sym) - else tpe.narrow - ) - - /** Standard methods **/ - override def equals(other: Any) = other match { - case x: Pattern => this.boundTree == x.boundTree - case _ => super.equals(other) - } - override def hashCode() = boundTree.hashCode() - def description = super.toString - - final override def toString = description - - def kindString = "" - } - - /*** Extractors ***/ - - object UnapplyParamType { - def unapply(x: Tree): Option[Type] = condOpt(unbind(x)) { - case UnApply(Apply(fn, _), _) => fn.tpe match { - case m: MethodType => m.paramTypes.head - } - } - } -} diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 8f964cf9e1..9c8ffc5ae3 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -104,7 +104,6 @@ trait ScalaSettings extends AbsScalaSettings val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.") val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "") - val XoldPatmat = BooleanSetting ("-Xoldpatmat", "Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10.") val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.") val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.") diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 01c22245cb..9696692146 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -9,7 +9,6 @@ package transform import symtab._ import Flags.{ CASE => _, _ } import scala.collection.mutable.ListBuffer -import matching.{ Patterns, ParallelMatching } /** This class ... 
* @@ -17,15 +16,12 @@ import matching.{ Patterns, ParallelMatching } * @version 1.0 */ abstract class ExplicitOuter extends InfoTransform - with Patterns - with ParallelMatching with TypingTransformers with ast.TreeDSL { import global._ import definitions._ import CODE._ - import Debug.TRACE /** The following flags may be set by this phase: */ override def phaseNewFlags: Long = notPROTECTED @@ -76,9 +72,7 @@ abstract class ExplicitOuter extends InfoTransform class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer { override def transform(tree: Tree) = tree match { - case Bind(_, body) if toRemove(tree.symbol) => - TRACE("Dropping unused binding: " + tree.symbol) - super.transform(body) + case Bind(_, body) if toRemove(tree.symbol) => super.transform(body) case _ => super.transform(tree) } } @@ -363,74 +357,6 @@ abstract class ExplicitOuter extends InfoTransform } } - // requires settings.XoldPatmat.value - def matchTranslation(tree: Match) = { - val Match(selector, cases) = tree - var nselector = transform(selector) - - def makeGuardDef(vs: List[Symbol], guard: Tree) = { - val gdname = unit.freshTermName("gd") - val method = currentOwner.newMethod(gdname, tree.pos, SYNTHETIC) - val params = method newSyntheticValueParams vs.map(_.tpe) - method setInfo new MethodType(params, BooleanClass.tpe) - - localTyper typed { - DEF(method) === guard.changeOwner(currentOwner -> method).substituteSymbols(vs, params) - } - } - - val nguard = new ListBuffer[Tree] - val ncases = - for (CaseDef(pat, guard, body) <- cases) yield { - // Strip out any unused pattern bindings up front - val patternIdents = for (b @ Bind(_, _) <- pat) yield b.symbol - val references: Set[Symbol] = Set(guard, body) flatMap { t => for (id @ Ident(name) <- t) yield id.symbol } - val (used, unused) = patternIdents partition references - val strippedPat = if (unused.isEmpty) pat else new RemoveBindingsTransformer(unused.toSet) transform pat - - val gdcall = - if (guard == EmptyTree) EmptyTree - else { - val guardDef = makeGuardDef(used, guard) - nguard += transform(guardDef) // building up list of guards - - localTyper typed (Ident(guardDef.symbol) APPLY (used map Ident)) - } - - (CASE(transform(strippedPat)) IF gdcall) ==> transform(body) - } - - val (checkExhaustive, requireSwitch) = nselector match { - case Typed(nselector1, tpt) => - val unchecked = tpt.tpe hasAnnotation UncheckedClass - if (unchecked) - nselector = nselector1 - - // Don't require a tableswitch if there are 1-2 casedefs - // since the matcher intentionally emits an if-then-else. 
- (!unchecked, treeInfo.isSwitchAnnotation(tpt.tpe) && ncases.size > 2) - case _ => - (true, false) - } - - val t = atPos(tree.pos) { - val context = MatrixContext(currentUnit, transform, localTyper, currentOwner, tree.tpe) - val t_untyped = handlePattern(nselector, ncases, checkExhaustive, context) - - /* if @switch annotation is present, verify the resulting tree is a Match */ - if (requireSwitch) t_untyped match { - case Block(_, Match(_, _)) => // ok - case _ => - unit.error(tree.pos, "could not emit switch for @switch annotated match") - } - - localTyper.typed(t_untyped, context.matchResultType) - } - - if (nguard.isEmpty) t - else Block(nguard.toList, t) setType t.tpe - } - /** The main transformation method */ override def transform(tree: Tree): Tree = { val sym = tree.symbol @@ -512,14 +438,10 @@ abstract class ExplicitOuter extends InfoTransform }) super.transform(treeCopy.Apply(tree, sel, outerVal :: args)) - // entry point for pattern matcher translation - case m: Match if settings.XoldPatmat.value => // the new pattern matcher runs in its own phase right after typer - matchTranslation(m) - // for the new pattern matcher // base..eq(o) --> base.$outer().eq(o) if there's an accessor, else the whole tree becomes TRUE // TODO remove the synthetic `` method from outerFor?? - case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) if !settings.XoldPatmat.value => + case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) => val outerFor = sel.symbol.owner.toInterface // TODO: toInterface necessary? val acc = outerAccessor(outerFor) diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index b94ae99263..6e89f6387e 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -211,16 +211,11 @@ abstract class UnCurry extends InfoTransform * } * new $anon() * - * If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction). - * */ def transformFunction(fun: Function): Tree = deEta(fun) match { // nullary or parameterless case fun1 if fun1 ne fun => fun1 - case _ if fun.tpe.typeSymbol == PartialFunctionClass => - // only get here when running under -Xoldpatmat - synthPartialFunction(fun) case _ => val parents = ( if (isFunctionType(fun.tpe)) addSerializable(abstractFunctionForFunctionType(fun.tpe)) @@ -259,131 +254,6 @@ abstract class UnCurry extends InfoTransform } - /** Transform a function node (x => body) of type PartialFunction[T, R] where - * body = expr match { case P_i if G_i => E_i }_i=1..n - * to (assuming none of the cases is a default case): - * - * class $anon() extends AbstractPartialFunction[T, R] with Serializable { - * def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = (expr: @unchecked) match { - * case P_1 if G_1 => E_1 - * ... - * case P_n if G_n => E_n - * case _ => default(expr) - * } - * def isDefinedAt(x: T): boolean = (x: @unchecked) match { - * case P_1 if G_1 => true - * ... 
- * case P_n if G_n => true - * case _ => false - * } - * } - * new $anon() - * - * If there's a default case, the original match is used for applyOrElse, and isDefinedAt returns `true` - */ - def synthPartialFunction(fun: Function) = { - if (!settings.XoldPatmat.value) - devWarning("Under the new pattern matching scheme, PartialFunction should have been synthesized during typers.") - - val targs = fun.tpe.typeArgs - val (formals, restpe) = (targs.init, targs.last) - - val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation - val parents = addSerializable(appliedType(AbstractPartialFunctionClass, targs: _*)) - anonClass setInfo ClassInfoType(parents, newScope, anonClass) - - // duplicate before applyOrElseMethodDef is run so that it does not mess up our trees and label symbols (we have a fresh set) - // otherwise `TreeSymSubstituter(fun.vparams map (_.symbol), params)` won't work as the subst has been run already - val bodyForIDA = { - val duped = fun.body.duplicate - val oldParams = new mutable.ListBuffer[Symbol]() - val newParams = new mutable.ListBuffer[Symbol]() - - val oldSyms0 = - duped filter { - case l@LabelDef(_, params, _) => - params foreach {p => - val oldSym = p.symbol - p.symbol = oldSym.cloneSymbol - oldParams += oldSym - newParams += p.symbol - } - true - case _ => false - } map (_.symbol) - val oldSyms = oldParams.toList ++ oldSyms0 - val newSyms = newParams.toList ++ (oldSyms0 map (_.cloneSymbol)) - // println("duping "+ oldSyms +" --> "+ (newSyms map (_.ownerChain))) - - val substLabels = new TreeSymSubstituter(oldSyms, newSyms) - - substLabels(duped) - } - - // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = - val applyOrElseMethodDef = { - val methSym = anonClass.newMethod(nme.applyOrElse, fun.pos, newFlags = FINAL | OVERRIDE | SYNTHETIC) - - val List(argtpe) = formals - val A1 = methSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argtpe) - val B1 = methSym newTypeParameter(newTypeName("B1")) setInfo TypeBounds.lower(restpe) - val methFormals = List(A1.tpe, functionType(List(A1.tpe), B1.tpe)) - val params@List(x, default) = methSym newSyntheticValueParams methFormals - methSym setInfoAndEnter polyType(List(A1, B1), MethodType(params, B1.tpe)) - - val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), List(x)) - val body = localTyper.typedPos(fun.pos) { import CODE._ - def defaultAction(scrut: Tree) = REF(default) APPLY (REF(x)) - - substParam(fun.body) match { - case orig@Match(selector, cases) => - if (cases exists treeInfo.isDefaultCase) orig - else { - val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(selector.duplicate)) - Match(/*gen.mkUnchecked*/(selector), cases :+ defaultCase) - } - - } - } - body.changeOwner(fun.symbol -> methSym) - - val methDef = DefDef(methSym, body) - - // Have to repack the type to avoid mismatches when existentials - // appear in the result - see SI-4869. 
- methDef.tpt setType localTyper.packedType(body, methSym) - methDef - } - - val isDefinedAtMethodDef = { - val methSym = anonClass.newMethod(nme.isDefinedAt, fun.pos, FINAL | SYNTHETIC) - val params = methSym newSyntheticValueParams formals - methSym setInfoAndEnter MethodType(params, BooleanClass.tpe) - - val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params) - def doSubst(x: Tree) = substParam(resetLocalAttrsKeepLabels(x)) // see pos/t1761 for why `resetLocalAttrs`, but must keep label symbols around - - val body = bodyForIDA match { - case Match(selector, cases) => - if (cases exists treeInfo.isDefaultCase) TRUE - else - doSubst(Match(/*gen.mkUnchecked*/(selector), - (cases map (c => deriveCaseDef(c)(x => TRUE))) :+ ( - DEFAULT ==> FALSE))) - - } - body.changeOwner(fun.symbol -> methSym) - - DefDef(methSym, body) - } - - localTyper.typedPos(fun.pos) { - Block( - List(ClassDef(anonClass, NoMods, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)), - Typed(New(anonClass.tpe), TypeTree(fun.tpe))) - } - } - def transformArgs(pos: Position, fun: Symbol, args: List[Tree], formals: List[Type]) = { val isJava = fun.isJavaDefined def transformVarargs(varargsElemType: Type) = { @@ -674,35 +544,6 @@ abstract class UnCurry extends InfoTransform def isDefaultCatch(cdef: CaseDef) = isThrowable(cdef.pat) && cdef.guard.isEmpty - def postTransformTry(tree: Try) = { - val body = tree.block - val catches = tree.catches - val finalizer = tree.finalizer - if (!settings.XoldPatmat.value) { - if (catches exists (cd => !treeInfo.isCatchCase(cd))) - devWarning("VPM BUG - illegal try/catch " + catches) - tree - } else if (catches forall treeInfo.isCatchCase) { - tree - } else { - val exname = unit.freshTermName("ex$") - val cases = - if ((catches exists treeInfo.isDefaultCase) || isDefaultCatch(catches.last)) catches - else catches :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Throw(Ident(exname))) - val catchall = - atPos(tree.pos) { - CaseDef( - Bind(exname, Ident(nme.WILDCARD)), - EmptyTree, - Match(Ident(exname), cases)) - } - debuglog("rewrote try: " + catches + " ==> " + catchall); - val catches1 = localTyper.typedCases( - List(catchall), ThrowableClass.tpe, WildcardType) - treeCopy.Try(tree, body, catches1, finalizer) - } - } - tree match { /* Some uncurry post transformations add members to templates. 
* @@ -734,7 +575,9 @@ abstract class UnCurry extends InfoTransform addJavaVarargsForwarders(dd, flatdd) case tree: Try => - postTransformTry(tree) + if (tree.catches exists (cd => !treeInfo.isCatchCase(cd))) + devWarning("VPM BUG - illegal try/catch " + tree.catches) + tree case Apply(Apply(fn, args), args1) => treeCopy.Apply(tree, fn, args ::: args1) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index a541906a99..2693fcfd27 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -575,14 +575,13 @@ trait Infer extends Checkable { && (restpe.isWildcard || (varianceInType(restpe)(tparam) & COVARIANT) == 0) // don't retract covariant occurrences ) - // checks !settings.XoldPatmat.value directly so one need not run under -Xexperimental to use virtpatmat buf += ((tparam, if (retract) None else Some( if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass) else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass) // this infers Foo.type instead of "object Foo" (see also widenIfNecessary) - else if (targ.typeSymbol.isModuleClass || ((settings.Xexperimental.value || !settings.XoldPatmat.value) && tvar.constr.avoidWiden)) targ + else if (targ.typeSymbol.isModuleClass || tvar.constr.avoidWiden) targ else targ.widen ) )) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index dba2f25e32..49eca828a9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -67,9 +67,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL } } - def newTransformer(unit: CompilationUnit): Transformer = - if (!settings.XoldPatmat.value) new MatchTransformer(unit) - else noopTransformer + def newTransformer(unit: CompilationUnit): Transformer = new MatchTransformer(unit) // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...) 
private lazy val MarkerCPSAdaptPlus = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus") diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 4fd65c18d1..a3688f249d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -96,8 +96,8 @@ trait Typers extends Modes with Adaptations with Tags { // when true: // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope) // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction - // this is disabled by: -Xoldpatmat or interactive compilation (we run it for scaladoc due to SI-5933) - private def newPatternMatching = !settings.XoldPatmat.value && !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id) + // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933) + private def newPatternMatching = !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id) abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors { import context0.unit @@ -2440,18 +2440,14 @@ trait Typers extends Modes with Adaptations with Tags { val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector) val casesTyped = typedCases(cases, selectorTp, pt) - val (resTp, needAdapt) = - if (!settings.XoldPatmat.value) ptOrLubPacked(casesTyped, pt) - else ptOrLub(casesTyped map (_.tpe), pt) + val (resTp, needAdapt) = ptOrLubPacked(casesTyped, pt) val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp)) treeCopy.Match(tree, selector1, casesAdapted) setType resTp } - // match has been typed -- virtualize it if we're feeling experimental - // (virtualized matches are expanded during type checking so they have the full context available) - // otherwise, do nothing: matches are translated during phase `patmat` (unless -Xoldpatmat) + // match has been typed -- virtualize it during type checking so the full context is available def virtualizedMatch(match_ : Match, mode: Int, pt: Type) = { import patmat.{ vpmName, PureMatchTranslator } @@ -3333,7 +3329,7 @@ trait Typers extends Modes with Adaptations with Tags { // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test // return the corresponding extractor (an instance of ClassTag[`pt`]) - def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (settings.XoldPatmat.value || isPastTyper) None else { + def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (isPastTyper) None else { // only look at top-level type, can't (reliably) do anything about unchecked type args (in general) pt.normalize.typeConstructor match { // if at least one of the types in an intersection is checkable, use the checkable ones @@ -4142,8 +4138,7 @@ trait Typers extends Modes with Adaptations with Tags { // in the special (though common) case where the types are equal, it pays to pack before comparing // especially virtpatmat needs more aggressive unification of skolemized types // this breaks src/library/scala/collection/immutable/TrieIterator.scala - if ( !settings.XoldPatmat.value && !isPastTyper - && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, 
continations break if you by pass them like this) + if (!isPastTyper && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continations break if you by pass them like this) && thenTp =:= elseTp ) (thenp1.tpe.deconst, false) // use unpacked type. Important to deconst, as is done in ptOrLub, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331) // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala @@ -4157,7 +4152,7 @@ trait Typers extends Modes with Adaptations with Tags { } } - // under -Xexperimental (and not -Xoldpatmat), and when there's a suitable __match in scope, virtualize the pattern match + // When there's a suitable __match in scope, virtualize the pattern match // otherwise, type the Match and leave it until phase `patmat` (immediately after typer) // empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it def typedVirtualizedMatch(tree: Match): Tree = { diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index 81ed63bfc6..d5ed9dab5b 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -44,7 +44,6 @@ abstract class MutableSettings extends AbsSettings { def Yrecursion: IntSetting def maxClassfileName: IntSetting def Xexperimental: BooleanSetting - def XoldPatmat: BooleanSetting def XnoPatmatAnalysis: BooleanSetting def XfullLubs: BooleanSetting def breakCycles: BooleanSetting diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index 7d04202455..ba524f4df2 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -32,7 +32,6 @@ private[reflect] class Settings extends MutableSettings { val Xexperimental = new BooleanSetting(false) val XfullLubs = new BooleanSetting(false) val XnoPatmatAnalysis = new BooleanSetting(false) - val XoldPatmat = new BooleanSetting(false) val Xprintpos = new BooleanSetting(false) val Ynotnull = new BooleanSetting(false) val Yshowsymkinds = new BooleanSetting(false) diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check index 6145b6c4d2..477096fb7e 100644 --- a/test/files/jvm/interpreter.check +++ b/test/files/jvm/interpreter.check @@ -357,10 +357,8 @@ defined class Term scala> def f(e: Exp) = e match { // non-exhaustive warning here case _:Fact => 3 } -:18: warning: match is not exhaustive! -missing combination Exp -missing combination Term - +:18: warning: match may not be exhaustive. 
+It would fail on the following inputs: Exp(), Term() def f(e: Exp) = e match { // non-exhaustive warning here ^ f: (e: Exp)Int diff --git a/test/files/jvm/interpreter.scala b/test/files/jvm/interpreter.scala index f45eb034a9..bd1851053f 100644 --- a/test/files/jvm/interpreter.scala +++ b/test/files/jvm/interpreter.scala @@ -2,7 +2,7 @@ import scala.tools.nsc._ import scala.tools.partest.ReplTest object Test extends ReplTest { - override def extraSettings = "-deprecation -Xoldpatmat" + override def extraSettings = "-deprecation" def code = // basics 3+4 diff --git a/test/files/neg/pat_unreachable.check b/test/files/neg/pat_unreachable.check index c5706b7fad..b4c0e7e104 100644 --- a/test/files/neg/pat_unreachable.check +++ b/test/files/neg/pat_unreachable.check @@ -1,13 +1,14 @@ -pat_unreachable.scala:5: error: unreachable code - case Seq(x, y, z, w) => List(z,w) // redundant! - ^ -pat_unreachable.scala:9: error: unreachable code - case Seq(x, y) => List(x, y) - ^ -pat_unreachable.scala:23: error: unreachable code +pat_unreachable.scala:22: warning: patterns after a variable pattern cannot match (SLS 8.1.1) +If you intended to match against parameter b of method contrivedExample, you must use backticks, like: case `b` => + case b => println("matched b") + ^ +pat_unreachable.scala:23: warning: unreachable code due to variable pattern 'b' on line 22 +If you intended to match against parameter c of method contrivedExample, you must use backticks, like: case `c` => case c => println("matched c") ^ -pat_unreachable.scala:24: error: unreachable code +pat_unreachable.scala:24: warning: unreachable code due to variable pattern 'b' on line 22 case _ => println("matched neither") ^ -four errors found +error: No warnings can be incurred under -Xfatal-warnings. +three warnings found +one error found diff --git a/test/files/neg/pat_unreachable.flags b/test/files/neg/pat_unreachable.flags index cb8324a345..85d8eb2ba2 100644 --- a/test/files/neg/pat_unreachable.flags +++ b/test/files/neg/pat_unreachable.flags @@ -1 +1 @@ --Xoldpatmat \ No newline at end of file +-Xfatal-warnings diff --git a/test/files/neg/t3692-new.check b/test/files/neg/t3692-new.check index 5aa991c105..9b96449930 100644 --- a/test/files/neg/t3692-new.check +++ b/test/files/neg/t3692-new.check @@ -7,8 +7,13 @@ t3692-new.scala:15: warning: non-variable type argument Int in type pattern Map[ t3692-new.scala:16: warning: non-variable type argument Int in type pattern Map[T,Int] is unchecked since it is eliminated by erasure case m2: Map[T, Int] => new java.util.HashMap[T, Integer] ^ -t3692-new.scala:16: error: unreachable code - case m2: Map[T, Int] => new java.util.HashMap[T, Integer] +t3692-new.scala:15: warning: unreachable code + case m1: Map[Int, V] => new java.util.HashMap[Integer, V] ^ -three warnings found +t3692-new.scala:4: warning: Tester has a main method with parameter type Array[String], but Tester will not be a runnable program. + Reason: main method must have exact signature (Array[String])Unit +object Tester { + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+5 warnings found one error found diff --git a/test/files/neg/t3692-new.flags b/test/files/neg/t3692-new.flags index cb8324a345..85d8eb2ba2 100644 --- a/test/files/neg/t3692-new.flags +++ b/test/files/neg/t3692-new.flags @@ -1 +1 @@ --Xoldpatmat \ No newline at end of file +-Xfatal-warnings diff --git a/test/files/neg/t3692-old.check b/test/files/neg/t3692-old.check deleted file mode 100644 index 9f3ae516aa..0000000000 --- a/test/files/neg/t3692-old.check +++ /dev/null @@ -1,14 +0,0 @@ -t3692-old.scala:13: warning: non-variable type argument Int in type pattern Map[Int,Int] is unchecked since it is eliminated by erasure - case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer] - ^ -t3692-old.scala:14: warning: non-variable type argument Int in type pattern Map[Int,V] is unchecked since it is eliminated by erasure - case m1: Map[Int, V] => new java.util.HashMap[Integer, V] - ^ -t3692-old.scala:15: warning: non-variable type argument Int in type pattern Map[T,Int] is unchecked since it is eliminated by erasure - case m2: Map[T, Int] => new java.util.HashMap[T, Integer] - ^ -t3692-old.scala:15: error: unreachable code - case m2: Map[T, Int] => new java.util.HashMap[T, Integer] - ^ -three warnings found -one error found diff --git a/test/files/neg/t3692-old.flags b/test/files/neg/t3692-old.flags deleted file mode 100644 index cb8324a345..0000000000 --- a/test/files/neg/t3692-old.flags +++ /dev/null @@ -1 +0,0 @@ --Xoldpatmat \ No newline at end of file diff --git a/test/files/neg/t3692-old.scala b/test/files/neg/t3692-old.scala deleted file mode 100644 index 151535ae94..0000000000 --- a/test/files/neg/t3692-old.scala +++ /dev/null @@ -1,19 +0,0 @@ -import java.lang.Integer - -object ManifestTester { - def main(args: Array[String]) = { - val map = Map("John" -> 1, "Josh" -> 2) - new ManifestTester().toJavaMap(map) - } -} - -class ManifestTester { - private final def toJavaMap[T, V](map: Map[T, V])(implicit m1: Manifest[T], m2: Manifest[V]): java.util.Map[_, _] = { - map match { - case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer] - case m1: Map[Int, V] => new java.util.HashMap[Integer, V] - case m2: Map[T, Int] => new java.util.HashMap[T, Integer] - case _ => new java.util.HashMap[T, V] - } - } -} \ No newline at end of file diff --git a/test/files/neg/unreachablechar.check b/test/files/neg/unreachablechar.check index 58ce1a7e91..121f12a0c7 100644 --- a/test/files/neg/unreachablechar.check +++ b/test/files/neg/unreachablechar.check @@ -1,4 +1,9 @@ -unreachablechar.scala:5: error: unreachable code +unreachablechar.scala:4: warning: patterns after a variable pattern cannot match (SLS 8.1.1) + case _ => println("stuff"); + ^ +unreachablechar.scala:5: warning: unreachable code due to variable pattern on line 4 case 'f' => println("not stuff?"); ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+two warnings found one error found diff --git a/test/files/neg/unreachablechar.flags b/test/files/neg/unreachablechar.flags index 809e9ff2f2..85d8eb2ba2 100644 --- a/test/files/neg/unreachablechar.flags +++ b/test/files/neg/unreachablechar.flags @@ -1 +1 @@ - -Xoldpatmat +-Xfatal-warnings diff --git a/test/files/pos/t1439.flags b/test/files/pos/t1439.flags index 1e70f5c5c7..bca57e4785 100644 --- a/test/files/pos/t1439.flags +++ b/test/files/pos/t1439.flags @@ -1 +1 @@ --unchecked -Xfatal-warnings -Xoldpatmat -language:higherKinds +-unchecked -Xfatal-warnings -language:higherKinds diff --git a/test/files/run/patmat_unapp_abstype-old.check b/test/files/run/patmat_unapp_abstype-old.check deleted file mode 100644 index 72239d16cd..0000000000 --- a/test/files/run/patmat_unapp_abstype-old.check +++ /dev/null @@ -1,4 +0,0 @@ -TypeRef -none of the above -Bar -Foo diff --git a/test/files/run/patmat_unapp_abstype-old.flags b/test/files/run/patmat_unapp_abstype-old.flags deleted file mode 100644 index ba80cad69b..0000000000 --- a/test/files/run/patmat_unapp_abstype-old.flags +++ /dev/null @@ -1 +0,0 @@ --Xoldpatmat diff --git a/test/files/run/patmat_unapp_abstype-old.scala b/test/files/run/patmat_unapp_abstype-old.scala deleted file mode 100644 index 45496f08a2..0000000000 --- a/test/files/run/patmat_unapp_abstype-old.scala +++ /dev/null @@ -1,83 +0,0 @@ -// abstract types and extractors, oh my! -trait TypesAPI { - trait Type - - // an alternative fix (implemented in the virtual pattern matcher, is to replace the isInstanceOf by a manifest-based run-time test) - // that's what typeRefMani is for - type TypeRef <: Type //; implicit def typeRefMani: Manifest[TypeRef] - val TypeRef: TypeRefExtractor; trait TypeRefExtractor { - def apply(x: Int): TypeRef - def unapply(x: TypeRef): Option[(Int)] - } - - // just for illustration, should follow the same pattern as TypeRef - case class MethodType(n: Int) extends Type -} - -// user should not be exposed to the implementation -trait TypesUser extends TypesAPI { - def shouldNotCrash(tp: Type): Unit = { - tp match { - case TypeRef(x) => println("TypeRef") - // the above checks tp.isInstanceOf[TypeRef], which is erased to tp.isInstanceOf[Type] - // before calling TypeRef.unapply(tp), which will then crash unless tp.isInstanceOf[TypesImpl#TypeRef] (which is not implied by tp.isInstanceOf[Type]) - // tp.isInstanceOf[TypesImpl#TypeRef] is equivalent to classOf[TypesImpl#TypeRef].isAssignableFrom(tp.getClass) - // this is equivalent to manifest - // it is NOT equivalent to manifest[Type] <:< typeRefMani - case MethodType(x) => println("MethodType") - case _ => println("none of the above") - } - } -} - -trait TypesImpl extends TypesAPI { - object TypeRef extends TypeRefExtractor // this will have a bridged unapply(x: Type) = unapply(x.asInstanceOf[TypeRef]) - case class TypeRef(n: Int) extends Type // this has a bridge from TypesAPI#Type to TypesImpl#TypeRef - // --> the cast in the bridge will fail because the pattern matcher can't type test against the abstract types in TypesUser - //lazy val typeRefMani = manifest[TypeRef] -} - -trait Foos { - trait Bar - type Foo <: Bar - trait FooExtractor { - def unapply(foo: Foo): Option[Int] - } - val Foo: FooExtractor -} - -trait RealFoos extends Foos { - class Foo(val x: Int) extends Bar - object Foo extends FooExtractor { - def unapply(foo: Foo): Option[Int] = Some(foo.x) - } -} - -trait Intermed extends Foos { - def crash(bar: Bar): Unit = - bar match { - case Foo(x) => println("Foo") - case _ => println("Bar") - } -} - 
-object TestUnappStaticallyKnownSynthetic extends TypesImpl with TypesUser { - def test() = { - shouldNotCrash(TypeRef(10)) // should and does print "TypeRef" - // once #1697/#2337 are fixed, this should generate the correct output - shouldNotCrash(MethodType(10)) // should print "MethodType" but prints "none of the above" -- good one, pattern matcher! - } -} - -object TestUnappDynamicSynth extends RealFoos with Intermed { - case class FooToo(n: Int) extends Bar - def test() = { - crash(FooToo(10)) - crash(new Foo(5)) - } -} - -object Test extends App { - TestUnappStaticallyKnownSynthetic.test() - TestUnappDynamicSynth.test() -} diff --git a/test/files/run/t3835.scala b/test/files/run/t3835.scala index c120a61f6e..766b6ddc2e 100644 --- a/test/files/run/t3835.scala +++ b/test/files/run/t3835.scala @@ -1,6 +1,6 @@ object Test extends App { // work around optimizer bug SI-5672 -- generates wrong bytecode for switches in arguments - // virtpatmat happily emits a switch for a one-case switch, whereas -Xoldpatmat did not + // virtpatmat happily emits a switch for a one-case switch // this is not the focus of this test, hence the temporary workaround def a = (1, 2, 3) match { case (r, \u03b8, \u03c6) => r + \u03b8 + \u03c6 } println(a) -- cgit v1.2.3 From 186e3bf4027a8c2b9bf0550f1aacff5ee4be2313 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Wed, 26 Sep 2012 16:40:18 +0200 Subject: bind + argc specialization = 20x perf boost Default logic of mirror construction, which gets triggered via reflectField/reflectMethod/reflectConstructor, validates a lot of facts about its arguments. This takes quite a bit of time, which significantly degrades performance of reflection-heavy applications. Proposed two changes provide an order of magnitude performance boost to a simple app, which repeatedly invokes the same method for different receiver instances. --- src/reflect/scala/reflect/api/Mirrors.scala | 10 +++++ .../scala/reflect/runtime/JavaMirrors.scala | 50 +++++++++++++++++++++- 2 files changed, 58 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala index d0d8a37584..d30563c706 100644 --- a/src/reflect/scala/reflect/api/Mirrors.scala +++ b/src/reflect/scala/reflect/api/Mirrors.scala @@ -352,6 +352,11 @@ trait Mirrors { self: Universe => * the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor. */ def set(value: Any): Unit + + /** Creates a new mirror which uses the same symbol, but is bound to a different receiver. + * This is significantly faster than recreating the mirror from scratch. + */ + def bind(newReceiver: Any): FieldMirror } /** A mirror that reflects a method. @@ -373,6 +378,11 @@ trait Mirrors { self: Universe => * with invoking the corresponding method or constructor. */ def apply(args: Any*): Any + + /** Creates a new mirror which uses the same symbol, but is bound to a different receiver. + * This is significantly faster than recreating the mirror from scratch. + */ + def bind(newReceiver: Any): MethodMirror } /** A mirror that reflects the instance or static parts of a runtime class. 
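The new `bind` methods declared above are the API half of this change; the arity-specialized mirror classes added to JavaMirrors.scala below supply the other half and are picked up transparently. A minimal usage sketch, not part of the patch: `Point` and the surrounding names are invented for illustration, and the snippet assumes the Scala 2.10 runtime-reflection API. The idea is to pay for `reflectMethod` (and its argument validation) once, then rebind the resulting mirror to each new receiver.

    import scala.reflect.runtime.{universe => ru}

    // Hypothetical example class, used only for illustration.
    case class Point(x: Int) { def double: Int = x * 2 }

    object BindSketch extends App {
      val mirror    = ru.runtimeMirror(getClass.getClassLoader)
      val doubleSym = ru.typeOf[Point].member(ru.newTermName("double")).asMethod

      // Pay the validation cost once, when the method mirror is first created...
      val template = mirror.reflect(Point(0)).reflectMethod(doubleSym)

      // ...then rebind cheaply per receiver instead of calling reflectMethod every time.
      val results = (1 to 5) map (i => template.bind(Point(i)).apply())
      println(results)   // Vector(2, 4, 6, 8, 10)
    }
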
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 57cfb8b515..67b24cbdea 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -282,6 +282,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni if (!symbol.isMutable) ErrorSetImmutableField(symbol) jfield.set(receiver, value) } + def bind(newReceiver: Any) = new JavaFieldMirror(newReceiver, symbol) override def toString = s"field mirror for ${symbol.fullName} (bound to $receiver)" } @@ -329,7 +330,16 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private def mkJavaMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): JavaMethodMirror = { if (isBytecodelessMethod(symbol)) new JavaBytecodelessMethodMirror(receiver, symbol) else if (symbol.paramss.flatten exists (p => isByNameParamType(p.info))) new JavaByNameMethodMirror(receiver, symbol) - else new JavaVanillaMethodMirror(receiver, symbol) + else { + symbol.paramss.flatten.length match { + case 0 => new JavaVanillaMethodMirror0(receiver, symbol) + case 1 => new JavaVanillaMethodMirror1(receiver, symbol) + case 2 => new JavaVanillaMethodMirror2(receiver, symbol) + case 3 => new JavaVanillaMethodMirror3(receiver, symbol) + case 4 => new JavaVanillaMethodMirror4(receiver, symbol) + case _ => new JavaVanillaMethodMirror(receiver, symbol) + } + } } private abstract class JavaMethodMirror(val symbol: MethodSymbol) @@ -340,8 +350,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni jmeth } + def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*) + def jinvoke(jmeth: jMethod, receiver: Any, args: Seq[Any]): Any = { - val result = jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*) + val result = jinvokeraw(jmeth, receiver, args) if (jmeth.getReturnType == java.lang.Void.TYPE) () else result } @@ -351,11 +363,43 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private class JavaVanillaMethodMirror(val receiver: Any, symbol: MethodSymbol) extends JavaMethodMirror(symbol) { + def bind(newReceiver: Any) = new JavaVanillaMethodMirror(newReceiver, symbol) def apply(args: Any*): Any = jinvoke(jmeth, receiver, args) } + private class JavaVanillaMethodMirror0(receiver: Any, symbol: MethodSymbol) + extends JavaVanillaMethodMirror(receiver, symbol) { + override def bind(newReceiver: Any) = new JavaVanillaMethodMirror0(newReceiver, symbol) + override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver) + } + + private class JavaVanillaMethodMirror1(receiver: Any, symbol: MethodSymbol) + extends JavaVanillaMethodMirror(receiver, symbol) { + override def bind(newReceiver: Any) = new JavaVanillaMethodMirror1(newReceiver, symbol) + override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef]) + } + + private class JavaVanillaMethodMirror2(receiver: Any, symbol: MethodSymbol) + extends JavaVanillaMethodMirror(receiver, symbol) { + override def bind(newReceiver: Any) = new JavaVanillaMethodMirror2(newReceiver, symbol) + override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef]) + } + + private class JavaVanillaMethodMirror3(receiver: Any, symbol: MethodSymbol) + 
extends JavaVanillaMethodMirror(receiver, symbol) { + override def bind(newReceiver: Any) = new JavaVanillaMethodMirror3(newReceiver, symbol) + override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef]) + } + + private class JavaVanillaMethodMirror4(receiver: Any, symbol: MethodSymbol) + extends JavaVanillaMethodMirror(receiver, symbol) { + override def bind(newReceiver: Any) = new JavaVanillaMethodMirror4(newReceiver, symbol) + override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef], args(3).asInstanceOf[AnyRef]) + } + private class JavaByNameMethodMirror(val receiver: Any, symbol: MethodSymbol) extends JavaMethodMirror(symbol) { + def bind(newReceiver: Any) = new JavaByNameMethodMirror(newReceiver, symbol) def apply(args: Any*): Any = { val transformed = map2(args.toList, symbol.paramss.flatten)((arg, param) => if (isByNameParamType(param.info)) () => arg else arg) jinvoke(jmeth, receiver, transformed) @@ -364,6 +408,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private class JavaBytecodelessMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol) extends JavaMethodMirror(symbol) { + def bind(newReceiver: Any) = new JavaBytecodelessMethodMirror(newReceiver.asInstanceOf[T], symbol) def apply(args: Any*): Any = { // checking type conformance is too much of a hassle, so we don't do it here // actually it's not even necessary, because we manually dispatch arguments below @@ -420,6 +465,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private class JavaConstructorMirror(val outer: AnyRef, val symbol: MethodSymbol) extends MethodMirror { + def bind(newReceiver: Any) = new JavaConstructorMirror(newReceiver.asInstanceOf[AnyRef], symbol) override val receiver = outer lazy val jconstr = { val jconstr = constructorToJava(symbol) -- cgit v1.2.3 From 45ef0514e97ff618ce1d68f9c81b5024fa793af1 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Thu, 27 Dec 2012 23:53:46 +0100 Subject: a few performance improvements for toArray First of all the typo I have made when migrating from manifests to tags. `repr.getClass` in `WrappedArray` should read `array.getClass`. Secondly manifests for Any, Object/AnyRef, AnyVal, Null and Nothing now have their `newArray` methods overridden to avoid reflective overhead of array instantiation. 
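Both halves of the change are easiest to see from the caller's side. A small sketch, not part of the patch: the object and value names are invented, and the behaviour described in the comments assumes the fixed code in the diff below. With `elementClass` computed from the underlying `array`, `toArray` can hand back the wrapped array unchanged when the element types line up, and the overridden phantom manifests allocate arrays without going through reflective array instantiation.

    import scala.collection.mutable.WrappedArray

    object ToArraySketch extends App {
      val wrapped: WrappedArray[Int] = Array(1, 2, 3)   // wrapped via Predef.wrapIntArray

      // After the fix, elementClass comes from array.getClass (an Array[Int]), so the
      // ClassTag[Int] requested here matches and no copy should be needed.
      val ints: Array[Int] = wrapped.toArray
      println(ints eq wrapped.array)                    // expected: true

      // The phantom manifests now build arrays directly instead of reflectively.
      val anys: Array[Any] = manifest[Any].newArray(4)
      println(anys.length)                              // 4
    }
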
--- src/library/scala/collection/mutable/WrappedArray.scala | 2 +- src/library/scala/reflect/Manifest.scala | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala index f02f5a241f..b83724090c 100644 --- a/src/library/scala/collection/mutable/WrappedArray.scala +++ b/src/library/scala/collection/mutable/WrappedArray.scala @@ -62,7 +62,7 @@ extends AbstractSeq[T] override def par = ParArray.handoff(array) private def elementClass: Class[_] = - arrayElementClass(repr.getClass) + arrayElementClass(array.getClass) override def toArray[U >: T : ClassTag]: Array[U] = { val thatElementClass = arrayElementClass(implicitly[ClassTag[U]]) diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala index eddfe63118..f62d0ecd16 100644 --- a/src/library/scala/reflect/Manifest.scala +++ b/src/library/scala/reflect/Manifest.scala @@ -162,11 +162,13 @@ object ManifestFactory { private val NullTYPE = classOf[scala.runtime.Null$] val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any](ObjectTYPE, "Any") { + override def newArray(len: Int) = new Array[scala.Any](len) override def <:<(that: ClassManifest[_]): Boolean = (that eq this) private def readResolve(): Any = Manifest.Any } val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object](ObjectTYPE, "Object") { + override def newArray(len: Int) = new Array[java.lang.Object](len) override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) private def readResolve(): Any = Manifest.Object } @@ -174,17 +176,20 @@ object ManifestFactory { val AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]] val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") { + override def newArray(len: Int) = new Array[scala.AnyVal](len) override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) private def readResolve(): Any = Manifest.AnyVal } val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null](NullTYPE, "Null") { + override def newArray(len: Int) = new Array[scala.Null](len) override def <:<(that: ClassManifest[_]): Boolean = (that ne null) && (that ne Nothing) && !(that <:< AnyVal) private def readResolve(): Any = Manifest.Null } val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") { + override def newArray(len: Int) = new Array[scala.Nothing](len) override def <:<(that: ClassManifest[_]): Boolean = (that ne null) private def readResolve(): Any = Manifest.Nothing } -- cgit v1.2.3 From 3f9943be7ad9de6a3443befaec1613682dbd0129 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 29 Nov 2012 17:33:03 +0100 Subject: Eliminate allocations in ListBuffer. ++= on a linear sequence can be accomplished without closure allocation. 
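As a standalone sketch of the difference (names invented for the illustration; only the standard library is assumed): the generic path allocates a Function1 for the `+=` closure on every call, while a head/tail loop over a linear sequence appends the same elements without one.

    import scala.annotation.tailrec
    import scala.collection.mutable.ListBuffer

    object AppendAllSketch {
      // Generic path: `foreach (buf += _)` allocates a closure per call.
      def appendWithClosure[A](buf: ListBuffer[A], xs: List[A]): ListBuffer[A] = {
        xs foreach (buf += _)
        buf
      }

      // Linear-sequence path: walk head/tail with a tail-recursive loop,
      // so no Function1 is ever created.
      def appendLinear[A](buf: ListBuffer[A], xs: List[A]): ListBuffer[A] = {
        @tailrec def loop(rest: List[A]): Unit =
          if (rest.nonEmpty) { buf += rest.head; loop(rest.tail) }
        loop(xs)
        buf
      }

      def main(args: Array[String]): Unit = {
        println(appendWithClosure(ListBuffer(1), List(2, 3)))  // ListBuffer(1, 2, 3)
        println(appendLinear(ListBuffer(1), List(2, 3)))       // ListBuffer(1, 2, 3)
      }
    }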
--- .../scala/collection/mutable/ListBuffer.scala | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index b7b487964c..e059f31929 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -11,6 +11,7 @@ package scala.collection package mutable +import scala.annotation.tailrec import generic._ import immutable.{List, Nil, ::} import java.io._ @@ -178,8 +179,23 @@ final class ListBuffer[A] this } - override def ++=(xs: TraversableOnce[A]): this.type = - if (xs.asInstanceOf[AnyRef] eq this) ++= (this take size) else super.++=(xs) + private def ++=(elems: collection.LinearSeq[A]): this.type = { + @tailrec def loop(xs: collection.LinearSeq[A]) { + if (xs.nonEmpty) { + this += xs.head + loop(xs.tail) + } + } + loop(elems) + this + } + + override def ++=(xs: TraversableOnce[A]): this.type = xs match { + case x: AnyRef if x eq this => this ++= (this take size) + case xs: collection.LinearSeq[_] => this ++= xs + case _ => super.++=(xs) + + } override def ++=:(xs: TraversableOnce[A]): this.type = if (xs.asInstanceOf[AnyRef] eq this) ++=: (this take size) else super.++=:(xs) -- cgit v1.2.3 From c53359ecbe135e79d55a6806209a6301bb386ada Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 29 Nov 2012 17:40:38 +0100 Subject: Eliminate allocations in ClassfileParser. --- .../tools/nsc/symtab/classfile/ClassfileParser.scala | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index cb58111b51..04e860f9db 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1225,16 +1225,20 @@ abstract class ClassfileParser { } def skipAttributes() { - val attrCount = in.nextChar - for (i <- 0 until attrCount) { - in.skip(2); in.skip(in.nextInt) + var attrCount: Int = in.nextChar + while (attrCount > 0) { + in skip 2 + in skip in.nextInt + attrCount -= 1 } } def skipMembers() { - val memberCount = in.nextChar - for (i <- 0 until memberCount) { - in.skip(6); skipAttributes() + var memberCount: Int = in.nextChar + while (memberCount > 0) { + in skip 6 + skipAttributes() + memberCount -= 1 } } -- cgit v1.2.3 From 3059e3a0c039645158d2e5533e84d00f508ca824 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 04:39:08 +0100 Subject: Eliminating more allocations in the collections. 
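The shared trick in the two files touched below is to thread a boolean through one loop instead of wrapping the caller's predicate in an adapter closure such as `!p(_)`. A minimal standalone sketch (names invented for the illustration):

    import scala.collection.mutable.ListBuffer

    object FlippedPredicateSketch {
      // One loop serves both filter and filterNot; comparing against the
      // flag replaces the `!p(_)` wrapper that filterNot used to allocate.
      private def filterImpl[A](xs: List[A], p: A => Boolean, isFlipped: Boolean): List[A] = {
        val b = new ListBuffer[A]
        var rest = xs
        while (rest.nonEmpty) {
          val x = rest.head
          if (p(x) != isFlipped) b += x
          rest = rest.tail
        }
        b.toList
      }

      def filter[A](xs: List[A])(p: A => Boolean): List[A]    = filterImpl(xs, p, isFlipped = false)
      def filterNot[A](xs: List[A])(p: A => Boolean): List[A] = filterImpl(xs, p, isFlipped = true)

      def main(args: Array[String]): Unit = {
        println(filter(List(1, 2, 3, 4))(_ % 2 == 0))     // List(2, 4)
        println(filterNot(List(1, 2, 3, 4))(_ % 2 == 0))  // List(1, 3)
      }
    }

IndexedSeqOptimized applies the same idea with an `expectTrue` flag so that forall and exists share one counting loop.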
--- src/library/scala/collection/IndexedSeqOptimized.scala | 10 ++++++++-- src/library/scala/collection/TraversableLike.scala | 17 ++++++++++------- 2 files changed, 18 insertions(+), 9 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala index 09c4b14ba0..9721a42e91 100755 --- a/src/library/scala/collection/IndexedSeqOptimized.scala +++ b/src/library/scala/collection/IndexedSeqOptimized.scala @@ -33,11 +33,17 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { while (i < len) { f(this(i)); i += 1 } } + private def prefixLengthImpl(p: A => Boolean, expectTrue: Boolean): Int = { + var i = 0 + while (i < length && p(apply(i)) == expectTrue) i += 1 + i + } + override /*IterableLike*/ - def forall(p: A => Boolean): Boolean = prefixLength(p(_)) == length + def forall(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = true) == length override /*IterableLike*/ - def exists(p: A => Boolean): Boolean = prefixLength(!p(_)) != length + def exists(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = false) != length override /*IterableLike*/ def find(p: A => Boolean): Option[A] = { diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index c1a68b6b16..a55257d128 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -252,18 +252,21 @@ trait TraversableLike[+A, +Repr] extends Any b.result } + private def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = { + val b = newBuilder + for (x <- this) + if (p(x) != isFlipped) b += x + + b.result + } + /** Selects all elements of this $coll which satisfy a predicate. * * @param p the predicate used to test elements. * @return a new $coll consisting of all elements of this $coll that satisfy the given * predicate `p`. The order of the elements is preserved. */ - def filter(p: A => Boolean): Repr = { - val b = newBuilder - for (x <- this) - if (p(x)) b += x - b.result - } + def filter(p: A => Boolean): Repr = filterImpl(p, isFlipped = false) /** Selects all elements of this $coll which do not satisfy a predicate. * @@ -271,7 +274,7 @@ trait TraversableLike[+A, +Repr] extends Any * @return a new $coll consisting of all elements of this $coll that do not satisfy the given * predicate `p`. The order of the elements is preserved. */ - def filterNot(p: A => Boolean): Repr = filter(!p(_)) + def filterNot(p: A => Boolean): Repr = filterImpl(p, isFlipped = true) def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = { val b = bf(repr) -- cgit v1.2.3 From 78269a68d04d57e65ff0403edb6e06440ea74f7d Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 04:39:14 +0100 Subject: Eliminating allocations in Codec. 
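The patch below replaces a fold over (function, condition) pairs with plain conditionals. A rough sketch of the before/after shape (method and object names invented; only java.nio.charset is assumed):

    import java.nio.charset.{Charset, CharsetDecoder, CodingErrorAction}

    object ConfigureSketch {
      // Combinator style: builds tuples, a list and a folding closure on
      // every call, just to apply a couple of optional settings.
      def decoderViaFold(cs: Charset, action: CodingErrorAction): CharsetDecoder = {
        val steps: List[(CharsetDecoder => CharsetDecoder, Boolean)] =
          List((_.onMalformedInput(action), action != null))
        steps.foldLeft(cs.newDecoder()) { case (d, (f, cond)) => if (cond) f(d) else d }
      }

      // Straight-line style, as in the patched Codec: plain null checks,
      // nothing allocated beyond the decoder itself.
      def decoderDirect(cs: Charset, action: CodingErrorAction): CharsetDecoder = {
        val d = cs.newDecoder()
        if (action ne null) d.onMalformedInput(action)
        d
      }

      def main(args: Array[String]): Unit = {
        val utf8 = Charset.forName("UTF-8")
        println(decoderDirect(utf8, CodingErrorAction.REPLACE).malformedInputAction)
        println(decoderViaFold(utf8, CodingErrorAction.REPLACE).malformedInputAction)
      }
    }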
--- src/library/scala/io/Codec.scala | 55 ++++++++++++++++++---------------------- 1 file changed, 25 insertions(+), 30 deletions(-) (limited to 'src') diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala index 5d046e48b0..bda4234460 100644 --- a/src/library/scala/io/Codec.scala +++ b/src/library/scala/io/Codec.scala @@ -43,42 +43,37 @@ class Codec(val charSet: Charset) { override def toString = name // these methods can be chained to configure the variables above - def onMalformedInput(newAction: Action): this.type = { _onMalformedInput = newAction ; this } - def onUnmappableCharacter(newAction: Action): this.type = { _onUnmappableCharacter = newAction ; this } - def decodingReplaceWith(newReplacement: String): this.type = { _decodingReplacement = newReplacement ; this } + def onMalformedInput(newAction: Action): this.type = { _onMalformedInput = newAction ; this } + def onUnmappableCharacter(newAction: Action): this.type = { _onUnmappableCharacter = newAction ; this } + def decodingReplaceWith(newReplacement: String): this.type = { _decodingReplacement = newReplacement ; this } def encodingReplaceWith(newReplacement: Array[Byte]): this.type = { _encodingReplacement = newReplacement ; this } - def onCodingException(handler: Handler): this.type = { _onCodingException = handler ; this } + def onCodingException(handler: Handler): this.type = { _onCodingException = handler ; this } def name = charSet.name - def encoder = - applyFunctions[CharsetEncoder](charSet.newEncoder(), - (_ onMalformedInput _onMalformedInput, _onMalformedInput != null), - (_ onUnmappableCharacter _onUnmappableCharacter, _onUnmappableCharacter != null), - (_ replaceWith _encodingReplacement, _encodingReplacement != null) - ) - - def decoder = - applyFunctions[CharsetDecoder](charSet.newDecoder(), - (_ onMalformedInput _onMalformedInput, _onMalformedInput != null), - (_ onUnmappableCharacter _onUnmappableCharacter, _onUnmappableCharacter != null), - (_ replaceWith _decodingReplacement, _decodingReplacement != null) - ) + def encoder: CharsetEncoder = { + val enc = charSet.newEncoder() + if (_onMalformedInput ne null) enc onMalformedInput _onMalformedInput + if (_onUnmappableCharacter ne null) enc onUnmappableCharacter _onUnmappableCharacter + if (_encodingReplacement ne null) enc replaceWith _encodingReplacement + enc + } + def decoder: CharsetDecoder = { + val dec = charSet.newDecoder() + if (_onMalformedInput ne null) dec onMalformedInput _onMalformedInput + if (_onUnmappableCharacter ne null) dec onUnmappableCharacter _onUnmappableCharacter + if (_decodingReplacement ne null) dec replaceWith _decodingReplacement + dec + } def wrap(body: => Int): Int = try body catch { case e: CharacterCodingException => _onCodingException(e) } - - // call a series of side effecting methods on an object, finally returning the object - private def applyFunctions[T](x: T, fs: Configure[T]*) = - fs.foldLeft(x)((x, pair) => pair match { - case (f, cond) => if (cond) f(x) else x - }) } trait LowPriorityCodecImplicits { self: Codec.type => /** The Codec of Last Resort. */ - implicit def fallbackSystemCodec: Codec = defaultCharsetCodec + implicit lazy val fallbackSystemCodec: Codec = defaultCharsetCodec } object Codec extends LowPriorityCodecImplicits { @@ -90,9 +85,9 @@ object Codec extends LowPriorityCodecImplicits { * the fact that you can influence anything at all via -Dfile.encoding * as an accident, with any anomalies considered "not a bug". 
*/ - def defaultCharsetCodec = apply(Charset.defaultCharset) - def fileEncodingCodec = apply(scala.util.Properties.encodingString) - def default = defaultCharsetCodec + def defaultCharsetCodec = apply(Charset.defaultCharset) + def fileEncodingCodec = apply(scala.util.Properties.encodingString) + def default = defaultCharsetCodec def apply(encoding: String): Codec = new Codec(Charset forName encoding) def apply(charSet: Charset): Codec = new Codec(charSet) @@ -130,7 +125,7 @@ object Codec extends LowPriorityCodecImplicits { bytes } - implicit def string2codec(s: String) = apply(s) - implicit def charset2codec(c: Charset) = apply(c) - implicit def decoder2codec(cd: CharsetDecoder) = apply(cd) + implicit def string2codec(s: String): Codec = apply(s) + implicit def charset2codec(c: Charset): Codec = apply(c) + implicit def decoder2codec(cd: CharsetDecoder): Codec = apply(cd) } -- cgit v1.2.3 From d3099c0d3ef363f4f1815409051da2edfec81f30 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 04:42:29 +0100 Subject: Eliminating allocations in typeDepth. --- src/reflect/scala/reflect/internal/Types.scala | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 282d7e18ac..599bc2e264 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -5021,19 +5021,19 @@ trait Types extends api.Types { self: SymbolTable => /** The maximum depth of type `tp` */ def typeDepth(tp: Type): Int = tp match { case TypeRef(pre, sym, args) => - typeDepth(pre) max typeDepth(args) + 1 + math.max(typeDepth(pre), typeDepth(args) + 1) case RefinedType(parents, decls) => - typeDepth(parents) max typeDepth(decls.toList.map(_.info)) + 1 + math.max(typeDepth(parents), symTypeDepth(decls.toList) + 1) case TypeBounds(lo, hi) => - typeDepth(lo) max typeDepth(hi) + math.max(typeDepth(lo), typeDepth(hi)) case MethodType(paramtypes, result) => typeDepth(result) case NullaryMethodType(result) => typeDepth(result) case PolyType(tparams, result) => - typeDepth(result) max typeDepth(tparams map (_.info)) + 1 + math.max(typeDepth(result), symTypeDepth(tparams) + 1) case ExistentialType(tparams, result) => - typeDepth(result) max typeDepth(tparams map (_.info)) + 1 + math.max(typeDepth(result), symTypeDepth(tparams) + 1) case _ => 1 } @@ -5045,13 +5045,14 @@ trait Types extends api.Types { self: SymbolTable => // for (tp <- tps) d = d max by(tp) //!!!OPT!!! // d def loop(tps: List[Type], acc: Int): Int = tps match { - case tp :: rest => loop(rest, acc max by(tp)) - case _ => acc + case tp :: rest => loop(rest, math.max(acc, by(tp))) + case _ => acc } loop(tps, 0) } - private def typeDepth(tps: List[Type]): Int = maxDepth(tps, typeDepth) + private def symTypeDepth(syms: List[Symbol]): Int = typeDepth(syms map (_.info)) + private def typeDepth(tps: List[Type]): Int = maxDepth(tps, typeDepth) private def baseTypeSeqDepth(tps: List[Type]): Int = maxDepth(tps, _.baseTypeSeqDepth) /** Is intersection of given types populated? That is, -- cgit v1.2.3 From 1697132ec8e0df21c98a1420d186c58e02af69ab Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 05:19:07 +0100 Subject: Eliminate allocations in Growable. 
--- src/library/scala/collection/generic/Growable.scala | 17 +++++++++++++++-- src/library/scala/collection/mutable/ListBuffer.scala | 15 --------------- 2 files changed, 15 insertions(+), 17 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala index cb75212e3d..52a0d32de1 100644 --- a/src/library/scala/collection/generic/Growable.scala +++ b/src/library/scala/collection/generic/Growable.scala @@ -6,10 +6,11 @@ ** |/ ** \* */ - package scala.collection package generic +import scala.annotation.tailrec + /** This trait forms part of collections that can be augmented * using a `+=` operator and that can be cleared of all elements using * a `clear` method. @@ -45,7 +46,19 @@ trait Growable[-A] extends Clearable { * @param xs the TraversableOnce producing the elements to $add. * @return the $coll itself. */ - def ++=(xs: TraversableOnce[A]): this.type = { xs.seq foreach += ; this } + def ++=(xs: TraversableOnce[A]): this.type = { + @tailrec def loop(xs: collection.LinearSeq[A]) { + if (xs.nonEmpty) { + this += xs.head + loop(xs.tail) + } + } + xs.seq match { + case xs: collection.LinearSeq[_] => loop(xs) + case xs => xs foreach += + } + this + } /** Clears the $coll's contents. After this operation, the * $coll is empty. diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index e059f31929..97d469bca2 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -6,12 +6,9 @@ ** |/ ** \* */ - - package scala.collection package mutable -import scala.annotation.tailrec import generic._ import immutable.{List, Nil, ::} import java.io._ @@ -179,20 +176,8 @@ final class ListBuffer[A] this } - private def ++=(elems: collection.LinearSeq[A]): this.type = { - @tailrec def loop(xs: collection.LinearSeq[A]) { - if (xs.nonEmpty) { - this += xs.head - loop(xs.tail) - } - } - loop(elems) - this - } - override def ++=(xs: TraversableOnce[A]): this.type = xs match { case x: AnyRef if x eq this => this ++= (this take size) - case xs: collection.LinearSeq[_] => this ++= xs case _ => super.++=(xs) } -- cgit v1.2.3 From bf253b8983ec3a2807d2137a9e29b732135eb2dc Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 05:25:16 +0100 Subject: Eliminate allocations in TypeMap. --- src/reflect/scala/reflect/api/Trees.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 9 +++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index cfa6315797..94226ae866 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -2921,7 +2921,7 @@ trait Trees { self: Universe => def transform(tree: Tree): Tree = itransform(this, tree) /** Transforms a list of trees. */ - def transformTrees(trees: List[Tree]): List[Tree] = trees mapConserve (transform(_)) + def transformTrees(trees: List[Tree]): List[Tree] = trees mapConserve transform /** Transforms a `Template`. 
*/ def transformTemplate(tree: Template): Template = diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 599bc2e264..b706ef8abe 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4082,8 +4082,13 @@ trait Types extends api.Types { self: SymbolTable => /** Called by mapOver to determine whether the original symbols can * be returned, or whether they must be cloned. Overridden in VariantTypeMap. */ - protected def noChangeToSymbols(origSyms: List[Symbol]) = - origSyms forall (sym => sym.info eq this(sym.info)) + protected def noChangeToSymbols(origSyms: List[Symbol]): Boolean = { + @tailrec def loop(syms: List[Symbol]): Boolean = syms match { + case Nil => true + case x :: xs => (x.info eq this(x.info)) && loop(xs) + } + loop(origSyms) + } /** Map this function over given scope */ def mapOver(scope: Scope): Scope = { -- cgit v1.2.3 From 2e3e43b5971ab93b04ab4677fe23a81bb3291470 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 06:01:17 +0100 Subject: Eliminate allocations in CPSAnnotationChecker. --- .../plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala index c147dc483d..cf5b1fa2c4 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala @@ -221,7 +221,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { } else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) { // add a marker annotation that will make tree.tpe behave as pt, subtyping wise // tree will look like having any possible annotation - + // note 1: we are only adding a plus marker if the method's result type is a cps type // (annotsExpected.nonEmpty == cpsParamAnnotation(pt).nonEmpty) // note 2: we are not adding the expected cps annotations, since they will be added @@ -234,7 +234,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { /** Returns an adapted type for a return expression if the method's result type (pt) is a CPS type. * Otherwise, it returns the `default` type (`typedReturn` passes `NothingClass.tpe`). - * + * * A return expression in a method that has a CPS result type is an error unless the return * is in tail position. Therefore, we are making sure that only the types of return expressions * are adapted which will either be removed, or lead to an error. @@ -396,8 +396,10 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { override def addAnnotations(tree: Tree, tpe: Type): Type = { import scala.util.control._ if (!cpsEnabled) { - if (Exception.failAsValue(classOf[MissingRequirementError])(false)(hasCpsParamTypes(tpe))) + val report = try hasCpsParamTypes(tpe) catch { case _: MissingRequirementError => false } + if (report) global.reporter.error(tree.pos, "this code must be compiled with the Scala continuations plugin enabled") + return tpe } -- cgit v1.2.3 From 9a6320b882495e93210b0e11dad02271306d83d2 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 06:08:41 +0100 Subject: Eliminate allocations in BaseTypeSeqs. 
--- src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index eba10e8ffb..18a4a36840 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -193,15 +193,23 @@ trait BaseTypeSeqs { i += 1 } var minTypes: List[Type] = List() + def alreadyInMinTypes(tp: Type): Boolean = { + @annotation.tailrec def loop(tps: List[Type]): Boolean = tps match { + case Nil => false + case x :: xs => (tp =:= x) || loop(xs) + } + loop(minTypes) + } + i = 0 while (i < nparents) { if (nextTypeSymbol(i) == minSym) { nextRawElem(i) match { case RefinedType(variants, decls) => for (tp <- variants) - if (!(minTypes exists (tp =:= _))) minTypes = tp :: minTypes + if (!alreadyInMinTypes(tp)) minTypes ::= tp case tp => - if (!(minTypes exists (tp =:= _))) minTypes = tp :: minTypes + if (!alreadyInMinTypes(tp)) minTypes ::= tp } index(i) = index(i) + 1 } -- cgit v1.2.3 From cdf6feb1aea366c33ac99e3e5f1e235f7ea0ae19 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 06:23:51 +0100 Subject: Eliminate allocations in uncurry and the backend. --- src/compiler/scala/tools/nsc/backend/icode/Members.scala | 9 +++++++-- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 3 ++- 2 files changed, 9 insertions(+), 3 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala index 12daa32186..248a505b54 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala @@ -46,8 +46,13 @@ trait Members { def touched = _touched def touched_=(b: Boolean): Unit = { - if (b) - blocks foreach (_.touched = true) + @annotation.tailrec def loop(xs: List[BasicBlock]) { + xs match { + case Nil => + case x :: xs => x.touched = true ; loop(xs) + } + } + if (b) loop(blocks.toList) _touched = b } diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 6e89f6387e..f4e40a216e 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -114,7 +114,8 @@ abstract class UnCurry extends InfoTransform def isByNameRef(tree: Tree) = ( tree.isTerm && !byNameArgs(tree) - && tree.hasSymbolWhich(isByName) + && (tree.symbol ne null) + && (isByName(tree.symbol)) ) /** Uncurry a type of a tree node. -- cgit v1.2.3 From 113405b935db20705b88df4fd3ff24273e4391bc Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 06:30:32 +0100 Subject: Eliminate allocations in Trees. --- src/reflect/scala/reflect/api/Trees.scala | 6 ++++-- src/reflect/scala/reflect/internal/Trees.scala | 7 +++++-- 2 files changed, 9 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index 94226ae866..05458cb311 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -2951,8 +2951,10 @@ trait Trees { self: Universe => if (exprOwner != currentOwner && stat.isTerm) atOwner(exprOwner)(transform(stat)) else transform(stat)) filter (EmptyTree != _) /** Transforms `Modifiers`. 
*/ - def transformModifiers(mods: Modifiers): Modifiers = - mods.mapAnnotations(transformTrees) + def transformModifiers(mods: Modifiers): Modifiers = { + if (mods.annotations.isEmpty) mods + else mods mapAnnotations transformTrees + } /** Transforms a tree with a given owner symbol. */ def atOwner[A](owner: Symbol)(trans: => A): A = { diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 9e737528d2..870c1ec5ed 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -926,8 +926,11 @@ trait Trees extends api.Trees { self: SymbolTable => def withPosition(flag: Long, position: Position) = copy() setPositions positions + (flag -> position) - override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers = - Modifiers(flags, privateWithin, f(annotations)) setPositions positions + override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers = { + val newAnns = f(annotations) + if (annotations == newAnns) this + else Modifiers(flags, privateWithin, newAnns) setPositions positions + } override def toString = "Modifiers(%s, %s, %s)".format(flagString, annotations mkString ", ", positions) } -- cgit v1.2.3 From 57c40c54d6119c42e256d6f7c4c7681a5257b266 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 11:04:58 +0100 Subject: Eliminate allocations in Specialize. --- src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 173ca1e628..116b6ab58f 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -10,6 +10,7 @@ import scala.tools.nsc.symtab.Flags import scala.collection.{ mutable, immutable } import scala.language.postfixOps import scala.language.existentials +import scala.annotation.tailrec /** Specialize code on types. * @@ -403,11 +404,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case _ => false }) def specializedTypeVars(tpes: List[Type]): immutable.Set[Symbol] = { - if (tpes.isEmpty) immutable.Set.empty else { - val buf = Set.newBuilder[Symbol] - tpes foreach (tp => buf ++= specializedTypeVars(tp)) - buf.result + @tailrec def loop(result: immutable.Set[Symbol], xs: List[Type]): immutable.Set[Symbol] = { + if (xs.isEmpty) result + else loop(result ++ specializedTypeVars(xs.head), xs.tail) } + loop(immutable.Set.empty, tpes) } def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = ( if (definitions.neverHasTypeParameters(sym)) immutable.Set.empty -- cgit v1.2.3 From eb491d2f1a857a25e381eb23275b78ccafd2981a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 11:05:30 +0100 Subject: Eliminate allocations in Trees and Symbols. 
--- src/reflect/scala/reflect/api/Trees.scala | 3 ++- src/reflect/scala/reflect/internal/Symbols.scala | 12 +++++++++++- 2 files changed, 13 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index 05458cb311..0b44d6a237 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -2921,7 +2921,8 @@ trait Trees { self: Universe => def transform(tree: Tree): Tree = itransform(this, tree) /** Transforms a list of trees. */ - def transformTrees(trees: List[Tree]): List[Tree] = trees mapConserve transform + def transformTrees(trees: List[Tree]): List[Tree] = + if (trees.isEmpty) Nil else trees mapConserve transform /** Transforms a `Template`. */ def transformTemplate(tree: Template): Template = diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index fd5c3909b8..3d43500ef1 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2048,7 +2048,17 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Returns all symbols overriden by this symbol. */ final def allOverriddenSymbols: List[Symbol] = ( if ((this eq NoSymbol) || !owner.isClass) Nil - else owner.ancestors map overriddenSymbol filter (_ != NoSymbol) + else { + def loop(xs: List[Symbol]): List[Symbol] = xs match { + case Nil => Nil + case x :: xs => + overriddenSymbol(x) match { + case NoSymbol => loop(xs) + case sym => sym :: loop(xs) + } + } + loop(owner.ancestors) + } ) /** Equivalent to allOverriddenSymbols.nonEmpty, but more efficient. */ -- cgit v1.2.3 From 6a288b632e0e78a96f1298be9b4e8231728183af Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Nov 2012 11:22:00 +0100 Subject: Eliminate allocations in Types. At this commit the statistics when compiling src/library are as follows. These counts are precise, collected by a modified Function1 which counts every instantiation of every implementing class. The net result is 27 million fewer allocations, over a 20% drop. 
// master (5b5635ee9d), total and top five by count: Total Function1 allocations: 128,805,865 scala.collection.immutable.$colon$colon 26781958 scala.collection.mutable.ListBuffer 15365174 scala.collection.TraversableLike$$anonfun$map$1 9127787 scala.collection.generic.Growable$$anonfun$$plus$plus$eq$1 4636154 scala.collection.mutable.StringBuilder 3531211 // After these commits, total and top five by count: Total Function1 allocations: 101,865,721 scala.collection.immutable.$colon$colon 26993704 scala.collection.mutable.ListBuffer 15319656 scala.collection.TraversableLike$$anonfun$map$1 7585019 scala.reflect.internal.Types$MethodType$$anonfun$paramTypes$1 2447307 scala.reflect.internal.Types$SubstSymMap 2436088 --- src/reflect/scala/reflect/internal/Types.scala | 76 ++++++++++++++------------ 1 file changed, 41 insertions(+), 35 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index b706ef8abe..c82904ae67 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -5023,42 +5023,9 @@ trait Types extends api.Types { self: SymbolTable => else if (bd <= 7) td max (bd - 2) else (td - 1) max (bd - 3) - /** The maximum depth of type `tp` */ - def typeDepth(tp: Type): Int = tp match { - case TypeRef(pre, sym, args) => - math.max(typeDepth(pre), typeDepth(args) + 1) - case RefinedType(parents, decls) => - math.max(typeDepth(parents), symTypeDepth(decls.toList) + 1) - case TypeBounds(lo, hi) => - math.max(typeDepth(lo), typeDepth(hi)) - case MethodType(paramtypes, result) => - typeDepth(result) - case NullaryMethodType(result) => - typeDepth(result) - case PolyType(tparams, result) => - math.max(typeDepth(result), symTypeDepth(tparams) + 1) - case ExistentialType(tparams, result) => - math.max(typeDepth(result), symTypeDepth(tparams) + 1) - case _ => - 1 - } - - private def maxDepth(tps: List[Type], by: Type => Int): Int = { - //OPT replaced with tailrecursive function to save on #closures - // was: - // var d = 0 - // for (tp <- tps) d = d max by(tp) //!!!OPT!!! - // d - def loop(tps: List[Type], acc: Int): Int = tps match { - case tp :: rest => loop(rest, math.max(acc, by(tp))) - case _ => acc - } - loop(tps, 0) - } - private def symTypeDepth(syms: List[Symbol]): Int = typeDepth(syms map (_.info)) - private def typeDepth(tps: List[Type]): Int = maxDepth(tps, typeDepth) - private def baseTypeSeqDepth(tps: List[Type]): Int = maxDepth(tps, _.baseTypeSeqDepth) + private def typeDepth(tps: List[Type]): Int = maxDepth(tps) + private def baseTypeSeqDepth(tps: List[Type]): Int = maxBaseTypeSeqDepth(tps) /** Is intersection of given types populated? 
That is, * for all types tp1, tp2 in intersection @@ -7006,6 +6973,45 @@ trait Types extends api.Types { self: SymbolTable => private[scala] val typeIsAny = (tp: Type) => tp.typeSymbolDirect eq AnyClass private[scala] val typeIsHigherKinded = (tp: Type) => tp.isHigherKinded + /** The maximum depth of type `tp` */ + def typeDepth(tp: Type): Int = tp match { + case TypeRef(pre, sym, args) => + math.max(typeDepth(pre), typeDepth(args) + 1) + case RefinedType(parents, decls) => + math.max(typeDepth(parents), symTypeDepth(decls.toList) + 1) + case TypeBounds(lo, hi) => + math.max(typeDepth(lo), typeDepth(hi)) + case MethodType(paramtypes, result) => + typeDepth(result) + case NullaryMethodType(result) => + typeDepth(result) + case PolyType(tparams, result) => + math.max(typeDepth(result), symTypeDepth(tparams) + 1) + case ExistentialType(tparams, result) => + math.max(typeDepth(result), symTypeDepth(tparams) + 1) + case _ => + 1 + } + //OPT replaced with tailrecursive function to save on #closures + // was: + // var d = 0 + // for (tp <- tps) d = d max by(tp) //!!!OPT!!! + // d + private[scala] def maxDepth(tps: List[Type]): Int = { + @tailrec def loop(tps: List[Type], acc: Int): Int = tps match { + case tp :: rest => loop(rest, math.max(acc, typeDepth(tp))) + case _ => acc + } + loop(tps, 0) + } + private[scala] def maxBaseTypeSeqDepth(tps: List[Type]): Int = { + @tailrec def loop(tps: List[Type], acc: Int): Int = tps match { + case tp :: rest => loop(rest, math.max(acc, tp.baseTypeSeqDepth)) + case _ => acc + } + loop(tps, 0) + } + @tailrec private def typesContain(tps: List[Type], sym: Symbol): Boolean = tps match { case tp :: rest => (tp contains sym) || typesContain(rest, sym) case _ => false -- cgit v1.2.3 From 7abb0c911a7c3d60057fbcab6fc3687322a67082 Mon Sep 17 00:00:00 2001 From: Miguel Garcia Date: Fri, 28 Dec 2012 15:21:28 +0100 Subject: fusion of loops in Range.foreach() and Range.validateRangeBoundaries() This commit allows closure elimination in more cases. The non-inlined case also benefits from saving a Range.validateRangeBoundaries() invocation. Before this commit, the closure argument to Range.foreach() escaped to Range.validateRangeBoundaries(). As a consequence, closure elimination required inlining both of them. Given that the current optimizer duplicates a closure body whenever that closure's apply() is invoked, the resulting code size taxed the JIT compiler. In particular when apply() delegates to a specialized version, or when a bridge apply() stands in the way. 
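A rough sketch of the structural change (heavily simplified: the real foreach also counts elements and special-cases ranges whose start and end are both Int.MinValue). Before, the closure escaped into the validation helper; after, validation is fused into the loop so the closure stays local:

    object LoopFusionSketch {
      // Before: `f` escapes into the helper, so eliminating the closure
      // requires inlining both foreach and the helper.
      def foreachEscaping(start: Int, end: Int, step: Int)(f: Int => Unit): Unit = {
        def validate(g: Int => Any): Boolean = g != null && step != 0  // stand-in check
        if (validate(f)) {
          var i = start
          while (i != end) { f(i); i += step }
        }
      }

      // After: the check is fused into the loop and `f` never leaves this
      // method, so inlining foreach alone is enough to eliminate the closure.
      def foreachFused(start: Int, end: Int, step: Int)(f: Int => Unit): Unit = {
        require(step != 0)  // stand-in for validateMaxLength()
        var i = start
        while (i != end) { f(i); i += step }
      }

      def main(args: Array[String]): Unit = {
        foreachFused(0, 3, 1)(println)  // prints 0, 1, 2
      }
    }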
--- src/library/scala/collection/immutable/Range.scala | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 02c10700b1..480c88ddcf 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -112,6 +112,7 @@ extends scala.collection.AbstractSeq[Int] fail() } + @deprecated("Range.foreach() is now self-contained, making this auxiliary method redundant.", "2.10.1") def validateRangeBoundaries(f: Int => Any): Boolean = { validateMaxLength() @@ -134,14 +135,19 @@ extends scala.collection.AbstractSeq[Int] } @inline final override def foreach[@specialized(Unit) U](f: Int => U) { - if (validateRangeBoundaries(f)) { - var i = start - val terminal = terminalElement - val step = this.step - while (i != terminal) { - f(i) - i += step - } + validateMaxLength() + val isCommonCase = (start != Int.MinValue || end != Int.MinValue) + var i = start + var count = 0 + val terminal = terminalElement + val step = this.step + while( + if(isCommonCase) { i != terminal } + else { count < numRangeElements } + ) { + f(i) + count += 1 + i += step } } -- cgit v1.2.3 From ed40f5cbdf35d09b02898e9c0950b9bd34c1f858 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 21 Dec 2012 15:06:10 -0800 Subject: Removed dead implementation. Another "attractive nuisance" burning off time until I realized it was commented out. --- .../scala/tools/nsc/typechecker/Implicits.scala | 82 ---------------------- 1 file changed, 82 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 8d869b669c..8d6e0c3b85 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1041,88 +1041,6 @@ trait Implicits { infoMap } - /** The parts of a type is the smallest set of types that contains - * - the type itself - * - the parts of its immediate components (prefix and argument) - * - the parts of its base types - * - for alias types and abstract types, we take instead the parts - * - of their upper bounds. - * @return For those parts that refer to classes with companion objects that - * can be accessed with unambiguous stable prefixes, the implicits infos - * which are members of these companion objects. - - private def companionImplicits(tp: Type): Infoss = { - val partMap = new LinkedHashMap[Symbol, Type] - val seen = mutable.HashSet[Type]() // cycle detection - - /** Enter all parts of `tp` into `parts` set. 
- * This method is performance critical: about 2-4% of all type checking is spent here - */ - def getParts(tp: Type) { - if (seen(tp)) - return - seen += tp - tp match { - case TypeRef(pre, sym, args) => - if (sym.isClass) { - if (!((sym.name == tpnme.REFINE_CLASS_NAME) || - (sym.name startsWith tpnme.ANON_CLASS_NAME) || - (sym.name == tpnme.ROOT))) - partMap get sym match { - case Some(pre1) => - if (!(pre =:= pre1)) partMap(sym) = NoType // ambiguous prefix - ignore implicit members - case None => - if (pre.isStable) partMap(sym) = pre - val bts = tp.baseTypeSeq - var i = 1 - while (i < bts.length) { - getParts(bts(i)) - i += 1 - } - getParts(pre) - args foreach getParts - } - } else if (sym.isAliasType) { - getParts(tp.normalize) - } else if (sym.isAbstractType) { - getParts(tp.bounds.hi) - } - case ThisType(_) => - getParts(tp.widen) - case _: SingletonType => - getParts(tp.widen) - case RefinedType(ps, _) => - for (p <- ps) getParts(p) - case AnnotatedType(_, t, _) => - getParts(t) - case ExistentialType(_, t) => - getParts(t) - case PolyType(_, t) => - getParts(t) - case _ => - } - } - - getParts(tp) - - val buf = new ListBuffer[Infos] - for ((clazz, pre) <- partMap) { - if (pre != NoType) { - val companion = clazz.companionModule - companion.moduleClass match { - case mc: ModuleClassSymbol => - buf += (mc.implicitMembers map (im => - new ImplicitInfo(im.name, singleType(pre, companion), im))) - case _ => - } - } - } - //println("companion implicits of "+tp+" = "+buf.toList) // DEBUG - buf.toList - } - -*/ - /** The implicits made available by type `pt`. * These are all implicits found in companion objects of classes C * such that some part of `tp` has C as one of its superclasses. -- cgit v1.2.3 From 6c3c0e391655457e917a8c85d2d74eb9297e0571 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Sun, 16 Dec 2012 17:01:00 +0100 Subject: fixes the typedIdent problem for good Previous attachment retaining fix was only working for Idents which get turned into Selects. Now it works for all transformations applied to Idents (e.g. when an ident refers to something within a package obj). --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 6 +++--- test/files/pos/attachments-typed-another-ident.check | 0 test/files/pos/attachments-typed-another-ident.flags | 1 + .../pos/attachments-typed-another-ident/Impls_1.scala | 17 +++++++++++++++++ .../attachments-typed-another-ident/Macros_Test_2.scala | 5 +++++ 5 files changed, 26 insertions(+), 3 deletions(-) create mode 100644 test/files/pos/attachments-typed-another-ident.check create mode 100644 test/files/pos/attachments-typed-another-ident.flags create mode 100644 test/files/pos/attachments-typed-another-ident/Impls_1.scala create mode 100644 test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 4fd65c18d1..f68c7dbc6c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4753,7 +4753,7 @@ trait Typers extends Modes with Adaptations with Tags { case sym => typed1(tree setSymbol sym, mode, pt) } case LookupSucceeded(qual, sym) => - // this -> Foo.this + (// this -> Foo.this if (sym.isThisSym) typed1(This(sym.owner) setPos tree.pos, mode, pt) // Inferring classOf type parameter from expected type. 
Otherwise an @@ -4762,12 +4762,12 @@ trait Typers extends Modes with Adaptations with Tags { typedClassOf(tree, TypeTree(pt.typeArgs.head)) else { val pre1 = if (sym.owner.isPackageClass) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe - val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name) setAttachments tree.attachments) + val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name)) val (tree2, pre2) = makeAccessible(tree1, sym, pre1, qual) // SI-5967 Important to replace param type A* with Seq[A] when seen from from a reference, to avoid // inference errors in pattern matching. stabilize(tree2, pre2, mode, pt) modifyType dropIllegalStarTypes - } + }) setAttachments tree.attachments } } diff --git a/test/files/pos/attachments-typed-another-ident.check b/test/files/pos/attachments-typed-another-ident.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/files/pos/attachments-typed-another-ident.flags b/test/files/pos/attachments-typed-another-ident.flags new file mode 100644 index 0000000000..cd66464f2f --- /dev/null +++ b/test/files/pos/attachments-typed-another-ident.flags @@ -0,0 +1 @@ +-language:experimental.macros \ No newline at end of file diff --git a/test/files/pos/attachments-typed-another-ident/Impls_1.scala b/test/files/pos/attachments-typed-another-ident/Impls_1.scala new file mode 100644 index 0000000000..957bafc6ae --- /dev/null +++ b/test/files/pos/attachments-typed-another-ident/Impls_1.scala @@ -0,0 +1,17 @@ +import scala.reflect.macros.Context +import language.experimental.macros + +object MyAttachment + +object Macros { + def impl(c: Context) = { + import c.universe._ + val ident = Ident(newTermName("bar")) updateAttachment MyAttachment + assert(ident.attachments.get[MyAttachment.type].isDefined, ident.attachments) + val typed = c.typeCheck(ident) + assert(typed.attachments.get[MyAttachment.type].isDefined, typed.attachments) + c.Expr[Int](typed) + } + + def foo = macro impl +} diff --git a/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala b/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala new file mode 100644 index 0000000000..022639bfe9 --- /dev/null +++ b/test/files/pos/attachments-typed-another-ident/Macros_Test_2.scala @@ -0,0 +1,5 @@ +object Test extends App { + def bar = 2 + Macros.foo +} + -- cgit v1.2.3 From 394cc426c1ff1da53146679b4e2995ece52a133e Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 21 Dec 2012 12:39:02 -0800 Subject: Fix and simplify typedTypeConstructor. Investigating the useful output of devWarning (-Xdev people, it's good for you) led back to this comment: "normalize to get rid of type aliases" You may know that this is not all the normalizing does. Normalizing also turns TypeRefs with unapplied arguments (type constructors) into PolyTypes. That means that when typedParentType would call typedTypeConstructor it would find its parent had morphed into a PolyType. Not that it noticed; it would blithely continue and unwittingly discard the type arguments by way of appliedType (which smoothly logged the incident, thank you appliedType.) The simplification of typedTypeConstructor: There was a whole complicated special treatment of AnyRef here which appears to have become unnecessary. Removed special treatment and lit a candle for regularity. Updated lots of tests regarding newly not-so-special AnyRef. 
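The same distinction is visible through runtime reflection (a sketch, assuming a 2.10-era scala-reflect where `normalize` and `typeConstructor` are available on the public Type API): expanding aliases is the intended effect, but normalize additionally eta-expands an unapplied type constructor into a PolyType, which is exactly what typedParentType tripped over.

    object NormalizeSketch extends App {
      import scala.reflect.runtime.universe._

      type StrList = List[String]

      val aliased   = typeOf[StrList]                    // a reference to the alias
      val unapplied = typeOf[List[Int]].typeConstructor  // List with no arguments applied

      println(aliased.normalize)    // prints the expanded alias, List[String]
      println(unapplied.normalize)  // prints an eta-expanded PolyType, not a plain TypeRef
    }

Switching typedTypeConstructor to dealias keeps the alias expansion while leaving unapplied constructors (and hence parent types) alone.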
--- .../scala/tools/nsc/typechecker/Typers.scala | 31 ++---- test/files/jvm/annotations.check | 3 - test/files/neg/override-object-no.check | 4 +- test/files/neg/t2078.check | 2 +- test/files/neg/t2336.check | 2 +- test/files/neg/t3691.check | 2 +- test/files/neg/t4877.check | 6 +- test/files/neg/t5060.check | 4 +- test/files/neg/t5063.check | 2 +- test/files/neg/t6436.check | 4 +- test/files/neg/t6436b.check | 4 +- test/files/neg/t963.check | 2 +- test/files/run/existentials-in-compiler.check | 104 ++++++++++----------- test/files/run/existentials3-new.check | 4 +- test/files/run/macro-declared-in-trait.check | 2 +- test/files/run/reflection-equality.check | 2 +- test/files/run/repl-colon-type.check | 8 +- test/files/run/repl-parens.check | 2 +- test/files/run/t4172.check | 2 +- test/files/run/t5256a.check | 2 +- test/files/run/t5256b.check | 2 +- test/files/run/t5256d.check | 2 +- test/files/run/t5256e.check | 2 +- test/files/run/t5256f.check | 4 +- test/files/scalap/abstractClass/result.test | 2 +- test/files/scalap/abstractMethod/result.test | 2 +- test/files/scalap/cbnParam/result.test | 2 +- test/files/scalap/classPrivate/result.test | 4 +- test/files/scalap/classWithExistential/result.test | 2 +- .../scalap/classWithSelfAnnotation/result.test | 2 +- test/files/scalap/covariantParam/result.test | 2 +- test/files/scalap/defaultParameter/result.test | 4 +- test/files/scalap/implicitParam/result.test | 2 +- test/files/scalap/packageObject/result.test | 2 +- test/files/scalap/paramClauses/result.test | 2 +- test/files/scalap/paramNames/result.test | 2 +- test/files/scalap/sequenceParam/result.test | 2 +- test/files/scalap/simpleClass/result.test | 2 +- test/files/scalap/traitObject/result.test | 4 +- test/files/scalap/typeAnnotations/result.test | 2 +- test/files/scalap/valAndVar/result.test | 2 +- test/files/scalap/wildcardType/result.test | 2 +- 42 files changed, 115 insertions(+), 129 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a3688f249d..ad2ec7ff6c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5307,29 +5307,18 @@ trait Typers extends Modes with Adaptations with Tags { def typedTypeConstructor(tree: Tree, mode: Int): Tree = { val result = typed(tree, forTypeMode(mode) | FUNmode, WildcardType) - val restpe = result.tpe.normalize // normalize to get rid of type aliases for the following check (#1241) - if (!phase.erasedTypes && restpe.isInstanceOf[TypeRef] && !restpe.prefix.isStable && !context.unit.isJava) { - // The isJava exception if OK only because the only type constructors scalac gets - // to see are those in the signatures. These do not need a unique object as a prefix. - // The situation is different for new's and super's, but scalac does not look deep - // enough to see those. 
See #3938 - ConstructorPrefixError(tree, restpe) - } else { - //@M fix for #2208 - // if there are no type arguments, normalization does not bypass any checks, so perform it to get rid of AnyRef - if (result.tpe.typeArgs.isEmpty) { - // minimal check: if(result.tpe.typeSymbolDirect eq AnyRefClass) { - // must expand the fake AnyRef type alias, because bootstrapping (init in Definitions) is not - // designed to deal with the cycles in the scala package (ScalaObject extends - // AnyRef, but the AnyRef type alias is entered after the scala package is - // loaded and completed, so that ScalaObject is unpickled while AnyRef is not - // yet defined ) - // !!! TODO - revisit now that ScalaObject is gone. - result setType(restpe) - } else { // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208 + // get rid of type aliases for the following check (#1241) + result.tpe.dealias match { + case restpe @ TypeRef(pre, _, _) if !phase.erasedTypes && !pre.isStable && !context.unit.isJava => + // The isJava exception if OK only because the only type constructors scalac gets + // to see are those in the signatures. These do not need a unique object as a prefix. + // The situation is different for new's and super's, but scalac does not look deep + // enough to see those. See #3938 + ConstructorPrefixError(tree, restpe) + case _ => + // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208 // during uncurry (after refchecks), all types are normalized result - } } } diff --git a/test/files/jvm/annotations.check b/test/files/jvm/annotations.check index e307f8930d..8a4d58d56c 100644 --- a/test/files/jvm/annotations.check +++ b/test/files/jvm/annotations.check @@ -28,9 +28,6 @@ public Test4$Foo8(int) @test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) private int Test4$Foo9.z -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) -public int Test4$Foo9.getZ() - @test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://apple.com) public int Test4$Foo9.x() diff --git a/test/files/neg/override-object-no.check b/test/files/neg/override-object-no.check index 52bad2b937..9cfda80fc3 100644 --- a/test/files/neg/override-object-no.check +++ b/test/files/neg/override-object-no.check @@ -6,8 +6,8 @@ an overriding object must conform to the overridden object's class bound; ^ override-object-no.scala:21: error: overriding object Bar in trait Quux1 with object Bar in trait Quux2: an overriding object must conform to the overridden object's class bound; - found : Object{def g: String} - required: Object{def g: Int} + found : AnyRef{def g: String} + required: AnyRef{def g: Int} trait Quux2 extends Quux1 { override object Bar { def g = "abc" } } // err ^ override-object-no.scala:25: error: overriding object Bar in trait Quux3; diff --git a/test/files/neg/t2078.check b/test/files/neg/t2078.check index 3cdaa7d27a..00bb323a0b 100644 --- a/test/files/neg/t2078.check +++ b/test/files/neg/t2078.check @@ -1,4 +1,4 @@ -t2078.scala:2: error: contravariant type S occurs in covariant position in type => Object{val x: S} of value f +t2078.scala:2: error: contravariant type S occurs in covariant position in type => AnyRef{val x: S} of value f val f = new { val x = y } ^ one error found diff --git a/test/files/neg/t2336.check b/test/files/neg/t2336.check index 983717469c..28acd4d179 100644 --- a/test/files/neg/t2336.check +++ b/test/files/neg/t2336.check @@ -1,4 +1,4 @@ -t2336.scala:6: error: 
type Foo[Int] is not a stable prefix +t2336.scala:6: error: Foo[Int] is not a legal prefix for a constructor new Foo[Int]#Bar(0) ^ one error found diff --git a/test/files/neg/t3691.check b/test/files/neg/t3691.check index bdf6c268b2..6a7e13049a 100644 --- a/test/files/neg/t3691.check +++ b/test/files/neg/t3691.check @@ -9,7 +9,7 @@ t3691.scala:5: error: type mismatch; val c = (new A[String]{}): { type A } // not ok ^ t3691.scala:7: error: type mismatch; - found : Object{type A = String} + found : AnyRef{type A = String} required: AnyRef{type A[X]} val x = (new { type A = String }): { type A[X] } // not ok ^ diff --git a/test/files/neg/t4877.check b/test/files/neg/t4877.check index a4b1e6a50d..5a2413ca8b 100644 --- a/test/files/neg/t4877.check +++ b/test/files/neg/t4877.check @@ -1,10 +1,10 @@ t4877.scala:4: error: type mismatch; - found : Object{def bar: Int} + found : AnyRef{def bar: Int} required: AnyRef{def bar: String} def foo: AnyRef { def bar: String } = new AnyRef { def bar = 42 } ^ t4877.scala:6: error: type mismatch; - found : Object{def bar(x: Int): String} + found : AnyRef{def bar(x: Int): String} required: AnyRef{def bar(x: Int): Int} def foo3: AnyRef { def bar(x: Int): Int } = new AnyRef { def bar(x: Int) = "abc" } ^ @@ -14,7 +14,7 @@ t4877.scala:7: error: type mismatch; def foo4: C { def bar(x: Int): Int ; def quux(x: Int): Int } = new C { def bar(x: Int) = 5 } ^ t4877.scala:17: error: type mismatch; - found : Object{type Mom = String; def bar(x: Int): Int; def bippy(): List[Int]} + found : AnyRef{type Mom = String; def bar(x: Int): Int; def bippy(): List[Int]} required: B.this.Bippy (which expands to) AnyRef{type Mom; def bar(x: Int): this.Mom; def bippy(): List[this.Mom]} val x: Bippy = new AnyRef { diff --git a/test/files/neg/t5060.check b/test/files/neg/t5060.check index e71f30ccdb..09b2d9a4b1 100644 --- a/test/files/neg/t5060.check +++ b/test/files/neg/t5060.check @@ -1,7 +1,7 @@ -t5060.scala:2: error: covariant type T occurs in contravariant position in type => Object{def contains(x: T): Unit} of value foo0 +t5060.scala:2: error: covariant type T occurs in contravariant position in type => AnyRef{def contains(x: T): Unit} of value foo0 val foo0 = { ^ -t5060.scala:6: error: covariant type T occurs in contravariant position in type => Object{def contains(x: T): Unit} of method foo1 +t5060.scala:6: error: covariant type T occurs in contravariant position in type => AnyRef{def contains(x: T): Unit} of method foo1 def foo1 = { ^ two errors found diff --git a/test/files/neg/t5063.check b/test/files/neg/t5063.check index 84690d0a1d..c6e553c1b5 100644 --- a/test/files/neg/t5063.check +++ b/test/files/neg/t5063.check @@ -1,4 +1,4 @@ -t5063.scala:2: error: value + is not a member of Object +t5063.scala:2: error: value + is not a member of AnyRef super.+("") ^ one error found diff --git a/test/files/neg/t6436.check b/test/files/neg/t6436.check index ecb28f9100..5cee6fb558 100644 --- a/test/files/neg/t6436.check +++ b/test/files/neg/t6436.check @@ -2,8 +2,8 @@ t6436.scala:8: error: type mismatch; found : StringContext required: ?{def q: ?} Note that implicit conversions are not applicable because they are ambiguous: - both method foo1 in object quasiquotes of type (ctx: StringContext)Object{def q: Nothing} - and method foo2 in object quasiquotes of type (ctx: StringContext)Object{def q: Nothing} + both method foo1 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing} + and method foo2 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing} are 
possible conversion functions from StringContext to ?{def q: ?} println(q"a") ^ diff --git a/test/files/neg/t6436b.check b/test/files/neg/t6436b.check index b3c2d73739..21ab972b79 100644 --- a/test/files/neg/t6436b.check +++ b/test/files/neg/t6436b.check @@ -2,8 +2,8 @@ t6436b.scala:8: error: type mismatch; found : StringContext required: ?{def q: ?} Note that implicit conversions are not applicable because they are ambiguous: - both method foo1 in object quasiquotes of type (ctx: StringContext)Object{def q: Nothing} - and method foo2 in object quasiquotes of type (ctx: StringContext)Object{def q: Nothing} + both method foo1 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing} + and method foo2 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing} are possible conversion functions from StringContext to ?{def q: ?} println(StringContext("a").q()) ^ diff --git a/test/files/neg/t963.check b/test/files/neg/t963.check index 1f2d0687b3..4dc202c7bd 100644 --- a/test/files/neg/t963.check +++ b/test/files/neg/t963.check @@ -5,7 +5,7 @@ t963.scala:17: error: stable identifier required, but Test.this.y4.x found. val w4 : y4.x.type = y4.x ^ t963.scala:10: error: type mismatch; - found : Object{def x: Integer} + found : AnyRef{def x: Integer} required: AnyRef{val x: Integer} val y2 : { val x : java.lang.Integer } = new { def x = new java.lang.Integer(r.nextInt) } ^ diff --git a/test/files/run/existentials-in-compiler.check b/test/files/run/existentials-in-compiler.check index 4df4b0ca96..0d7a9298b4 100644 --- a/test/files/run/existentials-in-compiler.check +++ b/test/files/run/existentials-in-compiler.check @@ -1,156 +1,156 @@ -abstract trait Bippy[A <: AnyRef, B] extends Object +abstract trait Bippy[A <: AnyRef, B] extends AnyRef extest.Bippy[_ <: AnyRef, _] -abstract trait BippyBud[A <: AnyRef, B, C <: List[A]] extends Object +abstract trait BippyBud[A <: AnyRef, B, C <: List[A]] extends AnyRef extest.BippyBud[A,B,C] forSome { A <: AnyRef; B; C <: List[A] } -abstract trait BippyLike[A <: AnyRef, B <: List[A], This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B]] extends Object +abstract trait BippyLike[A <: AnyRef, B <: List[A], This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B]] extends AnyRef extest.BippyLike[A,B,This] forSome { A <: AnyRef; B <: List[A]; This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B] } -abstract trait Contra[-A >: AnyRef, -B] extends Object +abstract trait Contra[-A >: AnyRef, -B] extends AnyRef extest.Contra[_ >: AnyRef, _] -abstract trait ContraLike[-A >: AnyRef, -B >: List[A]] extends Object +abstract trait ContraLike[-A >: AnyRef, -B >: List[A]] extends AnyRef extest.ContraLike[A,B] forSome { -A >: AnyRef; -B >: List[A] } -abstract trait Cov01[+A <: AnyRef, +B] extends Object +abstract trait Cov01[+A <: AnyRef, +B] extends AnyRef extest.Cov01[_ <: AnyRef, _] -abstract trait Cov02[+A <: AnyRef, B] extends Object +abstract trait Cov02[+A <: AnyRef, B] extends AnyRef extest.Cov02[_ <: AnyRef, _] -abstract trait Cov03[+A <: AnyRef, -B] extends Object +abstract trait Cov03[+A <: AnyRef, -B] extends AnyRef extest.Cov03[_ <: AnyRef, _] -abstract trait Cov04[A <: AnyRef, +B] extends Object +abstract trait Cov04[A <: AnyRef, +B] extends AnyRef extest.Cov04[_ <: AnyRef, _] -abstract trait Cov05[A <: AnyRef, B] extends Object +abstract trait Cov05[A <: AnyRef, B] extends AnyRef extest.Cov05[_ <: AnyRef, _] -abstract trait Cov06[A <: AnyRef, -B] extends Object +abstract trait Cov06[A <: AnyRef, -B] extends AnyRef extest.Cov06[_ 
<: AnyRef, _] -abstract trait Cov07[-A <: AnyRef, +B] extends Object +abstract trait Cov07[-A <: AnyRef, +B] extends AnyRef extest.Cov07[_ <: AnyRef, _] -abstract trait Cov08[-A <: AnyRef, B] extends Object +abstract trait Cov08[-A <: AnyRef, B] extends AnyRef extest.Cov08[_ <: AnyRef, _] -abstract trait Cov09[-A <: AnyRef, -B] extends Object +abstract trait Cov09[-A <: AnyRef, -B] extends AnyRef extest.Cov09[_ <: AnyRef, _] -abstract trait Cov11[+A <: AnyRef, +B <: List[_]] extends Object +abstract trait Cov11[+A <: AnyRef, +B <: List[_]] extends AnyRef extest.Cov11[_ <: AnyRef, _ <: List[_]] -abstract trait Cov12[+A <: AnyRef, B <: List[_]] extends Object +abstract trait Cov12[+A <: AnyRef, B <: List[_]] extends AnyRef extest.Cov12[_ <: AnyRef, _ <: List[_]] -abstract trait Cov13[+A <: AnyRef, -B <: List[_]] extends Object +abstract trait Cov13[+A <: AnyRef, -B <: List[_]] extends AnyRef extest.Cov13[_ <: AnyRef, _ <: List[_]] -abstract trait Cov14[A <: AnyRef, +B <: List[_]] extends Object +abstract trait Cov14[A <: AnyRef, +B <: List[_]] extends AnyRef extest.Cov14[_ <: AnyRef, _ <: List[_]] -abstract trait Cov15[A <: AnyRef, B <: List[_]] extends Object +abstract trait Cov15[A <: AnyRef, B <: List[_]] extends AnyRef extest.Cov15[_ <: AnyRef, _ <: List[_]] -abstract trait Cov16[A <: AnyRef, -B <: List[_]] extends Object +abstract trait Cov16[A <: AnyRef, -B <: List[_]] extends AnyRef extest.Cov16[_ <: AnyRef, _ <: List[_]] -abstract trait Cov17[-A <: AnyRef, +B <: List[_]] extends Object +abstract trait Cov17[-A <: AnyRef, +B <: List[_]] extends AnyRef extest.Cov17[_ <: AnyRef, _ <: List[_]] -abstract trait Cov18[-A <: AnyRef, B <: List[_]] extends Object +abstract trait Cov18[-A <: AnyRef, B <: List[_]] extends AnyRef extest.Cov18[_ <: AnyRef, _ <: List[_]] -abstract trait Cov19[-A <: AnyRef, -B <: List[_]] extends Object +abstract trait Cov19[-A <: AnyRef, -B <: List[_]] extends AnyRef extest.Cov19[_ <: AnyRef, _ <: List[_]] -abstract trait Cov21[+A, +B] extends Object +abstract trait Cov21[+A, +B] extends AnyRef extest.Cov21[_, _] -abstract trait Cov22[+A, B] extends Object +abstract trait Cov22[+A, B] extends AnyRef extest.Cov22[_, _] -abstract trait Cov23[+A, -B] extends Object +abstract trait Cov23[+A, -B] extends AnyRef extest.Cov23[_, _] -abstract trait Cov24[A, +B] extends Object +abstract trait Cov24[A, +B] extends AnyRef extest.Cov24[_, _] -abstract trait Cov25[A, B] extends Object +abstract trait Cov25[A, B] extends AnyRef extest.Cov25[_, _] -abstract trait Cov26[A, -B] extends Object +abstract trait Cov26[A, -B] extends AnyRef extest.Cov26[_, _] -abstract trait Cov27[-A, +B] extends Object +abstract trait Cov27[-A, +B] extends AnyRef extest.Cov27[_, _] -abstract trait Cov28[-A, B] extends Object +abstract trait Cov28[-A, B] extends AnyRef extest.Cov28[_, _] -abstract trait Cov29[-A, -B] extends Object +abstract trait Cov29[-A, -B] extends AnyRef extest.Cov29[_, _] -abstract trait Cov31[+A, +B, C <: (A, B)] extends Object +abstract trait Cov31[+A, +B, C <: (A, B)] extends AnyRef extest.Cov31[A,B,C] forSome { +A; +B; C <: (A, B) } -abstract trait Cov32[+A, B, C <: (A, B)] extends Object +abstract trait Cov32[+A, B, C <: (A, B)] extends AnyRef extest.Cov32[A,B,C] forSome { +A; B; C <: (A, B) } -abstract trait Cov33[+A, -B, C <: Tuple2[A, _]] extends Object +abstract trait Cov33[+A, -B, C <: Tuple2[A, _]] extends AnyRef extest.Cov33[A,B,C] forSome { +A; -B; C <: Tuple2[A, _] } -abstract trait Cov34[A, +B, C <: (A, B)] extends Object +abstract trait Cov34[A, +B, C <: (A, B)] 
extends AnyRef extest.Cov34[A,B,C] forSome { A; +B; C <: (A, B) } -abstract trait Cov35[A, B, C <: (A, B)] extends Object +abstract trait Cov35[A, B, C <: (A, B)] extends AnyRef extest.Cov35[A,B,C] forSome { A; B; C <: (A, B) } -abstract trait Cov36[A, -B, C <: Tuple2[A, _]] extends Object +abstract trait Cov36[A, -B, C <: Tuple2[A, _]] extends AnyRef extest.Cov36[A,B,C] forSome { A; -B; C <: Tuple2[A, _] } -abstract trait Cov37[-A, +B, C <: Tuple2[_, B]] extends Object +abstract trait Cov37[-A, +B, C <: Tuple2[_, B]] extends AnyRef extest.Cov37[A,B,C] forSome { -A; +B; C <: Tuple2[_, B] } -abstract trait Cov38[-A, B, C <: Tuple2[_, B]] extends Object +abstract trait Cov38[-A, B, C <: Tuple2[_, B]] extends AnyRef extest.Cov38[A,B,C] forSome { -A; B; C <: Tuple2[_, B] } -abstract trait Cov39[-A, -B, C <: Tuple2[_, _]] extends Object +abstract trait Cov39[-A, -B, C <: Tuple2[_, _]] extends AnyRef extest.Cov39[_, _, _ <: Tuple2[_, _]] -abstract trait Cov41[+A >: Null, +B] extends Object +abstract trait Cov41[+A >: Null, +B] extends AnyRef extest.Cov41[_ >: Null, _] -abstract trait Cov42[+A >: Null, B] extends Object +abstract trait Cov42[+A >: Null, B] extends AnyRef extest.Cov42[_ >: Null, _] -abstract trait Cov43[+A >: Null, -B] extends Object +abstract trait Cov43[+A >: Null, -B] extends AnyRef extest.Cov43[_ >: Null, _] -abstract trait Cov44[A >: Null, +B] extends Object +abstract trait Cov44[A >: Null, +B] extends AnyRef extest.Cov44[_ >: Null, _] -abstract trait Cov45[A >: Null, B] extends Object +abstract trait Cov45[A >: Null, B] extends AnyRef extest.Cov45[_ >: Null, _] -abstract trait Cov46[A >: Null, -B] extends Object +abstract trait Cov46[A >: Null, -B] extends AnyRef extest.Cov46[_ >: Null, _] -abstract trait Cov47[-A >: Null, +B] extends Object +abstract trait Cov47[-A >: Null, +B] extends AnyRef extest.Cov47[_ >: Null, _] -abstract trait Cov48[-A >: Null, B] extends Object +abstract trait Cov48[-A >: Null, B] extends AnyRef extest.Cov48[_ >: Null, _] -abstract trait Cov49[-A >: Null, -B] extends Object +abstract trait Cov49[-A >: Null, -B] extends AnyRef extest.Cov49[_ >: Null, _] -abstract trait Covariant[+A <: AnyRef, +B] extends Object +abstract trait Covariant[+A <: AnyRef, +B] extends AnyRef extest.Covariant[_ <: AnyRef, _] -abstract trait CovariantLike[+A <: AnyRef, +B <: List[A], +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B]] extends Object +abstract trait CovariantLike[+A <: AnyRef, +B <: List[A], +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B]] extends AnyRef extest.CovariantLike[A,B,This] forSome { +A <: AnyRef; +B <: List[A]; +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B] } diff --git a/test/files/run/existentials3-new.check b/test/files/run/existentials3-new.check index 00614b19db..8f7dd701ac 100644 --- a/test/files/run/existentials3-new.check +++ b/test/files/run/existentials3-new.check @@ -7,7 +7,7 @@ Test.ToS, t=RefinedType, s=f5 () => Test.ToS, t=TypeRef, s=trait Function0 $anon, t=TypeRef, s=type $anon $anon, t=TypeRef, s=type $anon -List[java.lang.Object{type T1}#T1], t=TypeRef, s=class List +List[AnyRef{type T1}#T1], t=TypeRef, s=class List List[Seq[Int]], t=TypeRef, s=class List List[Seq[U forSome { type U <: Int }]], t=TypeRef, s=class List Bar.type, t=TypeRef, s=type Bar.type @@ -19,6 +19,6 @@ Test.ToS, t=RefinedType, s=g5 () => Test.ToS, t=TypeRef, s=trait Function0 $anon, t=TypeRef, s=type $anon $anon, t=TypeRef, s=type $anon -List[java.lang.Object{type T1}#T1], t=TypeRef, s=class List 
+List[AnyRef{type T1}#T1], t=TypeRef, s=class List List[Seq[Int]], t=TypeRef, s=class List List[Seq[U forSome { type U <: Int }]], t=TypeRef, s=class List diff --git a/test/files/run/macro-declared-in-trait.check b/test/files/run/macro-declared-in-trait.check index 104ff1e99b..0d70ac74f3 100644 --- a/test/files/run/macro-declared-in-trait.check +++ b/test/files/run/macro-declared-in-trait.check @@ -1,5 +1,5 @@ prefix = Expr[Nothing]({ - final class $anon extends Object with Base { + final class $anon extends AnyRef with Base { def (): anonymous class $anon = { $anon.super.(); () diff --git a/test/files/run/reflection-equality.check b/test/files/run/reflection-equality.check index 17c1f6dd70..65b525731f 100644 --- a/test/files/run/reflection-equality.check +++ b/test/files/run/reflection-equality.check @@ -24,7 +24,7 @@ cs: reflect.runtime.universe.ClassSymbol = class X scala> val ts: Type = cs.typeSignature ts: reflect.runtime.universe.Type = -java.lang.Object { +scala.AnyRef { def (): X def methodIntIntInt(x: scala.Int,y: scala.Int): scala.Int } diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check index 7716221f54..4cd0e1d588 100644 --- a/test/files/run/repl-colon-type.check +++ b/test/files/run/repl-colon-type.check @@ -75,10 +75,10 @@ scala> :type -v List(1,2,3) filter _ // Internal Type structure TypeRef( - TypeSymbol(abstract trait Function1[-T1, +R] extends Object) + TypeSymbol(abstract trait Function1[-T1, +R] extends AnyRef) args = List( TypeRef( - TypeSymbol(abstract trait Function1[-T1, +R] extends Object) + TypeSymbol(abstract trait Function1[-T1, +R] extends AnyRef) args = List( TypeRef(TypeSymbol(final abstract class Int extends AnyVal)) TypeRef( @@ -145,7 +145,7 @@ Int => Iterator[List[Nothing]] // Internal Type structure TypeRef( - TypeSymbol(abstract trait Function1[-T1, +R] extends Object) + TypeSymbol(abstract trait Function1[-T1, +R] extends AnyRef) args = List( TypeRef(TypeSymbol(final abstract class Int extends AnyVal)) TypeRef( @@ -178,7 +178,7 @@ PolyType( typeParams = List(TypeParam(T <: AnyVal)) resultType = NullaryMethodType( TypeRef( - TypeSymbol(abstract trait Function1[-T1, +R] extends Object) + TypeSymbol(abstract trait Function1[-T1, +R] extends AnyRef) args = List( TypeRef(TypeSymbol(final abstract class Int extends AnyVal)) TypeRef( diff --git a/test/files/run/repl-parens.check b/test/files/run/repl-parens.check index 4b7ce6b059..15f4b4524a 100644 --- a/test/files/run/repl-parens.check +++ b/test/files/run/repl-parens.check @@ -66,7 +66,7 @@ scala> 55 ; () => 5 res13: () => Int = scala> () => { class X ; new X } -res14: () => Object = +res14: () => AnyRef = scala> diff --git a/test/files/run/t4172.check b/test/files/run/t4172.check index f16c9e5151..94cdff4870 100644 --- a/test/files/run/t4172.check +++ b/test/files/run/t4172.check @@ -5,7 +5,7 @@ scala> scala> val c = { class C { override def toString = "C" }; ((new C, new C { def f = 2 })) } warning: there were 1 feature warnings; re-run with -feature for details -c: (C, C{def f: Int}) forSome { type C <: Object } = (C,C) +c: (C, C{def f: Int}) forSome { type C <: AnyRef } = (C,C) scala> diff --git a/test/files/run/t5256a.check b/test/files/run/t5256a.check index 7e60139db3..09b5a02831 100644 --- a/test/files/run/t5256a.check +++ b/test/files/run/t5256a.check @@ -1,6 +1,6 @@ class A A -Object { +AnyRef { def (): A def foo: Nothing } diff --git a/test/files/run/t5256b.check b/test/files/run/t5256b.check index a80df6eb30..ca93aaa706 100644 --- 
a/test/files/run/t5256b.check +++ b/test/files/run/t5256b.check @@ -1,6 +1,6 @@ class A Test.A -Object { +AnyRef { def (): Test.A def foo: Nothing } diff --git a/test/files/run/t5256d.check b/test/files/run/t5256d.check index 9742ae572e..b7617e80a2 100644 --- a/test/files/run/t5256d.check +++ b/test/files/run/t5256d.check @@ -22,7 +22,7 @@ scala> println(c.fullName) $line8.$read.$iw.$iw.$iw.$iw.A scala> println(c.typeSignature) -java.lang.Object { +scala.AnyRef { def (): A def foo: scala.Nothing } diff --git a/test/files/run/t5256e.check b/test/files/run/t5256e.check index 011115720c..ed3513183e 100644 --- a/test/files/run/t5256e.check +++ b/test/files/run/t5256e.check @@ -1,6 +1,6 @@ class A Test.C.A -Object { +AnyRef { def (): C.this.A def foo: Nothing } diff --git a/test/files/run/t5256f.check b/test/files/run/t5256f.check index e0fec85596..6a89d0b86a 100644 --- a/test/files/run/t5256f.check +++ b/test/files/run/t5256f.check @@ -1,12 +1,12 @@ class A1 Test.A1 -Object { +AnyRef { def (): Test.A1 def foo: Nothing } class A2 Test.A2 -Object { +AnyRef { def (): Test.this.A2 def foo: Nothing } diff --git a/test/files/scalap/abstractClass/result.test b/test/files/scalap/abstractClass/result.test index 9163346fc6..ef1daac23d 100644 --- a/test/files/scalap/abstractClass/result.test +++ b/test/files/scalap/abstractClass/result.test @@ -1,4 +1,4 @@ -abstract class AbstractClass extends java.lang.Object { +abstract class AbstractClass extends scala.AnyRef { def this() = { /* compiled code */ } def foo : scala.Predef.String } diff --git a/test/files/scalap/abstractMethod/result.test b/test/files/scalap/abstractMethod/result.test index 90f572f258..40fa02d408 100644 --- a/test/files/scalap/abstractMethod/result.test +++ b/test/files/scalap/abstractMethod/result.test @@ -1,4 +1,4 @@ -trait AbstractMethod extends java.lang.Object { +trait AbstractMethod extends scala.AnyRef { def $init$() : scala.Unit = { /* compiled code */ } def arity : scala.Int def isCool : scala.Boolean = { /* compiled code */ } diff --git a/test/files/scalap/cbnParam/result.test b/test/files/scalap/cbnParam/result.test index fbe035d63c..52ecb6ae66 100644 --- a/test/files/scalap/cbnParam/result.test +++ b/test/files/scalap/cbnParam/result.test @@ -1,3 +1,3 @@ -class CbnParam extends java.lang.Object { +class CbnParam extends scala.AnyRef { def this(s : => scala.Predef.String) = { /* compiled code */ } } diff --git a/test/files/scalap/classPrivate/result.test b/test/files/scalap/classPrivate/result.test index 5f2e1cc00e..ab2d40cdaf 100644 --- a/test/files/scalap/classPrivate/result.test +++ b/test/files/scalap/classPrivate/result.test @@ -1,7 +1,7 @@ -class ClassPrivate extends java.lang.Object { +class ClassPrivate extends scala.AnyRef { def this() = { /* compiled code */ } def baz : scala.Int = { /* compiled code */ } - class Outer extends java.lang.Object { + class Outer extends scala.AnyRef { def this() = { /* compiled code */ } private[ClassPrivate] def qux : scala.Int = { /* compiled code */ } } diff --git a/test/files/scalap/classWithExistential/result.test b/test/files/scalap/classWithExistential/result.test index b8ce005da9..caee3fd6de 100644 --- a/test/files/scalap/classWithExistential/result.test +++ b/test/files/scalap/classWithExistential/result.test @@ -1,4 +1,4 @@ -class ClassWithExistential extends java.lang.Object { +class ClassWithExistential extends scala.AnyRef { def this() = { /* compiled code */ } def foo[A, B] : scala.Function1[A, B forSome {type A <: scala.Seq[scala.Int]; type B >: scala.Predef.String}] = { 
/* compiled code */ } } diff --git a/test/files/scalap/classWithSelfAnnotation/result.test b/test/files/scalap/classWithSelfAnnotation/result.test index df7bd86643..82bbd9e8df 100644 --- a/test/files/scalap/classWithSelfAnnotation/result.test +++ b/test/files/scalap/classWithSelfAnnotation/result.test @@ -1,4 +1,4 @@ -class ClassWithSelfAnnotation extends java.lang.Object { +class ClassWithSelfAnnotation extends scala.AnyRef { this : ClassWithSelfAnnotation with java.lang.CharSequence => def this() = { /* compiled code */ } def foo : scala.Int = { /* compiled code */ } diff --git a/test/files/scalap/covariantParam/result.test b/test/files/scalap/covariantParam/result.test index 2f52f1f28e..f7a3c98966 100644 --- a/test/files/scalap/covariantParam/result.test +++ b/test/files/scalap/covariantParam/result.test @@ -1,4 +1,4 @@ -class CovariantParam[+A] extends java.lang.Object { +class CovariantParam[+A] extends scala.AnyRef { def this() = { /* compiled code */ } def foo[A](a : A) : scala.Int = { /* compiled code */ } } diff --git a/test/files/scalap/defaultParameter/result.test b/test/files/scalap/defaultParameter/result.test index 38bf6ac4e3..0c775ea7b5 100644 --- a/test/files/scalap/defaultParameter/result.test +++ b/test/files/scalap/defaultParameter/result.test @@ -1,3 +1,3 @@ -trait DefaultParameter extends java.lang.Object { +trait DefaultParameter extends scala.AnyRef { def foo(s : scala.Predef.String) : scala.Unit -} \ No newline at end of file +} diff --git a/test/files/scalap/implicitParam/result.test b/test/files/scalap/implicitParam/result.test index 0ea212dda6..a2cfd6092d 100644 --- a/test/files/scalap/implicitParam/result.test +++ b/test/files/scalap/implicitParam/result.test @@ -1,4 +1,4 @@ -class ImplicitParam extends java.lang.Object { +class ImplicitParam extends scala.AnyRef { def this() = { /* compiled code */ } def foo(i : scala.Int)(implicit f : scala.Float, d : scala.Double) : scala.Int = { /* compiled code */ } } diff --git a/test/files/scalap/packageObject/result.test b/test/files/scalap/packageObject/result.test index 94c6a01b08..5732d92958 100644 --- a/test/files/scalap/packageObject/result.test +++ b/test/files/scalap/packageObject/result.test @@ -1,4 +1,4 @@ -package object PackageObject extends java.lang.Object { +package object PackageObject extends scala.AnyRef { def this() = { /* compiled code */ } type A = scala.Predef.String def foo(i : scala.Int) : scala.Int = { /* compiled code */ } diff --git a/test/files/scalap/paramClauses/result.test b/test/files/scalap/paramClauses/result.test index dc4397386c..3a141e8faf 100644 --- a/test/files/scalap/paramClauses/result.test +++ b/test/files/scalap/paramClauses/result.test @@ -1,4 +1,4 @@ -class ParamClauses extends java.lang.Object { +class ParamClauses extends scala.AnyRef { def this() = { /* compiled code */ } def foo(i : scala.Int)(s : scala.Predef.String)(t : scala.Double) : scala.Int = { /* compiled code */ } } diff --git a/test/files/scalap/paramNames/result.test b/test/files/scalap/paramNames/result.test index 4d3c7d0c1e..85e37f858d 100644 --- a/test/files/scalap/paramNames/result.test +++ b/test/files/scalap/paramNames/result.test @@ -1,4 +1,4 @@ -class ParamNames extends java.lang.Object { +class ParamNames extends scala.AnyRef { def this() = { /* compiled code */ } def foo(s : => scala.Seq[scala.Int], s2 : => scala.Seq[scala.Any]) : scala.Unit = { /* compiled code */ } } diff --git a/test/files/scalap/sequenceParam/result.test b/test/files/scalap/sequenceParam/result.test index ed47c094fe..142d92fea3 
100644 --- a/test/files/scalap/sequenceParam/result.test +++ b/test/files/scalap/sequenceParam/result.test @@ -1,3 +1,3 @@ -class SequenceParam extends java.lang.Object { +class SequenceParam extends scala.AnyRef { def this(s : scala.Predef.String, i : scala.Int*) = { /* compiled code */ } } diff --git a/test/files/scalap/simpleClass/result.test b/test/files/scalap/simpleClass/result.test index 905046ce52..4fdf25d1cf 100644 --- a/test/files/scalap/simpleClass/result.test +++ b/test/files/scalap/simpleClass/result.test @@ -1,4 +1,4 @@ -class SimpleClass extends java.lang.Object { +class SimpleClass extends scala.AnyRef { def this() = { /* compiled code */ } def foo : scala.Int = { /* compiled code */ } } diff --git a/test/files/scalap/traitObject/result.test b/test/files/scalap/traitObject/result.test index d0521043c8..104ba14f1a 100644 --- a/test/files/scalap/traitObject/result.test +++ b/test/files/scalap/traitObject/result.test @@ -1,8 +1,8 @@ -trait TraitObject extends java.lang.Object { +trait TraitObject extends scala.AnyRef { def $init$() : scala.Unit = { /* compiled code */ } def foo : scala.Int = { /* compiled code */ } } -object TraitObject extends java.lang.Object { +object TraitObject extends scala.AnyRef { def this() = { /* compiled code */ } def bar : scala.Int = { /* compiled code */ } } diff --git a/test/files/scalap/typeAnnotations/result.test b/test/files/scalap/typeAnnotations/result.test index d28712f12b..407b0235c6 100644 --- a/test/files/scalap/typeAnnotations/result.test +++ b/test/files/scalap/typeAnnotations/result.test @@ -1,4 +1,4 @@ -abstract class TypeAnnotations[@scala.specialized R] extends java.lang.Object { +abstract class TypeAnnotations[@scala.specialized R] extends scala.AnyRef { def this() = { /* compiled code */ } @scala.specialized val x : scala.Int = { /* compiled code */ } diff --git a/test/files/scalap/valAndVar/result.test b/test/files/scalap/valAndVar/result.test index 90081acade..e940da9801 100644 --- a/test/files/scalap/valAndVar/result.test +++ b/test/files/scalap/valAndVar/result.test @@ -1,4 +1,4 @@ -class ValAndVar extends java.lang.Object { +class ValAndVar extends scala.AnyRef { def this() = { /* compiled code */ } val foo : java.lang.String = { /* compiled code */ } var bar : scala.Int = { /* compiled code */ } diff --git a/test/files/scalap/wildcardType/result.test b/test/files/scalap/wildcardType/result.test index 28147b6605..e43261db32 100644 --- a/test/files/scalap/wildcardType/result.test +++ b/test/files/scalap/wildcardType/result.test @@ -1,3 +1,3 @@ -class WildcardType extends java.lang.Object { +class WildcardType extends scala.AnyRef { def this(f : scala.Function1[scala.Int, _]) = { /* compiled code */ } } -- cgit v1.2.3 From 422f461578ae0547181afe6d2c0c52ea1071d37b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 22 Dec 2012 08:13:48 -0800 Subject: Shored up a hidden dealiasing dependency. Like the comment says: // This way typedNew always returns a dealiased type. This // used to happen by accident for instantiations without type // arguments due to ad hoc code in typedTypeConstructor, and // annotations depended on it (to the extent that they worked, // which they did not when given a parameterized type alias // which dealiased to an annotation.) typedTypeConstructor // dealiases nothing now, but it makes sense for a "new" to // always be given a dealiased type. PS: Simply running the test suite is becoming more difficult all the time. 
Running "ant test" includes time consuming activities of niche interest such as all the osgi tests, but test.suite manages to miss the continuations tests. --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 8 +++++++- test/files/continuations-neg/function2.check | 2 +- test/files/continuations-neg/t5314-type-error.check | 4 ++-- test/files/jvm/annotations.check | 15 +++++++++++++++ test/files/jvm/annotations.scala | 6 ++++++ 5 files changed, 31 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index ad2ec7ff6c..abc5baff72 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4208,7 +4208,13 @@ trait Typers extends Modes with Adaptations with Tags { def typedNew(tree: New) = { val tpt = tree.tpt val tpt1 = { - val tpt0 = typedTypeConstructor(tpt) + // This way typedNew always returns a dealiased type. This used to happen by accident + // for instantiations without type arguments due to ad hoc code in typedTypeConstructor, + // and annotations depended on it (to the extent that they worked, which they did + // not when given a parameterized type alias which dealiased to an annotation.) + // typedTypeConstructor dealiases nothing now, but it makes sense for a "new" to always be + // given a dealiased type. + val tpt0 = typedTypeConstructor(tpt) modifyType (_.dealias) if (checkStablePrefixClassType(tpt0)) if (tpt0.hasSymbolField && !tpt0.symbol.typeParams.isEmpty) { context.undetparams = cloneSymbols(tpt0.symbol.typeParams) diff --git a/test/files/continuations-neg/function2.check b/test/files/continuations-neg/function2.check index 82b81c1444..4b1a6227bc 100644 --- a/test/files/continuations-neg/function2.check +++ b/test/files/continuations-neg/function2.check @@ -1,6 +1,6 @@ function2.scala:11: error: type mismatch; found : () => Int - required: () => Int @util.continuations.cps[Int] + required: () => Int @scala.util.continuations.cpsParam[Int,Int] val g: () => Int @cps[Int] = f ^ one error found diff --git a/test/files/continuations-neg/t5314-type-error.check b/test/files/continuations-neg/t5314-type-error.check index 1f4e46a7f2..e66c9d833f 100644 --- a/test/files/continuations-neg/t5314-type-error.check +++ b/test/files/continuations-neg/t5314-type-error.check @@ -1,6 +1,6 @@ t5314-type-error.scala:7: error: type mismatch; - found : Int @util.continuations.cps[Int] - required: Int @util.continuations.cps[String] + found : Int @scala.util.continuations.cpsParam[Int,Int] + required: Int @scala.util.continuations.cpsParam[String,String] def bar(x:Int): Int @cps[String] = return foo(x) ^ one error found diff --git a/test/files/jvm/annotations.check b/test/files/jvm/annotations.check index 8a4d58d56c..a8dc5ecdd1 100644 --- a/test/files/jvm/annotations.check +++ b/test/files/jvm/annotations.check @@ -28,6 +28,21 @@ public Test4$Foo8(int) @test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) private int Test4$Foo9.z +@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +private int Test4$Foo9.z2 + +@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +private int Test4$Foo9.z3 + +@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +public int Test4$Foo9.getZ() + +@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +public int 
Test4$Foo9.getZ2() + +@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +public int Test4$Foo9.getZ3() + @test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://apple.com) public int Test4$Foo9.x() diff --git a/test/files/jvm/annotations.scala b/test/files/jvm/annotations.scala index 66ebde592b..77a45fae89 100644 --- a/test/files/jvm/annotations.scala +++ b/test/files/jvm/annotations.scala @@ -101,6 +101,12 @@ object Test4 { type myAnn = SourceAnnotation @beanGetter @field @BeanProperty @myAnn("http://eppli.com") var z = 0 + + type myAnn2[T] = SourceAnnotation @beanGetter @field + @BeanProperty @myAnn2[String]("http://eppli.com") var z2 = 0 + + type myAnn3[CC[_]] = SourceAnnotation @beanGetter @field + @BeanProperty @myAnn3[List]("http://eppli.com") var z3 = 0 } class Foo10(@SourceAnnotation("on param 1") val name: String) class Foo11(@(SourceAnnotation @scala.annotation.meta.field)("on param 2") val name: String) -- cgit v1.2.3 From 3bf51189f979eb0dd41744ca844fd12dfdaa0dee Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 21 Dec 2012 15:11:29 -0800 Subject: Cleaning up type alias usage. I determined that many if not most of the calls to .normalize have no intent beyond dealiasing the type. In light of this I went call site to call site knocking on doors and asking why exactly they were calling any of .normalize .widen.normalize .normalize.widen and if I didn't like their answers they found themselves introduced to 'dropAliasesAndSingleTypes', the recursive widener and dealiaser which I concluded is necessary after all. Discovered that the object called 'deAlias' actually depends upon calling 'normalize', not 'dealias'. Decided this was sufficient cause to rename it to 'normalizeAliases'. Created dealiasWiden and dealiasWidenChain. Dropped dropAliasesAndSingleTypes in favor of methods on Type alongside dealias and widen (Type#dealiasWiden). These should reduce the number of "hey, the type alias doesn't work" bugs. 
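In user-level terms, a sketch of what the new Type#dealiasWiden and Type#dealiasWidenChain are meant to compute (the alias 'S' and the value 'x' are illustrative names only; the comments state the intended results and are not compiler code):

    object DealiasWidenSketch {
      type S = String                // a type alias
      val x: S = "a"                 // x.type is a singleton type hiding the alias
      // widen of x.type   == S        -- widening exposes the alias
      // dealias of S      == String   -- dealiasing exposes the underlying type
      // dealiasWiden of x.type      == String
      // dealiasWidenChain of x.type == List(x.type, S, String)
    }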
--- .../tools/nsc/interpreter/CompletionOutput.scala | 8 ++--- .../scala/tools/nsc/interpreter/IMain.scala | 2 +- .../scala/tools/nsc/typechecker/Implicits.scala | 31 ++++++++-------- .../scala/tools/nsc/typechecker/Infer.scala | 6 ++-- .../scala/tools/nsc/typechecker/Typers.scala | 18 +++++----- src/reflect/scala/reflect/internal/Types.scala | 41 +++++++++++++++++----- test/files/run/existentials3-old.check | 4 +-- 7 files changed, 65 insertions(+), 45 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala index c647ef6f51..c5bb8494ce 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala @@ -37,7 +37,7 @@ trait CompletionOutput { val pkg = method.ownerChain find (_.isPackageClass) map (_.fullName) getOrElse "" def relativize(str: String): String = quietString(str stripPrefix (pkg + ".")) - def relativize(tp: Type): String = relativize(tp.normalize.toString) + def relativize(tp: Type): String = relativize(tp.dealiasWiden.toString) def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]") def parenList(params: List[Any]) = params.mkString("(", ", ", ")") @@ -55,8 +55,8 @@ trait CompletionOutput { } ) - def tupleString(tp: Type) = parenList(tp.normalize.typeArgs map relativize) - def functionString(tp: Type) = tp.normalize.typeArgs match { + def tupleString(tp: Type) = parenList(tp.dealiasWiden.typeArgs map relativize) + def functionString(tp: Type) = tp.dealiasWiden.typeArgs match { case List(t, r) => t + " => " + r case xs => parenList(xs.init) + " => " + xs.last } @@ -64,7 +64,7 @@ trait CompletionOutput { def tparamsString(tparams: List[Symbol]) = braceList(tparams map (_.defString)) def paramsString(params: List[Symbol]) = { def paramNameString(sym: Symbol) = if (sym.isSynthetic) "" else sym.nameString + ": " - def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.normalize) + def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.dealiasWiden) val isImplicit = params.nonEmpty && params.head.isImplicit val strs = (params map paramString) match { diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 91e909b1f1..36f012229e 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -523,7 +523,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends // normalize non-public types so we don't see protected aliases like Self def normalizeNonPublic(tp: Type) = tp match { - case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.normalize + case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias case _ => tp } diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 8d6e0c3b85..ed1e6d01e8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -439,8 +439,8 @@ trait Implicits { val start = if (Statistics.canEnable) Statistics.startTimer(matchesPtNanos) else null val result = normSubType(tp, pt) || isView && { pt match { - case TypeRef(_, Function1.Sym, args) => - matchesPtView(tp, args.head, args.tail.head, undet) + case TypeRef(_, 
Function1.Sym, arg1 :: arg2 :: Nil) => + matchesPtView(tp, arg1, arg2, undet) case _ => false } @@ -484,7 +484,7 @@ trait Implicits { loop(restpe, pt) else pt match { case tr @ TypeRef(pre, sym, args) => - if (sym.isAliasType) loop(tp, pt.normalize) + if (sym.isAliasType) loop(tp, pt.dealias) else if (sym.isAbstractType) loop(tp, pt.bounds.lo) else { val len = args.length - 1 @@ -528,18 +528,15 @@ trait Implicits { * to a final true or false. */ private def isPlausiblySubType(tp1: Type, tp2: Type) = !isImpossibleSubType(tp1, tp2) - private def isImpossibleSubType(tp1: Type, tp2: Type) = tp1.normalize.widen match { - case tr1 @ TypeRef(_, sym1, _) => - // We can only rule out a subtype relationship if the left hand - // side is a class, else we may not know enough. - sym1.isClass && (tp2.normalize.widen match { - case TypeRef(_, sym2, _) => - sym2.isClass && !(sym1 isWeakSubClass sym2) - case RefinedType(parents, decls) => - decls.nonEmpty && - tr1.member(decls.head.name) == NoSymbol - case _ => false - }) + private def isImpossibleSubType(tp1: Type, tp2: Type) = tp1.dealiasWiden match { + // We can only rule out a subtype relationship if the left hand + // side is a class, else we may not know enough. + case tr1 @ TypeRef(_, sym1, _) if sym1.isClass => + tp2.dealiasWiden match { + case TypeRef(_, sym2, _) => sym2.isClass && !(sym1 isWeakSubClass sym2) + case RefinedType(parents, decls) => decls.nonEmpty && tr1.member(decls.head.name) == NoSymbol + case _ => false + } case _ => false } @@ -1010,7 +1007,7 @@ trait Implicits { args foreach (getParts(_)) } } else if (sym.isAliasType) { - getParts(tp.normalize) + getParts(tp.dealias) } else if (sym.isAbstractType) { getParts(tp.bounds.hi) } @@ -1168,7 +1165,7 @@ trait Implicits { implicit def wrapResult(tree: Tree): SearchResult = if (tree == EmptyTree) SearchFailure else new SearchResult(tree, if (from.isEmpty) EmptyTreeTypeSubstituter else new TreeTypeSubstituter(from, to)) - val tp1 = tp0.normalize + val tp1 = tp0.dealias tp1 match { case ThisType(_) | SingleType(_, _) => // can't generate a reference to a value that's abstracted over by an existential diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 2693fcfd27..a43dbae1fa 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -43,7 +43,7 @@ trait Infer extends Checkable { case formal => formal } else formals if (isVarArgTypes(formals1) && (removeRepeated || formals.length != nargs)) { - val ft = formals1.last.normalize.typeArgs.head + val ft = formals1.last.dealiasWiden.typeArgs.head formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft) } else formals1 } @@ -1437,9 +1437,9 @@ trait Infer extends Checkable { } object approximateAbstracts extends TypeMap { - def apply(tp: Type): Type = tp.normalize match { + def apply(tp: Type): Type = tp.dealiasWiden match { case TypeRef(pre, sym, _) if sym.isAbstractType => WildcardType - case _ => mapOver(tp) + case _ => mapOver(tp) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index abc5baff72..b5f456d1ae 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -13,7 +13,7 @@ package scala.tools.nsc package typechecker import scala.collection.mutable -import scala.reflect.internal.util.{ BatchSourceFile, Statistics } +import 
scala.reflect.internal.util.{ BatchSourceFile, Statistics, shortClassOfInstance } import mutable.ListBuffer import symtab.Flags._ @@ -227,7 +227,7 @@ trait Typers extends Modes with Adaptations with Tags { case ExistentialType(tparams, tpe) => new SubstWildcardMap(tparams).apply(tp) case TypeRef(_, sym, _) if sym.isAliasType => - val tp0 = tp.normalize + val tp0 = tp.dealias val tp1 = dropExistential(tp0) if (tp1 eq tp0) tp else tp1 case _ => tp @@ -413,7 +413,7 @@ trait Typers extends Modes with Adaptations with Tags { if (!hiddenSymbols.isEmpty && hiddenSymbols.head == sym && sym.isAliasType && sameLength(sym.typeParams, args)) { hiddenSymbols = hiddenSymbols.tail - t.normalize + t.dealias } else t case SingleType(_, sym) => checkNoEscape(sym) @@ -1033,9 +1033,9 @@ trait Typers extends Modes with Adaptations with Tags { adapt(tree setType restpe, mode, pt, original) case TypeRef(_, ByNameParamClass, List(arg)) if ((mode & EXPRmode) != 0) => // (2) adapt(tree setType arg, mode, pt, original) - case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.normalize.isInstanceOf[ExistentialType] && + case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.dealias.isInstanceOf[ExistentialType] && ((mode & (EXPRmode | LHSmode)) == EXPRmode) => - adapt(tree setType tr.normalize.skolemizeExistential(context.owner, tree), mode, pt, original) + adapt(tree setType tr.dealias.skolemizeExistential(context.owner, tree), mode, pt, original) case et @ ExistentialType(_, _) if ((mode & (EXPRmode | LHSmode)) == EXPRmode) => adapt(tree setType et.skolemizeExistential(context.owner, tree), mode, pt, original) case PolyType(tparams, restpe) if inNoModes(mode, TAPPmode | PATTERNmode | HKmode) => // (3) @@ -1105,7 +1105,7 @@ trait Typers extends Modes with Adaptations with Tags { if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original) else { if (inExprModeButNot(mode, FUNmode)) { - pt.normalize match { + pt.dealias match { case TypeRef(_, sym, _) => // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially // infinite expansion if pt is constant type () @@ -1251,7 +1251,7 @@ trait Typers extends Modes with Adaptations with Tags { def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = { if (isAdaptableWithView(qual)) { - qual.tpe.widen.normalize match { + qual.tpe.dealiasWiden match { case et: ExistentialType => qual setType et.skolemizeExistential(context.owner, qual) // open the existential case _ => @@ -1766,7 +1766,7 @@ trait Typers extends Modes with Adaptations with Tags { _.typedTemplate(cdef.impl, parentTypes(cdef.impl)) } val impl2 = finishMethodSynthesis(impl1, clazz, context) - if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.normalize.typeSymbol == AnyClass) + if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass) checkEphemeral(clazz, impl2.body) if ((clazz != ClassfileAnnotationClass) && (clazz isNonBottomSubClass ClassfileAnnotationClass)) @@ -3675,7 +3675,7 @@ trait Typers extends Modes with Adaptations with Tags { val normalizeLocals = new TypeMap { def apply(tp: Type): Type = tp match { case TypeRef(pre, sym, args) => - if (sym.isAliasType && containsLocal(tp)) apply(tp.normalize) + if (sym.isAliasType && containsLocal(tp)) apply(tp.dealias) else { if (pre.isVolatile) InferTypeWithVolatileTypeSelectionError(tree, pre) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 
282d7e18ac..7a63699259 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -563,6 +563,26 @@ trait Types extends api.Types { self: SymbolTable => /** Expands type aliases. */ def dealias = this + /** Repeatedly apply widen and dealias until they have no effect. + * This compensates for the fact that type aliases can hide beneath + * singleton types and singleton types can hide inside type aliases. + */ + def dealiasWiden: Type = ( + if (this ne widen) widen.dealiasWiden + else if (this ne dealias) dealias.dealiasWiden + else this + ) + + /** All the types encountered in the course of dealiasing/widening, + * including each intermediate beta reduction step (whereas calling + * dealias applies as many as possible.) + */ + def dealiasWidenChain: List[Type] = this :: ( + if (this ne widen) widen.dealiasWidenChain + else if (this ne betaReduce) betaReduce.dealiasWidenChain + else Nil + ) + def etaExpand: Type = this /** Performs a single step of beta-reduction on types. @@ -3236,7 +3256,7 @@ trait Types extends api.Types { self: SymbolTable => if (constr.instValid) constr.inst // get here when checking higher-order subtyping of the typevar by itself // TODO: check whether this ever happens? - else if (isHigherKinded) typeFun(params, applyArgs(params map (_.typeConstructor))) + else if (isHigherKinded) logResult("Normalizing HK $this")(typeFun(params, applyArgs(params map (_.typeConstructor)))) else super.normalize ) override def typeSymbol = origin.typeSymbol @@ -3663,7 +3683,7 @@ trait Types extends api.Types { self: SymbolTable => def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type = if (tparams.isEmpty) tpe0 else { - val tpe = deAlias(tpe0) + val tpe = normalizeAliases(tpe0) val tpe1 = new ExistentialExtrapolation(tparams) extrapolate tpe var tparams0 = tparams var tparams1 = tparams0 filter tpe1.contains @@ -3677,13 +3697,16 @@ trait Types extends api.Types { self: SymbolTable => newExistentialType(tparams1, tpe1) } - /** Remove any occurrences of type aliases from this type */ - object deAlias extends TypeMap { - def apply(tp: Type): Type = mapOver { - tp match { - case TypeRef(pre, sym, args) if sym.isAliasType => tp.normalize - case _ => tp - } + /** Normalize any type aliases within this type (@see Type#normalize). + * Note that this depends very much on the call to "normalize", not "dealias", + * so it is no longer carries the too-stealthy name "deAlias". 
+ */ + object normalizeAliases extends TypeMap { + def apply(tp: Type): Type = tp match { + case TypeRef(_, sym, _) if sym.isAliasType => + def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp" + mapOver(logResult(msg)(tp.normalize)) + case _ => mapOver(tp) } } diff --git a/test/files/run/existentials3-old.check b/test/files/run/existentials3-old.check index 72abfac637..36a458dacc 100644 --- a/test/files/run/existentials3-old.check +++ b/test/files/run/existentials3-old.check @@ -5,7 +5,7 @@ Object with Test$ToS Object with Test$ToS scala.Function0[Object with Test$ToS] scala.Function0[Object with Test$ToS] -_ <: Object with _ <: Object with Test$ToS +_ <: Object with _ <: Object with Object with Test$ToS _ <: Object with _ <: Object with _ <: Object with Test$ToS scala.collection.immutable.List[Object with scala.collection.Seq[Int]] scala.collection.immutable.List[Object with scala.collection.Seq[_ <: Int]] @@ -16,7 +16,7 @@ Object with Test$ToS Object with Test$ToS scala.Function0[Object with Test$ToS] scala.Function0[Object with Test$ToS] -_ <: Object with _ <: Object with Test$ToS +_ <: Object with _ <: Object with Object with Test$ToS _ <: Object with _ <: Object with _ <: Object with Test$ToS scala.collection.immutable.List[Object with scala.collection.Seq[Int]] scala.collection.immutable.List[Object with scala.collection.Seq[_ <: Int]] -- cgit v1.2.3 From dbebcd509e4013ce02655a2687b27d0967b3650e Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 19 Dec 2012 07:32:19 -0800 Subject: SI-6846, regression in type constructor inference. In 658ba1b4e6 some inference was gained and some was lost. In this commit we regain what was lost and gain even more. Dealiasing and widening should be fully handled now, as illustrated by the test case. 
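The representative case, in user terms (this mirrors the new test test/files/pos/t6846.scala added below; the names come from that test, not from the compiler internals):

    object Sketch {
      class Arb[_]
      implicit def foo[M[_], A]: Arb[M[A]] = null
      type ListInt = List[Int]
      foo: Arb[List[Int]]  // inferred M = List, A = Int, as before
      foo: Arb[ListInt]    // now works too: the type variable standing for M[A]
                           // is unified against each member of
                           // ListInt.dealiasWidenChain, i.e. ListInt and then List[Int]
    }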
--- .../scala/tools/nsc/typechecker/Infer.scala | 6 +++-- src/reflect/scala/reflect/internal/Types.scala | 19 +++++++-------- test/files/pos/t6846.scala | 28 ++++++++++++++++++++++ 3 files changed, 40 insertions(+), 13 deletions(-) create mode 100644 test/files/pos/t6846.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index a43dbae1fa..7188290688 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1122,15 +1122,17 @@ trait Infer extends Checkable { */ def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type = WildcardType, treeTp0: Type = null, keepNothings: Boolean = true, useWeaklyCompatible: Boolean = false): List[Symbol] = { val treeTp = if(treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0 + val (targs, tvars) = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible) printInference( ptBlock("inferExprInstance", "tree" -> tree, "tree.tpe"-> tree.tpe, "tparams" -> tparams, - "pt" -> pt + "pt" -> pt, + "targs" -> targs, + "tvars" -> tvars ) ) - val (targs, tvars) = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible) if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226 substExpr(tree, tparams, targs, pt) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 7a63699259..c121c6020e 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3144,23 +3144,20 @@ trait Types extends api.Types { self: SymbolTable => * Checks subtyping of higher-order type vars, and uses variances as defined in the * type parameter we're trying to infer (the result will be sanity-checked later). */ - def unifyFull(tpe: Type) = { - // The alias/widen variations are often no-ops. - val tpes = ( - if (isLowerBound) List(tpe, tpe.widen, tpe.dealias, tpe.widen.dealias).distinct - else List(tpe) - ) - tpes exists { tp => - val lhs = if (isLowerBound) tp.typeArgs else typeArgs - val rhs = if (isLowerBound) typeArgs else tp.typeArgs - - sameLength(lhs, rhs) && { + def unifyFull(tpe: Type): Boolean = { + def unifySpecific(tp: Type) = { + sameLength(typeArgs, tp.typeArgs) && { + val lhs = if (isLowerBound) tp.typeArgs else typeArgs + val rhs = if (isLowerBound) typeArgs else tp.typeArgs // this is a higher-kinded type var with same arity as tp. // side effect: adds the type constructor itself as a bound addBound(tp.typeConstructor) isSubArgs(lhs, rhs, params, AnyDepth) } } + // The type with which we can successfully unify can be hidden + // behind singleton types and type aliases. 
+ tpe.dealiasWidenChain exists unifySpecific } // There's a <: test taking place right now, where tp is a concrete type and this is a typevar diff --git a/test/files/pos/t6846.scala b/test/files/pos/t6846.scala new file mode 100644 index 0000000000..009566493f --- /dev/null +++ b/test/files/pos/t6846.scala @@ -0,0 +1,28 @@ +object Test { + class Arb[_] + implicit def foo[M[_], A]: Arb[M[A]] = null + foo: Arb[List[Int]] + type ListInt = List[Int] + foo: Arb[ListInt] +} + +object Test2 { + import scala.collection.immutable.List + + class Carb[_] + implicit def narrow[N, M[_], A](x: Carb[M[A]])(implicit ev: N <:< M[A]): Carb[N] = null + implicit def bar[M[_], A]: Carb[M[A]] = null + + type ListInt = List[Int] + + val x: List[Int] = List(1) + val y: ListInt = List(1) + + type ListSingletonX = x.type + type ListSingletonY = y.type + + bar: Carb[List[Int]] + bar: Carb[ListInt] + bar: Carb[ListSingletonX] + bar: Carb[ListSingletonY] +} -- cgit v1.2.3 From 56ef2b330dfb3381fe2f6e717b959f1757ce69bb Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Sat, 29 Dec 2012 10:32:08 +0100 Subject: cleans up usages of --- src/library/scala/reflect/NameTransformer.scala | 2 +- test/files/run/reflection-enclosed-basic.scala | 2 +- test/files/run/reflection-enclosed-inner-basic.scala | 2 +- test/files/run/reflection-enclosed-inner-inner-basic.scala | 2 +- test/files/run/reflection-enclosed-inner-nested-basic.scala | 2 +- test/files/run/reflection-enclosed-nested-basic.scala | 2 +- test/files/run/reflection-enclosed-nested-inner-basic.scala | 2 +- test/files/run/reflection-enclosed-nested-nested-basic.scala | 2 +- test/files/run/reflection-magicsymbols-invoke.scala | 2 +- test/files/run/reflection-sanitychecks.scala | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) (limited to 'src') diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala index 384ebc6134..0beb840bed 100755 --- a/src/library/scala/reflect/NameTransformer.scala +++ b/src/library/scala/reflect/NameTransformer.scala @@ -93,7 +93,7 @@ object NameTransformer { */ def decode(name0: String): String = { //System.out.println("decode: " + name);//DEBUG - val name = if (name0.endsWith("")) name0.substring(0, name0.length() - ("").length()) + "this" + val name = if (name0.endsWith("")) name0.stripSuffix("") + "this" else name0; var buf: StringBuilder = null val len = name.length() diff --git a/test/files/run/reflection-enclosed-basic.scala b/test/files/run/reflection-enclosed-basic.scala index 39e327cff6..7b9e0c20dc 100644 --- a/test/files/run/reflection-enclosed-basic.scala +++ b/test/files/run/reflection-enclosed-basic.scala @@ -20,7 +20,7 @@ object Test extends App { def testNestedClass(name: String) = { val sym = cm.staticClass(name) println(sym) - val ctor = sym.typeSignature.declaration(TermName("")).asMethod + val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor) val instance = ctorMirror() println(instance) diff --git a/test/files/run/reflection-enclosed-inner-basic.scala b/test/files/run/reflection-enclosed-inner-basic.scala index 997a67ed61..c1cf9bc336 100644 --- a/test/files/run/reflection-enclosed-inner-basic.scala +++ b/test/files/run/reflection-enclosed-inner-basic.scala @@ -26,7 +26,7 @@ object Test extends App { def testInnerClass(name: String) = { val sym = b.typeSignature.declaration(TypeName(name)).asClass println(sym) - val ctor = sym.typeSignature.declaration(TermName("")).asMethod + val ctor = 
sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod val ctorMirror = cm.reflect(new B).reflectClass(sym).reflectConstructor(ctor) val instance = ctorMirror() println(instance) diff --git a/test/files/run/reflection-enclosed-inner-inner-basic.scala b/test/files/run/reflection-enclosed-inner-inner-basic.scala index 704363dab9..8a73fac522 100644 --- a/test/files/run/reflection-enclosed-inner-inner-basic.scala +++ b/test/files/run/reflection-enclosed-inner-inner-basic.scala @@ -28,7 +28,7 @@ object Test extends App { def testInnerClass(name: String) = { val sym = b.typeSignature.declaration(TypeName(name)).asClass println(sym) - val ctor = sym.typeSignature.declaration(TermName("")).asMethod + val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod val outer1 = new B val outer2 = new outer1.BB val ctorMirror = cm.reflect(outer2).reflectClass(sym).reflectConstructor(ctor) diff --git a/test/files/run/reflection-enclosed-inner-nested-basic.scala b/test/files/run/reflection-enclosed-inner-nested-basic.scala index 1e3797a9ea..6c2fc6df7a 100644 --- a/test/files/run/reflection-enclosed-inner-nested-basic.scala +++ b/test/files/run/reflection-enclosed-inner-nested-basic.scala @@ -29,7 +29,7 @@ object Test extends App { def testNestedClass(name: String) = { val sym = b.typeSignature.declaration(TypeName(name)).asClass println(sym) - val ctor = sym.typeSignature.declaration(TermName("")).asMethod + val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod val ctorMirror = cm.reflect(outer1.BB).reflectClass(sym).reflectConstructor(ctor) val instance = ctorMirror() println(instance) diff --git a/test/files/run/reflection-enclosed-nested-basic.scala b/test/files/run/reflection-enclosed-nested-basic.scala index a629d8a2d0..180ac4ebee 100644 --- a/test/files/run/reflection-enclosed-nested-basic.scala +++ b/test/files/run/reflection-enclosed-nested-basic.scala @@ -26,7 +26,7 @@ object Test extends App { def testNestedClass(name: String) = { val sym = b.typeSignature.declaration(TypeName(name)).asClass println(sym) - val ctor = sym.typeSignature.declaration(TermName("")).asMethod + val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor) val instance = ctorMirror() println(instance) diff --git a/test/files/run/reflection-enclosed-nested-inner-basic.scala b/test/files/run/reflection-enclosed-nested-inner-basic.scala index 9c726e55d4..2558b8035a 100644 --- a/test/files/run/reflection-enclosed-nested-inner-basic.scala +++ b/test/files/run/reflection-enclosed-nested-inner-basic.scala @@ -28,7 +28,7 @@ object Test extends App { def testInnerClass(name: String) = { val sym = b.typeSignature.declaration(TypeName(name)).asClass println(sym) - val ctor = sym.typeSignature.declaration(TermName("")).asMethod + val ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod val ctorMirror = cm.reflect(new B.BB).reflectClass(sym).reflectConstructor(ctor) val instance = ctorMirror() println(instance) diff --git a/test/files/run/reflection-enclosed-nested-nested-basic.scala b/test/files/run/reflection-enclosed-nested-nested-basic.scala index 247eba120e..b4711c9a8c 100644 --- a/test/files/run/reflection-enclosed-nested-nested-basic.scala +++ b/test/files/run/reflection-enclosed-nested-nested-basic.scala @@ -28,7 +28,7 @@ object Test extends App { def testNestedClass(name: String) = { val sym = b.typeSignature.declaration(TypeName(name)).asClass println(sym) - val ctor = sym.typeSignature.declaration(TermName("")).asMethod + val 
ctor = sym.typeSignature.declaration(nme.CONSTRUCTOR).asMethod val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor) val instance = ctorMirror() println(instance) diff --git a/test/files/run/reflection-magicsymbols-invoke.scala b/test/files/run/reflection-magicsymbols-invoke.scala index e366495ec7..ff3992709f 100644 --- a/test/files/run/reflection-magicsymbols-invoke.scala +++ b/test/files/run/reflection-magicsymbols-invoke.scala @@ -54,7 +54,7 @@ object Test extends App { println("it's important to print the list of AnyVal's members") println("if some of them change (possibly, adding and/or removing magic symbols), we must update this test") typeOf[AnyVal].declarations.toList.sortBy(key).foreach(sym => println(key(sym))) - test(typeOf[AnyVal], null, "") + test(typeOf[AnyVal], null, nme.CONSTRUCTOR.toString) test(typeOf[AnyVal], 2, "getClass") println("============\nAnyRef") diff --git a/test/files/run/reflection-sanitychecks.scala b/test/files/run/reflection-sanitychecks.scala index 709c32c80e..6d3daff1f7 100644 --- a/test/files/run/reflection-sanitychecks.scala +++ b/test/files/run/reflection-sanitychecks.scala @@ -38,7 +38,7 @@ object Test extends App { println("method #2: " + failsafe(im.reflectMethod(tpe.member(TermName("baz")).asMethod)())) println("constructor #1: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(TermName("bar")).asMethod)())) println("constructor #2: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(TermName("")).asMethod)())) - println("class: " + failsafe(im.reflectClass(tpe.member(TypeName("C")).asClass).reflectConstructor(typeOf[C].member(TypeName("C")).asClass.typeSignature.member(TermName("")).asMethod)())) + println("class: " + failsafe(im.reflectClass(tpe.member(TypeName("C")).asClass).reflectConstructor(typeOf[C].member(TypeName("C")).asClass.typeSignature.member(nme.CONSTRUCTOR).asMethod)())) println("object: " + failsafe(im.reflectModule(tpe.member(TermName("O")).asModule).instance)) println() } -- cgit v1.2.3 From 653b29bfdd091d7e2c069b8ad56f8ca51ad4b244 Mon Sep 17 00:00:00 2001 From: Miguel Garcia Date: Sat, 29 Dec 2012 14:15:45 +0100 Subject: nested closures are flattened by calling supplementErrorMessage() directly A closure C that becomes an argument to the constructor of another closure makes both closures harder to eliminate (either by scalac-optimizer or JIT-compiler) than is the case when C is the argument to an @inline method. --- src/compiler/scala/tools/nsc/Global.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 95da7324aa..20b8265071 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -228,13 +228,15 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // of assert and require (but for now I've reproduced them here, // because there are a million to fix.) 
@inline final def assert(assertion: Boolean, message: => Any) { - Predef.assert(assertion, supplementErrorMessage("" + message)) + if (!assertion) + throw new java.lang.AssertionError("assertion failed: "+ supplementErrorMessage("" + message)) } @inline final def assert(assertion: Boolean) { assert(assertion, "") } @inline final def require(requirement: Boolean, message: => Any) { - Predef.require(requirement, supplementErrorMessage("" + message)) + if (!requirement) + throw new IllegalArgumentException("requirement failed: "+ supplementErrorMessage("" + message)) } @inline final def require(requirement: Boolean) { require(requirement, "") -- cgit v1.2.3 From 176aa56682827f3d987e110e6da7eb11e4ea1a58 Mon Sep 17 00:00:00 2001 From: Carlo Dapor Date: Wed, 2 Jan 2013 03:51:36 +0100 Subject: Updated copyright to 2013 --- build.xml | 2 +- docs/LICENSE | 2 +- docs/examples/jolib/Ref.scala | 2 +- docs/examples/jolib/parallelOr.scala | 4 ++-- docs/examples/parsing/ArithmeticParser.scala | 2 +- project/Versions.scala | 2 +- src/build/genprod.scala | 4 ++-- src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala | 2 +- src/compiler/scala/tools/nsc/interactive/Doc.scala | 2 +- src/compiler/scala/tools/nsc/interpreter/ReplDir.scala | 2 +- src/eclipse/README.md | 2 +- src/library-aux/scala/Any.scala | 2 +- src/library-aux/scala/AnyRef.scala | 2 +- src/library-aux/scala/Nothing.scala | 2 +- src/library-aux/scala/Null.scala | 2 +- src/library/scala/Boolean.scala | 2 +- src/library/scala/Byte.scala | 2 +- src/library/scala/Char.scala | 2 +- src/library/scala/Double.scala | 2 +- src/library/scala/Float.scala | 2 +- src/library/scala/Function0.scala | 2 +- src/library/scala/Function1.scala | 2 +- src/library/scala/Function10.scala | 2 +- src/library/scala/Function11.scala | 2 +- src/library/scala/Function12.scala | 2 +- src/library/scala/Function13.scala | 2 +- src/library/scala/Function14.scala | 2 +- src/library/scala/Function15.scala | 2 +- src/library/scala/Function16.scala | 2 +- src/library/scala/Function17.scala | 2 +- src/library/scala/Function18.scala | 2 +- src/library/scala/Function19.scala | 2 +- src/library/scala/Function2.scala | 2 +- src/library/scala/Function20.scala | 2 +- src/library/scala/Function21.scala | 2 +- src/library/scala/Function22.scala | 2 +- src/library/scala/Function3.scala | 2 +- src/library/scala/Function4.scala | 2 +- src/library/scala/Function5.scala | 2 +- src/library/scala/Function6.scala | 2 +- src/library/scala/Function7.scala | 2 +- src/library/scala/Function8.scala | 2 +- src/library/scala/Function9.scala | 2 +- src/library/scala/Int.scala | 2 +- src/library/scala/Long.scala | 2 +- src/library/scala/Product.scala | 2 +- src/library/scala/Product1.scala | 2 +- src/library/scala/Product10.scala | 2 +- src/library/scala/Product11.scala | 2 +- src/library/scala/Product12.scala | 2 +- src/library/scala/Product13.scala | 2 +- src/library/scala/Product14.scala | 2 +- src/library/scala/Product15.scala | 2 +- src/library/scala/Product16.scala | 2 +- src/library/scala/Product17.scala | 2 +- src/library/scala/Product18.scala | 2 +- src/library/scala/Product19.scala | 2 +- src/library/scala/Product2.scala | 2 +- src/library/scala/Product20.scala | 2 +- src/library/scala/Product21.scala | 2 +- src/library/scala/Product22.scala | 2 +- src/library/scala/Product3.scala | 2 +- src/library/scala/Product4.scala | 2 +- src/library/scala/Product5.scala | 2 +- src/library/scala/Product6.scala | 2 +- src/library/scala/Product7.scala | 2 +- src/library/scala/Product8.scala | 2 +- 
src/library/scala/Product9.scala | 2 +- src/library/scala/Short.scala | 2 +- src/library/scala/Tuple1.scala | 2 +- src/library/scala/Tuple10.scala | 2 +- src/library/scala/Tuple11.scala | 2 +- src/library/scala/Tuple12.scala | 2 +- src/library/scala/Tuple13.scala | 2 +- src/library/scala/Tuple14.scala | 2 +- src/library/scala/Tuple15.scala | 2 +- src/library/scala/Tuple16.scala | 2 +- src/library/scala/Tuple17.scala | 2 +- src/library/scala/Tuple18.scala | 2 +- src/library/scala/Tuple19.scala | 2 +- src/library/scala/Tuple2.scala | 2 +- src/library/scala/Tuple20.scala | 2 +- src/library/scala/Tuple21.scala | 2 +- src/library/scala/Tuple22.scala | 2 +- src/library/scala/Tuple3.scala | 2 +- src/library/scala/Tuple4.scala | 2 +- src/library/scala/Tuple5.scala | 2 +- src/library/scala/Tuple6.scala | 2 +- src/library/scala/Tuple7.scala | 2 +- src/library/scala/Tuple8.scala | 2 +- src/library/scala/Tuple9.scala | 2 +- src/library/scala/Unit.scala | 2 +- src/library/scala/collection/Searching.scala | 2 +- src/library/scala/collection/generic/IndexedSeqFactory.scala | 2 +- src/library/scala/collection/generic/IsSeqLike.scala | 2 +- src/library/scala/collection/mutable/History.scala | 2 +- src/library/scala/concurrent/FutureTaskRunner.scala | 2 +- src/library/scala/runtime/AbstractFunction0.scala | 2 +- src/library/scala/runtime/AbstractFunction1.scala | 2 +- src/library/scala/runtime/AbstractFunction10.scala | 2 +- src/library/scala/runtime/AbstractFunction11.scala | 2 +- src/library/scala/runtime/AbstractFunction12.scala | 2 +- src/library/scala/runtime/AbstractFunction13.scala | 2 +- src/library/scala/runtime/AbstractFunction14.scala | 2 +- src/library/scala/runtime/AbstractFunction15.scala | 2 +- src/library/scala/runtime/AbstractFunction16.scala | 2 +- src/library/scala/runtime/AbstractFunction17.scala | 2 +- src/library/scala/runtime/AbstractFunction18.scala | 2 +- src/library/scala/runtime/AbstractFunction19.scala | 2 +- src/library/scala/runtime/AbstractFunction2.scala | 2 +- src/library/scala/runtime/AbstractFunction20.scala | 2 +- src/library/scala/runtime/AbstractFunction21.scala | 2 +- src/library/scala/runtime/AbstractFunction22.scala | 2 +- src/library/scala/runtime/AbstractFunction3.scala | 2 +- src/library/scala/runtime/AbstractFunction4.scala | 2 +- src/library/scala/runtime/AbstractFunction5.scala | 2 +- src/library/scala/runtime/AbstractFunction6.scala | 2 +- src/library/scala/runtime/AbstractFunction7.scala | 2 +- src/library/scala/runtime/AbstractFunction8.scala | 2 +- src/library/scala/runtime/AbstractFunction9.scala | 2 +- src/library/scala/util/Properties.scala | 2 +- src/manual/scala/tools/docutil/resources/index.html | 4 ++-- src/scalap/decoder.properties | 2 +- src/swing/scala/swing/ColorChooser.scala | 2 +- src/swing/scala/swing/PopupMenu.scala | 2 +- src/swing/scala/swing/event/ColorChanged.scala | 2 +- src/swing/scala/swing/event/PopupMenuEvent.scala | 2 +- test/disabled/pos/spec-List.scala | 2 +- test/files/pos/spec-Function1.scala | 2 +- test/files/pos/t5644/BoxesRunTime.java | 2 +- test/instrumented/library/scala/runtime/BoxesRunTime.java | 2 +- test/instrumented/library/scala/runtime/ScalaRunTime.scala | 2 +- test/partest | 2 +- test/partest.bat | 2 +- test/scaladoc/resources/doc-root/Any.scala | 2 +- test/scaladoc/resources/doc-root/AnyRef.scala | 2 +- test/scaladoc/resources/doc-root/Nothing.scala | 2 +- test/scaladoc/resources/doc-root/Null.scala | 2 +- test/script-tests/jar-manifest/run-test.check | 2 +- 139 files changed, 142 insertions(+), 142 deletions(-) 
(limited to 'src') diff --git a/build.xml b/build.xml index 6048f0f3fa..7bb7b4d365 100644 --- a/build.xml +++ b/build.xml @@ -225,7 +225,7 @@ PROPERTIES - + tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!) !sameLength(tree.tpe.typeParams, pt.typeParams) && @@ -991,7 +992,7 @@ trait Typers extends Modes with Adaptations with Tags { } def insertApply(): Tree = { - assert(!inHKMode(mode), modeString(mode)) //@M + assert(!mode.inHKMode, mode) //@M val adapted = adaptToName(tree, nme.apply) def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType) // TODO reconcile the overlap between Typers#stablize and TreeGen.stabilize @@ -1019,26 +1020,26 @@ trait Typers extends Modes with Adaptations with Tags { tree.tpe match { case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (-1) adaptAnnotations(tree, mode, pt) - case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0) + case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0) val sym = tree.symbol if (sym != null && sym.isDeprecated) { val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("") unit.deprecationWarning(tree.pos, msg) } treeCopy.Literal(tree, value) - case OverloadedType(pre, alts) if !inFunMode(mode) => // (1) + case OverloadedType(pre, alts) if !mode.inFunMode => // (1) inferExprAlternative(tree, pt) adapt(tree, mode, pt, original) case NullaryMethodType(restpe) => // (2) adapt(tree setType restpe, mode, pt, original) - case TypeRef(_, ByNameParamClass, List(arg)) if ((mode & EXPRmode) != 0) => // (2) + case TypeRef(_, ByNameParamClass, List(arg)) if mode.inExprMode => // (2) adapt(tree setType arg, mode, pt, original) case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.dealias.isInstanceOf[ExistentialType] && ((mode & (EXPRmode | LHSmode)) == EXPRmode) => adapt(tree setType tr.dealias.skolemizeExistential(context.owner, tree), mode, pt, original) case et @ ExistentialType(_, _) if ((mode & (EXPRmode | LHSmode)) == EXPRmode) => adapt(tree setType et.skolemizeExistential(context.owner, tree), mode, pt, original) - case PolyType(tparams, restpe) if inNoModes(mode, TAPPmode | PATTERNmode | HKmode) => // (3) + case PolyType(tparams, restpe) if mode.inNone(TAPPmode | PATTERNmode | HKmode) => // (3) // assert((mode & HKmode) == 0) //@M a PolyType in HKmode represents an anonymous type function, // we're in HKmode since a higher-kinded type is expected --> hence, don't implicitly apply it to type params! 
// ticket #2197 triggered turning the assert into a guard @@ -1057,18 +1058,18 @@ trait Typers extends Modes with Adaptations with Tags { adaptToImplicitMethod(mt) case mt: MethodType if (((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) && - (context.undetparams.isEmpty || inPolyMode(mode))) && !(tree.symbol != null && tree.symbol.isTermMacro) => + (context.undetparams.isEmpty || mode.inPolyMode)) && !(tree.symbol != null && tree.symbol.isTermMacro) => instantiateToMethodType(mt) case _ => - def shouldInsertApply(tree: Tree) = inAllModes(mode, EXPRmode | FUNmode) && (tree.tpe match { + def shouldInsertApply(tree: Tree) = mode.inAll(EXPRmode | FUNmode) && (tree.tpe match { case _: MethodType | _: OverloadedType | _: PolyType => false case _ => applyPossible }) def applyPossible = { def applyMeth = member(adaptToName(tree, nme.apply), nme.apply) dyna.acceptsApplyDynamic(tree.tpe) || ( - if ((mode & TAPPmode) != 0) + if (mode.inAll(TAPPmode)) tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol else applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol @@ -1077,17 +1078,17 @@ trait Typers extends Modes with Adaptations with Tags { if (tree.isType) adaptType() else if ( - inExprModeButNot(mode, FUNmode) && !tree.isDef && // typechecking application + mode.inExprModeButNot(FUNmode) && !tree.isDef && // typechecking application tree.symbol != null && tree.symbol.isTermMacro && // of a macro !tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined) macroExpand(this, tree, mode, pt) - else if (inAllModes(mode, PATTERNmode | FUNmode)) + else if (mode.inAll(PATTERNmode | FUNmode)) adaptConstrPattern() else if (shouldInsertApply(tree)) insertApply() - else if (!context.undetparams.isEmpty && !inPolyMode(mode)) { // (9) - assert(!inHKMode(mode), modeString(mode)) //@M - if (inExprModeButNot(mode, FUNmode) && pt.typeSymbol == UnitClass) + else if (!context.undetparams.isEmpty && !mode.inPolyMode) { // (9) + assert(!mode.inHKMode, mode) //@M + if (mode.inExprModeButNot(FUNmode) && pt.typeSymbol == UnitClass) instantiateExpectingUnit(tree, mode) else instantiate(tree, mode, pt) @@ -1095,7 +1096,7 @@ trait Typers extends Modes with Adaptations with Tags { tree } else { def fallBack: Tree = { - if (inPatternMode(mode)) { + if (mode.inPatternMode) { if ((tree.symbol ne null) && tree.symbol.isModule) inferModulePattern(tree, pt) if (isPopulated(tree.tpe, approximateAbstracts(pt))) @@ -1104,7 +1105,7 @@ trait Typers extends Modes with Adaptations with Tags { val tree1 = constfold(tree, pt) // (10) (11) if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original) else { - if (inExprModeButNot(mode, FUNmode)) { + if (mode.inExprModeButNot(FUNmode)) { pt.dealias match { case TypeRef(_, sym, _) => // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially @@ -1212,7 +1213,7 @@ trait Typers extends Modes with Adaptations with Tags { } } - def instantiate(tree: Tree, mode: Int, pt: Type): Tree = { + def instantiate(tree: Tree, mode: Mode, pt: Type): Tree = { inferExprInstance(tree, context.extractUndetparams(), pt) adapt(tree, mode, pt) } @@ -1220,7 +1221,7 @@ trait Typers extends Modes with Adaptations with Tags { * with expected type Unit, but if that fails, try again with pt = WildcardType * and discard the expression. 
*/ - def instantiateExpectingUnit(tree: Tree, mode: Int): Tree = { + def instantiateExpectingUnit(tree: Tree, mode: Mode): Tree = { val savedUndetparams = context.undetparams silent(_.instantiate(tree, mode, UnitClass.tpe)) orElse { _ => context.undetparams = savedUndetparams @@ -1294,7 +1295,7 @@ trait Typers extends Modes with Adaptations with Tags { * a method `name`. If that's ambiguous try taking arguments into * account using `adaptToArguments`. */ - def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = { + def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Mode, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = { def onError(reportError: => Tree): Tree = context.tree match { case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty => ( silent (_.typedArgs(args, mode)) @@ -2296,7 +2297,7 @@ trait Typers extends Modes with Adaptations with Tags { } } - def typedBlock(block: Block, mode: Int, pt: Type): Block = { + def typedBlock(block: Block, mode: Mode, pt: Type): Block = { val syntheticPrivates = new ListBuffer[Symbol] try { namer.enterSyms(block.stats) @@ -2358,7 +2359,7 @@ trait Typers extends Modes with Adaptations with Tags { case _ => stat::Nil }) val stats2 = typedStats(stats1, context.owner) - val expr1 = typed(block.expr, mode & ~(FUNmode | QUALmode), pt) + val expr1 = typed(block.expr, mode &~ (FUNmode | QUALmode), pt) treeCopy.Block(block, stats2, expr1) .setType(if (treeInfo.isExprSafeToInline(block)) expr1.tpe else expr1.tpe.deconst) } finally { @@ -2429,13 +2430,13 @@ trait Typers extends Modes with Adaptations with Tags { newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt) } - def adaptCase(cdef: CaseDef, mode: Int, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe)) + def adaptCase(cdef: CaseDef, mode: Mode, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe)) def ptOrLub(tps: List[Type], pt: Type ) = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst)) def ptOrLubPacked(trees: List[Tree], pt: Type) = if (isFullyDefined(pt)) (pt, false) else weakLub(trees map (c => packedType(c, context.owner).deconst)) // takes untyped sub-trees of a match and type checks them - def typedMatch(selector: Tree, cases: List[CaseDef], mode: Int, pt: Type, tree: Tree = EmptyTree): Match = { + def typedMatch(selector: Tree, cases: List[CaseDef], mode: Mode, pt: Type, tree: Tree = EmptyTree): Match = { val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType)) val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector) val casesTyped = typedCases(cases, selectorTp, pt) @@ -2448,7 +2449,7 @@ trait Typers extends Modes with Adaptations with Tags { } // match has been typed -- virtualize it during type checking so the full context is available - def virtualizedMatch(match_ : Match, mode: Int, pt: Type) = { + def virtualizedMatch(match_ : Match, mode: Mode, pt: Type) = { import patmat.{ vpmName, PureMatchTranslator } // TODO: add fallback __match sentinel to predef @@ -2466,7 +2467,7 @@ trait Typers extends Modes with Adaptations with Tags { // Match(EmptyTree, cases) ==> new PartialFunction { def apply(params) = `translateMatch('`(param1,...,paramN)` match { cases }')` } // for fresh params, the selector of the match we'll translated simply gathers those in a tuple // NOTE: restricted to PartialFunction -- leave Function trees if the expected type does not demand a partial 
function - class MatchFunTyper(tree: Tree, cases: List[CaseDef], mode: Int, pt0: Type) { + class MatchFunTyper(tree: Tree, cases: List[CaseDef], mode: Mode, pt0: Type) { // TODO: remove FunctionN support -- this is currently designed so that it can emit FunctionN and PartialFunction subclasses // however, we should leave Function nodes until Uncurry so phases after typer can still detect normal Function trees // we need to synthesize PartialFunction impls, though, to avoid nastiness in Uncurry in transforming&duplicating generated pattern matcher trees @@ -2616,7 +2617,7 @@ trait Typers extends Modes with Adaptations with Tags { } // Function(params, Match(sel, cases)) ==> new Function { def apply(params) = `translateMatch('sel match { cases }')` } - class MatchFunTyperBetaReduced(fun: Function, sel: Tree, cases: List[CaseDef], mode: Int, pt: Type) extends MatchFunTyper(fun, cases, mode, pt) { + class MatchFunTyperBetaReduced(fun: Function, sel: Tree, cases: List[CaseDef], mode: Mode, pt: Type) extends MatchFunTyper(fun, cases, mode, pt) { override def deriveFormals = fun.vparams map { p => if(p.tpt.tpe == null) typedType(p.tpt).tpe else p.tpt.tpe } @@ -2629,7 +2630,7 @@ trait Typers extends Modes with Adaptations with Tags { override def mkSel(params: List[Symbol]) = sel.duplicate } - private def typedFunction(fun: Function, mode: Int, pt: Type): Tree = { + private def typedFunction(fun: Function, mode: Mode, pt: Type): Tree = { val numVparams = fun.vparams.length if (numVparams > definitions.MaxFunctionArity) return MaxFunctionArityError(fun) @@ -2654,7 +2655,7 @@ trait Typers extends Modes with Adaptations with Tags { else { fun match { case etaExpansion(vparams, fn, args) => - silent(_.typed(fn, forFunMode(mode), pt)) filter (_ => context.undetparams.isEmpty) map { fn1 => + silent(_.typed(fn, mode.forFunMode, pt)) filter (_ => context.undetparams.isEmpty) map { fn1 => // if context,undetparams is not empty, the function was polymorphic, // so we need the missing arguments to infer its type. See #871 //println("typing eta "+fun+":"+fn1.tpe+"/"+context.undetparams) @@ -2859,14 +2860,14 @@ trait Typers extends Modes with Adaptations with Tags { } } - def typedArg(arg: Tree, mode: Int, newmode: Int, pt: Type): Tree = { - val typedMode = onlyStickyModes(mode) | newmode - val t = withCondConstrTyper((mode & SCCmode) != 0)(_.typed(arg, typedMode, pt)) + def typedArg(arg: Tree, mode: Mode, newmode: Mode, pt: Type): Tree = { + val typedMode = mode.onlySticky | newmode + val t = withCondConstrTyper((mode & SCCmode) != NOmode)(_.typed(arg, typedMode, pt)) checkDead.inMode(typedMode, t) } - def typedArgs(args: List[Tree], mode: Int) = - args mapConserve (arg => typedArg(arg, mode, 0, WildcardType)) + def typedArgs(args: List[Tree], mode: Mode) = + args mapConserve (arg => typedArg(arg, mode, NOmode, WildcardType)) /** Type trees in `args0` against corresponding expected type in `adapted0`. 
* @@ -2876,8 +2877,8 @@ trait Typers extends Modes with Adaptations with Tags { * * (docs reverse-engineered -- AM) */ - def typedArgs(args0: List[Tree], mode: Int, formals0: List[Type], adapted0: List[Type]): List[Tree] = { - val sticky = onlyStickyModes(mode) + def typedArgs(args0: List[Tree], mode: Mode, formals0: List[Type], adapted0: List[Type]): List[Tree] = { + val sticky = mode.onlySticky def loop(args: List[Tree], formals: List[Type], adapted: List[Type]): List[Tree] = { if (args.isEmpty || adapted.isEmpty) Nil else { @@ -2885,7 +2886,7 @@ trait Typers extends Modes with Adaptations with Tags { val isVarArgs = formals.isEmpty || formals.tail.isEmpty && isRepeatedParamType(formals.head) val typedMode = sticky | ( if (isVarArgs) STARmode | BYVALmode - else if (isByNameParamType(formals.head)) 0 + else if (isByNameParamType(formals.head)) NOmode else BYVALmode ) var tree = typedArg(args.head, mode, typedMode, adapted.head) @@ -2937,7 +2938,7 @@ trait Typers extends Modes with Adaptations with Tags { } } - def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = { + def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = { // TODO_NMT: check the assumption that args nonEmpty def duplErrTree = setError(treeCopy.Apply(tree, fun0, args)) def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree } @@ -2979,7 +2980,7 @@ trait Typers extends Modes with Adaptations with Tags { if (sym1 != NoSymbol) sym = sym1 } if (sym == NoSymbol) fun - else adapt(fun setSymbol sym setType pre.memberType(sym), forFunMode(mode), WildcardType) + else adapt(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode, WildcardType) } else fun } @@ -3012,7 +3013,7 @@ trait Typers extends Modes with Adaptations with Tags { setError(tree) else { inferMethodAlternative(fun, undetparams, argtpes.toList, pt) - doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt) + doTypedApply(tree, adapt(fun, mode.forFunMode, WildcardType), args1, mode, pt) } } handleOverloaded @@ -3037,7 +3038,7 @@ trait Typers extends Modes with Adaptations with Tags { // Depending on user options, may warn or error here if // a Unit or tuple was inserted. 
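// Illustrative note (hypothetical code, not part of the original patch): the
// adaptations referred to here are argument-list adaptations such as
//   def g(x: Any) = x;        g()      // () is inserted, g receives the Unit value
//   def h(p: (Int, Int)) = p; h(1, 2)  // the two arguments are wrapped in a tuple
// checkValidAdaptation decides, based on compiler options, whether such an
// adaptation is merely warned about or rejected.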
Some(t) filter (tupledTree => - !inExprModeButNot(mode, FUNmode) + !mode.inExprModeButNot(FUNmode) || tupledTree.symbol == null || checkValidAdaptation(tupledTree, args) ) @@ -3060,7 +3061,7 @@ trait Typers extends Modes with Adaptations with Tags { } if (mt.isErroneous) duplErrTree - else if (inPatternMode(mode)) { + else if (mode.inPatternMode) { // #2064 duplErrorTree(WrongNumberOfArgsError(tree, fun)) } else if (lencmp > 0) { @@ -3157,7 +3158,7 @@ trait Typers extends Modes with Adaptations with Tags { // precise(foo) : foo.type => foo.type val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe)) def ifPatternSkipFormals(tp: Type) = tp match { - case MethodType(_, rtp) if (inPatternMode(mode)) => rtp + case MethodType(_, rtp) if (mode.inPatternMode) => rtp case _ => tp } @@ -3181,7 +3182,7 @@ trait Typers extends Modes with Adaptations with Tags { doTypedApply(tree, fun, args, mode, pt) } else { def handlePolymorphicCall = { - assert(!inPatternMode(mode), modeString(mode)) // this case cannot arise for patterns + assert(!mode.inPatternMode, mode) // this case cannot arise for patterns val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt) val strictTargs = map2(lenientTargs, tparams)((targ, tparam) => if (targ == WildcardType) tparam.tpeHK else targ) @@ -3222,7 +3223,7 @@ trait Typers extends Modes with Adaptations with Tags { if (!tree.isErrorTyped) setError(tree) else tree // @H change to setError(treeCopy.Apply(tree, fun, args)) - case otpe if inPatternMode(mode) && unapplyMember(otpe).exists => + case otpe if mode.inPatternMode && unapplyMember(otpe).exists => doTypedUnapply(tree, fun0, fun, args, mode, pt) case _ => @@ -3230,7 +3231,7 @@ trait Typers extends Modes with Adaptations with Tags { } } - def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = { + def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = { def duplErrTree = setError(treeCopy.Apply(tree, fun0, args)) def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree } @@ -3354,7 +3355,7 @@ trait Typers extends Modes with Adaptations with Tags { * * @param annClass the expected annotation class */ - def typedAnnotation(ann: Tree, mode: Int = EXPRmode, selfsym: Symbol = NoSymbol, annClass: Symbol = AnnotationClass, requireJava: Boolean = false): AnnotationInfo = { + def typedAnnotation(ann: Tree, mode: Mode = EXPRmode, selfsym: Symbol = NoSymbol, annClass: Symbol = AnnotationClass, requireJava: Boolean = false): AnnotationInfo = { var hasError: Boolean = false val pending = ListBuffer[AbsTypeError]() @@ -3399,7 +3400,7 @@ trait Typers extends Modes with Adaptations with Tags { // use of Array.apply[T: ClassTag](xs: T*): Array[T] // and Array.apply(x: Int, xs: Int*): Array[Int] (and similar) case Apply(fun, args) => - val typedFun = typed(fun, forFunMode(mode), WildcardType) + val typedFun = typed(fun, mode.forFunMode, WildcardType) if (typedFun.symbol.owner == ArrayModule.moduleClass && typedFun.symbol.name == nme.apply) pt match { case TypeRef(_, ArrayClass, targ :: _) => @@ -3443,7 +3444,7 @@ trait Typers extends Modes with Adaptations with Tags { val res = if (fun.isErroneous) ErroneousAnnotation else { - val typedFun @ Select(New(tpt), _) = typed(fun, forFunMode(mode), WildcardType) + val typedFun @ Select(New(tpt), _) = typed(fun, mode.forFunMode, WildcardType) val annType = tpt.tpe if (typedFun.isErroneous) ErroneousAnnotation @@ -3733,7 +3734,7 @@ trait Typers extends 
Modes with Adaptations with Tags { if (!checkClassType(tpt) && noGen) tpt else atPos(tree.pos)(gen.mkClassOf(tpt.tpe)) - protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Int): Tree = { + protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Mode): Tree = { for (wc <- tree.whereClauses) if (wc.symbol == NoSymbol) { namer.enterSym(wc); wc.symbol setFlag EXISTENTIAL } else context.scope enter wc.symbol @@ -3748,7 +3749,7 @@ trait Typers extends Modes with Adaptations with Tags { } // lifted out of typed1 because it's needed in typedImplicit0 - protected def typedTypeApply(tree: Tree, mode: Int, fun: Tree, args: List[Tree]): Tree = fun.tpe match { + protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match { case OverloadedType(pre, alts) => inferPolyAlternatives(fun, args map (_.tpe)) val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree) @@ -3837,7 +3838,7 @@ trait Typers extends Modes with Adaptations with Tags { // else false } - def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = { + def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = { def argToBinding(arg: Tree): Tree = arg match { case AssignOrNamedArg(Ident(name), rhs) => gen.mkTuple(List(CODE.LIT(name.toString), rhs)) case _ => gen.mkTuple(List(CODE.LIT(""), arg)) @@ -3922,10 +3923,10 @@ trait Typers extends Modes with Adaptations with Tags { println(s) } - def typed1(tree: Tree, mode: Int, pt: Type): Tree = { - def isPatternMode = inPatternMode(mode) - def inPatternConstructor = inAllModes(mode, PATTERNmode | FUNmode) - def isQualifierMode = (mode & QUALmode) != 0 + def typed1(tree: Tree, mode: Mode, pt: Type): Tree = { + def isPatternMode = mode.inPatternMode + def inPatternConstructor = mode.inAll(PATTERNmode | FUNmode) + def isQualifierMode = mode.inAll(QUALmode) // Lookup in the given class using the root mirror. def lookupInOwner(owner: Symbol, name: Name): Symbol = @@ -3947,7 +3948,7 @@ trait Typers extends Modes with Adaptations with Tags { val ann = atd.annot val arg1 = typed(atd.arg, mode, pt) /** mode for typing the annotation itself */ - val annotMode = mode & ~TYPEmode | EXPRmode + val annotMode = (mode &~ TYPEmode) | EXPRmode def resultingTypeTree(tpe: Type) = { // we need symbol-ful originals for reification @@ -4049,7 +4050,7 @@ trait Typers extends Modes with Adaptations with Tags { else context.owner.newValue(name, tree.pos) if (name != nme.WILDCARD) { - if ((mode & ALTmode) != 0) VariableInPatternAlternativeError(tree) + if (mode.inAll(ALTmode)) VariableInPatternAlternativeError(tree) namer.enterInScope(sym) } @@ -4276,7 +4277,7 @@ trait Typers extends Modes with Adaptations with Tags { UnderscoreEtaError(expr1) } - def tryTypedArgs(args: List[Tree], mode: Int): Option[List[Tree]] = { + def tryTypedArgs(args: List[Tree], mode: Mode): Option[List[Tree]] = { val c = context.makeSilent(false) c.retyping = true try { @@ -4356,7 +4357,7 @@ trait Typers extends Modes with Adaptations with Tags { val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable if (stableApplication && isPatternMode) { // treat stable function applications f() as expressions. 
- typed1(tree, mode & ~PATTERNmode | EXPRmode, pt) + typed1(tree, (mode &~ PATTERNmode) | EXPRmode, pt) } else { val funpt = if (isPatternMode) pt else WildcardType val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null @@ -4379,9 +4380,9 @@ trait Typers extends Modes with Adaptations with Tags { reportError } } - silent(_.typed(fun, forFunMode(mode), funpt), - if ((mode & EXPRmode) != 0) false else context.ambiguousErrors, - if ((mode & EXPRmode) != 0) tree else context.tree) match { + silent(_.typed(fun, mode.forFunMode, funpt), + if (mode.inExprMode) false else context.ambiguousErrors, + if (mode.inExprMode) tree else context.tree) match { case SilentResultValue(fun1) => val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1 if (Statistics.canEnable) Statistics.incCounter(typedApplyCount) @@ -4521,7 +4522,7 @@ trait Typers extends Modes with Adaptations with Tags { val owntype = ( if (!mix.isEmpty) findMixinSuper(clazz.tpe) - else if ((mode & SUPERCONSTRmode) != 0) clazz.info.firstParent + else if (mode.inAll(SUPERCONSTRmode)) clazz.info.firstParent else intersectionType(clazz.info.parents) ) treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype) @@ -4565,7 +4566,7 @@ trait Typers extends Modes with Adaptations with Tags { // symbol not found? --> try to convert implicitly to a type that does have the required // member. Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an // xml member to StringContext, which in turn has an unapply[Seq] method) - if (name != nme.CONSTRUCTOR && inExprModeOr(mode, PATTERNmode)) { + if (name != nme.CONSTRUCTOR && mode.inExprModeOr(PATTERNmode)) { val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, true, true) if ((qual1 ne qual) && !qual1.isErrorTyped) return typed(treeCopy.Select(tree, qual1, name), mode, pt) @@ -4738,7 +4739,7 @@ trait Typers extends Modes with Adaptations with Tags { setError(tree) } // ignore current variable scope in patterns to enforce linearity - val startContext = if (inNoModes(mode, PATTERNmode | TYPEPATmode)) context else context.outer + val startContext = if (mode.inNone(PATTERNmode | TYPEPATmode)) context else context.outer val nameLookup = tree.symbol match { case NoSymbol => startContext.lookupSymbol(name, qualifies) case sym => LookupSucceeded(EmptyTree, sym) @@ -4774,8 +4775,8 @@ trait Typers extends Modes with Adaptations with Tags { def typedIdentOrWildcard(tree: Ident) = { val name = tree.name if (Statistics.canEnable) Statistics.incCounter(typedIdentCount) - if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) || - (name == tpnme.WILDCARD && (mode & TYPEmode) != 0)) + if ((name == nme.WILDCARD && mode.inPatternNotFunMode) || + (name == tpnme.WILDCARD && mode.inAll(TYPEmode))) tree setType makeFullyDefined(pt) else typedIdent(tree, name) @@ -4898,7 +4899,7 @@ trait Typers extends Modes with Adaptations with Tags { } def typedStar(tree: Star) = { - if ((mode & STARmode) == 0 && !isPastTyper) + if (mode.inNone(STARmode) && !isPastTyper) StarPatternWithVarargParametersError(tree) treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt) } @@ -4958,7 +4959,7 @@ trait Typers extends Modes with Adaptations with Tags { } case Ident(tpnme.WILDCARD_STAR) => - val exprTyped = typed(expr, onlyStickyModes(mode), WildcardType) + val exprTyped = typed(expr, mode.onlySticky, WildcardType) def subArrayType(pt: Type) = if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) 
arrayType(pt) else { @@ -4967,8 +4968,8 @@ trait Typers extends Modes with Adaptations with Tags { } val (exprAdapted, baseClass) = exprTyped.tpe.typeSymbol match { - case ArrayClass => (adapt(exprTyped, onlyStickyModes(mode), subArrayType(pt)), ArrayClass) - case _ => (adapt(exprTyped, onlyStickyModes(mode), seqType(pt)), SeqClass) + case ArrayClass => (adapt(exprTyped, mode.onlySticky, subArrayType(pt)), ArrayClass) + case _ => (adapt(exprTyped, mode.onlySticky, seqType(pt)), SeqClass) } exprAdapted.tpe.baseType(baseClass) match { case TypeRef(_, _, List(elemtp)) => @@ -4979,7 +4980,7 @@ trait Typers extends Modes with Adaptations with Tags { case _ => val tptTyped = typedType(tpt, mode) - val exprTyped = typed(expr, onlyStickyModes(mode), tptTyped.tpe.deconst) + val exprTyped = typed(expr, mode.onlySticky, tptTyped.tpe.deconst) val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped) if (isPatternMode) { @@ -5011,7 +5012,7 @@ trait Typers extends Modes with Adaptations with Tags { //val undets = context.undetparams // @M: fun is typed in TAPPmode because it is being applied to its actual type parameters - val fun1 = typed(fun, forFunMode(mode) | TAPPmode, WildcardType) + val fun1 = typed(fun, mode.forFunMode | TAPPmode, WildcardType) val tparams = fun1.symbol.typeParams //@M TODO: val undets_fun = context.undetparams ? @@ -5164,7 +5165,7 @@ trait Typers extends Modes with Adaptations with Tags { } } - def typed(tree: Tree, mode: Int, pt: Type): Tree = { + def typed(tree: Tree, mode: Mode, pt: Type): Tree = { lastTreeToTyper = tree indentTyping() @@ -5184,7 +5185,7 @@ trait Typers extends Modes with Adaptations with Tags { "undetparams" -> context.undetparams, "implicitsEnabled" -> context.implicitsEnabled, "enrichmentEnabled" -> context.enrichmentEnabled, - "mode" -> modeString(mode), + "mode" -> mode, "silent" -> context.bufferErrors, "context.owner" -> context.owner ) @@ -5247,7 +5248,7 @@ trait Typers extends Modes with Adaptations with Tags { ret } - def typedPos(pos: Position, mode: Int, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt) + def typedPos(pos: Position, mode: Mode, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt) def typedPos(pos: Position)(tree: Tree) = typed(atPos(pos)(tree)) // TODO: see if this formulation would impose any penalty, since // it makes for a lot less casting. @@ -5261,13 +5262,13 @@ trait Typers extends Modes with Adaptations with Tags { /** Types qualifier `tree` of a select node. * E.g. is tree occurs in a context like `tree.m`. */ - def typedQualifier(tree: Tree, mode: Int, pt: Type): Tree = + def typedQualifier(tree: Tree, mode: Mode, pt: Type): Tree = typed(tree, EXPRmode | QUALmode | POLYmode | mode & TYPEPATmode, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit /** Types qualifier `tree` of a select node. * E.g. is tree occurs in a context like `tree.m`. 
*/ - def typedQualifier(tree: Tree, mode: Int): Tree = + def typedQualifier(tree: Tree, mode: Mode): Tree = typedQualifier(tree, mode, WildcardType) def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType) @@ -5300,23 +5301,23 @@ trait Typers extends Modes with Adaptations with Tags { } /** Types a (fully parameterized) type tree */ - def typedType(tree: Tree, mode: Int): Tree = - typed(tree, forTypeMode(mode), WildcardType) + def typedType(tree: Tree, mode: Mode): Tree = + typed(tree, mode.forTypeMode, WildcardType) /** Types a (fully parameterized) type tree */ def typedType(tree: Tree): Tree = typedType(tree, NOmode) /** Types a higher-kinded type tree -- pt denotes the expected kind*/ - def typedHigherKindedType(tree: Tree, mode: Int, pt: Type): Tree = + def typedHigherKindedType(tree: Tree, mode: Mode, pt: Type): Tree = if (pt.typeParams.isEmpty) typedType(tree, mode) // kind is known and it's * else typed(tree, HKmode, pt) - def typedHigherKindedType(tree: Tree, mode: Int): Tree = + def typedHigherKindedType(tree: Tree, mode: Mode): Tree = typed(tree, HKmode, WildcardType) /** Types a type constructor tree used in a new or supertype */ - def typedTypeConstructor(tree: Tree, mode: Int): Tree = { - val result = typed(tree, forTypeMode(mode) | FUNmode, WildcardType) + def typedTypeConstructor(tree: Tree, mode: Mode): Tree = { + val result = typed(tree, mode.forTypeMode | FUNmode, WildcardType) // get rid of type aliases for the following check (#1241) result.tpe.dealias match { @@ -5373,7 +5374,7 @@ trait Typers extends Modes with Adaptations with Tags { case None => op } - def transformedOrTyped(tree: Tree, mode: Int, pt: Type): Tree = transformed.get(tree) match { + def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = transformed.get(tree) match { case Some(tree1) => transformed -= tree; tree1 case None => typed(tree, mode, pt) } diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 834f5436dc..7b065e7cf6 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -1,6 +1,7 @@ package scala.tools package reflect +import scala.tools.nsc.EXPRmode import scala.tools.nsc.reporters._ import scala.tools.nsc.CompilerCommand import scala.tools.nsc.io.VirtualDirectory @@ -163,7 +164,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => transformDuringTyper(expr, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)( (currentTyper, expr) => { trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymkinds.value)) - currentTyper.silent(_.typed(expr, analyzer.EXPRmode, pt)) match { + currentTyper.silent(_.typed(expr, EXPRmode, pt)) match { case analyzer.SilentResultValue(result) => trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value)) result diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala index cf5b1fa2c4..600b51f376 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala @@ -2,11 +2,10 @@ package scala.tools.selectivecps -import scala.tools.nsc.Global -import 
scala.tools.nsc.typechecker.Modes +import scala.tools.nsc.{ Global, Mode } import scala.tools.nsc.MissingRequirementError -abstract class CPSAnnotationChecker extends CPSUtils with Modes { +abstract class CPSAnnotationChecker extends CPSUtils { val global: Global import global._ import definitions._ @@ -117,14 +116,14 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { bounds } - override def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = { + override def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean = { if (!cpsEnabled) return false - vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt) + vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + mode + " / " + pt) val annots1 = cpsParamAnnotation(tree.tpe) val annots2 = cpsParamAnnotation(pt) - if ((mode & global.analyzer.PATTERNmode) != 0) { + if (mode.inPatternMode) { //println("can adapt pattern annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt) if (!annots1.isEmpty) { return true @@ -133,7 +132,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { /* // not precise enough -- still relying on addAnnotations to remove things from ValDef symbols - if ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode) != 0) { + if ((mode & TYPEmode) != 0 && (mode & BYVALmode) != 0) { if (!annots1.isEmpty) { return true } @@ -142,16 +141,16 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { /* this interferes with overloading resolution - if ((mode & global.analyzer.BYVALmode) != 0 && tree.tpe <:< pt) { + if ((mode & BYVALmode) != 0 && tree.tpe <:< pt) { vprintln("already compatible, can't adapt further") return false } */ - if ((mode & global.analyzer.EXPRmode) != 0) { + if (mode.inExprMode) { if ((annots1 corresponds annots2)(_.atp <:< _.atp)) { vprintln("already same, can't adapt further") false - } else if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) { + } else if (annots1.isEmpty && !annots2.isEmpty && !mode.inByValMode) { //println("can adapt annotations? 
" + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt) if (!hasPlusMarker(tree.tpe)) { // val base = tree.tpe <:< removeAllCPSAnnotations(pt) @@ -164,10 +163,10 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { true //} } else false - } else if (!hasPlusMarker(tree.tpe) && annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.RETmode) != 0)) { + } else if (!hasPlusMarker(tree.tpe) && annots1.isEmpty && !annots2.isEmpty && mode.inRetMode) { vprintln("checking enclosing method's result type without annotations") tree.tpe <:< pt.withoutAnnotations - } else if (!hasMinusMarker(tree.tpe) && !annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) { + } else if (!hasMinusMarker(tree.tpe) && !annots1.isEmpty && mode.inByValMode) { val optCpsTypes: Option[(Type, Type)] = cpsParamTypes(tree.tpe) val optExpectedCpsTypes: Option[(Type, Type)] = cpsParamTypes(pt) if (optCpsTypes.isEmpty || optExpectedCpsTypes.isEmpty) { @@ -183,21 +182,21 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes { } else false } - override def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = { + override def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree = { if (!cpsEnabled) return tree - vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + modeString(mode) + " / " + pt) + vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + mode + " / " + pt) - val patMode = (mode & global.analyzer.PATTERNmode) != 0 - val exprMode = (mode & global.analyzer.EXPRmode) != 0 - val byValMode = (mode & global.analyzer.BYVALmode) != 0 - val retMode = (mode & global.analyzer.RETmode) != 0 + val patMode = mode.inPatternMode + val exprMode = mode.inExprMode + val byValMode = mode.inByValMode + val retMode = mode.inRetMode val annotsTree = cpsParamAnnotation(tree.tpe) val annotsExpected = cpsParamAnnotation(pt) // not sure I rephrased this comment correctly: - // replacing `patMode` in the condition below by `patMode || ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode))` + // replacing `patMode` in the condition below by `patMode || ((mode & TYPEmode) != 0 && (mode & BYVALmode))` // doesn't work correctly -- still relying on addAnnotations to remove things from ValDef symbols if (patMode && !annotsTree.isEmpty) tree modifyType removeAllCPSAnnotations else if (exprMode && !byValMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) { // shiftUnit diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala index 5318d3e540..13346d9151 100644 --- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala +++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala @@ -39,14 +39,14 @@ trait AnnotationCheckers { /** Decide whether this annotation checker can adapt a tree * that has an annotated type to the given type tp, taking * into account the given mode (see method adapt in trait Typers).*/ - def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = false + def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean = false /** Adapt a tree that has an annotated type to the given type tp, * taking into account the given mode (see method adapt in trait Typers). * An implementation cannot rely on canAdaptAnnotations being called * before. 
If the implementing class cannot do the adaptiong, it * should return the tree unchanged.*/ - def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = tree + def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree = tree /** Adapt the type of a return expression. The decision of an annotation checker * whether the type should be adapted is based on the type of the expression @@ -113,7 +113,7 @@ trait AnnotationCheckers { /** Find out whether any annotation checker can adapt a tree * to a given type. Called by Typers.adapt. */ - def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = { + def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean = { annotationCheckers.exists(_.canAdaptAnnotations(tree, mode, pt)) } @@ -121,7 +121,7 @@ trait AnnotationCheckers { * to a given type (called by Typers.adapt). Annotation checkers * that cannot do the adaption should pass the tree through * unchanged. */ - def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = { + def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree = { annotationCheckers.foldLeft(tree)((tree, checker) => checker.adaptAnnotations(tree, mode, pt)) } @@ -129,7 +129,7 @@ trait AnnotationCheckers { /** Let a registered annotation checker adapt the type of a return expression. * Annotation checkers that cannot do the adaptation should simply return * the `default` argument. - * + * * Note that the result is undefined if more than one annotation checker * returns an adapted type which is not a subtype of `default`. */ diff --git a/src/reflect/scala/reflect/internal/Mode.scala b/src/reflect/scala/reflect/internal/Mode.scala new file mode 100644 index 0000000000..850e3b5669 --- /dev/null +++ b/src/reflect/scala/reflect/internal/Mode.scala @@ -0,0 +1,149 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.reflect +package internal + +object Mode { + private implicit def liftIntBitsToMode(bits: Int): Mode = apply(bits) + def apply(bits: Int): Mode = new Mode(bits) + + /** NOmode, EXPRmode and PATTERNmode are mutually exclusive. + */ + final val NOmode: Mode = 0x000 + final val EXPRmode: Mode = 0x001 + final val PATTERNmode: Mode = 0x002 + + /** TYPEmode needs a comment. <-- XXX. + */ + final val TYPEmode: Mode = 0x004 + + /** SCCmode is orthogonal to above. When set we are + * in the this or super constructor call of a constructor. + */ + final val SCCmode: Mode = 0x008 + + /** FUNmode is orthogonal to above. + * When set we are looking for a method or constructor. + */ + final val FUNmode: Mode = 0x010 + + /** POLYmode is orthogonal to above. + * When set expression types can be polymorphic. + */ + final val POLYmode: Mode = 0x020 + + /** QUALmode is orthogonal to above. When set + * expressions may be packages and Java statics modules. + */ + final val QUALmode: Mode = 0x040 + + /** TAPPmode is set for the function/type constructor + * part of a type application. When set we do not decompose PolyTypes. + */ + final val TAPPmode: Mode = 0x080 + + /** SUPERCONSTRmode is set for the super + * in a superclass constructor call super.. + */ + final val SUPERCONSTRmode: Mode = 0x100 + + /** SNDTRYmode indicates that an application is typed for the 2nd time. + * In that case functions may no longer be coerced with implicit views. + */ + final val SNDTRYmode: Mode = 0x200 + + /** LHSmode is set for the left-hand side of an assignment. + */ + final val LHSmode: Mode = 0x400 + + /** STARmode is set when star patterns are allowed. 
+ * (This was formerly called REGPATmode.) + */ + final val STARmode: Mode = 0x1000 + + /** ALTmode is set when we are under a pattern alternative. + */ + final val ALTmode: Mode = 0x2000 + + /** HKmode is set when we are typing a higher-kinded type. + * adapt should then check kind-arity based on the prototypical type's + * kind arity. Type arguments should not be inferred. + */ + final val HKmode: Mode = 0x4000 // @M: could also use POLYmode | TAPPmode + + /** BYVALmode is set when we are typing an expression + * that occurs in a by-value position. An expression e1 is in by-value + * position within expression e2 iff it will be reduced to a value at that + * position during the evaluation of e2. Examples are by-value function + * arguments or the conditional of an if-then-else clause. + * This mode has been added to support continuations. + */ + final val BYVALmode: Mode = 0x8000 + + /** TYPEPATmode is set when we are typing a type in a pattern. + */ + final val TYPEPATmode: Mode = 0x10000 + + /** RETmode is set when we are typing a return expression. + */ + final val RETmode: Mode = 0x20000 + + final private val StickyModes: Mode = EXPRmode | PATTERNmode | TYPEmode | ALTmode + + /** Translates a mask of mode flags into something readable. + */ + private val modeNameMap = Map[Int, String]( + (1 << 0) -> "EXPRmode", + (1 << 1) -> "PATTERNmode", + (1 << 2) -> "TYPEmode", + (1 << 3) -> "SCCmode", + (1 << 4) -> "FUNmode", + (1 << 5) -> "POLYmode", + (1 << 6) -> "QUALmode", + (1 << 7) -> "TAPPmode", + (1 << 8) -> "SUPERCONSTRmode", + (1 << 9) -> "SNDTRYmode", + (1 << 10) -> "LHSmode", + (1 << 11) -> "", + (1 << 12) -> "STARmode", + (1 << 13) -> "ALTmode", + (1 << 14) -> "HKmode", + (1 << 15) -> "BYVALmode", + (1 << 16) -> "TYPEPATmode" + ).map({ case (k, v) => Mode(k) -> v }) +} +import Mode._ + +final class Mode private (val bits: Int) extends AnyVal { + def &(other: Mode): Mode = new Mode(bits & other.bits) + def |(other: Mode): Mode = new Mode(bits | other.bits) + def &~(other: Mode): Mode = new Mode(bits & ~(other.bits)) + + def onlySticky = this & Mode.StickyModes + def forFunMode = this & (Mode.StickyModes | SCCmode) | FUNmode | POLYmode | BYVALmode + def forTypeMode = + if (inAny(PATTERNmode | TYPEPATmode)) TYPEmode | TYPEPATmode + else TYPEmode + + def inAll(required: Mode) = (this & required) == required + def inAny(required: Mode) = (this & required) !=NOmode + def inNone(prohibited: Mode) = (this & prohibited) == NOmode + def inHKMode = inAll(HKmode) + def inFunMode = inAll(FUNmode) + def inPolyMode = inAll(POLYmode) + def inPatternMode = inAll(PATTERNmode) + def inExprMode = inAll(EXPRmode) + def inByValMode = inAll(BYVALmode) + def inRetMode = inAll(RETmode) + + def inPatternNotFunMode = inPatternMode && !inFunMode + def inExprModeOr(others: Mode) = inAny(EXPRmode | others) + def inExprModeButNot(prohibited: Mode) = inAll(EXPRmode) && inNone(prohibited) + + override def toString = + if (bits == 0) "NOmode" + else (modeNameMap filterKeys inAll).values.toList.sorted mkString " " +} diff --git a/test/files/pos/CustomGlobal.scala b/test/files/pos/CustomGlobal.scala index 30bf227950..a5668bd7c0 100644 --- a/test/files/pos/CustomGlobal.scala +++ b/test/files/pos/CustomGlobal.scala @@ -22,7 +22,7 @@ class CustomGlobal(currentSettings: Settings, reporter: Reporter) extends Global override def newTyper(context: Context): Typer = new CustomTyper(context) class CustomTyper(context : Context) extends Typer(context) { - override def typed(tree: Tree, mode: Int, pt: Type): Tree = { + override 
def typed(tree: Tree, mode: Mode, pt: Type): Tree = { if (tree.summaryString contains "Bippy") println("I'm typing a Bippy! It's a " + tree.shortClass + ".") -- cgit v1.2.3 From 3bb8745ca7dcda8c81103b3965b83973b4a72214 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 10 Jan 2013 13:33:45 -0800 Subject: Fixes and features for javap (fixing SI-6894) Output filtering is refactored for javap6. That means javap6 also supports -raw. Handling of # is: Foo#foo filter on foo, Foo# filter on apply, -fun Foo#foo for anonfuns of foo, -fun Foo# anonfuns filtering on apply. One is loath to add command options, so it's not possible to ask for "only apply methods in anonfuns pertaining to a method." Hypothetical syntax to say "show me the apply only": -fun Foo#foo(), for future reference. --- src/compiler/scala/tools/util/Javap.scala | 165 +++++++++++++-------- test/files/run/repl-javap-outdir-funs/foo_1.scala | 6 + .../run/repl-javap-outdir-funs/run-repl_7.scala | 12 ++ 3 files changed, 118 insertions(+), 65 deletions(-) create mode 100644 test/files/run/repl-javap-outdir-funs/foo_1.scala create mode 100644 test/files/run/repl-javap-outdir-funs/run-repl_7.scala (limited to 'src') diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala index b39d5a5d89..cbfd8fec51 100644 --- a/src/compiler/scala/tools/util/Javap.scala +++ b/src/compiler/scala/tools/util/Javap.scala @@ -61,11 +61,11 @@ class JavapClass( def apply(args: Seq[String]): List[JpResult] = { val (options, claases) = args partition (s => (s startsWith "-") && s.length > 1) val (flags, upgraded) = upgrade(options) - if (flags.help || claases.isEmpty) List(JpResult(JavapTool.helper(printWriter))) - else { - val targets = if (flags.fun) FunFinder(loader, intp).funs(claases) else claases - tool(flags.raw, upgraded)(targets map (claas => claas -> bytesFor(claas, flags.app))) - } + import flags.{ app, fun, help, raw } + val targets = if (fun && !help) FunFinder(loader, intp).funs(claases) else claases + if (help || claases.isEmpty) List(JpResult(JavapTool.helper(printWriter))) + else if (targets.isEmpty) List(JpResult("No anonfuns found.")) + else tool(raw, upgraded)(targets map (claas => claas -> bytesFor(claas, app))) } /** Cull our tool options. */ @@ -74,10 +74,14 @@ class JavapClass( case (t,s) => (t, JavapTool.DefaultOptions) } - /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member). */ + /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar"). */ private def bytesFor(path: String, app: Boolean) = Try { def last = intp.get.mostRecentVar // fail if no intp - def req = if (path == "-") last else path.splitHashMember._1 + def req = if (path == "-") last else { + val s = path.splitHashMember + if (s._1.nonEmpty) s._1 + else s._2 getOrElse "#" + } def asAppBody(s: String) = { val (cls, fix) = s.splitSuffix s"${cls}$$delayedInit$$body${fix}" @@ -146,16 +150,70 @@ class JavapClass( load(q) } + /** Base class for javap tool adapters for java 6 and 7. */ abstract class JavapTool { type ByteAry = Array[Byte] type Input = Pair[String, Try[ByteAry]] + + /** Run the tool. */ def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] + // Since the tool is loaded by reflection, check for catastrophic failure. 
protected def failed: Boolean implicit protected class Failer[A](a: =>A) { def orFailed[B >: A](b: =>B) = if (failed) b else a } protected def noToolError = new JpError(s"No javap tool available: ${getClass.getName} failed to initialize.") + + // output filtering support + val writer = new CharArrayWriter + def written = { + writer.flush() + val w = writer.toString + writer.reset() + w + } + + /** Create a Showable with output massage. + * @param raw show ugly repl names + * @param target attempt to filter output to show region of interest + * @param preamble other messages to output + */ + def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable = new Showable { + // ReplStrippingWriter clips and scrubs on write(String) + // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping + def show() = + if (raw && intp.isDefined) intp.get withoutUnwrapping { writeLines() } + else writeLines() + private def writeLines() { + // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#; + // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#? + val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s } + var filtering = false // true if in region matching filter + // true to output + def checkFilter(line: String) = if (filterOn.isEmpty) true else { + // cheap heuristic, todo maybe parse for the java sig. + // method sigs end in paren semi + def isAnyMethod = line.endsWith(");") + def isOurMethod = { + val lparen = line.lastIndexOf('(') + val blank = line.lastIndexOf(' ', lparen) + (blank >= 0 && line.substring(blank+1, lparen) == filterOn.get) + } + filtering = if (filtering) { + // next blank line terminates section + // for -public, next line is next method, more or less + line.trim.nonEmpty && !isAnyMethod + } else { + isAnyMethod && isOurMethod + } + filtering + } + for (line <- Source.fromString(preamble + written).getLines; if checkFilter(line)) + printWriter write line+lineSeparator + printWriter.flush() + } + } } class JavapTool6 extends JavapTool { @@ -165,10 +223,13 @@ class JavapClass( override protected def failed = (EnvClass eq null) || (PrinterClass eq null) val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass) orFailed null + val printWrapper = new PrintWriter(writer) def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter = - PrinterCtr.newInstance(in, printWriter, env) orFailed null - def showable(fp: FakePrinter) = new Showable { - def show() = fp.asInstanceOf[{ def print(): Unit }].print() + PrinterCtr.newInstance(in, printWrapper, env) orFailed null + def showable(raw: Boolean, target: String, fp: FakePrinter): Showable = { + fp.asInstanceOf[{ def print(): Unit }].print() // run tool and flush to buffer + printWrapper.flush() // just in case + showWithPreamble(raw, target) } lazy val parser = new JpOptions @@ -187,8 +248,8 @@ class JavapClass( override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { - case (_, Success(ba)) => JpResult(showable(newPrinter(new ByteArrayInputStream(ba), newEnv(options)))) - case (_, Failure(e)) => JpResult(e.toString) + case (claas, Success(ba)) => JpResult(showable(raw, claas, newPrinter(new ByteArrayInputStream(ba), newEnv(options)))) + case (_, Failure(e)) => JpResult(e.toString) }).toList orFailed List(noToolError) } @@ -284,50 +345,11 @@ class JavapClass( case _ => false } } - val writer = new CharArrayWriter def fileManager(inputs: 
Seq[Input]) = new JavapFileManager(inputs)() - def showable(raw: Boolean, target: String): Showable = { - val written = { - writer.flush() - val w = writer.toString - writer.reset() - w - } - val msgs = reporter.reportable(raw) - new Showable { - val mw = msgs + written - // ReplStrippingWriter clips and scrubs on write(String) - // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping - def show() = - if (raw && intp.isDefined) intp.get withoutUnwrapping { writeLines() } - else writeLines() - private def writeLines() { - // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo# - val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s } - var filtering = false // true if in region matching filter - // true to output - def checkFilter(line: String) = if (filterOn.isEmpty) true else { - def isOurMethod = { - val lparen = line.lastIndexOf('(') - val blank = line.lastIndexOf(' ', lparen) - (blank >= 0 && line.substring(blank+1, lparen) == filterOn.get) - } - filtering = if (filtering) { - // next blank line terminates section - line.trim.nonEmpty - } else { - // cheap heuristic, todo maybe parse for the java sig. - // method sigs end in paren semi - line.endsWith(");") && isOurMethod - } - filtering - } - for (line <- Source.fromString(mw).getLines; if checkFilter(line)) - printWriter write line+lineSeparator - printWriter.flush() - } - } - } + + // show tool messages and tool output, with output massage + def showable(raw: Boolean, target: String): Showable = showWithPreamble(raw, target, reporter.reportable(raw)) + // eventually, use the tool interface def task(options: Seq[String], claases: Seq[String], inputs: Seq[Input]): Task = { //ServiceLoader.load(classOf[javax.tools.DisassemblerTool]). 
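// Usage illustration for the '#' handling described in the commit message above
// (a sketch with placeholder names Foo/bar, not part of the original patch):
//   :javap Foo#bar        // disassemble Foo, showing only method bar
//   :javap Foo#           // disassemble Foo, showing only its apply method
//   :javap -fun Foo#bar   // show the anonfuns generated inside bar
//   :javap -fun Foo#      // anonfuns of Foo, each filtered on its apply method
// As the commit message notes, these cannot be combined: there is no way to ask
// for only the apply methods of the anonfuns belonging to one particular method.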
@@ -529,7 +551,11 @@ object JavapClass { /* only the file location from which the given class is loaded */ def locate(k: String): Option[Path] = { Try { - (cl loadClass k).getProtectionDomain.getCodeSource.getLocation + val claas = try cl loadClass k catch { + case _: NoClassDefFoundError => null // let it snow + } + // cf ScalaClassLoader.originOfClass + claas.getProtectionDomain.getCodeSource.getLocation } match { case Success(null) => None case Success(loc) if loc.isFile => Some(Path(new JFile(loc.toURI))) @@ -588,28 +614,37 @@ object JavapClass { (new Jar(f) map maybe).flatten } def loadable(name: String) = loader resourceable name - // translated class, optional member, whether it is repl output - def translate(s: String): (String, Option[String], Boolean) = { + // translated class, optional member, opt member to filter on, whether it is repl output + def translate(s: String): (String, Option[String], Option[String], Boolean) = { val (k0, m0) = s.splitHashMember + val k = k0.asClassName val member = m0 filter (_.nonEmpty) // take Foo# as no member, not "" + val filter = m0 flatMap { case "" => Some("apply") case _ => None } // take Foo# as filter on apply // class is either something replish or available to loader // $line.$read$$etc$Foo#member - ((intp flatMap (_ translatePath k0) filter (loadable) map ((_, member, true))) - // s = "f" and $line.$read$$etc$#f is what we're after, ignoring any #member - orElse (intp flatMap (_ translateEnclosingClass k0) map ((_, Some(s), true))) - getOrElse (k0, member, false)) + ((intp flatMap (_ translatePath k) filter (loadable) map ((_, member, filter, true))) + // s = "f" and $line.$read$$etc$#f is what we're after, + // ignoring any #member (except take # as filter on #apply) + orElse (intp flatMap (_ translateEnclosingClass k) map ((_, Some(k), filter, true))) + getOrElse (k, member, filter, false)) } /** Find the classnames of anonfuns associated with k, * where k may be an available class or a symbol in scope. */ def funsOf(k0: String): Seq[String] = { // class is either something replish or available to loader - val (k, member, isReplish) = translate(k0) + val (k, member, filter, isReplish) = translate(k0) val splat = k split "\\." val name = splat.last val prefix = if (splat.length > 1) splat.init mkString "/" else "" val pkg = if (splat.length > 1) splat.init mkString "." else "" - def packaged(s: String) = if (pkg.isEmpty) s else s"$pkg.$s" + // reconstitute an anonfun with a package + // if filtered, add the hash back, e.g. pkg.Foo#bar, pkg.Foo$anon$1#apply + def packaged(s: String) = { + val p = if (pkg.isEmpty) s else s"$pkg.$s" + val pm = filter map (p + "#" + _) + pm getOrElse p + } // is this translated path in (usually virtual) repl outdir? or loadable from filesystem? 
      val fs = if (isReplish) {
        def outed(d: AbstractFile, p: Seq[String]): Option[AbstractFile] = {
@@ -628,7 +663,7 @@ object JavapClass {
      }
      fs match {
        case Some(xs) => xs.to[Seq] // maybe empty
-        case None => Seq(k0) // just bail on fail
+        case None => Seq() // nothing found, e.g., junk input
      }
    }
    def funs(ks: Seq[String]) = ks flatMap funsOf _
diff --git a/test/files/run/repl-javap-outdir-funs/foo_1.scala b/test/files/run/repl-javap-outdir-funs/foo_1.scala
new file mode 100644
index 0000000000..9b98e94733
--- /dev/null
+++ b/test/files/run/repl-javap-outdir-funs/foo_1.scala
@@ -0,0 +1,6 @@
+
+package disktest
+
+class Foo {
+  def m(vs: List[Int]) = vs map (_ + 1)
+}
diff --git a/test/files/run/repl-javap-outdir-funs/run-repl_7.scala b/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
new file mode 100644
index 0000000000..dfe3dae270
--- /dev/null
+++ b/test/files/run/repl-javap-outdir-funs/run-repl_7.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.JavapTest
+
+object Test extends JavapTest {
+  def code = """
+    |:javap -fun disktest/Foo.class
+  """.stripMargin
+
+  override def yah(res: Seq[String]) = {
+    def filtered = res filter (_ contains "public final class disktest.Foo")
+    1 == filtered.size
+  }
+}
-- cgit v1.2.3


From 38958f458cec09dacece690b2376d96fc7758972 Mon Sep 17 00:00:00 2001
From: Adriaan Moors
Date: Thu, 10 Jan 2013 14:43:00 -0800
Subject: SI-6955 switch emission no longer foiled by type alias

dealiasWiden the type of the scrutinee before checking it's switchable

now with tests! (using IcodeTest since javap is not available everywhere)

rebase of #1879
---
 .../tools/nsc/typechecker/PatternMatching.scala | 2 +-
 test/files/run/t6955.check | 1 +
 test/files/run/t6955.scala | 28 ++++++++++++++++++++++
 3 files changed, 30 insertions(+), 1 deletion(-)
 create mode 100644 test/files/run/t6955.check
 create mode 100644 test/files/run/t6955.scala

(limited to 'src')

diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 49eca828a9..5f70da6a63 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -3520,7 +3520,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
     override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] = { import CODE._
       val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride, unchecked)
       // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
-      if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) {
+      if (regularSwitchMaker.switchableTpe(scrutSym.tpe.dealiasWiden)) {
        val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
        if (caseDefsWithDefault isEmpty) None // not worth emitting a switch.
       else {
diff --git a/test/files/run/t6955.check b/test/files/run/t6955.check
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/test/files/run/t6955.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t6955.scala b/test/files/run/t6955.scala
new file mode 100644
index 0000000000..980aa420cc
--- /dev/null
+++ b/test/files/run/t6955.scala
@@ -0,0 +1,28 @@
+import scala.tools.partest.IcodeTest
+
+// this class should compile to code that uses switches (twice)
+class Switches {
+  type Tag = Byte
+
+  def switchBad(i: Tag): Int = i match { // notice type of i is Tag = Byte
+    case 1 => 1
+    case 2 => 2
+    case 3 => 3
+    case _ => 0
+  }
+
+  // this worked before, should keep working
+  def switchOkay(i: Byte): Int = i match {
+    case 1 => 1
+    case 2 => 2
+    case 3 => 3
+    case _ => 0
+  }
+}
+
+object Test extends IcodeTest {
+  // ensure we get two switches out of this -- ignore the rest of the output for robustness
+  // exclude the constant we emit for the "SWITCH ..." string below (we get the icode for all the code you see in this file)
+  override def show() = println(collectIcode("").filter(x => x.indexOf("SWITCH ...") >= 0 && x.indexOf("CONSTANT(") == -1).size)
+}
+
-- cgit v1.2.3


From b61a64ddb5700b5d77295df43af0e3feb3c46ac6 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Sat, 12 Jan 2013 00:54:44 +0100
Subject: SI-6964 Remove build managers, both simple and refined.

Deprecated in 2.10.0, out to pasture in 2.11.0. Users are advised
to migrate to:

    https://github.com/typesafehub/zinc
    http://www.scala-sbt.org/
---
 build.xml | 1 -
 project/Partest.scala | 3 +-
 src/compiler/scala/tools/nsc/Global.scala | 19 +-
 src/compiler/scala/tools/nsc/Main.scala | 18 --
 .../scala/tools/nsc/backend/JavaPlatform.scala | 6 +-
 .../nsc/dependencies/DependencyAnalysis.scala | 253 ---------------
 .../scala/tools/nsc/dependencies/Files.scala | 177 -----------
 .../scala/tools/nsc/interactive/BuildManager.scala | 83 -----
 .../nsc/interactive/RefinedBuildManager.scala | 354 ---------------------
 .../tools/nsc/interactive/SimpleBuildManager.scala | 100 ------
 .../scala/tools/nsc/settings/ScalaSettings.scala | 2 -
 src/partest/README | 1 -
 src/partest/scala/tools/partest/PartestTask.scala | 8 -
 .../scala/tools/partest/nest/ConsoleRunner.scala | 1 -
 src/partest/scala/tools/partest/nest/NestUI.scala | 1 -
 .../scala/tools/partest/nest/RunnerManager.scala | 116 -------
 .../scala/tools/partest/nest/TestFile.scala | 1 -
 test/disabled/presentation/simple-tests.check | 2 -
 test/files/buildmanager/annotated/A.scala | 1 -
 test/files/buildmanager/annotated/annotated.check | 6 -
 test/files/buildmanager/annotated/annotated.test | 2 -
 test/files/buildmanager/freshnames/A.scala | 16 -
 test/files/buildmanager/freshnames/B.scala | 4 -
 .../files/buildmanager/freshnames/freshnames.check | 6 -
 test/files/buildmanager/freshnames/freshnames.test | 2 -
 test/files/buildmanager/infer/A.scala | 16 -
 test/files/buildmanager/infer/infer.check | 6 -
 test/files/buildmanager/infer/infer.test | 2 -
 .../buildmanager/namesdefaults/defparam-use.scala | 5 -
 .../buildmanager/namesdefaults/defparam.scala | 7 -
 .../buildmanager/namesdefaults/namesdefaults.check | 9 -
 .../buildmanager/namesdefaults/namesdefaults.test | 3 -
 test/files/buildmanager/simpletest/A.scala | 3 -
 test/files/buildmanager/simpletest/B.scala | 3 -
 .../simpletest/simpletest.changes/A1.scala | 1 -
 .../files/buildmanager/simpletest/simpletest.check | 11 -
 test/files/buildmanager/simpletest/simpletest.test | 3 -
 test/files/buildmanager/t2280/A.scala | 1 -
test/files/buildmanager/t2280/B.java | 2 - test/files/buildmanager/t2280/t2280.check | 6 - test/files/buildmanager/t2280/t2280.test | 2 - test/files/buildmanager/t2556_1/A.scala | 3 - test/files/buildmanager/t2556_1/B.scala | 3 - .../buildmanager/t2556_1/t2556_1.changes/A2.scala | 4 - test/files/buildmanager/t2556_1/t2556_1.check | 12 - test/files/buildmanager/t2556_1/t2556_1.test | 3 - test/files/buildmanager/t2556_2/A.scala | 4 - test/files/buildmanager/t2556_2/B.scala | 2 - test/files/buildmanager/t2556_2/C.scala | 4 - .../buildmanager/t2556_2/t2556_2.changes/A2.scala | 4 - test/files/buildmanager/t2556_2/t2556_2.check | 13 - test/files/buildmanager/t2556_2/t2556_2.test | 3 - test/files/buildmanager/t2556_3/A.scala | 5 - test/files/buildmanager/t2556_3/B.scala | 5 - test/files/buildmanager/t2556_3/C.scala | 2 - .../buildmanager/t2556_3/t2556_3.changes/A2.scala | 5 - test/files/buildmanager/t2556_3/t2556_3.check | 18 -- test/files/buildmanager/t2556_3/t2556_3.test | 3 - test/files/buildmanager/t2557/A.scala | 4 - test/files/buildmanager/t2557/B.scala | 4 - test/files/buildmanager/t2557/C.scala | 3 - test/files/buildmanager/t2557/D.scala | 1 - test/files/buildmanager/t2557/E.scala | 1 - test/files/buildmanager/t2557/F.scala | 4 - .../buildmanager/t2557/t2557.changes/D2.scala | 2 - test/files/buildmanager/t2557/t2557.check | 10 - test/files/buildmanager/t2557/t2557.test | 3 - test/files/buildmanager/t2559/A.scala | 5 - test/files/buildmanager/t2559/D.scala | 4 - .../buildmanager/t2559/t2559.changes/A2.scala | 5 - test/files/buildmanager/t2559/t2559.check | 9 - test/files/buildmanager/t2559/t2559.test | 3 - test/files/buildmanager/t2562/A.scala | 7 - test/files/buildmanager/t2562/B.scala | 8 - .../buildmanager/t2562/t2562.changes/A2.scala | 8 - test/files/buildmanager/t2562/t2562.check | 12 - test/files/buildmanager/t2562/t2562.test | 3 - test/files/buildmanager/t2649/A.scala | 3 - test/files/buildmanager/t2649/B.scala | 4 - .../buildmanager/t2649/t2649.changes/A2.scala | 4 - test/files/buildmanager/t2649/t2649.check | 9 - test/files/buildmanager/t2649/t2649.test | 3 - test/files/buildmanager/t2650_1/A.scala | 4 - test/files/buildmanager/t2650_1/B.scala | 3 - .../buildmanager/t2650_1/t2650_1.changes/A2.scala | 3 - test/files/buildmanager/t2650_1/t2650_1.check | 12 - test/files/buildmanager/t2650_1/t2650_1.test | 3 - test/files/buildmanager/t2650_2/A.scala | 3 - test/files/buildmanager/t2650_2/B.scala | 4 - .../buildmanager/t2650_2/t2650_2.changes/A2.scala | 4 - test/files/buildmanager/t2650_2/t2650_2.check | 14 - test/files/buildmanager/t2650_2/t2650_2.test | 3 - test/files/buildmanager/t2650_3/A.scala | 4 - test/files/buildmanager/t2650_3/B.scala | 3 - .../buildmanager/t2650_3/t2650_3.changes/A2.scala | 4 - test/files/buildmanager/t2650_3/t2650_3.check | 14 - test/files/buildmanager/t2650_3/t2650_3.test | 3 - test/files/buildmanager/t2650_4/A.scala | 5 - test/files/buildmanager/t2650_4/B.scala | 3 - .../buildmanager/t2650_4/t2650_4.changes/A2.scala | 5 - test/files/buildmanager/t2650_4/t2650_4.check | 14 - test/files/buildmanager/t2650_4/t2650_4.test | 3 - test/files/buildmanager/t2651_2/A.scala | 1 - .../buildmanager/t2651_2/t2651_2.changes/A2.scala | 1 - test/files/buildmanager/t2651_2/t2651_2.check | 6 - test/files/buildmanager/t2651_2/t2651_2.test | 3 - test/files/buildmanager/t2651_3/A.scala | 3 - .../buildmanager/t2651_3/t2651_3.changes/A2.scala | 3 - test/files/buildmanager/t2651_3/t2651_3.check | 6 - test/files/buildmanager/t2651_3/t2651_3.test | 3 - 
test/files/buildmanager/t2651_4/A.scala | 5 - test/files/buildmanager/t2651_4/B.scala | 3 - .../buildmanager/t2651_4/t2651_4.changes/A2.scala | 5 - test/files/buildmanager/t2651_4/t2651_4.check | 13 - test/files/buildmanager/t2651_4/t2651_4.test | 3 - test/files/buildmanager/t2653/A.scala | 2 - test/files/buildmanager/t2653/B.scala | 3 - .../buildmanager/t2653/t2653.changes/A2.scala | 2 - test/files/buildmanager/t2653/t2653.check | 15 - test/files/buildmanager/t2653/t2653.test | 3 - test/files/buildmanager/t2654/A.scala | 2 - test/files/buildmanager/t2654/B.scala | 1 - .../buildmanager/t2654/t2654.changes/A2.scala | 4 - test/files/buildmanager/t2654/t2654.check | 6 - test/files/buildmanager/t2654/t2654.test | 3 - test/files/buildmanager/t2655/A.scala | 4 - test/files/buildmanager/t2655/B.scala | 3 - .../buildmanager/t2655/t2655.changes/A2.scala | 4 - test/files/buildmanager/t2655/t2655.check | 13 - test/files/buildmanager/t2655/t2655.test | 3 - test/files/buildmanager/t2657/A.scala | 3 - test/files/buildmanager/t2657/B.scala | 4 - .../buildmanager/t2657/t2657.changes/A2.scala | 3 - test/files/buildmanager/t2657/t2657.check | 14 - test/files/buildmanager/t2657/t2657.test | 3 - test/files/buildmanager/t2789/A.scala | 5 - test/files/buildmanager/t2789/B.scala | 3 - .../buildmanager/t2789/t2789.changes/A2.scala | 5 - test/files/buildmanager/t2789/t2789.check | 11 - test/files/buildmanager/t2789/t2789.test | 3 - test/files/buildmanager/t2790/A.scala | 5 - test/files/buildmanager/t2790/B.scala | 4 - .../buildmanager/t2790/t2790.changes/A2.scala | 4 - test/files/buildmanager/t2790/t2790.check | 13 - test/files/buildmanager/t2790/t2790.test | 3 - test/files/buildmanager/t2792/A1.scala | 3 - test/files/buildmanager/t2792/A2.scala | 4 - test/files/buildmanager/t2792/A3.scala | 3 - .../buildmanager/t2792/t2792.changes/A1_1.scala | 3 - test/files/buildmanager/t2792/t2792.check | 14 - test/files/buildmanager/t2792/t2792.test | 3 - test/files/buildmanager/t3045/A.java | 7 - test/files/buildmanager/t3045/t3045.check | 3 - test/files/buildmanager/t3045/t3045.test | 1 - test/files/buildmanager/t3054/bar/Bar.java | 7 - test/files/buildmanager/t3054/foo/Foo.scala | 5 - test/files/buildmanager/t3054/t3054.check | 3 - test/files/buildmanager/t3054/t3054.test | 1 - test/files/buildmanager/t3059/A.scala | 4 - test/files/buildmanager/t3059/B.scala | 4 - test/files/buildmanager/t3059/t3059.check | 6 - test/files/buildmanager/t3059/t3059.test | 2 - test/files/buildmanager/t3133/A.java | 7 - test/files/buildmanager/t3133/t3133.check | 3 - test/files/buildmanager/t3133/t3133.test | 1 - test/files/buildmanager/t3140/A.scala | 8 - test/files/buildmanager/t3140/t3140.check | 6 - test/files/buildmanager/t3140/t3140.test | 2 - test/files/buildmanager/t4215/A.scala | 5 - test/files/buildmanager/t4215/t4215.check | 6 - test/files/buildmanager/t4215/t4215.test | 2 - 171 files changed, 3 insertions(+), 1902 deletions(-) delete mode 100644 src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala delete mode 100644 src/compiler/scala/tools/nsc/dependencies/Files.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/BuildManager.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala delete mode 100644 test/files/buildmanager/annotated/A.scala delete mode 100644 test/files/buildmanager/annotated/annotated.check delete mode 100644 test/files/buildmanager/annotated/annotated.test delete 
mode 100644 test/files/buildmanager/freshnames/A.scala delete mode 100644 test/files/buildmanager/freshnames/B.scala delete mode 100644 test/files/buildmanager/freshnames/freshnames.check delete mode 100644 test/files/buildmanager/freshnames/freshnames.test delete mode 100644 test/files/buildmanager/infer/A.scala delete mode 100644 test/files/buildmanager/infer/infer.check delete mode 100644 test/files/buildmanager/infer/infer.test delete mode 100644 test/files/buildmanager/namesdefaults/defparam-use.scala delete mode 100644 test/files/buildmanager/namesdefaults/defparam.scala delete mode 100644 test/files/buildmanager/namesdefaults/namesdefaults.check delete mode 100644 test/files/buildmanager/namesdefaults/namesdefaults.test delete mode 100644 test/files/buildmanager/simpletest/A.scala delete mode 100644 test/files/buildmanager/simpletest/B.scala delete mode 100644 test/files/buildmanager/simpletest/simpletest.changes/A1.scala delete mode 100644 test/files/buildmanager/simpletest/simpletest.check delete mode 100644 test/files/buildmanager/simpletest/simpletest.test delete mode 100644 test/files/buildmanager/t2280/A.scala delete mode 100644 test/files/buildmanager/t2280/B.java delete mode 100644 test/files/buildmanager/t2280/t2280.check delete mode 100644 test/files/buildmanager/t2280/t2280.test delete mode 100644 test/files/buildmanager/t2556_1/A.scala delete mode 100644 test/files/buildmanager/t2556_1/B.scala delete mode 100644 test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala delete mode 100644 test/files/buildmanager/t2556_1/t2556_1.check delete mode 100644 test/files/buildmanager/t2556_1/t2556_1.test delete mode 100644 test/files/buildmanager/t2556_2/A.scala delete mode 100644 test/files/buildmanager/t2556_2/B.scala delete mode 100644 test/files/buildmanager/t2556_2/C.scala delete mode 100644 test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala delete mode 100644 test/files/buildmanager/t2556_2/t2556_2.check delete mode 100644 test/files/buildmanager/t2556_2/t2556_2.test delete mode 100644 test/files/buildmanager/t2556_3/A.scala delete mode 100644 test/files/buildmanager/t2556_3/B.scala delete mode 100644 test/files/buildmanager/t2556_3/C.scala delete mode 100644 test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala delete mode 100644 test/files/buildmanager/t2556_3/t2556_3.check delete mode 100644 test/files/buildmanager/t2556_3/t2556_3.test delete mode 100644 test/files/buildmanager/t2557/A.scala delete mode 100644 test/files/buildmanager/t2557/B.scala delete mode 100644 test/files/buildmanager/t2557/C.scala delete mode 100644 test/files/buildmanager/t2557/D.scala delete mode 100644 test/files/buildmanager/t2557/E.scala delete mode 100644 test/files/buildmanager/t2557/F.scala delete mode 100644 test/files/buildmanager/t2557/t2557.changes/D2.scala delete mode 100644 test/files/buildmanager/t2557/t2557.check delete mode 100644 test/files/buildmanager/t2557/t2557.test delete mode 100644 test/files/buildmanager/t2559/A.scala delete mode 100644 test/files/buildmanager/t2559/D.scala delete mode 100644 test/files/buildmanager/t2559/t2559.changes/A2.scala delete mode 100644 test/files/buildmanager/t2559/t2559.check delete mode 100644 test/files/buildmanager/t2559/t2559.test delete mode 100644 test/files/buildmanager/t2562/A.scala delete mode 100644 test/files/buildmanager/t2562/B.scala delete mode 100644 test/files/buildmanager/t2562/t2562.changes/A2.scala delete mode 100644 test/files/buildmanager/t2562/t2562.check delete mode 100644 test/files/buildmanager/t2562/t2562.test 
delete mode 100644 test/files/buildmanager/t2649/A.scala delete mode 100644 test/files/buildmanager/t2649/B.scala delete mode 100644 test/files/buildmanager/t2649/t2649.changes/A2.scala delete mode 100644 test/files/buildmanager/t2649/t2649.check delete mode 100644 test/files/buildmanager/t2649/t2649.test delete mode 100644 test/files/buildmanager/t2650_1/A.scala delete mode 100644 test/files/buildmanager/t2650_1/B.scala delete mode 100644 test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala delete mode 100644 test/files/buildmanager/t2650_1/t2650_1.check delete mode 100644 test/files/buildmanager/t2650_1/t2650_1.test delete mode 100644 test/files/buildmanager/t2650_2/A.scala delete mode 100644 test/files/buildmanager/t2650_2/B.scala delete mode 100644 test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala delete mode 100644 test/files/buildmanager/t2650_2/t2650_2.check delete mode 100644 test/files/buildmanager/t2650_2/t2650_2.test delete mode 100644 test/files/buildmanager/t2650_3/A.scala delete mode 100644 test/files/buildmanager/t2650_3/B.scala delete mode 100644 test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala delete mode 100644 test/files/buildmanager/t2650_3/t2650_3.check delete mode 100644 test/files/buildmanager/t2650_3/t2650_3.test delete mode 100644 test/files/buildmanager/t2650_4/A.scala delete mode 100644 test/files/buildmanager/t2650_4/B.scala delete mode 100644 test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala delete mode 100644 test/files/buildmanager/t2650_4/t2650_4.check delete mode 100644 test/files/buildmanager/t2650_4/t2650_4.test delete mode 100644 test/files/buildmanager/t2651_2/A.scala delete mode 100644 test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala delete mode 100644 test/files/buildmanager/t2651_2/t2651_2.check delete mode 100644 test/files/buildmanager/t2651_2/t2651_2.test delete mode 100644 test/files/buildmanager/t2651_3/A.scala delete mode 100644 test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala delete mode 100644 test/files/buildmanager/t2651_3/t2651_3.check delete mode 100644 test/files/buildmanager/t2651_3/t2651_3.test delete mode 100644 test/files/buildmanager/t2651_4/A.scala delete mode 100644 test/files/buildmanager/t2651_4/B.scala delete mode 100644 test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala delete mode 100644 test/files/buildmanager/t2651_4/t2651_4.check delete mode 100644 test/files/buildmanager/t2651_4/t2651_4.test delete mode 100644 test/files/buildmanager/t2653/A.scala delete mode 100644 test/files/buildmanager/t2653/B.scala delete mode 100644 test/files/buildmanager/t2653/t2653.changes/A2.scala delete mode 100644 test/files/buildmanager/t2653/t2653.check delete mode 100644 test/files/buildmanager/t2653/t2653.test delete mode 100644 test/files/buildmanager/t2654/A.scala delete mode 100644 test/files/buildmanager/t2654/B.scala delete mode 100644 test/files/buildmanager/t2654/t2654.changes/A2.scala delete mode 100644 test/files/buildmanager/t2654/t2654.check delete mode 100644 test/files/buildmanager/t2654/t2654.test delete mode 100644 test/files/buildmanager/t2655/A.scala delete mode 100644 test/files/buildmanager/t2655/B.scala delete mode 100644 test/files/buildmanager/t2655/t2655.changes/A2.scala delete mode 100644 test/files/buildmanager/t2655/t2655.check delete mode 100644 test/files/buildmanager/t2655/t2655.test delete mode 100644 test/files/buildmanager/t2657/A.scala delete mode 100644 test/files/buildmanager/t2657/B.scala delete mode 100644 
test/files/buildmanager/t2657/t2657.changes/A2.scala delete mode 100644 test/files/buildmanager/t2657/t2657.check delete mode 100644 test/files/buildmanager/t2657/t2657.test delete mode 100644 test/files/buildmanager/t2789/A.scala delete mode 100644 test/files/buildmanager/t2789/B.scala delete mode 100644 test/files/buildmanager/t2789/t2789.changes/A2.scala delete mode 100644 test/files/buildmanager/t2789/t2789.check delete mode 100644 test/files/buildmanager/t2789/t2789.test delete mode 100644 test/files/buildmanager/t2790/A.scala delete mode 100644 test/files/buildmanager/t2790/B.scala delete mode 100644 test/files/buildmanager/t2790/t2790.changes/A2.scala delete mode 100644 test/files/buildmanager/t2790/t2790.check delete mode 100644 test/files/buildmanager/t2790/t2790.test delete mode 100644 test/files/buildmanager/t2792/A1.scala delete mode 100644 test/files/buildmanager/t2792/A2.scala delete mode 100644 test/files/buildmanager/t2792/A3.scala delete mode 100644 test/files/buildmanager/t2792/t2792.changes/A1_1.scala delete mode 100644 test/files/buildmanager/t2792/t2792.check delete mode 100644 test/files/buildmanager/t2792/t2792.test delete mode 100644 test/files/buildmanager/t3045/A.java delete mode 100644 test/files/buildmanager/t3045/t3045.check delete mode 100644 test/files/buildmanager/t3045/t3045.test delete mode 100644 test/files/buildmanager/t3054/bar/Bar.java delete mode 100644 test/files/buildmanager/t3054/foo/Foo.scala delete mode 100644 test/files/buildmanager/t3054/t3054.check delete mode 100644 test/files/buildmanager/t3054/t3054.test delete mode 100644 test/files/buildmanager/t3059/A.scala delete mode 100644 test/files/buildmanager/t3059/B.scala delete mode 100644 test/files/buildmanager/t3059/t3059.check delete mode 100644 test/files/buildmanager/t3059/t3059.test delete mode 100644 test/files/buildmanager/t3133/A.java delete mode 100644 test/files/buildmanager/t3133/t3133.check delete mode 100644 test/files/buildmanager/t3133/t3133.test delete mode 100644 test/files/buildmanager/t3140/A.scala delete mode 100644 test/files/buildmanager/t3140/t3140.check delete mode 100644 test/files/buildmanager/t3140/t3140.test delete mode 100644 test/files/buildmanager/t4215/A.scala delete mode 100644 test/files/buildmanager/t4215/t4215.check delete mode 100644 test/files/buildmanager/t4215/t4215.test (limited to 'src') diff --git a/build.xml b/build.xml index 7bb7b4d365..c5bfaf3ef5 100644 --- a/build.xml +++ b/build.xml @@ -2440,7 +2440,6 @@ BOOTRAPING TEST AND TEST SUITE - diff --git a/project/Partest.scala b/project/Partest.scala index fbb0a2a980..2ea41ba80b 100644 --- a/project/Partest.scala +++ b/project/Partest.scala @@ -33,11 +33,10 @@ object partest { // What's fun here is that we want "*.scala" files *and* directories in the base directory... def partestResources(base: File, testType: String): PathFinder = testType match { case "res" => base ** "*.res" - case "buildmanager" => base * "*" // TODO - Only allow directories that have "*.scala" children... case _ => base * "*" filter { f => !f.getName.endsWith(".obj") && (f.isDirectory || f.getName.endsWith(".scala")) } } - lazy val partestTestTypes = Seq("run", "jvm", "pos", "neg", "buildmanager", "res", "shootout", "scalap", "specialized", "presentation", "scalacheck") + lazy val partestTestTypes = Seq("run", "jvm", "pos", "neg", "res", "shootout", "scalap", "specialized", "presentation", "scalacheck") // TODO - Figure out how to specify only a subset of resources... 
def partestTestsTask(testDirs: ScopedSetting[Map[String,File]]): Project.Initialize[Task[Map[String, Seq[File]]]] = diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 8746a7fd8d..05d0bcf6b0 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -16,7 +16,6 @@ import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, B import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat } import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers } import symtab.classfile.Pickler -import dependencies.DependencyAnalysis import plugins.Plugins import ast._ import ast.parser._ @@ -329,9 +328,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - if (!dependencyAnalysis.off) - dependencyAnalysis.loadDependencyAnalysis() - if (settings.verbose.value || settings.Ylogcp.value) { // Uses the "do not truncate" inform informComplete("[search path for source files: " + classPath.sourcepaths.mkString(",") + "]") @@ -606,14 +602,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val runsRightAfter = None } with GenASM - // This phase is optional: only added if settings.make option is given. - // phaseName = "dependencyAnalysis" - object dependencyAnalysis extends { - val global: Global.this.type = Global.this - val runsAfter = List("jvm") - val runsRightAfter = None - } with DependencyAnalysis - // phaseName = "terminal" object terminal extends { val global: Global.this.type = Global.this @@ -1472,8 +1460,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } /** Compile list of source files */ - def compileSources(_sources: List[SourceFile]) { - val sources = dependencyAnalysis calculateFiles _sources.distinct + def compileSources(sources: List[SourceFile]) { // there is a problem already, e.g. a plugin was passed a bad option if (reporter.hasErrors) return @@ -1568,10 +1555,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) symSource.keys foreach (x => resetPackageClass(x.owner)) informTime("total", startTime) - // record dependency data - if (!dependencyAnalysis.off) - dependencyAnalysis.saveDependencyAnalysis() - // Clear any sets or maps created via perRunCaches. 
perRunCaches.clearAll() diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala index a4b22b0e11..c3c919fae4 100644 --- a/src/compiler/scala/tools/nsc/Main.scala +++ b/src/compiler/scala/tools/nsc/Main.scala @@ -7,7 +7,6 @@ package scala.tools.nsc import java.io.File import File.pathSeparator -import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager } import scala.tools.nsc.io.AbstractFile /** The main class for NSC, a compiler for the programming @@ -42,23 +41,6 @@ object Main extends Driver with EvalLoop { askShutdown false } - else if (settings.Ybuilderdebug.value != "none") { - def fileSet(files : List[String]) = Set.empty ++ (files map AbstractFile.getFile) - - val buildManager = settings.Ybuilderdebug.value match { - case "simple" => new SimpleBuildManager(settings) - case _ => new RefinedBuildManager(settings) - } - buildManager.addSourceFiles(fileSet(command.files)) - - // enter resident mode - loop { line => - val args = line.split(' ').toList - val command = new CompilerCommand(args.toList, settings) - buildManager.update(fileSet(command.files), Set.empty) - } - false - } else true override def newCompiler(): Global = diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index 5cc4404ca1..08602f87dc 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -38,14 +38,10 @@ trait JavaPlatform extends Platform { // replaces the tighter abstract definition here. If we had DOT typing rules, the two // types would be conjoined and everything would work out. Yet another reason to push for DOT. - private def depAnalysisPhase = - if (settings.make.isDefault) Nil - else List(dependencyAnalysis) - def platformPhases = List( flatten, // get rid of inner classes genASM // generate .class files - ) ++ depAnalysisPhase + ) lazy val externalEquals = getDecl(BoxesRunTimeClass, nme.equals_) lazy val externalEqualsNumNum = getDecl(BoxesRunTimeClass, nme.equalsNumNum) diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala deleted file mode 100644 index 4d4b6589a0..0000000000 --- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala +++ /dev/null @@ -1,253 +0,0 @@ -package scala.tools.nsc -package dependencies - -import io.Path -import scala.collection._ -import scala.tools.nsc.io.AbstractFile -import scala.reflect.internal.util.SourceFile - -trait DependencyAnalysis extends SubComponent with Files { - import global._ - - val phaseName = "dependencyAnalysis" - - def off = settings.make.isDefault || settings.make.value == "all" - def shouldCheckClasspath = settings.make.value != "transitivenocp" - - def newPhase(prev: Phase) = new AnalysisPhase(prev) - - private def depPath = Path(settings.dependenciesFile.value) - def loadDependencyAnalysis(): Boolean = ( - depPath.path != "none" && depPath.isFile && loadFrom( - AbstractFile.getFile(depPath), - path => AbstractFile.getFile(depPath.parent resolve Path(path)) - ) - ) - def saveDependencyAnalysis(): Unit = { - if (!depPath.exists) - dependenciesFile = AbstractFile.getFile(depPath.createFile()) - - /** The directory where file lookup should start */ - val rootPath = depPath.parent.normalize - saveDependencies( - file => rootPath.relativize(Path(file.file).normalize).path - ) - } - - lazy val maxDepth = settings.make.value match { - case 
"changed" => 0 - case "immediate" => 1 - case _ => Int.MaxValue - } - - // todo: order insensible checking and, also checking timestamp? - def validateClasspath(cp1: String, cp2: String): Boolean = cp1 == cp2 - - def nameToFile(src: AbstractFile, name: String) = - settings.outputDirs.outputDirFor(src) - .lookupPathUnchecked(name.toString.replace(".", java.io.File.separator) + ".class", false) - - private var depFile: Option[AbstractFile] = None - - def dependenciesFile_=(file: AbstractFile) { - assert(file ne null) - depFile = Some(file) - } - - def dependenciesFile: Option[AbstractFile] = depFile - - def classpath = settings.classpath.value - def newDeps = new FileDependencies(classpath) - - var dependencies = newDeps - - def managedFiles = dependencies.dependencies.keySet - - /** Top level definitions per source file. */ - val definitions: mutable.Map[AbstractFile, List[Symbol]] = - new mutable.HashMap[AbstractFile, List[Symbol]] { - override def default(f: AbstractFile) = Nil - } - - /** External references used by source file. */ - val references: mutable.Map[AbstractFile, immutable.Set[String]] = - new mutable.HashMap[AbstractFile, immutable.Set[String]] { - override def default(f: AbstractFile) = immutable.Set() - } - - /** External references for inherited members used in the source file */ - val inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] = - new mutable.HashMap[AbstractFile, immutable.Set[Inherited]] { - override def default(f: AbstractFile) = immutable.Set() - } - - /** Write dependencies to the current file. */ - def saveDependencies(fromFile: AbstractFile => String) = - if(dependenciesFile.isDefined) - dependencies.writeTo(dependenciesFile.get, fromFile) - - /** Load dependencies from the given file and save the file reference for - * future saves. - */ - def loadFrom(f: AbstractFile, toFile: String => AbstractFile): Boolean = { - dependenciesFile = f - FileDependencies.readFrom(f, toFile) match { - case Some(fd) => - val success = if (shouldCheckClasspath) validateClasspath(fd.classpath, classpath) else true - dependencies = if (success) fd else { - if (settings.debug.value) - println("Classpath has changed. Nuking dependencies") - newDeps - } - - success - case None => false - } - } - - def calculateFiles(files: List[SourceFile]): List[SourceFile] = - if (off) files - else if (dependencies.isEmpty) { - println("No known dependencies. Compiling " + - (if (settings.debug.value) files.mkString(", ") else "everything")) - files - } else { - val (direct, indirect) = dependencies.invalidatedFiles(maxDepth); - val filtered = files.filter(x => { - val f = x.file.absolute - direct(f) || indirect(f) || !dependencies.containsFile(f); - }) - filtered match { - case Nil => println("No changes to recompile"); - case x => println("Recompiling " + ( - if(settings.debug.value) x.mkString(", ") else x.length + " files") - ) - } - filtered - } - - case class Inherited(qualifier: String, member: Name) - - class AnalysisPhase(prev: Phase) extends StdPhase(prev) { - - override def cancelled(unit: CompilationUnit) = - super.cancelled(unit) && !unit.isJava - - def apply(unit : global.CompilationUnit) { - val f = unit.source.file.file - // When we're passed strings by the interpreter - // they have no source file. We simply ignore this case - // as irrelevant to dependency analysis. 
- if (f != null){ - val source: AbstractFile = unit.source.file; - for (d <- unit.icode){ - val name = d.toString - d.symbol match { - case s : ModuleClassSymbol => - val isTopLevelModule = exitingPickler { !s.isImplClass && !s.isNestedClass } - - if (isTopLevelModule && (s.companionModule != NoSymbol)) { - dependencies.emits(source, nameToFile(unit.source.file, name)) - } - dependencies.emits(source, nameToFile(unit.source.file, name + "$")) - case _ => - dependencies.emits(source, nameToFile(unit.source.file, name)) - } - } - - dependencies.reset(source) - for (d <- unit.depends; if (d.sourceFile != null)){ - dependencies.depends(source, d.sourceFile) - } - } - - // find all external references in this compilation unit - val file = unit.source.file - references += file -> immutable.Set.empty[String] - inherited += file -> immutable.Set.empty[Inherited] - - val buf = new mutable.ListBuffer[Symbol] - - (new Traverser { - override def traverse(tree: Tree) { - if ((tree.symbol ne null) - && (tree.symbol != NoSymbol) - && (!tree.symbol.isPackage) - && (!tree.symbol.isJavaDefined) - && (!tree.symbol.tpe.isError) - && ((tree.symbol.sourceFile eq null) - || (tree.symbol.sourceFile.path != file.path)) - && (!tree.symbol.isClassConstructor)) { - updateReferences(tree.symbol.fullName) - // was "at uncurryPhase.prev", which is actually non-deterministic - // because the continuations plugin may or may not supply uncurry's - // immediately preceding phase. - enteringRefchecks(checkType(tree.symbol.tpe)) - } - - tree match { - case cdef: ClassDef if !cdef.symbol.hasPackageFlag && - !cdef.symbol.isAnonymousFunction => - if (cdef.symbol != NoSymbol) buf += cdef.symbol - // was "at erasurePhase.prev" - enteringExplicitOuter { - for (s <- cdef.symbol.info.decls) - s match { - case ts: TypeSymbol if !ts.isClass => - checkType(s.tpe) - case _ => - } - } - super.traverse(tree) - - case ddef: DefDef => - // was "at typer.prev" - enteringTyper { checkType(ddef.symbol.tpe) } - super.traverse(tree) - case a @ Select(q, n) if ((a.symbol != NoSymbol) && (q.symbol != null)) => // #2556 - if (!a.symbol.isConstructor && - !a.symbol.owner.isPackageClass && - !isSameType(q.tpe, a.symbol.owner.tpe)) - inherited += file -> - (inherited(file) + Inherited(q.symbol.tpe.resultType.safeToString, n)) - super.traverse(tree) - case _ => - super.traverse(tree) - } - } - - def checkType(tpe: Type): Unit = - tpe match { - case t: MethodType => - checkType(t.resultType) - for (s <- t.params) checkType(s.tpe) - - case t: TypeRef => - if (t.sym.isAliasType) { - updateReferences(t.typeSymbolDirect.fullName) - checkType(t.typeSymbolDirect.info) - } - updateReferences(t.typeSymbol.fullName) - for (tp <- t.args) checkType(tp) - - case t: PolyType => - checkType(t.resultType) - updateReferences(t.typeSymbol.fullName) - - case t: NullaryMethodType => - checkType(t.resultType) - updateReferences(t.typeSymbol.fullName) - - case t => - updateReferences(t.typeSymbol.fullName) - } - - def updateReferences(s: String): Unit = - references += file -> (references(file) + s) - - }).apply(unit.body) - - definitions(unit.source.file) = buf.toList - } - } -} diff --git a/src/compiler/scala/tools/nsc/dependencies/Files.scala b/src/compiler/scala/tools/nsc/dependencies/Files.scala deleted file mode 100644 index 194351a13f..0000000000 --- a/src/compiler/scala/tools/nsc/dependencies/Files.scala +++ /dev/null @@ -1,177 +0,0 @@ -package scala.tools.nsc -package dependencies - -import java.io.{InputStream, OutputStream, PrintStream, InputStreamReader, 
BufferedReader} -import io.{AbstractFile, PlainFile, VirtualFile} - -import scala.collection._ - - -trait Files { self : SubComponent => - - class FileDependencies(val classpath: String) { - import FileDependencies._ - - class Tracker extends mutable.OpenHashMap[AbstractFile, mutable.Set[AbstractFile]] { - override def default(key: AbstractFile) = { - this(key) = new mutable.HashSet[AbstractFile] - this(key) - } - } - - val dependencies = new Tracker - val targets = new Tracker - - def isEmpty = dependencies.isEmpty && targets.isEmpty - - def emits(source: AbstractFile, result: AbstractFile) = - targets(source) += result - def depends(from: AbstractFile, on: AbstractFile) = - dependencies(from) += on - - def reset(file: AbstractFile) = dependencies -= file - - def cleanEmpty = { - dependencies foreach {case (_, value) => - value retain (x => x.exists && (x ne removedFile))} - dependencies retain ((key, value) => key.exists && !value.isEmpty) - targets foreach {case (_, value) => value retain (_.exists)} - targets retain ((key, value) => key.exists && !value.isEmpty) - } - - def containsFile(f: AbstractFile) = targets.contains(f.absolute) - - def invalidatedFiles(maxDepth: Int) = { - val direct = new mutable.HashSet[AbstractFile] - - for ((file, products) <- targets) { - // This looks a bit odd. It may seem like one should invalidate a file - // if *any* of its dependencies are older than it. The forall is there - // to deal with the fact that a) Some results might have been orphaned - // and b) Some files might not need changing. - direct(file) ||= products.forall(d => d.lastModified < file.lastModified) - } - - val indirect = dependentFiles(maxDepth, direct) - - for ((source, targets) <- targets - if direct(source) || indirect(source) || (source eq removedFile)) { - targets foreach (_.delete) - targets -= source - } - - (direct, indirect) - } - - /** Return the set of files that depend on the given changed files. - * It computes the transitive closure up to the given depth. 
- */ - def dependentFiles(depth: Int, changed: Set[AbstractFile]): Set[AbstractFile] = { - val indirect = new mutable.HashSet[AbstractFile] - val newInvalidations = new mutable.HashSet[AbstractFile] - - def invalid(file: AbstractFile) = - indirect(file) || changed(file) || (file eq removedFile) - - def go(i: Int) : Unit = if(i > 0) { - newInvalidations.clear - for((target, depends) <- dependencies if !invalid(target); - d <- depends) - newInvalidations(target) ||= invalid(d) - - indirect ++= newInvalidations - if (!newInvalidations.isEmpty) go(i - 1) - } - - go(depth) - - indirect --= changed - } - - def writeTo(file: AbstractFile, fromFile: AbstractFile => String): Unit = - writeToFile(file)(out => writeTo(new PrintStream(out), fromFile)) - - def writeTo(print: PrintStream, fromFile: AbstractFile => String): Unit = { - def emit(tracker: Tracker) = - for ((f, ds) <- tracker; d <- ds) print.println(fromFile(f) + arrow + fromFile(d)) - - cleanEmpty - print.println(classpath) - print.println(separator) - emit(dependencies) - print.println(separator) - emit(targets) - } - } - - object FileDependencies { - private val separator:String = "-------" - private val arrow = " -> " - private val removedFile = new VirtualFile("removed") - - private def validLine(l: String) = (l != null) && (l != separator) - - def readFrom(file: AbstractFile, toFile: String => AbstractFile): Option[FileDependencies] = - readFromFile(file) { in => - val reader = new BufferedReader(new InputStreamReader(in)) - val it = new FileDependencies(reader.readLine) - - def readLines(valid: Boolean)(f: (AbstractFile, AbstractFile) => Unit): Boolean = { - var continue = valid - var line: String = null - while (continue && {line = reader.readLine; validLine(line)}) { - line.split(arrow) match { - case Array(from, on) => f(toFile(from), toFile(on)) - case _ => - global.inform("Parse error: Unrecognised string " + line) - continue = false - } - } - continue - } - - reader.readLine - - val dResult = readLines(true)( - (_, _) match { - case (null, _) => // fromFile is removed, it's ok - case (fromFile, null) => - // onFile is removed, should recompile fromFile - it.depends(fromFile, removedFile) - case (fromFile, onFile) => it.depends(fromFile, onFile) - }) - - readLines(dResult)( - (_, _) match { - case (null, null) => - // source and target are all removed, it's ok - case (null, targetFile) => - // source is removed, should remove relative target later - it.emits(removedFile, targetFile) - case (_, null) => - // it may has been cleaned outside, or removed during last phase - case (sourceFile, targetFile) => it.emits(sourceFile, targetFile) - }) - - Some(it) - } - } - - def writeToFile[T](file: AbstractFile)(f: OutputStream => T) : T = { - val out = file.bufferedOutput - try { - f(out) - } finally { - out.close - } - } - - def readFromFile[T](file: AbstractFile)(f: InputStream => T) : T = { - val in = file.input - try{ - f(in) - } finally { - in.close - } - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala deleted file mode 100644 index 6b72eb12f8..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala +++ /dev/null @@ -1,83 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Iulian Dragos - * @author Hubert Plocinicak - */ -package scala.tools.nsc -package interactive - -import scala.collection._ -import io.AbstractFile -import scala.language.implicitConversions - 
-trait BuildManager { - - /** Add the given source files to the managed build process. */ - def addSourceFiles(files: Set[AbstractFile]) - - /** The given files have been modified by the user. Recompile - * them and their dependent files. - */ - def update(added: Set[AbstractFile], removed: Set[AbstractFile]) - - /** Notification that the supplied set of files is being built */ - def buildingFiles(included: Set[AbstractFile]) {} - - /** Load saved dependency information. */ - def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean - - /** Save dependency information to `file`. */ - def saveTo(file: AbstractFile, fromFile: AbstractFile => String) - - def compiler: scala.tools.nsc.Global - - /** Delete classfiles derived from the supplied set of sources */ - def deleteClassfiles(sources : Set[AbstractFile]) { - val targets = compiler.dependencyAnalysis.dependencies.targets - for(source <- sources; cf <- targets(source)) - cf.delete - } -} - - -/** Simple driver for testing the build manager. It presents - * the user to a 'resident compiler' prompt. Each line is - * interpreted as a set of files that have changed. The builder - * then derives the dependent files and recompiles them. - */ -object BuildManagerTest extends EvalLoop { - - def prompt = "builder > " - - private def buildError(msg: String) { - println(msg + "\n scalac -help gives more information") - } - - def main(args: Array[String]) { - implicit def filesToSet(fs: List[String]): Set[AbstractFile] = { - def partition(s: String, r: Tuple2[List[AbstractFile], List[String]])= { - val v = AbstractFile.getFile(s) - if (v == null) (r._1, s::r._2) else (v::r._1, r._2) - } - val result = fs.foldRight((List[AbstractFile](), List[String]()))(partition) - if (!result._2.isEmpty) - Console.err.println("No such file(s): " + result._2.mkString(",")) - Set.empty ++ result._1 - } - - val settings = new Settings(buildError) - settings.Ybuildmanagerdebug.value = true - val command = new CompilerCommand(args.toList, settings) - val buildManager: BuildManager = new RefinedBuildManager(settings) - - buildManager.addSourceFiles(command.files) - - // enter resident mode - loop { line => - val args = line.split(' ').toList - val command = new CompilerCommand(args, settings) - buildManager.update(command.files, Set.empty) - } - - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala deleted file mode 100644 index 9873276f05..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala +++ /dev/null @@ -1,354 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Iulian Dragos - * @author Hubert Plocinicak - */ -package scala.tools.nsc -package interactive - -import scala.collection._ -import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} -import scala.util.control.Breaks._ -import scala.tools.nsc.symtab.Flags - -import dependencies._ -import util.ClassPath -import io.AbstractFile -import scala.tools.util.PathResolver - -/** A more defined build manager, based on change sets. For each - * updated source file, it computes the set of changes to its - * definitions, then checks all dependent units to see if the - * changes require a compilation. It repeats this process until - * a fixpoint is reached. 
- */ -@deprecated("Use sbt incremental compilation mechanism", "2.10.0") -class RefinedBuildManager(val settings: Settings) extends Changes with BuildManager { - - class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter) { - - def this(settings: Settings) = - this(settings, new ConsoleReporter(settings)) - - override def computeInternalPhases() { - super.computeInternalPhases - phasesSet += dependencyAnalysis - } - lazy val _classpath = new NoSourcePathPathResolver(settings).result - override def classPath = _classpath.asInstanceOf[ClassPath[platform.BinaryRepr]] - // See discussion in JavaPlatForm for why we need a cast here. - - def newRun() = new Run() - } - - class NoSourcePathPathResolver(settings: Settings) extends PathResolver(settings) { - override def containers = Calculated.basis.dropRight(1).flatten.distinct - } - - protected def newCompiler(settings: Settings) = new BuilderGlobal(settings) - - val compiler = newCompiler(settings) - import compiler.{ Symbol, Type, enteringErasure } - import compiler.dependencyAnalysis.Inherited - - private case class SymWithHistory(sym: Symbol, befErasure: Type) - - /** Managed source files. */ - private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile] - - private val definitions: mutable.Map[AbstractFile, List[SymWithHistory]] = - new mutable.HashMap[AbstractFile, List[SymWithHistory]] { - override def default(key: AbstractFile) = Nil - } - - /** External references used by source file. */ - private var references: mutable.Map[AbstractFile, immutable.Set[String]] = _ - - /** External references for inherited members */ - private var inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] = _ - - /** Reverse of definitions, used for caching */ - private val classes: mutable.Map[String, AbstractFile] = - new mutable.HashMap[String, AbstractFile] { - override def default(key: String) = null - } - - /** Add the given source files to the managed build process. */ - def addSourceFiles(files: Set[AbstractFile]) { - sources ++= files - update(files) - } - - /** Remove the given files from the managed build process. */ - def removeFiles(files: Set[AbstractFile]) { - sources --= files - deleteClassfiles(files) - update(invalidatedByRemove(files)) - } - - /** Return the set of invalidated files caused by removing the given files. - */ - private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = { - val changes = new mutable.HashMap[Symbol, List[Change]] - for (f <- files; SymWithHistory(sym, _) <- definitions(f)) - changes += sym -> List(Removed(Class(sym.fullName))) - invalidated(files, changes) - } - - def update(added: Set[AbstractFile], removed: Set[AbstractFile]) { - sources --= removed - deleteClassfiles(removed) - update(added ++ invalidatedByRemove(removed)) - } - - /** The given files have been modified by the user. Recompile - * them and all files that depend on them. Only files that - * have been previously added as source files are recompiled. - * Files that were already compiled are taken out from the result - * of the dependency analysis. 
- */ - private def update(files: Set[AbstractFile]) = { - val coll: mutable.Map[AbstractFile, immutable.Set[AbstractFile]] = - mutable.HashMap[AbstractFile, immutable.Set[AbstractFile]]() - compiler.reporter.reset() - - // See if we really have corresponding symbols, not just those - // which share the name - def isCorrespondingSym(from: Symbol, to: Symbol): Boolean = - (from.hasFlag(Flags.TRAIT) == to.hasFlag(Flags.TRAIT)) && // has to run in 2.8, so no hasTraitFlag - (from.hasFlag(Flags.MODULE) == to.hasFlag(Flags.MODULE)) - - // For testing purposes only, order irrelevant for compilation - def toStringSet(set: Set[AbstractFile]): String = - set.toList sortBy (_.name) mkString("Set(", ", ", ")") - - def update0(files: Set[AbstractFile]): Unit = if (!files.isEmpty) { - deleteClassfiles(files) - val run = compiler.newRun() - if (settings.Ybuildmanagerdebug.value) - compiler.inform("compiling " + toStringSet(files)) - buildingFiles(files) - - run.compileFiles(files.toList) - if (compiler.reporter.hasErrors) { - return - } - - // Deterministic behaviour required by partest - val changesOf = new mutable.HashMap[Symbol, List[Change]] { - override def toString: String = { - val changesOrdered = - toList.map(e => { - e._1.toString + " -> " + - e._2.sortBy(_.toString).mkString("List(", ", ", ")") - }) - changesOrdered.sorted.mkString("Map(", ", ", ")") - } - } - val additionalDefs: mutable.HashSet[AbstractFile] = mutable.HashSet.empty - - val defs = compiler.dependencyAnalysis.definitions - for (src <- files) { - if (definitions(src).isEmpty) - additionalDefs ++= compiler.dependencyAnalysis. - dependencies.dependentFiles(1, mutable.Set(src)) - else { - val syms = defs(src) - for (sym <- syms) { - definitions(src).find( - s => (s.sym.fullName == sym.fullName) && - isCorrespondingSym(s.sym, sym)) match { - case Some(SymWithHistory(oldSym, info)) => - val changes = changeSet(oldSym.info, sym) - val changesErasure = enteringErasure(changeSet(info, sym)) - - changesOf(oldSym) = (changes ++ changesErasure).distinct - case _ => - // a new top level definition - changesOf(sym) = sym.parentSymbols filter (_.isSealed) map (p => - changeChangeSet(p, sym+" extends a sealed "+p)) - } - } - // Create a change for the top level classes that were removed - val removed = definitions(src) filterNot ((s:SymWithHistory) => - syms.find(_.fullName == (s.sym.fullName)) != None) - for (s <- removed) { - changesOf(s.sym) = List(removeChangeSet(s.sym)) - } - } - } - if (settings.Ybuildmanagerdebug.value) - compiler.inform("Changes: " + changesOf) - updateDefinitions(files) - val invalid = invalidated(files, changesOf, additionalDefs) - update0(checkCycles(invalid, files, coll)) - } - - update0(files) - // remove the current run in order to save some memory - compiler.dropRun() - } - - // Attempt to break the cycling reference deps as soon as possible and reduce - // the number of compilations to minimum without having too coarse grained rules - private def checkCycles(files: Set[AbstractFile], initial: Set[AbstractFile], - collect: mutable.Map[AbstractFile, immutable.Set[AbstractFile]]): - Set[AbstractFile] = { - def followChain(set: Set[AbstractFile], rest: immutable.Set[AbstractFile]): - immutable.Set[AbstractFile] = { - val deps:Set[AbstractFile] = set.flatMap( - s => collect.get(s) match { - case Some(x) => x - case _ => Set[AbstractFile]() - }) - val newDeps = deps -- rest - if (newDeps.isEmpty) rest else followChain(newDeps, rest ++ newDeps) - } - var res:Set[AbstractFile] = mutable.Set() - files.foreach( f => - if 
(collect contains f) { - val chain = followChain(Set(f), immutable.Set()) ++ files - chain.foreach((fc: AbstractFile) => collect += fc -> chain) - res ++= chain - } else - res += f - ) - - initial.foreach((f: AbstractFile) => collect += (f -> (collect.getOrElse(f, immutable.Set()) ++ res))) - if (res.subsetOf(initial)) Set() else res - } - - /** Return the set of source files that are invalidated by the given changes. */ - def invalidated(files: Set[AbstractFile], changesOf: scala.collection.Map[Symbol, List[Change]], - processed: Set[AbstractFile] = Set.empty): - Set[AbstractFile] = { - val buf = new mutable.HashSet[AbstractFile] - val newChangesOf = new mutable.HashMap[Symbol, List[Change]] - var directDeps = - compiler.dependencyAnalysis.dependencies.dependentFiles(1, files) - - def invalidate(file: AbstractFile, reason: String, change: Change) = { - if (settings.Ybuildmanagerdebug.value) - compiler.inform("invalidate " + file + " because " + reason + " [" + change + "]") - buf += file - directDeps -= file - for (syms <- definitions(file)) // fixes #2557 - newChangesOf(syms.sym) = List(change, parentChangeSet(syms.sym)) - break - } - - for ((oldSym, changes) <- changesOf; change <- changes) { - def checkParents(cls: Symbol, file: AbstractFile) { - val parentChange = cls.parentSymbols exists (_.fullName == oldSym.fullName) - // if (settings.buildmanagerdebug.value) - // compiler.inform("checkParents " + cls + " oldSym: " + oldSym + " parentChange: " + parentChange + " " + cls.info.parents) - change match { - case Changed(Class(_)) if parentChange => - invalidate(file, "parents have changed", change) - - case Changed(Definition(_)) if parentChange => - invalidate(file, "inherited method changed", change) - - case Added(Definition(_)) if parentChange => - invalidate(file, "inherited new method", change) - - case Removed(Definition(_)) if parentChange => - invalidate(file, "inherited method removed", change) - - case _ => () - } - } - - def checkInterface(cls: Symbol, file: AbstractFile) { - change match { - case Added(Definition(name)) => - if (cls.info.decls.iterator.exists(_.fullName == name)) - invalidate(file, "of new method with existing name", change) - case Changed(Class(name)) => - if (cls.info.typeSymbol.fullName == name) - invalidate(file, "self type changed", change) - case _ => - () - } - } - - def checkReferences(file: AbstractFile) { - //if (settings.buildmanagerdebug.value) - // compiler.inform(file + ":" + references(file)) - val refs = references(file) - if (refs.isEmpty) - invalidate(file, "it is a direct dependency and we don't yet have finer-grained dependency information", change) - else { - change match { - case Removed(Definition(name)) if refs(name) => - invalidate(file, "it references deleted definition", change) - case Removed(Class(name)) if (refs(name)) => - invalidate(file, "it references deleted class", change) - case Changed(Class(name)) if (refs(name)) => - invalidate(file, "it references changed class", change) - case Changed(Definition(name)) if (refs(name)) => - invalidate(file, "it references changed definition", change) - case Added(Definition(name)) if (refs(name)) => - invalidate(file, "it references added definition", change) - case _ => () - } - } - } - - def checkInheritedReferences(file: AbstractFile) { - val refs = inherited(file) - if (!refs.isEmpty) - change match { - case ParentChanged(Class(name)) => - for (Inherited(q, member) <- refs.find(p => (p != null && p.qualifier == name)); - classFile <- classes.get(q); - defs <- 
definitions.get(classFile); - s <- defs.find(p => p.sym.fullName == q) - if ((s.sym).tpe.nonPrivateMember(member) == compiler.NoSymbol)) - invalidate(file, "it references invalid (no longer inherited) definition", change) - () - case _ => () - } - } - - for (file <- directDeps) { - breakable { - for (cls <- definitions(file)) checkParents(cls.sym, file) - for (cls <- definitions(file)) checkInterface(cls.sym, file) - checkReferences(file) - checkInheritedReferences(file) - } - } - } - if (buf.isEmpty) - processed - else - invalidated(buf.clone() --= processed, newChangesOf, processed ++ buf) - } - - /** Update the map of definitions per source file */ - private def updateDefinitions(files: Set[AbstractFile]) { - for (src <- files; localDefs = compiler.dependencyAnalysis.definitions(src)) { - definitions(src) = (localDefs map (s => { - this.classes += s.fullName -> src - SymWithHistory(s.cloneSymbol, enteringErasure(s.info.cloneInfo(s))) - })) - } - this.references = compiler.dependencyAnalysis.references - this.inherited = compiler.dependencyAnalysis.inherited - } - - /** Load saved dependency information. */ - def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean = { - val success = compiler.dependencyAnalysis.loadFrom(file, toFile) - if (success) - sources ++= compiler.dependencyAnalysis.managedFiles - success - } - - /** Save dependency information to `file`. */ - def saveTo(file: AbstractFile, fromFile: AbstractFile => String) { - compiler.dependencyAnalysis.dependenciesFile = file - compiler.dependencyAnalysis.saveDependencies(fromFile) - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala deleted file mode 100644 index ff25dac7ac..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala +++ /dev/null @@ -1,100 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive - -import scala.collection._ - -import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} -import io.AbstractFile - -/** A simple build manager, using the default scalac dependency tracker. - * The transitive closure of all dependent files on a modified file - * is recompiled at once. - * - * It is equivalent to using a resident compiler mode with the - * '-make:transitive' option. - */ -class SimpleBuildManager(val settings: Settings) extends BuildManager { - - class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter) { - - def this(settings: Settings) = - this(settings, new ConsoleReporter(settings)) - - def newRun() = new Run() - } - - protected def newCompiler(settings: Settings) = new BuilderGlobal(settings) - - val compiler = newCompiler(settings) - - /** Managed source files. */ - private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile] - - /** Add the given source files to the managed build process. */ - def addSourceFiles(files: Set[AbstractFile]) { - sources ++= files - update(files) - } - - /** Remove the given files from the managed build process. */ - def removeFiles(files: Set[AbstractFile]) { - sources --= files - deleteClassfiles(files) - update(invalidatedByRemove(files)) - } - - - /** Return the set of invalidated files caused by removing the given files. 
*/ - private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = { - val deps = compiler.dependencyAnalysis.dependencies - deps.dependentFiles(Int.MaxValue, files) - } - - def update(added: Set[AbstractFile], removed: Set[AbstractFile]) { - sources --= removed - deleteClassfiles(removed) - update(added ++ invalidatedByRemove(removed)) - } - - /** The given files have been modified by the user. Recompile - * them and all files that depend on them. Only files that - * have been previously added as source files are recompiled. - */ - def update(files: Set[AbstractFile]) { - deleteClassfiles(files) - - val deps = compiler.dependencyAnalysis.dependencies - val run = compiler.newRun() - compiler.inform("compiling " + files) - - val toCompile = - (files ++ deps.dependentFiles(Int.MaxValue, files)) intersect sources - - - compiler.inform("Recompiling " + - (if(settings.debug.value) toCompile.mkString(", ") - else toCompile.size + " files")) - - buildingFiles(toCompile) - - run.compileFiles(files.toList) - } - - /** Load saved dependency information. */ - def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean = { - val success = compiler.dependencyAnalysis.loadFrom(file, toFile) - if (success) - sources ++= compiler.dependencyAnalysis.managedFiles - success - } - - /** Save dependency information to `file`. */ - def saveTo(file: AbstractFile, fromFile: AbstractFile => String) { - compiler.dependencyAnalysis.dependenciesFile = file - compiler.dependencyAnalysis.saveDependencies(fromFile) - } -} diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 9c8ffc5ae3..36e6a74820 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -162,7 +162,6 @@ trait ScalaSettings extends AbsScalaSettings val refinementMethodDispatch = ChoiceSetting ("-Ystruct-dispatch", "policy", "structural method dispatch policy", List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache") val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.") - val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none") val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.") val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") @@ -177,7 +176,6 @@ trait ScalaSettings extends AbsScalaSettings /** Area-specific debug output. 
*/ - val Ybuildmanagerdebug = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.") val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.") val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.") val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.") diff --git a/src/partest/README b/src/partest/README index 0434aa7499..17594dbb1e 100644 --- a/src/partest/README +++ b/src/partest/README @@ -24,7 +24,6 @@ Other arguments: * --run next files test the interpreter and all backends * --jvm next files test the JVM backend * --res next files test the resident compiler - * --buildmanager next files test the build manager * --shootout next files are shootout tests * --script next files test the script runner * ''-Dpartest.scalac_opts=...'' -> add compiler options diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala index 41d69a5448..d320f20616 100644 --- a/src/partest/scala/tools/partest/PartestTask.scala +++ b/src/partest/scala/tools/partest/PartestTask.scala @@ -43,7 +43,6 @@ import org.apache.tools.ant.types.Commandline.Argument * - `runtests`, * - `jvmtests`, * - `residenttests`, - * - `buildmanagertests`, * - `shootouttests`, * - `scalaptests`, * - `scalachecktests`, @@ -76,10 +75,6 @@ class PartestTask extends Task with CompilationPathProperty { residentFiles = Some(input) } - def addConfiguredBuildManagerTests(input: FileSet) { - buildManagerFiles = Some(input) - } - def addConfiguredScalacheckTests(input: FileSet) { scalacheckFiles = Some(input) } @@ -187,7 +182,6 @@ class PartestTask extends Task with CompilationPathProperty { private var runFiles: Option[FileSet] = None private var jvmFiles: Option[FileSet] = None private var residentFiles: Option[FileSet] = None - private var buildManagerFiles: Option[FileSet] = None private var scalacheckFiles: Option[FileSet] = None private var scriptFiles: Option[FileSet] = None private var shootoutFiles: Option[FileSet] = None @@ -244,7 +238,6 @@ class PartestTask extends Task with CompilationPathProperty { private def getRunFiles = getFilesAndDirs(runFiles) private def getJvmFiles = getFilesAndDirs(jvmFiles) private def getResidentFiles = getFiles(residentFiles) - private def getBuildManagerFiles = getFilesAndDirs(buildManagerFiles) private def getScalacheckFiles = getFilesAndDirs(scalacheckFiles) private def getScriptFiles = getFiles(scriptFiles) private def getShootoutFiles = getFiles(shootoutFiles) @@ -363,7 +356,6 @@ class PartestTask extends Task with CompilationPathProperty { (getRunFiles, "run", "Compiling and running files"), (getJvmFiles, "jvm", "Compiling and running files"), (getResidentFiles, "res", "Running resident compiler scenarii"), - (getBuildManagerFiles, "buildmanager", "Running Build Manager scenarii"), (getScalacheckFiles, "scalacheck", "Running scalacheck tests"), (getScriptFiles, "script", "Running script files"), (getShootoutFiles, "shootout", "Running shootout tests"), diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala index d146618d0e..6a24926b14 100644 --- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala +++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala @@ -31,7 +31,6 @@ class ConsoleRunner extends DirectRunner { TestSet("run", stdFilter, "Testing interpreter and backend"), 
TestSet("jvm", stdFilter, "Testing JVM backend"), TestSet("res", x => x.isFile && (x hasExtension "res"), "Testing resident compiler"), - TestSet("buildmanager", _.isDirectory, "Testing Build Manager"), TestSet("shootout", stdFilter, "Testing shootout tests"), TestSet("script", stdFilter, "Testing script tests"), TestSet("scalacheck", stdFilter, "Testing ScalaCheck tests"), diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala index ab90d387d0..df90b22448 100644 --- a/src/partest/scala/tools/partest/nest/NestUI.scala +++ b/src/partest/scala/tools/partest/nest/NestUI.scala @@ -73,7 +73,6 @@ object NestUI { println(" --run run interpreter and backend tests") println(" --jvm run JVM backend tests") println(" --res run resident compiler tests") - println(" --buildmanager run Build Manager tests") println(" --scalacheck run ScalaCheck tests") println(" --script run script runner tests") println(" --shootout run shootout tests") diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala index fbef97dab4..0ed14dc858 100644 --- a/src/partest/scala/tools/partest/nest/RunnerManager.scala +++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala @@ -19,7 +19,6 @@ import scala.tools.nsc.util.{ ClassPath, FakePos, ScalaClassLoader, stackTraceSt import ClassPath.{ join, split } import scala.tools.scalap.scalax.rules.scalasig.ByteCode import scala.collection.{ mutable, immutable } -import scala.tools.nsc.interactive.{ BuildManager, RefinedBuildManager } import scala.sys.process._ import java.util.concurrent.{ Executors, TimeUnit, TimeoutException } import PartestDefaults.{ javaCmd, javacCmd } @@ -530,121 +529,6 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP case "ant" => runAntTest(file) - case "buildmanager" => - val (swr, wr) = newTestWriters() - printInfoStart(file, wr) - val (outDir, testFile, changesDir) = { - if (!file.isDirectory) - (null, null, null) - else { - NestUI.verbose(this+" running test "+fileBase) - val outDir = createOutputDir() - val testFile = new File(file, fileBase + ".test") - val changesDir = new File(file, fileBase + ".changes") - - if (changesDir.isFile || !testFile.isFile) { - // if changes exists then it has to be a dir - if (!testFile.isFile) NestUI.verbose("invalid build manager test file") - if (changesDir.isFile) NestUI.verbose("invalid build manager changes directory") - (null, null, null) - } - else { - copyTestFiles(file, outDir) - NestUI.verbose("outDir: "+outDir) - NestUI.verbose("logFile: "+logFile) - (outDir, testFile, changesDir) - } - } - } - if (outDir == null) - return (false, LogContext(logFile)) - - // Pre-conditions satisfied - val sourcepath = outDir.getAbsolutePath+File.separator - - // configure input/output files - val logWriter = new PrintStream(new FileOutputStream(logFile), true) - val testReader = new BufferedReader(new FileReader(testFile)) - val logConsoleWriter = new PrintWriter(logWriter, true) - - // create proper settings for the compiler - val settings = new Settings(workerError) - settings.outdir.value = outDir.getAbsoluteFile.getAbsolutePath - settings.sourcepath.value = sourcepath - settings.classpath.value = fileManager.CLASSPATH - settings.Ybuildmanagerdebug.value = true - - // simulate Build Manager loop - val prompt = "builder > " - val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter) - val bM: BuildManager = - new 
RefinedBuildManager(settings) { - override protected def newCompiler(settings: Settings) = - new BuilderGlobal(settings, reporter) - } - - def testCompile(line: String): Boolean = { - NestUI.verbose("compiling " + line) - val args = (line split ' ').toList - val command = new CompilerCommand(args, settings) - command.ok && { - bM.update(filesToSet(settings.sourcepath.value, command.files), Set.empty) - !reporter.hasErrors - } - } - - val updateFiles = (line: String) => { - NestUI.verbose("updating " + line) - (line split ' ').toList forall (u => - (u split "=>").toList match { - case origFileName::(newFileName::Nil) => - val newFile = new File(changesDir, newFileName) - if (newFile.isFile) { - val v = overwriteFileWith(new File(outDir, origFileName), newFile) - if (!v) - NestUI.verbose("'update' operation on " + u + " failed") - v - } else { - NestUI.verbose("File " + newFile + " is invalid") - false - } - case a => - NestUI.verbose("Other =: " + a) - false - } - ) - } - - def loop(): Boolean = { - testReader.readLine() match { - case null | "" => - NestUI.verbose("finished") - true - case s if s startsWith ">>update " => - updateFiles(s stripPrefix ">>update ") && loop() - case s if s startsWith ">>compile " => - val files = s stripPrefix ">>compile " - logWriter.println(prompt + files) - // In the end, it can finish with an error - if (testCompile(files)) loop() - else { - val t = testReader.readLine() - (t == null) || (t == "") - } - case s => - NestUI.verbose("wrong command in test file: " + s) - false - } - } - - Output.withRedirected(logWriter) { - try loop() - finally testReader.close() - } - fileManager.mapFile(logFile, replaceSlashes(new File(sourcepath), _)) - - (diffCheck(file, compareOutput(file, logFile)), LogContext(logFile, swr, wr)) - case "res" => { // simulate resident compiler loop val prompt = "\nnsc> " diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala index 87177772ab..880c6e431b 100644 --- a/src/partest/scala/tools/partest/nest/TestFile.scala +++ b/src/partest/scala/tools/partest/nest/TestFile.scala @@ -54,7 +54,6 @@ abstract class TestFile(val kind: String) extends TestFileCommon { case class PosTestFile(file: JFile, fileManager: FileManager) extends TestFile("pos") case class NegTestFile(file: JFile, fileManager: FileManager) extends TestFile("neg") case class RunTestFile(file: JFile, fileManager: FileManager) extends TestFile("run") -case class BuildManagerTestFile(file: JFile, fileManager: FileManager) extends TestFile("bm") case class ScalaCheckTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalacheck") case class JvmTestFile(file: JFile, fileManager: FileManager) extends TestFile("jvm") case class ShootoutTestFile(file: JFile, fileManager: FileManager) extends TestFile("shootout") { diff --git a/test/disabled/presentation/simple-tests.check b/test/disabled/presentation/simple-tests.check index cdb80ed987..0f72cb5ab9 100644 --- a/test/disabled/presentation/simple-tests.check +++ b/test/disabled/presentation/simple-tests.check @@ -187,8 +187,6 @@ TypeMember(value Xshowobj,Tester.this.settings.StringSetting,false,true,) TypeMember(value Xshowtrees,Tester.this.settings.BooleanSetting,false,true,) TypeMember(value Xwarnfatal,Tester.this.settings.BooleanSetting,false,true,) TypeMember(value Xwarninit,Tester.this.settings.BooleanSetting,false,true,) -TypeMember(value Ybuilderdebug,Tester.this.settings.ChoiceSetting,false,true,) -TypeMember(value 
Ybuildmanagerdebug,Tester.this.settings.BooleanSetting,false,true,) TypeMember(value Ycompacttrees,Tester.this.settings.BooleanSetting,false,true,) TypeMember(value Ycompletion,Tester.this.settings.BooleanSetting,false,true,) TypeMember(value YdepMethTpes,Tester.this.settings.BooleanSetting,false,true,) diff --git a/test/files/buildmanager/annotated/A.scala b/test/files/buildmanager/annotated/A.scala deleted file mode 100644 index 4130cf21ec..0000000000 --- a/test/files/buildmanager/annotated/A.scala +++ /dev/null @@ -1 +0,0 @@ -case class A[T](x: String, y: T) diff --git a/test/files/buildmanager/annotated/annotated.check b/test/files/buildmanager/annotated/annotated.check deleted file mode 100644 index ce92c9a294..0000000000 --- a/test/files/buildmanager/annotated/annotated.check +++ /dev/null @@ -1,6 +0,0 @@ -builder > A.scala -compiling Set(A.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List(), object A -> List()) diff --git a/test/files/buildmanager/annotated/annotated.test b/test/files/buildmanager/annotated/annotated.test deleted file mode 100644 index 392e0d365f..0000000000 --- a/test/files/buildmanager/annotated/annotated.test +++ /dev/null @@ -1,2 +0,0 @@ ->>compile A.scala ->>compile A.scala diff --git a/test/files/buildmanager/freshnames/A.scala b/test/files/buildmanager/freshnames/A.scala deleted file mode 100644 index e8ab26ca1e..0000000000 --- a/test/files/buildmanager/freshnames/A.scala +++ /dev/null @@ -1,16 +0,0 @@ -abstract class A { - - var t: List[B] - - def foo(n: String): Option[B] = { - t.reverse find (_.names contains n) - } - - def bar(n: Int): Option[B] = { - t.reverse find (_.names contains n) - } -} - -//class A -case class B(names: List[String]) - diff --git a/test/files/buildmanager/freshnames/B.scala b/test/files/buildmanager/freshnames/B.scala deleted file mode 100644 index d700225c08..0000000000 --- a/test/files/buildmanager/freshnames/B.scala +++ /dev/null @@ -1,4 +0,0 @@ -abstract class C extends A { - def test(n: Int) = bar(n) -} - diff --git a/test/files/buildmanager/freshnames/freshnames.check b/test/files/buildmanager/freshnames/freshnames.check deleted file mode 100644 index 9f05fb8a36..0000000000 --- a/test/files/buildmanager/freshnames/freshnames.check +++ /dev/null @@ -1,6 +0,0 @@ -builder > B.scala A.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List(), class B -> List(), object B -> List()) diff --git a/test/files/buildmanager/freshnames/freshnames.test b/test/files/buildmanager/freshnames/freshnames.test deleted file mode 100644 index 20b20298f9..0000000000 --- a/test/files/buildmanager/freshnames/freshnames.test +++ /dev/null @@ -1,2 +0,0 @@ ->>compile B.scala A.scala ->>compile A.scala diff --git a/test/files/buildmanager/infer/A.scala b/test/files/buildmanager/infer/A.scala deleted file mode 100644 index 46b5391609..0000000000 --- a/test/files/buildmanager/infer/A.scala +++ /dev/null @@ -1,16 +0,0 @@ -class Foo(flag: Boolean) { - val classpath = - if (flag) - new AClasspath - else - new BClasspath -} - -class AClasspath extends MergedClasspath[A] - -class BClasspath extends MergedClasspath[B] - -abstract class MergedClasspath[T] - -class A -class B diff --git a/test/files/buildmanager/infer/infer.check b/test/files/buildmanager/infer/infer.check deleted file mode 100644 index 1f736977ff..0000000000 --- a/test/files/buildmanager/infer/infer.check +++ /dev/null @@ -1,6 +0,0 @@ -builder > A.scala -compiling Set(A.scala) 
-Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List(), class AClasspath -> List(), class B -> List(), class BClasspath -> List(), class Foo -> List(), class MergedClasspath -> List()) diff --git a/test/files/buildmanager/infer/infer.test b/test/files/buildmanager/infer/infer.test deleted file mode 100644 index 392e0d365f..0000000000 --- a/test/files/buildmanager/infer/infer.test +++ /dev/null @@ -1,2 +0,0 @@ ->>compile A.scala ->>compile A.scala diff --git a/test/files/buildmanager/namesdefaults/defparam-use.scala b/test/files/buildmanager/namesdefaults/defparam-use.scala deleted file mode 100644 index 5b5bbb3f4e..0000000000 --- a/test/files/buildmanager/namesdefaults/defparam-use.scala +++ /dev/null @@ -1,5 +0,0 @@ - -object Test extends App { - val outer = new Outer - new outer.Inner -} diff --git a/test/files/buildmanager/namesdefaults/defparam.scala b/test/files/buildmanager/namesdefaults/defparam.scala deleted file mode 100644 index d817c719ab..0000000000 --- a/test/files/buildmanager/namesdefaults/defparam.scala +++ /dev/null @@ -1,7 +0,0 @@ -class Outer { - - class Inner(val x: List[Int] = Nil) - -// lazy val Inner = "abc" -} - diff --git a/test/files/buildmanager/namesdefaults/namesdefaults.check b/test/files/buildmanager/namesdefaults/namesdefaults.check deleted file mode 100644 index 4a94d1fb55..0000000000 --- a/test/files/buildmanager/namesdefaults/namesdefaults.check +++ /dev/null @@ -1,9 +0,0 @@ -builder > defparam.scala defparam-use.scala -compiling Set(defparam-use.scala, defparam.scala) -Changes: Map() -builder > defparam-use.scala -compiling Set(defparam-use.scala) -Changes: Map(class Test$delayedInit$body -> List(), object Test -> List()) -builder > defparam-use.scala -compiling Set(defparam-use.scala) -Changes: Map(class Test$delayedInit$body -> List(), object Test -> List()) diff --git a/test/files/buildmanager/namesdefaults/namesdefaults.test b/test/files/buildmanager/namesdefaults/namesdefaults.test deleted file mode 100644 index 84ccc36bc3..0000000000 --- a/test/files/buildmanager/namesdefaults/namesdefaults.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile defparam.scala defparam-use.scala ->>compile defparam-use.scala ->>compile defparam-use.scala diff --git a/test/files/buildmanager/simpletest/A.scala b/test/files/buildmanager/simpletest/A.scala deleted file mode 100644 index ef704706bb..0000000000 --- a/test/files/buildmanager/simpletest/A.scala +++ /dev/null @@ -1,3 +0,0 @@ -class A { - def foo = 2 -} diff --git a/test/files/buildmanager/simpletest/B.scala b/test/files/buildmanager/simpletest/B.scala deleted file mode 100644 index 364dc6e4cb..0000000000 --- a/test/files/buildmanager/simpletest/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -class B extends A { - override def foo = 2 -} diff --git a/test/files/buildmanager/simpletest/simpletest.changes/A1.scala b/test/files/buildmanager/simpletest/simpletest.changes/A1.scala deleted file mode 100644 index 83d15dc739..0000000000 --- a/test/files/buildmanager/simpletest/simpletest.changes/A1.scala +++ /dev/null @@ -1 +0,0 @@ -class A diff --git a/test/files/buildmanager/simpletest/simpletest.check b/test/files/buildmanager/simpletest/simpletest.check deleted file mode 100644 index 95ea2c4c0d..0000000000 --- a/test/files/buildmanager/simpletest/simpletest.check +++ /dev/null @@ -1,11 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List(Removed(Definition(A.foo)))) -invalidate B.scala 
because inherited method removed [Removed(Definition(A.foo))] -compiling Set(B.scala) -B.scala:2: error: method foo overrides nothing - override def foo = 2 - ^ diff --git a/test/files/buildmanager/simpletest/simpletest.test b/test/files/buildmanager/simpletest/simpletest.test deleted file mode 100644 index 2c0be1502f..0000000000 --- a/test/files/buildmanager/simpletest/simpletest.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A1.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2280/A.scala b/test/files/buildmanager/t2280/A.scala deleted file mode 100644 index 5febadeb06..0000000000 --- a/test/files/buildmanager/t2280/A.scala +++ /dev/null @@ -1 +0,0 @@ -class A extends B diff --git a/test/files/buildmanager/t2280/B.java b/test/files/buildmanager/t2280/B.java deleted file mode 100644 index aef8e106e9..0000000000 --- a/test/files/buildmanager/t2280/B.java +++ /dev/null @@ -1,2 +0,0 @@ -public class B {} - diff --git a/test/files/buildmanager/t2280/t2280.check b/test/files/buildmanager/t2280/t2280.check deleted file mode 100644 index 7ea7511c63..0000000000 --- a/test/files/buildmanager/t2280/t2280.check +++ /dev/null @@ -1,6 +0,0 @@ -builder > A.scala B.java -compiling Set(A.scala, B.java) -Changes: Map() -builder > B.java -compiling Set(B.java) -Changes: Map(class B -> List()) diff --git a/test/files/buildmanager/t2280/t2280.test b/test/files/buildmanager/t2280/t2280.test deleted file mode 100644 index 2eda777853..0000000000 --- a/test/files/buildmanager/t2280/t2280.test +++ /dev/null @@ -1,2 +0,0 @@ ->>compile A.scala B.java ->>compile B.java diff --git a/test/files/buildmanager/t2556_1/A.scala b/test/files/buildmanager/t2556_1/A.scala deleted file mode 100644 index c6e200b217..0000000000 --- a/test/files/buildmanager/t2556_1/A.scala +++ /dev/null @@ -1,3 +0,0 @@ -class A { - def x(i: Int) = i+"3" -} diff --git a/test/files/buildmanager/t2556_1/B.scala b/test/files/buildmanager/t2556_1/B.scala deleted file mode 100644 index 8529587b56..0000000000 --- a/test/files/buildmanager/t2556_1/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -class B extends A { - def x(s: String) = s+"5" -} diff --git a/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala b/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala deleted file mode 100644 index 4ac1045e13..0000000000 --- a/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala +++ /dev/null @@ -1,4 +0,0 @@ -class A { - def x(i: String) = i+"3" -} - diff --git a/test/files/buildmanager/t2556_1/t2556_1.check b/test/files/buildmanager/t2556_1/t2556_1.check deleted file mode 100644 index 2e501c8f6f..0000000000 --- a/test/files/buildmanager/t2556_1/t2556_1.check +++ /dev/null @@ -1,12 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: ])) -invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: ]] -compiling Set(B.scala) -B.scala:2: error: overriding method x in class A of type (i: String)String; - method x needs `override' modifier - def x(s: String) = s+"5" - ^ diff --git a/test/files/buildmanager/t2556_1/t2556_1.test b/test/files/buildmanager/t2556_1/t2556_1.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2556_1/t2556_1.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update 
A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2556_2/A.scala b/test/files/buildmanager/t2556_2/A.scala deleted file mode 100644 index b8da5c8fb1..0000000000 --- a/test/files/buildmanager/t2556_2/A.scala +++ /dev/null @@ -1,4 +0,0 @@ -class A { - def x(i: Int) = i+"3" -} - diff --git a/test/files/buildmanager/t2556_2/B.scala b/test/files/buildmanager/t2556_2/B.scala deleted file mode 100644 index 80ff25d0ca..0000000000 --- a/test/files/buildmanager/t2556_2/B.scala +++ /dev/null @@ -1,2 +0,0 @@ -class B extends A - diff --git a/test/files/buildmanager/t2556_2/C.scala b/test/files/buildmanager/t2556_2/C.scala deleted file mode 100644 index 0ab13e3757..0000000000 --- a/test/files/buildmanager/t2556_2/C.scala +++ /dev/null @@ -1,4 +0,0 @@ -class C extends B { - def x(s: String) = s+"5" -} - diff --git a/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala b/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala deleted file mode 100644 index 4ac1045e13..0000000000 --- a/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala +++ /dev/null @@ -1,4 +0,0 @@ -class A { - def x(i: String) = i+"3" -} - diff --git a/test/files/buildmanager/t2556_2/t2556_2.check b/test/files/buildmanager/t2556_2/t2556_2.check deleted file mode 100644 index cae4f72212..0000000000 --- a/test/files/buildmanager/t2556_2/t2556_2.check +++ /dev/null @@ -1,13 +0,0 @@ -builder > A.scala B.scala C.scala -compiling Set(A.scala, B.scala, C.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: ])) -invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: ]] -invalidate C.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: ]] -compiling Set(B.scala, C.scala) -C.scala:2: error: overriding method x in class A of type (i: String)String; - method x needs `override' modifier - def x(s: String) = s+"5" - ^ diff --git a/test/files/buildmanager/t2556_2/t2556_2.test b/test/files/buildmanager/t2556_2/t2556_2.test deleted file mode 100644 index 9f31bb6409..0000000000 --- a/test/files/buildmanager/t2556_2/t2556_2.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala C.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2556_3/A.scala b/test/files/buildmanager/t2556_3/A.scala deleted file mode 100644 index 089a05f493..0000000000 --- a/test/files/buildmanager/t2556_3/A.scala +++ /dev/null @@ -1,5 +0,0 @@ -class A { - def x = 3 -} -class B extends A - diff --git a/test/files/buildmanager/t2556_3/B.scala b/test/files/buildmanager/t2556_3/B.scala deleted file mode 100644 index 0ec5ae4b55..0000000000 --- a/test/files/buildmanager/t2556_3/B.scala +++ /dev/null @@ -1,5 +0,0 @@ -object E { - def main(args: Array[String]) = - println( (new C).x ) -} - diff --git a/test/files/buildmanager/t2556_3/C.scala b/test/files/buildmanager/t2556_3/C.scala deleted file mode 100644 index 403df8455e..0000000000 --- a/test/files/buildmanager/t2556_3/C.scala +++ /dev/null @@ -1,2 +0,0 @@ -class C extends B - diff --git a/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala b/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala deleted file mode 100644 index 21cb2779f9..0000000000 --- a/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala +++ /dev/null @@ -1,5 +0,0 @@ -class A { - def x 
= 3 -} -class B - diff --git a/test/files/buildmanager/t2556_3/t2556_3.check b/test/files/buildmanager/t2556_3/t2556_3.check deleted file mode 100644 index 34f90f7f9b..0000000000 --- a/test/files/buildmanager/t2556_3/t2556_3.check +++ /dev/null @@ -1,18 +0,0 @@ -builder > A.scala B.scala C.scala -compiling Set(A.scala, B.scala, C.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List(), class B -> List(Changed(Class(B))[List((A,Object))])) -invalidate C.scala because parents have changed [Changed(Class(B))[List((A,Object))]] -invalidate B.scala because it references invalid (no longer inherited) definition [ParentChanged(Class(C))] -compiling Set(B.scala, C.scala) -B.scala:3: error: type mismatch; - found : C - required: ?{def x: ?} -Note that implicit conversions are not applicable because they are ambiguous: - both method any2Ensuring in object Predef of type [A](x: A)Ensuring[A] - and method any2ArrowAssoc in object Predef of type [A](x: A)ArrowAssoc[A] - are possible conversion functions from C to ?{def x: ?} - println( (new C).x ) - ^ diff --git a/test/files/buildmanager/t2556_3/t2556_3.test b/test/files/buildmanager/t2556_3/t2556_3.test deleted file mode 100644 index 9f31bb6409..0000000000 --- a/test/files/buildmanager/t2556_3/t2556_3.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala C.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2557/A.scala b/test/files/buildmanager/t2557/A.scala deleted file mode 100644 index 3be55f19a6..0000000000 --- a/test/files/buildmanager/t2557/A.scala +++ /dev/null @@ -1,4 +0,0 @@ -trait A { - def x = 3 -} - diff --git a/test/files/buildmanager/t2557/B.scala b/test/files/buildmanager/t2557/B.scala deleted file mode 100644 index ea86a90079..0000000000 --- a/test/files/buildmanager/t2557/B.scala +++ /dev/null @@ -1,4 +0,0 @@ -trait B extends A { - override def x = super.x * 2 -} - diff --git a/test/files/buildmanager/t2557/C.scala b/test/files/buildmanager/t2557/C.scala deleted file mode 100644 index dd575ac38d..0000000000 --- a/test/files/buildmanager/t2557/C.scala +++ /dev/null @@ -1,3 +0,0 @@ -trait C extends A { - override def x = super.x + 5 -} diff --git a/test/files/buildmanager/t2557/D.scala b/test/files/buildmanager/t2557/D.scala deleted file mode 100644 index 4e662a80ce..0000000000 --- a/test/files/buildmanager/t2557/D.scala +++ /dev/null @@ -1 +0,0 @@ -trait D extends C with B diff --git a/test/files/buildmanager/t2557/E.scala b/test/files/buildmanager/t2557/E.scala deleted file mode 100644 index 2aee552675..0000000000 --- a/test/files/buildmanager/t2557/E.scala +++ /dev/null @@ -1 +0,0 @@ -trait E extends D diff --git a/test/files/buildmanager/t2557/F.scala b/test/files/buildmanager/t2557/F.scala deleted file mode 100644 index e1996704e7..0000000000 --- a/test/files/buildmanager/t2557/F.scala +++ /dev/null @@ -1,4 +0,0 @@ -object F extends E { - def main(args: Array[String]) = - println(x) -} diff --git a/test/files/buildmanager/t2557/t2557.changes/D2.scala b/test/files/buildmanager/t2557/t2557.changes/D2.scala deleted file mode 100644 index 67295f8e6d..0000000000 --- a/test/files/buildmanager/t2557/t2557.changes/D2.scala +++ /dev/null @@ -1,2 +0,0 @@ -trait D extends B with C - diff --git a/test/files/buildmanager/t2557/t2557.check b/test/files/buildmanager/t2557/t2557.check deleted file mode 100644 index 736ef3645e..0000000000 --- a/test/files/buildmanager/t2557/t2557.check +++ /dev/null @@ -1,10 +0,0 @@ -builder > A.scala B.scala C.scala D.scala 
E.scala F.scala -compiling Set(A.scala, B.scala, C.scala, D.scala, E.scala, F.scala) -Changes: Map() -builder > D.scala -compiling Set(D.scala) -Changes: Map(trait D -> List(Changed(Class(D))[List((Object,Object), (C,B), (B,C))])) -invalidate E.scala because parents have changed [Changed(Class(D))[List((Object,Object), (C,B), (B,C))]] -invalidate F.scala because parents have changed [Changed(Class(D))[List((Object,Object), (C,B), (B,C))]] -compiling Set(E.scala, F.scala) -Changes: Map(object F -> List(), trait E -> List()) diff --git a/test/files/buildmanager/t2557/t2557.test b/test/files/buildmanager/t2557/t2557.test deleted file mode 100644 index 6b0103092f..0000000000 --- a/test/files/buildmanager/t2557/t2557.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala C.scala D.scala E.scala F.scala ->>update D.scala=>D2.scala ->>compile D.scala diff --git a/test/files/buildmanager/t2559/A.scala b/test/files/buildmanager/t2559/A.scala deleted file mode 100644 index fb4f6e3545..0000000000 --- a/test/files/buildmanager/t2559/A.scala +++ /dev/null @@ -1,5 +0,0 @@ -sealed trait A -class B extends A -class C extends A -//class E extends A - diff --git a/test/files/buildmanager/t2559/D.scala b/test/files/buildmanager/t2559/D.scala deleted file mode 100644 index 62dc5427f9..0000000000 --- a/test/files/buildmanager/t2559/D.scala +++ /dev/null @@ -1,4 +0,0 @@ -object D { - def x(a: A) = if (a.isInstanceOf[B] || a.isInstanceOf[C]) () -} - diff --git a/test/files/buildmanager/t2559/t2559.changes/A2.scala b/test/files/buildmanager/t2559/t2559.changes/A2.scala deleted file mode 100644 index 8e90594e2c..0000000000 --- a/test/files/buildmanager/t2559/t2559.changes/A2.scala +++ /dev/null @@ -1,5 +0,0 @@ -sealed trait A -class B extends A -class C extends A -class E extends A - diff --git a/test/files/buildmanager/t2559/t2559.check b/test/files/buildmanager/t2559/t2559.check deleted file mode 100644 index 4d43838cf5..0000000000 --- a/test/files/buildmanager/t2559/t2559.check +++ /dev/null @@ -1,9 +0,0 @@ -builder > A.scala D.scala -compiling Set(A.scala, D.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class B -> List(), class C -> List(), class E -> List(Changed(Class(A))[class E extends a sealed trait A]), trait A -> List()) -invalidate D.scala because it references changed class [Changed(Class(A))[class E extends a sealed trait A]] -compiling Set(D.scala) -Changes: Map(object D -> List()) diff --git a/test/files/buildmanager/t2559/t2559.test b/test/files/buildmanager/t2559/t2559.test deleted file mode 100644 index b787c5b39f..0000000000 --- a/test/files/buildmanager/t2559/t2559.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala D.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2562/A.scala b/test/files/buildmanager/t2562/A.scala deleted file mode 100644 index 740cd1e868..0000000000 --- a/test/files/buildmanager/t2562/A.scala +++ /dev/null @@ -1,7 +0,0 @@ -object A -{ - def x0 = B.x0 - def x1 = B.x1 - def x2 = B.x2 - def x3 = 3 -} diff --git a/test/files/buildmanager/t2562/B.scala b/test/files/buildmanager/t2562/B.scala deleted file mode 100644 index a524e5cc84..0000000000 --- a/test/files/buildmanager/t2562/B.scala +++ /dev/null @@ -1,8 +0,0 @@ -object B -{ - def x0 = A.x1 - def x1 = A.x2 - def x2 = A.x3 -} - - diff --git a/test/files/buildmanager/t2562/t2562.changes/A2.scala b/test/files/buildmanager/t2562/t2562.changes/A2.scala deleted file mode 100644 index c560e1e816..0000000000 --- 
a/test/files/buildmanager/t2562/t2562.changes/A2.scala +++ /dev/null @@ -1,8 +0,0 @@ -object A -{ - def x0 = B.x0 - def x1 = B.x1 - def x2 = B.x2 - def x3 = "3" -} - diff --git a/test/files/buildmanager/t2562/t2562.check b/test/files/buildmanager/t2562/t2562.check deleted file mode 100644 index 74575f28ea..0000000000 --- a/test/files/buildmanager/t2562/t2562.check +++ /dev/null @@ -1,12 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(object A -> List(Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: ])) -invalidate B.scala because it references changed definition [Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: ]] -compiling Set(B.scala) -Changes: Map(object B -> List(Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: ])) -invalidate A.scala because it references changed definition [Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: ]] -compiling Set(A.scala, B.scala) -Changes: Map(object A -> List(Changed(Definition(A.x0))[method x0 changed from ()Int to ()String flags: ], Changed(Definition(A.x1))[method x1 changed from ()Int to ()String flags: ], Changed(Definition(A.x2))[method x2 changed from ()Int to ()String flags: ]), object B -> List(Changed(Definition(B.x0))[method x0 changed from ()Int to ()String flags: ], Changed(Definition(B.x1))[method x1 changed from ()Int to ()String flags: ])) diff --git a/test/files/buildmanager/t2562/t2562.test b/test/files/buildmanager/t2562/t2562.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2562/t2562.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2649/A.scala b/test/files/buildmanager/t2649/A.scala deleted file mode 100644 index 86cc3f2c15..0000000000 --- a/test/files/buildmanager/t2649/A.scala +++ /dev/null @@ -1,3 +0,0 @@ -object A { - def x(zz: Int, yy: Int) = yy - zz -} diff --git a/test/files/buildmanager/t2649/B.scala b/test/files/buildmanager/t2649/B.scala deleted file mode 100644 index 26c89518cb..0000000000 --- a/test/files/buildmanager/t2649/B.scala +++ /dev/null @@ -1,4 +0,0 @@ -object B { - def main(args: Array[String]): Unit = - println( A.x(zz = 3, yy = 4) ) -} diff --git a/test/files/buildmanager/t2649/t2649.changes/A2.scala b/test/files/buildmanager/t2649/t2649.changes/A2.scala deleted file mode 100644 index 9a6309fca3..0000000000 --- a/test/files/buildmanager/t2649/t2649.changes/A2.scala +++ /dev/null @@ -1,4 +0,0 @@ -object A { - def x(yy: Int, zz: Int) = yy - zz -} - diff --git a/test/files/buildmanager/t2649/t2649.check b/test/files/buildmanager/t2649/t2649.check deleted file mode 100644 index d0f41f32ec..0000000000 --- a/test/files/buildmanager/t2649/t2649.check +++ /dev/null @@ -1,9 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: ])) -invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: ]] -compiling Set(B.scala) -Changes: Map(object B -> List()) diff --git a/test/files/buildmanager/t2649/t2649.test b/test/files/buildmanager/t2649/t2649.test deleted file mode 100644 index 
6f3bd03361..0000000000 --- a/test/files/buildmanager/t2649/t2649.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2650_1/A.scala b/test/files/buildmanager/t2650_1/A.scala deleted file mode 100644 index 74714a3c47..0000000000 --- a/test/files/buildmanager/t2650_1/A.scala +++ /dev/null @@ -1,4 +0,0 @@ -trait A { - type S[_] -} - diff --git a/test/files/buildmanager/t2650_1/B.scala b/test/files/buildmanager/t2650_1/B.scala deleted file mode 100644 index 80f0e30259..0000000000 --- a/test/files/buildmanager/t2650_1/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -trait B extends A { - type F = S[Int] -} diff --git a/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala b/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala deleted file mode 100644 index 2b8ead4ff1..0000000000 --- a/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala +++ /dev/null @@ -1,3 +0,0 @@ -trait A { - type S -} diff --git a/test/files/buildmanager/t2650_1/t2650_1.check b/test/files/buildmanager/t2650_1/t2650_1.check deleted file mode 100644 index f1e4b1b8bc..0000000000 --- a/test/files/buildmanager/t2650_1/t2650_1.check +++ /dev/null @@ -1,12 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -warning: there were 1 feature warnings; re-run with -feature for details -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(trait A -> List(Changed(Definition(A.S))[type S changed from A.this.S[_] to A.this.S flags: ])) -invalidate B.scala because inherited method changed [Changed(Definition(A.S))[type S changed from A.this.S[_] to A.this.S flags: ]] -compiling Set(B.scala) -B.scala:2: error: B.this.S does not take type parameters - type F = S[Int] - ^ diff --git a/test/files/buildmanager/t2650_1/t2650_1.test b/test/files/buildmanager/t2650_1/t2650_1.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2650_1/t2650_1.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2650_2/A.scala b/test/files/buildmanager/t2650_2/A.scala deleted file mode 100644 index bcea634485..0000000000 --- a/test/files/buildmanager/t2650_2/A.scala +++ /dev/null @@ -1,3 +0,0 @@ -trait A { - type S = Int -} diff --git a/test/files/buildmanager/t2650_2/B.scala b/test/files/buildmanager/t2650_2/B.scala deleted file mode 100644 index 22a3a9a48e..0000000000 --- a/test/files/buildmanager/t2650_2/B.scala +++ /dev/null @@ -1,4 +0,0 @@ -trait B extends A { - def x: S - def y: Int = x -} diff --git a/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala b/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala deleted file mode 100644 index 8274c1b62d..0000000000 --- a/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala +++ /dev/null @@ -1,4 +0,0 @@ -trait A { - type S = Long -} - diff --git a/test/files/buildmanager/t2650_2/t2650_2.check b/test/files/buildmanager/t2650_2/t2650_2.check deleted file mode 100644 index 53a0287dfc..0000000000 --- a/test/files/buildmanager/t2650_2/t2650_2.check +++ /dev/null @@ -1,14 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(trait A -> List(Changed(Definition(A.S))[type S changed from A.this.S to A.this.S flags: ])) -invalidate B.scala because inherited method changed [Changed(Definition(A.S))[type S changed from A.this.S to A.this.S flags: ]] -compiling Set(B.scala) 
-B.scala:3: error: type mismatch; - found : B.this.S - (which expands to) Long - required: Int - def y: Int = x - ^ diff --git a/test/files/buildmanager/t2650_2/t2650_2.test b/test/files/buildmanager/t2650_2/t2650_2.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2650_2/t2650_2.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2650_3/A.scala b/test/files/buildmanager/t2650_3/A.scala deleted file mode 100644 index cd13843eb9..0000000000 --- a/test/files/buildmanager/t2650_3/A.scala +++ /dev/null @@ -1,4 +0,0 @@ -trait A { - type T = Int - def x: T -} diff --git a/test/files/buildmanager/t2650_3/B.scala b/test/files/buildmanager/t2650_3/B.scala deleted file mode 100644 index 46a8cf270a..0000000000 --- a/test/files/buildmanager/t2650_3/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -object B { - def x(a: A): Int = a.x -} diff --git a/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala b/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala deleted file mode 100644 index e5667b2539..0000000000 --- a/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala +++ /dev/null @@ -1,4 +0,0 @@ -trait A { - type T = Long - def x: T -} diff --git a/test/files/buildmanager/t2650_3/t2650_3.check b/test/files/buildmanager/t2650_3/t2650_3.check deleted file mode 100644 index 5c6326d59f..0000000000 --- a/test/files/buildmanager/t2650_3/t2650_3.check +++ /dev/null @@ -1,14 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(trait A -> List(Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ])) -invalidate B.scala because it references changed definition [Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]] -compiling Set(B.scala) -B.scala:2: error: type mismatch; - found : a.T - (which expands to) Long - required: Int - def x(a: A): Int = a.x - ^ diff --git a/test/files/buildmanager/t2650_3/t2650_3.test b/test/files/buildmanager/t2650_3/t2650_3.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2650_3/t2650_3.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2650_4/A.scala b/test/files/buildmanager/t2650_4/A.scala deleted file mode 100644 index b9a519eb48..0000000000 --- a/test/files/buildmanager/t2650_4/A.scala +++ /dev/null @@ -1,5 +0,0 @@ -trait A { - type T = Int - type T2 = T - def x: T2 -} diff --git a/test/files/buildmanager/t2650_4/B.scala b/test/files/buildmanager/t2650_4/B.scala deleted file mode 100644 index 46a8cf270a..0000000000 --- a/test/files/buildmanager/t2650_4/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -object B { - def x(a: A): Int = a.x -} diff --git a/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala b/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala deleted file mode 100644 index 0220e7b7bc..0000000000 --- a/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala +++ /dev/null @@ -1,5 +0,0 @@ -trait A { - type T = Long - type T2 = T - def x: T2 -} diff --git a/test/files/buildmanager/t2650_4/t2650_4.check b/test/files/buildmanager/t2650_4/t2650_4.check deleted file mode 100644 index a4aeaddfbb..0000000000 --- a/test/files/buildmanager/t2650_4/t2650_4.check +++ /dev/null @@ -1,14 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > 
A.scala -compiling Set(A.scala) -Changes: Map(trait A -> List(Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ])) -invalidate B.scala because it references changed definition [Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]] -compiling Set(B.scala) -B.scala:2: error: type mismatch; - found : a.T2 - (which expands to) Long - required: Int - def x(a: A): Int = a.x - ^ diff --git a/test/files/buildmanager/t2650_4/t2650_4.test b/test/files/buildmanager/t2650_4/t2650_4.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2650_4/t2650_4.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2651_2/A.scala b/test/files/buildmanager/t2651_2/A.scala deleted file mode 100644 index d712f6febe..0000000000 --- a/test/files/buildmanager/t2651_2/A.scala +++ /dev/null @@ -1 +0,0 @@ -trait A[T] diff --git a/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala b/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala deleted file mode 100644 index 7fb573e077..0000000000 --- a/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala +++ /dev/null @@ -1 +0,0 @@ -trait A[S] diff --git a/test/files/buildmanager/t2651_2/t2651_2.check b/test/files/buildmanager/t2651_2/t2651_2.check deleted file mode 100644 index dd789b7565..0000000000 --- a/test/files/buildmanager/t2651_2/t2651_2.check +++ /dev/null @@ -1,6 +0,0 @@ -builder > A.scala -compiling Set(A.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(trait A -> List()) diff --git a/test/files/buildmanager/t2651_2/t2651_2.test b/test/files/buildmanager/t2651_2/t2651_2.test deleted file mode 100644 index d0614473ce..0000000000 --- a/test/files/buildmanager/t2651_2/t2651_2.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2651_3/A.scala b/test/files/buildmanager/t2651_3/A.scala deleted file mode 100644 index 14f9e4662f..0000000000 --- a/test/files/buildmanager/t2651_3/A.scala +++ /dev/null @@ -1,3 +0,0 @@ -trait A[T, S] { - def x: T -} diff --git a/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala b/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala deleted file mode 100644 index 51bf27d1fa..0000000000 --- a/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala +++ /dev/null @@ -1,3 +0,0 @@ -trait A[T, S] { - def x: S -} diff --git a/test/files/buildmanager/t2651_3/t2651_3.check b/test/files/buildmanager/t2651_3/t2651_3.check deleted file mode 100644 index 2a60e3d806..0000000000 --- a/test/files/buildmanager/t2651_3/t2651_3.check +++ /dev/null @@ -1,6 +0,0 @@ -builder > A.scala -compiling Set(A.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()S flags: ])) diff --git a/test/files/buildmanager/t2651_3/t2651_3.test b/test/files/buildmanager/t2651_3/t2651_3.test deleted file mode 100644 index d0614473ce..0000000000 --- a/test/files/buildmanager/t2651_3/t2651_3.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2651_4/A.scala b/test/files/buildmanager/t2651_4/A.scala deleted file mode 100644 index 63f2a1643e..0000000000 --- a/test/files/buildmanager/t2651_4/A.scala +++ /dev/null @@ -1,5 +0,0 @@ -trait A[T, S] { - def x: T - def y(a: T) - def z[B <: T] -} diff --git 
a/test/files/buildmanager/t2651_4/B.scala b/test/files/buildmanager/t2651_4/B.scala deleted file mode 100644 index b33dbde676..0000000000 --- a/test/files/buildmanager/t2651_4/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -trait B extends A[Int, String] { - def x = 3 -} diff --git a/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala b/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala deleted file mode 100644 index f155129d13..0000000000 --- a/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala +++ /dev/null @@ -1,5 +0,0 @@ -trait A[S, T] { - def x: T - def y(a: T) - def z[B <: T] -} diff --git a/test/files/buildmanager/t2651_4/t2651_4.check b/test/files/buildmanager/t2651_4/t2651_4.check deleted file mode 100644 index 74e5d8f99b..0000000000 --- a/test/files/buildmanager/t2651_4/t2651_4.check +++ /dev/null @@ -1,13 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()T flags: ], Changed(Definition(A.y))[method y changed from (a: T)Unit to (a: T)Unit flags: ], Changed(Definition(A.z))[method z changed from [B <: T]()Unit to [B <: T]()Unit flags: ])) -invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from ()T to ()T flags: ]] -compiling Set(B.scala) -B.scala:2: error: type mismatch; - found : Int(3) - required: String - def x = 3 - ^ diff --git a/test/files/buildmanager/t2651_4/t2651_4.test b/test/files/buildmanager/t2651_4/t2651_4.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2651_4/t2651_4.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2653/A.scala b/test/files/buildmanager/t2653/A.scala deleted file mode 100644 index fb17a158c7..0000000000 --- a/test/files/buildmanager/t2653/A.scala +++ /dev/null @@ -1,2 +0,0 @@ -class A[+T] - diff --git a/test/files/buildmanager/t2653/B.scala b/test/files/buildmanager/t2653/B.scala deleted file mode 100644 index 8f55a88e05..0000000000 --- a/test/files/buildmanager/t2653/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -object B { - val a: A[Any] = new A[Int] -} diff --git a/test/files/buildmanager/t2653/t2653.changes/A2.scala b/test/files/buildmanager/t2653/t2653.changes/A2.scala deleted file mode 100644 index 51d13cce6e..0000000000 --- a/test/files/buildmanager/t2653/t2653.changes/A2.scala +++ /dev/null @@ -1,2 +0,0 @@ -class A[T] - diff --git a/test/files/buildmanager/t2653/t2653.check b/test/files/buildmanager/t2653/t2653.check deleted file mode 100644 index 36781522af..0000000000 --- a/test/files/buildmanager/t2653/t2653.check +++ /dev/null @@ -1,15 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List(Changed(Class(A))[ tparams: List((type T,type T))], Changed(Definition(A.))[constructor A changed from ()A[T] to ()A[T] flags: ])) -invalidate B.scala because it references changed class [Changed(Class(A))[ tparams: List((type T,type T))]] -compiling Set(B.scala) -B.scala:2: error: type mismatch; - found : A[Int] - required: A[Any] -Note: Int <: Any, but class A is invariant in type T. -You may wish to define T as +T instead. 
(SLS 4.5) - val a: A[Any] = new A[Int] - ^ diff --git a/test/files/buildmanager/t2653/t2653.test b/test/files/buildmanager/t2653/t2653.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2653/t2653.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2654/A.scala b/test/files/buildmanager/t2654/A.scala deleted file mode 100644 index 75f396d039..0000000000 --- a/test/files/buildmanager/t2654/A.scala +++ /dev/null @@ -1,2 +0,0 @@ -class A - diff --git a/test/files/buildmanager/t2654/B.scala b/test/files/buildmanager/t2654/B.scala deleted file mode 100644 index a18aec3dbe..0000000000 --- a/test/files/buildmanager/t2654/B.scala +++ /dev/null @@ -1 +0,0 @@ -class B extends A diff --git a/test/files/buildmanager/t2654/t2654.changes/A2.scala b/test/files/buildmanager/t2654/t2654.changes/A2.scala deleted file mode 100644 index c302edbd85..0000000000 --- a/test/files/buildmanager/t2654/t2654.changes/A2.scala +++ /dev/null @@ -1,4 +0,0 @@ -class A { - private def x = 5 -} - diff --git a/test/files/buildmanager/t2654/t2654.check b/test/files/buildmanager/t2654/t2654.check deleted file mode 100644 index 68f6e8efc0..0000000000 --- a/test/files/buildmanager/t2654/t2654.check +++ /dev/null @@ -1,6 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List()) diff --git a/test/files/buildmanager/t2654/t2654.test b/test/files/buildmanager/t2654/t2654.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2654/t2654.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2655/A.scala b/test/files/buildmanager/t2655/A.scala deleted file mode 100644 index b2c54ac47d..0000000000 --- a/test/files/buildmanager/t2655/A.scala +++ /dev/null @@ -1,4 +0,0 @@ -object A { - def x(i: => String) = () -} - diff --git a/test/files/buildmanager/t2655/B.scala b/test/files/buildmanager/t2655/B.scala deleted file mode 100644 index 6c1918c0fb..0000000000 --- a/test/files/buildmanager/t2655/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -object B { - val x = A.x("3") -} diff --git a/test/files/buildmanager/t2655/t2655.changes/A2.scala b/test/files/buildmanager/t2655/t2655.changes/A2.scala deleted file mode 100644 index 0d6a7c69bb..0000000000 --- a/test/files/buildmanager/t2655/t2655.changes/A2.scala +++ /dev/null @@ -1,4 +0,0 @@ -object A { - def x(i: Function0[String]) = () -} - diff --git a/test/files/buildmanager/t2655/t2655.check b/test/files/buildmanager/t2655/t2655.check deleted file mode 100644 index 41ce65a2f5..0000000000 --- a/test/files/buildmanager/t2655/t2655.check +++ /dev/null @@ -1,13 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: ])) -invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: ]] -compiling Set(B.scala) -B.scala:2: error: type mismatch; - found : String("3") - required: () => String - val x = A.x("3") - ^ diff --git a/test/files/buildmanager/t2655/t2655.test b/test/files/buildmanager/t2655/t2655.test deleted file mode 100644 index 6f3bd03361..0000000000 --- 
a/test/files/buildmanager/t2655/t2655.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2657/A.scala b/test/files/buildmanager/t2657/A.scala deleted file mode 100644 index 2a6c62d29c..0000000000 --- a/test/files/buildmanager/t2657/A.scala +++ /dev/null @@ -1,3 +0,0 @@ -class A { - implicit def y(i: Int): String = i.toString -} diff --git a/test/files/buildmanager/t2657/B.scala b/test/files/buildmanager/t2657/B.scala deleted file mode 100644 index 77869890db..0000000000 --- a/test/files/buildmanager/t2657/B.scala +++ /dev/null @@ -1,4 +0,0 @@ -object B extends A { - val x: String = 3 -} - diff --git a/test/files/buildmanager/t2657/t2657.changes/A2.scala b/test/files/buildmanager/t2657/t2657.changes/A2.scala deleted file mode 100644 index 7dc99d425e..0000000000 --- a/test/files/buildmanager/t2657/t2657.changes/A2.scala +++ /dev/null @@ -1,3 +0,0 @@ -class A { - def y(i: Int): String = i.toString -} diff --git a/test/files/buildmanager/t2657/t2657.check b/test/files/buildmanager/t2657/t2657.check deleted file mode 100644 index 0d6709e58b..0000000000 --- a/test/files/buildmanager/t2657/t2657.check +++ /dev/null @@ -1,14 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -warning: there were 1 feature warnings; re-run with -feature for details -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List(Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: implicit ])) -invalidate B.scala because inherited method changed [Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: implicit ]] -compiling Set(B.scala) -B.scala:2: error: type mismatch; - found : Int(3) - required: String - val x: String = 3 - ^ diff --git a/test/files/buildmanager/t2657/t2657.test b/test/files/buildmanager/t2657/t2657.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2657/t2657.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2789/A.scala b/test/files/buildmanager/t2789/A.scala deleted file mode 100644 index 08d5bc840c..0000000000 --- a/test/files/buildmanager/t2789/A.scala +++ /dev/null @@ -1,5 +0,0 @@ -class A { - implicit def e: E = new E - def x(i: Int)(implicit y: E): String = "" -} -class E diff --git a/test/files/buildmanager/t2789/B.scala b/test/files/buildmanager/t2789/B.scala deleted file mode 100644 index dcefbeec1b..0000000000 --- a/test/files/buildmanager/t2789/B.scala +++ /dev/null @@ -1,3 +0,0 @@ -object B extends A { - val y = x(3) -} diff --git a/test/files/buildmanager/t2789/t2789.changes/A2.scala b/test/files/buildmanager/t2789/t2789.changes/A2.scala deleted file mode 100644 index 4ba3814e71..0000000000 --- a/test/files/buildmanager/t2789/t2789.changes/A2.scala +++ /dev/null @@ -1,5 +0,0 @@ -class A { - def e: E = new E - def x(i: Int)(implicit y: E): String = "" -} -class E diff --git a/test/files/buildmanager/t2789/t2789.check b/test/files/buildmanager/t2789/t2789.check deleted file mode 100644 index 066561ac44..0000000000 --- a/test/files/buildmanager/t2789/t2789.check +++ /dev/null @@ -1,11 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List(Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit ]), class E -> List()) -invalidate 
B.scala because inherited method changed [Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit ]] -compiling Set(B.scala) -B.scala:2: error: could not find implicit value for parameter y: E - val y = x(3) - ^ diff --git a/test/files/buildmanager/t2789/t2789.test b/test/files/buildmanager/t2789/t2789.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2789/t2789.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2790/A.scala b/test/files/buildmanager/t2790/A.scala deleted file mode 100644 index 6e9c1a90db..0000000000 --- a/test/files/buildmanager/t2790/A.scala +++ /dev/null @@ -1,5 +0,0 @@ -object A { - def x(f: String, g: Int): Int = g - def x(f: Int, g: Int = 3): Int = g -} - diff --git a/test/files/buildmanager/t2790/B.scala b/test/files/buildmanager/t2790/B.scala deleted file mode 100644 index 441055ca12..0000000000 --- a/test/files/buildmanager/t2790/B.scala +++ /dev/null @@ -1,4 +0,0 @@ -object B { - val y = A.x(5) -} - diff --git a/test/files/buildmanager/t2790/t2790.changes/A2.scala b/test/files/buildmanager/t2790/t2790.changes/A2.scala deleted file mode 100644 index 704ef4e96e..0000000000 --- a/test/files/buildmanager/t2790/t2790.changes/A2.scala +++ /dev/null @@ -1,4 +0,0 @@ -object A { - def x(f: String, g: Int = 3): Int = g - def x(f: Int, g: Int): Int = g -} diff --git a/test/files/buildmanager/t2790/t2790.check b/test/files/buildmanager/t2790/t2790.check deleted file mode 100644 index 13d61dac42..0000000000 --- a/test/files/buildmanager/t2790/t2790.check +++ /dev/null @@ -1,13 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(object A -> List(Added(Definition(A.x)), Changed(Definition(A.x))[value x changed from (f: String, g: Int)Int to (f: String, g: Int)Int (f: Int, g: Int)Int flags: ])) -invalidate B.scala because it references changed definition [Changed(Definition(A.x))[value x changed from (f: String, g: Int)Int to (f: String, g: Int)Int (f: Int, g: Int)Int flags: ]] -compiling Set(B.scala) -B.scala:2: error: type mismatch; - found : Int(5) - required: String - val y = A.x(5) - ^ diff --git a/test/files/buildmanager/t2790/t2790.test b/test/files/buildmanager/t2790/t2790.test deleted file mode 100644 index 6f3bd03361..0000000000 --- a/test/files/buildmanager/t2790/t2790.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A.scala B.scala ->>update A.scala=>A2.scala ->>compile A.scala diff --git a/test/files/buildmanager/t2792/A1.scala b/test/files/buildmanager/t2792/A1.scala deleted file mode 100644 index 96dc0ef933..0000000000 --- a/test/files/buildmanager/t2792/A1.scala +++ /dev/null @@ -1,3 +0,0 @@ -object A { - val x = new C -} diff --git a/test/files/buildmanager/t2792/A2.scala b/test/files/buildmanager/t2792/A2.scala deleted file mode 100644 index e55e681c76..0000000000 --- a/test/files/buildmanager/t2792/A2.scala +++ /dev/null @@ -1,4 +0,0 @@ -object B { - import A.x.y - val z = y -} diff --git a/test/files/buildmanager/t2792/A3.scala b/test/files/buildmanager/t2792/A3.scala deleted file mode 100644 index cd083cdb34..0000000000 --- a/test/files/buildmanager/t2792/A3.scala +++ /dev/null @@ -1,3 +0,0 @@ -class C { - val y = 4 -} diff --git a/test/files/buildmanager/t2792/t2792.changes/A1_1.scala b/test/files/buildmanager/t2792/t2792.changes/A1_1.scala deleted file mode 100644 index 00ee05f273..0000000000 --- 
a/test/files/buildmanager/t2792/t2792.changes/A1_1.scala +++ /dev/null @@ -1,3 +0,0 @@ -object A { - var x = new C -} diff --git a/test/files/buildmanager/t2792/t2792.check b/test/files/buildmanager/t2792/t2792.check deleted file mode 100644 index 00a2b83469..0000000000 --- a/test/files/buildmanager/t2792/t2792.check +++ /dev/null @@ -1,14 +0,0 @@ -builder > A1.scala A2.scala A3.scala -compiling Set(A1.scala, A2.scala, A3.scala) -Changes: Map() -builder > A1.scala -compiling Set(A1.scala) -Changes: Map(object A -> List(Added(Definition(A.x_$eq)), Changed(Definition(A.x))[value x changed to variable x])) -invalidate A2.scala because it references changed definition [Changed(Definition(A.x))[value x changed to variable x]] -compiling Set(A2.scala) -A2.scala:2: error: stable identifier required, but A.x found. - import A.x.y - ^ -A2.scala:3: error: not found: value y - val z = y - ^ diff --git a/test/files/buildmanager/t2792/t2792.test b/test/files/buildmanager/t2792/t2792.test deleted file mode 100644 index f199950bba..0000000000 --- a/test/files/buildmanager/t2792/t2792.test +++ /dev/null @@ -1,3 +0,0 @@ ->>compile A1.scala A2.scala A3.scala ->>update A1.scala=>A1_1.scala ->>compile A1.scala diff --git a/test/files/buildmanager/t3045/A.java b/test/files/buildmanager/t3045/A.java deleted file mode 100644 index d1acb00cd6..0000000000 --- a/test/files/buildmanager/t3045/A.java +++ /dev/null @@ -1,7 +0,0 @@ -public interface A { - public class C implements A {} -} - -class B { - static class C {} -} diff --git a/test/files/buildmanager/t3045/t3045.check b/test/files/buildmanager/t3045/t3045.check deleted file mode 100644 index 5e4e71e045..0000000000 --- a/test/files/buildmanager/t3045/t3045.check +++ /dev/null @@ -1,3 +0,0 @@ -builder > A.java -compiling Set(A.java) -Changes: Map() diff --git a/test/files/buildmanager/t3045/t3045.test b/test/files/buildmanager/t3045/t3045.test deleted file mode 100644 index 6cf7e35543..0000000000 --- a/test/files/buildmanager/t3045/t3045.test +++ /dev/null @@ -1 +0,0 @@ ->>compile A.java diff --git a/test/files/buildmanager/t3054/bar/Bar.java b/test/files/buildmanager/t3054/bar/Bar.java deleted file mode 100644 index e1b056d4e5..0000000000 --- a/test/files/buildmanager/t3054/bar/Bar.java +++ /dev/null @@ -1,7 +0,0 @@ -package bar; -import foo.Foo$; - - -public class Bar { - void bar() { Foo$.MODULE$.foo(); } -} diff --git a/test/files/buildmanager/t3054/foo/Foo.scala b/test/files/buildmanager/t3054/foo/Foo.scala deleted file mode 100644 index c0fcd97390..0000000000 --- a/test/files/buildmanager/t3054/foo/Foo.scala +++ /dev/null @@ -1,5 +0,0 @@ -package foo - -class Foo { - def foo() = println("foo") -} diff --git a/test/files/buildmanager/t3054/t3054.check b/test/files/buildmanager/t3054/t3054.check deleted file mode 100644 index 97cca8862e..0000000000 --- a/test/files/buildmanager/t3054/t3054.check +++ /dev/null @@ -1,3 +0,0 @@ -builder > bar/Bar.java foo/Foo.scala -compiling Set(bar/Bar.java, foo/Foo.scala) -Changes: Map() diff --git a/test/files/buildmanager/t3054/t3054.test b/test/files/buildmanager/t3054/t3054.test deleted file mode 100644 index 903df24b13..0000000000 --- a/test/files/buildmanager/t3054/t3054.test +++ /dev/null @@ -1 +0,0 @@ ->>compile bar/Bar.java foo/Foo.scala diff --git a/test/files/buildmanager/t3059/A.scala b/test/files/buildmanager/t3059/A.scala deleted file mode 100644 index 0dd25f6647..0000000000 --- a/test/files/buildmanager/t3059/A.scala +++ /dev/null @@ -1,4 +0,0 @@ -class A extends B { - private def getBar = List(1,2,3) - lazy 
val bar: List[Int] = getBar -} diff --git a/test/files/buildmanager/t3059/B.scala b/test/files/buildmanager/t3059/B.scala deleted file mode 100644 index 46596870ac..0000000000 --- a/test/files/buildmanager/t3059/B.scala +++ /dev/null @@ -1,4 +0,0 @@ -abstract class B { - private def getFoo = 12 - lazy val foo: Int = getFoo -} diff --git a/test/files/buildmanager/t3059/t3059.check b/test/files/buildmanager/t3059/t3059.check deleted file mode 100644 index 4a8076aae1..0000000000 --- a/test/files/buildmanager/t3059/t3059.check +++ /dev/null @@ -1,6 +0,0 @@ -builder > A.scala B.scala -compiling Set(A.scala, B.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List()) \ No newline at end of file diff --git a/test/files/buildmanager/t3059/t3059.test b/test/files/buildmanager/t3059/t3059.test deleted file mode 100644 index 6f3749dc4b..0000000000 --- a/test/files/buildmanager/t3059/t3059.test +++ /dev/null @@ -1,2 +0,0 @@ ->>compile A.scala B.scala ->>compile A.scala \ No newline at end of file diff --git a/test/files/buildmanager/t3133/A.java b/test/files/buildmanager/t3133/A.java deleted file mode 100644 index c4e7f3af0e..0000000000 --- a/test/files/buildmanager/t3133/A.java +++ /dev/null @@ -1,7 +0,0 @@ -public class A { - class Foo {} - - public A(Foo a) {} - - private void bar(Foo z) {} -} diff --git a/test/files/buildmanager/t3133/t3133.check b/test/files/buildmanager/t3133/t3133.check deleted file mode 100644 index 5e4e71e045..0000000000 --- a/test/files/buildmanager/t3133/t3133.check +++ /dev/null @@ -1,3 +0,0 @@ -builder > A.java -compiling Set(A.java) -Changes: Map() diff --git a/test/files/buildmanager/t3133/t3133.test b/test/files/buildmanager/t3133/t3133.test deleted file mode 100644 index 6cf7e35543..0000000000 --- a/test/files/buildmanager/t3133/t3133.test +++ /dev/null @@ -1 +0,0 @@ ->>compile A.java diff --git a/test/files/buildmanager/t3140/A.scala b/test/files/buildmanager/t3140/A.scala deleted file mode 100644 index f7768044d1..0000000000 --- a/test/files/buildmanager/t3140/A.scala +++ /dev/null @@ -1,8 +0,0 @@ -class As { - trait A { - def foo(parents: String): A = { - (() => parents) - null - } - } -} diff --git a/test/files/buildmanager/t3140/t3140.check b/test/files/buildmanager/t3140/t3140.check deleted file mode 100644 index 008d5a9618..0000000000 --- a/test/files/buildmanager/t3140/t3140.check +++ /dev/null @@ -1,6 +0,0 @@ -builder > A.scala -compiling Set(A.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class As -> List(), object As$A$class -> List(), trait As$A -> List()) diff --git a/test/files/buildmanager/t3140/t3140.test b/test/files/buildmanager/t3140/t3140.test deleted file mode 100644 index 392e0d365f..0000000000 --- a/test/files/buildmanager/t3140/t3140.test +++ /dev/null @@ -1,2 +0,0 @@ ->>compile A.scala ->>compile A.scala diff --git a/test/files/buildmanager/t4215/A.scala b/test/files/buildmanager/t4215/A.scala deleted file mode 100644 index 9db40b0fee..0000000000 --- a/test/files/buildmanager/t4215/A.scala +++ /dev/null @@ -1,5 +0,0 @@ -class A { - def B() { - object C - } -} diff --git a/test/files/buildmanager/t4215/t4215.check b/test/files/buildmanager/t4215/t4215.check deleted file mode 100644 index d9ec9a743a..0000000000 --- a/test/files/buildmanager/t4215/t4215.check +++ /dev/null @@ -1,6 +0,0 @@ -builder > A.scala -compiling Set(A.scala) -Changes: Map() -builder > A.scala -compiling Set(A.scala) -Changes: Map(class A -> List(), object A$C$2 -> List()) diff --git 
a/test/files/buildmanager/t4215/t4215.test b/test/files/buildmanager/t4215/t4215.test deleted file mode 100644 index 392e0d365f..0000000000 --- a/test/files/buildmanager/t4215/t4215.test +++ /dev/null @@ -1,2 +0,0 @@ ->>compile A.scala ->>compile A.scala -- cgit v1.2.3 From 58bfa19332c4aac8b7250d5866cfb153ae78c9ad Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 12 Jan 2013 17:29:54 +0100 Subject: SI-6966 Fix regression in implicit resolution Reverts this line: 9c09c17#L50L671. That value was apparantly discarded intentionally. --- .../scala/tools/nsc/typechecker/Implicits.scala | 8 ++++++-- test/files/pos/t6966.scala | 17 +++++++++++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) create mode 100644 test/files/pos/t6966.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index ed1e6d01e8..e435717839 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -668,7 +668,11 @@ trait Implicits { // duplicating the code here, but this is probably a // hotspot (and you can't just call typed, need to force // re-typecheck) - val checked = itree2 match { + // + // This is just called for the side effect of error detection, + // see SI-6966 to see what goes wrong if we use the result of this + // as the SearchResult. + itree2 match { case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args) case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c case t => t @@ -677,7 +681,7 @@ trait Implicits { if (context.hasErrors) fail("typing TypeApply reported errors for the implicit tree: " + context.errBuffer.head.errMsg) else { - val result = new SearchResult(checked, subst) + val result = new SearchResult(itree2, subst) if (Statistics.canEnable) Statistics.incCounter(foundImplicits) printInference("[success] found %s for pt %s".format(result, ptInstantiated)) result diff --git a/test/files/pos/t6966.scala b/test/files/pos/t6966.scala new file mode 100644 index 0000000000..23adc6d0d2 --- /dev/null +++ b/test/files/pos/t6966.scala @@ -0,0 +1,17 @@ +import Ordering.{Byte, comparatorToOrdering} +trait Format[T] +trait InputCache[T] +object CacheIvy { + implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] = null + implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] = null + implicit def hNilCache: InputCache[HNil] = null + implicit def ByteArrayFormat: Format[Array[Byte]] = null + type :+:[H, T <: HList] = HCons[H,T] + implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] = null + hConsCache[Array[Byte], HNil] +} + +sealed trait HList +sealed trait HNil extends HList +object HNil extends HNil +final class HCons[H, T <: HList](head : H, tail : T) extends HList \ No newline at end of file -- cgit v1.2.3 From 78bc17b0a424647a2275303fe924234cf92e3271 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 14 Jan 2013 17:13:53 +0100 Subject: Remove EqualsPatternClass. Detritus from the classic pattern matcher, which was recently removed 6084d2d9. 
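[Context sketch, not part of the patch: judging from the code removed below, EqualsPatternClass appears to have been the internal marker the classic pattern matcher used to type patterns that match by equality (stable identifiers) rather than by a type test. The snippet is an invented illustration of that kind of source pattern; all names in it are hypothetical.]

    object EqualityPatternDemo {
      case object Stop
      def handle(msg: Any): String = msg match {
        case Stop      => "equality pattern: compared with == against the Stop object"
        case _: String => "type-test pattern: matched by runtime class"
        case other     => "anything else: " + other
      }
    }

[With the classic matcher already removed in 6084d2d9, nothing wraps pattern types this way any more, so the symbol and its name constant can be deleted.]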
--- src/reflect/scala/reflect/internal/Definitions.scala | 4 +--- src/reflect/scala/reflect/internal/StdNames.scala | 1 - 2 files changed, 1 insertion(+), 4 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index d165f66004..eb71574022 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -401,7 +401,6 @@ trait Definitions extends api.StandardDefinitions { lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException] lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyClass.tpe) - lazy val EqualsPatternClass = specialPolyClass(tpnme.EQUALS_PATTERN_NAME, 0L)(_ => AnyClass.tpe) lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.tpe)) lazy val RepeatedParamClass = specialPolyClass(tpnme.REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => seqType(tparam.tpe)) @@ -1060,8 +1059,7 @@ trait Definitions extends api.StandardDefinitions { AnyValClass, NullClass, NothingClass, - SingletonClass, - EqualsPatternClass + SingletonClass ) /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ lazy val syntheticCoreMethods = List( diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 10dd2c82aa..3d1701386e 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -200,7 +200,6 @@ trait StdNames { protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name) final val BYNAME_PARAM_CLASS_NAME: NameType = "" - final val EQUALS_PATTERN_NAME: NameType = "" final val JAVA_REPEATED_PARAM_CLASS_NAME: NameType = "" final val LOCAL_CHILD: NameType = "" final val REFINE_CLASS_NAME: NameType = "" -- cgit v1.2.3 From ebdc0ff73b533d2b625a919d0bb57dbe94430c02 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 14:29:41 -0800 Subject: Cleaned up meta-annotations. There were some missing and some others not added to the set of meta-annotations. I added the missing ones and defined the set in such a way as to include them all, even those which have not yet been born. --- .../scala/reflect/internal/Definitions.scala | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index eb71574022..edd295aa65 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -901,6 +901,7 @@ trait Definitions extends api.StandardDefinitions { lazy val GetterTargetClass = requiredClass[meta.getter] lazy val ParamTargetClass = requiredClass[meta.param] lazy val SetterTargetClass = requiredClass[meta.setter] + lazy val ObjectTargetClass = requiredClass[meta.companionObject] lazy val ClassTargetClass = requiredClass[meta.companionClass] lazy val MethodTargetClass = requiredClass[meta.companionMethod] // TODO: module, moduleClass? package, packageObject? 
lazy val LanguageFeatureAnnot = requiredClass[meta.languageFeature] @@ -920,11 +921,21 @@ trait Definitions extends api.StandardDefinitions { // Trying to allow for deprecated locations sym.isAliasType && isMetaAnnotation(sym.info.typeSymbol) ) - lazy val metaAnnotations = Set[Symbol]( - FieldTargetClass, ParamTargetClass, - GetterTargetClass, SetterTargetClass, - BeanGetterTargetClass, BeanSetterTargetClass - ) + lazy val metaAnnotations: Set[Symbol] = getPackage("scala.annotation.meta").info.members filter (_ isSubClass StaticAnnotationClass) toSet + + // According to the scala.annotation.meta package object: + // * By default, annotations on (`val`-, `var`- or plain) constructor parameters + // * end up on the parameter, not on any other entity. Annotations on fields + // * by default only end up on the field. + def defaultAnnotationTarget(t: Tree): Symbol = t match { + case ClassDef(_, _, _, _) => ClassTargetClass + case ModuleDef(_, _, _) => ObjectTargetClass + case vd @ ValDef(_, _, _, _) if vd.symbol.isParamAccessor => ParamTargetClass + case vd @ ValDef(_, _, _, _) if vd.symbol.isValueParameter => ParamTargetClass + case ValDef(_, _, _, _) => FieldTargetClass + case DefDef(_, _, _, _, _, _) => MethodTargetClass + case _ => GetterTargetClass + } lazy val AnnotationDefaultAttr: ClassSymbol = { val attr = enterNewClass(RuntimePackageClass, tpnme.AnnotationDefaultATTR, List(AnnotationClass.tpe)) -- cgit v1.2.3 From 61f70e48cecddf27fba162c165dfaf712c84278c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 14:30:57 -0800 Subject: SI-6375, warn on lost annotation. Annotations on abstract vals which are not meta-annotated were silently discarded. Still discarded, only less silently. I warned on as many "lost annotation" situations as I was reasonably able to cover without false positives. --- .../tools/nsc/typechecker/MethodSynthesis.scala | 35 ++++++++++- test/files/neg/t6375.check | 27 +++++++++ test/files/neg/t6375.flags | 1 + test/files/neg/t6375.scala | 67 ++++++++++++++++++++++ 4 files changed, 128 insertions(+), 2 deletions(-) create mode 100644 test/files/neg/t6375.check create mode 100644 test/files/neg/t6375.flags create mode 100644 test/files/neg/t6375.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index d74d5ecfbe..438c783810 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -126,7 +126,7 @@ trait MethodSynthesis { /** There are two key methods in here. * - * 1) Enter methods such as enterGetterSetterare called + * 1) Enter methods such as enterGetterSetter are called * from Namer with a tree which may generate further trees such as accessors or * implicit wrappers. Some setup is performed. In general this creates symbols * and enters them into the scope of the owner. @@ -171,14 +171,45 @@ trait MethodSynthesis { enterBeans(tree) } + /** This is called for those ValDefs which addDerivedTrees ignores, but + * which might have a warnable annotation situation. 
+ */ + private def warnForDroppedAnnotations(tree: Tree) { + val annotations = tree.symbol.initialize.annotations + val targetClass = defaultAnnotationTarget(tree) + val retained = deriveAnnotations(annotations, targetClass, keepClean = true) + + annotations filterNot (retained contains _) foreach (ann => issueAnnotationWarning(ann, targetClass)) + } + private def issueAnnotationWarning(ann: AnnotationInfo, defaultTarget: Symbol) { + global.reporter.warning(ann.pos, + s"Annotation is unused - it can be retained with a meta-annotation such as @($ann @${defaultTarget.name})") + } + def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match { case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) => // If we don't save the annotations, they seem to wander off. val annotations = stat.symbol.initialize.annotations - ( allValDefDerived(vd) + val trees = ( + allValDefDerived(vd) map (acc => atPos(vd.pos.focus)(acc derive annotations)) filterNot (_ eq EmptyTree) ) + // Verify each annotation landed safely somewhere, else warn. + // Filtering when isParamAccessor is a necessary simplification + // because there's a bunch of unwritten annotation code involving + // the propagation of annotations - constructor parameter annotations + // may need to make their way to parameters of the constructor as + // well as fields of the class, etc. + if (!mods.isParamAccessor) annotations foreach (ann => + if (!trees.exists(_.symbol hasAnnotation ann.symbol)) + issueAnnotationWarning(ann, GetterTargetClass) + ) + + trees + case vd: ValDef => + warnForDroppedAnnotations(vd) + vd :: Nil case cd @ ClassDef(mods, _, _, _) if mods.isImplicit => val annotations = stat.symbol.initialize.annotations // TODO: need to shuffle annotations between wrapper and class. diff --git a/test/files/neg/t6375.check b/test/files/neg/t6375.check new file mode 100644 index 0000000000..b94a067cbb --- /dev/null +++ b/test/files/neg/t6375.check @@ -0,0 +1,27 @@ +t6375.scala:6: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @getter) + @Bippy val x1: Int // warn + ^ +t6375.scala:7: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.field @getter) + @(Bippy @field) val x2: Int // warn + ^ +t6375.scala:9: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.setter @getter) + @(Bippy @setter) val x4: Int // warn + ^ +t6375.scala:10: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.param @getter) + @(Bippy @param) val x5: Int // warn + ^ +t6375.scala:20: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.getter @field) + @(Bippy @getter) private[this] val q1: Int = 1 // warn + ^ +t6375.scala:40: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.getter @param) + @(Bippy @getter) p2: Int, // warn + ^ +t6375.scala:41: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.setter @param) + @(Bippy @setter) p3: Int, // warn + ^ +t6375.scala:42: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.field @param) + @(Bippy @field) p4: Int // warn + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+8 warnings found +one error found diff --git a/test/files/neg/t6375.flags b/test/files/neg/t6375.flags new file mode 100644 index 0000000000..85d8eb2ba2 --- /dev/null +++ b/test/files/neg/t6375.flags @@ -0,0 +1 @@ +-Xfatal-warnings diff --git a/test/files/neg/t6375.scala b/test/files/neg/t6375.scala new file mode 100644 index 0000000000..21634df688 --- /dev/null +++ b/test/files/neg/t6375.scala @@ -0,0 +1,67 @@ +import scala.annotation.meta._ + +class Bippy extends scala.annotation.StaticAnnotation + +abstract class Foo { + @Bippy val x1: Int // warn + @(Bippy @field) val x2: Int // warn + @(Bippy @getter) val x3: Int // no warn + @(Bippy @setter) val x4: Int // warn + @(Bippy @param) val x5: Int // warn +} + +object Bar extends Foo { + val x1 = 1 + val x2 = 2 + val x3 = 3 + val x4 = 4 + val x5 = 5 + + @(Bippy @getter) private[this] val q1: Int = 1 // warn + @(Bippy @getter) private val q2: Int = 1 // no warn + + def f1(@(Bippy @param) x: Int): Int = 0 // no warn + def f2(@(Bippy @getter) x: Int): Int = 0 // warn - todo + def f3(@(Bippy @setter) x: Int): Int = 0 // warn - todo + def f4(@(Bippy @field) x: Int): Int = 0 // warn - todo + def f5(@Bippy x: Int): Int = 0 // no warn + + @(Bippy @companionClass) def g1(x: Int): Int = 0 // warn - todo + @(Bippy @companionObject) def g2(x: Int): Int = 0 // warn - todo + @(Bippy @companionMethod) def g3(x: Int): Int = 0 // no warn + @Bippy def g4(x: Int): Int = 0 // no warn + + @(Bippy @companionObject @companionMethod) def g5(x: Int): Int = 0 // no warn +} + +class Dingo( + @Bippy p0: Int, // no warn + @(Bippy @param) p1: Int, // no warn + @(Bippy @getter) p2: Int, // warn + @(Bippy @setter) p3: Int, // warn + @(Bippy @field) p4: Int // warn +) + +class ValDingo( + @Bippy val p0: Int, // no warn + @(Bippy @param) val p1: Int, // no warn + @(Bippy @getter) val p2: Int, // no warn + @(Bippy @setter) val p3: Int, // warn - todo + @(Bippy @field) val p4: Int // no warn +) + +class VarDingo( + @Bippy var p0: Int, // no warn + @(Bippy @param) var p1: Int, // no warn + @(Bippy @getter) var p2: Int, // no warn + @(Bippy @setter) var p3: Int, // no warn + @(Bippy @field) var p4: Int // no warn +) + +case class CaseDingo( + @Bippy p0: Int, // no warn + @(Bippy @param) p1: Int, // no warn + @(Bippy @getter) p2: Int, // no warn + @(Bippy @setter) p3: Int, // warn - todo + @(Bippy @field) p4: Int // no warn +) -- cgit v1.2.3 From bd4bfface0581041d27c5d243723e39dd99c28fc Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 9 Jan 2013 14:44:47 -0800 Subject: SI-5189 detect unsoundness when inferring type of match GADT skolems encode type slack that results from pattern matching on variant type constructors I thought they would not longer be relevant after cases have been typed, and since they caused weird issues with the old pattern matcher, I deskolemized in typedCase however, when we don't have an expected type for the match, we need to keep the skolems around until the skolemized type makes it out of the match and it becomes the result of type inference for that match when you do have an expected type, it will propagate to the case-level and the confrontation will thus already take place when typing individual cases --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 11 +++++------ test/files/neg/t5189_inferred.check | 6 ++++++ test/files/neg/t5189_inferred.scala | 8 ++++++++ 3 files changed, 19 insertions(+), 6 deletions(-) create mode 100644 test/files/neg/t5189_inferred.check create mode 100644 test/files/neg/t5189_inferred.scala (limited to 
'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 96a93e2a20..57053e81ce 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2433,11 +2433,7 @@ trait Typers extends Adaptations with Tags { } // body1 = checkNoEscaping.locals(context.scope, pt, body1) - val treeWithSkolems = treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe - - new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(treeWithSkolems) - - treeWithSkolems // now without skolems, actually + treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe } // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher @@ -2470,7 +2466,10 @@ trait Typers extends Adaptations with Tags { val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp)) - treeCopy.Match(tree, selector1, casesAdapted) setType resTp + val matchTyped = treeCopy.Match(tree, selector1, casesAdapted) setType resTp + if (!newPatternMatching) // TODO: remove this in 2.11 -- only needed for old pattern matcher + new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(matchTyped) + matchTyped } // match has been typed -- virtualize it during type checking so the full context is available diff --git a/test/files/neg/t5189_inferred.check b/test/files/neg/t5189_inferred.check new file mode 100644 index 0000000000..9cc5dcc242 --- /dev/null +++ b/test/files/neg/t5189_inferred.check @@ -0,0 +1,6 @@ +t5189_inferred.scala:7: error: type mismatch; + found : scala.collection.immutable.Nil.type + required: ?A1 where type ?A1 + f(Invariant(arr): Covariant[Any])(0) = Nil + ^ +one error found diff --git a/test/files/neg/t5189_inferred.scala b/test/files/neg/t5189_inferred.scala new file mode 100644 index 0000000000..e4e8765445 --- /dev/null +++ b/test/files/neg/t5189_inferred.scala @@ -0,0 +1,8 @@ +trait Covariant[+A] +case class Invariant[A](xs: Array[A]) extends Covariant[A] + +class Test { + val arr = Array("abc") + def f[A](v: Covariant[A]) /*inferred!*/ = v match { case Invariant(xs) => xs } + f(Invariant(arr): Covariant[Any])(0) = Nil +} \ No newline at end of file -- cgit v1.2.3 From fdca5081fd5e9bdefba04ab015ba7f81f79bf6b9 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 14 Jan 2013 15:43:43 -0800 Subject: remove hack for old patmat unnecessary in 2.11 --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 18 ++---------------- 1 file changed, 2 insertions(+), 16 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 57053e81ce..20907979e9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2436,16 +2436,6 @@ trait Typers extends Adaptations with Tags { treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe } - // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher - // the flags are used to avoid accidentally deskolemizing unrelated skolems of skolems - object deskolemizeGADTSkolems extends TypeMap { - def apply(tp: Type): Type = mapOver(tp) match { - case TypeRef(pre, sym, args) if sym.isGADTSkolem => - typeRef(NoPrefix, sym.deSkolemize, args) - case tp1 => tp1 - } - } - def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] = cases mapConserve { cdef => newTyper(context.makeNewScope(cdef, 
context.owner)).typedCase(cdef, pattp, pt) @@ -2466,10 +2456,7 @@ trait Typers extends Adaptations with Tags { val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp)) - val matchTyped = treeCopy.Match(tree, selector1, casesAdapted) setType resTp - if (!newPatternMatching) // TODO: remove this in 2.11 -- only needed for old pattern matcher - new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(matchTyped) - matchTyped + treeCopy.Match(tree, selector1, casesAdapted) setType resTp } // match has been typed -- virtualize it during type checking so the full context is available @@ -2491,12 +2478,11 @@ trait Typers extends Adaptations with Tags { // Match(EmptyTree, cases) ==> new PartialFunction { def apply(params) = `translateMatch('`(param1,...,paramN)` match { cases }')` } // for fresh params, the selector of the match we'll translated simply gathers those in a tuple // NOTE: restricted to PartialFunction -- leave Function trees if the expected type does not demand a partial function - class MatchFunTyper(tree: Tree, cases: List[CaseDef], mode: Mode, pt0: Type) { + class MatchFunTyper(tree: Tree, cases: List[CaseDef], mode: Mode, pt: Type) { // TODO: remove FunctionN support -- this is currently designed so that it can emit FunctionN and PartialFunction subclasses // however, we should leave Function nodes until Uncurry so phases after typer can still detect normal Function trees // we need to synthesize PartialFunction impls, though, to avoid nastiness in Uncurry in transforming&duplicating generated pattern matcher trees // TODO: remove PartialFunction support from UnCurry - private val pt = deskolemizeGADTSkolems(pt0) private val targs = pt.normalize.typeArgs private val arity = if (isFunctionType(pt)) targs.length - 1 else 1 // TODO pt should always be a (Partial)Function, right? private val ptRes = if (targs.isEmpty) WildcardType else targs.last // may not be fully defined -- cgit v1.2.3 From f98ccad8d59c6e63a7e4e984f18c3e2b39ed0b68 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 15:41:20 -0800 Subject: Tweaked meta-annotation error based on feedback. --- .../scala/tools/nsc/typechecker/MethodSynthesis.scala | 9 +++++---- test/files/neg/t6375.check | 16 ++++++++-------- 2 files changed, 13 insertions(+), 12 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 438c783810..b226591c8d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -179,11 +179,12 @@ trait MethodSynthesis { val targetClass = defaultAnnotationTarget(tree) val retained = deriveAnnotations(annotations, targetClass, keepClean = true) - annotations filterNot (retained contains _) foreach (ann => issueAnnotationWarning(ann, targetClass)) + annotations filterNot (retained contains _) foreach (ann => issueAnnotationWarning(tree, ann, targetClass)) } - private def issueAnnotationWarning(ann: AnnotationInfo, defaultTarget: Symbol) { + private def issueAnnotationWarning(tree: Tree, ann: AnnotationInfo, defaultTarget: Symbol) { global.reporter.warning(ann.pos, - s"Annotation is unused - it can be retained with a meta-annotation such as @($ann @${defaultTarget.name})") + s"no valid targets for annotation on ${tree.symbol} - it is discarded unused. " + + s"You may specify targets with meta-annotations, e.g. 
@($ann @${defaultTarget.name})") } def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match { @@ -203,7 +204,7 @@ trait MethodSynthesis { // well as fields of the class, etc. if (!mods.isParamAccessor) annotations foreach (ann => if (!trees.exists(_.symbol hasAnnotation ann.symbol)) - issueAnnotationWarning(ann, GetterTargetClass) + issueAnnotationWarning(vd, ann, GetterTargetClass) ) trees diff --git a/test/files/neg/t6375.check b/test/files/neg/t6375.check index b94a067cbb..89d7d8060f 100644 --- a/test/files/neg/t6375.check +++ b/test/files/neg/t6375.check @@ -1,25 +1,25 @@ -t6375.scala:6: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @getter) +t6375.scala:6: warning: no valid targets for annotation on value x1 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @getter) @Bippy val x1: Int // warn ^ -t6375.scala:7: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.field @getter) +t6375.scala:7: warning: no valid targets for annotation on value x2 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.field @getter) @(Bippy @field) val x2: Int // warn ^ -t6375.scala:9: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.setter @getter) +t6375.scala:9: warning: no valid targets for annotation on value x4 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.setter @getter) @(Bippy @setter) val x4: Int // warn ^ -t6375.scala:10: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.param @getter) +t6375.scala:10: warning: no valid targets for annotation on value x5 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.param @getter) @(Bippy @param) val x5: Int // warn ^ -t6375.scala:20: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.getter @field) +t6375.scala:20: warning: no valid targets for annotation on value q1 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.getter @field) @(Bippy @getter) private[this] val q1: Int = 1 // warn ^ -t6375.scala:40: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.getter @param) +t6375.scala:40: warning: no valid targets for annotation on value p2 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.getter @param) @(Bippy @getter) p2: Int, // warn ^ -t6375.scala:41: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.setter @param) +t6375.scala:41: warning: no valid targets for annotation on value p3 - it is discarded unused. You may specify targets with meta-annotations, e.g. @(Bippy @scala.annotation.meta.setter @param) @(Bippy @setter) p3: Int, // warn ^ -t6375.scala:42: warning: Annotation is unused - it can be retained with a meta-annotation such as @(Bippy @scala.annotation.meta.field @param) +t6375.scala:42: warning: no valid targets for annotation on value p4 - it is discarded unused. You may specify targets with meta-annotations, e.g. 
@(Bippy @scala.annotation.meta.field @param) @(Bippy @field) p4: Int // warn ^ error: No warnings can be incurred under -Xfatal-warnings. -- cgit v1.2.3 From 76b92ef78d3cb68d7e517bb4611efff45955f1e9 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 23:26:16 -0800 Subject: Modifies "maybeRewrap" to focus more on the maybe. Existential types are rewrapped under a bunch of conditions unless the operation performed on the underlying type returns the same type by reference equality. That depends on a foundation of predictability which doesn't exist. The upshot is that existential types were rewrapped with abandon, even when the type were identical. This had both performance and correctness implications. Note where the test case output changes like so: -scala.collection.immutable.List[Any] +scala.collection.immutable.List[] That's correctness. --- src/reflect/scala/reflect/internal/Types.scala | 9 +++++++- test/files/run/t6329_repl.check | 32 +++++++++++++++++++++++--- test/files/run/t6329_repl.scala | 13 ++++++++--- test/files/run/t6329_vanilla.check | 8 ++++++- test/files/run/t6329_vanilla.scala | 14 ++++++++--- 5 files changed, 65 insertions(+), 11 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 9d0d38913c..1f2f86c46b 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -234,7 +234,14 @@ trait Types extends api.Types { self: SymbolTable => * forwarded here. Some operations are rewrapped again. */ trait RewrappingTypeProxy extends SimpleTypeProxy { - protected def maybeRewrap(newtp: Type) = if (newtp eq underlying) this else rewrap(newtp) + protected def maybeRewrap(newtp: Type) = ( + if (newtp eq underlying) this + // BoundedWildcardTypes reach here during erroneous compilation: neg/t6258 + // Higher-kinded exclusion is because [x]CC[x] compares =:= to CC: pos/t3800 + // Otherwise, if newtp =:= underlying, don't rewrap it. + else if (!newtp.isWildcard && !newtp.isHigherKinded && (newtp =:= underlying)) this + else rewrap(newtp) + ) protected def rewrap(newtp: Type): Type // the following are all operations in class Type that are overridden in some subclass diff --git a/test/files/run/t6329_repl.check b/test/files/run/t6329_repl.check index 8663184bde..55d689f2fb 100644 --- a/test/files/run/t6329_repl.check +++ b/test/files/run/t6329_repl.check @@ -3,11 +3,37 @@ Type :help for more information. 
scala> -scala> classManifest[List[_]] +scala> import scala.reflect.classTag +import scala.reflect.classTag + +scala> classManifest[scala.List[_]] warning: there were 1 deprecation warnings; re-run with -deprecation for details -res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[Any] +res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[] -scala> scala.reflect.classTag[List[_]] +scala> classTag[scala.List[_]] res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List +scala> classManifest[scala.collection.immutable.List[_]] +warning: there were 1 deprecation warnings; re-run with -deprecation for details +res2: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[] + +scala> classTag[scala.collection.immutable.List[_]] +res3: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List + +scala> classManifest[Predef.Set[_]] +warning: there were 1 deprecation warnings; re-run with -deprecation for details +res4: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[] + +scala> classTag[Predef.Set[_]] +res5: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set + +scala> classManifest[scala.collection.immutable.Set[_]] +warning: there were 1 deprecation warnings; re-run with -deprecation for details +res6: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[] + +scala> classTag[scala.collection.immutable.Set[_]] +res7: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set + +scala> + scala> diff --git a/test/files/run/t6329_repl.scala b/test/files/run/t6329_repl.scala index add6d64962..f210d6512c 100644 --- a/test/files/run/t6329_repl.scala +++ b/test/files/run/t6329_repl.scala @@ -2,7 +2,14 @@ import scala.tools.partest.ReplTest object Test extends ReplTest { def code = """ - |classManifest[List[_]] - |scala.reflect.classTag[List[_]] - |""".stripMargin + |import scala.reflect.classTag + |classManifest[scala.List[_]] + |classTag[scala.List[_]] + |classManifest[scala.collection.immutable.List[_]] + |classTag[scala.collection.immutable.List[_]] + |classManifest[Predef.Set[_]] + |classTag[Predef.Set[_]] + |classManifest[scala.collection.immutable.Set[_]] + |classTag[scala.collection.immutable.Set[_]] + """.stripMargin } diff --git a/test/files/run/t6329_vanilla.check b/test/files/run/t6329_vanilla.check index 8282afaeba..ad8f4b5c77 100644 --- a/test/files/run/t6329_vanilla.check +++ b/test/files/run/t6329_vanilla.check @@ -1,2 +1,8 @@ -scala.collection.immutable.List[Any] +scala.collection.immutable.List[] scala.collection.immutable.List +scala.collection.immutable.List[] +scala.collection.immutable.List +scala.collection.immutable.Set[] +scala.collection.immutable.Set +scala.collection.immutable.Set[] +scala.collection.immutable.Set diff --git a/test/files/run/t6329_vanilla.scala b/test/files/run/t6329_vanilla.scala index a31cd5c72e..f2d843896d 100644 --- a/test/files/run/t6329_vanilla.scala +++ b/test/files/run/t6329_vanilla.scala @@ -1,4 +1,12 @@ +import scala.reflect.classTag + object Test extends App { - println(classManifest[List[_]]) - println(scala.reflect.classTag[List[_]]) -} \ No newline at end of file + println(classManifest[scala.List[_]]) + println(classTag[scala.List[_]]) + println(classManifest[scala.collection.immutable.List[_]]) + println(classTag[scala.collection.immutable.List[_]]) + println(classManifest[Predef.Set[_]]) + println(classTag[Predef.Set[_]]) + 
println(classManifest[scala.collection.immutable.Set[_]]) + println(classTag[scala.collection.immutable.Set[_]]) +} -- cgit v1.2.3 From aedec19808a7a3d383f839d2ee2c2ec4265fb9c6 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 23:31:30 -0800 Subject: Granted scaladoc its own Global. An incremental step on the road to disentangling scaladoc from the compiler sources. This pushes the elements in typer outward so scaladoc can subclass Global like everyone else. --- src/compiler/scala/tools/nsc/doc/DocFactory.scala | 10 +- .../scala/tools/nsc/doc/ScaladocGlobal.scala | 105 +++++++++++++++++++++ .../scala/tools/nsc/typechecker/Typers.scala | 65 +------------ 3 files changed, 111 insertions(+), 69 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala index 77e53bd90b..a99b17dce4 100644 --- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala @@ -31,15 +31,7 @@ import scala.reflect.internal.util.BatchSourceFile * @author Gilles Dubochet */ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor => /** The unique compiler instance used by this processor and constructed from its `settings`. */ - object compiler extends Global(settings, reporter) with interactive.RangePositions { - override protected def computeInternalPhases() { - phasesSet += syntaxAnalyzer - phasesSet += analyzer.namerFactory - phasesSet += analyzer.packageObjects - phasesSet += analyzer.typerFactory - } - override def forScaladoc = true - } + object compiler extends ScaladocGlobal(settings, reporter) /** Creates a scaladoc site for all symbols defined in this call's `source`, * as well as those defined in `sources` of previous calls to the same processor. 
diff --git a/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala new file mode 100644 index 0000000000..5e68152936 --- /dev/null +++ b/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala @@ -0,0 +1,105 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc +package doc + +import scala.util.control.ControlThrowable +import reporters.Reporter +import typechecker.Analyzer +import scala.reflect.internal.util.BatchSourceFile + +trait ScaladocAnalyzer extends Analyzer { + val global : ScaladocGlobal + import global._ + + override def newTyper(context: Context): ScaladocTyper = new ScaladocTyper(context) + + class ScaladocTyper(context0: Context) extends Typer(context0) { + private def unit = context.unit + + override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = { + val sym = docDef.symbol + + if ((sym ne null) && (sym ne NoSymbol)) { + val comment = docDef.comment + docComments(sym) = comment + comment.defineVariables(sym) + val typer1 = newTyper(context.makeNewScope(docDef, context.owner)) + for (useCase <- comment.useCases) { + typer1.silent(_ => typer1 defineUseCases useCase) match { + case SilentTypeError(err) => + unit.warning(useCase.pos, err.errMsg) + case _ => + } + for (useCaseSym <- useCase.defined) { + if (sym.name != useCaseSym.name) + unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode) + } + } + } + + super.typedDocDef(docDef, mode, pt) + } + + def defineUseCases(useCase: UseCase): List[Symbol] = { + def stringParser(str: String): syntaxAnalyzer.Parser = { + val file = new BatchSourceFile(context.unit.source.file, str) { + override def positionInUltimateSource(pos: Position) = { + pos.withSource(context.unit.source, useCase.pos.start) + } + } + val unit = new CompilationUnit(file) + new syntaxAnalyzer.UnitParser(unit) + } + + val trees = stringParser(useCase.body+";").nonLocalDefOrDcl + val enclClass = context.enclClass.owner + + def defineAlias(name: Name) = ( + if (context.scope.lookup(name) == NoSymbol) { + lookupVariable(name.toString.substring(1), enclClass) foreach { repl => + silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt => + val alias = enclClass.newAliasType(name.toTypeName, useCase.pos) + val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias) + val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe))) + alias setInfo newInfo + context.scope.enter(alias) + } + } + } + ) + + for (tree <- trees; t <- tree) + t match { + case Ident(name) if name startsWith '$' => defineAlias(name) + case _ => + } + + useCase.aliases = context.scope.toList + namer.enterSyms(trees) + typedStats(trees, NoSymbol) + useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) + + if (settings.debug.value) + useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) + + useCase.defined + } + } +} + +class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends Global(settings, reporter) with interactive.RangePositions { + override protected def computeInternalPhases() { + phasesSet += syntaxAnalyzer + phasesSet += analyzer.namerFactory + phasesSet += analyzer.packageObjects + phasesSet += analyzer.typerFactory + } + override def forScaladoc = true + override lazy val analyzer = new { + val global: ScaladocGlobal.this.type = ScaladocGlobal.this + } with ScaladocAnalyzer +} 
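[Context sketch, not part of the patch: the defineUseCases method moved into ScaladocTyper above parses the simplified signature written in a doc comment's @usecase tag and type-checks it in the context of the enclosing class. The snippet is an invented example of the kind of comment it processes; the class and method names are hypothetical.]

    class Buffer[A] {
      /** Appends the given elements to this buffer.
       *
       *  @usecase def append(elem: A): Unit
       *    Appends a single element to this buffer.
       */
      def append(elems: A*): Unit = ???
    }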
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 20907979e9..61ef13cfa9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -80,6 +80,7 @@ trait Typers extends Adaptations with Tags { case class SilentResultValue[+T](value: T) extends SilentResult[T] { } def newTyper(context: Context): Typer = new NormalTyper(context) + private class NormalTyper(context : Context) extends Typer(context) // A transient flag to mark members of anonymous classes @@ -105,6 +106,9 @@ trait Typers extends Adaptations with Tags { import typeDebug.{ ptTree, ptBlock, ptLine } import TyperErrorGen._ + def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = + typed(docDef.definition, mode, pt) + val infer = new Inferencer(context0) { override def isCoercible(tp: Type, pt: Type): Boolean = undoLog undo { // #3281 tp.isError || pt.isError || @@ -2135,43 +2139,6 @@ trait Typers extends Adaptations with Tags { failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type") } - def typedUseCase(useCase: UseCase) { - def stringParser(str: String): syntaxAnalyzer.Parser = { - val file = new BatchSourceFile(context.unit.source.file, str) { - override def positionInUltimateSource(pos: Position) = { - pos.withSource(context.unit.source, useCase.pos.start) - } - } - val unit = new CompilationUnit(file) - new syntaxAnalyzer.UnitParser(unit) - } - val trees = stringParser(useCase.body+";").nonLocalDefOrDcl - val enclClass = context.enclClass.owner - def defineAlias(name: Name) = - if (context.scope.lookup(name) == NoSymbol) { - lookupVariable(name.toString.substring(1), enclClass) foreach { repl => - silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt => - val alias = enclClass.newAliasType(name.toTypeName, useCase.pos) - val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias) - val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe))) - alias setInfo newInfo - context.scope.enter(alias) - } - } - } - for (tree <- trees; t <- tree) - t match { - case Ident(name) if name startsWith '$' => defineAlias(name) - case _ => - } - useCase.aliases = context.scope.toList - namer.enterSyms(trees) - typedStats(trees, NoSymbol) - useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) - if (settings.debug.value) - useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) - } - def typedDefDef(ddef: DefDef): DefDef = { val meth = ddef.symbol.initialize @@ -4875,28 +4842,6 @@ trait Typers extends Adaptations with Tags { .typedStats(pdef.stats, NoSymbol) treeCopy.PackageDef(tree, pid1, stats1) setType NoType } - - def typedDocDef(docdef: DocDef) = { - if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) { - val comment = docdef.comment - docComments(sym) = comment - comment.defineVariables(sym) - val typer1 = newTyper(context.makeNewScope(tree, context.owner)) - for (useCase <- comment.useCases) { - typer1.silent(_.typedUseCase(useCase)) match { - case SilentTypeError(err) => - unit.warning(useCase.pos, err.errMsg) - case _ => - } - for (useCaseSym <- useCase.defined) { - if (sym.name != useCaseSym.name) - unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode) - } - } - } - typed(docdef.definition, mode, pt) - } - def defDefTyper(ddef: DefDef) = { val flag = ddef.mods.hasDefaultFlag && 
sym.owner.isModuleClass && nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR @@ -5154,7 +5099,7 @@ trait Typers extends Adaptations with Tags { case tree: TypeDef => typedTypeDef(tree) case tree: LabelDef => labelTyper(tree).typedLabelDef(tree) case tree: PackageDef => typedPackageDef(tree) - case tree: DocDef => typedDocDef(tree) + case tree: DocDef => typedDocDef(tree, mode, pt) case tree: Annotated => typedAnnotated(tree) case tree: SingletonTypeTree => typedSingletonTypeTree(tree) case tree: SelectFromTypeTree => typedSelectFromTypeTree(tree) -- cgit v1.2.3 From decc9a9f0399d7613017db747732d251fe129236 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 16 Jan 2013 12:29:16 +0100 Subject: SI-6979 Small optimization in lub If a member of `lubBase` is final, it cannot be refined in the types we're lubbing. --- src/reflect/scala/reflect/internal/Types.scala | 1 + 1 file changed, 1 insertion(+) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 9d0d38913c..1ef983c1c9 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -6437,6 +6437,7 @@ trait Types extends api.Types { self: SymbolTable => || sym.isConstructor || !sym.isPublic || isGetClass(sym) + || sym.isFinal || narrowts.exists(t => !refines(t, sym)) ) def lubsym(proto: Symbol): Symbol = { -- cgit v1.2.3 From 4805b97b62b00b39fee55ee9855ae8c046f5d621 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 17 Jan 2013 19:59:30 +0100 Subject: SI-6811 Remove scala.annotation.serializable Every usage of it has been eliminated in earlier commits, so the source file can finally be removed, too. --- src/library/scala/annotation/serializable.scala | 15 --------------- 1 file changed, 15 deletions(-) delete mode 100644 src/library/scala/annotation/serializable.scala (limited to 'src') diff --git a/src/library/scala/annotation/serializable.scala b/src/library/scala/annotation/serializable.scala deleted file mode 100644 index 1e1aff19d3..0000000000 --- a/src/library/scala/annotation/serializable.scala +++ /dev/null @@ -1,15 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -/** - * An annotation that designates the class to which it is applied as serializable - */ -@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0") -class serializable extends scala.annotation.StaticAnnotation -- cgit v1.2.3 From 167fc0acb9bc75f3f378d2bfbabd61a2782a3568 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 17 Jan 2013 19:55:42 +0100 Subject: SI-6811 Remove usages of scala.annotation.cloneable The source file itself will be removed later, because the compiler seems to need it for boot-strapping. 
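[Context sketch, not part of the patch: the replacement assumed by this removal is the one spelled out by the deprecation notice on the `cloneable` alias deleted from scala/package.scala below -- extend the Cloneable trait instead of annotating the class. The class in this invented example is hypothetical.]

    // Deprecated form removed by this series:
    //   @cloneable class Counter { var n = 0 }
    // Replacement given in the deprecation message:
    class Counter extends Cloneable {
      var n = 0
    }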
--- src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 1 - src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala | 4 ++-- src/library/scala/package.scala | 4 ---- src/reflect/scala/reflect/internal/Definitions.scala | 1 - test/files/pos/annotations.scala | 2 +- test/files/pos/spec-annotations.scala | 2 +- test/files/pos/t5223.scala | 2 +- test/files/run/t5225_2.check | 2 +- test/files/run/t5225_2.scala | 2 +- 9 files changed, 7 insertions(+), 13 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 92d732ed04..c1bd7fd389 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -1274,7 +1274,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // Additional interface parents based on annotations and other cues def newParentForAttr(attr: Symbol): Option[Symbol] = attr match { - case CloneableAttr => Some(CloneableClass) case RemoteAttr => Some(RemoteInterfaceClass) case _ => None } diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala index 2783a27811..db9edd165d 100644 --- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala +++ b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala @@ -29,8 +29,8 @@ private[html] object SyntaxHigh { /** Annotations, sorted alphabetically */ val annotations = Array( "BeanProperty", "SerialVersionUID", - "beanGetter", "beanSetter", "bridge", "cloneable", - "deprecated", "deprecatedName", + "beanGetter", "beanSetter", "bridge", + "deprecated", "deprecatedName", "deprecatedOverriding", "deprecatedInheritance", "elidable", "field", "getter", "inline", "migration", "native", "noinline", "param", "remote", "setter", "specialized", "strictfp", "switch", diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala index 15d6dce8a7..224112c11c 100644 --- a/src/library/scala/package.scala +++ b/src/library/scala/package.scala @@ -34,9 +34,6 @@ package object scala { override def toString = "object AnyRef" } - @deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0") - type cloneable = annotation.cloneable - type TraversableOnce[+A] = scala.collection.TraversableOnce[A] type Traversable[+A] = scala.collection.Traversable[A] @@ -121,7 +118,6 @@ package object scala { // Annotations which we might move to annotation.* /* type SerialVersionUID = annotation.SerialVersionUID - type cloneable = annotation.cloneable type deprecated = annotation.deprecated type deprecatedName = annotation.deprecatedName type inline = annotation.inline diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index edd295aa65..dbf07c7f06 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -876,7 +876,6 @@ trait Definitions extends api.StandardDefinitions { lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty] lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty] - lazy val CloneableAttr = requiredClass[scala.annotation.cloneable] lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.macros.compileTimeOnly") lazy val DeprecatedAttr = requiredClass[scala.deprecated] lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName] diff --git 
a/test/files/pos/annotations.scala b/test/files/pos/annotations.scala index 501e2a6bd3..4832ce4ecd 100644 --- a/test/files/pos/annotations.scala +++ b/test/files/pos/annotations.scala @@ -2,7 +2,7 @@ class ann(i: Int) extends scala.annotation.Annotation class cfann(x: String) extends annotation.ClassfileAnnotation // annotations on abstract types -abstract class C1[@cloneable +T, U, V[_]] +abstract class C1[@annotation.elidable(0) +T, U, V[_]] abstract class C2[@deprecated @ann(1) T <: Number, V] diff --git a/test/files/pos/spec-annotations.scala b/test/files/pos/spec-annotations.scala index 6c1f737470..b23abf48e8 100644 --- a/test/files/pos/spec-annotations.scala +++ b/test/files/pos/spec-annotations.scala @@ -1,7 +1,7 @@ class ann(i: Int) extends scala.annotation.Annotation // annotations on abstract types -abstract class C1[@cloneable +T, U, V[_]] +abstract class C1[@annotation.elidable(0) +T, U, V[_]] abstract class C2[@deprecated @ann(1) T <: Number, V] diff --git a/test/files/pos/t5223.scala b/test/files/pos/t5223.scala index 0b2528e367..d81daa9907 100644 --- a/test/files/pos/t5223.scala +++ b/test/files/pos/t5223.scala @@ -2,5 +2,5 @@ import scala.reflect.runtime.universe._ object Foo extends App { reify{def printf(format: String, args: Any*): String = null } - reify{def printf(format: String, args: Any*): String = ("abc": @cloneable)} + reify{def printf(format: String, args: Any*): String = ("abc": @deprecated)} } \ No newline at end of file diff --git a/test/files/run/t5225_2.check b/test/files/run/t5225_2.check index 8ed54a14bb..477ea4eb6d 100644 --- a/test/files/run/t5225_2.check +++ b/test/files/run/t5225_2.check @@ -1,4 +1,4 @@ { - def foo(@new cloneable() x: Int) = ""; + def foo(@new elidable(0) x: Int) = ""; () } diff --git a/test/files/run/t5225_2.scala b/test/files/run/t5225_2.scala index d1b607499c..cf0f23a5c8 100644 --- a/test/files/run/t5225_2.scala +++ b/test/files/run/t5225_2.scala @@ -1,6 +1,6 @@ import scala.reflect.runtime.universe._ object Test extends App { - val tree = reify{def foo(@cloneable x: Int) = ""}.tree + val tree = reify{def foo(@annotation.elidable(0) x: Int) = ""}.tree println(tree.toString) } \ No newline at end of file -- cgit v1.2.3 From 2ee85683cdd2f8f41508fef0880edb31c1d97a4c Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 17 Jan 2013 20:05:30 +0100 Subject: SI-6811 Remove deprecated constructors --- src/library/scala/Enumeration.scala | 12 ++---------- test/files/pos/t342.scala | 8 -------- test/files/run/enums.scala | 14 ++++++++++---- test/files/run/t1505.scala | 13 ++++++++----- 4 files changed, 20 insertions(+), 27 deletions(-) delete mode 100644 test/files/pos/t342.scala (limited to 'src') diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index 47d7840e27..21f0c8fd3e 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -56,14 +56,6 @@ abstract class Enumeration (initial: Int) extends Serializable { def this() = this(0) - @deprecated("Names should be specified individually or discovered via reflection", "2.10.0") - def this(initial: Int, names: String*) = { - this(initial) - this.nextName = names.iterator - } - @deprecated("Names should be specified individually or discovered via reflection", "2.10.0") - def this(names: String*) = this(0, names: _*) - /* Note that `readResolve` cannot be private, since otherwise the JVM does not invoke it when deserializing subclasses. 
*/ protected def readResolve(): AnyRef = thisenum.getClass.getField(MODULE_INSTANCE_NAME).get(null) @@ -71,7 +63,7 @@ abstract class Enumeration (initial: Int) extends Serializable { /** The name of this enumeration. */ override def toString = - ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split + ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split Pattern.quote(NAME_JOIN_STRING)).last /** The mapping from the integer used to identify values to the actual @@ -126,7 +118,7 @@ abstract class Enumeration (initial: Int) extends Serializable { * * @param s an `Enumeration` name * @return the `Value` of this `Enumeration` if its name matches `s` - * @throws java.util.NoSuchElementException if no `Value` with a matching + * @throws NoSuchElementException if no `Value` with a matching * name is in this `Enumeration` */ final def withName(s: String): Value = values.find(_.toString == s).get diff --git a/test/files/pos/t342.scala b/test/files/pos/t342.scala deleted file mode 100644 index 752b24d2ba..0000000000 --- a/test/files/pos/t342.scala +++ /dev/null @@ -1,8 +0,0 @@ -object Main extends App { - - object Foo extends Enumeration(0, "Bar") { // 2 - val Bar = Value - } - import Foo._; - Console.println(Bar) -} diff --git a/test/files/run/enums.scala b/test/files/run/enums.scala index 9cdeed2691..3aad7ec320 100644 --- a/test/files/run/enums.scala +++ b/test/files/run/enums.scala @@ -36,8 +36,11 @@ object Test2 { object Test3 { - object Direction extends Enumeration("North", "South", "East", "West") { - val North, South, East, West = Value; + object Direction extends Enumeration { + val North = Value("North") + val South = Value("South") + val East = Value("East") + val West = Value("West") } def run: Int = { @@ -48,8 +51,11 @@ object Test3 { object Test4 { - object Direction extends Enumeration("North", "South", "East", "West") { - val North, South, East, West = Value; + object Direction extends Enumeration { + val North = Value("North") + val South = Value("South") + val East = Value("East") + val West = Value("West") } def run: Int = { diff --git a/test/files/run/t1505.scala b/test/files/run/t1505.scala index a246e8a35b..d7feb30ce3 100644 --- a/test/files/run/t1505.scala +++ b/test/files/run/t1505.scala @@ -1,5 +1,3 @@ -object P extends Enumeration(0, "A", "B", "C") { val A, B, C = Value } - object Q extends Enumeration { val A = Value("A") val B = Value("B") @@ -11,9 +9,14 @@ object R extends Enumeration { } object Test extends App { - assert(P(0) == P.withName("A")) - assert(P.C == P.withName("C")) - assert(Q(0) == Q.withName("A")) assert(Q.C == Q.withName("C")) + + assert(R(0) == R.withName("A")) + assert(R.C == R.withName("C")) + + var failed = false + try { Q.withName("x") } catch { case _: NoSuchElementException => failed = true } + assert(failed) + } -- cgit v1.2.3 From ed52ea098a28b7c655fe35af25e6d38ce171f356 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 17 Jan 2013 20:11:34 +0100 Subject: SI-6811 Remove primitive widenings and /:\ --- src/library/scala/Predef.scala | 37 ---------------------- .../scala/collection/GenTraversableOnce.scala | 13 -------- test/files/presentation/callcc-interpreter.check | 3 +- test/files/presentation/ide-bug-1000349.check | 3 +- test/files/presentation/ide-bug-1000475.check | 9 ++---- test/files/presentation/ide-bug-1000531.check | 4 +-- test/files/presentation/implicit-member.check | 3 +- test/files/presentation/ping-pong.check | 6 ++-- test/files/presentation/t5708.check | 3 +- 
test/files/presentation/visibility.check | 15 +++------ 10 files changed, 15 insertions(+), 81 deletions(-) (limited to 'src') diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index 9bb57877d9..357ea2f468 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -147,9 +147,6 @@ object Predef extends LowPriorityImplicits { @deprecated("Use `sys.exit(status)` instead", "2.9.0") def exit(status: Int): Nothing = sys.exit(status) - @deprecated("Use `formatString.format(args: _*)` or `arg.formatted(formatString)` instead", "2.9.0") - def format(text: String, xs: Any*) = augmentString(text).format(xs: _*) - // errors and asserts ------------------------------------------------- /** Tests an expression, throwing an `AssertionError` if false. @@ -236,8 +233,6 @@ object Predef extends LowPriorityImplicits { final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal { // `__resultOfEnsuring` must be a public val to allow inlining. // See comments in ArrowAssoc for more. - @deprecated("Use `__resultOfEnsuring` instead", "2.10.0") - def x = __resultOfEnsuring def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring } def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); __resultOfEnsuring } @@ -272,8 +267,6 @@ object Predef extends LowPriorityImplicits { // being confused why they get an ambiguous implicit conversion // error. (`foo.x` used to produce this error since both // any2Ensuring and any2ArrowAssoc pimped an `x` onto everything) - @deprecated("Use `__leftOfArrow` instead", "2.10.0") - def x = __leftOfArrow @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y) def →[B](y: B): Tuple2[A, B] = ->(y) @@ -335,33 +328,6 @@ object Predef extends LowPriorityImplicits { implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps.ofShort(xs) implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps.ofUnit(xs) - // Primitive Widenings -------------------------------------------------------------- - - @deprecated("Use `.toShort` for explicit conversion and `Byte.byte2short` for implicit conversion", "2.10.0") def byte2short(x: Byte): Short = x.toShort - @deprecated("Use `.toInt` for explicit conversion and `Byte.byte2int` for implicit conversion", "2.10.0") def byte2int(x: Byte): Int = x.toInt - @deprecated("Use `.toLong` for explicit conversion and `Byte.byte2long for implicit conversion", "2.10.0") def byte2long(x: Byte): Long = x.toLong - @deprecated("Use `.toFloat` for explicit conversion and `Byte.byte2float` for implicit conversion", "2.10.0") def byte2float(x: Byte): Float = x.toFloat - @deprecated("Use `.toDouble` for explicit conversion and `Byte.byte2double` for implicit conversion", "2.10.0") def byte2double(x: Byte): Double = x.toDouble - - @deprecated("Use `.toInt` for explicit conversion and `Short.short2int` for implicit conversion", "2.10.0") def short2int(x: Short): Int = x.toInt - @deprecated("Use `.toLong` for explicit conversion and `Short.short2long` for implicit conversion", "2.10.0") def short2long(x: Short): Long = x.toLong - @deprecated("Use `.toFloat` for explicit conversion and `Short.short2float` for implicit conversion", "2.10.0") def short2float(x: Short): Float = x.toFloat - @deprecated("Use `.toDouble` for explicit conversion and `Short.short2double` for implicit conversion", "2.10.0") def short2double(x: Short): Double = x.toDouble - - @deprecated("Use `.toInt` for explicit conversion and `Char.char2int` for implicit conversion", "2.10.0") def 
char2int(x: Char): Int = x.toInt - @deprecated("Use `.toLong` for explicit conversion and `Char.char2long` for implicit conversion", "2.10.0") def char2long(x: Char): Long = x.toLong - @deprecated("Use `.toFloat` for explicit conversion and `Char.char2float` for implicit conversion", "2.10.0") def char2float(x: Char): Float = x.toFloat - @deprecated("Use `.toDouble` for explicit conversion and `Char.char2double` for implicit conversion", "2.10.0") def char2double(x: Char): Double = x.toDouble - - @deprecated("Use `.toLong` for explicit conversion and `Int.int2long` for implicit conversion", "2.10.0") def int2long(x: Int): Long = x.toLong - @deprecated("Use `.toFloat` for explicit conversion and `Int.int2float` for implicit conversion", "2.10.0") def int2float(x: Int): Float = x.toFloat - @deprecated("Use `.toDouble` for explicit conversion and `Int.int2double` for implicit conversion", "2.10.0") def int2double(x: Int): Double = x.toDouble - - @deprecated("Use `.toFloat` for explicit conversion and `Long.long2float` for implicit conversion", "2.10.0") def long2float(x: Long): Float = x.toFloat - @deprecated("Use `.toDouble` for explicit conversion and `Long.long2double` for implicit conversion", "2.10.0") def long2double(x: Long): Double = x.toDouble - - @deprecated("Use `.toDouble` for explicit conversion and `Float.float2double` for implicit conversion", "2.10.0") def float2double(x: Float): Double = x.toDouble - // "Autoboxing" and "Autounboxing" --------------------------------------------------- implicit def byte2Byte(x: Byte) = java.lang.Byte.valueOf(x) @@ -402,9 +368,6 @@ object Predef extends LowPriorityImplicits { implicit def any2stringadd(x: Any) = new runtime.StringAdd(x) implicit def unaugmentString(x: StringOps): String = x.repr - @deprecated("Use `StringCanBuildFrom`", "2.10.0") - def stringCanBuildFrom: CanBuildFrom[String, Char, String] = StringCanBuildFrom - implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] { def apply(from: String) = apply() def apply() = mutable.StringBuilder.newBuilder diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala index afaced4264..a05ee0fb54 100644 --- a/src/library/scala/collection/GenTraversableOnce.scala +++ b/src/library/scala/collection/GenTraversableOnce.scala @@ -119,19 +119,6 @@ trait GenTraversableOnce[+A] extends Any { */ def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 - /** A syntactic sugar for out of order folding. See `fold`. - * - * Example: - * {{{ - * scala> val a = LinkedList(1,2,3,4) - * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4) - * - * scala> val b = (a /:\ 5)(_+_) - * b: Int = 15 - * }}}*/ - @deprecated("use fold instead", "2.10.0") - def /:\[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = fold(z)(op) - /** Applies a binary operator to a start value and all elements of this $coll, * going left to right. 
* diff --git a/test/files/presentation/callcc-interpreter.check b/test/files/presentation/callcc-interpreter.check index 3a08e2a2ea..dd3ee68e45 100644 --- a/test/files/presentation/callcc-interpreter.check +++ b/test/files/presentation/callcc-interpreter.check @@ -3,7 +3,7 @@ reload: CallccInterpreter.scala askTypeCompletion at CallccInterpreter.scala(51,38) ================================================================================ [response] aksTypeCompletion at (51,38) -retrieved 64 members +retrieved 63 members [accessible: true] `class AddcallccInterpreter.Add` [accessible: true] `class AppcallccInterpreter.App` [accessible: true] `class CcccallccInterpreter.Ccc` @@ -50,7 +50,6 @@ retrieved 64 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> callccInterpreter.type` [accessible: true] `method →[B](y: B)(callccInterpreter.type, B)` [accessible: true] `object WrongcallccInterpreter.Wrong.type` [accessible: true] `trait TermcallccInterpreter.Term` diff --git a/test/files/presentation/ide-bug-1000349.check b/test/files/presentation/ide-bug-1000349.check index 44a3207d75..7eeaddc054 100644 --- a/test/files/presentation/ide-bug-1000349.check +++ b/test/files/presentation/ide-bug-1000349.check @@ -3,7 +3,7 @@ reload: CompletionOnEmptyArgMethod.scala askTypeCompletion at CompletionOnEmptyArgMethod.scala(2,17) ================================================================================ [response] aksTypeCompletion at (2,17) -retrieved 37 members +retrieved 36 members [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` [accessible: true] `method ##()Int` @@ -32,7 +32,6 @@ retrieved 37 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> Foo` [accessible: true] `method →[B](y: B)(Foo, B)` [accessible: true] `value __leftOfArrowFoo` [accessible: true] `value __resultOfEnsuringFoo` diff --git a/test/files/presentation/ide-bug-1000475.check b/test/files/presentation/ide-bug-1000475.check index 34c3b557d8..01de4608ca 100644 --- a/test/files/presentation/ide-bug-1000475.check +++ b/test/files/presentation/ide-bug-1000475.check @@ -3,7 +3,7 @@ reload: Foo.scala askTypeCompletion at Foo.scala(3,7) ================================================================================ [response] aksTypeCompletion at (3,7) -retrieved 36 members +retrieved 35 members [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` [accessible: true] `method ##()Int` @@ -29,7 +29,6 @@ retrieved 36 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> Object` [accessible: true] `method →[B](y: B)(Object, B)` [accessible: true] `value __leftOfArrowObject` [accessible: true] `value __resultOfEnsuringObject` @@ -41,7 +40,7 @@ retrieved 36 members askTypeCompletion at Foo.scala(6,10) ================================================================================ [response] aksTypeCompletion at (6,10) -retrieved 36 members +retrieved 35 members [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` [accessible: true] `method ##()Int` @@ -67,7 +66,6 @@ retrieved 36 members [accessible: true] 
`method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> Object` [accessible: true] `method →[B](y: B)(Object, B)` [accessible: true] `value __leftOfArrowObject` [accessible: true] `value __resultOfEnsuringObject` @@ -79,7 +77,7 @@ retrieved 36 members askTypeCompletion at Foo.scala(7,7) ================================================================================ [response] aksTypeCompletion at (7,7) -retrieved 36 members +retrieved 35 members [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` [accessible: true] `method ##()Int` @@ -105,7 +103,6 @@ retrieved 36 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> Object` [accessible: true] `method →[B](y: B)(Object, B)` [accessible: true] `value __leftOfArrowObject` [accessible: true] `value __resultOfEnsuringObject` diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check index 6c3892d272..7fa550179f 100644 --- a/test/files/presentation/ide-bug-1000531.check +++ b/test/files/presentation/ide-bug-1000531.check @@ -3,7 +3,7 @@ reload: CrashOnLoad.scala askTypeCompletion at CrashOnLoad.scala(6,12) ================================================================================ [response] aksTypeCompletion at (6,12) -retrieved 126 members +retrieved 124 members [accessible: true] `class GroupedIteratorIterator[B]#GroupedIterator` [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` @@ -12,7 +12,6 @@ retrieved 126 members [accessible: true] `method ++[B >: B](that: => scala.collection.GenTraversableOnce[B])Iterator[B]` [accessible: true] `method ->[B](y: B)(java.util.Iterator[B], B)` [accessible: true] `method /:[B](z: B)(op: (B, B) => B)B` -[accessible: true] `method /:\[A1 >: B](z: A1)(op: (A1, A1) => A1)A1` [accessible: true] `method :\[B](z: B)(op: (B, B) => B)B` [accessible: true] `method ==(x$1: Any)Boolean` [accessible: true] `method ==(x$1: AnyRef)Boolean` @@ -115,7 +114,6 @@ retrieved 126 members [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` [accessible: true] `method withFilter(p: B => Boolean)Iterator[B]` -[accessible: true] `method x=> java.util.Iterator[B]` [accessible: true] `method zipAll[B, A1 >: B, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1)Iterator[(A1, B1)]` [accessible: true] `method zipWithIndex=> Iterator[(B, Int)]` [accessible: true] `method zip[B](that: Iterator[B])Iterator[(B, B)]` diff --git a/test/files/presentation/implicit-member.check b/test/files/presentation/implicit-member.check index 05d6f61699..7b4f792bf3 100644 --- a/test/files/presentation/implicit-member.check +++ b/test/files/presentation/implicit-member.check @@ -3,7 +3,7 @@ reload: ImplicitMember.scala askTypeCompletion at ImplicitMember.scala(7,7) ================================================================================ [response] aksTypeCompletion at (7,7) -retrieved 39 members +retrieved 38 members [accessible: true] `class AppliedImplicitImplicit.AppliedImplicit` [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` @@ -33,7 +33,6 @@ retrieved 39 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: 
true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> Implicit.type` [accessible: true] `method →[B](y: B)(Implicit.type, B)` [accessible: true] `value __leftOfArrowImplicit.type` [accessible: true] `value __resultOfEnsuringImplicit.type` diff --git a/test/files/presentation/ping-pong.check b/test/files/presentation/ping-pong.check index b666d51de5..c85f6cc21a 100644 --- a/test/files/presentation/ping-pong.check +++ b/test/files/presentation/ping-pong.check @@ -3,7 +3,7 @@ reload: PingPong.scala askTypeCompletion at PingPong.scala(10,23) ================================================================================ [response] aksTypeCompletion at (10,23) -retrieved 40 members +retrieved 39 members [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` [accessible: true] `method ##()Int` @@ -30,7 +30,6 @@ retrieved 40 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> Pong` [accessible: true] `method →[B](y: B)(Pong, B)` [accessible: true] `value __leftOfArrowPong` [accessible: true] `value __resultOfEnsuringPong` @@ -44,7 +43,7 @@ retrieved 40 members askTypeCompletion at PingPong.scala(19,20) ================================================================================ [response] aksTypeCompletion at (19,20) -retrieved 40 members +retrieved 39 members [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` [accessible: true] `method ##()Int` @@ -73,7 +72,6 @@ retrieved 40 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> Ping` [accessible: true] `method →[B](y: B)(Ping, B)` [accessible: true] `value __leftOfArrowPing` [accessible: true] `value __resultOfEnsuringPing` diff --git a/test/files/presentation/t5708.check b/test/files/presentation/t5708.check index c6d4762635..572f404cf4 100644 --- a/test/files/presentation/t5708.check +++ b/test/files/presentation/t5708.check @@ -3,7 +3,7 @@ reload: Completions.scala askTypeCompletion at Completions.scala(17,9) ================================================================================ [response] aksTypeCompletion at (17,9) -retrieved 44 members +retrieved 43 members [accessible: true] `lazy value fooInt` [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` @@ -31,7 +31,6 @@ retrieved 44 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> test.Compat.type` [accessible: true] `method →[B](y: B)(test.Compat.type, B)` [accessible: true] `value CONST_STRINGString("constant")` [accessible: true] `value __leftOfArrowtest.Compat.type` diff --git a/test/files/presentation/visibility.check b/test/files/presentation/visibility.check index 3026e58f7e..87b4463bf7 100644 --- a/test/files/presentation/visibility.check +++ b/test/files/presentation/visibility.check @@ -3,7 +3,7 @@ reload: Completions.scala askTypeCompletion at Completions.scala(14,12) ================================================================================ [response] aksTypeCompletion at (14,12) -retrieved 42 members +retrieved 41 members [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: 
AnyRef)Boolean` [accessible: true] `method ##()Int` @@ -36,7 +36,6 @@ retrieved 42 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> accessibility.Foo` [accessible: true] `method →[B](y: B)(accessibility.Foo, B)` [accessible: true] `value __leftOfArrowaccessibility.Foo` [accessible: true] `value __resultOfEnsuringaccessibility.Foo` @@ -47,7 +46,7 @@ retrieved 42 members askTypeCompletion at Completions.scala(16,11) ================================================================================ [response] aksTypeCompletion at (16,11) -retrieved 42 members +retrieved 41 members [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` [accessible: true] `method ##()Int` @@ -81,7 +80,6 @@ retrieved 42 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> accessibility.Foo` [accessible: true] `method →[B](y: B)(accessibility.Foo, B)` [accessible: true] `value __leftOfArrowaccessibility.Foo` [accessible: true] `value __resultOfEnsuringaccessibility.Foo` @@ -91,7 +89,7 @@ retrieved 42 members askTypeCompletion at Completions.scala(22,11) ================================================================================ [response] aksTypeCompletion at (22,11) -retrieved 42 members +retrieved 41 members [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` [accessible: true] `method ##()Int` @@ -124,7 +122,6 @@ retrieved 42 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> accessibility.AccessibilityChecks` [accessible: true] `method →[B](y: B)(accessibility.AccessibilityChecks, B)` [accessible: true] `value __leftOfArrowaccessibility.AccessibilityChecks` [accessible: true] `value __resultOfEnsuringaccessibility.AccessibilityChecks` @@ -135,7 +132,7 @@ retrieved 42 members askTypeCompletion at Completions.scala(28,10) ================================================================================ [response] aksTypeCompletion at (28,10) -retrieved 42 members +retrieved 41 members [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` [accessible: true] `method ##()Int` @@ -164,7 +161,6 @@ retrieved 42 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> accessibility.Foo` [accessible: true] `method →[B](y: B)(accessibility.Foo, B)` [accessible: true] `value __leftOfArrowaccessibility.Foo` [accessible: true] `value __resultOfEnsuringaccessibility.Foo` @@ -179,7 +175,7 @@ retrieved 42 members askTypeCompletion at Completions.scala(37,8) ================================================================================ [response] aksTypeCompletion at (37,8) -retrieved 42 members +retrieved 41 members [accessible: true] `method !=(x$1: Any)Boolean` [accessible: true] `method !=(x$1: AnyRef)Boolean` [accessible: true] `method ##()Int` @@ -207,7 +203,6 @@ retrieved 42 members [accessible: true] `method wait()Unit` [accessible: true] `method wait(x$1: Long)Unit` [accessible: true] `method wait(x$1: Long, x$2: Int)Unit` -[accessible: true] `method x=> 
accessibility.Foo` [accessible: true] `method →[B](y: B)(accessibility.Foo, B)` [accessible: true] `value __leftOfArrowaccessibility.Foo` [accessible: true] `value __resultOfEnsuringaccessibility.Foo` -- cgit v1.2.3 From c2903d6ebc4ffb37c0e2179df87798813db3c695 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 17 Jan 2013 20:20:17 +0100 Subject: SI-6811 Remove scala.collection.mutable.ConcurrentMap --- src/library/scala/collection/JavaConversions.scala | 1 - src/library/scala/collection/JavaConverters.scala | 2 +- .../scala/collection/convert/DecorateAsJava.scala | 22 +----- .../scala/collection/convert/DecorateAsScala.scala | 19 ----- .../scala/collection/convert/WrapAsJava.scala | 21 ----- .../scala/collection/convert/WrapAsScala.scala | 44 +---------- .../scala/collection/convert/Wrappers.scala | 47 ----------- .../scala/collection/mutable/ConcurrentMap.scala | 90 ---------------------- .../files/neg/javaConversions-2.10-ambiguity.check | 6 -- .../files/neg/javaConversions-2.10-ambiguity.scala | 10 --- .../files/pos/javaConversions-2.10-ambiguity.scala | 10 +++ .../pos/javaConversions-2.10-regression.scala | 6 +- test/files/run/map_java_conversions.scala | 2 +- 13 files changed, 17 insertions(+), 263 deletions(-) delete mode 100644 src/library/scala/collection/mutable/ConcurrentMap.scala delete mode 100644 test/files/neg/javaConversions-2.10-ambiguity.check delete mode 100644 test/files/neg/javaConversions-2.10-ambiguity.scala create mode 100644 test/files/pos/javaConversions-2.10-ambiguity.scala (limited to 'src') diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala index 7ff29650fa..3cb7edacd6 100644 --- a/src/library/scala/collection/JavaConversions.scala +++ b/src/library/scala/collection/JavaConversions.scala @@ -21,7 +21,6 @@ import convert._ * scala.collection.mutable.Buffer <=> java.util.List * scala.collection.mutable.Set <=> java.util.Set * scala.collection.mutable.Map <=> java.util.{ Map, Dictionary } - * scala.collection.mutable.ConcurrentMap (deprecated since 2.10) <=> java.util.concurrent.ConcurrentMap * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap *}}} * In all cases, converting from a source type to a target type and back diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala index 439991708e..7700d90560 100755 --- a/src/library/scala/collection/JavaConverters.scala +++ b/src/library/scala/collection/JavaConverters.scala @@ -24,7 +24,7 @@ import convert._ * - `scala.collection.mutable.Buffer` <=> `java.util.List` * - `scala.collection.mutable.Set` <=> `java.util.Set` * - `scala.collection.mutable.Map` <=> `java.util.Map` - * - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap` + * - `scala.collection.mutable.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap` * * In all cases, converting from a source type to a target type and back * again will return the original source object, e.g. 
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala index 87bcae3923..7447c1bbaf 100644 --- a/src/library/scala/collection/convert/DecorateAsJava.scala +++ b/src/library/scala/collection/convert/DecorateAsJava.scala @@ -25,7 +25,7 @@ import scala.language.implicitConversions * - `scala.collection.mutable.Buffer` <=> `java.util.List` * - `scala.collection.mutable.Set` <=> `java.util.Set` * - `scala.collection.mutable.Map` <=> `java.util.Map` - * - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap` + * - `scala.collection.mutable.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap` * * In all cases, converting from a source type to a target type and back * again will return the original source object, e.g. @@ -277,26 +277,6 @@ trait DecorateAsJava { implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = new AsJava(mapAsJavaMap(m)) - /** - * Adds an `asJava` method that implicitly converts a Scala mutable - * `ConcurrentMap` to a Java `ConcurrentMap`. - * - * The returned Java `ConcurrentMap` is backed by the provided Scala - * `ConcurrentMap` and any side-effects of using it via the Java interface - * will be visible via the Scala interface and vice versa. - * - * If the Scala `ConcurrentMap` was previously obtained from an implicit or - * explicit call of `asConcurrentMap(java.util.concurrect.ConcurrentMap)` - * then the original Java `ConcurrentMap` will be returned. - * - * @param m The `ConcurrentMap` to be converted. - * @return An object with an `asJava` method that returns a Java - * `ConcurrentMap` view of the argument. - */ - @deprecated("Use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0") - implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] = - new AsJava(asJavaConcurrentMap(m)) - /** * Adds an `asJava` method that implicitly converts a Scala mutable * `concurrent.Map` to a Java `ConcurrentMap`. diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala index 94847a76e3..90e8dded6e 100644 --- a/src/library/scala/collection/convert/DecorateAsScala.scala +++ b/src/library/scala/collection/convert/DecorateAsScala.scala @@ -142,25 +142,6 @@ trait DecorateAsScala { implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = new AsScala(mapAsScalaMap(m)) - /** - * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` - * to a Scala mutable `ConcurrentMap`. The returned Scala `ConcurrentMap` is - * backed by the provided Java `ConcurrentMap` and any side-effects of using - * it via the Scala interface will be visible via the Java interface and - * vice versa. - * - * If the Java `ConcurrentMap` was previously obtained from an implicit or - * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)` - * then the original Scala `ConcurrentMap` will be returned. - * - * @param m The `ConcurrentMap` to be converted. - * @return An object with an `asScala` method that returns a Scala mutable - * `ConcurrentMap` view of the argument. 
- */ - @deprecated("Use `mapAsScalaConcurrentMapConverter` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0") - def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] = - new AsScala(asScalaConcurrentMap(m)) - /** * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` * to a Scala mutable `concurrent.Map`. The returned Scala `concurrent.Map` is diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala index 5e6126a7cf..9665ffa045 100644 --- a/src/library/scala/collection/convert/WrapAsJava.scala +++ b/src/library/scala/collection/convert/WrapAsJava.scala @@ -234,27 +234,6 @@ trait WrapAsJava { case _ => new MapWrapper(m) } - /** - * Implicitly converts a Scala mutable `ConcurrentMap` to a Java - * `ConcurrentMap`. - * - * The returned Java `ConcurrentMap` is backed by the provided Scala - * `ConcurrentMap` and any side-effects of using it via the Java interface - * will be visible via the Scala interface and vice versa. - * - * If the Scala `ConcurrentMap` was previously obtained from an implicit or - * explicit call of `asScalaConcurrentMap(java.util.concurrect.ConcurrentMap)` - * then the original Java ConcurrentMap will be returned. - * - * @param m The `ConcurrentMap` to be converted. - * @return A Java `ConcurrentMap` view of the argument. - */ - @deprecated("Use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0") - implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match { - case JConcurrentMapDeprecatedWrapper(wrapped) => wrapped - case _ => new ConcurrentMapDeprecatedWrapper(m) - } - /** * Implicitly converts a Scala mutable `concurrent.Map` to a Java * `ConcurrentMap`. diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala index ffcca62291..f43eae10d6 100644 --- a/src/library/scala/collection/convert/WrapAsScala.scala +++ b/src/library/scala/collection/convert/WrapAsScala.scala @@ -12,30 +12,7 @@ package convert import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } import scala.language.implicitConversions -trait LowPriorityWrapAsScala { - this: WrapAsScala => - - import Wrappers._ - - /** - * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap. - * The returned Scala ConcurrentMap is backed by the provided Java - * ConcurrentMap and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java ConcurrentMap was previously obtained from an implicit or - * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)` - * then the original Scala ConcurrentMap will be returned. - * - * @param m The ConcurrentMap to be converted. - * @return A Scala mutable ConcurrentMap view of the argument. - */ - @deprecated("Use `mapAsScalaConcurrentMap` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0") - implicit def mapAsScalaDeprecatedConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = - asScalaConcurrentMap(m) -} - -trait WrapAsScala extends LowPriorityWrapAsScala { +trait WrapAsScala { import Wrappers._ /** * Implicitly converts a Java `Iterator` to a Scala `Iterator`. 
@@ -165,25 +142,6 @@ trait WrapAsScala extends LowPriorityWrapAsScala { case _ => new JMapWrapper(m) } - /** - * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap. - * The returned Scala ConcurrentMap is backed by the provided Java - * ConcurrentMap and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java ConcurrentMap was previously obtained from an implicit or - * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)` - * then the original Scala ConcurrentMap will be returned. - * - * @param m The ConcurrentMap to be converted. - * @return A Scala mutable ConcurrentMap view of the argument. - */ - @deprecated("Use `mapAsScalaConcurrentMap` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0") - def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match { - case cmw: ConcurrentMapDeprecatedWrapper[a, b] => cmw.underlying - case _ => new JConcurrentMapDeprecatedWrapper(m) - } - /** * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap. * The returned Scala ConcurrentMap is backed by the provided Java diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala index 20add3365d..0f4506b5d5 100644 --- a/src/library/scala/collection/convert/Wrappers.scala +++ b/src/library/scala/collection/convert/Wrappers.scala @@ -276,28 +276,6 @@ private[collection] trait Wrappers { override def empty = JMapWrapper(new ju.HashMap[A, B]) } - class ConcurrentMapDeprecatedWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] { - - def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match { - case Some(v) => v - case None => null.asInstanceOf[B] - } - - def remove(k: AnyRef, v: AnyRef) = try { - underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B]) - } catch { - case ex: ClassCastException => - false - } - - def replace(k: A, v: B): B = underlying.replace(k, v) match { - case Some(v) => v - case None => null.asInstanceOf[B] - } - - def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval) - } - class ConcurrentMapWrapper[A, B](override val underlying: concurrent.Map[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] { def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match { @@ -320,31 +298,6 @@ private[collection] trait Wrappers { def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval) } - case class JConcurrentMapDeprecatedWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapDeprecatedWrapper[A, B]] with mutable.ConcurrentMap[A, B] { - override def get(k: A) = { - val v = underlying get k - if (v != null) Some(v) - else None - } - - override def empty = new JConcurrentMapDeprecatedWrapper(new juc.ConcurrentHashMap[A, B]) - - def putIfAbsent(k: A, v: B): Option[B] = { - val r = underlying.putIfAbsent(k, v) - if (r != null) Some(r) else None - } - - def remove(k: A, v: B): Boolean = underlying.remove(k, v) - - def replace(k: A, v: B): Option[B] = { - val prev = underlying.replace(k, v) - if (prev != null) Some(prev) else None - } - - def replace(k: A, oldvalue: B, newvalue: B): Boolean = - underlying.replace(k, oldvalue, newvalue) - } - case class JConcurrentMapWrapper[A, B](val underlying: 
juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] { override def get(k: A) = { val v = underlying get k diff --git a/src/library/scala/collection/mutable/ConcurrentMap.scala b/src/library/scala/collection/mutable/ConcurrentMap.scala deleted file mode 100644 index 5b5d738d03..0000000000 --- a/src/library/scala/collection/mutable/ConcurrentMap.scala +++ /dev/null @@ -1,90 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.collection -package mutable - -/** A template trait for mutable maps that allow concurrent access. - * - * $concurrentmapinfo - * - * @since 2.8 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]] - * section on `Concurrent Maps` for more information. - * - * @tparam A the key type of the map - * @tparam B the value type of the map - * - * @define Coll `ConcurrentMap` - * @define coll concurrent map - * @define concurrentmapinfo - * This is a base trait for all Scala concurrent map implementations. It - * provides all of the methods a `Map` does, with the difference that all the - * changes are atomic. It also describes methods specific to concurrent maps. - * - * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values. - * - * @define atomicop - * This is an atomic operation. - */ -@deprecated("Use `scala.collection.concurrent.Map` instead.", "2.10.0") -trait ConcurrentMap[A, B] extends Map[A, B] { - - /** - * Associates the given key with a given value, unless the key was already - * associated with some other value. - * - * $atomicop - * - * @param k key with which the specified value is to be associated with - * @param v value to be associated with the specified key - * @return `Some(oldvalue)` if there was a value `oldvalue` previously - * associated with the specified key, or `None` if there was no - * mapping for the specified key - */ - def putIfAbsent(k: A, v: B): Option[B] - - /** - * Removes the entry for the specified key if its currently mapped to the - * specified value. - * - * $atomicop - * - * @param k key for which the entry should be removed - * @param v value expected to be associated with the specified key if - * the removal is to take place - * @return `true` if the removal took place, `false` otherwise - */ - def remove(k: A, v: B): Boolean - - /** - * Replaces the entry for the given key only if it was previously mapped to - * a given value. - * - * $atomicop - * - * @param k key for which the entry should be replaced - * @param oldvalue value expected to be associated with the specified key - * if replacing is to happen - * @param newvalue value to be associated with the specified key - * @return `true` if the entry was replaced, `false` otherwise - */ - def replace(k: A, oldvalue: B, newvalue: B): Boolean - - /** - * Replaces the entry for the given key only if it was previously mapped - * to some value. 
- * - * $atomicop - * - * @param k key for which the entry should be replaced - * @param v value to be associated with the specified key - * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise - */ - def replace(k: A, v: B): Option[B] -} diff --git a/test/files/neg/javaConversions-2.10-ambiguity.check b/test/files/neg/javaConversions-2.10-ambiguity.check deleted file mode 100644 index c064a22964..0000000000 --- a/test/files/neg/javaConversions-2.10-ambiguity.check +++ /dev/null @@ -1,6 +0,0 @@ -javaConversions-2.10-ambiguity.scala:8: error: type mismatch; - found : scala.collection.concurrent.Map[String,String] - required: scala.collection.mutable.ConcurrentMap[String,String] - assertType[mutable.ConcurrentMap[String, String]](a) - ^ -one error found diff --git a/test/files/neg/javaConversions-2.10-ambiguity.scala b/test/files/neg/javaConversions-2.10-ambiguity.scala deleted file mode 100644 index e856846a29..0000000000 --- a/test/files/neg/javaConversions-2.10-ambiguity.scala +++ /dev/null @@ -1,10 +0,0 @@ -import collection.{JavaConversions, mutable, concurrent} -import JavaConversions._ -import java.util.concurrent.{ConcurrentHashMap => CHM} - -object Bar { - def assertType[T](t: T) = t - val a = new CHM[String, String]() += (("", "")) - assertType[mutable.ConcurrentMap[String, String]](a) -} -// vim: set et: diff --git a/test/files/pos/javaConversions-2.10-ambiguity.scala b/test/files/pos/javaConversions-2.10-ambiguity.scala new file mode 100644 index 0000000000..c4aad6cbfc --- /dev/null +++ b/test/files/pos/javaConversions-2.10-ambiguity.scala @@ -0,0 +1,10 @@ +import collection.{JavaConversions, mutable, concurrent} +import JavaConversions._ +import java.util.concurrent.{ConcurrentHashMap => CHM} + +object Bar { + def assertType[T](t: T) = t + val a = new CHM[String, String]() += (("", "")) + assertType[concurrent.Map[String, String]](a) +} +// vim: set et: diff --git a/test/files/pos/javaConversions-2.10-regression.scala b/test/files/pos/javaConversions-2.10-regression.scala index e1b81015ba..7c7ff03b55 100644 --- a/test/files/pos/javaConversions-2.10-regression.scala +++ b/test/files/pos/javaConversions-2.10-regression.scala @@ -3,10 +3,10 @@ import JavaConversions._ import java.util.concurrent.{ConcurrentHashMap => CHM} object Foo { - def buildCache2_9_simple[K <: AnyRef, V <: AnyRef]: mutable.ConcurrentMap[K, V] = - asScalaConcurrentMap(new CHM()) + def buildCache2_9_simple[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] = + mapAsScalaConcurrentMap(new CHM()) - def buildCache2_9_implicit[K <: AnyRef, V <: AnyRef]: mutable.ConcurrentMap[K, V] = + def buildCache2_9_implicit[K <: AnyRef, V <: AnyRef]: concurrent.Map[K, V] = new CHM[K, V]() } diff --git a/test/files/run/map_java_conversions.scala b/test/files/run/map_java_conversions.scala index 7714b2cc74..751167c04d 100644 --- a/test/files/run/map_java_conversions.scala +++ b/test/files/run/map_java_conversions.scala @@ -19,7 +19,7 @@ object Test { val concMap = new java.util.concurrent.ConcurrentHashMap[String, String] test(concMap) - val cmap = asScalaConcurrentMap(concMap) + val cmap = mapAsScalaConcurrentMap(concMap) cmap.putIfAbsent("absentKey", "absentValue") cmap.put("somekey", "somevalue") assert(cmap.remove("somekey", "somevalue") == true) -- cgit v1.2.3 From b13bf260b46f6498d0e995d0bbf3ce7b39bc8b3b Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 17 Jan 2013 20:24:37 +0100 Subject: SI-6811 Remove the scala.util.grammar package --- 
src/library/scala/util/grammar/HedgeRHS.scala | 26 -------------------------- src/library/scala/util/grammar/TreeRHS.scala | 22 ---------------------- 2 files changed, 48 deletions(-) delete mode 100644 src/library/scala/util/grammar/HedgeRHS.scala delete mode 100644 src/library/scala/util/grammar/TreeRHS.scala (limited to 'src') diff --git a/src/library/scala/util/grammar/HedgeRHS.scala b/src/library/scala/util/grammar/HedgeRHS.scala deleted file mode 100644 index d1c11a2f99..0000000000 --- a/src/library/scala/util/grammar/HedgeRHS.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.util.grammar - -@deprecated("This class will be removed", "2.10.0") -abstract class HedgeRHS - -/** Right hand side of a hedge production, deriving a single tree. */ -@deprecated("This class will be removed", "2.10.0") -case class ConsRHS(tnt: Int, hnt: Int) extends HedgeRHS - -/** Right hand side of a hedge production, deriving any hedge. */ -@deprecated("This class will be removed", "2.10.0") -case object AnyHedgeRHS extends HedgeRHS - -/** Right hand side of a hedge production, deriving the empty hedge. */ -@deprecated("This class will be removed", "2.10.0") -case object EmptyHedgeRHS extends HedgeRHS diff --git a/src/library/scala/util/grammar/TreeRHS.scala b/src/library/scala/util/grammar/TreeRHS.scala deleted file mode 100644 index ee72ea982d..0000000000 --- a/src/library/scala/util/grammar/TreeRHS.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.util.grammar - -/** Right hand side of a tree production. */ -@deprecated("This class will be removed", "2.10.0") -abstract class TreeRHS - -/** Right hand side of a tree production, labelled with a letter from an alphabet. 
*/ -@deprecated("This class will be removed", "2.10.0") -case class LabelledRHS[A](label: A, hnt: Int) extends TreeRHS - -@deprecated("This class will be removed", "2.10.0") -case object AnyTreeRHS extends TreeRHS -- cgit v1.2.3 From 67d7e26657a0a52e2bd5dc46bd1bbedda52d2dc0 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 17 Jan 2013 20:29:48 +0100 Subject: SI-6811 Remove parts of scala.concurrent not needed by scala.actors --- src/library/scala/collection/parallel/Tasks.scala | 54 ---------------- src/library/scala/concurrent/JavaConversions.scala | 28 --------- src/library/scala/concurrent/TaskRunners.scala | 36 ----------- src/library/scala/concurrent/ThreadRunner.scala | 60 ------------------ src/library/scala/concurrent/ops.scala | 73 ---------------------- src/library/scala/parallel/Future.scala | 39 ------------ test/files/pos/t2484.scala | 4 +- 7 files changed, 3 insertions(+), 291 deletions(-) delete mode 100644 src/library/scala/concurrent/TaskRunners.scala delete mode 100644 src/library/scala/concurrent/ThreadRunner.scala delete mode 100644 src/library/scala/concurrent/ops.scala delete mode 100644 src/library/scala/parallel/Future.scala (limited to 'src') diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala index 12f8012a5b..4e350a2adf 100644 --- a/src/library/scala/collection/parallel/Tasks.scala +++ b/src/library/scala/collection/parallel/Tasks.scala @@ -346,60 +346,6 @@ object ThreadPoolTasks { ) } - -/** An implementation of tasks objects based on the Java thread pooling API and synchronization using futures. */ -@deprecated("This implementation is not used.", "2.10.0") -trait FutureThreadPoolTasks extends Tasks { - import java.util.concurrent._ - - trait WrappedTask[R, +Tp] extends Runnable with super.WrappedTask[R, Tp] { - @volatile var future: Future[_] = null - - def start() = { - executor.synchronized { - future = executor.submit(this) - } - } - def sync() = future.get - def tryCancel = false - def run = { - compute() - } - } - - protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp] - - val environment: AnyRef = FutureThreadPoolTasks.defaultThreadPool - def executor = environment.asInstanceOf[ThreadPoolExecutor] - - def execute[R, Tp](task: Task[R, Tp]): () => R = { - val t = newWrappedTask(task) - - // debuglog("-----------> Executing without wait: " + task) - t.start - - () => { - t.sync - t.body.forwardThrowable - t.body.result - } - } - - def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = { - val t = newWrappedTask(task) - - // debuglog("-----------> Executing with wait: " + task) - t.start - - t.sync - t.body.forwardThrowable - t.body.result - } - - def parallelismLevel = FutureThreadPoolTasks.numCores - -} - object FutureThreadPoolTasks { import java.util.concurrent._ diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala index 573882ee34..3d0597ca22 100644 --- a/src/library/scala/concurrent/JavaConversions.scala +++ b/src/library/scala/concurrent/JavaConversions.scala @@ -18,34 +18,6 @@ import scala.language.implicitConversions */ object JavaConversions { - @deprecated("Use `asExecutionContext` instead.", "2.10.0") - implicit def asTaskRunner(exec: ExecutorService): FutureTaskRunner = - new ThreadPoolRunner { - override protected def executor = - exec - - def shutdown() = - exec.shutdown() - } - - @deprecated("Use `asExecutionContext` instead.", "2.10.0") - implicit def asTaskRunner(exec: Executor): 
TaskRunner = - new TaskRunner { - type Task[T] = Runnable - - implicit def functionAsTask[T](fun: () => T): Task[T] = new Runnable { - def run() { fun() } - } - - def execute[S](task: Task[S]) { - exec.execute(task) - } - - def shutdown() { - // do nothing - } - } - /** * Creates a new `ExecutionContext` which uses the provided `ExecutorService`. */ diff --git a/src/library/scala/concurrent/TaskRunners.scala b/src/library/scala/concurrent/TaskRunners.scala deleted file mode 100644 index e109a8abf9..0000000000 --- a/src/library/scala/concurrent/TaskRunners.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - -import java.util.concurrent.{ThreadPoolExecutor, LinkedBlockingQueue, TimeUnit} - -/** The `TaskRunners` object... - * - * @author Philipp Haller - */ -@deprecated("Use `ExecutionContext` instead.", "2.10.0") -object TaskRunners { - - implicit val threadRunner: FutureTaskRunner = - new ThreadRunner - - implicit val threadPoolRunner: FutureTaskRunner = { - val numCores = Runtime.getRuntime().availableProcessors() - val keepAliveTime = 60000L - val workQueue = new LinkedBlockingQueue[Runnable] - val exec = new ThreadPoolExecutor(numCores, - numCores, - keepAliveTime, - TimeUnit.MILLISECONDS, - workQueue, - new ThreadPoolExecutor.CallerRunsPolicy) - JavaConversions.asTaskRunner(exec) - } - -} diff --git a/src/library/scala/concurrent/ThreadRunner.scala b/src/library/scala/concurrent/ThreadRunner.scala deleted file mode 100644 index cd92db9486..0000000000 --- a/src/library/scala/concurrent/ThreadRunner.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - -import java.lang.Thread -import scala.language.implicitConversions - -/** The `ThreadRunner` trait... - * - * @author Philipp Haller - */ -@deprecated("Use `ExecutionContext` instead.", "2.10.0") -class ThreadRunner extends FutureTaskRunner { - - type Task[T] = () => T - type Future[T] = () => T - - implicit def functionAsTask[S](fun: () => S): Task[S] = fun - implicit def futureAsFunction[S](x: Future[S]): () => S = x - - /* If expression computed successfully return it in `Right`, - * otherwise return exception in `Left`. 
- */ - private def tryCatch[A](body: => A): Either[Exception, A] = - try Right(body) catch { - case ex: Exception => Left(ex) - } - - def execute[S](task: Task[S]) { - val runnable = new Runnable { - def run() { tryCatch(task()) } - } - (new Thread(runnable)).start() - } - - def submit[S](task: Task[S]): Future[S] = { - val result = new SyncVar[Either[Exception, S]] - val runnable = new Runnable { - def run() { result set tryCatch(task()) } - } - (new Thread(runnable)).start() - () => result.get.fold[S](throw _, identity _) - } - - @deprecated("Use `blocking` instead.", "2.10.0") - def managedBlock(blocker: ManagedBlocker) { - blocker.block() - } - - def shutdown() { - // do nothing - } - -} diff --git a/src/library/scala/concurrent/ops.scala b/src/library/scala/concurrent/ops.scala deleted file mode 100644 index 4c91e78dc7..0000000000 --- a/src/library/scala/concurrent/ops.scala +++ /dev/null @@ -1,73 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.concurrent - -import java.lang.Thread -import scala.util.control.Exception.allCatch - -/** The object `ops` ... - * - * @author Martin Odersky, Stepan Koltsov, Philipp Haller - */ -@deprecated("Use `Future` instead.", "2.10.0") -object ops -{ - val defaultRunner: FutureTaskRunner = TaskRunners.threadRunner - - /** - * If expression computed successfully return it in `Right`, - * otherwise return exception in `Left`. - */ - private def tryCatch[A](body: => A): Either[Throwable, A] = - allCatch[A] either body - - private def getOrThrow[T <: Throwable, A](x: Either[T, A]): A = - x.fold[A](throw _, identity _) - - /** Evaluates an expression asynchronously. - * - * @param p the expression to evaluate - */ - def spawn(p: => Unit)(implicit runner: TaskRunner = defaultRunner): Unit = { - runner execute runner.functionAsTask(() => p) - } - - /** Evaluates an expression asynchronously, and returns a closure for - * retrieving the result. - * - * @param p the expression to evaluate - * @return a closure which returns the result once it has been computed - */ - def future[A](p: => A)(implicit runner: FutureTaskRunner = defaultRunner): () => A = { - runner.futureAsFunction(runner submit runner.functionAsTask(() => p)) - } - - /** Evaluates two expressions in parallel. Invoking `par` blocks the current - * thread until both expressions have been evaluated. 
- * - * @param xp the first expression to evaluate - * @param yp the second expression to evaluate - * - * @return a pair holding the evaluation results - */ - def par[A, B](xp: => A, yp: => B)(implicit runner: TaskRunner = defaultRunner): (A, B) = { - val y = new SyncVar[Either[Throwable, B]] - spawn { y set tryCatch(yp) } - (xp, getOrThrow(y.get)) - } - -/* - def parMap[a,b](f: a => b, xs: Array[a]): Array[b] = { - val results = new Array[b](xs.length); - replicate(0, xs.length) { i => results(i) = f(xs(i)) } - results - } -*/ - -} diff --git a/src/library/scala/parallel/Future.scala b/src/library/scala/parallel/Future.scala deleted file mode 100644 index e255a5772b..0000000000 --- a/src/library/scala/parallel/Future.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.parallel - - - -/** A future is a function without parameters that will block the caller if - * the parallel computation associated with the function is not completed. - * - * @tparam R the type of the result - * - * @since 2.9 - */ -@deprecated("Use `scala.concurrent.Future` instead.", "2.10.0") -trait Future[@specialized +R] extends (() => R) { - /** Returns a result once the parallel computation completes. If the - * computation produced an exception, an exception is forwarded. - * - * '''Note:''' creating a circular dependency between futures by calling - * this method will result in a deadlock. - * - * @return the result - * @throws the exception that was thrown during a parallel computation - */ - def apply(): R - - /** Returns `true` if the parallel computation is completed. 
- * - * @return `true` if the parallel computation is completed, `false` otherwise - */ - def isDone(): Boolean -} - diff --git a/test/files/pos/t2484.scala b/test/files/pos/t2484.scala index 7d1b7cb03c..29f798edf9 100755 --- a/test/files/pos/t2484.scala +++ b/test/files/pos/t2484.scala @@ -1,7 +1,9 @@ +import concurrent.ExecutionContext.Implicits.global + class Admin extends javax.swing.JApplet { val jScrollPane = new javax.swing.JScrollPane (null, 0, 0) def t2484: Unit = { - scala.concurrent.ops.spawn {jScrollPane.synchronized { + scala.concurrent.future {jScrollPane.synchronized { def someFunction () = {} //scala.concurrent.ops.spawn {someFunction ()} jScrollPane.addComponentListener (new java.awt.event.ComponentAdapter {override def componentShown (e: java.awt.event.ComponentEvent) = { -- cgit v1.2.3 From be5554f0c13879d8b7c361f9956dfc9f0093a0b3 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 17 Jan 2013 20:36:20 +0100 Subject: SI-6811 Remove deprecated elements in scala.collection --- src/library/scala/collection/TraversableOnce.scala | 5 - .../scala/collection/immutable/BitSet.scala | 7 - .../scala/collection/immutable/HashMap.scala | 3 - .../scala/collection/immutable/RedBlack.scala | 293 --------------------- .../scala/collection/immutable/TreeMap.scala | 3 - .../scala/collection/immutable/TreeSet.scala | 3 - .../scala/collection/immutable/package.scala | 93 ------- .../scala/collection/mutable/PriorityQueue.scala | 8 - .../collection/mutable/PriorityQueueProxy.scala | 8 - test/files/run/bitsets.scala | 4 +- test/files/run/t2873.check | 2 +- test/files/run/t2873.scala | 7 +- test/files/run/t5879.check | 8 - test/files/run/t5879.scala | 15 -- test/files/scalacheck/redblack.scala | 213 --------------- 15 files changed, 9 insertions(+), 663 deletions(-) delete mode 100644 src/library/scala/collection/immutable/RedBlack.scala delete mode 100644 src/library/scala/collection/immutable/package.scala delete mode 100644 test/files/scalacheck/redblack.scala (limited to 'src') diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index 82cf1d1198..c7c54fe302 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -364,11 +364,6 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { object TraversableOnce { - @deprecated("use OnceCanBuildFrom instead", "2.10.0") - def traversableOnceCanBuildFrom[T] = new OnceCanBuildFrom[T] - @deprecated("use MonadOps instead", "2.10.0") - def wrapTraversableOnce[A](trav: TraversableOnce[A]) = new MonadOps(trav) - implicit def alternateImplicit[A](trav: TraversableOnce[A]) = new ForceImplicitAmbiguity implicit def flattenTraversableOnce[A, CC[_]](travs: TraversableOnce[CC[A]])(implicit ev: CC[A] => TraversableOnce[A]) = new FlattenOps[A](travs map ev) diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala index ed3630edc1..2824309ca2 100644 --- a/src/library/scala/collection/immutable/BitSet.scala +++ b/src/library/scala/collection/immutable/BitSet.scala @@ -31,9 +31,6 @@ abstract class BitSet extends scala.collection.AbstractSet[Int] with Serializable { override def empty = BitSet.empty - @deprecated("Use BitSet.fromBitMask[NoCopy] instead of fromArray", "2.10.0") - def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems) - protected def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems) /** Update 
word at index `idx`; enlarge set if `idx` outside range of set. @@ -81,10 +78,6 @@ object BitSet extends BitSetFactory[BitSet] { /** $bitsetCanBuildFrom */ implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom - /** A bitset containing all the bits in an array */ - @deprecated("Use fromBitMask[NoCopy] instead of fromArray", "2.10.0") - def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems) - /** A bitset containing all the bits in an array */ def fromBitMask(elems: Array[Long]): BitSet = { val len = elems.length diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 29267f22dc..83f0d2c8a2 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -87,9 +87,6 @@ class HashMap[A, +B] extends AbstractMap[A, B] def split: Seq[HashMap[A, B]] = Seq(this) - @deprecated("Use the `merged` method instead.", "2.10.0") - def merge[B1 >: B](that: HashMap[A, B1], mergef: MergeFunction[A, B1] = null): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef)) - /** Creates a new map which is the merge of this and the argument hash map. * * Uses the specified collision resolution function if two keys are the same. diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala deleted file mode 100644 index 9739e8f3f3..0000000000 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ /dev/null @@ -1,293 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala -package collection -package immutable - -/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`. - * - * Deprecated due to various performance bugs (see [[https://issues.scala-lang.org/browse/SI-5331 SI-5331]] for more information). 
- * - * @since 2.3 - */ -@deprecated("use `TreeMap` or `TreeSet` instead", "2.10.0") -@SerialVersionUID(8691885935445612921L) -abstract class RedBlack[A] extends Serializable { - - def isSmaller(x: A, y: A): Boolean - - private def blacken[B](t: Tree[B]): Tree[B] = t match { - case RedTree(k, v, l, r) => BlackTree(k, v, l, r) - case t => t - } - private def mkTree[B](isBlack: Boolean, k: A, v: B, l: Tree[B], r: Tree[B]) = - if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r) - - abstract class Tree[+B] extends Serializable { - def isEmpty: Boolean - def isBlack: Boolean - def lookup(x: A): Tree[B] - def update[B1 >: B](k: A, v: B1): Tree[B1] = blacken(upd(k, v)) - def delete(k: A): Tree[B] = blacken(del(k)) - def range(from: Option[A], until: Option[A]): Tree[B] = blacken(rng(from, until)) - def foreach[U](f: (A, B) => U) - def toStream: Stream[(A,B)] - def iterator: Iterator[(A, B)] - def upd[B1 >: B](k: A, v: B1): Tree[B1] - def del(k: A): Tree[B] - def smallest: NonEmpty[B] - def rng(from: Option[A], until: Option[A]): Tree[B] - def first : A - def last : A - def count : Int - } - abstract class NonEmpty[+B] extends Tree[B] with Serializable { - def isEmpty = false - def key: A - def value: B - def left: Tree[B] - def right: Tree[B] - def lookup(k: A): Tree[B] = - if (isSmaller(k, key)) left.lookup(k) - else if (isSmaller(key, k)) right.lookup(k) - else this - private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[B1], d: Tree[B1])/*: NonEmpty[B1]*/ = l match { - case RedTree(y, yv, RedTree(x, xv, a, b), c) => - RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) - case RedTree(x, xv, a, RedTree(y, yv, b, c)) => - RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) - case _ => - mkTree(isBlack, z, zv, l, d) - } - private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[B1], r: Tree[B1])/*: NonEmpty[B1]*/ = r match { - case RedTree(z, zv, RedTree(y, yv, b, c), d) => - RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) - case RedTree(y, yv, b, RedTree(z, zv, c, d)) => - RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) - case _ => - mkTree(isBlack, x, xv, a, r) - } - def upd[B1 >: B](k: A, v: B1): Tree[B1] = { - if (isSmaller(k, key)) balanceLeft(isBlack, key, value, left.upd(k, v), right) - else if (isSmaller(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v)) - else mkTree(isBlack, k, v, left, right) - } - // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees - // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html - def del(k: A): Tree[B] = { - def balance(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match { - case (RedTree(y, yv, a, b), RedTree(z, zv, c, d)) => - RedTree(x, xv, BlackTree(y, yv, a, b), BlackTree(z, zv, c, d)) - case (RedTree(y, yv, RedTree(z, zv, a, b), c), d) => - RedTree(y, yv, BlackTree(z, zv, a, b), BlackTree(x, xv, c, d)) - case (RedTree(y, yv, a, RedTree(z, zv, b, c)), d) => - RedTree(z, zv, BlackTree(y, yv, a, b), BlackTree(x, xv, c, d)) - case (a, RedTree(y, yv, b, RedTree(z, zv, c, d))) => - RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) - case (a, RedTree(y, yv, RedTree(z, zv, b, c), d)) => - RedTree(z, zv, BlackTree(x, xv, a, b), BlackTree(y, yv, c, d)) - case (a, b) => - BlackTree(x, xv, a, b) - } - def subl(t: Tree[B]) = t match { - case BlackTree(x, xv, a, b) => RedTree(x, xv, a, b) - case _ => sys.error("Defect: invariance violation; expected black, got "+t) - } - def balLeft(x: A, xv: B, tl: Tree[B], 
tr: Tree[B]) = (tl, tr) match { - case (RedTree(y, yv, a, b), c) => - RedTree(x, xv, BlackTree(y, yv, a, b), c) - case (bl, BlackTree(y, yv, a, b)) => - balance(x, xv, bl, RedTree(y, yv, a, b)) - case (bl, RedTree(y, yv, BlackTree(z, zv, a, b), c)) => - RedTree(z, zv, BlackTree(x, xv, bl, a), balance(y, yv, b, subl(c))) - case _ => sys.error("Defect: invariance violation at "+right) - } - def balRight(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match { - case (a, RedTree(y, yv, b, c)) => - RedTree(x, xv, a, BlackTree(y, yv, b, c)) - case (BlackTree(y, yv, a, b), bl) => - balance(x, xv, RedTree(y, yv, a, b), bl) - case (RedTree(y, yv, a, BlackTree(z, zv, b, c)), bl) => - RedTree(z, zv, balance(y, yv, subl(a), b), BlackTree(x, xv, c, bl)) - case _ => sys.error("Defect: invariance violation at "+left) - } - def delLeft = left match { - case _: BlackTree[_] => balLeft(key, value, left.del(k), right) - case _ => RedTree(key, value, left.del(k), right) - } - def delRight = right match { - case _: BlackTree[_] => balRight(key, value, left, right.del(k)) - case _ => RedTree(key, value, left, right.del(k)) - } - def append(tl: Tree[B], tr: Tree[B]): Tree[B] = (tl, tr) match { - case (Empty, t) => t - case (t, Empty) => t - case (RedTree(x, xv, a, b), RedTree(y, yv, c, d)) => - append(b, c) match { - case RedTree(z, zv, bb, cc) => RedTree(z, zv, RedTree(x, xv, a, bb), RedTree(y, yv, cc, d)) - case bc => RedTree(x, xv, a, RedTree(y, yv, bc, d)) - } - case (BlackTree(x, xv, a, b), BlackTree(y, yv, c, d)) => - append(b, c) match { - case RedTree(z, zv, bb, cc) => RedTree(z, zv, BlackTree(x, xv, a, bb), BlackTree(y, yv, cc, d)) - case bc => balLeft(x, xv, a, BlackTree(y, yv, bc, d)) - } - case (a, RedTree(x, xv, b, c)) => RedTree(x, xv, append(a, b), c) - case (RedTree(x, xv, a, b), c) => RedTree(x, xv, a, append(b, c)) - } - // RedBlack is neither A : Ordering[A], nor A <% Ordered[A] - k match { - case _ if isSmaller(k, key) => delLeft - case _ if isSmaller(key, k) => delRight - case _ => append(left, right) - } - } - - def smallest: NonEmpty[B] = if (left.isEmpty) this else left.smallest - - def toStream: Stream[(A,B)] = - left.toStream ++ Stream((key,value)) ++ right.toStream - - def iterator: Iterator[(A, B)] = - left.iterator ++ Iterator.single(Pair(key, value)) ++ right.iterator - - def foreach[U](f: (A, B) => U) { - left foreach f - f(key, value) - right foreach f - } - - override def rng(from: Option[A], until: Option[A]): Tree[B] = { - if (from == None && until == None) return this - if (from != None && isSmaller(key, from.get)) return right.rng(from, until); - if (until != None && (isSmaller(until.get,key) || !isSmaller(key,until.get))) - return left.rng(from, until); - val newLeft = left.rng(from, None) - val newRight = right.rng(None, until) - if ((newLeft eq left) && (newRight eq right)) this - else if (newLeft eq Empty) newRight.upd(key, value); - else if (newRight eq Empty) newLeft.upd(key, value); - else rebalance(newLeft, newRight) - } - - // The zipper returned might have been traversed left-most (always the left child) - // or right-most (always the right child). Left trees are traversed right-most, - // and right trees are traversed leftmost. - - // Returns the zipper for the side with deepest black nodes depth, a flag - // indicating whether the trees were unbalanced at all, and a flag indicating - // whether the zipper was traversed left-most or right-most. 
- - // If the trees were balanced, returns an empty zipper - private[this] def compareDepth(left: Tree[B], right: Tree[B]): (List[NonEmpty[B]], Boolean, Boolean, Int) = { - // Once a side is found to be deeper, unzip it to the bottom - def unzip(zipper: List[NonEmpty[B]], leftMost: Boolean): List[NonEmpty[B]] = { - val next = if (leftMost) zipper.head.left else zipper.head.right - next match { - case node: NonEmpty[_] => unzip(node :: zipper, leftMost) - case Empty => zipper - } - } - - // Unzip left tree on the rightmost side and right tree on the leftmost side until one is - // found to be deeper, or the bottom is reached - def unzipBoth(left: Tree[B], - right: Tree[B], - leftZipper: List[NonEmpty[B]], - rightZipper: List[NonEmpty[B]], - smallerDepth: Int): (List[NonEmpty[B]], Boolean, Boolean, Int) = (left, right) match { - case (l @ BlackTree(_, _, _, _), r @ BlackTree(_, _, _, _)) => - unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth + 1) - case (l @ RedTree(_, _, _, _), r @ RedTree(_, _, _, _)) => - unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth) - case (_, r @ RedTree(_, _, _, _)) => - unzipBoth(left, r.left, leftZipper, r :: rightZipper, smallerDepth) - case (l @ RedTree(_, _, _, _), _) => - unzipBoth(l.right, right, l :: leftZipper, rightZipper, smallerDepth) - case (Empty, Empty) => - (Nil, true, false, smallerDepth) - case (Empty, r @ BlackTree(_, _, _, _)) => - val leftMost = true - (unzip(r :: rightZipper, leftMost), false, leftMost, smallerDepth) - case (l @ BlackTree(_, _, _, _), Empty) => - val leftMost = false - (unzip(l :: leftZipper, leftMost), false, leftMost, smallerDepth) - } - unzipBoth(left, right, Nil, Nil, 0) - } - - private[this] def rebalance(newLeft: Tree[B], newRight: Tree[B]) = { - // This is like drop(n-1), but only counting black nodes - def findDepth(zipper: List[NonEmpty[B]], depth: Int): List[NonEmpty[B]] = zipper match { - case BlackTree(_, _, _, _) :: tail => - if (depth == 1) zipper else findDepth(tail, depth - 1) - case _ :: tail => findDepth(tail, depth) - case Nil => sys.error("Defect: unexpected empty zipper while computing range") - } - - // Blackening the smaller tree avoids balancing problems on union; - // this can't be done later, though, or it would change the result of compareDepth - val blkNewLeft = blacken(newLeft) - val blkNewRight = blacken(newRight) - val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight) - - if (levelled) { - BlackTree(key, value, blkNewLeft, blkNewRight) - } else { - val zipFrom = findDepth(zipper, smallerDepth) - val union = if (leftMost) { - RedTree(key, value, blkNewLeft, zipFrom.head) - } else { - RedTree(key, value, zipFrom.head, blkNewRight) - } - val zippedTree = zipFrom.tail.foldLeft(union: Tree[B]) { (tree, node) => - if (leftMost) - balanceLeft(node.isBlack, node.key, node.value, tree, node.right) - else - balanceRight(node.isBlack, node.key, node.value, node.left, tree) - } - zippedTree - } - } - def first = if (left .isEmpty) key else left.first - def last = if (right.isEmpty) key else right.last - def count = 1 + left.count + right.count - } - case object Empty extends Tree[Nothing] { - def isEmpty = true - def isBlack = true - def lookup(k: A): Tree[Nothing] = this - def upd[B](k: A, v: B): Tree[B] = RedTree(k, v, Empty, Empty) - def del(k: A): Tree[Nothing] = this - def smallest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map") - def iterator: Iterator[(A, Nothing)] = Iterator.empty - def toStream: 
Stream[(A,Nothing)] = Stream.empty - - def foreach[U](f: (A, Nothing) => U) {} - - def rng(from: Option[A], until: Option[A]) = this - def first = throw new NoSuchElementException("empty map") - def last = throw new NoSuchElementException("empty map") - def count = 0 - } - case class RedTree[+B](override val key: A, - override val value: B, - override val left: Tree[B], - override val right: Tree[B]) extends NonEmpty[B] { - def isBlack = false - } - case class BlackTree[+B](override val key: A, - override val value: B, - override val left: Tree[B], - override val right: Tree[B]) extends NonEmpty[B] { - def isBlack = true - } -} diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 5b4db2686a..9a87d8636b 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -51,9 +51,6 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi with MapLike[A, B, TreeMap[A, B]] with Serializable { - @deprecated("use `ordering.lt` instead", "2.10.0") - def isSmaller(x: A, y: A) = ordering.lt(x, y) - override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] = TreeMap.newBuilder[A, B] diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 494776587d..8bceb936aa 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -96,9 +96,6 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin override def takeWhile(p: A => Boolean) = take(countWhile(p)) override def span(p: A => Boolean) = splitAt(countWhile(p)) - @deprecated("use `ordering.lt` instead", "2.10.0") - def isSmaller(x: A, y: A) = compare(x,y) < 0 - def this()(implicit ordering: Ordering[A]) = this(null)(ordering) private def newSet(t: RB.Tree[A, Unit]) = new TreeSet[A](t) diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala deleted file mode 100644 index ed0c1b3736..0000000000 --- a/src/library/scala/collection/immutable/package.scala +++ /dev/null @@ -1,93 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.collection - -package immutable { - /** It looks like once upon a time this was used by ParRange, but - * since December 2010 in r23721 it is not used by anything. We - * should not have public API traits with seductive names like - * "RangeUtils" which are neither documented nor used. 
- */ - @deprecated("this class will be removed", "2.10.0") - trait RangeUtils[+Repr <: RangeUtils[Repr]] { - def start: Int - def end: Int - def step: Int - def inclusive: Boolean - def create(_start: Int, _end: Int, _step: Int, _inclusive: Boolean): Repr - - private final def inclusiveLast: Int = { - val size = end.toLong - start.toLong - (size / step.toLong * step.toLong + start.toLong).toInt - } - - final def _last: Int = ( - if (!inclusive) { - if (step == 1 || step == -1) end - step - else { - val inclast = inclusiveLast - if ((end.toLong - start.toLong) % step == 0) inclast - step else inclast - } - } - else if (step == 1 || step == -1) end - else inclusiveLast - ) - - final def _foreach[U](f: Int => U) = if (_length > 0) { - var i = start - val last = _last - while (i != last) { - f(i) - i += step - } - } - - final def _length: Int = ( - if (!inclusive) { - if (end > start == step > 0 && start != end) { - (_last.toLong - start.toLong) / step.toLong + 1 - } else 0 - }.toInt - else { - if (end > start == step > 0 || start == end) { - (_last.toLong - start.toLong) / step.toLong + 1 - } else 0 - }.toInt - ) - - final def _apply(idx: Int): Int = { - if (idx < 0 || idx >= _length) throw new IndexOutOfBoundsException(idx.toString) - start + idx * step - } - - private def locationAfterN(n: Int) = ( - if (n > 0) { - if (step > 0) - scala.math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt - else - scala.math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt - } - else start - ) - - final def _take(n: Int) = ( - if (n > 0 && _length > 0) - create(start, locationAfterN(n), step, true) - else - create(start, start, step, false) - ) - - final def _drop(n: Int) = create(locationAfterN(n), end, step, inclusive) - final def _slice(from: Int, until: Int) = _drop(from)._take(until - from) - } -} - -package object immutable { - /** Nothing left after I promoted RangeUtils to the package. */ -} diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index 84257c6e97..f59cbe878c 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -141,14 +141,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A]) b.result } - /** Returns the element with the highest priority in the queue, - * or throws an error if there is no element contained in the queue. - * - * @return the element with the highest priority. - */ - @deprecated("Use `head` instead.", "2.9.0") - def max: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty") - /** Returns the element with the highest priority in the queue, * or throws an error if there is no element contained in the queue. * diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala index 3bb5d32cf8..52a3755007 100644 --- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala +++ b/src/library/scala/collection/mutable/PriorityQueueProxy.scala @@ -75,14 +75,6 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority */ override def head: A = self.head - /** Returns the element with the highest priority in the queue, - * or throws an error if there is no element contained in the queue. - * - * @return the element with the highest priority. 
- */ - @deprecated("Use `head` instead.", "2.9.0") - override def max: A = self.max - /** Removes all elements from the queue. After this operation is completed, * the queue will be empty. */ diff --git a/test/files/run/bitsets.scala b/test/files/run/bitsets.scala index 27395683b4..bdeb1fd811 100644 --- a/test/files/run/bitsets.scala +++ b/test/files/run/bitsets.scala @@ -85,8 +85,8 @@ object TestImmutable { import scala.collection.immutable.BitSet val is0 = BitSet() - val is1 = BitSet.fromArray(Array()) - val is2 = BitSet.fromArray(Array(4)) + val is1 = BitSet.fromBitMask(Array()) + val is2 = BitSet.fromBitMask(Array(4)) val is3 = BitSet.empty Console.println("is0 = " + is0) diff --git a/test/files/run/t2873.check b/test/files/run/t2873.check index 9198280f61..209b679c07 100644 --- a/test/files/run/t2873.check +++ b/test/files/run/t2873.check @@ -1 +1 @@ -scala.collection.immutable.RedBlack.Empty$ +RedBlack.Empty$ diff --git a/test/files/run/t2873.scala b/test/files/run/t2873.scala index 8d48a8dbb4..3a3cc59b46 100644 --- a/test/files/run/t2873.scala +++ b/test/files/run/t2873.scala @@ -1,5 +1,10 @@ +abstract class RedBlack[A] extends Serializable { + abstract class Tree[+B] extends Serializable + case object Empty extends Tree[Nothing] +} + object Test { def main(args: Array[String]): Unit = { - println(classOf[scala.collection.immutable.RedBlack[_]].getMethod("Empty").getGenericReturnType) + println(classOf[RedBlack[_]].getMethod("Empty").getGenericReturnType) } } diff --git a/test/files/run/t5879.check b/test/files/run/t5879.check index b6cbda35a7..4bdf3f5fcf 100644 --- a/test/files/run/t5879.check +++ b/test/files/run/t5879.check @@ -1,16 +1,8 @@ Map(1 -> 1) 1 -Map(1 -> 1) -1 -(1,1) -Map(1 -> 1) -1 (1,1) Map(1 -> 1) 1 (1,2) Map(1 -> 2) 2 -(1,2) -Map(1 -> 2) -2 \ No newline at end of file diff --git a/test/files/run/t5879.scala b/test/files/run/t5879.scala index e1c07fc4c2..18dd94289d 100644 --- a/test/files/run/t5879.scala +++ b/test/files/run/t5879.scala @@ -17,10 +17,6 @@ object Test { val r = a.merged(b)(null) println(r) println(r(1)) - - val rold = a.merge(b) - println(rold) - println(rold(1)) } def resolveFirst() { @@ -34,10 +30,6 @@ object Test { val r = a.merged(b) { collision } println(r) println(r(1)) - - val rold = a.merge(b, collision) - println(rold) - println(rold(1)) } def resolveSecond() { @@ -51,10 +43,6 @@ object Test { val r = a.merged(b) { collision } println(r) println(r(1)) - - val rold = a.merge(b, collision) - println(rold) - println(rold(1)) } def resolveMany() { @@ -66,9 +54,6 @@ object Test { val r = a.merged(b) { collision } for ((k, v) <- r) assert(v == 100 + 2 * k, (k, v)) - - val rold = a.merge(b, collision) - for ((k, v) <- r) assert(v == 100 + 2 * k, (k, v)) } } diff --git a/test/files/scalacheck/redblack.scala b/test/files/scalacheck/redblack.scala deleted file mode 100644 index bbc6504f58..0000000000 --- a/test/files/scalacheck/redblack.scala +++ /dev/null @@ -1,213 +0,0 @@ -import org.scalacheck._ -import Prop._ -import Gen._ - -/* -Properties of a Red & Black Tree: - -A node is either red or black. -The root is black. (This rule is used in some definitions and not others. Since the -root can always be changed from red to black but not necessarily vice-versa this -rule has little effect on analysis.) -All leaves are black. -Both children of every red node are black. -Every simple path from a given node to any of its descendant leaves contains the same number of black nodes. 
-*/ - -abstract class RedBlackTest extends Properties("RedBlack") { - def minimumSize = 0 - def maximumSize = 5 - - object RedBlackTest extends scala.collection.immutable.RedBlack[String] { - def isSmaller(x: String, y: String) = x < y - } - - import RedBlackTest._ - - def nodeAt[A](tree: Tree[A], n: Int): Option[(String, A)] = if (n < tree.iterator.size && n >= 0) - Some(tree.iterator.drop(n).next) - else - None - - def treeContains[A](tree: Tree[A], key: String) = tree.iterator.map(_._1) contains key - - def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[Int]] = - if (level == 0) { - value(Empty) - } else { - for { - oddOrEven <- choose(0, 2) - tryRed = oddOrEven.sample.get % 2 == 0 // work around arbitrary[Boolean] bug - isRed = parentIsBlack && tryRed - nextLevel = if (isRed) level else level - 1 - left <- mkTree(nextLevel, !isRed, label + "L") - right <- mkTree(nextLevel, !isRed, label + "R") - } yield { - if (isRed) - RedTree(label + "N", 0, left, right) - else - BlackTree(label + "N", 0, left, right) - } - } - - def genTree = for { - depth <- choose(minimumSize, maximumSize + 1) - tree <- mkTree(depth) - } yield tree - - type ModifyParm - def genParm(tree: Tree[Int]): Gen[ModifyParm] - def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] - - def genInput: Gen[(Tree[Int], ModifyParm, Tree[Int])] = for { - tree <- genTree - parm <- genParm(tree) - } yield (tree, parm, modify(tree, parm)) -} - -trait RedBlackInvariants { - self: RedBlackTest => - - import RedBlackTest._ - - def rootIsBlack[A](t: Tree[A]) = t.isBlack - - def areAllLeavesBlack[A](t: Tree[A]): Boolean = t match { - case Empty => t.isBlack - case ne: NonEmpty[_] => List(ne.left, ne.right) forall areAllLeavesBlack - } - - def areRedNodeChildrenBlack[A](t: Tree[A]): Boolean = t match { - case RedTree(_, _, left, right) => List(left, right) forall (t => t.isBlack && areRedNodeChildrenBlack(t)) - case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack - case Empty => true - } - - def blackNodesToLeaves[A](t: Tree[A]): List[Int] = t match { - case Empty => List(1) - case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1) - case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves - } - - def areBlackNodesToLeavesEqual[A](t: Tree[A]): Boolean = t match { - case Empty => true - case ne: NonEmpty[_] => - ( - blackNodesToLeaves(ne).distinct.size == 1 - && areBlackNodesToLeavesEqual(ne.left) - && areBlackNodesToLeavesEqual(ne.right) - ) - } - - def orderIsPreserved[A](t: Tree[A]): Boolean = - t.iterator zip t.iterator.drop(1) forall { case (x, y) => isSmaller(x._1, y._1) } - - def setup(invariant: Tree[Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) => - invariant(newTree) - } - - property("root is black") = setup(rootIsBlack) - property("all leaves are black") = setup(areAllLeavesBlack) - property("children of red nodes are black") = setup(areRedNodeChildrenBlack) - property("black nodes are balanced") = setup(areBlackNodesToLeavesEqual) - property("ordering of keys is preserved") = setup(orderIsPreserved) -} - -object TestInsert extends RedBlackTest with RedBlackInvariants { - import RedBlackTest._ - - override type ModifyParm = Int - override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size + 1) - override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = tree update (generateKey(tree, parm), 0) - - def generateKey(tree: Tree[Int], parm: ModifyParm): 
String = nodeAt(tree, parm) match { - case Some((key, _)) => key.init.mkString + "MN" - case None => nodeAt(tree, parm - 1) match { - case Some((key, _)) => key.init.mkString + "RN" - case None => "N" - } - } - - property("update adds elements") = forAll(genInput) { case (tree, parm, newTree) => - treeContains(newTree, generateKey(tree, parm)) - } -} - -object TestModify extends RedBlackTest { - import RedBlackTest._ - - def newValue = 1 - override def minimumSize = 1 - override type ModifyParm = Int - override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size) - override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map { - case (key, _) => tree update (key, newValue) - } getOrElse tree - - property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) => - nodeAt(tree,parm) forall { case (key, _) => - newTree.iterator contains (key, newValue) - } - } -} - -object TestDelete extends RedBlackTest with RedBlackInvariants { - import RedBlackTest._ - - override def minimumSize = 1 - override type ModifyParm = Int - override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size) - override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map { - case (key, _) => tree delete key - } getOrElse tree - - property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) => - nodeAt(tree, parm) forall { case (key, _) => - !treeContains(newTree, key) - } - } -} - -object TestRange extends RedBlackTest with RedBlackInvariants { - import RedBlackTest._ - - override type ModifyParm = (Option[Int], Option[Int]) - override def genParm(tree: Tree[Int]): Gen[ModifyParm] = for { - from <- choose(0, tree.iterator.size) - to <- choose(0, tree.iterator.size) suchThat (from <=) - optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug - optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug - } yield (optionalFrom, optionalTo) - - override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = { - val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) - val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - tree range (from, to) - } - - property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) => - val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) - val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - ("lower boundary" |: (from forall ( key => newTree.iterator.map(_._1) forall (key <=)))) && - ("upper boundary" |: (to forall ( key => newTree.iterator.map(_._1) forall (key >)))) - } - - property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) => - val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) - val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - val filteredTree = (tree.iterator - .map(_._1) - .filter(key => from forall (key >=)) - .filter(key => to forall (key <)) - .toList) - filteredTree == newTree.iterator.map(_._1).toList - } -} - -object Test extends Properties("RedBlack") { - include(TestInsert) - include(TestModify) - include(TestDelete) - include(TestRange) -} - -- cgit v1.2.3 From f931833df8cc69d119f636d8a553941bf7ce2349 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 17 Jan 2013 20:38:24 +0100 Subject: SI-6811 Misc. removals in util, testing, io, ... 
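
Migration note: code still using the removed scala.util.MurmurHash can generally move to
scala.util.hashing.MurmurHash3, which stays in place (this commit only drops its deprecated
symmetricHash/traversableHash forwarders). A minimal sketch; the object name is illustrative
only and the seed constants are arbitrary, not values mandated by the library:

  import scala.util.hashing.MurmurHash3

  object HashMigration {
    def main(args: Array[String]): Unit = {
      val xs = List(1, 2, 3, 4, 5)
      // order-dependent hash of a collection; covers former uses of MurmurHash.traversableHash
      val ordered   = MurmurHash3.orderedHash(xs, 0xe73a8b15)
      // order-independent hash, e.g. for set-like data; covers former uses of MurmurHash.symmetricHash
      val unordered = MurmurHash3.unorderedHash(xs, 0xb592f7ae)
      println("ordered = " + ordered + ", unordered = " + unordered)
    }
  }
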
--- src/library/scala/Specializable.scala | 2 +- src/library/scala/SpecializableCompanion.scala | 14 - src/library/scala/io/BytePickle.scala | 318 ----------------------- src/library/scala/io/UTF8Codec.scala | 32 --- src/library/scala/math/BigInt.scala | 3 - src/library/scala/testing/Benchmark.scala | 114 -------- src/library/scala/testing/Show.scala | 75 ------ src/library/scala/util/Either.scala | 2 - src/library/scala/util/Marshal.scala | 50 ---- src/library/scala/util/MurmurHash.scala | 197 -------------- src/library/scala/util/hashing/MurmurHash3.scala | 8 - src/library/scala/util/matching/Regex.scala | 10 - test/files/jvm/manifests-new.scala | 34 ++- test/files/jvm/manifests-old.scala | 34 ++- test/files/neg/t6406-regextract.check | 9 +- test/files/pos/spec-arrays.scala | 41 +-- test/files/pos/spec-funs.scala | 9 +- 17 files changed, 82 insertions(+), 870 deletions(-) delete mode 100644 src/library/scala/SpecializableCompanion.scala delete mode 100644 src/library/scala/io/BytePickle.scala delete mode 100644 src/library/scala/io/UTF8Codec.scala delete mode 100644 src/library/scala/testing/Benchmark.scala delete mode 100644 src/library/scala/testing/Show.scala delete mode 100644 src/library/scala/util/Marshal.scala delete mode 100644 src/library/scala/util/MurmurHash.scala (limited to 'src') diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala index c7a6091a65..137598c28d 100644 --- a/src/library/scala/Specializable.scala +++ b/src/library/scala/Specializable.scala @@ -11,7 +11,7 @@ package scala /** A common supertype for companions of specializable types. * Should not be extended in user code. */ -trait Specializable extends SpecializableCompanion +trait Specializable object Specializable { // No type parameter in @specialized annotation. diff --git a/src/library/scala/SpecializableCompanion.scala b/src/library/scala/SpecializableCompanion.scala deleted file mode 100644 index 1a9ce71d2a..0000000000 --- a/src/library/scala/SpecializableCompanion.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** A common supertype for companion classes which specialization takes into account. - */ -@deprecated("Use Specializable instead", "2.10.0") -private[scala] trait SpecializableCompanion diff --git a/src/library/scala/io/BytePickle.scala b/src/library/scala/io/BytePickle.scala deleted file mode 100644 index 2c4a0bd2da..0000000000 --- a/src/library/scala/io/BytePickle.scala +++ /dev/null @@ -1,318 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.io - -import scala.collection.mutable - -/** - * Pickler combinators. - * Based on a Haskell library by Andrew Kennedy, - * see http://research.microsoft.com/~akenn/fun/. 
- * - * @author Philipp Haller - * @version 1.1 - */ -@deprecated("This class will be removed.", "2.10.0") -object BytePickle { - abstract class SPU[T] { - def appP(a: T, state: PicklerState): PicklerState - def appU(state: UnPicklerState): (T, UnPicklerState) - } - - def pickle[T](p: SPU[T], a: T): Array[Byte] = - p.appP(a, new PicklerState(new Array[Byte](0), new PicklerEnv)).stream - - def unpickle[T](p: SPU[T], stream: Array[Byte]): T = - p.appU(new UnPicklerState(stream, new UnPicklerEnv))._1 - - abstract class PU[T] { - def appP(a: T, state: Array[Byte]): Array[Byte] - def appU(state: Array[Byte]): (T, Array[Byte]) - } - - def upickle[T](p: PU[T], a: T): Array[Byte] = - p.appP(a, new Array[Byte](0)) - - def uunpickle[T](p: PU[T], stream: Array[Byte]): T = - p.appU(stream)._1 - - class PicklerEnv extends mutable.HashMap[Any, Int] { - private var cnt: Int = 64 - def nextLoc() = { cnt += 1; cnt } - } - - class UnPicklerEnv extends mutable.HashMap[Int, Any] { - private var cnt: Int = 64 - def nextLoc() = { cnt += 1; cnt } - } - - class PicklerState(val stream: Array[Byte], val dict: PicklerEnv) - class UnPicklerState(val stream: Array[Byte], val dict: UnPicklerEnv) - - abstract class RefDef - case class Ref() extends RefDef - case class Def() extends RefDef - - def refDef: PU[RefDef] = new PU[RefDef] { - def appP(b: RefDef, s: Array[Byte]): Array[Byte] = - b match { - case Ref() => Array.concat(s, Array[Byte](0)) - case Def() => Array.concat(s, Array[Byte](1)) - }; - def appU(s: Array[Byte]): (RefDef, Array[Byte]) = - if (s(0) == (0: Byte)) (Ref(), s.slice(1, s.length)) - else (Def(), s.slice(1, s.length)); - } - - val REF = 0 - val DEF = 1 - - def unat: PU[Int] = new PU[Int] { - def appP(n: Int, s: Array[Byte]): Array[Byte] = - Array.concat(s, nat2Bytes(n)); - def appU(s: Array[Byte]): (Int, Array[Byte]) = { - var num = 0 - def readNat: Int = { - var b = 0; - var x = 0; - do { - b = s(num) - num += 1 - x = (x << 7) + (b & 0x7f); - } while ((b & 0x80) != 0); - x - } - (readNat, s.slice(num, s.length)) - } - } - - def share[a](pa: SPU[a]): SPU[a] = new SPU[a] { - def appP(v: a, state: PicklerState): PicklerState = { - /* - - is there some value equal to v associated with a location l in the pickle environment? - - yes: write REF-tag to outstream together with l - - no: - write DEF-tag to outstream - record current location l of outstream - --> serialize value - add entry to pickle environment, mapping v onto l - */ - val pe = state.dict - pe.get(v) match { - case None => - val sPrime = refDef.appP(Def(), state.stream) - val l = pe.nextLoc() - - val sPrimePrime = pa.appP(v, new PicklerState(sPrime, pe)) - - pe.update(v, l) - - return sPrimePrime - case Some(l) => - val sPrime = refDef.appP(Ref(), state.stream) - - return new PicklerState(unat.appP(l, sPrime), pe) - } - } - def appU(state: UnPicklerState): (a, UnPicklerState) = { - /* - - first, read tag (i.e. 
DEF or REF) - - if REF: - read location l - look up resulting value in unpickler environment - - if DEF: - record location l of input stream - --> deserialize value v with argument deserializer - add entry to unpickler environment, mapping l onto v - */ - val upe = state.dict - val res = refDef.appU(state.stream) - res._1 match { - case Def() => - val l = upe.nextLoc - val res2 = pa.appU(new UnPicklerState(res._2, upe)) - upe.update(l, res2._1) - return res2 - case Ref() => - val res2 = unat.appU(res._2) // read location - upe.get(res2._1) match { // lookup value in unpickler env - case None => throw new IllegalArgumentException("invalid unpickler environment") - case Some(v) => return (v.asInstanceOf[a], new UnPicklerState(res2._2, upe)) - } - } - } - } - - def ulift[t](x: t): PU[t] = new PU[t] { - def appP(a: t, state: Array[Byte]): Array[Byte] = - if (x != a) throw new IllegalArgumentException("value to be pickled (" + a + ") != " + x) - else state; - def appU(state: Array[Byte]) = (x, state) - } - - def lift[t](x: t): SPU[t] = new SPU[t] { - def appP(a: t, state: PicklerState): PicklerState = - if (x != a) { /*throw new IllegalArgumentException("value to be pickled (" + a + ") != " + x);*/ state } - else state; - def appU(state: UnPicklerState) = (x, state) - } - - def usequ[t,u](f: u => t, pa: PU[t], k: t => PU[u]): PU[u] = new PU[u] { - def appP(b: u, s: Array[Byte]): Array[Byte] = { - val a = f(b) - val sPrime = pa.appP(a, s) - val pb = k(a) - val sPrimePrime = pb.appP(b, sPrime) - sPrimePrime - } - def appU(s: Array[Byte]): (u, Array[Byte]) = { - val resPa = pa.appU(s) - val a = resPa._1 - val sPrime = resPa._2 - val pb = k(a) - pb.appU(sPrime) - } - } - - def sequ[t,u](f: u => t, pa: SPU[t], k: t => SPU[u]): SPU[u] = new SPU[u] { - def appP(b: u, s: PicklerState): PicklerState = { - val a = f(b) - val sPrime = pa.appP(a, s) - val pb = k(a) - pb.appP(b, sPrime) - } - def appU(s: UnPicklerState): (u, UnPicklerState) = { - val resPa = pa.appU(s) - val a = resPa._1 - val sPrime = resPa._2 - val pb = k(a) - pb.appU(sPrime) - } - } - - def upair[a,b](pa: PU[a], pb: PU[b]): PU[(a,b)] = { - def fst(p: (a,b)): a = p._1 - def snd(p: (a,b)): b = p._2 - usequ(fst, pa, (x: a) => usequ(snd, pb, (y: b) => ulift((x, y)))) - } - - def pair[a,b](pa: SPU[a], pb: SPU[b]): SPU[(a,b)] = { - def fst(p: (a,b)): a = p._1 - def snd(p: (a,b)): b = p._2 - sequ(fst, pa, (x: a) => sequ(snd, pb, (y: b) => lift((x, y)))) - } - - def triple[a,b,c](pa: SPU[a], pb: SPU[b], pc: SPU[c]): SPU[(a,b,c)] = { - def fst(p: (a,b,c)): a = p._1 - def snd(p: (a,b,c)): b = p._2 - def trd(p: (a,b,c)): c = p._3 - - sequ(fst, pa, - (x: a) => sequ(snd, pb, - (y: b) => sequ(trd, pc, - (z: c) => lift((x, y, z))))) - } - - def uwrap[a,b](i: a => b, j: b => a, pa: PU[a]): PU[b] = - usequ(j, pa, (x: a) => ulift(i(x))) - - def wrap[a,b](i: a => b, j: b => a, pa: SPU[a]): SPU[b] = - sequ(j, pa, (x: a) => lift(i(x))) - - def appendByte(a: Array[Byte], b: Int): Array[Byte] = - Array.concat(a, Array(b.toByte)) - - def nat2Bytes(x: Int): Array[Byte] = { - val buf = new mutable.ArrayBuffer[Byte] - def writeNatPrefix(x: Int) { - val y = x >>> 7; - if (y != 0) writeNatPrefix(y); - buf += ((x & 0x7f) | 0x80).asInstanceOf[Byte]; - } - val y = x >>> 7; - if (y != 0) writeNatPrefix(y); - buf += (x & 0x7f).asInstanceOf[Byte]; - buf.toArray - } - - def nat: SPU[Int] = new SPU[Int] { - def appP(n: Int, s: PicklerState): PicklerState = { - new PicklerState(Array.concat(s.stream, nat2Bytes(n)), s.dict); - } - def appU(s: UnPicklerState): 
(Int,UnPicklerState) = { - var num = 0 - def readNat: Int = { - var b = 0 - var x = 0 - do { - b = s.stream(num) - num += 1 - x = (x << 7) + (b & 0x7f); - } while ((b & 0x80) != 0); - x - } - (readNat, new UnPicklerState(s.stream.slice(num, s.stream.length), s.dict)) - } - } - - def byte: SPU[Byte] = new SPU[Byte] { - def appP(b: Byte, s: PicklerState): PicklerState = - new PicklerState(Array.concat(s.stream, Array(b)), s.dict) - def appU(s: UnPicklerState): (Byte, UnPicklerState) = - (s.stream(0), new UnPicklerState(s.stream.slice(1, s.stream.length), s.dict)); - } - - def string: SPU[String] = share(wrap( - (a: Array[Byte]) => (Codec fromUTF8 a).mkString, - (s: String) => Codec toUTF8 s, - bytearray - )) - - def bytearray: SPU[Array[Byte]] = { - wrap((l:List[Byte]) => l.toArray, (_.toList), list(byte)) - } - - def bool: SPU[Boolean] = { - def toEnum(b: Boolean) = if (b) 1 else 0 - def fromEnum(n: Int) = if (n == 0) false else true - wrap(fromEnum, toEnum, nat) - } - - def ufixedList[A](pa: PU[A])(n: Int): PU[List[A]] = { - def pairToList(p: (A, List[A])): List[A] = - p._1 :: p._2; - def listToPair(l: List[A]): (A, List[A]) = - (l: @unchecked) match { case x :: xs => (x, xs) } - - if (n == 0) ulift(Nil) - else - uwrap(pairToList, listToPair, upair(pa, ufixedList(pa)(n-1))) - } - - def fixedList[a](pa: SPU[a])(n: Int): SPU[List[a]] = { - def pairToList(p: (a,List[a])): List[a] = - p._1 :: p._2; - def listToPair(l: List[a]): (a,List[a]) = - (l: @unchecked) match { case x :: xs => (x, xs) } - - if (n == 0) lift(Nil) - else - wrap(pairToList, listToPair, pair(pa, fixedList(pa)(n-1))) - } - - def list[a](pa: SPU[a]): SPU[List[a]] = - sequ((l: List[a])=>l.length, nat, fixedList(pa)); - - def ulist[a](pa: PU[a]): PU[List[a]] = - usequ((l:List[a]) => l.length, unat, ufixedList(pa)); - - def data[a](tag: a => Int, ps: List[()=>SPU[a]]): SPU[a] = - sequ(tag, nat, (x: Int)=> ps.apply(x)()); -} diff --git a/src/library/scala/io/UTF8Codec.scala b/src/library/scala/io/UTF8Codec.scala deleted file mode 100644 index e4c2145153..0000000000 --- a/src/library/scala/io/UTF8Codec.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.io - -/** - * @author Martin Odersky - * @version 1.0, 04/10/2004 - */ -@deprecated("This class will be removed.", "2.10.0") -object UTF8Codec { - final val UNI_REPLACEMENT_CHAR: Int = 0x0000FFFD - final val UNI_REPLACEMENT_BYTES = Array[Byte](-17, -65, -67) - - // Note, from http://unicode.org/faq/utf_bom.html#utf8-5 - // - // A different issue arises if an unpaired surrogate is encountered when converting - // ill-formed UTF-16 data. By represented such an unpaired surrogate on its own as a - // 3-byte sequence, the resulting UTF-8 data stream would become ill-formed. - // While it faithfully reflects the nature of the input, Unicode conformance - // requires that encoding form conversion always results in valid data stream. - // Therefore a converter must treat this as an error. 
- // - // Some useful locations: - // http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt -} diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index 0cddd71721..02c591965d 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -289,9 +289,6 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo */ def signum: Int = this.bigInteger.signum() - @deprecated("Use ~bigInt (the unary_~ method) instead", "2.10.0") - def ~ : BigInt = ~this - /** Returns the bitwise complement of this BigInt */ def unary_~ : BigInt = new BigInt(this.bigInteger.not()) diff --git a/src/library/scala/testing/Benchmark.scala b/src/library/scala/testing/Benchmark.scala deleted file mode 100644 index 66d7d448eb..0000000000 --- a/src/library/scala/testing/Benchmark.scala +++ /dev/null @@ -1,114 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.testing - -import scala.compat.Platform - -/** `Benchmark` can be used to quickly turn an existing class into a - * benchmark. Here is a short example: - * {{{ - * object sort1 extends Sorter with Benchmark { - * def run = sort(List.range(1, 1000)) - * } - * }}} - * The `run` method has to be defined by the user, who will perform the - * timed operation there. Run the benchmark as follows: - * {{{ - * > scala sort1 5 - * }}} - * This will run the benchmark 5 times, forcing a garbage collection - * between runs, and printing the execution times to stdout. - * - * It is also possible to add a multiplier, so - * {{{ - * > scala sort1 5 10 - * }}} - * will run the entire benchmark 10 times, each time for 5 runs. - * - * @author Iulian Dragos, Burak Emir - */ -@deprecated("This class will be removed.", "2.10.0") -trait Benchmark { - - /** this method should be implemented by the concrete benchmark. - * This method is called by the benchmarking code for a number of times. - * The GC is called between "multiplier" calls to run, right after tear - * down. - * - * @see setUp - * @see tearDown - */ - def run() - - var multiplier = 1 - - /** Run the benchmark the specified number of times and return a list with - * the execution times in milliseconds in reverse order of the execution. - */ - def runBenchmark(noTimes: Int): List[Long] = - for (i <- List.range(1, noTimes + 1)) yield { - setUp - val startTime = Platform.currentTime - var i = 0; while (i < multiplier) { - run() - i += 1 - } - val stopTime = Platform.currentTime - tearDown - Platform.collectGarbage - - stopTime - startTime - } - - /** Prepare any data needed by the benchmark, but whose execution time - * should not be measured. This method is run before each call to the - * benchmark payload, 'run'. - */ - def setUp() {} - - /** Perform cleanup operations after each 'run'. For micro benchmarks, - * think about using the result of 'run' in a way that prevents the JVM - * to dead-code eliminate the whole 'run' method. For instance, print or - * write the results to a file. The execution time of this method is not - * measured. - */ - def tearDown() {} - - /** a string that is written at the beginning of the output line - * that contains the timings. By default, this is the class name. - */ - def prefix: String = getClass().getName() - - /** - * The entry point. 
It takes two arguments: - * - argument `n` is the number of consecutive runs - * - optional argument `mult` specifies that the `n` runs are repeated - * `mult` times. - */ - def main(args: Array[String]) { - if (args.length > 0) { - val logFile = new java.io.OutputStreamWriter(System.out) - if (args.length > 1) multiplier = args(1).toInt - logFile.write(prefix) - for (t <- runBenchmark(args(0).toInt)) - logFile.write("\t" + t) - - logFile.write(Platform.EOL) - logFile.flush() - } else { - println("Usage: scala benchmarks.program ") - println(" or: scala benchmarks.program ") - println(""" - The benchmark is run times, forcing a garbage collection between runs. The optional - causes the benchmark to be repeated times, each time for - executions. - """) - } - } -} diff --git a/src/library/scala/testing/Show.scala b/src/library/scala/testing/Show.scala deleted file mode 100644 index 9376e26db4..0000000000 --- a/src/library/scala/testing/Show.scala +++ /dev/null @@ -1,75 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.testing - -/** Classes inheriting trait `Show` can test their member methods using the - * notation `meth(arg,,1,,, ..., arg,,n,,)`, where `meth` is the name of - * the method and `arg,,1,,,...,arg,,n,,` are the arguments. - * - * The only difference to a normal method call is the leading quote - * character (`'`). A quoted method call like the one above will produces - * a legible diagnostic to be printed on [[scala.Console]]. - * - * It is of the form - * - * `meth(arg,,1,,, ..., arg,,n,,)` gives `<result>` - * - * where `<result>` is the result of evaluating the call. - * - */ -@deprecated("This class will be removed.", "2.10.0") -trait Show { - - /** An implicit definition that adds an apply method to Symbol which forwards to `test`. - * Prints out diagnostics of method applications. - */ - implicit class SymApply(f: Symbol) { - def apply[A](args: A*) { - println(test(f, args: _*)) - } - } - - @deprecated("use SymApply instead", "2.10.0") - def symApply(sym: Symbol): SymApply = new SymApply(sym) - - /** Apply method with name of given symbol `f` to given arguments and return - * a result diagnostics. 
- */ - def test[A](f: Symbol, args: A*): String = { - val args1 = args map (_.asInstanceOf[AnyRef]) - def testMethod(meth: java.lang.reflect.Method): String = - f.name+"("+(args mkString ",")+") gives "+ - { - try { - meth.invoke(this, args1: _*) - } catch { - case ex: IllegalAccessException => ex - case ex: IllegalArgumentException => ex - case ex: java.lang.reflect.InvocationTargetException => ex - } - } - getClass.getMethods.toList filter (_.getName == f.name) match { - case List() => - f.name+" is not defined" - case List(m) => - testMethod(m) - case ms => // multiple methods, disambiguate by number of arguments - ms filter (_.getParameterTypes.length == args.length) match { - case List() => - testMethod(ms.head) // go ahead anyway, to get an exception - case List(m) => - testMethod(m) - case ms => - "cannot disambiguate between multiple implementations of "+f.name - } - } - } -} diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index dba11ed73c..864d8953c4 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -221,8 +221,6 @@ object Either { case Right(a) => a } } - @deprecated("use MergeableEither instead", "2.10.0") - def either2mergeable[A](x: Either[A, A]): MergeableEither[A] = new MergeableEither(x) /** * Projects an `Either` into a `Left`. diff --git a/src/library/scala/util/Marshal.scala b/src/library/scala/util/Marshal.scala deleted file mode 100644 index b78ed2140e..0000000000 --- a/src/library/scala/util/Marshal.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2008-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.util - -/** - * Marshalling of Scala objects using Scala tags. - * - * @author Stephane Micheloud - * @version 1.0 - */ -@deprecated("This class will be removed", "2.10.0") -object Marshal { - import java.io._ - import scala.reflect.ClassTag - - def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = { - val ba = new ByteArrayOutputStream(512) - val out = new ObjectOutputStream(ba) - out.writeObject(t) - out.writeObject(o) - out.close() - ba.toByteArray() - } - - @throws(classOf[IOException]) - @throws(classOf[ClassCastException]) - @throws(classOf[ClassNotFoundException]) - def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = { - val in = new ObjectInputStream(new ByteArrayInputStream(buffer)) - val found = in.readObject.asInstanceOf[ClassTag[_]] - try { - found.runtimeClass.asSubclass(expected.runtimeClass) - in.readObject.asInstanceOf[A] - } catch { - case _: ClassCastException => - in.close() - throw new ClassCastException("type mismatch;"+ - "\n found : "+found+ - "\n required: "+expected) - } - } -} diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala deleted file mode 100644 index a5bc8faf8d..0000000000 --- a/src/library/scala/util/MurmurHash.scala +++ /dev/null @@ -1,197 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.util - -/** An implementation of Austin Appleby's MurmurHash 3.0 algorithm - * (32 bit version); reference: http://code.google.com/p/smhasher - * - * This is the hash used by collections and case classes (including - * tuples). 
- * - * @author Rex Kerr - * @version 2.9 - * @since 2.9 - */ - -import java.lang.Integer.{ rotateLeft => rotl } -import scala.collection.Iterator - -/** A class designed to generate well-distributed non-cryptographic - * hashes. It is designed to be passed to a collection's foreach method, - * or can take individual hash values with append. Its own hash code is - * set equal to the hash code of whatever it is hashing. - */ -@deprecated("Use the object MurmurHash3 instead.", "2.10.0") -class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T => Unit) { - import MurmurHash._ - - private var h = startHash(seed) - private var c = hiddenMagicA - private var k = hiddenMagicB - private var hashed = false - private var hashvalue = h - - /** Begin a new hash using the same seed. */ - def reset() { - h = startHash(seed) - c = hiddenMagicA - k = hiddenMagicB - hashed = false - } - - /** Incorporate the hash value of one item. */ - def apply(t: T) { - h = extendHash(h,t.##,c,k) - c = nextMagicA(c) - k = nextMagicB(k) - hashed = false - } - - /** Incorporate a known hash value. */ - def append(i: Int) { - h = extendHash(h,i,c,k) - c = nextMagicA(c) - k = nextMagicB(k) - hashed = false - } - - /** Retrieve the hash value */ - def hash = { - if (!hashed) { - hashvalue = finalizeHash(h) - hashed = true - } - hashvalue - } - override def hashCode = hash -} - -/** An object designed to generate well-distributed non-cryptographic - * hashes. It is designed to hash a collection of integers; along with - * the integers to hash, it generates two magic streams of integers to - * increase the distribution of repetitive input sequences. Thus, - * three methods need to be called at each step (to start and to - * incorporate a new integer) to update the values. Only one method - * needs to be called to finalize the hash. - */ -@deprecated("Use the object MurmurHash3 instead.", "2.10.0") -object MurmurHash { - // Magic values used for MurmurHash's 32 bit hash. - // Don't change these without consulting a hashing expert! - final private val visibleMagic = 0x971e137b - final private val hiddenMagicA = 0x95543787 - final private val hiddenMagicB = 0x2ad7eb25 - final private val visibleMixer = 0x52dce729 - final private val hiddenMixerA = 0x7b7d159c - final private val hiddenMixerB = 0x6bce6396 - final private val finalMixer1 = 0x85ebca6b - final private val finalMixer2 = 0xc2b2ae35 - - // Arbitrary values used for hashing certain classes - final private val seedString = 0xf7ca7fd2 - final private val seedArray = 0x3c074a61 - - /** The first 23 magic integers from the first stream are stored here */ - val storedMagicA = - Iterator.iterate(hiddenMagicA)(nextMagicA).take(23).toArray - - /** The first 23 magic integers from the second stream are stored here */ - val storedMagicB = - Iterator.iterate(hiddenMagicB)(nextMagicB).take(23).toArray - - /** Begin a new hash with a seed value. */ - def startHash(seed: Int) = seed ^ visibleMagic - - /** The initial magic integers in the first stream. */ - def startMagicA = hiddenMagicA - - /** The initial magic integer in the second stream. */ - def startMagicB = hiddenMagicB - - /** Incorporates a new value into an existing hash. 
- * - * @param hash the prior hash value - * @param value the new value to incorporate - * @param magicA a magic integer from the stream - * @param magicB a magic integer from a different stream - * @return the updated hash value - */ - def extendHash(hash: Int, value: Int, magicA: Int, magicB: Int) = { - (hash ^ rotl(value*magicA,11)*magicB)*3 + visibleMixer - } - - /** Given a magic integer from the first stream, compute the next */ - def nextMagicA(magicA: Int) = magicA*5 + hiddenMixerA - - /** Given a magic integer from the second stream, compute the next */ - def nextMagicB(magicB: Int) = magicB*5 + hiddenMixerB - - /** Once all hashes have been incorporated, this performs a final mixing */ - def finalizeHash(hash: Int) = { - var i = (hash ^ (hash>>>16)) - i *= finalMixer1 - i ^= (i >>> 13) - i *= finalMixer2 - i ^= (i >>> 16) - i - } - - /** Compute a high-quality hash of an array */ - def arrayHash[@specialized T](a: Array[T]) = { - var h = startHash(a.length * seedArray) - var c = hiddenMagicA - var k = hiddenMagicB - var j = 0 - while (j < a.length) { - h = extendHash(h, a(j).##, c, k) - c = nextMagicA(c) - k = nextMagicB(k) - j += 1 - } - finalizeHash(h) - } - - /** Compute a high-quality hash of a string */ - def stringHash(s: String) = { - var h = startHash(s.length * seedString) - var c = hiddenMagicA - var k = hiddenMagicB - var j = 0 - while (j+1 < s.length) { - val i = (s.charAt(j)<<16) + s.charAt(j+1); - h = extendHash(h,i,c,k) - c = nextMagicA(c) - k = nextMagicB(k) - j += 2 - } - if (j < s.length) h = extendHash(h,s.charAt(j),c,k) - finalizeHash(h) - } - - /** Compute a hash that is symmetric in its arguments--that is, - * where the order of appearance of elements does not matter. - * This is useful for hashing sets, for example. 
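The order-independent `symmetricHash` described above, like the rest of this deleted object, has a successor in `scala.util.hashing.MurmurHash3`, which is where the deprecation message on the class points. A hedged sketch of the replacement calls (the demo object name is illustrative; the `MurmurHash3` entry points themselves are part of the standard library):

{{{
import scala.util.hashing.MurmurHash3

object MurmurHash3Demo extends App {
  println(MurmurHash3.orderedHash(List(1, 2, 3)))   // order-dependent, successor of traversableHash
  println(MurmurHash3.unorderedHash(Set(1, 2, 3)))  // order-independent, successor of symmetricHash
  println(MurmurHash3.stringHash("hello"))          // replaces MurmurHash.stringHash
  println(MurmurHash3.arrayHash(Array(1, 2, 3)))    // replaces MurmurHash.arrayHash
}
}}}

The next diff in this commit removes the deprecated `symmetricHash`/`traversableHash` forwarders from `MurmurHash3` itself, leaving `unorderedHash` and `orderedHash` as the supported names.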
- */ - def symmetricHash[T](xs: scala.collection.TraversableOnce[T], seed: Int) = { - var a,b,n = 0 - var c = 1 - xs.seq.foreach(i => { - val h = i.## - a += h - b ^= h - if (h != 0) c *= h - n += 1 - }) - var h = startHash(seed * n) - h = extendHash(h, a, storedMagicA(0), storedMagicB(0)) - h = extendHash(h, b, storedMagicA(1), storedMagicB(1)) - h = extendHash(h, c, storedMagicA(2), storedMagicB(2)) - finalizeHash(h) - } -} diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala index 0aa7e6f1cb..5c74bc5a2e 100644 --- a/src/library/scala/util/hashing/MurmurHash3.scala +++ b/src/library/scala/util/hashing/MurmurHash3.scala @@ -274,12 +274,4 @@ object MurmurHash3 extends MurmurHash3 { finalizeHash(h, n) } */ - - @deprecated("Use unorderedHash", "2.10.0") - final def symmetricHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = symmetricSeed): Int = - unorderedHash(xs.seq, seed) - - @deprecated("Use orderedHash", "2.10.0") - final def traversableHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = traversableSeed): Int = - orderedHash(xs.seq, seed) } diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 830710432c..7af75173d3 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -204,16 +204,6 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends else if (m.matcher.pattern == this.pattern) Some(1 to m.groupCount map m.group) else unapplySeq(m.matched) - @deprecated("Extracting a match result from anything but a CharSequence or Match is deprecated", "2.10.0") - def unapplySeq(target: Any): Option[List[String]] = target match { - case s: CharSequence => - val m = pattern matcher s - if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group) - else None - case m: Match => unapplySeq(m.matched) - case _ => None - } - // @see UnanchoredRegex protected def runMatcher(m: Matcher) = m.matches() diff --git a/test/files/jvm/manifests-new.scala b/test/files/jvm/manifests-new.scala index f730be67bb..3937fdec69 100644 --- a/test/files/jvm/manifests-new.scala +++ b/test/files/jvm/manifests-new.scala @@ -56,7 +56,7 @@ object Test1 extends TestUtil { } object Test2 { - import scala.util.Marshal._ + import Marshal._ println("()="+load[Unit](dump(()))) println("true="+load[Boolean](dump(true))) println("a="+load[Char](dump('a'))) @@ -88,6 +88,38 @@ object Test2 { println() } +object Marshal { + import java.io._ + import scala.reflect.ClassTag + + def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = { + val ba = new ByteArrayOutputStream(512) + val out = new ObjectOutputStream(ba) + out.writeObject(t) + out.writeObject(o) + out.close() + ba.toByteArray() + } + + @throws(classOf[IOException]) + @throws(classOf[ClassCastException]) + @throws(classOf[ClassNotFoundException]) + def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = { + val in = new ObjectInputStream(new ByteArrayInputStream(buffer)) + val found = in.readObject.asInstanceOf[ClassTag[_]] + try { + found.runtimeClass.asSubclass(expected.runtimeClass) + in.readObject.asInstanceOf[A] + } catch { + case _: ClassCastException => + in.close() + throw new ClassCastException("type mismatch;"+ + "\n found : "+found+ + "\n required: "+expected) + } + } +} + trait TestUtil { import java.io._ def write[A](o: A): Array[Byte] = { diff --git a/test/files/jvm/manifests-old.scala b/test/files/jvm/manifests-old.scala index 
241966fd9d..bb1928f094 100644 --- a/test/files/jvm/manifests-old.scala +++ b/test/files/jvm/manifests-old.scala @@ -55,7 +55,7 @@ object Test1 extends TestUtil { } object Test2 { - import scala.util.Marshal._ + import Marshal._ println("()="+load[Unit](dump(()))) println("true="+load[Boolean](dump(true))) println("a="+load[Char](dump('a'))) @@ -87,6 +87,38 @@ object Test2 { println() } +object Marshal { + import java.io._ + import scala.reflect.ClassTag + + def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = { + val ba = new ByteArrayOutputStream(512) + val out = new ObjectOutputStream(ba) + out.writeObject(t) + out.writeObject(o) + out.close() + ba.toByteArray() + } + + @throws(classOf[IOException]) + @throws(classOf[ClassCastException]) + @throws(classOf[ClassNotFoundException]) + def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = { + val in = new ObjectInputStream(new ByteArrayInputStream(buffer)) + val found = in.readObject.asInstanceOf[ClassTag[_]] + try { + found.runtimeClass.asSubclass(expected.runtimeClass) + in.readObject.asInstanceOf[A] + } catch { + case _: ClassCastException => + in.close() + throw new ClassCastException("type mismatch;"+ + "\n found : "+found+ + "\n required: "+expected) + } + } +} + trait TestUtil { import java.io._ def write[A](o: A): Array[Byte] = { diff --git a/test/files/neg/t6406-regextract.check b/test/files/neg/t6406-regextract.check index 19425a68b0..4fea66f760 100644 --- a/test/files/neg/t6406-regextract.check +++ b/test/files/neg/t6406-regextract.check @@ -1,6 +1,7 @@ -t6406-regextract.scala:4: warning: method unapplySeq in class Regex is deprecated: Extracting a match result from anything but a CharSequence or Match is deprecated +t6406-regextract.scala:4: error: cannot resolve overloaded unapply List(1) collect { case r(i) => i } ^ -error: No warnings can be incurred under -Xfatal-warnings. 
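The check-file change here follows from the `Regex.unapplySeq(target: Any)` overload removed a few diffs above: with only the `CharSequence` and `Match` overloads left, applying a regex extractor to a non-character value is now a compile error instead of a deprecation warning, which is why the expected output switches from one warning under -Xfatal-warnings to two errors. A small sketch of what still works after the change (the regex and values are illustrative):

{{{
object RegexExtractDemo extends App {
  val r = "(\\d+)".r
  // matching a String (a CharSequence) is still supported
  "42" match {
    case r(digits) => println("matched: " + digits)
    case _         => println("no match")
  }
  // List(1) collect { case r(i) => i }   // now rejected at compile time, as the check file shows
}
}}}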
-one warning found -one error found +t6406-regextract.scala:4: error: not found: value i + List(1) collect { case r(i) => i } + ^ +two errors found diff --git a/test/files/pos/spec-arrays.scala b/test/files/pos/spec-arrays.scala index 84f6eef071..7ae2cb1efb 100644 --- a/test/files/pos/spec-arrays.scala +++ b/test/files/pos/spec-arrays.scala @@ -177,38 +177,11 @@ class ScalaSpec3Test extends Test { } } -object TestJava extends scala.testing.Benchmark { - def run() { - (new JavaTest).run() - } -} - -object TestSpec extends scala.testing.Benchmark { - def run() { - (new ScalaSpecTest).run() - } -} - -object TestSpec2 extends scala.testing.Benchmark { - def run() { - (new ScalaSpec2Test).run() - } -} - -object TestGen extends scala.testing.Benchmark { - def run() { - (new ScalaGenTest).run() - } -} - -object TestWrap extends scala.testing.Benchmark { - def run() { - (new ScalaWrapTest).run() - } -} - -object TestSpec3 extends scala.testing.Benchmark { - def run() { - (new ScalaSpec3Test).run() - } +object TestRunner { + (new JavaTest).run() + (new ScalaSpecTest).run() + (new ScalaSpec2Test).run() + (new ScalaGenTest).run() + (new ScalaWrapTest).run() + (new ScalaSpec3Test).run() } diff --git a/test/files/pos/spec-funs.scala b/test/files/pos/spec-funs.scala index 611ec0ef62..b9acbe171a 100644 --- a/test/files/pos/spec-funs.scala +++ b/test/files/pos/spec-funs.scala @@ -54,10 +54,7 @@ final class ClosureTest { } } -object TestInt extends scala.testing.Benchmark { - def run() = (new IntTest).run() -} - -object TestClosure extends scala.testing.Benchmark { - def run() = (new ClosureTest).run() +object TestRunner { + (new IntTest).run() + (new ClosureTest).run() } -- cgit v1.2.3 From 684f549372281219fb99f75dc0ee10ae1d5505e3 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 17 Jan 2013 20:22:43 +0100 Subject: SI-6811 Remove the scala.annotation.target package --- src/library/scala/annotation/target/package.scala | 29 ----------------------- 1 file changed, 29 deletions(-) delete mode 100644 src/library/scala/annotation/target/package.scala (limited to 'src') diff --git a/src/library/scala/annotation/target/package.scala b/src/library/scala/annotation/target/package.scala deleted file mode 100644 index ac2836c0a8..0000000000 --- a/src/library/scala/annotation/target/package.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -package object target { - @deprecated("Use `@scala.annotation.meta.beanGetter` instead", "2.10.0") - type beanGetter = scala.annotation.meta.beanGetter - - @deprecated("Use `@scala.annotation.meta.beanSetter` instead", "2.10.0") - type beanSetter = scala.annotation.meta.beanSetter - - @deprecated("Use `@scala.annotation.meta.field` instead", "2.10.0") - type field = scala.annotation.meta.field - - @deprecated("Use `@scala.annotation.meta.getter` instead", "2.10.0") - type getter = scala.annotation.meta.getter - - @deprecated("Use `@scala.annotation.meta.param` instead", "2.10.0") - type param = scala.annotation.meta.param - - @deprecated("Use `@scala.annotation.meta.setter` instead", "2.10.0") - type setter = scala.annotation.meta.setter -} -- cgit v1.2.3 From 98d3368ef037c47fb41c22fe2d28117c24f29d97 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Fri, 18 Jan 2013 15:50:57 +0100 Subject: SI-6811 Remove scala.ScalaObject --- 
src/library/scala/ScalaObject.scala | 16 ---------------- 1 file changed, 16 deletions(-) delete mode 100644 src/library/scala/ScalaObject.scala (limited to 'src') diff --git a/src/library/scala/ScalaObject.scala b/src/library/scala/ScalaObject.scala deleted file mode 100644 index f67dc3a6c5..0000000000 --- a/src/library/scala/ScalaObject.scala +++ /dev/null @@ -1,16 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala - -/** Until scala 2.10.0 this marker trait was added to - * scala-compiled classes. Now it only exists for backward - * compatibility. - */ -@deprecated("ScalaObject will be removed", "2.10.0") -trait ScalaObject -- cgit v1.2.3 From 8d4402d839c8e01413a411752d1d0ab95378661b Mon Sep 17 00:00:00 2001 From: Dan Hopkins Date: Sat, 19 Jan 2013 10:02:40 -0700 Subject: Remove the term "pimp" from the repository Small terminology change aimed at improving inclusion. --- .../doc/model/ModelFactoryImplicitSupport.scala | 14 +- .../scala/tools/nsc/interactive/Global.scala | 2 +- src/library/scala/Predef.scala | 2 +- test/files/pos/t3864/tuples_1.scala | 36 ++--- test/files/pos/t5809.scala | 2 +- test/files/pos/t5877.scala | 4 +- test/files/pos/t5877b.scala | 2 +- test/scaladoc/resources/implicits-base-res.scala | 80 +++++------ test/scaladoc/run/implicits-base.scala | 148 ++++++++++----------- 9 files changed, 145 insertions(+), 145 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index 5d5d7d483c..c00afee064 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -65,7 +65,7 @@ trait ModelFactoryImplicitSupport { * class A[T] * class B extends A[Int] * class C extends A[String] - * implicit def pimpA[T: Numeric](a: A[T]): D + * implicit def enrichA[T: Numeric](a: A[T]): D * }}} * For B, no constraints are generated as Numeric[Int] is already in the default scope. On the other hand, for the * conversion from C to D, depending on -implicits-show-all, the conversion can: @@ -121,13 +121,13 @@ trait ModelFactoryImplicitSupport { * What? in details: * - say we start from a class A[T1, T2, T3, T4] * - we have an implicit function (view) in scope: - * def pimpA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): PimpedA - * - A is converted to PimpedA ONLY if a couple of constraints are satisfied: + * def enrichA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): EnrichedA + * - A is converted to EnrichedA ONLY if a couple of constraints are satisfied: * * T1 must be equal to Int * * T2 must be equal to Foo[Bar[X]] * * T3 must be upper bounded by Long * * there must be evidence of Numeric[T4] and a TypeTag[T4] within scope - * - the final type is PimpedA and A therefore inherits a couple of members from pimpedA + * - the final type is EnrichedA and A therefore inherits a couple of members from enrichA * * How? 
* some notes: @@ -495,11 +495,11 @@ trait ModelFactoryImplicitSupport { * returns the simplified type of the view * * for the example view: - * implicit def pimpMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T] + * implicit def enrichMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T] * the implicit view result type is: - * (a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T] + * (a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T] * and the simplified type will be: - * MyClass[T] => PimpedMyClass[T] + * MyClass[T] => EnrichedMyClass[T] */ def removeImplicitParameters(viewType: Type): (Type, List[Type]) = { diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index e3d59d83ea..2f63fbbff2 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -948,7 +948,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") for (sym <- ownerTpe.members) addTypeMember(sym, pre, sym.owner != ownerTpe.typeSymbol, NoSymbol) members.allMembers #:: { - //print("\nadd pimped") + //print("\nadd enrichment") val applicableViews: List[SearchResult] = if (ownerTpe.isErroneous) List() else new ImplicitSearch( diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index 9bb57877d9..ea1c0d546e 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -271,7 +271,7 @@ object Predef extends LowPriorityImplicits { // reduces the chances of a user's writing `foo.__leftOfArrow` and // being confused why they get an ambiguous implicit conversion // error. (`foo.x` used to produce this error since both - // any2Ensuring and any2ArrowAssoc pimped an `x` onto everything) + // any2Ensuring and any2ArrowAssoc enrich an `x` onto everything) @deprecated("Use `__leftOfArrow` instead", "2.10.0") def x = __leftOfArrow diff --git a/test/files/pos/t3864/tuples_1.scala b/test/files/pos/t3864/tuples_1.scala index 1d19af6e41..5e97f8452b 100644 --- a/test/files/pos/t3864/tuples_1.scala +++ b/test/files/pos/t3864/tuples_1.scala @@ -1,11 +1,11 @@ -trait PimpedType[X] { +trait EnrichedType[X] { val value: X } trait Tuples { - -trait Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] extends PimpedType[Tuple15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]] { + +trait Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] extends EnrichedType[Tuple15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]] { def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)} def toIndexedSeq[Z](implicit ev: value.type <:< Tuple15[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)} def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, 
OO) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15)) @@ -13,8 +13,8 @@ trait Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] extends PimpedType[T implicit def ToTuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)): Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] = new { val value = t } with Tuple15W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] - -trait Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] extends PimpedType[Tuple16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]] { + +trait Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] extends EnrichedType[Tuple16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]] { def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)} def toIndexedSeq[Z](implicit ev: value.type <:< Tuple16[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)} def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16)) @@ -22,8 +22,8 @@ trait Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] extends PimpedTyp implicit def ToTuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)): Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] = new { val value = t } with Tuple16W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] - -trait Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] extends PimpedType[Tuple17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]] { + +trait Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] extends EnrichedType[Tuple17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]] { def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)} def toIndexedSeq[Z](implicit ev: value.type <:< Tuple17[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)} def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = 
identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17)) @@ -31,8 +31,8 @@ trait Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] extends Pimped implicit def ToTuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)): Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] = new { val value = t } with Tuple17W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] - -trait Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] extends PimpedType[Tuple18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]] { + +trait Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] extends EnrichedType[Tuple18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]] { def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)} def toIndexedSeq[Z](implicit ev: value.type <:< Tuple18[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)} def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18)) @@ -40,8 +40,8 @@ trait Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] extends Pim implicit def ToTuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)): Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] = new { val value = t } with Tuple18W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] - -trait Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] extends PimpedType[Tuple19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]] { + +trait Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] extends EnrichedType[Tuple19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]] { def fold[Z](f: => (A, B, C, D, E, F, G, 
H, I, J, K, L, M, N, O, P, Q, R, S) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)} def toIndexedSeq[Z](implicit ev: value.type <:< Tuple19[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)} def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19)) @@ -49,8 +49,8 @@ trait Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] extends implicit def ToTuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)): Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] = new { val value = t } with Tuple19W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] - -trait Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] extends PimpedType[Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]] { + +trait Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] extends EnrichedType[Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]] { def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)} def toIndexedSeq[Z](implicit ev: value.type <:< Tuple20[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)} def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _, _20: (T => TT) = identity[T] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), 
_5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19), _20(value._20)) @@ -58,8 +58,8 @@ trait Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] exten implicit def ToTuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)): Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] = new { val value = t } with Tuple20W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] - -trait Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] extends PimpedType[Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]] { + +trait Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] extends EnrichedType[Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]] { def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => Z): Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)} def toIndexedSeq[Z](implicit ev: value.type <:< Tuple21[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)} def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _, _20: (T => TT) = identity[T] _, _21: (U => UU) = identity[U] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19), _20(value._20), _21(value._21)) @@ -67,12 +67,12 @@ trait Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] ex implicit def ToTuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)): Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] = new { val value = t } with Tuple21W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] - -trait Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] extends PimpedType[Tuple22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]] { + +trait Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] extends EnrichedType[Tuple22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]] { def fold[Z](f: => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => Z): 
Z = {import value._; f(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)} def toIndexedSeq[Z](implicit ev: value.type <:< Tuple22[Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z, Z]): IndexedSeq[Z] = {val zs = ev(value); import zs._; IndexedSeq(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)} def mapElements[AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU, VV](_1: (A => AA) = identity[A] _, _2: (B => BB) = identity[B] _, _3: (C => CC) = identity[C] _, _4: (D => DD) = identity[D] _, _5: (E => EE) = identity[E] _, _6: (F => FF) = identity[F] _, _7: (G => GG) = identity[G] _, _8: (H => HH) = identity[H] _, _9: (I => II) = identity[I] _, _10: (J => JJ) = identity[J] _, _11: (K => KK) = identity[K] _, _12: (L => LL) = identity[L] _, _13: (M => MM) = identity[M] _, _14: (N => NN) = identity[N] _, _15: (O => OO) = identity[O] _, _16: (P => PP) = identity[P] _, _17: (Q => QQ) = identity[Q] _, _18: (R => RR) = identity[R] _, _19: (S => SS) = identity[S] _, _20: (T => TT) = identity[T] _, _21: (U => UU) = identity[U] _, _22: (V => VV) = identity[V] _): (AA, BB, CC, DD, EE, FF, GG, HH, II, JJ, KK, LL, MM, NN, OO, PP, QQ, RR, SS, TT, UU, VV) = (_1(value._1), _2(value._2), _3(value._3), _4(value._4), _5(value._5), _6(value._6), _7(value._7), _8(value._8), _9(value._9), _10(value._10), _11(value._11), _12(value._12), _13(value._13), _14(value._14), _15(value._15), _16(value._16), _17(value._17), _18(value._18), _19(value._19), _20(value._20), _21(value._21), _22(value._22)) } implicit def ToTuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](t: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)): Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] = new { val value = t } with Tuple22W[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] -} \ No newline at end of file +} diff --git a/test/files/pos/t5809.scala b/test/files/pos/t5809.scala index 4bcd743faa..6101f546b3 100644 --- a/test/files/pos/t5809.scala +++ b/test/files/pos/t5809.scala @@ -1,5 +1,5 @@ package object foo { - implicit class PimpedInt(foo: Int) { + implicit class EnrichedInt(foo: Int) { def bar = ??? 
def bippy = foo } diff --git a/test/files/pos/t5877.scala b/test/files/pos/t5877.scala index c7827df99f..939013cd01 100644 --- a/test/files/pos/t5877.scala +++ b/test/files/pos/t5877.scala @@ -7,8 +7,8 @@ package foo { } package object foo { - // Crasher: No synthetics for method PimpedFoo2: synthetics contains - implicit class PimpedFoo2(value: Foo) { + // Crasher: No synthetics for method EnrichedFoo2: synthetics contains + implicit class EnrichedFoo2(value: Foo) { def huzzah = "" } } diff --git a/test/files/pos/t5877b.scala b/test/files/pos/t5877b.scala index 6b8cbd473e..43a2ea2f06 100644 --- a/test/files/pos/t5877b.scala +++ b/test/files/pos/t5877b.scala @@ -7,7 +7,7 @@ object Test { } object `package` { - implicit class PimpedFoo2(value: Foo) { + implicit class EnrichedFoo2(value: Foo) { def huzzah = "" } } diff --git a/test/scaladoc/resources/implicits-base-res.scala b/test/scaladoc/resources/implicits-base-res.scala index d6c0332c10..1d17e9a6d3 100644 --- a/test/scaladoc/resources/implicits-base-res.scala +++ b/test/scaladoc/resources/implicits-base-res.scala @@ -11,21 +11,21 @@ trait MyNumeric[R] * - tests the complete type inference * - the following inherited methods should appear: * {{{ - * def convToGtColonDoubleA(x: Double) // pimpA3: with a constraint that T <: Double - * def convToIntA(x: Int) // pimpA2: with a constraint that T = Int - * def convToManifestA(x: T) // pimpA7: with 2 constraints: T: Manifest and T <: Double - * def convToMyNumericA(x: T) // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope - * def convToNumericA(x: T) // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope - * def convToPimpedA(x: Bar[Foo[T]]) // pimpA5: no constraints, SHADOWED - * def convToPimpedA(x: S) // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar, SHADOWED - * def convToPimpedA(x: T) // pimpA0: with no constraints, SHADOWED - * def convToTraversableOps(x: T) // pimpA7: with 2 constraints: T: Manifest and T <: Double + * def convToGtColonDoubleA(x: Double) // enrichA3: with a constraint that T <: Double + * def convToIntA(x: Int) // enrichA2: with a constraint that T = Int + * def convToManifestA(x: T) // enrichA7: with 2 constraints: T: Manifest and T <: Double + * def convToMyNumericA(x: T) // enrichA6: with a constraint that there is x: MyNumeric[T] implicit in scope + * def convToNumericA(x: T) // enrichA1: with a constraint that there is x: Numeric[T] implicit in scope + * def convToEnrichedA(x: Bar[Foo[T]]) // enrichA5: no constraints, SHADOWED + * def convToEnrichedA(x: S) // enrichA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar, SHADOWED + * def convToEnrichedA(x: T) // enrichA0: with no constraints, SHADOWED + * def convToTraversableOps(x: T) // enrichA7: with 2 constraints: T: Manifest and T <: Double * // should not be abstract! 
* }}} */ class A[T] { - /** This should prevent the implicitly inherited `def convToPimpedA: T` from `pimpA0` from showing up */ - def convToPimpedA(x: T): T = sys.error("Let's check it out!") + /** This should prevent the implicitly inherited `def convToEnrichedA: T` from `enrichA0` from showing up */ + def convToEnrichedA(x: T): T = sys.error("Let's check it out!") /** This should check implicit member elimination in the case of subtyping */ def foo(a: T, b: AnyRef): T } @@ -33,15 +33,15 @@ class A[T] { object A { import language.implicitConversions // according to SIP18 - implicit def pimpA0[V](a: A[V]) = new PimpedA(a) - implicit def pimpA1[ZBUR: Numeric](a: A[ZBUR]) = new NumericA[ZBUR](a) - implicit def pimpA2(a: A[Int]) = new IntA(a) - implicit def pimpA3(a: A[T] forSome { type T <: Double }) = new GtColonDoubleA(a) - implicit def pimpA4[S](a: A[Foo[Bar[S]]])(implicit foo: Foo[S], bar: Bar[S]): PimpedA[S] = sys.error("not implemented") - implicit def pimpA5[Z](a: A[Z]): PimpedA[Bar[Foo[Z]]] = sys.error("not implemented") - implicit def pimpA6[Z: MyNumeric](a: A[Z]) = new MyNumericA[Z](a) + implicit def enrichA0[V](a: A[V]) = new EnrichedA(a) + implicit def enrichA1[ZBUR: Numeric](a: A[ZBUR]) = new NumericA[ZBUR](a) + implicit def enrichA2(a: A[Int]) = new IntA(a) + implicit def enrichA3(a: A[T] forSome { type T <: Double }) = new GtColonDoubleA(a) + implicit def enrichA4[S](a: A[Foo[Bar[S]]])(implicit foo: Foo[S], bar: Bar[S]): EnrichedA[S] = sys.error("not implemented") + implicit def enrichA5[Z](a: A[Z]): EnrichedA[Bar[Foo[Z]]] = sys.error("not implemented") + implicit def enrichA6[Z: MyNumeric](a: A[Z]) = new MyNumericA[Z](a) // TODO: Add H <: Double and see why it crashes for C and D -- context bounds, need to check! - implicit def pimpA7[H <: Double : Manifest](a: A[H]) = new ManifestA[H](a) with MyTraversableOps[H] { def convToTraversableOps(x: H): H = sys.error("no") } + implicit def enrichA7[H <: Double : Manifest](a: A[H]) = new ManifestA[H](a) with MyTraversableOps[H] { def convToTraversableOps(x: H): H = sys.error("no") } } @@ -49,14 +49,14 @@ object A { * - tests the existential type solving * - the following inherited methods should appear: * {{{ - * def convToGtColonDoubleA(x: Double) // pimpA3: no constraints - * def convToManifestA(x: Double) // pimpA7: no constraints - * def convToMyNumericA(x: Double) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope - * def convToNumericA(x: Double) // pimpA1: no constraintsd - * def convToPimpedA(x: Bar[Foo[Double]]) // pimpA5: no constraints, SHADOWED - * def convToPimpedA(x: Double) // pimpA0: no constraints, SHADOWED - * def convToTraversableOps(x: Double) // pimpA7: no constraints - * // should not be abstract! + * def convToGtColonDoubleA(x: Double) // enrichA3: no constraints + * def convToManifestA(x: Double) // enrichA7: no constraints + * def convToMyNumericA(x: Double) // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope + * def convToNumericA(x: Double) // enrichA1: no constraintsd + * def convToEnrichedA(x: Bar[Foo[Double]]) // enrichA5: no constraints, SHADOWED + * def convToEnrichedA(x: Double) // enrichA0: no constraints, SHADOWED + * def convToTraversableOps(x: Double) // enrichA7: no constraints + * // should not be abstract! 
* }}} */ class B extends A[Double] @@ -67,11 +67,11 @@ object B extends A * - tests asSeenFrom * - the following inherited methods should appear: * {{{ - * def convToIntA(x: Int) // pimpA2: no constraints - * def convToMyNumericA(x: Int) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope - * def convToNumericA(x: Int) // pimpA1: no constraints - * def convToPimpedA(x: Int) // pimpA0: no constraints, SHADOWED - * def convToPimpedA(x: Bar[Foo[Int]]) // pimpA5: no constraints, SHADOWED + * def convToIntA(x: Int) // enrichA2: no constraints + * def convToMyNumericA(x: Int) // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope + * def convToNumericA(x: Int) // enrichA1: no constraints + * def convToEnrichedA(x: Int) // enrichA0: no constraints, SHADOWED + * def convToEnrichedA(x: Bar[Foo[Int]]) // enrichA5: no constraints, SHADOWED * }}} */ class C extends A[Int] @@ -82,10 +82,10 @@ object C extends A * - tests implicit elimination * - the following inherited methods should appear: * {{{ - * def convToMyNumericA(x: String) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope - * def convToNumericA(x: String) // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope - * def convToPimpedA(x: Bar[Foo[String]]) // pimpA5: no constraints, SHADOWED - * def convToPimpedA(x: String) // pimpA0: no constraints, SHADOWED + * def convToMyNumericA(x: String) // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope + * def convToNumericA(x: String) // enrichA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope + * def convToEnrichedA(x: Bar[Foo[String]]) // enrichA5: no constraints, SHADOWED + * def convToEnrichedA(x: String) // enrichA0: no constraints, SHADOWED * }}} */ class D extends A[String] @@ -93,12 +93,12 @@ class D extends A[String] object D extends A -/** PimpedA class
    +/** EnrichedA class
    * - tests simple inheritance and asSeenFrom * - A, B and C should be implicitly converted to this */ -class PimpedA[V](a: A[V]) { - /** The convToPimpedA: V documentation... */ - def convToPimpedA(x: V): V = sys.error("Not implemented") +class EnrichedA[V](a: A[V]) { + /** The convToEnrichedA: V documentation... */ + def convToEnrichedA(x: V): V = sys.error("Not implemented") } /** NumericA class
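For readers following the renames in these test fixtures: "pimp my library" and "enrich my library" are two names for the same idiom, adding methods to an existing type through an implicit conversion or implicit class rather than by changing the type itself. A minimal self-contained sketch of the pattern, with illustrative names that do not come from the patch:

{{{
object EnrichmentDemo extends App {
  // the enrichment: Int gains a `squared` method without Int itself changing
  implicit class RichInt(self: Int) {
    def squared: Int = self * self
  }
  println(3.squared)  // prints 9
}
}}}

The scaladoc fixtures above exercise exactly this mechanism, checking which enrichments (and under which implicit constraints) the documentation tool attributes to A, B, C and D.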
    diff --git a/test/scaladoc/run/implicits-base.scala b/test/scaladoc/run/implicits-base.scala index 3d57306f5d..8f8652cdb3 100644 --- a/test/scaladoc/run/implicits-base.scala +++ b/test/scaladoc/run/implicits-base.scala @@ -25,54 +25,54 @@ object Test extends ScaladocModelTest { val A = base._class("A") - // def convToPimpedA(x: T) // pimpA0: with no constraints, SHADOWED - conv = A._conversion(A.qualifiedName + ".pimpA0") + // def convToEnrichedA(x: T) // enrichA0: with no constraints, SHADOWED + conv = A._conversion(A.qualifiedName + ".enrichA0") assert(conv.members.length == 1) assert(conv.constraints.length == 0) - assert(isShadowed(conv._member("convToPimpedA"))) - assert(conv._member("convToPimpedA").resultType.name == "T") + assert(isShadowed(conv._member("convToEnrichedA"))) + assert(conv._member("convToEnrichedA").resultType.name == "T") - // def convToNumericA: T // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope - conv = A._conversion(A.qualifiedName + ".pimpA1") + // def convToNumericA: T // enrichA1: with a constraint that there is x: Numeric[T] implicit in scope + conv = A._conversion(A.qualifiedName + ".enrichA1") assert(conv.members.length == 1) assert(conv.constraints.length == 1) assert(conv._member("convToNumericA").resultType.name == "T") - // def convToIntA: Int // pimpA2: with a constraint that T = Int - conv = A._conversion(A.qualifiedName + ".pimpA2") + // def convToIntA: Int // enrichA2: with a constraint that T = Int + conv = A._conversion(A.qualifiedName + ".enrichA2") assert(conv.members.length == 1) assert(conv.constraints.length == 1) assert(conv._member("convToIntA").resultType.name == "Int") - // def convToGtColonDoubleA: Double // pimpA3: with a constraint that T <: Double - conv = A._conversion(A.qualifiedName + ".pimpA3") + // def convToGtColonDoubleA: Double // enrichA3: with a constraint that T <: Double + conv = A._conversion(A.qualifiedName + ".enrichA3") assert(conv.members.length == 1) assert(conv.constraints.length == 1) assert(conv._member("convToGtColonDoubleA").resultType.name == "Double") - // def convToPimpedA: S // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar - conv = A._conversion(A.qualifiedName + ".pimpA4") + // def convToEnrichedA: S // enrichA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar + conv = A._conversion(A.qualifiedName + ".enrichA4") assert(conv.members.length == 1) assert(conv.constraints.length == 3) - assert(conv._member("convToPimpedA").resultType.name == "S") + assert(conv._member("convToEnrichedA").resultType.name == "S") - // def convToPimpedA: Bar[Foo[T]] // pimpA5: no constraints - conv = A._conversion(A.qualifiedName + ".pimpA5") + // def convToEnrichedA: Bar[Foo[T]] // enrichA5: no constraints + conv = A._conversion(A.qualifiedName + ".enrichA5") assert(conv.members.length == 1) assert(conv.constraints.length == 0) - assert(isShadowed(conv._member("convToPimpedA"))) - assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[T]]") + assert(isShadowed(conv._member("convToEnrichedA"))) + assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[T]]") - // def convToMyNumericA: T // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope - conv = A._conversion(A.qualifiedName + ".pimpA6") + // def convToMyNumericA: T // enrichA6: with a constraint that there is x: MyNumeric[T] implicit in scope + conv = A._conversion(A.qualifiedName + ".enrichA6") assert(conv.members.length == 1) assert(conv.constraints.length == 1) 
assert(conv._member("convToMyNumericA").resultType.name == "T") - // def convToManifestA: T // pimpA7: with 2 constraints: T: Manifest and T <: Double - // def convToTraversableOps: T // pimpA7: with 2 constraints: T: Manifest and T <: Double + // def convToManifestA: T // enrichA7: with 2 constraints: T: Manifest and T <: Double + // def convToTraversableOps: T // enrichA7: with 2 constraints: T: Manifest and T <: Double // should not be abstract! - conv = A._conversion(A.qualifiedName + ".pimpA7") + conv = A._conversion(A.qualifiedName + ".enrichA7") assert(conv.members.length == 2) assert(conv.constraints.length == 2) assert(conv._member("convToManifestA").resultType.name == "T") @@ -84,45 +84,45 @@ object Test extends ScaladocModelTest { val B = base._class("B") // these conversions should not affect B - assert(B._conversions(A.qualifiedName + ".pimpA2").isEmpty) - assert(B._conversions(A.qualifiedName + ".pimpA4").isEmpty) + assert(B._conversions(A.qualifiedName + ".enrichA2").isEmpty) + assert(B._conversions(A.qualifiedName + ".enrichA4").isEmpty) - // def convToPimpedA(x: Double) // pimpA0: no constraints, SHADOWED - conv = B._conversion(A.qualifiedName + ".pimpA0") + // def convToEnrichedA(x: Double) // enrichA0: no constraints, SHADOWED + conv = B._conversion(A.qualifiedName + ".enrichA0") assert(conv.members.length == 1) assert(conv.constraints.length == 0) - assert(isShadowed(conv._member("convToPimpedA"))) - assert(conv._member("convToPimpedA").resultType.name == "Double") + assert(isShadowed(conv._member("convToEnrichedA"))) + assert(conv._member("convToEnrichedA").resultType.name == "Double") - // def convToNumericA: Double // pimpA1: no constraintsd - conv = B._conversion(A.qualifiedName + ".pimpA1") + // def convToNumericA: Double // enrichA1: no constraintsd + conv = B._conversion(A.qualifiedName + ".enrichA1") assert(conv.members.length == 1) assert(conv.constraints.length == 0) assert(conv._member("convToNumericA").resultType.name == "Double") - // def convToGtColonDoubleA: Double // pimpA3: no constraints - conv = B._conversion(A.qualifiedName + ".pimpA3") + // def convToGtColonDoubleA: Double // enrichA3: no constraints + conv = B._conversion(A.qualifiedName + ".enrichA3") assert(conv.members.length == 1) assert(conv.constraints.length == 0) assert(conv._member("convToGtColonDoubleA").resultType.name == "Double") - // def convToPimpedA: Bar[Foo[Double]] // pimpA5: no constraints - conv = B._conversion(A.qualifiedName + ".pimpA5") + // def convToEnrichedA: Bar[Foo[Double]] // enrichA5: no constraints + conv = B._conversion(A.qualifiedName + ".enrichA5") assert(conv.members.length == 1) assert(conv.constraints.length == 0) - assert(isShadowed(conv._member("convToPimpedA"))) - assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Double]]") + assert(isShadowed(conv._member("convToEnrichedA"))) + assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[Double]]") - // def convToMyNumericA: Double // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope - conv = B._conversion(A.qualifiedName + ".pimpA6") + // def convToMyNumericA: Double // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope + conv = B._conversion(A.qualifiedName + ".enrichA6") assert(conv.members.length == 1) assert(conv.constraints.length == 1) assert(conv._member("convToMyNumericA").resultType.name == "Double") - // def convToManifestA: Double // pimpA7: no constraints - // def 
convToTraversableOps: Double // pimpA7: no constraints + // def convToManifestA: Double // enrichA7: no constraints + // def convToTraversableOps: Double // enrichA7: no constraints // // should not be abstract! - conv = B._conversion(A.qualifiedName + ".pimpA7") + conv = B._conversion(A.qualifiedName + ".enrichA7") assert(conv.members.length == 2) assert(conv.constraints.length == 0) assert(conv._member("convToManifestA").resultType.name == "Double") @@ -134,38 +134,38 @@ object Test extends ScaladocModelTest { val C = base._class("C") // these conversions should not affect C - assert(C._conversions(A.qualifiedName + ".pimpA3").isEmpty) - assert(C._conversions(A.qualifiedName + ".pimpA4").isEmpty) - assert(C._conversions(A.qualifiedName + ".pimpA7").isEmpty) + assert(C._conversions(A.qualifiedName + ".enrichA3").isEmpty) + assert(C._conversions(A.qualifiedName + ".enrichA4").isEmpty) + assert(C._conversions(A.qualifiedName + ".enrichA7").isEmpty) - // def convToPimpedA(x: Int) // pimpA0: no constraints, SHADOWED - conv = C._conversion(A.qualifiedName + ".pimpA0") + // def convToEnrichedA(x: Int) // enrichA0: no constraints, SHADOWED + conv = C._conversion(A.qualifiedName + ".enrichA0") assert(conv.members.length == 1) assert(conv.constraints.length == 0) - assert(isShadowed(conv._member("convToPimpedA"))) - assert(conv._member("convToPimpedA").resultType.name == "Int") + assert(isShadowed(conv._member("convToEnrichedA"))) + assert(conv._member("convToEnrichedA").resultType.name == "Int") - // def convToNumericA: Int // pimpA1: no constraints - conv = C._conversion(A.qualifiedName + ".pimpA1") + // def convToNumericA: Int // enrichA1: no constraints + conv = C._conversion(A.qualifiedName + ".enrichA1") assert(conv.members.length == 1) assert(conv.constraints.length == 0) assert(conv._member("convToNumericA").resultType.name == "Int") - // def convToIntA: Int // pimpA2: no constraints - conv = C._conversion(A.qualifiedName + ".pimpA2") + // def convToIntA: Int // enrichA2: no constraints + conv = C._conversion(A.qualifiedName + ".enrichA2") assert(conv.members.length == 1) assert(conv.constraints.length == 0) assert(conv._member("convToIntA").resultType.name == "Int") - // def convToPimpedA: Bar[Foo[Int]] // pimpA5: no constraints - conv = C._conversion(A.qualifiedName + ".pimpA5") + // def convToEnrichedA: Bar[Foo[Int]] // enrichA5: no constraints + conv = C._conversion(A.qualifiedName + ".enrichA5") assert(conv.members.length == 1) assert(conv.constraints.length == 0) - assert(isShadowed(conv._member("convToPimpedA"))) - assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Int]]") + assert(isShadowed(conv._member("convToEnrichedA"))) + assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[Int]]") - // def convToMyNumericA: Int // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope - conv = C._conversion(A.qualifiedName + ".pimpA6") + // def convToMyNumericA: Int // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope + conv = C._conversion(A.qualifiedName + ".enrichA6") assert(conv.members.length == 1) assert(conv.constraints.length == 1) assert(conv._member("convToMyNumericA").resultType.name == "Int") @@ -175,33 +175,33 @@ object Test extends ScaladocModelTest { val D = base._class("D") // these conversions should not affect D - assert(D._conversions(A.qualifiedName + ".pimpA2").isEmpty) - assert(D._conversions(A.qualifiedName + ".pimpA3").isEmpty) - 
assert(D._conversions(A.qualifiedName + ".pimpA4").isEmpty) - assert(D._conversions(A.qualifiedName + ".pimpA7").isEmpty) + assert(D._conversions(A.qualifiedName + ".enrichA2").isEmpty) + assert(D._conversions(A.qualifiedName + ".enrichA3").isEmpty) + assert(D._conversions(A.qualifiedName + ".enrichA4").isEmpty) + assert(D._conversions(A.qualifiedName + ".enrichA7").isEmpty) - // def convToPimpedA(x: String) // pimpA0: no constraints, SHADOWED - conv = D._conversion(A.qualifiedName + ".pimpA0") + // def convToEnrichedA(x: String) // enrichA0: no constraints, SHADOWED + conv = D._conversion(A.qualifiedName + ".enrichA0") assert(conv.members.length == 1) assert(conv.constraints.length == 0) - assert(isShadowed(conv._member("convToPimpedA"))) - assert(conv._member("convToPimpedA").resultType.name == "String") + assert(isShadowed(conv._member("convToEnrichedA"))) + assert(conv._member("convToEnrichedA").resultType.name == "String") - // def convToNumericA: String // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope - conv = D._conversion(A.qualifiedName + ".pimpA1") + // def convToNumericA: String // enrichA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope + conv = D._conversion(A.qualifiedName + ".enrichA1") assert(conv.members.length == 1) assert(conv.constraints.length == 1) assert(conv._member("convToNumericA").resultType.name == "String") - // def convToPimpedA: Bar[Foo[String]] // pimpA5: no constraints - conv = D._conversion(A.qualifiedName + ".pimpA5") + // def convToEnrichedA: Bar[Foo[String]] // enrichA5: no constraints + conv = D._conversion(A.qualifiedName + ".enrichA5") assert(conv.members.length == 1) assert(conv.constraints.length == 0) - assert(isShadowed(conv._member("convToPimpedA"))) - assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[String]]") + assert(isShadowed(conv._member("convToEnrichedA"))) + assert(conv._member("convToEnrichedA").resultType.name == "Bar[Foo[String]]") - // def convToMyNumericA: String // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope - conv = D._conversion(A.qualifiedName + ".pimpA6") + // def convToMyNumericA: String // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope + conv = D._conversion(A.qualifiedName + ".enrichA6") assert(conv.members.length == 1) assert(conv.constraints.length == 1) assert(conv._member("convToMyNumericA").resultType.name == "String") -- cgit v1.2.3 From 8f1d4a5e5417915d31c4fc8cc22be2ac5925dcc9 Mon Sep 17 00:00:00 2001 From: Dan Hopkins Date: Sat, 19 Jan 2013 15:04:37 -0700 Subject: Grammatical fix --- src/library/scala/Predef.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index ea1c0d546e..5557d80c25 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -271,7 +271,7 @@ object Predef extends LowPriorityImplicits { // reduces the chances of a user's writing `foo.__leftOfArrow` and // being confused why they get an ambiguous implicit conversion // error. 
(`foo.x` used to produce this error since both - // any2Ensuring and any2ArrowAssoc enrich an `x` onto everything) + // any2Ensuring and any2ArrowAssoc enrich everything with an `x`) @deprecated("Use `__leftOfArrow` instead", "2.10.0") def x = __leftOfArrow -- cgit v1.2.3 From a38629160637a3d3018fc0e486a27cf3b3d901f5 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Sun, 20 Jan 2013 19:30:33 +0100 Subject: SI-6811 Remove scala.xml.include.sax.Main --- src/library/scala/xml/include/sax/Main.scala | 82 ---------------------------- 1 file changed, 82 deletions(-) delete mode 100644 src/library/scala/xml/include/sax/Main.scala (limited to 'src') diff --git a/src/library/scala/xml/include/sax/Main.scala b/src/library/scala/xml/include/sax/Main.scala deleted file mode 100644 index 92d4d6ea73..0000000000 --- a/src/library/scala/xml/include/sax/Main.scala +++ /dev/null @@ -1,82 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.xml -package include.sax - -import scala.util.control.Exception.{ catching, ignoring } -import org.xml.sax.XMLReader -import org.xml.sax.helpers.XMLReaderFactory - -@deprecated("Code example will be moved to documentation.", "2.10.0") -object Main { - private val namespacePrefixes = "http://xml.org/sax/features/namespace-prefixes" - private val lexicalHandler = "http://xml.org/sax/properties/lexical-handler" - - /** - * The driver method for xinc - * Output is written to System.out via Conolse - *

    - * - * @param args contains the URLs and/or filenames - * of the documents to be processed. - */ - def main(args: Array[String]) { - def saxe[T](body: => T) = catching[T](classOf[SAXException]) opt body - def fail(msg: String) = System.err.println(msg) - - val parser: XMLReader = - saxe[XMLReader](XMLReaderFactory.createXMLReader()) getOrElse ( - saxe[XMLReader](XMLReaderFactory.createXMLReader(XercesClassName)) getOrElse ( - return fail("Could not find an XML parser") - ) - ) - - // Need better namespace handling - try parser.setFeature(namespacePrefixes, true) - catch { case e: SAXException => return System.err.println(e) } - - if (args.isEmpty) - return - - def dashR = args.size >= 2 && args(0) == "-r" - val args2 = if (dashR) args drop 2 else args - val resolver: Option[EntityResolver] = - if (dashR) None - else catching(classOf[Exception]) opt { - val r = Class.forName(args(1)).newInstance().asInstanceOf[EntityResolver] - parser setEntityResolver r - r - } orElse (return fail("Could not load requested EntityResolver")) - - for (arg <- args2) { - try { - val includer = new XIncludeFilter() - includer setParent parser - val s = new XIncluder(System.out, "UTF-8") - includer setContentHandler s - - resolver map (includer setEntityResolver _) - // SAXException here means will not support comments - ignoring(classOf[SAXException]) { - includer.setProperty(lexicalHandler, s) - s setFilter includer - } - includer parse arg - } - catch { - case e: SAXParseException => - fail(e.toString) - fail("Problem in %s at line %d".format(e.getSystemId, e.getLineNumber)) - case e: SAXException => - fail(e.toString) - } - } - } -} -- cgit v1.2.3 From a9c374b56fb418b62fc3dda57d5646ecdc6a5626 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Sun, 20 Jan 2013 20:08:12 +0100 Subject: SI-6811 Move scala.util.{automata,regexp} ... ... 
to scala.xml.dtd.impl and make it private[dtd] --- .../scala/util/automata/BaseBerrySethi.scala | 98 ------------- src/library/scala/util/automata/DetWordAutom.scala | 49 ------- src/library/scala/util/automata/Inclusion.scala | 69 --------- .../scala/util/automata/NondetWordAutom.scala | 59 -------- .../scala/util/automata/SubsetConstruction.scala | 107 -------------- .../scala/util/automata/WordBerrySethi.scala | 162 --------------------- src/library/scala/util/regexp/Base.scala | 66 --------- .../scala/util/regexp/PointedHedgeExp.scala | 36 ----- src/library/scala/util/regexp/SyntaxError.scala | 20 --- src/library/scala/util/regexp/WordExp.scala | 58 -------- src/library/scala/xml/dtd/ContentModel.scala | 3 +- src/library/scala/xml/dtd/DocType.scala | 6 +- src/library/scala/xml/dtd/ElementValidator.scala | 6 +- src/library/scala/xml/dtd/ExternalID.scala | 3 +- src/library/scala/xml/dtd/impl/Base.scala | 66 +++++++++ .../scala/xml/dtd/impl/BaseBerrySethi.scala | 97 ++++++++++++ src/library/scala/xml/dtd/impl/DetWordAutom.scala | 49 +++++++ src/library/scala/xml/dtd/impl/Inclusion.scala | 69 +++++++++ .../scala/xml/dtd/impl/NondetWordAutom.scala | 59 ++++++++ .../scala/xml/dtd/impl/PointedHedgeExp.scala | 36 +++++ .../scala/xml/dtd/impl/SubsetConstruction.scala | 107 ++++++++++++++ src/library/scala/xml/dtd/impl/SyntaxError.scala | 20 +++ .../scala/xml/dtd/impl/WordBerrySethi.scala | 161 ++++++++++++++++++++ src/library/scala/xml/dtd/impl/WordExp.scala | 58 ++++++++ test/files/pos/t0422.scala | 3 +- test/files/pos/t2698.scala | 3 +- test/files/pos/t422.scala | 17 --- 27 files changed, 733 insertions(+), 754 deletions(-) delete mode 100644 src/library/scala/util/automata/BaseBerrySethi.scala delete mode 100644 src/library/scala/util/automata/DetWordAutom.scala delete mode 100644 src/library/scala/util/automata/Inclusion.scala delete mode 100644 src/library/scala/util/automata/NondetWordAutom.scala delete mode 100644 src/library/scala/util/automata/SubsetConstruction.scala delete mode 100644 src/library/scala/util/automata/WordBerrySethi.scala delete mode 100644 src/library/scala/util/regexp/Base.scala delete mode 100644 src/library/scala/util/regexp/PointedHedgeExp.scala delete mode 100644 src/library/scala/util/regexp/SyntaxError.scala delete mode 100644 src/library/scala/util/regexp/WordExp.scala create mode 100644 src/library/scala/xml/dtd/impl/Base.scala create mode 100644 src/library/scala/xml/dtd/impl/BaseBerrySethi.scala create mode 100644 src/library/scala/xml/dtd/impl/DetWordAutom.scala create mode 100644 src/library/scala/xml/dtd/impl/Inclusion.scala create mode 100644 src/library/scala/xml/dtd/impl/NondetWordAutom.scala create mode 100644 src/library/scala/xml/dtd/impl/PointedHedgeExp.scala create mode 100644 src/library/scala/xml/dtd/impl/SubsetConstruction.scala create mode 100644 src/library/scala/xml/dtd/impl/SyntaxError.scala create mode 100644 src/library/scala/xml/dtd/impl/WordBerrySethi.scala create mode 100644 src/library/scala/xml/dtd/impl/WordExp.scala delete mode 100644 test/files/pos/t422.scala (limited to 'src') diff --git a/src/library/scala/util/automata/BaseBerrySethi.scala b/src/library/scala/util/automata/BaseBerrySethi.scala deleted file mode 100644 index 3f6f4507a9..0000000000 --- a/src/library/scala/util/automata/BaseBerrySethi.scala +++ /dev/null @@ -1,98 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | 
** -** |/ ** -\* */ - -package scala.util.automata - -import scala.util.regexp.{ Base } -import scala.collection.{ mutable, immutable } - -// todo: replace global variable pos with acc - -/** This class turns a regular expression over `A` into a - * [[scala.util.automata.NondetWordAutom]] over `A` using the celebrated - * position automata construction (also called ''Berry-Sethi'' or ''Glushkov''). - */ -@deprecated("This class will be removed", "2.10.0") -abstract class BaseBerrySethi { - val lang: Base - import lang.{ Alt, Eps, Meta, RegExp, Sequ, Star } - - protected var pos = 0 - - // results which hold all info for the NondetWordAutomaton - protected var follow: mutable.HashMap[Int, Set[Int]] = _ - - protected var finalTag: Int = _ - - protected var finals: immutable.Map[Int, Int] = _ // final states - - // constants -------------------------- - - final val emptySet: Set[Int] = Set() - - private def doComp(r: RegExp, compFunction: RegExp => Set[Int]) = r match { - case x: Alt => (x.rs map compFirst).foldLeft(emptySet)(_ ++ _) - case Eps => emptySet - case x: Meta => compFunction(x.r) - case x: Sequ => - val (l1, l2) = x.rs span (_.isNullable) - ((l1 ++ (l2 take 1)) map compFunction).foldLeft(emptySet)(_ ++ _) - case Star(t) => compFunction(t) - case _ => throw new IllegalArgumentException("unexpected pattern " + r.getClass) - } - - /** Computes `first(r)` for the word regexp `r`. */ - protected def compFirst(r: RegExp): Set[Int] = doComp(r, compFirst) - - /** Computes `last(r)` for the regexp `r`. */ - protected def compLast(r: RegExp): Set[Int] = doComp(r, compLast) - - /** Starts from the right-to-left - * precondition: pos is final - * pats are successor patterns of a Sequence node - */ - protected def compFollow(rs: Seq[RegExp]): Set[Int] = { - follow(0) = - if (rs.isEmpty) emptySet - else rs.foldRight(Set(pos))((p, fol) => { - val first = compFollow1(fol, p) - - if (p.isNullable) fol ++ first - else first - }) - - follow(0) - } - - /** Returns the first set of an expression, setting the follow set along the way. - */ - protected def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match { - case x: Alt => Set((x.rs reverseMap (compFollow1(fol1, _))).flatten: _*) - case x: Meta => compFollow1(fol1, x.r) - case x: Star => compFollow1(fol1 ++ compFirst(x.r), x.r) - case x: Sequ => - x.rs.foldRight(fol1) { (p, fol) => - val first = compFollow1(fol, p) - - if (p.isNullable) fol ++ first - else first - } - case _ => throw new IllegalArgumentException("unexpected pattern: " + r.getClass) - } - - /** Returns the "Sethi-length" of a pattern, creating the set of position along the way. 
- */ - protected def traverse(r: RegExp): Unit = r match { - // (is tree automaton stuff, more than Berry-Sethi) - case x: Alt => x.rs foreach traverse - case x: Sequ => x.rs foreach traverse - case x: Meta => traverse(x.r) - case Star(t) => traverse(t) - case _ => throw new IllegalArgumentException("unexp pattern " + r.getClass) - } -} diff --git a/src/library/scala/util/automata/DetWordAutom.scala b/src/library/scala/util/automata/DetWordAutom.scala deleted file mode 100644 index 5d709106f8..0000000000 --- a/src/library/scala/util/automata/DetWordAutom.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.util.automata - -import scala.collection.{ mutable, immutable } - -/** A deterministic automaton. States are integers, where - * 0 is always the only initial state. Transitions are represented - * in the delta function. A default transitions is one that - * is taken when no other transition can be taken. - * All states are reachable. Accepting states are those for which - * the partial function 'finals' is defined. - * - * @author Burak Emir - * @version 1.0 - */ -@deprecated("This class will be removed", "2.10.0") -abstract class DetWordAutom[T <: AnyRef] { - val nstates: Int - val finals: Array[Int] - val delta: Array[mutable.Map[T, Int]] - val default: Array[Int] - - def isFinal(q: Int) = finals(q) != 0 - def isSink(q: Int) = delta(q).isEmpty && default(q) == q - def next(q: Int, label: T) = delta(q).getOrElse(label, default(q)) - - override def toString() = { - val sb = new StringBuilder("[DetWordAutom nstates=") - sb.append(nstates) - sb.append(" finals=") - val map = Map(finals.zipWithIndex map (_.swap): _*) - sb.append(map.toString()) - sb.append(" delta=\n") - - for (i <- 0 until nstates) { - sb append "%d->%s\n".format(i, delta(i)) - if (i < default.length) - sb append "_>%s\n".format(default(i)) - } - sb.toString - } -} diff --git a/src/library/scala/util/automata/Inclusion.scala b/src/library/scala/util/automata/Inclusion.scala deleted file mode 100644 index 91441bd3a8..0000000000 --- a/src/library/scala/util/automata/Inclusion.scala +++ /dev/null @@ -1,69 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.util.automata - - -/** A fast test of language inclusion between minimal automata. - * inspired by the ''AMoRE automata library''. - * - * @author Burak Emir - * @version 1.0 - */ -@deprecated("This class will be removed", "2.10.0") -trait Inclusion[A <: AnyRef] { - - val labels: Seq[A] - - /** Returns true if `dfa1` is included in `dfa2`. 
- */ - def inclusion(dfa1: DetWordAutom[A], dfa2: DetWordAutom[A]) = { - - def encode(q1: Int, q2: Int) = 1 + q1 + q2 * dfa1.nstates - def decode2(c: Int) = (c-1) / (dfa1.nstates) //integer division - def decode1(c: Int) = (c-1) % (dfa1.nstates) - - var q1 = 0 //dfa1.initstate; // == 0 - var q2 = 0 //dfa2.initstate; // == 0 - - val max = 1 + dfa1.nstates * dfa2.nstates - val mark = new Array[Int](max) - - var result = true - var current = encode(q1, q2) - var last = current - mark(last) = max // mark (q1,q2) - while (current != 0 && result) { - //Console.println("current = [["+q1+" "+q2+"]] = "+current); - for (letter <- labels) { - val r1 = dfa1.next(q1,letter) - val r2 = dfa2.next(q2,letter) - if (dfa1.isFinal(r1) && !dfa2.isFinal(r2)) - result = false - val test = encode(r1, r2) - //Console.println("test = [["+r1+" "+r2+"]] = "+test); - if (mark(test) == 0) { - mark(last) = test - mark(test) = max - last = test - } - } - val ncurrent = mark(current) - if( ncurrent != max ) { - q1 = decode1(ncurrent) - q2 = decode2(ncurrent) - current = ncurrent - } else { - current = 0 - } - } - result - } -} diff --git a/src/library/scala/util/automata/NondetWordAutom.scala b/src/library/scala/util/automata/NondetWordAutom.scala deleted file mode 100644 index 24c6612d0f..0000000000 --- a/src/library/scala/util/automata/NondetWordAutom.scala +++ /dev/null @@ -1,59 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.util.automata - -import scala.collection.{ immutable, mutable } - -/** A nondeterministic automaton. States are integers, where - * 0 is always the only initial state. Transitions are represented - * in the delta function. Default transitions are transitions that - * are taken when no other transitions can be applied. - * All states are reachable. Accepting states are those for which - * the partial function `finals` is defined. 
- */ -@deprecated("This class will be removed", "2.10.0") -abstract class NondetWordAutom[T <: AnyRef] { - val nstates: Int - val labels: Seq[T] - val finals: Array[Int] // 0 means not final - val delta: Array[mutable.Map[T, immutable.BitSet]] - val default: Array[immutable.BitSet] - - /** @return true if the state is final */ - final def isFinal(state: Int) = finals(state) > 0 - - /** @return tag of final state */ - final def finalTag(state: Int) = finals(state) - - /** @return true if the set of states contains at least one final state */ - final def containsFinal(Q: immutable.BitSet): Boolean = Q exists isFinal - - /** @return true if there are no accepting states */ - final def isEmpty = (0 until nstates) forall (x => !isFinal(x)) - - /** @return a immutable.BitSet with the next states for given state and label */ - def next(q: Int, a: T): immutable.BitSet = delta(q).getOrElse(a, default(q)) - - /** @return a immutable.BitSet with the next states for given state and label */ - def next(Q: immutable.BitSet, a: T): immutable.BitSet = next(Q, next(_, a)) - def nextDefault(Q: immutable.BitSet): immutable.BitSet = next(Q, default) - - private def next(Q: immutable.BitSet, f: (Int) => immutable.BitSet): immutable.BitSet = - (Q map f).foldLeft(immutable.BitSet.empty)(_ ++ _) - - private def finalStates = 0 until nstates filter isFinal - override def toString = { - - val finalString = Map(finalStates map (j => j -> finals(j)) : _*).toString - val deltaString = (0 until nstates) - .map(i => " %d->%s\n _>%s\n".format(i, delta(i), default(i))).mkString - - "[NondetWordAutom nstates=%d finals=%s delta=\n%s".format(nstates, finalString, deltaString) - } -} diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala deleted file mode 100644 index 0ee768587c..0000000000 --- a/src/library/scala/util/automata/SubsetConstruction.scala +++ /dev/null @@ -1,107 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.util.automata - -import scala.collection.{ mutable, immutable } - -@deprecated("This class will be removed", "2.10.0") -class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) { - import nfa.labels - - def selectTag(Q: immutable.BitSet, finals: Array[Int]) = - (Q map finals filter (_ > 0)).min - - def determinize: DetWordAutom[T] = { - // for assigning numbers to bitsets - var indexMap = scala.collection.Map[immutable.BitSet, Int]() - var invIndexMap = scala.collection.Map[Int, immutable.BitSet]() - var ix = 0 - - // we compute the dfa with states = bitsets - val q0 = immutable.BitSet(0) // the set { 0 } - val sink = immutable.BitSet.empty // the set { } - - var states = Set(q0, sink) // initial set of sets - val delta = new mutable.HashMap[immutable.BitSet, mutable.HashMap[T, immutable.BitSet]] - var deftrans = mutable.Map(q0 -> sink, sink -> sink) // initial transitions - var finals: mutable.Map[immutable.BitSet, Int] = mutable.Map() - val rest = new mutable.Stack[immutable.BitSet] - - rest.push(sink, q0) - - def addFinal(q: immutable.BitSet) { - if (nfa containsFinal q) - finals = finals.updated(q, selectTag(q, nfa.finals)) - } - def add(Q: immutable.BitSet) { - if (!states(Q)) { - states += Q - rest push Q - addFinal(Q) - } - } - - addFinal(q0) // initial state may also be a final state - - while (!rest.isEmpty) { - val P = 
rest.pop - // assign a number to this bitset - indexMap = indexMap.updated(P, ix) - invIndexMap = invIndexMap.updated(ix, P) - ix += 1 - - // make transition map - val Pdelta = new mutable.HashMap[T, immutable.BitSet] - delta.update(P, Pdelta) - - labels foreach { label => - val Q = nfa.next(P, label) - Pdelta.update(label, Q) - add(Q) - } - - // collect default transitions - val Pdef = nfa nextDefault P - deftrans = deftrans.updated(P, Pdef) - add(Pdef) - } - - // create DetWordAutom, using indices instead of sets - val nstatesR = states.size - val deltaR = new Array[mutable.Map[T, Int]](nstatesR) - val defaultR = new Array[Int](nstatesR) - val finalsR = new Array[Int](nstatesR) - - for (Q <- states) { - val q = indexMap(Q) - val trans = delta(Q) - val transDef = deftrans(Q) - val qDef = indexMap(transDef) - val ntrans = new mutable.HashMap[T, Int]() - - for ((label, value) <- trans) { - val p = indexMap(value) - if (p != qDef) - ntrans.update(label, p) - } - - deltaR(q) = ntrans - defaultR(q) = qDef - } - - finals foreach { case (k,v) => finalsR(indexMap(k)) = v } - - new DetWordAutom [T] { - val nstates = nstatesR - val delta = deltaR - val default = defaultR - val finals = finalsR - } - } -} diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala deleted file mode 100644 index 2f4625da44..0000000000 --- a/src/library/scala/util/automata/WordBerrySethi.scala +++ /dev/null @@ -1,162 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.util.automata - -import scala.collection.{ immutable, mutable } -import scala.util.regexp.WordExp - -/** This class turns a regular expression into a [[scala.util.automata.NondetWordAutom]] - * celebrated position automata construction (also called ''Berry-Sethi'' or ''Glushkov''). - * - * @author Burak Emir - * @version 1.0 - */ -@deprecated("This class will be removed", "2.10.0") -abstract class WordBerrySethi extends BaseBerrySethi { - override val lang: WordExp - - import lang.{ Alt, Eps, Letter, RegExp, Sequ, Star, _labelT } - - protected var labels: mutable.HashSet[_labelT] = _ - // don't let this fool you, only labelAt is a real, surjective mapping - protected var labelAt: Map[Int, _labelT] = _ // new alphabet "gamma" - protected var deltaq: Array[mutable.HashMap[_labelT, List[Int]]] = _ // delta - protected var defaultq: Array[List[Int]] = _ // default transitions - protected var initials: Set[Int] = _ - - /** Computes `first(r)` where the word regexp `r`. - * - * @param r the regular expression - * @return the computed set `first(r)` - */ - protected override def compFirst(r: RegExp): Set[Int] = r match { - case x: Letter => Set(x.pos) - case _ => super.compFirst(r) - } - - /** Computes `last(r)` where the word regexp `r`. - * - * @param r the regular expression - * @return the computed set `last(r)` - */ - protected override def compLast(r: RegExp): Set[Int] = r match { - case x: Letter => Set(x.pos) - case _ => super.compLast(r) - } - - /** Returns the first set of an expression, setting the follow set along - * the way. 
- * - * @param r the regular expression - * @return the computed set - */ - protected override def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match { - case x: Letter => follow(x.pos) = fol1 ; Set(x.pos) - case Eps => emptySet - case _ => super.compFollow1(fol1, r) - } - - /** Returns "Sethi-length" of a pattern, creating the set of position - * along the way - */ - - /** Called at the leaves of the regexp */ - protected def seenLabel(r: RegExp, i: Int, label: _labelT) { - labelAt = labelAt.updated(i, label) - this.labels += label - } - - // overridden in BindingBerrySethi - protected def seenLabel(r: RegExp, label: _labelT): Int = { - pos += 1 - seenLabel(r, pos, label) - pos - } - - // todo: replace global variable pos with acc - override def traverse(r: RegExp): Unit = r match { - case a @ Letter(label) => a.pos = seenLabel(r, label) - case Eps => // ignore - case _ => super.traverse(r) - } - - - protected def makeTransition(src: Int, dest: Int, label: _labelT) { - val q = deltaq(src) - q.update(label, dest :: q.getOrElse(label, Nil)) - } - - protected def initialize(subexpr: Seq[RegExp]): Unit = { - this.labelAt = immutable.Map() - this.follow = mutable.HashMap() - this.labels = mutable.HashSet() - this.pos = 0 - - // determine "Sethi-length" of the regexp - subexpr foreach traverse - - this.initials = Set(0) - } - - protected def initializeAutom() { - finals = immutable.Map.empty[Int, Int] // final states - deltaq = new Array[mutable.HashMap[_labelT, List[Int]]](pos) // delta - defaultq = new Array[List[Int]](pos) // default transitions - - for (j <- 0 until pos) { - deltaq(j) = mutable.HashMap[_labelT, List[Int]]() - defaultq(j) = Nil - } - } - - protected def collectTransitions(): Unit = // make transitions - for (j <- 0 until pos ; fol = follow(j) ; k <- fol) { - if (pos == k) finals = finals.updated(j, finalTag) - else makeTransition(j, k, labelAt(k)) - } - - def automatonFrom(pat: RegExp, finalTag: Int): NondetWordAutom[_labelT] = { - this.finalTag = finalTag - - pat match { - case x: Sequ => - // (1,2) compute follow + first - initialize(x.rs) - pos += 1 - compFollow(x.rs) // this used to be assigned to var globalFirst and then never used. 
- - // (3) make automaton from follow sets - initializeAutom() - collectTransitions() - - if (x.isNullable) // initial state is final - finals = finals.updated(0, finalTag) - - val delta1 = immutable.Map(deltaq.zipWithIndex map (_.swap): _*) - val finalsArr = (0 until pos map (k => finals.getOrElse(k, 0))).toArray // 0 == not final - - val deltaArr: Array[mutable.Map[_labelT, immutable.BitSet]] = - (0 until pos map { x => - mutable.HashMap(delta1(x).toSeq map { case (k, v) => k -> immutable.BitSet(v: _*) } : _*) - }).toArray - - val defaultArr = (0 until pos map (k => immutable.BitSet(defaultq(k): _*))).toArray - - new NondetWordAutom[_labelT] { - val nstates = pos - val labels = WordBerrySethi.this.labels.toList - val finals = finalsArr - val delta = deltaArr - val default = defaultArr - } - case z => - automatonFrom(Sequ(z.asInstanceOf[this.lang._regexpT]), finalTag) - } - } -} diff --git a/src/library/scala/util/regexp/Base.scala b/src/library/scala/util/regexp/Base.scala deleted file mode 100644 index 7dbe60a34e..0000000000 --- a/src/library/scala/util/regexp/Base.scala +++ /dev/null @@ -1,66 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.util.regexp - -/** Basic regular expressions. - * - * @author Burak Emir - * @version 1.0 - */ - -@deprecated("This class will be removed", "2.10.0") -abstract class Base { - type _regexpT <: RegExp - - abstract class RegExp { - val isNullable: Boolean - } - - object Alt { - /** `Alt( R,R,R* )`. */ - def apply(rs: _regexpT*) = - if (rs.size < 2) throw new SyntaxError("need at least 2 branches in Alt") - else new Alt(rs: _*) - // Can't enforce that statically without changing the interface - // def apply(r1: _regexpT, r2: _regexpT, rs: _regexpT*) = new Alt(Seq(r1, r2) ++ rs: _*) - def unapplySeq(x: Alt) = Some(x.rs) - } - - class Alt private (val rs: _regexpT*) extends RegExp { - final val isNullable = rs exists (_.isNullable) - } - - object Sequ { - /** Sequ( R,R* ) */ - def apply(rs: _regexpT*) = if (rs.isEmpty) Eps else new Sequ(rs: _*) - def unapplySeq(x: Sequ) = Some(x.rs) - } - - class Sequ private (val rs: _regexpT*) extends RegExp { - final val isNullable = rs forall (_.isNullable) - } - - case class Star(r: _regexpT) extends RegExp { - final lazy val isNullable = true - } - - // The empty Sequ. - case object Eps extends RegExp { - final lazy val isNullable = true - override def toString() = "Eps" - } - - /** this class can be used to add meta information to regexps. */ - class Meta(r1: _regexpT) extends RegExp { - final val isNullable = r1.isNullable - def r = r1 - } -} diff --git a/src/library/scala/util/regexp/PointedHedgeExp.scala b/src/library/scala/util/regexp/PointedHedgeExp.scala deleted file mode 100644 index 5c0379b6f8..0000000000 --- a/src/library/scala/util/regexp/PointedHedgeExp.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.util.regexp - -/** Pointed regular hedge expressions, a useful subclass of regular hedge expressions. 
- * - * @author Burak Emir - * @version 1.0 - */ -@deprecated("This class will be removed", "2.10.0") -abstract class PointedHedgeExp extends Base { - - type _regexpT <: RegExp - type _labelT - - case class Node(label: _labelT, r: _regexpT) extends RegExp { - final val isNullable = false - } - - case class TopIter(r1: _regexpT, r2: _regexpT) extends RegExp { - final val isNullable = r1.isNullable && r2.isNullable //? - } - - case object Point extends RegExp { - final val isNullable = false - } - -} diff --git a/src/library/scala/util/regexp/SyntaxError.scala b/src/library/scala/util/regexp/SyntaxError.scala deleted file mode 100644 index 1788fdfb84..0000000000 --- a/src/library/scala/util/regexp/SyntaxError.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.util.regexp - -/** This runtime exception is thrown if an attempt to instantiate a - * syntactically incorrect expression is detected. - * - * @author Burak Emir - * @version 1.0 - */ -@deprecated("This class will be removed", "2.10.0") -class SyntaxError(e: String) extends RuntimeException(e) diff --git a/src/library/scala/util/regexp/WordExp.scala b/src/library/scala/util/regexp/WordExp.scala deleted file mode 100644 index 3c0c2ec156..0000000000 --- a/src/library/scala/util/regexp/WordExp.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - - -package scala.util.regexp - -/** - * The class `WordExp` provides regular word expressions. - * - * Users have to instantiate type member `_regexpT <;: RegExp` - * (from class `Base`) and a type member `_labelT <;: Label`. 
- * - * Here is a short example: - * {{{ - * import scala.util.regexp._ - * import scala.util.automata._ - * object MyLang extends WordExp { - * type _regexpT = RegExp - * type _labelT = MyChar - * - * case class MyChar(c:Char) extends Label - * } - * import MyLang._ - * // (a* | b)* - * val rex = Star(Alt(Star(Letter(MyChar('a'))),Letter(MyChar('b')))) - * object MyBerriSethi extends WordBerrySethi { - * override val lang = MyLang - * } - * val nfa = MyBerriSethi.automatonFrom(Sequ(rex), 1) - * }}} - * - * @author Burak Emir - * @version 1.0 - */ -@deprecated("This class will be removed", "2.10.0") -abstract class WordExp extends Base { - - abstract class Label - - type _regexpT <: RegExp - type _labelT <: Label - - case class Letter(a: _labelT) extends RegExp { - final lazy val isNullable = false - var pos = -1 - } - - case class Wildcard() extends RegExp { - final lazy val isNullable = false - var pos = -1 - } -} diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala index abc71f55bd..debdf37975 100644 --- a/src/library/scala/xml/dtd/ContentModel.scala +++ b/src/library/scala/xml/dtd/ContentModel.scala @@ -11,8 +11,7 @@ package scala.xml package dtd -import scala.util.regexp.WordExp -import scala.util.automata._ +import scala.xml.dtd.impl._ import scala.xml.Utility.sbToString import PartialFunction._ diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala index ce067bee79..b2510baa18 100644 --- a/src/library/scala/xml/dtd/DocType.scala +++ b/src/library/scala/xml/dtd/DocType.scala @@ -18,8 +18,7 @@ package dtd * @param extID NoExternalID or the external ID of this doctype * @param intSubset sequence of internal subset declarations */ -case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl]) -{ +case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl]) { if (!Utility.isName(name)) throw new IllegalArgumentException(name+" must be an XML Name") @@ -33,8 +32,7 @@ case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl]) } } -object DocType -{ +object DocType { /** Creates a doctype with no external id, nor internal subset declarations. */ def apply(name: String): DocType = apply(name, NoExternalID, Nil) } diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala index 66951bf390..e73e209daa 100644 --- a/src/library/scala/xml/dtd/ElementValidator.scala +++ b/src/library/scala/xml/dtd/ElementValidator.scala @@ -12,10 +12,12 @@ package scala.xml package dtd import PartialFunction._ +import scala.collection.mutable + import ContentModel.ElemName import MakeValidationException._ // @todo other exceptions -import scala.util.automata._ -import scala.collection.mutable + +import impl._ /** validate children and/or attributes of an element * exceptions are created but not thrown. 
diff --git a/src/library/scala/xml/dtd/ExternalID.scala b/src/library/scala/xml/dtd/ExternalID.scala index e346f89d0a..80ada0caaa 100644 --- a/src/library/scala/xml/dtd/ExternalID.scala +++ b/src/library/scala/xml/dtd/ExternalID.scala @@ -14,8 +14,7 @@ package dtd * * @author Burak Emir */ -abstract class ExternalID extends parsing.TokenTests -{ +abstract class ExternalID extends parsing.TokenTests { def quoted(s: String) = { val c = if (s contains '"') '\'' else '"' c + s + c diff --git a/src/library/scala/xml/dtd/impl/Base.scala b/src/library/scala/xml/dtd/impl/Base.scala new file mode 100644 index 0000000000..dd277779f6 --- /dev/null +++ b/src/library/scala/xml/dtd/impl/Base.scala @@ -0,0 +1,66 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + + +package scala.xml.dtd.impl + +/** Basic regular expressions. + * + * @author Burak Emir + * @version 1.0 + */ + +@deprecated("This class will be removed", "2.10.0") +private[dtd] abstract class Base { + type _regexpT <: RegExp + + abstract class RegExp { + val isNullable: Boolean + } + + object Alt { + /** `Alt( R,R,R* )`. */ + def apply(rs: _regexpT*) = + if (rs.size < 2) throw new SyntaxError("need at least 2 branches in Alt") + else new Alt(rs: _*) + // Can't enforce that statically without changing the interface + // def apply(r1: _regexpT, r2: _regexpT, rs: _regexpT*) = new Alt(Seq(r1, r2) ++ rs: _*) + def unapplySeq(x: Alt) = Some(x.rs) + } + + class Alt private (val rs: _regexpT*) extends RegExp { + final val isNullable = rs exists (_.isNullable) + } + + object Sequ { + /** Sequ( R,R* ) */ + def apply(rs: _regexpT*) = if (rs.isEmpty) Eps else new Sequ(rs: _*) + def unapplySeq(x: Sequ) = Some(x.rs) + } + + class Sequ private (val rs: _regexpT*) extends RegExp { + final val isNullable = rs forall (_.isNullable) + } + + case class Star(r: _regexpT) extends RegExp { + final lazy val isNullable = true + } + + // The empty Sequ. + case object Eps extends RegExp { + final lazy val isNullable = true + override def toString() = "Eps" + } + + /** this class can be used to add meta information to regexps. */ + class Meta(r1: _regexpT) extends RegExp { + final val isNullable = r1.isNullable + def r = r1 + } +} diff --git a/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala b/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala new file mode 100644 index 0000000000..99d5ab62e1 --- /dev/null +++ b/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala @@ -0,0 +1,97 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.xml.dtd.impl + +import scala.collection.{ mutable, immutable } + +// todo: replace global variable pos with acc + +/** This class turns a regular expression over `A` into a + * [[scala.util.automata.NondetWordAutom]] over `A` using the celebrated + * position automata construction (also called ''Berry-Sethi'' or ''Glushkov''). 
+ */ +@deprecated("This class will be removed", "2.10.0") +private[dtd] abstract class BaseBerrySethi { + val lang: Base + import lang.{ Alt, Eps, Meta, RegExp, Sequ, Star } + + protected var pos = 0 + + // results which hold all info for the NondetWordAutomaton + protected var follow: mutable.HashMap[Int, Set[Int]] = _ + + protected var finalTag: Int = _ + + protected var finals: immutable.Map[Int, Int] = _ // final states + + // constants -------------------------- + + final val emptySet: Set[Int] = Set() + + private def doComp(r: RegExp, compFunction: RegExp => Set[Int]) = r match { + case x: Alt => (x.rs map compFirst).foldLeft(emptySet)(_ ++ _) + case Eps => emptySet + case x: Meta => compFunction(x.r) + case x: Sequ => + val (l1, l2) = x.rs span (_.isNullable) + ((l1 ++ (l2 take 1)) map compFunction).foldLeft(emptySet)(_ ++ _) + case Star(t) => compFunction(t) + case _ => throw new IllegalArgumentException("unexpected pattern " + r.getClass) + } + + /** Computes `first(r)` for the word regexp `r`. */ + protected def compFirst(r: RegExp): Set[Int] = doComp(r, compFirst) + + /** Computes `last(r)` for the regexp `r`. */ + protected def compLast(r: RegExp): Set[Int] = doComp(r, compLast) + + /** Starts from the right-to-left + * precondition: pos is final + * pats are successor patterns of a Sequence node + */ + protected def compFollow(rs: Seq[RegExp]): Set[Int] = { + follow(0) = + if (rs.isEmpty) emptySet + else rs.foldRight(Set(pos))((p, fol) => { + val first = compFollow1(fol, p) + + if (p.isNullable) fol ++ first + else first + }) + + follow(0) + } + + /** Returns the first set of an expression, setting the follow set along the way. + */ + protected def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match { + case x: Alt => Set((x.rs reverseMap (compFollow1(fol1, _))).flatten: _*) + case x: Meta => compFollow1(fol1, x.r) + case x: Star => compFollow1(fol1 ++ compFirst(x.r), x.r) + case x: Sequ => + x.rs.foldRight(fol1) { (p, fol) => + val first = compFollow1(fol, p) + + if (p.isNullable) fol ++ first + else first + } + case _ => throw new IllegalArgumentException("unexpected pattern: " + r.getClass) + } + + /** Returns the "Sethi-length" of a pattern, creating the set of position along the way. + */ + protected def traverse(r: RegExp): Unit = r match { + // (is tree automaton stuff, more than Berry-Sethi) + case x: Alt => x.rs foreach traverse + case x: Sequ => x.rs foreach traverse + case x: Meta => traverse(x.r) + case Star(t) => traverse(t) + case _ => throw new IllegalArgumentException("unexp pattern " + r.getClass) + } +} diff --git a/src/library/scala/xml/dtd/impl/DetWordAutom.scala b/src/library/scala/xml/dtd/impl/DetWordAutom.scala new file mode 100644 index 0000000000..5c1dcb7ff8 --- /dev/null +++ b/src/library/scala/xml/dtd/impl/DetWordAutom.scala @@ -0,0 +1,49 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.xml.dtd.impl + +import scala.collection.{ mutable, immutable } + +/** A deterministic automaton. States are integers, where + * 0 is always the only initial state. Transitions are represented + * in the delta function. A default transitions is one that + * is taken when no other transition can be taken. + * All states are reachable. Accepting states are those for which + * the partial function 'finals' is defined. 
+ * + * @author Burak Emir + * @version 1.0 + */ +@deprecated("This class will be removed", "2.10.0") +private[dtd] abstract class DetWordAutom[T <: AnyRef] { + val nstates: Int + val finals: Array[Int] + val delta: Array[mutable.Map[T, Int]] + val default: Array[Int] + + def isFinal(q: Int) = finals(q) != 0 + def isSink(q: Int) = delta(q).isEmpty && default(q) == q + def next(q: Int, label: T) = delta(q).getOrElse(label, default(q)) + + override def toString() = { + val sb = new StringBuilder("[DetWordAutom nstates=") + sb.append(nstates) + sb.append(" finals=") + val map = Map(finals.zipWithIndex map (_.swap): _*) + sb.append(map.toString()) + sb.append(" delta=\n") + + for (i <- 0 until nstates) { + sb append "%d->%s\n".format(i, delta(i)) + if (i < default.length) + sb append "_>%s\n".format(default(i)) + } + sb.toString + } +} diff --git a/src/library/scala/xml/dtd/impl/Inclusion.scala b/src/library/scala/xml/dtd/impl/Inclusion.scala new file mode 100644 index 0000000000..0ae78519ca --- /dev/null +++ b/src/library/scala/xml/dtd/impl/Inclusion.scala @@ -0,0 +1,69 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + + +package scala.xml.dtd.impl + + +/** A fast test of language inclusion between minimal automata. + * inspired by the ''AMoRE automata library''. + * + * @author Burak Emir + * @version 1.0 + */ +@deprecated("This class will be removed", "2.10.0") +private[dtd] trait Inclusion[A <: AnyRef] { + + val labels: Seq[A] + + /** Returns true if `dfa1` is included in `dfa2`. + */ + def inclusion(dfa1: DetWordAutom[A], dfa2: DetWordAutom[A]) = { + + def encode(q1: Int, q2: Int) = 1 + q1 + q2 * dfa1.nstates + def decode2(c: Int) = (c-1) / (dfa1.nstates) //integer division + def decode1(c: Int) = (c-1) % (dfa1.nstates) + + var q1 = 0 //dfa1.initstate; // == 0 + var q2 = 0 //dfa2.initstate; // == 0 + + val max = 1 + dfa1.nstates * dfa2.nstates + val mark = new Array[Int](max) + + var result = true + var current = encode(q1, q2) + var last = current + mark(last) = max // mark (q1,q2) + while (current != 0 && result) { + //Console.println("current = [["+q1+" "+q2+"]] = "+current); + for (letter <- labels) { + val r1 = dfa1.next(q1,letter) + val r2 = dfa2.next(q2,letter) + if (dfa1.isFinal(r1) && !dfa2.isFinal(r2)) + result = false + val test = encode(r1, r2) + //Console.println("test = [["+r1+" "+r2+"]] = "+test); + if (mark(test) == 0) { + mark(last) = test + mark(test) = max + last = test + } + } + val ncurrent = mark(current) + if( ncurrent != max ) { + q1 = decode1(ncurrent) + q2 = decode2(ncurrent) + current = ncurrent + } else { + current = 0 + } + } + result + } +} diff --git a/src/library/scala/xml/dtd/impl/NondetWordAutom.scala b/src/library/scala/xml/dtd/impl/NondetWordAutom.scala new file mode 100644 index 0000000000..ddb994c4a3 --- /dev/null +++ b/src/library/scala/xml/dtd/impl/NondetWordAutom.scala @@ -0,0 +1,59 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.xml.dtd.impl + +import scala.collection.{ immutable, mutable } + +/** A nondeterministic automaton. States are integers, where + * 0 is always the only initial state. Transitions are represented + * in the delta function. 
Default transitions are transitions that + * are taken when no other transitions can be applied. + * All states are reachable. Accepting states are those for which + * the partial function `finals` is defined. + */ +@deprecated("This class will be removed", "2.10.0") +private[dtd] abstract class NondetWordAutom[T <: AnyRef] { + val nstates: Int + val labels: Seq[T] + val finals: Array[Int] // 0 means not final + val delta: Array[mutable.Map[T, immutable.BitSet]] + val default: Array[immutable.BitSet] + + /** @return true if the state is final */ + final def isFinal(state: Int) = finals(state) > 0 + + /** @return tag of final state */ + final def finalTag(state: Int) = finals(state) + + /** @return true if the set of states contains at least one final state */ + final def containsFinal(Q: immutable.BitSet): Boolean = Q exists isFinal + + /** @return true if there are no accepting states */ + final def isEmpty = (0 until nstates) forall (x => !isFinal(x)) + + /** @return a immutable.BitSet with the next states for given state and label */ + def next(q: Int, a: T): immutable.BitSet = delta(q).getOrElse(a, default(q)) + + /** @return a immutable.BitSet with the next states for given state and label */ + def next(Q: immutable.BitSet, a: T): immutable.BitSet = next(Q, next(_, a)) + def nextDefault(Q: immutable.BitSet): immutable.BitSet = next(Q, default) + + private def next(Q: immutable.BitSet, f: (Int) => immutable.BitSet): immutable.BitSet = + (Q map f).foldLeft(immutable.BitSet.empty)(_ ++ _) + + private def finalStates = 0 until nstates filter isFinal + override def toString = { + + val finalString = Map(finalStates map (j => j -> finals(j)) : _*).toString + val deltaString = (0 until nstates) + .map(i => " %d->%s\n _>%s\n".format(i, delta(i), default(i))).mkString + + "[NondetWordAutom nstates=%d finals=%s delta=\n%s".format(nstates, finalString, deltaString) + } +} diff --git a/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala b/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala new file mode 100644 index 0000000000..0b5297510d --- /dev/null +++ b/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala @@ -0,0 +1,36 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + + +package scala.xml.dtd.impl + +/** Pointed regular hedge expressions, a useful subclass of regular hedge expressions. + * + * @author Burak Emir + * @version 1.0 + */ +@deprecated("This class will be removed", "2.10.0") +private[dtd] abstract class PointedHedgeExp extends Base { + + type _regexpT <: RegExp + type _labelT + + case class Node(label: _labelT, r: _regexpT) extends RegExp { + final val isNullable = false + } + + case class TopIter(r1: _regexpT, r2: _regexpT) extends RegExp { + final val isNullable = r1.isNullable && r2.isNullable //? 
+ } + + case object Point extends RegExp { + final val isNullable = false + } + +} diff --git a/src/library/scala/xml/dtd/impl/SubsetConstruction.scala b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala new file mode 100644 index 0000000000..8e4b5cc0f0 --- /dev/null +++ b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala @@ -0,0 +1,107 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.xml.dtd.impl + +import scala.collection.{ mutable, immutable } + +@deprecated("This class will be removed", "2.10.0") +private[dtd] class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) { + import nfa.labels + + def selectTag(Q: immutable.BitSet, finals: Array[Int]) = + (Q map finals filter (_ > 0)).min + + def determinize: DetWordAutom[T] = { + // for assigning numbers to bitsets + var indexMap = scala.collection.Map[immutable.BitSet, Int]() + var invIndexMap = scala.collection.Map[Int, immutable.BitSet]() + var ix = 0 + + // we compute the dfa with states = bitsets + val q0 = immutable.BitSet(0) // the set { 0 } + val sink = immutable.BitSet.empty // the set { } + + var states = Set(q0, sink) // initial set of sets + val delta = new mutable.HashMap[immutable.BitSet, mutable.HashMap[T, immutable.BitSet]] + var deftrans = mutable.Map(q0 -> sink, sink -> sink) // initial transitions + var finals: mutable.Map[immutable.BitSet, Int] = mutable.Map() + val rest = new mutable.Stack[immutable.BitSet] + + rest.push(sink, q0) + + def addFinal(q: immutable.BitSet) { + if (nfa containsFinal q) + finals = finals.updated(q, selectTag(q, nfa.finals)) + } + def add(Q: immutable.BitSet) { + if (!states(Q)) { + states += Q + rest push Q + addFinal(Q) + } + } + + addFinal(q0) // initial state may also be a final state + + while (!rest.isEmpty) { + val P = rest.pop + // assign a number to this bitset + indexMap = indexMap.updated(P, ix) + invIndexMap = invIndexMap.updated(ix, P) + ix += 1 + + // make transition map + val Pdelta = new mutable.HashMap[T, immutable.BitSet] + delta.update(P, Pdelta) + + labels foreach { label => + val Q = nfa.next(P, label) + Pdelta.update(label, Q) + add(Q) + } + + // collect default transitions + val Pdef = nfa nextDefault P + deftrans = deftrans.updated(P, Pdef) + add(Pdef) + } + + // create DetWordAutom, using indices instead of sets + val nstatesR = states.size + val deltaR = new Array[mutable.Map[T, Int]](nstatesR) + val defaultR = new Array[Int](nstatesR) + val finalsR = new Array[Int](nstatesR) + + for (Q <- states) { + val q = indexMap(Q) + val trans = delta(Q) + val transDef = deftrans(Q) + val qDef = indexMap(transDef) + val ntrans = new mutable.HashMap[T, Int]() + + for ((label, value) <- trans) { + val p = indexMap(value) + if (p != qDef) + ntrans.update(label, p) + } + + deltaR(q) = ntrans + defaultR(q) = qDef + } + + finals foreach { case (k,v) => finalsR(indexMap(k)) = v } + + new DetWordAutom [T] { + val nstates = nstatesR + val delta = deltaR + val default = defaultR + val finals = finalsR + } + } +} diff --git a/src/library/scala/xml/dtd/impl/SyntaxError.scala b/src/library/scala/xml/dtd/impl/SyntaxError.scala new file mode 100644 index 0000000000..b0e0b8b6cd --- /dev/null +++ b/src/library/scala/xml/dtd/impl/SyntaxError.scala @@ -0,0 +1,20 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ 
/__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + + +package scala.xml.dtd.impl + +/** This runtime exception is thrown if an attempt to instantiate a + * syntactically incorrect expression is detected. + * + * @author Burak Emir + * @version 1.0 + */ +@deprecated("This class will be removed", "2.10.0") +private[dtd] class SyntaxError(e: String) extends RuntimeException(e) diff --git a/src/library/scala/xml/dtd/impl/WordBerrySethi.scala b/src/library/scala/xml/dtd/impl/WordBerrySethi.scala new file mode 100644 index 0000000000..90d7fe760a --- /dev/null +++ b/src/library/scala/xml/dtd/impl/WordBerrySethi.scala @@ -0,0 +1,161 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.xml.dtd.impl + +import scala.collection.{ immutable, mutable } + +/** This class turns a regular expression into a [[scala.util.automata.NondetWordAutom]] + * celebrated position automata construction (also called ''Berry-Sethi'' or ''Glushkov''). + * + * @author Burak Emir + * @version 1.0 + */ +@deprecated("This class will be removed", "2.10.0") +private[dtd] abstract class WordBerrySethi extends BaseBerrySethi { + override val lang: WordExp + + import lang.{ Alt, Eps, Letter, RegExp, Sequ, Star, _labelT } + + protected var labels: mutable.HashSet[_labelT] = _ + // don't let this fool you, only labelAt is a real, surjective mapping + protected var labelAt: Map[Int, _labelT] = _ // new alphabet "gamma" + protected var deltaq: Array[mutable.HashMap[_labelT, List[Int]]] = _ // delta + protected var defaultq: Array[List[Int]] = _ // default transitions + protected var initials: Set[Int] = _ + + /** Computes `first(r)` where the word regexp `r`. + * + * @param r the regular expression + * @return the computed set `first(r)` + */ + protected override def compFirst(r: RegExp): Set[Int] = r match { + case x: Letter => Set(x.pos) + case _ => super.compFirst(r) + } + + /** Computes `last(r)` where the word regexp `r`. + * + * @param r the regular expression + * @return the computed set `last(r)` + */ + protected override def compLast(r: RegExp): Set[Int] = r match { + case x: Letter => Set(x.pos) + case _ => super.compLast(r) + } + + /** Returns the first set of an expression, setting the follow set along + * the way. 
+ * + * @param r the regular expression + * @return the computed set + */ + protected override def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match { + case x: Letter => follow(x.pos) = fol1 ; Set(x.pos) + case Eps => emptySet + case _ => super.compFollow1(fol1, r) + } + + /** Returns "Sethi-length" of a pattern, creating the set of position + * along the way + */ + + /** Called at the leaves of the regexp */ + protected def seenLabel(r: RegExp, i: Int, label: _labelT) { + labelAt = labelAt.updated(i, label) + this.labels += label + } + + // overridden in BindingBerrySethi + protected def seenLabel(r: RegExp, label: _labelT): Int = { + pos += 1 + seenLabel(r, pos, label) + pos + } + + // todo: replace global variable pos with acc + override def traverse(r: RegExp): Unit = r match { + case a @ Letter(label) => a.pos = seenLabel(r, label) + case Eps => // ignore + case _ => super.traverse(r) + } + + + protected def makeTransition(src: Int, dest: Int, label: _labelT) { + val q = deltaq(src) + q.update(label, dest :: q.getOrElse(label, Nil)) + } + + protected def initialize(subexpr: Seq[RegExp]): Unit = { + this.labelAt = immutable.Map() + this.follow = mutable.HashMap() + this.labels = mutable.HashSet() + this.pos = 0 + + // determine "Sethi-length" of the regexp + subexpr foreach traverse + + this.initials = Set(0) + } + + protected def initializeAutom() { + finals = immutable.Map.empty[Int, Int] // final states + deltaq = new Array[mutable.HashMap[_labelT, List[Int]]](pos) // delta + defaultq = new Array[List[Int]](pos) // default transitions + + for (j <- 0 until pos) { + deltaq(j) = mutable.HashMap[_labelT, List[Int]]() + defaultq(j) = Nil + } + } + + protected def collectTransitions(): Unit = // make transitions + for (j <- 0 until pos ; fol = follow(j) ; k <- fol) { + if (pos == k) finals = finals.updated(j, finalTag) + else makeTransition(j, k, labelAt(k)) + } + + def automatonFrom(pat: RegExp, finalTag: Int): NondetWordAutom[_labelT] = { + this.finalTag = finalTag + + pat match { + case x: Sequ => + // (1,2) compute follow + first + initialize(x.rs) + pos += 1 + compFollow(x.rs) // this used to be assigned to var globalFirst and then never used. 
+ + // (3) make automaton from follow sets + initializeAutom() + collectTransitions() + + if (x.isNullable) // initial state is final + finals = finals.updated(0, finalTag) + + val delta1 = immutable.Map(deltaq.zipWithIndex map (_.swap): _*) + val finalsArr = (0 until pos map (k => finals.getOrElse(k, 0))).toArray // 0 == not final + + val deltaArr: Array[mutable.Map[_labelT, immutable.BitSet]] = + (0 until pos map { x => + mutable.HashMap(delta1(x).toSeq map { case (k, v) => k -> immutable.BitSet(v: _*) } : _*) + }).toArray + + val defaultArr = (0 until pos map (k => immutable.BitSet(defaultq(k): _*))).toArray + + new NondetWordAutom[_labelT] { + val nstates = pos + val labels = WordBerrySethi.this.labels.toList + val finals = finalsArr + val delta = deltaArr + val default = defaultArr + } + case z => + automatonFrom(Sequ(z.asInstanceOf[this.lang._regexpT]), finalTag) + } + } +} diff --git a/src/library/scala/xml/dtd/impl/WordExp.scala b/src/library/scala/xml/dtd/impl/WordExp.scala new file mode 100644 index 0000000000..38f8aea697 --- /dev/null +++ b/src/library/scala/xml/dtd/impl/WordExp.scala @@ -0,0 +1,58 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + + +package scala.xml.dtd.impl + +/** + * The class `WordExp` provides regular word expressions. + * + * Users have to instantiate type member `_regexpT <;: RegExp` + * (from class `Base`) and a type member `_labelT <;: Label`. + * + * Here is a short example: + * {{{ + * import scala.util.regexp._ + * import scala.util.automata._ + * object MyLang extends WordExp { + * type _regexpT = RegExp + * type _labelT = MyChar + * + * case class MyChar(c:Char) extends Label + * } + * import MyLang._ + * // (a* | b)* + * val rex = Star(Alt(Star(Letter(MyChar('a'))),Letter(MyChar('b')))) + * object MyBerriSethi extends WordBerrySethi { + * override val lang = MyLang + * } + * val nfa = MyBerriSethi.automatonFrom(Sequ(rex), 1) + * }}} + * + * @author Burak Emir + * @version 1.0 + */ +@deprecated("This class will be removed", "2.10.0") +private[dtd] abstract class WordExp extends Base { + + abstract class Label + + type _regexpT <: RegExp + type _labelT <: Label + + case class Letter(a: _labelT) extends RegExp { + final lazy val isNullable = false + var pos = -1 + } + + case class Wildcard() extends RegExp { + final lazy val isNullable = false + var pos = -1 + } +} diff --git a/test/files/pos/t0422.scala b/test/files/pos/t0422.scala index cb3ba279d4..2adfa392d2 100644 --- a/test/files/pos/t0422.scala +++ b/test/files/pos/t0422.scala @@ -1,5 +1,4 @@ -import scala.util.regexp.WordExp; -import scala.util.automata.WordBerrySethi; +package scala.xml.dtd.impl object BoolWordExp extends WordExp { type _labelT = MyLabels; diff --git a/test/files/pos/t2698.scala b/test/files/pos/t2698.scala index 0e2662de61..7de50a13d6 100644 --- a/test/files/pos/t2698.scala +++ b/test/files/pos/t2698.scala @@ -1,5 +1,6 @@ +package scala.xml.dtd.impl + import scala.collection._ -import scala.util.regexp._ abstract class S2 { val lang: WordExp diff --git a/test/files/pos/t422.scala b/test/files/pos/t422.scala deleted file mode 100644 index cb3ba279d4..0000000000 --- a/test/files/pos/t422.scala +++ /dev/null @@ -1,17 +0,0 @@ -import scala.util.regexp.WordExp; -import scala.util.automata.WordBerrySethi; - -object BoolWordExp extends WordExp { - type _labelT = MyLabels; - type _regexpT = RegExp; - abstract 
class MyLabels extends Label ; - case class MyLabel(c:Char) extends MyLabels; -} - -object MyTranslator extends WordBerrySethi { - override val lang = BoolWordExp; - import lang._; - override protected def seenLabel( r:RegExp, i:Int, label: _labelT ): Unit = { - super.seenLabel(r,i,label) - } -} -- cgit v1.2.3 From 0a25ee3431d0314c782dd2e6620bc75c4de0d1a4 Mon Sep 17 00:00:00 2001 From: Evgeny Kotelnikov Date: Sat, 19 Jan 2013 17:42:12 +0400 Subject: SI-5824 Fix crashes in reify with _* Reification crashes if "foo: _*" construct is used. This happens besause type tree is represented either with TypeTree, or with Ident (present case), and `toPreTyperTypedOrAnnotated' only matches of the former. The fix is to cover the latter too. A test is included. --- src/compiler/scala/reflect/reify/codegen/GenTrees.scala | 2 +- src/compiler/scala/reflect/reify/phases/Reshape.scala | 12 ++++++++---- src/reflect/scala/reflect/internal/TreeInfo.scala | 7 +++++++ test/files/run/t5824.check | 1 + test/files/run/t5824.scala | 8 ++++++++ 5 files changed, 25 insertions(+), 5 deletions(-) create mode 100644 test/files/run/t5824.check create mode 100644 test/files/run/t5824.scala (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala index f60089c935..fd3673552b 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala @@ -177,7 +177,7 @@ trait GenTrees { // then we can reify the scrutinee as a symless AST and that will definitely be hygienic // why? because then typechecking of a scrutinee doesn't depend on the environment external to the quasiquote // otherwise we need to reify the corresponding type - if (sym.isLocalToReifee || tpe.isLocalToReifee) + if (tree.symbol.isLocalToReifee || tree.tpe.isLocalToReifee || treeInfo.isWildcardStarType(tree)) reifyProduct(tree) else { if (reifyDebug) println("reifying bound type %s (underlying type is %s)".format(sym, tpe)) diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index 5dd5f08b45..71fe4ddeea 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -187,8 +187,12 @@ trait Reshape { } private def toPreTyperTypedOrAnnotated(tree: Tree): Tree = tree match { - case ty @ Typed(expr1, tt @ TypeTree()) => + case ty @ Typed(expr1, tpt) => if (reifyDebug) println("reify typed: " + tree) + val original = tpt match { + case tt @ TypeTree() => tt.original + case tpt => tpt + } val annotatedArg = { def loop(tree: Tree): Tree = tree match { case annotated1 @ Annotated(ann, annotated2 @ Annotated(_, _)) => loop(annotated2) @@ -196,15 +200,15 @@ trait Reshape { case _ => EmptyTree } - loop(tt.original) + loop(original) } if (annotatedArg != EmptyTree) { if (annotatedArg.isType) { if (reifyDebug) println("verdict: was an annotated type, reify as usual") ty } else { - if (reifyDebug) println("verdict: was an annotated value, equivalent is " + tt.original) - toPreTyperTypedOrAnnotated(tt.original) + if (reifyDebug) println("verdict: was an annotated value, equivalent is " + original) + toPreTyperTypedOrAnnotated(original) } } else { if (reifyDebug) println("verdict: wasn't annotated, reify as usual") diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 032a4aebef..0a8f1cb9ed 100644 --- 
a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -412,6 +412,13 @@ abstract class TreeInfo { case _ => false } + /** Is the argument a wildcard star type of the form `_*`? + */ + def isWildcardStarType(tree: Tree): Boolean = tree match { + case Ident(tpnme.WILDCARD_STAR) => true + case _ => false + } + /** Is this pattern node a catch-all (wildcard or variable) pattern? */ def isDefaultCase(cdef: CaseDef) = cdef match { case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat) diff --git a/test/files/run/t5824.check b/test/files/run/t5824.check new file mode 100644 index 0000000000..3774da60e5 --- /dev/null +++ b/test/files/run/t5824.check @@ -0,0 +1 @@ +a b c diff --git a/test/files/run/t5824.scala b/test/files/run/t5824.scala new file mode 100644 index 0000000000..2ad169e2d1 --- /dev/null +++ b/test/files/run/t5824.scala @@ -0,0 +1,8 @@ +import scala.reflect.runtime.universe._ +import scala.tools.reflect.Eval + +object Test extends App { + reify { + println("%s %s %s".format(List("a", "b", "c"): _*)) + }.eval +} -- cgit v1.2.3 From 373b0015f948c19cae9b140e826f5b3075154115 Mon Sep 17 00:00:00 2001 From: secwall Date: Tue, 22 Jan 2013 18:38:19 +0400 Subject: Fixed typo in ProcessBuilder scaladoc. ProcessBuilder creation sample code did not compile due to an error in the import statement. --- src/library/scala/sys/process/ProcessBuilder.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala index d0b2ecfe73..30fd4d83ff 100644 --- a/src/library/scala/sys/process/ProcessBuilder.scala +++ b/src/library/scala/sys/process/ProcessBuilder.scala @@ -23,7 +23,7 @@ import ProcessBuilder._ * based on these factories made available in the package object * [[scala.sys.process]]. Here are some examples: * {{{ - * import.scala.sys.process._ + * import scala.sys.process._ * * // Executes "ls" and sends output to stdout * "ls".! -- cgit v1.2.3 From a01e535f3a53eb05c7c4dbc5a1fa511fc486ee7f Mon Sep 17 00:00:00 2001 From: Samy Dindane Date: Thu, 24 Jan 2013 17:42:02 +0100 Subject: Fix some typos Fixes mostly "a int" and "a a thing" kinds of typos. Also removes trailing whitespace, useless empty lines, and commented-out println() calls from "test/files/run/ctries-new/iterator.scala".
--- project/Packaging.scala | 2 +- src/compiler/scala/tools/nsc/ast/DocComments.scala | 2 +- .../tools/nsc/backend/icode/ICodeCheckers.scala | 2 +- .../scala/tools/nsc/doc/html/page/Template.scala | 2 +- src/compiler/scala/tools/nsc/io/Lexer.scala | 4 +- .../scala/tools/nsc/transform/ExplicitOuter.scala | 2 +- .../scala/tools/nsc/transform/LambdaLift.scala | 2 +- .../scala/collection/GenTraversableLike.scala | 2 +- .../collection/parallel/ParIterableLike.scala | 2 +- src/library/scala/math/BigInt.scala | 2 +- src/library/scala/math/Ordering.scala | 2 +- .../scala/util/automata/NondetWordAutom.scala | 4 +- src/reflect/scala/reflect/api/Names.scala | 4 +- src/reflect/scala/reflect/internal/Types.scala | 2 +- src/reflect/scala/reflect/macros/Context.scala | 2 +- .../akka/src/akka/dispatch/Dispatchers.scala | 16 +-- test/files/pos/t2421b_pos.scala | 2 +- test/files/run/ctries-new/iterator.scala | 114 +++++++++------------ test/files/run/lazy-locals.scala | 2 +- test/files/run/t4729/S_2.scala | 2 +- test/files/run/test-cpp.scala | 2 +- 21 files changed, 80 insertions(+), 94 deletions(-) (limited to 'src') diff --git a/project/Packaging.scala b/project/Packaging.scala index 6cb51a10a6..b0060283ac 100644 --- a/project/Packaging.scala +++ b/project/Packaging.scala @@ -24,7 +24,7 @@ trait Packaging { self: ScalaBuild.type => genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true), runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"), runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"), - // TODO - We could *really* clean this up in many ways. Let's look into making a a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap) + // TODO - We could *really* clean this up in many ways. Let's look into making a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap) // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate. // really need to figure out a better way to pull jline + jansi. makeDistMappings <<= (genBin, diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index c9bf131b79..7e6a323d3d 100755 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -320,7 +320,7 @@ trait DocComments { self: Global => } /** Expand variable occurrences in string `str`, until a fix point is reached or - * a expandLimit is exceeded. + * an expandLimit is exceeded. 
* * @param str The string to be expanded * @param sym The symbol for which doc comments are generated diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index 95913c7768..5d32795e24 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -464,7 +464,7 @@ abstract class ICodeCheckers { subtypeTest(elem, kind) pushStack(elem) case (a, b) => - icodeError(" expected and INT and a array reference, but " + + icodeError(" expected an INT and an array reference, but " + a + ", " + b + " found"); } diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala index ff64fb4c0f..0685f9ad70 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala @@ -760,7 +760,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp if (isReduced) NodeSeq.Empty else { def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = { def param0(vl: ValueParam): NodeSeq = - // notice the }{ in the next lines, they are necessary to avoid a undesired withspace in output + // notice the }{ in the next lines, they are necessary to avoid an undesired withspace in output { Text(vl.name) }{ Text(": ") ++ typeToHtml(vl.resultType, hasLinks) }{ diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala index e843f8d5ce..aed6e882e6 100644 --- a/src/compiler/scala/tools/nsc/io/Lexer.scala +++ b/src/compiler/scala/tools/nsc/io/Lexer.scala @@ -7,8 +7,8 @@ import java.io.Reader */ object Lexer { - /** An exception raised if a if input does not correspond to what's expected - * @param rdr the lexer form which the bad input is read + /** An exception raised if an input does not correspond to what's expected + * @param rdr the lexer from which the bad input is read * @param msg the error message */ class MalformedInput(val rdr: Lexer, val msg: String) extends Exception("Malformed JSON input at "+rdr.tokenPos+": "+msg) diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 9696692146..7a2caf2330 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -104,7 +104,7 @@ abstract class ExplicitOuter extends InfoTransform *
      *
    1. * Add an outer parameter to the formal parameters of a constructor - * in a inner non-trait class; + * in an inner non-trait class; *
    2. *
    3. * Add a protected $outer field to an inner class which is diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index 0198f959e3..b081fb7e3f 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -245,7 +245,7 @@ abstract class LambdaLift extends InfoTransform { freshen(sym.name + nme.NAME_JOIN_STRING + sym.owner.name + nme.NAME_JOIN_STRING) } else { // SI-5652 If the lifted symbol is accessed from an inner class, it will be made public. (where?) - // Generating a a unique name, mangled with the enclosing class name, avoids a VerifyError + // Generating a unique name, mangled with the enclosing class name, avoids a VerifyError // in the case that a sub-class happens to lifts out a method with the *same* name. val name = freshen("" + sym.name + nme.NAME_JOIN_STRING) if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name.toTermName, sym.enclClass) diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala index 46134c921e..1080c54325 100644 --- a/src/library/scala/collection/GenTraversableLike.scala +++ b/src/library/scala/collection/GenTraversableLike.scala @@ -238,7 +238,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with * // lettersOf will return a Set[Char], not a Seq * def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq) * - * // xs will be a an Iterable[Int] + * // xs will be an Iterable[Int] * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2) * * // ys will be a Map[Int, Int] diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index d77e5a6744..6eda29e6b0 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -454,7 +454,7 @@ self: ParIterableLike[T, Repr, Sequential] => def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op) - /** Applies a function `f` to all the elements of $coll in a undefined order. + /** Applies a function `f` to all the elements of $coll in an undefined order. * * @tparam U the result type of the function applied to each element, which is always discarded * @param f function applied to each element diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index 0cddd71721..a96af4615d 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -358,7 +358,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo def charValue = intValue.toChar /** Converts this BigInt to an int. - * If the BigInt is too big to fit in a int, only the low-order 32 bits + * If the BigInt is too big to fit in an int, only the low-order 32 bits * are returned. Note that this conversion can lose information about the * overall magnitude of the BigInt value as well as return a result with * the opposite sign. 
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index e9b92541c2..aea512a541 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -33,7 +33,7 @@ import scala.language.{implicitConversions, higherKinds} * }}} * * An Ordering[T] is implemented by specifying compare(a:T, b:T), which - * decides how to order to instances a and b. Instances of Ordering[T] can be + * decides how to order two instances a and b. Instances of Ordering[T] can be * used by things like scala.util.Sorting to sort collections like Array[T]. * * For example: diff --git a/src/library/scala/util/automata/NondetWordAutom.scala b/src/library/scala/util/automata/NondetWordAutom.scala index 24c6612d0f..3a57d87654 100644 --- a/src/library/scala/util/automata/NondetWordAutom.scala +++ b/src/library/scala/util/automata/NondetWordAutom.scala @@ -37,10 +37,10 @@ abstract class NondetWordAutom[T <: AnyRef] { /** @return true if there are no accepting states */ final def isEmpty = (0 until nstates) forall (x => !isFinal(x)) - /** @return a immutable.BitSet with the next states for given state and label */ + /** @return an immutable.BitSet with the next states for given state and label */ def next(q: Int, a: T): immutable.BitSet = delta(q).getOrElse(a, default(q)) - /** @return a immutable.BitSet with the next states for given state and label */ + /** @return an immutable.BitSet with the next states for given state and label */ def next(Q: immutable.BitSet, a: T): immutable.BitSet = next(Q, next(_, a)) def nextDefault(Q: immutable.BitSet): immutable.BitSet = next(Q, default) diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala index 8add98d815..6290b88d33 100644 --- a/src/reflect/scala/reflect/api/Names.scala +++ b/src/reflect/scala/reflect/api/Names.scala @@ -75,10 +75,10 @@ trait Names { * @group API */ abstract class NameApi { - /** Checks wether the name is a a term name */ + /** Checks wether the name is a term name */ def isTermName: Boolean - /** Checks wether the name is a a type name */ + /** Checks wether the name is a type name */ def isTypeName: Boolean /** Returns a term name that wraps the same string as `this` */ diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 1ef983c1c9..98cc9a88b8 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -153,7 +153,7 @@ trait Types extends api.Types { self: SymbolTable => } /** No sync necessary, because record should only - * be called from within a undo or undoUnless block, + * be called from within an undo or undoUnless block, * which is already synchronized. */ private[reflect] def record(tv: TypeVar) = { diff --git a/src/reflect/scala/reflect/macros/Context.scala b/src/reflect/scala/reflect/macros/Context.scala index 1adc6928da..f4a4631e53 100644 --- a/src/reflect/scala/reflect/macros/Context.scala +++ b/src/reflect/scala/reflect/macros/Context.scala @@ -52,7 +52,7 @@ trait Context extends Aliases /** The prefix tree from which the macro is selected. 
* - * For a example, for a macro `filter` defined as an instance method on a collection `Coll`, + * For example, for a macro `filter` defined as an instance method on a collection `Coll`, * `prefix` represents an equivalent of `this` for normal instance methods: * * {{{ diff --git a/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala b/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala index 7dd1bf6218..a567d0bcb0 100644 --- a/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala +++ b/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala @@ -89,7 +89,7 @@ object Dispatchers { new ThreadBasedDispatcher(actor, mailboxCapacity, pushTimeOut) /** - * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool. + * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool. *

      * Has a fluent builder interface for configuring its semantics. */ @@ -97,7 +97,7 @@ object Dispatchers { ThreadPoolConfigDispatcherBuilder(config => new ExecutorBasedEventDrivenDispatcher(name, config), ThreadPoolConfig()) /** - * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool. + * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool. *

      * Has a fluent builder interface for configuring its semantics. */ @@ -106,7 +106,7 @@ object Dispatchers { new ExecutorBasedEventDrivenDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType, config), ThreadPoolConfig()) /** - * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool. + * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool. *

      * Has a fluent builder interface for configuring its semantics. */ @@ -115,7 +115,7 @@ object Dispatchers { new ExecutorBasedEventDrivenDispatcher(name, throughput, throughputDeadlineMs, mailboxType, config), ThreadPoolConfig()) /** - * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. + * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. *

      * Has a fluent builder interface for configuring its semantics. */ @@ -123,7 +123,7 @@ object Dispatchers { ThreadPoolConfigDispatcherBuilder(config => new ExecutorBasedEventDrivenWorkStealingDispatcher(name, config), ThreadPoolConfig()) /** - * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. + * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. *

      * Has a fluent builder interface for configuring its semantics. */ @@ -132,7 +132,7 @@ object Dispatchers { new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, MAILBOX_TYPE, config), ThreadPoolConfig()) /** - * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. + * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. *

      * Has a fluent builder interface for configuring its semantics. */ @@ -141,7 +141,7 @@ object Dispatchers { new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType, config), ThreadPoolConfig()) /** - * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. + * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. *

      * Has a fluent builder interface for configuring its semantics. */ @@ -224,4 +224,4 @@ class ExecutorBasedEventDrivenWorkStealingDispatcherConfigurator extends Message mailboxType(config), threadPoolConfig)).build } -} \ No newline at end of file +} diff --git a/test/files/pos/t2421b_pos.scala b/test/files/pos/t2421b_pos.scala index 8b848abb75..0df3461662 100644 --- a/test/files/pos/t2421b_pos.scala +++ b/test/files/pos/t2421b_pos.scala @@ -11,7 +11,7 @@ object Test { f } -/* bug: +/* bug: error: ambiguous implicit values: both method b in object Test1 of type [X <: Test1.B]Test1.F[X] and method a in object Test1 of type => Test1.F[Test1.A] diff --git a/test/files/run/ctries-new/iterator.scala b/test/files/run/ctries-new/iterator.scala index b953a40e00..bb1175e61b 100644 --- a/test/files/run/ctries-new/iterator.scala +++ b/test/files/run/ctries-new/iterator.scala @@ -1,144 +1,134 @@ - - - - import collection._ import collection.concurrent.TrieMap - - object IteratorSpec extends Spec { - + def test() { "work for an empty trie" in { val ct = new TrieMap val it = ct.iterator - + it.hasNext shouldEqual (false) evaluating { it.next() }.shouldProduce [NoSuchElementException] } - + def nonEmptyIteratorCheck(sz: Int) { val ct = new TrieMap[Wrap, Int] for (i <- 0 until sz) ct.put(new Wrap(i), i) - + val it = ct.iterator val tracker = mutable.Map[Wrap, Int]() for (i <- 0 until sz) { assert(it.hasNext == true) tracker += it.next } - + it.hasNext shouldEqual (false) evaluating { it.next() }.shouldProduce [NoSuchElementException] tracker.size shouldEqual (sz) tracker shouldEqual (ct) } - + "work for a 1 element trie" in { nonEmptyIteratorCheck(1) } - + "work for a 2 element trie" in { nonEmptyIteratorCheck(2) } - + "work for a 3 element trie" in { nonEmptyIteratorCheck(3) } - + "work for a 5 element trie" in { nonEmptyIteratorCheck(5) } - + "work for a 10 element trie" in { nonEmptyIteratorCheck(10) } - + "work for a 20 element trie" in { nonEmptyIteratorCheck(20) } - + "work for a 50 element trie" in { nonEmptyIteratorCheck(50) } - + "work for a 100 element trie" in { nonEmptyIteratorCheck(100) } - + "work for a 1k element trie" in { nonEmptyIteratorCheck(1000) } - + "work for a 5k element trie" in { nonEmptyIteratorCheck(5000) } - + "work for a 75k element trie" in { nonEmptyIteratorCheck(75000) } - + "work for a 250k element trie" in { nonEmptyIteratorCheck(500000) } - + def nonEmptyCollideCheck(sz: Int) { val ct = new TrieMap[DumbHash, Int] for (i <- 0 until sz) ct.put(new DumbHash(i), i) - + val it = ct.iterator val tracker = mutable.Map[DumbHash, Int]() for (i <- 0 until sz) { assert(it.hasNext == true) tracker += it.next } - + it.hasNext shouldEqual (false) evaluating { it.next() }.shouldProduce [NoSuchElementException] tracker.size shouldEqual (sz) tracker shouldEqual (ct) } - + "work for colliding hashcodes, 2 element trie" in { nonEmptyCollideCheck(2) } - + "work for colliding hashcodes, 3 element trie" in { nonEmptyCollideCheck(3) } - + "work for colliding hashcodes, 5 element trie" in { nonEmptyCollideCheck(5) } - + "work for colliding hashcodes, 10 element trie" in { nonEmptyCollideCheck(10) } - + "work for colliding hashcodes, 100 element trie" in { nonEmptyCollideCheck(100) } - + "work for colliding hashcodes, 500 element trie" in { nonEmptyCollideCheck(500) } - + "work for colliding hashcodes, 5k element trie" in { nonEmptyCollideCheck(5000) } - + def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]) { if (a != b) { println(a.size + " vs " + b.size) - // println(a) - // println(b) 
- // println(a.toSeq.sortBy((x: (Wrap, Int)) => x._1.i)) - // println(b.toSeq.sortBy((x: (Wrap, Int)) => x._1.i)) } assert(a == b) } - + "be consistent when taken with concurrent modifications" in { val sz = 25000 val W = 15 @@ -146,40 +136,40 @@ object IteratorSpec extends Spec { val checks = 5 val ct = new TrieMap[Wrap, Int] for (i <- 0 until sz) ct.put(new Wrap(i), i) - + class Modifier extends Thread { override def run() { for (i <- 0 until sz) ct.putIfAbsent(new Wrap(i), i) match { case Some(_) => ct.remove(new Wrap(i)) - case None => + case None => } } } - + def consistentIteration(ct: TrieMap[Wrap, Int], checks: Int) { class Iter extends Thread { override def run() { val snap = ct.readOnlySnapshot() val initial = mutable.Map[Wrap, Int]() for (kv <- snap) initial += kv - + for (i <- 0 until checks) { assertEqual(snap.iterator.toMap, initial) } } } - + val iter = new Iter iter.start() iter.join() } - + val threads = for (_ <- 0 until W) yield new Modifier threads.foreach(_.start()) for (_ <- 0 until S) consistentIteration(ct, checks) threads.foreach(_.join()) } - + "be consistent with a concurrent removal with a well defined order" in { val sz = 150000 val sgroupsize = 10 @@ -187,17 +177,16 @@ object IteratorSpec extends Spec { val removerslowdown = 50 val ct = new TrieMap[Wrap, Int] for (i <- 0 until sz) ct.put(new Wrap(i), i) - + class Remover extends Thread { override def run() { for (i <- 0 until sz) { assert(ct.remove(new Wrap(i)) == Some(i)) for (i <- 0 until removerslowdown) ct.get(new Wrap(i)) // slow down, mate } - //println("done removing") } } - + def consistentIteration(it: Iterator[(Wrap, Int)]) = { class Iter extends Thread { override def run() { @@ -210,7 +199,7 @@ object IteratorSpec extends Spec { } new Iter } - + val remover = new Remover remover.start() for (_ <- 0 until sgroupnum) { @@ -218,27 +207,25 @@ object IteratorSpec extends Spec { iters.foreach(_.start()) iters.foreach(_.join()) } - //println("done with iterators") remover.join() } - + "be consistent with a concurrent insertion with a well defined order" in { val sz = 150000 val sgroupsize = 10 val sgroupnum = 10 val inserterslowdown = 50 val ct = new TrieMap[Wrap, Int] - + class Inserter extends Thread { override def run() { for (i <- 0 until sz) { assert(ct.put(new Wrap(i), i) == None) for (i <- 0 until inserterslowdown) ct.get(new Wrap(i)) // slow down, mate } - //println("done inserting") } } - + def consistentIteration(it: Iterator[(Wrap, Int)]) = { class Iter extends Thread { override def run() { @@ -251,7 +238,7 @@ object IteratorSpec extends Spec { } new Iter } - + val inserter = new Inserter inserter.start() for (_ <- 0 until sgroupnum) { @@ -259,31 +246,30 @@ object IteratorSpec extends Spec { iters.foreach(_.start()) iters.foreach(_.join()) } - //println("done with iterators") inserter.join() } - + "work on a yet unevaluated snapshot" in { val sz = 50000 val ct = new TrieMap[Wrap, Int] for (i <- 0 until sz) ct.update(new Wrap(i), i) - + val snap = ct.snapshot() val it = snap.iterator - + while (it.hasNext) it.next() } - + "be duplicated" in { val sz = 50 val ct = collection.parallel.mutable.ParTrieMap((0 until sz) zip (0 until sz): _*) val it = ct.splitter for (_ <- 0 until (sz / 2)) it.next() val dupit = it.dup - + it.toList shouldEqual dupit.toList } - + } - + } diff --git a/test/files/run/lazy-locals.scala b/test/files/run/lazy-locals.scala index aca15d0357..8d4c61be8c 100644 --- a/test/files/run/lazy-locals.scala +++ b/test/files/run/lazy-locals.scala @@ -120,7 +120,7 @@ object Test extends 
App { t } - /** test recursive method with lazy vals and a all vals forced */ + /** test recursive method with lazy vals and all vals forced */ def testLazyRecMany(n: Int): Int = { lazy val t = { println("forced lazy val t at n = " + n); 42 } if (n > 0) { diff --git a/test/files/run/t4729/S_2.scala b/test/files/run/t4729/S_2.scala index e34e3d34d4..a80afb0257 100644 --- a/test/files/run/t4729/S_2.scala +++ b/test/files/run/t4729/S_2.scala @@ -20,7 +20,7 @@ object Test { (new ScalaVarArgs).method("1", "2") (new ScalaVarArgs: J_1).method("1", "2") - //[4] Not Ok -- error when assigning anonymous class to a explictly typed val + //[4] Not Ok -- error when assigning anonymous class to an explictly typed val // Compiler error: object creation impossible, since method method in trait VarArgs of type (s: [java.lang.String])Unit is not defined val tagged: J_1 = new J_1 { def method(s: String*) { println(s) } diff --git a/test/files/run/test-cpp.scala b/test/files/run/test-cpp.scala index 5b3bc7b746..f9fa85c4d0 100644 --- a/test/files/run/test-cpp.scala +++ b/test/files/run/test-cpp.scala @@ -3,7 +3,7 @@ * in the copy-propagation performed before ClosureElimination. * * In the general case, the local variable 'l' is connected through - * a alias chain with other local variables and at the end of the + * an alias chain with other local variables and at the end of the * alias chain there may be a Value, call it 'v'. * * If 'v' is cheaper to access (it is a Deref(This) or Const(_)), then -- cgit v1.2.3 From 950e938bb08afc08ba6b91af5468d0f703924356 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 25 Jan 2013 13:39:20 -0800 Subject: Revert "SI-5824 Fix crashes in reify with _*" This reverts commit 0a25ee3431d0314c782dd2e6620bc75c4de0d1a4. It came with a test failure which I overlooked. --- src/compiler/scala/reflect/reify/codegen/GenTrees.scala | 2 +- src/compiler/scala/reflect/reify/phases/Reshape.scala | 12 ++++-------- src/reflect/scala/reflect/internal/TreeInfo.scala | 7 ------- test/files/run/t5824.check | 1 - test/files/run/t5824.scala | 8 -------- 5 files changed, 5 insertions(+), 25 deletions(-) delete mode 100644 test/files/run/t5824.check delete mode 100644 test/files/run/t5824.scala (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala index fd3673552b..f60089c935 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala @@ -177,7 +177,7 @@ trait GenTrees { // then we can reify the scrutinee as a symless AST and that will definitely be hygienic // why? 
because then typechecking of a scrutinee doesn't depend on the environment external to the quasiquote // otherwise we need to reify the corresponding type - if (tree.symbol.isLocalToReifee || tree.tpe.isLocalToReifee || treeInfo.isWildcardStarType(tree)) + if (sym.isLocalToReifee || tpe.isLocalToReifee) reifyProduct(tree) else { if (reifyDebug) println("reifying bound type %s (underlying type is %s)".format(sym, tpe)) diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index 71fe4ddeea..5dd5f08b45 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -187,12 +187,8 @@ trait Reshape { } private def toPreTyperTypedOrAnnotated(tree: Tree): Tree = tree match { - case ty @ Typed(expr1, tpt) => + case ty @ Typed(expr1, tt @ TypeTree()) => if (reifyDebug) println("reify typed: " + tree) - val original = tpt match { - case tt @ TypeTree() => tt.original - case tpt => tpt - } val annotatedArg = { def loop(tree: Tree): Tree = tree match { case annotated1 @ Annotated(ann, annotated2 @ Annotated(_, _)) => loop(annotated2) @@ -200,15 +196,15 @@ trait Reshape { case _ => EmptyTree } - loop(original) + loop(tt.original) } if (annotatedArg != EmptyTree) { if (annotatedArg.isType) { if (reifyDebug) println("verdict: was an annotated type, reify as usual") ty } else { - if (reifyDebug) println("verdict: was an annotated value, equivalent is " + original) - toPreTyperTypedOrAnnotated(original) + if (reifyDebug) println("verdict: was an annotated value, equivalent is " + tt.original) + toPreTyperTypedOrAnnotated(tt.original) } } else { if (reifyDebug) println("verdict: wasn't annotated, reify as usual") diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 0a8f1cb9ed..032a4aebef 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -412,13 +412,6 @@ abstract class TreeInfo { case _ => false } - /** Is the argument a wildcard star type of the form `_*`? - */ - def isWildcardStarType(tree: Tree): Boolean = tree match { - case Ident(tpnme.WILDCARD_STAR) => true - case _ => false - } - /** Is this pattern node a catch-all (wildcard or variable) pattern? */ def isDefaultCase(cdef: CaseDef) = cdef match { case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat) diff --git a/test/files/run/t5824.check b/test/files/run/t5824.check deleted file mode 100644 index 3774da60e5..0000000000 --- a/test/files/run/t5824.check +++ /dev/null @@ -1 +0,0 @@ -a b c diff --git a/test/files/run/t5824.scala b/test/files/run/t5824.scala deleted file mode 100644 index 2ad169e2d1..0000000000 --- a/test/files/run/t5824.scala +++ /dev/null @@ -1,8 +0,0 @@ -import scala.reflect.runtime.universe._ -import scala.tools.reflect.Eval - -object Test extends App { - reify { - println("%s %s %s".format(List("a", "b", "c"): _*)) - }.eval -} -- cgit v1.2.3 From 982633a77ccef39e2c2611b88294d39d022a0f03 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 8 Jan 2013 18:36:30 +0100 Subject: SI-6556 Remove unneeded workaround in erasure. In fb6e687, a workaround was added, described as: > See SI-6556. It seems in some cases the result constructor > type of an anonymous class is a different version of the class. > This has nothing to do with value classes per se. > We simply used a less discriminating transform before, that > did not look at the cases in detail. 
> It seems there is a deeper problem here, which needs > following up to. But we will not risk regressions > in 2.10 because of it. I was expecting an arduous investigation into this, but happily the underlying problem was already solved in the fix for SI-6648 (1587a77e). (Chalk up another win for libscala's scala-hash!) This commit escalates the logging to an assertion. It also reverts the other change in fb6e687, which introduced a case for RefinedType that is not needed to pass pos/t6556.scala. --- src/reflect/scala/reflect/internal/transform/Erasure.scala | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index 59bf51d638..5581c78a3a 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -214,9 +214,6 @@ trait Erasure { specialConstructorErasure(clazz, restpe) case ExistentialType(tparams, restpe) => specialConstructorErasure(clazz, restpe) - case RefinedType(parents, decls) => - specialConstructorErasure( - clazz, specialScalaErasure.mergeParents(parents)) case mt @ MethodType(params, restpe) => MethodType( cloneSymbolsAndModify(params, specialScalaErasure), @@ -225,15 +222,7 @@ trait Erasure { typeRef(pre, clazz, List()) case tp => if (!(clazz == ArrayClass || tp.isError)) - // See SI-6556. It seems in some cases the result constructor - // type of an anonymous class is a different version of the class. - // This has nothing to do with value classes per se. - // We simply used a less discriminating transform before, that - // did not look at the cases in detail. - // It seems there is a deeper problem here, which needs - // following up to. But we will not risk regressions - // in 2.10 because of it. - devWarning(s"unexpected constructor erasure $tp for $clazz") + assert(clazz == ArrayClass || tp.isError, s"!!! unexpected constructor erasure $tp for $clazz") specialScalaErasure(tp) } } -- cgit v1.2.3 From 61f29368fecf620585c8bb26bb83c746cbbe6571 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 5 Dec 2012 10:18:53 +0100 Subject: SI-4859 Don't rewrite CC().CC2() to new CC2 Where CC and CC2 are case classes. Attempting to do so leads to a "no legal prefix" error. Now, we restrict this optimization (living in RefChecks ?!) to case class applies with a "safe to inline" qualifier. --- .../scala/tools/nsc/typechecker/RefChecks.scala | 9 ++++++--- test/files/pos/t4859.scala | 17 +++++++++++++++++ test/pending/pos/t4859.scala | 15 --------------- 3 files changed, 23 insertions(+), 18 deletions(-) create mode 100644 test/files/pos/t4859.scala delete mode 100644 test/pending/pos/t4859.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index e88447c46d..f0ced1a8d4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1389,9 +1389,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case TypeApply(fun, targs) => isClassTypeAccessible(fun) case Select(module, apply) => - // Fixes SI-5626. Classes in refinement types cannot be constructed with `new`. In this case, - // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
- module.symbol.companionClass.isClass + ( // SI-4859 `CaseClass1().InnerCaseClass2()` must not be rewritten to `new InnerCaseClass2()` + treeInfo.isExprSafeToInline(module) && + // SI-5626 Classes in refinement types cannot be constructed with `new`. In this case, + // the companion class is actually not a ClassSymbol, but a reference to an abstract type. + module.symbol.companionClass.isClass + ) } val doTransform = diff --git a/test/files/pos/t4859.scala b/test/files/pos/t4859.scala new file mode 100644 index 0000000000..284a39b7ab --- /dev/null +++ b/test/files/pos/t4859.scala @@ -0,0 +1,17 @@ +object O { + // error: C is not a legal prefix for a constructor + C().CC() + // but this works. + D().DD() +} + +case class C() { + case class CC() +} + +case class D() { + class DD() + object DD { + def apply() = new DD() + } +} diff --git a/test/pending/pos/t4859.scala b/test/pending/pos/t4859.scala deleted file mode 100644 index ec5abd966d..0000000000 --- a/test/pending/pos/t4859.scala +++ /dev/null @@ -1,15 +0,0 @@ -object O { - C().CC() - D().DD() -} - -case class C() { - case class CC() -} - -case class D() { - class DD() - object DD { - def apply() = new DD() - } -} -- cgit v1.2.3 From 3813d75fa8c83a593859919f5979a1a217e8da44 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 7 Jan 2013 08:58:11 +0100 Subject: Introduce a new Symbol test: isTopLevel. --- src/reflect/scala/reflect/internal/Symbols.scala | 3 +++ 1 file changed, 3 insertions(+) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 2d42d2df58..45ec99860d 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -821,6 +821,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => ) ) + /** Is this symbol owned by a package? */ + final def isTopLevel = owner.isPackageClass + /** Is this symbol locally defined? I.e. 
not accessed from outside `this` instance */ final def isLocal: Boolean = owner.isTerm -- cgit v1.2.3 From eb4b06544a4362231357686c39beef9dbe00d932 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 7 Jan 2013 08:58:53 +0100 Subject: Wider use of isTopLevel --- src/compiler/scala/reflect/reify/Taggers.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 2 +- src/compiler/scala/tools/nsc/transform/Erasure.scala | 2 +- src/compiler/scala/tools/nsc/transform/Flatten.scala | 2 +- .../scala/tools/nsc/typechecker/ContextErrors.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/Macros.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 16 ++++++++-------- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 4 ++-- src/reflect/scala/reflect/internal/Definitions.scala | 2 +- src/reflect/scala/reflect/internal/Symbols.scala | 14 +++++++------- src/reflect/scala/reflect/internal/TreeInfo.scala | 2 +- src/reflect/scala/reflect/runtime/JavaMirrors.scala | 6 +++--- 13 files changed, 30 insertions(+), 30 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala index af0341fd38..9659134e5b 100644 --- a/src/compiler/scala/reflect/reify/Taggers.scala +++ b/src/compiler/scala/reflect/reify/Taggers.scala @@ -58,7 +58,7 @@ abstract class Taggers { val result = tpe match { case coreTpe if coreTags contains coreTpe => - val ref = if (tagModule.owner.isPackageClass) Ident(tagModule) else Select(prefix, tagModule.name) + val ref = if (tagModule.isTopLevel) Ident(tagModule) else Select(prefix, tagModule.name) Select(ref, coreTags(coreTpe)) case _ => translatingReificationErrors(materializer) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 05d0bcf6b0..d751669612 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1378,7 +1378,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def compiles(sym: Symbol): Boolean = if (sym == NoSymbol) false else if (symSource.isDefinedAt(sym)) true - else if (!sym.owner.isPackageClass) compiles(sym.enclosingTopLevelClass) + else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClass) else if (sym.isModuleClass) compiles(sym.sourceModule) else false diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index e3b5efde1f..a8119a7a24 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -50,7 +50,7 @@ abstract class Erasure extends AddInterfaces if (sym == ArrayClass) args foreach traverse else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound || !args.isEmpty) result = true else if (sym.isClass) traverse(rebindInnerClass(pre, sym)) // #2585 - else if (!sym.owner.isPackageClass) traverse(pre) + else if (!sym.isTopLevel) traverse(pre) case PolyType(_, _) | ExistentialType(_, _) => result = true case RefinedType(parents, _) => diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index b2602f47de..7fe3a5da5c 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -116,7 +116,7 @@ abstract class Flatten extends InfoTransform { case ClassDef(_, _, _, _) if sym.isNestedClass => 
liftedDefs(sym.enclosingTopLevelClass.owner) += tree EmptyTree - case Select(qual, name) if (sym.isStaticModule && !sym.owner.isPackageClass) => + case Select(qual, name) if sym.isStaticModule && !sym.isTopLevel => exitingFlatten(atPos(tree.pos)(gen.mkAttributedRef(sym))) case _ => tree diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 2d4054e93b..96e205eaa1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -1048,8 +1048,8 @@ trait ContextErrors { val s1 = if (prevSym.isModule) "case class companion " else "" val s2 = if (prevSym.isSynthetic) "(compiler-generated) " + s1 else "" val s3 = if (prevSym.isCase) "case class " + prevSym.name else "" + prevSym - val where = if (currentSym.owner.isPackageClass != prevSym.owner.isPackageClass) { - val inOrOut = if (prevSym.owner.isPackageClass) "outside of" else "in" + val where = if (currentSym.isTopLevel != prevSym.isTopLevel) { + val inOrOut = if (prevSym.isTopLevel) "outside of" else "in" " %s package object %s".format(inOrOut, ""+prevSym.effectiveOwner.name) } else "" diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 5b27fd9352..7d6d47b410 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -124,7 +124,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { // todo. refactor when fixing SI-5498 def className: String = { def loop(sym: Symbol): String = sym match { - case sym if sym.owner.isPackageClass => + case sym if sym.isTopLevel => val suffix = if (sym.isModuleClass) "$" else "" sym.fullName + suffix case sym => diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 352090892e..2eabc126e3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -134,7 +134,7 @@ trait Namers extends MethodSynthesis { sym reset NoType setFlag newFlags setPos pos sym.moduleClass andAlso (updatePosFlags(_, pos, moduleClassFlags(flags))) - if (sym.owner.isPackageClass) { + if (sym.isTopLevel) { companionSymbolOf(sym, context) andAlso { companion => val assignNoType = companion.rawInfo match { case _: SymLoader => true @@ -163,7 +163,7 @@ trait Namers extends MethodSynthesis { protected def conflict(newS: Symbol, oldS: Symbol) = ( ( !oldS.isSourceMethod || nme.isSetterName(newS.name) - || newS.owner.isPackageClass + || newS.isTopLevel ) && !( // @M: allow repeated use of `_` for higher-order type params (newS.owner.isTypeParameter || newS.owner.isAbstractType) @@ -174,7 +174,7 @@ trait Namers extends MethodSynthesis { ) private def allowsOverload(sym: Symbol) = ( - sym.isSourceMethod && sym.owner.isClass && !sym.owner.isPackageClass + sym.isSourceMethod && sym.owner.isClass && !sym.isTopLevel ) private def inCurrentScope(m: Symbol): Boolean = { @@ -352,7 +352,7 @@ trait Namers extends MethodSynthesis { val existing = context.scope.lookup(tree.name) val isRedefinition = ( existing.isType - && existing.owner.isPackageClass + && existing.isTopLevel && context.scope == existing.owner.info.decls && currentRun.canRedefine(existing) ) @@ -365,8 +365,8 @@ trait Namers extends MethodSynthesis { else assignAndEnterSymbol(tree) setFlag inConstructorFlag } clazz match { - case 
csym: ClassSymbol if csym.owner.isPackageClass => enterClassSymbol(tree, csym) - case _ => clazz + case csym: ClassSymbol if csym.isTopLevel => enterClassSymbol(tree, csym) + case _ => clazz } } @@ -425,7 +425,7 @@ trait Namers extends MethodSynthesis { m.moduleClass setFlag moduleClassFlags(moduleFlags) setPrivateWithin(tree, m.moduleClass) } - if (m.owner.isPackageClass && !m.isPackage) { + if (m.isTopLevel && !m.isPackage) { m.moduleClass.associatedFile = contextFile currentRun.symSource(m) = m.moduleClass.sourceFile registerTopLevelSym(m) @@ -1446,7 +1446,7 @@ trait Namers extends MethodSynthesis { fail(ImplicitConstr) if (!(sym.isTerm || (sym.isClass && !sym.isTrait))) fail(ImplicitNotTermOrClass) - if (sym.owner.isPackageClass) + if (sym.isTopLevel) fail(ImplicitAtToplevel) } if (sym.isClass) { diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index f0ced1a8d4..c316955a02 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1620,7 +1620,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans result match { case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) => - if (result.symbol.isLocal || result.symbol.owner.isPackageClass) + if (result.symbol.isLocal || result.symbol.isTopLevel) varianceValidator.traverse(result) case _ => } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 45f290d6c1..2298b9b2a4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1919,7 +1919,7 @@ trait Typers extends Adaptations with Tags { if (clazz.isTrait && hasSuperArgs(parents1.head)) ConstrArgsInParentOfTraitError(parents1.head, clazz) - if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.owner.isPackageClass) + if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel) unit.error(clazz.pos, "inner classes cannot be classfile annotations") if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members @@ -4788,7 +4788,7 @@ trait Typers extends Adaptations with Tags { else if (isPredefMemberNamed(sym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) typedClassOf(tree, TypeTree(pt.typeArgs.head)) else { - val pre1 = if (sym.owner.isPackageClass) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe + val pre1 = if (sym.isTopLevel) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name)) val (tree2, pre2) = makeAccessible(tree1, sym, pre1, qual) // SI-5967 Important to replace param type A* with Seq[A] when seen from from a reference, to avoid diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index dbf07c7f06..7295184d5b 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1139,7 +1139,7 @@ trait Definitions extends api.StandardDefinitions { } def flatNameString(sym: Symbol, separator: Char): String = if (sym == NoSymbol) "" // be more resistant to error conditions, e.g. 
neg/t3222.scala - else if (sym.owner.isPackageClass) sym.javaClassName + else if (sym.isTopLevel) sym.javaClassName else flatNameString(sym.owner, separator) + nme.NAME_JOIN_STRING + sym.simpleName def signature1(etp: Type): String = { if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.normalize.typeArgs.head)) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 45ec99860d..0969d9e3fa 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -591,7 +591,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Does this symbol denote a wrapper created by the repl? */ final def isInterpreterWrapper = ( (this hasFlag MODULE) - && owner.isPackageClass + && isTopLevel && nme.isReplWrapperName(name) ) final def getFlag(mask: Long): Long = { @@ -813,7 +813,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Is this symbol effectively final? I.e, it cannot be overridden */ final def isEffectivelyFinal: Boolean = ( (this hasFlag FINAL | PACKAGE) - || isModuleOrModuleClass && (owner.isPackageClass || !settings.overrideObjects.value) + || isModuleOrModuleClass && (isTopLevel || !settings.overrideObjects.value) || isTerm && ( isPrivate || isLocal @@ -873,7 +873,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // Does not always work if the rawInfo is a SourcefileLoader, see comment // in "def coreClassesFirst" in Global. - def exists = !owner.isPackageClass || { rawInfo.load(this); rawInfo != NoType } + def exists = !isTopLevel || { rawInfo.load(this); rawInfo != NoType } final def isInitialized: Boolean = validTo != NoPeriod @@ -1919,7 +1919,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** The top-level class containing this symbol. 
*/ def enclosingTopLevelClass: Symbol = - if (owner.isPackageClass) { + if (isTopLevel) { if (isClass) this else moduleClass } else owner.enclosingTopLevelClass @@ -2892,7 +2892,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isAnonymousClass = name containsName tpnme.ANON_CLASS_NAME override def isConcreteClass = !(this hasFlag ABSTRACT | TRAIT) override def isJavaInterface = hasAllFlags(JAVA | TRAIT) - override def isNestedClass = !owner.isPackageClass + override def isNestedClass = !isTopLevel override def isNumericValueClass = definitions.isNumericValueClass(this) override def isNumeric = isNumericValueClass override def isPackageObjectClass = isModuleClass && (name == tpnme.PACKAGE) @@ -2918,7 +2918,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isLocalClass = ( isAnonOrRefinementClass || isLocal - || !owner.isPackageClass && owner.isLocalClass + || !isTopLevel && owner.isLocalClass ) override def enclClassChain = this :: owner.enclClassChain @@ -2947,7 +2947,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } override def associatedFile = ( - if (!owner.isPackageClass) super.associatedFile + if (!isTopLevel) super.associatedFile else if (_associatedFile eq null) NoAbstractFile // guarantee not null, but save cost of initializing the var else _associatedFile ) diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 032a4aebef..b121933db2 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -467,7 +467,7 @@ abstract class TreeInfo { tp match { case TypeRef(pre, sym, args) => - args.isEmpty && (sym.owner.isPackageClass || isSimple(pre)) + args.isEmpty && (sym.isTopLevel || isSimple(pre)) case NoPrefix => true case _ => diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index cdb9e7159c..2bffe398f6 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -507,7 +507,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni def erasure = symbol.moduleClass.asClass def isStatic = true def instance = { - if (symbol.owner.isPackageClass) + if (symbol.isTopLevel) staticSingletonInstance(classLoader, symbol.fullName) else if (outer == null) staticSingletonInstance(classToJava(symbol.moduleClass.asClass)) @@ -1156,11 +1156,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni valueClassToJavaType(clazz) else if (clazz == ArrayClass) noClass - else if (clazz.owner.isPackageClass) + else if (clazz.isTopLevel) javaClass(clazz.javaClassName) else if (clazz.owner.isClass) { val childOfClass = !clazz.owner.isModuleClass - val childOfTopLevel = clazz.owner.owner.isPackageClass + val childOfTopLevel = clazz.owner.isTopLevel val childOfTopLevelObject = clazz.owner.isModuleClass && childOfTopLevel // suggested in https://issues.scala-lang.org/browse/SI-4023?focusedCommentId=54759#comment-54759 -- cgit v1.2.3 From f21b1ce7fda9022d6d805a708882c5a2ab241f41 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 15 Dec 2012 14:22:14 +0100 Subject: SI-4859 Don't elide qualifiers when selecting nested modules. Otherwise we fail to throw in: {???; Predef}.DummyImplicit.dummyImplicit We still elide the initialization of `Outer` in `Outer.Inner.foo` as before, although that seems a little dubious to me. 
In total, we had to change RefChecks, Flatten, and GenICode to effect this change. A recently fixed bug in tail call elimination was also due to assuming that the qualifier of a Select node wasn't worthy of traversal. Let's keep a close eye out for more instances of this problem. --- .../scala/tools/nsc/backend/icode/GenICode.scala | 11 +++++--- .../scala/tools/nsc/transform/Flatten.scala | 9 ++++++- .../scala/tools/nsc/typechecker/RefChecks.scala | 3 ++- test/files/run/t4859.check | 8 ++++++ test/files/run/t4859.scala | 29 ++++++++++++++++++++++ 5 files changed, 55 insertions(+), 5 deletions(-) create mode 100644 test/files/run/t4859.check create mode 100644 test/files/run/t4859.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 2ea26ddaa9..91a0ca4ff0 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -835,13 +835,18 @@ abstract class GenICode extends SubComponent { generatedType = toTypeKind(sym.info) val hostClass = findHostClass(qualifier.tpe, sym) log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass") + val qualSafeToInline = treeInfo isExprSafeToInline qualifier + + def genLoadQualUnlessInlinable: Context = + if (qualSafeToInline) ctx else genLoadQualifier(tree, ctx) if (sym.isModule) { - genLoadModule(ctx, tree) + genLoadModule(genLoadQualUnlessInlinable, tree) } else if (sym.isStaticMember) { - ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos) - ctx + val ctx1 = genLoadQualUnlessInlinable + ctx1.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos) + ctx1 } else { val ctx1 = genLoadQualifier(tree, ctx) ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos) diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index 7fe3a5da5c..85516f1995 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -12,6 +12,7 @@ import scala.collection.mutable.ListBuffer abstract class Flatten extends InfoTransform { import global._ + import treeInfo.isExprSafeToInline /** the following two members override abstract members in Transform */ val phaseName: String = "flatten" @@ -117,7 +118,13 @@ abstract class Flatten extends InfoTransform { case ClassDef(_, _, _, _) if sym.isNestedClass =>
+ } + } case _ => tree } diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index c316955a02..b820d8a386 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1389,7 +1389,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case TypeApply(fun, targs) => isClassTypeAccessible(fun) case Select(module, apply) => - ( // SI-4859 `CaseClass1().InnerCaseClass2()` must not be rewritten to `new InnerCaseClass2()` + ( // SI-4859 `CaseClass1().InnerCaseClass2()` must not be rewritten to `new InnerCaseClass2()`; + // {expr; Outer}.Inner() must not be rewritten to `new Outer.Inner()`. treeInfo.isExprSafeToInline(module) && // SI-5626 Classes in refinement types cannot be constructed with `new`. In this case, // the companion class is actually not a ClassSymbol, but a reference to an abstract type. diff --git a/test/files/run/t4859.check b/test/files/run/t4859.check new file mode 100644 index 0000000000..d329744ca0 --- /dev/null +++ b/test/files/run/t4859.check @@ -0,0 +1,8 @@ +Inner +Inner.i +About to reference Inner.i +Outer +Inner.i +About to reference O.N +About to reference O.N +About to reference O.N.apply() diff --git a/test/files/run/t4859.scala b/test/files/run/t4859.scala new file mode 100644 index 0000000000..6d223f2179 --- /dev/null +++ b/test/files/run/t4859.scala @@ -0,0 +1,29 @@ +object O { + case class N() + object P +} + +object Outer { + println("Outer") + object Inner { + println("Inner") + def i { + println("Inner.i") + } + } +} + +object Test { + def main(args: Array[String]) { + Outer.Inner.i // we still don't initiialize Outer here (but should we?) + + {println("About to reference Inner.i"); Outer}.Inner.i // Outer will be initialized. + + {println("About to reference O.N" ); O}.N + + {println("About to reference O.N" ); O}.N + + {println("About to reference O.N.apply()"); O}.N.apply() + } +} + -- cgit v1.2.3 From 412ad5724c0ca34f5fd6982ff6a0b12a437218bc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 7 Jan 2013 09:09:30 +0100 Subject: SI-4859 Retain MODULE_LOAD in dead code elim. Without this, the following test fails: SCALAC_OPTS="-optimize" ./test/partest test/files/run/t4859.scala --- src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 2 ++ test/files/run/t4859.scala | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index f7e743a6f1..0282457a12 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -144,6 +144,8 @@ abstract class DeadCodeElimination extends SubComponent { } } if (necessary) worklist += ((bb, idx)) + case LOAD_MODULE(sym) if isLoadNeeded(sym) => + worklist += ((bb, idx)) // SI-4859 Module initialization might side-effect. case _ => () } rd = rdef.interpret(bb, idx, rd) diff --git a/test/files/run/t4859.scala b/test/files/run/t4859.scala index 6d223f2179..3c20cea983 100644 --- a/test/files/run/t4859.scala +++ b/test/files/run/t4859.scala @@ -15,7 +15,7 @@ object Outer { object Test { def main(args: Array[String]) { - Outer.Inner.i // we still don't initiialize Outer here (but should we?) + Outer.Inner.i // we still don't initialize Outer here (but should we?) 
{println("About to reference Inner.i"); Outer}.Inner.i // Outer will be initialized. -- cgit v1.2.3 From 2580a51bbaccb31ad88679874d6ad626f8d4491c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 15 Jan 2013 13:58:45 +0100 Subject: Laying groundwork for a followup ticket. To solve SI-5304, we should change `isQualifierSafeToElide`. --- src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 10 +++++----- src/compiler/scala/tools/nsc/transform/Flatten.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2 +- src/reflect/scala/reflect/internal/TreeInfo.scala | 3 +++ 4 files changed, 11 insertions(+), 8 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 91a0ca4ff0..3363f19025 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -835,16 +835,16 @@ abstract class GenICode extends SubComponent { generatedType = toTypeKind(sym.info) val hostClass = findHostClass(qualifier.tpe, sym) log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass") - val qualSafeToInline = treeInfo isExprSafeToInline qualifier + val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier - def genLoadQualUnlessInlinable: Context = - if (qualSafeToInline) ctx else genLoadQualifier(tree, ctx) + def genLoadQualUnlessElidable: Context = + if (qualSafeToElide) ctx else genLoadQualifier(tree, ctx) if (sym.isModule) { - genLoadModule(genLoadQualUnlessInlinable, tree) + genLoadModule(genLoadQualUnlessElidable, tree) } else if (sym.isStaticMember) { - val ctx1 = genLoadQualUnlessInlinable + val ctx1 = genLoadQualUnlessElidable ctx1.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos) ctx1 } else { diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index 85516f1995..a370b45be0 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -12,7 +12,7 @@ import scala.collection.mutable.ListBuffer abstract class Flatten extends InfoTransform { import global._ - import treeInfo.isExprSafeToInline + import treeInfo.isQualifierSafeToElide /** the following two members override abstract members in Transform */ val phaseName: String = "flatten" @@ -121,7 +121,7 @@ abstract class Flatten extends InfoTransform { exitingFlatten { atPos(tree.pos) { val ref = gen.mkAttributedRef(sym) - if (isExprSafeToInline(qual)) ref + if (isQualifierSafeToElide(qual)) ref else Block(List(qual), ref).setType(tree.tpe) // need to execute the qualifier but refer directly to the lifted module. } } diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index b820d8a386..fd3b020b1a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1391,7 +1391,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case Select(module, apply) => ( // SI-4859 `CaseClass1().InnerCaseClass2()` must not be rewritten to `new InnerCaseClass2()`; // {expr; Outer}.Inner() must not be rewritten to `new Outer.Inner()`. - treeInfo.isExprSafeToInline(module) && + treeInfo.isQualifierSafeToElide(module) && // SI-5626 Classes in refinement types cannot be constructed with `new`. 
In this case, // the companion class is actually not a ClassSymbol, but a reference to an abstract type. module.symbol.companionClass.isClass diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index b121933db2..c4c51a91a5 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -65,6 +65,9 @@ abstract class TreeInfo { false } + // TODO SI-5304 tighten this up so we don't elide side effect in module loads + def isQualifierSafeToElide(tree: Tree): Boolean = isExprSafeToInline(tree) + /** Is tree an expression which can be inlined without affecting program semantics? * * Note that this is not called "isExprPure" since purity (lack of side-effects) -- cgit v1.2.3 From 832fc9a67e5aa85bdde61883527d3ac9554094d7 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 23:29:50 -0800 Subject: SI-2577, SI-6860: annotation type inference. This is less than ideal: scala> class Bippy[T] extends annotation.StaticAnnotation defined class Bippy scala> def f: Int @Bippy = 5 f: Int @Bippy[T] Turns out we can infer such types. Now it says: scala> def f: Int @Bippy = 5 f: Int @Bippy[Nothing] This should put to rest many an issue with parameterized annotations. --- .../scala/tools/nsc/typechecker/Typers.scala | 37 ++++++++++------------ src/library/scala/throws.scala | 2 +- .../scala/reflect/internal/AnnotationInfos.scala | 11 ++++--- src/reflect/scala/reflect/internal/TreeInfo.scala | 6 ++++ src/reflect/scala/reflect/internal/Types.scala | 11 +++++-- test/files/pos/annotations2.scala | 31 ++++++++++++++++++ test/files/run/t2577.check | 1 + test/files/run/t2577.scala | 17 ++++++++++ test/files/run/t6860.check | 4 +++ test/files/run/t6860.scala | 20 ++++++++++++ 10 files changed, 113 insertions(+), 27 deletions(-) create mode 100644 test/files/pos/annotations2.scala create mode 100644 test/files/run/t2577.check create mode 100644 test/files/run/t2577.scala create mode 100644 test/files/run/t6860.check create mode 100644 test/files/run/t6860.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index f2f8f47bf2..c12233b726 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3458,31 +3458,28 @@ trait Typers extends Adaptations with Tags { } // begin typedAnnotation - val (fun, argss) = { - def extract(fun: Tree, outerArgss: List[List[Tree]]): - (Tree, List[List[Tree]]) = fun match { - case Apply(f, args) => - extract(f, args :: outerArgss) - case Select(New(tpt), nme.CONSTRUCTOR) => - (fun, outerArgss) - case _ => - reportAnnotationError(UnexpectedTreeAnnotation(fun)) - (setError(fun), outerArgss) - } - extract(ann, List()) - } + val treeInfo.Applied(fun0, targs, argss) = treeInfo.dissectApplied(ann) + val typedFun0 = typed(fun0, forFunMode(mode), WildcardType) + val typedFunPart = ( + // If there are dummy type arguments in typeFun part, it suggests we + // must type the actual constructor call, not only the select. The value + // arguments are how the type arguments will be inferred. 
+ if (targs.isEmpty && typedFun0.exists(t => isDummyAppliedType(t.tpe))) + logResult(s"Retyped $typedFun0 to find type args")(typed(argss.foldLeft(fun0)(Apply(_, _)))) + else + typedFun0 + ) + val typedFun @ Select(New(annTpt), _) = treeInfo.dissectApplied(typedFunPart).core + val annType = annTpt.tpe - val res = if (fun.isErroneous) ErroneousAnnotation + val res = if (typedFun.isErroneous) ErroneousAnnotation else { - val typedFun @ Select(New(tpt), _) = typed(fun, mode.forFunMode, WildcardType) - val annType = tpt.tpe - if (typedFun.isErroneous) ErroneousAnnotation else if (annType.typeSymbol isNonBottomSubClass ClassfileAnnotationClass) { // annotation to be saved as java classfile annotation val isJava = typedFun.symbol.owner.isJavaDefined if (!annType.typeSymbol.isNonBottomSubClass(annClass)) { - reportAnnotationError(AnnotationTypeMismatchError(tpt, annClass.tpe, annType)) + reportAnnotationError(AnnotationTypeMismatchError(annTpt, annType, annType)) } else if (argss.length > 1) { reportAnnotationError(MultipleArgumentListForAnnotationError(ann)) } else { @@ -3534,7 +3531,7 @@ trait Typers extends Adaptations with Tags { val typedAnn = if (selfsym == NoSymbol) { // local dummy fixes SI-5544 val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos))) - localTyper.typed(ann, mode, annClass.tpe) + localTyper.typed(ann, mode, annType) } else { // Since a selfsym is supplied, the annotation should have an extra @@ -3548,7 +3545,7 @@ trait Typers extends Adaptations with Tags { // sometimes does. The problem is that "self" ident's within // annot.constr will retain the old symbol from the previous typing. val func = Function(funcparm :: Nil, ann.duplicate) - val funcType = appliedType(FunctionClass(1), selfsym.info, annClass.tpe_*) + val funcType = appliedType(FunctionClass(1), selfsym.info, annType) val Function(arg :: Nil, rhs) = typed(func, mode, funcType) rhs.substituteSymbols(arg.symbol :: Nil, selfsym :: Nil) diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala index 159f1f02f4..5a5dd9a1f5 100644 --- a/src/library/scala/throws.scala +++ b/src/library/scala/throws.scala @@ -24,5 +24,5 @@ package scala * @since 2.1 */ class throws[T <: Throwable](cause: String = "") extends scala.annotation.StaticAnnotation { - def this(clazz: Class[T]) = this() + def this(clazz: Class[T]) = this("") } diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 7a972c3f1a..70b8bd9be5 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -12,7 +12,7 @@ import scala.collection.immutable.ListMap /** AnnotationInfo and its helpers */ trait AnnotationInfos extends api.Annotations { self: SymbolTable => - import definitions.{ ThrowsClass, StaticAnnotationClass, isMetaAnnotation } + import definitions.{ ThrowsClass, ThrowableClass, StaticAnnotationClass, isMetaAnnotation } // Common annotation code between Symbol and Type. // For methods altering the annotation list, on Symbol it mutates @@ -334,7 +334,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => * as well as “new-stye” `@throws[Exception]("cause")` annotations. 
*/ object ThrownException { - def unapply(ann: AnnotationInfo): Option[Symbol] = + def unapply(ann: AnnotationInfo): Option[Symbol] = { ann match { case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass => None @@ -342,8 +342,11 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => case AnnotationInfo(_, List(Literal(Constant(tpe: Type))), _) => Some(tpe.typeSymbol) // new-style: @throws[Exception], @throws[Exception]("cause") - case AnnotationInfo(TypeRef(_, _, args), _, _) => - Some(args.head.typeSymbol) + case AnnotationInfo(TypeRef(_, _, arg :: _), _, _) => + Some(arg.typeSymbol) + case AnnotationInfo(TypeRef(_, _, Nil), _, _) => + Some(ThrowableClass) } + } } } diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 032a4aebef..c90e94c1c1 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -605,6 +605,12 @@ abstract class TreeInfo { } loop(tree) } + + override def toString = { + val tstr = if (targs.isEmpty) "" else targs.mkString("[", ", ", "]") + val astr = argss map (args => args.mkString("(", ", ", ")")) mkString "" + s"$core$tstr$astr" + } } /** Returns a wrapper that knows how to destructure and analyze applications. diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index b336192b67..b51028c502 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2944,12 +2944,19 @@ trait Types extends api.Types { self: SymbolTable => else existentialAbstraction(existentialsInType(tp), tp) ) - def containsExistential(tpe: Type) = - tpe exists typeIsExistentiallyBound + def containsDummyTypeArg(tp: Type) = tp exists isDummyTypeArg + def isDummyTypeArg(tp: Type) = tp.typeSymbol.isTypeParameter + def isDummyAppliedType(tp: Type) = tp match { + case TypeRef(_, _, args) if args.nonEmpty => args exists isDummyTypeArg + case _ => false + } def existentialsInType(tpe: Type) = tpe withFilter typeIsExistentiallyBound map (_.typeSymbol) + def containsExistential(tpe: Type) = + tpe exists typeIsExistentiallyBound + /** Precondition: params.nonEmpty. (args.nonEmpty enforced structurally.) 
*/ class HKTypeVar( diff --git a/test/files/pos/annotations2.scala b/test/files/pos/annotations2.scala new file mode 100644 index 0000000000..3bce7f8ac4 --- /dev/null +++ b/test/files/pos/annotations2.scala @@ -0,0 +1,31 @@ + +class B[T](x: (T, T)) { + def this(xx: (T, Any, Any)) = this((xx._1, xx._1)) +} +class BAnn[T](x: (T, T)) extends scala.annotation.StaticAnnotation { + def this(xx: (T, Any, Any)) = this((xx._1, xx._1)) +} +class CAnn[T](x: (T, T)) extends scala.annotation.StaticAnnotation { + def this(xx: Class[T]) = this((xx.newInstance(), xx.newInstance())) +} + +class A1 { + val b1 = new B((1, 2, 3)) + val b2 = new B((1, 2)) + val b3 = new B[Int]((1, 2, 3)) + val b4 = new B[Int]((1, 2)) +} + +class A2 { + @BAnn((1, 2, 3)) val b1 = null + @BAnn((1, 2)) val b2 = null + @BAnn[Int]((1, 2, 3)) val b3 = null + @BAnn[Int]((1, 2)) val b4 = null +} + +class A3 { + @CAnn(classOf[Int]) val b1 = null + @CAnn((1, 2)) val b2 = null + @CAnn[Int](classOf[Int]) val b3 = null + @CAnn[Int]((1, 2)) val b4 = null +} diff --git a/test/files/run/t2577.check b/test/files/run/t2577.check new file mode 100644 index 0000000000..4a584e4989 --- /dev/null +++ b/test/files/run/t2577.check @@ -0,0 +1 @@ +Nothing diff --git a/test/files/run/t2577.scala b/test/files/run/t2577.scala new file mode 100644 index 0000000000..6d836a3996 --- /dev/null +++ b/test/files/run/t2577.scala @@ -0,0 +1,17 @@ +case class annot[T]() extends scala.annotation.StaticAnnotation + +// type inference should infer @annot[Nothing] instead of @annot[T] +// note the T is not in scope here! +class Foo[@annot U] + +object Test { + import scala.reflect.runtime.universe._ + val x = new Foo + + def main(args: Array[String]): Unit = { + val targ = typeOf[x.type].widen match { + case TypeRef(_, _, arg :: _) => arg + } + println(targ) + } +} diff --git a/test/files/run/t6860.check b/test/files/run/t6860.check new file mode 100644 index 0000000000..c96331f540 --- /dev/null +++ b/test/files/run/t6860.check @@ -0,0 +1,4 @@ +Bippy[String] +Bippy[String] +throws[Nothing] +throws[RuntimeException] diff --git a/test/files/run/t6860.scala b/test/files/run/t6860.scala new file mode 100644 index 0000000000..2dcc2a67f7 --- /dev/null +++ b/test/files/run/t6860.scala @@ -0,0 +1,20 @@ +class Bippy[T](val value: T) extends annotation.StaticAnnotation + +class A { + @Bippy("hi") def f1: Int = 1 + @Bippy[String]("hi") def f2: Int = 2 + + @throws("what do I throw?") def f3 = throw new RuntimeException + @throws[RuntimeException]("that's good to know!") def f4 = throw new RuntimeException +} + +object Test { + import scala.reflect.runtime.universe._ + + def main(args: Array[String]): Unit = { + val members = typeOf[A].declarations.toList + val tpes = members flatMap (_.annotations) map (_.tpe) + + tpes.map(_.toString).sorted foreach println + } +} -- cgit v1.2.3 From 801eab55019c433d2fa6a925d02e41b1c47cbf22 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 23:29:50 -0800 Subject: SI-5182, no position on annotation error. Now there's a position on the synthetic "value" Ident. 
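(Illustration, not part of the patch: the "synthetic `value` Ident" comes from the typer rewriting a single unnamed classfile-annotation argument into the named form `value = ...`. The annotation class below is made up for this sketch; defining it in Scala only draws the usual "implementation restriction" warning about subclassing ClassfileAnnotation.)

    class marker(value: String) extends scala.annotation.ClassfileAnnotation

    class Demo {
      @marker("hi")         def f = 0   // single unnamed argument; the typer checks it as if...
      @marker(value = "hi") def g = 0   // ...written like this. The `value` identifier in that
                                        // rewrite is compiler-synthesized, so before this fix it
                                        // carried no position for error reporting.
    }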
--- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 13 +++++++------ test/files/neg/t5182.check | 7 +++++++ test/files/neg/t5182.flags | 1 + test/files/neg/t5182.scala | 5 +++++ 4 files changed, 20 insertions(+), 6 deletions(-) create mode 100644 test/files/neg/t5182.check create mode 100644 test/files/neg/t5182.flags create mode 100644 test/files/neg/t5182.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c12233b726..162bdd22b2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -117,6 +117,10 @@ trait Typers extends Adaptations with Tags { } } + private def mkNamedArg(tree: Tree, name: Name): Tree = { + atPos(tree.pos)(new AssignOrNamedArg(Ident(name), tree)) + } + /** Find implicit arguments and pass them to given tree. */ def applyImplicitArgs(fun: Tree): Tree = fun.tpe match { @@ -128,7 +132,6 @@ trait Typers extends Adaptations with Tags { var paramFailed = false def mkPositionalArg(argTree: Tree, paramName: Name) = argTree - def mkNamedArg(argTree: Tree, paramName: Name) = atPos(argTree.pos)(new AssignOrNamedArg(Ident(paramName), (argTree))) var mkArg: (Tree, Name) => Tree = mkPositionalArg // DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1)) @@ -3459,7 +3462,7 @@ trait Typers extends Adaptations with Tags { // begin typedAnnotation val treeInfo.Applied(fun0, targs, argss) = treeInfo.dissectApplied(ann) - val typedFun0 = typed(fun0, forFunMode(mode), WildcardType) + val typedFun0 = typed(fun0, mode.forFunMode, WildcardType) val typedFunPart = ( // If there are dummy type arguments in typeFun part, it suggests we // must type the actual constructor call, not only the select. 
The value @@ -3486,13 +3489,11 @@ trait Typers extends Adaptations with Tags { val annScope = annType.decls .filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined) val names = new scala.collection.mutable.HashSet[Symbol] - def hasValue = names exists (_.name == nme.value) names ++= (if (isJava) annScope.iterator else typedFun.tpe.params.iterator) val args = argss match { - case List(List(arg)) if !isNamed(arg) && hasValue => - List(new AssignOrNamedArg(Ident(nme.value), arg)) - case as :: _ => as + case (arg :: Nil) :: Nil if !isNamed(arg) => mkNamedArg(arg, nme.value) :: Nil + case args :: Nil => args } val nvPairs = args map { diff --git a/test/files/neg/t5182.check b/test/files/neg/t5182.check new file mode 100644 index 0000000000..3161f92680 --- /dev/null +++ b/test/files/neg/t5182.check @@ -0,0 +1,7 @@ +t5182.scala:2: error: unknown annotation argument name: qwe + @java.lang.Deprecated(qwe = "wer") def ok(q:Int) = 1 + ^ +t5182.scala:3: error: classfile annotation arguments have to be supplied as named arguments + @java.lang.Deprecated("wer") def whereAmI(q:Int) = 1 + ^ +two errors found diff --git a/test/files/neg/t5182.flags b/test/files/neg/t5182.flags new file mode 100644 index 0000000000..85d8eb2ba2 --- /dev/null +++ b/test/files/neg/t5182.flags @@ -0,0 +1 @@ +-Xfatal-warnings diff --git a/test/files/neg/t5182.scala b/test/files/neg/t5182.scala new file mode 100644 index 0000000000..0687e99efb --- /dev/null +++ b/test/files/neg/t5182.scala @@ -0,0 +1,5 @@ +class test { + @java.lang.Deprecated(qwe = "wer") def ok(q:Int) = 1 + @java.lang.Deprecated("wer") def whereAmI(q:Int) = 1 + @java.lang.Deprecated() def bippy(q:Int) = 1 +} -- cgit v1.2.3 From 76bb23df5e2909871f4f6ba3184abe99f9ba667a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 23:29:50 -0800 Subject: SI-6083, misleading annotation error message. When an implicit conversion causes an apparently constant argument not to be constant, show the conversion, not the constant. --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 7 +++++-- test/files/neg/annot-nonconst.check | 2 +- test/files/neg/t6083.check | 10 ++++++++++ test/files/neg/t6083.scala | 7 +++++++ 4 files changed, 23 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/t6083.check create mode 100644 test/files/neg/t6083.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 162bdd22b2..dcc2ee0f23 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3402,7 +3402,10 @@ trait Typers extends Adaptations with Tags { * floats and literals in particular) are not yet folded. */ def tryConst(tr: Tree, pt: Type): Option[LiteralAnnotArg] = { - val const: Constant = typed(constfold(tr), EXPRmode, pt) match { + // The typed tree may be relevantly different than the tree `tr`, + // e.g. it may have encountered an implicit conversion. 
+ val ttree = typed(constfold(tr), EXPRmode, pt) + val const: Constant = ttree match { case l @ Literal(c) if !l.isErroneous => c case tree => tree.tpe match { case ConstantType(c) => c @@ -3411,7 +3414,7 @@ trait Typers extends Adaptations with Tags { } if (const == null) { - reportAnnotationError(AnnotationNotAConstantError(tr)); None + reportAnnotationError(AnnotationNotAConstantError(ttree)); None } else if (const.value == null) { reportAnnotationError(AnnotationArgNullError(tr)); None } else diff --git a/test/files/neg/annot-nonconst.check b/test/files/neg/annot-nonconst.check index b43e58a0ca..5b3da7a13c 100644 --- a/test/files/neg/annot-nonconst.check +++ b/test/files/neg/annot-nonconst.check @@ -8,7 +8,7 @@ make your annotation visible at runtime. If that is what you want, you must write the annotation class in Java. class Ann2(value: String) extends annotation.ClassfileAnnotation ^ -annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: n +annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: Test.this.n @Length(n) def foo = "foo" ^ annot-nonconst.scala:7: error: annotation argument cannot be null diff --git a/test/files/neg/t6083.check b/test/files/neg/t6083.check new file mode 100644 index 0000000000..c9b5ba05d3 --- /dev/null +++ b/test/files/neg/t6083.check @@ -0,0 +1,10 @@ +t6083.scala:6: warning: Implementation restriction: subclassing Classfile does not +make your annotation visible at runtime. If that is what +you want, you must write the annotation class in Java. +class annot(value: String) extends annotation.ClassfileAnnotation + ^ +t6083.scala:7: error: annotation argument needs to be a constant; found: conv.i2s(101) +@annot(101) class C + ^ +one warning found +one error found diff --git a/test/files/neg/t6083.scala b/test/files/neg/t6083.scala new file mode 100644 index 0000000000..1de18e6527 --- /dev/null +++ b/test/files/neg/t6083.scala @@ -0,0 +1,7 @@ +object conv { + implicit def i2s(i: Int): String = "" +} +import conv._ + +class annot(value: String) extends annotation.ClassfileAnnotation +@annot(101) class C -- cgit v1.2.3 From 5878099cbb3ca0b0224c9931d3510c7234e7c686 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 23:29:50 -0800 Subject: Renamed methods to be less ambiguous in intent. isNamed => isNamedArg isIdentity => allArgsArePositional nameOf => nameOfNamedArg Moved mkNamedArg into TreeGen. 
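(Illustration, not part of the patch: a minimal sketch of what the renamed predicates distinguish at a call site.)

    object NamedArgsDemo {
      def sub(x: Int, y: Int) = x - y

      val a = sub(5, 2)          // all arguments positional        -> allArgsArePositional
      val b = sub(y = 2, x = 5)  // named arguments; each becomes an AssignOrNamedArg
                                 // node, which isNamedArg / nameOfNamedArg then inspect
    }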
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 11 ++------ .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 9 ++---- .../scala/tools/nsc/typechecker/Infer.scala | 2 +- .../tools/nsc/typechecker/NamesDefaults.scala | 14 +++++----- .../scala/tools/nsc/typechecker/Typers.scala | 32 +++++++++------------- src/reflect/scala/reflect/internal/TreeGen.scala | 4 +++ src/reflect/scala/reflect/internal/TreeInfo.scala | 8 ++++++ 7 files changed, 38 insertions(+), 42 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 744644fd49..37da1b44bb 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1584,14 +1584,9 @@ self => * }}} */ def argumentExprs(): List[Tree] = { - def args(): List[Tree] = commaSeparated { - val maybeNamed = isIdent - expr() match { - case a @ Assign(id, rhs) if maybeNamed => - atPos(a.pos) { AssignOrNamedArg(id, rhs) } - case e => e - } - } + def args(): List[Tree] = commaSeparated( + if (isIdent) treeInfo.assignmentToMaybeNamedArg(expr()) else expr() + ) in.token match { case LBRACE => List(blockExpr()) case LPAREN => inParens(if (in.token == RPAREN) Nil else args()) diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 39270719fb..379261906a 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -171,15 +171,10 @@ abstract class TreeBuilder { /** Create tree representing (unencoded) binary operation expression or pattern. */ def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position): Tree = { - def mkNamed(args: List[Tree]) = - if (isExpr) args map { - case a @ Assign(id @ Ident(name), rhs) => - atPos(a.pos) { AssignOrNamedArg(id, rhs) } - case e => e - } else args + def mkNamed(args: List[Tree]) = if (isExpr) args map treeInfo.assignmentToMaybeNamedArg else args val arguments = right match { case Parens(args) => mkNamed(args) - case _ => List(right) + case _ => List(right) } if (isExpr) { if (treeInfo.isLeftAssoc(op)) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 4177b7cdfa..07b3a926a0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -866,7 +866,7 @@ trait Infer extends Checkable { val (argtpes1, argPos, namesOK) = checkNames(argtpes0, params) // when using named application, the vararg param has to be specified exactly once ( namesOK - && (isIdentity(argPos) || sameLength(formals, params)) + && (allArgsArePositional(argPos) || sameLength(formals, params)) && typesCompatible(reorderArgs(argtpes1, argPos)) // nb. 
arguments and names are OK, check if types are compatible ) } diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index f5884e5c34..dd60b292bf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -41,11 +41,11 @@ trait NamesDefaults { self: Analyzer => blockTyper: Typer ) { } - def nameOf(arg: Tree) = arg match { - case AssignOrNamedArg(Ident(name), rhs) => Some(name) - case _ => None + private def nameOfNamedArg(arg: Tree) = Some(arg) collect { case AssignOrNamedArg(Ident(name), _) => name } + def isNamedArg(arg: Tree) = arg match { + case AssignOrNamedArg(Ident(_), _) => true + case _ => false } - def isNamed(arg: Tree) = nameOf(arg).isDefined /** @param pos maps indices from old to new */ def reorderArgs[T: ClassTag](args: List[T], pos: Int => Int): List[T] = { @@ -55,13 +55,13 @@ trait NamesDefaults { self: Analyzer => } /** @param pos maps indices from new to old (!) */ - def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = { + private def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = { val argsArray = args.toArray (argsArray.indices map (i => argsArray(pos(i)))).toList } /** returns `true` if every element is equal to its index */ - def isIdentity(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i) + def allArgsArePositional(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i) /** * Transform a function application into a Block, and assigns typer.context @@ -359,7 +359,7 @@ trait NamesDefaults { self: Analyzer => } } - def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOf _): (List[Symbol], Boolean) = { + def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOfNamedArg _): (List[Symbol], Boolean) = { val namedArgs = args.dropWhile(arg => { val n = argName(arg) n.isEmpty || params.forall(p => p.name != n.get) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index dcc2ee0f23..e8d0a497ca 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -117,10 +117,6 @@ trait Typers extends Adaptations with Tags { } } - private def mkNamedArg(tree: Tree, name: Name): Tree = { - atPos(tree.pos)(new AssignOrNamedArg(Ident(name), tree)) - } - /** Find implicit arguments and pass them to given tree. */ def applyImplicitArgs(fun: Tree): Tree = fun.tpe match { @@ -130,9 +126,7 @@ trait Typers extends Adaptations with Tags { // paramFailed cannot be initialized with params.exists(_.tpe.isError) because that would // hide some valid errors for params preceding the erroneous one. 
var paramFailed = false - - def mkPositionalArg(argTree: Tree, paramName: Name) = argTree - var mkArg: (Tree, Name) => Tree = mkPositionalArg + var mkArg: (Name, Tree) => Tree = (_, tree) => tree // DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1)) // @@ -147,9 +141,9 @@ trait Typers extends Adaptations with Tags { argResultsBuff += res if (res.isSuccess) { - argBuff += mkArg(res.tree, param.name) + argBuff += mkArg(param.name, res.tree) } else { - mkArg = mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args + mkArg = gen.mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args if (!param.hasDefault && !paramFailed) { context.errBuffer.find(_.kind == ErrorKinds.Divergent) match { case Some(divergentImplicit) => @@ -3104,10 +3098,10 @@ trait Typers extends Adaptations with Tags { val (namelessArgs, argPos) = removeNames(Typer.this)(args, params) if (namelessArgs exists (_.isErroneous)) { duplErrTree - } else if (!isIdentity(argPos) && !sameLength(formals, params)) - // !isIdentity indicates that named arguments are used to re-order arguments + } else if (!allArgsArePositional(argPos) && !sameLength(formals, params)) + // !allArgsArePositional indicates that named arguments are used to re-order arguments duplErrorTree(MultipleVarargError(tree)) - else if (isIdentity(argPos) && !isNamedApplyBlock(fun)) { + else if (allArgsArePositional(argPos) && !isNamedApplyBlock(fun)) { // if there's no re-ordering, and fun is not transformed, no need to transform // more than an optimization, e.g. important in "synchronized { x = update-x }" checkNotMacro() @@ -3157,7 +3151,7 @@ trait Typers extends Adaptations with Tags { } if (!sameLength(formals, args) || // wrong nb of arguments - (args exists isNamed) || // uses a named argument + (args exists isNamedArg) || // uses a named argument isNamedApplyBlock(fun)) { // fun was transformed to a named apply block => // integrate this application into the block if (dyna.isApplyDynamicNamed(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt) @@ -3464,7 +3458,7 @@ trait Typers extends Adaptations with Tags { } // begin typedAnnotation - val treeInfo.Applied(fun0, targs, argss) = treeInfo.dissectApplied(ann) + val treeInfo.Applied(fun0, targs, argss) = ann val typedFun0 = typed(fun0, mode.forFunMode, WildcardType) val typedFunPart = ( // If there are dummy type arguments in typeFun part, it suggests we @@ -3475,7 +3469,7 @@ trait Typers extends Adaptations with Tags { else typedFun0 ) - val typedFun @ Select(New(annTpt), _) = treeInfo.dissectApplied(typedFunPart).core + val treeInfo.Applied(typedFun @ Select(New(annTpt), _), _, _) = typedFunPart val annType = annTpt.tpe val res = if (typedFun.isErroneous) ErroneousAnnotation @@ -3489,15 +3483,15 @@ trait Typers extends Adaptations with Tags { } else if (argss.length > 1) { reportAnnotationError(MultipleArgumentListForAnnotationError(ann)) } else { + val args = argss match { + case (arg :: Nil) :: Nil if !isNamedArg(arg) => gen.mkNamedArg(nme.value, arg) :: Nil + case args :: Nil => args + } val annScope = annType.decls .filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined) val names = new scala.collection.mutable.HashSet[Symbol] names ++= (if (isJava) annScope.iterator else typedFun.tpe.params.iterator) - val args = argss match { - case (arg :: Nil) :: Nil if !isNamed(arg) => mkNamedArg(arg, nme.value) :: Nil - case args :: Nil => 
args - } val nvPairs = args map { case arg @ AssignOrNamedArg(Ident(name), rhs) => diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 6a2006e56f..54a85dee86 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -271,6 +271,10 @@ abstract class TreeGen extends macros.TreeBuilder { case _ => Constant(null) } + /** Wrap an expression in a named argument. */ + def mkNamedArg(name: Name, tree: Tree): Tree = mkNamedArg(Ident(name), tree) + def mkNamedArg(lhs: Tree, rhs: Tree): Tree = atPos(rhs.pos)(AssignOrNamedArg(lhs, rhs)) + /** Builds a tuple */ def mkTuple(elems: List[Tree]): Tree = if (elems.isEmpty) Literal(Constant()) diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index c90e94c1c1..45720053a8 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -370,6 +370,14 @@ abstract class TreeInfo { case _ => false } + /** Translates an Assign(_, _) node to AssignOrNamedArg(_, _) if + * the lhs is a simple ident. Otherwise returns unchanged. + */ + def assignmentToMaybeNamedArg(tree: Tree) = tree match { + case t @ Assign(id: Ident, rhs) => atPos(t.pos)(AssignOrNamedArg(id, rhs)) + case t => t + } + /** Is name a left-associative operator? */ def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.endChar != ':') -- cgit v1.2.3 From 901ac16875b3cd0caf948fccf985692c63f259eb Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 23:29:50 -0800 Subject: Removing superfluous method parameters. Adding a method parameter with a default argument which deviates from the default at a single call site is a pretty questionable tradeoff. Even more so when done twice. Like sands through the hourglass, so go the parameters of our method. --- .../scala/tools/nsc/typechecker/Typers.scala | 56 ++++++++++++---------- 1 file changed, 31 insertions(+), 25 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index e8d0a497ca..dce4f464b7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1786,12 +1786,16 @@ trait Typers extends Adaptations with Tags { val impl2 = finishMethodSynthesis(impl1, clazz, context) if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass) checkEphemeral(clazz, impl2.body) - if ((clazz != ClassfileAnnotationClass) && - (clazz isNonBottomSubClass ClassfileAnnotationClass)) - restrictionWarning(cdef.pos, unit, - "subclassing Classfile does not\n"+ - "make your annotation visible at runtime. If that is what\n"+ - "you want, you must write the annotation class in Java.") + + if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) { + if (!clazz.owner.isPackageClass) + unit.error(clazz.pos, "inner classes cannot be classfile annotations") + else restrictionWarning(cdef.pos, unit, + """|subclassing Classfile does not + |make your annotation visible at runtime. 
If that is what + |you want, you must write the annotation class in Java.""".stripMargin) + } + if (!isPastTyper) { for (ann <- clazz.getAnnotation(DeprecatedAttr)) { val m = companionSymbolOf(clazz, context) @@ -1924,9 +1928,6 @@ trait Typers extends Adaptations with Tags { if (clazz.isTrait && hasSuperArgs(parents1.head)) ConstrArgsInParentOfTraitError(parents1.head, clazz) - if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.owner.isPackageClass) - unit.error(clazz.pos, "inner classes cannot be classfile annotations") - if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType]) @@ -3374,15 +3375,13 @@ trait Typers extends Adaptations with Tags { else None case _ => None - } + } } /** * Convert an annotation constructor call into an AnnotationInfo. - * - * @param annClass the expected annotation class */ - def typedAnnotation(ann: Tree, mode: Mode = EXPRmode, selfsym: Symbol = NoSymbol, annClass: Symbol = AnnotationClass, requireJava: Boolean = false): AnnotationInfo = { + def typedAnnotation(ann: Tree, mode: Mode = EXPRmode, selfsym: Symbol = NoSymbol): AnnotationInfo = { var hasError: Boolean = false val pending = ListBuffer[AbsTypeError]() @@ -3423,7 +3422,14 @@ trait Typers extends Adaptations with Tags { reportAnnotationError(ArrayConstantsError(tree)); None case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => - val annInfo = typedAnnotation(ann, mode, NoSymbol, pt.typeSymbol, true) + val annInfo = typedAnnotation(ann, mode, NoSymbol) + val annType = annInfo.tpe + + if (!annType.typeSymbol.isSubClass(pt.typeSymbol)) + reportAnnotationError(AnnotationTypeMismatchError(tpt, annType, annType)) + else if (!annType.typeSymbol.isSubClass(ClassfileAnnotationClass)) + reportAnnotationError(NestedAnnotationError(ann, annType)) + if (annInfo.atp.isErroneous) { hasError = true; None } else Some(NestedAnnotArg(annInfo)) @@ -3478,21 +3484,22 @@ trait Typers extends Adaptations with Tags { else if (annType.typeSymbol isNonBottomSubClass ClassfileAnnotationClass) { // annotation to be saved as java classfile annotation val isJava = typedFun.symbol.owner.isJavaDefined - if (!annType.typeSymbol.isNonBottomSubClass(annClass)) { - reportAnnotationError(AnnotationTypeMismatchError(annTpt, annType, annType)) - } else if (argss.length > 1) { + if (argss.length > 1) { reportAnnotationError(MultipleArgumentListForAnnotationError(ann)) - } else { - val args = argss match { - case (arg :: Nil) :: Nil if !isNamedArg(arg) => gen.mkNamedArg(nme.value, arg) :: Nil - case args :: Nil => args - } + } + else { val annScope = annType.decls .filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined) val names = new scala.collection.mutable.HashSet[Symbol] names ++= (if (isJava) annScope.iterator else typedFun.tpe.params.iterator) + def hasValue = names exists (_.name == nme.value) + val args = argss match { + case (arg :: Nil) :: Nil if !isNamedArg(arg) && hasValue => gen.mkNamedArg(nme.value, arg) :: Nil + case args :: Nil => args + } + val nvPairs = args map { case arg @ AssignOrNamedArg(Ident(name), rhs) => val sym = if (isJava) annScope.lookup(name) @@ -3523,9 +3530,8 @@ trait Typers extends Adaptations with Tags { if (hasError) ErroneousAnnotation else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos)) } - } else if (requireJava) { - reportAnnotationError(NestedAnnotationError(ann, annType)) - } else { + } 
+ else { val typedAnn = if (selfsym == NoSymbol) { // local dummy fixes SI-5544 val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos))) -- cgit v1.2.3 From 46e8eceecc5b5f6b49075550e54f035126fc256a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 23:29:51 -0800 Subject: Cleaning up dummy applied types and friends. Incorporates feedback from dealias/widen PR. --- .../scala/tools/nsc/interpreter/IMain.scala | 6 +- .../nsc/symtab/classfile/ClassfileParser.scala | 16 +-- .../scala/tools/nsc/typechecker/Checkable.scala | 7 +- .../scala/tools/nsc/typechecker/Infer.scala | 6 +- .../scala/tools/nsc/typechecker/Typers.scala | 16 ++- .../scala/reflect/internal/Definitions.scala | 5 +- src/reflect/scala/reflect/internal/Types.scala | 130 +++++++++++---------- 7 files changed, 100 insertions(+), 86 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index ac0351dd78..7f177c7ce4 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -521,8 +521,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Right(buildRequest(line, trees)) } - // normalize non-public types so we don't see protected aliases like Self - def normalizeNonPublic(tp: Type) = tp match { + // dealias non-public types so we don't see protected aliases like Self + def dealiasNonPublic(tp: Type) = tp match { case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias case _ => tp } @@ -980,7 +980,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def cleanTypeAfterTyper(sym: => Symbol): Type = { exitingTyper( - normalizeNonPublic( + dealiasNonPublic( dropNullaryMethod( sym.tpe_* ) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 04e860f9db..19d03ad04d 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -29,8 +29,8 @@ abstract class ClassfileParser { protected var in: AbstractFileReader = _ // the class file reader protected var clazz: Symbol = _ // the class symbol containing dynamic members protected var staticModule: Symbol = _ // the module symbol containing static members - protected var instanceScope: Scope = _ // the scope of all instance definitions - protected var staticScope: Scope = _ // the scope of all static definitions + protected var instanceScope: Scope = _ // the scope of all instance definitions + protected var staticScope: Scope = _ // the scope of all static definitions protected var pool: ConstantPool = _ // the classfile's constant pool protected var isScala: Boolean = _ // does class file describe a scala class? protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation? @@ -739,15 +739,9 @@ abstract class ClassfileParser { // isMonomorphicType is false if the info is incomplete, as it usually is here // so have to check unsafeTypeParams.isEmpty before worrying about raw type case below, // or we'll create a boatload of needless existentials. 
- else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) { - tp - } - else { - // raw type - existentially quantify all type parameters - val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams) - val t = typeRef(pre, classSym, eparams.map(_.tpeHK)) - logResult(s"raw type from $classSym")(newExistentialType(eparams, t)) - } + else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) tp + // raw type - existentially quantify all type parameters + else logResult(s"raw type from $classSym")(definitions.unsafeClassExistentialType(classSym)) case tp => assert(sig.charAt(index) != '<', tp) tp diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index fb3909746f..88bfa6099d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -62,6 +62,9 @@ trait Checkable { bases foreach { bc => val tps1 = (from baseType bc).typeArgs val tps2 = (tvarType baseType bc).typeArgs + if (tps1.size != tps2.size) + devWarning(s"Unequally sized type arg lists in propagateKnownTypes($from, $to): ($tps1, $tps2)") + (tps1, tps2).zipped foreach (_ =:= _) // Alternate, variance respecting formulation causes // neg/unchecked3.scala to fail (abstract types). TODO - @@ -78,7 +81,7 @@ trait Checkable { val resArgs = tparams zip tvars map { case (_, tvar) if tvar.instValid => tvar.constr.inst - case (tparam, _) => tparam.tpe + case (tparam, _) => tparam.tpeHK } appliedType(to, resArgs: _*) } @@ -108,7 +111,7 @@ trait Checkable { private class CheckabilityChecker(val X: Type, val P: Type) { def Xsym = X.typeSymbol def Psym = P.typeSymbol - def XR = propagateKnownTypes(X, Psym) + def XR = if (Xsym == AnyClass) classExistentialType(Psym) else propagateKnownTypes(X, Psym) // sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean] def P1 = X matchesPattern P def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 07b3a926a0..e9f3ad7ff8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1222,8 +1222,6 @@ trait Infer extends Checkable { } } - def widen(tp: Type): Type = abstractTypesToBounds(tp) - /** Substitute free type variables `undetparams` of type constructor * `tree` in pattern, given prototype `pt`. 
* @@ -1232,7 +1230,7 @@ trait Infer extends Checkable { * @param pt the expected result type of the instance */ def inferConstructorInstance(tree: Tree, undetparams: List[Symbol], pt0: Type) { - val pt = widen(pt0) + val pt = abstractTypesToBounds(pt0) val ptparams = freeTypeParamsOfTerms(pt) val ctorTp = tree.tpe val resTp = ctorTp.finalResultType @@ -1371,7 +1369,7 @@ trait Infer extends Checkable { } def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type, canRemedy: Boolean): Type = { - val pt = widen(pt0) + val pt = abstractTypesToBounds(pt0) val ptparams = freeTypeParamsOfTerms(pt) val tpparams = freeTypeParamsOfTerms(pattp) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index dce4f464b7..f79723002e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3360,7 +3360,12 @@ trait Typers extends Adaptations with Tags { // return the corresponding extractor (an instance of ClassTag[`pt`]) def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (isPastTyper) None else { // only look at top-level type, can't (reliably) do anything about unchecked type args (in general) - pt.normalize.typeConstructor match { + // but at least make a proper type before passing it elsewhere + val pt1 = pt.dealias match { + case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies + case pt1 => pt1 + } + pt1 match { // if at least one of the types in an intersection is checkable, use the checkable ones // this avoids problems as in run/matchonseq.scala, where the expected type is `Coll with scala.collection.SeqLike` // Coll is an abstract type, but SeqLike of course is not @@ -3706,7 +3711,8 @@ trait Typers extends Adaptations with Tags { else containsDef(owner, sym) || isRawParameter(sym) || isCapturedExistential(sym) def containsLocal(tp: Type): Boolean = tp exists (t => isLocal(t.typeSymbol) || isLocal(t.termSymbol)) - val normalizeLocals = new TypeMap { + + val dealiasLocals = new TypeMap { def apply(tp: Type): Type = tp match { case TypeRef(pre, sym, args) => if (sym.isAliasType && containsLocal(tp)) apply(tp.dealias) @@ -3759,9 +3765,9 @@ trait Typers extends Adaptations with Tags { for (sym <- remainingSyms) addLocals(sym.existentialBound) } - val normalizedTpe = normalizeLocals(tree.tpe) - addLocals(normalizedTpe) - packSymbols(localSyms.toList, normalizedTpe) + val dealiasedType = dealiasLocals(tree.tpe) + addLocals(dealiasedType) + packSymbols(localSyms.toList, dealiasedType) } def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) = diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index dbf07c7f06..54406fa4f5 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -714,7 +714,10 @@ trait Definitions extends api.StandardDefinitions { * C[E1, ..., En] forSome { E1 >: LB1 <: UB1 ... en >: LBn <: UBn }. 
*/ def classExistentialType(clazz: Symbol): Type = - newExistentialType(clazz.typeParams, clazz.tpe_*) + existentialAbstraction(clazz.typeParams, clazz.tpe_*) + + def unsafeClassExistentialType(clazz: Symbol): Type = + existentialAbstraction(clazz.unsafeTypeParams, clazz.tpe_*) // members of class scala.Any lazy val Any_== = enterNewMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index b51028c502..dd73dbe6f6 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -835,23 +835,27 @@ trait Types extends api.Types { self: SymbolTable => } /** Is this type a subtype of that type in a pattern context? - * Any type arguments on the right hand side are replaced with + * Dummy type arguments on the right hand side are replaced with * fresh existentials, except for Arrays. * * See bug1434.scala for an example of code which would fail * if only a <:< test were applied. */ - def matchesPattern(that: Type): Boolean = { - (this <:< that) || ((this, that) match { - case (TypeRef(_, ArrayClass, List(arg1)), TypeRef(_, ArrayClass, List(arg2))) if arg2.typeSymbol.typeParams.nonEmpty => - arg1 matchesPattern arg2 - case (_, TypeRef(_, _, args)) => - val newtp = existentialAbstraction(args map (_.typeSymbol), that) - !(that =:= newtp) && (this <:< newtp) - case _ => - false - }) - } + def matchesPattern(that: Type): Boolean = (this <:< that) || (that match { + case ArrayTypeRef(elem2) if elem2.typeConstructor.isHigherKinded => + this match { + case ArrayTypeRef(elem1) => elem1 matchesPattern elem2 + case _ => false + } + case TypeRef(_, sym, args) => + val that1 = existentialAbstraction(args map (_.typeSymbol), that) + (that ne that1) && (this <:< that1) && { + log(s"$this.matchesPattern($that) depended on discarding args and testing <:< $that1") + true + } + case _ => + false + }) def stat_<:<(that: Type): Boolean = { if (Statistics.canEnable) Statistics.incCounter(subtypeCount) @@ -2867,6 +2871,13 @@ trait Types extends api.Types { self: SymbolTable => } } + object ArrayTypeRef { + def unapply(tp: Type) = tp match { + case TypeRef(_, ArrayClass, arg :: Nil) => Some(arg) + case _ => None + } + } + //@M // a TypeVar used to be a case class with only an origin and a constr // then, constr became mutable (to support UndoLog, I guess), @@ -2936,27 +2947,6 @@ trait Types extends api.Types { self: SymbolTable => createTypeVar(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams, untouchable) } - /** Repack existential types, otherwise they sometimes get unpacked in the - * wrong location (type inference comes up with an unexpected skolem) - */ - def repackExistential(tp: Type): Type = ( - if (tp == NoType) tp - else existentialAbstraction(existentialsInType(tp), tp) - ) - - def containsDummyTypeArg(tp: Type) = tp exists isDummyTypeArg - def isDummyTypeArg(tp: Type) = tp.typeSymbol.isTypeParameter - def isDummyAppliedType(tp: Type) = tp match { - case TypeRef(_, _, args) if args.nonEmpty => args exists isDummyTypeArg - case _ => false - } - - def existentialsInType(tpe: Type) = - tpe withFilter typeIsExistentiallyBound map (_.typeSymbol) - - def containsExistential(tpe: Type) = - tpe exists typeIsExistentiallyBound - /** Precondition: params.nonEmpty. (args.nonEmpty enforced structurally.) 
*/ class HKTypeVar( @@ -3742,17 +3732,14 @@ trait Types extends api.Types { self: SymbolTable => } /** Type with all top-level occurrences of abstract types replaced by their bounds */ - def abstractTypesToBounds(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala ) - case TypeRef(_, sym, _) if sym.isAbstractType => - abstractTypesToBounds(tp.bounds.hi) - case TypeRef(_, sym, _) if sym.isAliasType => - abstractTypesToBounds(tp.normalize) - case rtp @ RefinedType(parents, decls) => - copyRefinedType(rtp, parents mapConserve abstractTypesToBounds, decls) - case AnnotatedType(_, underlying, _) => - abstractTypesToBounds(underlying) - case _ => - tp + object abstractTypesToBounds extends TypeMap { + def apply(tp: Type): Type = tp match { + case TypeRef(_, sym, _) if sym.isAliasType => apply(tp.dealias) + case TypeRef(_, sym, _) if sym.isAbstractType => apply(tp.bounds.hi) + case rtp @ RefinedType(parents, decls) => copyRefinedType(rtp, parents mapConserve this, decls) + case AnnotatedType(_, _, _) => mapOver(tp) + case _ => tp // no recursion - top level only + } } // Set to true for A* => Seq[A] @@ -3923,7 +3910,7 @@ trait Types extends api.Types { self: SymbolTable => } } class ClassUnwrapper(existential: Boolean) extends TypeUnwrapper(poly = true, existential, annotated = true, nullary = false) { - override def apply(tp: Type) = super.apply(tp.normalize) + override def apply(tp: Type) = super.apply(tp.normalize) // normalize is required here } object unwrapToClass extends ClassUnwrapper(existential = true) { } @@ -4170,6 +4157,26 @@ trait Types extends api.Types { self: SymbolTable => } } + /** Repack existential types, otherwise they sometimes get unpacked in the + * wrong location (type inference comes up with an unexpected skolem) + */ + def repackExistential(tp: Type): Type = ( + if (tp == NoType) tp + else existentialAbstraction(existentialsInType(tp), tp) + ) + + def containsExistential(tpe: Type) = tpe exists typeIsExistentiallyBound + def existentialsInType(tpe: Type) = tpe withFilter typeIsExistentiallyBound map (_.typeSymbol) + + private def isDummyOf(tpe: Type)(targ: Type) = { + val sym = targ.typeSymbol + sym.isTypeParameter && sym.owner == tpe.typeSymbol + } + def isDummyAppliedType(tp: Type) = tp.dealias match { + case tr @ TypeRef(_, _, args) => args exists isDummyOf(tr) + case _ => false + } + def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = { val eparams = mapWithIndex(tparams)((tparam, i) => clazz.newExistential(newTypeName("?"+i), clazz.pos) setInfo tparam.info.bounds) @@ -4179,19 +4186,14 @@ trait Types extends api.Types { self: SymbolTable => def typeParamsToExistentials(clazz: Symbol): List[Symbol] = typeParamsToExistentials(clazz, clazz.typeParams) + def isRawIfWithoutArgs(sym: Symbol) = sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined + /** Is type tp a ''raw type''? */ // note: it's important to write the two tests in this order, // as only typeParams forces the classfile to be read. See #400 - private def isRawIfWithoutArgs(sym: Symbol) = - sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined - - def isRaw(sym: Symbol, args: List[Type]) = - !phase.erasedTypes && isRawIfWithoutArgs(sym) && args.isEmpty - - /** Is type tp a ''raw type''? 
*/ - def isRawType(tp: Type) = tp match { - case TypeRef(_, sym, args) => isRaw(sym, args) - case _ => false - } + def isRawType(tp: Type) = !phase.erasedTypes && (tp match { + case TypeRef(_, sym, Nil) => isRawIfWithoutArgs(sym) + case _ => false + }) /** The raw to existential map converts a ''raw type'' to an existential type. * It is necessary because we might have read a raw type of a @@ -5624,7 +5626,11 @@ trait Types extends api.Types { self: SymbolTable => // for (tpFresh <- tpsFresh) tpFresh.setInfo(tpFresh.info.substSym(tparams1, tpsFresh)) } } && annotationsConform(tp1.normalize, tp2.normalize) - case (_, _) => false // @assume !tp1.isHigherKinded || !tp2.isHigherKinded + case (ntp1, ntp2) => + if (isDummyAppliedType(ntp1) || isDummyAppliedType(ntp2)) { + devWarning(s"isHKSubType0($tp1, $tp2, _) contains dummy typeref: ($ntp1, $ntp2)") + } + false // @assume !tp1.isHigherKinded || !tp2.isHigherKinded // --> thus, cannot be subtypes (Any/Nothing has already been checked) })) @@ -5644,7 +5650,11 @@ trait Types extends api.Types { self: SymbolTable => if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2 - if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType0(tp1, tp2, depth) + if (tp1.isHigherKinded && tp2.isHigherKinded) return isHKSubType0(tp1, tp2, depth) + if (tp1.isHigherKinded || tp2.isHigherKinded) { + devWarning(s"isSubType2($tp1, $tp2, _) compares HK type with proper type") + return isHKSubType0(tp1, tp2, depth) + } /** First try, on the right: * - unwrap Annotated types, BoundedWildcardTypes, @@ -5722,7 +5732,7 @@ trait Types extends api.Types { self: SymbolTable => case NotNullClass => tp1.isNotNull case SingletonClass => tp1.isStable || fourthTry case _: ClassSymbol => - if (isRaw(sym2, tp2.args)) + if (isRawType(tp2)) isSubType(tp1, rawToExistential(tp2), depth) else if (sym2.name == tpnme.REFINE_CLASS_NAME) isSubType(tp1, sym2.info, depth) @@ -5804,7 +5814,7 @@ trait Types extends api.Types { self: SymbolTable => isSingleType(tp2) && isSubType(tp1, tp2.widen, depth) } case _: ClassSymbol => - if (isRaw(sym1, tr1.args)) + if (isRawType(tp1)) isSubType(rawToExistential(tp1), tp2, depth) else if (sym1.isModuleClass) tp2 match { case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2) -- cgit v1.2.3 From a8fe8292956f73db3b185bf29d6349f2eb1c3df8 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 14 Jan 2013 23:29:51 -0800 Subject: Add PolyType to Infer#normalize. It arises when inferring the type of an overloaded call: def g(s: String): String = s def f: String = ??? def f[C](c: C): String = g(f) Also refined warning when isHKSubType is called with arguments which very likely were never meant to be compared. 
--- .../scala/tools/nsc/typechecker/Infer.scala | 2 + src/reflect/scala/reflect/internal/Types.scala | 14 +++--- test/files/pos/kinds.scala | 13 ++++++ test/pending/pos/those-kinds-are-high.scala | 53 ++++++++++++++++++++-- 4 files changed, 69 insertions(+), 13 deletions(-) create mode 100644 test/files/pos/kinds.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index e9f3ad7ff8..e652b68b14 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -244,6 +244,8 @@ trait Infer extends Checkable { * This method seems to be performance critical. */ def normalize(tp: Type): Type = tp match { + case pt @ PolyType(tparams, restpe) => + logResult(s"Normalizing $tp in infer")(normalize(restpe)) case mt @ MethodType(params, restpe) if mt.isImplicit => normalize(restpe) case mt @ MethodType(_, restpe) if !mt.isDependentMethodType => diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index dd73dbe6f6..6fc99874ed 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -5626,10 +5626,12 @@ trait Types extends api.Types { self: SymbolTable => // for (tpFresh <- tpsFresh) tpFresh.setInfo(tpFresh.info.substSym(tparams1, tpsFresh)) } } && annotationsConform(tp1.normalize, tp2.normalize) + + case (PolyType(_, _), MethodType(params, _)) if params exists (_.tpe.isWildcard) => + false // don't warn on HasMethodMatching on right hand side + case (ntp1, ntp2) => - if (isDummyAppliedType(ntp1) || isDummyAppliedType(ntp2)) { - devWarning(s"isHKSubType0($tp1, $tp2, _) contains dummy typeref: ($ntp1, $ntp2)") - } + devWarning(s"isHKSubType0($tp1, $tp2, _) is ${tp1.getClass}, ${tp2.getClass}: ($ntp1, $ntp2)") false // @assume !tp1.isHigherKinded || !tp2.isHigherKinded // --> thus, cannot be subtypes (Any/Nothing has already been checked) })) @@ -5650,11 +5652,7 @@ trait Types extends api.Types { self: SymbolTable => if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2 - if (tp1.isHigherKinded && tp2.isHigherKinded) return isHKSubType0(tp1, tp2, depth) - if (tp1.isHigherKinded || tp2.isHigherKinded) { - devWarning(s"isSubType2($tp1, $tp2, _) compares HK type with proper type") - return isHKSubType0(tp1, tp2, depth) - } + if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType0(tp1, tp2, depth) /** First try, on the right: * - unwrap Annotated types, BoundedWildcardTypes, diff --git a/test/files/pos/kinds.scala b/test/files/pos/kinds.scala new file mode 100644 index 0000000000..6d6da0c8b6 --- /dev/null +++ b/test/files/pos/kinds.scala @@ -0,0 +1,13 @@ +trait IllKind1 { + def g(s: String): String = s + def f: String = ??? + def f[C](c: C): String = g(f) +} + +trait IllKind2 { + def b1: Char = ??? + def b2: Byte = ??? 
+ + def f1 = "abc" contains b1 + def f2 = "abc" contains b2 +} diff --git a/test/pending/pos/those-kinds-are-high.scala b/test/pending/pos/those-kinds-are-high.scala index 434e64cefb..78367cb746 100644 --- a/test/pending/pos/those-kinds-are-high.scala +++ b/test/pending/pos/those-kinds-are-high.scala @@ -4,18 +4,18 @@ class A { class C1[T] extends Template[C1] with Container[T] class C2[T] extends Template[C2] with Container[T] - + /** Target expression: * List(new C1[String], new C2[String]) */ - + // Here's what would ideally be inferred. // // scala> :type List[Template[Container] with Container[String]](new C1[String], new C2[String]) // List[Template[Container] with Container[java.lang.String]] // // Here's what it does infer. - // + // // scala> :type List(new C1[String], new C2[String]) // :8: error: type mismatch; // found : C1[String] @@ -43,11 +43,54 @@ class A { // def fFail = List(new C1[String], new C2[String]) // ^ // two errors found - + /** Working version explicitly typed. */ def fExplicit = List[Template[Container] with Container[String]](new C1[String], new C2[String]) - + // nope def fFail = List(new C1[String], new C2[String]) } + + +trait Other { + trait GenBar[+A] + trait Bar[+A] extends GenBar[A] + trait Templ[+A, +CC[X] <: GenBar[X]] + + abstract class CC1[+A] extends Templ[A, CC1] with Bar[A] + abstract class CC2[+A] extends Templ[A, CC2] with Bar[A] + + // Compiles + class A1 { + abstract class BarFactory[CC[X] <: Bar[X]] + + def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2]) + } + + // Fails - only difference is CC covariant. + class A2 { + abstract class BarFactory[+CC[X] <: Bar[X]] + + def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2]) + // c.scala:23: error: kinds of the type arguments (Bar with Templ[Any,Bar]) do not conform to the expected kinds of the type parameters (type CC) in class BarFactory. + // Bar with Templ[Any,Bar]'s type parameters do not match type CC's expected parameters: + // has no type parameters, but type CC has one + // def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2]) + // ^ + // one error found + } + + // Compiles - CC contravariant. + class A3 { + abstract class BarFactory[-CC[X] <: Bar[X]] // with Templ[X, CC]] + + def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2]) + // c.scala:23: error: kinds of the type arguments (Bar with Templ[Any,Bar]) do not conform to the expected kinds of the type parameters (type CC) in class BarFactory. + // Bar with Templ[Any,Bar]'s type parameters do not match type CC's expected parameters: + // has no type parameters, but type CC has one + // def f(x: Boolean) = if (x) (null: BarFactory[CC1]) else (null: BarFactory[CC2]) + // ^ + // one error found + } +} -- cgit v1.2.3 From 3623432292d660b3420c13d584871a1164a71727 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 15 Jan 2013 07:35:16 -0800 Subject: Put back a method which sbt is using. Good catch, ant target sbt.compile. 
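A hedged sketch of how a caller that still holds a (sym, args) pair -- as sbt
apparently does -- relates to the newer entry point; `tp` is assumed to be a
TypeRef and the surrounding match is illustrative, not code from the patch:

    tp match {
      case TypeRef(_, sym, args) =>
        val old = isRaw(sym, args)   // deprecated shim restored below
        val now = isRawType(tp)      // preferred replacement
        // both require !phase.erasedTypes, an empty argument list and
        // isRawIfWithoutArgs(sym), so for a TypeRef they should agree
        assert(old == now)
      case _ =>
    }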
--- src/reflect/scala/reflect/internal/Types.scala | 7 +++++++ 1 file changed, 7 insertions(+) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 6fc99874ed..1bb3fd300b 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4195,6 +4195,13 @@ trait Types extends api.Types { self: SymbolTable => case _ => false }) + @deprecated("Use isRawType", "2.10.1") // presently used by sbt + def isRaw(sym: Symbol, args: List[Type]) = ( + !phase.erasedTypes + && args.isEmpty + && isRawIfWithoutArgs(sym) + ) + /** The raw to existential map converts a ''raw type'' to an existential type. * It is necessary because we might have read a raw type of a * parameterized Java class from a class file. At the time we read the type -- cgit v1.2.3 From f01e001c77bca0bf09c6594251af6573c76f1c4c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 26 Jan 2013 11:16:58 -0800 Subject: Make sure typed isn't called with an erroneous tree. I can't see that it makes any difference, but this is approximately the way it was before. --- .../scala/tools/nsc/typechecker/Typers.scala | 26 +++++++++++++--------- 1 file changed, 15 insertions(+), 11 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index f79723002e..4e4cbabd9d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3390,6 +3390,14 @@ trait Typers extends Adaptations with Tags { var hasError: Boolean = false val pending = ListBuffer[AbsTypeError]() + def finish(res: AnnotationInfo): AnnotationInfo = { + if (hasError) { + pending.foreach(ErrorUtils.issueTypeError) + ErroneousAnnotation + } + else res + } + def reportAnnotationError(err: AbsTypeError) = { pending += err hasError = true @@ -3470,6 +3478,8 @@ trait Typers extends Adaptations with Tags { // begin typedAnnotation val treeInfo.Applied(fun0, targs, argss) = ann + if (fun0.isErroneous) + return finish(ErroneousAnnotation) val typedFun0 = typed(fun0, mode.forFunMode, WildcardType) val typedFunPart = ( // If there are dummy type arguments in typeFun part, it suggests we @@ -3483,9 +3493,9 @@ trait Typers extends Adaptations with Tags { val treeInfo.Applied(typedFun @ Select(New(annTpt), _), _, _) = typedFunPart val annType = annTpt.tpe - val res = if (typedFun.isErroneous) ErroneousAnnotation - else { - if (typedFun.isErroneous) ErroneousAnnotation + finish( + if (typedFun.isErroneous) + ErroneousAnnotation else if (annType.typeSymbol isNonBottomSubClass ClassfileAnnotationClass) { // annotation to be saved as java classfile annotation val isJava = typedFun.symbol.owner.isJavaDefined @@ -3558,7 +3568,7 @@ trait Typers extends Adaptations with Tags { val Function(arg :: Nil, rhs) = typed(func, mode, funcType) rhs.substituteSymbols(arg.symbol :: Nil, selfsym :: Nil) - } + } def annInfo(t: Tree): AnnotationInfo = t match { case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => @@ -3585,13 +3595,7 @@ trait Typers extends Adaptations with Tags { if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation else annInfo(typedAnn) - } - } - - if (hasError) { - pending.foreach(ErrorUtils.issueTypeError) - ErroneousAnnotation - } else res + }) } def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type? 
-- cgit v1.2.3 From b6f898f0811a72b423b6bef17cd2bf6791f1f5f0 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Fri, 25 Jan 2013 11:44:09 -0500 Subject: SI-6939 Fix namespace binding (xmlns) not overriding outer binding Given a nested XML literal to the compiler Elem instance is generated with namespace binding of the inner element copying that of the outer element: val foo = With the above example, `foo.child.head.scope.toString` returns " xmlns:x="http://bar.com/" xmlns:x="http://foo.com/"" This is incorrect since the outer xmls:x should be overridden by the inner binding. XML library also parses XML document in a similar manner: val foo2 = scala.xml.XML.loadString("""""") Despite this erroneous behavior, since the structure of NamespaceBinding class is designed to be singly-linked list, the stacking of namespace bindings allows constant-time creation with simple implementation. Since the inner namespace binding comes before the outer one, query methods like `getURI` method behave correctly. Because this bug is manifested when Elem is turned back into XML string, it could be fixed by shadowing the redefined namespace binding right when buildString is called. With this change `foo.child.head.scope.toString` now returns: " xmlns:x="http://bar.com/"" --- src/library/scala/xml/NamespaceBinding.scala | 24 ++++++++++++++++++++++-- test/files/run/t6939.scala | 13 +++++++++++++ 2 files changed, 35 insertions(+), 2 deletions(-) create mode 100644 test/files/run/t6939.scala (limited to 'src') diff --git a/src/library/scala/xml/NamespaceBinding.scala b/src/library/scala/xml/NamespaceBinding.scala index c7cd9e6b6c..3a63d47d4e 100644 --- a/src/library/scala/xml/NamespaceBinding.scala +++ b/src/library/scala/xml/NamespaceBinding.scala @@ -38,6 +38,22 @@ case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBindin override def toString(): String = sbToString(buildString(_, TopScope)) + private def shadowRedefined: NamespaceBinding = shadowRedefined(TopScope) + + private def shadowRedefined(stop: NamespaceBinding): NamespaceBinding = { + def prefixList(x: NamespaceBinding): List[String] = + if ((x == null) || (x eq stop)) Nil + else x.prefix :: prefixList(x.parent) + def fromPrefixList(l: List[String]): NamespaceBinding = l match { + case Nil => stop + case x :: xs => new NamespaceBinding(x, this.getURI(x), fromPrefixList(xs)) + } + val ps0 = prefixList(this).reverse + val ps = ps0.distinct + if (ps.size == ps0.size) this + else fromPrefixList(ps) + } + override def canEqual(other: Any) = other match { case _: NamespaceBinding => true case _ => false @@ -53,12 +69,16 @@ case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBindin def buildString(stop: NamespaceBinding): String = sbToString(buildString(_, stop)) def buildString(sb: StringBuilder, stop: NamespaceBinding) { - if (this eq stop) return // contains? + shadowRedefined(stop).doBuildString(sb, stop) + } + + private def doBuildString(sb: StringBuilder, stop: NamespaceBinding) { + if ((this == null) || (this eq stop)) return // contains? 
val s = " xmlns%s=\"%s\"".format( (if (prefix != null) ":" + prefix else ""), (if (uri != null) uri else "") ) - parent.buildString(sb append s, stop) // copy(ignore) + parent.doBuildString(sb append s, stop) // copy(ignore) } } diff --git a/test/files/run/t6939.scala b/test/files/run/t6939.scala new file mode 100644 index 0000000000..9fe721555f --- /dev/null +++ b/test/files/run/t6939.scala @@ -0,0 +1,13 @@ +object Test extends App { + val foo = + assert(foo.child.head.scope.toString == """ xmlns:x="http://bar.com/"""") + + val fooDefault = + assert(fooDefault.child.head.scope.toString == """ xmlns="http://bar.com/"""") + + val foo2 = scala.xml.XML.loadString("""""") + assert(foo2.child.head.scope.toString == """ xmlns:x="http://bar.com/"""") + + val foo2Default = scala.xml.XML.loadString("""""") + assert(foo2Default.child.head.scope.toString == """ xmlns="http://bar.com/"""") +} -- cgit v1.2.3 From aa199b8f612724175c526eef7413bcfa5534b653 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Mon, 28 Jan 2013 20:45:49 -0800 Subject: Revert "SI-6811 Misc. removals in util, testing, io, ..." This partially reverts commit f931833df8cc69d119f636d8a553941bf7ce2349. The commit got reverted because it breaks Sbt that relies on the old implementation of MurmurHash. The new implementation got introduced in Scala 2.10 but sbt supports Scala 2.9 so there's no way to migrate it to the new implementation hence we have to keep the old one a while longer. Review by @paulp --- src/library/scala/util/MurmurHash.scala | 197 +++++++++++++++++++++++ src/library/scala/util/hashing/MurmurHash3.scala | 8 + 2 files changed, 205 insertions(+) create mode 100644 src/library/scala/util/MurmurHash.scala (limited to 'src') diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala new file mode 100644 index 0000000000..a5bc8faf8d --- /dev/null +++ b/src/library/scala/util/MurmurHash.scala @@ -0,0 +1,197 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.util + +/** An implementation of Austin Appleby's MurmurHash 3.0 algorithm + * (32 bit version); reference: http://code.google.com/p/smhasher + * + * This is the hash used by collections and case classes (including + * tuples). + * + * @author Rex Kerr + * @version 2.9 + * @since 2.9 + */ + +import java.lang.Integer.{ rotateLeft => rotl } +import scala.collection.Iterator + +/** A class designed to generate well-distributed non-cryptographic + * hashes. It is designed to be passed to a collection's foreach method, + * or can take individual hash values with append. Its own hash code is + * set equal to the hash code of whatever it is hashing. + */ +@deprecated("Use the object MurmurHash3 instead.", "2.10.0") +class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T => Unit) { + import MurmurHash._ + + private var h = startHash(seed) + private var c = hiddenMagicA + private var k = hiddenMagicB + private var hashed = false + private var hashvalue = h + + /** Begin a new hash using the same seed. */ + def reset() { + h = startHash(seed) + c = hiddenMagicA + k = hiddenMagicB + hashed = false + } + + /** Incorporate the hash value of one item. */ + def apply(t: T) { + h = extendHash(h,t.##,c,k) + c = nextMagicA(c) + k = nextMagicB(k) + hashed = false + } + + /** Incorporate a known hash value. 
*/ + def append(i: Int) { + h = extendHash(h,i,c,k) + c = nextMagicA(c) + k = nextMagicB(k) + hashed = false + } + + /** Retrieve the hash value */ + def hash = { + if (!hashed) { + hashvalue = finalizeHash(h) + hashed = true + } + hashvalue + } + override def hashCode = hash +} + +/** An object designed to generate well-distributed non-cryptographic + * hashes. It is designed to hash a collection of integers; along with + * the integers to hash, it generates two magic streams of integers to + * increase the distribution of repetitive input sequences. Thus, + * three methods need to be called at each step (to start and to + * incorporate a new integer) to update the values. Only one method + * needs to be called to finalize the hash. + */ +@deprecated("Use the object MurmurHash3 instead.", "2.10.0") +object MurmurHash { + // Magic values used for MurmurHash's 32 bit hash. + // Don't change these without consulting a hashing expert! + final private val visibleMagic = 0x971e137b + final private val hiddenMagicA = 0x95543787 + final private val hiddenMagicB = 0x2ad7eb25 + final private val visibleMixer = 0x52dce729 + final private val hiddenMixerA = 0x7b7d159c + final private val hiddenMixerB = 0x6bce6396 + final private val finalMixer1 = 0x85ebca6b + final private val finalMixer2 = 0xc2b2ae35 + + // Arbitrary values used for hashing certain classes + final private val seedString = 0xf7ca7fd2 + final private val seedArray = 0x3c074a61 + + /** The first 23 magic integers from the first stream are stored here */ + val storedMagicA = + Iterator.iterate(hiddenMagicA)(nextMagicA).take(23).toArray + + /** The first 23 magic integers from the second stream are stored here */ + val storedMagicB = + Iterator.iterate(hiddenMagicB)(nextMagicB).take(23).toArray + + /** Begin a new hash with a seed value. */ + def startHash(seed: Int) = seed ^ visibleMagic + + /** The initial magic integers in the first stream. */ + def startMagicA = hiddenMagicA + + /** The initial magic integer in the second stream. */ + def startMagicB = hiddenMagicB + + /** Incorporates a new value into an existing hash. 
+ * + * @param hash the prior hash value + * @param value the new value to incorporate + * @param magicA a magic integer from the stream + * @param magicB a magic integer from a different stream + * @return the updated hash value + */ + def extendHash(hash: Int, value: Int, magicA: Int, magicB: Int) = { + (hash ^ rotl(value*magicA,11)*magicB)*3 + visibleMixer + } + + /** Given a magic integer from the first stream, compute the next */ + def nextMagicA(magicA: Int) = magicA*5 + hiddenMixerA + + /** Given a magic integer from the second stream, compute the next */ + def nextMagicB(magicB: Int) = magicB*5 + hiddenMixerB + + /** Once all hashes have been incorporated, this performs a final mixing */ + def finalizeHash(hash: Int) = { + var i = (hash ^ (hash>>>16)) + i *= finalMixer1 + i ^= (i >>> 13) + i *= finalMixer2 + i ^= (i >>> 16) + i + } + + /** Compute a high-quality hash of an array */ + def arrayHash[@specialized T](a: Array[T]) = { + var h = startHash(a.length * seedArray) + var c = hiddenMagicA + var k = hiddenMagicB + var j = 0 + while (j < a.length) { + h = extendHash(h, a(j).##, c, k) + c = nextMagicA(c) + k = nextMagicB(k) + j += 1 + } + finalizeHash(h) + } + + /** Compute a high-quality hash of a string */ + def stringHash(s: String) = { + var h = startHash(s.length * seedString) + var c = hiddenMagicA + var k = hiddenMagicB + var j = 0 + while (j+1 < s.length) { + val i = (s.charAt(j)<<16) + s.charAt(j+1); + h = extendHash(h,i,c,k) + c = nextMagicA(c) + k = nextMagicB(k) + j += 2 + } + if (j < s.length) h = extendHash(h,s.charAt(j),c,k) + finalizeHash(h) + } + + /** Compute a hash that is symmetric in its arguments--that is, + * where the order of appearance of elements does not matter. + * This is useful for hashing sets, for example. + */ + def symmetricHash[T](xs: scala.collection.TraversableOnce[T], seed: Int) = { + var a,b,n = 0 + var c = 1 + xs.seq.foreach(i => { + val h = i.## + a += h + b ^= h + if (h != 0) c *= h + n += 1 + }) + var h = startHash(seed * n) + h = extendHash(h, a, storedMagicA(0), storedMagicB(0)) + h = extendHash(h, b, storedMagicA(1), storedMagicB(1)) + h = extendHash(h, c, storedMagicA(2), storedMagicB(2)) + finalizeHash(h) + } +} diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala index 5c74bc5a2e..0aa7e6f1cb 100644 --- a/src/library/scala/util/hashing/MurmurHash3.scala +++ b/src/library/scala/util/hashing/MurmurHash3.scala @@ -274,4 +274,12 @@ object MurmurHash3 extends MurmurHash3 { finalizeHash(h, n) } */ + + @deprecated("Use unorderedHash", "2.10.0") + final def symmetricHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = symmetricSeed): Int = + unorderedHash(xs.seq, seed) + + @deprecated("Use orderedHash", "2.10.0") + final def traversableHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = traversableSeed): Int = + orderedHash(xs.seq, seed) } -- cgit v1.2.3 From 0388a7cdb111f0dd6b86bc838ffe51de3df28b4c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 29 Jan 2013 13:21:39 -0800 Subject: Renames normalize to normalizeModifiers. Since we still have too many methods called normalize. 
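In source terms, the renamed method's two normalizations (per its doc comment
in the diff below) amount to roughly the following; the declarations are
made-up examples, not code from the patch:

    package util {
      class C {
        private[util] def f = 0
        // the PRIVATE flag is dropped because an access boundary ([util])
        // is present; the boundary itself is kept
      }
    }

    trait Logger { def log(msg: String): Unit }
    trait Stamped extends Logger {
      abstract override def log(msg: String): Unit = super.log("* " + msg)
      // ABSTRACT and OVERRIDE are contracted into the single ABSOVERRIDE flag
    }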
--- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 37da1b44bb..61c65c211b 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1957,11 +1957,11 @@ self => /** Drop `private` modifier when followed by a qualifier. * Contract `abstract` and `override` to ABSOVERRIDE */ - private def normalize(mods: Modifiers): Modifiers = + private def normalizeModifers(mods: Modifiers): Modifiers = if (mods.isPrivate && mods.hasAccessBoundary) - normalize(mods &~ Flags.PRIVATE) + normalizeModifers(mods &~ Flags.PRIVATE) else if (mods hasAllFlags (Flags.ABSTRACT | Flags.OVERRIDE)) - normalize(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE) + normalizeModifers(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE) else mods @@ -2006,7 +2006,7 @@ self => * AccessModifier ::= (private | protected) [AccessQualifier] * }}} */ - def accessModifierOpt(): Modifiers = normalize { + def accessModifierOpt(): Modifiers = normalizeModifers { in.token match { case m @ (PRIVATE | PROTECTED) => in.nextToken() ; accessQualifierOpt(Modifiers(flagTokens(m))) case _ => NoMods @@ -2020,7 +2020,7 @@ self => * | override * }}} */ - def modifiers(): Modifiers = normalize { + def modifiers(): Modifiers = normalizeModifers { def loop(mods: Modifiers): Modifiers = in.token match { case PRIVATE | PROTECTED => loop(accessQualifierOpt(addMod(mods, flagTokens(in.token), tokenRange(in)))) -- cgit v1.2.3 From 039b1cb1a5b8738bb3731035838d2fcaeb317d07 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 29 Jan 2013 13:16:23 -0800 Subject: Changes many calls to normalize to dealiasWiden. Calling normalize is very aggressive and is usually the wrong thing. It is one of the leading contributors to non-determinism in compiler outcomes (often of the form "I gave a debugging or logging compiler option and it started/stopped working") and should be used only in very specific circumstances. Almost without exception, dealiasWiden is what you want; not widen, not normalize. If possible I will remove normalize from Type entirely, making it private to those areas of the compiler which actually require it. 
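A hedged illustration of the widen / dealias / dealiasWiden distinction that
motivates the bulk of this patch; `S` and `v` are made-up names and the
right-hand sides describe the intended results of the internal Type operations:

    type S = String
    val v: S = ""

    // for the singleton type `v.type`:
    //   v.type.widen        == S         -- the alias still hides String
    //   v.type.dealias      == v.type    -- a singleton is not an alias
    //   v.type.dealiasWiden == String    -- widen and dealias to a fixed point
    //
    // `normalize` would also reach String here, but on other types it may
    // eta-expand, beta-reduce or force symbol infos along the way -- the
    // side effects the message above warns about.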
--- src/compiler/scala/reflect/reify/package.scala | 2 +- src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala | 4 ++-- src/compiler/scala/tools/nsc/dependencies/Changes.scala | 2 +- .../scala/tools/nsc/interpreter/CompletionOutput.scala | 2 +- src/compiler/scala/tools/nsc/transform/Erasure.scala | 10 +++++----- .../scala/tools/nsc/transform/SpecializeTypes.scala | 2 +- .../scala/tools/nsc/typechecker/ContextErrors.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 6 +++--- .../scala/tools/nsc/typechecker/PatternMatching.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 12 ++++++------ .../scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 14 +++++++------- .../plugin/scala/tools/selectivecps/CPSUtils.scala | 2 +- src/reflect/scala/reflect/internal/Definitions.scala | 10 +++++----- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- src/reflect/scala/reflect/internal/TreeGen.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 2 +- src/reflect/scala/reflect/internal/transform/Erasure.scala | 6 +++--- src/reflect/scala/reflect/internal/transform/UnCurry.scala | 10 +++++++++- test/files/neg/t2641.check | 12 ++---------- 20 files changed, 53 insertions(+), 53 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala index 7be57c0cb7..78f85c2634 100644 --- a/src/compiler/scala/reflect/reify/package.scala +++ b/src/compiler/scala/reflect/reify/package.scala @@ -56,7 +56,7 @@ package object reify { if (concrete) throw new ReificationException(enclosingMacroPosition, "tpe %s is an unresolved spliceable type".format(tpe)) } - tpe.normalize match { + tpe.dealiasWiden match { case TypeRef(_, ArrayClass, componentTpe :: Nil) => val componentErasure = reifyRuntimeClass(global)(typer0, componentTpe, concrete) gen.mkMethodCall(arrayClassMethod, List(componentErasure)) diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index 84f5fe2678..a32b00f385 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -368,10 +368,10 @@ trait TypeKinds { self: ICodes => /** Return the TypeKind of the given type * - * Call to .normalize fixes #3003 (follow type aliases). Otherwise, + * Call to dealiasWiden fixes #3003 (follow type aliases). Otherwise, * arrayOrClassType below would return ObjectReference. */ - def toTypeKind(t: Type): TypeKind = t.normalize match { + def toTypeKind(t: Type): TypeKind = t.dealiasWiden match { case ThisType(ArrayClass) => ObjectReference case ThisType(sym) => REFERENCE(sym) case SingleType(_, sym) => primitiveOrRefType(sym) diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala index b3cacee20a..7807f0ba03 100644 --- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala +++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala @@ -90,11 +90,11 @@ abstract class Changes { } else !sym1.isTypeParameter || !changedTypeParams.contains(sym1.fullName) + // @M! normalize reduces higher-kinded case to PolyType's testSymbols && sameType(pre1, pre2) && (sym1.variance == sym2.variance) && ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) || sameTypes(args1, args2)) - // @M! 
normalize reduces higher-kinded case to PolyType's case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) => def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall { diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala index c5bb8494ce..d24ad60974 100644 --- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala +++ b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala @@ -75,7 +75,7 @@ trait CompletionOutput { } def methodString() = - method.keyString + " " + method.nameString + (method.info.normalize match { + method.keyString + " " + method.nameString + (method.info.dealiasWiden match { case NullaryMethodType(resType) => ": " + typeToString(resType) case PolyType(tparams, resType) => tparamsString(tparams) + typeToString(resType) case mt @ MethodType(_, _) => methodTypeToString(mt) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 0d50282000..f380b9d04f 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -101,7 +101,7 @@ abstract class Erasure extends AddInterfaces * unboxing some primitive types and further simplifications as they are done in jsig. */ val prepareSigMap = new TypeMap { - def squashBoxed(tp: Type): Type = tp.normalize match { + def squashBoxed(tp: Type): Type = tp.dealiasWiden match { case t @ RefinedType(parents, decls) => val parents1 = parents mapConserve squashBoxed if (parents1 eq parents) tp @@ -114,7 +114,7 @@ abstract class Erasure extends AddInterfaces if (boxedClass contains t.typeSymbol) ObjectClass.tpe else tp } - def apply(tp: Type): Type = tp.normalize match { + def apply(tp: Type): Type = tp.dealiasWiden match { case tp1 @ TypeBounds(lo, hi) => val lo1 = squashBoxed(apply(lo)) val hi1 = squashBoxed(apply(hi)) @@ -145,7 +145,7 @@ abstract class Erasure extends AddInterfaces } case tp1 @ MethodType(params, restpe) => val params1 = mapOver(params) - val restpe1 = if (restpe.normalize.typeSymbol == UnitClass) UnitClass.tpe else apply(restpe) + val restpe1 = if (restpe.typeSymbol == UnitClass) UnitClass.tpe else apply(restpe) if ((params1 eq params) && (restpe1 eq restpe)) tp1 else MethodType(params1, restpe1) case tp1 @ RefinedType(parents, decls) => @@ -163,8 +163,8 @@ abstract class Erasure extends AddInterfaces } } - private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.normalize match { - case RefinedType(parents, _) => parents map (_.normalize) + private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.dealiasWiden match { + case RefinedType(parents, _) => parents map (_.dealiasWiden) case tp => tp :: Nil } diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 343f95782e..0cd7f516ef 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -424,7 +424,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def specializedTypeVars(tpe: Type): immutable.Set[Symbol] = tpe match { case TypeRef(pre, sym, args) => if (sym.isAliasType) - specializedTypeVars(tpe.normalize) + specializedTypeVars(tpe.dealiasWiden) else if (sym.isTypeParameter && sym.isSpecialized || (sym.isTypeSkolem && sym.deSkolemize.isSpecialized)) Set(sym) else if (sym == ArrayClass) diff --git 
a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 7bbbcdf541..4e4513dcef 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -153,7 +153,7 @@ trait ContextErrors { // members present, then display along with the expected members. This is done here because // this is the last point where we still have access to the original tree, rather than just // the found/req types. - val foundType: Type = req.normalize match { + val foundType: Type = req.dealiasWiden match { case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass => val retyped = typed (tree.duplicate.clearType()) val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index bcf9910c5a..d32930f4f2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -347,7 +347,7 @@ trait Implicits { * if one or both are intersection types with a pair of overlapping parent types. */ private def dominates(dtor: Type, dted: Type): Boolean = { - def core(tp: Type): Type = tp.normalize match { + def core(tp: Type): Type = tp.dealiasWiden match { case RefinedType(parents, defs) => intersectionType(parents map core, tp.typeSymbol.owner) case AnnotatedType(annots, tp, selfsym) => core(tp) case ExistentialType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi))) @@ -362,11 +362,11 @@ trait Implicits { deriveTypeWithWildcards(syms.distinct)(tp) } def sum(xs: List[Int]) = (0 /: xs)(_ + _) - def complexity(tp: Type): Int = tp.normalize match { + def complexity(tp: Type): Int = tp.dealiasWiden match { case NoPrefix => 0 case SingleType(pre, sym) => - if (sym.isPackage) 0 else complexity(tp.normalize.widen) + if (sym.isPackage) 0 else complexity(tp.dealiasWiden) case TypeRef(pre, sym, args) => complexity(pre) + sum(args map complexity) + 1 case RefinedType(parents, _) => diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 27bdad3066..6d5eff460f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -477,7 +477,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL **/ // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type case MaybeBoundTyped(subPatBinder, pt) => - val next = glb(List(patBinder.info.widen, pt)).normalize + val next = glb(List(patBinder.info.dealiasWiden, pt)).normalize // a typed pattern never has any subtrees noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos)) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index fd3b020b1a..285e1cb7af 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -68,7 +68,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans if (sym.hasAccessBoundary) "" + sym.privateWithin.name else "" ) - def overridesTypeInPrefix(tp1: Type, tp2: Type, 
prefix: Type): Boolean = (tp1.normalize, tp2.normalize) match { + def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.dealiasWiden, tp2.dealiasWiden) match { case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) => rtp1 <:< rtp2 case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) => @@ -472,12 +472,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // check a type alias's RHS corresponds to its declaration // this overlaps somewhat with validateVariance if(member.isAliasType) { - // println("checkKindBounds" + ((List(member), List(memberTp.normalize), self, member.owner))) - val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.normalize), self, member.owner) + // println("checkKindBounds" + ((List(member), List(memberTp.dealiasWiden), self, member.owner))) + val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.dealiasWiden), self, member.owner) if(!kindErrors.isEmpty) unit.error(member.pos, - "The kind of the right-hand side "+memberTp.normalize+" of "+member.keyString+" "+ + "The kind of the right-hand side "+memberTp.dealiasWiden+" of "+member.keyString+" "+ member.varianceString + member.nameString+ " does not conform to its expected kind."+ kindErrors.toList.mkString("\n", ", ", "")) } else if (member.isAbstractType) { @@ -496,7 +496,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans if (member.isStable && !otherTp.isVolatile) { if (memberTp.isVolatile) overrideError("has a volatile type; cannot override a member with non-volatile type") - else memberTp.normalize.resultType match { + else memberTp.dealiasWiden.resultType match { case rt: RefinedType if !(rt =:= otherTp) && !(checkedCombinations contains rt.parents) => // might mask some inconsistencies -- check overrides checkedCombinations += rt.parents @@ -1298,7 +1298,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // if the unnormalized type is accessible, that's good enough if (inaccessible.isEmpty) () // or if the normalized type is, that's good too - else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.normalize, member).isEmpty) () + else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.dealiasWiden, member).isEmpty) () // otherwise warn about the inaccessible syms in the unnormalized type else inaccessible foreach (sym => warnLessAccessible(sym, member)) } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index ab1751b4f0..af484a47e2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -213,7 +213,7 @@ trait TypeDiagnostics { // force measures than comparing normalized Strings were producing error messages // like "and java.util.ArrayList[String] <: java.util.ArrayList[String]" but there // should be a cleaner way to do this. 
- if (found.normalize.toString == tp.normalize.toString) "" + if (found.dealiasWiden.toString == tp.dealiasWiden.toString) "" else " (and %s <: %s)".format(found, tp) ) val explainDef = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 8fbc143fbf..51a31f6fc7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2437,7 +2437,7 @@ trait Typers extends Adaptations with Tags { // but not in real life (i.e., now that's we've reset the method's type skolems' // infos back to their pre-GADT-constraint state) if (isFullyDefined(pt) && !(body1.tpe <:< pt)) - body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.normalize)) + body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.dealiasWiden)) } @@ -2507,7 +2507,7 @@ trait Typers extends Adaptations with Tags { */ def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Mode, pt: Type): Tree = { assert(pt.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt.") - val targs = pt.normalize.typeArgs + val targs = pt.dealiasWiden.typeArgs // if targs.head isn't fully defined, we can translate --> error targs match { @@ -2665,10 +2665,10 @@ trait Typers extends Adaptations with Tags { def decompose(pt: Type): (Symbol, List[Type], Type) = if ((isFunctionType(pt) || (pt.typeSymbol == PartialFunctionClass && numVparams == 1 && fun.body.isInstanceOf[Match])) && // see bug901 for a reason why next conditions are needed - ( pt.normalize.typeArgs.length - 1 == numVparams + ( pt.dealiasWiden.typeArgs.length - 1 == numVparams || fun.vparams.exists(_.tpt.isEmpty) )) - (pt.typeSymbol, pt.normalize.typeArgs.init, pt.normalize.typeArgs.last) + (pt.typeSymbol, pt.dealiasWiden.typeArgs.init, pt.dealiasWiden.typeArgs.last) else (FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType) @@ -3316,7 +3316,7 @@ trait Typers extends Adaptations with Tags { if (fun1.tpe.isErroneous) duplErrTree else { - val resTp = fun1.tpe.finalResultType.normalize + val resTp = fun1.tpe.finalResultType.dealiasWiden val nbSubPats = args.length val (formals, formalsExpanded) = extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol) @@ -3364,7 +3364,7 @@ trait Typers extends Adaptations with Tags { def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (isPastTyper) None else { // only look at top-level type, can't (reliably) do anything about unchecked type args (in general) // but at least make a proper type before passing it elsewhere - val pt1 = pt.dealias match { + val pt1 = pt.dealiasWiden match { case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies case pt1 => pt1 } @@ -4209,7 +4209,7 @@ trait Typers extends Adaptations with Tags { if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass)) synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, tree, mode, pt) else { - val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1 + val arity = if (isFunctionType(pt)) pt.dealiasWiden.typeArgs.length - 1 else 1 val params = for (i <- List.range(0, arity)) yield atPos(tree.pos.focusStart) { ValDef(Modifiers(PARAM | SYNTHETIC), diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala 
b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala index 4924e056af..29480576ea 100644 --- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala +++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala @@ -61,7 +61,7 @@ trait CPSUtils { // annotation checker protected def annTypes(ann: AnnotationInfo): (Type, Type) = { - val tp0 :: tp1 :: Nil = ann.atp.normalize.typeArgs + val tp0 :: tp1 :: Nil = ann.atp.dealiasWiden.typeArgs ((tp0, tp1)) } protected def hasMinusMarker(tpe: Type) = tpe hasAnnotation MarkerCPSAdaptMinus diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 12fb77dab1..1b0bbe5a06 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -626,7 +626,7 @@ trait Definitions extends api.StandardDefinitions { len <= MaxTupleArity && sym == TupleClass(len) case _ => false } - def isTupleType(tp: Type) = isTupleTypeDirect(tp.normalize) + def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden) lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product] def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity) @@ -648,8 +648,8 @@ trait Definitions extends api.StandardDefinitions { case _ => tp } - def unapplyUnwrap(tpe:Type) = tpe.finalResultType.normalize match { - case RefinedType(p :: _, _) => p.normalize + def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match { + case RefinedType(p :: _, _) => p.dealiasWiden case tp => tp } @@ -657,7 +657,7 @@ trait Definitions extends api.StandardDefinitions { if (isFunctionType(tp)) abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last) else NoType - def isFunctionType(tp: Type): Boolean = tp.normalize match { + def isFunctionType(tp: Type): Boolean = tp.dealiasWiden match { case TypeRef(_, sym, args) if args.nonEmpty => val arity = args.length - 1 // -1 is the return type arity <= MaxFunctionArity && sym == FunctionClass(arity) @@ -1145,7 +1145,7 @@ trait Definitions extends api.StandardDefinitions { else if (sym.isTopLevel) sym.javaClassName else flatNameString(sym.owner, separator) + nme.NAME_JOIN_STRING + sym.simpleName def signature1(etp: Type): String = { - if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.normalize.typeArgs.head)) + if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.dealiasWiden.typeArgs.head)) else if (isPrimitiveValueClass(etp.typeSymbol)) abbrvTag(etp.typeSymbol).toString() else "L" + flatNameString(etp.typeSymbol, '/') + ";" } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 0969d9e3fa..d9fafd25ae 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -844,7 +844,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Is this class or type defined as a structural refinement type? */ final def isStructuralRefinement: Boolean = - (isClass || isType || isModule) && info.normalize/*.underlying*/.isStructuralRefinement + (isClass || isType || isModule) && info.dealiasWiden/*.underlying*/.isStructuralRefinement /** Is this a term symbol only defined in a refinement (so that it needs * to be accessed by reflection)? 
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 54a85dee86..b2269e476f 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -212,7 +212,7 @@ abstract class TreeGen extends macros.TreeBuilder { mkTypeApply(mkAttributedSelect(target, method), targs map TypeTree) private def mkSingleTypeApply(value: Tree, tpe: Type, what: Symbol, wrapInApply: Boolean) = { - val tapp = mkAttributedTypeApply(value, what, tpe.normalize :: Nil) + val tapp = mkAttributedTypeApply(value, what, tpe.dealias :: Nil) if (wrapInApply) Apply(tapp, Nil) else tapp } private def typeTestSymbol(any: Boolean) = if (any) Any_isInstanceOf else Object_isInstanceOf diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 1bb3fd300b..0125722ca2 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2456,7 +2456,7 @@ trait Types extends api.Types { self: SymbolTable => case RepeatedParamClass => args.head + "*" case ByNameParamClass => "=> " + args.head case _ => - def targs = normalize.typeArgs + def targs = dealiasWiden.typeArgs if (isFunctionType(this)) { // Aesthetics: printing Function1 as T => R rather than (T) => R diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index 5581c78a3a..abf380ac44 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -16,7 +16,7 @@ trait Erasure { /** Is `tp` an unbounded generic type (i.e. which could be instantiated * with primitive as well as class types)?. */ - private def genericCore(tp: Type): Type = tp.normalize match { + private def genericCore(tp: Type): Type = tp.dealiasWiden match { /* A Java Array is erased to Array[Object] (T can only be a reference type), where as a Scala Array[T] is * erased to Object. However, there is only symbol for the Array class. So to make the distinction between * a Java and a Scala array, we check if the owner of T comes from a Java class. @@ -36,7 +36,7 @@ trait Erasure { * then Some((N, T)) where N is the number of Array constructors enclosing `T`, * otherwise None. Existentials on any level are ignored. 
*/ - def unapply(tp: Type): Option[(Int, Type)] = tp.normalize match { + def unapply(tp: Type): Option[(Int, Type)] = tp.dealiasWiden match { case TypeRef(_, ArrayClass, List(arg)) => genericCore(arg) match { case NoType => @@ -101,7 +101,7 @@ trait Erasure { def valueClassIsParametric(clazz: Symbol): Boolean = { assert(!phase.erasedTypes) clazz.typeParams contains - clazz.derivedValueClassUnbox.tpe.resultType.normalize.typeSymbol + clazz.derivedValueClassUnbox.tpe.resultType.typeSymbol } abstract class ErasureMap extends TypeMap { diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala index 6dc6a0f7b8..32d3171b26 100644 --- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala +++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala @@ -10,6 +10,14 @@ trait UnCurry { import global._ import definitions._ + /** Note: changing tp.normalize to tp.dealias in this method leads to a single + * test failure: run/t5688.scala, where instead of the expected output + * Vector(ta, tb, tab) + * we instead get + * Vector(tab, tb, tab) + * I think that difference is not the product of sentience but of randomness. + * Let us figure out why it is and then change this method. + */ private def expandAlias(tp: Type): Type = if (!tp.isHigherKinded) tp.normalize else tp val uncurry: TypeMap = new TypeMap { @@ -60,4 +68,4 @@ trait UnCurry { */ def transformInfo(sym: Symbol, tp: Type): Type = if (sym.isType) uncurryType(tp) else uncurry(tp) -} \ No newline at end of file +} diff --git a/test/files/neg/t2641.check b/test/files/neg/t2641.check index 909f4f0cf3..a0a960f0ea 100644 --- a/test/files/neg/t2641.check +++ b/test/files/neg/t2641.check @@ -1,15 +1,7 @@ t2641.scala:18: error: wrong number of type arguments for ManagedSeq, should be 2 with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]] ^ -t2641.scala:16: error: illegal inheritance; - self-type ManagedSeq does not conform to ManagedSeqStrict[A]'s selftype ManagedSeqStrict[A] - extends ManagedSeqStrict[A] - ^ -t2641.scala:17: error: illegal inheritance; - self-type ManagedSeq does not conform to scala.collection.TraversableView[A,ManagedSeqStrict[A]]'s selftype scala.collection.TraversableView[A,ManagedSeqStrict[A]] - with TraversableView[A, ManagedSeqStrict[A]] - ^ -t2641.scala:27: error: value managedIterator is not a member of ManagedSeq +t2641.scala:27: error: value managedIterator is not a member of ManagedSeq[A,Coll] override def managedIterator = self.managedIterator slice (from, until) ^ -four errors found +two errors found -- cgit v1.2.3 From c8293b721b9dbd48ce27eb849abef069ae01d26f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 30 Jan 2013 13:22:10 -0800 Subject: Expanded the comment on Type#normalize. As suggested by reviewer. --- src/reflect/scala/reflect/internal/Types.scala | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 0125722ca2..0dfa3abf29 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -564,6 +564,26 @@ trait Types extends api.Types { self: SymbolTable => * Example: (in the below, is the type constructor of List) * TypeRef(pre, , List()) is replaced by * PolyType(X, TypeRef(pre, , List(X))) + * + * Discussion: normalize is NOT usually what you want to be calling. 
+ * The (very real) danger with normalize is that it will force types + * which would not otherwise have been forced, leading to mysterious + * behavioral differences, cycles, and other elements of mysteries. + * Under most conditions the method you should be calling is `dealiasWiden` + * (see that method for more info.) + * + * Here are a few of the side-effect-trail-leaving methods called + * by various implementations of normalize: + * + * - sym.info + * - tpe.etaExpand + * - tpe.betaReduce + * - tpe.memberType + * - sym.nextOverriddenSymbol + * - constraint.inst + * + * If you've been around the compiler a while that list must fill + * your heart with fear. */ def normalize = this // @MAT @@ -573,6 +593,8 @@ trait Types extends api.Types { self: SymbolTable => /** Repeatedly apply widen and dealias until they have no effect. * This compensates for the fact that type aliases can hide beneath * singleton types and singleton types can hide inside type aliases. + * !!! - and yet it is still inadequate, because aliases and singletons + * might lurk in the upper bounds of an abstract type. See SI-7051. */ def dealiasWiden: Type = ( if (this ne widen) widen.dealiasWiden -- cgit v1.2.3 From fd6fe4e428948cbbc3feb5ee186f784e0205d697 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 30 Jan 2013 15:51:45 -0800 Subject: Fix access to empty package from the repl. It seems that way back in f5c336d566 three months ago I booched the repl's ability to get at the empty package. I've noticed this a hundred times but strangely it has not been reported by anyone else. Perhaps you are all religious package users. In any case, it is back. --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 5 +++-- test/files/run/repl-empty-package.check | 7 +++++++ test/files/run/repl-empty-package/s_1.scala | 3 +++ test/files/run/repl-empty-package/s_2.scala | 5 +++++ 4 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 test/files/run/repl-empty-package.check create mode 100644 test/files/run/repl-empty-package/s_1.scala create mode 100644 test/files/run/repl-empty-package/s_2.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 994339a028..c5484ca44f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3981,7 +3981,8 @@ trait Typers extends Adaptations with Tags { // Lookup in the given qualifier. Used in last-ditch efforts by typedIdent and typedSelect. 
def lookupInRoot(name: Name): Symbol = lookupInOwner(rootMirror.RootClass, name) - def lookupInEmpty(name: Name): Symbol = lookupInOwner(rootMirror.EmptyPackageClass, name) + def lookupInEmpty(name: Name): Symbol = rootMirror.EmptyPackageClass.info member name + def lookupInQualifier(qual: Tree, name: Name): Symbol = ( if (name == nme.ERROR || qual.tpe.widen.isErroneous) NoSymbol @@ -4775,7 +4776,7 @@ trait Typers extends Adaptations with Tags { * (2) Change imported symbols to selections */ def typedIdent(tree: Tree, name: Name): Tree = { - // setting to enable unqualified idents in empty package + // setting to enable unqualified idents in empty package (used by the repl) def inEmptyPackage = if (settings.exposeEmptyPackage.value) lookupInEmpty(name) else NoSymbol def issue(err: AbsTypeError) = { diff --git a/test/files/run/repl-empty-package.check b/test/files/run/repl-empty-package.check new file mode 100644 index 0000000000..ecf79c2c6d --- /dev/null +++ b/test/files/run/repl-empty-package.check @@ -0,0 +1,7 @@ +Type in expressions to have them evaluated. +Type :help for more information. + +scala> println(Bippy.bippy) +bippy! + +scala> diff --git a/test/files/run/repl-empty-package/s_1.scala b/test/files/run/repl-empty-package/s_1.scala new file mode 100644 index 0000000000..b59d16b338 --- /dev/null +++ b/test/files/run/repl-empty-package/s_1.scala @@ -0,0 +1,3 @@ +object Bippy { + def bippy = "bippy!" +} diff --git a/test/files/run/repl-empty-package/s_2.scala b/test/files/run/repl-empty-package/s_2.scala new file mode 100644 index 0000000000..512e6dd382 --- /dev/null +++ b/test/files/run/repl-empty-package/s_2.scala @@ -0,0 +1,5 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = "println(Bippy.bippy)" +} -- cgit v1.2.3 From 8bd03e063c412cefcd52f88fef68283290893708 Mon Sep 17 00:00:00 2001 From: Michael Thorpe Date: Thu, 31 Jan 2013 21:06:02 +0000 Subject: SI-5151 - Add firstKey and lastKey to LongMap. --- src/library/scala/collection/immutable/LongMap.scala | 16 ++++++++++++++++ test/files/run/longmap.check | 0 test/files/run/longmap.scala | 8 ++++++++ 3 files changed, 24 insertions(+) create mode 100644 test/files/run/longmap.check create mode 100644 test/files/run/longmap.scala (limited to 'src') diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index fab1b7f00b..60300c2a9e 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -12,6 +12,7 @@ package immutable import scala.collection.generic.{ CanBuildFrom, BitOperations } import scala.collection.mutable.{ Builder, MapBuilder } +import scala.annotation.tailrec /** Utility class for long maps. 
* @author David MacIver @@ -416,5 +417,20 @@ extends AbstractMap[Long, T] def ++[S >: T](that: LongMap[S]) = this.unionWith[S](that, (key, x, y) => y) + + @tailrec + final def firstKey: Long = this match { + case LongMap.Bin(_, _, l, r) => l.firstKey + case LongMap.Tip(k, v) => k + case LongMap.Nil => sys.error("Empty set") + } + + @tailrec + final def lastKey: Long = this match { + case LongMap.Bin(_, _, l, r) => r.lastKey + case LongMap.Tip(k , v) => k + case LongMap.Nil => sys.error("Empty set") + } + } diff --git a/test/files/run/longmap.check b/test/files/run/longmap.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/files/run/longmap.scala b/test/files/run/longmap.scala new file mode 100644 index 0000000000..1f18eebd31 --- /dev/null +++ b/test/files/run/longmap.scala @@ -0,0 +1,8 @@ +object Test extends App{ + import scala.collection.immutable.LongMap; + + val it = LongMap(8L -> 2, 11L -> 3, 1L -> 2, 7L -> 13); + + assert(it.firstKey == 1L); + assert(it.lastKey == 11L); +} -- cgit v1.2.3 From e3d9a08e08088cf6631eb0a7dbabf8360c4618a0 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 1 Feb 2013 18:19:11 -0800 Subject: Cleaning up after brutal merge of 2.10.x into master. That's the best I can do. The tests pass, if someone wants a cleaner merge it is all theirs. For reference, the merge leading up to this commit was achieved as follows. The lines with -s ours are where the merge commit being merged was completely made up of backports from master. git merge -s ours eff78b852e c1dd8bbaa4 && \ git merge 7026376dcc ccd7abe897 && \ git merge -s ours 62681e191a && \ git merge 74b3e9aefe 7d80e08469 d24f341f08 c4f49759fe \ 27d73a2352 ba72ee7c6f 42c4cc7a1e d672102fd8 644eb7078a && \ git merge -s ours 08596af059 b573c287d2 && \ git merge d1b6d8b20f && \ git merge -s ours 110b54a575 36f78dd606 309ff57ba6 && \ git merge 06295f9682 d3886086c3 adf51eef76 b403234a27 && \ git merge -s ours 09d1433064 && \ git merge 9ddcc1b90e cabf626bbc && \ git merge -s ours 283924bfa5 --- .../scala/tools/nsc/transform/ExplicitOuter.scala | 7 - .../tools/nsc/transform/ExtensionMethods.scala | 159 +++++++++++---------- .../scala/tools/nsc/typechecker/Macros.scala | 2 +- test/files/neg/t6963a.check | 4 +- test/files/neg/t6963b.check | 13 -- test/files/neg/t6963b.flags | 1 - test/files/neg/t6963b.scala | 20 --- 7 files changed, 90 insertions(+), 116 deletions(-) delete mode 100644 test/files/neg/t6963b.check delete mode 100644 test/files/neg/t6963b.flags delete mode 100644 test/files/neg/t6963b.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 0e991a5d72..45ec73ab99 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -462,13 +462,6 @@ abstract class ExplicitOuter extends InfoTransform } case _ => - if (settings.Xmigration.value < ScalaVersion.twoDotEight) tree match { - case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf => - if (isArraySeqTest(qual.tpe, args.head.tpe)) - unit.warning(tree.pos, "An Array will no longer match as Seq[_].") - case _ => () - } - val x = super.transform(tree) if (x.tpe eq null) x else x setType transformInfo(currentOwner, x.tpe) diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index f929b1c48e..672d9d232a 100644 --- 
a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -83,7 +83,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { | | ${candidates.map(c => c.name+":"+normalize(c.tpe, imeth.owner)).mkString("\n")} | - | Eligible Names: ${extensionNames(imeth).mkString(",")}"""") + | Eligible Names: ${extensionNames(imeth).mkString(",")}" """) matching.head } @@ -98,7 +98,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { object ExtensionMethodType { def unapply(tp: Type) = tp match { case MethodType(thiz :: rest, restpe) if thiz.name == nme.SELF => - Some( if (rest.isEmpty) restpe else MethodType(rest, restpe) ) + Some((thiz, if (rest.isEmpty) restpe else MethodType(rest, restpe) )) case _ => None } @@ -107,36 +107,22 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { /** This method removes the `$this` argument from the parameter list a method. * * A method may be a `PolyType`, in which case we tear out the `$this` and the class - * type params from its nested `MethodType`. - * It may be a `MethodType`, either with a curried parameter list in which the first argument - * is a `$this` - we just return the rest of the list. - * This means that the corresponding symbol was generated during `extmethods`. - * - * It may also be a `MethodType` in which the `$this` does not appear in a curried parameter list. - * The curried lists disappear during `uncurry`, and the methods may be duplicated afterwards, - * for instance, during `specialize`. - * In this case, the first argument is `$this` and we just get rid of it. + * type params from its nested `MethodType`. Or it may be a MethodType, as + * described at the ExtensionMethodType extractor. */ private def normalize(stpe: Type, clazz: Symbol): Type = stpe match { case PolyType(tparams, restpe) => - // Split the type parameters of the extension method into two groups, - // corresponding the to class and method type parameters. - val numClassParams = clazz.typeParams.length - val methTParams = tparams dropRight numClassParams - val classTParams = tparams takeRight numClassParams - - GenPolyType(methTParams, - normalize(restpe.substSym(classTParams, clazz.typeParams), clazz)) - case MethodType(List(thiz), restpe) if thiz.name == nme.SELF => - restpe.substituteTypes(thiz :: Nil, clazz.thisType :: Nil) - case MethodType(thiz :: params, restpe) => - MethodType(params, restpe) + // method type parameters, class type parameters + val (mtparams, ctparams) = tparams splitAt (tparams.length - clazz.typeParams.length) + GenPolyType(mtparams, + normalize(restpe.substSym(ctparams, clazz.typeParams), clazz)) + case ExtensionMethodType(thiz, etpe) => + etpe.substituteTypes(thiz :: Nil, clazz.thisType :: Nil) case _ => stpe } class Extender(unit: CompilationUnit) extends TypingTransformer(unit) { - private val extensionDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]() def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit = @@ -164,28 +150,36 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { * some higher level facilities. 
*/ def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = { - // No variance for method type parameters - var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) - val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK)) + val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth + // Start with the class type parameters - clones will be method type parameters + // so must drop their variance. + val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) + + val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*) val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType - def transform(clonedType: Type): Type = clonedType match { - case MethodType(params, restpe) => - // I assume it was a bug that this was dropping params... [Martin]: No, it wasn't; it's curried. - MethodType(List(thisParam), clonedType) - case NullaryMethodType(restpe) => - MethodType(List(thisParam), restpe) - } - val GenPolyType(tparams, restpe) = origInfo cloneInfo extensionMeth - val selfParamSingletonType = singleType(currentOwner.companionModule.thisType, thisParam) - GenPolyType( - tparams ::: newTypeParams, - transform(restpe) substThisAndSym (clazz, selfParamSingletonType, clazz.typeParams, newTypeParams) - ) - } + val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult)) + val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam) - private def allParams(tpe: Type): List[Symbol] = tpe match { - case MethodType(params, res) => params ::: allParams(res) - case _ => List() + def fixres(tp: Type) = tp substThisAndSym (clazz, selfParamType, clazz.typeParams, tparamsFromClass) + def fixtparam(tp: Type) = tp substSym (clazz.typeParams, tparamsFromClass) + + // We can't substitute symbols on the entire polytype because we + // need to modify the bounds of the cloned type parameters, but we + // don't want to substitute for the cloned type parameters themselves. + val tparams = tparamsFromMethod ::: tparamsFromClass + GenPolyType(tparams map (_ modifyInfo fixtparam), fixres(resultType)) + + // For reference, calling fix on the GenPolyType plays out like this: + // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966] + // do not conform to method extension$baz#16148's type parameter bounds + // + // And the difference is visible here. See how B is bounded from below by A#16149 + // in both cases, but in the failing case, the other type parameter has turned into + // a different A. (What is that A? It is a clone of the original A created in + // SubstMap during the call to substSym, but I am not clear on all the particulars.) 
+ // + // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154] + // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151] } override def transform(tree: Tree): Tree = { @@ -202,37 +196,56 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { super.transform(tree) } else tree case DefDef(_, _, tparams, vparamss, _, rhs) if tree.symbol.isMethodWithExtension => - val companion = currentOwner.companionModule - val origMeth = tree.symbol - val extensionName = extensionNames(origMeth).head - val extensionMeth = companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL) - .setAnnotations(origMeth.annotations) - companion.info.decls.enter(extensionMeth) - val newInfo = extensionMethInfo(extensionMeth, origMeth.info, currentOwner) + val origMeth = tree.symbol + val origThis = currentOwner + val origTpeParams = tparams.map(_.symbol) ::: origThis.typeParams // method type params ++ class type params + val origParams = vparamss.flatten map (_.symbol) + val companion = origThis.companionModule + + def makeExtensionMethodSymbol = { + val extensionName = extensionNames(origMeth).head.toTermName + val extensionMeth = ( + companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL) + setAnnotations origMeth.annotations + ) + companion.info.decls.enter(extensionMeth) + } + + val extensionMeth = makeExtensionMethodSymbol + val newInfo = extensionMethInfo(extensionMeth, origMeth.info, origThis) extensionMeth setInfo newInfo - log("Value class %s spawns extension method.\n Old: %s\n New: %s".format( - currentOwner, - origMeth.defString, - extensionMeth.defString)) // extensionMeth.defStringSeenAs(origInfo - - def thisParamRef = gen.mkAttributedStableRef(extensionMeth.info.params.head setPos extensionMeth.pos) - val GenPolyType(extensionTpeParams, extensionMono) = extensionMeth.info - val origTpeParams = (tparams map (_.symbol)) ::: currentOwner.typeParams - val extensionBody = rhs + + log(s"Value class $origThis spawns extension method.\n Old: ${origMeth.defString}\n New: ${extensionMeth.defString}") + + val GenPolyType(extensionTpeParams, MethodType(thiz :: Nil, extensionMono)) = newInfo + val extensionParams = allParameters(extensionMono) + val extensionThis = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos) + + val extensionBody = ( + rhs .substituteSymbols(origTpeParams, extensionTpeParams) - .substituteSymbols(vparamss.flatten map (_.symbol), allParams(extensionMono).tail) - .substituteThis(currentOwner, thisParamRef) - .changeOwner((origMeth, extensionMeth)) - extensionDefs(companion) += atPos(tree.pos) { DefDef(extensionMeth, extensionBody) } - val extensionCallPrefix = Apply( - gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpeHK)), - List(This(currentOwner))) - val extensionCall = atOwner(origMeth) { - localTyper.typedPos(rhs.pos) { - gen.mkForwarder(extensionCallPrefix, mmap(vparamss)(_.symbol)) - } - } - deriveDefDef(tree)(_ => extensionCall) + .substituteSymbols(origParams, extensionParams) + .substituteThis(origThis, extensionThis) + .changeOwner(origMeth -> extensionMeth) + ) + + // Record the extension method ( FIXME: because... ? 
) + extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, extensionBody)) + + // These three lines are assembling Foo.bar$extension[T1, T2, ...]($this) + // which leaves the actual argument application for extensionCall. + val sel = Select(gen.mkAttributedRef(companion), extensionMeth) + val targs = origTpeParams map (_.tpeHK) + val callPrefix = gen.mkMethodCall(sel, targs, This(origThis) :: Nil) + + // Apply all the argument lists. + deriveDefDef(tree)(_ => + atOwner(origMeth)( + localTyper.typedPos(rhs.pos)( + gen.mkForwarder(callPrefix, mmap(vparamss)(_.symbol)) + ) + ) + ) case _ => super.transform(tree) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 7d6d47b410..fb8d6b934f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -450,7 +450,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam) if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam) val aparamtpe = aparam.tpe.dealias match { - case RefinedType(List(tpe), Scope(sym)) if tpe == MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe + case RefinedType(List(tpe), Scope(sym)) if tpe =:= MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe case tpe => tpe } checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam) diff --git a/test/files/neg/t6963a.check b/test/files/neg/t6963a.check index 159896fd10..5858e7740a 100644 --- a/test/files/neg/t6963a.check +++ b/test/files/neg/t6963a.check @@ -1,5 +1,7 @@ -t6963a.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0: +t6963a.scala:4: warning: method scanRight in trait TraversableLike has changed semantics in version 2.9.0: The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse. List(1,2,3,4,5).scanRight(0)(_+_) ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found one error found diff --git a/test/files/neg/t6963b.check b/test/files/neg/t6963b.check deleted file mode 100644 index 7e205a41d0..0000000000 --- a/test/files/neg/t6963b.check +++ /dev/null @@ -1,13 +0,0 @@ -t6963b.scala:2: error: An Array will no longer match as Seq[_]. - def f1(x: Any) = x.isInstanceOf[Seq[_]] - ^ -t6963b.scala:4: error: An Array will no longer match as Seq[_]. - case _: Seq[_] => true - ^ -t6963b.scala:16: error: An Array will no longer match as Seq[_]. - case (Some(_: Seq[_]), Nil, _) => 1 - ^ -t6963b.scala:17: error: An Array will no longer match as Seq[_]. 
- case (None, List(_: List[_], _), _) => 2 - ^ -four errors found diff --git a/test/files/neg/t6963b.flags b/test/files/neg/t6963b.flags deleted file mode 100644 index 83caa2b147..0000000000 --- a/test/files/neg/t6963b.flags +++ /dev/null @@ -1 +0,0 @@ --Xmigration:2.7 -Xfatal-warnings \ No newline at end of file diff --git a/test/files/neg/t6963b.scala b/test/files/neg/t6963b.scala deleted file mode 100644 index 3cfa8f0dca..0000000000 --- a/test/files/neg/t6963b.scala +++ /dev/null @@ -1,20 +0,0 @@ -object Test { - def f1(x: Any) = x.isInstanceOf[Seq[_]] - def f2(x: Any) = x match { - case _: Seq[_] => true - case _ => false - } - - def f3(x: Any) = x match { - case _: Array[_] => true - case _ => false - } - - def f4(x: Any) = x.isInstanceOf[Traversable[_]] - - def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match { - case (Some(_: Seq[_]), Nil, _) => 1 - case (None, List(_: List[_], _), _) => 2 - case _ => 3 - } -} -- cgit v1.2.3 From 71fb0b83a792ce4e35712c0d23071aa6ba4dc390 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 2 Feb 2013 03:01:25 -0800 Subject: Removed -Ymacro-no-expand. Don't know where/how to fix it and it says it's a temporary option. --- .../scala/tools/nsc/settings/ScalaSettings.scala | 1 - test/scaladoc/run/SI-6812.scala | 24 ---------------------- 2 files changed, 25 deletions(-) delete mode 100644 test/scaladoc/run/SI-6812.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 14f8f6e9bb..9fe3016c02 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -163,7 +163,6 @@ trait ScalaSettings extends AbsScalaSettings = ChoiceSetting ("-Ystruct-dispatch", "policy", "structural method dispatch policy", List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache") val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.") val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.") - val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. 
Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.") val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.") diff --git a/test/scaladoc/run/SI-6812.scala b/test/scaladoc/run/SI-6812.scala deleted file mode 100644 index fbd9588ede..0000000000 --- a/test/scaladoc/run/SI-6812.scala +++ /dev/null @@ -1,24 +0,0 @@ -import scala.tools.nsc.doc.model._ -import scala.tools.partest.ScaladocModelTest -import language._ - -object Test extends ScaladocModelTest { - - override def code = """ - import scala.reflect.macros.Context - import language.experimental.macros - - object Macros { - def impl(c: Context) = c.literalUnit - def foo = macro impl - } - - class C { - def bar = Macros.foo - } - """ - - def scaladocSettings = "" - override def extraSettings = super.extraSettings + " -Ymacro-no-expand" - def testModel(root: Package) = () -} -- cgit v1.2.3 From 108a1f79d25ce05b84beaca2a80a2dabade3eee2 Mon Sep 17 00:00:00 2001 From: James Iry Date: Tue, 5 Feb 2013 07:56:01 -0800 Subject: SI-6773 Changes IndexSeqFactory to be "since 2.11" The addition of IndexSeqFactory to 2.10.x branch created a binary incompatibility so it was removed in that branch before being released. This commit fixes the since annotation to 2.11 on IndexSeqFactory in the master branch. --- src/library/scala/collection/generic/IndexedSeqFactory.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/library/scala/collection/generic/IndexedSeqFactory.scala b/src/library/scala/collection/generic/IndexedSeqFactory.scala index 451e5e0f46..e86d163b3c 100644 --- a/src/library/scala/collection/generic/IndexedSeqFactory.scala +++ b/src/library/scala/collection/generic/IndexedSeqFactory.scala @@ -13,7 +13,7 @@ import language.higherKinds /** A template for companion objects of IndexedSeq and subclasses thereof. * - * @since 2.10 + * @since 2.11 */ abstract class IndexedSeqFactory[CC[X] <: IndexedSeq[X] with GenericTraversableTemplate[X, CC]] extends SeqFactory[CC] { override def ReusableCBF: GenericCanBuildFrom[Nothing] = -- cgit v1.2.3 From 8eadc6da3a0c1016c0293dca65dd81464f66a688 Mon Sep 17 00:00:00 2001 From: Volkan Yazıcı Date: Wed, 6 Feb 2013 18:52:01 +0200 Subject: Update src/library/scala/sys/process/ProcessBuilder.scala Fix typesetting of unordered list items in the docs. --- src/library/scala/sys/process/ProcessBuilder.scala | 40 +++++++++++----------- 1 file changed, 20 insertions(+), 20 deletions(-) (limited to 'src') diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala index 30fd4d83ff..3a86f6dc3c 100644 --- a/src/library/scala/sys/process/ProcessBuilder.scala +++ b/src/library/scala/sys/process/ProcessBuilder.scala @@ -46,14 +46,14 @@ import ProcessBuilder._ * * Two existing `ProcessBuilder` can be combined in the following ways: * - * * They can be executed in parallel, with the output of the first being fed - * as input to the second, like Unix pipes. This is achieved with the `#|` - * method. - * * They can be executed in sequence, with the second starting as soon as - * the first ends. This is done by the `###` method. 
- * * The execution of the second one can be conditioned by the return code - * (exit status) of the first, either only when it's zero, or only when it's - * not zero. The methods `#&&` and `#||` accomplish these tasks. + * - They can be executed in parallel, with the output of the first being fed + * as input to the second, like Unix pipes. This is achieved with the `#|` + * method. + * - They can be executed in sequence, with the second starting as soon as + * the first ends. This is done by the `###` method. + * - The execution of the second one can be conditioned by the return code + * (exit status) of the first, either only when it's zero, or only when it's + * not zero. The methods `#&&` and `#||` accomplish these tasks. * * ==Redirecting Input/Output== * @@ -74,18 +74,18 @@ import ProcessBuilder._ * overloads and variations to enable further control over the I/O. These * methods are: * - * * `run`: the most general method, it returns a - * [[scala.sys.process.Process]] immediately, and the external command - * executes concurrently. - * * `!`: blocks until all external commands exit, and returns the exit code - * of the last one in the chain of execution. - * * `!!`: blocks until all external commands exit, and returns a `String` - * with the output generated. - * * `lines`: returns immediately like `run`, and the output being generared - * is provided through a `Stream[String]`. Getting the next element of that - * `Stream` may block until it becomes available. This method will throw an - * exception if the return code is different than zero -- if this is not - * desired, use the `lines_!` method. + * - `run`: the most general method, it returns a + * [[scala.sys.process.Process]] immediately, and the external command + * executes concurrently. + * - `!`: blocks until all external commands exit, and returns the exit code + * of the last one in the chain of execution. + * - `!!`: blocks until all external commands exit, and returns a `String` + * with the output generated. + * - `lines`: returns immediately like `run`, and the output being generared + * is provided through a `Stream[String]`. Getting the next element of that + * `Stream` may block until it becomes available. This method will throw an + * exception if the return code is different than zero -- if this is not + * desired, use the `lines_!` method. * * ==Handling Input and Output== * -- cgit v1.2.3 From 37824d3c62a9ebbc1f462fa5467dbf9a2761c803 Mon Sep 17 00:00:00 2001 From: Volkan Yazıcı Date: Wed, 6 Feb 2013 19:08:18 +0200 Subject: Update src/library/scala/sys/process/package.scala Fix broken wildcard expansion in the `sys.process` docs. --- src/library/scala/sys/process/package.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala index ed436febc0..902543665f 100644 --- a/src/library/scala/sys/process/package.scala +++ b/src/library/scala/sys/process/package.scala @@ -25,7 +25,7 @@ package scala.sys { * * {{{ * import scala.sys.process._ - * "ls" #| "grep .scala" #&& "scalac *.scala" #|| "echo nothing found" lines + * "ls" #| "grep .scala" #&& Seq("sh", "-c", "scalac *.scala") #|| "echo nothing found" lines * }}} * * We describe below the general concepts and architecture of the package, -- cgit v1.2.3 From 8a2cebea5654b5bccbfcf840e935770cf12b5f30 Mon Sep 17 00:00:00 2001 From: Vojin Jovanovic Date: Tue, 15 Jan 2013 17:48:25 +0100 Subject: SI-6807 Deprecating the Actors library. 
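[Illustrative sketch, not part of this patch: the deprecation messages added by this commit point users at the akka.actor package. Below is a minimal, hypothetical migration of a scala.actors-style actor, assuming an Akka 2.x dependency (akka.actor.ActorSystem / Props) is on the classpath; the names Greeter, Main and "demo" are invented for the example.]

// Before (scala.actors, now deprecated):
//   import scala.actors.Actor._
//   val a = actor { loop { react { case msg => println(msg) } } }
//   a ! "hello"

// After (Akka 2.x):
import akka.actor.{ Actor, ActorSystem, Props }

class Greeter extends Actor {
  // receive is a PartialFunction[Any, Unit], much like the body of react
  def receive = {
    case msg => println(msg)
  }
}

object Main extends App {
  val system  = ActorSystem("demo")                        // replaces the implicit scala.actors scheduler
  val greeter = system.actorOf(Props[Greeter], "greeter")  // actors are created through the system, not instantiated directly
  greeter ! "hello"                                        // fire-and-forget send works as before
  system.shutdown()                                        // Akka 2.x shutdown (later versions use terminate())
}
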
All public classes, traits and objects marked as deprecated. Added deprecation note on the package object. Embedded external libraries (ThreadPool etc.) are not deprecated as they are intended for internal use only. Review by: @phaller --- src/actors/scala/actors/AbstractActor.scala | 1 + src/actors/scala/actors/Actor.scala | 2 ++ src/actors/scala/actors/ActorRef.scala | 3 ++- src/actors/scala/actors/CanReply.scala | 1 + src/actors/scala/actors/Channel.scala | 2 ++ src/actors/scala/actors/DaemonActor.scala | 1 + src/actors/scala/actors/Debug.scala | 1 + src/actors/scala/actors/Future.scala | 2 ++ src/actors/scala/actors/IScheduler.scala | 1 + src/actors/scala/actors/InputChannel.scala | 1 + src/actors/scala/actors/InternalActor.scala | 2 ++ src/actors/scala/actors/InternalReplyReactor.scala | 1 + src/actors/scala/actors/OutputChannel.scala | 1 + src/actors/scala/actors/Reactor.scala | 1 + src/actors/scala/actors/ReplyReactor.scala | 2 +- src/actors/scala/actors/Scheduler.scala | 1 + src/actors/scala/actors/SchedulerAdapter.scala | 1 + src/actors/scala/actors/UncaughtException.scala | 1 + src/actors/scala/actors/package.scala | 1 + src/actors/scala/actors/remote/JavaSerializer.scala | 1 + src/actors/scala/actors/remote/RemoteActor.scala | 2 ++ src/actors/scala/actors/remote/Serializer.scala | 1 + src/actors/scala/actors/remote/Service.scala | 1 + src/actors/scala/actors/remote/TcpService.scala | 2 ++ src/actors/scala/actors/scheduler/ActorGC.scala | 1 + src/actors/scala/actors/scheduler/DaemonScheduler.scala | 1 + src/actors/scala/actors/scheduler/ExecutorScheduler.scala | 2 ++ src/actors/scala/actors/scheduler/ForkJoinScheduler.scala | 1 + src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala | 1 + src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala | 1 + 30 files changed, 38 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala index 5a4e0d9804..3c6299aab4 100644 --- a/src/actors/scala/actors/AbstractActor.scala +++ b/src/actors/scala/actors/AbstractActor.scala @@ -15,6 +15,7 @@ import scala.language.higherKinds * * @define actor actor */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait AbstractActor extends OutputChannel[Any] with CanReply[Any, Any] { type Future[+R] <: scala.actors.Future[R] diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala index 61124b3e85..75160fa18f 100644 --- a/src/actors/scala/actors/Actor.scala +++ b/src/actors/scala/actors/Actor.scala @@ -18,6 +18,7 @@ import scala.language.implicitConversions * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") object Actor extends Combinators { /** State of an actor. @@ -398,6 +399,7 @@ object Actor extends Combinators { * @define channel actor's mailbox */ @SerialVersionUID(-781154067877019505L) +@deprecated("Use the akka.actor package instead. 
For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait Actor extends InternalActor with ReplyReactor { override def start(): Actor = synchronized { diff --git a/src/actors/scala/actors/ActorRef.scala b/src/actors/scala/actors/ActorRef.scala index 5c1790669b..0da167aede 100644 --- a/src/actors/scala/actors/ActorRef.scala +++ b/src/actors/scala/actors/ActorRef.scala @@ -45,8 +45,9 @@ trait ActorRef { * This is what is used to complete a Future that is returned from an ask/? call, * when it times out. */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") class AskTimeoutException(message: String, cause: Throwable) extends TimeoutException { def this(message: String) = this(message, null: Throwable) } - +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") object PoisonPill diff --git a/src/actors/scala/actors/CanReply.scala b/src/actors/scala/actors/CanReply.scala index 3d264777a0..3f2c53f423 100644 --- a/src/actors/scala/actors/CanReply.scala +++ b/src/actors/scala/actors/CanReply.scala @@ -17,6 +17,7 @@ import scala.language.higherKinds * * @define actor `CanReply` */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait CanReply[-T, +R] { type Future[+P] <: () => P diff --git a/src/actors/scala/actors/Channel.scala b/src/actors/scala/actors/Channel.scala index 9669ffbc17..ddf7b329c8 100644 --- a/src/actors/scala/actors/Channel.scala +++ b/src/actors/scala/actors/Channel.scala @@ -23,6 +23,7 @@ import scala.concurrent.SyncVar * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") case class ! [a](ch: Channel[a], msg: a) /** @@ -34,6 +35,7 @@ case class ! [a](ch: Channel[a], msg: a) * @define actor channel * @define channel channel */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") class Channel[Msg](val receiver: InternalActor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] { type Future[+P] = scala.actors.Future[P] diff --git a/src/actors/scala/actors/DaemonActor.scala b/src/actors/scala/actors/DaemonActor.scala index ffe8b75c27..04a4b4a40c 100644 --- a/src/actors/scala/actors/DaemonActor.scala +++ b/src/actors/scala/actors/DaemonActor.scala @@ -18,6 +18,7 @@ import scheduler.DaemonScheduler * * @author Erik Engbrecht */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait DaemonActor extends Actor { override def scheduler: IScheduler = DaemonScheduler } diff --git a/src/actors/scala/actors/Debug.scala b/src/actors/scala/actors/Debug.scala index cc51dfdbae..31ef53bdbe 100644 --- a/src/actors/scala/actors/Debug.scala +++ b/src/actors/scala/actors/Debug.scala @@ -14,6 +14,7 @@ package scala.actors * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. 
For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") object Debug extends Logger("") {} private[actors] class Logger(tag: String) { diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala index 1abd7b160e..9d123cb2d5 100644 --- a/src/actors/scala/actors/Future.scala +++ b/src/actors/scala/actors/Future.scala @@ -21,6 +21,7 @@ import scala.concurrent.SyncVar * * @author Philipp Haller */ +@deprecated("Use the scala.concurrent.Future instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") abstract class Future[+T] extends Responder[T] with Function0[T] { @volatile @@ -107,6 +108,7 @@ private class FutureActor[T](fun: SyncVar[T] => Unit, channel: Channel[T]) exten * * @author Philipp Haller */ +@deprecated("Use the object scala.concurrent.Future instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") object Futures { /** Arranges for the asynchronous execution of `body`, diff --git a/src/actors/scala/actors/IScheduler.scala b/src/actors/scala/actors/IScheduler.scala index 35c2d32590..9d61d48561 100644 --- a/src/actors/scala/actors/IScheduler.scala +++ b/src/actors/scala/actors/IScheduler.scala @@ -17,6 +17,7 @@ package scala.actors * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait IScheduler { /** Submits a closure for execution. diff --git a/src/actors/scala/actors/InputChannel.scala b/src/actors/scala/actors/InputChannel.scala index 3d7dd7d49b..d2dd6d24df 100644 --- a/src/actors/scala/actors/InputChannel.scala +++ b/src/actors/scala/actors/InputChannel.scala @@ -16,6 +16,7 @@ package scala.actors * * @define channel `InputChannel` */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait InputChannel[+Msg] { /** diff --git a/src/actors/scala/actors/InternalActor.scala b/src/actors/scala/actors/InternalActor.scala index ed9e25c1e6..5045ea56e8 100644 --- a/src/actors/scala/actors/InternalActor.scala +++ b/src/actors/scala/actors/InternalActor.scala @@ -524,6 +524,7 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") case object TIMEOUT /** @@ -534,6 +535,7 @@ case object TIMEOUT * @param from the actor that terminated * @param reason the reason that caused the actor to terminate */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") case class Exit(from: AbstractActor, reason: AnyRef) /** diff --git a/src/actors/scala/actors/InternalReplyReactor.scala b/src/actors/scala/actors/InternalReplyReactor.scala index 38295138d4..c744984fd8 100644 --- a/src/actors/scala/actors/InternalReplyReactor.scala +++ b/src/actors/scala/actors/InternalReplyReactor.scala @@ -12,6 +12,7 @@ import java.util.{TimerTask} * * @define actor `ReplyReactor` */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait InternalReplyReactor extends Reactor[Any] with ReactorCanReply { /* A list of the current senders. 
The head of the list is diff --git a/src/actors/scala/actors/OutputChannel.scala b/src/actors/scala/actors/OutputChannel.scala index fd87f813a0..f0f475e123 100644 --- a/src/actors/scala/actors/OutputChannel.scala +++ b/src/actors/scala/actors/OutputChannel.scala @@ -15,6 +15,7 @@ package scala.actors * * @define actor `OutputChannel` */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait OutputChannel[-Msg] { /** diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala index f025f6bc29..aa985b3a17 100644 --- a/src/actors/scala/actors/Reactor.scala +++ b/src/actors/scala/actors/Reactor.scala @@ -52,6 +52,7 @@ private[actors] object Reactor { * * @define actor reactor */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators { /* The $actor's mailbox. */ diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala index a2051d4354..01e6da000f 100644 --- a/src/actors/scala/actors/ReplyReactor.scala +++ b/src/actors/scala/actors/ReplyReactor.scala @@ -7,7 +7,7 @@ \* */ package scala.actors -@deprecated("Scala Actors are being removed from the standard library. Please refer to the migration guide.", "2.10") +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait ReplyReactor extends InternalReplyReactor { protected[actors] def sender: OutputChannel[Any] = super.internalSender } diff --git a/src/actors/scala/actors/Scheduler.scala b/src/actors/scala/actors/Scheduler.scala index dd6c110ed3..5b5b4a946d 100644 --- a/src/actors/scala/actors/Scheduler.scala +++ b/src/actors/scala/actors/Scheduler.scala @@ -18,6 +18,7 @@ import scheduler.{DelegatingScheduler, ForkJoinScheduler, ResizableThreadPoolSch * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") object Scheduler extends DelegatingScheduler { Debug.info("initializing "+this+"...") diff --git a/src/actors/scala/actors/SchedulerAdapter.scala b/src/actors/scala/actors/SchedulerAdapter.scala index fb28b3f93a..b8e66dd6cc 100644 --- a/src/actors/scala/actors/SchedulerAdapter.scala +++ b/src/actors/scala/actors/SchedulerAdapter.scala @@ -18,6 +18,7 @@ package scala.actors * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait SchedulerAdapter extends IScheduler { /** Submits a Runnable for execution. diff --git a/src/actors/scala/actors/UncaughtException.scala b/src/actors/scala/actors/UncaughtException.scala index f225987ddc..02b916a3b5 100644 --- a/src/actors/scala/actors/UncaughtException.scala +++ b/src/actors/scala/actors/UncaughtException.scala @@ -20,6 +20,7 @@ package scala.actors * @author Philipp Haller * @author Erik Engbrecht */ +@deprecated("Use the akka.actor package instead. 
For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") case class UncaughtException(actor: InternalActor, message: Option[Any], sender: Option[OutputChannel[Any]], diff --git a/src/actors/scala/actors/package.scala b/src/actors/scala/actors/package.scala index d176487e03..ae960860cf 100644 --- a/src/actors/scala/actors/package.scala +++ b/src/actors/scala/actors/package.scala @@ -14,6 +14,7 @@ package scala * A starting point for using the actors library would be [[scala.actors.Reactor]], * [[scala.actors.ReplyReactor]], or [[scala.actors.Actor]] or their companion objects. * + * @note As of release 2.10.1, replaced by akka.actor package. For migration of existing actors refer to the Actors Migration Guide. */ package object actors { diff --git a/src/actors/scala/actors/remote/JavaSerializer.scala b/src/actors/scala/actors/remote/JavaSerializer.scala index 6e9f4a7c51..7549bbf429 100644 --- a/src/actors/scala/actors/remote/JavaSerializer.scala +++ b/src/actors/scala/actors/remote/JavaSerializer.scala @@ -39,6 +39,7 @@ extends ObjectInputStream(in) { /** * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") class JavaSerializer(serv: Service, cl: ClassLoader) extends Serializer(serv) { def serialize(o: AnyRef): Array[Byte] = { val bos = new ByteArrayOutputStream() diff --git a/src/actors/scala/actors/remote/RemoteActor.scala b/src/actors/scala/actors/remote/RemoteActor.scala index f1644c27ba..799076a01f 100644 --- a/src/actors/scala/actors/remote/RemoteActor.scala +++ b/src/actors/scala/actors/remote/RemoteActor.scala @@ -38,6 +38,7 @@ package remote * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") object RemoteActor { private val kernels = new scala.collection.mutable.HashMap[InternalActor, NetKernel] @@ -127,4 +128,5 @@ object RemoteActor { * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") case class Node(address: String, port: Int) diff --git a/src/actors/scala/actors/remote/Serializer.scala b/src/actors/scala/actors/remote/Serializer.scala index e39b01fe24..7be4aa6583 100644 --- a/src/actors/scala/actors/remote/Serializer.scala +++ b/src/actors/scala/actors/remote/Serializer.scala @@ -16,6 +16,7 @@ import java.lang.ClassNotFoundException import java.io.{DataInputStream, DataOutputStream, EOFException, IOException} +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") abstract class Serializer(val service: Service) { def serialize(o: AnyRef): Array[Byte] def deserialize(a: Array[Byte]): AnyRef diff --git a/src/actors/scala/actors/remote/Service.scala b/src/actors/scala/actors/remote/Service.scala index 4584cc308b..d102df1970 100644 --- a/src/actors/scala/actors/remote/Service.scala +++ b/src/actors/scala/actors/remote/Service.scala @@ -14,6 +14,7 @@ package remote * @version 0.9.10 * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. 
For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait Service { val kernel = new NetKernel(this) val serializer: Serializer diff --git a/src/actors/scala/actors/remote/TcpService.scala b/src/actors/scala/actors/remote/TcpService.scala index bde05fd816..8163ae9fc6 100644 --- a/src/actors/scala/actors/remote/TcpService.scala +++ b/src/actors/scala/actors/remote/TcpService.scala @@ -24,6 +24,7 @@ import scala.util.Random * @version 0.9.9 * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") object TcpService { private val random = new Random private val ports = new mutable.HashMap[Int, TcpService] @@ -67,6 +68,7 @@ object TcpService { * @version 0.9.10 * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") class TcpService(port: Int, cl: ClassLoader) extends Thread with Service { val serializer: JavaSerializer = new JavaSerializer(this, cl) diff --git a/src/actors/scala/actors/scheduler/ActorGC.scala b/src/actors/scala/actors/scheduler/ActorGC.scala index 6d9a9458ba..a27799d132 100644 --- a/src/actors/scala/actors/scheduler/ActorGC.scala +++ b/src/actors/scala/actors/scheduler/ActorGC.scala @@ -23,6 +23,7 @@ import scala.collection.mutable * (e.g. act method finishes, exit explicitly called, an exception is thrown), * the ActorGC is informed via the `terminated` method. */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait ActorGC extends TerminationMonitor { self: IScheduler => diff --git a/src/actors/scala/actors/scheduler/DaemonScheduler.scala b/src/actors/scala/actors/scheduler/DaemonScheduler.scala index a2d6941ec1..b21a1aa3e6 100644 --- a/src/actors/scala/actors/scheduler/DaemonScheduler.scala +++ b/src/actors/scala/actors/scheduler/DaemonScheduler.scala @@ -14,6 +14,7 @@ package scheduler * * @author Erik Engbrecht */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") object DaemonScheduler extends DelegatingScheduler { protected def makeNewScheduler(): IScheduler = { diff --git a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala index a1d5666a24..4d3ebc3c04 100644 --- a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala +++ b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala @@ -19,6 +19,7 @@ import scala.concurrent.ThreadPoolRunner * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") object ExecutorScheduler { private def start(sched: ExecutorScheduler): ExecutorScheduler = { @@ -58,6 +59,7 @@ object ExecutorScheduler { * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. 
For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") trait ExecutorScheduler extends Thread with IScheduler with TerminationService with ThreadPoolRunner { diff --git a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala index ce67ffd037..ac123cfe26 100644 --- a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala +++ b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala @@ -9,6 +9,7 @@ import scala.concurrent.forkjoin._ * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean, fair: Boolean) extends Runnable with IScheduler with TerminationMonitor { diff --git a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala index f370d45094..2c4b7677b0 100644 --- a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala +++ b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala @@ -22,6 +22,7 @@ import scala.concurrent.ManagedBlocker * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") class ResizableThreadPoolScheduler(protected val terminate: Boolean, protected val daemon: Boolean) extends Thread with IScheduler with TerminationMonitor { diff --git a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala index 04d1d2c5c1..03b235fe74 100644 --- a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala +++ b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala @@ -17,6 +17,7 @@ import scala.collection.mutable * * @author Philipp Haller */ +@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0") class SingleThreadedScheduler extends IScheduler { private val tasks = new mutable.Queue[Runnable] -- cgit v1.2.3 From c26a8db067e4f04ef959bb9a8402fa3e931c3cd7 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 11 Feb 2013 08:53:14 -0800 Subject: Maintenance of Predef. 1) Deprecates much of Predef and scala.Console, especially: - the read* methods (see below) - the set{Out,Err,In} methods (see SI-4793) 2) Removed long-deprecated: - Predef#exit - Predef#error should have gone, but could not due to sbt At least the whole source base has now been future-proofed against the eventual removal of Predef#error. The low justification for the read* methods should be readily apparent: they are little used and have no call to be in global namespace, especially given their weird ad hoc semantics and unreasonably tempting names such as readBoolean(). 3) Segregated the deprecated elements in Predef from the part which still thrives. 4) Converted all the standard Predef implicits into implicit classes, value classes where possible: - ArrowAssoc, Ensuring, StringFormat, StringAdd, RichException (value) - SeqCharSequence, ArrayCharSequence (non-value) Non-implicit deprecated stubs prop up the names of the formerly converting methods. 
--- src/compiler/scala/tools/nsc/doc/Settings.scala | 8 +- .../scala/tools/nsc/interactive/REPL.scala | 4 +- src/library/scala/Console.scala | 344 +++------------------ src/library/scala/LowPriorityImplicits.scala | 2 +- src/library/scala/Predef.scala | 150 +++++---- src/library/scala/io/AnsiColor.scala | 53 ++++ src/library/scala/io/ReadStdin.scala | 228 ++++++++++++++ src/library/scala/runtime/RichException.scala | 1 + src/library/scala/runtime/SeqCharSequence.scala | 3 + src/library/scala/runtime/StringAdd.scala | 1 + src/library/scala/runtime/StringFormat.scala | 1 + test/files/instrumented/InstrumentationTest.check | 2 + .../neg/classmanifests_new_deprecations.check | 10 +- test/files/neg/logImplicits.check | 4 +- test/files/neg/predef-masking.scala | 2 +- test/files/neg/t1010.scala | 4 +- test/files/neg/t414.scala | 2 +- test/files/neg/t421.check | 2 +- test/files/neg/t421.scala | 2 +- test/files/neg/t4271.scala | 4 +- test/files/pos/List1.scala | 6 +- test/files/pos/depmet_implicit_chaining_zw.scala | 6 +- test/files/pos/depmet_implicit_norm_ret.scala | 20 +- test/files/pos/implicits-new.scala | 8 +- test/files/pos/implicits-old.scala | 40 +-- test/files/pos/relax_implicit_divergence.scala | 6 +- test/files/pos/simple-exceptions.scala | 2 +- test/files/pos/spec-asseenfrom.scala | 6 +- test/files/pos/spec-cyclic.scala | 10 +- test/files/pos/spec-sealed.scala | 8 +- test/files/pos/spec-sparsearray-new.scala | 16 +- test/files/pos/spec-sparsearray-old.scala | 14 +- test/files/pos/spec-traits.scala | 12 +- test/files/pos/t0031.scala | 6 +- test/files/pos/t0227.scala | 4 +- test/files/pos/t2331.scala | 4 +- test/files/pos/t2421.scala | 14 +- test/files/pos/t2429.scala | 10 +- test/files/pos/t2797.scala | 4 +- test/files/pos/t3152.scala | 10 +- test/files/pos/t3252.scala | 6 +- test/files/pos/t3349/Test.scala | 4 +- test/files/pos/t3363-new.scala | 4 +- test/files/pos/t3363-old.scala | 2 +- test/files/pos/t3440.scala | 10 +- test/files/pos/t3477.scala | 4 +- test/files/pos/t3731.scala | 4 +- test/files/pos/t3883.scala | 8 +- test/files/pos/t3927.scala | 4 +- test/files/pos/tcpoly_boundedmonad.scala | 18 +- .../pos/tcpoly_infer_explicit_tuple_wrapper.scala | 8 +- .../pos/tcpoly_infer_implicit_tuple_wrapper.scala | 4 +- test/files/pos/tcpoly_overloaded.scala | 18 +- test/files/pos/tcpoly_subst.scala | 2 +- test/files/pos/tcpoly_variance_pos.scala | 4 +- test/files/pos/tcpoly_wildcards.scala | 2 +- test/files/pos/typealias_dubious.scala | 14 +- test/files/pos/virtpatmat_binding_opt.scala | 4 +- test/files/presentation/callcc-interpreter.check | 3 +- test/files/presentation/ide-bug-1000349.check | 3 +- test/files/presentation/ide-bug-1000475.check | 9 +- test/files/presentation/ide-bug-1000531.check | 3 +- test/files/presentation/implicit-member.check | 3 +- test/files/presentation/ping-pong.check | 6 +- test/files/presentation/t5708.check | 3 +- test/files/presentation/visibility.check | 15 +- test/files/run/Course-2002-07.scala | 24 +- test/files/run/Course-2002-08.scala | 4 +- test/files/run/Course-2002-09.scala | 12 +- test/files/run/Course-2002-13.scala | 4 +- test/files/run/analyzerPlugins.check | 13 +- test/files/run/array-charSeq.scala | 1 + test/files/run/arrays.scala | 2 +- test/files/run/exceptions-2.scala | 50 +-- test/files/run/exceptions.scala | 4 +- test/files/run/exoticnames.scala | 8 +- test/files/run/genericValueClass.scala | 11 +- .../run/macro-typecheck-implicitsdisabled.check | 2 +- test/files/run/runtime.scala | 2 +- test/files/run/t1042.scala | 2 +- 
test/files/run/tailcalls.scala | 18 +- .../run/toolbox_typecheck_implicitsdisabled.check | 2 +- test/files/run/try-2.scala | 16 +- test/files/run/try.scala | 10 +- test/files/run/verify-ctor.scala | 2 +- test/files/scalacheck/CheckEither.scala | 28 +- test/scaladoc/resources/SI_4715.scala | 4 +- 87 files changed, 752 insertions(+), 665 deletions(-) create mode 100644 src/library/scala/io/AnsiColor.scala create mode 100644 src/library/scala/io/ReadStdin.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala index 02630a99b2..75312e2279 100644 --- a/src/compiler/scala/tools/nsc/doc/Settings.scala +++ b/src/compiler/scala/tools/nsc/doc/Settings.scala @@ -315,10 +315,10 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) /** Common conversion targets that affect any class in Scala */ val commonConversionTargets = Set( - "scala.Predef.any2stringfmt", - "scala.Predef.any2stringadd", - "scala.Predef.any2ArrowAssoc", - "scala.Predef.any2Ensuring", + "scala.Predef.StringFormat", + "scala.Predef.StringAdd", + "scala.Predef.ArrowAssoc", + "scala.Predef.Ensuring", "scala.collection.TraversableOnce.alternateImplicit") /** There's a reason all these are specialized by hand but documenting each of them is beyond the point */ diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala index ae6ab247fd..d545a5738c 100644 --- a/src/compiler/scala/tools/nsc/interactive/REPL.scala +++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala @@ -59,7 +59,7 @@ object REPL { def main(args: Array[String]) { process(args) - /*sys.*/exit(if (reporter.hasErrors) 1 else 0)// Don't use sys yet as this has to run on 2.8.2 also. + sys.exit(if (reporter.hasErrors) 1 else 0) } def loop(action: (String) => Unit) { @@ -182,7 +182,7 @@ object REPL { println(instrument(arguments, line.toInt)) case List("quit") => comp.askShutdown() - exit(1) // Don't use sys yet as this has to run on 2.8.2 also. + sys.exit(1) case List("structure", file) => doStructure(file) case _ => diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala index 5b015502ea..275d7629ee 100644 --- a/src/library/scala/Console.scala +++ b/src/library/scala/Console.scala @@ -6,16 +6,12 @@ ** |/ ** \* */ - - package scala -import java.io.{BufferedReader, InputStream, InputStreamReader, - IOException, OutputStream, PrintStream, Reader} -import java.text.MessageFormat +import java.io.{ BufferedReader, InputStream, InputStreamReader, OutputStream, PrintStream, Reader } +import scala.io.{ AnsiColor, ReadStdin } import scala.util.DynamicVariable - /** Implements functionality for * printing Scala values on the terminal as well as reading specific values. * Also defines constants for marking up text on ANSI terminals. 
@@ -23,60 +19,16 @@ import scala.util.DynamicVariable * @author Matthias Zenger * @version 1.0, 03/09/2003 */ -object Console { - - /** Foreground color for ANSI black */ - final val BLACK = "\033[30m" - /** Foreground color for ANSI red */ - final val RED = "\033[31m" - /** Foreground color for ANSI green */ - final val GREEN = "\033[32m" - /** Foreground color for ANSI yellow */ - final val YELLOW = "\033[33m" - /** Foreground color for ANSI blue */ - final val BLUE = "\033[34m" - /** Foreground color for ANSI magenta */ - final val MAGENTA = "\033[35m" - /** Foreground color for ANSI cyan */ - final val CYAN = "\033[36m" - /** Foreground color for ANSI white */ - final val WHITE = "\033[37m" - - /** Background color for ANSI black */ - final val BLACK_B = "\033[40m" - /** Background color for ANSI red */ - final val RED_B = "\033[41m" - /** Background color for ANSI green */ - final val GREEN_B = "\033[42m" - /** Background color for ANSI yellow */ - final val YELLOW_B = "\033[43m" - /** Background color for ANSI blue */ - final val BLUE_B = "\033[44m" - /** Background color for ANSI magenta */ - final val MAGENTA_B = "\033[45m" - /** Background color for ANSI cyan */ - final val CYAN_B = "\033[46m" - /** Background color for ANSI white */ - final val WHITE_B = "\033[47m" - - /** Reset ANSI styles */ - final val RESET = "\033[0m" - /** ANSI bold */ - final val BOLD = "\033[1m" - /** ANSI underlines */ - final val UNDERLINED = "\033[4m" - /** ANSI blink */ - final val BLINK = "\033[5m" - /** ANSI reversed */ - final val REVERSED = "\033[7m" - /** ANSI invisible */ - final val INVISIBLE = "\033[8m" - +object Console extends DeprecatedConsole with AnsiColor { private val outVar = new DynamicVariable[PrintStream](java.lang.System.out) private val errVar = new DynamicVariable[PrintStream](java.lang.System.err) - private val inVar = new DynamicVariable[BufferedReader]( + private val inVar = new DynamicVariable[BufferedReader]( new BufferedReader(new InputStreamReader(java.lang.System.in))) + protected def setOutDirect(out: PrintStream): Unit = outVar.value = out + protected def setErrDirect(err: PrintStream): Unit = errVar.value = err + protected def setInDirect(in: BufferedReader): Unit = inVar.value = in + /** The default output, can be overridden by `setOut` */ def out = outVar.value /** The default error, can be overridden by `setErr` */ @@ -84,12 +36,6 @@ object Console { /** The default input, can be overridden by `setIn` */ def in = inVar.value - /** Sets the default output stream. - * - * @param out the new output stream. - */ - def setOut(out: PrintStream) { outVar.value = out } - /** Sets the default output stream for the duration * of execution of one thunk. * @@ -106,13 +52,6 @@ object Console { def withOut[T](out: PrintStream)(thunk: =>T): T = outVar.withValue(out)(thunk) - /** Sets the default output stream. - * - * @param out the new output stream. - */ - def setOut(out: OutputStream): Unit = - setOut(new PrintStream(out)) - /** Sets the default output stream for the duration * of execution of one thunk. * @@ -125,13 +64,6 @@ object Console { def withOut[T](out: OutputStream)(thunk: =>T): T = withOut(new PrintStream(out))(thunk) - - /** Sets the default error stream. - * - * @param err the new error stream. - */ - def setErr(err: PrintStream) { errVar.value = err } - /** Set the default error stream for the duration * of execution of one thunk. 
* @example {{{ @@ -147,13 +79,6 @@ object Console { def withErr[T](err: PrintStream)(thunk: =>T): T = errVar.withValue(err)(thunk) - /** Sets the default error stream. - * - * @param err the new error stream. - */ - def setErr(err: OutputStream): Unit = - setErr(new PrintStream(err)) - /** Sets the default error stream for the duration * of execution of one thunk. * @@ -166,15 +91,6 @@ object Console { def withErr[T](err: OutputStream)(thunk: =>T): T = withErr(new PrintStream(err))(thunk) - - /** Sets the default input stream. - * - * @param reader specifies the new input stream. - */ - def setIn(reader: Reader) { - inVar.value = new BufferedReader(reader) - } - /** Sets the default input stream for the duration * of execution of one thunk. * @@ -195,14 +111,6 @@ object Console { def withIn[T](reader: Reader)(thunk: =>T): T = inVar.withValue(new BufferedReader(reader))(thunk) - /** Sets the default input stream. - * - * @param in the new input stream. - */ - def setIn(in: InputStream) { - setIn(new InputStreamReader(in)) - } - /** Sets the default input stream for the duration * of execution of one thunk. * @@ -251,218 +159,64 @@ object Console { * @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments */ def printf(text: String, args: Any*) { out.print(text format (args : _*)) } +} - /** Read a full line from the default input. Returns `null` if the end of the - * input stream has been reached. - * - * @return the string read from the terminal or null if the end of stream was reached. - */ - def readLine(): String = in.readLine() - - /** Print formatted text to the default output and read a full line from the default input. - * Returns `null` if the end of the input stream has been reached. - * - * @param text the format of the text to print out, as in `printf`. - * @param args the parameters used to instantiate the format, as in `printf`. - * @return the string read from the default input - */ - def readLine(text: String, args: Any*): String = { - printf(text, args: _*) - readLine() - } - - /** Reads a boolean value from an entire line of the default input. - * Has a fairly liberal interpretation of the input. - * - * @return the boolean value read, or false if it couldn't be converted to a boolean - * @throws java.io.EOFException if the end of the input stream has been reached. - */ - def readBoolean(): Boolean = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toLowerCase() match { - case "true" => true - case "t" => true - case "yes" => true - case "y" => true - case _ => false - } - } - - /** Reads a byte value from an entire line of the default input. - * - * @return the Byte that was read - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.NumberFormatException if the value couldn't be converted to a Byte - */ - def readByte(): Byte = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toByte - } - - /** Reads a short value from an entire line of the default input. - * - * @return the short that was read - * @throws java.io.EOFException if the end of the - * input stream has been reached. 
- * @throws java.lang.NumberFormatException if the value couldn't be converted to a Short - */ - def readShort(): Short = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toShort - } - - /** Reads a char value from an entire line of the default input. - * - * @return the Char that was read - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.StringIndexOutOfBoundsException if the line read from default input was empty - */ - def readChar(): Char = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s charAt 0 - } - - /** Reads an int value from an entire line of the default input. - * - * @return the Int that was read - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.NumberFormatException if the value couldn't be converted to an Int - */ - def readInt(): Int = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toInt - } - - /** Reads an long value from an entire line of the default input. - * - * @return the Long that was read - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.NumberFormatException if the value couldn't be converted to a Long - */ - def readLong(): Long = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toLong - } +private[scala] abstract class DeprecatedConsole { + self: Console.type => + + /** Internal usage only. */ + protected def setOutDirect(out: PrintStream): Unit + protected def setErrDirect(err: PrintStream): Unit + protected def setInDirect(in: BufferedReader): Unit + + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readBoolean(): Boolean = ReadStdin.readBoolean() + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readByte(): Byte = ReadStdin.readByte() + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readChar(): Char = ReadStdin.readChar() + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readDouble(): Double = ReadStdin.readDouble() + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readFloat(): Float = ReadStdin.readFloat() + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readInt(): Int = ReadStdin.readInt() + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readLine(): String = ReadStdin.readLine() + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readLine(text: String, args: Any*): String = ReadStdin.readLine(text, args: _*) + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readLong(): Long = ReadStdin.readLong() + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readShort(): Short = ReadStdin.readShort() + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf(format: String): List[Any] = ReadStdin.readf(format) + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf1(format: String): Any = ReadStdin.readf1(format) + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf2(format: String): (Any, Any) = ReadStdin.readf2(format) + @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf3(format: String): (Any, Any, Any) = ReadStdin.readf3(format) - /** Reads a float value from an 
entire line of the default input. - * @return the Float that was read. - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.NumberFormatException if the value couldn't be converted to a Float + /** Sets the default output stream. * + * @param out the new output stream. */ - def readFloat(): Float = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toFloat - } + @deprecated("Use withOut", "2.11.0") def setOut(out: PrintStream): Unit = setOutDirect(out) - /** Reads a double value from an entire line of the default input. + /** Sets the default output stream. * - * @return the Double that was read. - * @throws java.io.EOFException if the end of the - * input stream has been reached. - * @throws java.lang.NumberFormatException if the value couldn't be converted to a Float + * @param out the new output stream. */ - def readDouble(): Double = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - s.toDouble - } + @deprecated("Use withOut", "2.11.0") def setOut(out: OutputStream): Unit = setOutDirect(new PrintStream(out)) - /** Reads in some structured input (from the default input), specified by - * a format specifier. See class `java.text.MessageFormat` for details of - * the format specification. + /** Sets the default error stream. * - * @param format the format of the input. - * @return a list of all extracted values. - * @throws java.io.EOFException if the end of the input stream has been - * reached. + * @param err the new error stream. */ - def readf(format: String): List[Any] = { - val s = readLine() - if (s == null) - throw new java.io.EOFException("Console has reached end of input") - else - textComponents(new MessageFormat(format).parse(s)) - } + @deprecated("Use withErr", "2.11.0") def setErr(err: PrintStream): Unit = setErrDirect(err) - /** Reads in some structured input (from the default input), specified by - * a format specifier, returning only the first value extracted, according - * to the format specification. + /** Sets the default error stream. * - * @param format format string, as accepted by `readf`. - * @return The first value that was extracted from the input + * @param err the new error stream. */ - def readf1(format: String): Any = readf(format).head + @deprecated("Use withErr", "2.11.0") def setErr(err: OutputStream): Unit = setErrDirect(new PrintStream(err)) - /** Reads in some structured input (from the default input), specified - * by a format specifier, returning only the first two values extracted, - * according to the format specification. + /** Sets the default input stream. * - * @param format format string, as accepted by `readf`. - * @return A [[scala.Tuple2]] containing the first two values extracted + * @param reader specifies the new input stream. */ - def readf2(format: String): (Any, Any) = { - val res = readf(format) - (res.head, res.tail.head) - } + @deprecated("Use withIn", "2.11.0") def setIn(reader: Reader): Unit = setInDirect(new BufferedReader(reader)) - /** Reads in some structured input (from the default input), specified - * by a format specifier, returning only the first three values extracted, - * according to the format specification. + /** Sets the default input stream. * - * @param format format string, as accepted by `readf`. - * @return A [[scala.Tuple3]] containing the first three values extracted + * @param in the new input stream. 
*/ - def readf3(format: String): (Any, Any, Any) = { - val res = readf(format) - (res.head, res.tail.head, res.tail.tail.head) - } - - private def textComponents(a: Array[AnyRef]): List[Any] = { - var i: Int = a.length - 1 - var res: List[Any] = Nil - while (i >= 0) { - res = (a(i) match { - case x: java.lang.Boolean => x.booleanValue() - case x: java.lang.Byte => x.byteValue() - case x: java.lang.Short => x.shortValue() - case x: java.lang.Character => x.charValue() - case x: java.lang.Integer => x.intValue() - case x: java.lang.Long => x.longValue() - case x: java.lang.Float => x.floatValue() - case x: java.lang.Double => x.doubleValue() - case x => x - }) :: res; - i -= 1 - } - res - } + @deprecated("Use withIn", "2.11.0") def setIn(in: InputStream): Unit = setInDirect(new BufferedReader(new InputStreamReader(in))) } diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala index bf6e494c11..535f1ac699 100644 --- a/src/library/scala/LowPriorityImplicits.scala +++ b/src/library/scala/LowPriorityImplicits.scala @@ -22,7 +22,7 @@ import scala.language.implicitConversions * @author Martin Odersky * @since 2.8 */ -class LowPriorityImplicits { +private[scala] abstract class LowPriorityImplicits { /** We prefer the java.lang.* boxed types to these wrappers in * any potential conflicts. Conflicts do exist because the wrappers * need to implement ScalaNumber in order to have a symmetric equals diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index be57c38298..9a468489a2 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -15,6 +15,7 @@ import generic.CanBuildFrom import scala.annotation.{ elidable, implicitNotFound } import scala.annotation.elidable.ASSERTION import scala.language.{implicitConversions, existentials} +import scala.io.ReadStdin /** The `Predef` object provides definitions that are accessible in all Scala * compilation units without explicit qualification. @@ -68,7 +69,7 @@ import scala.language.{implicitConversions, existentials} * Short value to a Long value as required, and to add additional higher-order * functions to Array values. These are described in more detail in the documentation of [[scala.Array]]. */ -object Predef extends LowPriorityImplicits { +object Predef extends LowPriorityImplicits with DeprecatedPredef { /** * Retrieve the runtime representation of a class type. `classOf[T]` is equivalent to * the class literal `T.class` in Java. @@ -101,19 +102,19 @@ object Predef extends LowPriorityImplicits { // Manifest types, companions, and incantations for summoning @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.") - @deprecated("Use scala.reflect.ClassTag instead", "2.10.0") + @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0") type ClassManifest[T] = scala.reflect.ClassManifest[T] // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. 
Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") type OptManifest[T] = scala.reflect.OptManifest[T] @annotation.implicitNotFound(msg = "No Manifest available for ${T}.") // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") + // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") type Manifest[T] = scala.reflect.Manifest[T] - @deprecated("Use scala.reflect.ClassTag instead", "2.10.0") + @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0") val ClassManifest = scala.reflect.ClassManifest // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") + // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") val Manifest = scala.reflect.Manifest // TODO undeprecated until Scala reflection becomes non-experimental // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") @@ -136,19 +137,14 @@ object Predef extends LowPriorityImplicits { // Apparently needed for the xml library val $scope = scala.xml.TopScope - // Deprecated + // errors and asserts ------------------------------------------------- + // !!! Remove this when possible - ideally for 2.11. + // We are stuck with it a while longer because sbt's compiler interface + // still calls it as of 0.12.2. @deprecated("Use `sys.error(message)` instead", "2.9.0") def error(message: String): Nothing = sys.error(message) - @deprecated("Use `sys.exit()` instead", "2.9.0") - def exit(): Nothing = sys.exit() - - @deprecated("Use `sys.exit(status)` instead", "2.9.0") - def exit(status: Int): Nothing = sys.exit(status) - - // errors and asserts ------------------------------------------------- - /** Tests an expression, throwing an `AssertionError` if false. * Calls to this method will not be generated if `-Xelide-below` * is at least `ASSERTION`. @@ -230,17 +226,6 @@ object Predef extends LowPriorityImplicits { throw new IllegalArgumentException("requirement failed: "+ message) } - final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal { - // `__resultOfEnsuring` must be a public val to allow inlining. - // See comments in ArrowAssoc for more. - - def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring } - def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); __resultOfEnsuring } - def ensuring(cond: A => Boolean): A = { assert(cond(__resultOfEnsuring)); __resultOfEnsuring } - def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(__resultOfEnsuring), msg); __resultOfEnsuring } - } - @inline implicit def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x) - /** `???` can be used for marking methods that remain to be implemented. 
* @throws A `NotImplementedError` */ @@ -260,17 +245,58 @@ object Predef extends LowPriorityImplicits { def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x) } - final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal { - // `__leftOfArrow` must be a public val to allow inlining. The val - // used to be called `x`, but now goes by `__leftOfArrow`, as that - // reduces the chances of a user's writing `foo.__leftOfArrow` and - // being confused why they get an ambiguous implicit conversion - // error. (`foo.x` used to produce this error since both - // any2Ensuring and any2ArrowAssoc enrich everything with an `x`) + // implicit classes ----------------------------------------------------- + + implicit final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal { @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y) def →[B](y: B): Tuple2[A, B] = ->(y) } - @inline implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x) + + implicit final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal { + def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring } + def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); __resultOfEnsuring } + def ensuring(cond: A => Boolean): A = { assert(cond(__resultOfEnsuring)); __resultOfEnsuring } + def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(__resultOfEnsuring), msg); __resultOfEnsuring } + } + + implicit final class StringFormat[A](val __stringToFormat: A) extends AnyVal { + /** Returns string formatted according to given `format` string. + * Format strings are as for `String.format` + * (@see java.lang.String.format). + */ + @inline def formatted(fmtstr: String): String = fmtstr format __stringToFormat + } + + implicit final class StringAdd[A](val __thingToAdd: A) extends AnyVal { + def +(other: String) = String.valueOf(__thingToAdd) + other + } + + implicit final class RichException(val __throwableToEnrich: Throwable) extends AnyVal { + import scala.compat.Platform.EOL + @deprecated("Use Throwable#getStackTrace", "2.11.0") def getStackTraceString = __throwableToEnrich.getStackTrace().mkString("", EOL, EOL) + } + + implicit final class SeqCharSequence(val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence { + def length: Int = __sequenceOfChars.length + def charAt(index: Int): Char = __sequenceOfChars(index) + def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end)) + override def toString = __sequenceOfChars mkString "" + } + + implicit final class ArrayCharSequence(val __arrayOfChars: Array[Char]) extends CharSequence { + def length: Int = __arrayOfChars.length + def charAt(index: Int): Char = __arrayOfChars(index) + def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end) + override def toString = __arrayOfChars mkString "" + } + + implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] { + def apply(from: String) = apply() + def apply() = mutable.StringBuilder.newBuilder + } + + @inline implicit def augmentString(x: String): StringOps = new StringOps(x) + @inline implicit def unaugmentString(x: StringOps): String = x.repr // printing and reading ----------------------------------------------- @@ -279,28 +305,10 @@ object Predef extends LowPriorityImplicits { def println(x: Any) = Console.println(x) def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*)) 
- def readLine(): String = Console.readLine() - def readLine(text: String, args: Any*) = Console.readLine(text, args: _*) - def readBoolean() = Console.readBoolean() - def readByte() = Console.readByte() - def readShort() = Console.readShort() - def readChar() = Console.readChar() - def readInt() = Console.readInt() - def readLong() = Console.readLong() - def readFloat() = Console.readFloat() - def readDouble() = Console.readDouble() - def readf(format: String) = Console.readf(format) - def readf1(format: String) = Console.readf1(format) - def readf2(format: String) = Console.readf2(format) - def readf3(format: String) = Console.readf3(format) - // views -------------------------------------------------------------- - implicit def exceptionWrapper(exc: Throwable) = new runtime.RichException(exc) implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x) implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x) - implicit def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new runtime.SeqCharSequence(xs) - implicit def arrayToCharSequence(xs: Array[Char]): CharSequence = new runtime.ArrayCharSequence(xs, 0, xs.length) implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match { case x: Array[AnyRef] => refArrayOps[AnyRef](x) @@ -360,18 +368,6 @@ object Predef extends LowPriorityImplicits { implicit def Double2double(x: java.lang.Double): Double = x.doubleValue implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.booleanValue - // Strings and CharSequences -------------------------------------------------------------- - - @inline implicit def any2stringfmt(x: Any) = new runtime.StringFormat(x) - @inline implicit def augmentString(x: String): StringOps = new StringOps(x) - implicit def any2stringadd(x: Any) = new runtime.StringAdd(x) - implicit def unaugmentString(x: StringOps): String = x.repr - - implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] { - def apply(from: String) = apply() - def apply() = mutable.StringBuilder.newBuilder - } - // Type Constraints -------------------------------------------------------------- /** @@ -422,3 +418,31 @@ object Predef extends LowPriorityImplicits { implicit def dummyImplicit: DummyImplicit = new DummyImplicit } } + +private[scala] trait DeprecatedPredef { + self: Predef.type => + + // Deprecated stubs for any who may have been calling these methods directly. 
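+  // For illustration only (hypothetical call sites, not part of this change):
+  //   any2ArrowAssoc(1) -> "one"   // still compiles, now via the ArrowAssoc value class in Predef
+  //   readLine("name? ")           // still compiles, now delegating to scala.io.ReadStdin
+  // Each stub below simply forwards to the corresponding implicit class or to scala.io.ReadStdin.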
+ @deprecated("Use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x) + @deprecated("Use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x) + @deprecated("Use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x) + @deprecated("Use String interpolation", "2.11.0") def any2stringadd(x: Any): StringAdd[Any] = new StringAdd(x) + @deprecated("Use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc) + @deprecated("Use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs) + @deprecated("Use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs) + + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readLine(): String = ReadStdin.readLine() + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readLine(text: String, args: Any*) = ReadStdin.readLine(text, args: _*) + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readBoolean() = ReadStdin.readBoolean() + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readByte() = ReadStdin.readByte() + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readShort() = ReadStdin.readShort() + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readChar() = ReadStdin.readChar() + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readInt() = ReadStdin.readInt() + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readLong() = ReadStdin.readLong() + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readFloat() = ReadStdin.readFloat() + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readDouble() = ReadStdin.readDouble() + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf(format: String) = ReadStdin.readf(format) + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf1(format: String) = ReadStdin.readf1(format) + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf2(format: String) = ReadStdin.readf2(format) + @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf3(format: String) = ReadStdin.readf3(format) +} diff --git a/src/library/scala/io/AnsiColor.scala b/src/library/scala/io/AnsiColor.scala new file mode 100644 index 0000000000..6b00eb283f --- /dev/null +++ b/src/library/scala/io/AnsiColor.scala @@ -0,0 +1,53 @@ +package scala +package io + +trait AnsiColor { + /** Foreground color for ANSI black */ + final val BLACK = "\033[30m" + /** Foreground color for ANSI red */ + final val RED = "\033[31m" + /** Foreground color for ANSI green */ + final val GREEN = "\033[32m" + /** Foreground color for ANSI yellow */ + final val YELLOW = "\033[33m" + /** Foreground color for ANSI blue */ + final val BLUE = "\033[34m" + /** Foreground color for ANSI magenta */ + final val MAGENTA = "\033[35m" + /** Foreground color for ANSI cyan */ + final val CYAN = "\033[36m" + /** Foreground color for ANSI white */ + final val WHITE = "\033[37m" + + /** Background color for ANSI black */ + final val BLACK_B = "\033[40m" + /** Background color for ANSI red */ + final val RED_B = "\033[41m" + /** Background color for ANSI green */ + final val GREEN_B = "\033[42m" + /** Background color for ANSI yellow */ + final val YELLOW_B = "\033[43m" + /** 
Background color for ANSI blue */ + final val BLUE_B = "\033[44m" + /** Background color for ANSI magenta */ + final val MAGENTA_B = "\033[45m" + /** Background color for ANSI cyan */ + final val CYAN_B = "\033[46m" + /** Background color for ANSI white */ + final val WHITE_B = "\033[47m" + + /** Reset ANSI styles */ + final val RESET = "\033[0m" + /** ANSI bold */ + final val BOLD = "\033[1m" + /** ANSI underlines */ + final val UNDERLINED = "\033[4m" + /** ANSI blink */ + final val BLINK = "\033[5m" + /** ANSI reversed */ + final val REVERSED = "\033[7m" + /** ANSI invisible */ + final val INVISIBLE = "\033[8m" +} + +object AnsiColor extends AnsiColor { } diff --git a/src/library/scala/io/ReadStdin.scala b/src/library/scala/io/ReadStdin.scala new file mode 100644 index 0000000000..429d7cec75 --- /dev/null +++ b/src/library/scala/io/ReadStdin.scala @@ -0,0 +1,228 @@ +package scala +package io + +import java.text.MessageFormat + +/** private[scala] because this is not functionality we should be providing + * in the standard library, at least not in this idiosyncratic form. + * Factored into trait because it is better code structure regardless. + */ +private[scala] trait ReadStdin { + import scala.Console._ + + /** Read a full line from the default input. Returns `null` if the end of the + * input stream has been reached. + * + * @return the string read from the terminal or null if the end of stream was reached. + */ + def readLine(): String = in.readLine() + + /** Print formatted text to the default output and read a full line from the default input. + * Returns `null` if the end of the input stream has been reached. + * + * @param text the format of the text to print out, as in `printf`. + * @param args the parameters used to instantiate the format, as in `printf`. + * @return the string read from the default input + */ + def readLine(text: String, args: Any*): String = { + printf(text, args: _*) + readLine() + } + + /** Reads a boolean value from an entire line of the default input. + * Has a fairly liberal interpretation of the input. + * + * @return the boolean value read, or false if it couldn't be converted to a boolean + * @throws java.io.EOFException if the end of the input stream has been reached. + */ + def readBoolean(): Boolean = { + val s = readLine() + if (s == null) + throw new java.io.EOFException("Console has reached end of input") + else + s.toLowerCase() match { + case "true" => true + case "t" => true + case "yes" => true + case "y" => true + case _ => false + } + } + + /** Reads a byte value from an entire line of the default input. + * + * @return the Byte that was read + * @throws java.io.EOFException if the end of the + * input stream has been reached. + * @throws java.lang.NumberFormatException if the value couldn't be converted to a Byte + */ + def readByte(): Byte = { + val s = readLine() + if (s == null) + throw new java.io.EOFException("Console has reached end of input") + else + s.toByte + } + + /** Reads a short value from an entire line of the default input. + * + * @return the short that was read + * @throws java.io.EOFException if the end of the + * input stream has been reached. + * @throws java.lang.NumberFormatException if the value couldn't be converted to a Short + */ + def readShort(): Short = { + val s = readLine() + if (s == null) + throw new java.io.EOFException("Console has reached end of input") + else + s.toShort + } + + /** Reads a char value from an entire line of the default input.
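+ * Only the first character of the line is returned; the rest of the line is discarded.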
+ * + * @return the Char that was read + * @throws java.io.EOFException if the end of the + * input stream has been reached. + * @throws java.lang.StringIndexOutOfBoundsException if the line read from default input was empty + */ + def readChar(): Char = { + val s = readLine() + if (s == null) + throw new java.io.EOFException("Console has reached end of input") + else + s charAt 0 + } + + /** Reads an int value from an entire line of the default input. + * + * @return the Int that was read + * @throws java.io.EOFException if the end of the + * input stream has been reached. + * @throws java.lang.NumberFormatException if the value couldn't be converted to an Int + */ + def readInt(): Int = { + val s = readLine() + if (s == null) + throw new java.io.EOFException("Console has reached end of input") + else + s.toInt + } + + /** Reads a long value from an entire line of the default input. + * + * @return the Long that was read + * @throws java.io.EOFException if the end of the + * input stream has been reached. + * @throws java.lang.NumberFormatException if the value couldn't be converted to a Long + */ + def readLong(): Long = { + val s = readLine() + if (s == null) + throw new java.io.EOFException("Console has reached end of input") + else + s.toLong + } + + /** Reads a float value from an entire line of the default input. + * @return the Float that was read. + * @throws java.io.EOFException if the end of the + * input stream has been reached. + * @throws java.lang.NumberFormatException if the value couldn't be converted to a Float + * + */ + def readFloat(): Float = { + val s = readLine() + if (s == null) + throw new java.io.EOFException("Console has reached end of input") + else + s.toFloat + } + + /** Reads a double value from an entire line of the default input. + * + * @return the Double that was read. + * @throws java.io.EOFException if the end of the + * input stream has been reached. + * @throws java.lang.NumberFormatException if the value couldn't be converted to a Double + */ + def readDouble(): Double = { + val s = readLine() + if (s == null) + throw new java.io.EOFException("Console has reached end of input") + else + s.toDouble + } + + /** Reads in some structured input (from the default input), specified by + * a format specifier. See class `java.text.MessageFormat` for details of + * the format specification. + * + * @param format the format of the input. + * @return a list of all extracted values. + * @throws java.io.EOFException if the end of the input stream has been + * reached. + */ + def readf(format: String): List[Any] = { + val s = readLine() + if (s == null) + throw new java.io.EOFException("Console has reached end of input") + else + textComponents(new MessageFormat(format).parse(s)) + } + + /** Reads in some structured input (from the default input), specified by + * a format specifier, returning only the first value extracted, according + * to the format specification. + * + * @param format format string, as accepted by `readf`. + * @return The first value that was extracted from the input + */ + def readf1(format: String): Any = readf(format).head + + /** Reads in some structured input (from the default input), specified + * by a format specifier, returning only the first two values extracted, + * according to the format specification. + * + * @param format format string, as accepted by `readf`.
+ * @return A [[scala.Tuple2]] containing the first two values extracted + */ + def readf2(format: String): (Any, Any) = { + val res = readf(format) + (res.head, res.tail.head) + } + + /** Reads in some structured input (from the default input), specified + * by a format specifier, returning only the first three values extracted, + * according to the format specification. + * + * @param format format string, as accepted by `readf`. + * @return A [[scala.Tuple3]] containing the first three values extracted + */ + def readf3(format: String): (Any, Any, Any) = { + val res = readf(format) + (res.head, res.tail.head, res.tail.tail.head) + } + + private def textComponents(a: Array[AnyRef]): List[Any] = { + var i: Int = a.length - 1 + var res: List[Any] = Nil + while (i >= 0) { + res = (a(i) match { + case x: java.lang.Boolean => x.booleanValue() + case x: java.lang.Byte => x.byteValue() + case x: java.lang.Short => x.shortValue() + case x: java.lang.Character => x.charValue() + case x: java.lang.Integer => x.intValue() + case x: java.lang.Long => x.longValue() + case x: java.lang.Float => x.floatValue() + case x: java.lang.Double => x.doubleValue() + case x => x + }) :: res; + i -= 1 + } + res + } +} + +object ReadStdin extends ReadStdin { } diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala index 94c4137674..cf4eb71ded 100644 --- a/src/library/scala/runtime/RichException.scala +++ b/src/library/scala/runtime/RichException.scala @@ -10,6 +10,7 @@ package scala.runtime import scala.compat.Platform.EOL +@deprecated("Use Throwable#getStackTrace", "2.11.0") final class RichException(exc: Throwable) { def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL) } diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala index d2084a6598..ce7d7afc9e 100644 --- a/src/library/scala/runtime/SeqCharSequence.scala +++ b/src/library/scala/runtime/SeqCharSequence.scala @@ -11,6 +11,7 @@ package runtime import java.util.Arrays.copyOfRange +@deprecated("Use Predef.SeqCharSequence", "2.11.0") final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence { def length: Int = xs.length def charAt(index: Int): Char = xs(index) @@ -18,6 +19,8 @@ final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends C override def toString = xs.mkString("") } +// Still need this one since the implicit class ArrayCharSequence only converts +// a single argument. 
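+// (The runtime class below takes explicit start and end offsets, which the
+// one-argument Predef.ArrayCharSequence implicit cannot supply.)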
final class ArrayCharSequence(val xs: Array[Char], start: Int, end: Int) extends CharSequence { // yikes // java.lang.VerifyError: (class: scala/runtime/ArrayCharSequence, method: signature: ([C)V) diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala index 9d848f0ba7..1456d9a4e4 100644 --- a/src/library/scala/runtime/StringAdd.scala +++ b/src/library/scala/runtime/StringAdd.scala @@ -9,6 +9,7 @@ package scala.runtime /** A wrapper class that adds string concatenation `+` to any value */ +@deprecated("Use Predef.StringAdd", "2.11.0") final class StringAdd(val self: Any) extends AnyVal { def +(other: String) = String.valueOf(self) + other } diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala index 983ae2fc54..21e5efd1fc 100644 --- a/src/library/scala/runtime/StringFormat.scala +++ b/src/library/scala/runtime/StringFormat.scala @@ -10,6 +10,7 @@ package scala.runtime /** A wrapper class that adds a `formatted` operation to any value */ +@deprecated("Use Predef.StringFormat", "2.11.0") final class StringFormat(val self: Any) extends AnyVal { /** Returns string formatted according to given `format` string. * Format strings are as for `String.format` diff --git a/test/files/instrumented/InstrumentationTest.check b/test/files/instrumented/InstrumentationTest.check index f0f447560a..0c570fa12c 100644 --- a/test/files/instrumented/InstrumentationTest.check +++ b/test/files/instrumented/InstrumentationTest.check @@ -4,5 +4,7 @@ Method call statistics: 1 Foo1.someMethod()I 1 instrumented/Foo2.()V 1 instrumented/Foo2.someMethod()I + 1 scala/DeprecatedConsole.()V 1 scala/Predef$.println(Ljava/lang/Object;)V + 1 scala/io/AnsiColor$class.$init$(Lscala/io/AnsiColor;)V 1 scala/runtime/BoxesRunTime.boxToBoolean(Z)Ljava/lang/Boolean; diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check index fddd6bf5b4..5f9d0a1ccc 100644 --- a/test/files/neg/classmanifests_new_deprecations.check +++ b/test/files/neg/classmanifests_new_deprecations.check @@ -1,13 +1,13 @@ -classmanifests_new_deprecations.scala:2: warning: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:2: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead def cm1[T: ClassManifest] = ??? ^ -classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead def cm2[T](implicit evidence$1: ClassManifest[T]) = ??? 
^ -classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead val cm3: ClassManifest[Int] = null ^ -classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead val cm3: ClassManifest[Int] = null ^ classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead @@ -22,7 +22,7 @@ classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead val rcm3: scala.reflect.ClassManifest[Int] = null ^ -classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead type CM[T] = ClassManifest[T] ^ classmanifests_new_deprecations.scala:15: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead diff --git a/test/files/neg/logImplicits.check b/test/files/neg/logImplicits.check index 54afc6f86d..0522bd8354 100644 --- a/test/files/neg/logImplicits.check +++ b/test/files/neg/logImplicits.check @@ -7,10 +7,10 @@ logImplicits.scala:7: applied implicit conversion from String("abc") to ?{def ma logImplicits.scala:15: inferred view from String("abc") to Int = C.this.convert:(p: String("abc"))Int math.max(122, x: Int) ^ -logImplicits.scala:19: applied implicit conversion from Int(1) to ?{def ->: ?} = implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] +logImplicits.scala:19: applied implicit conversion from Int(1) to ?{def ->: ?} = implicit def ArrowAssoc[A](__leftOfArrow: A): ArrowAssoc[A] def f = (1 -> 2) + "c" ^ -logImplicits.scala:19: applied implicit conversion from (Int, Int) to ?{def +: ?} = implicit def any2stringadd(x: Any): scala.runtime.StringAdd +logImplicits.scala:19: applied implicit conversion from (Int, Int) to ?{def +: ?} = implicit def StringAdd[A](__thingToAdd: A): StringAdd[A] def f = (1 -> 2) + "c" ^ logImplicits.scala:22: error: class Un needs to be abstract, since method unimplemented is not defined diff --git a/test/files/neg/predef-masking.scala b/test/files/neg/predef-masking.scala index 67b69aa169..6f4f4859d0 100644 --- a/test/files/neg/predef-masking.scala +++ b/test/files/neg/predef-masking.scala @@ -1,5 +1,5 @@ // Testing predef masking -import Predef.{ any2stringadd => _, _ } +import Predef.{ StringAdd => _, _ } object StringPlusConfusion { // Would love to do something about this error message, but by the diff --git a/test/files/neg/t1010.scala b/test/files/neg/t1010.scala index 7a1e6615e5..fd142978ec 100644 --- a/test/files/neg/t1010.scala +++ b/test/files/neg/t1010.scala @@ -6,9 +6,9 @@ class MailBox { abstract class Actor { private val in = new MailBox - def send(msg: in.Message) = error("foo") + def send(msg: in.Message) = sys.error("foo") - def unstable: Actor = error("foo") + def unstable: Actor = sys.error("foo") def dubiousSend(msg: MailBox#Message): Nothing = 
unstable.send(msg) // in.Message becomes unstable.Message, but that's ok since Message is a concrete type member diff --git a/test/files/neg/t414.scala b/test/files/neg/t414.scala index 2bc83eedcb..1662b9a105 100644 --- a/test/files/neg/t414.scala +++ b/test/files/neg/t414.scala @@ -3,7 +3,7 @@ case class Node[a](left: IntMap[a], keyVal: Pair[Int, a], right: IntMap[a]) exte abstract class IntMap[a] { def lookup(key: Int): a = this match { case Empty => - error("clef inexistante") + sys.error("clef inexistante") case _ => }; diff --git a/test/files/neg/t421.check b/test/files/neg/t421.check index e81df52ab0..dc5fa425ac 100644 --- a/test/files/neg/t421.check +++ b/test/files/neg/t421.check @@ -1,4 +1,4 @@ t421.scala:5: error: star patterns must correspond with varargs parameters - case Bar("foo",_*) => error("huh?"); + case Bar("foo",_*) => sys.error("huh?"); ^ one error found diff --git a/test/files/neg/t421.scala b/test/files/neg/t421.scala index 43f6c9dafd..9a327be896 100644 --- a/test/files/neg/t421.scala +++ b/test/files/neg/t421.scala @@ -2,7 +2,7 @@ object foo { case class Bar(a:String, b:AnyRef, c:String*); Bar("foo","meets","bar") match { - case Bar("foo",_*) => error("huh?"); + case Bar("foo",_*) => sys.error("huh?"); } } diff --git a/test/files/neg/t4271.scala b/test/files/neg/t4271.scala index 50526c8958..46ae3ad9ec 100644 --- a/test/files/neg/t4271.scala +++ b/test/files/neg/t4271.scala @@ -1,11 +1,11 @@ object foo { object Donotuseme - implicit def any2Ensuring[A](x: A) = Donotuseme + implicit def Ensuring[A](x: A) = Donotuseme implicit def doubleWrapper(x: Int) = Donotuseme implicit def floatWrapper(x: Int) = Donotuseme implicit def intWrapper(x: Int) = Donotuseme implicit def longWrapper(x: Int) = Donotuseme - implicit def any2ArrowAssoc[A](x: A) = Donotuseme + implicit def ArrowAssoc[A](x: A) = Donotuseme 3 to 5 5 ensuring true 3 -> 5 diff --git a/test/files/pos/List1.scala b/test/files/pos/List1.scala index 9d3a51f4e3..30ebf5e1e7 100644 --- a/test/files/pos/List1.scala +++ b/test/files/pos/List1.scala @@ -9,15 +9,15 @@ object lists { def Nil[b] = new List[b] { def isEmpty: Boolean = true; - def head = error("head of Nil"); - def tail = error("tail of Nil"); + def head = sys.error("head of Nil"); + def tail = sys.error("tail of Nil"); } def Cons[c](x: c, xs: List[c]): List[c] = new List[c] { def isEmpty = false; def head = x; def tail = xs; - } + } def foo = { val intnil = Nil[Int]; diff --git a/test/files/pos/depmet_implicit_chaining_zw.scala b/test/files/pos/depmet_implicit_chaining_zw.scala index 93da3b0f8e..ce5ea476d8 100644 --- a/test/files/pos/depmet_implicit_chaining_zw.scala +++ b/test/files/pos/depmet_implicit_chaining_zw.scala @@ -3,7 +3,7 @@ trait Succ[N] trait ZipWith[N, S] { type T - val x: T = error("") + val x: T = sys.error("") } object ZipWith { @@ -15,7 +15,7 @@ object ZipWith { type T = Stream[S] => zWith.T // dependent types replace the associated types functionality } - // can't use implicitly[ZipWith[Succ[Succ[Zero]], Int => String => Boolean]], + // can't use implicitly[ZipWith[Succ[Succ[Zero]], Int => String => Boolean]], // since that will chop of the {type T = ... 
} refinement in adapt (pt = ZipWith[Succ[Succ[Zero]], Int => String => Boolean]) // this works // def zipWith(implicit zw: ZipWith[Succ[Succ[Zero]], Int => String => Boolean]): zw.T = zw.x @@ -25,4 +25,4 @@ object ZipWith { type _2 = Succ[Succ[Zero]] val zw = ?[ZipWith[_2, Int => String => Boolean]].x // : Stream[Int] => Stream[String] => Stream[Boolean] // val zw = implicitly[ZipWith[Succ[Succ[Zero]], Int => String => Boolean]{type T = Stream[Int] => Stream[String] => Stream[Boolean]}].x -} \ No newline at end of file +} diff --git a/test/files/pos/depmet_implicit_norm_ret.scala b/test/files/pos/depmet_implicit_norm_ret.scala index bafd2f7c51..0c587cf164 100644 --- a/test/files/pos/depmet_implicit_norm_ret.scala +++ b/test/files/pos/depmet_implicit_norm_ret.scala @@ -1,29 +1,29 @@ object Test{ def ?[S <: AnyRef](implicit w : S) : w.type = w - + // fallback, lower priority (overloading rules apply: pick alternative in subclass lowest in subtyping lattice) class ZipWithDefault { implicit def ZeroZipWith[S] = new ZipWith[S] { type T = Stream[S] - } + } } - + object ZipWith extends ZipWithDefault { // def apply[S: ZipWith](s : S) = ?[ZipWith[S]].zipWith(s) // TODO: bug return type should be inferred def apply[S](s : S)(implicit zw: ZipWith[S]): zw.T = zw.zipWith(s) implicit def SuccZipWith[S,R](implicit zWith : ZipWith[R]) = new ZipWith[S => R] { type T = Stream[S] => zWith.T // dependent types replace the associated types functionality - } + } } - + trait ZipWith[S] { type T - def zipWith : S => T = error("") + def zipWith : S => T = sys.error("") } - + // bug: inferred return type = (Stream[A]) => java.lang.Object with Test.ZipWith[B]{type T = Stream[B]}#T // this seems incompatible with vvvvvvvvvvvvvvvvvvvvvv -- #3731 - def map[A,B](f : A => B) /* : Stream[A] => Stream[B]*/ = ZipWith(f) - val tst: Stream[Int] = map{x: String => x.length}(Stream("a")) -} \ No newline at end of file + def map[A,B](f : A => B) /* : Stream[A] => Stream[B]*/ = ZipWith(f) + val tst: Stream[Int] = map{x: String => x.length}(Stream("a")) +} diff --git a/test/files/pos/implicits-new.scala b/test/files/pos/implicits-new.scala index ffc387132a..7b4f20c6c9 100644 --- a/test/files/pos/implicits-new.scala +++ b/test/files/pos/implicits-new.scala @@ -3,9 +3,9 @@ import scala.reflect.{ClassTag, classTag} // #1435 object t1435 { - implicit def a(s:String):String = error("") - implicit def a(i:Int):String = error("") - implicit def b(i:Int):String = error("") + implicit def a(s:String):String = sys.error("") + implicit def a(i:Int):String = sys.error("") + implicit def b(i:Int):String = sys.error("") } class C1435 { @@ -89,4 +89,4 @@ package foo2709 { // Problem with specs object specsProblem { println(implicitly[TypeTag[Class[_]]]) -} \ No newline at end of file +} diff --git a/test/files/pos/implicits-old.scala b/test/files/pos/implicits-old.scala index 2c01dd0ba8..62ae6b835c 100644 --- a/test/files/pos/implicits-old.scala +++ b/test/files/pos/implicits-old.scala @@ -1,8 +1,8 @@ // #1435 object t1435 { - implicit def a(s:String):String = error("") - implicit def a(i:Int):String = error("") - implicit def b(i:Int):String = error("") + implicit def a(s:String):String = sys.error("") + implicit def a(i:Int):String = sys.error("") + implicit def b(i:Int):String = sys.error("") } class C1435 { @@ -45,7 +45,7 @@ object Test1625 { implicit def byName[A](x: =>A) = new Wrapped(x) implicit def byVal[A](x: A) = x - + def main(args: Array[String]) = { // val res:Wrapped = 7 // works @@ -57,7 +57,7 @@ object Test1625 { } object 
Test2188 { - implicit def toJavaList[A: ClassManifest](t:collection.Seq[A]):java.util.List[A] = java.util.Arrays.asList(t.toArray:_*) + implicit def toJavaList[A: ClassManifest](t:collection.Seq[A]):java.util.List[A] = java.util.Arrays.asList(t.toArray:_*) val x: java.util.List[String] = List("foo") } @@ -67,21 +67,21 @@ object TestNumericWidening { val x: java.lang.Long = y } -// #2709 -package foo2709 { - class A - class B - - package object bar { - implicit def a2b(a: A): B = new B - } - - package bar { - object test { - new A: B - } - } -} +// #2709 +package foo2709 { + class A + class B + + package object bar { + implicit def a2b(a: A): B = new B + } + + package bar { + object test { + new A: B + } + } +} // Problem with specs object specsProblem { diff --git a/test/files/pos/relax_implicit_divergence.scala b/test/files/pos/relax_implicit_divergence.scala index 8525c84bab..f17d0239d8 100644 --- a/test/files/pos/relax_implicit_divergence.scala +++ b/test/files/pos/relax_implicit_divergence.scala @@ -1,7 +1,7 @@ class A(val options: Seq[String]) object Test { - implicit def ss: Equiv[Seq[String]] = error("dummy") - implicit def equivA(implicit seqEq: Equiv[Seq[String]]): Equiv[A] = error("dummy") + implicit def ss: Equiv[Seq[String]] = sys.error("dummy") + implicit def equivA(implicit seqEq: Equiv[Seq[String]]): Equiv[A] = sys.error("dummy") implicitly[Equiv[A]] -} \ No newline at end of file +} diff --git a/test/files/pos/simple-exceptions.scala b/test/files/pos/simple-exceptions.scala index 38f2fc8500..a9f16bf90b 100644 --- a/test/files/pos/simple-exceptions.scala +++ b/test/files/pos/simple-exceptions.scala @@ -8,7 +8,7 @@ object Test { try { try { Console.println("hi!") - error("xx") + sys.error("xx") } finally Console.println("ho!") } diff --git a/test/files/pos/spec-asseenfrom.scala b/test/files/pos/spec-asseenfrom.scala index cf20fc5ffa..ede5791709 100644 --- a/test/files/pos/spec-asseenfrom.scala +++ b/test/files/pos/spec-asseenfrom.scala @@ -1,8 +1,8 @@ -class Automaton[@specialized(Double) W,State] { +class Automaton[@specialized(Double) W,State] { - def finalWeight(s: State): W = error("todo"); + def finalWeight(s: State): W = sys.error("todo"); - def allStates: Set[State] = error("toodo"); + def allStates: Set[State] = sys.error("toodo"); /** * Returns a map from states to its final weight. may expand all nodes. 
diff --git a/test/files/pos/spec-cyclic.scala b/test/files/pos/spec-cyclic.scala index b983caa6db..6cd7685370 100644 --- a/test/files/pos/spec-cyclic.scala +++ b/test/files/pos/spec-cyclic.scala @@ -6,25 +6,25 @@ trait MyPartialFunction[-A, +B] extends AnyRef with AbsFun[A, B] trait ColMap[A, +B] extends MyPartialFunction[A, B] /*with Collection[(A, B)] */ -trait ColSorted[K,+A] extends ColRanged[K,A] +trait ColSorted[K,+A] extends ColRanged[K,A] -trait ColSortedMap[K,+E] extends ColMap[K,E] with ColSorted[K,Tuple2[K,E]] +trait ColSortedMap[K,+E] extends ColMap[K,E] with ColSorted[K,Tuple2[K,E]] trait MutMap[A, B] extends AnyRef with ColMap[A, B] -trait ColRanged[K, +A] //extends Iterable[A] +trait ColRanged[K, +A] //extends Iterable[A] trait JclRanged[K,A] extends ColRanged[K,A] //with MutableIterable[A] { -trait JclMap[K,E] extends /*collection.jcl.MutableIterable[Tuple2[K,E]] with*/ MutMap[K,E] +trait JclMap[K,E] extends /*collection.jcl.MutableIterable[Tuple2[K,E]] with*/ MutMap[K,E] trait JclSorted[K,A] extends ColSorted[K,A] with JclRanged[K,A] trait JclSortedMap[K,E] extends ColSortedMap[K,E] with JclMap[K,E] with JclSorted[K,Tuple2[K,E]] class Foo[A, B] extends JclSortedMap[A, B] { - def apply(x: A): B = error("NYI") + def apply(x: A): B = sys.error("NYI") } class Bar { diff --git a/test/files/pos/spec-sealed.scala b/test/files/pos/spec-sealed.scala index 5782930899..d7ecfaaabd 100644 --- a/test/files/pos/spec-sealed.scala +++ b/test/files/pos/spec-sealed.scala @@ -2,13 +2,13 @@ sealed abstract class MyList[@specialized +A] { def head: A def tail: MyList[A] - def ::[@specialized B >: A](x: B): MyList[B] = + def ::[@specialized B >: A](x: B): MyList[B] = new Cons[B](x, this) } case object MyNil extends MyList[Nothing] { - def head = error("nil") - def tail = error("nil") + def head = sys.error("nil") + def tail = sys.error("nil") } case class Cons[@specialized a](private val hd: a, tl: MyList[a]) extends MyList[a] { @@ -19,7 +19,7 @@ case class Cons[@specialized a](private val hd: a, tl: MyList[a]) extends MyList abstract class IntList extends MyList[Int] object Main extends App { - val xs = 1 :: 2 :: 3 :: MyNil + val xs = 1 :: 2 :: 3 :: MyNil println(xs) } diff --git a/test/files/pos/spec-sparsearray-new.scala b/test/files/pos/spec-sparsearray-new.scala index 7b3934c476..df31089fe2 100644 --- a/test/files/pos/spec-sparsearray-new.scala +++ b/test/files/pos/spec-sparsearray-new.scala @@ -4,7 +4,7 @@ import scala.collection.mutable.MapLike class SparseArray[@specialized(Int) T:ClassTag] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] { override def get(x: Int) = { val ind = findOffset(x) - if(ind < 0) None else Some(error("ignore")) + if(ind < 0) None else Some(sys.error("ignore")) } /** @@ -13,13 +13,13 @@ class SparseArray[@specialized(Int) T:ClassTag] extends collection.mutable.Map[I * negative and can be converted into an insertion point with -(rv+1). 
*/ private def findOffset(i : Int) : Int = { - error("impl doesn't matter") + sys.error("impl doesn't matter") } - override def apply(i : Int) : T = { error("ignore") } - override def update(i : Int, value : T) = error("ignore") + override def apply(i : Int) : T = { sys.error("ignore") } + override def update(i : Int, value : T) = sys.error("ignore") override def empty = new SparseArray[T] - def -=(ind: Int) = error("ignore") - def +=(kv: (Int,T)) = error("ignore") - override final def iterator = error("ignore") -} \ No newline at end of file + def -=(ind: Int) = sys.error("ignore") + def +=(kv: (Int,T)) = sys.error("ignore") + override final def iterator = sys.error("ignore") +} diff --git a/test/files/pos/spec-sparsearray-old.scala b/test/files/pos/spec-sparsearray-old.scala index ea7710a785..e10dabd542 100644 --- a/test/files/pos/spec-sparsearray-old.scala +++ b/test/files/pos/spec-sparsearray-old.scala @@ -3,7 +3,7 @@ import scala.collection.mutable.MapLike class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] { override def get(x: Int) = { val ind = findOffset(x) - if(ind < 0) None else Some(error("ignore")) + if(ind < 0) None else Some(sys.error("ignore")) } /** @@ -12,13 +12,13 @@ class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable. * negative and can be converted into an insertion point with -(rv+1). */ private def findOffset(i : Int) : Int = { - error("impl doesn't matter") + sys.error("impl doesn't matter") } - override def apply(i : Int) : T = { error("ignore") } - override def update(i : Int, value : T) = error("ignore") + override def apply(i : Int) : T = { sys.error("ignore") } + override def update(i : Int, value : T) = sys.error("ignore") override def empty = new SparseArray[T] - def -=(ind: Int) = error("ignore") - def +=(kv: (Int,T)) = error("ignore") - override final def iterator = error("ignore") + def -=(ind: Int) = sys.error("ignore") + def +=(kv: (Int,T)) = sys.error("ignore") + override final def iterator = sys.error("ignore") } diff --git a/test/files/pos/spec-traits.scala b/test/files/pos/spec-traits.scala index c6cc2921b7..074f6c3d3c 100644 --- a/test/files/pos/spec-traits.scala +++ b/test/files/pos/spec-traits.scala @@ -11,19 +11,19 @@ class Lazy { // issue 3307 class Bug3307 { - def f[Z](block: String => Z) { - block("abc") + def f[Z](block: String => Z) { + block("abc") } - - ({ () => - f { implicit x => println(x) } })() + + ({ () => + f { implicit x => println(x) } })() } // issue 3301 trait T[X] class Bug3301 { - def t[A]: T[A] = error("stub") + def t[A]: T[A] = sys.error("stub") () => { type X = Int diff --git a/test/files/pos/t0031.scala b/test/files/pos/t0031.scala index ec6eae9282..d4050c8184 100644 --- a/test/files/pos/t0031.scala +++ b/test/files/pos/t0031.scala @@ -4,17 +4,17 @@ object Main { def ensure(postcondition: a => Boolean): a } - def require[a](precondition: => Boolean)(command: => a): Ensure[a] = + def require[a](precondition: => Boolean)(command: => a): Ensure[a] = if (precondition) new Ensure[a] { def ensure(postcondition: a => Boolean): a = { val result = command; if (postcondition(result)) result - else error("Assertion error") + else sys.error("Assertion error") } } else - error("Assertion error"); + sys.error("Assertion error"); def arb[a](s: List[a]) = require (! 
s.isEmpty) { diff --git a/test/files/pos/t0227.scala b/test/files/pos/t0227.scala index 8650350c4a..806b20d409 100644 --- a/test/files/pos/t0227.scala +++ b/test/files/pos/t0227.scala @@ -5,7 +5,7 @@ final class Settings { abstract class Factory { type libraryType <: Base - final def apply(settings: Settings): libraryType = error("bla") + final def apply(settings: Settings): libraryType = sys.error("bla") } abstract class Base { @@ -19,7 +19,7 @@ class SA(val settings: Settings) extends Base { SD ) ::: settings.f( SC - ) + ) } object SC extends Factory { diff --git a/test/files/pos/t2331.scala b/test/files/pos/t2331.scala index 9a15b5c2a9..a7f80ac98e 100644 --- a/test/files/pos/t2331.scala +++ b/test/files/pos/t2331.scala @@ -4,8 +4,8 @@ trait C { object Test { val o /*: C --> no crash*/ = new C { - def m[T]: Nothing /*: T --> no crash*/ = error("omitted") + def m[T]: Nothing /*: T --> no crash*/ = sys.error("omitted") } o.m[Nothing] -} \ No newline at end of file +} diff --git a/test/files/pos/t2421.scala b/test/files/pos/t2421.scala index 26e485c160..2544a1cb36 100644 --- a/test/files/pos/t2421.scala +++ b/test/files/pos/t2421.scala @@ -1,14 +1,14 @@ object Test { abstract class <~<[-From, +To] extends (From => To) - implicit def trivial[A]: A <~< A = error("") + implicit def trivial[A]: A <~< A = sys.error("") trait Forcible[T] - implicit val forcibleInt: (Int <~< Forcible[Int]) = error("") + implicit val forcibleInt: (Int <~< Forcible[Int]) = sys.error("") - def headProxy[P <: Forcible[Int]](implicit w: Int <~< P): P = error("") - - headProxy - // trivial[Int] should not be considered a valid implicit, since w would have type Int <~< Int, + def headProxy[P <: Forcible[Int]](implicit w: Int <~< P): P = sys.error("") + + headProxy + // trivial[Int] should not be considered a valid implicit, since w would have type Int <~< Int, // and headProxy's type parameter P cannot be instantiated to Int -} \ No newline at end of file +} diff --git a/test/files/pos/t2429.scala b/test/files/pos/t2429.scala index 3ea3f9e2a5..550681b6a2 100755 --- a/test/files/pos/t2429.scala +++ b/test/files/pos/t2429.scala @@ -1,10 +1,10 @@ object Msg { trait T - + trait TSeq - + object TSeq { - implicit def fromSeq(s: Seq[T]): TSeq = error("stub") + implicit def fromSeq(s: Seq[T]): TSeq = sys.error("stub") } def render { @@ -12,7 +12,7 @@ object Msg { case (a, b) => { a match { case _ => b match { - case _ => error("stub") + case _ => sys.error("stub") } } } @@ -20,6 +20,6 @@ object Msg { } } object Oops { - implicit def someImplicit(s: Seq[_]): String = error("stub") + implicit def someImplicit(s: Seq[_]): String = sys.error("stub") def item: String = Nil map { case e: Any => e } } diff --git a/test/files/pos/t2797.scala b/test/files/pos/t2797.scala index 4323664e91..cf579d8de4 100644 --- a/test/files/pos/t2797.scala +++ b/test/files/pos/t2797.scala @@ -1,9 +1,9 @@ class MyVector[A] { - def map[B](f: A => B): MyVector[B] = error("") + def map[B](f: A => B): MyVector[B] = sys.error("") } object Test { def unzip[B, C](_this: MyVector[(B, C)]): (MyVector[B], MyVector[C]) = { (_this.map{ bc => bc._1 }, _this.map{ bc => bc._2 }) } -} \ No newline at end of file +} diff --git a/test/files/pos/t3152.scala b/test/files/pos/t3152.scala index a20428dbee..3d1dcbd6f0 100644 --- a/test/files/pos/t3152.scala +++ b/test/files/pos/t3152.scala @@ -1,13 +1,13 @@ trait Applicative[M[_]] sealed trait MA[M[_], A] { - def sequence[N[_], B](implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = error("stub") - // def sequence3[N[_], 
B]()(implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = error("stub") + def sequence[N[_], B](implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = sys.error("stub") + // def sequence3[N[_], B]()(implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = sys.error("stub") } object test { - implicit def ListMA[A](l: List[A]): MA[List, A] = error("stub") - implicit val ao: Applicative[Option] = error("stub") + implicit def ListMA[A](l: List[A]): MA[List, A] = sys.error("stub") + implicit val ao: Applicative[Option] = sys.error("stub") /* This compiles OK: (Nil: List[Option[Int]]).sequence3(): Option[List[Int]] @@ -17,4 +17,4 @@ object test { // !!! No line number is reported with the error (Nil: List[Option[Int]]).sequence: Option[List[Int]] (List[Option[Int]]()).sequence: Option[List[Int]] -} \ No newline at end of file +} diff --git a/test/files/pos/t3252.scala b/test/files/pos/t3252.scala index 4b8e862714..3ecc1e7cef 100644 --- a/test/files/pos/t3252.scala +++ b/test/files/pos/t3252.scala @@ -8,8 +8,8 @@ class A { } } - private def g[T](block : => T) = error("") + private def g[T](block : => T) = sys.error("") } object B { - def h(block : => Unit) : Nothing = error("") -} \ No newline at end of file + def h(block : => Unit) : Nothing = sys.error("") +} diff --git a/test/files/pos/t3349/Test.scala b/test/files/pos/t3349/Test.scala index 8174e4c4f8..595beadc20 100644 --- a/test/files/pos/t3349/Test.scala +++ b/test/files/pos/t3349/Test.scala @@ -1,5 +1,5 @@ object Test { val label = "name" - val table: Table = error("") + val table: Table = sys.error("") table.addColumn( label, label.getClass ) -} \ No newline at end of file +} diff --git a/test/files/pos/t3363-new.scala b/test/files/pos/t3363-new.scala index e609f4d55f..fef2bf8a72 100644 --- a/test/files/pos/t3363-new.scala +++ b/test/files/pos/t3363-new.scala @@ -9,7 +9,7 @@ object TestCase { //if you inherit from MapOps[T] instead of MapOps[F] then code compiles fine implicit def map2ops[T,F](fs: Map[T,F]) = new MapOps[F] { //if you remove this line, then code compiles - lazy val m: TypeTag[T] = error("just something to make it compile") + lazy val m: TypeTag[T] = sys.error("just something to make it compile") def is(xs: List[T]) = List(xs) } @@ -17,4 +17,4 @@ object TestCase { println(Map(1 -> "2") is List(2)) } - } \ No newline at end of file + } diff --git a/test/files/pos/t3363-old.scala b/test/files/pos/t3363-old.scala index bae54084ea..c08cf2a6b6 100644 --- a/test/files/pos/t3363-old.scala +++ b/test/files/pos/t3363-old.scala @@ -7,7 +7,7 @@ object TestCase { //if you inherit from MapOps[T] instead of MapOps[F] then code compiles fine implicit def map2ops[T,F](fs: Map[T,F]) = new MapOps[F] { //if you remove this line, then code compiles - lazy val m: Manifest[T] = error("just something to make it compile") + lazy val m: Manifest[T] = sys.error("just something to make it compile") def is(xs: List[T]) = List(xs) } diff --git a/test/files/pos/t3440.scala b/test/files/pos/t3440.scala index 46bba1b207..0e7ca6b70f 100644 --- a/test/files/pos/t3440.scala +++ b/test/files/pos/t3440.scala @@ -4,15 +4,15 @@ object test { } case object Int8 extends SampleFormat1 { - def readerFactory = error("") + def readerFactory = sys.error("") } case object Int16 extends SampleFormat1 { - def readerFactory = error("") + def readerFactory = sys.error("") } - + (new {}: Any) match { case 8 => Int8 case 16 => Int16 - case _ => error("") + case _ => sys.error("") } -} \ No newline at end of file +} diff --git a/test/files/pos/t3477.scala 
b/test/files/pos/t3477.scala index 660aa55736..6a94baa6c8 100644 --- a/test/files/pos/t3477.scala +++ b/test/files/pos/t3477.scala @@ -1,7 +1,7 @@ class J3 { - def f[K, K1 >: K, V](x: Map[K1, V]): Map[K, V] = error("") + def f[K, K1 >: K, V](x: Map[K1, V]): Map[K, V] = sys.error("") } object Test { (new J3).f(Map[Int, Int]()) -} \ No newline at end of file +} diff --git a/test/files/pos/t3731.scala b/test/files/pos/t3731.scala index 75938540c0..7a3cbec0f4 100644 --- a/test/files/pos/t3731.scala +++ b/test/files/pos/t3731.scala @@ -1,8 +1,8 @@ object Test{ trait ZW[S]{type T} - def ZipWith[S, M <: ZW[S]]: M#T = error("ZW") + def ZipWith[S, M <: ZW[S]]: M#T = sys.error("ZW") - // meh must be parameterised to force an asSeenFrom that + // meh must be parameterised to force an asSeenFrom that // duplicates the refinement in the TR's pre without updating its sym def meh[A] = ZipWith[A, ZW[A]{type T=Stream[A]}] diff --git a/test/files/pos/t3883.scala b/test/files/pos/t3883.scala index adde0526b2..1b62c0c6d6 100644 --- a/test/files/pos/t3883.scala +++ b/test/files/pos/t3883.scala @@ -1,14 +1,14 @@ // need to test both orders object A1 { - implicit def i: Equiv[Boolean] = error("") - implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = error("") + implicit def i: Equiv[Boolean] = sys.error("") + implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = sys.error("") implicitly[Equiv[Boolean]] } object A2 { - implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = error("") - implicit def i: Equiv[Boolean] = error("") + implicit def div[T, A](implicit f: T => A, eq: Equiv[A]): Equiv[T] = sys.error("") + implicit def i: Equiv[Boolean] = sys.error("") implicitly[Equiv[Boolean]] } diff --git a/test/files/pos/t3927.scala b/test/files/pos/t3927.scala index eb4c4b3be5..f5869c55d5 100644 --- a/test/files/pos/t3927.scala +++ b/test/files/pos/t3927.scala @@ -1,6 +1,6 @@ object A { def x { - implicit lazy val e: Equiv[Int] = error("") + implicit lazy val e: Equiv[Int] = sys.error("") implicitly[Equiv[Int]] } -} +} diff --git a/test/files/pos/tcpoly_boundedmonad.scala b/test/files/pos/tcpoly_boundedmonad.scala index 24a911769b..8c605dc7b6 100644 --- a/test/files/pos/tcpoly_boundedmonad.scala +++ b/test/files/pos/tcpoly_boundedmonad.scala @@ -1,19 +1,19 @@ trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] { - def map[S <: Bound[S]](f: T => S): MyType[S] + def map[S <: Bound[S]](f: T => S): MyType[S] - def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], + def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]] - (f: T => Result[S]): Result[S] + (f: T => Result[S]): Result[S] def filter(p: T => Boolean): MyType[T] } class Set[T <: Ordered[T]] extends Monad[T, Set, Ordered] { - def map[S <: Ordered[S]](f: T => S): Set[S] = error("TODO") - - def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], + def map[S <: Ordered[S]](f: T => S): Set[S] = sys.error("TODO") + + def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]] - (f: T => Result[S]): Result[S] = error("TODO") - - def filter(p: T => Boolean): Set[T] = error("TODO") + (f: T => Result[S]): Result[S] = sys.error("TODO") + + def filter(p: T => Boolean): Set[T] = sys.error("TODO") } diff --git a/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala b/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala index 97594d506d..f719972a17 100644 --- 
a/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala +++ b/test/files/pos/tcpoly_infer_explicit_tuple_wrapper.scala @@ -2,15 +2,15 @@ import scala.collection.generic.GenericTraversableTemplate import scala.collection.Iterable class IterableOps[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) { - def unzip: (CC[A1], CC[A2]) = error("foo") + def unzip: (CC[A1], CC[A2]) = sys.error("foo") } object Test { - implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) + implicit def tupleOfIterableWrapper[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) = new IterableOps[CC, A1, A2](tuple) - + val t = (List(1, 2, 3), List(6, 5, 4)) tupleOfIterableWrapper(t) unzip -} \ No newline at end of file +} diff --git a/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala b/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala index 3073b298de..19243505b4 100644 --- a/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala +++ b/test/files/pos/tcpoly_infer_implicit_tuple_wrapper.scala @@ -2,7 +2,7 @@ import scala.collection.generic.GenericTraversableTemplate import scala.collection.Iterable class IterableOps[CC[+B] <: Iterable[B] with GenericTraversableTemplate[B, CC], A1, A2](tuple: (CC[A1], Iterable[A2])) { - def unzip: (CC[A1], CC[A2]) = error("foo") + def unzip: (CC[A1], CC[A2]) = sys.error("foo") } object Test { @@ -15,4 +15,4 @@ object Test { tupleOfIterableWrapper(t) unzip t unzip -} \ No newline at end of file +} diff --git a/test/files/pos/tcpoly_overloaded.scala b/test/files/pos/tcpoly_overloaded.scala index 4240074d85..4f6334685b 100644 --- a/test/files/pos/tcpoly_overloaded.scala +++ b/test/files/pos/tcpoly_overloaded.scala @@ -1,10 +1,10 @@ trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] { - def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], + def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]] - (f: T => Result[S]): Result[S] - def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], + (f: T => Result[S]): Result[S] + def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]] - (f: T => Result[S], foo: String): Result[S] + (f: T => Result[S], foo: String): Result[S] def flatMap[S <: Bound[S]] (f: T => MyType[S], foo: Int): MyType[S] } @@ -12,14 +12,14 @@ trait Monad[T <: Bound[T], MyType[x <: Bound[x]], Bound[_]] { trait Test { def moo: MList[Int] class MList[T](el: T) extends Monad[T, List, Any] { - def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], + def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]] - (f: T => Result[S]): Result[S] = error("foo") - def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], + (f: T => Result[S]): Result[S] = sys.error("foo") + def flatMap[S <: RBound[S], RContainer[x <: RBound[x]], RBound[_], Result[x <: RBound[x]] <: Monad[x, RContainer, RBound]] - (f: T => Result[S], foo: String): Result[S] = error("foo") + (f: T => Result[S], foo: String): Result[S] = sys.error("foo") def flatMap[S] - (f: T => List[S], foo: Int): List[S] = error("foo") + (f: T => List[S], foo: Int): List[S] = sys.error("foo") } val l: MList[String] = moo.flatMap[String, List, Any, MList]((x: Int) => new MList("String")) } diff --git 
a/test/files/pos/tcpoly_subst.scala b/test/files/pos/tcpoly_subst.scala index f8ddb9a715..88cc4d0610 100644 --- a/test/files/pos/tcpoly_subst.scala +++ b/test/files/pos/tcpoly_subst.scala @@ -1,4 +1,4 @@ object test { - def make[m[x], b]: m[b] = error("foo") + def make[m[x], b]: m[b] = sys.error("foo") val lst: List[Int] = make[List, Int] } diff --git a/test/files/pos/tcpoly_variance_pos.scala b/test/files/pos/tcpoly_variance_pos.scala index b641716d50..b63abce202 100644 --- a/test/files/pos/tcpoly_variance_pos.scala +++ b/test/files/pos/tcpoly_variance_pos.scala @@ -1,7 +1,7 @@ class A[m[+x]] { - def str: m[Object] = error("foo") + def str: m[Object] = sys.error("foo") } class B[m[+x]] extends A[m] { - override def str: m[String] = error("foo") + override def str: m[String] = sys.error("foo") } diff --git a/test/files/pos/tcpoly_wildcards.scala b/test/files/pos/tcpoly_wildcards.scala index d3bb86b591..f6d1b666d0 100644 --- a/test/files/pos/tcpoly_wildcards.scala +++ b/test/files/pos/tcpoly_wildcards.scala @@ -1,3 +1,3 @@ trait test[b[_,_]] { - def moo[a[_, _]] = error("a") + def moo[a[_, _]] = sys.error("a") } diff --git a/test/files/pos/typealias_dubious.scala b/test/files/pos/typealias_dubious.scala index 587453a037..cdba1a64d0 100644 --- a/test/files/pos/typealias_dubious.scala +++ b/test/files/pos/typealias_dubious.scala @@ -1,15 +1,15 @@ class MailBox { - //class Message + //class Message type Message = AnyRef -} - +} + abstract class Actor { private val in = new MailBox - def send(msg: in.Message) = error("foo") + def send(msg: in.Message) = sys.error("foo") - def unstable: Actor = error("foo") + def unstable: Actor = sys.error("foo") - def dubiousSend(msg: MailBox#Message) = + def dubiousSend(msg: MailBox#Message) = unstable.send(msg) // in.Message becomes unstable.Message, but that's ok since Message is a concrete type member -} +} diff --git a/test/files/pos/virtpatmat_binding_opt.scala b/test/files/pos/virtpatmat_binding_opt.scala index 962e3d7dbe..8ec931fe78 100644 --- a/test/files/pos/virtpatmat_binding_opt.scala +++ b/test/files/pos/virtpatmat_binding_opt.scala @@ -4,8 +4,8 @@ class Test { case that: Test2 => println(that) this - case _ => error("meh") + case _ => sys.error("meh") } } -class Test2 extends Test \ No newline at end of file +class Test2 extends Test diff --git a/test/files/presentation/callcc-interpreter.check b/test/files/presentation/callcc-interpreter.check index dd3ee68e45..af0154fe60 100644 --- a/test/files/presentation/callcc-interpreter.check +++ b/test/files/presentation/callcc-interpreter.check @@ -59,7 +59,8 @@ retrieved 63 members [accessible: true] `type NamecallccInterpreter.Name` [accessible: true] `value __leftOfArrowcallccInterpreter.type` [accessible: true] `value __resultOfEnsuringcallccInterpreter.type` -[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormatcallccInterpreter.type` +[accessible: true] `value __thingToAddcallccInterpreter.type` [accessible: true] `value term0callccInterpreter.App` [accessible: true] `value term1callccInterpreter.App` [accessible: true] `value term2callccInterpreter.Add` diff --git a/test/files/presentation/ide-bug-1000349.check b/test/files/presentation/ide-bug-1000349.check index 7eeaddc054..0040300083 100644 --- a/test/files/presentation/ide-bug-1000349.check +++ b/test/files/presentation/ide-bug-1000349.check @@ -35,5 +35,6 @@ retrieved 36 members [accessible: true] `method →[B](y: B)(Foo, B)` [accessible: true] `value __leftOfArrowFoo` [accessible: true] `value __resultOfEnsuringFoo` 
-[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormatFoo` +[accessible: true] `value __thingToAddFoo` ================================================================================ diff --git a/test/files/presentation/ide-bug-1000475.check b/test/files/presentation/ide-bug-1000475.check index 01de4608ca..7866e4af15 100644 --- a/test/files/presentation/ide-bug-1000475.check +++ b/test/files/presentation/ide-bug-1000475.check @@ -32,7 +32,8 @@ retrieved 35 members [accessible: true] `method →[B](y: B)(Object, B)` [accessible: true] `value __leftOfArrowObject` [accessible: true] `value __resultOfEnsuringObject` -[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormatObject` +[accessible: true] `value __thingToAddObject` [accessible: false] `method clone()Object` [accessible: false] `method finalize()Unit` ================================================================================ @@ -69,7 +70,8 @@ retrieved 35 members [accessible: true] `method →[B](y: B)(Object, B)` [accessible: true] `value __leftOfArrowObject` [accessible: true] `value __resultOfEnsuringObject` -[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormatObject` +[accessible: true] `value __thingToAddObject` [accessible: false] `method clone()Object` [accessible: false] `method finalize()Unit` ================================================================================ @@ -106,7 +108,8 @@ retrieved 35 members [accessible: true] `method →[B](y: B)(Object, B)` [accessible: true] `value __leftOfArrowObject` [accessible: true] `value __resultOfEnsuringObject` -[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormatObject` +[accessible: true] `value __thingToAddObject` [accessible: false] `method clone()Object` [accessible: false] `method finalize()Unit` ================================================================================ diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check index 7fa550179f..18ecd4b536 100644 --- a/test/files/presentation/ide-bug-1000531.check +++ b/test/files/presentation/ide-bug-1000531.check @@ -120,7 +120,8 @@ retrieved 124 members [accessible: true] `method →[B](y: B)(java.util.Iterator[B], B)` [accessible: true] `value __leftOfArrowjava.util.Iterator[B]` [accessible: true] `value __resultOfEnsuringjava.util.Iterator[B]` -[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormatjava.util.Iterator[B]` +[accessible: true] `value __thingToAddjava.util.Iterator[B]` [accessible: false] `method clone()Object` [accessible: false] `method finalize()Unit` [accessible: false] `method reversed=> List[B]` diff --git a/test/files/presentation/implicit-member.check b/test/files/presentation/implicit-member.check index 7b4f792bf3..6a23facc78 100644 --- a/test/files/presentation/implicit-member.check +++ b/test/files/presentation/implicit-member.check @@ -36,6 +36,7 @@ retrieved 38 members [accessible: true] `method →[B](y: B)(Implicit.type, B)` [accessible: true] `value __leftOfArrowImplicit.type` [accessible: true] `value __resultOfEnsuringImplicit.type` -[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormatImplicit.type` +[accessible: true] `value __thingToAddImplicit.type` [accessible: true] `value xImplicit.type` ================================================================================ diff --git a/test/files/presentation/ping-pong.check b/test/files/presentation/ping-pong.check index 
c85f6cc21a..c7a5d0b5d1 100644 --- a/test/files/presentation/ping-pong.check +++ b/test/files/presentation/ping-pong.check @@ -33,8 +33,9 @@ retrieved 39 members [accessible: true] `method →[B](y: B)(Pong, B)` [accessible: true] `value __leftOfArrowPong` [accessible: true] `value __resultOfEnsuringPong` +[accessible: true] `value __stringToFormatPong` +[accessible: true] `value __thingToAddPong` [accessible: true] `value nameString` -[accessible: true] `value selfAny` [accessible: false] `method clone()Object` [accessible: false] `method finalize()Unit` [accessible: false] `value pingPing` @@ -75,8 +76,9 @@ retrieved 39 members [accessible: true] `method →[B](y: B)(Ping, B)` [accessible: true] `value __leftOfArrowPing` [accessible: true] `value __resultOfEnsuringPing` +[accessible: true] `value __stringToFormatPing` +[accessible: true] `value __thingToAddPing` [accessible: true] `value pongPong` -[accessible: true] `value selfAny` [accessible: false] `method clone()Object` [accessible: false] `method finalize()Unit` ================================================================================ diff --git a/test/files/presentation/t5708.check b/test/files/presentation/t5708.check index 572f404cf4..4fc7a56426 100644 --- a/test/files/presentation/t5708.check +++ b/test/files/presentation/t5708.check @@ -35,8 +35,9 @@ retrieved 43 members [accessible: true] `value CONST_STRINGString("constant")` [accessible: true] `value __leftOfArrowtest.Compat.type` [accessible: true] `value __resultOfEnsuringtest.Compat.type` +[accessible: true] `value __stringToFormattest.Compat.type` +[accessible: true] `value __thingToAddtest.Compat.type` [accessible: true] `value pkgPrivateVString` -[accessible: true] `value selfAny` [accessible: false] `method clone()Object` [accessible: false] `method finalize()Unit` [accessible: false] `method privateM=> String` diff --git a/test/files/presentation/visibility.check b/test/files/presentation/visibility.check index 87b4463bf7..e9b349ac06 100644 --- a/test/files/presentation/visibility.check +++ b/test/files/presentation/visibility.check @@ -39,7 +39,8 @@ retrieved 41 members [accessible: true] `method →[B](y: B)(accessibility.Foo, B)` [accessible: true] `value __leftOfArrowaccessibility.Foo` [accessible: true] `value __resultOfEnsuringaccessibility.Foo` -[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormataccessibility.Foo` +[accessible: true] `value __thingToAddaccessibility.Foo` [accessible: false] `method secretPrivateThis()Unit` ================================================================================ @@ -83,7 +84,8 @@ retrieved 41 members [accessible: true] `method →[B](y: B)(accessibility.Foo, B)` [accessible: true] `value __leftOfArrowaccessibility.Foo` [accessible: true] `value __resultOfEnsuringaccessibility.Foo` -[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormataccessibility.Foo` +[accessible: true] `value __thingToAddaccessibility.Foo` ================================================================================ askTypeCompletion at Completions.scala(22,11) @@ -125,7 +127,8 @@ retrieved 41 members [accessible: true] `method →[B](y: B)(accessibility.AccessibilityChecks, B)` [accessible: true] `value __leftOfArrowaccessibility.AccessibilityChecks` [accessible: true] `value __resultOfEnsuringaccessibility.AccessibilityChecks` -[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormataccessibility.AccessibilityChecks` +[accessible: true] `value 
__thingToAddaccessibility.AccessibilityChecks` [accessible: false] `method secretPrivate()Unit` ================================================================================ @@ -164,7 +167,8 @@ retrieved 41 members [accessible: true] `method →[B](y: B)(accessibility.Foo, B)` [accessible: true] `value __leftOfArrowaccessibility.Foo` [accessible: true] `value __resultOfEnsuringaccessibility.Foo` -[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormataccessibility.Foo` +[accessible: true] `value __thingToAddaccessibility.Foo` [accessible: false] `method clone()Object` [accessible: false] `method finalize()Unit` [accessible: false] `method secretPrivate()Unit` @@ -206,7 +210,8 @@ retrieved 41 members [accessible: true] `method →[B](y: B)(accessibility.Foo, B)` [accessible: true] `value __leftOfArrowaccessibility.Foo` [accessible: true] `value __resultOfEnsuringaccessibility.Foo` -[accessible: true] `value selfAny` +[accessible: true] `value __stringToFormataccessibility.Foo` +[accessible: true] `value __thingToAddaccessibility.Foo` [accessible: false] `method clone()Object` [accessible: false] `method finalize()Unit` [accessible: false] `method secretPrivate()Unit` diff --git a/test/files/run/Course-2002-07.scala b/test/files/run/Course-2002-07.scala index 7848ae3e8e..055ff74d81 100644 --- a/test/files/run/Course-2002-07.scala +++ b/test/files/run/Course-2002-07.scala @@ -16,13 +16,13 @@ object M0 { def isNumber: Boolean = true; def isSum: Boolean = false; def numValue: Int = n; - def leftOp: Expr = error("Number.leftOp"); - def rightOp: Expr = error("Number.rightOp"); + def leftOp: Expr = sys.error("Number.leftOp"); + def rightOp: Expr = sys.error("Number.rightOp"); } class Sum(e1: Expr, e2: Expr) extends Expr { def isNumber: Boolean = false; def isSum: Boolean = true; - def numValue: Int = error("Sum.numValue"); + def numValue: Int = sys.error("Sum.numValue"); def leftOp: Expr = e1; def rightOp: Expr = e2; } @@ -30,7 +30,7 @@ object M0 { class Prod(e1: Expr, e2: Expr) extends Expr { def isNumber: Boolean = false; def isSum: Boolean = false; - def numValue: Int = error("Prod.numValue"); + def numValue: Int = sys.error("Prod.numValue"); def leftOp: Expr = e1; def rightOp: Expr = e2; } @@ -38,15 +38,15 @@ object M0 { class Var(x: String) extends Expr { def isNumber: Boolean = false; def isSum: Boolean = false; - def numValue: Int = error("Var.numValue"); - def leftOp: Expr = error("Var.leftOp"); - def rightOp: Expr = error("Var.rightOp"); + def numValue: Int = sys.error("Var.numValue"); + def leftOp: Expr = sys.error("Var.leftOp"); + def rightOp: Expr = sys.error("Var.rightOp"); } def eval(e: Expr): Int = { if (e.isNumber) e.numValue else if (e.isSum) eval(e.leftOp) + eval(e.rightOp) - else error("unknown expression") + else sys.error("unknown expression") } def test = { @@ -375,7 +375,7 @@ object M9 { object MA { def lookup[k,v](xs: List[Pair[k,v]], k: k): v = xs match { - case List() => error("no value for " + k) + case List() => sys.error("no value for " + k) case Pair(k1,v1) :: xs1 => if (k1 == k) v1 else lookup(xs1, k) } @@ -410,7 +410,7 @@ object MA { def eval(e: Expr): Int = e match { case Number(n) => n - case Var(_) => error("cannot evaluate variable") + case Var(_) => sys.error("cannot evaluate variable") case Sum(e1, e2) => eval(e1) + eval(e2) case Prod(e1, e2) => eval(e1) * eval(e2) } @@ -453,7 +453,7 @@ object Utils { if (y == 1) x else if (y % 2 == 0) power0(x*x,y/2) else x*power0(x, y-1); def power(x: Int, y: Int): Int = (x,y) match { - case Pair(0,0) 
=> error("power(0,0)") + case Pair(0,0) => sys.error("power(0,0)") case Pair(0,_) => 0 case Pair(1,_) => 1 case Pair(_,0) => 1 @@ -463,7 +463,7 @@ object Utils { } def lookup(entries: List[(String,Int)], key: String): Int = entries match { - case List() => error("no value for " + key) + case List() => sys.error("no value for " + key) case Pair(k,v) :: _ if (k == key) => v case _ :: rest => lookup(rest, key) } diff --git a/test/files/run/Course-2002-08.scala b/test/files/run/Course-2002-08.scala index 85a83e0146..38b8363661 100644 --- a/test/files/run/Course-2002-08.scala +++ b/test/files/run/Course-2002-08.scala @@ -33,7 +33,7 @@ object M1 { if (0 < amount && amount <= balance) { balance = balance - amount; balance - } else error("insufficient funds"); + } else sys.error("insufficient funds"); } def test0 = { @@ -520,7 +520,7 @@ abstract class CircuitSimulator() extends BasicCircuitSimulator() { val w1 = new Wire(); val w2 = new Wire(); val w3 = new Wire(); - + andGate(in, ctrl(1), w3); andGate(in, ctrl(1), w2); andGate(in, ctrlN(1), w1); diff --git a/test/files/run/Course-2002-09.scala b/test/files/run/Course-2002-09.scala index 384a91efd8..87f91111d8 100644 --- a/test/files/run/Course-2002-09.scala +++ b/test/files/run/Course-2002-09.scala @@ -8,8 +8,8 @@ trait Constraint { } object NoConstraint extends Constraint { - def newValue: Unit = error("NoConstraint.newValue"); - def dropValue: Unit = error("NoConstraint.dropValue"); + def newValue: Unit = sys.error("NoConstraint.newValue"); + def dropValue: Unit = sys.error("NoConstraint.dropValue"); } class Adder(a1: Quantity,a2: Quantity,sum: Quantity) extends Constraint { @@ -47,7 +47,7 @@ class Multiplier(m1: Quantity, m2: Quantity, prod: Quantity) class Squarer(square: Quantity, root: Quantity) extends Constraint { def newValue: Unit = Pair(square.getValue, root.getValue) match { - case Pair(Some(x), _ )if (x < 0) => error("Square of negative number") + case Pair(Some(x), _ )if (x < 0) => sys.error("Square of negative number") case Pair(Some(x), _ ) => root.setValue(Math.sqrt(x), this) case Pair(_ , Some(x)) => square.setValue(x*x, this) case _ => @@ -72,8 +72,8 @@ class Eq(a: Quantity, b: Quantity) extends Constraint { } class Constant(q: Quantity, v: Double) extends Constraint { - def newValue: Unit = error("Constant.newValue"); - def dropValue: Unit = error("Constant.dropValue"); + def newValue: Unit = sys.error("Constant.newValue"); + def dropValue: Unit = sys.error("Constant.dropValue"); q connect this; q.setValue(v, this); } @@ -100,7 +100,7 @@ class Quantity() { def setValue(v: Double, setter: Constraint) = value match { case Some(v1) => - if (v != v1) error("Error! contradiction: " + v + " and " + v1); + if (v != v1) sys.error("Error! 
contradiction: " + v + " and " + v1); case None => informant = setter; value = Some(v); for (c <- constraints; if !(c == informant)) { diff --git a/test/files/run/Course-2002-13.scala b/test/files/run/Course-2002-13.scala index c266af8c32..4bd3614fb0 100644 --- a/test/files/run/Course-2002-13.scala +++ b/test/files/run/Course-2002-13.scala @@ -42,7 +42,7 @@ object Terms { } case class Binding(name: String, term: Term) { - term match { case Var(n) if (name == n) => error("bad binding") case _ => () } + term match { case Var(n) if (name == n) => sys.error("bad binding") case _ => () } override def toString() = name + " = " + term; } @@ -168,7 +168,7 @@ class Parser(s: String) { var token: String = it.next; - def syntaxError(msg: String): Unit = error(msg + ", but " + token + " found"); + def syntaxError(msg: String): Unit = sys.error(msg + ", but " + token + " found"); def rep[a](p: => a): List[a] = { val t = p; diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check index 0788086459..297bd36bae 100644 --- a/test/files/run/analyzerPlugins.check +++ b/test/files/run/analyzerPlugins.check @@ -19,7 +19,7 @@ canAdaptAnnotations(Trees$Typed, Any) [1] canAdaptAnnotations(Trees$Typed, Int) [1] lub(List(Int @testAnn, Int)) [1] pluginsPt(?, Trees$Annotated) [7] -pluginsPt(?, Trees$Apply) [8] +pluginsPt(?, Trees$Apply) [9] pluginsPt(?, Trees$ApplyImplicitView) [2] pluginsPt(?, Trees$Assign) [7] pluginsPt(?, Trees$Block) [4] @@ -31,13 +31,13 @@ pluginsPt(?, Trees$Literal) [16] pluginsPt(?, Trees$New) [5] pluginsPt(?, Trees$PackageDef) [1] pluginsPt(?, Trees$Return) [1] -pluginsPt(?, Trees$Select) [51] +pluginsPt(?, Trees$Select) [52] pluginsPt(?, Trees$Super) [2] pluginsPt(?, Trees$This) [20] -pluginsPt(?, Trees$TypeApply) [3] +pluginsPt(?, Trees$TypeApply) [4] pluginsPt(?, Trees$TypeBoundsTree) [2] pluginsPt(?, Trees$TypeDef) [1] -pluginsPt(?, Trees$TypeTree) [38] +pluginsPt(?, Trees$TypeTree) [39] pluginsPt(?, Trees$Typed) [1] pluginsPt(?, Trees$ValDef) [21] pluginsPt(Any, Trees$Literal) [2] @@ -98,6 +98,7 @@ pluginsTyped(()String, Trees$Ident) [1] pluginsTyped(()String, Trees$TypeApply) [1] pluginsTyped(()scala.annotation.Annotation, Trees$Select) [1] pluginsTyped(()testAnn, Trees$Select) [10] +pluginsTyped(()type, Trees$TypeApply) [1] pluginsTyped((str: String)A (param: Double)A, Trees$Select) [1] pluginsTyped((x$1: Any)Boolean (x: Double)Boolean (x: Float)Boolean (x: Long)Boolean (x: Int)Boolean (x: Char)Boolean (x: Short)Boolean (x: Byte)Boolean, Trees$Select) [1] pluginsTyped((x$1: Int)Unit, Trees$Select) [1] @@ -174,7 +175,7 @@ pluginsTyped(Unit, Trees$Literal) [5] pluginsTyped(Unit, Trees$TypeTree) [1] pluginsTyped([A](xs: A*)List[A], Trees$Select) [1] pluginsTyped([T <: Int]=> Int, Trees$Select) [1] -pluginsTyped([T0]()T0, Trees$Select) [1] +pluginsTyped([T0]()T0, Trees$Select) [2] pluginsTyped([T](xs: Array[T])scala.collection.mutable.WrappedArray[T], Trees$Select) [1] pluginsTyped(annotation.type, Trees$Select) [4] pluginsTyped(math.type, Trees$Select) [9] @@ -193,5 +194,7 @@ pluginsTyped(testAnn, Trees$New) [5] pluginsTyped(testAnn, Trees$This) [1] pluginsTyped(testAnn, Trees$TypeTree) [2] pluginsTyped(testAnn.super.type, Trees$Super) [1] +pluginsTyped(type, Trees$Apply) [1] pluginsTyped(type, Trees$Select) [1] +pluginsTyped(type, Trees$TypeTree) [1] pluginsTypedReturn(return f, String) [1] diff --git a/test/files/run/array-charSeq.scala b/test/files/run/array-charSeq.scala index f7d0586f03..53796bb9d5 100644 --- a/test/files/run/array-charSeq.scala +++ 
b/test/files/run/array-charSeq.scala @@ -6,6 +6,7 @@ object Test { def check(chars: CharSequence) { println("\n[check '" + chars + "'] len = " + chars.length) chars match { + case x: Predef.ArrayCharSequence => assert(x.__arrayOfChars eq arr, ((x.__arrayOfChars, arr))) case x: runtime.ArrayCharSequence => assert(x.xs eq arr, ((x.xs, arr))) case x => assert(false, x) } diff --git a/test/files/run/arrays.scala b/test/files/run/arrays.scala index ecebc78a6f..c8bf80ea60 100644 --- a/test/files/run/arrays.scala +++ b/test/files/run/arrays.scala @@ -107,7 +107,7 @@ object Test { val s1 = if (test1) "ok" else "KO"; val s2 = actual.toString(); val s3 = expected.toString(); - error(s0 + " - " + s1 + ": " + s2 + " != " + s3); + sys.error(s0 + " - " + s1 + ": " + s2 + " != " + s3); } checks += 1 } diff --git a/test/files/run/exceptions-2.scala b/test/files/run/exceptions-2.scala index d0312a49b2..f5bbcca210 100644 --- a/test/files/run/exceptions-2.scala +++ b/test/files/run/exceptions-2.scala @@ -42,14 +42,14 @@ object NoExcep { def method4 = try { Console.println(".."); } catch { - case _ => error(".."); + case _ => sys.error(".."); } } object Test { def nested1: Unit = try { try { - error("nnnnoooo"); + sys.error("nnnnoooo"); } finally { Console.println("Innermost finally"); } @@ -59,7 +59,7 @@ object Test { def nested2 = try { try { - error("nnnnoooo"); + sys.error("nnnnoooo"); } finally { Console.println("Innermost finally"); } @@ -68,7 +68,7 @@ object Test { Console.println("Outermost finally"); } - def mixed = + def mixed = try { if (10 > 0) throw Leaf(10); @@ -107,7 +107,7 @@ object Test { case Leaf(a) => Console.println(a); } } catch { - case npe: NullPointerException => + case npe: NullPointerException => Console.println("Caught an NPE"); } @@ -134,21 +134,21 @@ object Test { () } finally { try { - error("a"); + sys.error("a"); } catch { case _ => Console.println("Silently ignore exception in finally"); } } } - def valInFinally: Unit = - try { + def valInFinally: Unit = + try { } finally { val fin = "Abc"; Console.println(fin); }; - def tryAndValInFinally: Unit = + def tryAndValInFinally: Unit = try { } finally { val fin = "Abc"; @@ -157,51 +157,51 @@ object Test { } catch { case _ => () } }; - def returnInBody: Unit = try { + def returnInBody: Unit = try { try { Console.println("Normal execution..."); - return + return Console.println("non reachable code"); } finally { Console.println("inner finally"); } - } finally { + } finally { Console.println("Outer finally"); } - def returnInBodySynch: Unit = try { + def returnInBodySynch: Unit = try { synchronized { try { Console.println("Synchronized normal execution..."); - return + return Console.println("non reachable code"); } finally { Console.println("inner finally"); } } - } finally { + } finally { Console.println("Outer finally"); } - def returnInBodyAndInFinally: Unit = try { + def returnInBodyAndInFinally: Unit = try { try { Console.println("Normal execution..."); - return + return Console.println("non reachable code"); } finally { Console.println("inner finally"); return } - } finally { + } finally { Console.println("Outer finally"); return } - def returnInBodyAndInFinally2: Unit = try { + def returnInBodyAndInFinally2: Unit = try { try { Console.println("Normal execution..."); - return + return Console.println("non reachable code"); } finally { try { @@ -211,7 +211,7 @@ object Test { Console.println("finally inside finally"); } } - } finally { + } finally { Console.println("Outer finally"); return } @@ -253,7 +253,7 @@ object Test { } - 
def returnWithFinallyClean: Int = try { + def returnWithFinallyClean: Int = try { try { Console.println("Normal execution..."); return 10 @@ -262,7 +262,7 @@ object Test { } finally { Console.println("inner finally"); } - } finally { + } finally { Console.println("Outer finally"); try { 1 } catch { case e: java.io.IOException => () } } @@ -294,7 +294,7 @@ object Test { Console.println("mixed: "); execute(mixed); - + Console.println("withValue1:"); execute(withValue1); @@ -322,7 +322,7 @@ object Test { Console.println("NoExcep.method3:"); execute(NoExcep.method3); - + Console.println("NoExcep.method4:"); execute(NoExcep.method4); diff --git a/test/files/run/exceptions.scala b/test/files/run/exceptions.scala index fc3566f85e..90f681e3c5 100644 --- a/test/files/run/exceptions.scala +++ b/test/files/run/exceptions.scala @@ -6,8 +6,8 @@ abstract class IntMap[A] { def lookup(key: Int): A = this match { - case Empty() => error("KO") - case _ => error("ok") + case Empty() => sys.error("KO") + case _ => sys.error("ok") } } diff --git a/test/files/run/exoticnames.scala b/test/files/run/exoticnames.scala index fa0e5e6ec5..98f9a88776 100644 --- a/test/files/run/exoticnames.scala +++ b/test/files/run/exoticnames.scala @@ -1,7 +1,7 @@ // this is a run-test because the compiler should emit bytecode that'll pass the JVM's verifier object Test extends App { - def `(` = error("bla") - def `.` = error("bla") - def `)` = error("bla") - def `,` = error("bla") + def `(` = sys.error("bla") + def `.` = sys.error("bla") + def `)` = sys.error("bla") + def `,` = sys.error("bla") } diff --git a/test/files/run/genericValueClass.scala b/test/files/run/genericValueClass.scala index 68162bb685..768e1f86a5 100644 --- a/test/files/run/genericValueClass.scala +++ b/test/files/run/genericValueClass.scala @@ -1,11 +1,12 @@ -final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal { - @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y) - def →[B](y: B): Tuple2[A, B] = ->(y) -} object Test extends App { + class ArrowAssocClass[A](val __leftOfArrow: A) extends AnyVal { + @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y) + def →[B](y: B): Tuple2[A, B] = ->(y) + } + { - @inline implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x) + @inline implicit def ArrowAssoc[A](x: A): ArrowAssocClass[A] = new ArrowAssocClass(x) val x = 1 -> "abc" println(x) } diff --git a/test/files/run/macro-typecheck-implicitsdisabled.check b/test/files/run/macro-typecheck-implicitsdisabled.check index c4fa2c5c28..91d8fabd72 100644 --- a/test/files/run/macro-typecheck-implicitsdisabled.check +++ b/test/files/run/macro-typecheck-implicitsdisabled.check @@ -1,2 +1,2 @@ -scala.this.Predef.any2ArrowAssoc[Int](1).->[Int](2) +scala.this.Predef.ArrowAssoc[Int](1).->[Int](2) scala.reflect.macros.TypecheckException: value -> is not a member of Int diff --git a/test/files/run/runtime.scala b/test/files/run/runtime.scala index 2dcb41fb50..a2ac204e8a 100644 --- a/test/files/run/runtime.scala +++ b/test/files/run/runtime.scala @@ -125,7 +125,7 @@ object Test2Test { object Test3Test { - class Foo { override def equals(that: Any) = error("abort"); } + class Foo { override def equals(that: Any) = sys.error("abort"); } def check(expected: Boolean, actual1: Boolean, actual2: Boolean): Unit = Console.println( diff --git a/test/files/run/t1042.scala b/test/files/run/t1042.scala index 1f39fff24a..302ff31053 100644 --- a/test/files/run/t1042.scala +++ b/test/files/run/t1042.scala @@ -6,7 +6,7 @@ abstract class A { case class 
B() extends A { // overloaded version is implemented, causing toString not to be implemented? - def toString(sb: StringBuilder): StringBuilder = error("") + def toString(sb: StringBuilder): StringBuilder = sys.error("") } object Test extends App { diff --git a/test/files/run/tailcalls.scala b/test/files/run/tailcalls.scala index 04a1a8ba19..7d06a7e69d 100644 --- a/test/files/run/tailcalls.scala +++ b/test/files/run/tailcalls.scala @@ -194,10 +194,10 @@ object FancyTailCalls { } object PolyObject extends App { - def tramp[A](x: Int): Int = + def tramp[A](x: Int): Int = if (x > 0) tramp[A](x - 1) - else + else 0 } @@ -233,7 +233,7 @@ class NonTailCall { if (n == 0) 0 else f2(n - 1) } - + } //############################################################################ @@ -273,7 +273,7 @@ object Test { } println } - + def check_overflow(name: String, closure: => Int) { print("test " + name) try { @@ -295,7 +295,7 @@ object Test { while (!stop) { try { calibrator.f(n, n); - if (n >= Int.MaxValue / 2) error("calibration failure"); + if (n >= Int.MaxValue / 2) sys.error("calibration failure"); n = 2 * n; } catch { case exception: compat.Platform.StackOverflowError => stop = true @@ -367,7 +367,7 @@ object Test { check_success("TailCall.g3", TailCall.g3(max, max, Nil), 0) check_success("TailCall.h1", TailCall.h1(max, max ), 0) println - + val NonTailCall = new NonTailCall check_success("NonTailCall.f1", NonTailCall.f1(2), 0) check_overflow("NonTailCall.f2", NonTailCall.f2(max)) @@ -382,17 +382,17 @@ object Test { } // testing explicit tailcalls. - + import scala.util.control.TailCalls._ def isEven(xs: List[Int]): TailRec[Boolean] = if (xs.isEmpty) done(true) else tailcall(isOdd(xs.tail)) def isOdd(xs: List[Int]): TailRec[Boolean] = - if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail)) + if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail)) assert(isEven((1 to 100000).toList).result) - + } //############################################################################ diff --git a/test/files/run/toolbox_typecheck_implicitsdisabled.check b/test/files/run/toolbox_typecheck_implicitsdisabled.check index db64e118ca..009ba651fe 100644 --- a/test/files/run/toolbox_typecheck_implicitsdisabled.check +++ b/test/files/run/toolbox_typecheck_implicitsdisabled.check @@ -1,5 +1,5 @@ { import scala.Predef._; - scala.Predef.any2ArrowAssoc[Int](1).->[Int](2) + scala.Predef.ArrowAssoc[Int](1).->[Int](2) } scala.tools.reflect.ToolBoxError: reflective typecheck has failed: value -> is not a member of Int diff --git a/test/files/run/try-2.scala b/test/files/run/try-2.scala index 677f0b48eb..da321f2668 100644 --- a/test/files/run/try-2.scala +++ b/test/files/run/try-2.scala @@ -7,7 +7,7 @@ object Test { - def tryAllUnit: Unit = + def tryAllUnit: Unit = try { throw new Error(); } @@ -15,28 +15,28 @@ object Test { case _ => Console.println("exception happened\n"); } - def tryUnitAll: Unit = + def tryUnitAll: Unit = try { Console.println("Nothin"); } catch { - case _ => error("Bad, bad, lama!"); + case _ => sys.error("Bad, bad, lama!"); } - def tryAllAll: Unit = + def tryAllAll: Unit = try { throw new Error(); } catch { - case _ => error("Bad, bad, lama!"); + case _ => sys.error("Bad, bad, lama!"); } - def tryUnitUnit: Unit = + def tryUnitUnit: Unit = try { Console.println("Nothin"); } catch { case _ => Console.println("Nothin"); } - def tryIntUnit: Unit = + def tryIntUnit: Unit = try { 10; } catch { @@ -55,7 +55,7 @@ object Test { execute(tryAllUnit); execute(tryUnitAll); execute(tryAllAll); - 
execute(tryUnitUnit); + execute(tryUnitUnit); execute(tryIntUnit); } } diff --git a/test/files/run/try.scala b/test/files/run/try.scala index ad3d606246..e393c0b4b1 100644 --- a/test/files/run/try.scala +++ b/test/files/run/try.scala @@ -17,8 +17,8 @@ object Test extends AnyRef with App { Console.println( (try { x } catch { case _: Error => 1; - }) - + + }) + + (try { x } catch { case _: Error => 1; }) @@ -61,13 +61,13 @@ object Test extends AnyRef with App { Console.print("1 + 1 = "); try { if (true) - error("exit"); + sys.error("exit"); 1+1; () } catch { case _ => Console.println("2"); - error("for good"); + sys.error("for good"); } Console.println("a"); } catch { @@ -116,7 +116,7 @@ object Test extends AnyRef with App { } */ - + try1; try2; try3; diff --git a/test/files/run/verify-ctor.scala b/test/files/run/verify-ctor.scala index 17e4f71be5..528d038a8e 100644 --- a/test/files/run/verify-ctor.scala +++ b/test/files/run/verify-ctor.scala @@ -1,6 +1,6 @@ class Foo(val str: String) { def this(arr: Array[Char]) = this({ - if (arr.length == 0) exit(1) + if (arr.length == 0) sys.exit(1) new String(arr) }) } diff --git a/test/files/scalacheck/CheckEither.scala b/test/files/scalacheck/CheckEither.scala index 4e8480d72e..4d0cab4693 100644 --- a/test/files/scalacheck/CheckEither.scala +++ b/test/files/scalacheck/CheckEither.scala @@ -8,18 +8,18 @@ import org.scalacheck.ConsoleReporter.testStatsEx import Function.tupled object Test extends Properties("Either") { - implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] = + implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] = Arbitrary[Either[X, Y]](oneOf(arbitrary[X].map(Left(_)), arbitrary[Y].map(Right(_)))) - val prop_either1 = forAll((n: Int) => Left(n).fold(x => x, b => error("fail")) == n) + val prop_either1 = forAll((n: Int) => Left(n).fold(x => x, b => sys.error("fail")) == n) - val prop_either2 = forAll((n: Int) => Right(n).fold(a => error("fail"), x => x) == n) + val prop_either2 = forAll((n: Int) => Right(n).fold(a => sys.error("fail"), x => x) == n) val prop_swap = forAll((e: Either[Int, Int]) => e match { case Left(a) => e.swap.right.get == a case Right(b) => e.swap.left.get == b }) - + val prop_isLeftRight = forAll((e: Either[Int, Int]) => e.isLeft != e.isRight) object CheckLeftProjection { @@ -35,7 +35,7 @@ object Test extends Properties("Either") { val prop_exists = forAll((e: Either[Int, Int]) => e.left.exists(_ % 2 == 0) == (e.isLeft && e.left.get % 2 == 0)) - + val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => { def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s) Left(n).left.flatMap(f(_)) == f(n)}) @@ -115,7 +115,7 @@ object Test extends Properties("Either") { } val prop_Either_left = forAll((n: Int) => Left(n).left.get == n) - + val prop_Either_right = forAll((n: Int) => Right(n).right.get == n) val prop_Either_joinLeft = forAll((e: Either[Either[Int, Int], Int]) => e match { @@ -128,12 +128,12 @@ object Test extends Properties("Either") { case Right(ee) => e.joinRight == ee }) - val prop_Either_reduce = forAll((e: Either[Int, Int]) => + val prop_Either_reduce = forAll((e: Either[Int, Int]) => e.merge == (e match { case Left(a) => a case Right(a) => a })) - + /** Hard to believe I'm "fixing" a test to reflect B before A ... 
*/ val prop_Either_cond = forAll((c: Boolean, a: Int, b: Int) => Either.cond(c, a, b) == (if(c) Right(a) else Left(b))) @@ -168,19 +168,19 @@ object Test extends Properties("Either") { ("Right.prop_seq", CheckRightProjection.prop_seq), ("Right.prop_option", CheckRightProjection.prop_option), ("prop_Either_left", prop_Either_left), - ("prop_Either_right", prop_Either_right), + ("prop_Either_right", prop_Either_right), ("prop_Either_joinLeft", prop_Either_joinLeft), - ("prop_Either_joinRight", prop_Either_joinRight), - ("prop_Either_reduce", prop_Either_reduce), + ("prop_Either_joinRight", prop_Either_joinRight), + ("prop_Either_reduce", prop_Either_reduce), ("prop_Either_cond", prop_Either_cond) ) - + for ((label, prop) <- tests) { property(label) = prop } - + import org.scalacheck.{ Test => STest } - + def runTests() = { STest.checkProperties(STest.Params(testCallback = ConsoleReporter(0)), this) } diff --git a/test/scaladoc/resources/SI_4715.scala b/test/scaladoc/resources/SI_4715.scala index 29daf43717..de286956bc 100644 --- a/test/scaladoc/resources/SI_4715.scala +++ b/test/scaladoc/resources/SI_4715.scala @@ -1,7 +1,7 @@ class SI_4715 { type :+:[X,Y] = Map[X,Y] - val withType: Int :+: Double = error("") + val withType: Int :+: Double = sys.error("") trait :-:[X,Y] - val withTrait: Int :-: Double = error("") + val withTrait: Int :-: Double = sys.error("") } -- cgit v1.2.3 From c26cc531f655cfa5b27ffb8ab25adc7ffb97aa71 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 12 Feb 2013 11:05:14 -0800 Subject: SI-6355, weakend implementation restriction on applyDynamic. I realized one can successfully call an overloaded applyDynamic, under conditions such as these: def applyDynamic[T1](m: String)(x1: T1): Any = 1 def applyDynamic[T1, T2](m: String)(x: T1, y: T2): Any = 2 def applyDynamic[T1, T2, T3](m: String)(x: T1, y: T2, z: T3): Any = 3 So I weakened the overloading restriction to allow overloading if each method has a distinct number of type parameters. This very likely still allows the creation of uncallable overloads, but an overly restrictive rule is worse. If the overload cannot be called, it will still be discovered at the call site. --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 11 +++++------ test/files/neg/t6355.check | 7 +++++-- test/files/neg/t6355.scala | 6 ++++++ test/files/pos/t6355pos.scala | 16 ++++++++++++++++ 4 files changed, 32 insertions(+), 8 deletions(-) create mode 100644 test/files/pos/t6355pos.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 7c60ce275a..60a73036f8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -136,9 +136,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // Check for doomed attempt to overload applyDynamic if (clazz isSubClass DynamicClass) { - clazz.info member nme.applyDynamic match { - case sym if sym.isOverloaded => unit.error(sym.pos, "implementation restriction: applyDynamic cannot be overloaded") - case _ => + for ((_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.typeParams.length)) { + unit.error(m1.pos, "implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. 
applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)") } } @@ -1237,12 +1236,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans */ private def checkMigration(sym: Symbol, pos: Position) = { if (sym.hasMigrationAnnotation) { - val changed = try + val changed = try settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get) catch { - case e : NumberFormatException => + case e : NumberFormatException => unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}") - // if we can't parse the format on the migration annotation just conservatively assume it changed + // if we can't parse the format on the migration annotation just conservatively assume it changed true } if (changed) diff --git a/test/files/neg/t6355.check b/test/files/neg/t6355.check index c1fa147f52..607829d99a 100644 --- a/test/files/neg/t6355.check +++ b/test/files/neg/t6355.check @@ -1,4 +1,7 @@ -t6355.scala:12: error: implementation restriction: applyDynamic cannot be overloaded +t6355.scala:12: error: implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2) def applyDynamic(name: String)(x: Int): Int = 2 ^ -one error found +t6355.scala:18: error: implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2) + def applyDynamic[T1, T2](name: String)(x: String, y: T1, z: T2): Int = 3 + ^ +two errors found diff --git a/test/files/neg/t6355.scala b/test/files/neg/t6355.scala index 3007dc49f6..0500ed04c6 100644 --- a/test/files/neg/t6355.scala +++ b/test/files/neg/t6355.scala @@ -11,3 +11,9 @@ class A extends Dynamic { def applyDynamic(name: String)(s: String): Int = 1 def applyDynamic(name: String)(x: Int): Int = 2 } + +class B extends Dynamic { + def applyDynamic[T1](name: String)(x: T1): Int = 1 + def applyDynamic[T1, T2](name: String)(x: T1, y: T2): Int = 2 + def applyDynamic[T1, T2](name: String)(x: String, y: T1, z: T2): Int = 3 +} diff --git a/test/files/pos/t6355pos.scala b/test/files/pos/t6355pos.scala new file mode 100644 index 0000000000..c0e740dd68 --- /dev/null +++ b/test/files/pos/t6355pos.scala @@ -0,0 +1,16 @@ +import scala.language.dynamics + +class A extends Dynamic { + def applyDynamic[T1](method: String)(x1: T1): Any = 1 + def applyDynamic[T1, T2](method: String)(x: T1, y: T2): Any = 2 + def applyDynamic[T1, T2, T3](method: String)(x: T1, y: T2, z: T3): Any = 3 +} + +object Test { + def main(args: Array[String]): Unit = { + val x = new A + println(x[Int](5)) + println(x[Int, String](5, "a")) + println(x[Int, String, Int](5, "a", 5)) + } +} -- cgit v1.2.3 From a0b1db4ce72e2f449de9ce2da2b6b0958bc33579 Mon Sep 17 00:00:00 2001 From: James Iry Date: Mon, 11 Feb 2013 12:44:21 -0800 Subject: SI-6642 Code cleanup on RedBlackTree#TreeIterator In anticipation of some work needed to implement iteratorFrom, this commit does some variable renaming and general code clean up on RedBlackTree's TreeIterator. 
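The TreeIterator in question walks the tree in key order, driving the traversal with an explicit stack of pending ancestors rather than recursion. The following is only a rough standalone sketch of that idea: Node, InOrderIterator, descendLeft and popOrNull are illustrative names, not the patch's code, and the real iterator also bounds its stack with a fixed-size array derived from the node count rather than using a list.

case class Node[A](key: A, left: Node[A], right: Node[A])

class InOrderIterator[A](root: Node[A]) extends Iterator[A] {
  private var stack: List[Node[A]] = Nil              // ancestors whose left subtree is already exhausted
  private var lookahead: Node[A] = descendLeft(root)  // next node to emit; null once the iterator is done

  // Walk to the leftmost node at or below `n`, remembering every node passed on the way down;
  // if `n` is null, fall back to the most recently remembered ancestor instead.
  @annotation.tailrec
  private def descendLeft(n: Node[A]): Node[A] =
    if (n == null) popOrNull()
    else if (n.left == null) n
    else { stack = n :: stack; descendLeft(n.left) }

  private def popOrNull(): Node[A] = stack match {
    case Nil     => null
    case n :: ns => stack = ns; n
  }

  def hasNext: Boolean = lookahead != null
  def next(): A = {
    if (lookahead == null) throw new NoSuchElementException("next on empty iterator")
    val n = lookahead
    lookahead = descendLeft(n.right)                   // continue in the right subtree, otherwise pop an ancestor
    n.key
  }
}

For a tree built as Node(2, Node(1, null, null), Node(3, null, null)) this yields 1, 2, 3. The renames in the diff below (next to lookahead, path to stackOfNexts, findNext to findLeftMostOrPopOnEmpty, pushPath/popPath to pushNext/popNext) spell out exactly these roles.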
--- .../scala/collection/immutable/RedBlackTree.scala | 45 ++++++++++++---------- 1 file changed, 24 insertions(+), 21 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 99f8d95517..004c0ae8c6 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -425,32 +425,28 @@ object RedBlackTree { def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right)) } - private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B]) extends Iterator[R] { + private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B]) extends Iterator[R] { protected[this] def nextResult(tree: Tree[A, B]): R - override def hasNext: Boolean = next ne null + override def hasNext: Boolean = lookahead ne null - override def next: R = next match { + override def next: R = lookahead match { case null => throw new NoSuchElementException("next on empty iterator") case tree => - next = findNext(tree.right) + lookahead = findLeftMostOrPopOnEmpty(goRight(tree)) nextResult(tree) } @tailrec - private[this] def findNext(tree: Tree[A, B]): Tree[A, B] = { - if (tree eq null) popPath() + private[this] def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) popNext() else if (tree.left eq null) tree - else { - pushPath(tree) - findNext(tree.left) - } - } + else findLeftMostOrPopOnEmpty(goLeft(tree)) - private[this] def pushPath(tree: Tree[A, B]) { + private[this] def pushNext(tree: Tree[A, B]) { try { - path(index) = tree + stackOfNexts(index) = tree index += 1 } catch { case _: ArrayIndexOutOfBoundsException => @@ -462,17 +458,17 @@ object RedBlackTree { * An exception handler is used instead of an if-condition to optimize the normal path. * This makes a large difference in iteration speed! */ - assert(index >= path.length) - path :+= null - pushPath(tree) + assert(index >= stackOfNexts.length) + stackOfNexts :+= null + pushNext(tree) } } - private[this] def popPath(): Tree[A, B] = if (index == 0) null else { + private[this] def popNext(): Tree[A, B] = if (index == 0) null else { index -= 1 - path(index) + stackOfNexts(index) } - private[this] var path = if (tree eq null) null else { + private[this] var stackOfNexts = if (root eq null) null else { /* * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. @@ -481,11 +477,18 @@ object RedBlackTree { * * We also don't store the deepest nodes in the path so the maximum path length is further reduced by one. 
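* For instance, a tree with one million entries has height at most 2*log_2(1000002) - 2, which is below 38,
* so a stack of a few dozen slots always suffices, however large the tree grows.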
*/ - val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(tree.count + 2 - 1)) - 2 - 1 + val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 - 1 new Array[Tree[A, B]](maximumHeight) } private[this] var index = 0 - private[this] var next: Tree[A, B] = findNext(tree) + private[this] var lookahead: Tree[A, B] = findLeftMostOrPopOnEmpty(root) + + private[this] def goLeft(tree: Tree[A, B]) = { + pushNext(tree) + tree.left + } + + private[this] def goRight(tree: Tree[A, B]) = tree.right } private[this] class EntriesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, (A, B)](tree) { -- cgit v1.2.3 From 62bc99d3b20a7b37a977b19a6202cdac474eb5f6 Mon Sep 17 00:00:00 2001 From: James Iry Date: Mon, 11 Feb 2013 12:55:06 -0800 Subject: SI-6642 Adds iteratorFrom, keysIteratorFrom, and valuesIteratorFrom Adds the ability to efficiently create an iterator that starts at a given key or element of a sorted set or map. Similar work is done for key and value only iterators on maps. The bulk of the work is in RedBlackTree. Most of the rest is pushing the new api methods throughout the appropriate spots in the collection API. This commit leaves undone some similar work possible on mutable TreeSets --- src/library/scala/Enumeration.scala | 1 + src/library/scala/collection/BitSetLike.scala | 6 +- src/library/scala/collection/SortedMapLike.scala | 29 +++++++++ src/library/scala/collection/SortedSetLike.scala | 10 ++++ src/library/scala/collection/generic/Sorted.scala | 12 ++++ .../scala/collection/immutable/RedBlackTree.scala | 32 +++++++--- .../scala/collection/immutable/SortedMap.scala | 6 ++ .../scala/collection/immutable/TreeMap.scala | 4 ++ .../scala/collection/immutable/TreeSet.scala | 1 + src/library/scala/collection/mutable/TreeSet.scala | 17 ++++++ test/files/run/iterator-from.scala | 69 ++++++++++++++++++++++ 11 files changed, 177 insertions(+), 10 deletions(-) create mode 100644 test/files/run/iterator-from.scala (limited to 'src') diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index 21f0c8fd3e..e7ce21b229 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -255,6 +255,7 @@ abstract class Enumeration (initial: Int) extends Serializable { def + (value: Value) = new ValueSet(nnIds + (value.id - bottomId)) def - (value: Value) = new ValueSet(nnIds - (value.id - bottomId)) def iterator = nnIds.iterator map (id => thisenum.apply(id + bottomId)) + override def keysIteratorFrom(start: Value) = nnIds keysIteratorFrom start.id map (id => thisenum.apply(id + bottomId)) override def stringPrefix = thisenum + ".ValueSet" /** Creates a bit mask for the zero-adjusted ids in this set as a * new array of longs */ diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala index d0f4e323c7..bf05331cb1 100644 --- a/src/library/scala/collection/BitSetLike.scala +++ b/src/library/scala/collection/BitSetLike.scala @@ -98,8 +98,10 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe fromBitMaskNoCopy(a) } - def iterator: Iterator[Int] = new AbstractIterator[Int] { - private var current = 0 + def iterator: Iterator[Int] = iteratorFrom(0) + + override def keysIteratorFrom(start: Int) = new AbstractIterator[Int] { + private var current = start private val end = nwords * WordLength def hasNext: Boolean = { while (current < end && !self.contains(current)) current += 1 diff --git a/src/library/scala/collection/SortedMapLike.scala 
b/src/library/scala/collection/SortedMapLike.scala index 57ad3497c7..3c3e6095df 100644 --- a/src/library/scala/collection/SortedMapLike.scala +++ b/src/library/scala/collection/SortedMapLike.scala @@ -42,6 +42,7 @@ self => val map = self.rangeImpl(from, until) new map.DefaultKeySortedSet } + override def keysIteratorFrom(start: A) = self.keysIteratorFrom(start) } /** Add a key/value pair to this map. @@ -76,11 +77,17 @@ self => override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] { implicit def ordering: Ordering[A] = self.ordering override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p) + override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)} + override def keysIteratorFrom(start: A) = self keysIteratorFrom start filter p + override def valuesIteratorFrom(start: A) = self iteratorFrom start collect {case (k,v) if p(k) => v} } override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] { implicit def ordering: Ordering[A] = self.ordering override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f) + override def iteratorFrom(start: A) = (self iteratorFrom start) map {case (k,v) => (k, f(v))} + override def keysIteratorFrom(start: A) = self keysIteratorFrom start + override def valuesIteratorFrom(start: A) = self valuesIteratorFrom start map f } /** Adds a number of elements provided by a traversable object @@ -91,6 +98,28 @@ self => override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] = ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _) + /** + * Creates an iterator over all the key/value pairs + * contained in this map having a key greater than or + * equal to `start` according to the ordering of + * this map. x.iteratorFrom(y) is equivalent + * to but often more efficient than x.from(y).iterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def iteratorFrom(start: A): Iterator[(A, B)] + /** + * Creates an iterator over all the values contained in this + * map that are associated with a key greater than or equal to `start` + * according to the ordering of this map. x.valuesIteratorFrom(y) is + * equivalent to but often more efficient than + * x.from(y).valuesIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def valuesIteratorFrom(start: A): Iterator[B] } diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala index 71b45c72ff..6d1d1ac111 100644 --- a/src/library/scala/collection/SortedSetLike.scala +++ b/src/library/scala/collection/SortedSetLike.scala @@ -40,4 +40,14 @@ self => case that: SortedSet[_] if that.ordering == ordering => that.hasAll(this.iterator) case that => super.subsetOf(that) } + + /** + * Creates an iterator that contains all values from this collection + * greater than or equal to `start` according to the ordering of + * this collection. 
x.iteratorFrom(y) is equivalent to but will usually + * be more efficient than x.from(y).iterator + * + * @param start The lower-bound (inclusive) of the iterator + */ + def iteratorFrom(start: A): Iterator[A] = keysIteratorFrom(start) } diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala index f962b26bd3..b3847fffc9 100644 --- a/src/library/scala/collection/generic/Sorted.scala +++ b/src/library/scala/collection/generic/Sorted.scala @@ -78,6 +78,18 @@ trait Sorted[K, +This <: Sorted[K, This]] { else until(next) } + + /** + * Creates an iterator over all the keys(or elements) contained in this + * collection greater than or equal to `start` + * according to the ordering of this collection. x.keysIteratorFrom(y) + * is equivalent to but often more efficient than + * x.from(y).keysIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def keysIteratorFrom(start: K): Iterator[K] protected def hasAll(j: Iterator[K]): Boolean = { val i = keySet.iterator diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 004c0ae8c6..c0d46765be 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -91,9 +91,9 @@ object RedBlackTree { if (tree.right ne null) _foreachKey(tree.right, f) } - def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree) - def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree) - def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree) + def iterator[A, B](tree: Tree[A, B], start: Option[A] = None)(implicit ordering: Ordering[A]): Iterator[(A, B)] = new EntriesIterator(tree, start) + def keysIterator[A, _](tree: Tree[A, _], start: Option[A] = None)(implicit ordering: Ordering[A]): Iterator[A] = new KeysIterator(tree, start) + def valuesIterator[A, B](tree: Tree[A, B], start: Option[A] = None)(implicit ordering: Ordering[A]): Iterator[B] = new ValuesIterator(tree, start) @tailrec def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { @@ -425,7 +425,7 @@ object RedBlackTree { def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right)) } - private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B]) extends Iterator[R] { + private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(implicit ordering: Ordering[A]) extends Iterator[R] { protected[this] def nextResult(tree: Tree[A, B]): R override def hasNext: Boolean = lookahead ne null @@ -481,7 +481,23 @@ object RedBlackTree { new Array[Tree[A, B]](maximumHeight) } private[this] var index = 0 - private[this] var lookahead: Tree[A, B] = findLeftMostOrPopOnEmpty(root) + private[this] var lookahead: Tree[A, B] = start map startFrom getOrElse findLeftMostOrPopOnEmpty(root) + + /** + * Find the leftmost subtree whose key is equal to the given key, or if no such thing, + * the leftmost subtree with the key that would be "next" after it according + * to the ordering. Along the way build up the iterator's path stack so that "next" + * functionality works. 
+ */ + private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { + @tailrec def find(tree: Tree[A, B]): Tree[A, B] = + if (tree == null) popNext + else find( + if (ordering.lteq(key, tree.key)) goLeft(tree) + else goRight(tree) + ) + find(root) + } private[this] def goLeft(tree: Tree[A, B]) = { pushNext(tree) @@ -491,15 +507,15 @@ object RedBlackTree { private[this] def goRight(tree: Tree[A, B]) = tree.right } - private[this] class EntriesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, (A, B)](tree) { + private[this] class EntriesIterator[A, B](tree: Tree[A, B], focus: Option[A])(implicit ordering: Ordering[A]) extends TreeIterator[A, B, (A, B)](tree, focus) { override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) } - private[this] class KeysIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, A](tree) { + private[this] class KeysIterator[A, B](tree: Tree[A, B], focus: Option[A])(implicit ordering: Ordering[A]) extends TreeIterator[A, B, A](tree, focus) { override def nextResult(tree: Tree[A, B]) = tree.key } - private[this] class ValuesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, B](tree) { + private[this] class ValuesIterator[A, B](tree: Tree[A, B], focus: Option[A])(implicit ordering: Ordering[A]) extends TreeIterator[A, B, B](tree, focus) { override def nextResult(tree: Tree[A, B]) = tree.value } } diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala index eb04231c55..5e833f87af 100644 --- a/src/library/scala/collection/immutable/SortedMap.scala +++ b/src/library/scala/collection/immutable/SortedMap.scala @@ -82,11 +82,17 @@ self => override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] { implicit def ordering: Ordering[A] = self.ordering override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p) + override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)} + override def keysIteratorFrom(start : A) = self keysIteratorFrom start filter p + override def valuesIteratorFrom(start : A) = self iteratorFrom start collect {case (k,v) if p(k) => v} } override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] { implicit def ordering: Ordering[A] = self.ordering override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f) + override def iteratorFrom(start: A) = self iteratorFrom start map {case (k, v) => (k, f(v))} + override def keysIteratorFrom(start : A) = self keysIteratorFrom start + override def valuesIteratorFrom(start : A) = self valuesIteratorFrom start map f } } diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 9a87d8636b..a6a6b75c32 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -189,9 +189,13 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi * @return the new iterator */ override def iterator: Iterator[(A, B)] = RB.iterator(tree) + override def iteratorFrom(start: A): Iterator[(A, B)] = RB.iterator(tree, Some(start)) override def keysIterator: Iterator[A] = RB.keysIterator(tree) + override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) + override def valuesIterator: Iterator[B] 
= RB.valuesIterator(tree) + override def valuesIteratorFrom(start: A): Iterator[B] = RB.valuesIterator(tree, Some(start)) override def contains(key: A): Boolean = RB.contains(tree, key) override def isDefinedAt(key: A): Boolean = RB.contains(tree, key) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 8bceb936aa..67668b3bef 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -144,6 +144,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin * @return the new iterator */ def iterator: Iterator[A] = RB.keysIterator(tree) + override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) override def foreach[U](f: A => U) = RB.foreachKey(tree, f) diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala index 5197af1b04..4fd35658fa 100644 --- a/src/library/scala/collection/mutable/TreeSet.scala +++ b/src/library/scala/collection/mutable/TreeSet.scala @@ -116,8 +116,25 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S resolve.avl.contains(elem, ordering) } + // TODO see the discussion on keysIteratorFrom override def iterator: Iterator[A] = resolve.avl.iterator .dropWhile(e => !isLeftAcceptable(from, ordering)(e)) .takeWhile(e => isRightAcceptable(until, ordering)(e)) + + // TODO because TreeSets are potentially ranged views into other TreeSets + // what this really needs to do is walk the whole stack of tree sets, find + // the highest "from", and then do a tree walk of the underlying avl tree + // to find that spot in max(O(stack depth), O(log tree.size)) time which + // should effectively be O(log size) since ranged views are rare and + // even more rarely deep. With the following implementation it's + // O(N log N) to get an iterator from a start point. + // But before engaging that endeavor I think mutable.TreeSet should be + // based on the same immutable RedBlackTree that immutable.TreeSet is + // based on. There's no good reason to have these two collections based + // on two different balanced binary trees. That'll save + // having to duplicate logic for finding the starting point of a + // sorted binary tree iterator, logic that has already been + // baked into RedBlackTree. 
+ override def keysIteratorFrom(start: A) = from(start).iterator } diff --git a/test/files/run/iterator-from.scala b/test/files/run/iterator-from.scala new file mode 100644 index 0000000000..8dc6ae4e51 --- /dev/null +++ b/test/files/run/iterator-from.scala @@ -0,0 +1,69 @@ +// This file tests iteratorFrom, keysIteratorFrom, and valueIteratorFrom on various sorted sets and maps + +import scala.util.{Random => R} +import scala.collection._ +import scala.math.Ordered + +object Test extends App { + val maxLength = 25 + val maxKey = 50 + val maxValue = 50 + + def testSet[A <% Ordered[A]](set: SortedSet[A], list: List[A]) { + val distinctSorted = list.distinct.sorted + assertEquals("Set size wasn't the same as list sze", set.size, distinctSorted.size) + + for(key <- distinctSorted) { + val clazz = set.getClass + val iteratorFrom = (set iteratorFrom key).toList + check(clazz, list, s"set iteratorFrom $key", s"(set from $key).iterator", iteratorFrom, (set from key).iterator.toList) + check(clazz, list, s"set.iteratorFrom $key", s"distinctSorted dropWhile (_ < $key)", iteratorFrom, distinctSorted dropWhile (_ < key)) + check(clazz, list, s"set iteratorFrom $key", s"set keysIterator from $key", iteratorFrom, (set keysIteratorFrom key).toList) + } + } + + def testMap[A <% Ordered[A], B](map: SortedMap[A, B], list: List[(A, B)]) { + val distinctSorted = distinctByKey(list).sortBy(_._1) + assertEquals("Map size wasn't the same as list sze", map.size, distinctSorted.size) + + for(keyValue <- distinctSorted) { + val key = keyValue._1 + val clazz = map.getClass + val iteratorFrom = (map iteratorFrom key).toList + check(clazz, list, s"map iteratorFrom $key", s"(map from $key).iterator", iteratorFrom, (map from key).iterator.toList) + check(clazz, list, s"map iteratorFrom $key", s"distinctSorted dropWhile (_._1 < $key)", iteratorFrom, distinctSorted dropWhile (_._1 < key)) + check(clazz, list, s"map iteratorFrom $key map (_._1)", s"map keysIteratorFrom $key", iteratorFrom map (_._1), (map keysIteratorFrom key).toList) + check(clazz, list, s"map iteratorFrom $key map (_._2)", s"map valuesIteratorFrom $key", iteratorFrom map (_._2), (map valuesIteratorFrom key).toList) + } + } + + def check[A](clazz: Class[_], list: List[_], m1: String, m2: String, l1: List[A], l2: List[A]) { + assertEquals(s"$clazz: `$m1` didn't match `$m2` on list $list", l1, l2) + } + + def assertEquals[A](msg: String, x: A, y: A) { + assert(x == y, s"$msg\n1: $x\n2: $y") + } + + def distinctByKey[A,B](list: List[(A, B)]) : List[(A,B)] = list.groupBy(_._1).map(_._2.last).toList + + object Weekday extends Enumeration { + type Weekday = Value + val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value + } + + 0 until maxLength foreach {length => + val keyValues = (0 until length map {_ => (R nextInt maxKey, R nextInt maxValue)}).toList + val keys = keyValues map (_._2) + testSet(immutable.BitSet(keys:_*), keys) + testSet(immutable.TreeSet(keys:_*), keys) + testSet(mutable.TreeSet(keys:_*), keys) + val days = keys map {n => Weekday(n % Weekday.values.size)} + testSet(Weekday.ValueSet(days:_*), days) + + val treeMap = immutable.TreeMap(keyValues:_*) + testMap(treeMap, keyValues) + testMap(treeMap.filterKeys(_ % 2 == 0), keyValues filter (_._1 % 2 == 0)) + testMap(treeMap mapValues (_ + 1), keyValues map {case (k,v) => (k, v + 1)}) + } +} -- cgit v1.2.3 From 39037798c94e6e862f39dacffc5e65bb08b78d6a Mon Sep 17 00:00:00 2001 From: James Iry Date: Tue, 12 Feb 2013 15:30:50 -0800 Subject: SI-6642 Refactor mutable.TreeSet to use RedBlackTree instead of AVL 
There was no reason to have mutable.TreeSet use AVLTree while immutable.TreeSet and immutable.HashSet used RedBlackTree. In particular that would have meant duplicating the iteratorFrom logic unnecessarily. So this commit refactors mutable.TreeSet to use RedBlackTree for everything, including iteratorFrom. It also adds a test to make sure TreeSet works as expected. AVLTree should be dead code since it's private[scala.collection.mutable] and only used by mutable.TreeSet, but to be safe it's only deprecated in this commit. --- .../scala/collection/immutable/RedBlackTree.scala | 21 ++- src/library/scala/collection/mutable/AVLTree.scala | 11 +- src/library/scala/collection/mutable/TreeSet.scala | 125 +++++++----------- test/files/run/mutable-treeset.scala | 145 +++++++++++++++++++++ 4 files changed, 223 insertions(+), 79 deletions(-) create mode 100644 test/files/run/mutable-treeset.scala (limited to 'src') diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index c0d46765be..d8c69f026b 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -24,7 +24,7 @@ import scala.annotation.meta.getter * * @since 2.10 */ -private[immutable] +private[collection] object RedBlackTree { def isEmpty(tree: Tree[_, _]): Boolean = tree eq null @@ -44,6 +44,25 @@ object RedBlackTree { } def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count + /** + * Count all the nodes with keys greater than or equal to the lower bound and less than the upper bound. + * The two bounds are optional. + */ + def countInRange[A, _](tree: Tree[A, _], from: Option[A], to:Option[A])(implicit ordering: Ordering[A]) : Int = + if (tree eq null) 0 else + (from, to) match { + // with no bounds use this node's count + case (None, None) => tree.count + // if node is less than the lower bound, try the tree on the right, it might be in range + case (Some(lb), _) if ordering.lt(tree.key, lb) => countInRange(tree.right, from, to) + // if node is greater than or equal to the upper bound, try the tree on the left, it might be in range + case (_, Some(ub)) if ordering.gteq(tree.key, ub) => countInRange(tree.left, from, to) + // node is in range so the tree on the left will all be less than the upper bound and the tree on the + // right will all be greater than or equal to the lower bound. 
So 1 for this node plus + // count the subtrees by stripping off the bounds that we don't need any more + case _ => 1 + countInRange(tree.left, from, None) + countInRange(tree.right, None, to) + + } def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k)) def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala index 157e5dae62..da63778fcc 100644 --- a/src/library/scala/collection/mutable/AVLTree.scala +++ b/src/library/scala/collection/mutable/AVLTree.scala @@ -15,7 +15,7 @@ package mutable * An immutable AVL Tree implementation used by mutable.TreeSet * * @author Lucien Pereira - * + * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11") */ private[mutable] sealed trait AVLTree[+A] extends Serializable { def balance: Int @@ -65,12 +65,18 @@ private[mutable] sealed trait AVLTree[+A] extends Serializable { def doubleRightRotation[B >: A]: Node[B] = sys.error("Should not happen.") } +/** + * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11") + */ private case object Leaf extends AVLTree[Nothing] { override val balance: Int = 0 override val depth: Int = -1 } +/** + * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11") + */ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree[A]) extends AVLTree[A] { override val balance: Int = right.depth - left.depth @@ -205,6 +211,9 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree } } +/** + * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11") + */ private class AVLIterator[A](root: Node[A]) extends Iterator[A] { val stack = mutable.ArrayStack[Node[A]](root) diveLeft() diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala index 4fd35658fa..9113d8221b 100644 --- a/src/library/scala/collection/mutable/TreeSet.scala +++ b/src/library/scala/collection/mutable/TreeSet.scala @@ -10,6 +10,8 @@ package scala.collection package mutable import generic._ +import scala.collection.immutable.{RedBlackTree => RB} +import scala.runtime.ObjectRef /** * @define Coll `mutable.TreeSet` @@ -29,112 +31,81 @@ object TreeSet extends MutableSortedSetFactory[TreeSet] { } /** - * A mutable SortedSet using an immutable AVL Tree as underlying data structure. + * A mutable SortedSet using an immutable RedBlack Tree as underlying data structure. 
* * @author Lucien Pereira * */ -class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with SetLike[A, TreeSet[A]] +class TreeSet[A] private (treeRef: ObjectRef[RB.Tree[A, Null]], from: Option[A], until: Option[A])(implicit val ordering: Ordering[A]) + extends SortedSet[A] with SetLike[A, TreeSet[A]] with SortedSetLike[A, TreeSet[A]] with Set[A] with Serializable { - // Projection constructor - private def this(base: Option[TreeSet[A]], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]) { - this(); - this.base = base - this.from = from - this.until = until - } - - private var base: Option[TreeSet[A]] = None - - private var from: Option[A] = None - - private var until: Option[A] = None - - private var avl: AVLTree[A] = Leaf - - private var cardinality: Int = 0 + def this()(implicit ordering: Ordering[A]) = this(new ObjectRef(null), None, None) - def resolve: TreeSet[A] = base.getOrElse(this) - - private def isLeftAcceptable(from: Option[A], ordering: Ordering[A])(a: A): Boolean = - from.map(x => ordering.gteq(a, x)).getOrElse(true) - - private def isRightAcceptable(until: Option[A], ordering: Ordering[A])(a: A): Boolean = - until.map(x => ordering.lt(a, x)).getOrElse(true) - - /** - * Cardinality store the set size, unfortunately a - * set view (given by rangeImpl) - * cannot take advantage of this optimisation - * - */ - override def size: Int = base.map(_ => super.size).getOrElse(cardinality) + override def size: Int = RB.countInRange(treeRef.elem, from, until) override def stringPrefix = "TreeSet" override def empty: TreeSet[A] = TreeSet.empty - override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSet(Some(this), from, until) + private def pickBound(comparison: (A, A) => A, oldBound: Option[A], newBound: Option[A]) = (newBound, oldBound) match { + case (Some(newB), Some(oldB)) => Some(comparison(newB, oldB)) + case (None, _) => oldBound + case _ => newBound + } + + override def rangeImpl(fromArg: Option[A], untilArg: Option[A]): TreeSet[A] = { + val newFrom = pickBound(ordering.max, fromArg, from) + val newUntil = pickBound(ordering.min, untilArg, until) + + new TreeSet(treeRef, newFrom, newUntil) + } override def -=(elem: A): this.type = { - try { - resolve.avl = resolve.avl.remove(elem, ordering) - resolve.cardinality = resolve.cardinality - 1 - } catch { - case e: NoSuchElementException => () - } + treeRef.elem = RB.delete(treeRef.elem, elem) this } override def +=(elem: A): this.type = { - try { - resolve.avl = resolve.avl.insert(elem, ordering) - resolve.cardinality = resolve.cardinality + 1 - } catch { - case e: IllegalArgumentException => () - } + treeRef.elem = RB.update(treeRef.elem, elem, null, false) this } /** * Thanks to the immutable nature of the - * underlying AVL Tree, we can share it with + * underlying Tree, we can share it with * the clone. So clone complexity in time is O(1). 
* */ - override def clone(): TreeSet[A] = { - val clone = new TreeSet[A](base, from, until) - clone.avl = resolve.avl - clone.cardinality = resolve.cardinality - clone - } + override def clone(): TreeSet[A] = + new TreeSet[A](new ObjectRef(treeRef.elem), from, until) + + private val notProjection = !(from.isDefined || until.isDefined) override def contains(elem: A): Boolean = { - isLeftAcceptable(from, ordering)(elem) && - isRightAcceptable(until, ordering)(elem) && - resolve.avl.contains(elem, ordering) + def leftAcceptable: Boolean = from match { + case Some(lb) => ordering.gteq(elem, lb) + case _ => true + } + + def rightAcceptable: Boolean = until match { + case Some(ub) => ordering.lt(elem, ub) + case _ => true + } + + (notProjection || (leftAcceptable && rightAcceptable)) && + RB.contains(treeRef.elem, elem) } - // TODO see the discussion on keysIteratorFrom - override def iterator: Iterator[A] = resolve.avl.iterator - .dropWhile(e => !isLeftAcceptable(from, ordering)(e)) - .takeWhile(e => isRightAcceptable(until, ordering)(e)) + override def iterator: Iterator[A] = iteratorFrom(None) - // TODO because TreeSets are potentially ranged views into other TreeSets - // what this really needs to do is walk the whole stack of tree sets, find - // the highest "from", and then do a tree walk of the underlying avl tree - // to find that spot in max(O(stack depth), O(log tree.size)) time which - // should effectively be O(log size) since ranged views are rare and - // even more rarely deep. With the following implementation it's - // O(N log N) to get an iterator from a start point. - // But before engaging that endeavor I think mutable.TreeSet should be - // based on the same immutable RedBlackTree that immutable.TreeSet is - // based on. There's no good reason to have these two collections based - // on two different balanced binary trees. That'll save - // having to duplicate logic for finding the starting point of a - // sorted binary tree iterator, logic that has already been - // baked into RedBlackTree. 
- override def keysIteratorFrom(start: A) = from(start).iterator - + override def keysIteratorFrom(start: A) = iteratorFrom(Some(start)) + + private def iteratorFrom(start: Option[A]) = { + val it = RB.keysIterator(treeRef.elem, pickBound(ordering.max, from, start)) + until match { + case None => it + case Some(ub) => it takeWhile (k => ordering.lt(k, ub)) + } + } } diff --git a/test/files/run/mutable-treeset.scala b/test/files/run/mutable-treeset.scala new file mode 100644 index 0000000000..c9918cba96 --- /dev/null +++ b/test/files/run/mutable-treeset.scala @@ -0,0 +1,145 @@ +import scala.collection.mutable.TreeSet + +object Test extends App { + val list = List(6,5,4,3,2,1,1,2,3,4,5,6,6,5,4,3,2,1) + val distinct = list.distinct + val sorted = distinct.sorted + + // sublist stuff for a single level of slicing + val min = list.min + val max = list.max + val nonlist = ((min - 10) until (max + 20) filterNot list.contains).toList + val sublist = list filter {x => x >=(min + 1) && x < max} + val distinctSublist = sublist.distinct + val subnonlist = min :: max :: nonlist + val subsorted = distinctSublist.sorted + + // subsublist for a 2nd level of slicing + val almostmin = sublist.min + val almostmax = sublist.max + val subsublist = sublist filter {x => x >=(almostmin + 1) && x < almostmax} + val distinctSubsublist = subsublist.distinct + val subsubnonlist = almostmin :: almostmax :: subnonlist + val subsubsorted = distinctSubsublist.sorted + + def testSize { + def check(set : TreeSet[Int], list: List[Int]) { + assert(set.size == list.size, s"$set had size ${set.size} while $list had size ${list.size}") + } + + check(TreeSet[Int](), List[Int]()) + val set = TreeSet(list:_*) + check(set, distinct) + check(set.clone, distinct) + + val subset = set from (min + 1) until max + check(subset, distinctSublist) + check(subset.clone, distinctSublist) + + val subsubset = subset from (almostmin + 1) until almostmax + check(subsubset, distinctSubsublist) + check(subsubset.clone, distinctSubsublist) + } + + def testContains { + def check(set : TreeSet[Int], list: List[Int], nonlist: List[Int]) { + assert(list forall set.apply, s"$set did not contain all elements of $list using apply") + assert(list forall set.contains, s"$set did not contain all elements of $list using contains") + assert(!(nonlist exists set.apply), s"$set had an element from $nonlist using apply") + assert(!(nonlist exists set.contains), s"$set had an element from $nonlist using contains") + } + + val set = TreeSet(list:_*) + check(set, list, nonlist) + check(set.clone, list, nonlist) + + val subset = set from (min + 1) until max + check(subset, sublist, subnonlist) + check(subset.clone, sublist, subnonlist) + + val subsubset = subset from (almostmin + 1) until almostmax + check(subsubset, subsublist, subsubnonlist) + check(subsubset.clone, subsublist, subsubnonlist) + } + + def testAdd { + def check(set : TreeSet[Int], list: List[Int], nonlist: List[Int]) { + var builtList = List[Int]() + for (x <- list) { + set += x + builtList = (builtList :+ x).distinct.sorted filterNot nonlist.contains + assert(builtList forall set.apply, s"$set did not contain all elements of $builtList using apply") + assert(builtList.size == set.size, s"$set had size ${set.size} while $builtList had size ${builtList.size}") + } + assert(!(nonlist exists set.apply), s"$set had an element from $nonlist using apply") + assert(!(nonlist exists set.contains), s"$set had an element from $nonlist using contains") + } + + val set = TreeSet[Int]() + val clone = set.clone + 
val subset = set.clone from (min + 1) until max + val subclone = subset.clone + val subsubset = subset.clone from (almostmin + 1) until almostmax + val subsubclone = subsubset.clone + + check(set, list, nonlist) + check(clone, list, nonlist) + + check(subset, list, subnonlist) + check(subclone, list, subnonlist) + + check(subsubset, list, subsubnonlist) + check(subsubclone, list, subsubnonlist) + } + + def testRemove { + def check(set: TreeSet[Int], sorted: List[Int]) { + var builtList = sorted + for (x <- list) { + set remove x + builtList = builtList filterNot (_ == x) + assert(builtList forall set.apply, s"$set did not contain all elements of $builtList using apply") + assert(builtList.size == set.size, s"$set had size $set.size while $builtList had size $builtList.size") + } + } + val set = TreeSet(list:_*) + val clone = set.clone + val subset = set.clone from (min + 1) until max + val subclone = subset.clone + val subsubset = subset.clone from (almostmin + 1) until almostmax + val subsubclone = subsubset.clone + + check(set, sorted) + check(clone, sorted) + + check(subset, subsorted) + check(subclone, subsorted) + + check(subsubset, subsubsorted) + check(subsubclone, subsubsorted) + } + + def testIterator { + def check(set: TreeSet[Int], list: List[Int]) { + val it = set.iterator.toList + assert(it == list, s"$it did not equal $list") + } + val set = TreeSet(list: _*) + check(set, sorted) + check(set.clone, sorted) + + val subset = set from (min + 1) until max + check(subset, subsorted) + check(subset.clone, subsorted) + + val subsubset = subset from (almostmin + 1) until almostmax + check(subsubset, subsubsorted) + check(subsubset.clone, subsubsorted) + } + + testSize + testContains + testAdd + testRemove + testIterator +} -- cgit v1.2.3 From 07ba1f8002b5f81bd3849ba38144efdabc8ef4a4 Mon Sep 17 00:00:00 2001 From: James Iry Date: Wed, 13 Feb 2013 21:23:04 -0800 Subject: SI-6642 Code cleanup from review of iteratorFrom MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit is code cleanup from the review on https://github.com/scala/scala/pull/2119 * Changes several instances of def f[A, ]… to def f[A]… * Changes several instances of def f[A](…)(implicit ordering : Ordering[A])… to def f[A: Ordering](…)… * Changes one instance of x == null to x eq null * Changes two instances of id + bottomid to bottomid + id --- src/library/scala/Enumeration.scala | 4 ++-- .../scala/collection/immutable/RedBlackTree.scala | 24 +++++++++++----------- 2 files changed, 14 insertions(+), 14 deletions(-) (limited to 'src') diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index e7ce21b229..d522539e83 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -254,8 +254,8 @@ abstract class Enumeration (initial: Int) extends Serializable { def contains(v: Value) = nnIds contains (v.id - bottomId) def + (value: Value) = new ValueSet(nnIds + (value.id - bottomId)) def - (value: Value) = new ValueSet(nnIds - (value.id - bottomId)) - def iterator = nnIds.iterator map (id => thisenum.apply(id + bottomId)) - override def keysIteratorFrom(start: Value) = nnIds keysIteratorFrom start.id map (id => thisenum.apply(id + bottomId)) + def iterator = nnIds.iterator map (id => thisenum.apply(bottomId + id)) + override def keysIteratorFrom(start: Value) = nnIds keysIteratorFrom start.id map (id => thisenum.apply(bottomId + id)) override def stringPrefix = thisenum + ".ValueSet" /** Creates a bit mask for 
the zero-adjusted ids in this set as a * new array of longs */ diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index d8c69f026b..1cd0128c05 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -29,8 +29,8 @@ object RedBlackTree { def isEmpty(tree: Tree[_, _]): Boolean = tree eq null - def contains[A](tree: Tree[A, _], x: A)(implicit ordering: Ordering[A]): Boolean = lookup(tree, x) ne null - def get[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Option[B] = lookup(tree, x) match { + def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null + def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match { case null => None case tree => Some(tree.value) } @@ -48,7 +48,7 @@ object RedBlackTree { * Count all the nodes with keys greater than or equal to the lower bound and less than the upper bound. * The two bounds are optional. */ - def countInRange[A, _](tree: Tree[A, _], from: Option[A], to:Option[A])(implicit ordering: Ordering[A]) : Int = + def countInRange[A](tree: Tree[A, _], from: Option[A], to:Option[A])(implicit ordering: Ordering[A]) : Int = if (tree eq null) 0 else (from, to) match { // with no bounds use this node's count @@ -63,8 +63,8 @@ object RedBlackTree { case _ => 1 + countInRange(tree.left, from, None) + countInRange(tree.right, None, to) } - def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) - def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k)) + def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) + def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k)) def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { case (Some(from), Some(until)) => this.range(tree, from, until) case (Some(from), None) => this.from(tree, from) @@ -110,9 +110,9 @@ object RedBlackTree { if (tree.right ne null) _foreachKey(tree.right, f) } - def iterator[A, B](tree: Tree[A, B], start: Option[A] = None)(implicit ordering: Ordering[A]): Iterator[(A, B)] = new EntriesIterator(tree, start) - def keysIterator[A, _](tree: Tree[A, _], start: Option[A] = None)(implicit ordering: Ordering[A]): Iterator[A] = new KeysIterator(tree, start) - def valuesIterator[A, B](tree: Tree[A, B], start: Option[A] = None)(implicit ordering: Ordering[A]): Iterator[B] = new ValuesIterator(tree, start) + def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start) + def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start) + def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start) @tailrec def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { @@ -510,7 +510,7 @@ object RedBlackTree { */ private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { @tailrec def find(tree: Tree[A, B]): Tree[A, B] = - if (tree == null) popNext + if (tree eq null) popNext else find( if (ordering.lteq(key, tree.key)) goLeft(tree) else goRight(tree) @@ -526,15 +526,15 @@ 
object RedBlackTree { private[this] def goRight(tree: Tree[A, B]) = tree.right } - private[this] class EntriesIterator[A, B](tree: Tree[A, B], focus: Option[A])(implicit ordering: Ordering[A]) extends TreeIterator[A, B, (A, B)](tree, focus) { + private[this] class EntriesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) { override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) } - private[this] class KeysIterator[A, B](tree: Tree[A, B], focus: Option[A])(implicit ordering: Ordering[A]) extends TreeIterator[A, B, A](tree, focus) { + private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) { override def nextResult(tree: Tree[A, B]) = tree.key } - private[this] class ValuesIterator[A, B](tree: Tree[A, B], focus: Option[A])(implicit ordering: Ordering[A]) extends TreeIterator[A, B, B](tree, focus) { + private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) { override def nextResult(tree: Tree[A, B]) = tree.value } } -- cgit v1.2.3 From c11cf0b6c55cc2ec15820dceb6ba825726deed88 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 13 Feb 2013 17:01:36 +0100 Subject: SI-7120 Erasure must honor typeref prefixes Erasure was discarding these, which led to unnecessarily wide types in quite particular circumstances. This showed up as a double definition error in the reported bug when the bridge method clashed with the erased signature. --- .../scala/reflect/internal/transform/Erasure.scala | 2 +- test/files/run/t7120.check | 1 + test/files/run/t7120/Base_1.scala | 10 ++++++++ test/files/run/t7120/Derived_2.scala | 9 ++++++++ test/files/run/t7120/Run_3.scala | 3 +++ test/files/run/t7120b.check | 2 ++ test/files/run/t7120b.scala | 27 ++++++++++++++++++++++ 7 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t7120.check create mode 100644 test/files/run/t7120/Base_1.scala create mode 100644 test/files/run/t7120/Derived_2.scala create mode 100644 test/files/run/t7120/Run_3.scala create mode 100644 test/files/run/t7120b.check create mode 100644 test/files/run/t7120b.scala (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index abf380ac44..d83b4d71d9 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -130,7 +130,7 @@ trait Erasure { else if (sym.isRefinementClass) apply(mergeParents(tp.parents)) else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref) else if (sym.isClass) eraseNormalClassRef(pre, sym) - else apply(sym.info) // alias type or abstract type + else apply(sym.info asSeenFrom (pre, sym.owner)) // alias type or abstract type case PolyType(tparams, restpe) => apply(restpe) case ExistentialType(tparams, restpe) => diff --git a/test/files/run/t7120.check b/test/files/run/t7120.check new file mode 100644 index 0000000000..45a4fb75db --- /dev/null +++ b/test/files/run/t7120.check @@ -0,0 +1 @@ +8 diff --git a/test/files/run/t7120/Base_1.scala b/test/files/run/t7120/Base_1.scala new file mode 100644 index 0000000000..be07b4f34f --- /dev/null +++ b/test/files/run/t7120/Base_1.scala @@ -0,0 +1,10 @@ +// This bug doesn't depend on separate compilation, +// in the interests of minimizing the log output during +// debugging this problem, I've split the compilation. 
+ +case class Container( v: String ) + +trait Base[ T <: AnyRef ] { + type UserType = T + protected def defect: PartialFunction[ UserType, String ] +} diff --git a/test/files/run/t7120/Derived_2.scala b/test/files/run/t7120/Derived_2.scala new file mode 100644 index 0000000000..e0de629f82 --- /dev/null +++ b/test/files/run/t7120/Derived_2.scala @@ -0,0 +1,9 @@ +trait Derived extends Base[ Container ] { + protected def defect = { case c: Container => c.v.toString } +} + +// Erasure was ignoring the prefix `Derived#7001.this` when erasing +// A1, and consequently used `Object` rather than `Container`, which +// was only seen because that signature clashed with the bridge method. +// +// applyOrElse[A1 <: Derived#7001.this.UserType#7318, B1 >: String](x1: A1) diff --git a/test/files/run/t7120/Run_3.scala b/test/files/run/t7120/Run_3.scala new file mode 100644 index 0000000000..95e7f994ff --- /dev/null +++ b/test/files/run/t7120/Run_3.scala @@ -0,0 +1,3 @@ +object Test extends Derived with App { + println( defect( Container( "8" ) ) ) +} diff --git a/test/files/run/t7120b.check b/test/files/run/t7120b.check new file mode 100644 index 0000000000..aa2f5e7c9f --- /dev/null +++ b/test/files/run/t7120b.check @@ -0,0 +1,2 @@ +public int C$D.foo(java.lang.String) +public int C$D.foo(java.lang.String) diff --git a/test/files/run/t7120b.scala b/test/files/run/t7120b.scala new file mode 100644 index 0000000000..9f6591aa06 --- /dev/null +++ b/test/files/run/t7120b.scala @@ -0,0 +1,27 @@ +trait Base[A] { type B = A; } +class C extends Base[String] { + class D { + def foo[B1 <: B](b: B1) = 0 + } +} + +trait BaseHK[M[_], A] { type B = M[A]; } +object BaseHK { type Id[X] = X } +class CHK extends BaseHK[BaseHK.Id, String] { + class D { + def foo[B1 <: B](b: B1) = 0 + } +} + + +object Test extends App { + val c = new C + val d = new c.D() + val meth = d.getClass.getMethods.find(_.getName == "foo").get + println(meth) + + val chk = new CHK + val dhk = new chk.D() + val methhk = d.getClass.getMethods.find(_.getName == "foo").get + println(methhk) +} -- cgit v1.2.3 From 0eff6cd49d32c20d5648b57a01b5e80339a1cca7 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 19 Feb 2013 12:57:21 -0800 Subject: Fix and optimization in overriding logic. Given: trait Foo { def f: Int = 5 } trait Bar extends Foo { def f: Int } I noticed allOverriddenSymbols for the abstract f defined in Bar was returning the method from Foo, even though an abstract method cannot override a concrete one. There were other bits of code which accidentally depended on this outcome. Now allOverriddenSymbols for Bar is empty. The optimization is that whether or not a symbol overrides any other symbols is known at creation time and does not change. We now spend a lot less time looking for overridden symbols in base classes by storing that value, "isOverridingSymbol". 
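A minimal runtime-reflection sketch of the observable change, mirroring the Foo/Bar example above and the new test added at the end of this patch (the object name OverrideCheck is only for illustration):

    import scala.reflect.runtime.universe._

    object OverrideCheck {
      trait Foo { def f: Int = 5 ; def g: Int }
      trait Bar extends Foo { def f: Int ; def g: Int = 5 }

      def main(args: Array[String]): Unit = {
        // The concrete g in Bar still overrides the abstract g in Foo, but the
        // abstract f in Bar no longer claims to override the concrete f
        // inherited from Foo, so this prints only "method g".
        typeOf[Bar].declarations.toList.flatMap(_.allOverriddenSymbols) foreach println
      }
    }
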
--- .../scala/tools/nsc/typechecker/Contexts.scala | 3 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 9 +- src/reflect/scala/reflect/internal/Symbols.scala | 133 ++++++++++++--------- test/files/run/all-overridden.check | 1 + test/files/run/all-overridden.scala | 11 ++ 5 files changed, 97 insertions(+), 60 deletions(-) create mode 100644 test/files/run/all-overridden.check create mode 100644 test/files/run/all-overridden.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 711085e6c9..b070bd1b49 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -573,8 +573,7 @@ trait Contexts { self: Analyzer => ( superAccess || pre.isInstanceOf[ThisType] || phase.erasedTypes - || isProtectedAccessOK(sym) - || (sym.allOverriddenSymbols exists isProtectedAccessOK) + || (sym.overrideChain exists isProtectedAccessOK) // that last condition makes protected access via self types work. ) ) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 60a73036f8..dd16e9de30 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -726,8 +726,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans else if (clazz.isTrait && !(clazz isSubClass AnyValClass)) { // For non-AnyVal classes, prevent abstract methods in interfaces that override // final members in Object; see #4431 - for (decl <- clazz.info.decls.iterator) { - val overridden = decl.overriddenSymbol(ObjectClass) + for (decl <- clazz.info.decls) { + // Have to use matchingSymbol, not a method involving overridden symbols, + // because the scala type system understands that an abstract method here does not + // override a concrete method in Object. The jvm, however, does not. + val overridden = decl.matchingSymbol(ObjectClass, ObjectClass.tpe) if (overridden.isFinal) unit.error(decl.pos, "trait cannot redefine final method from class AnyRef") } @@ -1499,8 +1502,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // on Unit, in which case we had better let it slide. val isOk = ( sym.isGetter - || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType)) || (sym.name containsName nme.DEFAULT_GETTER_STRING) + || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType)) ) if (!isOk) unit.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead") diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 4f6dab3e7c..fbf14e8156 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -853,12 +853,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Is this a term symbol only defined in a refinement (so that it needs * to be accessed by reflection)? 
*/ - def isOnlyRefinementMember: Boolean = + def isOnlyRefinementMember: Boolean = ( isTerm && // type members are not affected owner.isRefinementClass && // owner must be a refinement class (owner.info decl name) == this && // symbol must be explicitly declared in the refinement (not synthesized from glb) - allOverriddenSymbols.isEmpty && // symbol must not override a symbol in a base class + !isOverridingSymbol && // symbol must not override a symbol in a base class !isConstant // symbol must not be a constant. Question: Can we exclude @inline methods as well? + ) final def isStructuralRefinementMember = owner.isStructuralRefinement && isPossibleInRefinement && isPublic final def isPossibleInRefinement = !isConstructor && !isOverridingSymbol @@ -960,14 +961,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => def ownerChain: List[Symbol] = this :: owner.ownerChain def originalOwnerChain: List[Symbol] = this :: originalOwner.getOrElse(this, rawowner).originalOwnerChain - // All the symbols overridden by this symbol and this symbol at the head, - // or Nil if this is NoSymbol. - def overrideChain = ( - if (this eq NoSymbol) Nil - else if (!owner.isClass) this :: Nil - else this :: allOverriddenSymbols - ) - // Non-classes skip self and return rest of owner chain; overridden in ClassSymbol. def enclClassChain: List[Symbol] = owner.enclClassChain @@ -2070,81 +2063,111 @@ trait Symbols extends api.Symbols { self: SymbolTable => * @param ofclazz The class containing the symbol's definition * @param site The base type from which member types are computed */ - final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol = { - //OPT cut down on #closures by special casing non-overloaded case - // was: ofclazz.info.nonPrivateDecl(name) filter (sym => - // !sym.isTerm || (site.memberType(this) matches site.memberType(sym))) - val result = ofclazz.info.nonPrivateDecl(name) - def qualifies(sym: Symbol) = !sym.isTerm || (site.memberType(this) matches site.memberType(sym)) - if ((result eq NoSymbol) || !result.isOverloaded && qualifies(result)) result - else result filter qualifies - } + final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol = + matchingSymbolInternal(site, ofclazz.info nonPrivateDecl name) /** The non-private member of `site` whose type and name match the type of this symbol. */ final def matchingSymbol(site: Type, admit: Long = 0L): Symbol = - site.nonPrivateMemberAdmitting(name, admit).filter(sym => - !sym.isTerm || (site.memberType(this) matches site.memberType(sym))) + matchingSymbolInternal(site, site.nonPrivateMemberAdmitting(name, admit)) - /** The symbol, in class `ofclazz`, that is overridden by this symbol. + private def matchingSymbolInternal(site: Type, candidate: Symbol): Symbol = { + def qualifies(sym: Symbol) = !sym.isTerm || ((site memberType this) matches (site memberType sym)) + //OPT cut down on #closures by special casing non-overloaded case + if (candidate.isOverloaded) candidate filter qualifies + else if (qualifies(candidate)) candidate + else NoSymbol + } + + /** The symbol, in class `baseClass`, that is overridden by this symbol. * - * @param ofclazz is a base class of this symbol's owner. + * @param baseClass is a base class of this symbol's owner. 
*/ - final def overriddenSymbol(ofclazz: Symbol): Symbol = - if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, owner.thisType) + final def overriddenSymbol(baseClass: Symbol): Symbol = ( + // concrete always overrides abstract, so don't let an abstract definition + // claim to be overriding an inherited concrete one. + matchingInheritedSymbolIn(baseClass) filter (res => res.isDeferred || !this.isDeferred) + ) + + private def matchingInheritedSymbolIn(baseClass: Symbol): Symbol = + if (canMatchInheritedSymbols) matchingSymbol(baseClass, owner.thisType) else NoSymbol /** The symbol overriding this symbol in given subclass `ofclazz`. * * @param ofclazz is a subclass of this symbol's owner */ - final def overridingSymbol(ofclazz: Symbol): Symbol = - if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, ofclazz.thisType) + final def overridingSymbol(ofclazz: Symbol): Symbol = ( + if (canMatchInheritedSymbols) + matchingSymbol(ofclazz, ofclazz.thisType) + else + NoSymbol + ) - /** Returns all symbols overriden by this symbol. */ - final def allOverriddenSymbols: List[Symbol] = ( - if ((this eq NoSymbol) || !owner.isClass) Nil - else { - def loop(xs: List[Symbol]): List[Symbol] = xs match { - case Nil => Nil - case x :: xs => - overriddenSymbol(x) match { - case NoSymbol => loop(xs) - case sym => sym :: loop(xs) - } - } - loop(owner.ancestors) - } + /** If false, this symbol cannot possibly participate in an override, + * either as overrider or overridee. For internal use; you should consult + * with isOverridingSymbol. This is used by isOverridingSymbol to escape + * the recursive knot. + */ + private def canMatchInheritedSymbols = ( + (this ne NoSymbol) + && owner.isClass + && !this.isClass + && !this.isConstructor + ) + + // All the symbols overridden by this symbol and this symbol at the head, + // or Nil if this is NoSymbol. + def overrideChain = ( + if (this eq NoSymbol) Nil + else if (isOverridingSymbol) this :: allOverriddenSymbols + else this :: Nil ) + /** Returns all symbols overridden by this symbol. */ + final def allOverriddenSymbols: List[Symbol] = { + def loop(xs: List[Symbol]): List[Symbol] = xs match { + case Nil => Nil + case x :: xs => + overriddenSymbol(x) match { + case NoSymbol => loop(xs) + case sym => sym :: loop(xs) + } + } + if (isOverridingSymbol) loop(owner.ancestors) else Nil + } + /** Equivalent to allOverriddenSymbols.nonEmpty, but more efficient. */ - // !!! When if ever will this answer differ from .isOverride? - // How/where is the OVERRIDE flag managed, as compared to how checks - // based on type membership will evaluate? - def isOverridingSymbol = owner.isClass && ( - owner.ancestors exists (cls => matchingSymbol(cls, owner.thisType) != NoSymbol) + lazy val isOverridingSymbol = ( + canMatchInheritedSymbols + && owner.ancestors.exists(base => overriddenSymbol(base) != NoSymbol) ) + /** Equivalent to allOverriddenSymbols.head (or NoSymbol if no overrides) but more efficient. */ def nextOverriddenSymbol: Symbol = { - if ((this ne NoSymbol) && owner.isClass) owner.ancestors foreach { base => - val sym = overriddenSymbol(base) - if (sym != NoSymbol) - return sym + @tailrec def loop(bases: List[Symbol]): Symbol = bases match { + case Nil => NoSymbol + case base :: rest => + val sym = overriddenSymbol(base) + if (sym == NoSymbol) loop(rest) else sym } - NoSymbol + if (isOverridingSymbol) loop(owner.ancestors) else NoSymbol } /** Returns all symbols overridden by this symbol, plus all matching symbols * defined in parents of the selftype. 
*/ - final def extendedOverriddenSymbols: List[Symbol] = - if (!owner.isClass) Nil - else owner.thisSym.ancestors map overriddenSymbol filter (_ != NoSymbol) + final def extendedOverriddenSymbols: List[Symbol] = ( + if (canMatchInheritedSymbols) + owner.thisSym.ancestors map overriddenSymbol filter (_ != NoSymbol) + else + Nil + ) /** The symbol accessed by a super in the definition of this symbol when * seen from class `base`. This symbol is always concrete. * pre: `this.owner` is in the base class sequence of `base`. */ final def superSymbol(base: Symbol): Symbol = { - var bcs = base.info.baseClasses.dropWhile(owner != _).tail + var bcs = base.info.baseClasses dropWhile (owner != _) drop 1 var sym: Symbol = NoSymbol while (!bcs.isEmpty && sym == NoSymbol) { if (!bcs.head.isImplClass) diff --git a/test/files/run/all-overridden.check b/test/files/run/all-overridden.check new file mode 100644 index 0000000000..1b620b1176 --- /dev/null +++ b/test/files/run/all-overridden.check @@ -0,0 +1 @@ +method g diff --git a/test/files/run/all-overridden.scala b/test/files/run/all-overridden.scala new file mode 100644 index 0000000000..1b798ef748 --- /dev/null +++ b/test/files/run/all-overridden.scala @@ -0,0 +1,11 @@ +import scala.reflect.runtime.universe._ + +object Test { + trait Foo { def f: Int = 5 ; def g: Int } + trait Bar extends Foo { def f: Int ; def g: Int = 5 } + + def main(args: Array[String]): Unit = { + // We should see g, but not f or $init$. + typeOf[Bar].declarations.toList.flatMap(_.allOverriddenSymbols) foreach println + } +} -- cgit v1.2.3 From 6879451bfefbcc9646a9b0b534ae45b13c354bea Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 19 Feb 2013 07:42:24 -0800 Subject: Extracted abstract implicit vals from Types. These should be considered plumbing and not blended in with the rest of the api. --- src/reflect/scala/reflect/api/ImplicitTags.scala | 108 +++++++++++++++++++++++ src/reflect/scala/reflect/api/Types.scala | 105 +--------------------- 2 files changed, 110 insertions(+), 103 deletions(-) create mode 100644 src/reflect/scala/reflect/api/ImplicitTags.scala (limited to 'src') diff --git a/src/reflect/scala/reflect/api/ImplicitTags.scala b/src/reflect/scala/reflect/api/ImplicitTags.scala new file mode 100644 index 0000000000..3f377d6cff --- /dev/null +++ b/src/reflect/scala/reflect/api/ImplicitTags.scala @@ -0,0 +1,108 @@ +package scala.reflect +package api + +trait ImplicitTags { + self: Types => + + /** A tag that preserves the identity of the `Type` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val TypeTagg: ClassTag[Type] + + /** A tag that preserves the identity of the `SingletonType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val SingletonTypeTag: ClassTag[SingletonType] + + /** A tag that preserves the identity of the `ThisType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val ThisTypeTag: ClassTag[ThisType] + + /** A tag that preserves the identity of the `SingleType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val SingleTypeTag: ClassTag[SingleType] + + /** A tag that preserves the identity of the `SuperType` abstract type from erasure. 
+ * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val SuperTypeTag: ClassTag[SuperType] + + /** A tag that preserves the identity of the `ConstantType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val ConstantTypeTag: ClassTag[ConstantType] + + /** A tag that preserves the identity of the `TypeRef` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val TypeRefTag: ClassTag[TypeRef] + + /** A tag that preserves the identity of the `CompoundType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val CompoundTypeTag: ClassTag[CompoundType] + + /** A tag that preserves the identity of the `RefinedType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val RefinedTypeTag: ClassTag[RefinedType] + + /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val ClassInfoTypeTag: ClassTag[ClassInfoType] + + /** A tag that preserves the identity of the `MethodType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val MethodTypeTag: ClassTag[MethodType] + + /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType] + + /** A tag that preserves the identity of the `PolyType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val PolyTypeTag: ClassTag[PolyType] + + /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val ExistentialTypeTag: ClassTag[ExistentialType] + + /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val AnnotatedTypeTag: ClassTag[AnnotatedType] + + /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. + * @group Tags + */ + implicit val TypeBoundsTag: ClassTag[TypeBounds] + + /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure. + * Can be used for pattern matching, instance tests, serialization and likes. 
+ * @group Tags + */ + implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType] +} diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala index 143438b8f5..e8e9e9c048 100644 --- a/src/reflect/scala/reflect/api/Types.scala +++ b/src/reflect/scala/reflect/api/Types.scala @@ -50,7 +50,8 @@ package api * * @contentDiagram hideNodes "*Api" */ -trait Types { self: Universe => +trait Types extends ImplicitTags { + self: Universe => /** The type of Scala types, and also Scala type signatures. * (No difference is internally made between the two). @@ -59,12 +60,6 @@ trait Types { self: Universe => */ type Type >: Null <: TypeApi - /** A tag that preserves the identity of the `Type` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypeTagg: ClassTag[Type] - /** This constant is used as a special value that indicates that no meaningful type exists. * @group Types */ @@ -256,12 +251,6 @@ trait Types { self: Universe => */ type SingletonType >: Null <: Type - /** A tag that preserves the identity of the `SingletonType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val SingletonTypeTag: ClassTag[SingletonType] - /** A singleton type that describes types of the form on the left with the * corresponding `ThisType` representation to the right: * {{{ @@ -272,12 +261,6 @@ trait Types { self: Universe => */ type ThisType >: Null <: AnyRef with SingletonType with ThisTypeApi - /** A tag that preserves the identity of the `ThisType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ThisTypeTag: ClassTag[ThisType] - /** The constructor/extractor for `ThisType` instances. * @group Extractors */ @@ -316,12 +299,6 @@ trait Types { self: Universe => */ type SingleType >: Null <: AnyRef with SingletonType with SingleTypeApi - /** A tag that preserves the identity of the `SingleType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val SingleTypeTag: ClassTag[SingleType] - /** The constructor/extractor for `SingleType` instances. * @group Extractors */ @@ -361,12 +338,6 @@ trait Types { self: Universe => */ type SuperType >: Null <: AnyRef with SingletonType with SuperTypeApi - /** A tag that preserves the identity of the `SuperType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val SuperTypeTag: ClassTag[SuperType] - /** The constructor/extractor for `SuperType` instances. * @group Extractors */ @@ -406,12 +377,6 @@ trait Types { self: Universe => */ type ConstantType >: Null <: AnyRef with SingletonType with ConstantTypeApi - /** A tag that preserves the identity of the `ConstantType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ConstantTypeTag: ClassTag[ConstantType] - /** The constructor/extractor for `ConstantType` instances. * @group Extractors */ @@ -450,12 +415,6 @@ trait Types { self: Universe => */ type TypeRef >: Null <: AnyRef with Type with TypeRefApi - /** A tag that preserves the identity of the `TypeRef` abstract type from erasure. 
- * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypeRefTag: ClassTag[TypeRef] - /** The constructor/extractor for `TypeRef` instances. * @group Extractors */ @@ -497,12 +456,6 @@ trait Types { self: Universe => */ type CompoundType >: Null <: AnyRef with Type - /** A tag that preserves the identity of the `CompoundType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val CompoundTypeTag: ClassTag[CompoundType] - /** The `RefinedType` type defines types of any of the forms on the left, * with their RefinedType representations to the right. * {{{ @@ -515,12 +468,6 @@ trait Types { self: Universe => */ type RefinedType >: Null <: AnyRef with CompoundType with RefinedTypeApi - /** A tag that preserves the identity of the `RefinedType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val RefinedTypeTag: ClassTag[RefinedType] - /** The constructor/extractor for `RefinedType` instances. * @group Extractors */ @@ -567,12 +514,6 @@ trait Types { self: Universe => */ type ClassInfoType >: Null <: AnyRef with CompoundType with ClassInfoTypeApi - /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ClassInfoTypeTag: ClassTag[ClassInfoType] - /** The constructor/extractor for `ClassInfoType` instances. * @group Extractors */ @@ -610,12 +551,6 @@ trait Types { self: Universe => */ type MethodType >: Null <: AnyRef with Type with MethodTypeApi - /** A tag that preserves the identity of the `MethodType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val MethodTypeTag: ClassTag[MethodType] - /** The constructor/extractor for `MethodType` instances. * @group Extractors */ @@ -660,12 +595,6 @@ trait Types { self: Universe => */ type NullaryMethodType >: Null <: AnyRef with Type with NullaryMethodTypeApi - /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType] - /** The constructor/extractor for `NullaryMethodType` instances. * @group Extractors */ @@ -696,12 +625,6 @@ trait Types { self: Universe => */ type PolyType >: Null <: AnyRef with Type with PolyTypeApi - /** A tag that preserves the identity of the `PolyType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val PolyTypeTag: ClassTag[PolyType] - /** The constructor/extractor for `PolyType` instances. * @group Extractors */ @@ -736,12 +659,6 @@ trait Types { self: Universe => */ type ExistentialType >: Null <: AnyRef with Type with ExistentialTypeApi - /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val ExistentialTypeTag: ClassTag[ExistentialType] - /** The constructor/extractor for `ExistentialType` instances. 
* @group Extractors */ @@ -777,12 +694,6 @@ trait Types { self: Universe => */ type AnnotatedType >: Null <: AnyRef with Type with AnnotatedTypeApi - /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val AnnotatedTypeTag: ClassTag[AnnotatedType] - /** The constructor/extractor for `AnnotatedType` instances. * @group Extractors */ @@ -828,12 +739,6 @@ trait Types { self: Universe => */ type TypeBounds >: Null <: AnyRef with Type with TypeBoundsApi - /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val TypeBoundsTag: ClassTag[TypeBounds] - /** The constructor/extractor for `TypeBounds` instances. * @group Extractors */ @@ -885,12 +790,6 @@ trait Types { self: Universe => */ type BoundedWildcardType >: Null <: AnyRef with Type with BoundedWildcardTypeApi - /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure. - * Can be used for pattern matching, instance tests, serialization and likes. - * @group Tags - */ - implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType] - /** The constructor/extractor for `BoundedWildcardType` instances. * @group Extractors */ -- cgit v1.2.3 From d8ba6afbd4c039b26562a331f0b1ec3885c0e121 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 20 Feb 2013 08:51:30 -0800 Subject: Boxing cleanup: erasure, post-erasure, value classes. Introduces extractors for value class trees. Puts them to work to make the value class tree manipulations believable. Eliminated some boxing code in erasure which had been marked "maybe subsumed by posterasure?" after deciding that it had been subsumed by posterasure. Added some same-bytecode tests involving value class boxing (actually the lack thereof.) --- src/compiler/scala/tools/nsc/ast/TreeInfo.scala | 59 ++++++++++++++++ .../scala/tools/nsc/transform/Erasure.scala | 79 ++++++---------------- .../scala/tools/nsc/transform/PostErasure.scala | 60 ++++++---------- test/files/jvm/value-class-boxing.check | 7 ++ test/files/jvm/value-class-boxing/Analyzed_1.scala | 17 +++++ test/files/jvm/value-class-boxing/test.scala | 15 ++++ 6 files changed, 138 insertions(+), 99 deletions(-) create mode 100644 test/files/jvm/value-class-boxing.check create mode 100644 test/files/jvm/value-class-boxing/Analyzed_1.scala create mode 100644 test/files/jvm/value-class-boxing/test.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala index f53f99a279..6a0f4407fc 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala @@ -14,6 +14,65 @@ package ast abstract class TreeInfo extends scala.reflect.internal.TreeInfo { val global: Global import global._ + import definitions._ + + // arg1.op(arg2) returns (arg1, op.symbol, arg2) + object BinaryOp { + def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match { + case Apply(sel @ Select(arg1, _), arg2 :: Nil) => Some((arg1, sel.symbol, arg2)) + case _ => None + } + } + // recv.op[T1, ...] 
returns (recv, op.symbol, type argument types) + object TypeApplyOp { + def unapply(t: Tree): Option[(Tree, Symbol, List[Type])] = t match { + case TypeApply(sel @ Select(recv, _), targs) => Some((recv, sel.symbol, targs map (_.tpe))) + case _ => None + } + } + + // x.asInstanceOf[T] returns (x, typeOf[T]) + object AsInstanceOf { + def unapply(t: Tree): Option[(Tree, Type)] = t match { + case Apply(TypeApplyOp(recv, Object_asInstanceOf, tpe :: Nil), Nil) => Some((recv, tpe)) + case _ => None + } + } + + // Extractors for value classes. + object ValueClass { + def isValueClass(tpe: Type) = enteringErasure(tpe.typeSymbol.isDerivedValueClass) + def valueUnbox(tpe: Type) = enteringErasure(tpe.typeSymbol.derivedValueClassUnbox) + + // B.unbox. Returns B. + object Unbox { + def unapply(t: Tree): Option[Tree] = t match { + case Apply(sel @ Select(ref, _), Nil) if valueUnbox(ref.tpe) == sel.symbol => Some(ref) + case _ => None + } + } + // new B(v). Returns B and v. + object Box { + def unapply(t: Tree): Option[(Tree, Type)] = t match { + case Apply(sel @ Select(New(tpt), nme.CONSTRUCTOR), v :: Nil) => Some((v, tpt.tpe.finalResultType)) + case _ => None + } + } + // (new B(v)).unbox. returns v. + object BoxAndUnbox { + def unapply(t: Tree): Option[Tree] = t match { + case Unbox(Box(v, tpe)) if isValueClass(tpe) => Some(v) + case _ => None + } + } + // new B(v1) op new B(v2) where op is == or !=. Returns v1, op, v2. + object BoxAndCompare { + def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match { + case BinaryOp(Box(v1, tpe1), op @ (Object_== | Object_!=), Box(v2, tpe2)) if isValueClass(tpe1) && tpe1 =:= tpe2 => Some((v1, op, v2)) + case _ => None + } + } + } /** Is tree legal as a member definition of an interface? */ diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index f380b9d04f..8287c1f631 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -21,6 +21,7 @@ abstract class Erasure extends AddInterfaces import global._ import definitions._ import CODE._ + import treeInfo._ val phaseName: String = "erasure" @@ -357,41 +358,10 @@ abstract class Erasure extends AddInterfaces override def newTyper(context: Context) = new Eraser(context) - private def safeToRemoveUnbox(cls: Symbol): Boolean = - (cls == definitions.NullClass) || isBoxedValueClass(cls) - - /** An extractor object for unboxed expressions (maybe subsumed by posterasure?) */ - object Unboxed { - def unapply(tree: Tree): Option[Tree] = tree match { - case Apply(fn, List(arg)) if isUnbox(fn.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) => - Some(arg) - case Apply( - TypeApply( - cast @ Select( - Apply( - sel @ Select(arg, acc), - List()), - asinstanceof), - List(tpt)), - List()) - if cast.symbol == Object_asInstanceOf && - tpt.tpe.typeSymbol.isDerivedValueClass && - sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox => - Some(arg) - case _ => - None - } - } - - /** An extractor object for boxed expressions (maybe subsumed by posterasure?) 
*/ - object Boxed { - def unapply(tree: Tree): Option[Tree] = tree match { - case Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)) if (tpt.tpe.typeSymbol.isDerivedValueClass) => - Some(arg) - case LabelDef(name, params, Boxed(rhs)) => - Some(treeCopy.LabelDef(tree, name, params, rhs) setType rhs.tpe) - case _ => - None + private def isSafelyRemovableUnbox(fn: Tree, arg: Tree): Boolean = { + isUnbox(fn.symbol) && { + val cls = arg.tpe.typeSymbol + (cls == definitions.NullClass) || isBoxedValueClass(cls) } } @@ -578,12 +548,7 @@ abstract class Erasure extends AddInterfaces val tree1 = tree.tpe match { case ErasedValueType(tref) => val clazz = tref.sym - tree match { - case Unboxed(arg) if arg.tpe.typeSymbol == clazz => - log("shortcircuiting unbox -> box "+arg); arg - case _ => - New(clazz, cast(tree, underlyingOfValueClass(clazz))) - } + New(clazz, cast(tree, underlyingOfValueClass(clazz))) case _ => tree.tpe.typeSymbol match { case UnitClass => @@ -599,7 +564,7 @@ abstract class Erasure extends AddInterfaces * This is important for specialization: calls to the super constructor should not box/unbox specialized * fields (see TupleX). (ID) */ - case Apply(boxFun, List(arg)) if isUnbox(tree.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) => + case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) => log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}") arg case _ => @@ -634,24 +599,18 @@ abstract class Erasure extends AddInterfaces case _ => val tree1 = pt match { case ErasedValueType(tref) => - tree match { - case Boxed(arg) if arg.tpe.isInstanceOf[ErasedValueType] => - log("shortcircuiting box -> unbox "+arg) - arg - case _ => - val clazz = tref.sym - log("not boxed: "+tree) - lazy val underlying = underlyingOfValueClass(clazz) - val tree0 = - if (tree.tpe.typeSymbol == NullClass && - isPrimitiveValueClass(underlying.typeSymbol)) { - // convert `null` directly to underlying type, as going - // via the unboxed type would yield a NPE (see SI-5866) - unbox1(tree, underlying) - } else - Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List()) - cast(tree0, pt) - } + val clazz = tref.sym + log("not boxed: "+tree) + lazy val underlying = underlyingOfValueClass(clazz) + val tree0 = + if (tree.tpe.typeSymbol == NullClass && + isPrimitiveValueClass(underlying.typeSymbol)) { + // convert `null` directly to underlying type, as going + // via the unboxed type would yield a NPE (see SI-5866) + unbox1(tree, underlying) + } else + Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List()) + cast(tree0, pt) case _ => pt.typeSymbol match { case UnitClass => diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala index a8dc47046b..2a86d711f1 100644 --- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala +++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala @@ -9,10 +9,10 @@ package transform * performs peephole optimizations. 
*/ trait PostErasure extends InfoTransform with TypingTransformers { - val global: Global + import global._ - import definitions._ + import treeInfo._ val phaseName: String = "posterasure" @@ -21,51 +21,33 @@ trait PostErasure extends InfoTransform with TypingTransformers { object elimErasedValueType extends TypeMap { def apply(tp: Type) = tp match { - case ConstantType(Constant(tp: Type)) => - ConstantType(Constant(apply(tp))) - case ErasedValueType(tref) => - enteringPhase(currentRun.erasurePhase)(erasure.erasedValueClassArg(tref)) - case _ => mapOver(tp) + case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp))) + case ErasedValueType(tref) => enteringErasure(erasure.erasedValueClassArg(tref)) + case _ => mapOver(tp) } } def transformInfo(sym: Symbol, tp: Type) = elimErasedValueType(tp) class PostErasureTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { + override def transform(tree: Tree) = { + def finish(res: Tree) = logResult(s"Posterasure reduction\n Old: $tree\n New")(res) + + /** We use the name of the operation being performed and not the symbol + * itself because the symbol hails from the boxed class, and this transformation + * exists to operate directly on the values. So we are for instance looking + * up == on an lhs of type Int, whereas the symbol which has been passed in + * is from java.lang.Integer. + */ + def binop(lhs: Tree, op: Symbol, rhs: Tree) = + finish(localTyper typed (Apply(Select(lhs, op.name) setPos tree.pos, rhs :: Nil) setPos tree.pos)) - override def transform(tree: Tree) = super.transform(tree) setType elimErasedValueType(tree.tpe) match { - case // new C(arg).underlying ==> arg - Apply(sel @ Select( - Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)), - acc), List()) - if enteringPhase(currentRun.erasurePhase) { - tpt.tpe.typeSymbol.isDerivedValueClass && - sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox - } => - if (settings.debug.value) log("Removing "+tree+" -> "+arg) - arg - case // new C(arg1) == new C(arg2) ==> arg1 == arg2 - Apply(sel @ Select( - Apply(Select(New(tpt1), nme.CONSTRUCTOR), List(arg1)), - cmp), - List(Apply(Select(New(tpt2), nme.CONSTRUCTOR), List(arg2)))) - if enteringPhase(currentRun.erasurePhase) { - tpt1.tpe.typeSymbol.isDerivedValueClass && - (sel.symbol == Object_== || sel.symbol == Object_!=) && - tpt2.tpe.typeSymbol == tpt1.tpe.typeSymbol - } => - val result = Apply(Select(arg1, cmp) setPos sel.pos, List(arg2)) setPos tree.pos - log("shortcircuiting equality "+tree+" -> "+result) - localTyper.typed(result) - - case // arg.asInstanceOf[T] ==> arg if arg.tpe == T - Apply(TypeApply(cast @ Select(arg, asinstanceof), List(tpt)), List()) - if cast.symbol == Object_asInstanceOf && arg.tpe =:= tpt.tpe => // !!! <:< ? 
- if (settings.debug.value) log("Shortening "+tree+" -> "+arg) - arg - case tree1 => - tree1 + case AsInstanceOf(v, tpe) if v.tpe <:< tpe => finish(v) // x.asInstanceOf[X] ==> x + case ValueClass.BoxAndUnbox(v) => finish(v) // (new B(v)).unbox ==> v + case ValueClass.BoxAndCompare(v1, op, v2) => binop(v1, op, v2) // new B(v1) == new B(v2) ==> v1 == v2 + case tree => tree } + } } } diff --git a/test/files/jvm/value-class-boxing.check b/test/files/jvm/value-class-boxing.check new file mode 100644 index 0000000000..20a9fe2ba8 --- /dev/null +++ b/test/files/jvm/value-class-boxing.check @@ -0,0 +1,7 @@ +a2 and a1: bytecode identical +a3 and a1: bytecode identical +a4 and a1: bytecode identical +b2 and b1: bytecode identical +b3 and b1: bytecode identical +b4 and b1: bytecode identical +b5 and b1: bytecode identical diff --git a/test/files/jvm/value-class-boxing/Analyzed_1.scala b/test/files/jvm/value-class-boxing/Analyzed_1.scala new file mode 100644 index 0000000000..dec8565351 --- /dev/null +++ b/test/files/jvm/value-class-boxing/Analyzed_1.scala @@ -0,0 +1,17 @@ +class Wrap(val x: Int) extends AnyVal { + def ***(other: Bip): Wrap = new Wrap(x * other.x) +} +class Bip(val x: Int) extends AnyVal + +class SameBytecode { + def a1(x: Int, y: Int): Int = x + y + def a2(x: Wrap, y: Wrap): Wrap = new Wrap(x.x + y.x) + def a3(x: Int, y: Wrap): Wrap = new Wrap(x + y.x) + def a4(x: Int, y: Wrap): Int = x + y.x + + def b1(x: Wrap, y: Int): Int = (x *** new Bip(y)).x + def b2(x: Wrap, y: Bip): Wrap = x *** y + def b3(x: Wrap, y: Int): Wrap = x *** new Bip(y) + def b4(x: Wrap, y: Bip): Bip = new Bip((x *** y).x) + def b5(x: Wrap, y: Int): Bip = new Bip((x *** new Bip(y)).x) +} diff --git a/test/files/jvm/value-class-boxing/test.scala b/test/files/jvm/value-class-boxing/test.scala new file mode 100644 index 0000000000..cf331832de --- /dev/null +++ b/test/files/jvm/value-class-boxing/test.scala @@ -0,0 +1,15 @@ +import scala.tools.partest.BytecodeTest + +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("SameBytecode") + List("a2", "a3", "a4") foreach { m => + print(m + " and a1: ") + sameBytecode(getMethod(classNode, "a1"), getMethod(classNode, m)) + } + List("b2", "b3", "b4", "b5") foreach { m => + print(m + " and b1: ") + sameBytecode(getMethod(classNode, "b1"), getMethod(classNode, m)) + } + } +} -- cgit v1.2.3 From 68f62d7e9c200034bd42ffaf795b57fb379d9d38 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Thu, 21 Feb 2013 15:31:48 +0100 Subject: SI-7164 - Removing NotImplementedError as Fatal from s.u.c.NonFatal --- src/library/scala/util/control/NonFatal.scala | 4 ++-- test/files/jvm/future-spec/FutureTests.scala | 2 +- test/files/jvm/non-fatal-tests.scala | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) (limited to 'src') diff --git a/src/library/scala/util/control/NonFatal.scala b/src/library/scala/util/control/NonFatal.scala index 0d8cdfbace..74478f2a49 100644 --- a/src/library/scala/util/control/NonFatal.scala +++ b/src/library/scala/util/control/NonFatal.scala @@ -11,7 +11,7 @@ package scala.util.control /** * Extractor of non-fatal Throwables. Will not match fatal errors like `VirtualMachineError` * (for example, `OutOfMemoryError`, a subclass of `VirtualMachineError`), `ThreadDeath`, - * `LinkageError`, `InterruptedException`, `ControlThrowable`, or `NotImplementedError`. + * `LinkageError`, `InterruptedException`, `ControlThrowable`. * However, `StackOverflowError` is matched, i.e. considered non-fatal. 
* * Note that [[scala.util.control.ControlThrowable]], an internal Throwable, is not matched by @@ -35,7 +35,7 @@ object NonFatal { def apply(t: Throwable): Boolean = t match { case _: StackOverflowError => true // StackOverflowError ok even though it is a VirtualMachineError // VirtualMachineError includes OutOfMemoryError and other fatal errors - case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable | _: NotImplementedError => false + case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable => false case _ => true } /** diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala index 0efa83fbd9..01c9cf82ba 100644 --- a/test/files/jvm/future-spec/FutureTests.scala +++ b/test/files/jvm/future-spec/FutureTests.scala @@ -77,7 +77,7 @@ object FutureTests extends MinimalScalaTest { val logThrowable: Throwable => Unit = p.trySuccess(_) val ec: ExecutionContext = ExecutionContext.fromExecutor(null, logThrowable) - val t = new NotImplementedError("foo") + val t = new InterruptedException() val f = Future(throw t)(ec) Await.result(p.future, 2.seconds) mustBe t } diff --git a/test/files/jvm/non-fatal-tests.scala b/test/files/jvm/non-fatal-tests.scala index 471a9d227a..22c7cba51f 100644 --- a/test/files/jvm/non-fatal-tests.scala +++ b/test/files/jvm/non-fatal-tests.scala @@ -7,7 +7,8 @@ trait NonFatalTests { Seq(new StackOverflowError, new RuntimeException, new Exception, - new Throwable) + new Throwable, + new NotImplementedError) //Fatals val fatals: Seq[Throwable] = @@ -15,8 +16,7 @@ trait NonFatalTests { new OutOfMemoryError, new LinkageError, new VirtualMachineError {}, - new Throwable with scala.util.control.ControlThrowable, - new NotImplementedError) + new Throwable with scala.util.control.ControlThrowable) def testFatalsUsingApply(): Unit = { fatals foreach { t => assert(NonFatal(t) == false) } -- cgit v1.2.3 From 1b6661b8b586637ba5d54510c7bda1144acab23b Mon Sep 17 00:00:00 2001 From: James Iry Date: Wed, 20 Feb 2013 15:31:32 -0800 Subject: SI-7015 Removes redundant aconst_null; pop; aconst_null creation In an effort to adapt methods and field accesses of type Null to other types, we were always emitting aconst_null pop aconst_null The problem is we were doing that even when the JVM was in a position to know it had null value, e.g. when the user had written a null constant. This commit fixes that and includes a test to show that the resulting byte code still works even without repeating ourselves and/or repeating ourselves. This commit also makes the scala.runtim.Null$ constructor private. It was a sealed abstract class which prevented subclassing in Scala, but it didn't prevent subclassing in Java. A private constructor takes care of that hole so now the only value of type Null$ should be null. Along the way I found some other questionable things in adapt and I've added TODO's and issue https://issues.scala-lang.org/browse/SI-7159 to track. 
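(A minimal sketch, not taken from the patch itself, of the shape of source whose bytecode exercises this adaptation: a method whose result type is Null used where another reference type is expected forces the backend to convince the JVM that the value really is null. All names below are illustrative only; the tests added in this commit cover the same ground more thoroughly.)

    object NullAdaptSketch {
      def alwaysNull: Null = null            // method result typed as Null
      def takesString(s: String): String = s

      def main(args: Array[String]): Unit = {
        val s: String = alwaysNull           // Null result adapted to a String local
        println(s)
        println(takesString(alwaysNull))     // same adaptation at a call argument
      }
    }
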
--- .../scala/tools/nsc/backend/icode/GenICode.scala | 62 ++++++++++++++++++---- src/library/scala/runtime/Null$.scala | 5 +- test/files/run/t7015.check | 11 ++++ test/files/run/t7015.scala | 49 +++++++++++++++++ 4 files changed, 115 insertions(+), 12 deletions(-) create mode 100644 test/files/run/t7015.check create mode 100644 test/files/run/t7015.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 3363f19025..439bb74267 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -275,6 +275,11 @@ abstract class GenICode extends SubComponent { ctx1 = genLoad(args.head, ctx1, INT) generatedType = elem ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos) + // it's tempting to just drop array loads of type Null instead + // of adapting them but array accesses can cause + // ArrayIndexOutOfBounds so we can't. Besides, Array[Null] + // probably isn't common enough to figure out an optimization + adaptNullRef(generatedType, expectedType, ctx1, tree.pos) } else if (scalaPrimitives.isArraySet(code)) { debugassert(args.length == 2, @@ -790,7 +795,9 @@ abstract class GenICode extends SubComponent { } generatedType = if (sym.isClassConstructor) UNIT - else toTypeKind(sym.info.resultType); + else toTypeKind(sym.info.resultType) + // deal with methods that return Null + adaptNullRef(generatedType, expectedType, ctx1, tree.pos) ctx1 } } @@ -842,14 +849,15 @@ abstract class GenICode extends SubComponent { if (sym.isModule) { genLoadModule(genLoadQualUnlessElidable, tree) - } - else if (sym.isStaticMember) { - val ctx1 = genLoadQualUnlessElidable - ctx1.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos) - ctx1 } else { - val ctx1 = genLoadQualifier(tree, ctx) - ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos) + val isStatic = sym.isStaticMember + val ctx1 = if (isStatic) genLoadQualUnlessElidable + else genLoadQualifier(tree, ctx) + ctx1.bb.emit(LOAD_FIELD(sym, isStatic) setHostClass hostClass, tree.pos) + // it's tempting to drop field accesses of type Null instead of adapting them, + // but field access can cause static class init so we can't. Besides, fields + // of type Null probably aren't common enough to figure out an optimization + adaptNullRef(generatedType, expectedType, ctx1, tree.pos) ctx1 } } @@ -997,13 +1005,40 @@ abstract class GenICode extends SubComponent { resCtx } + + /** + * If we have a method call, field load, or array element load of type Null then + * we need to convince the JVM that we have a null value because in Scala + * land Null is a subtype of all ref types, but in JVM land scala.runtime.Null$ + * is not. Note we don't have to adapt loads of locals because the JVM type + * system for locals does have a null type which it tracks internally. As + * long as we adapt these other things, the JVM will know that a Scala local of + * type Null is holding a null. + */ + private def adaptNullRef(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) { + log(s"GenICode#adaptNullRef($from, $to, $ctx, $pos)") + + // Don't need to adapt null to unit because we'll just drop it anyway. Don't + // need to adapt to Object or AnyRef because the JVM is happy with + // upcasting Null to them. 
+ // We do have to adapt from NullReference to NullReference because we could be storing + // this value into a local of type Null and we want the JVM to see that it's + // a null value so we don't have to also adapt local loads. + if (from == NullReference && to != UNIT && to != ObjectReference && to != AnyRefReference) { + assert(to.isReferenceType, "Attempt to adapt a null to a non reference type") + // adapt by dropping what we've got and pushing a null which + // will convince the JVM we really do have null + ctx.bb.emit(DROP(from), pos) + ctx.bb.emit(CONSTANT(Constant(null)), pos) + } + } private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) { // An awful lot of bugs explode here - let's leave ourselves more clues. // A typical example is an overloaded type assigned after typer. log(s"GenICode#adapt($from, $to, $ctx, $pos)") - val conforms = (from <:< to) || (from == NullReference && to == NothingReference) + val conforms = (from <:< to) || (from == NullReference && to == NothingReference) // TODO why would we have null where we expect nothing? def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos) def checkAssertions() { def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos" @@ -1011,8 +1046,15 @@ abstract class GenICode extends SubComponent { assert(!from.isReferenceType && !to.isReferenceType, msg) } if (conforms) from match { + // The JVM doesn't have a Nothing equivalent, so it doesn't know that a method of type Nothing can't actually return. So for instance, with + // def f: String = ??? + // we need + // 0: getstatic #25; //Field scala/Predef$.MODULE$:Lscala/Predef$; + // 3: invokevirtual #29; //Method scala/Predef$.$qmark$qmark$qmark:()Lscala/runtime/Nothing$; + // 6: athrow + // So this case tacks on the ahtrow which makes the JVM happy because class Nothing is declared as a subclass of Throwable case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode - case NullReference => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null)))) + // TODO why do we have this case? It's saying if we have a throwable and a non-throwable is expected then we should emit a cast? Why would we get here? case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables case _ => // widen subrange types diff --git a/src/library/scala/runtime/Null$.scala b/src/library/scala/runtime/Null$.scala index 797b31583d..25b797a606 100644 --- a/src/library/scala/runtime/Null$.scala +++ b/src/library/scala/runtime/Null$.scala @@ -11,6 +11,7 @@ package scala.runtime /** * Dummy class which exist only to satisfy the JVM. It corresponds to * `scala.Null`. If such type appears in method signatures, it is erased - * to this one. + * to this one. A private constructor ensures that Java code can't create + * subclasses. 
The only value of type Null$ should be null */ -sealed abstract class Null$ +sealed abstract class Null$ private () diff --git a/test/files/run/t7015.check b/test/files/run/t7015.check new file mode 100644 index 0000000000..7651fe06b0 --- /dev/null +++ b/test/files/run/t7015.check @@ -0,0 +1,11 @@ +Method returns Null type: null +Method takes non Null type: null +call through method null +call through bridge null +fetch field: null +fetch field on companion: null +fetch local: null +fetch array element: null +method that takes object: null +method that takes anyref: null +method that takes any: null diff --git a/test/files/run/t7015.scala b/test/files/run/t7015.scala new file mode 100644 index 0000000000..9344ca2906 --- /dev/null +++ b/test/files/run/t7015.scala @@ -0,0 +1,49 @@ +object Test { + def main(args : Array[String]) : Unit = { + println(s"Method returns Null type: $f") + println(s"Method takes non Null type: ${g(null)}") + + // pass things through the g function because it expects + // a string. If we haven't adapted properly then we'll + // get verify errors + val b = new B + println(s"call through method ${g(b.f(null))}") + println(s"call through bridge ${g((b: A).f(null))}") + + println(s"fetch field: ${g(b.nullField)}") + println(s"fetch field on companion: ${g(B.nullCompanionField)}") + + val x = f + println(s"fetch local: ${g(x)}") + + val nulls = Array(f, f, f) + println(s"fetch array element: ${g(nulls(0))}") + + println(s"method that takes object: ${q(f)}") + println(s"method that takes anyref: ${r(f)}") + println(s"method that takes any: ${s(f)}") + } + + def f = null + + def g(x: String) = x + + def q(x: java.lang.Object) = x + def r(x: AnyRef) = x + def s(x: Any) = x +} + +abstract class A { + def f(x: String): String +} + +class B extends A { + val nullField = null + + // this forces a bridge method because the return type is different + override def f(x: String) : Null = null +} + +object B { + val nullCompanionField = null +} \ No newline at end of file -- cgit v1.2.3 From 62fcd3d922056407703ac3363b897f82980b0926 Mon Sep 17 00:00:00 2001 From: James Iry Date: Fri, 22 Feb 2013 09:17:30 -0800 Subject: SI-7015 Cleanup from review of null duplication Based on feedback on https://github.com/scala/scala/pull/2147 * Assertion in GenICode#adaptNullRef reports the erroneous type * Test makes the Null type explicit for greater clarity --- src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 2 +- test/files/run/t7015.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 439bb74267..7e17495035 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1025,7 +1025,7 @@ abstract class GenICode extends SubComponent { // this value into a local of type Null and we want the JVM to see that it's // a null value so we don't have to also adapt local loads. 
if (from == NullReference && to != UNIT && to != ObjectReference && to != AnyRefReference) { - assert(to.isReferenceType, "Attempt to adapt a null to a non reference type") + assert(to.isReferenceType, "Attempt to adapt a null to a non reference type $to.") // adapt by dropping what we've got and pushing a null which // will convince the JVM we really do have null ctx.bb.emit(DROP(from), pos) diff --git a/test/files/run/t7015.scala b/test/files/run/t7015.scala index 9344ca2906..37a73a9fc4 100644 --- a/test/files/run/t7015.scala +++ b/test/files/run/t7015.scala @@ -24,7 +24,7 @@ object Test { println(s"method that takes any: ${s(f)}") } - def f = null + def f: Null = null def g(x: String) = x -- cgit v1.2.3 From d1b16c4dc484d5f431bc4635148b065e722b2315 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 24 Feb 2013 22:17:49 +0100 Subject: Don't override empty-paren methods as paren-less. An exception is made for toString. --- src/compiler/scala/tools/nsc/CompileServer.scala | 2 +- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 4 ++-- .../scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index 72e8cc69c7..cf19eb4a2f 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -115,7 +115,7 @@ class StandardCompileServer extends SocketServer { reporter = new ConsoleReporter(newSettings, in, out) { // disable prompts, so that compile server cannot block - override def displayPrompt = () + override def displayPrompt() = () } def isCompilerReusable: Boolean = { if (compiler == null) { diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 61c65c211b..183aa7f294 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -170,8 +170,8 @@ self => val global: self.global.type = self.global } - def xmlLiteral : Tree = xmlp.xLiteral - def xmlLiteralPattern : Tree = xmlp.xLiteralPattern + def xmlLiteral() : Tree = xmlp.xLiteral + def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern } class OutlineParser(source: SourceFile) extends SourceFileParser(source) { diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index c9d295a350..3ce1a2fd95 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -131,7 +131,7 @@ abstract class TypeFlowAnalysis { init(m) } - def run = { + def run() = { timer.start // icodes.lubs0 = 0 forwardAnalysis(blockTransfer) -- cgit v1.2.3 From e7ab2f4a978e244cdd4c8c03170caa2a44c7adea Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 24 Feb 2013 22:35:56 +0100 Subject: Be explicit about empty param list calls. With the exception of toString and the odd JavaBean getter. 
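(A small sketch of the convention being applied, with illustrative names not drawn from the patch: methods that perform side effects keep their empty parameter list at both definition and call sites, while toString and simple accessors stay paren-less.)

    class Counter {
      private var n = 0
      def value: Int = n                       // pure accessor: no parens
      def increment(): Unit = n += 1           // side-effecting: declared with ()
      override def toString = s"Counter($n)"   // the conventional exception
    }

    object CounterDemo {
      def main(args: Array[String]): Unit = {
        val c = new Counter
        c.increment()                          // called with (), mirroring the declaration
        println(c.value)                       // accessor read without parens
      }
    }
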
--- src/compiler/scala/tools/ant/Same.scala | 2 +- src/compiler/scala/tools/ant/ScalaTool.scala | 12 +- src/compiler/scala/tools/ant/Scalac.scala | 2 +- src/compiler/scala/tools/nsc/CompileServer.scala | 2 +- src/compiler/scala/tools/nsc/ConsoleWriter.scala | 4 +- src/compiler/scala/tools/nsc/EvalLoop.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 20 +- src/compiler/scala/tools/nsc/Main.scala | 2 +- src/compiler/scala/tools/nsc/MainTokenMetric.scala | 4 +- src/compiler/scala/tools/nsc/PhaseAssembly.scala | 2 +- src/compiler/scala/tools/nsc/ast/Printers.scala | 6 +- .../scala/tools/nsc/ast/TreeBrowsers.scala | 10 +- .../scala/tools/nsc/ast/parser/MarkupParsers.scala | 44 ++--- .../scala/tools/nsc/ast/parser/Parsers.scala | 10 +- .../scala/tools/nsc/ast/parser/Scanners.scala | 10 +- .../scala/tools/nsc/backend/icode/GenICode.scala | 62 +++---- .../tools/nsc/backend/icode/ICodeCheckers.scala | 2 +- .../tools/nsc/backend/icode/Linearizers.scala | 4 +- .../scala/tools/nsc/backend/icode/Members.scala | 12 +- .../scala/tools/nsc/backend/icode/Printers.scala | 24 +-- .../backend/icode/analysis/DataFlowAnalysis.scala | 4 +- .../backend/icode/analysis/TypeFlowAnalysis.scala | 8 +- .../scala/tools/nsc/backend/jvm/GenASM.scala | 42 ++--- .../tools/nsc/backend/opt/ClosureElimination.scala | 4 +- .../nsc/backend/opt/DeadCodeElimination.scala | 8 +- .../nsc/backend/opt/InlineExceptionHandlers.scala | 6 +- .../scala/tools/nsc/backend/opt/Inliners.scala | 20 +- .../scala/tools/nsc/dependencies/Changes.scala | 2 +- src/compiler/scala/tools/nsc/doc/DocFactory.scala | 2 +- .../tools/nsc/doc/base/CommentFactoryBase.scala | 2 +- .../scala/tools/nsc/doc/doclet/Generator.scala | 2 +- src/compiler/scala/tools/nsc/doc/html/Doclet.scala | 2 +- .../scala/tools/nsc/doc/html/HtmlFactory.scala | 2 +- .../html/page/diagram/DotDiagramGenerator.scala | 2 +- .../scala/tools/nsc/doc/model/ModelFactory.scala | 6 +- .../scala/tools/nsc/interactive/REPL.scala | 2 +- .../tools/nsc/interactive/ScratchPadMaker.scala | 2 +- .../scala/tools/nsc/interpreter/ILoop.scala | 4 +- .../scala/tools/nsc/interpreter/TypeStrings.scala | 2 +- src/compiler/scala/tools/nsc/io/Jar.scala | 4 +- .../scala/tools/nsc/javac/JavaParsers.scala | 108 +++++------ .../scala/tools/nsc/javac/JavaScanners.scala | 202 ++++++++++----------- src/compiler/scala/tools/nsc/plugins/Plugins.scala | 4 +- .../tools/nsc/reporters/AbstractReporter.scala | 6 +- .../scala/tools/nsc/settings/MutableSettings.scala | 4 +- .../nsc/symtab/classfile/ClassfileParser.scala | 6 +- .../tools/nsc/symtab/classfile/ICodeReader.scala | 12 +- .../scala/tools/nsc/transform/CleanUp.scala | 4 +- .../scala/tools/nsc/transform/Constructors.scala | 4 +- .../scala/tools/nsc/transform/Erasure.scala | 4 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 2 +- .../tools/nsc/transform/OverridingPairs.scala | 4 +- .../scala/tools/nsc/transform/patmat/Logic.scala | 12 +- .../nsc/transform/patmat/MatchTranslation.scala | 2 +- .../scala/tools/nsc/typechecker/Duplicators.scala | 2 +- .../scala/tools/nsc/typechecker/Infer.scala | 2 +- .../scala/tools/nsc/typechecker/Macros.scala | 6 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 2 +- .../scala/tools/nsc/typechecker/TreeCheckers.scala | 4 +- .../scala/tools/nsc/typechecker/Typers.scala | 8 +- src/compiler/scala/tools/nsc/util/ClassPath.scala | 4 +- .../scala/tools/nsc/util/ShowPickled.scala | 2 +- .../scala/tools/reflect/MacroImplementations.scala | 2 +- src/compiler/scala/tools/util/Javap.scala | 6 +- 
src/compiler/scala/tools/util/PathResolver.scala | 2 +- src/compiler/scala/tools/util/SocketServer.scala | 4 +- 66 files changed, 390 insertions(+), 390 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala index e53679f052..a1f0cda662 100644 --- a/src/compiler/scala/tools/ant/Same.scala +++ b/src/compiler/scala/tools/ant/Same.scala @@ -110,7 +110,7 @@ class Same extends ScalaMatchingTask { \*============================================================================*/ override def execute() = { - validateAttributes + validateAttributes() val mapper = getMapper allEqualNow = true val originNames: Array[String] = getDirectoryScanner(origin.get).getIncludedFiles diff --git a/src/compiler/scala/tools/ant/ScalaTool.scala b/src/compiler/scala/tools/ant/ScalaTool.scala index 633145a97c..e7ac53c8fb 100644 --- a/src/compiler/scala/tools/ant/ScalaTool.scala +++ b/src/compiler/scala/tools/ant/ScalaTool.scala @@ -190,13 +190,13 @@ class ScalaTool extends ScalaMatchingTask { val builder = new StringBuilder() while (chars.hasNext) { - val char = chars.next + val char = chars.next() if (char == '@') { - var char = chars.next + var char = chars.next() val token = new StringBuilder() while (chars.hasNext && char != '@') { token.append(char) - char = chars.next + char = chars.next() } if (token.toString == "") builder.append('@') @@ -212,13 +212,13 @@ class ScalaTool extends ScalaMatchingTask { val builder = new StringBuilder() while (chars.hasNext) { - val char = chars.next + val char = chars.next() if (char == '@') { - var char = chars.next + var char = chars.next() val token = new StringBuilder() while (chars.hasNext && char != '@') { token.append(char) - char = chars.next + char = chars.next() } if (tokens.contains(token.toString)) builder.append(tokens(token.toString)) diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala index 3b8ae202f6..e6bd32c757 100644 --- a/src/compiler/scala/tools/ant/Scalac.scala +++ b/src/compiler/scala/tools/ant/Scalac.scala @@ -676,7 +676,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared { file } - val res = execWithArgFiles(java, List(writeSettings.getAbsolutePath)) + val res = execWithArgFiles(java, List(writeSettings().getAbsolutePath)) if (failonerror && res != 0) buildError("Compilation failed because of an internal compiler error;"+ " see the error output for details.") diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index cf19eb4a2f..c5366566d9 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -157,7 +157,7 @@ class StandardCompileServer extends SocketServer { } } reporter.printSummary() - if (isMemoryFullEnough) { + if (isMemoryFullEnough()) { info("Nulling out compiler due to memory utilization.") clearCompiler() } diff --git a/src/compiler/scala/tools/nsc/ConsoleWriter.scala b/src/compiler/scala/tools/nsc/ConsoleWriter.scala index 5c5606e98b..6c16d19d2c 100644 --- a/src/compiler/scala/tools/nsc/ConsoleWriter.scala +++ b/src/compiler/scala/tools/nsc/ConsoleWriter.scala @@ -13,9 +13,9 @@ import java.io.Writer * @version 1.0 */ class ConsoleWriter extends Writer { - def close = flush + def close() = flush() - def flush = Console.flush + def flush() = Console.flush() def write(cbuf: Array[Char], off: Int, len: Int) { if (len > 0) diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala 
b/src/compiler/scala/tools/nsc/EvalLoop.scala index c4147fad4c..15a296c836 100644 --- a/src/compiler/scala/tools/nsc/EvalLoop.scala +++ b/src/compiler/scala/tools/nsc/EvalLoop.scala @@ -14,7 +14,7 @@ trait EvalLoop { def loop(action: (String) => Unit) { @tailrec def inner() { Console.print(prompt) - val line = try Console.readLine catch { case _: EOFException => null } + val line = try Console.readLine() catch { case _: EOFException => null } if (line != null && line != "") { action(line) inner() diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index c5184eeae8..7c8dbc211e 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -409,7 +409,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) currentRun.informUnitStarting(this, unit) apply(unit) } - currentRun.advanceUnit + currentRun.advanceUnit() } finally { //assert(currentUnit == unit) currentRun.currentUnit = unit0 @@ -1200,7 +1200,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // Flush the cache in the terminal phase: the chain could have been built // before without being used. (This happens in the interpreter.) - terminal.reset + terminal.reset() // Each subcomponent supplies a phase, which are chained together. // If -Ystop:phase is given, neither that phase nor any beyond it is added. @@ -1283,14 +1283,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def advancePhase() { unitc = 0 phasec += 1 - refreshProgress + refreshProgress() } /** take note that a phase on a unit is completed * (for progress reporting) */ def advanceUnit() { unitc += 1 - refreshProgress + refreshProgress() } def cancel() { reporter.cancelled = true } @@ -1400,8 +1400,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if (canCheck) { phase = globalPhase - if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes - else treeChecker.checkTrees + if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes() + else treeChecker.checkTrees() } } @@ -1451,7 +1451,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } else { - allConditionalWarnings foreach (_.summarize) + allConditionalWarnings foreach (_.summarize()) if (seenMacroExpansionsFallingBack) warning("some macros could not be expanded and code fell back to overridden methods;"+ @@ -1502,7 +1502,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) while (globalPhase.hasNext && !reporter.hasErrors) { val startTime = currentTime phase = globalPhase - globalPhase.run + globalPhase.run() // progress update informTime(globalPhase.description, startTime) @@ -1542,7 +1542,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if (settings.Ystatistics.value) statistics.print(phase) - advancePhase + advancePhase() } if (traceSymbolActivity) @@ -1602,7 +1602,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val maxId = math.max(globalPhase.id, typerPhase.id) firstPhase.iterator takeWhile (_.id < maxId) foreach (ph => enteringPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit)) - refreshProgress + refreshProgress() } } diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala index c3c919fae4..27132f3c51 100644 --- a/src/compiler/scala/tools/nsc/Main.scala +++ b/src/compiler/scala/tools/nsc/Main.scala @@ -38,7 +38,7 @@ object Main extends Driver with EvalLoop { case Some(ex) => reporter.cancelled = 
true // Causes exit code to be non-0 case None => reporter.reset() // Causes other compiler errors to be ignored } - askShutdown + askShutdown() false } else true diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala index 50cd51d486..9eb162a377 100644 --- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala +++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala @@ -21,11 +21,11 @@ object MainTokenMetric { var totale = 0 for (source <- fnames) { val s = new UnitScanner(new CompilationUnit(compiler.getSourceFile(source))) - s.nextToken + s.nextToken() var i = 0 while (s.token != EOF) { i += 1 - s.nextToken + s.nextToken() } Console.println(i.toString + " " + source.toString()) totale += i diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala index 67dc1e3b66..ae71eb7255 100644 --- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala +++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala @@ -165,7 +165,7 @@ trait PhaseAssembly { } else { val promote = hl.to.before.filter(e => (!e.hard)) - hl.to.before.clear + hl.to.before.clear() sanity foreach (edge => hl.to.before += edge) for (edge <- promote) { rerun = true diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala index b9f348632a..bff036e782 100644 --- a/src/compiler/scala/tools/nsc/ast/Printers.scala +++ b/src/compiler/scala/tools/nsc/ast/Printers.scala @@ -152,7 +152,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => // If thenp or elsep has only one statement, it doesn't need more than one line. case If(cond, thenp, elsep) => def ifIndented(x: Tree) = { - indent ; println() ; printTree(x) ; undent + indent() ; println() ; printTree(x) ; undent() } val List(thenStmts, elseStmts) = List(thenp, elsep) map allStatements @@ -166,12 +166,12 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => if (elseStmts.nonEmpty) { print(" else") - indent ; println() + indent() ; println() elseStmts match { case List(x) => printTree(x) case _ => printTree(elsep) } - undent ; println() + undent() ; println() } case _ => s() } diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index 30a9348fb0..329f0fa54b 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -59,7 +59,7 @@ abstract class TreeBrowsers { frame.createFrame(lock) // wait for the frame to be closed - lock.acquire + lock.acquire() t } @@ -81,7 +81,7 @@ abstract class TreeBrowsers { frame.createFrame(lock) // wait for the frame to be closed - lock.acquire + lock.acquire() } } @@ -182,13 +182,13 @@ abstract class TreeBrowsers { * especially symbols/types would change while the window is visible. */ def createFrame(lock: Lock): Unit = { - lock.acquire // keep the lock until the user closes the window + lock.acquire() // keep the lock until the user closes the window frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE) frame.addWindowListener(new WindowAdapter() { /** Release the lock, so compilation may resume after the window is closed. 
*/ - override def windowClosed(e: WindowEvent): Unit = lock.release + override def windowClosed(e: WindowEvent): Unit = lock.release() }); jTree = new JTree(treeModel) { @@ -251,7 +251,7 @@ abstract class TreeBrowsers { putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_Q, menuKey + shiftKey, false)) override def actionPerformed(e: ActionEvent) { closeWindow() - global.currentRun.cancel + global.currentRun.cancel() } } ) diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index 639780149e..e8cef0d9b1 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -99,7 +99,7 @@ trait MarkupParsers { */ def xCheckEmbeddedBlock: Boolean = { // attentions, side-effect, used in xText - xEmbeddedBlock = (ch == '{') && { nextch; (ch != '{') } + xEmbeddedBlock = (ch == '{') && { nextch(); (ch != '{') } xEmbeddedBlock } @@ -115,7 +115,7 @@ trait MarkupParsers { while (isNameStart(ch)) { val start = curOffset val key = xName - xEQ + xEQ() val mid = curOffset val value: Tree = ch match { case '"' | '\'' => @@ -128,7 +128,7 @@ trait MarkupParsers { } case '{' => - nextch + nextch() xEmbeddedExpr case SU => throw TruncatedXMLControl @@ -141,7 +141,7 @@ trait MarkupParsers { aMap(key) = value if (ch != '/' && ch != '>') - xSpace + xSpace() } aMap } @@ -184,10 +184,10 @@ trait MarkupParsers { * @precond ch == '&' */ def content_AMP(ts: ArrayBuffer[Tree]) { - nextch + nextch() val toAppend = ch match { case '#' => // CharacterRef - nextch + nextch() val theChar = handle.text(tmppos, xCharRef) xToken(';') theChar @@ -216,8 +216,8 @@ trait MarkupParsers { return true // end tag val toAppend = ch match { - case '!' => nextch ; if (ch =='[') xCharData else xComment // CDATA or Comment - case '?' => nextch ; xProcInstr // PI + case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment + case '?' => nextch() ; xProcInstr // PI case _ => element // child node } @@ -234,7 +234,7 @@ trait MarkupParsers { tmppos = o2p(curOffset) ch match { // end tag, cdata, comment, pi or child node - case '<' => nextch ; if (content_LT(ts)) return ts + case '<' => nextch() ; if (content_LT(ts)) return ts // either the character '{' or an embedded scala block } case '{' => content_BRACE(tmppos, ts) // } // EntityRef or CharRef @@ -266,7 +266,7 @@ trait MarkupParsers { debugLastStartElement.push((start, qname)) val ts = content xEndTag(qname) - debugLastStartElement.pop + debugLastStartElement.pop() val pos = r2p(start, start, curOffset) qname match { case "xml:group" => handle.group(pos, ts) @@ -285,12 +285,12 @@ trait MarkupParsers { while (ch != SU) { if (ch == '}') { - if (charComingAfter(nextch) == '}') nextch + if (charComingAfter(nextch()) == '}') nextch() else errorBraces() } buf append ch - nextch + nextch() if (xCheckEmbeddedBlock || ch == '<' || ch == '&') return done } @@ -337,12 +337,12 @@ trait MarkupParsers { content_LT(ts) // parse more XML ? 
- if (charComingAfter(xSpaceOpt) == '<') { - xSpaceOpt + if (charComingAfter(xSpaceOpt()) == '<') { + xSpaceOpt() while (ch == '<') { - nextch + nextch() ts append element - xSpaceOpt + xSpaceOpt() } handle.makeXMLseq(r2p(start, start, curOffset), ts) } @@ -363,7 +363,7 @@ trait MarkupParsers { saving[Boolean, Tree](handle.isPattern, handle.isPattern = _) { handle.isPattern = true val tree = xPattern - xSpaceOpt + xSpaceOpt() tree } }, @@ -401,10 +401,10 @@ trait MarkupParsers { val start = curOffset val qname = xName debugLastStartElement.push((start, qname)) - xSpaceOpt + xSpaceOpt() val ts = new ArrayBuffer[Tree] - val isEmptyTag = (ch == '/') && { nextch ; true } + val isEmptyTag = (ch == '/') && { nextch() ; true } xToken('>') if (!isEmptyTag) { @@ -414,13 +414,13 @@ trait MarkupParsers { if (xEmbeddedBlock) ts ++= xScalaPatterns else ch match { case '<' => // tag - nextch + nextch() if (ch != '/') ts append xPattern // child else return false // terminate case '{' => // embedded Scala patterns while (ch == '{') { - nextch + nextch() ts ++= xScalaPatterns } assert(!xEmbeddedBlock, "problem with embedded block") @@ -438,7 +438,7 @@ trait MarkupParsers { while (doPattern) { } // call until false xEndTag(qname) - debugLastStartElement.pop + debugLastStartElement.pop() } handle.makeXMLpat(r2p(start, start, curOffset), qname, ts) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 183aa7f294..17c9d7814d 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1519,7 +1519,7 @@ self => placeholderParams = param :: placeholderParams id case LPAREN => - atPos(in.offset)(makeParens(commaSeparated(expr))) + atPos(in.offset)(makeParens(commaSeparated(expr()))) case LBRACE => canApply = false blockExpr() @@ -2482,7 +2482,7 @@ self => * }}} */ def funDefOrDcl(start : Int, mods: Modifiers): Tree = { - in.nextToken + in.nextToken() if (in.token == THIS) { atPos(start, in.skipToken()) { val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), ofCaseClass = false) @@ -2644,7 +2644,7 @@ self => * }}} */ def classDef(start: Int, mods: Modifiers): ClassDef = { - in.nextToken + in.nextToken() val nameOffset = in.offset val name = identForType() atPos(start, if (name == tpnme.ERROR) start else nameOffset) { @@ -2684,7 +2684,7 @@ self => * }}} */ def objectDef(start: Int, mods: Modifiers): ModuleDef = { - in.nextToken + in.nextToken() val nameOffset = in.offset val name = ident() val tstart = in.offset @@ -2790,7 +2790,7 @@ self => if (inScalaRootPackage && ScalaValueClassNames.contains(name)) Template(parents0, self, anyvalConstructor :: body) else - Template(anyrefParents, self, constrMods, vparamss, body, o2p(tstart)) + Template(anyrefParents(), self, constrMods, vparamss, body, o2p(tstart)) } } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 1554be6ebb..b28d4cd08d 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -366,7 +366,7 @@ trait Scanners extends ScannersCommon { getOperatorRest() } } - fetchLT + fetchLT() case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | /*'<' | */ '>' | '?' 
| ':' | '=' | '&' | @@ -403,7 +403,7 @@ trait Scanners extends ScannersCommon { } getNumber() } - fetchZero + fetchZero() case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => base = 10 getNumber() @@ -444,7 +444,7 @@ trait Scanners extends ScannersCommon { } } } - fetchDoubleQuote + fetchDoubleQuote() case '\'' => def fetchSingleQuote() = { nextChar() @@ -463,7 +463,7 @@ trait Scanners extends ScannersCommon { } } } - fetchSingleQuote + fetchSingleQuote() case '.' => nextChar() if ('0' <= ch && ch <= '9') { @@ -512,7 +512,7 @@ trait Scanners extends ScannersCommon { nextChar() } } - fetchOther + fetchOther() } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 7e17495035..f19fb56db0 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -50,14 +50,14 @@ abstract class GenICode extends SubComponent { var unit: CompilationUnit = NoCompilationUnit override def run() { - scalaPrimitives.init + scalaPrimitives.init() classes.clear() super.run() } override def apply(unit: CompilationUnit): Unit = { this.unit = unit - unit.icode.clear + unit.icode.clear() informProgress("Generating icode for " + unit) gen(unit.body) this.unit = NoCompilationUnit @@ -137,7 +137,7 @@ abstract class GenICode extends SubComponent { else ctx1.bb.closeWith(RETURN(m.returnType)) } - if (!ctx1.bb.closed) ctx1.bb.close + if (!ctx1.bb.closed) ctx1.bb.close() prune(ctx1.method) } else ctx1.method.setCode(NoCode) @@ -186,7 +186,7 @@ abstract class GenICode extends SubComponent { val thrownKind = toTypeKind(expr.tpe) val ctx1 = genLoad(expr, ctx, thrownKind) ctx1.bb.emit(THROW(expr.tpe.typeSymbol), expr.pos) - ctx1.bb.enterIgnoreMode + ctx1.bb.enterIgnoreMode() (ctx1, NothingReference) } @@ -335,7 +335,7 @@ abstract class GenICode extends SubComponent { MONITOR_EXIT() setPos tree.pos, THROW(ThrowableClass) )) - exhCtx.bb.enterIgnoreMode + exhCtx.bb.enterIgnoreMode() exhCtx })), EmptyTree, tree) @@ -349,9 +349,9 @@ abstract class GenICode extends SubComponent { private def genLoadIf(tree: If, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = { val If(cond, thenp, elsep) = tree - var thenCtx = ctx.newBlock - var elseCtx = ctx.newBlock - val contCtx = ctx.newBlock + var thenCtx = ctx.newBlock() + var elseCtx = ctx.newBlock() + val contCtx = ctx.newBlock() genCond(cond, ctx, thenCtx, elseCtx) @@ -434,7 +434,7 @@ abstract class GenICode extends SubComponent { else if (isArrayOp(code)) genArrayOp(tree, ctx, code, expectedType) else if (isLogicalOp(code) || isComparisonOp(code)) { - val trueCtx, falseCtx, afterCtx = ctx.newBlock + val trueCtx, falseCtx, afterCtx = ctx.newBlock() genCond(tree, ctx, trueCtx, falseCtx) trueCtx.bb.emitOnly( @@ -477,7 +477,7 @@ abstract class GenICode extends SubComponent { val resCtx: Context = tree match { case LabelDef(name, params, rhs) => def genLoadLabelDef = { - val ctx1 = ctx.newBlock + val ctx1 = ctx.newBlock() if (nme.isLoopHeaderLabel(name)) ctx1.bb.loopHeader = true @@ -559,7 +559,7 @@ abstract class GenICode extends SubComponent { // we have to run this without the same finalizer in // the list, otherwise infinite recursion happens for // finalizers that contain 'return' - val fctx = finalizerCtx.newBlock + val fctx = finalizerCtx.newBlock() ctx1.bb.closeWith(JUMP(fctx.bb)) ctx1 = genLoad(f1, fctx, UNIT) } @@ -572,7 +572,7 @@ abstract class GenICode extends SubComponent { } adapt(returnedKind, 
ctx1.method.returnType, ctx1, tree.pos) ctx1.bb.emit(RETURN(ctx.method.returnType), tree.pos) - ctx1.bb.enterIgnoreMode + ctx1.bb.enterIgnoreMode() generatedType = expectedType ctx1 } @@ -750,7 +750,7 @@ abstract class GenICode extends SubComponent { // (if it's not in ignore mode, double-closing is an error) val ctx1 = genLoadLabelArguments(args, label, ctx) ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label)) - ctx1.bb.enterIgnoreMode + ctx1.bb.enterIgnoreMode() ctx1 } else if (isPrimitive(sym)) { // primitive method call val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType) @@ -906,10 +906,10 @@ abstract class GenICode extends SubComponent { genLoadLiteral case Block(stats, expr) => - ctx.enterScope + ctx.enterScope() var ctx1 = genStat(stats, ctx) ctx1 = genLoad(expr, ctx1, expectedType) - ctx1.exitScope + ctx1.exitScope() ctx1 case Typed(Super(_, _), _) => @@ -948,7 +948,7 @@ abstract class GenICode extends SubComponent { def genLoadMatch = { debuglog("Generating SWITCH statement."); val ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue) - val afterCtx = ctx1.newBlock + val afterCtx = ctx1.newBlock() var caseCtx: Context = null generatedType = toTypeKind(tree.tpe) @@ -958,7 +958,7 @@ abstract class GenICode extends SubComponent { for (caze @ CaseDef(pat, guard, body) <- cases) { assert(guard == EmptyTree, guard) - val tmpCtx = ctx1.newBlock + val tmpCtx = ctx1.newBlock() pat match { case Literal(value) => tags = value.intValue :: tags @@ -1053,7 +1053,7 @@ abstract class GenICode extends SubComponent { // 3: invokevirtual #29; //Method scala/Predef$.$qmark$qmark$qmark:()Lscala/runtime/Nothing$; // 6: athrow // So this case tacks on the ahtrow which makes the JVM happy because class Nothing is declared as a subclass of Throwable - case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode + case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode() // TODO why do we have this case? It's saying if we have a throwable and a non-throwable is expected then we should emit a cast? Why would we get here? case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables case _ => @@ -1397,7 +1397,7 @@ abstract class GenICode extends SubComponent { lazy val rhs = args.head def genZandOrZor(and: Boolean) = { - val ctxInterm = ctx.newBlock + val ctxInterm = ctx.newBlock() if (and) genCond(lhs, ctx, ctxInterm, elseCtx) else genCond(lhs, ctx, thenCtx, ctxInterm) @@ -1423,10 +1423,10 @@ abstract class GenICode extends SubComponent { else if (isComparisonOp(code)) genComparisonOp(lhs, rhs, code) else - default + default() } - case _ => default + case _ => default() } } @@ -1495,11 +1495,11 @@ abstract class GenICode extends SubComponent { } else { val eqEqTempLocal = getTempLocal var ctx1 = genLoad(l, ctx, ObjectReference) - lazy val nonNullCtx = ctx1.newBlock + lazy val nonNullCtx = ctx1.newBlock() // l == r -> if (l eq null) r eq null else l.equals(r) ctx1 = genLoad(r, ctx1, ObjectReference) - val nullCtx = ctx1.newBlock + val nullCtx = ctx1.newBlock() ctx1.bb.emitOnly( STORE_LOCAL(eqEqTempLocal) setPos l.pos, @@ -1833,13 +1833,13 @@ abstract class GenICode extends SubComponent { ctx1.bb = ctx1.method.startBlock ctx1.defdef = d ctx1.scope = EmptyScope - ctx1.enterScope + ctx1.enterScope() ctx1 } /** Return a new context for a new basic block. 
*/ def newBlock(): Context = { - val block = method.code.newBlock + val block = method.code.newBlock() handlers foreach (_ addCoveredBlock block) currentExceptionHandlers foreach (_ addBlock block) block.varsInScope.clear() @@ -1886,7 +1886,7 @@ abstract class GenICode extends SubComponent { */ private def enterExceptionHandler(exh: ExceptionHandler): Context = { currentExceptionHandlers ::= exh - val ctx = newBlock + val ctx = newBlock() exh.setStartBlock(ctx.bb) ctx } @@ -1929,7 +1929,7 @@ abstract class GenICode extends SubComponent { val outerCtx = this.dup // context for generating exception handlers, covered by finalizer val finalizerCtx = this.dup // context for generating finalizer handler - val afterCtx = outerCtx.newBlock + val afterCtx = outerCtx.newBlock() var tmp: Local = null val kind = toTypeKind(tree.tpe) val guardResult = kind != UNIT && mayCleanStack(finalizer) @@ -1943,7 +1943,7 @@ abstract class GenICode extends SubComponent { } def emitFinalizer(ctx: Context): Context = if (!finalizer.isEmpty) { - val ctx1 = finalizerCtx.dup.newBlock + val ctx1 = finalizerCtx.dup.newBlock() ctx.bb.closeWith(JUMP(ctx1.bb)) if (guardResult) { @@ -1966,8 +1966,8 @@ abstract class GenICode extends SubComponent { val ctx1 = genLoad(finalizer, ctx, UNIT); ctx1.bb.emit(LOAD_LOCAL(exception)); ctx1.bb.emit(THROW(ThrowableClass)); - ctx1.bb.enterIgnoreMode; - ctx1.bb.close + ctx1.bb.enterIgnoreMode(); + ctx1.bb.close() finalizerCtx.endHandler() } @@ -1983,7 +1983,7 @@ abstract class GenICode extends SubComponent { outerCtx.endHandler() } - val bodyCtx = this.newBlock + val bodyCtx = this.newBlock() if (finalizer != EmptyTree) bodyCtx.addFinalizer(finalizer, finalizerCtx) diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index 5d32795e24..8cd7c70bf0 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -208,7 +208,7 @@ abstract class ICodeCheckers { if (s1.length != s2.length) { if (allUnits(s1) && allUnits(s2)) workaround("Ignoring mismatched boxed units") - else if (isHandlerBlock) + else if (isHandlerBlock()) workaround("Ignoring mismatched stacks entering exception handler") else throw new CheckerException(incompatibleString) diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala index 80477f0c6e..35eedc3539 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala @@ -78,7 +78,7 @@ trait Linearizers { } } - def dequeue: Elem = worklist.pop; + def dequeue: Elem = worklist.pop(); /** * Prepend b to the list, if not already scheduled. 
@@ -146,7 +146,7 @@ trait Linearizers { def linearize(m: IMethod): List[BasicBlock] = { blocks = Nil; visited.clear() - added.clear; + added.clear(); m.exh foreach (b => rpo(b.startBlock)); rpo(m.startBlock); diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala index 248a505b54..fe837216ed 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala @@ -58,7 +58,7 @@ trait Members { } // Constructor code - startBlock = newBlock + startBlock = newBlock() def removeBlock(b: BasicBlock) { if (settings.debug.value) { @@ -155,7 +155,7 @@ trait Members { class IMethod(val symbol: Symbol) extends IMember { var code: Code = NoCode - def newBlock() = code.newBlock + def newBlock() = code.newBlock() def startBlock = code.startBlock def lastBlock = { assert(blocks.nonEmpty, symbol); blocks.last } def blocks = code.blocksList @@ -232,7 +232,7 @@ trait Members { var bb = code.startBlock while (!nextBlock.isEmpty) { if (nextBlock.isDefinedAt(bb)) { - bb.open + bb.open() var succ = bb do { succ = nextBlock(succ); @@ -246,7 +246,7 @@ trait Members { val oldTKs = lastInstr.consumedTypes assert(lastInstr.consumed == oldTKs.size, "Someone forgot to override consumedTypes() in " + lastInstr) - bb.removeLastInstruction + bb.removeLastInstruction() for(tk <- oldTKs.reverse) { bb.emit(DROP(tk), lastInstr.pos) } succ.toList foreach { i => bb.emit(i, i.pos) } code.removeBlock(succ) @@ -254,9 +254,9 @@ trait Members { nextBlock -= bb } while (nextBlock.isDefinedAt(succ)) - bb.close + bb.close() } else - bb = nextBlock.keysIterator.next + bb = nextBlock.keysIterator.next() } checkValid(this) } diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala index 61af6e5119..253f766469 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala @@ -28,7 +28,7 @@ trait Printers { self: ICodes => def println(s: String) { print(s); - println + println() } def println() { @@ -55,12 +55,12 @@ trait Printers { self: ICodes => def printClass(cls: IClass) { print(cls.symbol.toString()); print(" extends "); printList(cls.symbol.info.parents, ", "); - indent; println(" {"); + indent(); println(" {"); println("// fields:"); - cls.fields.foreach(printField); println; + cls.fields.foreach(printField); println(); println("// methods"); cls.methods.foreach(printMethod); - undent; println; + undent(); println(); println("}") } @@ -80,16 +80,16 @@ trait Printers { self: ICodes => println("locals: " + m.locals.mkString("", ", ", "")) println("startBlock: " + m.startBlock) println("blocks: " + m.code.blocks.mkString("[", ",", "]")) - println + println() lin.linearize(m) foreach printBlock println("}") - indent; println("Exception handlers: ") + indent(); println("Exception handlers: ") m.exh foreach printExceptionHandler - undent; println + undent(); println() } else - println + println() } def printParam(p: Local) { @@ -98,10 +98,10 @@ trait Printers { self: ICodes => } def printExceptionHandler(e: ExceptionHandler) { - indent; + indent(); println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock); println("consisting of blocks: " + e.blocks); - undent; + undent(); println("with finalizer: " + e.finalizer); // linearizer.linearize(e.startBlock) foreach printBlock; } @@ -111,9 +111,9 @@ trait 
Printers { self: ICodes => if (bb.loopHeader) print("[loop header]") print(": "); if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString) - indent; println + indent(); println() bb.toList foreach printInstruction - undent; println + undent(); println() } def printInstruction(i: Instruction) { diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala index cc3a7eb876..a9783b43dc 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala @@ -30,7 +30,7 @@ trait DataFlowAnalysis[L <: SemiLattice] { /* Implement this function to initialize the worklist. */ def init(f: => Unit): Unit = { iterations = 0 - in.clear; out.clear; worklist.clear; visited.clear; + in.clear(); out.clear(); worklist.clear(); visited.clear(); f } @@ -46,7 +46,7 @@ trait DataFlowAnalysis[L <: SemiLattice] { while (!worklist.isEmpty) { if (stat) iterations += 1 //Console.println("worklist in: " + worklist); - val point = worklist.iterator.next; worklist -= point; visited += point; + val point = worklist.iterator.next(); worklist -= point; visited += point; //Console.println("taking out point: " + point + " worklist out: " + worklist); val output = f(point, in(point)) diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 3ce1a2fd95..7b0627294e 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -132,7 +132,7 @@ abstract class TypeFlowAnalysis { } def run() = { - timer.start + timer.start() // icodes.lubs0 = 0 forwardAnalysis(blockTransfer) timer.stop @@ -355,7 +355,7 @@ abstract class TypeFlowAnalysis { override def run { - timer.start + timer.start() forwardAnalysis(blockTransfer) timer.stop @@ -601,7 +601,7 @@ abstract class TypeFlowAnalysis { return; } - worklist.clear // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit. + worklist.clear() // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit. 
// asserts conveying an idea what CFG shapes arrive here: // staleIn foreach (p => assert( !in.isDefinedAt(p), p)) @@ -665,7 +665,7 @@ abstract class TypeFlowAnalysis { override def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = { while (!worklist.isEmpty && relevantBBs.nonEmpty) { if (stat) iterations += 1 - val point = worklist.iterator.next; worklist -= point; + val point = worklist.iterator.next(); worklist -= point; if(relevantBBs(point)) { shrinkedWatchlist = false val output = f(point, in(point)) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 45c366cc69..909c82ff23 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -1618,7 +1618,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { if (isParcelableClass) { addCreatorCode(lastBlock) } lastBlock emit RETURN(UNIT) - lastBlock.close + lastBlock.close() method = m jmethod = clinitMethod @@ -1798,8 +1798,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) } - def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) } - def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) } + def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF(), label) } + def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP(), label) } def emitIF_ACMP(cond: TestOp, label: asm.Label) { assert((cond == EQ) || (cond == NE), cond) val opc = (if(cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE) @@ -2365,7 +2365,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { scoping.popScope(lv, end, instr.pos) } } - genLocalInstr + genLocalInstr() case icodes.stackCat => def genStackInstr() = (instr: @unchecked) match { @@ -2389,7 +2389,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { case LOAD_EXCEPTION(_) => () } - genStackInstr + genStackInstr() case icodes.constCat => genConstant(jmethod, instr.asInstanceOf[CONSTANT].constant) @@ -2423,7 +2423,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } } - genCastInstr + genCastInstr() case icodes.objsCat => def genObjsInstr() = (instr: @unchecked) match { @@ -2442,7 +2442,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { case MONITOR_ENTER() => emit(Opcodes.MONITORENTER) case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT) } - genObjsInstr + genObjsInstr() case icodes.fldsCat => def genFldsInstr() = (instr: @unchecked) match { @@ -2463,7 +2463,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr) } - genFldsInstr + genFldsInstr() case icodes.mthdsCat => def genMethodsInstr() = (instr: @unchecked) match { @@ -2476,7 +2476,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { case call @ CALL_METHOD(method, style) => genCallMethod(call) } - genMethodsInstr + genMethodsInstr() case icodes.arraysCat => def genArraysInstr() = (instr: @unchecked) match { @@ -2485,7 +2485,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { case CREATE_ARRAY(elem, 1) => jcode newarray elem case CREATE_ARRAY(elem, dims) 
=> jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims) } - genArraysInstr + genArraysInstr() case icodes.jumpsCat => def genJumpInstr() = (instr: @unchecked) match { @@ -2535,7 +2535,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { case CJUMP(success, failure, cond, kind) => if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT if (nextBlock == success) { - jcode.emitIF_ICMP(cond.negate, labels(failure)) + jcode.emitIF_ICMP(cond.negate(), labels(failure)) // .. and fall through to success label } else { jcode.emitIF_ICMP(cond, labels(success)) @@ -2543,7 +2543,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } } else if (kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_) if (nextBlock == success) { - jcode.emitIF_ACMP(cond.negate, labels(failure)) + jcode.emitIF_ACMP(cond.negate(), labels(failure)) // .. and fall through to success label } else { jcode.emitIF_ACMP(cond, labels(success)) @@ -2560,7 +2560,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { else emit(Opcodes.DCMPL) } if (nextBlock == success) { - jcode.emitIF(cond.negate, labels(failure)) + jcode.emitIF(cond.negate(), labels(failure)) // .. and fall through to success label } else { jcode.emitIF(cond, labels(success)) @@ -2571,7 +2571,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { case CZJUMP(success, failure, cond, kind) => if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT if (nextBlock == success) { - jcode.emitIF(cond.negate, labels(failure)) + jcode.emitIF(cond.negate(), labels(failure)) } else { jcode.emitIF(cond, labels(success)) if (nextBlock != failure) { jcode goTo labels(failure) } @@ -2607,7 +2607,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { else emit(Opcodes.DCMPL) } if (nextBlock == success) { - jcode.emitIF(cond.negate, labels(failure)) + jcode.emitIF(cond.negate(), labels(failure)) } else { jcode.emitIF(cond, labels(success)) if (nextBlock != failure) { jcode goTo labels(failure) } @@ -2615,14 +2615,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } } - genJumpInstr + genJumpInstr() case icodes.retCat => def genRetInstr() = (instr: @unchecked) match { case RETURN(kind) => jcode emitRETURN kind case THROW(_) => emit(Opcodes.ATHROW) } - genRetInstr + genRetInstr() } } @@ -2732,7 +2732,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { abort("Unknown arithmetic primitive " + primitive) } } - genArith + genArith() // TODO Logical's 2nd elem should be declared ValueTypeKind, to better approximate its allowed values (isIntSized, its comments appears to convey) // TODO GenICode uses `toTypeKind` to define that elem, `toValueTypeKind` would be needed instead. 
@@ -2764,7 +2764,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { if (kind != BOOL) { emitT2T(INT, kind) } } } - genLogical + genLogical() case Shift(op, kind) => def genShift() = op match { @@ -2793,7 +2793,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { emitT2T(INT, kind) } } - genShift + genShift() case Comparison(op, kind) => def genCompare() = op match { @@ -2813,7 +2813,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } } - genCompare + genCompare() case Conversion(src, dst) => debuglog("Converting from: " + src + " to: " + dst) diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala index 5dd20a6919..2d53eb2ed9 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala @@ -96,7 +96,7 @@ abstract class ClosureElimination extends SubComponent { /* Some embryonic copy propagation. */ def analyzeMethod(m: IMethod): Unit = try {if (m.hasCode) { cpp.init(m) - cpp.run + cpp.run() m.linearizedBlocks() foreach { bb => var info = cpp.in(bb) @@ -200,7 +200,7 @@ abstract class ClosureElimination extends SubComponent { def apply(m: IMethod): Unit = if (m.hasCode) { liveness = new global.icodes.liveness.LivenessAnalysis liveness.init(m) - liveness.run + liveness.run() m foreachBlock transformBlock } diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index 20e1cd2188..b998e3fbd2 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -106,7 +106,7 @@ abstract class DeadCodeElimination extends SubComponent { def collectRDef(m: IMethod): Unit = if (m.hasCode) { defs = immutable.HashMap.empty; worklist.clear(); useful.clear(); rdef.init(m); - rdef.run; + rdef.run(); m foreachBlock { bb => useful(bb) = new mutable.BitSet(bb.size) @@ -340,8 +340,8 @@ abstract class DeadCodeElimination extends SubComponent { m foreachBlock { bb => debuglog(bb + ":") val oldInstr = bb.toList - bb.open - bb.clear + bb.open() + bb.clear() for (Pair(i, idx) <- oldInstr.zipWithIndex) { if (useful(bb)(idx)) { debuglog(" * " + i + " is useful") @@ -374,7 +374,7 @@ abstract class DeadCodeElimination extends SubComponent { } } - if (bb.nonEmpty) bb.close + if (bb.nonEmpty) bb.close() else log(s"empty block encountered in $m") } } diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala index 4e65c72b0b..7f76839ae5 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala @@ -69,9 +69,9 @@ abstract class InlineExceptionHandlers extends SubComponent { * -some exception handler duplicates expect the exception on the stack while others expect it in a local * => Option[Local] */ - private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]] + private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]]() /* This map is the inverse of handlerCopies, used to compute the stack of duplicate blocks */ - private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, 
TypeKind)] + private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, TypeKind)]() private def handlerLocal(bb: BasicBlock): Option[Local] = for (v <- handlerCopies get bb ; (local, block) <- v ; l <- local) yield l @@ -357,7 +357,7 @@ abstract class InlineExceptionHandlers extends SubComponent { } val caughtException = toTypeKind(caughtClass.tpe) // copy the exception handler code once again, dropping the LOAD_EXCEPTION - val copy = handler.code.newBlock + val copy = handler.code.newBlock() copy.emitOnly((handler.iterator drop dropCount).toSeq: _*) // extend the handlers of the handler to the copy diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index ca1cfc8929..c834607203 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -242,7 +242,7 @@ abstract class Inliners extends SubComponent { def clearCaches() { // methods NonPublicRefs.usesNonPublics.clear() - recentTFAs.clear + recentTFAs.clear() tfa.knownUnsafe.clear() tfa.knownSafe.clear() tfa.knownNever.clear() @@ -363,7 +363,7 @@ abstract class Inliners extends SubComponent { assert(ocm.method.isEffectivelyFinal && ocm.method.owner.isEffectivelyFinal) if(analyzeInc(ocm, x, ocm.method.owner, -1, ocm.method)) { inlineCount += 1 - break + break() } } } @@ -513,7 +513,7 @@ abstract class Inliners extends SubComponent { for (cm <- cms; if tfa.remainingCALLs.isDefinedAt(cm)) { val analysis.CallsiteInfo(_, receiver, stackLength, concreteMethod) = tfa.remainingCALLs(cm) if (analyzeInc(cm, bb, receiver, stackLength, concreteMethod)) { - break + break() } } } @@ -568,7 +568,7 @@ abstract class Inliners extends SubComponent { warn(inlFail.pos, "At the end of the day, could not inline @inline-marked method " + inlFail.method.originalName.decode) } - m.normalize + m.normalize() if (sizeBeforeInlining > 0) { val instrAfterInlining = m.code.instructionCount val inlinings = caller.inlinedCalls @@ -811,7 +811,7 @@ abstract class Inliners extends SubComponent { /** Add a new block in the current context. 
*/ def newBlock() = { - val b = caller.m.code.newBlock + val b = caller.m.code.newBlock() activeHandlers foreach (_ addCoveredBlock b) if (retVal ne null) b.varsInScope += retVal b.varsInScope += inlinedThis @@ -890,8 +890,8 @@ abstract class Inliners extends SubComponent { } // re-emit the instructions before the call - block.open - block.clear + block.open() + block.clear() block emit instrBefore // store the arguments into special locals @@ -900,7 +900,7 @@ abstract class Inliners extends SubComponent { // jump to the start block of the callee blockEmit(JUMP(inlinedBlock(inc.m.startBlock))) - block.close + block.close() // duplicate the other blocks in the callee val calleeLin = inc.m.linearizedBlocks() @@ -923,11 +923,11 @@ abstract class Inliners extends SubComponent { emitInlined(map(i)) info = if(hasRETURN) a.interpret(info, i) else null } - inlinedBlock(bb).close + inlinedBlock(bb).close() } afterBlock emit instrAfter - afterBlock.close + afterBlock.close() staleIn += afterBlock splicedBlocks ++= (calleeLin map inlinedBlock) diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala index 7807f0ba03..531348b451 100644 --- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala +++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala @@ -168,7 +168,7 @@ abstract class Changes { implicit val defaultStrictTypeRefTest = true val to = toSym.info - changedTypeParams.clear + changedTypeParams.clear() def omitSymbols(s: Symbol): Boolean = !s.hasFlag(LOCAL | LIFTED | PRIVATE | SYNTHETIC) val cs = new mutable.ListBuffer[Change] diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala index a99b17dce4..d63881170e 100644 --- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala @@ -118,7 +118,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor } case _ => () } - docletInstance.generate + docletInstance.generate() } try generate() diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala index c2b3c410fe..5a3dffbf16 100755 --- a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -516,7 +516,7 @@ trait CommentFactoryBase { this: MemberLookupBase => else { val s = summary() val r = - if (checkParaEnded) List(s) else List(s, inline(false)) + if (checkParaEnded()) List(s) else List(s, inline(false)) summaryParsed = true Paragraph(Chain(r)) } diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala b/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala index 735b79c336..42b56aa927 100644 --- a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala +++ b/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala @@ -21,7 +21,7 @@ abstract class Generator { /** Outputs documentation (as a side effect). */ def generate(): Unit = { assert(checks forall { check => check() }) - generateImpl + generateImpl() } /** Outputs documentation (as a side effect). This method is called only if all `checks` are true. 
*/ diff --git a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala b/src/compiler/scala/tools/nsc/doc/html/Doclet.scala index 3aa3e87554..21c5f6bb67 100644 --- a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala +++ b/src/compiler/scala/tools/nsc/doc/html/Doclet.scala @@ -13,7 +13,7 @@ import doclet._ class Doclet extends Generator with Universer with Indexer { def generateImpl() { - new html.HtmlFactory(universe, index).generate + new html.HtmlFactory(universe, index).generate() } } diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala index 4630c3dda8..f81f55b934 100644 --- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala @@ -111,7 +111,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) { val p = "/scala/tools/nsc/doc/html/resource/" + subPath val inputStream = getClass.getResourceAsStream(p) assert(inputStream != null, p) - }.toByteArray + }.toByteArray() val dest = Directory(siteRoot) / subPath dest.parent.createDirectory() val out = dest.toFile.bufferedOutput() diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala index 512becd04d..14b7b80ea5 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala @@ -315,7 +315,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { * Calls dot with a given dot string and returns the SVG output. */ private def generateSVG(dotInput: String, template: DocTemplateEntity) = { - val dotOutput = DiagramGenerator.getDotRunner.feedToDot(dotInput, template) + val dotOutput = DiagramGenerator.getDotRunner().feedToDot(dotInput, template) var tSVG = -System.currentTimeMillis val result = if (dotOutput != null) { diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala index 303fe9f184..1df725636a 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala @@ -51,7 +51,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { } _modelFinished = true // complete the links between model entities, everthing that couldn't have been done before - universe.rootPackage.completeModel + universe.rootPackage.completeModel() Some(universe) filter (_.rootPackage != null) } @@ -382,7 +382,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { if (!sym.isAliasType && !sym.isAbstractType) for (member <- members) member match { - case d: DocTemplateImpl => d.completeModel + case d: DocTemplateImpl => d.completeModel() case _ => } @@ -631,7 +631,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { import Streamable._ Path(settings.docRootContent.value) match { case f : File => { - val rootComment = closing(f.inputStream)(is => parse(slurp(is), "", NoPosition, Option(inTpl))) + val rootComment = closing(f.inputStream())(is => parse(slurp(is), "", NoPosition, Option(inTpl))) Some(rootComment) } case _ => None diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala index d545a5738c..be1c656c81 100644 --- a/src/compiler/scala/tools/nsc/interactive/REPL.scala +++ 
b/src/compiler/scala/tools/nsc/interactive/REPL.scala @@ -65,7 +65,7 @@ object REPL { def loop(action: (String) => Unit) { Console.print(prompt) try { - val line = Console.readLine + val line = Console.readLine() if (line.length() > 0) { action(line) } diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala index 7f0265bf4f..8b4c2ce4eb 100644 --- a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala +++ b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala @@ -163,7 +163,7 @@ trait ScratchPadMaker { self: Global => while (scanner.token != EOF) { startOffset += scanner.offset token += scanner.token - scanner.nextToken + scanner.nextToken() endOffset += scanner.lastOffset } diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index b2af53574f..c7e682cb08 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -265,7 +265,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) else None } private def addToolsJarToLoader() = { - val cl = findToolsJar match { + val cl = findToolsJar() match { case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader) case _ => intp.classLoader } @@ -656,7 +656,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) foreach (intp quietRun _) ) // classloader and power mode setup - intp.setContextClassLoader + intp.setContextClassLoader() if (isReplPower) { replProps.power setValue true unleashAndSetPhase() diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala index 33311f5bb3..a73bb80157 100644 --- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala @@ -140,7 +140,7 @@ trait StructuredTypeStrings extends DestructureTypes { def wrapAtom[U](value: U) = new TypeAtom(value) } - def show(tp: Type): String = intoNodes(tp).show + def show(tp: Type): String = intoNodes(tp).show() } diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index 0dca75dab9..94399f936b 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -115,9 +115,9 @@ class JarWriter(val file: File, val manifest: Manifest) { val buf = new Array[Byte](10240) def loop(): Unit = in.read(buf, 0, buf.length) match { case -1 => in.close() - case n => out.write(buf, 0, n) ; loop + case n => out.write(buf, 0, n) ; loop() } - loop + loop() } def close() = out.close() diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index bb82cfb827..0a6716e396 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -74,7 +74,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { nbraces += 1 case _ => } - in.nextToken + in.nextToken() } } @@ -148,7 +148,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { nbraces += 1 case _ => } - in.nextToken + in.nextToken() in.token match { case RPAREN => nparens -= 1 @@ -163,7 +163,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { while (!(tokens contains in.token) && in.token != EOF) { if (in.token == LBRACE) { skipAhead(); 
accept(RBRACE) } else if (in.token == LPAREN) { skipAhead(); accept(RPAREN) } - else in.nextToken + else in.nextToken() } } @@ -180,7 +180,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { syntaxError(posToReport, msg, true) } - if (in.token == token) in.nextToken + if (in.token == token) in.nextToken() pos } @@ -200,7 +200,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def ident(): Name = if (in.token == IDENTIFIER) { val name = in.name - in.nextToken + in.nextToken() name } else { accept(IDENTIFIER) @@ -210,7 +210,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def repsep[T <: Tree](p: () => T, sep: Int): List[T] = { val buf = ListBuffer[T](p()) while (in.token == sep) { - in.nextToken + in.nextToken() buf += p() } buf.toList @@ -234,7 +234,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def qualId(): RefTree = { var t: RefTree = atPos(in.currentPos) { Ident(ident()) } while (in.token == DOT) { - in.nextToken + in.nextToken() t = atPos(in.currentPos) { Select(t, ident()) } } t @@ -243,7 +243,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def optArrayBrackets(tpt: Tree): Tree = if (in.token == LBRACKET) { val tpt1 = atPos(in.pos) { arrayOf(tpt) } - in.nextToken + in.nextToken() accept(RBRACKET) optArrayBrackets(tpt1) } else tpt @@ -251,21 +251,21 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def basicType(): Tree = atPos(in.pos) { in.token match { - case BYTE => in.nextToken; TypeTree(ByteClass.tpe) - case SHORT => in.nextToken; TypeTree(ShortClass.tpe) - case CHAR => in.nextToken; TypeTree(CharClass.tpe) - case INT => in.nextToken; TypeTree(IntClass.tpe) - case LONG => in.nextToken; TypeTree(LongClass.tpe) - case FLOAT => in.nextToken; TypeTree(FloatClass.tpe) - case DOUBLE => in.nextToken; TypeTree(DoubleClass.tpe) - case BOOLEAN => in.nextToken; TypeTree(BooleanClass.tpe) + case BYTE => in.nextToken(); TypeTree(ByteClass.tpe) + case SHORT => in.nextToken(); TypeTree(ShortClass.tpe) + case CHAR => in.nextToken(); TypeTree(CharClass.tpe) + case INT => in.nextToken(); TypeTree(IntClass.tpe) + case LONG => in.nextToken(); TypeTree(LongClass.tpe) + case FLOAT => in.nextToken(); TypeTree(FloatClass.tpe) + case DOUBLE => in.nextToken(); TypeTree(DoubleClass.tpe) + case BOOLEAN => in.nextToken(); TypeTree(BooleanClass.tpe) case _ => syntaxError("illegal start of type", true); errorTypeTree } } def typ(): Tree = optArrayBrackets { - if (in.token == FINAL) in.nextToken + if (in.token == FINAL) in.nextToken() if (in.token == IDENTIFIER) { var t = typeArgs(atPos(in.currentPos)(Ident(ident()))) // typeSelect generates Select nodes is the lhs is an Ident or Select, @@ -278,7 +278,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { case _ => SelectFromTypeTree(t, name.toTypeName) } while (in.token == DOT) { - in.nextToken + in.nextToken() t = typeArgs(atPos(in.currentPos)(typeSelect(t, ident()))) } convertToTypeId(t) @@ -292,14 +292,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def typeArg(): Tree = if (in.token == QMARK) { val pos = in.currentPos - in.nextToken + in.nextToken() var lo: Tree = TypeTree(NothingClass.tpe) var hi: Tree = TypeTree(AnyClass.tpe) if (in.token == EXTENDS) { - in.nextToken + in.nextToken() hi = typ() } else if (in.token == SUPER) { - in.nextToken + in.nextToken() lo = typ() } val tdef = atPos(pos) { @@ -315,7 +315,7 @@ trait JavaParsers extends 
ast.parser.ParsersCommon with JavaScanners { typ() } if (in.token == LT) { - in.nextToken + in.nextToken() val t1 = convertToTypeId(t) val args = repsep(typeArg, COMMA) acceptClosingAngle() @@ -330,7 +330,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def annotations(): List[Tree] = { //var annots = new ListBuffer[Tree] while (in.token == AT) { - in.nextToken + in.nextToken() annotation() } List() // don't pass on annotations for now @@ -354,38 +354,38 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { while (true) { in.token match { case AT if (in.lookaheadToken != INTERFACE) => - in.nextToken + in.nextToken() annotation() case PUBLIC => isPackageAccess = false - in.nextToken + in.nextToken() case PROTECTED => flags |= Flags.PROTECTED - in.nextToken + in.nextToken() case PRIVATE => isPackageAccess = false flags |= Flags.PRIVATE - in.nextToken + in.nextToken() case STATIC => flags |= Flags.STATIC - in.nextToken + in.nextToken() case ABSTRACT => flags |= Flags.ABSTRACT - in.nextToken + in.nextToken() case FINAL => flags |= Flags.FINAL - in.nextToken + in.nextToken() case NATIVE => addAnnot(NativeAttr) - in.nextToken + in.nextToken() case TRANSIENT => addAnnot(TransientAttr) - in.nextToken + in.nextToken() case VOLATILE => addAnnot(VolatileAttr) - in.nextToken + in.nextToken() case SYNCHRONIZED | STRICTFP => - in.nextToken + in.nextToken() case _ => val privateWithin: TypeName = if (isPackageAccess && !inInterface) thisPackageName @@ -399,7 +399,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def typeParams(): List[TypeDef] = if (in.token == LT) { - in.nextToken + in.nextToken() val tparams = repsep(typeParam, COMMA) acceptClosingAngle() tparams @@ -410,7 +410,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val name = identForType() val hi = if (in.token == EXTENDS) { - in.nextToken + in.nextToken() bound() } else { scalaDot(tpnme.Any) @@ -423,7 +423,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { atPos(in.currentPos) { val buf = ListBuffer[Tree](typ()) while (in.token == AMP) { - in.nextToken + in.nextToken() buf += typ() } val ts = buf.toList @@ -439,11 +439,11 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } def formalParam(): ValDef = { - if (in.token == FINAL) in.nextToken + if (in.token == FINAL) in.nextToken() annotations() var t = typ() if (in.token == DOTDOTDOT) { - in.nextToken + in.nextToken() t = atPos(t.pos) { AppliedTypeTree(scalaDot(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME), List(t)) } @@ -453,7 +453,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def optThrows() { if (in.token == THROWS) { - in.nextToken + in.nextToken() repsep(typ, COMMA) } } @@ -472,7 +472,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val isVoid = in.token == VOID var rtpt = if (isVoid) { - in.nextToken + in.nextToken() TypeTree(UnitClass.tpe) setPos in.pos } else typ() var pos = in.currentPos @@ -545,7 +545,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name.toTermName)) val maybe = new ListBuffer[Tree] // potential variable definitions. while (in.token == COMMA) { - in.nextToken + in.nextToken() if (in.token == IDENTIFIER) { // if there's an ident after the comma ... val name = ident() if (in.token == ASSIGN || in.token == SEMI) { // ... 
followed by a `=` or `;`, we know it's a real variable definition @@ -626,19 +626,19 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def collectIdents() : Int = { if (in.token == ASTERISK) { val starOffset = in.pos - in.nextToken + in.nextToken() buf += nme.WILDCARD starOffset } else { val nameOffset = in.pos buf += ident() if (in.token == DOT) { - in.nextToken + in.nextToken() collectIdents() } else nameOffset } } - if (in.token == STATIC) in.nextToken + if (in.token == STATIC) in.nextToken() else buf += nme.ROOTPKG val lastnameOffset = collectIdents() accept(SEMI) @@ -659,7 +659,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def interfacesOpt() = if (in.token == IMPLEMENTS) { - in.nextToken + in.nextToken() repsep(typ, COMMA) } else { List() @@ -672,7 +672,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val tparams = typeParams() val superclass = if (in.token == EXTENDS) { - in.nextToken + in.nextToken() typ() } else { javaLangObject() @@ -691,10 +691,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val tparams = typeParams() val parents = if (in.token == EXTENDS) { - in.nextToken + in.nextToken() repsep(typ, COMMA) } else { - List(javaLangObject) + List(javaLangObject()) } val (statics, body) = typeBody(INTERFACE, name) addCompanionObject(statics, atPos(pos) { @@ -721,7 +721,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { skipAhead() // skip init block, we just assume we have seen only static accept(RBRACE) } else if (in.token == SEMI) { - in.nextToken + in.nextToken() } else { if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.STATIC val decls = memberDecl(mods, parentToken) @@ -779,7 +779,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { if (in.token != RBRACE && in.token != SEMI && in.token != EOF) { buf += enumConst(enumType) if (in.token == COMMA) { - in.nextToken + in.nextToken() parseEnumConsts() } } @@ -788,7 +788,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val consts = buf.toList val (statics, body) = if (in.token == SEMI) { - in.nextToken + in.nextToken() typeBodyDecls(ENUM, name) } else { (List(), List()) @@ -839,7 +839,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { case INTERFACE => interfaceDecl(mods) case AT => annotationDecl(mods) case CLASS => classDecl(mods) - case _ => in.nextToken; syntaxError("illegal start of type declaration", true); List(errorTypeTree) + case _ => in.nextToken(); syntaxError("illegal start of type declaration", true); List(errorTypeTree) } /** CompilationUnit ::= [package QualId semi] TopStatSeq @@ -865,7 +865,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { while (in.token == IMPORT) buf ++= importDecl() while (in.token != EOF && in.token != RBRACE) { - while (in.token == SEMI) in.nextToken + while (in.token == SEMI) in.nextToken() if (in.token != EOF) buf ++= typeDecl(modifiers(false)) } diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index 84eee36f18..ad92b2e742 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -266,7 +266,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { def lookaheadToken: Int = { prev copyFrom this - nextToken + nextToken() val t = token next copyFrom this this copyFrom prev @@ -281,7 +281,7 @@ trait 
JavaScanners extends ast.parser.ScannersCommon { while (true) { in.ch match { case ' ' | '\t' | CR | LF | FF => - in.next + in.next() case _ => pos = in.cpos (in.ch: @switch) match { @@ -298,47 +298,47 @@ trait JavaScanners extends ast.parser.ScannersCommon { 'u' | 'v' | 'w' | 'x' | 'y' | 'z' => putChar(in.ch) - in.next - getIdentRest + in.next() + getIdentRest() return case '0' => putChar(in.ch) - in.next + in.next() if (in.ch == 'x' || in.ch == 'X') { - in.next + in.next() base = 16 } else { base = 8 } - getNumber + getNumber() return case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => base = 10 - getNumber + getNumber() return case '\"' => - in.next + in.next() while (in.ch != '\"' && (in.isUnicode || in.ch != CR && in.ch != LF && in.ch != SU)) { getlitch() } if (in.ch == '\"') { token = STRINGLIT setName() - in.next + in.next() } else { syntaxError("unclosed string literal") } return case '\'' => - in.next + in.next() getlitch() if (in.ch == '\'') { - in.next + in.next() token = CHARLIT setName() } else { @@ -348,31 +348,31 @@ trait JavaScanners extends ast.parser.ScannersCommon { case '=' => token = ASSIGN - in.next + in.next() if (in.ch == '=') { token = EQEQ - in.next + in.next() } return case '>' => token = GT - in.next + in.next() if (in.ch == '=') { token = GTEQ - in.next + in.next() } else if (in.ch == '>') { token = GTGT - in.next + in.next() if (in.ch == '=') { token = GTGTEQ - in.next + in.next() } else if (in.ch == '>') { token = GTGTGT - in.next + in.next() if (in.ch == '=') { token = GTGTGTEQ - in.next + in.next() } } } @@ -380,145 +380,145 @@ trait JavaScanners extends ast.parser.ScannersCommon { case '<' => token = LT - in.next + in.next() if (in.ch == '=') { token = LTEQ - in.next + in.next() } else if (in.ch == '<') { token = LTLT - in.next + in.next() if (in.ch == '=') { token = LTLTEQ - in.next + in.next() } } return case '!' => token = BANG - in.next + in.next() if (in.ch == '=') { token = BANGEQ - in.next + in.next() } return case '~' => token = TILDE - in.next + in.next() return case '?' => token = QMARK - in.next + in.next() return case ':' => token = COLON - in.next + in.next() return case '@' => token = AT - in.next + in.next() return case '&' => token = AMP - in.next + in.next() if (in.ch == '&') { token = AMPAMP - in.next + in.next() } else if (in.ch == '=') { token = AMPEQ - in.next + in.next() } return case '|' => token = BAR - in.next + in.next() if (in.ch == '|') { token = BARBAR - in.next + in.next() } else if (in.ch == '=') { token = BAREQ - in.next + in.next() } return case '+' => token = PLUS - in.next + in.next() if (in.ch == '+') { token = PLUSPLUS - in.next + in.next() } else if (in.ch == '=') { token = PLUSEQ - in.next + in.next() } return case '-' => token = MINUS - in.next + in.next() if (in.ch == '-') { token = MINUSMINUS - in.next + in.next() } else if (in.ch == '=') { token = MINUSEQ - in.next + in.next() } return case '*' => token = ASTERISK - in.next + in.next() if (in.ch == '=') { token = ASTERISKEQ - in.next + in.next() } return case '/' => - in.next + in.next() if (!skipComment()) { token = SLASH - in.next + in.next() if (in.ch == '=') { token = SLASHEQ - in.next + in.next() } return } case '^' => token = HAT - in.next + in.next() if (in.ch == '=') { token = HATEQ - in.next + in.next() } return case '%' => token = PERCENT - in.next + in.next() if (in.ch == '=') { token = PERCENTEQ - in.next + in.next() } return case '.' 
=> token = DOT - in.next + in.next() if ('0' <= in.ch && in.ch <= '9') { - putChar('.'); getFraction + putChar('.'); getFraction() } else if (in.ch == '.') { - in.next + in.next() if (in.ch == '.') { - in.next + in.next() token = DOTDOTDOT } else syntaxError("`.' character expected") } @@ -526,60 +526,60 @@ trait JavaScanners extends ast.parser.ScannersCommon { case ';' => token = SEMI - in.next + in.next() return case ',' => token = COMMA - in.next + in.next() return case '(' => token = LPAREN - in.next + in.next() return case '{' => token = LBRACE - in.next + in.next() return case ')' => token = RPAREN - in.next + in.next() return case '}' => token = RBRACE - in.next + in.next() return case '[' => token = LBRACKET - in.next + in.next() return case ']' => token = RBRACKET - in.next + in.next() return case SU => if (!in.hasNext) token = EOF else { syntaxError("illegal character") - in.next + in.next() } return case _ => if (Character.isUnicodeIdentifierStart(in.ch)) { putChar(in.ch) - in.next - getIdentRest + in.next() + getIdentRest() } else { syntaxError("illegal character: "+in.ch.toInt) - in.next + in.next() } return } @@ -590,26 +590,26 @@ trait JavaScanners extends ast.parser.ScannersCommon { private def skipComment(): Boolean = { if (in.ch == '/') { do { - in.next + in.next() } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU)) true } else if (in.ch == '*') { docBuffer = null - in.next + in.next() val scalaDoc = ("/**", "*/") if (in.ch == '*' && forScaladoc) docBuffer = new StringBuilder(scalaDoc._1) do { do { if (in.ch != '*' && in.ch != SU) { - in.next; putDocChar(in.ch) + in.next(); putDocChar(in.ch) } } while (in.ch != '*' && in.ch != SU) while (in.ch == '*') { - in.next; putDocChar(in.ch) + in.next(); putDocChar(in.ch) } } while (in.ch != '/' && in.ch != SU) - if (in.ch == '/') in.next + if (in.ch == '/') in.next() else incompleteInputError("unclosed comment") true } else { @@ -637,12 +637,12 @@ trait JavaScanners extends ast.parser.ScannersCommon { '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => putChar(in.ch) - in.next + in.next() case '_' => putChar(in.ch) - in.next - getIdentRest + in.next() + getIdentRest() return case SU => setName() @@ -651,7 +651,7 @@ trait JavaScanners extends ast.parser.ScannersCommon { case _ => if (Character.isUnicodeIdentifierPart(in.ch)) { putChar(in.ch) - in.next + in.next() } else { setName() token = JavaScannerConfiguration.name2token(name) @@ -667,17 +667,17 @@ trait JavaScanners extends ast.parser.ScannersCommon { */ protected def getlitch() = if (in.ch == '\\') { - in.next + in.next() if ('0' <= in.ch && in.ch <= '7') { val leadch: Char = in.ch var oct: Int = digit2int(in.ch, 8) - in.next + in.next() if ('0' <= in.ch && in.ch <= '7') { oct = oct * 8 + digit2int(in.ch, 8) - in.next + in.next() if (leadch <= '3' && '0' <= in.ch && in.ch <= '7') { oct = oct * 8 + digit2int(in.ch, 8) - in.next + in.next() } } putChar(oct.asInstanceOf[Char]) @@ -695,11 +695,11 @@ trait JavaScanners extends ast.parser.ScannersCommon { syntaxError(in.cpos - 1, "invalid escape character") putChar(in.ch) } - in.next + in.next() } } else { putChar(in.ch) - in.next + in.next() } /** read fractional part and exponent of floating point number @@ -709,35 +709,35 @@ trait JavaScanners extends ast.parser.ScannersCommon { token = DOUBLELIT while ('0' <= in.ch && in.ch <= '9') { putChar(in.ch) - in.next + in.next() } if (in.ch == 'e' || in.ch == 'E') { val lookahead = in.copy - lookahead.next + lookahead.next() if (lookahead.ch == '+' || lookahead.ch == '-') 
{ - lookahead.next + lookahead.next() } if ('0' <= lookahead.ch && lookahead.ch <= '9') { putChar(in.ch) - in.next + in.next() if (in.ch == '+' || in.ch == '-') { putChar(in.ch) - in.next + in.next() } while ('0' <= in.ch && in.ch <= '9') { putChar(in.ch) - in.next + in.next() } } token = DOUBLELIT } if (in.ch == 'd' || in.ch == 'D') { putChar(in.ch) - in.next + in.next() token = DOUBLELIT } else if (in.ch == 'f' || in.ch == 'F') { putChar(in.ch) - in.next + in.next() token = FLOATLIT } setName() @@ -797,23 +797,23 @@ trait JavaScanners extends ast.parser.ScannersCommon { protected def getNumber() { while (digit2int(in.ch, if (base < 10) 10 else base) >= 0) { putChar(in.ch) - in.next + in.next() } token = INTLIT if (base <= 10 && in.ch == '.') { val lookahead = in.copy - lookahead.next + lookahead.next() lookahead.ch match { case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | 'd' | 'D' | 'e' | 'E' | 'f' | 'F' => putChar(in.ch) - in.next - return getFraction + in.next() + return getFraction() case _ => if (!isIdentifierStart(lookahead.ch)) { putChar(in.ch) - in.next - return getFraction + in.next() + return getFraction() } } } @@ -821,11 +821,11 @@ trait JavaScanners extends ast.parser.ScannersCommon { (in.ch == 'e' || in.ch == 'E' || in.ch == 'f' || in.ch == 'F' || in.ch == 'd' || in.ch == 'D')) { - return getFraction + return getFraction() } setName() if (in.ch == 'l' || in.ch == 'L') { - in.next + in.next() token = LONGLIT } } @@ -875,14 +875,14 @@ trait JavaScanners extends ast.parser.ScannersCommon { /** INIT: read lookahead character and token. */ def init() { - in.next - nextToken + in.next() + nextToken() } } class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner { in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError) - init + init() def error (pos: Int, msg: String) = unit. error(pos, msg) def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg) def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index bb7d54d8f6..00e5875852 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -41,7 +41,7 @@ trait Plugins { classes map (Plugin.instantiate(_, this)) } - protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList + protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList() /** Load all available plugins. 
Skips plugins that * either have the same name as another one, or which @@ -102,7 +102,7 @@ trait Plugins { plugs } - lazy val plugins: List[Plugin] = loadPlugins + lazy val plugins: List[Plugin] = loadPlugins() /** A description of all the plugins that are loaded */ def pluginDescriptions: String = diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala index 025fc8e068..44670ea578 100644 --- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala @@ -21,8 +21,8 @@ abstract class AbstractReporter extends Reporter { private val positions = new mutable.HashMap[Position, Severity] override def reset() { - super.reset - positions.clear + super.reset() + positions.clear() } private def isVerbose = settings.verbose.value @@ -49,7 +49,7 @@ abstract class AbstractReporter extends Reporter { } if (isPromptSet) - displayPrompt + displayPrompt() } } } diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 0e44ef63a1..7a17180724 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -382,7 +382,7 @@ class MutableSettings(val errorFn: String => Unit) def max = range map (_._2) getOrElse IntMax override def value_=(s: Int) = - if (isInputValid(s)) super.value_=(s) else errorMsg + if (isInputValid(s)) super.value_=(s) else errorMsg() // Validate that min and max are consistent assert(min <= max) @@ -414,7 +414,7 @@ class MutableSettings(val errorFn: String => Unit) if (args.isEmpty) errorAndValue("missing argument", None) else parseArgument(args.head) match { case Some(i) => value = i ; Some(args.tail) - case None => errorMsg ; None + case None => errorMsg() ; None } def unparse: List[String] = diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index a5f41dc82b..9f89f47240 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -99,7 +99,7 @@ abstract class ClassfileParser { this.staticModule = if (root.isModule) root else root.companionModule this.isScala = false - parseHeader + parseHeader() this.pool = new ConstantPool parseClass() } @@ -540,7 +540,7 @@ abstract class ClassfileParser { val staticInfo = ClassInfoType(List(), staticScope, moduleClass) if (!isScala && !isScalaRaw) - enterOwnInnerClasses + enterOwnInnerClasses() val curbp = in.bp skipMembers() // fields @@ -1128,7 +1128,7 @@ abstract class ClassfileParser { case tpnme.ScalaSignatureATTR => isScala = true val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen) - pbuf.readNat; pbuf.readNat; + pbuf.readNat(); pbuf.readNat(); if (pbuf.readNat == 0) // a scala signature attribute with no entries means that the actual scala signature isScalaAnnot = true // is in a ScalaSignature annotation. 
in.skip(attrLen) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index 79b08bcabf..7871ac8f20 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -136,7 +136,7 @@ abstract class ICodeReader extends ClassfileParser { } } catch { case e: MissingRequirementError => - in.bp = beginning; skipAttributes + in.bp = beginning; skipAttributes() debuglog("Skipping non-existent method. " + e.msg); } } @@ -217,7 +217,7 @@ abstract class ICodeReader extends ClassfileParser { val instr = toUnsignedByte(in.nextByte) instr match { - case JVM.nop => parseInstruction + case JVM.nop => parseInstruction() case JVM.aconst_null => code emit CONSTANT(Constant(null)) case JVM.iconst_m1 => code emit CONSTANT(Constant(-1)) case JVM.iconst_0 => code emit CONSTANT(Constant(0)) @@ -581,7 +581,7 @@ abstract class ICodeReader extends ClassfileParser { } pc = 0 - while (pc < codeLength) parseInstruction + while (pc < codeLength) parseInstruction() val exceptionEntries = in.nextChar.toInt code.containsEHs = (exceptionEntries != 0) @@ -671,7 +671,7 @@ abstract class ICodeReader extends ClassfileParser { otherBlock = blocks(pc) if (!bb.closed && otherBlock != bb) { bb.emit(JUMP(otherBlock)) - bb.close + bb.close() // Console.println("\t> closing bb: " + bb) } bb = otherBlock @@ -889,7 +889,7 @@ abstract class ICodeReader extends ClassfileParser { import opcodes._ val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis rdef.init(method) - rdef.run + rdef.run() for (bb <- method.code.blocks ; (i, idx) <- bb.toList.zipWithIndex) i match { case cm @ CALL_METHOD(m, Static(true)) if m.isClassConstructor => @@ -941,7 +941,7 @@ abstract class ICodeReader extends ClassfileParser { l } case None => - checkValidIndex + checkValidIndex() val l = freshLocal(idx, kind, false) debuglog("Added new local for idx " + idx + ": " + kind) locals += (idx -> List((l, kind))) diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index f5c8907991..a871c72fc2 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -127,7 +127,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL { ArrayValue(TypeTree(ClassClass.tpe), paramTypes map LIT) /* ... */ - def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = dispatchType match { + def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = dispatchType() match { case NO_CACHE => /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)": @@ -356,7 +356,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL { // reflective method call machinery val invokeName = MethodClass.tpe member nme.invoke_ // scala.reflect.Method.invoke(...) def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol - def lookup = Apply(cache, List(qual1() GETCLASS)) // get Method object from cache + def lookup = Apply(cache, List(qual1() GETCLASS())) // get Method object from cache def invokeArgs = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation def invocation = (lookup DOT invokeName)(qual1(), invokeArgs) // .invoke(qual1, ...) 
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index e99b42a402..79dd36803d 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -24,8 +24,8 @@ abstract class Constructors extends Transform with ast.TreeDSL { protected def newTransformer(unit: CompilationUnit): Transformer = new ConstructorTransformer(unit) - private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]] - private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]] + private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]() + private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]() class ConstructorTransformer(unit: CompilationUnit) extends Transformer { diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 8287c1f631..60eab773aa 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -389,7 +389,7 @@ abstract class Erasure extends AddInterfaces if (enteringExplicitOuter(!member.isDeferred)) checkPair(member, other) - opc.next + opc.next() } (bridges, toBeRemoved) } @@ -900,7 +900,7 @@ abstract class Erasure extends AddInterfaces opc.overridden.infosString) doubleDefError(opc.overriding, opc.overridden) } - opc.next + opc.next() } } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 3e5ac6922e..b6d4bdb0c5 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -27,7 +27,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]() withDefaultValue NoType /** Map a lazy, mixedin field accessor to it's trait member accessor */ - private val initializer = perRunCaches.newMap[Symbol, Symbol] + private val initializer = perRunCaches.newMap[Symbol, Symbol]() // --------- helper functions ----------------------------------------------- diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index 28e6e3be26..822ef79cd0 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -215,12 +215,12 @@ abstract class OverridingPairs { curEntry = curEntry.next } while ((curEntry ne null) && (visited contains curEntry)); nextEntry = curEntry - next + next() } } } } - next + next() } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 9af4800a70..3ef08e1a6d 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -212,7 +212,7 @@ trait Logic extends Debugging { } props foreach gatherEqualities.apply - if (modelNull) vars foreach (_.registerNull) + if (modelNull) vars foreach (_.registerNull()) val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p))) @@ -548,7 +548,7 @@ trait ScalaLogic extends Logic { self: PatternMatching => val staticTpCheckable: Type = checkableType(staticTp) private[this] var 
_mayBeNull = false - def registerNull(): Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true } + def registerNull(): Unit = { ensureCanModify(); if (NullTp <:< staticTpCheckable) _mayBeNull = true } def mayBeNull: Boolean = _mayBeNull // case None => domain is unknown, @@ -572,16 +572,16 @@ trait ScalaLogic extends Logic { self: PatternMatching => } else subConsts - observed; allConsts + observed(); allConsts } // populate equalitySyms // don't care about the result, but want only one fresh symbol per distinct constant c - def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))} + def registerEquality(c: Const): Unit = {ensureCanModify(); symForEqualsTo getOrElseUpdate(c, Sym(this, c))} // return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness) // (registerEquality(c) must have been called prior, either when constructing the domain or from outside) - def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)} + def propForEqualsTo(c: Const): Prop = {observed(); symForEqualsTo.getOrElse(c, False)} // [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]p /** the information needed to construct the boolean proposition that encods the equality proposition (V = C) @@ -689,7 +689,7 @@ trait ScalaLogic extends Logic { self: PatternMatching => lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable)) // don't access until all potential equalities have been registered using registerEquality - private lazy val equalitySyms = {observed; symForEqualsTo.values.toList} + private lazy val equalitySyms = {observed(); symForEqualsTo.values.toList} // don't call until all equalities have been registered and registerNull has been called (if needed) def describe = { diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 82f13d9777..3baa88002f 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -71,7 +71,7 @@ trait MatchTranslation { self: PatternMatching => } while (it.hasNext) { - val cdef = it.next + val cdef = it.next() // If a default case has been seen, then every succeeding case is unreachable. 
if (vpat != null) context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index face149b9f..ad45fc0354 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -28,7 +28,7 @@ abstract class Duplicators extends Analyzer { if (oldThis ne newThis) { oldClassOwner = oldThis newClassOwner = newThis - } else resetClassOwners + } else resetClassOwners() envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList) debuglog("retyped with env: " + env) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 0207c841d2..d593694ce1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -607,7 +607,7 @@ trait Infer extends Checkable { ) )) } - buf.result + buf.result() } /** Return inferred type arguments, given type parameters, formal parameters, diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 8829a9a92e..776efd6367 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -500,7 +500,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { * `null` otherwise. */ type MacroRuntime = MacroArgs => Any - private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime] + private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]() private def macroRuntime(macroDef: Symbol): MacroRuntime = { macroTraceVerbose("looking for macro implementation: ")(macroDef) if (fastTrack contains macroDef) { @@ -909,7 +909,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { * 2) undetparams (sym.isTypeParameter && !sym.isSkolem) */ var hasPendingMacroExpansions = false - private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]] + private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]() private def isDelayed(expandee: Tree) = delayed contains expandee private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] = delayed.get(expandee).getOrElse { @@ -922,7 +922,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { macroLogVerbose("calculateUndetparams: %s".format(calculated)) calculated map (_.id) } - private val undetparams = perRunCaches.newSet[Int] + private val undetparams = perRunCaches.newSet[Int]() def notifyUndetparamsAdded(newUndets: List[Symbol]): Unit = { undetparams ++= newUndets map (_.id) if (macroDebugVerbose) newUndets foreach (sym => println("undetParam added: %s".format(sym))) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index dd16e9de30..0bd164a0cb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -525,7 +525,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans //Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullName + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullName + "/"+ 
opc.overridden.hasFlag(DEFERRED));//debug if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden); - opc.next + opc.next() } printMixinOverrideErrors() diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index 52497411d1..5c863469e4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -148,7 +148,7 @@ abstract class TreeCheckers extends Analyzer { val unit0 = currentUnit currentRun.currentUnit = unit body - currentRun.advanceUnit + currentRun.advanceUnit() assertFn(currentUnit == unit, "currentUnit is " + currentUnit + ", but unit is " + unit) currentRun.currentUnit = unit0 } @@ -156,7 +156,7 @@ abstract class TreeCheckers extends Analyzer { informProgress("checking "+unit) val context = rootContext(unit) context.checking = true - tpeOfTree.clear + tpeOfTree.clear() SymbolTracker.check(phase, unit) val checker = new TreeChecker(context) runWithUnit(unit) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 561ca7f382..ef3414f446 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1893,7 +1893,7 @@ trait Typers extends Adaptations with Tags { */ def typedTemplate(templ: Template, parents1: List[Tree]): Template = { val clazz = context.owner - clazz.annotations.map(_.completeInfo) + clazz.annotations.map(_.completeInfo()) if (templ.symbol == NoSymbol) templ setSymbol clazz.newLocalDummy(templ.pos) val self1 = templ.self match { @@ -1994,7 +1994,7 @@ trait Typers extends Adaptations with Tags { val sym = vdef.symbol.initialize val typedMods = typedModifiers(vdef.mods) - sym.annotations.map(_.completeInfo) + sym.annotations.map(_.completeInfo()) val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt)) checkNonCyclic(vdef, tpt1) @@ -2209,7 +2209,7 @@ trait Typers extends Adaptations with Tags { val tparams1 = ddef.tparams mapConserve typedTypeDef val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef) - meth.annotations.map(_.completeInfo) + meth.annotations.map(_.completeInfo()) for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1) if (isRepeatedParamType(vparam1.symbol.tpe)) @@ -2283,7 +2283,7 @@ trait Typers extends Adaptations with Tags { reenterTypeParams(tdef.tparams) val tparams1 = tdef.tparams mapConserve typedTypeDef val typedMods = typedModifiers(tdef.mods) - tdef.symbol.annotations.map(_.completeInfo) + tdef.symbol.annotations.map(_.completeInfo()) // @specialized should not be pickled when compiling with -no-specialize if (settings.nospecialization.value && currentRun.compiles(tdef.symbol)) { diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index c51fc442dc..f125db3839 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -254,7 +254,7 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends else if (f.isDirectory && validPackage(f.name)) packageBuf += new SourcePath[T](f, context) } - (packageBuf.result, classBuf.result) + (packageBuf.result(), classBuf.result()) } lazy val (packages, classes) = traverse() @@ -281,7 +281,7 @@ class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[Ab else if (f.isDirectory && validPackage(f.name)) 
packageBuf += new DirectoryClassPath(f, context) } - (packageBuf.result, classBuf.result) + (packageBuf.result(), classBuf.result()) } lazy val (packages, classes) = traverse() diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala index 759c06dc0f..b060ea90b8 100644 --- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala +++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala @@ -271,7 +271,7 @@ object ShowPickled extends Names { for (i <- 0 until index.length) printEntry(i) } - def fromFile(path: String) = fromBytes(io.File(path).toByteArray) + def fromFile(path: String) = fromBytes(io.File(path).toByteArray()) def fromName(name: String) = fromBytes(scalaSigBytesForPath(name) getOrElse Array()) def fromBytes(data: => Array[Byte]): Option[PickleBuffer] = try Some(new PickleBuffer(data, 0, data.length)) diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala index ab967496c4..47ffbda6ca 100644 --- a/src/compiler/scala/tools/reflect/MacroImplementations.scala +++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala @@ -88,7 +88,7 @@ abstract class MacroImplementations { var idx = 0 if (!first) { - val arg = argsStack.pop + val arg = argsStack.pop() if (strIsEmpty || (str charAt 0) != '%') { bldr append "%s" defval(arg, AnyTpe) diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala index cbfd8fec51..181bbedac5 100644 --- a/src/compiler/scala/tools/util/Javap.scala +++ b/src/compiler/scala/tools/util/Javap.scala @@ -98,7 +98,7 @@ class JavapClass( * it represents. */ def tryFile(path: String): Option[Array[Byte]] = - (Try (File(path.asClassResource)) filter (_.exists) map (_.toByteArray)).toOption + (Try (File(path.asClassResource)) filter (_.exists) map (_.toByteArray())).toOption /** Assume the string is a fully qualified class name and try to * find the class object it represents. 
@@ -209,7 +209,7 @@ class JavapClass( } filtering } - for (line <- Source.fromString(preamble + written).getLines; if checkFilter(line)) + for (line <- Source.fromString(preamble + written).getLines(); if checkFilter(line)) printWriter write line+lineSeparator printWriter.flush() } @@ -371,7 +371,7 @@ class JavapClass( case x => Failure(x) } } lastly { - reporter.clear + reporter.clear() } override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map { case (claas, Success(_)) => applyOne(raw, options, claas, inputs).get diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index 5d79a7d6cd..a6c0f0f5c2 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -139,7 +139,7 @@ object PathResolver { val pr = new PathResolver(settings) println(" COMMAND: 'scala %s'".format(args.mkString(" "))) println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) - pr.result.show + pr.result.show() } } } diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala index 1b06ce2ff2..7da9479dab 100644 --- a/src/compiler/scala/tools/util/SocketServer.scala +++ b/src/compiler/scala/tools/util/SocketServer.scala @@ -16,8 +16,8 @@ trait CompileOutputCommon { def verbose: Boolean def info(msg: String) = if (verbose) echo(msg) - def echo(msg: String) = {Console println msg; Console.flush} - def warn(msg: String) = {Console.err println msg; Console.flush} + def echo(msg: String) = {Console println msg; Console.flush()} + def warn(msg: String) = {Console.err println msg; Console.flush()} def fatal(msg: String) = { warn(msg) ; sys.exit(1) } } -- cgit v1.2.3 From 8cdf3b3f51adff8dbeff5217505f74cfbedb55cd Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 24 Feb 2013 23:00:47 +0100 Subject: Banish needless semicolons. 
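The changes are mechanical: statement-terminating semicolons that the Scala parser already infers are dropped, and a handful of semicolon-joined statements are split onto separate lines; no behavioural change is intended. A representative hunk (taken from the Global.scala change below):

    // before
    inform("[reset] exception happened: "+ex);
    ex.printStackTrace();

    // after
    inform("[reset] exception happened: "+ex)
    ex.printStackTrace()
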
--- .../scala/reflect/reify/phases/Reshape.scala | 2 +- .../scala/reflect/reify/utils/NodePrinters.scala | 4 +- src/compiler/scala/tools/nsc/Global.scala | 4 +- src/compiler/scala/tools/nsc/MainTokenMetric.scala | 4 +- .../scala/tools/nsc/ast/TreeBrowsers.scala | 6 +- src/compiler/scala/tools/nsc/ast/Trees.scala | 2 +- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 2 +- .../scala/tools/nsc/backend/ScalaPrimitives.scala | 6 +- .../nsc/backend/icode/ExceptionHandlers.scala | 12 ++-- .../scala/tools/nsc/backend/icode/GenICode.scala | 80 +++++++++++----------- .../tools/nsc/backend/icode/ICodeCheckers.scala | 26 +++---- .../tools/nsc/backend/icode/Linearizers.scala | 64 ++++++++--------- .../scala/tools/nsc/backend/icode/Members.scala | 24 +++---- .../scala/tools/nsc/backend/icode/Opcodes.scala | 40 ++++++----- .../scala/tools/nsc/backend/icode/Primitives.scala | 8 +-- .../scala/tools/nsc/backend/icode/Printers.scala | 46 ++++++------- .../backend/icode/analysis/CopyPropagation.scala | 46 ++++++------- .../backend/icode/analysis/DataFlowAnalysis.scala | 4 +- .../nsc/backend/icode/analysis/Liveness.scala | 2 +- .../icode/analysis/ReachingDefinitions.scala | 2 +- .../backend/icode/analysis/TypeFlowAnalysis.scala | 14 ++-- .../scala/tools/nsc/backend/jvm/GenASM.scala | 54 +++++++-------- .../tools/nsc/backend/opt/ClosureElimination.scala | 4 +- .../nsc/backend/opt/DeadCodeElimination.scala | 12 ++-- .../scala/tools/nsc/backend/opt/Inliners.scala | 10 +-- .../html/page/diagram/DotDiagramGenerator.scala | 6 +- .../scala/tools/nsc/interactive/Global.scala | 4 +- .../scala/tools/nsc/interactive/REPL.scala | 4 +- .../scala/tools/nsc/interpreter/ILoop.scala | 2 +- src/compiler/scala/tools/nsc/io/Pickler.scala | 2 +- .../scala/tools/nsc/javac/JavaParsers.scala | 2 +- .../scala/tools/nsc/symtab/SymbolLoaders.scala | 2 +- .../nsc/symtab/classfile/ClassfileParser.scala | 2 +- .../tools/nsc/symtab/classfile/ICodeReader.scala | 69 ++++++++++--------- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 6 +- .../scala/tools/nsc/transform/Constructors.scala | 2 +- .../scala/tools/nsc/transform/Erasure.scala | 4 +- .../scala/tools/nsc/transform/Flatten.scala | 2 +- .../scala/tools/nsc/transform/LambdaLift.scala | 4 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 4 +- .../tools/nsc/transform/OverridingPairs.scala | 12 ++-- .../scala/tools/nsc/transform/TailCalls.scala | 2 +- .../nsc/transform/patmat/MatchTranslation.scala | 2 +- .../scala/tools/nsc/typechecker/Contexts.scala | 2 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 22 +++--- .../tools/nsc/typechecker/SuperAccessors.scala | 8 +-- .../scala/tools/nsc/util/JavaCharArrayReader.scala | 4 +- .../scala/tools/nsc/util/ShowPickled.scala | 2 +- .../scala/tools/reflect/MacroImplementations.scala | 4 +- src/library/scala/beans/ScalaBeanInfo.scala | 4 +- src/library/scala/collection/SortedMapLike.scala | 2 +- .../scala/collection/generic/Signalling.scala | 4 +- src/library/scala/collection/generic/Sorted.scala | 16 ++--- .../collection/generic/SortedSetFactory.scala | 2 +- .../scala/collection/immutable/HashMap.scala | 2 +- .../scala/collection/immutable/HashSet.scala | 4 +- .../scala/collection/immutable/IntMap.scala | 6 +- .../scala/collection/immutable/ListSet.scala | 6 +- .../scala/collection/immutable/LongMap.scala | 4 +- .../scala/collection/immutable/RedBlackTree.scala | 8 +-- .../scala/collection/mutable/HashTable.scala | 16 ++--- .../scala/collection/mutable/ListBuffer.scala | 2 +- .../scala/collection/mutable/OpenHashMap.scala | 38 
+++++----- .../collection/parallel/ParIterableLike.scala | 10 +-- .../collection/parallel/ParIterableViewLike.scala | 3 +- .../scala/collection/parallel/ParSeqLike.scala | 4 +- .../scala/collection/parallel/ParSeqViewLike.scala | 4 +- .../collection/parallel/RemainsIterator.scala | 5 +- src/library/scala/collection/parallel/Tasks.scala | 2 +- .../collection/parallel/immutable/ParRange.scala | 2 +- .../collection/parallel/mutable/ParArray.scala | 6 +- .../collection/parallel/mutable/ParHashMap.scala | 5 +- .../collection/parallel/mutable/ParHashSet.scala | 9 +-- .../mutable/UnrolledParArrayCombiner.scala | 3 +- src/library/scala/io/ReadStdin.scala | 2 +- src/library/scala/ref/SoftReference.scala | 3 +- src/library/scala/reflect/NameTransformer.scala | 2 +- src/library/scala/text/Document.scala | 2 +- src/library/scala/util/MurmurHash.scala | 2 +- src/library/scala/util/matching/Regex.scala | 2 +- src/library/scala/xml/Utility.scala | 6 +- src/library/scala/xml/dtd/ContentModelParser.scala | 60 ++++++++-------- src/library/scala/xml/dtd/Decl.scala | 2 +- src/library/scala/xml/dtd/Scanner.scala | 6 +- .../scala/xml/dtd/ValidationException.scala | 2 +- src/library/scala/xml/factory/Binder.scala | 2 +- .../scala/xml/factory/LoggedNodeFactory.scala | 10 +-- .../scala/xml/include/sax/XIncludeFilter.scala | 16 ++--- src/library/scala/xml/include/sax/XIncluder.scala | 30 ++++---- src/library/scala/xml/parsing/MarkupParser.scala | 22 +++--- .../xml/parsing/ValidatingMarkupHandler.scala | 6 +- .../scala/xml/transform/BasicTransformer.scala | 2 +- src/reflect/scala/reflect/internal/Constants.scala | 2 +- .../scala/reflect/internal/InfoTransformers.scala | 2 +- src/reflect/scala/reflect/internal/Kinds.scala | 2 +- src/reflect/scala/reflect/internal/Names.scala | 8 +-- src/reflect/scala/reflect/internal/Printers.scala | 4 +- src/reflect/scala/reflect/internal/Scopes.scala | 6 +- .../scala/reflect/internal/SymbolTable.scala | 2 +- src/reflect/scala/reflect/internal/Trees.scala | 4 +- src/reflect/scala/reflect/internal/Types.scala | 24 ++++--- .../reflect/internal/pickling/PickleBuffer.scala | 4 +- .../scala/reflect/io/VirtualDirectory.scala | 2 +- src/reflect/scala/reflect/io/VirtualFile.scala | 2 +- 104 files changed, 538 insertions(+), 519 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index 71fe4ddeea..4c27ba4da1 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -280,7 +280,7 @@ trait Reshape { detectBeanAccessors("get") detectBeanAccessors("set") detectBeanAccessors("is") - }); + }) val stats1 = stats flatMap { case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isLazy => diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala index 0740f8d0b6..0903bc481c 100644 --- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala +++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala @@ -71,10 +71,10 @@ trait NodePrinters { s.trim }) - val printout = scala.collection.mutable.ListBuffer[String](); + val printout = scala.collection.mutable.ListBuffer[String]() printout += universe.trim if (mirrorIsUsed) printout += mirror.replace("Mirror[", "scala.reflect.api.Mirror[").trim - val imports = scala.collection.mutable.ListBuffer[String](); + val imports = scala.collection.mutable.ListBuffer[String]() imports += nme.UNIVERSE_SHORT.toString // if 
(buildIsUsed) imports += nme.build if (mirrorIsUsed) imports += nme.MIRROR_SHORT.toString diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 7c8dbc211e..304bdf1536 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1256,8 +1256,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // this handler should not be nessasary, but it seems that `fsc` // eats exceptions if they appear here. Need to find out the cause for // this and fix it. - inform("[reset] exception happened: "+ex); - ex.printStackTrace(); + inform("[reset] exception happened: "+ex) + ex.printStackTrace() throw ex } diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala index 9eb162a377..584805b37e 100644 --- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala +++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala @@ -43,8 +43,8 @@ object MainTokenMetric { } catch { case ex @ FatalError(msg) => if (command.settings.debug.value) - ex.printStackTrace(); - reporter.error(null, "fatal error: " + msg) + ex.printStackTrace() + reporter.error(null, "fatal error: " + msg) } } diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index 329f0fa54b..b73016837d 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -32,7 +32,7 @@ abstract class TreeBrowsers { val borderSize = 10 - def create(): SwingBrowser = new SwingBrowser(); + def create(): SwingBrowser = new SwingBrowser() /** Pseudo tree class, so that all JTree nodes are treated uniformly */ case class ProgramTree(units: List[UnitTree]) extends Tree { @@ -189,7 +189,7 @@ abstract class TreeBrowsers { frame.addWindowListener(new WindowAdapter() { /** Release the lock, so compilation may resume after the window is closed. */ override def windowClosed(e: WindowEvent): Unit = lock.release() - }); + }) jTree = new JTree(treeModel) { /** Return the string for a tree node. */ @@ -530,7 +530,7 @@ abstract class TreeBrowsers { if ((s ne null) && (s != NoSymbol)) { var str = s.flagString - if (s.isStaticMember) str = str + " isStatic "; + if (s.isStaticMember) str = str + " isStatic " (str + " annotations: " + s.annotations.mkString("", " ", "") + (if (s.isTypeSkolem) "\ndeSkolemized annotations: " + s.deSkolemize.annotations.mkString("", " ", "") else "")) } diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 4b5e23e177..c8b878225e 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -122,7 +122,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => } else { // convert (implicit ... ) to ()(implicit ... 
) if its the only parameter section if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit) - vparamss1 = List() :: vparamss1; + vparamss1 = List() :: vparamss1 val superRef: Tree = atPos(superPos)(gen.mkSuperInitCall) val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass // this requires knowing which of the parents is a type macro and which is not diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index add932441d..f361daa574 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -411,7 +411,7 @@ abstract class TreeBuilder { ValFrom(pos, pat, makeCombination(rhs.pos union test.pos, nme.withFilter, rhs, pat.duplicate, test)) :: rest, body) case ValFrom(pos, pat, rhs) :: rest => - val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(_.isInstanceOf[ValEq]); + val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(_.isInstanceOf[ValEq]) assert(!valeqs.isEmpty) val rest1 = rest.drop(valeqs.length) val pats = valeqs map { case ValEq(_, pat, _) => pat } diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala index f6b0701f86..1f9862596c 100644 --- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala +++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala @@ -494,8 +494,8 @@ abstract class ScalaPrimitives { def isArraySet(code: Int): Boolean = code match { case ZARRAY_SET | BARRAY_SET | SARRAY_SET | CARRAY_SET | IARRAY_SET | LARRAY_SET | FARRAY_SET | DARRAY_SET | - OARRAY_SET | UPDATE => true; - case _ => false; + OARRAY_SET | UPDATE => true + case _ => false } /** Check whether the given code is a comparison operator */ @@ -514,7 +514,7 @@ abstract class ScalaPrimitives { DIV | MOD => true; // binary case OR | XOR | AND | LSL | LSR | ASR => true; // bitwise - case _ => false; + case _ => false } def isLogicalOp(code: Int): Boolean = code match { diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala index a872e9cd00..7243264773 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala @@ -24,11 +24,11 @@ trait ExceptionHandlers { class ExceptionHandler(val method: IMethod, val label: TermName, val cls: Symbol, val pos: Position) { def loadExceptionClass = if (cls == NoSymbol) ThrowableClass else cls - private var _startBlock: BasicBlock = _; - var finalizer: Finalizer = _; + private var _startBlock: BasicBlock = _ + var finalizer: Finalizer = _ def setStartBlock(b: BasicBlock) = { - _startBlock = b; + _startBlock = b b.exceptionHandlerStart = true } def startBlock = _startBlock @@ -46,11 +46,11 @@ trait ExceptionHandlers { /** The body of this exception handler. 
May contain 'dead' blocks (which will not * make it into generated code because linearizers may not include them) */ - var blocks: List[BasicBlock] = Nil; + var blocks: List[BasicBlock] = Nil - def addBlock(b: BasicBlock): Unit = blocks = b :: blocks; + def addBlock(b: BasicBlock): Unit = blocks = b :: blocks - override def toString() = "exh_" + label + "(" + cls.simpleName + ")"; + override def toString() = "exh_" + label + "(" + cls.simpleName + ")" /** A standard copy constructor */ def this(other: ExceptionHandler) = { diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index f19fb56db0..122972039b 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -91,7 +91,7 @@ abstract class GenICode extends SubComponent { debuglog("Generating class: " + tree.symbol.fullName) val outerClass = ctx.clazz ctx setClass (new IClass(tree.symbol) setCompilationUnit unit) - addClassFields(ctx, tree.symbol); + addClassFields(ctx, tree.symbol) classes += (tree.symbol -> ctx.clazz) unit.icode += ctx.clazz gen(impl, ctx) @@ -119,7 +119,7 @@ abstract class GenICode extends SubComponent { m.native = m.symbol.hasAnnotation(definitions.NativeAttr) if (!m.isAbstractMethod && !m.native) { - ctx1 = genLoad(rhs, ctx1, m.returnType); + ctx1 = genLoad(rhs, ctx1, m.returnType) // reverse the order of the local variables, to match the source-order m.locals = m.locals.reverse @@ -224,10 +224,10 @@ abstract class GenICode extends SubComponent { // binary operation case rarg :: Nil => - resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil); + resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil) if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code)) assert(resKind.isIntegralType | resKind == BOOL, - resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1); + resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1) ctx1 = genLoad(larg, ctx1, resKind) ctx1 = genLoad(rarg, @@ -271,7 +271,7 @@ abstract class GenICode extends SubComponent { if (scalaPrimitives.isArrayGet(code)) { // load argument on stack debugassert(args.length == 1, - "Too many arguments for array get operation: " + tree); + "Too many arguments for array get operation: " + tree) ctx1 = genLoad(args.head, ctx1, INT) generatedType = elem ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos) @@ -283,7 +283,7 @@ abstract class GenICode extends SubComponent { } else if (scalaPrimitives.isArraySet(code)) { debugassert(args.length == 2, - "Too many arguments for array set operation: " + tree); + "Too many arguments for array set operation: " + tree) ctx1 = genLoad(args.head, ctx1, INT) ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe)) // the following line should really be here, but because of bugs in erasure @@ -404,8 +404,8 @@ abstract class GenICode extends SubComponent { (pat.symbol.tpe.typeSymbol, kind, { ctx: Context => - ctx.bb.emit(STORE_LOCAL(exception), pat.pos); - genLoad(body, ctx, kind); + ctx.bb.emit(STORE_LOCAL(exception), pat.pos) + genLoad(body, ctx, kind) }) } } @@ -491,7 +491,7 @@ abstract class GenICode extends SubComponent { val pair = (tree.symbol -> (new Label(tree.symbol) anchor ctx1.bb setParams (params map (_.symbol)))) debuglog("Adding label " + tree.symbol.fullLocationString + " in genLoad.") ctx1.labels += pair - ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), 
false))); + ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false))) } ctx.bb.closeWith(JUMP(ctx1.bb), tree.pos) @@ -509,13 +509,13 @@ abstract class GenICode extends SubComponent { val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false)) if (rhs == EmptyTree) { - debuglog("Uninitialized variable " + tree + " at: " + (tree.pos)); + debuglog("Uninitialized variable " + tree + " at: " + (tree.pos)) ctx.bb.emit(getZeroOf(local.kind)) } var ctx1 = ctx if (rhs != EmptyTree) - ctx1 = genLoad(rhs, ctx, local.kind); + ctx1 = genLoad(rhs, ctx, local.kind) ctx1.bb.emit(STORE_LOCAL(local), tree.pos) ctx1.scope.add(local) @@ -624,7 +624,7 @@ abstract class GenICode extends SubComponent { } else { genCast(l, r, ctx1, cast) } - generatedType = if (cast) r else BOOL; + generatedType = if (cast) r else BOOL ctx1 } genLoadApply1 @@ -637,7 +637,7 @@ abstract class GenICode extends SubComponent { // on the stack (contrary to what the type in the AST says). case Apply(fun @ Select(Super(_, mix), _), args) => def genLoadApply2 = { - debuglog("Call to super: " + tree); + debuglog("Call to super: " + tree) val invokeStyle = SuperCall(mix) // if (fun.symbol.isConstructor) Static(true) else SuperCall(mix); @@ -700,7 +700,7 @@ abstract class GenICode extends SubComponent { case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) => def genLoadApply4 = { - debuglog("BOX : " + fun.symbol.fullName); + debuglog("BOX : " + fun.symbol.fullName) val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe)) val nativeKind = toTypeKind(expr.tpe) if (settings.Xdce.value) { @@ -757,7 +757,7 @@ abstract class GenICode extends SubComponent { generatedType = resKind newCtx } else { // normal method call - debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember); + debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember) val invokeStyle = if (sym.isStaticMember) Static(false) @@ -889,16 +889,16 @@ abstract class GenICode extends SubComponent { def genLoadLiteral = { if (value.tag != UnitTag) (value.tag, expectedType) match { case (IntTag, LONG) => - ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos); + ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos) generatedType = LONG case (FloatTag, DOUBLE) => - ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos); + ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos) generatedType = DOUBLE case (NullTag, _) => - ctx.bb.emit(CONSTANT(value), tree.pos); + ctx.bb.emit(CONSTANT(value), tree.pos) generatedType = NullReference case _ => - ctx.bb.emit(CONSTANT(value), tree.pos); + ctx.bb.emit(CONSTANT(value), tree.pos) generatedType = toTypeKind(tree.tpe) } ctx @@ -946,7 +946,7 @@ abstract class GenICode extends SubComponent { case Match(selector, cases) => def genLoadMatch = { - debuglog("Generating SWITCH statement."); + debuglog("Generating SWITCH statement.") val ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue) val afterCtx = ctx1.newBlock() var caseCtx: Context = null @@ -1379,7 +1379,7 @@ abstract class GenICode extends SubComponent { } } - debuglog("Entering genCond with tree: " + tree); + debuglog("Entering genCond with tree: " + tree) // the default emission def default() = { @@ -1582,14 +1582,14 @@ abstract class GenICode extends SubComponent { case _ => None } if (block.size == 1 && optCont.isDefined) { - val Some(cont) = optCont; - val 
pred = block.predecessors; - debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")"); + val Some(cont) = optCont + val pred = block.predecessors + debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")") pred foreach { p => changed = true p.lastInstruction match { case CJUMP(succ, fail, cond, kind) if (succ == block || fail == block) => - debuglog("Pruning empty if branch."); + debuglog("Pruning empty if branch.") p.replaceInstruction(p.lastInstruction, if (block == succ) if (block == fail) @@ -1602,7 +1602,7 @@ abstract class GenICode extends SubComponent { abort("Could not find block in preds: " + method + " " + block + " " + pred + " " + p)) case CZJUMP(succ, fail, cond, kind) if (succ == block || fail == block) => - debuglog("Pruning empty ifz branch."); + debuglog("Pruning empty ifz branch.") p.replaceInstruction(p.lastInstruction, if (block == succ) if (block == fail) @@ -1615,12 +1615,12 @@ abstract class GenICode extends SubComponent { abort("Could not find block in preds")) case JUMP(b) if (b == block) => - debuglog("Pruning empty JMP branch."); + debuglog("Pruning empty JMP branch.") val replaced = p.replaceInstruction(p.lastInstruction, JUMP(cont)) debugassert(replaced, "Didn't find p.lastInstruction") case SWITCH(tags, labels) if (labels contains block) => - debuglog("Pruning empty SWITCH branch."); + debuglog("Pruning empty SWITCH branch.") p.replaceInstruction(p.lastInstruction, SWITCH(tags, labels map (l => if (l == block) cont else l))) @@ -1636,7 +1636,7 @@ abstract class GenICode extends SubComponent { e.covered = e.covered filter (_ != block) e.blocks = e.blocks filter (_ != block) if (e.startBlock eq block) - e setStartBlock cont; + e setStartBlock cont } } } @@ -1648,7 +1648,7 @@ abstract class GenICode extends SubComponent { method.blocks foreach prune0 } while (changed) - debuglog("Prune fixpoint reached in " + n + " iterations."); + debuglog("Prune fixpoint reached in " + n + " iterations.") } def getMaxType(ts: List[Type]): TypeKind = @@ -1820,7 +1820,7 @@ abstract class GenICode extends SubComponent { } def addFinalizer(f: Tree, ctx: Context): this.type = { - cleanups = Finalizer(f, ctx) :: cleanups; + cleanups = Finalizer(f, ctx) :: cleanups this } @@ -1868,7 +1868,7 @@ abstract class GenICode extends SubComponent { val exh = new ExceptionHandler(method, newTermNameCached("" + handlerCount), cls, pos) method.addHandler(exh) handlers = exh :: handlers - debuglog("added handler: " + exh); + debuglog("added handler: " + exh) exh } @@ -1878,7 +1878,7 @@ abstract class GenICode extends SubComponent { private def addActiveHandler(exh: ExceptionHandler) { handlerCount += 1 handlers = exh :: handlers - debuglog("added handler: " + exh); + debuglog("added handler: " + exh) } /** Return a new context for generating code for the given @@ -1962,11 +1962,11 @@ abstract class GenICode extends SubComponent { val ctx = finalizerCtx.enterExceptionHandler(exh) val exception = ctx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc") loadException(ctx, exh, finalizer.pos) - ctx.bb.emit(STORE_LOCAL(exception)); - val ctx1 = genLoad(finalizer, ctx, UNIT); - ctx1.bb.emit(LOAD_LOCAL(exception)); - ctx1.bb.emit(THROW(ThrowableClass)); - ctx1.bb.enterIgnoreMode(); + ctx.bb.emit(STORE_LOCAL(exception)) + val ctx1 = genLoad(finalizer, ctx, UNIT) + ctx1.bb.emit(LOAD_LOCAL(exception)) + ctx1.bb.emit(THROW(ThrowableClass)) + ctx1.bb.enterIgnoreMode() ctx1.bb.close() finalizerCtx.endHandler() } @@ -2028,7 +2028,7 @@ abstract class GenICode extends SubComponent { /** Add an 
instruction that refers to this label. */ def addCallingInstruction(i: Instruction) = - toPatch = i :: toPatch; + toPatch = i :: toPatch /** * Patch the code by replacing pseudo call instructions with @@ -2090,7 +2090,7 @@ abstract class GenICode extends SubComponent { // register with the given label if (!label.anchored) - label.addCallingInstruction(this); + label.addCallingInstruction(this) } case class PJUMP(whereto: Label) extends PseudoJUMP(whereto) diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index 8cd7c70bf0..fb1ef311d2 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -233,8 +233,8 @@ abstract class ICodeCheckers { } if (preds.nonEmpty) { - in(bl) = (preds map out.apply) reduceLeft meet2; - log("Input changed for block: " + bl +" to: " + in(bl)); + in(bl) = (preds map out.apply) reduceLeft meet2 + log("Input changed for block: " + bl +" to: " + in(bl)) } } @@ -380,9 +380,9 @@ abstract class ICodeCheckers { def checkField(obj: TypeKind, field: Symbol): Unit = obj match { case REFERENCE(sym) => if (sym.info.member(field.name) == NoSymbol) - icodeError(" " + field + " is not defined in class " + clasz); + icodeError(" " + field + " is not defined in class " + clasz) case _ => - icodeError(" expected reference type, but " + obj + " found"); + icodeError(" expected reference type, but " + obj + " found") } /** Checks that tpe is a subtype of one of the allowed types */ @@ -419,11 +419,11 @@ abstract class ICodeCheckers { receiver match { case REFERENCE(sym) => checkBool(sym.info.member(method.name) != NoSymbol, - "Method " + method + " does not exist in " + sym.fullName); + "Method " + method + " does not exist in " + sym.fullName) if (method.isPrivate) checkBool(method.owner == clasz.symbol, "Cannot call private method of " + method.owner.fullName - + " from " + clasz.symbol.fullName); + + " from " + clasz.symbol.fullName) else if (method.isProtected) { val isProtectedOK = ( (clasz.symbol isSubClass method.owner) || @@ -432,7 +432,7 @@ abstract class ICodeCheckers { checkBool(isProtectedOK, "Cannot call protected method of " + method.owner.fullName - + " from " + clasz.symbol.fullName); + + " from " + clasz.symbol.fullName) } case ARRAY(_) => @@ -465,7 +465,7 @@ abstract class ICodeCheckers { pushStack(elem) case (a, b) => icodeError(" expected an INT and an array reference, but " + - a + ", " + b + " found"); + a + ", " + b + " found") } case LOAD_LOCAL(local) => @@ -483,10 +483,10 @@ abstract class ICodeCheckers { case LOAD_MODULE(module) => checkBool((module.isModule || module.isModuleClass), - "Expected module: " + module + " flags: " + module.flagString); - pushStack(toTypeKind(module.tpe)); + "Expected module: " + module + " flags: " + module.flagString) + pushStack(toTypeKind(module.tpe)) - case STORE_THIS(kind) => + case STORE_THIS(kind) => val actualType = popStack if (actualType.isReferenceType) subtypeTest(actualType, kind) else icodeError("Expected this reference but found: " + actualType) @@ -498,7 +498,7 @@ abstract class ICodeCheckers { subtypeTest(k, elem) case (a, b, c) => icodeError(" expected and array reference, and int and " + kind + - " but " + a + ", " + b + ", " + c + " found"); + " but " + a + ", " + b + ", " + c + " found") } case STORE_LOCAL(local) => @@ -653,7 +653,7 @@ abstract class ICodeCheckers { case RETURN(kind) => val top = popStack if (kind.isValueType) 
checkType(top, kind) - else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not"); + else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not") case THROW(clasz) => checkType(popStack, toTypeKind(clasz.tpe)) diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala index 35eedc3539..c5fe3228a3 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala @@ -35,15 +35,15 @@ trait Linearizers { var blocks: List[BasicBlock] = Nil def linearize(m: IMethod): List[BasicBlock] = { - val b = m.startBlock; - blocks = Nil; + val b = m.startBlock + blocks = Nil run { - worklist pushAll (m.exh map (_.startBlock)); - worklist.push(b); + worklist pushAll (m.exh map (_.startBlock)) + worklist.push(b) } - blocks.reverse; + blocks.reverse } def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = { @@ -55,30 +55,30 @@ trait Linearizers { /** Linearize another subtree and append it to the existing blocks. */ def linearize(startBlock: BasicBlock): List[BasicBlock] = { //blocks = startBlock :: Nil; - run( { worklist.push(startBlock); } ); - blocks.reverse; + run( { worklist.push(startBlock); } ) + blocks.reverse } def processElement(b: BasicBlock) = if (b.nonEmpty) { - add(b); + add(b) b.lastInstruction match { case JUMP(whereto) => - add(whereto); + add(whereto) case CJUMP(success, failure, _, _) => - add(success); - add(failure); + add(success) + add(failure) case CZJUMP(success, failure, _, _) => - add(success); - add(failure); + add(success) + add(failure) case SWITCH(_, labels) => - add(labels); - case RETURN(_) => (); - case THROW(clasz) => (); + add(labels) + case RETURN(_) => () + case THROW(clasz) => () } } - def dequeue: Elem = worklist.pop(); + def dequeue: Elem = worklist.pop() /** * Prepend b to the list, if not already scheduled. @@ -88,25 +88,25 @@ trait Linearizers { if (blocks.contains(b)) () else { - blocks = b :: blocks; - worklist push b; + blocks = b :: blocks + worklist push b } } - def add(bs: List[BasicBlock]): Unit = bs foreach add; + def add(bs: List[BasicBlock]): Unit = bs foreach add } /** * Linearize code using a depth first traversal. */ class DepthFirstLinerizer extends Linearizer { - var blocks: List[BasicBlock] = Nil; + var blocks: List[BasicBlock] = Nil def linearize(m: IMethod): List[BasicBlock] = { - blocks = Nil; + blocks = Nil - dfs(m.startBlock); - m.exh foreach (b => dfs(b.startBlock)); + dfs(m.startBlock) + m.exh foreach (b => dfs(b.startBlock)) blocks.reverse } @@ -119,7 +119,7 @@ trait Linearizers { def dfs(b: BasicBlock): Unit = if (b.nonEmpty && add(b)) - b.successors foreach dfs; + b.successors foreach dfs /** * Prepend b to the list, if not already scheduled. 
@@ -128,7 +128,7 @@ trait Linearizers { */ def add(b: BasicBlock): Boolean = !(blocks contains b) && { - blocks = b :: blocks; + blocks = b :: blocks true } } @@ -144,12 +144,12 @@ trait Linearizers { val added = new mutable.BitSet def linearize(m: IMethod): List[BasicBlock] = { - blocks = Nil; + blocks = Nil visited.clear() - added.clear(); + added.clear() - m.exh foreach (b => rpo(b.startBlock)); - rpo(m.startBlock); + m.exh foreach (b => rpo(b.startBlock)) + rpo(m.startBlock) // if the start block has predecessors, it won't be the first one // in the linearization, so we need to enforce it here @@ -170,7 +170,7 @@ trait Linearizers { def rpo(b: BasicBlock): Unit = if (b.nonEmpty && !visited(b)) { - visited += b; + visited += b b.successors foreach rpo add(b) } @@ -184,7 +184,7 @@ trait Linearizers { if (!added(b.label)) { added += b.label - blocks = b :: blocks; + blocks = b :: blocks } } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala index fe837216ed..5c90fbf366 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala @@ -80,7 +80,7 @@ trait Members { } /** This methods returns a string representation of the ICode */ - override def toString = "ICode '" + name + "'"; + override def toString = "ICode '" + name + "'" /* Compute a unique new label */ def nextLabel: Int = { @@ -92,8 +92,8 @@ trait Members { */ def newBlock(): BasicBlock = { touched = true - val block = new BasicBlock(nextLabel, method); - blocks += block; + val block = new BasicBlock(nextLabel, method) + blocks += block block } } @@ -115,17 +115,17 @@ trait Members { var cunit: CompilationUnit = _ def addField(f: IField): this.type = { - fields = f :: fields; + fields = f :: fields this } def addMethod(m: IMethod): this.type = { - methods = m :: methods; + methods = m :: methods this } def setCompilationUnit(unit: CompilationUnit): this.type = { - this.cunit = unit; + this.cunit = unit this } @@ -180,7 +180,7 @@ trait Members { def hasCode = code ne NoCode def setCode(code: Code): IMethod = { - this.code = code; + this.code = code this } @@ -220,10 +220,10 @@ trait Members { val nextBlock: mutable.Map[BasicBlock, BasicBlock] = mutable.HashMap.empty for (b <- code.blocks.toList if b.successors.length == 1; - succ = b.successors.head; - if succ ne b; - if succ.predecessors.length == 1; - if succ.predecessors.head eq b; + succ = b.successors.head + if succ ne b + if succ.predecessors.length == 1 + if succ.predecessors.head eq b if !(exh.exists { (e: ExceptionHandler) => (e.covers(succ) && !e.covers(b)) || (e.covers(b) && !e.covers(succ)) })) { nextBlock(b) = succ @@ -235,7 +235,7 @@ trait Members { bb.open() var succ = bb do { - succ = nextBlock(succ); + succ = nextBlock(succ) val lastInstr = bb.lastInstruction /* Ticket SI-5672 * Besides removing the control-flow instruction at the end of `bb` (usually a JUMP), we have to pop any values it pushes. diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala index 137e2b556f..d8aac8e9db 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala @@ -64,7 +64,7 @@ import scala.reflect.internal.util.{Position,NoPosition} * in the source files. 
*/ trait Opcodes { self: ICodes => - import global.{Symbol, NoSymbol, Name, Constant}; + import global.{Symbol, NoSymbol, Name, Constant} // categories of ICode instructions final val localsCat = 1 @@ -195,7 +195,7 @@ trait Opcodes { self: ICodes => case class LOAD_FIELD(field: Symbol, isStatic: Boolean) extends Instruction { /** Returns a string representation of this instruction */ override def toString(): String = - "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString()); + "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString()) override def consumed = if (isStatic) 0 else 1 override def produced = 1 @@ -257,16 +257,17 @@ trait Opcodes { self: ICodes => case class STORE_FIELD(field: Symbol, isStatic: Boolean) extends Instruction { /** Returns a string representation of this instruction */ override def toString(): String = - "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)"); + "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)") - override def consumed = if(isStatic) 1 else 2; - override def produced = 0; + override def consumed = if(isStatic) 1 else 2 + + override def produced = 0 override def consumedTypes = if (isStatic) toTypeKind(field.tpe) :: Nil else - REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil; + REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil override def category = fldsCat } @@ -420,10 +421,12 @@ trait Opcodes { self: ICodes => */ case class NEW(kind: REFERENCE) extends Instruction { /** Returns a string representation of this instruction */ - override def toString(): String = "NEW "+ kind; + override def toString(): String = "NEW "+ kind + + override def consumed = 0 + + override def produced = 1 - override def consumed = 0; - override def produced = 1; override def producedTypes = kind :: Nil /** The corresponding constructor call. */ @@ -439,11 +442,13 @@ trait Opcodes { self: ICodes => */ case class CREATE_ARRAY(elem: TypeKind, dims: Int) extends Instruction { /** Returns a string representation of this instruction */ - override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims; + override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims + + override def consumed = dims - override def consumed = dims; override def consumedTypes = List.fill(dims)(INT) - override def produced = 1; + override def produced = 1 + override def producedTypes = ARRAY(elem) :: Nil override def category = arraysCat @@ -532,7 +537,7 @@ trait Opcodes { self: ICodes => override def toString(): String = ( "CJUMP (" + kind + ")" + cond + " ? "+successBlock.label+" : "+failureBlock.label - ); + ) override def consumed = 2 override def produced = 0 @@ -555,7 +560,7 @@ trait Opcodes { self: ICodes => override def toString(): String = ( "CZJUMP (" + kind + ")" + cond + " ? 
"+successBlock.label+" : "+failureBlock.label - ); + ) override def consumed = 1 override def produced = 0 @@ -647,10 +652,11 @@ trait Opcodes { self: ICodes => */ case class MONITOR_EXIT() extends Instruction { /** Returns a string representation of this instruction */ - override def toString(): String ="MONITOR_EXIT"; + override def toString(): String ="MONITOR_EXIT" - override def consumed = 1; - override def produced = 0; + override def consumed = 1 + + override def produced = 0 override def consumedTypes = ObjectReference :: Nil diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala index 351d99f51a..5eceb1cf6b 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala @@ -6,9 +6,9 @@ package scala.tools.nsc package backend -package icode; +package icode -import java.io.PrintWriter; +import java.io.PrintWriter trait Primitives { self: ICodes => @@ -51,12 +51,12 @@ trait Primitives { self: ICodes => // type : (src) => dst // range: src,dst <- { Ix, Ux, Rx } // jvm : i2{l, f, d}, l2{i, f, d}, f2{i, l, d}, d2{i, l, f}, i2{b, c, s} - case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive; + case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive // type : (Array[REF]) => I4 // range: type <- { BOOL, Ix, Ux, Rx, REF } // jvm : arraylength - case class ArrayLength(kind: TypeKind) extends Primitive; + case class ArrayLength(kind: TypeKind) extends Primitive // type : (buf,el) => buf // range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR } diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala index 253f766469..5b47e3cfff 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala @@ -27,7 +27,7 @@ trait Printers { self: ICodes => def print(o: Any) { print(o.toString()) } def println(s: String) { - print(s); + print(s) println() } @@ -35,7 +35,7 @@ trait Printers { self: ICodes => out.println() var i = 0 while (i < margin) { - print(" "); + print(" ") i += 1 } } @@ -53,26 +53,26 @@ trait Printers { self: ICodes => } def printClass(cls: IClass) { - print(cls.symbol.toString()); print(" extends "); - printList(cls.symbol.info.parents, ", "); - indent(); println(" {"); - println("// fields:"); - cls.fields.foreach(printField); println(); - println("// methods"); - cls.methods.foreach(printMethod); - undent(); println(); + print(cls.symbol.toString()); print(" extends ") + printList(cls.symbol.info.parents, ", ") + indent(); println(" {") + println("// fields:") + cls.fields.foreach(printField); println() + println("// methods") + cls.methods.foreach(printMethod) + undent(); println() println("}") } def printField(f: IField) { - print(f.symbol.keyString); print(" "); - print(f.symbol.nameString); print(": "); - println(f.symbol.info.toString()); + print(f.symbol.keyString); print(" ") + print(f.symbol.nameString); print(": ") + println(f.symbol.info.toString()) } def printMethod(m: IMethod) { - print("def "); print(m.symbol.name); - print("("); printList(printParam)(m.params, ", "); print(")"); + print("def "); print(m.symbol.name) + print("("); printList(printParam)(m.params, ", "); print(")") print(": "); print(m.symbol.info.resultType) if (!m.isAbstractMethod) { @@ -93,23 +93,23 @@ trait Printers { self: ICodes => } def printParam(p: Local) { - print(p.sym.name); 
print(": "); print(p.sym.info); + print(p.sym.name); print(": "); print(p.sym.info) print(" ("); print(p.kind); print(")") } def printExceptionHandler(e: ExceptionHandler) { - indent(); - println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock); - println("consisting of blocks: " + e.blocks); - undent(); - println("with finalizer: " + e.finalizer); -// linearizer.linearize(e.startBlock) foreach printBlock; + indent() + println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock) + println("consisting of blocks: " + e.blocks) + undent() + println("with finalizer: " + e.finalizer) + // linearizer.linearize(e.startBlock) foreach printBlock; } def printBlock(bb: BasicBlock) { print(bb.label) if (bb.loopHeader) print("[loop header]") - print(": "); + print(": ") if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString) indent(); println() bb.toList foreach printInstruction diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala index 7f32b2b764..941d200d13 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala @@ -123,7 +123,7 @@ abstract class CopyPropagation { } override def toString(): String = - "\nBindings: " + bindings + "\nStack: " + stack; + "\nBindings: " + bindings + "\nStack: " + stack def dup: State = { val b: Bindings = mutable.HashMap() @@ -164,7 +164,7 @@ abstract class CopyPropagation { val resBindings = mutable.HashMap[Location, Value]() for ((k, v) <- a.bindings if b.bindings.isDefinedAt(k) && v == b.bindings(k)) - resBindings += (k -> v); + resBindings += (k -> v) new State(resBindings, resStack) } } @@ -189,11 +189,11 @@ abstract class CopyPropagation { debuglog("CopyAnalysis added point: " + b) } m.exh foreach { e => - in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack); + in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack) } // first block is special: it's not bottom, but a precisely defined state with no bindings - in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil); + in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil) } } @@ -202,7 +202,7 @@ abstract class CopyPropagation { if (settings.debug.value) { linearizer.linearize(method).foreach(b => if (b != method.startBlock) assert(in(b) != lattice.bottom, - "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?")); + "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?")) } } @@ -227,7 +227,7 @@ abstract class CopyPropagation { case CONSTANT(k) => if (k.tag != UnitTag) - out.stack = Const(k) :: out.stack; + out.stack = Const(k) :: out.stack case LOAD_ARRAY_ITEM(_) => out.stack = (Unknown :: out.stack.drop(2)) @@ -276,14 +276,14 @@ abstract class CopyPropagation { v match { case Deref(LocalVar(other)) => if (other != local) - out.bindings += (LocalVar(local) -> v); + out.bindings += (LocalVar(local) -> v) case _ => out.bindings += (LocalVar(local) -> v) } case Nil => sys.error("Incorrect icode in " + method + ". 
Expecting something on the stack.") } - out.stack = out.stack drop 1; + out.stack = out.stack drop 1 case STORE_THIS(_) => cleanReferencesTo(out, This) @@ -291,14 +291,14 @@ abstract class CopyPropagation { case STORE_FIELD(field, isStatic) => if (isStatic) - out.stack = out.stack.drop(1); + out.stack = out.stack.drop(1) else { - out.stack = out.stack.drop(2); - cleanReferencesTo(out, Field(AllRecords, field)); + out.stack = out.stack.drop(2) + cleanReferencesTo(out, Field(AllRecords, field)) in.stack match { case v :: Record(_, bindings) :: vs => bindings += (field -> v) - case _ => (); + case _ => () } } @@ -319,7 +319,7 @@ abstract class CopyPropagation { case Record(_, bindings) => for (v <- out.stack.take(method.info.paramTypes.length + 1) if v ne obj) { - bindings ++= getBindingsForPrimaryCtor(in, method); + bindings ++= getBindingsForPrimaryCtor(in, method) } case _ => () } @@ -390,7 +390,7 @@ abstract class CopyPropagation { out.stack = out.stack.head :: out.stack case MONITOR_ENTER() => - out.stack = out.stack.drop(1); + out.stack = out.stack.drop(1) case MONITOR_EXIT() => out.stack = out.stack.drop(1) @@ -438,7 +438,7 @@ abstract class CopyPropagation { case Deref(loc1) if (loc1 == target) => false case Boxed(loc1) if (loc1 == target) => false case rec @ Record(_, _) => - cleanRecord(rec); + cleanRecord(rec) true case _ => true }) && @@ -454,12 +454,12 @@ abstract class CopyPropagation { * If the method is impure, all bindings to record fields are cleared. */ final def simulateCall(state: copyLattice.State, method: Symbol, static: Boolean): copyLattice.State = { - val out = new copyLattice.State(state.bindings, state.stack); - out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1)); + val out = new copyLattice.State(state.bindings, state.stack) + out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1)) if (method.info.resultType != definitions.UnitClass.tpe && !method.isConstructor) - out.stack = Unknown :: out.stack; + out.stack = Unknown :: out.stack if (!isPureMethod(method)) - invalidateRecords(out); + invalidateRecords(out) out } @@ -500,8 +500,8 @@ abstract class CopyPropagation { * they are passed on the stack. It works for primary constructors. */ private def getBindingsForPrimaryCtor(in: copyLattice.State, ctor: Symbol): mutable.Map[Symbol, Value] = { - val paramAccessors = ctor.owner.constrParamAccessors; - var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1); + val paramAccessors = ctor.owner.constrParamAccessors + var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1) val bindings = mutable.HashMap[Symbol, Value]() debuglog("getBindings for: " + ctor + " acc: " + paramAccessors) @@ -527,8 +527,8 @@ abstract class CopyPropagation { // + " having acc: " + (paramAccessors map (_.tpe))+ " vs. 
params" + paramTypes // + "\n\t failed at pos " + i + " with " + p.tpe + " == " + paramTypes(i)) if (p.tpe == paramTypes(i)) - bindings += (p -> values.head); - values = values.tail; + bindings += (p -> values.head) + values = values.tail } debuglog("\t" + bindings) diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala index a9783b43dc..704439e178 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala @@ -30,7 +30,7 @@ trait DataFlowAnalysis[L <: SemiLattice] { /* Implement this function to initialize the worklist. */ def init(f: => Unit): Unit = { iterations = 0 - in.clear(); out.clear(); worklist.clear(); visited.clear(); + in.clear(); out.clear(); worklist.clear(); visited.clear() f } @@ -46,7 +46,7 @@ trait DataFlowAnalysis[L <: SemiLattice] { while (!worklist.isEmpty) { if (stat) iterations += 1 //Console.println("worklist in: " + worklist); - val point = worklist.iterator.next(); worklist -= point; visited += point; + val point = worklist.iterator.next(); worklist -= point; visited += point //Console.println("taking out point: " + point + " worklist out: " + worklist); val output = f(point, in(point)) diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala index abda639dec..14b57f287f 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala @@ -77,7 +77,7 @@ abstract class Liveness { if (settings.debug.value) { linearizer.linearize(method).foreach(b => if (b != method.startBlock) assert(lattice.bottom != in(b), - "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?")); + "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?")) } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala index 48755d4424..2d29e6b14f 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala @@ -147,7 +147,7 @@ abstract class ReachingDefinitions { "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? " + in(b) + ": bot: " + lattice.bottom + "\nin(b) == bottom: " + (in(b) == lattice.bottom) - + "\nbottom == in(b): " + (lattice.bottom == in(b)))); + + "\nbottom == in(b): " + (lattice.bottom == in(b)))) } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 7b0627294e..227c1064ea 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -139,7 +139,7 @@ abstract class TypeFlowAnalysis { if (settings.debug.value) { linearizer.linearize(method).foreach(b => if (b != method.startBlock) assert(visited.contains(b), - "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited)); + "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." 
+ visited)) } // log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] " // + "\n\t" + iterations + " iterations: " + t + " ms." @@ -207,7 +207,7 @@ abstract class TypeFlowAnalysis { case Test(_, kind, zero) => stack.pop if (!zero) { stack.pop } - stack push BOOL; + stack push BOOL case Comparison(_, _) => stack.pop2; stack push INT @@ -396,7 +396,7 @@ abstract class TypeFlowAnalysis { override def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = { var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack)) - val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null; + val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null var isPastLast = false var instrs = b.toList @@ -598,7 +598,7 @@ abstract class TypeFlowAnalysis { return } else if(staleOut.isEmpty && inlined.isEmpty && staleIn.isEmpty) { // this promotes invoking reinit if in doubt, no performance degradation will ensue! - return; + return } worklist.clear() // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit. @@ -665,14 +665,14 @@ abstract class TypeFlowAnalysis { override def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = { while (!worklist.isEmpty && relevantBBs.nonEmpty) { if (stat) iterations += 1 - val point = worklist.iterator.next(); worklist -= point; + val point = worklist.iterator.next(); worklist -= point if(relevantBBs(point)) { shrinkedWatchlist = false val output = f(point, in(point)) - visited += point; + visited += point if(isOnPerimeter(point)) { if(shrinkedWatchlist && !isWatching(point)) { - relevantBBs -= point; + relevantBBs -= point populatePerimeter() } } else { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 909c82ff23..8440a6cb49 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -456,8 +456,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = { - val dest = new Array[Byte](len); - System.arraycopy(b, offset, dest, 0, len); + val dest = new Array[Byte](len) + System.arraycopy(b, offset, dest, 0, len) new asm.CustomAttr(name, dest) } @@ -606,7 +606,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def javaType(s: Symbol): asm.Type = { if (s.isMethod) { - val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType); + val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType) asm.Type.getMethodType( resT, (s.tpe.paramTypes map javaType): _*) } else { javaType(s.tpe) } } @@ -1297,7 +1297,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } val ps = c.symbol.info.parents - val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses; + val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses val superInterfaces = (superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol))).distinct if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY @@ -1322,7 +1322,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { thisName = javaName(c.symbol) // the internal name of the class being emitted val ps = c.symbol.info.parents - val superClass: String = if(ps.isEmpty) 
JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol); + val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol) val ifaces = getSuperInterfaces(c) @@ -1680,7 +1680,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { val kind = toTypeKind(const.typeValue) val toPush: asm.Type = if (kind.isValueType) classLiteral(kind) - else javaType(kind); + else javaType(kind) mv.visitLdcInsn(toPush) case EnumTag => @@ -1703,7 +1703,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { */ object jcode { - import asm.Opcodes; + import asm.Opcodes final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) } @@ -1867,10 +1867,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // use a table in which holes are filled with defaultBranch. val keyRange = (keyMax - keyMin + 1) val newBranches = new Array[asm.Label](keyRange) - var oldPos = 0; + var oldPos = 0 var i = 0 while(i < keyRange) { - val key = keyMin + i; + val key = keyMin + i if (keys(oldPos) == key) { newBranches(i) = branches(oldPos) oldPos += 1 @@ -2069,7 +2069,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // TODO in that case, ExceptionHandler.cls doesn't go through javaName(). What if cls is an inner class? for (e <- this.method.exh ; if e.covered.nonEmpty ; p <- intervals(e)) { debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method + - " from: " + p.start + " to: " + p.end + " catching: " + e.cls); + " from: " + p.start + " to: " + p.end + " catching: " + e.cls) val cls: String = if (e.cls == NoSymbol || e.cls == ThrowableClass) null else javaName(e.cls) jmethod.visitTryCatchBlock(labels(p.start), linNext(p.end), labels(e.startBlock), cls) @@ -2093,8 +2093,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def overlaps(that: Interval): Boolean = { !(this.precedes(that) || that.precedes(this)) } def mergeWith(that: Interval): Interval = { - val newStart = if(this.start <= that.start) this.lstart else that.lstart; - val newEnd = if(this.end <= that.end) that.lend else this.lend; + val newStart = if(this.start <= that.start) this.lstart else that.lstart + val newEnd = if(this.end <= that.end) that.lend else this.lend Interval(newStart, newEnd) } @@ -2150,7 +2150,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def getMerged(): scala.collection.Map[Local, List[Interval]] = { // TODO should but isn't: unbalanced start(s) of scope(s) - val shouldBeEmpty = pending filter { p => val Pair(_, st) = p; st.nonEmpty }; + val shouldBeEmpty = pending filter { p => val Pair(_, st) = p; st.nonEmpty } val merged = mutable.Map[Local, List[Interval]]() def addToMerged(lv: Local, start: Label, end: Label) { val intv = Interval(start, end) @@ -2168,10 +2168,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { if(merged.isDefinedAt(k)) { val balancedStart = merged(k).head.lstart if(balancedStart.getOffset < start.getOffset) { - start = balancedStart; + start = balancedStart } } - val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend; + val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend val end = endOpt.getOrElse(onePastLast) addToMerged(k, start, end) } @@ -2204,7 +2204,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { for(Pair(local, 
ranges) <- scoping.getMerged()) { var name = javaName(local.sym) if (name == null) { - anonCounter += 1; + anonCounter += 1 name = "" } for(intrvl <- ranges) { @@ -2372,7 +2372,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { case LOAD_MODULE(module) => // assert(module.isModule, "Expected module: " + module) - debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString); + debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString) if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) { jmethod.visitVarInsn(Opcodes.ALOAD, 0) } else { @@ -2502,7 +2502,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { while (restTagss.nonEmpty) { val currLabel = labels(restBranches.head) for (cTag <- restTagss.head) { - flatKeys(k) = cTag; + flatKeys(k) = cTag flatBranches(k) = currLabel k += 1 } @@ -2701,7 +2701,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def genPrimitive(primitive: Primitive, pos: Position) { - import asm.Opcodes; + import asm.Opcodes primitive match { @@ -2879,7 +2879,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { * *Does not assume the parameters come first!* */ def computeLocalVarsIndex(m: IMethod) { - var idx = if (m.symbol.isStaticMember) 0 else 1; + var idx = if (m.symbol.isStaticMember) 0 else 1 for (l <- m.params) { debuglog("Index value for " + l + "{" + l.## + "}: " + idx) @@ -2901,7 +2901,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { class JMirrorBuilder(bytecodeWriter: BytecodeWriter) extends JCommonBuilder(bytecodeWriter) { private var cunit: CompilationUnit = _ - def getCurrentCUnit(): CompilationUnit = cunit; + def getCurrentCUnit(): CompilationUnit = cunit /** Generate a mirror class for a top-level module. 
A mirror class is a class * containing only static methods that forward to the corresponding method @@ -2994,8 +2994,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { for (f <- clasz.fields if f.symbol.hasGetter; g = f.symbol.getter(clasz.symbol); - s = f.symbol.setter(clasz.symbol); - if g.isPublic && !(f.symbol.name startsWith "$") + s = f.symbol.setter(clasz.symbol) + if g.isPublic && !(f.symbol.name startsWith "$") ) { // inserting $outer breaks the bean fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList @@ -3180,7 +3180,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // leave infinite-loops in place return (dest, hops filterNot (dest eq _)) } - prev = dest; + prev = dest false case None => true } @@ -3268,11 +3268,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { /* remove from all containers that may contain a reference to */ def elide(redu: BasicBlock) { assert(m.startBlock != redu, "startBlock should have been re-wired by now") - m.code.removeBlock(redu); + m.code.removeBlock(redu) } var wasReduced = false - val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock)); + val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock)) val elided = mutable.Set.empty[BasicBlock] // debug val newTargets = mutable.Set.empty[BasicBlock] // debug @@ -3303,7 +3303,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def normalize(m: IMethod) { if(!m.hasCode) { return } collapseJumpOnlyBlocks(m) - var wasReduced = false; + var wasReduced = false do { wasReduced = false // Prune from an exception handler those covered blocks which are jump-only. diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala index 2d53eb2ed9..8f439fc800 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala @@ -108,7 +108,7 @@ abstract class ClosureElimination extends SubComponent { val t = info.getBinding(l) t match { case Deref(This) | Const(_) => - bb.replaceInstruction(i, valueToInstruction(t)); + bb.replaceInstruction(i, valueToInstruction(t)) debuglog(s"replaced $i with $t") case _ => @@ -226,7 +226,7 @@ abstract class ClosureElimination extends SubComponent { h = t.head t = t.tail } - } while (redo); + } while (redo) b fromList newInstructions } } diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index b998e3fbd2..3b94e2bd8d 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -54,7 +54,7 @@ abstract class DeadCodeElimination extends SubComponent { } } - val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis; + val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis /** Use-def chain: give the reaching definitions at the beginning of given instruction. 
*/ var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty @@ -82,7 +82,7 @@ abstract class DeadCodeElimination extends SubComponent { def dieCodeDie(m: IMethod) { if (m.hasCode) { - debuglog("dead code elimination on " + m); + debuglog("dead code elimination on " + m) dropOf.clear() localStores.clear() clobbers.clear() @@ -104,13 +104,13 @@ abstract class DeadCodeElimination extends SubComponent { /** collect reaching definitions and initial useful instructions for this method. */ def collectRDef(m: IMethod): Unit = if (m.hasCode) { - defs = immutable.HashMap.empty; worklist.clear(); useful.clear(); - rdef.init(m); - rdef.run(); + defs = immutable.HashMap.empty; worklist.clear(); useful.clear() + rdef.init(m) + rdef.run() m foreachBlock { bb => useful(bb) = new mutable.BitSet(bb.size) - var rd = rdef.in(bb); + var rd = rdef.in(bb) for (Pair(i, idx) <- bb.toList.zipWithIndex) { // utility for adding to worklist diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index c834607203..010f5b8319 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -969,7 +969,7 @@ abstract class Inliners extends SubComponent { } if(sameSymbols) { // TODO but this also amounts to recursive, ie should lead to adding to tfa.knownNever, right? - tfa.knownUnsafe += inc.sym; + tfa.knownUnsafe += inc.sym return DontInlineHere("sameSymbols (ie caller == callee)") } @@ -1043,9 +1043,9 @@ abstract class Inliners extends SubComponent { if (caller.isInClosure) score -= 2 else if (caller.inlinedCalls < 1) score -= 1 // only monadic methods can trigger the first inline - if (inc.isSmall) score += 1; + if (inc.isSmall) score += 1 // if (inc.hasClosureParam) score += 2 - if (inc.isLarge) score -= 1; + if (inc.isLarge) score -= 1 if (caller.isSmall && isLargeSum) { score -= 1 debuglog(s"inliner score decreased to $score because small caller $caller would become large") @@ -1054,8 +1054,8 @@ abstract class Inliners extends SubComponent { if (inc.isMonadic) score += 3 else if (inc.isHigherOrder) score += 1 - if (inc.isInClosure) score += 2; - if (inlinedMethodCount(inc.sym) > 2) score -= 2; + if (inc.isInClosure) score += 2 + if (inlinedMethodCount(inc.sym) > 2) score -= 2 score } } diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala index 14b7b80ea5..92dd05e70a 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala @@ -31,7 +31,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { private var counter = 0 def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage):NodeSeq = { - counter = counter + 1; + counter = counter + 1 this.page = page pathToLib = "../" * (page.templateToPath(template).size - 1) + "lib/" val dot = generateDot(diagram) @@ -207,7 +207,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { private def node2Dot(node: Node) = { // escape HTML characters in node names - def escape(name: String) = name.replace("&", "&").replace("<", "<").replace(">", ">"); + def escape(name: String) = name.replace("&", "&").replace("<", "<").replace(">", ">") // assemble node attribues in a map val attr = 
scala.collection.mutable.Map[String, String]() @@ -319,7 +319,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { var tSVG = -System.currentTimeMillis val result = if (dotOutput != null) { - val src = scala.io.Source.fromString(dotOutput); + val src = scala.io.Source.fromString(dotOutput) try { val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false) val doc = cpa.document() diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index b0318f40c4..fa1d4a38b9 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -366,7 +366,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") .format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size)) checkNoResponsesOutstanding() - log.flush(); + log.flush() scheduler = new NoWorkScheduler throw ShutdownReq } @@ -1025,7 +1025,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") getUnit(source) match { case Some(unit) => if (unit.isUpToDate) { - debugLog("already typed"); + debugLog("already typed") response set unit.body } else if (ignoredFiles(source.file)) { response.raise(lastException.getOrElse(CancelException)) diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala index be1c656c81..71dd0d3bbf 100644 --- a/src/compiler/scala/tools/nsc/interactive/REPL.scala +++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala @@ -51,8 +51,8 @@ object REPL { } catch { case ex @ FatalError(msg) => if (true || command.settings.debug.value) // !!! - ex.printStackTrace(); - reporter.error(null, "fatal error: " + msg) + ex.printStackTrace() + reporter.error(null, "fatal error: " + msg) } } } diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index c7e682cb08..d08c9cb36c 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -256,7 +256,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) private def findToolsJar() = { val jdkPath = Directory(jdkHome) - val jar = jdkPath / "lib" / "tools.jar" toFile; + val jar = jdkPath / "lib" / "tools.jar" toFile if (jar isFile) Some(jar) diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala index 5d32c10143..862046eb66 100644 --- a/src/compiler/scala/tools/nsc/io/Pickler.scala +++ b/src/compiler/scala/tools/nsc/io/Pickler.scala @@ -167,7 +167,7 @@ object Pickler { */ def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] { def pickle(wr: Writer, x: T) = { - wr.write(quoted(label)); + wr.write(quoted(label)) wr.write("(") p.pickle(wr, x) wr.write(")") diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 0a6716e396..8f5dca2702 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -845,7 +845,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { /** CompilationUnit ::= [package QualId semi] TopStatSeq */ def compilationUnit(): Tree = { - var pos = in.currentPos; + var pos = in.currentPos val pkg: RefTree = if (in.token == AT || in.token == PACKAGE) { annotations() diff --git 
a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 129331f435..5b5118a94f 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -178,7 +178,7 @@ abstract class SymbolLoaders { if (!settings.isScaladoc) globalError( if (msg eq null) "i/o error while loading " + root.name - else "error while loading " + root.name + ", " + msg); + else "error while loading " + root.name + ", " + msg) } try { val start = currentTime diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 9f89f47240..a7e4006fbe 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1128,7 +1128,7 @@ abstract class ClassfileParser { case tpnme.ScalaSignatureATTR => isScala = true val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen) - pbuf.readNat(); pbuf.readNat(); + pbuf.readNat(); pbuf.readNat() if (pbuf.readNat == 0) // a scala signature attribute with no entries means that the actual scala signature isScalaAnnot = true // is in a ScalaSignature annotation. in.skip(attrLen) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index 7871ac8f20..39788ee3e7 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -65,7 +65,7 @@ abstract class ICodeReader extends ClassfileParser { val fieldCount = in.nextChar for (i <- 0 until fieldCount) parseField() val methodCount = in.nextChar - for (i <- 0 until methodCount) parseMethod(); + for (i <- 0 until methodCount) parseMethod() instanceCode.methods = instanceCode.methods.reverse staticCode.methods = staticCode.methods.reverse } @@ -131,13 +131,13 @@ abstract class ICodeReader extends ClassfileParser { val attributeCount = in.nextChar for (i <- 0 until attributeCount) parseAttribute() } else { - debuglog("Skipping non-existent method."); - skipAttributes(); + debuglog("Skipping non-existent method.") + skipAttributes() } } catch { case e: MissingRequirementError => in.bp = beginning; skipAttributes() - debuglog("Skipping non-existent method. " + e.msg); + debuglog("Skipping non-existent method. 
" + e.msg) } } @@ -247,9 +247,9 @@ abstract class ICodeReader extends ClassfileParser { case JVM.aload => val local = in.nextByte.toInt; size += 1 if (local == 0 && !method.isStatic) - code.emit(THIS(method.symbol.owner)); + code.emit(THIS(method.symbol.owner)) else - code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference))); + code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference))) case JVM.iload_0 => code.emit(LOAD_LOCAL(code.getLocal(0, INT))) case JVM.iload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, INT))) @@ -269,9 +269,9 @@ abstract class ICodeReader extends ClassfileParser { case JVM.dload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, DOUBLE))) case JVM.aload_0 => if (!method.isStatic) - code.emit(THIS(method.symbol.owner)); + code.emit(THIS(method.symbol.owner)) else - code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference))); + code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference))) case JVM.aload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, ObjectReference))) case JVM.aload_2 => code.emit(LOAD_LOCAL(code.getLocal(2, ObjectReference))) case JVM.aload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, ObjectReference))) @@ -491,7 +491,7 @@ abstract class ICodeReader extends ClassfileParser { case JVM.invokespecial => val m = pool.getMemberSymbol(in.nextChar, false); size += 2 val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(true) - else SuperCall(m.owner.name); + else SuperCall(m.owner.name) code.emit(CALL_METHOD(m, style)) case JVM.invokestatic => val m = pool.getMemberSymbol(in.nextChar, true); size += 2 @@ -722,36 +722,36 @@ abstract class ICodeReader extends ClassfileParser { i match { case DUP_X1 => val (one, two) = stack.pop2 - push(one); push(two); push(one); + push(one); push(two); push(one) case DUP_X2 => val (one, two, three) = stack.pop3 - push(one); push(three); push(two); push(one); + push(one); push(three); push(two); push(one) case DUP2_X1 => val (one, two) = stack.pop2 if (one.isWideType) { - push(one); push(two); push(one); + push(one); push(two); push(one) } else { val three = stack.pop - push(two); push(one); push(three); push(two); push(one); + push(two); push(one); push(three); push(two); push(one) } case DUP2_X2 => val (one, two) = stack.pop2 if (one.isWideType && two.isWideType) { - push(one); push(two); push(one); + push(one); push(two); push(one) } else if (one.isWideType) { val three = stack.pop assert(!three.isWideType, "Impossible") - push(one); push(three); push(two); push(one); + push(one); push(three); push(two); push(one) } else { val three = stack.pop if (three.isWideType) { - push(two); push(one); push(one); push(three); push(two); push(one); + push(two); push(one); push(one); push(three); push(two); push(one) } else { val four = stack.pop - push(two); push(one); push(four); push(one); push(three); push(two); push(one); + push(two); push(one); push(four); push(one); push(three); push(two); push(one) } } @@ -779,7 +779,7 @@ abstract class ICodeReader extends ClassfileParser { STORE_LOCAL(tmp2), LOAD_LOCAL(tmp1), LOAD_LOCAL(tmp2), - LOAD_LOCAL(tmp1))); + LOAD_LOCAL(tmp1))) case DUP_X2 => val one = info.stack.types(0) @@ -792,30 +792,30 @@ abstract class ICodeReader extends ClassfileParser { STORE_LOCAL(tmp2), LOAD_LOCAL(tmp1), LOAD_LOCAL(tmp2), - LOAD_LOCAL(tmp1))); + LOAD_LOCAL(tmp1))) else { - val tmp3 = freshLocal(info.stack.types(2)); + val tmp3 = freshLocal(info.stack.types(2)) bb.replaceInstruction(i, List(STORE_LOCAL(tmp1), STORE_LOCAL(tmp2), STORE_LOCAL(tmp3), LOAD_LOCAL(tmp1), LOAD_LOCAL(tmp3), LOAD_LOCAL(tmp2), - 
LOAD_LOCAL(tmp1))); + LOAD_LOCAL(tmp1))) } case DUP2_X1 => val one = info.stack.types(0) val two = info.stack.types(1) - val tmp1 = freshLocal(one); - val tmp2 = freshLocal(two); + val tmp1 = freshLocal(one) + val tmp2 = freshLocal(two) if (one.isWideType) { assert(!two.isWideType, "Impossible") bb.replaceInstruction(i, List(STORE_LOCAL(tmp1), STORE_LOCAL(tmp2), LOAD_LOCAL(tmp1), LOAD_LOCAL(tmp2), - LOAD_LOCAL(tmp1))); + LOAD_LOCAL(tmp1))) } else { val tmp3 = freshLocal(info.stack.types(2)) bb.replaceInstruction(i, List(STORE_LOCAL(tmp1), @@ -824,7 +824,7 @@ abstract class ICodeReader extends ClassfileParser { LOAD_LOCAL(tmp1), LOAD_LOCAL(tmp3), LOAD_LOCAL(tmp2), - LOAD_LOCAL(tmp1))); + LOAD_LOCAL(tmp1))) } case DUP2_X2 => @@ -837,21 +837,21 @@ abstract class ICodeReader extends ClassfileParser { STORE_LOCAL(tmp2), LOAD_LOCAL(tmp1), LOAD_LOCAL(tmp2), - LOAD_LOCAL(tmp1))); + LOAD_LOCAL(tmp1))) } else if (one.isWideType) { val three = info.stack.types(2) assert(!two.isWideType && !three.isWideType, "Impossible") - val tmp3 = freshLocal(three); + val tmp3 = freshLocal(three) bb.replaceInstruction(i, List(STORE_LOCAL(tmp1), STORE_LOCAL(tmp2), STORE_LOCAL(tmp3), LOAD_LOCAL(tmp1), LOAD_LOCAL(tmp3), LOAD_LOCAL(tmp2), - LOAD_LOCAL(tmp1))); + LOAD_LOCAL(tmp1))) } else { val three = info.stack.types(2) - val tmp3 = freshLocal(three); + val tmp3 = freshLocal(three) if (three.isWideType) { bb.replaceInstruction(i, List(STORE_LOCAL(tmp1), STORE_LOCAL(tmp2), @@ -860,10 +860,10 @@ abstract class ICodeReader extends ClassfileParser { LOAD_LOCAL(tmp1), LOAD_LOCAL(tmp3), LOAD_LOCAL(tmp2), - LOAD_LOCAL(tmp1))); + LOAD_LOCAL(tmp1))) } else { val four = info.stack.types(3) - val tmp4 = freshLocal(three); + val tmp4 = freshLocal(three) assert(!four.isWideType, "Impossible") bb.replaceInstruction(i, List(STORE_LOCAL(tmp1), STORE_LOCAL(tmp2), @@ -874,7 +874,7 @@ abstract class ICodeReader extends ClassfileParser { LOAD_LOCAL(tmp4), LOAD_LOCAL(tmp3), LOAD_LOCAL(tmp2), - LOAD_LOCAL(tmp1))); + LOAD_LOCAL(tmp1))) } } case _ => @@ -954,7 +954,7 @@ abstract class ICodeReader extends ClassfileParser { /** Return a fresh Local variable for the given index. 
*/ private def freshLocal(idx: Int, kind: TypeKind, isArg: Boolean) = { - val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType); + val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType) val l = new Local(sym, kind, isArg) method.addLocal(l) l @@ -984,7 +984,8 @@ abstract class ICodeReader extends ClassfileParser { jmpTargets += pc } - case class LJUMP(pc: Int) extends LazyJump(pc); + case class LJUMP(pc: Int) extends LazyJump(pc) + case class LCJUMP(success: Int, failure: Int, cond: TestOp, kind: TypeKind) extends LazyJump(success) { override def toString(): String = "LCJUMP (" + kind + ") " + success + " : " + failure diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index c8b7fcee8f..79d0df5a29 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -179,7 +179,7 @@ abstract class Pickler extends SubComponent { putSymbol(sym.privateWithin) putType(sym.info) if (sym.thisSym.tpeHK != sym.tpeHK) - putType(sym.typeOfThis); + putType(sym.typeOfThis) putSymbol(sym.alias) if (!sym.children.isEmpty) { val (locals, globals) = sym.children partition (_.isLocalClass) @@ -246,8 +246,8 @@ abstract class Pickler extends SubComponent { // val savedBoundSyms = boundSyms // boundSyms are known to be local based on the EXISTENTIAL flag (see isLocal) // boundSyms = tparams ::: boundSyms // try { - putType(restpe); -// } finally { + putType(restpe) + // } finally { // boundSyms = savedBoundSyms // } putSymbols(tparams) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 79dd36803d..a4a6c3ff31 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -188,7 +188,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { // Lazy vals don't get the assignment in the constructor. 
if (!stat.symbol.tpe.isInstanceOf[ConstantType]) { if (rhs != EmptyTree && !stat.symbol.isLazy) { - val rhs1 = intoConstructor(stat.symbol, rhs); + val rhs1 = intoConstructor(stat.symbol, rhs) (if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign( stat.symbol, rhs1) } diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 60eab773aa..141a63d36e 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -745,7 +745,7 @@ abstract class Erasure extends AddInterfaces tree.symbol = NoSymbol selectFrom(qual1) } else if (isMethodTypeWithEmptyParams(qual1.tpe)) { - assert(qual1.symbol.isStable, qual1.symbol); + assert(qual1.symbol.isStable, qual1.symbol) val applied = Apply(qual1, List()) setPos qual1.pos setType qual1.tpe.resultType adaptMember(selectFrom(applied)) } else if (!(qual1.isInstanceOf[Super] || (qual1.tpe.typeSymbol isSubClass tree.symbol.owner))) { @@ -806,7 +806,7 @@ abstract class Erasure extends AddInterfaces newCdef setType newCdef.body.tpe } def adaptBranch(branch: Tree): Tree = - if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe); + if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe) tree1 match { case If(cond, thenp, elsep) => diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index a370b45be0..44d39de205 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -85,7 +85,7 @@ abstract class Flatten extends InfoTransform { val restp1 = apply(restp) if (restp1 eq restp) tp else copyMethodType(tp, params, restp1) case PolyType(tparams, restp) => - val restp1 = apply(restp); + val restp1 = apply(restp) if (restp1 eq restp) tp else PolyType(tparams, restp1) case _ => mapOver(tp) diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index a4b725d313..60815da967 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -143,7 +143,7 @@ abstract class LambdaLift extends InfoTransform { ss addEntry sym renamable addEntry sym changedFreeVars = true - debuglog("" + sym + " is free in " + enclosure); + debuglog("" + sym + " is free in " + enclosure) if (sym.isVariable) sym setFlag CAPTURED } !enclosure.isClass @@ -161,7 +161,7 @@ abstract class LambdaLift extends InfoTransform { private val freeVarTraverser = new Traverser { override def traverse(tree: Tree) { try { //debug - val sym = tree.symbol; + val sym = tree.symbol tree match { case ClassDef(_, _, _, _) => liftedDefs(tree.symbol) = Nil diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index b6d4bdb0c5..e33d665cd0 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -126,7 +126,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { " " + mixinClass + " " + base.info.baseClasses + "/" + bcs) while (!bcs.isEmpty && sym == NoSymbol) { if (settings.debug.value) { - val other = bcs.head.info.nonPrivateDecl(member.name); + val other = bcs.head.info.nonPrivateDecl(member.name) debuglog("rebindsuper " + bcs.head + " " + other + " " + other.tpe + " " + other.isDeferred) } @@ -242,7 +242,7 @@ abstract class Mixin extends 
InfoTransform with ast.TreeDSL { } } } - debuglog("new defs of " + clazz + " = " + clazz.info.decls); + debuglog("new defs of " + clazz + " = " + clazz.info.decls) } } diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index 822ef79cd0..2610679542 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -86,10 +86,10 @@ abstract class OverridingPairs { { def fillDecls(bcs: List[Symbol], deferredflag: Int) { if (!bcs.isEmpty) { fillDecls(bcs.tail, deferredflag) - var e = bcs.head.info.decls.elems; + var e = bcs.head.info.decls.elems while (e ne null) { if (e.sym.getFlag(DEFERRED) == deferredflag.toLong && !exclude(e.sym)) - decls enter e.sym; + decls enter e.sym e = e.next } } @@ -134,7 +134,7 @@ abstract class OverridingPairs { private val subParents = new Array[BitSet](size) { for (i <- List.range(0, size)) - subParents(i) = new BitSet(size); + subParents(i) = new BitSet(size) for (p <- parents) { val pIndex = index(p.typeSymbol) if (pIndex >= 0) @@ -190,7 +190,7 @@ abstract class OverridingPairs { if (nextEntry ne null) { do { do { - nextEntry = decls.lookupNextEntry(nextEntry); + nextEntry = decls.lookupNextEntry(nextEntry) /* DEBUG if ((nextEntry ne null) && !(nextEntry.sym hasFlag PRIVATE) && @@ -208,12 +208,12 @@ abstract class OverridingPairs { // overriding and nextEntry.sym } while ((nextEntry ne null) && (hasCommonParentAsSubclass(overriding, nextEntry.sym))) if (nextEntry ne null) { - overridden = nextEntry.sym; + overridden = nextEntry.sym //Console.println("yield: " + overriding + overriding.locationString + " / " + overridden + overridden.locationString);//DEBUG } else { do { curEntry = curEntry.next - } while ((curEntry ne null) && (visited contains curEntry)); + } while ((curEntry ne null) && (visited contains curEntry)) nextEntry = curEntry next() } diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index c375bc4362..b2d05f98b1 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -31,7 +31,7 @@ abstract class TailCalls extends Transform { class Phase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) { def apply(unit: global.CompilationUnit) { if (!(settings.debuginfo.value == "notailcalls")) { - newTransformer(unit).transformUnit(unit); + newTransformer(unit).transformUnit(unit) } } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 3baa88002f..3ee9009116 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -173,7 +173,7 @@ trait MatchTranslation { self: PatternMatching => (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution)) } - for(cases <- emitTypeSwitch(bindersAndCases, pt).toList; + for(cases <- emitTypeSwitch(bindersAndCases, pt).toList if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? 
e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef] } diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index b070bd1b49..eb91251930 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -590,7 +590,7 @@ trait Contexts { self: Analyzer => def restoreTypeBounds(tp: Type): Type = { var current = tp for ((sym, info) <- savedTypeBounds) { - debuglog("resetting " + sym + " to " + info); + debuglog("resetting " + sym + " to " + info) sym.info match { case TypeBounds(lo, hi) if (hi <:< lo && lo <:< hi) => current = current.instantiateTypeParams(List(sym), List(lo)) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 0bd164a0cb..b7221a78ec 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -95,7 +95,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans class RefCheckTransformer(unit: CompilationUnit) extends Transformer { - var localTyper: analyzer.Typer = typer; + var localTyper: analyzer.Typer = typer var currentApplication: Tree = EmptyTree var inPattern: Boolean = false var checkedCombinations = Set[List[Type]]() @@ -386,11 +386,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans if (!isOverrideAccessOK) { overrideAccessError() } else if (other.isClass) { - overrideError("cannot be used here - class definitions cannot be overridden"); + overrideError("cannot be used here - class definitions cannot be overridden") } else if (!other.isDeferred && member.isClass) { - overrideError("cannot be used here - classes can only override abstract types"); + overrideError("cannot be used here - classes can only override abstract types") } else if (other.isEffectivelyFinal) { // (1.2) - overrideError("cannot override final member"); + overrideError("cannot override final member") } else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) { // (*) // (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to // the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket. 
@@ -449,7 +449,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // @M: substSym if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6) - overrideTypeError(); + overrideTypeError() } else if (other.isAbstractType) { //if (!member.typeParams.isEmpty) // (1.7) @MAT @@ -502,7 +502,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case rt: RefinedType if !(rt =:= otherTp) && !(checkedCombinations contains rt.parents) => // might mask some inconsistencies -- check overrides checkedCombinations += rt.parents - val tsym = rt.typeSymbol; + val tsym = rt.typeSymbol if (tsym.pos == NoPosition) tsym setPos member.pos checkAllOverrides(tsym, typesOnly = true) case _ => @@ -523,7 +523,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans val opc = new overridingPairs.Cursor(clazz) while (opc.hasNext) { //Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullName + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullName + "/"+ opc.overridden.hasFlag(DEFERRED));//debug - if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden); + if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden) opc.next() } @@ -785,7 +785,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal) - def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix); + def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix) nonMatching match { case Nil => issueError("") @@ -840,7 +840,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case tp1 :: tp2 :: _ => unit.error(clazz.pos, "illegal inheritance;\n " + clazz + " inherits different type instances of " + baseClass + - ":\n" + tp1 + " and " + tp2); + ":\n" + tp1 + " and " + tp2) explainTypes(tp1, tp2) explainTypes(tp2, tp1) } @@ -905,7 +905,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans val e = currentLevel.scope.lookupEntry(sym.name) if ((e ne null) && sym == e.sym) { var l = currentLevel - while (l.scope != e.owner) l = l.outer; + while (l.scope != e.owner) l = l.outer val symindex = symIndex(sym) if (l.maxindex < symindex) { l.refpos = pos @@ -1093,7 +1093,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans /* Convert a reference to a case factory of type `tpe` to a new of the class it produces. 
*/ def toConstructor(pos: Position, tpe: Type): Tree = { val rtpe = tpe.finalResultType - assert(rtpe.typeSymbol hasFlag CASE, tpe); + assert(rtpe.typeSymbol hasFlag CASE, tpe) localTyper.typedOperator { atPos(pos) { Select(New(TypeTree(rtpe)), rtpe.typeSymbol.primaryConstructor) diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index f2129992e5..d8cedd119b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -129,11 +129,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT val clazz = sup.symbol if (sym.isDeferred) { - val member = sym.overridingSymbol(clazz); + val member = sym.overridingSymbol(clazz) if (mix != tpnme.EMPTY || member == NoSymbol || !(member.isAbstractOverride && member.isIncompleteIn(clazz))) unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+ - "unless it is overridden by a member declared `abstract' and `override'"); + "unless it is overridden by a member declared `abstract' and `override'") } else if (mix == tpnme.EMPTY && !sym.owner.isTrait){ // SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract. val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner) @@ -332,8 +332,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT lhs.symbol.isJavaDefined && needsProtectedAccessor(lhs.symbol, tree.pos)) { debuglog("Adding protected setter for " + tree) - val setter = makeSetter(lhs); - debuglog("Replaced " + tree + " with " + setter); + val setter = makeSetter(lhs) + debuglog("Replaced " + tree + " with " + setter) transform(localTyper.typed(Apply(setter, List(qual, rhs)))) } else super.transform(tree) diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala index fc3dd2bac2..4f11d11e8f 100644 --- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala +++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala @@ -49,13 +49,13 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, def udigit: Int = { val d = digit2int(buf(bp), 16) if (d >= 0) bp += 1 - else error("error in unicode escape"); + else error("error in unicode escape") d } if (buf(bp) == 'u' && decodeUni && evenSlashPrefix) { do { bp += 1 //; nextcol += 1 - } while (buf(bp) == 'u'); + } while (buf(bp) == 'u') val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit ch = code.asInstanceOf[Char] isUnicode = true diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala index b060ea90b8..4bc393bd0b 100644 --- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala +++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala @@ -250,7 +250,7 @@ object ShowPickled extends Names { case SYMANNOT => printSymbolRef(); printTypeRef(); buf.until(end, printAnnotArgRef) case ANNOTATEDtpe => - printTypeRef(); buf.until(end, printAnnotInfoRef); + printTypeRef(); buf.until(end, printAnnotInfoRef) case ANNOTINFO => printTypeRef(); buf.until(end, printAnnotArgRef) case ANNOTARGARRAY => diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala index 47ffbda6ca..002a3fce82 100644 --- 
a/src/compiler/scala/tools/reflect/MacroImplementations.scala +++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala @@ -26,7 +26,7 @@ abstract class MacroImplementations { "too many arguments for interpolated string") } val stringParts = parts map { - case Literal(Constant(s: String)) => s; + case Literal(Constant(s: String)) => s case _ => throw new IllegalArgumentException("argument parts must be a list of string literals") } @@ -141,7 +141,7 @@ abstract class MacroImplementations { Literal(Constant(fstring)), newTermName("format")), List(ids: _* ) - ); + ) Block(evals.toList, atPos(origApplyPos.focus)(expr)) setPos origApplyPos.makeTransparent } diff --git a/src/library/scala/beans/ScalaBeanInfo.scala b/src/library/scala/beans/ScalaBeanInfo.scala index 3a95335d71..c192a990f1 100644 --- a/src/library/scala/beans/ScalaBeanInfo.scala +++ b/src/library/scala/beans/ScalaBeanInfo.scala @@ -35,10 +35,10 @@ abstract class ScalaBeanInfo(clazz: java.lang.Class[_], // override def getAdditionalBeanInfo() = Array(Introspector getBeanInfo clazz.getSuperclass) private def init() { - var i = 0; + var i = 0 while (i < props.length) { pd(i/3) = new PropertyDescriptor(props(i), clazz, props(i+1), props(i+2)) - i = i + 3; + i = i + 3 } } diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala index 3c3e6095df..934ed831f5 100644 --- a/src/library/scala/collection/SortedMapLike.scala +++ b/src/library/scala/collection/SortedMapLike.scala @@ -69,7 +69,7 @@ self => * @param elems the remaining elements to add. */ override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): SortedMap[A, B1] = { - var m = this + elem1 + elem2; + var m = this + elem1 + elem2 for (e <- elems) m = m + e m } diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala index 498db7f8fa..1f2f224283 100644 --- a/src/library/scala/collection/generic/Signalling.scala +++ b/src/library/scala/collection/generic/Signalling.scala @@ -140,7 +140,7 @@ trait AtomicIndexFlag extends Signalling { val old = intflag.get if (f <= old) loop = false else if (intflag.compareAndSet(old, f)) loop = false - } while (loop); + } while (loop) } abstract override def setIndexFlagIfLesser(f: Int) = { var loop = true @@ -148,7 +148,7 @@ trait AtomicIndexFlag extends Signalling { val old = intflag.get if (f >= old) loop = false else if (intflag.compareAndSet(old, f)) loop = false - } while (loop); + } while (loop) } } diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala index b3847fffc9..997a136d30 100644 --- a/src/library/scala/collection/generic/Sorted.scala +++ b/src/library/scala/collection/generic/Sorted.scala @@ -95,16 +95,16 @@ trait Sorted[K, +This <: Sorted[K, This]] { val i = keySet.iterator if (i.isEmpty) return j.isEmpty - var in = i.next; + var in = i.next while (j.hasNext) { - val jn = j.next; + val jn = j.next while ({ - val n = compare(jn, in); - if (n == 0) false; - else if (n < 0) return false; - else if (!i.hasNext) return false; - else true; - }) in = i.next; + val n = compare(jn, in) + if (n == 0) false + else if (n < 0) return false + else if (!i.hasNext) return false + else true + }) in = i.next } true } diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala index 08bca04e42..2993209628 100644 --- 
a/src/library/scala/collection/generic/SortedSetFactory.scala +++ b/src/library/scala/collection/generic/SortedSetFactory.scala @@ -27,7 +27,7 @@ abstract class SortedSetFactory[CC[A] <: SortedSet[A] with SortedSetLike[A, CC[A def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new SetBuilder[A, CC[A]](empty) - implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord); + implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord) class SortedSetCanBuildFrom[A](implicit ord: Ordering[A]) extends CanBuildFrom[Coll, A, CC[A]] { def apply(from: Coll) = newBuilder[A](ord) diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 83f0d2c8a2..44e5304e09 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -395,7 +395,7 @@ time { mNew.iterator.foreach( p => ()) } */ override def foreach[U](f: ((A, B)) => U): Unit = { - var i = 0; + var i = 0 while (i < elems.length) { elems(i).foreach(f) i += 1 diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 87995f705f..e17f07c87b 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -301,8 +301,8 @@ time { mNew.iterator.foreach( p => ()) } */ override def foreach[U](f: A => U): Unit = { - var i = 0; - while (i < elems.length) { + var i = 0 + while (i < elems.length) { elems(i).foreach(f) i += 1 } diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala index ab1faf363e..83356b4932 100644 --- a/src/library/scala/collection/immutable/IntMap.scala +++ b/src/library/scala/collection/immutable/IntMap.scala @@ -50,8 +50,10 @@ object IntMap { def apply(): Builder[(Int, B), IntMap[B]] = new MapBuilder[Int, B, IntMap[B]](empty[B]) } - def empty[T] : IntMap[T] = IntMap.Nil; - def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value); + def empty[T] : IntMap[T] = IntMap.Nil + + def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value) + def apply[T](elems: (Int, T)*): IntMap[T] = elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index 6cf6c4259e..fd23276c8d 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -75,7 +75,7 @@ class ListSet[A] extends AbstractSet[A] * @return number of set elements. */ override def size: Int = 0 - override def isEmpty: Boolean = true; + override def isEmpty: Boolean = true /** Checks if this set contains element `elem`. 
* @@ -126,12 +126,12 @@ class ListSet[A] extends AbstractSet[A] /** * @throws Predef.NoSuchElementException */ - override def head: A = throw new NoSuchElementException("Set has no elements"); + override def head: A = throw new NoSuchElementException("Set has no elements") /** * @throws Predef.NoSuchElementException */ - override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set"); + override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set") override def stringPrefix = "ListSet" diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index 60300c2a9e..506546c5ba 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -97,7 +97,7 @@ private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends buffer(index) = x.asInstanceOf[AnyRef] index += 1 } - push(it); + push(it) /** * What value do we assign to a tip? @@ -178,7 +178,7 @@ extends AbstractMap[Long, T] */ override final def foreach[U](f: ((Long, T)) => U): Unit = this match { case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } - case LongMap.Tip(key, value) => f((key, value)); + case LongMap.Tip(key, value) => f((key, value)) case LongMap.Nil => } diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 1cd0128c05..d3ce3ab58c 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -273,13 +273,13 @@ object RedBlackTree { } private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { if (tree eq null) return null - if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until); - if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until); + if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until) + if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until) val newLeft = doFrom(tree.left, from) val newRight = doUntil(tree.right, until) if ((newLeft eq tree.left) && (newRight eq tree.right)) tree - else if (newLeft eq null) upd(newRight, tree.key, tree.value, false); - else if (newRight eq null) upd(newLeft, tree.key, tree.value, false); + else if (newLeft eq null) upd(newRight, tree.key, tree.value, false) + else if (newRight eq null) upd(newLeft, tree.key, tree.value, false) else rebalance(tree, newLeft, newRight) } diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala index 8fef1be66b..23b68b7969 100644 --- a/src/library/scala/collection/mutable/HashTable.scala +++ b/src/library/scala/collection/mutable/HashTable.scala @@ -382,7 +382,7 @@ private[collection] object HashTable { /** The load factor for the hash table (in 0.001 step). 
*/ private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75% - private[collection] final def loadFactorDenum = 1000; + private[collection] final def loadFactorDenum = 1000 private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt @@ -457,13 +457,13 @@ private[collection] object HashTable { */ private[collection] def powerOfTwo(target: Int): Int = { /* See http://bits.stephan-brumme.com/roundUpToNextPowerOfTwo.html */ - var c = target - 1; - c |= c >>> 1; - c |= c >>> 2; - c |= c >>> 4; - c |= c >>> 8; - c |= c >>> 16; - c + 1; + var c = target - 1 + c |= c >>> 1 + c |= c >>> 2 + c |= c >>> 4 + c |= c >>> 8 + c |= c >>> 16 + c + 1 } class Contents[A, Entry >: Null <: HashEntry[A, Entry]]( diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index 97d469bca2..af1d7e4183 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -137,7 +137,7 @@ final class ListBuffer[A] if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString) if (exported) copy() if (n == 0) { - val newElem = new :: (x, start.tail); + val newElem = new :: (x, start.tail) if (last0 eq start) { last0 = newElem } diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala index 8b3e52470a..ad001fd79c 100644 --- a/src/library/scala/collection/mutable/OpenHashMap.scala +++ b/src/library/scala/collection/mutable/OpenHashMap.scala @@ -27,7 +27,7 @@ object OpenHashMap { var value: Option[Value]) extends HashEntry[Key, OpenEntry[Key, Value]] - private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1; + private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1 } /** A mutable hash map based on an open hashing scheme. The precise scheme is @@ -78,8 +78,8 @@ extends AbstractMap[Key, Value] /** Returns a mangled hash code of the provided key. 
*/ protected def hashOf(key: Key) = { var h = key.## - h ^= ((h >>> 20) ^ (h >>> 12)); - h ^ (h >>> 7) ^ (h >>> 4); + h ^= ((h >>> 20) ^ (h >>> 12)) + h ^ (h >>> 7) ^ (h >>> 4) } private[this] def growTable() = { @@ -89,7 +89,7 @@ extends AbstractMap[Key, Value] table = new Array[Entry](newSize) mask = newSize - 1 oldTable.foreach( entry => - if (entry != null && entry.value != None) addEntry(entry)); + if (entry != null && entry.value != None) addEntry(entry)) deleted = 0 } @@ -128,14 +128,14 @@ extends AbstractMap[Key, Value] val index = findIndex(key, hash) val entry = table(index) if (entry == null) { - table(index) = new OpenEntry(key, hash, Some(value)); + table(index) = new OpenEntry(key, hash, Some(value)) modCount += 1 size += 1 None } else { val res = entry.value if (entry.value == None) { size += 1; modCount += 1 } - entry.value = Some(value); + entry.value = Some(value) res } } @@ -161,13 +161,13 @@ extends AbstractMap[Key, Value] while(entry != null){ if (entry.hash == hash && entry.key == key){ - return entry.value; + return entry.value } - j = 5 * j + 1 + perturb; - perturb >>= 5; - index = j & mask; - entry = table(index); + j = 5 * j + 1 + perturb + perturb >>= 5 + index = j & mask + entry = table(index) } None } @@ -182,8 +182,8 @@ extends AbstractMap[Key, Value] val initialModCount = modCount private[this] def advance() { - if (initialModCount != modCount) sys.error("Concurrent modification"); - while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1; + if (initialModCount != modCount) sys.error("Concurrent modification") + while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1 } def hasNext = {advance(); index <= mask } @@ -198,7 +198,7 @@ extends AbstractMap[Key, Value] override def clone() = { val it = new OpenHashMap[Key, Value] - foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get)); + foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get)) it } @@ -213,24 +213,24 @@ extends AbstractMap[Key, Value] * @param f The function to apply to each key, value mapping. 
*/ override def foreach[U](f : ((Key, Value)) => U) { - val startModCount = modCount; + val startModCount = modCount foreachUndeletedEntry(entry => { if (modCount != startModCount) sys.error("Concurrent Modification") f((entry.key, entry.value.get))} - ); + ) } private[this] def foreachUndeletedEntry(f : Entry => Unit){ - table.foreach(entry => if (entry != null && entry.value != None) f(entry)); + table.foreach(entry => if (entry != null && entry.value != None) f(entry)) } override def transform(f : (Key, Value) => Value) = { - foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get))); + foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get))) this } override def retain(f : (Key, Value) => Boolean) = { - foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} ); + foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} ) this } diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 6eda29e6b0..33af99067d 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -820,7 +820,7 @@ self: ParIterableLike[T, Repr, Sequential] => def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { val thatseq = that.asParSeq - tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport }); + tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport }) } else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport) def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false) @@ -831,11 +831,11 @@ self: ParIterableLike[T, Repr, Sequential] => new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport } - ); + ) } else setTaskSupport(seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)), tasksupport) protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = { - tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport }); + tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport }) } protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = { @@ -1474,9 +1474,9 @@ self: ParIterableLike[T, Repr, Sequential] => /* alias methods */ - def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op); + def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op) - def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op); + def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op) /* debug information */ diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala index 0ecd6bd9ec..b2105e1e9e 100644 --- a/src/library/scala/collection/parallel/ParIterableViewLike.scala +++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala @@ -50,7 +50,8 @@ extends GenIterableView[T, Coll] self => override 
def foreach[U](f: T => U): Unit = super[ParIterableLike].foreach(f) - override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner"); + override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner") + protected[this] def viewIdentifier: String protected[this] def viewIdString: String diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala index 874cf6fee9..4aaadbaac5 100644 --- a/src/library/scala/collection/parallel/ParSeqLike.scala +++ b/src/library/scala/collection/parallel/ParSeqLike.scala @@ -252,7 +252,7 @@ self => def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) { patch(length, new immutable.Repetition(elem, len - length), 0) - } else patch(length, Nil, 0); + } else patch(length, Nil, 0) override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { val thatseq = that.asParSeq @@ -260,7 +260,7 @@ self => new Zip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport } - ); + ) } else super.zip(that)(bf) /** Tests whether every element of this $coll relates to the diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala index 04369d8fde..d03b377860 100644 --- a/src/library/scala/collection/parallel/ParSeqViewLike.scala +++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala @@ -125,8 +125,8 @@ self => } protected def newReversed: Transformed[T] = new Reversed { } protected def newPatched[U >: T](_from: Int, _patch: GenSeq[U], _replaced: Int): Transformed[U] = new { - val from = _from; - val patch = _patch; + val from = _from + val patch = _patch val replaced = _replaced } with Patched[U] diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala index 732ebc3709..726f5a2e93 100644 --- a/src/library/scala/collection/parallel/RemainsIterator.scala +++ b/src/library/scala/collection/parallel/RemainsIterator.scala @@ -517,7 +517,8 @@ self => def next = if (self.hasNext) { if (that.hasNext) (self.next, that.next) else (self.next, thatelem) - } else (thiselem, that.next); + } else (thiselem, that.next) + def remaining = self.remaining max that.remaining def dup: IterableSplitter[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem) def split: Seq[IterableSplitter[(U, S)]] = { @@ -606,7 +607,7 @@ self => } else Seq(sz) } val (selfszfrom, thatszfrom) = splitsizes.zip(szcum.init).span(_._2 < selfrem) - val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 }); + val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 }) // split iterators val selfs = self.psplit(selfsizes: _*) diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala index 4e350a2adf..ec1bcbb27a 100644 --- a/src/library/scala/collection/parallel/Tasks.scala +++ b/src/library/scala/collection/parallel/Tasks.scala @@ -191,7 +191,7 @@ trait AdaptiveWorkStealingTasks extends Tasks { last = t t.start() } - } while (head.body.shouldSplitFurther); + } while (head.body.shouldSplitFurther) head.next = last head } diff --git 
a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala index 0c9f82ba2a..a3f473c6a7 100644 --- a/src/library/scala/collection/parallel/immutable/ParRange.scala +++ b/src/library/scala/collection/parallel/immutable/ParRange.scala @@ -42,7 +42,7 @@ self => @inline final def length = range.length - @inline final def apply(idx: Int) = range.apply(idx); + @inline final def apply(idx: Int) = range.apply(idx) def splitter = new ParRangeIterator diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index 770599e9d3..0e9eac62e2 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -611,7 +611,8 @@ self => class ScanToArray[U >: T](tree: ScanTree[U], z: U, op: (U, U) => U, targetarr: Array[Any]) extends Task[Unit, ScanToArray[U]] { - var result = (); + var result = () + def leaf(prev: Option[Unit]) = iterate(tree) private def iterate(tree: ScanTree[U]): Unit = tree match { case ScanNode(left, right) => @@ -647,7 +648,8 @@ self => } class Map[S](f: T => S, targetarr: Array[Any], offset: Int, howmany: Int) extends Task[Unit, Map[S]] { - var result = (); + var result = () + def leaf(prev: Option[Unit]) = { val tarr = targetarr val sarr = array diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala index 541d75290b..e94db89865 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala @@ -97,7 +97,8 @@ self => class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V]) extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) { - def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value); + def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value) + def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) = new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) } @@ -303,7 +304,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau private[parallel] object ParHashMapCombiner { private[mutable] val discriminantbits = 5 private[mutable] val numblocks = 1 << discriminantbits - private[mutable] val discriminantmask = ((1 << discriminantbits) - 1); + private[mutable] val discriminantmask = ((1 << discriminantbits) - 1) private[mutable] val nonmasklength = 32 - discriminantbits def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala index e5de6182e6..2431baf3e7 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala @@ -159,8 +159,8 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] { sizeMapInit(table.length) seedvalue = ParHashSetCombiner.this.seedvalue for { - buffer <- buckets; - if buffer ne null; + buffer <- buckets + if buffer ne null entry <- buffer } addEntry(entry) } @@ -235,7 +235,8 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] { class FillBlocks(buckets: Array[UnrolledBuffer[AnyRef]], table: AddingFlatHashTable, val 
offset: Int, val howmany: Int) extends Task[(Int, UnrolledBuffer[AnyRef]), FillBlocks] { - var result = (Int.MinValue, new UnrolledBuffer[AnyRef]); + var result = (Int.MinValue, new UnrolledBuffer[AnyRef]) + def leaf(prev: Option[(Int, UnrolledBuffer[AnyRef])]) { var i = offset var totalinserts = 0 @@ -319,7 +320,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] { private[parallel] object ParHashSetCombiner { private[mutable] val discriminantbits = 5 private[mutable] val numblocks = 1 << discriminantbits - private[mutable] val discriminantmask = ((1 << discriminantbits) - 1); + private[mutable] val discriminantmask = ((1 << discriminantbits) - 1) private[mutable] val nonmasklength = 32 - discriminantbits def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) {} //with EnvironmentPassingCombiner[T, ParHashSet[T]] diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala index c3a379485d..f5c0b10526 100644 --- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala +++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala @@ -69,7 +69,8 @@ extends Combiner[T, ParArray[T]] { class CopyUnrolledToArray(array: Array[Any], offset: Int, howmany: Int) extends Task[Unit, CopyUnrolledToArray] { - var result = (); + var result = () + def leaf(prev: Option[Unit]) = if (howmany > 0) { var totalleft = howmany val (startnode, startpos) = findStart(offset) diff --git a/src/library/scala/io/ReadStdin.scala b/src/library/scala/io/ReadStdin.scala index 429d7cec75..e82c26ef7a 100644 --- a/src/library/scala/io/ReadStdin.scala +++ b/src/library/scala/io/ReadStdin.scala @@ -218,7 +218,7 @@ private[scala] trait ReadStdin { case x: java.lang.Float => x.floatValue() case x: java.lang.Double => x.doubleValue() case x => x - }) :: res; + }) :: res i -= 1 } res diff --git a/src/library/scala/ref/SoftReference.scala b/src/library/scala/ref/SoftReference.scala index b414db6e97..e4ce667981 100644 --- a/src/library/scala/ref/SoftReference.scala +++ b/src/library/scala/ref/SoftReference.scala @@ -13,7 +13,8 @@ package scala.ref * @author Sean McDirmid */ class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends ReferenceWrapper[T] { - def this(value : T) = this(value, null); + def this(value : T) = this(value, null) + val underlying: java.lang.ref.SoftReference[_ <: T] = new SoftReferenceWithWrapper[T](value, queue, this) } diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala index 0beb840bed..8a1cce6b02 100755 --- a/src/library/scala/reflect/NameTransformer.scala +++ b/src/library/scala/reflect/NameTransformer.scala @@ -94,7 +94,7 @@ object NameTransformer { def decode(name0: String): String = { //System.out.println("decode: " + name);//DEBUG val name = if (name0.endsWith("")) name0.stripSuffix("") + "this" - else name0; + else name0 var buf: StringBuilder = null val len = name.length() var i = 0 diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala index b74fd152b5..59d5b1bf93 100644 --- a/src/library/scala/text/Document.scala +++ b/src/library/scala/text/Document.scala @@ -80,7 +80,7 @@ abstract class Document { fmt(k, (i + ii, b, d) :: z) case (i, true, DocBreak) :: z => writer write "\n" - spaces(i); + spaces(i) fmt(i, z) case (i, false, DocBreak) :: z => writer write " " diff --git 
a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala index a5bc8faf8d..b82259c217 100644 --- a/src/library/scala/util/MurmurHash.scala +++ b/src/library/scala/util/MurmurHash.scala @@ -164,7 +164,7 @@ object MurmurHash { var k = hiddenMagicB var j = 0 while (j+1 < s.length) { - val i = (s.charAt(j)<<16) + s.charAt(j+1); + val i = (s.charAt(j)<<16) + s.charAt(j+1) h = extendHash(h,i,c,k) c = nextMagicA(c) k = nextMagicB(k) diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 7af75173d3..0cd0cfd7f6 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -233,7 +233,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends new Iterator[Match] { def hasNext = matchIterator.hasNext def next: Match = { - matchIterator.next; + matchIterator.next new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force } } diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala index 9429e9caa7..f3c162fcc8 100755 --- a/src/library/scala/xml/Utility.scala +++ b/src/library/scala/xml/Utility.scala @@ -311,14 +311,14 @@ object Utility extends AnyRef with parsing.TokenTests { while (i < value.length) { value.charAt(i) match { case '<' => - return "< not allowed in attribute value"; + return "< not allowed in attribute value" case '&' => val n = getName(value, i+1) if (n eq null) - return "malformed entity reference in attribute value ["+value+"]"; + return "malformed entity reference in attribute value ["+value+"]" i = i + n.length + 1 if (i >= value.length || value.charAt(i) != ';') - return "malformed entity reference in attribute value ["+value+"]"; + return "malformed entity reference in attribute value ["+value+"]" case _ => } i = i + 1 diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala index ace02193da..6bc9c05832 100644 --- a/src/library/scala/xml/dtd/ContentModelParser.scala +++ b/src/library/scala/xml/dtd/ContentModelParser.scala @@ -21,10 +21,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # if (token != tok) { if ((tok == STAR) && (token == END)) // common mistake scala.sys.error("in DTDs, \n"+ - "mixed content models must be like (#PCDATA|Name|Name|...)*"); + "mixed content models must be like (#PCDATA|Name|Name|...)*") else scala.sys.error("expected "+token2string(tok)+ - ", got unexpected token:"+token2string(token)); + ", got unexpected token:"+token2string(token)) } nextToken } @@ -44,43 +44,43 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # case NAME => value match { case "ANY" => ANY case "EMPTY" => EMPTY - case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value ); + case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value ) } case LPAREN => - nextToken; - sOpt; + nextToken + sOpt if (token != TOKEN_PCDATA) - ELEMENTS(regexp); + ELEMENTS(regexp) else { - nextToken; + nextToken token match { case RPAREN => PCDATA case CHOICE => - val res = MIXED(choiceRest(Eps)); - sOpt; - accept( RPAREN ); - accept( STAR ); + val res = MIXED(choiceRest(Eps)) + sOpt + accept( RPAREN ) + accept( STAR ) res case _ => - scala.sys.error("unexpected token:" + token2string(token) ); + scala.sys.error("unexpected token:" + token2string(token) ) } } case _ => - scala.sys.error("unexpected token:" + token2string(token) ); - } + 
scala.sys.error("unexpected token:" + token2string(token) ) + } // sopt ::= S? - def sOpt() = if( token == S ) nextToken; + def sOpt() = if( token == S ) nextToken // (' S? mixed ::= '#PCDATA' S? ')' // | '#PCDATA' (S? '|' S? atom)* S? ')*' // '(' S? regexp ::= cp S? [seqRest|choiceRest] ')' [ '+' | '*' | '?' ] def regexp: RegExp = { - val p = particle; - sOpt; + val p = particle + sOpt maybeSuffix(token match { case RPAREN => nextToken; p case CHOICE => val q = choiceRest( p );accept( RPAREN ); q @@ -90,24 +90,24 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # // seqRest ::= (',' S? cp S?)+ def seqRest(p: RegExp) = { - var k = List(p); + var k = List(p) while( token == COMMA ) { - nextToken; - sOpt; - k = particle::k; - sOpt; + nextToken + sOpt + k = particle::k + sOpt } Sequ( k.reverse:_* ) } // choiceRest ::= ('|' S? cp S?)+ def choiceRest( p:RegExp ) = { - var k = List( p ); + var k = List( p ) while( token == CHOICE ) { - nextToken; - sOpt; - k = particle::k; - sOpt; + nextToken + sOpt + k = particle::k + sOpt } Alt( k.reverse:_* ) } @@ -115,14 +115,14 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # // particle ::= '(' S? regexp // | name [ '+' | '*' | '?' ] def particle = token match { - case LPAREN => nextToken; sOpt; regexp; + case LPAREN => nextToken; sOpt; regexp case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a) - case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token)); + case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token)) } // atom ::= name def atom = token match { case NAME => val a = Letter(ElemName(value)); nextToken; a - case _ => scala.sys.error("expected Name, got:"+token2string(token)); + case _ => scala.sys.error("expected Name, got:"+token2string(token)) } } diff --git a/src/library/scala/xml/dtd/Decl.scala b/src/library/scala/xml/dtd/Decl.scala index dc4cb93ddf..fd2eaa30ba 100644 --- a/src/library/scala/xml/dtd/Decl.scala +++ b/src/library/scala/xml/dtd/Decl.scala @@ -123,7 +123,7 @@ case class ExtDef(extID:ExternalID) extends EntityDef { /** a parsed entity reference */ case class PEReference(ent:String) extends MarkupDecl { if( !Utility.isName( ent )) - throw new IllegalArgumentException("ent must be an XML Name"); + throw new IllegalArgumentException("ent must be an XML Name") override def buildString(sb: StringBuilder): StringBuilder = sb append '%' append ent append ';' diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala index 9b64cc61e2..d4d648c8df 100644 --- a/src/library/scala/xml/dtd/Scanner.scala +++ b/src/library/scala/xml/dtd/Scanner.scala @@ -39,12 +39,12 @@ class Scanner extends Tokens with parsing.TokenTests { // todo: see XML specification... 
probably isLetter,isDigit is fine final def isIdentChar = ( ('a' <= c && c <= 'z') - || ('A' <= c && c <= 'Z')); + || ('A' <= c && c <= 'Z')) final def next() = if (it.hasNext) c = it.next else c = ENDCH final def acc(d: Char) { - if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !"); + if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !") } final def accS(ds: Seq[Char]) { ds foreach acc } @@ -70,7 +70,7 @@ class Scanner extends Tokens with parsing.TokenTests { final def name = { val sb = new StringBuilder() - do { sb.append(c); next } while (isNameChar(c)); + do { sb.append(c); next } while (isNameChar(c)) value = sb.toString() NAME } diff --git a/src/library/scala/xml/dtd/ValidationException.scala b/src/library/scala/xml/dtd/ValidationException.scala index 243db69ab7..15640e2da7 100644 --- a/src/library/scala/xml/dtd/ValidationException.scala +++ b/src/library/scala/xml/dtd/ValidationException.scala @@ -33,7 +33,7 @@ object MakeValidationException { def fromMissingAttribute(allKeys: Set[String]) = { val sb = new StringBuilder("missing value for REQUIRED attribute") - if (allKeys.size > 1) sb.append('s'); + if (allKeys.size > 1) sb.append('s') allKeys foreach (k => sb append "'%s'".format(k)) new ValidationException(sb.toString()) } diff --git a/src/library/scala/xml/factory/Binder.scala b/src/library/scala/xml/factory/Binder.scala index bad4a4ea09..b463fda5ba 100755 --- a/src/library/scala/xml/factory/Binder.scala +++ b/src/library/scala/xml/factory/Binder.scala @@ -48,7 +48,7 @@ abstract class Binder(val preserveWS: Boolean) extends ValidatingMarkupHandler { val old = result result = new NodeBuffer() for (m <- x.child) traverse(m) - result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList; + result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList elemEnd(0, x.prefix, x.label) } diff --git a/src/library/scala/xml/factory/LoggedNodeFactory.scala b/src/library/scala/xml/factory/LoggedNodeFactory.scala index cac61acc39..49a6d622a7 100644 --- a/src/library/scala/xml/factory/LoggedNodeFactory.scala +++ b/src/library/scala/xml/factory/LoggedNodeFactory.scala @@ -46,7 +46,7 @@ trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.loggin override def makeNode(pre: String, label: String, attrSeq: MetaData, scope: NamespaceBinding, children: Seq[Node]): A = { if (logNode) - log("[makeNode for "+label+"]"); + log("[makeNode for "+label+"]") val hash = Utility.hashCode(pre, label, attrSeq.##, scope.##, children) @@ -59,26 +59,26 @@ trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.loggin } */ if (!cache.get( hash ).isEmpty && (logCompressLevel >= CACHE)) - log("[cache hit !]"); + log("[cache hit !]") super.makeNode(pre, label, attrSeq, scope, children) } override def makeText(s: String) = { if (logText) - log("[makeText:\""+s+"\"]"); + log("[makeText:\""+s+"\"]") super.makeText(s) } override def makeComment(s: String): Seq[Comment] = { if (logComment) - log("[makeComment:\""+s+"\"]"); + log("[makeComment:\""+s+"\"]") super.makeComment(s) } override def makeProcInstr(t: String, s: String): Seq[ProcInstr] = { if (logProcInstr) - log("[makeProcInstr:\""+t+" "+ s+"\"]"); + log("[makeProcInstr:\""+t+" "+ s+"\"]") super.makeProcInstr(t, s) } diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala index 103cddcb11..9079b5f9c7 100644 --- 
a/src/library/scala/xml/include/sax/XIncludeFilter.scala +++ b/src/library/scala/xml/include/sax/XIncludeFilter.scala @@ -147,10 +147,10 @@ class XIncludeFilter extends XMLFilterImpl { if (parse equals "text") { val encoding = atts getValue "encoding" - includeTextDocument(href, encoding); + includeTextDocument(href, encoding) } else if (parse equals "xml") { - includeXMLDocument(href); + includeXMLDocument(href) } // Need to check this also in DOM and JDOM???? else { @@ -184,7 +184,7 @@ class XIncludeFilter extends XMLFilterImpl { } } - private var depth = 0; + private var depth = 0 override def startDocument() { level = 0 @@ -240,7 +240,7 @@ class XIncludeFilter extends XMLFilterImpl { } locationString = (" in document included from " + publicID + " at " + systemID - + " at line " + line + ", column " + column); + + " at line " + line + ", column " + column) locationString } @@ -258,7 +258,7 @@ class XIncludeFilter extends XMLFilterImpl { */ private def includeTextDocument(url: String, encoding1: String) { var encoding = encoding1 - if (encoding == null || encoding.trim().equals("")) encoding = "UTF-8"; + if (encoding == null || encoding.trim().equals("")) encoding = "UTF-8" var source: URL = null try { val base = bases.peek().asInstanceOf[URL] @@ -284,13 +284,13 @@ class XIncludeFilter extends XMLFilterImpl { // MIME types are case-insensitive // Java may be picking this up from file URL if (contentType != null) { - contentType = contentType.toLowerCase(); + contentType = contentType.toLowerCase() if (contentType.equals("text/xml") || contentType.equals("application/xml") || (contentType.startsWith("text/") && contentType.endsWith("+xml") ) || (contentType.startsWith("application/") && contentType.endsWith("+xml"))) { - encoding = EncodingHeuristics.readEncodingFromStream(in); - } + encoding = EncodingHeuristics.readEncodingFromStream(in) + } } } val reader = new InputStreamReader(in, encoding) diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala index 81c5613541..8fcd66d4c0 100644 --- a/src/library/scala/xml/include/sax/XIncluder.scala +++ b/src/library/scala/xml/include/sax/XIncluder.scala @@ -28,7 +28,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit def startDocument() { try { out.write("\r\n"); + + encoding + "'?>\r\n") } catch { case e:IOException => @@ -52,16 +52,16 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit def startElement(namespaceURI: String, localName: String, qualifiedName: String, atts: Attributes) = { try { - out.write("<" + qualifiedName); + out.write("<" + qualifiedName) var i = 0; while (i < atts.getLength()) { - out.write(" "); - out.write(atts.getQName(i)); - out.write("='"); - val value = atts.getValue(i); + out.write(" ") + out.write(atts.getQName(i)) + out.write("='") + val value = atts.getValue(i) // @todo Need to use character references if the encoding // can't support the character out.write(scala.xml.Utility.escape(value)) - out.write("'"); + out.write("'") i += 1 } out.write(">") @@ -87,20 +87,20 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit def characters(ch: Array[Char], start: Int, length: Int) { try { var i = 0; while (i < length) { - val c = ch(start+i); - if (c == '&') out.write("&"); - else if (c == '<') out.write("<"); + val c = ch(start+i) + if (c == '&') out.write("&") + else if (c == '<') out.write("<") // This next fix is normally not necessary. 
// However, it is required if text contains ]]> // (The end CDATA section delimiter) - else if (c == '>') out.write(">"); - else out.write(c); + else if (c == '>') out.write(">") + else out.write(c) i += 1 } } catch { case e: IOException => - throw new SAXException("Write failed", e); + throw new SAXException("Write failed", e) } } @@ -138,8 +138,8 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit // if this is the source document, output a DOCTYPE declaration if (entities.isEmpty) { var id = "" - if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"'; - else if (systemID != null) id = " SYSTEM \"" + systemID + '"'; + if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"' + else if (systemID != null) id = " SYSTEM \"" + systemID + '"' try { out.write("\r\n") } diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala index 6b8f58dca3..228043e183 100755 --- a/src/library/scala/xml/parsing/MarkupParser.scala +++ b/src/library/scala/xml/parsing/MarkupParser.scala @@ -105,7 +105,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests lastChRead = curInput.next pos = curInput.pos } else { - val ilen = inpStack.length; + val ilen = inpStack.length //Console.println(" ilen = "+ilen+ " extIndex = "+extIndex); if ((ilen != extIndex) && (ilen > 0)) { /** for external source, inpStack == Nil ! need notify of eof! */ @@ -141,7 +141,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests xSpace val (md,scp) = xAttributes(TopScope) if (scp != TopScope) - reportSyntaxError("no xmlns definitions here, please."); + reportSyntaxError("no xmlns definitions here, please.") xToken('?') xToken('>') md @@ -247,7 +247,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests case _:ProcInstr => case _:Comment => case _:EntityRef => // todo: fix entities, shouldn't be "special" - reportSyntaxError("no entity references allowed here"); + reportSyntaxError("no entity references allowed here") case s:SpecialNode => if (s.toString.trim().length > 0) //non-empty text nodes not allowed elemCount += 2 @@ -328,7 +328,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests } if(!aMap.wellformed(scope)) - reportSyntaxError( "double attribute"); + reportSyntaxError( "double attribute") (aMap,scope) } @@ -389,10 +389,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests /* todo: move this into the NodeBuilder class */ def appendText(pos: Int, ts: NodeBuffer, txt: String): Unit = { if (preserveWS) - ts &+ handle.text(pos, txt); + ts &+ handle.text(pos, txt) else for (t <- TextBuffer.fromString(txt).toText) { - ts &+ handle.text(pos, t.text); + ts &+ handle.text(pos, t.text) } } @@ -446,7 +446,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests case '#' => // CharacterRef nextch val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch)) - xToken(';'); + xToken(';') ts &+ theChar case _ => // EntityRef val n = xName @@ -597,7 +597,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def systemLiteral(): String = { val endch = ch if (ch != '\'' && ch != '"') - reportSyntaxError("quote ' or \" expected"); + reportSyntaxError("quote ' or \" expected") nextch while (ch != endch && !eof) { putChar(ch) @@ -615,7 +615,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def pubidLiteral(): String = { val endch = ch if (ch!='\'' && ch != '"') - reportSyntaxError("quote ' or \" expected"); + 
reportSyntaxError("quote ' or \" expected") nextch while (ch != endch && !eof) { putChar(ch) @@ -889,10 +889,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests val sysID = if (ch != '>') systemLiteral() else - null; + null new PublicID(pubID, sysID) } else { - reportSyntaxError("PUBLIC or SYSTEM expected"); + reportSyntaxError("PUBLIC or SYSTEM expected") scala.sys.error("died parsing notationdecl") } xSpaceOpt diff --git a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala index 0edea043a5..018ae4d2cd 100644 --- a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala +++ b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala @@ -50,8 +50,8 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged { log("advanceDFA(trans): " + trans) trans.get(ContentModel.ElemName(label)) match { case Some(qNew) => qCurrent = qNew - case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys); - } + case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys) + } } // advance in current automaton log("[qCurrent = "+qCurrent+" visiting "+label+"]") @@ -106,7 +106,7 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged { } final override def notationDecl(notat: String, extID: ExternalID) { - decls = NotationDecl(notat, extID) :: decls; + decls = NotationDecl(notat, extID) :: decls } final override def peReference(name: String) { diff --git a/src/library/scala/xml/transform/BasicTransformer.scala b/src/library/scala/xml/transform/BasicTransformer.scala index 1402ccd6aa..e427071177 100644 --- a/src/library/scala/xml/transform/BasicTransformer.scala +++ b/src/library/scala/xml/transform/BasicTransformer.scala @@ -53,7 +53,7 @@ abstract class BasicTransformer extends Function1[Node,Node] def apply(n: Node): Node = { val seq = transform(n) if (seq.length > 1) - throw new UnsupportedOperationException("transform must return single node for root"); + throw new UnsupportedOperationException("transform must return single node for root") else seq.head } } diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala index 28bc3e1dd0..5ed2f675b2 100644 --- a/src/reflect/scala/reflect/internal/Constants.scala +++ b/src/reflect/scala/reflect/internal/Constants.scala @@ -94,7 +94,7 @@ trait Constants extends api.Constants { def booleanValue: Boolean = if (tag == BooleanTag) value.asInstanceOf[Boolean] - else throw new Error("value " + value + " is not a boolean"); + else throw new Error("value " + value + " is not a boolean") def byteValue: Byte = tag match { case ByteTag => value.asInstanceOf[Byte] diff --git a/src/reflect/scala/reflect/internal/InfoTransformers.scala b/src/reflect/scala/reflect/internal/InfoTransformers.scala index 82904b0b68..4e84a29fd0 100644 --- a/src/reflect/scala/reflect/internal/InfoTransformers.scala +++ b/src/reflect/scala/reflect/internal/InfoTransformers.scala @@ -43,7 +43,7 @@ trait InfoTransformers { if (from == this.pid) this else if (from < this.pid) if (prev.pid < from) this - else prev.nextFrom(from); + else prev.nextFrom(from) else if (next.pid == NoPhase.id) next else next.nextFrom(from) } diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala index 5fecc06128..5d7df8c367 100644 --- a/src/reflect/scala/reflect/internal/Kinds.scala +++ 
b/src/reflect/scala/reflect/internal/Kinds.scala @@ -36,7 +36,7 @@ trait Kinds { private def varStr(s: Symbol): String = if (s.isCovariant) "covariant" else if (s.isContravariant) "contravariant" - else "invariant"; + else "invariant" private def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else { if((a0 eq b0) || (a0.owner eq b0.owner)) "" diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index b60d1e619f..8b64bf7a32 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -34,7 +34,7 @@ trait Names extends api.Names { cs(offset) * (41 * 41) + cs(offset + len - 1) * 41 + cs(offset + (len >> 1))) - else 0; + else 0 /** Is (the ASCII representation of) name at given index equal to * cs[offset..offset+len-1]? @@ -42,7 +42,7 @@ trait Names extends api.Names { private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = { var i = 0 while ((i < len) && (chrs(index + i) == cs(offset + i))) - i += 1; + i += 1 i == len } @@ -275,7 +275,7 @@ trait Names extends api.Names { var i = 0 while (i < prefix.length && start + i < len && chrs(index + start + i) == chrs(prefix.start + i)) - i += 1; + i += 1 i == prefix.length } @@ -287,7 +287,7 @@ trait Names extends api.Names { var i = 1 while (i <= suffix.length && i <= end && chrs(index + end - i) == chrs(suffix.start + suffix.length - i)) - i += 1; + i += 1 i > suffix.length } diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index a745a78a4f..9e72fb9145 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -208,7 +208,7 @@ trait Printers extends api.Printers { self: SymbolTable => case ModuleDef(mods, name, impl) => printAnnotations(tree) - printModifiers(tree, mods); + printModifiers(tree, mods) print("object " + symName(tree, name), " extends ", impl) case ValDef(mods, name, tp, rhs) => @@ -423,7 +423,7 @@ trait Printers extends api.Printers { self: SymbolTable => printOpt(" >: ", lo); printOpt(" <: ", hi) case ExistentialTypeTree(tpt, whereClauses) => - print(tpt); + print(tpt) printColumn(whereClauses, " forSome { ", ";", "}") // SelectFromArray is no longer visible in reflect.internal. 
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index b1cfaa4774..850c497d4b 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -188,7 +188,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => if (e1 == e) { hashtable(index) = e.tail } else { - while (e1.tail != e) e1 = e1.tail; + while (e1.tail != e) e1 = e1.tail e1.tail = e.tail } } @@ -199,7 +199,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => def unlink(sym: Symbol) { var e = lookupEntry(sym.name) while (e ne null) { - if (e.sym == sym) unlink(e); + if (e.sym == sym) unlink(e) e = lookupNextEntry(e) } } @@ -300,7 +300,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => if (hashtable ne null) do { e = e.tail } while ((e ne null) && e.sym.name != entry.sym.name) else - do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name); + do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name) e } diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index b3a398a8d7..9b5778b9da 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -222,7 +222,7 @@ abstract class SymbolTable extends macros.Universe def noChangeInBaseClasses(it: InfoTransformer, limit: Phase#Id): Boolean = ( it.pid >= limit || !it.changesBaseClasses && noChangeInBaseClasses(it.next, limit) - ); + ) period != 0 && runId(period) == currentRunId && { val pid = phaseId(period) if (phase.id > pid) noChangeInBaseClasses(infoTransformers.nextFrom(pid), phase.id) diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 408c7c648f..ce33fd8408 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -522,7 +522,7 @@ trait Trees extends api.Trees { self: SymbolTable => case t => t } - orig = followOriginal(tree); setPos(tree.pos); + orig = followOriginal(tree); setPos(tree.pos) this } @@ -1425,7 +1425,7 @@ trait Trees extends api.Trees { self: SymbolTable => def subst(from: List[Symbol], to: List[Tree]): Tree = if (from.isEmpty) tree else if (tree.symbol == from.head) to.head.shallowDuplicate // TODO: does it ever make sense *not* to perform a shallowDuplicate on `to.head`? - else subst(from.tail, to.tail); + else subst(from.tail, to.tail) subst(from, to) case _ => super.transform(tree) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 5cb6f78874..09f78d1d5b 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -908,7 +908,7 @@ trait Types extends api.Types { self: SymbolTable => (this eq that) || (if (explainSwitch) explain("=", isSameType, this, that) else isSameType(this, that)) - ); + ) /** Is this type close enough to that type so that members * with the two type would override each other? 
@@ -1488,7 +1488,7 @@ trait Types extends api.Types { self: SymbolTable => tpe.underlyingPeriod = currentPeriod if (!isValid(period)) { // [Eugene to Paul] needs review - tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType; + tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType assert(tpe.underlyingCache ne tpe, tpe) } } @@ -1500,7 +1500,8 @@ trait Types extends api.Types { self: SymbolTable => if (trivial == UNKNOWN) trivial = fromBoolean(thistpe.isTrivial && supertpe.isTrivial) toBoolean(trivial) } - override def isNotNull = true; + override def isNotNull = true + override def typeSymbol = thistpe.typeSymbol override def underlying = supertpe override def prefix: Type = supertpe.prefix @@ -1637,8 +1638,8 @@ trait Types extends api.Types { self: SymbolTable => var bcs = sbcs def isNew(clazz: Symbol): Boolean = ( superclazz.baseTypeIndex(clazz) < 0 && - { var p = bcs; - while ((p ne sbcs) && (p.head != clazz)) p = p.tail; + { var p = bcs + while ((p ne sbcs) && (p.head != clazz)) p = p.tail p eq sbcs } ) @@ -2874,7 +2875,8 @@ trait Types extends api.Types { self: SymbolTable => */ case class AntiPolyType(pre: Type, targs: List[Type]) extends Type { override def safeToString = - pre.toString + targs.mkString("(with type arguments ", ", ", ")"); + pre.toString + targs.mkString("(with type arguments ", ", ", ")") + override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs) override def kind = "AntiPolyType" } @@ -4976,7 +4978,7 @@ trait Types extends api.Types { self: SymbolTable => sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner)) if (!corresponds(sym.owner, rebind0.owner)) { debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString) - val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner)); + val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner)) if (bcs.isEmpty) assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinementclass it might be a structural type => OK to leave it in. else @@ -6569,7 +6571,7 @@ trait Types extends api.Types { self: SymbolTable => } } def refines(tp: Type, sym: Symbol): Boolean = { - val syms = tp.nonPrivateMember(sym.name).alternatives; + val syms = tp.nonPrivateMember(sym.name).alternatives !syms.isEmpty && (syms forall (alt => // todo alt != sym is strictly speaking not correct, but without it we lose // efficiency. 
@@ -6708,8 +6710,8 @@ trait Types extends api.Types { self: SymbolTable => def glbsym(proto: Symbol): Symbol = { val prototp = glbThisType.memberInfo(proto) val syms = for (t <- ts; - alt <- (t.nonPrivateMember(proto.name).alternatives); - if glbThisType.memberInfo(alt) matches prototp + alt <- (t.nonPrivateMember(proto.name).alternatives) + if glbThisType.memberInfo(alt) matches prototp ) yield alt val symtypes = syms map glbThisType.memberInfo assert(!symtypes.isEmpty) @@ -6891,7 +6893,7 @@ trait Types extends api.Types { self: SymbolTable => if (sym.isTerm) for (alt <- tp.nonPrivateDecl(sym.name).alternatives) if (specializesSym(thistp, sym, thistp, alt, depth)) - tp.decls unlink alt; + tp.decls unlink alt tp.decls enter sym } } diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala index 34c6fe234c..c9dfb7fe71 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala @@ -106,7 +106,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { do { b = readByte() x = (x << 7) + (b & 0x7f) - } while ((b & 0x80) != 0L); + } while ((b & 0x80) != 0L) x } @@ -150,7 +150,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { * Concatenate results into a list. */ def until[T](end: Int, op: () => T): List[T] = - if (readIndex == end) List() else op() :: until(end, op); + if (readIndex == end) List() else op() :: until(end, op) /** Perform operation `op` the number of * times specified. Concatenate the results into a list. diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala index 589076d693..09b99087e6 100644 --- a/src/reflect/scala/reflect/io/VirtualDirectory.scala +++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala @@ -68,6 +68,6 @@ extends AbstractFile { } def clear() { - files.clear(); + files.clear() } } diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala index 0dfa7d5473..6f98b8385b 100644 --- a/src/reflect/scala/reflect/io/VirtualFile.scala +++ b/src/reflect/scala/reflect/io/VirtualFile.scala @@ -41,7 +41,7 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF override def sizeOption: Option[Int] = Some(content.size) - def input : InputStream = new ByteArrayInputStream(content); + def input : InputStream = new ByteArrayInputStream(content) override def output: OutputStream = { new ByteArrayOutputStream() { -- cgit v1.2.3 From 6e450ed030aa7a5af53475f1613257a8e12322bc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 24 Feb 2013 23:31:52 +0100 Subject: Reorder to avoid code appearing like a forward reference. It isn't, because LOOP_HEADER and friends are compile time constants. But I'd argue that its clearer not to rely on that here. 
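For context, a minimal standalone sketch of why the original ordering happened to compile and behave correctly (the object name is made up for illustration; only the constant/val interplay is the point): a val initializer may mention a `final val` declared further down the same object only because constant vals are inlined at the use site — without that, the earlier initializer would observe the field's default value.

    object BBFlagsSketch {
      // flagMap mentions LOOP_HEADER before its declaration. It still sees 1
      // rather than the default 0 only because a final val with a constant
      // initializer is a compile-time constant and is inlined here. Declaring
      // the constants first, as this commit does, avoids relying on that.
      val flagMap = Map[Int, String](LOOP_HEADER -> "loopheader")

      final val LOOP_HEADER = 1 << 0
    }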
--- .../tools/nsc/backend/icode/BasicBlocks.scala | 24 +++++++++++----------- 1 file changed, 12 insertions(+), 12 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index 24c18e6530..cc10479ca1 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -500,18 +500,6 @@ trait BasicBlocks { } object BBFlags { - val flagMap = Map[Int, String]( - LOOP_HEADER -> "loopheader", - IGNORING -> "ignore", - EX_HEADER -> "exheader", - CLOSED -> "closed", - DIRTYSUCCS -> "dirtysuccs", - DIRTYPREDS -> "dirtypreds" - ) - def flagsToString(flags: Int) = { - flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " " - } - /** This block is a loop header (was translated from a while). */ final val LOOP_HEADER = (1 << 0) @@ -529,4 +517,16 @@ object BBFlags { /** Code has been changed, recompute predecessors. */ final val DIRTYPREDS = (1 << 5) + + val flagMap = Map[Int, String]( + LOOP_HEADER -> "loopheader", + IGNORING -> "ignore", + EX_HEADER -> "exheader", + CLOSED -> "closed", + DIRTYSUCCS -> "dirtysuccs", + DIRTYPREDS -> "dirtypreds" + ) + def flagsToString(flags: Int) = { + flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " " + } } -- cgit v1.2.3 From 41703dfef181caa7877aec77e90249264fd37e02 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 24 Feb 2013 23:49:22 +0100 Subject: More explicit empty paren lists in method calls. --- .../nsc/backend/opt/InlineExceptionHandlers.scala | 2 +- .../scala/tools/nsc/backend/opt/Inliners.scala | 2 +- .../tools/nsc/symtab/classfile/ICodeReader.scala | 2 +- src/library/scala/Array.scala | 10 +- src/library/scala/Enumeration.scala | 4 +- src/library/scala/beans/ScalaBeanInfo.scala | 2 +- src/library/scala/collection/BitSetLike.scala | 2 +- src/library/scala/collection/DefaultMap.scala | 4 +- src/library/scala/collection/IndexedSeqLike.scala | 4 +- .../scala/collection/IndexedSeqOptimized.scala | 10 +- src/library/scala/collection/IterableLike.scala | 48 +++--- src/library/scala/collection/Iterator.scala | 18 +- src/library/scala/collection/LinearSeqLike.scala | 4 +- .../scala/collection/LinearSeqOptimized.scala | 12 +- src/library/scala/collection/MapLike.scala | 4 +- src/library/scala/collection/Parallelizable.scala | 2 +- src/library/scala/collection/SeqLike.scala | 38 ++--- src/library/scala/collection/SetLike.scala | 8 +- src/library/scala/collection/SortedMap.scala | 4 +- src/library/scala/collection/TraversableOnce.scala | 6 +- .../scala/collection/concurrent/TrieMap.scala | 2 +- .../scala/collection/convert/Wrappers.scala | 8 +- .../scala/collection/generic/GenMapFactory.scala | 2 +- .../collection/generic/GenTraversableFactory.scala | 10 +- .../generic/GenericClassTagCompanion.scala | 4 +- .../collection/generic/GenericCompanion.scala | 4 +- .../generic/GenericOrderedCompanion.scala | 4 +- .../generic/GenericTraversableTemplate.scala | 10 +- .../scala/collection/generic/Signalling.scala | 2 +- src/library/scala/collection/generic/Sorted.scala | 10 +- .../collection/generic/SortedMapFactory.scala | 2 +- .../collection/generic/SortedSetFactory.scala | 2 +- .../scala/collection/immutable/BitSet.scala | 2 +- .../scala/collection/immutable/DefaultMap.scala | 4 +- .../scala/collection/immutable/ListSet.scala | 4 +- .../scala/collection/immutable/MapLike.scala | 2 +- 
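The convention applied throughout: nullary methods that are declared with an empty parameter list and typically have side effects (advancing an iterator, consuming a builder, clearing a collection) are called with explicit parentheses, while accessor-like methods declared without a parameter list, such as hasNext, stay unparenthesized. A small self-contained sketch of the style this series moves toward (illustrative names only, not code from the patch):

    object EmptyParensSketch {
      def main(args: Array[String]): Unit = {
        val it = Iterator(1, 2, 3)
        val b  = List.newBuilder[Int]
        while (it.hasNext) b += it.next()  // was `it.next`: next() advances the iterator
        val xs = b.result()                // was `b.result`: result() consumes the builder
        println(xs)                        // List(1, 2, 3)
      }
    }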
.../scala/collection/immutable/PagedSeq.scala | 4 +- .../scala/collection/immutable/RedBlackTree.scala | 2 +- .../scala/collection/immutable/SortedMap.scala | 4 +- .../scala/collection/immutable/Stream.scala | 2 +- .../scala/collection/immutable/StringLike.scala | 4 +- .../scala/collection/immutable/TreeMap.scala | 2 +- .../scala/collection/immutable/TreeSet.scala | 2 +- .../scala/collection/immutable/TrieIterator.scala | 4 +- .../scala/collection/immutable/Vector.scala | 22 +-- src/library/scala/collection/mutable/AVLTree.scala | 4 +- .../scala/collection/mutable/ArrayOps.scala | 4 +- .../scala/collection/mutable/ArraySeq.scala | 2 +- .../scala/collection/mutable/ArrayStack.scala | 4 +- .../scala/collection/mutable/BufferLike.scala | 4 +- .../scala/collection/mutable/BufferProxy.scala | 2 +- src/library/scala/collection/mutable/Builder.scala | 2 +- .../collection/mutable/DoubleLinkedList.scala | 2 +- .../scala/collection/mutable/FlatHashTable.scala | 4 +- src/library/scala/collection/mutable/HashMap.scala | 8 +- src/library/scala/collection/mutable/HashSet.scala | 4 +- .../scala/collection/mutable/HashTable.scala | 2 +- src/library/scala/collection/mutable/History.scala | 6 +- .../scala/collection/mutable/LinkedHashMap.scala | 6 +- .../scala/collection/mutable/LinkedHashSet.scala | 2 +- .../scala/collection/mutable/LinkedListLike.scala | 2 +- .../scala/collection/mutable/MutableList.scala | 2 +- .../collection/mutable/ObservableBuffer.scala | 2 +- .../scala/collection/mutable/ObservableMap.scala | 2 +- .../scala/collection/mutable/ObservableSet.scala | 2 +- .../scala/collection/mutable/OpenHashMap.scala | 2 +- .../scala/collection/mutable/PriorityQueue.scala | 4 +- .../collection/mutable/PriorityQueueProxy.scala | 4 +- .../scala/collection/mutable/Publisher.scala | 2 +- src/library/scala/collection/mutable/Queue.scala | 2 +- .../scala/collection/mutable/QueueProxy.scala | 4 +- .../collection/mutable/RevertibleHistory.scala | 4 +- src/library/scala/collection/mutable/SetLike.scala | 2 +- .../scala/collection/mutable/StackProxy.scala | 4 +- .../collection/mutable/SynchronizedBuffer.scala | 2 +- .../mutable/SynchronizedPriorityQueue.scala | 4 +- .../collection/mutable/SynchronizedQueue.scala | 4 +- .../scala/collection/mutable/SynchronizedSet.scala | 2 +- .../collection/mutable/SynchronizedStack.scala | 4 +- .../scala/collection/mutable/UnrolledBuffer.scala | 4 +- .../scala/collection/parallel/Combiner.scala | 2 +- .../collection/parallel/ParIterableLike.scala | 28 ++-- .../collection/parallel/ParIterableViewLike.scala | 2 +- .../scala/collection/parallel/ParMapLike.scala | 4 +- .../scala/collection/parallel/ParSeqLike.scala | 8 +- .../scala/collection/parallel/ParSeqViewLike.scala | 2 +- .../collection/parallel/RemainsIterator.scala | 84 +++++----- .../scala/collection/parallel/Splitter.scala | 2 +- src/library/scala/collection/parallel/Tasks.scala | 16 +- .../collection/parallel/immutable/ParHashMap.scala | 2 +- .../collection/parallel/immutable/ParHashSet.scala | 2 +- .../collection/parallel/immutable/ParRange.scala | 2 +- .../collection/parallel/mutable/ParArray.scala | 6 +- .../parallel/mutable/ParFlatHashTable.scala | 2 +- .../mutable/UnrolledParArrayCombiner.scala | 2 +- src/library/scala/concurrent/Future.scala | 6 +- .../scala/concurrent/duration/Duration.scala | 4 +- src/library/scala/concurrent/impl/Promise.scala | 6 +- src/library/scala/io/BufferedSource.scala | 2 +- src/library/scala/io/Source.scala | 12 +- src/library/scala/math/BigDecimal.scala | 2 +- 
src/library/scala/math/BigInt.scala | 2 +- src/library/scala/math/Ordering.scala | 4 +- .../scala/math/ScalaNumericConversions.scala | 14 +- src/library/scala/runtime/ScalaNumberProxy.scala | 4 +- src/library/scala/runtime/Tuple2Zipped.scala | 26 +-- src/library/scala/runtime/Tuple3Zipped.scala | 26 +-- src/library/scala/sys/process/BasicIO.scala | 2 +- src/library/scala/sys/process/ProcessImpl.scala | 10 +- src/library/scala/util/Random.scala | 2 +- src/library/scala/util/matching/Regex.scala | 6 +- src/library/scala/xml/PrettyPrinter.scala | 2 +- src/library/scala/xml/Utility.scala | 16 +- src/library/scala/xml/dtd/ContentModelParser.scala | 40 ++--- src/library/scala/xml/dtd/DocType.scala | 2 +- src/library/scala/xml/dtd/Scanner.scala | 28 ++-- .../scala/xml/dtd/impl/SubsetConstruction.scala | 2 +- src/library/scala/xml/factory/XMLLoader.scala | 2 +- src/library/scala/xml/parsing/FactoryAdapter.scala | 10 +- src/library/scala/xml/parsing/MarkupParser.scala | 184 ++++++++++----------- .../scala/xml/parsing/MarkupParserCommon.scala | 30 ++-- src/library/scala/xml/parsing/XhtmlParser.scala | 2 +- .../scala/xml/persistent/CachedFileStorage.scala | 10 +- src/library/scala/xml/pull/XMLEventReader.scala | 6 +- src/reflect/scala/reflect/internal/Printers.scala | 12 +- src/reflect/scala/reflect/internal/TreeInfo.scala | 8 +- src/reflect/scala/reflect/internal/Types.scala | 2 +- .../scala/reflect/internal/util/Collections.scala | 2 +- src/reflect/scala/reflect/io/PlainFile.scala | 2 +- src/reflect/scala/reflect/io/Streamable.scala | 6 +- .../scala/reflect/io/VirtualDirectory.scala | 6 +- src/reflect/scala/reflect/io/VirtualFile.scala | 6 +- src/reflect/scala/reflect/io/ZipArchive.scala | 14 +- 133 files changed, 552 insertions(+), 550 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala index 7f76839ae5..dcf0590951 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala @@ -262,7 +262,7 @@ abstract class InlineExceptionHandlers extends SubComponent { if (analyzedMethod eq NoIMethod) { analyzedMethod = bblock.method tfa.init(bblock.method) - tfa.run + tfa.run() log(" performed tfa on method: " + bblock.method) for (block <- bblock.method.blocks.sortBy(_.label)) diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index 010f5b8319..d183b3a291 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -232,7 +232,7 @@ abstract class Inliners extends SubComponent { val hasRETURN = containsRETURN(incm.code.blocksList) || (incm.exh exists { eh => containsRETURN(eh.blocks) }) var a: analysis.MethodTFA = null - if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run } + if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run() } if(forceable) { recentTFAs.put(incm.symbol, (hasRETURN, a)) } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index 39788ee3e7..6e99129ee5 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -764,7 +764,7 @@ abstract class ICodeReader extends ClassfileParser { // method.dump tfa.init(method) - tfa.run + 
tfa.run() for (bb <- linearizer.linearize(method)) { var info = tfa.in(bb) for (i <- bb.toList) { diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala index b9f51803ec..aede6a5d37 100644 --- a/src/library/scala/Array.scala +++ b/src/library/scala/Array.scala @@ -242,7 +242,7 @@ object Array extends FallbackArrayBuilding { val b = newBuilder[T] b.sizeHint(xss.map(_.size).sum) for (xs <- xss) b ++= xs - b.result + b.result() } /** Returns an array that contains the results of some element computation a number @@ -267,7 +267,7 @@ object Array extends FallbackArrayBuilding { b += elem i += 1 } - b.result + b.result() } /** Returns a two-dimensional array that contains the results of some element @@ -331,7 +331,7 @@ object Array extends FallbackArrayBuilding { b += f(i) i += 1 } - b.result + b.result() } /** Returns a two-dimensional array containing values of a given function @@ -406,7 +406,7 @@ object Array extends FallbackArrayBuilding { b += i i += step } - b.result + b.result() } /** Returns an array containing repeated applications of a function to a start value. @@ -431,7 +431,7 @@ object Array extends FallbackArrayBuilding { b += acc } } - b.result + b.result() } /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index d522539e83..59be0cdfa3 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -95,7 +95,7 @@ abstract class Enumeration (initial: Int) extends Serializable { protected var nextName: Iterator[String] = _ private def nextNameOrNull = - if (nextName != null && nextName.hasNext) nextName.next else null + if (nextName != null && nextName.hasNext) nextName.next() else null /** The highest integer amongst those used to identify values in this * enumeration. 
*/ @@ -277,7 +277,7 @@ abstract class Enumeration (initial: Int) extends Serializable { def newBuilder: mutable.Builder[Value, ValueSet] = new mutable.Builder[Value, ValueSet] { private[this] val b = new mutable.BitSet def += (x: Value) = { b += (x.id - bottomId); this } - def clear() = b.clear + def clear() = b.clear() def result() = new ValueSet(b.toImmutable) } /** The implicit builder for value sets */ diff --git a/src/library/scala/beans/ScalaBeanInfo.scala b/src/library/scala/beans/ScalaBeanInfo.scala index c192a990f1..ac8fa263d7 100644 --- a/src/library/scala/beans/ScalaBeanInfo.scala +++ b/src/library/scala/beans/ScalaBeanInfo.scala @@ -27,7 +27,7 @@ abstract class ScalaBeanInfo(clazz: java.lang.Class[_], for (m <- clazz.getMethods if methods.exists(_ == m.getName)) yield new MethodDescriptor(m) - init + init() override def getPropertyDescriptors() = pd override def getMethodDescriptors() = md diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala index bf05331cb1..72a6713ffd 100644 --- a/src/library/scala/collection/BitSetLike.scala +++ b/src/library/scala/collection/BitSetLike.scala @@ -109,7 +109,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe } def next(): Int = if (hasNext) { val r = current; current += 1; r } - else Iterator.empty.next + else Iterator.empty.next() } override def foreach[B](f: Int => B) { diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala index cbd7e3f8b9..bbd6b2c2fc 100644 --- a/src/library/scala/collection/DefaultMap.scala +++ b/src/library/scala/collection/DefaultMap.scala @@ -30,7 +30,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self => val b = Map.newBuilder[A, B1] b ++= this b += ((kv._1, kv._2)) - b.result + b.result() } /** A default implementation which creates a new immutable map. 
@@ -38,6 +38,6 @@ trait DefaultMap[A, +B] extends Map[A, B] { self => override def - (key: A): Map[A, B] = { val b = newBuilder b ++= this filter (key != _._1) - b.result + b.result() } } diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala index 1d8e2b1583..473202a8eb 100644 --- a/src/library/scala/collection/IndexedSeqLike.scala +++ b/src/library/scala/collection/IndexedSeqLike.scala @@ -59,7 +59,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] { def next(): A = { if (index >= end) - Iterator.empty.next + Iterator.empty.next() val x = self(index) index += 1 @@ -68,7 +68,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] { def head = { if (index >= end) - Iterator.empty.next + Iterator.empty.next() self(index) } diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala index 9721a42e91..ade04e4de8 100755 --- a/src/library/scala/collection/IndexedSeqOptimized.scala +++ b/src/library/scala/collection/IndexedSeqOptimized.scala @@ -88,7 +88,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { b += ((this(i), that(i).asInstanceOf[B])) i += 1 } - b.result + b.result() case _ => super.zip[A1, B, That](that)(bf) } @@ -103,7 +103,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { b += ((this(i), i)) i += 1 } - b.result + b.result() } override /*IterableLike*/ @@ -119,7 +119,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { b += self(i) i += 1 } - b.result + b.result() } override /*IterableLike*/ @@ -220,7 +220,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { i -= 1 b += this(i) } - b.result + b.result() } override /*SeqLike*/ @@ -231,7 +231,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { if (0 < i) { i -= 1 self(i) - } else Iterator.empty.next + } else Iterator.empty.next() } override /*SeqLike*/ diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala index 540bd84b79..b043d1f2a6 100644 --- a/src/library/scala/collection/IterableLike.scala +++ b/src/library/scala/collection/IterableLike.scala @@ -88,13 +88,13 @@ self => override /*TraversableLike*/ def toIterator: Iterator[A] = iterator override /*TraversableLike*/ def head: A = - iterator.next + iterator.next() override /*TraversableLike*/ def slice(from: Int, until: Int): Repr = { val lo = math.max(from, 0) val elems = until - lo val b = newBuilder - if (elems <= 0) b.result + if (elems <= 0) b.result() else { b.sizeHintBounded(elems, this) var i = 0 @@ -103,14 +103,14 @@ self => b += it.next i += 1 } - b.result + b.result() } } override /*TraversableLike*/ def take(n: Int): Repr = { val b = newBuilder - if (n <= 0) b.result + if (n <= 0) b.result() else { b.sizeHintBounded(n, this) var i = 0 @@ -119,7 +119,7 @@ self => b += it.next i += 1 } - b.result + b.result() } } @@ -130,21 +130,21 @@ self => var i = 0 val it = iterator while (i < n && it.hasNext) { - it.next + it.next() i += 1 } - (b ++= it).result + (b ++= it).result() } override /*TraversableLike*/ def takeWhile(p: A => Boolean): Repr = { val b = newBuilder val it = iterator while (it.hasNext) { - val x = it.next - if (!p(x)) return b.result + val x = it.next() + if (!p(x)) return b.result() b += x } - b.result + b.result() } /** Partitions elements in fixed size ${coll}s. 
@@ -158,7 +158,7 @@ self => for (xs <- iterator grouped size) yield { val b = newBuilder b ++= xs - b.result + b.result() } /** Groups elements in fixed size blocks by passing a "sliding window" @@ -187,7 +187,7 @@ self => for (xs <- iterator.sliding(size, step)) yield { val b = newBuilder b ++= xs - b.result + b.result() } /** Selects last ''n'' elements. @@ -203,11 +203,11 @@ self => val lead = this.iterator drop n var go = false for (x <- this.seq) { - if (lead.hasNext) lead.next + if (lead.hasNext) lead.next() else go = true if (go) b += x } - b.result + b.result() } /** Selects all elements except last ''n'' ones. @@ -224,9 +224,9 @@ self => val it = iterator while (lead.hasNext) { b += it.next - lead.next + lead.next() } - b.result + b.result() } override /*TraversableLike*/ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) { @@ -234,7 +234,7 @@ self => val end = (start + len) min xs.length val it = iterator while (i < end && it.hasNext) { - xs(i) = it.next + xs(i) = it.next() i += 1 } } @@ -244,8 +244,8 @@ self => val these = this.iterator val those = that.iterator while (these.hasNext && those.hasNext) - b += ((these.next, those.next)) - b.result + b += ((these.next(), those.next())) + b.result() } def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = { @@ -253,12 +253,12 @@ self => val these = this.iterator val those = that.iterator while (these.hasNext && those.hasNext) - b += ((these.next, those.next)) + b += ((these.next(), those.next())) while (these.hasNext) - b += ((these.next, thatElem)) + b += ((these.next(), thatElem)) while (those.hasNext) - b += ((thisElem, those.next)) - b.result + b += ((thisElem, those.next())) + b.result() } def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = { @@ -268,7 +268,7 @@ self => b += ((x, i)) i +=1 } - b.result + b.result() } def sameElements[B >: A](that: GenIterable[B]): Boolean = { diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index cb7d2095bc..77baad71d3 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -368,7 +368,7 @@ trait Iterator[+A] extends TraversableOnce[A] { def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] { private var cur: Iterator[B] = empty def hasNext: Boolean = - cur.hasNext || self.hasNext && { cur = f(self.next).toIterator; hasNext } + cur.hasNext || self.hasNext && { cur = f(self.next()).toIterator; hasNext } def next(): B = (if (hasNext) cur else empty).next() } @@ -408,7 +408,7 @@ trait Iterator[+A] extends TraversableOnce[A] { def corresponds[B](that: GenTraversableOnce[B])(p: (A, B) => Boolean): Boolean = { val that0 = that.toIterator while (hasNext && that0.hasNext) - if (!p(next, that0.next)) return false + if (!p(next(), that0.next())) return false hasNext == that0.hasNext } @@ -630,7 +630,7 @@ trait Iterator[+A] extends TraversableOnce[A] { */ def zip[B](that: Iterator[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] { def hasNext = self.hasNext && that.hasNext - def next = (self.next, that.next) + def next = (self.next(), that.next()) } /** Appends an element value to this iterator until a given target length is reached. 
@@ -650,9 +650,9 @@ trait Iterator[+A] extends TraversableOnce[A] { def hasNext = self.hasNext || count < len def next = { count += 1 - if (self.hasNext) self.next + if (self.hasNext) self.next() else if (count <= len) elem - else empty.next + else empty.next() } } @@ -667,7 +667,7 @@ trait Iterator[+A] extends TraversableOnce[A] { var idx = 0 def hasNext = self.hasNext def next = { - val ret = (self.next, idx) + val ret = (self.next(), idx) idx += 1 ret } @@ -1052,12 +1052,12 @@ trait Iterator[+A] extends TraversableOnce[A] { val e = self.next() gap enqueue e e - } else gap.dequeue + } else gap.dequeue() } // to verify partnerhood we use reference equality on gap because // type testing does not discriminate based on origin. private def compareGap(queue: scala.collection.mutable.Queue[A]) = gap eq queue - override def hashCode = gap.hashCode + override def hashCode = gap.hashCode() override def equals(other: Any) = other match { case x: Partner => x.compareGap(gap) && gap.isEmpty case _ => super.equals(other) @@ -1139,7 +1139,7 @@ trait Iterator[+A] extends TraversableOnce[A] { def toTraversable: Traversable[A] = toStream def toIterator: Iterator[A] = self def toStream: Stream[A] = - if (self.hasNext) Stream.cons(self.next, self.toStream) + if (self.hasNext) Stream.cons(self.next(), self.toStream) else Stream.empty[A] diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala index 2a824bcff3..a4bb194f8a 100644 --- a/src/library/scala/collection/LinearSeqLike.scala +++ b/src/library/scala/collection/LinearSeqLike.scala @@ -55,14 +55,14 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr def next(): A = if (hasNext) { val result = these.head; these = these.tail; result - } else Iterator.empty.next + } else Iterator.empty.next() /** Have to clear `these` so the iterator is exhausted like * it would be without the optimization. 
*/ override def toList: List[A] = { val xs = these.toList - these = newBuilder.result + these = newBuilder.result() xs } } diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala index ed5f2406e8..de4d5e2ba2 100755 --- a/src/library/scala/collection/LinearSeqOptimized.scala +++ b/src/library/scala/collection/LinearSeqOptimized.scala @@ -151,7 +151,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea b += these.head these = these.tail } - b.result + b.result() } override /*TraversableLike*/ @@ -186,7 +186,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea these = these.tail lead = lead.tail } - b.result + b.result() } override /*IterableLike*/ @@ -194,7 +194,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea var these: Repr = repr var count = from max 0 if (until <= count) - return newBuilder.result + return newBuilder.result() val b = newBuilder var sliceElems = until - count @@ -207,7 +207,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea b += these.head these = these.tail } - b.result + b.result() } override /*IterableLike*/ @@ -218,7 +218,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea b += these.head these = these.tail } - b.result + b.result() } override /*TraversableLike*/ @@ -229,7 +229,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea b += these.head these = these.tail } - (b.result, these) + (b.result(), these) } override /*IterableLike*/ diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala index 93d02a435c..cc0129202f 100644 --- a/src/library/scala/collection/MapLike.scala +++ b/src/library/scala/collection/MapLike.scala @@ -181,7 +181,7 @@ self => def keysIterator: Iterator[A] = new AbstractIterator[A] { val iter = self.iterator def hasNext = iter.hasNext - def next() = iter.next._1 + def next() = iter.next()._1 } /** Collects all keys of this map in an iterable collection. 
@@ -213,7 +213,7 @@ self => def valuesIterator: Iterator[B] = new AbstractIterator[B] { val iter = self.iterator def hasNext = iter.hasNext - def next() = iter.next._2 + def next() = iter.next()._2 } /** Defines the default value computation for the map, diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala index d97c44abc0..626dfa4032 100644 --- a/src/library/scala/collection/Parallelizable.scala +++ b/src/library/scala/collection/Parallelizable.scala @@ -39,7 +39,7 @@ trait Parallelizable[+A, +ParRepr <: Parallel] extends Any { def par: ParRepr = { val cb = parCombiner for (x <- seq) cb += x - cb.result + cb.result() } /** The default `par` implementation uses the combiner provided by this method diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index 35df680783..307ee3f2a8 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -127,7 +127,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ def lastIndexWhere(p: A => Boolean, end: Int): Int = { var i = length - 1 val it = reverseIterator - while (it.hasNext && { val elem = it.next; (i > end || !p(elem)) }) i -= 1 + while (it.hasNext && { val elem = it.next(); (i > end || !p(elem)) }) i -= 1 i } @@ -156,10 +156,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ def hasNext = _hasNext def next(): Repr = { if (!hasNext) - Iterator.empty.next + Iterator.empty.next() val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms - val result = (self.newBuilder ++= forcedElms).result + val result = (self.newBuilder ++= forcedElms).result() var i = idxs.length - 2 while(i >= 0 && idxs(i) >= idxs(i+1)) i -= 1 @@ -208,13 +208,13 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ def hasNext = _hasNext def next(): Repr = { if (!hasNext) - Iterator.empty.next + Iterator.empty.next() /** Calculate this result. */ val buf = self.newBuilder for(k <- 0 until nums.length; j <- 0 until nums(k)) buf += elms(offs(k)+j) - val res = buf.result + val res = buf.result() /** Prepare for the next call to next. */ var idx = nums.length - 1 @@ -268,7 +268,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ b.sizeHint(this) for (x <- xs) b += x - b.result + b.result() } def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { @@ -279,7 +279,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ for (x <- xs) b += f(x) - b.result + b.result() } /** An iterator yielding elements in reversed order. @@ -442,7 +442,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ for (x <- this) if (occ(x) == 0) b += x else occ(x) -= 1 - b.result + b.result() } /** Computes the multiset intersection between this $coll and another sequence. 
@@ -473,7 +473,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ b += x occ(x) -= 1 } - b.result + b.result() } private def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { @@ -496,7 +496,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ seen += x } } - b.result + b.result() } def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = { @@ -505,7 +505,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ b ++= toCollection(prefix) b ++= patch.seq b ++= toCollection(rest).view drop replaced - b.result + b.result() } def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { @@ -514,21 +514,21 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ b ++= toCollection(prefix) b += elem b ++= toCollection(rest).view.tail - b.result + b.result() } def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { val b = bf(repr) b += elem b ++= thisCollection - b.result + b.result() } def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { val b = bf(repr) b ++= thisCollection b += elem - b.result + b.result() } def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = { @@ -540,14 +540,14 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ b += elem diff -= 1 } - b.result + b.result() } def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = { val i = this.iterator val j = that.iterator while (i.hasNext && j.hasNext) - if (!p(i.next, j.next)) + if (!p(i.next(), j.next())) return false !i.hasNext && !j.hasNext @@ -616,7 +616,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ val b = newBuilder b.sizeHint(len) for (x <- arr) b += x - b.result + b.result() } /** Converts this $coll to a sequence. 
@@ -682,7 +682,7 @@ object SeqLike { val wit = W.iterator.drop(n0) var i = if (forward) 0 else (n1-n0-1) while (i != done) { - Warr(i) = wit.next.asInstanceOf[AnyRef] + Warr(i) = wit.next().asInstanceOf[AnyRef] i += delta } @@ -786,7 +786,7 @@ object SeqLike { var answer = -1 while (m+m0+n1-n0 <= m1) { while (i+m >= largest) { - cache(largest%(n1-n0)) = iter.next.asInstanceOf[AnyRef] + cache(largest%(n1-n0)) = iter.next().asInstanceOf[AnyRef] largest += 1 } if (Wopt(i) == cache((i+m)%(n1-n0))) { diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala index a6ebcc0e20..9fd24317f2 100644 --- a/src/library/scala/collection/SetLike.scala +++ b/src/library/scala/collection/SetLike.scala @@ -180,14 +180,14 @@ self => def hasNext = len <= elms.size || itr.hasNext def next = { if (!itr.hasNext) { - if (len > elms.size) Iterator.empty.next + if (len > elms.size) Iterator.empty.next() else { itr = new SubsetsItr(elms, len) len += 1 } } - itr.next + itr.next() } } @@ -205,11 +205,11 @@ self => def hasNext = _hasNext def next(): This = { - if (!hasNext) Iterator.empty.next + if (!hasNext) Iterator.empty.next() val buf = self.newBuilder idxs.slice(0, len) foreach (idx => buf += elms(idx)) - val result = buf.result + val result = buf.result() var i = len - 1 while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1 diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala index c81c16e8bb..86fcfac94d 100644 --- a/src/library/scala/collection/SortedMap.scala +++ b/src/library/scala/collection/SortedMap.scala @@ -40,13 +40,13 @@ object SortedMap extends SortedMapFactory[SortedMap] { val b = SortedMap.newBuilder[A, B1] b ++= this b += ((kv._1, kv._2)) - b.result + b.result() } override def - (key: A): SortedMap[A, B] = { val b = newBuilder for (kv <- this; if kv._1 != key) b += kv - b.result + b.result() } } diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index 7345ef8328..679e8e3e61 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -269,7 +269,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = { val b = cbf() b ++= seq - b.result + b.result() } def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = { @@ -277,7 +277,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { for (x <- self) b += x - b.result + b.result() } def mkString(start: String, sep: String, end: String): String = @@ -422,7 +422,7 @@ object TraversableOnce { def flatten: Iterator[A] = new AbstractIterator[A] { val its = travs.toIterator private var it: Iterator[A] = Iterator.empty - def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next.toIterator; hasNext } + def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next().toIterator; hasNext } def next(): A = if (hasNext) it.next() else Iterator.empty.next() } } diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 14b475dd1f..6bf9c1056a 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -437,7 +437,7 @@ extends MainNode[K, V] { val updmap = listmap - k if (updmap.size > 1) new LNode(updmap) else { - val (k, v) = updmap.iterator.next + val (k, v) = updmap.iterator.next() 
new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses } } diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala index 0f4506b5d5..b121f32ba6 100644 --- a/src/library/scala/collection/convert/Wrappers.scala +++ b/src/library/scala/collection/convert/Wrappers.scala @@ -27,9 +27,9 @@ private[collection] trait Wrappers { case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] { def hasNext = underlying.hasNext - def next() = underlying.next + def next() = underlying.next() def hasMoreElements = underlying.hasNext - def nextElement() = underlying.next + def nextElement() = underlying.next() def remove() = throw new UnsupportedOperationException } @@ -108,7 +108,7 @@ private[collection] trait Wrappers { val ui = underlying.iterator var prev: Option[A] = None def hasNext = ui.hasNext - def next = { val e = ui.next; prev = Some(e); e } + def next = { val e = ui.next(); prev = Some(e); e } def remove = prev match { case Some(e) => underlying match { @@ -180,7 +180,7 @@ private[collection] trait Wrappers { def hasNext = ui.hasNext def next() = { - val (k, v) = ui.next + val (k, v) = ui.next() prev = Some(k) new ju.Map.Entry[A, B] { import scala.util.hashing.byteswap32 diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala index e869bba51a..5a183c307b 100644 --- a/src/library/scala/collection/generic/GenMapFactory.scala +++ b/src/library/scala/collection/generic/GenMapFactory.scala @@ -44,7 +44,7 @@ abstract class GenMapFactory[CC[A, B] <: GenMap[A, B] with GenMapLike[A, B, CC[A * @tparam B the type of the associated values * @return a new $coll consisting key/value pairs given by `elems`. */ - def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result + def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result() /** The default builder for $Coll objects. * @tparam A the type of the keys diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala index b36dd3ccaf..0b8c9835da 100644 --- a/src/library/scala/collection/generic/GenTraversableFactory.scala +++ b/src/library/scala/collection/generic/GenTraversableFactory.scala @@ -73,7 +73,7 @@ extends GenericCompanion[CC] { b.sizeHint(xss.map(_.size).sum) for (xs <- xss.seq) b ++= xs - b.result + b.result() } /** Produces a $coll containing the results of some element computation a number of times. @@ -89,7 +89,7 @@ extends GenericCompanion[CC] { b += elem i += 1 } - b.result + b.result() } /** Produces a two-dimensional $coll containing the results of some element computation a number of times. @@ -147,7 +147,7 @@ extends GenericCompanion[CC] { b += f(i) i += 1 } - b.result + b.result() } /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. @@ -222,7 +222,7 @@ extends GenericCompanion[CC] { b += i i += step } - b.result + b.result() } /** Produces a $coll containing repeated applications of a function to a start value. 
@@ -246,6 +246,6 @@ extends GenericCompanion[CC] { b += acc } } - b.result + b.result() } } diff --git a/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/src/library/scala/collection/generic/GenericClassTagCompanion.scala index 76c12d118e..cdfee5252f 100644 --- a/src/library/scala/collection/generic/GenericClassTagCompanion.scala +++ b/src/library/scala/collection/generic/GenericClassTagCompanion.scala @@ -23,11 +23,11 @@ abstract class GenericClassTagCompanion[+CC[X] <: Traversable[X]] { def newBuilder[A](implicit ord: ClassTag[A]): Builder[A, CC[A]] - def empty[A: ClassTag]: CC[A] = newBuilder[A].result + def empty[A: ClassTag]: CC[A] = newBuilder[A].result() def apply[A](elems: A*)(implicit ord: ClassTag[A]): CC[A] = { val b = newBuilder[A] b ++= elems - b.result + b.result() } } diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala index b966ce51db..66052d0e6f 100644 --- a/src/library/scala/collection/generic/GenericCompanion.scala +++ b/src/library/scala/collection/generic/GenericCompanion.scala @@ -34,7 +34,7 @@ abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] { /** An empty collection of type `$Coll[A]` * @tparam A the type of the ${coll}'s elements */ - def empty[A]: CC[A] = newBuilder[A].result + def empty[A]: CC[A] = newBuilder[A].result() /** Creates a $coll with the specified elements. * @tparam A the type of the ${coll}'s elements @@ -46,7 +46,7 @@ abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] { else { val b = newBuilder[A] b ++= elems - b.result + b.result() } } } diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala index 094912c75a..7a0c0a63e8 100644 --- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala +++ b/src/library/scala/collection/generic/GenericOrderedCompanion.scala @@ -23,12 +23,12 @@ abstract class GenericOrderedCompanion[+CC[X] <: Traversable[X]] { def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] - def empty[A: Ordering]: CC[A] = newBuilder[A].result + def empty[A: Ordering]: CC[A] = newBuilder[A].result() def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = { val b = newBuilder[A] b ++= elems - b.result + b.result() } } diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala index f7a8a9aa88..908aa5b126 100644 --- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala +++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala @@ -88,7 +88,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew b1 += x b2 += y } - (b1.result, b2.result) + (b1.result(), b2.result()) } /** Converts this $coll of triples into three collections of the first, second, @@ -113,7 +113,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew b2 += y b3 += z } - (b1.result, b2.result, b3.result) + (b1.result(), b2.result(), b3.result()) } /** Converts this $coll of traversable collections into @@ -144,7 +144,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew val b = genericBuilder[B] for (xs <- sequential) b ++= asTraversable(xs).seq - b.result + b.result() } /** Transposes this $coll of traversable collections into @@ -161,7 +161,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] 
extends HasNew @migration("`transpose` throws an `IllegalArgumentException` if collections are not uniformly sized.", "2.9.0") def transpose[B](implicit asTraversable: A => /*<: val b = Map.newBuilder[A, B1] b ++= this b += ((kv._1, kv._2)) - b.result + b.result() } /** A default implementation which creates a new immutable map. @@ -46,7 +46,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self => override def - (key: A): Map[A, B] = { val b = newBuilder for (kv <- this.seq ; if kv._1 != key) b += kv - b.result + b.result() } } diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index fd23276c8d..def3d7eb23 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -100,7 +100,7 @@ class ListSet[A] extends AbstractSet[A] */ override def ++(xs: GenTraversableOnce[A]): ListSet[A] = if (xs.isEmpty) this - else (new ListSet.ListSetBuilder(this) ++= xs.seq).result + else (new ListSet.ListSetBuilder(this) ++= xs.seq).result() private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e) private[ListSet] def unchecked_outer: ListSet[A] = @@ -120,7 +120,7 @@ class ListSet[A] extends AbstractSet[A] that = that.tail res } - else Iterator.empty.next + else Iterator.empty.next() } /** diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala index 7e60f07847..1c2ab1c662 100644 --- a/src/library/scala/collection/immutable/MapLike.scala +++ b/src/library/scala/collection/immutable/MapLike.scala @@ -123,7 +123,7 @@ self => def transform[C, That](f: (A, B) => C)(implicit bf: CanBuildFrom[This, (A, C), That]): That = { val b = bf(repr) for ((key, value) <- this) b += ((key, f(key, value))) - b.result + b.result() } } diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala index 952107bf78..4069f6f0e4 100644 --- a/src/library/scala/collection/immutable/PagedSeq.scala +++ b/src/library/scala/collection/immutable/PagedSeq.scala @@ -30,7 +30,7 @@ object PagedSeq { new PagedSeq[T]((data: Array[T], start: Int, len: Int) => { var i = 0 while (i < len && source.hasNext) { - data(start + i) = source.next + data(start + i) = source.next() i += 1 } if (i == 0) -1 else i @@ -51,7 +51,7 @@ object PagedSeq { if (cnt == len) cnt else (more(data, start + cnt, len - cnt) max 0) + cnt } else if (source.hasNext) { - current = source.next + current = source.next() more(data, start, len) } else -1 new PagedSeq(more(_: Array[Char], _: Int, _: Int)) diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index d3ce3ab58c..19414f8e10 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -510,7 +510,7 @@ object RedBlackTree { */ private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { @tailrec def find(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) popNext + if (tree eq null) popNext() else find( if (ordering.lteq(key, tree.key)) goLeft(tree) else goRight(tree) diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala index 5e833f87af..73cc55df00 100644 --- a/src/library/scala/collection/immutable/SortedMap.scala +++ b/src/library/scala/collection/immutable/SortedMap.scala @@ -112,13 +112,13 @@ object SortedMap 
extends ImmutableSortedMapFactory[SortedMap] { val b = SortedMap.newBuilder[A, B1] b ++= this b += ((kv._1, kv._2)) - b.result + b.result() } override def - (key: A): SortedMap[A, B] = { val b = newBuilder for (kv <- this; if kv._1 != key) b += kv - b.result + b.result() } } } diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index e2719df531..0770bd3175 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -998,7 +998,7 @@ final class StreamIterator[+A] private() extends AbstractIterator[A] with Iterat def hasNext: Boolean = these.v.nonEmpty def next(): A = - if (isEmpty) Iterator.empty.next + if (isEmpty) Iterator.empty.next() else { val cur = these.v val result = cur.head diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index 663318330c..389e1579f2 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -58,8 +58,8 @@ self => val start = from max 0 val end = until min length - if (start >= end) newBuilder.result - else (newBuilder ++= toString.substring(start, end)).result + if (start >= end) newBuilder.result() + else (newBuilder ++= toString.substring(start, end)).result() } /** Return the current string concatenated `n` times. diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index a6a6b75c32..1093177172 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -108,7 +108,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi private[this] def countWhile(p: ((A, B)) => Boolean): Int = { var result = 0 val it = iterator - while (it.hasNext && p(it.next)) result += 1 + while (it.hasNext && p(it.next())) result += 1 result } override def dropWhile(p: ((A, B)) => Boolean) = drop(countWhile(p)) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 67668b3bef..26c3d44bbb 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -89,7 +89,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin private[this] def countWhile(p: A => Boolean): Int = { var result = 0 val it = iterator - while (it.hasNext && p(it.next)) result += 1 + while (it.hasNext && p(it.next())) result += 1 result } override def dropWhile(p: A => Boolean) = drop(countWhile(p)) diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala index 550f4cd7e0..dbe013d6e8 100644 --- a/src/library/scala/collection/immutable/TrieIterator.scala +++ b/src/library/scala/collection/immutable/TrieIterator.scala @@ -94,7 +94,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e def hasNext = (subIter ne null) || depth >= 0 def next(): T = { if (subIter ne null) { - val el = subIter.next + val el = subIter.next() if (!subIter.hasNext) subIter = null el @@ -135,7 +135,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e } else { subIter = m.iterator - next + next() } // The much slower version: // diff --git a/src/library/scala/collection/immutable/Vector.scala 
b/src/library/scala/collection/immutable/Vector.scala index abaffd9d6a..571e6775c8 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -104,7 +104,7 @@ override def companion: GenericCompanion[Vector] = Vector if (0 < i) { i -= 1 self(i) - } else Iterator.empty.next + } else Iterator.empty.next() } // TODO: reverse @@ -261,7 +261,7 @@ override def companion: GenericCompanion[Vector] = Vector //println("----- appendFront " + value + " at " + (startIndex - 1) + " reached block start") if (shift != 0) { // case A: we can shift right on the top level - debug + debug() //println("shifting right by " + shiftBlocks + " at level " + (depth-1) + " (had "+freeSpace+" free space)") if (depth > 1) { @@ -271,7 +271,7 @@ override def companion: GenericCompanion[Vector] = Vector s.initFrom(this) s.dirty = dirty s.shiftTopLevel(0, shiftBlocks) // shift right by n blocks - s.debug + s.debug() s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // maybe create pos; prepare for writing s.display0(lo) = value.asInstanceOf[AnyRef] //assert(depth == s.depth) @@ -289,7 +289,7 @@ override def companion: GenericCompanion[Vector] = Vector s.shiftTopLevel(0, shiftBlocks) // shift right by n elements s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // prepare for writing s.display0(shift-1) = value.asInstanceOf[AnyRef] - s.debug + s.debug() s } } else if (blockIndex < 0) { @@ -304,10 +304,10 @@ override def companion: GenericCompanion[Vector] = Vector val s = new Vector(startIndex - 1 + move, endIndex + move, newBlockIndex) s.initFrom(this) s.dirty = dirty - s.debug + s.debug() s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // could optimize: we know it will create a whole branch s.display0(lo) = value.asInstanceOf[AnyRef] - s.debug + s.debug() //assert(s.depth == depth+1) s } else { @@ -357,7 +357,7 @@ override def companion: GenericCompanion[Vector] = Vector //println("----- appendBack " + value + " at " + endIndex + " reached block end") if (shift != 0) { - debug + debug() //println("shifting left by " + shiftBlocks + " at level " + (depth-1) + " (had "+startIndex+" free space)") if (depth > 1) { val newBlockIndex = blockIndex - shift @@ -366,10 +366,10 @@ override def companion: GenericCompanion[Vector] = Vector s.initFrom(this) s.dirty = dirty s.shiftTopLevel(shiftBlocks, 0) // shift left by n blocks - s.debug + s.debug() s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) s.display0(lo) = value.asInstanceOf[AnyRef] - s.debug + s.debug() //assert(depth == s.depth) s } else { @@ -385,7 +385,7 @@ override def companion: GenericCompanion[Vector] = Vector s.shiftTopLevel(shiftBlocks, 0) // shift right by n elements s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) s.display0(32 - shift) = value.asInstanceOf[AnyRef] - s.debug + s.debug() s } } else { @@ -400,7 +400,7 @@ override def companion: GenericCompanion[Vector] = Vector //assert(s.depth == depth+1) might or might not create new level! 
if (s.depth == depth+1) { //println("creating new level " + s.depth + " (had "+0+" free space)") - s.debug + s.debug() } s } diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala index da63778fcc..878ea94987 100644 --- a/src/library/scala/collection/mutable/AVLTree.scala +++ b/src/library/scala/collection/mutable/AVLTree.scala @@ -229,11 +229,11 @@ private class AVLIterator[A](root: Node[A]) extends Iterator[A] { private def engageRight(): Unit = { if (Leaf != stack.head.right) { val right: Node[A] = stack.head.right.asInstanceOf[Node[A]] - stack.pop + stack.pop() stack.push(right) diveLeft() } else - stack.pop + stack.pop() } override def hasNext: Boolean = !stack.isEmpty diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala index 6b778b26f5..fcbfd27738 100644 --- a/src/library/scala/collection/mutable/ArrayOps.scala +++ b/src/library/scala/collection/mutable/ArrayOps.scala @@ -80,7 +80,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum) for (xs <- this) b ++= asTrav(xs) - b.result + b.result() } /** Transposes a two dimensional array. @@ -101,7 +101,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza } val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(ClassTag[Array[U]](elementClass)) for (b <- bs) bb += b.result - bb.result + bb.result() } def seq = thisCollection diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala index 33f6949662..334b26ae03 100644 --- a/src/library/scala/collection/mutable/ArraySeq.scala +++ b/src/library/scala/collection/mutable/ArraySeq.scala @@ -90,7 +90,7 @@ extends AbstractSeq[A] } override def clone(): ArraySeq[A] = { - val cloned = array.clone.asInstanceOf[Array[AnyRef]] + val cloned = array.clone().asInstanceOf[Array[AnyRef]] new ArraySeq[A](length) { override val array = cloned } diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala index 670558ab06..e05d668519 100644 --- a/src/library/scala/collection/mutable/ArrayStack.scala +++ b/src/library/scala/collection/mutable/ArrayStack.scala @@ -150,7 +150,7 @@ extends AbstractSeq[T] * * @param f The function to drain to. */ - def drain(f: T => Unit) = while (!isEmpty) f(pop) + def drain(f: T => Unit) = while (!isEmpty) f(pop()) /** Pushes all the provided elements in the traversable object onto the stack. * @@ -190,7 +190,7 @@ extends AbstractSeq[T] * * @param f The function to apply to the top two elements. */ - def combine(f: (T, T) => T): Unit = push(f(pop, pop)) + def combine(f: (T, T) => T): Unit = push(f(pop(), pop())) /** Repeatedly combine the top elements of the stack until the stack contains only * one element. 
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala index 5935a2858a..322522fdd2 100644 --- a/src/library/scala/collection/mutable/BufferLike.scala +++ b/src/library/scala/collection/mutable/BufferLike.scala @@ -198,7 +198,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]] case Remove(Index(n), x) => if (this(n) == x) remove(n) case Remove(NoLo, x) => this -= x - case Reset() => clear + case Reset() => clear() case s: Script[_] => s.iterator foreach << case _ => throw new UnsupportedOperationException("message " + cmd + " not understood") } @@ -260,6 +260,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]] override def clone(): This = { val bf = newBuilder bf ++= this - bf.result.asInstanceOf[This] + bf.result().asInstanceOf[This] } } diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala index ade0b94230..d3f96f69ad 100644 --- a/src/library/scala/collection/mutable/BufferProxy.scala +++ b/src/library/scala/collection/mutable/BufferProxy.scala @@ -124,7 +124,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy { /** Clears the buffer contents. */ - def clear() { self.clear } + def clear() { self.clear() } /** Send a message to this scriptable object. * diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala index 5c0681df1d..75560580cc 100644 --- a/src/library/scala/collection/mutable/Builder.scala +++ b/src/library/scala/collection/mutable/Builder.scala @@ -121,7 +121,7 @@ trait Builder[-Elem, +To] extends Growable[Elem] { override def ++=(xs: TraversableOnce[Elem]): this.type = { self ++= xs; this } override def sizeHint(size: Int) = self.sizeHint(size) override def sizeHintBounded(size: Int, boundColl: TraversableLike[_, _]) = self.sizeHintBounded(size, boundColl) - def result: NewTo = f(self.result) + def result: NewTo = f(self.result()) } } diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala index 18a1e234f6..a106794912 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedList.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala @@ -68,7 +68,7 @@ class DoubleLinkedList[A]() extends AbstractSeq[A] override def clone(): DoubleLinkedList[A] = { val builder = newBuilder builder ++= this - builder.result + builder.result() } } diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala index 7f4a8d1cbd..4ffc5be7ad 100644 --- a/src/library/scala/collection/mutable/FlatHashTable.scala +++ b/src/library/scala/collection/mutable/FlatHashTable.scala @@ -208,7 +208,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { } def next(): A = if (hasNext) { i += 1; entryToElem(table(i - 1)) } - else Iterator.empty.next + else Iterator.empty.next() } private def growTable() { @@ -358,7 +358,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { seedvalue = c.seedvalue sizemap = c.sizemap } - if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild + if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild() } } diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala index 3cd7f07d83..6943967791 100644 --- a/src/library/scala/collection/mutable/HashMap.scala +++ 
b/src/library/scala/collection/mutable/HashMap.scala @@ -111,21 +111,21 @@ extends AbstractMap[A, B] override def keysIterator: Iterator[A] = new AbstractIterator[A] { val iter = entriesIterator def hasNext = iter.hasNext - def next() = iter.next.key + def next() = iter.next().key } /* Override to avoid tuple allocation */ override def valuesIterator: Iterator[B] = new AbstractIterator[B] { val iter = entriesIterator def hasNext = iter.hasNext - def next() = iter.next.value + def next() = iter.next().value } /** Toggles whether a size map is used to track hash map statistics. */ def useSizeMap(t: Boolean) = if (t) { - if (!isSizeMapDefined) sizeMapInitAndRebuild - } else sizeMapDisable + if (!isSizeMapDefined) sizeMapInitAndRebuild() + } else sizeMapDisable() protected def createNewEntry[B1](key: A, value: B1): Entry = { new Entry(key, value.asInstanceOf[B]) diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala index c4c68fdb7a..753f7f8d01 100644 --- a/src/library/scala/collection/mutable/HashSet.scala +++ b/src/library/scala/collection/mutable/HashSet.scala @@ -92,8 +92,8 @@ extends AbstractSet[A] /** Toggles whether a size map is used to track hash map statistics. */ def useSizeMap(t: Boolean) = if (t) { - if (!isSizeMapDefined) sizeMapInitAndRebuild - } else sizeMapDisable + if (!isSizeMapDefined) sizeMapInitAndRebuild() + } else sizeMapDisable() } diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala index 23b68b7969..83ffc4a030 100644 --- a/src/library/scala/collection/mutable/HashTable.scala +++ b/src/library/scala/collection/mutable/HashTable.scala @@ -365,7 +365,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU seedvalue = c.seedvalue sizemap = c.sizemap } - if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild + if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild() } private[collection] def hashTableContents = new HashTable.Contents( diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala index 2b8d1922b8..34e8f7d5b8 100644 --- a/src/library/scala/collection/mutable/History.scala +++ b/src/library/scala/collection/mutable/History.scala @@ -41,7 +41,7 @@ extends AbstractIterable[(Pub, Evt)] */ def notify(pub: Pub, event: Evt) { if (log.length >= maxHistory) - log.dequeue + log.dequeue() log.enqueue((pub, event)) } @@ -50,7 +50,7 @@ extends AbstractIterable[(Pub, Evt)] def iterator: Iterator[(Pub, Evt)] = log.iterator def events: Iterator[Evt] = log.iterator map (_._2) - def clear() { log.clear } + def clear() { log.clear() } /** Checks if two history objects are structurally identical. 
* @@ -60,5 +60,5 @@ extends AbstractIterable[(Pub, Evt)] case that: History[_, _] => this.log equals that.log case _ => false } - override def hashCode = log.hashCode + override def hashCode = log.hashCode() } diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala index da2c36ac2d..14f30d74e8 100644 --- a/src/library/scala/collection/mutable/LinkedHashMap.scala +++ b/src/library/scala/collection/mutable/LinkedHashMap.scala @@ -92,7 +92,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B] def hasNext = cur ne null def next = if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res } - else Iterator.empty.next + else Iterator.empty.next() } protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) { @@ -118,7 +118,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B] def hasNext = cur ne null def next = if (hasNext) { val res = cur.key; cur = cur.later; res } - else Iterator.empty.next + else Iterator.empty.next() } override def valuesIterator: Iterator[B] = new AbstractIterator[B] { @@ -126,7 +126,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B] def hasNext = cur ne null def next = if (hasNext) { val res = cur.value; cur = cur.later; res } - else Iterator.empty.next + else Iterator.empty.next() } override def foreach[U](f: ((A, B)) => U) { diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala index 1723258433..5641a78d46 100644 --- a/src/library/scala/collection/mutable/LinkedHashSet.scala +++ b/src/library/scala/collection/mutable/LinkedHashSet.scala @@ -78,7 +78,7 @@ class LinkedHashSet[A] extends AbstractSet[A] def hasNext = cur ne null def next = if (hasNext) { val res = cur.key; cur = cur.later; res } - else Iterator.empty.next + else Iterator.empty.next() } override def foreach[U](f: A => U) { diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala index b3470ed3cd..3003080060 100644 --- a/src/library/scala/collection/mutable/LinkedListLike.scala +++ b/src/library/scala/collection/mutable/LinkedListLike.scala @@ -185,6 +185,6 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq override def clone(): This = { val bf = newBuilder bf ++= this - bf.result + bf.result() } } diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala index fd92d2e555..03110569c4 100644 --- a/src/library/scala/collection/mutable/MutableList.scala +++ b/src/library/scala/collection/mutable/MutableList.scala @@ -148,7 +148,7 @@ extends AbstractSeq[A] override def clone(): MutableList[A] = { val bf = newBuilder bf ++= seq - bf.result + bf.result() } } diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala index bcaf977727..7a2fce9128 100644 --- a/src/library/scala/collection/mutable/ObservableBuffer.scala +++ b/src/library/scala/collection/mutable/ObservableBuffer.scala @@ -65,7 +65,7 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa } abstract override def clear(): Unit = { - super.clear + super.clear() publish(new Reset with Undoable { def undo() { throw new UnsupportedOperationException("cannot undo") } }) diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala index 
d81c90bf4c..3544275300 100644 --- a/src/library/scala/collection/mutable/ObservableMap.scala +++ b/src/library/scala/collection/mutable/ObservableMap.scala @@ -60,7 +60,7 @@ trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with } abstract override def clear(): Unit = { - super.clear + super.clear() publish(new Reset with Undoable { def undo(): Unit = throw new UnsupportedOperationException("cannot undo") }) diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala index 3e79506413..81580316ff 100644 --- a/src/library/scala/collection/mutable/ObservableSet.scala +++ b/src/library/scala/collection/mutable/ObservableSet.scala @@ -44,7 +44,7 @@ trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable] } abstract override def clear(): Unit = { - super.clear + super.clear() publish(new Reset with Undoable { def undo(): Unit = throw new UnsupportedOperationException("cannot undo") }) diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala index ad001fd79c..a0aea43121 100644 --- a/src/library/scala/collection/mutable/OpenHashMap.scala +++ b/src/library/scala/collection/mutable/OpenHashMap.scala @@ -124,7 +124,7 @@ extends AbstractMap[Key, Value] put(key, hashOf(key), value) private def put(key: Key, hash: Int, value: Value): Option[Value] = { - if (2 * (size + deleted) > mask) growTable + if (2 * (size + deleted) > mask) growTable() val index = findIndex(key, hash) val entry = table(index) if (entry == null) { diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index f59cbe878c..4e8b923155 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -134,11 +134,11 @@ class PriorityQueue[A](implicit val ord: Ordering[A]) throw new NoSuchElementException("no element to remove from heap") def dequeueAll[A1 >: A, That](implicit bf: CanBuildFrom[_, A1, That]): That = { - val b = bf.apply + val b = bf.apply() while (nonEmpty) { b += dequeue() } - b.result + b.result() } /** Returns the element with the highest priority in the queue, diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala index 52a3755007..ee54370731 100644 --- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala +++ b/src/library/scala/collection/mutable/PriorityQueueProxy.scala @@ -66,7 +66,7 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority * * @return the element with the highest priority. */ - override def dequeue(): A = self.dequeue + override def dequeue(): A = self.dequeue() /** Returns the element with the highest priority in the queue, * or throws an error if there is no element contained in the queue. @@ -78,7 +78,7 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority /** Removes all elements from the queue. After this operation is completed, * the queue will be empty. */ - override def clear(): Unit = self.clear + override def clear(): Unit = self.clear() /** Returns a regular queue containing the same elements. 
*/ diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala index e31205b477..8c2ef0d3a3 100644 --- a/src/library/scala/collection/mutable/Publisher.scala +++ b/src/library/scala/collection/mutable/Publisher.scala @@ -45,7 +45,7 @@ trait Publisher[Evt] { def suspendSubscription(sub: Sub) { suspended += sub } def activateSubscription(sub: Sub) { suspended -= sub } def removeSubscription(sub: Sub) { filters -= sub } - def removeSubscriptions() { filters.clear } + def removeSubscriptions() { filters.clear() } protected def publish(event: Evt) { filters.keys.foreach(sub => diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala index b947fa3cca..f1a5723818 100644 --- a/src/library/scala/collection/mutable/Queue.scala +++ b/src/library/scala/collection/mutable/Queue.scala @@ -178,7 +178,7 @@ extends MutableList[A] override def clone(): Queue[A] = { val bf = newBuilder bf ++= seq - bf.result + bf.result() } private[this] def decrementLength() { diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala index c286a340e3..051b1219cd 100644 --- a/src/library/scala/collection/mutable/QueueProxy.scala +++ b/src/library/scala/collection/mutable/QueueProxy.scala @@ -67,7 +67,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy { * * @return the first element of the queue. */ - override def dequeue(): A = self.dequeue + override def dequeue(): A = self.dequeue() /** Returns the first element in the queue, or throws an error if there * is no element contained in the queue. @@ -79,7 +79,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy { /** Removes all elements from the queue. After this operation is completed, * the queue will be empty. */ - override def clear(): Unit = self.clear + override def clear(): Unit = self.clear() /** Returns an iterator over all elements on the queue. 
* diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala index 5544a21a55..9b8554669b 100644 --- a/src/library/scala/collection/mutable/RevertibleHistory.scala +++ b/src/library/scala/collection/mutable/RevertibleHistory.scala @@ -30,7 +30,7 @@ class RevertibleHistory[Evt <: Undoable, Pub] extends History[Evt, Pub] with Und */ def undo(): Unit = { val old = log.toList.reverse - clear - old.foreach { case (sub, event) => event.undo } + clear() + old.foreach { case (sub, event) => event.undo() } } } diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala index 4a907e7dc4..8dfcde16ce 100644 --- a/src/library/scala/collection/mutable/SetLike.scala +++ b/src/library/scala/collection/mutable/SetLike.scala @@ -210,7 +210,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]] def <<(cmd: Message[A]): Unit = cmd match { case Include(_, x) => this += x case Remove(_, x) => this -= x - case Reset() => clear + case Reset() => clear() case s: Script[_] => s.iterator foreach << case _ => throw new UnsupportedOperationException("message " + cmd + " not understood") } diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala index 16f13ff42c..8792738339 100644 --- a/src/library/scala/collection/mutable/StackProxy.scala +++ b/src/library/scala/collection/mutable/StackProxy.scala @@ -69,13 +69,13 @@ trait StackProxy[A] extends Stack[A] with Proxy { /** Removes the top element from the stack. */ - override def pop(): A = self.pop + override def pop(): A = self.pop() /** * Removes all elements from the stack. After this operation completed, * the stack will be empty. */ - override def clear(): Unit = self.clear + override def clear(): Unit = self.clear() /** Returns an iterator over all elements on the stack. This iterator * is stable with respect to state changes in the stack object; i.e. diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala index bf9a70c5b7..14ec85b906 100644 --- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala +++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala @@ -157,7 +157,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] { /** Clears the buffer contents. */ abstract override def clear(): Unit = synchronized { - super.clear + super.clear() } override def <<(cmd: Message[A]): Unit = synchronized { diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala index 0065d4c556..52e55677bd 100644 --- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala +++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala @@ -64,7 +64,7 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu * * @return the element with the highest priority. */ - override def dequeue(): A = synchronized { super.dequeue } + override def dequeue(): A = synchronized { super.dequeue() } /** Returns the element with the highest priority in the queue, * or throws an error if there is no element contained in the queue. @@ -76,7 +76,7 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu /** Removes all elements from the queue. After this operation is completed, * the queue will be empty. 
*/ - override def clear(): Unit = synchronized { super.clear } + override def clear(): Unit = synchronized { super.clear() } /** Returns an iterator which yield all the elements of the priority * queue in descending priority order. diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala index c5f133eec7..57beab39b6 100644 --- a/src/library/scala/collection/mutable/SynchronizedQueue.scala +++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala @@ -56,7 +56,7 @@ class SynchronizedQueue[A] extends Queue[A] { * * @return the first element of the queue. */ - override def dequeue(): A = synchronized { super.dequeue } + override def dequeue(): A = synchronized { super.dequeue() } /** Returns the first element in the queue which satisfies the * given predicate, and removes this element from the queue. @@ -85,7 +85,7 @@ class SynchronizedQueue[A] extends Queue[A] { /** Removes all elements from the queue. After this operation is completed, * the queue will be empty. */ - override def clear(): Unit = synchronized { super.clear } + override def clear(): Unit = synchronized { super.clear() } /** Checks if two queues are structurally identical. * diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala index bc9873880c..27a696895d 100644 --- a/src/library/scala/collection/mutable/SynchronizedSet.scala +++ b/src/library/scala/collection/mutable/SynchronizedSet.scala @@ -69,7 +69,7 @@ trait SynchronizedSet[A] extends Set[A] { } abstract override def clear(): Unit = synchronized { - super.clear + super.clear() } override def subsetOf(that: scala.collection.GenSet[A]) = synchronized { diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala index 5d7c9f6073..09cdcca99e 100644 --- a/src/library/scala/collection/mutable/SynchronizedStack.scala +++ b/src/library/scala/collection/mutable/SynchronizedStack.scala @@ -67,13 +67,13 @@ class SynchronizedStack[A] extends Stack[A] { /** Removes the top element from the stack. */ - override def pop(): A = synchronized { super.pop } + override def pop(): A = synchronized { super.pop() } /** * Removes all elements from the stack. After this operation completed, * the stack will be empty. */ - override def clear(): Unit = synchronized { super.clear } + override def clear(): Unit = synchronized { super.clear() } /** Returns an iterator over all elements on the stack. This iterator * is stable with respect to state changes in the stack object; i.e. 
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala index 9b48c8f24f..ac634f43aa 100644 --- a/src/library/scala/collection/mutable/UnrolledBuffer.scala +++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala @@ -87,7 +87,7 @@ extends scala.collection.mutable.AbstractBuffer[T] // `that` is no longer usable, so clear it // here we rely on the fact that `clear` allocates // new nodes instead of modifying the previous ones - that.clear + that.clear() // return a reference to this this @@ -123,7 +123,7 @@ extends scala.collection.mutable.AbstractBuffer[T] val r = node.array(pos) scan() r - } else Iterator.empty.next + } else Iterator.empty.next() } // this should be faster than the iterator diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala index 00993c09ff..00e20e7616 100644 --- a/src/library/scala/collection/parallel/Combiner.scala +++ b/src/library/scala/collection/parallel/Combiner.scala @@ -86,7 +86,7 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { * if this is applicable. */ def resultWithTaskSupport: To = { - val res = result + val res = result() setTaskSupport(res, combinerTaskSupport) } diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 33af99067d..f0b0fd2aa0 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -214,7 +214,7 @@ self: ParIterableLike[T, Repr, Sequential] => def nonEmpty = size != 0 - def head = iterator.next + def head = iterator.next() def headOption = if (nonEmpty) Some(head) else None @@ -627,7 +627,7 @@ self: ParIterableLike[T, Repr, Sequential] => val b = bf(repr) this.splitter.copy2builder[U, That, Builder[U, That]](b) for (elem <- that.seq) b += elem - setTaskSupport(b.result, tasksupport) + setTaskSupport(b.result(), tasksupport) } } @@ -728,7 +728,7 @@ self: ParIterableLike[T, Repr, Sequential] => tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => bf(repr).asCombiner)) mapResult { cb => cb.resultWithTaskSupport }) - }) else setTaskSupport((bf(repr) += z).result, tasksupport) + }) else setTaskSupport((bf(repr) += z).result(), tasksupport) } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport) @@ -904,7 +904,7 @@ self: ParIterableLike[T, Repr, Sequential] => protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp] def shouldSplitFurther = pit.shouldSplitFurther(self.repr, tasksupport.parallelismLevel) def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure - private[parallel] override def signalAbort = pit.abort + private[parallel] override def signalAbort = pit.abort() override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")" } @@ -921,8 +921,8 @@ self: ParIterableLike[T, Repr, Sequential] => def combineResults(fr: FR, sr: SR): R @volatile var result: R = null.asInstanceOf[R] private[parallel] override def signalAbort() { - ft.signalAbort - st.signalAbort + ft.signalAbort() + st.signalAbort() } protected def mergeSubtasks() { ft mergeThrowables st @@ -938,7 +938,7 @@ self: ParIterableLike[T, Repr, Sequential] => def leaf(prevr: Option[R]) = { 
tasksupport.executeAndWaitResult(ft) : Any tasksupport.executeAndWaitResult(st) : Any - mergeSubtasks + mergeSubtasks() } } @@ -950,7 +950,7 @@ self: ParIterableLike[T, Repr, Sequential] => val ftfuture: () => Any = tasksupport.execute(ft) tasksupport.executeAndWaitResult(st) : Any ftfuture() - mergeSubtasks + mergeSubtasks() } } @@ -963,7 +963,7 @@ self: ParIterableLike[T, Repr, Sequential] => result = map(initialResult) } private[parallel] override def signalAbort() { - inner.signalAbort + inner.signalAbort() } override def requiresStrictSplitters = inner.requiresStrictSplitters } @@ -1085,7 +1085,7 @@ self: ParIterableLike[T, Repr, Sequential] => protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Forall] { @volatile var result: Boolean = true - def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort } + def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort() } protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p) override def merge(that: Forall) = result = result && that.result } @@ -1093,7 +1093,7 @@ self: ParIterableLike[T, Repr, Sequential] => protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Exists] { @volatile var result: Boolean = false - def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort } + def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort() } protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p) override def merge(that: Exists) = result = result || that.result } @@ -1101,7 +1101,7 @@ self: ParIterableLike[T, Repr, Sequential] => protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Find[U]] { @volatile var result: Option[U] = None - def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort } + def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort() } protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p) override def merge(that: Find[U]) = if (this.result == None) result = that.result } @@ -1153,7 +1153,7 @@ self: ParIterableLike[T, Repr, Sequential] => // note: HashMapCombiner doesn't merge same keys until evaluation val cb = mcf() while (pit.hasNext) { - val elem = pit.next + val elem = pit.next() cb += f(elem) -> elem } result = cb @@ -1489,7 +1489,7 @@ self: ParIterableLike[T, Repr, Sequential] => def debugBuffer: ArrayBuffer[String] = null private[parallel] def debugclear() = synchronized { - debugBuffer.clear + debugBuffer.clear() } private[parallel] def debuglog(s: String) = synchronized { diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala index b2105e1e9e..aaf83e49af 100644 --- a/src/library/scala/collection/parallel/ParIterableViewLike.scala +++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala @@ -140,7 +140,7 @@ self => } otherwise { val b = bf(underlying) b ++= this.iterator - b.result + b.result() } /* wrapper virtual ctors */ diff --git a/src/library/scala/collection/parallel/ParMapLike.scala 
b/src/library/scala/collection/parallel/ParMapLike.scala index 56594bec96..798ba71b95 100644 --- a/src/library/scala/collection/parallel/ParMapLike.scala +++ b/src/library/scala/collection/parallel/ParMapLike.scala @@ -67,7 +67,7 @@ self => i => val iter = s def hasNext = iter.hasNext - def next() = iter.next._1 + def next() = iter.next()._1 def split = { val ss = iter.split.map(keysIterator(_)) ss.foreach { _.signalDelegate = i.signalDelegate } @@ -84,7 +84,7 @@ self => i => val iter = s def hasNext = iter.hasNext - def next() = iter.next._2 + def next() = iter.next()._2 def split = { val ss = iter.split.map(valuesIterator(_)) ss.foreach { _.signalDelegate = i.signalDelegate } diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala index 4aaadbaac5..68bc1bc12c 100644 --- a/src/library/scala/collection/parallel/ParSeqLike.scala +++ b/src/library/scala/collection/parallel/ParSeqLike.scala @@ -68,7 +68,7 @@ self => val x = self(i) i += 1 x - } else Iterator.empty.next + } else Iterator.empty.next() def head = self(i) @@ -228,7 +228,7 @@ self => b ++= pits(0) b ++= patch b ++= pits(2) - setTaskSupport(b.result, tasksupport) + setTaskSupport(b.result(), tasksupport) } def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) { @@ -423,7 +423,7 @@ self => @volatile var result: Boolean = true def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { result = pit.sameElements(otherpit) - if (!result) pit.abort + if (!result) pit.abort() } protected[this] def newSubtask(p: SuperParIterator) = unsupported override def split = { @@ -471,7 +471,7 @@ self => @volatile var result: Boolean = true def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { result = pit.corresponds(corr)(otherpit) - if (!result) pit.abort + if (!result) pit.abort() } protected[this] def newSubtask(p: SuperParIterator) = unsupported override def split = { diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala index d03b377860..22773464ed 100644 --- a/src/library/scala/collection/parallel/ParSeqViewLike.scala +++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala @@ -173,7 +173,7 @@ self => } otherwise { val b = bf(underlying) b ++= this.iterator - b.result + b.result() } /* tasks */ diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala index 726f5a2e93..a3a47e2e40 100644 --- a/src/library/scala/collection/parallel/RemainsIterator.scala +++ b/src/library/scala/collection/parallel/RemainsIterator.scala @@ -47,47 +47,47 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ override def count(p: T => Boolean): Int = { var i = 0 - while (hasNext) if (p(next)) i += 1 + while (hasNext) if (p(next())) i += 1 i } override def reduce[U >: T](op: (U, U) => U): U = { - var r: U = next - while (hasNext) r = op(r, next) + var r: U = next() + while (hasNext) r = op(r, next()) r } override def fold[U >: T](z: U)(op: (U, U) => U): U = { var r = z - while (hasNext) r = op(r, next) + while (hasNext) r = op(r, next()) r } override def sum[U >: T](implicit num: Numeric[U]): U = { var r: U = num.zero - while (hasNext) r = num.plus(r, next) + while (hasNext) r = num.plus(r, next()) r } override def product[U >: T](implicit num: Numeric[U]): U = { var r: U = num.one - while (hasNext) r = num.times(r, next) + while (hasNext) r 
= num.times(r, next()) r } override def min[U >: T](implicit ord: Ordering[U]): T = { - var r = next + var r = next() while (hasNext) { - val curr = next + val curr = next() if (ord.lteq(curr, r)) r = curr } r } override def max[U >: T](implicit ord: Ordering[U]): T = { - var r = next + var r = next() while (hasNext) { - val curr = next + val curr = next() if (ord.gteq(curr, r)) r = curr } r @@ -97,16 +97,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ var i = from val until = from + len while (i < until && hasNext) { - array(i) = next + array(i) = next() i += 1 } } def reduceLeft[U >: T](howmany: Int, op: (U, U) => U): U = { var i = howmany - 1 - var u: U = next + var u: U = next() while (i > 0 && hasNext) { - u = op(u, next) + u = op(u, next()) i -= 1 } u @@ -117,7 +117,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { //val cb = pbf(repr) if (isRemainingCheap) cb.sizeHint(remaining) - while (hasNext) cb += f(next) + while (hasNext) cb += f(next()) cb } @@ -125,7 +125,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ //val cb = pbf(repr) val runWith = pf.runWith(cb += _) while (hasNext) { - val curr = next + val curr = next() runWith(curr) } cb @@ -134,7 +134,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = { //val cb = pbf(repr) while (hasNext) { - val traversable = f(next).seq + val traversable = f(next()).seq if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator else cb ++= traversable } @@ -149,7 +149,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def filter2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = { while (hasNext) { - val curr = next + val curr = next() if (pred(curr)) cb += curr } cb @@ -157,7 +157,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def filterNot2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = { while (hasNext) { - val curr = next + val curr = next() if (!pred(curr)) cb += curr } cb @@ -165,7 +165,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def partition2combiners[U >: T, This](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = { while (hasNext) { - val curr = next + val curr = next() if (pred(curr)) btrue += curr else bfalse += curr } @@ -215,7 +215,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def takeWhile2combiner[U >: T, This](p: T => Boolean, cb: Combiner[U, This]) = { var loop = true while (hasNext && loop) { - val curr = next + val curr = next() if (p(curr)) cb += curr else loop = false } @@ -225,7 +225,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def span2combiners[U >: T, This](p: T => Boolean, before: Combiner[U, This], after: Combiner[U, This]) = { var isBefore = true while (hasNext && isBefore) { - val curr = next + val curr = next() if (p(curr)) before += curr else { if (isRemainingCheap) after.sizeHint(remaining + 1) @@ -241,7 +241,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ var last = z var i = from while (hasNext) { - last = op(last, 
next) + last = op(last, next()) array(i) = last i += 1 } @@ -250,7 +250,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def scanToCombiner[U >: T, That](startValue: U, op: (U, U) => U, cb: Combiner[U, That]) = { var curr = startValue while (hasNext) { - curr = op(curr, next) + curr = op(curr, next()) cb += curr } cb @@ -260,7 +260,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ var curr = startValue var left = howmany while (left > 0) { - curr = op(curr, next) + curr = op(curr, next()) cb += curr left -= 1 } @@ -270,16 +270,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining) while (hasNext && otherpit.hasNext) { - cb += ((next, otherpit.next)) + cb += ((next(), otherpit.next())) } cb } def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining) - while (this.hasNext && that.hasNext) cb += ((this.next, that.next)) - while (this.hasNext) cb += ((this.next, thatelem)) - while (that.hasNext) cb += ((thiselem, that.next)) + while (this.hasNext && that.hasNext) cb += ((this.next(), that.next())) + while (this.hasNext) cb += ((this.next(), thatelem)) + while (that.hasNext) cb += ((thiselem, that.next())) cb } @@ -299,7 +299,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter var total = 0 var loop = true while (hasNext && loop) { - if (pred(next)) total += 1 + if (pred(next())) total += 1 else loop = false } total @@ -309,7 +309,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter var i = 0 var loop = true while (hasNext && loop) { - if (pred(next)) loop = false + if (pred(next())) loop = false else i += 1 } if (loop) -1 else i @@ -319,7 +319,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter var pos = -1 var i = 0 while (hasNext) { - if (pred(next)) pos = i + if (pred(next())) pos = i i += 1 } pos @@ -327,7 +327,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter def corresponds[S](corr: (T, S) => Boolean)(that: Iterator[S]): Boolean = { while (hasNext && that.hasNext) { - if (!corr(next, that.next)) return false + if (!corr(next(), that.next())) return false } hasNext == that.hasNext } @@ -349,7 +349,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter //val cb = cbf(repr) if (isRemainingCheap) cb.sizeHint(remaining) var lst = List[S]() - while (hasNext) lst ::= f(next) + while (hasNext) lst ::= f(next()) while (lst != Nil) { cb += lst.head lst = lst.tail @@ -364,7 +364,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter while (hasNext) { if (j == index) { cb += elem - next + next() } else cb += next j += 1 } @@ -439,7 +439,7 @@ self => class Taken(taken: Int) extends IterableSplitter[T] { var remaining = taken min self.remaining def hasNext = remaining > 0 - def next = { remaining -= 1; self.next } + def next = { remaining -= 1; self.next() } def dup: IterableSplitter[T] = self.dup.take(taken) def split: Seq[IterableSplitter[T]] = takeSeq(self.split) { (p, n) => p.take(n) } protected[this] def 
takeSeq[PI <: IterableSplitter[T]](sq: Seq[PI])(taker: (PI, Int) => PI) = { @@ -467,7 +467,7 @@ self => class Mapped[S](f: T => S) extends IterableSplitter[S] { signalDelegate = self.signalDelegate def hasNext = self.hasNext - def next = f(self.next) + def next = f(self.next()) def remaining = self.remaining def dup: IterableSplitter[S] = self.dup map f def split: Seq[IterableSplitter[S]] = self.split.map { _ map f } @@ -484,8 +484,8 @@ self => } else false def next = if (curr eq self) { hasNext - curr.next - } else curr.next + curr.next() + } else curr.next() def remaining = if (curr eq self) curr.remaining + that.remaining else curr.remaining protected def firstNonEmpty = (curr eq self) && curr.hasNext def dup: IterableSplitter[U] = self.dup.appendParIterable[U, PI](that) @@ -497,7 +497,7 @@ self => class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] { signalDelegate = self.signalDelegate def hasNext = self.hasNext && that.hasNext - def next = (self.next, that.next) + def next = (self.next(), that.next()) def remaining = self.remaining min that.remaining def dup: IterableSplitter[(T, S)] = self.dup.zipParSeq(that) def split: Seq[IterableSplitter[(T, S)]] = { @@ -515,9 +515,9 @@ self => signalDelegate = self.signalDelegate def hasNext = self.hasNext || that.hasNext def next = if (self.hasNext) { - if (that.hasNext) (self.next, that.next) - else (self.next, thatelem) - } else (thiselem, that.next) + if (that.hasNext) (self.next(), that.next()) + else (self.next(), thatelem) + } else (thiselem, that.next()) def remaining = self.remaining max that.remaining def dup: IterableSplitter[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem) diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala index dc49bcf9d7..458742df96 100644 --- a/src/library/scala/collection/parallel/Splitter.scala +++ b/src/library/scala/collection/parallel/Splitter.scala @@ -52,7 +52,7 @@ trait Splitter[+T] extends Iterator[T] { object Splitter { def empty[T]: Splitter[T] = new Splitter[T] { def hasNext = false - def next = Iterator.empty.next + def next = Iterator.empty.next() def split = Seq(this) } } diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala index ec1bcbb27a..441c4269c3 100644 --- a/src/library/scala/collection/parallel/Tasks.scala +++ b/src/library/scala/collection/parallel/Tasks.scala @@ -54,13 +54,13 @@ trait Task[R, +Tp] { leaf(lastres) result = result // ensure that effects of `leaf` are visible to readers of `result` } catchBreak { - signalAbort + signalAbort() } } catch { case thr: Exception => result = result // ensure that effects of `leaf` are visible throwable = thr - signalAbort + signalAbort() } } @@ -302,7 +302,7 @@ trait ThreadPoolTasks extends Tasks { () => { t.sync() - t.body.forwardThrowable + t.body.forwardThrowable() t.body.result } } @@ -314,7 +314,7 @@ trait ThreadPoolTasks extends Tasks { t.start() t.sync() - t.body.forwardThrowable + t.body.forwardThrowable() t.body.result } @@ -402,8 +402,8 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool { } () => { - fjtask.sync - fjtask.body.forwardThrowable + fjtask.sync() + fjtask.body.forwardThrowable() fjtask.body.result } } @@ -424,9 +424,9 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool { forkJoinPool.execute(fjtask) } - fjtask.sync + fjtask.sync() // if (fjtask.body.throwable != null) println("throwing: " + fjtask.body.throwable + " at " + fjtask.body) - 
fjtask.body.forwardThrowable + fjtask.body.forwardThrowable() fjtask.body.result } diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala index b25230bbeb..f3be47ea03 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala @@ -109,7 +109,7 @@ self => } def next(): (K, V) = { i += 1 - val r = triter.next + val r = triter.next() r } def hasNext: Boolean = { diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala index e7e64eb2ad..4f34993b85 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala @@ -106,7 +106,7 @@ self => } def next(): T = { i += 1 - triter.next + triter.next() } def hasNext: Boolean = { i < sz diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala index a3f473c6a7..78cde1724b 100644 --- a/src/library/scala/collection/parallel/immutable/ParRange.scala +++ b/src/library/scala/collection/parallel/immutable/ParRange.scala @@ -60,7 +60,7 @@ self => val r = range.apply(ind) ind += 1 r - } else Iterator.empty.next + } else Iterator.empty.next() private def rangeleft = range.drop(ind) diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index 0e9eac62e2..68c43e682e 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -226,7 +226,7 @@ self => if (all) i = nextuntil else { i = until - abort + abort() } if (isAborted) return false @@ -254,7 +254,7 @@ self => some = exists_quick(p, array, nextuntil, i) if (some) { i = until - abort + abort() } else i = nextuntil if (isAborted) return true @@ -283,7 +283,7 @@ self => if (r != None) { i = until - abort + abort() } else i = nextuntil if (isAborted) return r diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala index b151e45d65..aa790dd548 100644 --- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala +++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala @@ -48,7 +48,7 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] { idx += 1 if (hasNext) scan() r - } else Iterator.empty.next + } else Iterator.empty.next() def dup = newIterator(idx, until, totalsize) def split = if (remaining > 1) { val divpt = (until + idx) / 2 diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala index f5c0b10526..7766f07e23 100644 --- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala +++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala @@ -47,7 +47,7 @@ extends Combiner[T, ParArray[T]] { } def clear() { - buff.clear + buff.clear() } override def sizeHint(sz: Int) = { diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 0670da137c..95b393dd0e 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -576,7 +576,7 @@ 
object Future { def sequence[A, M[_] <: TraversableOnce[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = { in.foldLeft(Promise.successful(cbf(in)).future) { (fr, fa) => for (r <- fr; a <- fa.asInstanceOf[Future[A]]) yield (r += a) - } map (_.result) + } map (_.result()) } /** Returns a `Future` to the result of the first future in the list that is completed. @@ -638,7 +638,7 @@ object Future { * }}} */ def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { - if (futures.isEmpty) Promise[R].failure(new NoSuchElementException("reduce attempted on empty collection")).future + if (futures.isEmpty) Promise[R]().failure(new NoSuchElementException("reduce attempted on empty collection")).future else sequence(futures).map(_ reduceLeft op) } @@ -654,7 +654,7 @@ object Future { in.foldLeft(Promise.successful(cbf(in)).future) { (fr, a) => val fb = fn(a.asInstanceOf[A]) for (r <- fr; b <- fb) yield (r += b) - }.map(_.result) + }.map(_.result()) // This is used to run callbacks which are internal // to scala.concurrent; our own callbacks are only diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala index 0353d61b22..6c6155279d 100644 --- a/src/library/scala/concurrent/duration/Duration.scala +++ b/src/library/scala/concurrent/duration/Duration.scala @@ -103,7 +103,7 @@ object Duration { * Extract length and time unit out of a duration, if it is finite. */ def unapply(d: Duration): Option[(Long, TimeUnit)] = - if (d.isFinite) Some((d.length, d.unit)) else None + if (d.isFinite()) Some((d.length, d.unit)) else None /** * Construct a possibly infinite or undefined Duration from the given number of nanoseconds. 
@@ -623,7 +623,7 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio // if this is made a constant, then scalac will elide the conditional and always return +0.0, SI-6331 private[this] def minusZero = -0d def /(divisor: Duration): Double = - if (divisor.isFinite) toNanos.toDouble / divisor.toNanos + if (divisor.isFinite()) toNanos.toDouble / divisor.toNanos else if (divisor eq Undefined) Double.NaN else if ((length < 0) ^ (divisor > Zero)) 0d else minusZero diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index 52f1075137..7af70400ef 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -83,7 +83,7 @@ private[concurrent] object Promise { import Duration.Undefined atMost match { case u if u eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period") - case Duration.Inf => awaitUnbounded + case Duration.Inf => awaitUnbounded() case Duration.MinusInf => isCompleted case f: FiniteDuration => if (f > Duration.Zero) awaitUnsafe(f.fromNow, f) else isCompleted } @@ -135,7 +135,7 @@ private[concurrent] object Promise { } def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = { - val preparedEC = executor.prepare + val preparedEC = executor.prepare() val runnable = new CallbackRunnable[T](preparedEC, func) @tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed @@ -162,7 +162,7 @@ private[concurrent] object Promise { def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = { val completedAs = value.get - val preparedEC = executor.prepare + val preparedEC = executor.prepare() (new CallbackRunnable(preparedEC, func)).executeWithValue(completedAs) } diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala index 767f06fd3f..e250da27c3 100644 --- a/src/library/scala/io/BufferedSource.scala +++ b/src/library/scala/io/BufferedSource.scala @@ -73,7 +73,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod if (nextLine == null) lineReader.readLine else try nextLine finally nextLine = null } - if (result == null) Iterator.empty.next + if (result == null) Iterator.empty.next() else result } } diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala index b13729aefe..f976c7eb0a 100644 --- a/src/library/scala/io/Source.scala +++ b/src/library/scala/io/Source.scala @@ -194,11 +194,11 @@ abstract class Source extends Iterator[Char] { lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered def isNewline(ch: Char) = ch == '\r' || ch == '\n' def getc() = iter.hasNext && { - val ch = iter.next + val ch = iter.next() if (ch == '\n') false else if (ch == '\r') { if (iter.hasNext && iter.head == '\n') - iter.next + iter.next() false } @@ -209,7 +209,7 @@ abstract class Source extends Iterator[Char] { } def hasNext = iter.hasNext def next = { - sb.clear + sb.clear() while (getc()) { } sb.toString } @@ -227,7 +227,7 @@ abstract class Source extends Iterator[Char] { /** Returns next character. 
*/ - def next(): Char = positioner.next + def next(): Char = positioner.next() class Positioner(encoder: Position) { def this() = this(RelaxedPosition) @@ -245,7 +245,7 @@ abstract class Source extends Iterator[Char] { var tabinc = 4 def next(): Char = { - ch = iter.next + ch = iter.next() pos = encoder.encode(cline, ccol) ch match { case '\n' => @@ -267,7 +267,7 @@ abstract class Source extends Iterator[Char] { } object RelaxedPositioner extends Positioner(RelaxedPosition) { } object NoPositioner extends Positioner(Position) { - override def next(): Char = iter.next + override def next(): Char = iter.next() } def ch = positioner.ch def pos = positioner.pos diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala index f3aabc2974..d8f4337b8f 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -171,7 +171,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable { * with unequal hashCodes. */ override def hashCode(): Int = - if (isWhole) unifiedPrimitiveHashcode + if (isWhole()) unifiedPrimitiveHashcode() else doubleValue.## /** Compares this BigDecimal with the specified value for equality. diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index feb538033b..719099b405 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -112,7 +112,7 @@ object BigInt { class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable { /** Returns the hash code for this BigInt. */ override def hashCode(): Int = - if (isValidLong) unifiedPrimitiveHashcode + if (isValidLong) unifiedPrimitiveHashcode() else bigInteger.## /** Compares this BigInt with the specified value for equality. diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index aea512a541..d1a4e7c35c 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -173,7 +173,7 @@ object Ordering extends LowPriorityOrderingImplicits { val ye = y.iterator while (xe.hasNext && ye.hasNext) { - val res = ord.compare(xe.next, ye.next) + val res = ord.compare(xe.next(), ye.next()) if (res != 0) return res } @@ -347,7 +347,7 @@ object Ordering extends LowPriorityOrderingImplicits { val ye = y.iterator while (xe.hasNext && ye.hasNext) { - val res = ord.compare(xe.next, ye.next) + val res = ord.compare(xe.next(), ye.next()) if (res != 0) return res } diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala index 59fc7f27b2..e748841c12 100644 --- a/src/library/scala/math/ScalaNumericConversions.scala +++ b/src/library/scala/math/ScalaNumericConversions.scala @@ -32,37 +32,37 @@ trait ScalaNumericAnyConversions extends Any { /** Returns the value of this as a [[scala.Char]]. This may involve * rounding or truncation. */ - def toChar = intValue.toChar + def toChar = intValue().toChar /** Returns the value of this as a [[scala.Byte]]. This may involve * rounding or truncation. */ - def toByte = byteValue + def toByte = byteValue() /** Returns the value of this as a [[scala.Short]]. This may involve * rounding or truncation. */ - def toShort = shortValue + def toShort = shortValue() /** Returns the value of this as an [[scala.Int]]. This may involve * rounding or truncation. */ - def toInt = intValue + def toInt = intValue() /** Returns the value of this as a [[scala.Long]]. 
This may involve * rounding or truncation. */ - def toLong = longValue + def toLong = longValue() /** Returns the value of this as a [[scala.Float]]. This may involve * rounding or truncation. */ - def toFloat = floatValue + def toFloat = floatValue() /** Returns the value of this as a [[scala.Double]]. This may involve * rounding or truncation. */ - def toDouble = doubleValue + def toDouble = doubleValue() /** Returns `true` iff this has a zero fractional part, and is within the * range of [[scala.Byte]] MinValue and MaxValue; otherwise returns `false`. diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala index 76fc38b267..e8460a203b 100644 --- a/src/library/scala/runtime/ScalaNumberProxy.scala +++ b/src/library/scala/runtime/ScalaNumberProxy.scala @@ -28,8 +28,8 @@ trait ScalaNumberProxy[T] extends Any with ScalaNumericAnyConversions with Typed def floatValue() = num.toFloat(self) def longValue() = num.toLong(self) def intValue() = num.toInt(self) - def byteValue() = intValue.toByte - def shortValue() = intValue.toShort + def byteValue() = intValue().toByte + def shortValue() = intValue().toShort def min(that: T): T = num.min(self, that) def max(that: T): T = num.max(self, that) diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala index ef29075ac3..bde69a0f54 100644 --- a/src/library/scala/runtime/Tuple2Zipped.scala +++ b/src/library/scala/runtime/Tuple2Zipped.scala @@ -37,12 +37,12 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 for (el1 <- colls._1) { if (elems2.hasNext) - b += f(el1, elems2.next) + b += f(el1, elems2.next()) else - return b.result + return b.result() } - b.result + b.result() } def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = { @@ -51,12 +51,12 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 for (el1 <- colls._1) { if (elems2.hasNext) - b ++= f(el1, elems2.next) + b ++= f(el1, elems2.next()) else - return b.result + return b.result() } - b.result + b.result() } def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = { @@ -66,16 +66,16 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 for (el1 <- colls._1) { if (elems2.hasNext) { - val el2 = elems2.next + val el2 = elems2.next() if (f(el1, el2)) { b1 += el1 b2 += el2 } } - else return (b1.result, b2.result) + else return (b1.result(), b2.result()) } - (b1.result, b2.result) + (b1.result(), b2.result()) } def exists(f: (El1, El2) => Boolean): Boolean = { @@ -83,7 +83,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 for (el1 <- colls._1) { if (elems2.hasNext) { - if (f(el1, elems2.next)) + if (f(el1, elems2.next())) return true } else return false @@ -99,7 +99,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 for (el1 <- colls._1) { if (elems2.hasNext) - f(el1, elems2.next) + f(el1, elems2.next()) else return } @@ -117,9 +117,9 @@ object Tuple2Zipped { val it1 = x._1.toIterator val it2 = x._2.toIterator while (it1.hasNext && it2.hasNext) - buf += ((it1.next, it2.next)) + buf += ((it1.next(), it2.next())) - buf.result + buf.result() } def zipped[El1, Repr1, El2, Repr2] diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala index 3f2afaf772..34da42462a 100644 --- 
a/src/library/scala/runtime/Tuple3Zipped.scala +++ b/src/library/scala/runtime/Tuple3Zipped.scala @@ -34,11 +34,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers for (el1 <- colls._1) { if (elems2.hasNext && elems3.hasNext) - b += f(el1, elems2.next, elems3.next) + b += f(el1, elems2.next(), elems3.next()) else - return b.result + return b.result() } - b.result + b.result() } def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = { @@ -48,11 +48,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers for (el1 <- colls._1) { if (elems2.hasNext && elems3.hasNext) - b ++= f(el1, elems2.next, elems3.next) + b ++= f(el1, elems2.next(), elems3.next()) else - return b.result + return b.result() } - b.result + b.result() } def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)( @@ -64,12 +64,12 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers val b3 = cbf3(colls._3.repr) val elems2 = colls._2.iterator val elems3 = colls._3.iterator - def result = (b1.result, b2.result, b3.result) + def result = (b1.result(), b2.result(), b3.result()) for (el1 <- colls._1) { if (elems2.hasNext && elems3.hasNext) { - val el2 = elems2.next - val el3 = elems3.next + val el2 = elems2.next() + val el3 = elems3.next() if (f(el1, el2, el3)) { b1 += el1 @@ -89,7 +89,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers for (el1 <- colls._1) { if (elems2.hasNext && elems3.hasNext) { - if (f(el1, elems2.next, elems3.next)) + if (f(el1, elems2.next(), elems3.next())) return true } else return false @@ -106,7 +106,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers for (el1 <- colls._1) { if (elems2.hasNext && elems3.hasNext) - f(el1, elems2.next, elems3.next) + f(el1, elems2.next(), elems3.next()) else return } @@ -126,9 +126,9 @@ object Tuple3Zipped { val it2 = x._2.toIterator val it3 = x._3.toIterator while (it1.hasNext && it2.hasNext && it3.hasNext) - buf += ((it1.next, it2.next, it3.next)) + buf += ((it1.next(), it2.next(), it3.next())) - buf.result + buf.result() } def zipped[El1, Repr1, El2, Repr2, El3, Repr3] diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala index 0003df6c52..e2c4f13830 100644 --- a/src/library/scala/sys/process/BasicIO.scala +++ b/src/library/scala/sys/process/BasicIO.scala @@ -46,7 +46,7 @@ object BasicIO { def next(): Stream[T] = q.take match { case Left(0) => Stream.empty case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty - case Right(s) => Stream.cons(s, next) + case Right(s) => Stream.cons(s, next()) } new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next()) } diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala index c21c0daa5e..bfd3551a65 100644 --- a/src/library/scala/sys/process/ProcessImpl.scala +++ b/src/library/scala/sys/process/ProcessImpl.scala @@ -32,7 +32,7 @@ private[process] trait ProcessImpl { try result set Right(f) catch { case e: Exception => result set Left(e) } - Spawn(run) + Spawn(run()) () => result.get match { case Right(value) => value @@ -68,10 +68,10 @@ private[process] trait ProcessImpl { protected[this] override def runAndExitValue() = { val first = a.run(io) - runInterruptible(first.exitValue)(first.destroy()) flatMap { codeA => + 
runInterruptible(first.exitValue())(first.destroy()) flatMap { codeA => if (evaluateSecondProcess(codeA)) { val second = b.run(io) - runInterruptible(second.exitValue)(second.destroy()) + runInterruptible(second.exitValue())(second.destroy()) } else Some(codeA) } @@ -132,10 +132,10 @@ private[process] trait ProcessImpl { val first = a.run(firstIO) try { runInterruptible { - val exit1 = first.exitValue + val exit1 = first.exitValue() currentSource put None currentSink put None - val exit2 = second.exitValue + val exit2 = second.exitValue() // Since file redirection (e.g. #>) is implemented as a piped process, // we ignore its exit value so cmd #> file doesn't always return 0. if (b.hasExitValue) exit2 else exit1 diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala index 2b11594f66..b3a8617f15 100644 --- a/src/library/scala/util/Random.scala +++ b/src/library/scala/util/Random.scala @@ -117,7 +117,7 @@ class Random(val self: java.util.Random) extends AnyRef with Serializable { swap(n - 1, k) } - (bf(xs) ++= buf).result + (bf(xs) ++= buf).result() } /** Returns a Stream of pseudorandomly chosen alphanumeric characters, diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 0cd0cfd7f6..981d9af02f 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -233,7 +233,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends new Iterator[Match] { def hasNext = matchIterator.hasNext def next: Match = { - matchIterator.next + matchIterator.next() new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force } } @@ -622,14 +622,14 @@ object Regex { /** Convert to an iterator that yields MatchData elements instead of Strings */ def matchData: Iterator[Match] = new AbstractIterator[Match] { def hasNext = self.hasNext - def next = { self.next; new Match(source, matcher, groupNames).force } + def next = { self.next(); new Match(source, matcher, groupNames).force } } /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support */ private[matching] def replacementData = new AbstractIterator[Match] with Replacement { def matcher = self.matcher def hasNext = self.hasNext - def next = { self.next; new Match(source, matcher, groupNames).force } + def next = { self.next(); new Match(source, matcher, groupNames).force } } } diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala index f9157802c6..98807a40a4 100755 --- a/src/library/scala/xml/PrettyPrinter.scala +++ b/src/library/scala/xml/PrettyPrinter.scala @@ -141,7 +141,7 @@ class PrettyPrinter(width: Int, step: Int) { case Text(s) if s.trim() == "" => ; case _:Atom[_] | _:Comment | _:EntityRef | _:ProcInstr => - makeBox( ind, node.toString.trim() ) + makeBox( ind, node.toString().trim() ) case g @ Group(xs) => traverse(xs.iterator, pscope, ind) case _ => diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala index f3c162fcc8..06fd46701a 100755 --- a/src/library/scala/xml/Utility.scala +++ b/src/library/scala/xml/Utility.scala @@ -245,10 +245,10 @@ object Utility extends AnyRef with parsing.TokenTests { if (children.isEmpty) return else if (children forall isAtomAndNotText) { // add space val it = children.iterator - val f = it.next + val f = it.next() serialize(f, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) while 
(it.hasNext) { - val x = it.next + val x = it.next() sb.append(' ') serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) } @@ -333,22 +333,22 @@ object Utility extends AnyRef with parsing.TokenTests { val it = value.iterator while (it.hasNext) { - var c = it.next + var c = it.next() // entity! flush buffer into text node if (c == '&') { - c = it.next + c = it.next() if (c == '#') { - c = it.next - val theChar = parseCharRef ({ ()=> c },{ () => c = it.next },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)}) + c = it.next() + val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)}) sb.append(theChar) } else { if (rfb eq null) rfb = new StringBuilder() rfb append c - c = it.next + c = it.next() while (c != ';') { rfb.append(c) - c = it.next + c = it.next() } val ref = rfb.toString() rfb.clear() diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala index 6bc9c05832..ca84bcad70 100644 --- a/src/library/scala/xml/dtd/ContentModelParser.scala +++ b/src/library/scala/xml/dtd/ContentModelParser.scala @@ -26,14 +26,14 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # scala.sys.error("expected "+token2string(tok)+ ", got unexpected token:"+token2string(token)) } - nextToken + nextToken() } // s [ '+' | '*' | '?' ] def maybeSuffix(s: RegExp) = token match { - case STAR => nextToken; Star(s) - case PLUS => nextToken; Sequ(s, Star(s)) - case OPT => nextToken; Alt(Eps, s) + case STAR => nextToken(); Star(s) + case PLUS => nextToken(); Sequ(s, Star(s)) + case OPT => nextToken(); Alt(Eps, s) case _ => s } @@ -48,18 +48,18 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # } case LPAREN => - nextToken - sOpt + nextToken() + sOpt() if (token != TOKEN_PCDATA) ELEMENTS(regexp) else { - nextToken + nextToken() token match { case RPAREN => PCDATA case CHOICE => val res = MIXED(choiceRest(Eps)) - sOpt + sOpt() accept( RPAREN ) accept( STAR ) res @@ -72,7 +72,7 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # scala.sys.error("unexpected token:" + token2string(token) ) } // sopt ::= S? - def sOpt() = if( token == S ) nextToken + def sOpt() = if( token == S ) nextToken() // (' S? mixed ::= '#PCDATA' S? ')' // | '#PCDATA' (S? '|' S? atom)* S? ')*' @@ -80,9 +80,9 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # // '(' S? regexp ::= cp S? [seqRest|choiceRest] ')' [ '+' | '*' | '?' 
] def regexp: RegExp = { val p = particle - sOpt + sOpt() maybeSuffix(token match { - case RPAREN => nextToken; p + case RPAREN => nextToken(); p case CHOICE => val q = choiceRest( p );accept( RPAREN ); q case COMMA => val q = seqRest( p ); accept( RPAREN ); q }) @@ -92,10 +92,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # def seqRest(p: RegExp) = { var k = List(p) while( token == COMMA ) { - nextToken - sOpt + nextToken() + sOpt() k = particle::k - sOpt + sOpt() } Sequ( k.reverse:_* ) } @@ -104,10 +104,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # def choiceRest( p:RegExp ) = { var k = List( p ) while( token == CHOICE ) { - nextToken - sOpt + nextToken() + sOpt() k = particle::k - sOpt + sOpt() } Alt( k.reverse:_* ) } @@ -115,14 +115,14 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning # // particle ::= '(' S? regexp // | name [ '+' | '*' | '?' ] def particle = token match { - case LPAREN => nextToken; sOpt; regexp - case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a) + case LPAREN => nextToken(); sOpt(); regexp + case NAME => val a = Letter(ElemName(value)); nextToken(); maybeSuffix(a) case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token)) } // atom ::= name def atom = token match { - case NAME => val a = Letter(ElemName(value)); nextToken; a + case NAME => val a = Letter(ElemName(value)); nextToken(); a case _ => scala.sys.error("expected Name, got:"+token2string(token)) } } diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala index b2510baa18..af7e77e76f 100644 --- a/src/library/scala/xml/dtd/DocType.scala +++ b/src/library/scala/xml/dtd/DocType.scala @@ -28,7 +28,7 @@ case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl]) { if (intSubset.isEmpty) "" else intSubset.mkString("[", "", "]") - """""".format(name, extID.toString, intString) + """""".format(name, extID.toString(), intString) } } diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala index d4d648c8df..53404e34a7 100644 --- a/src/library/scala/xml/dtd/Scanner.scala +++ b/src/library/scala/xml/dtd/Scanner.scala @@ -28,8 +28,8 @@ class Scanner extends Tokens with parsing.TokenTests { value = "" it = (s).iterator token = 1+END - next - nextToken + next() + nextToken() } /** scans the next token */ @@ -41,27 +41,27 @@ class Scanner extends Tokens with parsing.TokenTests { final def isIdentChar = ( ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z')) - final def next() = if (it.hasNext) c = it.next else c = ENDCH + final def next() = if (it.hasNext) c = it.next() else c = ENDCH final def acc(d: Char) { - if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !") + if (c == d) next() else scala.sys.error("expected '"+d+"' found '"+c+"' !") } final def accS(ds: Seq[Char]) { ds foreach acc } final def readToken: Int = if (isSpace(c)) { - while (isSpace(c)) c = it.next + while (isSpace(c)) c = it.next() S } else c match { - case '(' => next; LPAREN - case ')' => next; RPAREN - case ',' => next; COMMA - case '*' => next; STAR - case '+' => next; PLUS - case '?' => next; OPT - case '|' => next; CHOICE - case '#' => next; accS( "PCDATA" ); TOKEN_PCDATA + case '(' => next(); LPAREN + case ')' => next(); RPAREN + case ',' => next(); COMMA + case '*' => next(); STAR + case '+' => next(); PLUS + case '?' 
=> next(); OPT + case '|' => next(); CHOICE + case '#' => next(); accS( "PCDATA" ); TOKEN_PCDATA case ENDCH => END case _ => if (isNameStart(c)) name; // NAME @@ -70,7 +70,7 @@ class Scanner extends Tokens with parsing.TokenTests { final def name = { val sb = new StringBuilder() - do { sb.append(c); next } while (isNameChar(c)) + do { sb.append(c); next() } while (isNameChar(c)) value = sb.toString() NAME } diff --git a/src/library/scala/xml/dtd/impl/SubsetConstruction.scala b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala index 8e4b5cc0f0..d1ea4b6e9e 100644 --- a/src/library/scala/xml/dtd/impl/SubsetConstruction.scala +++ b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala @@ -50,7 +50,7 @@ private[dtd] class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) addFinal(q0) // initial state may also be a final state while (!rest.isEmpty) { - val P = rest.pop + val P = rest.pop() // assign a number to this bitset indexMap = indexMap.updated(P, ix) invIndexMap = invIndexMap.updated(ix, P) diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/library/scala/xml/factory/XMLLoader.scala index efa241e388..bd18f2a699 100644 --- a/src/library/scala/xml/factory/XMLLoader.scala +++ b/src/library/scala/xml/factory/XMLLoader.scala @@ -38,7 +38,7 @@ trait XMLLoader[T <: Node] newAdapter.scopeStack push TopScope parser.parse(source, newAdapter) - newAdapter.scopeStack.pop + newAdapter.scopeStack.pop() newAdapter.rootElem.asInstanceOf[T] } diff --git a/src/library/scala/xml/parsing/FactoryAdapter.scala b/src/library/scala/xml/parsing/FactoryAdapter.scala index 5f776f5299..8659d3f0c4 100644 --- a/src/library/scala/xml/parsing/FactoryAdapter.scala +++ b/src/library/scala/xml/parsing/FactoryAdapter.scala @@ -26,7 +26,7 @@ trait ConsoleErrorHandler extends DefaultHandler { val s = "[%s]:%d:%d: %s".format( errtype, ex.getLineNumber, ex.getColumnNumber, ex.getMessage) Console.println(s) - Console.flush + Console.flush() } } @@ -91,7 +91,7 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node else { var it = ch.slice(offset, offset + length).iterator while (it.hasNext) { - val c = it.next + val c = it.next() val isSpace = c.isWhitespace buffer append (if (isSpace) ' ' else c) if (isSpace) @@ -164,17 +164,17 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node */ override def endElement(uri: String , _localName: String, qname: String): Unit = { captureText() - val metaData = attribStack.pop + val metaData = attribStack.pop() // reverse order to get it right val v = (Iterator continually hStack.pop takeWhile (_ != null)).toList.reverse val (pre, localName) = splitName(qname) - val scp = scopeStack.pop + val scp = scopeStack.pop() // create element rootElem = createNode(pre, localName, metaData, scp, v) hStack push rootElem - curTag = tagStack.pop + curTag = tagStack.pop() capture = curTag != null && nodeContainsText(curTag) // root level } diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala index 228043e183..8129165b1b 100755 --- a/src/library/scala/xml/parsing/MarkupParser.scala +++ b/src/library/scala/xml/parsing/MarkupParser.scala @@ -102,7 +102,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def ch: Char = { if (nextChNeeded) { if (curInput.hasNext) { - lastChRead = curInput.next + lastChRead = curInput.next() pos = curInput.pos } else { val ilen = inpStack.length @@ -138,7 +138,7 @@ trait MarkupParser extends MarkupParserCommon with 
TokenTests * }}} */ def xmlProcInstr(): MetaData = { xToken("xml") - xSpace + xSpace() val (md,scp) = xAttributes(TopScope) if (scp != TopScope) reportSyntaxError("no xmlns definitions here, please.") @@ -158,7 +158,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests var n = 0 if (isProlog) - xSpaceOpt + xSpaceOpt() m("version") match { case null => @@ -223,10 +223,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests return null } - nextch // is prolog ? + nextch() // is prolog ? var children: NodeSeq = null if ('?' == ch) { - nextch + nextch() info_prolog = prolog() doc.version = info_prolog._1 doc.encoding = info_prolog._2 @@ -272,7 +272,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests * after construction, this method formalizes that suboptimal reality. */ def initialize: this.type = { - nextch + nextch() this } @@ -304,7 +304,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests var aMap: MetaData = Null while (isNameStart(ch)) { val qname = xName - xEQ // side effect + xEQ() // side effect val value = xAttributeValue() Utility.prefix(qname) match { @@ -324,7 +324,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests } if ((ch != '/') && (ch != '>') && ('?' != ch)) - xSpace + xSpace() } if(!aMap.wellformed(scope)) @@ -341,12 +341,12 @@ trait MarkupParser extends MarkupParserCommon with TokenTests */ def xEntityValue(): String = { val endch = ch - nextch + nextch() while (ch != endch && !eof) { putChar(ch) - nextch + nextch() } - nextch + nextch() val str = cbuf.toString() cbuf.length = 0 str @@ -375,13 +375,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests val sb: StringBuilder = new StringBuilder() xToken("--") while (true) { - if (ch == '-' && { sb.append(ch); nextch; ch == '-' }) { + if (ch == '-' && { sb.append(ch); nextch(); ch == '-' }) { sb.length = sb.length - 1 - nextch + nextch() xToken('>') return handle.comment(pos, sb.toString()) } else sb.append(ch) - nextch + nextch() } throw FatalError("this cannot happen") } @@ -402,7 +402,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def content1(pscope: NamespaceBinding, ts: NodeBuffer) { ch match { case '!' => - nextch + nextch() if ('[' == ch) // CDATA ts &+ xCharData else if ('D' == ch) // doctypedecl, parse DTD // @todo REMOVE HACK @@ -410,7 +410,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests else // comment ts &+ xComment case '?' => // PI - nextch + nextch() ts &+ xProcInstr case _ => ts &+ element1(pscope) // child @@ -435,17 +435,17 @@ trait MarkupParser extends MarkupParserCommon with TokenTests ch match { case '<' => // another tag - nextch; ch match { + nextch(); ch match { case '/' => exit = true // end tag case _ => content1(pscope, ts) } // postcond: xEmbeddedBlock == false! 
case '&' => // EntityRef or CharRef - nextch; ch match { + nextch(); ch match { case '#' => // CharacterRef - nextch - val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch)) + nextch() + val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch())) xToken(';') ts &+ theChar case _ => // EntityRef @@ -470,16 +470,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests * }}} */ def externalID(): ExternalID = ch match { case 'S' => - nextch + nextch() xToken("YSTEM") - xSpace + xSpace() val sysID = systemLiteral() new SystemID(sysID) case 'P' => - nextch; xToken("UBLIC") - xSpace + nextch(); xToken("UBLIC") + xSpace() val pubID = pubidLiteral() - xSpace + xSpace() val sysID = systemLiteral() new PublicID(pubID, sysID) } @@ -495,13 +495,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests if (this.dtd ne null) reportSyntaxError("unexpected character (DOCTYPE already defined") xToken("DOCTYPE") - xSpace + xSpace() val n = xName - xSpace + xSpace() //external ID if ('S' == ch || 'P' == ch) { extID = externalID() - xSpaceOpt + xSpaceOpt() } /* parse external subset of DTD @@ -518,12 +518,12 @@ trait MarkupParser extends MarkupParserCommon with TokenTests } if ('[' == ch) { // internal subset - nextch + nextch() /* TODO */ intSubset() // TODO: do the DTD parsing?? ?!?!?!?!! xToken(']') - xSpaceOpt + xSpaceOpt() } xToken('>') this.dtd = new DTD { @@ -580,7 +580,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests var exit = false while (! exit) { putChar(ch) - nextch + nextch() exit = eof || ( ch == '<' ) || ( ch == '&' ) } @@ -598,12 +598,12 @@ trait MarkupParser extends MarkupParserCommon with TokenTests val endch = ch if (ch != '\'' && ch != '"') reportSyntaxError("quote ' or \" expected") - nextch + nextch() while (ch != endch && !eof) { putChar(ch) - nextch + nextch() } - nextch + nextch() val str = cbuf.toString() cbuf.length = 0 str @@ -616,15 +616,15 @@ trait MarkupParser extends MarkupParserCommon with TokenTests val endch = ch if (ch!='\'' && ch != '"') reportSyntaxError("quote ' or \" expected") - nextch + nextch() while (ch != endch && !eof) { putChar(ch) //println("hello '"+ch+"'"+isPubIDChar(ch)) if (!isPubIDChar(ch)) reportSyntaxError("char '"+ch+"' is not allowed in public id") - nextch + nextch() } - nextch + nextch() val str = cbuf.toString cbuf.length = 0 str @@ -637,9 +637,9 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def extSubset(): Unit = { var textdecl: (Option[String],Option[String]) = null if (ch == '<') { - nextch + nextch() if (ch == '?') { - nextch + nextch() textdecl = textDecl() } else markupDecl1() @@ -650,13 +650,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def markupDecl1() = { def doInclude() = { - xToken('['); while(']' != ch) markupDecl(); nextch // ']' + xToken('['); while(']' != ch) markupDecl(); nextch() // ']' } def doIgnore() = { - xToken('['); while(']' != ch) nextch; nextch // ']' + xToken('['); while(']' != ch) nextch(); nextch() // ']' } if ('?' == ch) { - nextch + nextch() xProcInstr // simply ignore processing instructions! 
} else { xToken('!') @@ -665,35 +665,35 @@ trait MarkupParser extends MarkupParserCommon with TokenTests xComment // ignore comments case 'E' => - nextch + nextch() if ('L' == ch) { - nextch + nextch() elementDecl() } else entityDecl() case 'A' => - nextch + nextch() attrDecl() case 'N' => - nextch + nextch() notationDecl() case '[' if inpStack.length >= extIndex => - nextch - xSpaceOpt + nextch() + xSpaceOpt() ch match { case '%' => - nextch + nextch() val ent = xName xToken(';') - xSpaceOpt + xSpaceOpt() push(ent) - xSpaceOpt + xSpaceOpt() val stmt = xName - xSpaceOpt + xSpaceOpt() stmt match { // parameter entity @@ -701,15 +701,15 @@ trait MarkupParser extends MarkupParserCommon with TokenTests case "IGNORE" => doIgnore() } case 'I' => - nextch + nextch() ch match { case 'G' => - nextch + nextch() xToken("NORE") - xSpaceOpt + xSpaceOpt() doIgnore() case 'N' => - nextch + nextch() xToken("NCLUDE") doInclude() } @@ -720,14 +720,14 @@ trait MarkupParser extends MarkupParserCommon with TokenTests case _ => curInput.reportError(pos, "unexpected character '"+ch+"', expected some markupdecl") while (ch!='>') - nextch + nextch() } } } def markupDecl(): Unit = ch match { case '%' => // parameter entity reference - nextch + nextch() val ent = xName xToken(';') if (!isValidating) @@ -737,20 +737,20 @@ trait MarkupParser extends MarkupParserCommon with TokenTests //peReference case '<' => - nextch + nextch() markupDecl1() case _ if isSpace(ch) => - xSpace + xSpace() case _ => reportSyntaxError("markupdecl: unexpected character '"+ch+"' #" + ch.toInt) - nextch + nextch() } /** "rec-xml/#ExtSubset" pe references may not occur within markup declarations */ def intSubset() { //Console.println("(DEBUG) intSubset()") - xSpace + xSpace() while (']' != ch) markupDecl() } @@ -759,16 +759,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests */ def elementDecl() { xToken("EMENT") - xSpace + xSpace() val n = xName - xSpace + xSpace() while ('>' != ch) { //Console.println("["+ch+"]") putChar(ch) - nextch + nextch() } //Console.println("END["+ch+"]") - nextch + nextch() val cmstr = cbuf.toString() cbuf.length = 0 handle.elemDecl(n, cmstr) @@ -779,20 +779,20 @@ trait MarkupParser extends MarkupParserCommon with TokenTests * }}} */ def attrDecl() = { xToken("TTLIST") - xSpace + xSpace() val n = xName - xSpace + xSpace() var attList: List[AttrDecl] = Nil // later: find the elemDecl for n while ('>' != ch) { val aname = xName - xSpace + xSpace() // could be enumeration (foo,bar) parse this later :-/ while ('"' != ch && '\'' != ch && '#' != ch && '<' != ch) { if (!isSpace(ch)) cbuf.append(ch) - nextch + nextch() } val atpe = cbuf.toString cbuf.length = 0 @@ -802,21 +802,21 @@ trait MarkupParser extends MarkupParserCommon with TokenTests DEFAULT(false, xAttributeValue()) case '#' => - nextch + nextch() xName match { - case "FIXED" => xSpace ; DEFAULT(true, xAttributeValue()) + case "FIXED" => xSpace() ; DEFAULT(true, xAttributeValue()) case "IMPLIED" => IMPLIED case "REQUIRED" => REQUIRED } case _ => null } - xSpaceOpt + xSpaceOpt() attList ::= AttrDecl(aname, atpe, defdecl) cbuf.length = 0 } - nextch + nextch() handle.attListDecl(n, attList.reverse) } @@ -826,39 +826,39 @@ trait MarkupParser extends MarkupParserCommon with TokenTests def entityDecl() = { var isParameterEntity = false xToken("NTITY") - xSpace + xSpace() if ('%' == ch) { - nextch + nextch() isParameterEntity = true - xSpace + xSpace() } val n = xName - xSpace + xSpace() ch match { case 'S' | 'P' => //sy val extID = externalID() if 
(isParameterEntity) { - xSpaceOpt + xSpaceOpt() xToken('>') handle.parameterEntityDecl(n, ExtDef(extID)) } else { // notation? - xSpace + xSpace() if ('>' != ch) { xToken("NDATA") - xSpace + xSpace() val notat = xName - xSpaceOpt + xSpaceOpt() xToken('>') handle.unparsedEntityDecl(n, extID, notat) } else { - nextch + nextch() handle.parsedEntityDecl(n, ExtDef(extID)) } } case '"' | '\'' => val av = xEntityValue() - xSpaceOpt + xSpaceOpt() xToken('>') if (isParameterEntity) handle.parameterEntityDecl(n, IntDef(av)) @@ -873,19 +873,19 @@ trait MarkupParser extends MarkupParserCommon with TokenTests * }}} */ def notationDecl() { xToken("OTATION") - xSpace + xSpace() val notat = xName - xSpace + xSpace() val extID = if (ch == 'S') { externalID() } else if (ch == 'P') { /** PublicID (without system, only used in NOTATION) */ - nextch + nextch() xToken("UBLIC") - xSpace + xSpace() val pubID = pubidLiteral() - xSpaceOpt + xSpaceOpt() val sysID = if (ch != '>') systemLiteral() else @@ -895,7 +895,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests reportSyntaxError("PUBLIC or SYSTEM expected") scala.sys.error("died parsing notationdecl") } - xSpaceOpt + xSpaceOpt() xToken('>') handle.notationDecl(notat, extID) } @@ -912,7 +912,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests ch curInput = replacementText(entityName) - nextch + nextch() } def pushExternal(systemId: String) { @@ -923,7 +923,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests ch curInput = externalSource(systemId) - nextch + nextch() } def pop() { diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala index 43ec539931..7bfbcc7fff 100644 --- a/src/library/scala/xml/parsing/MarkupParserCommon.scala +++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala @@ -38,7 +38,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { */ protected def xTag(pscope: NamespaceType): (String, AttributesType) = { val name = xName - xSpaceOpt + xSpaceOpt() (name, mkAttributes(name, pscope)) } @@ -49,7 +49,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { */ def xProcInstr: ElementType = { val n = xName - xSpaceOpt + xSpaceOpt() xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>") } @@ -77,7 +77,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { private def takeUntilChar(it: Iterator[Char], end: Char): String = { val buf = new StringBuilder - while (it.hasNext) it.next match { + while (it.hasNext) it.next() match { case `end` => return buf.toString case ch => buf append ch } @@ -91,7 +91,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { if (xName != startName) errorNoEnd(startName) - xSpaceOpt + xSpaceOpt() xToken('>') } @@ -138,9 +138,9 @@ private[scala] trait MarkupParserCommon extends TokenTests { val buf = new StringBuilder val it = attval.iterator.buffered - while (it.hasNext) buf append (it.next match { + while (it.hasNext) buf append (it.next() match { case ' ' | '\t' | '\n' | '\r' => " " - case '&' if it.head == '#' => it.next ; xCharRef(it) + case '&' if it.head == '#' => it.next() ; xCharRef(it) case '&' => attr_unescape(takeUntilChar(it, ';')) case c => c }) @@ -157,11 +157,11 @@ private[scala] trait MarkupParserCommon extends TokenTests { Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _) def xCharRef(it: Iterator[Char]): String = { - var c = it.next - Utility.parseCharRef(() => c, () => { c = it.next }, reportSyntaxError _, truncatedError _) + 
var c = it.next() + Utility.parseCharRef(() => c, () => { c = it.next() }, reportSyntaxError _, truncatedError _) } - def xCharRef: String = xCharRef(() => ch, () => nextch) + def xCharRef: String = xCharRef(() => ch, () => nextch()) /** Create a lookahead reader which does not influence the input */ def lookahead(): BufferedIterator[Char] @@ -194,20 +194,20 @@ private[scala] trait MarkupParserCommon extends TokenTests { } def xToken(that: Char) { - if (ch == that) nextch + if (ch == that) nextch() else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch)) } def xToken(that: Seq[Char]) { that foreach xToken } /** scan [S] '=' [S]*/ - def xEQ() = { xSpaceOpt; xToken('='); xSpaceOpt } + def xEQ() = { xSpaceOpt(); xToken('='); xSpaceOpt() } /** skip optional space S? */ - def xSpaceOpt() = while (isSpace(ch) && !eof) nextch + def xSpaceOpt() = while (isSpace(ch) && !eof) nextch() /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */ def xSpace() = - if (isSpace(ch)) { nextch; xSpaceOpt } + if (isSpace(ch)) { nextch(); xSpaceOpt() } else xHandleError(ch, "whitespace expected") /** Apply a function and return the passed value */ @@ -240,7 +240,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { truncatedError("") // throws TruncatedXMLControl in compiler sb append ch - nextch + nextch() } unreachable } @@ -253,7 +253,7 @@ private[scala] trait MarkupParserCommon extends TokenTests { private def peek(lookingFor: String): Boolean = (lookahead() take lookingFor.length sameElements lookingFor.iterator) && { // drop the chars from the real reader (all lookahead + orig) - (0 to lookingFor.length) foreach (_ => nextch) + (0 to lookingFor.length) foreach (_ => nextch()) true } } diff --git a/src/library/scala/xml/parsing/XhtmlParser.scala b/src/library/scala/xml/parsing/XhtmlParser.scala index d08cb1fa9c..33b94c9bd7 100644 --- a/src/library/scala/xml/parsing/XhtmlParser.scala +++ b/src/library/scala/xml/parsing/XhtmlParser.scala @@ -26,5 +26,5 @@ class XhtmlParser(val input: Source) extends ConstructingHandler with MarkupPars * @author Burak Emir */ object XhtmlParser { - def apply(source: Source): NodeSeq = new XhtmlParser(source).initialize.document + def apply(source: Source): NodeSeq = new XhtmlParser(source).initialize.document() } diff --git a/src/library/scala/xml/persistent/CachedFileStorage.scala b/src/library/scala/xml/persistent/CachedFileStorage.scala index 916a1a0cf7..c0fad30da6 100644 --- a/src/library/scala/xml/persistent/CachedFileStorage.scala +++ b/src/library/scala/xml/persistent/CachedFileStorage.scala @@ -76,8 +76,8 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo log("[load]\nloading "+theFile) val src = Source.fromFile(theFile) log("parsing "+theFile) - val res = ConstructingParser.fromSource(src,false).document.docElem(0) - switch + val res = ConstructingParser.fromSource(src, false).document().docElem(0) + switch() log("[load done]") res.child.iterator } @@ -102,7 +102,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo c.close fos.close dirty = false - switch + switch() log("[save done]") } @@ -112,7 +112,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo log("[run]\nstarting storage thread, checking every "+interval+" ms") while (true) { Thread.sleep( this.interval ) - save + save() } } @@ -120,6 +120,6 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo * update. 
*/ def flush() = { this.dirty = true - save + save() } } diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala index 428c305055..3f9584fd04 100755 --- a/src/library/scala/xml/pull/XMLEventReader.scala +++ b/src/library/scala/xml/pull/XMLEventReader.scala @@ -139,10 +139,10 @@ trait ProducerConsumerIterator[T >: Null] extends Iterator[T] { def hasNext = !eos && (buffer != null || fillBuffer) def next() = { - if (eos) throw new NoSuchElementException("ProducerConsumerIterator") - if (buffer == null) fillBuffer + if (eos()) throw new NoSuchElementException("ProducerConsumerIterator") + if (buffer == null) fillBuffer() - drainBuffer + drainBuffer() } def available() = isElement(buffer) || isElement(queue.peek) diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 9e72fb9145..55f7704056 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -91,8 +91,8 @@ trait Printers extends api.Printers { self: SymbolTable => } def printColumn(ts: List[Tree], start: String, sep: String, end: String) { - print(start); indent; println() - printSeq(ts){print(_)}{print(sep); println()}; undent; println(); print(end) + print(start); indent(); println() + printSeq(ts){print(_)}{print(sep); println()}; undent(); println(); print(end) } def printRow(ts: List[Tree], start: String, sep: String, end: String) { @@ -327,10 +327,10 @@ trait Printers extends api.Printers { self: SymbolTable => print(lhs, " = ", rhs) case If(cond, thenp, elsep) => - print("if (", cond, ")"); indent; println() - print(thenp); undent + print("if (", cond, ")"); indent(); println() + print(thenp); undent() if (!elsep.isEmpty) { - println(); print("else"); indent; println(); print(elsep); undent + println(); print("else"); indent(); println(); print(elsep); undent() } case Return(expr) => @@ -652,7 +652,7 @@ trait Printers extends api.Printers { self: SymbolTable => print("(") val it = iterable.iterator while (it.hasNext) { - body(it.next) + body(it.next()) print(if (it.hasNext) ", " else "") } print(")") diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 1edfa84c04..c7d2fa42d3 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -140,7 +140,7 @@ abstract class TreeInfo { def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = { val b = List.newBuilder[R] foreachMethodParamAndArg(params, args)((param, arg) => b += f(param, arg)) - b.result + b.result() } def foreachMethodParamAndArg(params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => Unit): Boolean = { val plen = params.length @@ -154,21 +154,21 @@ abstract class TreeInfo { } if (plen == alen) foreach2(params, args)(f) - else if (params.isEmpty) return fail + else if (params.isEmpty) return fail() else if (isVarArgsList(params)) { val plenInit = plen - 1 if (alen == plenInit) { if (alen == 0) Nil // avoid calling mismatched zip else foreach2(params.init, args)(f) } - else if (alen < plenInit) return fail + else if (alen < plenInit) return fail() else { foreach2(params.init, args take plenInit)(f) val remainingArgs = args drop plenInit foreach2(List.fill(remainingArgs.size)(params.last), remainingArgs)(f) } } - else return fail + else return fail() true } diff --git a/src/reflect/scala/reflect/internal/Types.scala 
b/src/reflect/scala/reflect/internal/Types.scala index 09f78d1d5b..361c009350 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2355,7 +2355,7 @@ trait Types extends api.Types { self: SymbolTable => h = mix(h, pre.hashCode) h = mix(h, sym.hashCode) if (hasArgs) - finalizeHash(mix(h, args.hashCode), 3) + finalizeHash(mix(h, args.hashCode()), 3) else finalizeHash(h, 2) } diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 0d644aa73e..63b7f73386 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -126,7 +126,7 @@ trait Collections { ys1 = ys1.tail ys2 = ys2.tail } - buf.result + buf.result() } final def foreach2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Unit): Unit = { var ys1 = xs1 diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala index 0d4d55bdec..31df78f995 100644 --- a/src/reflect/scala/reflect/io/PlainFile.scala +++ b/src/reflect/scala/reflect/io/PlainFile.scala @@ -42,7 +42,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile { override def sizeOption = Some(givenPath.length.toInt) override def toString = path - override def hashCode(): Int = fpath.hashCode + override def hashCode(): Int = fpath.hashCode() override def equals(that: Any): Boolean = that match { case x: PlainFile => fpath == x.fpath case _ => false diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala index b45cffb150..6184c6776a 100644 --- a/src/reflect/scala/reflect/io/Streamable.scala +++ b/src/reflect/scala/reflect/io/Streamable.scala @@ -88,7 +88,7 @@ object Streamable { /** Obtains an InputStreamReader wrapped around a FileInputStream. */ - def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream, codec.charSet) + def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream(), codec.charSet) /** Wraps a BufferedReader around the result of reader(). */ @@ -115,7 +115,9 @@ object Streamable { finally stream.close() def bytes(is: => InputStream): Array[Byte] = - (new Bytes { def inputStream() = is }).toByteArray + (new Bytes { + def inputStream() = is + }).toByteArray() def slurp(is: => InputStream)(implicit codec: Codec): String = new Chars { def inputStream() = is } slurp codec diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala index 09b99087e6..ae0dd2032c 100644 --- a/src/reflect/scala/reflect/io/VirtualDirectory.scala +++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala @@ -34,15 +34,15 @@ extends AbstractFile { override def output = sys.error("directories cannot be written") /** Does this abstract file denote an existing file? */ - def create() { unsupported } + def create() { unsupported() } /** Delete the underlying file or directory (recursively). */ - def delete() { unsupported } + def delete() { unsupported() } /** Returns an abstract file with the given name. It does not * check that it exists. 
*/ - def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported + def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported() private val files = mutable.Map.empty[String, AbstractFile] diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala index 6f98b8385b..b28ad9f340 100644 --- a/src/reflect/scala/reflect/io/VirtualFile.scala +++ b/src/reflect/scala/reflect/io/VirtualFile.scala @@ -71,10 +71,10 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF } /** Does this abstract file denote an existing file? */ - def create() { unsupported } + def create() { unsupported() } /** Delete the underlying file or directory (recursively). */ - def delete() { unsupported } + def delete() { unsupported() } /** * Returns the abstract file in this abstract directory with the @@ -90,5 +90,5 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF /** Returns an abstract file with the given name. It does not * check that it exists. */ - def lookupNameUnchecked(name: String, directory: Boolean) = unsupported + def lookupNameUnchecked(name: String, directory: Boolean) = unsupported() } diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 097d3cb71c..78fc8d9cc8 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -61,13 +61,13 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq override def underlyingSource = Some(this) def isDirectory = true - def lookupName(name: String, directory: Boolean) = unsupported - def lookupNameUnchecked(name: String, directory: Boolean) = unsupported - def create() = unsupported - def delete() = unsupported - def output = unsupported - def container = unsupported - def absolute = unsupported + def lookupName(name: String, directory: Boolean) = unsupported() + def lookupNameUnchecked(name: String, directory: Boolean) = unsupported() + def create() = unsupported() + def delete() = unsupported() + def output = unsupported() + def container = unsupported() + def absolute = unsupported() private def walkIterator(its: Iterator[AbstractFile]): Iterator[AbstractFile] = { its flatMap { f => -- cgit v1.2.3 From 54065a7b1fa632b1e74e8e944c1cd39b23ea9035 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 24 Feb 2013 23:59:17 +0100 Subject: Fix two malformed format strings. 
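Both format strings were out of sync with their argument lists. java.util.Formatter silently drops surplus arguments and only throws MissingFormatArgumentException when an argument is missing, so these mismatches never failed loudly; the intended value simply never appeared in the log output. A condensed sketch of the two call sites fixed below (paraphrased from the diff, not a verbatim quote):

    // one "%s" conversion but two arguments: `clazz` is silently ignored
    log("Ran out of room looking for an enclosing method for %s ...".format(enclClass, clazz))

    // no conversion at all but one argument: the classpath is never printed
    macroLogVerbose("macro classloader: initializing from a REPL classloader".format(global.classPath.asURLs))
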
--- src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Macros.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 8440a6cb49..b50e32899f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -1437,7 +1437,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { assert(enclClass.isClass, enclClass) val sym = enclClass.primaryConstructor if (sym == NoSymbol) { - log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass, clazz)) + log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass)) } else { debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass)) res = EnclMethodEntry(javaName(enclClass), javaName(sym), javaType(sym)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 776efd6367..ca257a786c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -481,7 +481,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { // a heuristic to detect the REPL if (global.settings.exposeEmptyPackage.value) { - macroLogVerbose("macro classloader: initializing from a REPL classloader".format(global.classPath.asURLs)) + macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(global.classPath.asURLs)) import scala.tools.nsc.interpreter._ val virtualDirectory = global.settings.outputDirs.getSingleOutput.get new AbstractFileClassLoader(virtualDirectory, loader) {} -- cgit v1.2.3 From ee033022bc4db978796321b3fcf1d81377de2974 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 25 Feb 2013 00:07:38 +0100 Subject: Remove redundant 'val' from case class params. 
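Constructor parameters of a case class are already public stable values, so the explicit modifier adds nothing to the generated API; removing it only drops noise. A minimal before/after sketch (mirroring the AnyVals change below):

    case class Op(val op: String, val doc: String)   // before: redundant `val`
    case class Op(op: String, doc: String)           // after: identical public members `op` and `doc`
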
--- src/compiler/scala/tools/cmd/gen/AnyVals.scala | 2 +- .../scala/tools/nsc/backend/icode/TypeKinds.scala | 2 +- .../scala/tools/nsc/interactive/CompilerControl.scala | 18 +++++++++--------- .../tools/nsc/interactive/tests/core/TestMarker.scala | 2 +- .../scala/tools/nsc/interpreter/LoopCommands.scala | 2 +- .../scala/tools/nsc/transform/patmat/Logic.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Macros.scala | 8 ++++---- src/compiler/scala/tools/reflect/FrontEnd.scala | 2 +- src/compiler/scala/tools/reflect/ToolBox.scala | 2 +- src/library/scala/collection/convert/Wrappers.scala | 6 +++--- src/library/scala/collection/mutable/AVLTree.scala | 2 +- src/library/scala/collection/parallel/package.scala | 2 +- src/library/scala/util/Try.scala | 2 +- src/library/scala/xml/Group.scala | 2 +- src/reflect/scala/reflect/api/Printers.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 2 +- src/reflect/scala/reflect/macros/Parsers.scala | 2 +- src/reflect/scala/reflect/macros/Reifiers.scala | 4 ++-- src/reflect/scala/reflect/macros/Typers.scala | 2 +- 19 files changed, 33 insertions(+), 33 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala index dbd2195938..35d4eaf1b6 100644 --- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala +++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala @@ -13,7 +13,7 @@ trait AnyValReps { sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String) extends AnyValRep(name,repr,javaEquiv) { - case class Op(val op : String, val doc : String) + case class Op(op : String, doc : String) private def companionCoercions(tos: AnyValRep*) = { tos.toList map (to => diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index a32b00f385..6a392449e0 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -294,7 +294,7 @@ trait TypeKinds { self: ICodes => else ARRAY(ArrayN(elem, dims - 1)) } - final case class ARRAY(val elem: TypeKind) extends TypeKind { + final case class ARRAY(elem: TypeKind) extends TypeKind { override def toString = "ARRAY[" + elem + "]" override def isArrayType = true override def dimensions = 1 + elem.dimensions diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala index f10c00b8e0..c779403fad 100644 --- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala +++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala @@ -337,7 +337,7 @@ trait CompilerControl { self: Global => response raise new MissingResponse } - case class AskTypeAtItem(val pos: Position, response: Response[Tree]) extends WorkItem { + case class AskTypeAtItem(pos: Position, response: Response[Tree]) extends WorkItem { def apply() = self.getTypedTreeAt(pos, response) override def toString = "typeat "+pos.source+" "+pos.show @@ -345,7 +345,7 @@ trait CompilerControl { self: Global => response raise new MissingResponse } - case class AskTypeItem(val source: SourceFile, val forceReload: Boolean, response: Response[Tree]) extends WorkItem { + case class AskTypeItem(source: SourceFile, forceReload: Boolean, response: Response[Tree]) extends WorkItem { def apply() = self.getTypedTree(source, forceReload, response) override def toString = "typecheck" @@ -353,7 +353,7 @@ trait CompilerControl { self: Global => 
response raise new MissingResponse } - case class AskTypeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem { + case class AskTypeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem { def apply() = self.getTypeCompletion(pos, response) override def toString = "type completion "+pos.source+" "+pos.show @@ -361,7 +361,7 @@ trait CompilerControl { self: Global => response raise new MissingResponse } - case class AskScopeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem { + case class AskScopeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem { def apply() = self.getScopeCompletion(pos, response) override def toString = "scope completion "+pos.source+" "+pos.show @@ -379,7 +379,7 @@ trait CompilerControl { self: Global => def raiseMissing() = () } - case class AskLinkPosItem(val sym: Symbol, val source: SourceFile, response: Response[Position]) extends WorkItem { + case class AskLinkPosItem(sym: Symbol, source: SourceFile, response: Response[Position]) extends WorkItem { def apply() = self.getLinkPos(sym, source, response) override def toString = "linkpos "+sym+" in "+source @@ -387,7 +387,7 @@ trait CompilerControl { self: Global => response raise new MissingResponse } - case class AskDocCommentItem(val sym: Symbol, val site: Symbol, val source: SourceFile, response: Response[(String, String, Position)]) extends WorkItem { + case class AskDocCommentItem(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]) extends WorkItem { def apply() = self.getDocComment(sym, site, source, response) override def toString = "doc comment "+sym+" in "+source @@ -395,7 +395,7 @@ trait CompilerControl { self: Global => response raise new MissingResponse } - case class AskLoadedTypedItem(val source: SourceFile, response: Response[Tree]) extends WorkItem { + case class AskLoadedTypedItem(source: SourceFile, response: Response[Tree]) extends WorkItem { def apply() = self.waitLoadedTyped(source, response, this.onCompilerThread) override def toString = "wait loaded & typed "+source @@ -403,7 +403,7 @@ trait CompilerControl { self: Global => response raise new MissingResponse } - case class AskParsedEnteredItem(val source: SourceFile, val keepLoaded: Boolean, response: Response[Tree]) extends WorkItem { + case class AskParsedEnteredItem(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) extends WorkItem { def apply() = self.getParsedEntered(source, keepLoaded, response, this.onCompilerThread) override def toString = "getParsedEntered "+source+", keepLoaded = "+keepLoaded @@ -412,7 +412,7 @@ trait CompilerControl { self: Global => } @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") - case class AskInstrumentedItem(val source: SourceFile, line: Int, response: Response[(String, Array[Char])]) extends WorkItem { + case class AskInstrumentedItem(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) extends WorkItem { def apply() = self.getInstrumented(source, line, response) override def toString = "getInstrumented "+source diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala index ba1722382b..a5c228a549 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala @@ -16,7 +16,7 @@ object 
TestMarker { } } -abstract case class TestMarker(val marker: String) { +abstract case class TestMarker(marker: String) { TestMarker.checkForDuplicate(this) } diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala index 39979c8fbe..0d11020752 100644 --- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala +++ b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala @@ -68,7 +68,7 @@ trait LoopCommands { } // the result of a single command - case class Result(val keepRunning: Boolean, val lineToRecord: Option[String]) + case class Result(keepRunning: Boolean, lineToRecord: Option[String]) object Result { // the default result means "keep running, and don't record that line" diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 3ef08e1a6d..499bf1b022 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -113,7 +113,7 @@ trait Logic extends Debugging { case object False extends Prop // symbols are propositions - abstract case class Sym(val variable: Var, val const: Const) extends Prop { + abstract case class Sym(variable: Var, const: Const) extends Prop { private[this] val id = Sym.nextSymId override def toString = variable +"="+ const +"#"+ id diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index ca257a786c..1693bdbc8c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -70,19 +70,19 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces { private case class MacroImplBinding( // Java class name of the class that contains the macro implementation // is used to load the corresponding object with Java reflection - val className: String, + className: String, // method name of the macro implementation // `className` and `methName` are all we need to reflectively invoke a macro implementation // because macro implementations cannot be overloaded - val methName: String, + methName: String, // flattens the macro impl's parameter lists having symbols replaced with metadata // currently metadata is an index of the type parameter corresponding to that type tag (if applicable) // f.ex. for: def impl[T: WeakTypeTag, U: WeakTypeTag, V](c: Context)(x: c.Expr[T]): (U, V) = ??? // `signature` will be equal to List(-1, -1, 0, 1) - val signature: List[Int], + signature: List[Int], // type arguments part of a macro impl ref (the right-hand side of a macro definition) // these trees don't refer to a macro impl, so we can pickle them as is - val targs: List[Tree]) + targs: List[Tree]) /** Macro def -> macro impl bindings are serialized into a `macroImpl` annotation * with synthetic content that carries the payload described in `MacroImplBinding`. 
diff --git a/src/compiler/scala/tools/reflect/FrontEnd.scala b/src/compiler/scala/tools/reflect/FrontEnd.scala index f0d3d5973d..e3341a451f 100644 --- a/src/compiler/scala/tools/reflect/FrontEnd.scala +++ b/src/compiler/scala/tools/reflect/FrontEnd.scala @@ -21,7 +21,7 @@ trait FrontEnd { def hasErrors = ERROR.count > 0 def hasWarnings = WARNING.count > 0 - case class Info(val pos: Position, val msg: String, val severity: Severity) + case class Info(pos: Position, msg: String, severity: Severity) val infos = new scala.collection.mutable.LinkedHashSet[Info] /** Handles incoming info */ diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala index ab814b617d..be22003114 100644 --- a/src/compiler/scala/tools/reflect/ToolBox.scala +++ b/src/compiler/scala/tools/reflect/ToolBox.scala @@ -101,4 +101,4 @@ trait ToolBox[U <: scala.reflect.api.Universe] { /** Represents an error during toolboxing */ -case class ToolBoxError(val message: String, val cause: Throwable = null) extends Throwable(message, cause) +case class ToolBoxError(message: String, cause: Throwable = null) extends Throwable(message, cause) diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala index b121f32ba6..69e9a8fff4 100644 --- a/src/library/scala/collection/convert/Wrappers.scala +++ b/src/library/scala/collection/convert/Wrappers.scala @@ -81,7 +81,7 @@ private[collection] trait Wrappers { override def remove(i: Int) = underlying remove i } - case class JListWrapper[A](val underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] { + case class JListWrapper[A](underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] { def length = underlying.size override def isEmpty = underlying.isEmpty override def iterator: Iterator[A] = underlying.iterator @@ -272,7 +272,7 @@ private[collection] trait Wrappers { override def empty: Repr = null.asInstanceOf[Repr] } - case class JMapWrapper[A, B](val underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] { + case class JMapWrapper[A, B](underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] { override def empty = JMapWrapper(new ju.HashMap[A, B]) } @@ -298,7 +298,7 @@ private[collection] trait Wrappers { def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval) } - case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] { + case class JConcurrentMapWrapper[A, B](underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] { override def get(k: A) = { val v = underlying get k if (v != null) Some(v) diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala index 878ea94987..dd7a94d677 100644 --- a/src/library/scala/collection/mutable/AVLTree.scala +++ b/src/library/scala/collection/mutable/AVLTree.scala @@ -77,7 +77,7 @@ private case object Leaf extends AVLTree[Nothing] { /** * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11") */ -private case class Node[A](val data: A, val left: AVLTree[A], val 
right: AVLTree[A]) extends AVLTree[A] { +private case class Node[A](data: A, left: AVLTree[A], right: AVLTree[A]) extends AVLTree[A] { override val balance: Int = right.depth - left.depth override val depth: Int = math.max(left.depth, right.depth) + 1 diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala index 83aa99ad11..d91f70da75 100644 --- a/src/library/scala/collection/parallel/package.scala +++ b/src/library/scala/collection/parallel/package.scala @@ -139,7 +139,7 @@ package parallel { /** Composite throwable - thrown when multiple exceptions are thrown at the same time. */ final case class CompositeThrowable( - val throwables: Set[Throwable] + throwables: Set[Throwable] ) extends Exception( "Multiple exceptions thrown during a parallel computation: " + throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n") diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala index 7749543caa..fbfeb7d4d9 100644 --- a/src/library/scala/util/Try.scala +++ b/src/library/scala/util/Try.scala @@ -164,7 +164,7 @@ object Try { } -final case class Failure[+T](val exception: Throwable) extends Try[T] { +final case class Failure[+T](exception: Throwable) extends Try[T] { def isFailure: Boolean = true def isSuccess: Boolean = false def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = diff --git a/src/library/scala/xml/Group.scala b/src/library/scala/xml/Group.scala index 92da2f993f..2ee3941aa1 100644 --- a/src/library/scala/xml/Group.scala +++ b/src/library/scala/xml/Group.scala @@ -13,7 +13,7 @@ package scala.xml * @author Burak Emir * @version 1.0 */ -final case class Group(val nodes: Seq[Node]) extends Node { +final case class Group(nodes: Seq[Node]) extends Node { override def theSeq = nodes override def canEqual(other: Any) = other match { diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala index 651eaa3333..d9e05e77c1 100644 --- a/src/reflect/scala/reflect/api/Printers.scala +++ b/src/reflect/scala/reflect/api/Printers.scala @@ -157,7 +157,7 @@ trait Printers { self: Universe => } /** @group Printers */ - case class BooleanFlag(val value: Option[Boolean]) + case class BooleanFlag(value: Option[Boolean]) /** @group Printers */ object BooleanFlag { import scala.language.implicitConversions diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 361c009350..aa32457c10 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3024,7 +3024,7 @@ trait Types extends api.Types { self: SymbolTable => * Precondition for this class, enforced structurally: args.isEmpty && params.isEmpty. */ abstract case class TypeVar( - val origin: Type, + origin: Type, var constr: TypeConstraint ) extends Type { def untouchable = false // by other typevars diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala index 93a763792c..b4b93da3fa 100644 --- a/src/reflect/scala/reflect/macros/Parsers.scala +++ b/src/reflect/scala/reflect/macros/Parsers.scala @@ -19,4 +19,4 @@ trait Parsers { /** Indicates an error during [[scala.reflect.macros.Parsers#parse]]. 
*/ -case class ParseException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg) +case class ParseException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg) diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala index 3db7b9af02..fa27295f4e 100644 --- a/src/reflect/scala/reflect/macros/Reifiers.scala +++ b/src/reflect/scala/reflect/macros/Reifiers.scala @@ -86,10 +86,10 @@ trait Reifiers { * Such errors represent one of the standard ways for reification to go wrong, e.g. * an attempt to create a `TypeTag` from a weak type. */ -case class ReificationException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg) +case class ReificationException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg) /** Indicates an unexpected expected error during one of the `reifyXXX` methods in [[scala.reflect.macros.Reifiers]]. * Such errors wrap random crashes in reification logic and are distinguished from expected [[scala.reflect.macros.ReificationException]]s * so that the latter can be reported as compilation errors, while the former manifest themselves as compiler crashes. */ -case class UnexpectedReificationException(val pos: scala.reflect.api.Position, val msg: String, val cause: Throwable = null) extends Exception(msg, cause) +case class UnexpectedReificationException(pos: scala.reflect.api.Position, msg: String, cause: Throwable = null) extends Exception(msg, cause) diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala index 427e7854b2..09a2373205 100644 --- a/src/reflect/scala/reflect/macros/Typers.scala +++ b/src/reflect/scala/reflect/macros/Typers.scala @@ -88,4 +88,4 @@ trait Typers { /** Indicates an error during one of the methods in [[scala.reflect.macros.Typers]]. */ -case class TypecheckException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg) +case class TypecheckException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg) -- cgit v1.2.3 From bc997702ef50d8621d12f855ee17819d89c542f1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 25 Feb 2013 00:11:43 +0100 Subject: Don't wrap an array just to get its length. Use .length directly, avoiding the allocation of the WrappedArray. 
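A hedged sketch of the change (the object and value names below are made up, not from the patch): calling `size` on an `Array` only compiles by going through an implicit wrapper, which costs an allocation, whereas `length` reads the underlying JVM array field directly.

    // illustrative only; assumes plain Scala 2.x semantics for Array
    object LengthVsSize extends App {
      val xs = Array(1, 2, 3)
      val viaWrapper = xs.size      // wraps the array just to ask for its size
      val direct     = xs.length    // reads the array length directly, no wrapper
      println(viaWrapper == direct) // true; only the runtime cost differs
    }
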
--- src/compiler/scala/tools/ant/sabbus/ScalacFork.scala | 2 +- src/compiler/scala/tools/ant/sabbus/Use.scala | 4 ++-- src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 6 +++--- src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala | 2 +- src/library/scala/Array.scala | 2 +- src/library/scala/collection/mutable/FlatHashTable.scala | 2 +- src/library/scala/collection/mutable/HashTable.scala | 2 +- src/reflect/scala/reflect/io/VirtualFile.scala | 2 +- 8 files changed, 11 insertions(+), 11 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala index d5545fe76a..76820b8060 100644 --- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala +++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala @@ -119,7 +119,7 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs { return if (includedFiles.nonEmpty) - log("Compiling %d file%s to %s".format(includedFiles.size, plural(includedFiles.size), destinationDir)) + log("Compiling %d file%s to %s".format(includedFiles.length, plural(includedFiles.length), destinationDir)) argfile foreach (x => log("Using argfile file: @" + x)) diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala index 2c97232aec..5f50bb7908 100644 --- a/src/compiler/scala/tools/ant/sabbus/Use.scala +++ b/src/compiler/scala/tools/ant/sabbus/Use.scala @@ -53,9 +53,9 @@ class Use extends ScalaMatchingTask { compiler.settings.d, mapper ) map (new File(sourceDir.get, _)) - if (includedFiles.size > 0) + if (includedFiles.length > 0) try { - log("Compiling " + includedFiles.size + " file" + (if (includedFiles.size > 1) "s" else "") + " to " + compiler.settings.d.getAbsolutePath) + log("Compiling " + includedFiles.length + " file" + (if (includedFiles.length > 1) "s" else "") + " to " + compiler.settings.d.getAbsolutePath) val (errors, warnings) = compiler.compile(includedFiles) if (errors > 0) sys.error("Compilation failed with " + errors + " error" + (if (errors > 1) "s" else "") + ".") diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index b50e32899f..75a8dfff90 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -881,9 +881,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = { - val ca = new Array[Char](bytes.size) + val ca = new Array[Char](bytes.length) var idx = 0 - while(idx < bytes.size) { + while(idx < bytes.length) { val b: Byte = bytes(idx) assert((b & ~0x7f) == 0) ca(idx) = b.asInstanceOf[Char] @@ -900,7 +900,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { var prevOffset = 0 var offset = 0 var encLength = 0 - while(offset < bSeven.size) { + while(offset < bSeven.length) { val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1) val newEncLength = encLength.toLong + deltaEncLength if(newEncLength >= 65535) { diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala index 4f11d11e8f..26d19906c2 100644 --- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala +++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala @@ -35,7 +35,7 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, ch match { case 
'\t' => case CR => - if (bp < buf.size && buf(bp) == LF) { + if (bp < buf.length && buf(bp) == LF) { ch = LF bp += 1 } diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala index aede6a5d37..1848127395 100644 --- a/src/library/scala/Array.scala +++ b/src/library/scala/Array.scala @@ -240,7 +240,7 @@ object Array extends FallbackArrayBuilding { */ def concat[T: ClassTag](xss: Array[T]*): Array[T] = { val b = newBuilder[T] - b.sizeHint(xss.map(_.size).sum) + b.sizeHint(xss.map(_.length).sum) for (xs <- xss) b ++= xs b.result() } diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala index 4ffc5be7ad..2ca12549ef 100644 --- a/src/library/scala/collection/mutable/FlatHashTable.scala +++ b/src/library/scala/collection/mutable/FlatHashTable.scala @@ -77,7 +77,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { assert(size >= 0) table = new Array(capacity(sizeForThreshold(size, _loadFactor))) - threshold = newThreshold(_loadFactor, table.size) + threshold = newThreshold(_loadFactor, table.length) seedvalue = in.readInt() diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala index 83ffc4a030..37d2b51a91 100644 --- a/src/library/scala/collection/mutable/HashTable.scala +++ b/src/library/scala/collection/mutable/HashTable.scala @@ -96,7 +96,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU val smDefined = in.readBoolean() table = new Array(capacity(sizeForThreshold(_loadFactor, size))) - threshold = newThreshold(_loadFactor, table.size) + threshold = newThreshold(_loadFactor, table.length) if (smDefined) sizeMapInit(table.length) else sizemap = null diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala index b28ad9f340..8cc83b6a50 100644 --- a/src/reflect/scala/reflect/io/VirtualFile.scala +++ b/src/reflect/scala/reflect/io/VirtualFile.scala @@ -39,7 +39,7 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF /** Returns null. */ def file: JFile = null - override def sizeOption: Option[Int] = Some(content.size) + override def sizeOption: Option[Int] = Some(content.length) def input : InputStream = new ByteArrayInputStream(content) -- cgit v1.2.3 From 256e46824636881f067ea0d312b5cbcdffbcf233 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 25 Feb 2013 00:25:51 +0100 Subject: Remove redundant explicit returns. 
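A brief illustration of the idiom (the `describe` method below is hypothetical, not from the patch): in Scala the last expression of a method body or match case is already its result, so a trailing `return` is redundant and can simply be dropped, as the hunks below do.

    // hypothetical sketch, not part of the patch
    object ReturnDemo extends App {
      // was (the style this commit removes): case 0 => return "AND"
      def describe(op: Int): String = op match {
        case 0 => "AND"
        case 1 => "OR"
        case _ => "other"
      }
      println(describe(0))          // prints AND
    }
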
--- src/compiler/scala/reflect/reify/codegen/GenTypes.scala | 2 +- src/compiler/scala/tools/ant/Scaladoc.scala | 2 +- src/compiler/scala/tools/nsc/backend/icode/Primitives.scala | 6 +++--- .../scala/tools/nsc/interpreter/AbstractFileClassLoader.scala | 2 +- src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 4 ++-- src/library/scala/collection/concurrent/TrieMap.scala | 2 +- src/library/scala/collection/parallel/mutable/ParArray.scala | 6 +++--- src/library/scala/runtime/ScalaRunTime.scala | 2 +- src/library/scala/util/parsing/combinator/Parsers.scala | 2 +- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- src/reflect/scala/reflect/runtime/ReflectionUtils.scala | 2 +- 11 files changed, 16 insertions(+), 16 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala index 2370f18e3a..d389f3571b 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala @@ -155,7 +155,7 @@ trait GenTypes { */ private def reifySemiConcreteTypeMember(tpe: Type): Tree = tpe match { case tpe @ TypeRef(pre @ SingleType(prepre, presym), sym, args) if sym.isAbstractType && !sym.isExistential => - return mirrorFactoryCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args)) + mirrorFactoryCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args)) } /** Reify an annotated type, i.e. the one that makes us deal with AnnotationInfos */ diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala index 7fc811788e..5c21399092 100644 --- a/src/compiler/scala/tools/ant/Scaladoc.scala +++ b/src/compiler/scala/tools/ant/Scaladoc.scala @@ -78,7 +78,7 @@ class Scaladoc extends ScalaMatchingTask { val values = List("yes", "no", "on", "off") def getBooleanValue(value: String, flagName: String): Boolean = if (Flag.isPermissible(value)) - return ("yes".equals(value) || "on".equals(value)) + ("yes".equals(value) || "on".equals(value)) else buildError("Unknown " + flagName + " flag '" + value + "'") } diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala index 5eceb1cf6b..4fa717309e 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala @@ -230,9 +230,9 @@ trait Primitives { self: ICodes => /** Returns a string representation of this operation. 
*/ override def toString(): String = this match { - case AND => return "AND" - case OR => return "OR" - case XOR => return "XOR" + case AND => "AND" + case OR => "OR" + case XOR => "XOR" case _ => throw new RuntimeException("LogicalOp unknown case") } } diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala index 9fbd337b9d..3b272aee32 100644 --- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala @@ -52,7 +52,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) return null } - return file + file } // parent delegation in JCL uses getResource; so either add parent.getResAsStream diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 965612f926..124dd6c995 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -481,14 +481,14 @@ abstract class ExplicitOuter extends InfoTransform // at least don't crash... this duplicates maybeOmittable from constructors (acc.owner.isEffectivelyFinal && !acc.isOverridingSymbol)) { unit.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.") - return transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors) + transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors) } else { // println("(base, acc)= "+(base, acc)) val outerSelect = localTyper typed Apply(Select(base, acc), Nil) // achieves the same as: localTyper typed atPos(tree.pos)(outerPath(base, base.tpe.typeSymbol, outerFor.outerClass)) // println("(b, tpsym, outerForI, outerFor, outerClass)= "+ (base, base.tpe.typeSymbol, outerFor, sel.symbol.owner, outerFor.outerClass)) // println("outerSelect = "+ outerSelect) - return transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args)) + transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args)) } case _ => diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 6bf9c1056a..491770dcf6 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -250,7 +250,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct) else { if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct) - else return RESTART // used to be throw RestartException + else RESTART // used to be throw RestartException } case sn: SNode[K, V] => // 2) singleton node if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef] diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index 68c43e682e..f9563cacc7 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -241,7 +241,7 @@ self => if (p(a(j).asInstanceOf[T])) j += 1 else return false } - return true + true } override def exists(p: T => Boolean): Boolean = { @@ -269,7 +269,7 @@ self => if 
(p(a(j).asInstanceOf[T])) return true else j += 1 } - return false + false } override def find(p: T => Boolean): Option[T] = { @@ -298,7 +298,7 @@ self => if (p(elem)) return Some(elem) else j += 1 } - return None + None } override def drop(n: Int): ParArrayIterator = { diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index 1a79e6da73..753dd0205e 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -227,7 +227,7 @@ object ScalaRunTime { if (iv == fv) return iv val lv = fv.toLong - if (lv == fv) return hash(lv) + if (lv == fv) hash(lv) else fv.hashCode } def hash(lv: Long): Int = { diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala index ead444653e..542a781b60 100644 --- a/src/library/scala/util/parsing/combinator/Parsers.scala +++ b/src/library/scala/util/parsing/combinator/Parsers.scala @@ -758,7 +758,7 @@ trait Parsers { if (elems.length == num) Success(elems.toList, in0) else p0(in0) match { case Success(x, rest) => elems += x ; applyp(rest) - case ns: NoSuccess => return ns + case ns: NoSuccess => ns } applyp(in) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index fbf14e8156..03419dd576 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -904,7 +904,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (isAliasType) return true if (isType && isNonClassType) return false if (isRefinementClass) return false - return true + true } /** The variance of this symbol. */ diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala index 7b093e0e80..aebaea40af 100644 --- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala +++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala @@ -43,7 +43,7 @@ private[scala] object ReflectionUtils { def isAbstractFileClassLoader(clazz: Class[_]): Boolean = { if (clazz == null) return false if (clazz.getName == "scala.tools.nsc.interpreter.AbstractFileClassLoader") return true - return isAbstractFileClassLoader(clazz.getSuperclass) + isAbstractFileClassLoader(clazz.getSuperclass) } def inferClasspath(cl: ClassLoader): String = cl match { case cl: java.net.URLClassLoader => -- cgit v1.2.3 From 6d94b35270485a5ec64f32035537c3c4c0f02dae Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 25 Feb 2013 00:49:39 +0100 Subject: Modernize legacy backquotes in comments. 
Was: ``blah'' Now: `blah` --- src/compiler/scala/reflect/reify/Reifier.scala | 6 +++--- .../scala/reflect/reify/codegen/GenTrees.scala | 14 +++++++------- .../scala/reflect/reify/codegen/GenTypes.scala | 2 +- .../scala/reflect/reify/phases/Calculate.scala | 2 +- .../scala/reflect/reify/phases/Metalevels.scala | 8 ++++---- .../scala/reflect/reify/phases/Reify.scala | 2 +- .../scala/reflect/reify/phases/Reshape.scala | 10 +++++----- src/compiler/scala/tools/nsc/doc/DocFactory.scala | 2 +- .../scala/tools/nsc/typechecker/Implicits.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 6 +++--- src/compiler/scala/tools/nsc/util/DocStrings.scala | 2 +- src/compiler/scala/tools/reflect/package.scala | 2 +- src/library/scala/util/Either.scala | 4 ++-- .../scala/util/parsing/input/Position.scala | 4 ++-- src/reflect/scala/reflect/internal/Flags.scala | 4 ++-- .../internal/util/StripMarginInterpolator.scala | 2 +- src/reflect/scala/reflect/macros/Enclosures.scala | 8 ++++---- src/reflect/scala/reflect/macros/Reifiers.scala | 22 +++++++++++----------- src/reflect/scala/reflect/macros/TreeBuilder.scala | 2 +- 19 files changed, 52 insertions(+), 52 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala index b3224b1aa6..9cf069fe98 100644 --- a/src/compiler/scala/reflect/reify/Reifier.scala +++ b/src/compiler/scala/reflect/reify/Reifier.scala @@ -6,9 +6,9 @@ import scala.reflect.macros.UnexpectedReificationException import scala.reflect.reify.utils.Utils /** Given a tree or a type, generate a tree that when executed at runtime produces the original tree or type. - * See more info in the comments to ``reify'' in scala.reflect.api.Universe. + * See more info in the comments to `reify` in scala.reflect.api.Universe. * - * @author Martin Odersky + * @author Martin Odersky * @version 2.10 */ abstract class Reifier extends States @@ -32,7 +32,7 @@ abstract class Reifier extends States override def hasReifier = true /** - * For ``reifee'' and other reification parameters, generate a tree of the form + * For `reifee` and other reification parameters, generate a tree of the form * * { * val $u: universe.type = <[ universe ]> diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala index df2eeaa932..78bdf7e132 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala @@ -15,7 +15,7 @@ trait GenTrees { /** * Reify a tree. - * For internal use only, use ``reified'' instead. + * For internal use only, use `reified` instead. */ def reifyTree(tree: Tree): Tree = { assert(tree != null, "tree is null") @@ -29,12 +29,12 @@ trait GenTrees { // the idea behind the new reincarnation of reifier is a simple maxim: // - // never call ``reifyType'' to reify a tree + // never call `reifyType` to reify a tree // // this works because the stuff we are reifying was once represented with trees only // and lexical scope information can be fully captured by reifying symbols // - // to enable this idyll, we work hard in the ``Reshape'' phase + // to enable this idyll, we work hard in the `Reshape` phase // which replaces all types with equivalent trees and works around non-idempotencies of the typechecker // // why bother? 
because this brings method to the madness @@ -65,7 +65,7 @@ trait GenTrees { } // usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation - // however, reification of AnnotatedTypes is special. see ``reifyType'' to find out why. + // however, reification of AnnotatedTypes is special. see `reifyType` to find out why. if (reifyTreeSymbols && tree.hasSymbolField) { if (reifyDebug) println("reifying symbol %s for tree %s".format(tree.symbol, tree)) rtree = mirrorBuildCall(nme.setSymbol, rtree, reify(tree.symbol)) @@ -86,13 +86,13 @@ trait GenTrees { case TreeSplice(splicee) => if (reifyDebug) println("splicing " + tree) - // see ``Metalevels'' for more info about metalevel breaches + // see `Metalevels` for more info about metalevel breaches // and about how we deal with splices that contain them val isMetalevelBreach = splicee exists (sub => sub.hasSymbolField && sub.symbol != NoSymbol && sub.symbol.metalevel > 0) val isRuntimeEval = splicee exists (sub => sub.hasSymbolField && sub.symbol == ExprSplice) if (isMetalevelBreach || isRuntimeEval) { // we used to convert dynamic splices into runtime evals transparently, but we no longer do that - // why? see comments in ``Metalevels'' + // why? see comments in `Metalevels` // if (reifyDebug) println("splicing has failed: cannot splice when facing a metalevel breach") // EmptyTree CannotReifyRuntimeSplice(tree) @@ -102,7 +102,7 @@ trait GenTrees { // we intentionally don't care about the prefix (the first underscore in the `RefiedTree` pattern match) case ReifiedTree(_, _, inlinedSymtab, rtree, _, _, _) => if (reifyDebug) println("inlining the splicee") - // all free vars local to the enclosing reifee should've already been inlined by ``Metalevels'' + // all free vars local to the enclosing reifee should've already been inlined by `Metalevels` for (sym <- inlinedSymtab.syms if sym.isLocalToReifee) abort("local free var, should have already been inlined by Metalevels: " + inlinedSymtab.symDef(sym)) state.symtab ++= inlinedSymtab diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala index d389f3571b..6c94726231 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala @@ -9,7 +9,7 @@ trait GenTypes { /** * Reify a type. - * For internal use only, use ``reified'' instead. + * For internal use only, use `reified` instead. */ def reifyType(tpe: Type): Tree = { assert(tpe != null, "tpe is null") diff --git a/src/compiler/scala/reflect/reify/phases/Calculate.scala b/src/compiler/scala/reflect/reify/phases/Calculate.scala index 5566fd7a77..abd179b24b 100644 --- a/src/compiler/scala/reflect/reify/phases/Calculate.scala +++ b/src/compiler/scala/reflect/reify/phases/Calculate.scala @@ -29,7 +29,7 @@ trait Calculate { * Merely traverses the reifiee and records local symbols along with their metalevels. 
*/ val calculate = new Traverser { - // see the explanation of metalevels in ``Metalevels'' + // see the explanation of metalevels in `Metalevels` var currMetalevel = 1 override def traverse(tree: Tree): Unit = tree match { diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala index cccf080dbf..18ea908cdf 100644 --- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala +++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala @@ -40,15 +40,15 @@ trait Metalevels { * However, how exactly do we do that in the case of y.splice? In this very scenario we can use dataflow analysis and inline it, * but what if y were a var, and what if it were calculated randomly at runtime? * - * This question has a genuinely simple answer. Sure, we cannot resolve such splices statically (i.e. during macro expansion of ``reify''), + * This question has a genuinely simple answer. Sure, we cannot resolve such splices statically (i.e. during macro expansion of `reify`), * but now we have runtime toolboxes, so noone stops us from picking up that reified tree and evaluating it at runtime - * (in fact, this is something that ``Expr.splice'' does transparently). + * (in fact, this is something that `Expr.splice` does transparently). * * This is akin to early vs late binding dilemma. * The prior is faster, plus, the latter (implemented with reflection) might not work because of visibility issues or might be not available on all platforms. * But the latter still has its uses, so I'm allowing metalevel breaches, but introducing the -Xlog-runtime-evals to log them. * - * upd. We no longer do that. In case of a runaway ``splice'' inside a `reify`, one will get a static error. + * upd. We no longer do that. In case of a runaway `splice` inside a `reify`, one will get a static error. * Why? Unfortunately, the cute idea of transparently converting between static and dynamic splices has failed. * 1) Runtime eval that services dynamic splices requires scala-compiler.jar, which might not be on library classpath * 2) Runtime eval incurs a severe performance penalty, so it'd better to be explicit about it @@ -136,7 +136,7 @@ trait Metalevels { } else { withinSplice { super.transform(tree) } } - // todo. also inline usages of ``inlineableBindings'' in the symtab itself + // todo. also inline usages of `inlineableBindings` in the symtab itself // e.g. a free$Foo can well use free$x, if Foo is path-dependent w.r.t x // FreeRef(_, _) check won't work, because metalevels of symbol table and body are different, hence, freerefs in symbol table look different from freerefs in body case FreeRef(_, name) if inlineableBindings contains name => diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala index eda4cba2bf..143424dac5 100644 --- a/src/compiler/scala/reflect/reify/phases/Reify.scala +++ b/src/compiler/scala/reflect/reify/phases/Reify.scala @@ -35,7 +35,7 @@ trait Reify extends GenSymbols /** * Reifies any supported value. - * For internal use only, use ``reified'' instead. + * For internal use only, use `reified` instead. 
*/ def reify(reifee: Any): Tree = reifyStack.push(reifee)(reifee match { // before adding some case here, in global scope, please, consider diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index 4c27ba4da1..50ee379c2e 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -130,8 +130,8 @@ trait Reshape { * * NB: This is the trickiest part of reification! * - * In most cases, we're perfectly fine to reify a Type itself (see ``reifyType''). - * However if the type involves a symbol declared inside the quasiquote (i.e. registered in ``boundSyms''), + * In most cases, we're perfectly fine to reify a Type itself (see `reifyType`). + * However if the type involves a symbol declared inside the quasiquote (i.e. registered in `boundSyms`), * then we cannot reify it, or otherwise subsequent reflective compilation will fail. * * Why will it fail? Because reified deftrees (e.g. ClassDef(...)) will generate fresh symbols during that compilation, @@ -139,7 +139,7 @@ trait Reshape { * https://issues.scala-lang.org/browse/SI-5230 * * To deal with this unpleasant fact, we need to fall back from types to equivalent trees (after all, parser trees don't contain any types, just trees, so it should be possible). - * Luckily, these original trees get preserved for us in the ``original'' field when Trees get transformed into TypeTrees. + * Luckily, these original trees get preserved for us in the `original` field when Trees get transformed into TypeTrees. * And if an original of a type tree is empty, we can safely assume that this type is non-essential (e.g. was inferred/generated by the compiler). * In that case the type can be omitted (e.g. reified as an empty TypeTree), since it will be inferred again later on. * @@ -156,8 +156,8 @@ trait Reshape { * upd. There are also problems with CompoundTypeTrees. I had to use attachments to retain necessary information. * * upd. Recently I went ahead and started using original for all TypeTrees, regardless of whether they refer to local symbols or not. - * As a result, ``reifyType'' is never called directly by tree reification (and, wow, it seems to work great!). - * The only usage of ``reifyType'' now is for servicing typetags, however, I have some ideas how to get rid of that as well. + * As a result, `reifyType` is never called directly by tree reification (and, wow, it seems to work great!). + * The only usage of `reifyType` now is for servicing typetags, however, I have some ideas how to get rid of that as well. */ private def isDiscarded(tt: TypeTree) = tt.original == null private def toPreTyperTypeTree(tt: TypeTree): Tree = { diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala index d63881170e..f203a5eeb7 100644 --- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala @@ -14,7 +14,7 @@ import scala.reflect.internal.util.BatchSourceFile * documentation, which is as follows. * * * A simplified compiler instance (with only the front-end phases enabled) - * * is created, and additional ''sourceless'' comments are registered. + * * is created, and additional `sourceless` comments are registered. * * Documentable files are compiled, thereby filling the compiler's symbol table. * * A documentation model is extracted from the post-compilation symbol table. 
* * A generator is used to transform the model into the correct final format (HTML). diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 788825a6b6..c0391448d1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1113,7 +1113,7 @@ trait Implicits { case ThisType(thisSym) => gen.mkAttributedThis(thisSym) case _ => - // if ``pre'' is not a PDT, e.g. if someone wrote + // if `pre` is not a PDT, e.g. if someone wrote // implicitly[scala.reflect.macros.Context#TypeTag[Int]] // then we need to fail, because we don't know the prefix to use during type reification // upd. we also need to fail silently, because this is a very common situation diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index ef3414f446..d8d3c37ba6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -181,7 +181,7 @@ trait Typers extends Adaptations with Tags { def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree = inferView(tree, from, to, reportAmbiguous, true) - /** Infer an implicit conversion (``view'') between two types. + /** Infer an implicit conversion (`view`) between two types. * @param tree The tree which needs to be converted. * @param from The source type of the conversion * @param to The target type of the conversion @@ -1964,14 +1964,14 @@ trait Typers extends Adaptations with Tags { } /** Remove definition annotations from modifiers (they have been saved - * into the symbol's ``annotations'' in the type completer / namer) + * into the symbol's `annotations` in the type completer / namer) * * However reification does need annotation definitions to proceed. * Unfortunately, AnnotationInfo doesn't provide enough info to reify it in general case. * The biggest problem is with the "atp: Type" field, which cannot be reified in some situations * that involve locally defined annotations. See more about that in Reifiers.scala. * - * That's why the original tree gets saved into ``original'' field of AnnotationInfo (happens elsewhere). + * That's why the original tree gets saved into `original` field of AnnotationInfo (happens elsewhere). * The field doesn't get pickled/unpickled and exists only during a single compilation run. * This simultaneously allows us to reify annotations and to preserve backward compatibility. */ diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala index dde53dc640..ba44126df2 100755 --- a/src/compiler/scala/tools/nsc/util/DocStrings.scala +++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala @@ -74,7 +74,7 @@ object DocStrings { else idx :: findAll(str, idx)(p) } - /** Produces a string index, which is a list of ``sections'', i.e + /** Produces a string index, which is a list of `sections`, i.e * pairs of start/end positions of all tagged sections in the string. 
* Every section starts with an at sign and extends to the next at sign, * or to the end of the comment string, but excluding the final two diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala index bf533766d0..968b0d0863 100644 --- a/src/compiler/scala/tools/reflect/package.scala +++ b/src/compiler/scala/tools/reflect/package.scala @@ -32,7 +32,7 @@ package object reflect { /** Creates a reporter that prints messages to the console according to the settings. * - * ``minSeverity'' determines minimum severity of the messages to be printed. + * `minSeverity` determines minimum severity of the messages to be printed. * 0 stands for INFO, 1 stands for WARNING and 2 stands for ERROR. */ // todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here! diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index 864d8953c4..5cd35ab6d9 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -21,7 +21,7 @@ import scala.language.implicitConversions * [[scala.util.Right]] takes the place of [[scala.Some]]. Convention dictates * that Left is used for failure and Right is used for success. * - * For example, you could use ``Either[String, Int]`` to detect whether a + * For example, you could use `Either[String, Int]` to detect whether a * received input is a String or an Int. * * {{{ @@ -205,7 +205,7 @@ final case class Right[+A, +B](b: B) extends Either[A, B] { object Either { /** - * Allows use of a ``merge`` method to extract values from Either instances + * Allows use of a `merge` method to extract values from Either instances * regardless of whether they are Left or Right. * * {{{ diff --git a/src/library/scala/util/parsing/input/Position.scala b/src/library/scala/util/parsing/input/Position.scala index 31715bd8da..5e0cbbff5e 100644 --- a/src/library/scala/util/parsing/input/Position.scala +++ b/src/library/scala/util/parsing/input/Position.scala @@ -8,13 +8,13 @@ package scala.util.parsing.input -/** `Position` is the base trait for objects describing a position in a ``document''. +/** `Position` is the base trait for objects describing a position in a `document`. * * It provides functionality for: * - generating a visual representation of this position (`longString`); * - comparing two positions (`<`). * - * To use this class for a concrete kind of ``document'', implement the `lineContents` method. + * To use this class for a concrete kind of `document`, implement the `lineContents` method. 
* * @author Martin Odersky * @author Adriaan Moors diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index 06f6c46fc3..1987f34474 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -175,8 +175,8 @@ class Flags extends ModifierFlags { final val VBRIDGE = 1L << 42 // symbol is a varargs bridge final val VARARGS = 1L << 43 // symbol is a Java-style varargs method - final val TRIEDCOOKING = 1L << 44 // ``Cooking'' has been tried on this symbol - // A Java method's type is ``cooked'' by transforming raw types to existentials + final val TRIEDCOOKING = 1L << 44 // `Cooking` has been tried on this symbol + // A Java method's type is `cooked` by transforming raw types to existentials final val SYNCHRONIZED = 1L << 45 // symbol is a method which should be marked ACC_SYNCHRONIZED diff --git a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala index e7579229b2..9259c5abf1 100644 --- a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala +++ b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala @@ -6,7 +6,7 @@ trait StripMarginInterpolator { def stringContext: StringContext /** - * A safe combination of `[[scala.collection.immutable.StringLike#stripMargin]] + * A safe combination of [[scala.collection.immutable.StringLike#stripMargin]] * and [[scala.StringContext#raw]]. * * The margin of each line is defined by whitespace leading up to a '|' character. diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala index 723b94016d..fd91333dae 100644 --- a/src/reflect/scala/reflect/macros/Enclosures.scala +++ b/src/reflect/scala/reflect/macros/Enclosures.scala @@ -34,8 +34,8 @@ trait Enclosures { * Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion. * * Is also priceless for emitting sane error messages for macros that are called by other macros on synthetic (i.e. position-less) trees. - * In that dire case navigate the ``enclosingMacros'' stack, and it will most likely contain at least one macro with a position-ful macro application. - * See ``enclosingPosition'' for a default implementation of this logic. + * In that dire case navigate the `enclosingMacros` stack, and it will most likely contain at least one macro with a position-ful macro application. + * See `enclosingPosition` for a default implementation of this logic. * * Unlike `openMacros`, this is a val, which means that it gets initialized when the context is created * and always stays the same regardless of whatever happens during macro expansion. @@ -51,9 +51,9 @@ trait Enclosures { def enclosingImplicits: List[(Type, Tree)] /** Tries to guess a position for the enclosing application. - * But that is simple, right? Just dereference ``pos'' of ``macroApplication''? Not really. + * But that is simple, right? Just dereference `pos` of `macroApplication`? Not really. * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggerd this expansion. - * Surprisingly, quite often we can do this by navigation the ``enclosingMacros'' stack. + * Surprisingly, quite often we can do this by navigation the `enclosingMacros` stack. 
*/ def enclosingPosition: Position diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala index fa27295f4e..1eae3e3fce 100644 --- a/src/reflect/scala/reflect/macros/Reifiers.scala +++ b/src/reflect/scala/reflect/macros/Reifiers.scala @@ -11,16 +11,16 @@ trait Reifiers { self: Context => /** Given a tree, generate a tree that when compiled and executed produces the original tree. - * For more information and examples see the documentation for ``Universe.reify''. + * For more information and examples see the documentation for `Universe.reify`. * - * The produced tree will be bound to the specified ``universe'' and ``mirror''. - * Possible values for ``universe'' include ``universe.treeBuild.mkRuntimeUniverseRef''. - * Possible values for ``mirror'' include ``EmptyTree'' (in that case the reifier will automatically pick an appropriate mirror). + * The produced tree will be bound to the specified `universe` and `mirror`. + * Possible values for `universe` include `universe.treeBuild.mkRuntimeUniverseRef`. + * Possible values for `mirror` include `EmptyTree` (in that case the reifier will automatically pick an appropriate mirror). * - * This function is deeply connected to ``Universe.reify'', a macro that reifies arbitrary expressions into runtime trees. - * They do very similar things (``Universe.reify'' calls ``Context.reifyTree'' to implement itself), but they operate on different metalevels (see below). + * This function is deeply connected to `Universe.reify`, a macro that reifies arbitrary expressions into runtime trees. + * They do very similar things (`Universe.reify` calls `Context.reifyTree` to implement itself), but they operate on different metalevels (see below). * - * Let's study the differences between ``Context.reifyTree'' and ``Universe.reify'' on an example of using them inside a ``fooMacro'' macro: + * Let's study the differences between `Context.reifyTree` and `Universe.reify` on an example of using them inside a `fooMacro` macro: * * * Since reify itself is a macro, it will be executed when fooMacro is being compiled (metalevel -1) * and will produce a tree that when evaluated during macro expansion of fooMacro (metalevel 0) will recreate the input tree. @@ -39,7 +39,7 @@ trait Reifiers { * * The result of compiling and running the result of reify will be bound to the Universe that called reify. * This is possible because it's a macro, so it can generate whatever code it wishes. * - * * The result of compiling and running the result of reifyTree will be the ``prefix'' that needs to be passed explicitly. + * * The result of compiling and running the result of reifyTree will be the `prefix` that needs to be passed explicitly. * This happens because the Universe of the evaluated result is from a different metalevel than the Context the called reify. * * Typical usage of this function is to retain some of the trees received/created by a macro @@ -48,13 +48,13 @@ trait Reifiers { def reifyTree(universe: Tree, mirror: Tree, tree: Tree): Tree /** Given a type, generate a tree that when compiled and executed produces the original type. - * The produced tree will be bound to the specified ``universe'' and ``mirror''. - * For more information and examples see the documentation for ``Context.reifyTree'' and ``Universe.reify''. + * The produced tree will be bound to the specified `universe` and `mirror`. + * For more information and examples see the documentation for `Context.reifyTree` and `Universe.reify`. 
*/ def reifyType(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean = false): Tree /** Given a type, generate a tree that when compiled and executed produces the runtime class of the original type. - * If ``concrete'' is true, then this function will bail on types, who refer to abstract types (like `ClassTag` does). + * If `concrete` is true, then this function will bail on types, who refer to abstract types (like `ClassTag` does). */ def reifyRuntimeClass(tpe: Type, concrete: Boolean = true): Tree diff --git a/src/reflect/scala/reflect/macros/TreeBuilder.scala b/src/reflect/scala/reflect/macros/TreeBuilder.scala index bdd5dc8a96..19230010e6 100644 --- a/src/reflect/scala/reflect/macros/TreeBuilder.scala +++ b/src/reflect/scala/reflect/macros/TreeBuilder.scala @@ -72,6 +72,6 @@ abstract class TreeBuilder { def mkNullaryCall(method: Symbol, targs: List[Type]): Tree - /** A tree that refers to the runtime reflexive universe, ``scala.reflect.runtime.universe''. */ + /** A tree that refers to the runtime reflexive universe, `scala.reflect.runtime.universe`. */ def mkRuntimeUniverseRef: Tree } -- cgit v1.2.3 From d6527d5083d77f67d08749b800938c97e0fcf13a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 25 Feb 2013 01:02:10 +0100 Subject: Address some Scaladocrot - @param tags whose name drifted from the corresponding parameter - Remove or complete a few stray stub comments (@param foo ...) - Use @tparam where appropriate. --- src/compiler/scala/tools/ant/Pack200Task.scala | 2 +- src/compiler/scala/tools/ant/Scalac.scala | 2 +- src/compiler/scala/tools/ant/Scaladoc.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 4 ++-- src/compiler/scala/tools/nsc/ast/DocComments.scala | 9 ++++----- .../scala/tools/nsc/backend/icode/BasicBlocks.scala | 4 ---- src/compiler/scala/tools/nsc/doc/Settings.scala | 2 +- .../scala/tools/nsc/doc/base/CommentFactoryBase.scala | 2 -- src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala | 2 +- src/compiler/scala/tools/nsc/doc/html/Page.scala | 4 ++-- .../scala/tools/nsc/doc/model/CommentFactory.scala | 2 -- .../scala/tools/nsc/interactive/CompilerControl.scala | 3 --- src/compiler/scala/tools/nsc/io/Lexer.scala | 2 +- src/compiler/scala/tools/nsc/io/Pickler.scala | 14 +++++++------- src/compiler/scala/tools/nsc/transform/Mixin.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 13 +++++-------- .../scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 7 ------- .../scala/collection/parallel/mutable/ParHashSet.scala | 2 +- src/reflect/scala/reflect/internal/Names.scala | 6 +++--- .../scala/reflect/internal/pickling/UnPickler.scala | 4 ++-- src/reflect/scala/reflect/runtime/JavaMirrors.scala | 2 +- 22 files changed, 35 insertions(+), 57 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/ant/Pack200Task.scala b/src/compiler/scala/tools/ant/Pack200Task.scala index 3180911414..3c1bc8cad9 100644 --- a/src/compiler/scala/tools/ant/Pack200Task.scala +++ b/src/compiler/scala/tools/ant/Pack200Task.scala @@ -65,7 +65,7 @@ class Pack200Task extends ScalaMatchingTask { /** Set the flag to specify if file reordering should be performed. Reordering * is used to remove empty packages and improve pack200 optimization. - * @param keep + * @param x * `'''true'''` to retain file ordering. * `'''false'''` to optimize directory structure (DEFAULT). 
*/ def setKeepFileOrder(x: Boolean) { keepFileOrder = x } diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala index e6bd32c757..2a9567b567 100644 --- a/src/compiler/scala/tools/ant/Scalac.scala +++ b/src/compiler/scala/tools/ant/Scalac.scala @@ -496,7 +496,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared { path.map(asString) mkString File.pathSeparator /** Transforms a file into a Scalac-readable string. - * @param path A file to convert. + * @param file A file to convert. * @return A string-representation of the file like `/x/k/a.scala`. */ protected def asString(file: File): String = file.getAbsolutePath() diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala index 5c21399092..fd6d637212 100644 --- a/src/compiler/scala/tools/ant/Scaladoc.scala +++ b/src/compiler/scala/tools/ant/Scaladoc.scala @@ -563,7 +563,7 @@ class Scaladoc extends ScalaMatchingTask { /** Transforms a file into a Scalac-readable string. * - * @param path A file to convert. + * @param file A file to convert. * @return A string-representation of the file like `/x/k/a.scala`. */ private def asString(file: File): String = diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 304bdf1536..fea9e72512 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -805,8 +805,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Invalidates packages that contain classes defined in a classpath entry, and * rescans that entry. - * @param path A fully qualified name that refers to a directory or jar file that's - * an entry on the classpath. + * @param paths Fully qualified names that refer to directories or jar files that are + * a entries on the classpath. * First, causes the classpath entry referred to by `path` to be rescanned, so that * any new files or deleted files or changes in subpackages are picked up. * Second, invalidates any packages for which one of the following considitions is met: diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index 7e6a323d3d..f86f45fb43 100755 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -303,7 +303,6 @@ trait DocComments { self: Global => /** Lookup definition of variable. * * @param vble The variable for which a definition is searched - * @param owner The current owner in which variable definitions are searched. * @param site The class for which doc comments are generated */ def lookupVariable(vble: String, site: Symbol): Option[String] = site match { @@ -322,10 +321,10 @@ trait DocComments { self: Global => /** Expand variable occurrences in string `str`, until a fix point is reached or * an expandLimit is exceeded. 
* - * @param str The string to be expanded - * @param sym The symbol for which doc comments are generated - * @param site The class for which doc comments are generated - * @return Expanded string + * @param initialStr The string to be expanded + * @param sym The symbol for which doc comments are generated + * @param site The class for which doc comments are generated + * @return Expanded string */ protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol): String = { val expandLimit = 10 diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index cc10479ca1..917fe8b292 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -263,10 +263,6 @@ trait BasicBlocks { /** Replaces `oldInstr` with `is`. It does not update * the position field in the newly inserted instructions, so it behaves * differently than the one-instruction versions of this function. - * - * @param iold .. - * @param is .. - * @return .. */ def replaceInstruction(oldInstr: Instruction, is: List[Instruction]): Boolean = { assert(closed, "Instructions can be replaced only after the basic block is closed") diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala index 75312e2279..90b94e1336 100644 --- a/src/compiler/scala/tools/nsc/doc/Settings.scala +++ b/src/compiler/scala/tools/nsc/doc/Settings.scala @@ -11,7 +11,7 @@ import scala.language.postfixOps /** An extended version of compiler settings, with additional Scaladoc-specific options. * @param error A function that prints a string to the appropriate error stream - * @param print A function that prints the string, without any extra boilerplate of error */ + * @param printMsg A function that prints the string, without any extra boilerplate of error */ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) { /** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala index 5a3dffbf16..a308292811 100755 --- a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -17,8 +17,6 @@ import scala.language.postfixOps * Call `parse` to run the parser. Note that the parser is stateless and * should only be built once for a given Scaladoc run. * - * @param reporter The reporter on which user messages (error, warnings) should be printed. - * * @author Manohar Jonnalagedda * @author Gilles Dubochet */ trait CommentFactoryBase { this: MemberLookupBase => diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala index f81f55b934..d721a96ad7 100644 --- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala @@ -103,7 +103,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) { /** Generates the Scaladoc site for a model into the site root. * A scaladoc site is a set of HTML and related files * that document a model extracted from a compiler run. - * @param model The model to generate in the form of a sequence of packages. 
*/ + */ def generate() { def copyResource(subPath: String) { diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/compiler/scala/tools/nsc/doc/html/Page.scala index ef9beb1dce..91939cf3de 100644 --- a/src/compiler/scala/tools/nsc/doc/html/Page.scala +++ b/src/compiler/scala/tools/nsc/doc/html/Page.scala @@ -45,7 +45,7 @@ abstract class Page { /** Writes this page as a file. The file's location is relative to the * generator's site root, and the encoding is also defined by the generator. - * @param generator The generator that is writing this page. */ + * @param site The generator that is writing this page. */ def writeFor(site: HtmlFactory): Unit def kindToString(mbr: MemberEntity) = @@ -84,7 +84,7 @@ abstract class Page { } /** A relative link from this page to some destination class entity. - * @param destEntity The class or object entity that the link will point to. */ + * @param destClass The class or object entity that the link will point to. */ def relativeLinkTo(destClass: TemplateEntity): String = relativeLinkTo(templateToPath(destClass)) diff --git a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala index 9ba89146c0..574d6b04f8 100644 --- a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala @@ -18,8 +18,6 @@ import scala.language.postfixOps * Call `parse` to run the parser. Note that the parser is stateless and * should only be built once for a given Scaladoc run. * - * @param reporter The reporter on which user messages (error, warnings) should be printed. - * * @author Manohar Jonnalagedda * @author Gilles Dubochet */ trait CommentFactory extends base.CommentFactoryBase { diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala index c779403fad..a81604235b 100644 --- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala +++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala @@ -233,9 +233,6 @@ trait CompilerControl { self: Global => * prints its output and all defined values in a comment column. * * @param source The source file to be analyzed - * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards. - * If keepLoaded is `false` the operation is run at low priority, only after - * everything is brought up to date in a regular type checker run. * @param response The response. */ @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala index aed6e882e6..b50b01aa27 100644 --- a/src/compiler/scala/tools/nsc/io/Lexer.scala +++ b/src/compiler/scala/tools/nsc/io/Lexer.scala @@ -278,7 +278,7 @@ class Lexer(rd: Reader) { /** The current token is a delimiter consisting of given character, reads next token, * otherwise raises an error. - * @param c the given delimiter character to compare current token with + * @param ch the given delimiter character to compare current token with * @throws MalformedInput if the current token `token` is not a delimiter, or * consists of a character different from `c`. 
*/ diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala index 862046eb66..43a6ef3c61 100644 --- a/src/compiler/scala/tools/nsc/io/Pickler.scala +++ b/src/compiler/scala/tools/nsc/io/Pickler.scala @@ -18,7 +18,7 @@ import scala.reflect.ClassTag * Subclasses of `Pickler` each can write and read individual classes * of values. * - * @param T the type of values handled by this pickler. + * @tparam T the type of values handled by this pickler. * * These Picklers build on the work of Andrew Kennedy. They are most closely inspired by * Iulian Dragos' picklers for Scala to XML. See: @@ -71,8 +71,8 @@ abstract class Pickler[T] { def wrapped [U] (in: T => U)(out: U => T): Pickler[U] = wrappedPickler(this)(in)(out) /** A conditional pickler obtained from the current pickler. - * @param cond the condition to test to find out whether pickler can handle - * some Scala value. + * @param p the condition to test to find out whether pickler can handle + * some Scala value. */ def cond(p: Any => Boolean): CondPickler[T] = conditionalPickler(this, p) @@ -87,7 +87,7 @@ object Pickler { /** A base class representing unpickler result. It has two subclasses: * `UnpickleSucess` for successful unpicklings and `UnpickleFailure` for failures, * where a value of the given type `T` could not be unpickled from input. - * @param T the type of unpickled values in case of success. + * @tparam T the type of unpickled values in case of success. */ abstract class Unpickled[+T] { /** Transforms success values to success values using given function, @@ -125,7 +125,7 @@ object Pickler { } /** A class representing successful unpicklings - * @param T the type of the unpickled value + * @tparam T the type of the unpickled value * @param result the unpickled value */ case class UnpickleSuccess[+T](result: T) extends Unpickled[T] @@ -361,8 +361,8 @@ abstract class CondPickler[T](val canPickle: Any => Boolean) extends Pickler[T] * To unpickle a value, this unpickler is tried first. If it cannot read * the input (as indicated by a `UnpickleFailure` result), then the * alternative pickler is tried. - * @param V The handled type of the returned pickler. - * @param U The handled type of the alternative pickler. + * @tparam V The handled type of the returned pickler. + * @tparam U The handled type of the alternative pickler. * @param that The alternative pickler. */ def | [V >: T, U <: V] (that: => CondPickler[U]): CondPickler[V] = diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index e33d665cd0..74459efc92 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -821,8 +821,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * Private fields used only in this initializer are subsequently set to null. 
* * @param clazz The class symbol + * @param lzyVal The symbol of this lazy field * @param init The tree which initializes the field ( f = ) - * @param fieldSym The symbol of this lazy field * @param offset The offset of this field in the flags bitmap * * The result will be a tree of the form diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index d593694ce1..db3759d65f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -618,7 +618,7 @@ trait Infer extends Checkable { * * @param tparams the type parameters of the method * @param formals the value parameter types of the method - * @param restp the result type of the method + * @param restpe the result type of the method * @param argtpes the argument types of the application * @param pt the expected return type of the application * @return @see adjustTypeArgs @@ -830,14 +830,11 @@ trait Infer extends Checkable { * such that function type `ftpe` is applicable to * `argtpes` and its result conform to `pt`? * - * @param undetparams ... * @param ftpe the type of the function (often a MethodType) - * @param argtpes the argument types; a NamedType(name, tp) for named + * @param argtpes0 the argument types; a NamedType(name, tp) for named * arguments. For each NamedType, if `name` does not exist in `ftpe`, that * type is set to `Unit`, i.e. the corresponding argument is treated as * an assignment expression (@see checkNames). - * @param pt ... - * @return ... */ private def isApplicable(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = @@ -1192,7 +1189,7 @@ trait Infer extends Checkable { * @param fn fn: the function that needs to be instantiated. * @param undetparams the parameters that need to be determined * @param args the actual arguments supplied in the call. - * @param pt the expected type of the function application + * @param pt0 the expected type of the function application * @return The type parameters that remain uninstantiated, * and that thus have not been substituted. */ @@ -1243,7 +1240,7 @@ trait Infer extends Checkable { * * @param tree the constuctor that needs to be instantiated * @param undetparams the undetermined type parameters - * @param pt the expected result type of the instance + * @param pt0 the expected result type of the instance */ def inferConstructorInstance(tree: Tree, undetparams: List[Symbol], pt0: Type) { val pt = abstractTypesToBounds(pt0) @@ -1600,7 +1597,7 @@ trait Infer extends Checkable { * with pt = WildcardType. * Otherwise, if there is no best alternative, error. * - * @param argtpes contains the argument types. If an argument is named, as + * @param argtpes0 contains the argument types. If an argument is named, as * "a = 3", the corresponding type is `NamedType("a", Int)'. If the name * of some NamedType does not exist in an alternative's parameter names, * the type is replaces by `Unit`, i.e. the argument is treated as an diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 81ea5630d0..71470222bf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -574,7 +574,7 @@ trait TypeDiagnostics { /** Report a type error. 
* - * @param pos0 The position where to report the error + * @param pos The position where to report the error * @param ex The exception that caused the error */ def reportTypeError(context0: Context, pos: Position, ex: TypeError) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d8d3c37ba6..c40b69bc7a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2720,13 +2720,6 @@ trait Typers extends Adaptations with Tags { } } - - /** - * @param fun ... - * @param mode ... - * @param pt ... - * @return ... - */ private def typedFunction(fun: Function, mode: Mode, pt: Type): Tree = { val numVparams = fun.vparams.length if (numVparams > definitions.MaxFunctionArity) diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala index 2431baf3e7..0287171369 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala @@ -194,7 +194,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] { * * @param insertAt where to add the element (set to -1 to use its hashcode) * @param comesBefore the position before which the element should be added to - * @param elem the element to be added + * @param newEntry the element to be added * * If the element is to be inserted at the position corresponding to its hash code, * the table will try to add the element in such a position if possible. Collisions are resolved diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 8b64bf7a32..f8598dca7a 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -217,7 +217,7 @@ trait Names extends api.Names { * this name from start, length if not found. * * @param c the character - * @param start ... + * @param start the index from which to search * @return the index of the first occurrence of c */ final def pos(c: Char, start: Int): Int = { @@ -230,7 +230,7 @@ trait Names extends api.Names { * in this name from start, length if not found. * * @param s the string - * @param start ... + * @param start the index from which to search * @return the index of the first occurrence of s */ final def pos(s: String, start: Int): Int = { @@ -258,7 +258,7 @@ trait Names extends api.Names { * name from start, -1 if not found. * * @param c the character - * @param start ... + * @param start the index from which to search * @return the index of the last occurrence of c */ final def lastPos(c: Char, start: Int): Int = { diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 5b01b5ffa5..3850f965b0 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -28,8 +28,8 @@ abstract class UnPickler { * from an array of bytes. 
* @param bytes bytearray from which we unpickle * @param offset offset from which unpickling starts - * @param classroot the top-level class which is unpickled, or NoSymbol if inapplicable - * @param moduleroot the top-level module which is unpickled, or NoSymbol if inapplicable + * @param classRoot the top-level class which is unpickled, or NoSymbol if inapplicable + * @param moduleRoot the top-level module which is unpickled, or NoSymbol if inapplicable * @param filename filename associated with bytearray, only used for error messages */ def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) { diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 8062dea38c..c5c28ad3e9 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -1198,7 +1198,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni else sym.name.toString /** The Java field corresponding to a given Scala field. - * @param meth The Scala field. + * @param fld The Scala field. */ def fieldToJava(fld: TermSymbol): jField = fieldCache.toJava(fld) { val jclazz = classToJava(fld.owner.asClass) -- cgit v1.2.3 From 022c57fda629bbdcedea2d2d93beb84aebc22282 Mon Sep 17 00:00:00 2001 From: James Iry Date: Fri, 22 Feb 2013 13:51:44 -0800 Subject: SI-7006 Improve jump-elision code in GenASM While working on SI-7006 I found a O(N*M) loop in jump-elision that should be O(N). This commit clean that up. It also improves the diagnostics in Members#removeBlock. --- .../scala/tools/nsc/backend/icode/Members.scala | 17 ++++--- .../scala/tools/nsc/backend/jvm/GenASM.scala | 59 ++++++++++------------ 2 files changed, 36 insertions(+), 40 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala index 5c90fbf366..e471f4256b 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala @@ -62,17 +62,18 @@ trait Members { def removeBlock(b: BasicBlock) { if (settings.debug.value) { - assert(blocks forall (p => !(p.successors contains b)), - "Removing block that is still referenced in method code " + b + "preds: " + b.predecessors - ) - assert(b != startBlock || b.successors.length == 1, - "Removing start block with more than one successor." - ) + // only do this sanity check when debug is turned on because it's moderately expensive + val referers = blocks filter (_.successors contains b) + assert(referers.isEmpty, s"Trying to removing block $b (with preds ${b.predecessors.mkString}) but it is still refered to from block(s) ${referers.mkString}") } - if (b == startBlock) + if (b == startBlock) { + assert(b.successors.length == 1, + s"Removing start block ${b} with ${b.successors.length} successors (${b.successors.mkString})." 
+ ) startBlock = b.successors.head - + } + blocks -= b assert(!blocks.contains(b)) method.exh filter (_ covers b) foreach (_.covered -= b) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 75a8dfff90..6215503c01 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -3228,45 +3228,40 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } def rephraseGotos(detour: Map[BasicBlock, BasicBlock]) { - for(Pair(oldTarget, newTarget) <- detour.iterator) { - if(m.startBlock == oldTarget) { - m.code.startBlock = newTarget - } - for(eh <- m.exh; if eh.startBlock == oldTarget) { - eh.setStartBlock(newTarget) - } - for(b <- m.blocks; if !detour.isDefinedAt(b)) { - val idxLast = (b.size - 1) - b.lastInstruction match { - case JUMP(whereto) => - if (whereto == oldTarget) { - b.replaceInstruction(idxLast, JUMP(newTarget)) - } - case CJUMP(succ, fail, cond, kind) => - if ((succ == oldTarget) || (fail == oldTarget)) { - b.replaceInstruction(idxLast, CJUMP(detour.getOrElse(succ, succ), - detour.getOrElse(fail, fail), - cond, kind)) - } - case CZJUMP(succ, fail, cond, kind) => - if ((succ == oldTarget) || (fail == oldTarget)) { - b.replaceInstruction(idxLast, CZJUMP(detour.getOrElse(succ, succ), - detour.getOrElse(fail, fail), - cond, kind)) - } - case SWITCH(tags, labels) => - if(labels exists (detour.isDefinedAt(_))) { - val newLabels = (labels map { lab => detour.getOrElse(lab, lab) }) - b.replaceInstruction(idxLast, SWITCH(tags, newLabels)) - } - case _ => () + def lookup(b: BasicBlock) = detour.getOrElse(b, b) + + m.code.startBlock = lookup(m.code.startBlock) + + for(eh <- m.exh) + eh.setStartBlock(lookup(eh.startBlock)) + + for (b <- m.blocks) { + def replaceLastInstruction(i: Instruction) = { + if (b.lastInstruction != i) { + val idxLast = b.size - 1 + debuglog(s"In block $b, replacing last instruction ${b.lastInstruction} with ${i}") + b.replaceInstruction(idxLast, i) } } + + b.lastInstruction match { + case JUMP(whereto) => + replaceLastInstruction(JUMP(lookup(whereto))) + case CJUMP(succ, fail, cond, kind) => + replaceLastInstruction(CJUMP(lookup(succ), lookup(fail), cond, kind)) + case CZJUMP(succ, fail, cond, kind) => + replaceLastInstruction(CZJUMP(lookup(succ), lookup(fail), cond, kind)) + case SWITCH(tags, labels) => + val newLabels = (labels map lookup) + replaceLastInstruction(SWITCH(tags, newLabels)) + case _ => () + } } } /* remove from all containers that may contain a reference to */ def elide(redu: BasicBlock) { + debuglog(s"Eliding jump only block $redu because it can be jumped around.") assert(m.startBlock != redu, "startBlock should have been re-wired by now") m.code.removeBlock(redu) } -- cgit v1.2.3 From 0d2e19cc4c639c27a93c3ed76d892b16d40dcc9b Mon Sep 17 00:00:00 2001 From: James Iry Date: Fri, 22 Feb 2013 14:01:28 -0800 Subject: SI-7006 Recognize more jump only blocks During ASM code emission we would recognize a block that consisted of ICode-only artifacts (ENTER_SCOPE, EXIT_SCOPE, and LOAD_EXCEPTION) followed by a jump. But we weren't using the same logic to recognize all jump-only blocks. So jump-elision wasn't removing them. And that in fact was why the ASM code emission had to do its special case. This commit makes all jump-only block recognition use the same logic: a jump-only block is one that has 0 or more ICode-only instructions followed by a JUMP. 
It does't necessarily have to start with a JUMP. There's now a debugWarning if the old NOP emitting code is triggered and test t6102 is enhanced to error if that warning occurs. --- .../scala/tools/nsc/backend/jvm/GenASM.scala | 74 ++++++++++++++-------- test/files/run/t6102.flags | 2 +- 2 files changed, 50 insertions(+), 26 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 6215503c01..c5809cf3f4 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -2515,21 +2515,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY) case JUMP(whereto) => - if (nextBlock != whereto) { + if (nextBlock != whereto) jcode goTo labels(whereto) - } else if (m.exh.exists(eh => eh.covers(b))) { // SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH. - // If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range" - val isSthgLeft = b.toList.exists { - case _: LOAD_EXCEPTION => false - case _: SCOPE_ENTER => false - case _: SCOPE_EXIT => false - case _: JUMP => false - case _ => true - } - if (!isSthgLeft) { - emit(asm.Opcodes.NOP) - } + // If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range" + else if (newNormal.isJumpOnly(b) && m.exh.exists(eh => eh.covers(b))) { + debugwarn("Had a jump only block that wasn't collapsed") + emit(asm.Opcodes.NOP) } case CJUMP(success, failure, cond, kind) => @@ -3084,8 +3076,29 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { * TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them). */ object newNormal { - - def startsWithJump(b: BasicBlock): Boolean = { assert(b.nonEmpty, "empty block"); b.firstInstruction.isInstanceOf[JUMP] } + /** + * True if a block is "jump only" which is defined + * as being a block that consists only of 0 or more instructions that + * won't make it to the JVM followed by a JUMP. + */ + def isJumpOnly(b: BasicBlock): Boolean = { + val nonICode = firstNonIcodeOnlyInstructions(b) + // by definition a block has to have a jump, conditional jump, return, or throw + assert(nonICode.hasNext, "empty block") + nonICode.next.isInstanceOf[JUMP] + } + + /** + * Returns the list of instructions in a block that follow all ICode only instructions, + * where an ICode only instruction is one that won't make it to the JVM + */ + private def firstNonIcodeOnlyInstructions(b: BasicBlock): Iterator[Instruction] = { + def isICodeOnlyInstruction(i: Instruction) = i match { + case LOAD_EXCEPTION(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) => true + case _ => false + } + b.iterator dropWhile isICodeOnlyInstruction + } /** Prune from an exception handler those covered blocks which are jump-only. */ private def coverWhatCountsOnly(m: IMethod): Boolean = { @@ -3093,7 +3106,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { var wasReduced = false for(h <- m.exh) { - val shouldntCover = (h.covered filter startsWithJump) + val shouldntCover = (h.covered filter isJumpOnly) if(shouldntCover.nonEmpty) { wasReduced = true h.covered --= shouldntCover // not removing any block on purpose. 
@@ -3117,7 +3130,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def isRedundant(eh: ExceptionHandler): Boolean = { (eh.cls != NoSymbol) && ( // TODO `eh.isFinallyBlock` more readable than `eh.cls != NoSymbol` eh.covered.isEmpty - || (eh.covered forall startsWithJump) + || (eh.covered forall isJumpOnly) ) } @@ -3132,10 +3145,21 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { wasReduced } - private def isJumpOnly(b: BasicBlock): Option[BasicBlock] = { - b.toList match { - case JUMP(whereto) :: rest => - assert(rest.isEmpty, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)") + /** + * Returns the target of a block that is "jump only" which is defined + * as being a block that consists only of 0 or more instructions that + * won't make it to the JVM followed by a JUMP. + * + * @param b The basic block to examine + * @return Some(target) if b is a "jump only" block or None if it's not + */ + private def getJumpOnlyTarget(b: BasicBlock): Option[BasicBlock] = { + val nonICode = firstNonIcodeOnlyInstructions(b) + // by definition a block has to have a jump, conditional jump, return, or throw + assert(nonICode.nonEmpty, "empty block") + nonICode.next match { + case JUMP(whereto) => + assert(!nonICode.hasNext, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)") Some(whereto) case _ => None } @@ -3167,12 +3191,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { * Precondition: the BasicBlock given as argument starts with an unconditional JUMP. */ private def finalDestination(start: BasicBlock): (BasicBlock, List[BasicBlock]) = { - assert(startsWithJump(start), "not the start of a (single or multi-hop) chain of JUMPs.") + if (settings.debug.value) assert(isJumpOnly(start), "not the start of a (single or multi-hop) chain of JUMPs.") var hops: List[BasicBlock] = Nil var prev = start var done = false do { - done = isJumpOnly(prev) match { + done = getJumpOnlyTarget(prev) match { case Some(dest) => if (dest == start) { return (start, hops) } // leave infinite-loops in place hops ::= prev @@ -3222,7 +3246,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { /* "start" is relative in a cycle, but we call this helper with the "first" entry-point we found. 
*/ def realTarget(jumpStart: BasicBlock): Map[BasicBlock, BasicBlock] = { - assert(startsWithJump(jumpStart), "not part of a jump-chain") + if (settings.debug.value) assert(isJumpOnly(jumpStart), "not part of a jump-chain") val Pair(dest, redundants) = finalDestination(jumpStart) (for(skipOver <- redundants) yield Pair(skipOver, dest)).toMap } @@ -3277,7 +3301,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { while(reachable.nonEmpty) { val h = reachable.head reachable = reachable.tail - if(startsWithJump(h)) { + if(isJumpOnly(h)) { val detour = realTarget(h) if(detour.nonEmpty) { wasReduced = true diff --git a/test/files/run/t6102.flags b/test/files/run/t6102.flags index e35535c8ea..72fe7b1aa0 100644 --- a/test/files/run/t6102.flags +++ b/test/files/run/t6102.flags @@ -1 +1 @@ - -Ydead-code + -Ydead-code -Ydebug -Xfatal-warnings -- cgit v1.2.3 From e9f6511094c1e616719221970a9f3eec39c72905 Mon Sep 17 00:00:00 2001 From: James Iry Date: Fri, 22 Feb 2013 15:33:18 -0800 Subject: SI-7006 Eliminate unreachable blocks GenASM was doing a bunch of stuff to eliminate unreachable exception handlers, but it was still leaving behind other unreachable blocks, for instance a finally block associated with an exception handler that got removed would still be left lying around. ASM would in turn turn those into a big pile of NOPs, which just take up space uselessly. This commit replaces all the logic for eliding exception handlers with a single unreachable block remover that catches unused exception handlers and a whole lot more. --- .../scala/tools/nsc/backend/jvm/GenASM.scala | 114 +++++++++------------ test/files/jvm/t7006/Foo_1.flags | 1 + test/files/jvm/t7006/Foo_1.scala | 9 ++ test/files/jvm/t7006/Test.scala | 18 ++++ 4 files changed, 79 insertions(+), 63 deletions(-) create mode 100644 test/files/jvm/t7006/Foo_1.flags create mode 100644 test/files/jvm/t7006/Foo_1.scala create mode 100644 test/files/jvm/t7006/Test.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index c5809cf3f4..218c2c3ff5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -3100,51 +3100,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { b.iterator dropWhile isICodeOnlyInstruction } - /** Prune from an exception handler those covered blocks which are jump-only. */ - private def coverWhatCountsOnly(m: IMethod): Boolean = { - assert(m.hasCode, "code-less method") - - var wasReduced = false - for(h <- m.exh) { - val shouldntCover = (h.covered filter isJumpOnly) - if(shouldntCover.nonEmpty) { - wasReduced = true - h.covered --= shouldntCover // not removing any block on purpose. - } - } - - wasReduced - } - - /** An exception handler is pruned provided any of the following holds: - * (1) it covers nothing (for example, this may result after removing unreachable blocks) - * (2) each block it covers is of the form: JUMP(_) - * Return true iff one or more ExceptionHandlers were removed. - * - * A caveat: removing an exception handler, for whatever reason, means that its handler code (even if unreachable) - * won't be able to cause a class-loading-exception. As a result, behavior can be different. 
- */ - private def elimNonCoveringExh(m: IMethod): Boolean = { - assert(m.hasCode, "code-less method") - - def isRedundant(eh: ExceptionHandler): Boolean = { - (eh.cls != NoSymbol) && ( // TODO `eh.isFinallyBlock` more readable than `eh.cls != NoSymbol` - eh.covered.isEmpty - || (eh.covered forall isJumpOnly) - ) - } - - var wasReduced = false - val toPrune = (m.exh.toSet filter isRedundant) - if(toPrune.nonEmpty) { - wasReduced = true - for(h <- toPrune; r <- h.blocks) { m.code.removeBlock(r) } // TODO m.code.removeExh(h) - m.exh = (m.exh filterNot toPrune) - } - - wasReduced - } - /** * Returns the target of a block that is "jump only" which is defined * as being a block that consists only of 0 or more instructions that @@ -3228,7 +3183,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { * In more detail: * Starting at each of the entry points (m.startBlock, the start block of each exception handler) * rephrase those control-flow instructions targeting a jump-only block (which jumps to a final destination D) to target D. - * The blocks thus skipped are also removed from IMethod.blocks. + * The blocks thus skipped become eligible to removed by the reachability analyzer * * Rationale for this normalization: * test/files/run/private-inline.scala after -optimize is chock full of @@ -3239,9 +3194,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { * and thus ranges with identical (start, end) (i.e, identical after GenJVM omitted the JUMPs in question) * could be weeded out to avoid "java.lang.ClassFormatError: Illegal exception table range" * Now that visitTryCatchBlock() must be called before Labels are resolved, - * this method gets rid of the BasicBlocks described above (to recap, consisting of just a JUMP). + * renders the BasicBlocks described above (to recap, consisting of just a JUMP) unreachable. */ - private def collapseJumpOnlyBlocks(m: IMethod): Boolean = { + private def collapseJumpOnlyBlocks(m: IMethod) { assert(m.hasCode, "code-less method") /* "start" is relative in a cycle, but we call this helper with the "first" entry-point we found. */ @@ -3285,9 +3240,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { /* remove from all containers that may contain a reference to */ def elide(redu: BasicBlock) { - debuglog(s"Eliding jump only block $redu because it can be jumped around.") + debuglog(s"Will elide jump only block $redu because it can be jumped around.") assert(m.startBlock != redu, "startBlock should have been re-wired by now") - m.code.removeBlock(redu) } var wasReduced = false @@ -3315,25 +3269,59 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } } assert(newTargets.intersect(elided).isEmpty, "contradiction: we just elided the final destionation of a jump-chain") + } + + /** + * Removes all blocks that are unreachable in a method using a standard reachability analysis. 
+ */ + def elimUnreachableBlocks(m: IMethod) { + assert(m.hasCode, "code-less method") + + // assume nothing is reachable until we prove it can be reached + val reachable = mutable.Set[BasicBlock]() + + // the set of blocks that we know are reachable but have + // yet to be marked reachable, initially only the start block + val worklist = mutable.Set(m.startBlock) + + while (!worklist.isEmpty) { + val block = worklist.head + worklist remove block + // we know that one is reachable + reachable add block + // so are its successors, so go back around and add the ones we still + // think are unreachable + worklist ++= (block.successors filterNot reachable) + } + + // exception handlers need to be told not to cover unreachable blocks + // and exception handlers that no longer cover any blocks need to be + // removed entirely + val unusedExceptionHandlers = mutable.Set[ExceptionHandler]() + for (exh <- m.exh) { + exh.covered = exh.covered filter reachable + if (exh.covered.isEmpty) { + unusedExceptionHandlers += exh + } + } + + // remove the unusued exception handler references + if (settings.debug.value) + for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks") + m.exh = m.exh filterNot unusedExceptionHandlers - wasReduced + // everything not in the reachable set is unreachable, unused, and unloved. buh bye + for (b <- m.blocks filterNot reachable) { + debuglog(s"eliding block $b because it is unreachable") + m.code removeBlock b + } } def normalize(m: IMethod) { if(!m.hasCode) { return } collapseJumpOnlyBlocks(m) - var wasReduced = false - do { - wasReduced = false - // Prune from an exception handler those covered blocks which are jump-only. - wasReduced |= coverWhatCountsOnly(m); icodes.checkValid(m) // TODO should be unnecessary now that collapseJumpOnlyBlocks(m) is in place - // Prune exception handlers covering nothing. - wasReduced |= elimNonCoveringExh(m); icodes.checkValid(m) - - // TODO see note in genExceptionHandlers about an ExceptionHandler.covered containing dead blocks (newNormal should remove them, but, where do those blocks come from?) - } while (wasReduced) - - // TODO this would be a good time to remove synthetic local vars seeing no use, don't forget to call computeLocalVarsIndex() afterwards. 
+ elimUnreachableBlocks(m) + icodes checkValid m } } diff --git a/test/files/jvm/t7006/Foo_1.flags b/test/files/jvm/t7006/Foo_1.flags new file mode 100644 index 0000000000..72fe7b1aa0 --- /dev/null +++ b/test/files/jvm/t7006/Foo_1.flags @@ -0,0 +1 @@ + -Ydead-code -Ydebug -Xfatal-warnings diff --git a/test/files/jvm/t7006/Foo_1.scala b/test/files/jvm/t7006/Foo_1.scala new file mode 100644 index 0000000000..f84269daf2 --- /dev/null +++ b/test/files/jvm/t7006/Foo_1.scala @@ -0,0 +1,9 @@ +class Foo_1 { + def foo { + try { + val x = 3 + } finally { + print("hello") + } + } +} diff --git a/test/files/jvm/t7006/Test.scala b/test/files/jvm/t7006/Test.scala new file mode 100644 index 0000000000..5cc38e42a2 --- /dev/null +++ b/test/files/jvm/t7006/Test.scala @@ -0,0 +1,18 @@ +import scala.tools.partest.BytecodeTest +import scala.tools.asm +import asm.tree.InsnList +import scala.collection.JavaConverters._ + +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("Foo_1") + val methodNode = getMethod(classNode, "foo") + assert(countNops(methodNode.instructions) == 0) + } + + def countNops(insnList: InsnList): Int = { + def isNop(node: asm.tree.AbstractInsnNode): Boolean = + (node.getOpcode == asm.Opcodes.NOP) + insnList.iterator.asScala.count(isNop) + } +} -- cgit v1.2.3 From 4f2d784a09e5df244ebbee33d23cf931fcacb740 Mon Sep 17 00:00:00 2001 From: James Iry Date: Sun, 24 Feb 2013 06:28:45 -0800 Subject: SI-7006 Simplify jump-only block destination determination With proper reachability analysis, the code for finding the final destination of jump-only blocks was more complicated than needed. This commit simplifies and speeds up that process using a standard Tortoise and Hare algorithm on a Map from jump-only blocks to their immediate destinations. Test t7006 is increased a bit to make sure we don't get stuck on infinite loops and to make sure we're deleting all but the essential jump. --- .../scala/tools/nsc/backend/jvm/GenASM.scala | 202 ++++++++++----------- test/files/jvm/t7006/Foo_1.scala | 3 +- test/files/jvm/t7006/Test.scala | 7 +- 3 files changed, 98 insertions(+), 114 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 218c2c3ff5..91cb1857ac 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -11,6 +11,7 @@ import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer } import scala.tools.nsc.symtab._ import scala.tools.asm import asm.Label +import scala.annotation.tailrec /** * @author Iulian Dragos (version 1.0, FJBG-based implementation) @@ -3120,54 +3121,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } } - private def directSuccStar(b: BasicBlock): List[BasicBlock] = { directSuccStar(List(b)) } - - /** Transitive closure of successors potentially reachable due to normal (non-exceptional) control flow. 
- Those BBs in the argument are also included in the result */ - private def directSuccStar(starters: Traversable[BasicBlock]): List[BasicBlock] = { - val result = new mutable.ListBuffer[BasicBlock] - var toVisit: List[BasicBlock] = starters.toList.distinct - while(toVisit.nonEmpty) { - val h = toVisit.head - toVisit = toVisit.tail - result += h - for(p <- h.directSuccessors; if !result.contains(p) && !toVisit.contains(p)) { toVisit = p :: toVisit } - } - result.toList - } - - /** Returns: - * for single-block self-loops, the pair (start, Nil) - * for other cycles, the pair (backedge-target, basic-blocks-in-the-cycle-except-backedge-target) - * otherwise a pair consisting of: - * (a) the endpoint of a (single or multi-hop) chain of JUMPs - * (such endpoint does not start with a JUMP and therefore is not part of the chain); and - * (b) the chain (ie blocks to be removed when collapsing the chain of jumps). - * Precondition: the BasicBlock given as argument starts with an unconditional JUMP. - */ - private def finalDestination(start: BasicBlock): (BasicBlock, List[BasicBlock]) = { - if (settings.debug.value) assert(isJumpOnly(start), "not the start of a (single or multi-hop) chain of JUMPs.") - var hops: List[BasicBlock] = Nil - var prev = start - var done = false - do { - done = getJumpOnlyTarget(prev) match { - case Some(dest) => - if (dest == start) { return (start, hops) } // leave infinite-loops in place - hops ::= prev - if (hops.contains(dest)) { - // leave infinite-loops in place - return (dest, hops filterNot (dest eq _)) - } - prev = dest - false - case None => true - } - } while(!done) - - (prev, hops) - } - /** * Collapse a chain of "jump-only" blocks such as: * @@ -3199,76 +3152,105 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { private def collapseJumpOnlyBlocks(m: IMethod) { assert(m.hasCode, "code-less method") - /* "start" is relative in a cycle, but we call this helper with the "first" entry-point we found. 
*/ - def realTarget(jumpStart: BasicBlock): Map[BasicBlock, BasicBlock] = { - if (settings.debug.value) assert(isJumpOnly(jumpStart), "not part of a jump-chain") - val Pair(dest, redundants) = finalDestination(jumpStart) - (for(skipOver <- redundants) yield Pair(skipOver, dest)).toMap - } - - def rephraseGotos(detour: Map[BasicBlock, BasicBlock]) { - def lookup(b: BasicBlock) = detour.getOrElse(b, b) + def rephraseGotos(detour: mutable.Map[BasicBlock, BasicBlock]) { + def lookup(b: BasicBlock) = detour.getOrElse(b, b) - m.code.startBlock = lookup(m.code.startBlock) - - for(eh <- m.exh) - eh.setStartBlock(lookup(eh.startBlock)) + m.code.startBlock = lookup(m.code.startBlock) + + for(eh <- m.exh) + eh.setStartBlock(lookup(eh.startBlock)) - for (b <- m.blocks) { - def replaceLastInstruction(i: Instruction) = { - if (b.lastInstruction != i) { - val idxLast = b.size - 1 - debuglog(s"In block $b, replacing last instruction ${b.lastInstruction} with ${i}") - b.replaceInstruction(idxLast, i) - } - } - - b.lastInstruction match { - case JUMP(whereto) => - replaceLastInstruction(JUMP(lookup(whereto))) - case CJUMP(succ, fail, cond, kind) => - replaceLastInstruction(CJUMP(lookup(succ), lookup(fail), cond, kind)) - case CZJUMP(succ, fail, cond, kind) => - replaceLastInstruction(CZJUMP(lookup(succ), lookup(fail), cond, kind)) - case SWITCH(tags, labels) => - val newLabels = (labels map lookup) - replaceLastInstruction(SWITCH(tags, newLabels)) - case _ => () - } + for (b <- m.blocks) { + def replaceLastInstruction(i: Instruction) = { + if (b.lastInstruction != i) { + val idxLast = b.size - 1 + debuglog(s"In block $b, replacing last instruction ${b.lastInstruction} with ${i}") + b.replaceInstruction(idxLast, i) } } - - /* remove from all containers that may contain a reference to */ - def elide(redu: BasicBlock) { - debuglog(s"Will elide jump only block $redu because it can be jumped around.") - assert(m.startBlock != redu, "startBlock should have been re-wired by now") + + b.lastInstruction match { + case JUMP(whereto) => + replaceLastInstruction(JUMP(lookup(whereto))) + case CJUMP(succ, fail, cond, kind) => + replaceLastInstruction(CJUMP(lookup(succ), lookup(fail), cond, kind)) + case CZJUMP(succ, fail, cond, kind) => + replaceLastInstruction(CZJUMP(lookup(succ), lookup(fail), cond, kind)) + case SWITCH(tags, labels) => + val newLabels = (labels map lookup) + replaceLastInstruction(SWITCH(tags, newLabels)) + case _ => () } + } + } - var wasReduced = false - val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock)) - - val elided = mutable.Set.empty[BasicBlock] // debug - val newTargets = mutable.Set.empty[BasicBlock] // debug - - for (ep <- entryPoints) { - var reachable = directSuccStar(ep) // this list may contain blocks belonging to jump-chains that we'll skip over - while(reachable.nonEmpty) { - val h = reachable.head - reachable = reachable.tail - if(isJumpOnly(h)) { - val detour = realTarget(h) - if(detour.nonEmpty) { - wasReduced = true - reachable = (reachable filterNot (detour.keySet.contains(_))) - rephraseGotos(detour) - detour.keySet foreach elide - elided ++= detour.keySet - newTargets ++= detour.values - } + /** + * Computes a mapping from jump only block to its + * final destination which is either a non-jump-only + * block or, if it's in a jump-only block cycle, is + * itself + */ + def computeDetour: mutable.Map[BasicBlock, BasicBlock] = { + // fetch the jump only blocks and their immediate destinations + val pairs = for { + block <- m.blocks.toIterator + target <- 
getJumpOnlyTarget(block) + } yield(block, target) + + // mapping from a jump-only block to our current knowledge of its + // final destination. Initially it's just jump block to immediate jump + // target + val detour = mutable.Map[BasicBlock, BasicBlock](pairs.toSeq:_*) + + // for each jump-only block find its final destination + // taking advantage of the destinations we found for previous + // blocks + for (key <- detour.keySet) { + // we use the Robert Floyd's classic Tortoise and Hare algorithm + @tailrec + def findDestination(tortoise: BasicBlock, hare: BasicBlock): BasicBlock = { + if (tortoise == hare) + // cycle detected, map key to key + key + else if (detour contains hare) { + // advance hare once + val hare1 = detour(hare) + // make sure we can advance hare a second time + if (detour contains hare1) + // advance tortoise once and hare a second time + findDestination(detour(tortoise), detour(hare1)) + else + // hare1 is not in the map so it's not a jump-only block, it's the destination + hare1 + } else + // hare is not in the map so it's not a jump-only block, it's the destination + hare } + // update the mapping for key based on its final destination + detour(key) = findDestination(key, detour(key)) + } + detour + } + + val detour = computeDetour + rephraseGotos(detour) + + if (settings.debug.value) { + val (remappings, cycles) = detour partition {case (source, target) => source != target} + for ((source, target) <- remappings) { + debuglog(s"Will elide jump only block $source because it can be jumped around to get to $target.") + if (m.startBlock == source) debugwarn("startBlock should have been re-wired by now") + } + val sources = remappings.keySet + val targets = remappings.values.toSet + val intersection = sources intersect targets + + if (intersection.nonEmpty) debugwarn(s"contradiction: we seem to have some source and target overlap in blocks ${intersection.mkString}. Map was ${detour.mkString}") + + for ((source, _) <- cycles) { + debuglog(s"Block $source is in a do-nothing infinite loop. 
Did the user write 'while(true){}'?") } } - assert(newTargets.intersect(elided).isEmpty, "contradiction: we just elided the final destionation of a jump-chain") } /** @@ -3284,7 +3266,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // yet to be marked reachable, initially only the start block val worklist = mutable.Set(m.startBlock) - while (!worklist.isEmpty) { + while (worklist.nonEmpty) { val block = worklist.head worklist remove block // we know that one is reachable diff --git a/test/files/jvm/t7006/Foo_1.scala b/test/files/jvm/t7006/Foo_1.scala index f84269daf2..995619ce6b 100644 --- a/test/files/jvm/t7006/Foo_1.scala +++ b/test/files/jvm/t7006/Foo_1.scala @@ -1,9 +1,10 @@ class Foo_1 { def foo { try { - val x = 3 + val x = 3 // this will be optimized away, leaving a useless jump only block } finally { print("hello") } + while(true){} // ensure infinite loop doesn't break the algoirthm } } diff --git a/test/files/jvm/t7006/Test.scala b/test/files/jvm/t7006/Test.scala index 5cc38e42a2..065a23510e 100644 --- a/test/files/jvm/t7006/Test.scala +++ b/test/files/jvm/t7006/Test.scala @@ -7,12 +7,13 @@ object Test extends BytecodeTest { def show: Unit = { val classNode = loadClassNode("Foo_1") val methodNode = getMethod(classNode, "foo") - assert(countNops(methodNode.instructions) == 0) + assert(count(methodNode.instructions, asm.Opcodes.NOP) == 0) + assert(count(methodNode.instructions, asm.Opcodes.GOTO) == 1) } - def countNops(insnList: InsnList): Int = { + def count(insnList: InsnList, opcode: Int): Int = { def isNop(node: asm.tree.AbstractInsnNode): Boolean = - (node.getOpcode == asm.Opcodes.NOP) + (node.getOpcode == opcode) insnList.iterator.asScala.count(isNop) } } -- cgit v1.2.3 From 28a716190c5faf549ed302a1c19d9611c32d2010 Mon Sep 17 00:00:00 2001 From: James Iry Date: Mon, 25 Feb 2013 16:25:22 -0800 Subject: SI-7181 Prepare to remove duplicated finally blocks As a first step towards fixing 7181, this commit improves the comments and variable names around generating try/catch/finally blocks in GenICode and adds a test verifying the current functionality of try/catch/finally blocks --- .../scala/tools/nsc/backend/icode/GenICode.scala | 77 +++++++++++++++------ test/files/run/t7181.check | 23 +++++++ test/files/run/t7181.scala | 78 ++++++++++++++++++++++ 3 files changed, 157 insertions(+), 21 deletions(-) create mode 100644 test/files/run/t7181.check create mode 100644 test/files/run/t7181.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 122972039b..8881650a81 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1921,15 +1921,47 @@ abstract class GenICode extends SubComponent { * }), (AnotherExceptionClass, * ctx => {... * } ))` + * + * The resulting structure will look something like + * + * outer: + * // this 'useless' jump will be removed later, + * // for now it separates the try body's blocks from previous + * // code since the try body needs its own exception handlers + * JUMP body + * + * body: + * [ try body ] + * [ finally body ] + * JUMP normalExit + * + * catch[i]: + * [ handler[i] body ] + * [ finally body ] + * JUMP normalExit + * + * catchAll: + * STORE exception + * [ finally body ] + * THROW exception + * + * normalExit: + * + * each catch[i] will cover body. 
catchAll will cover both body and each catch[i] + * Additional finally copies are created on the emission of every RETURN in the try body and exception handlers. + * + * This could result in unreachable code which has to be cleaned up later, e.g. if the try and all the exception + * handlers always end in RETURN then there will be no "normal" flow out of the try/catch/finally. + * Later reachability analysis will remove unreacahble code. */ def Try(body: Context => Context, handlers: List[(Symbol, TypeKind, Context => Context)], finalizer: Tree, tree: Tree) = { - val outerCtx = this.dup // context for generating exception handlers, covered by finalizer + val outerCtx = this.dup // context for generating exception handlers, covered by the catch-all finalizer val finalizerCtx = this.dup // context for generating finalizer handler - val afterCtx = outerCtx.newBlock() + val normalExitCtx = outerCtx.newBlock() // context where flow will go on a "normal" (non-return, non-throw) exit from a try or catch handler var tmp: Local = null val kind = toTypeKind(tree.tpe) val guardResult = kind != UNIT && mayCleanStack(finalizer) @@ -1956,30 +1988,33 @@ abstract class GenICode extends SubComponent { } else ctx + // Generate the catch-all exception handler that deals with uncaught exceptions coming + // from the try or exception handlers. It catches the exception, runs the finally code, then rethrows + // the exception if (finalizer != EmptyTree) { val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers this.addActiveHandler(exh) // .. and body aswell - val ctx = finalizerCtx.enterExceptionHandler(exh) - val exception = ctx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc") - loadException(ctx, exh, finalizer.pos) - ctx.bb.emit(STORE_LOCAL(exception)) - val ctx1 = genLoad(finalizer, ctx, UNIT) - ctx1.bb.emit(LOAD_LOCAL(exception)) - ctx1.bb.emit(THROW(ThrowableClass)) - ctx1.bb.enterIgnoreMode() - ctx1.bb.close() + val exhStartCtx = finalizerCtx.enterExceptionHandler(exh) + val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc") + loadException(exhStartCtx, exh, finalizer.pos) + exhStartCtx.bb.emit(STORE_LOCAL(exception)) + val exhEndCtx = genLoad(finalizer, exhStartCtx, UNIT) + exhEndCtx.bb.emit(LOAD_LOCAL(exception)) + exhEndCtx.bb.closeWith(THROW(ThrowableClass)) + exhEndCtx.bb.enterIgnoreMode() finalizerCtx.endHandler() } + // Generate each exception handler for ((sym, kind, handler) <- handlers) { val exh = this.newExceptionHandler(sym, tree.pos) - var ctx1 = outerCtx.enterExceptionHandler(exh) - ctx1.addFinalizer(finalizer, finalizerCtx) - loadException(ctx1, exh, tree.pos) - ctx1 = handler(ctx1) + val exhStartCtx = outerCtx.enterExceptionHandler(exh) + exhStartCtx.addFinalizer(finalizer, finalizerCtx) + loadException(exhStartCtx, exh, tree.pos) + val exhEndCtx = handler(exhStartCtx) // emit finalizer - val ctx2 = emitFinalizer(ctx1) - ctx2.bb.closeWith(JUMP(afterCtx.bb)) + val exhEndCtx2 = emitFinalizer(exhEndCtx) + exhEndCtx2.bb.closeWith(JUMP(normalExitCtx.bb)) outerCtx.endHandler() } @@ -1987,14 +2022,14 @@ abstract class GenICode extends SubComponent { if (finalizer != EmptyTree) bodyCtx.addFinalizer(finalizer, finalizerCtx) - var finalCtx = body(bodyCtx) - finalCtx = emitFinalizer(finalCtx) + var bodyEndCtx = body(bodyCtx) + bodyEndCtx = emitFinalizer(bodyEndCtx) outerCtx.bb.closeWith(JUMP(bodyCtx.bb)) - finalCtx.bb.closeWith(JUMP(afterCtx.bb)) + bodyEndCtx.bb.closeWith(JUMP(normalExitCtx.bb)) - afterCtx + normalExitCtx } 
} } diff --git a/test/files/run/t7181.check b/test/files/run/t7181.check new file mode 100644 index 0000000000..e4b8e30dfe --- /dev/null +++ b/test/files/run/t7181.check @@ -0,0 +1,23 @@ +normal exit MainNormalExit +finally MainNormalExit +normal flow MainNormalExit + +return MainReturn +finally MainReturn + +uncaught exception MainUncaughtException +finally MainUncaughtException + +caught exception ExceptionNormalExit +normal exit ExceptionNormalExit +finally ExceptionNormalExit +normal flow ExceptionNormalExit + +caught exception ExceptionReturn +return ExceptionReturn +finally ExceptionReturn + +caught exception ExceptionUncaughtException +uncaught exception ExceptionUncaughtException +finally ExceptionUncaughtException + diff --git a/test/files/run/t7181.scala b/test/files/run/t7181.scala new file mode 100644 index 0000000000..a055e43481 --- /dev/null +++ b/test/files/run/t7181.scala @@ -0,0 +1,78 @@ +sealed abstract class Action +// exit the try body normally +case object MainNormalExit extends Action +// exit the try body with a 'return' +case object MainReturn extends Action +// exit the try body with an uncaught exception +case object MainUncaughtException extends Action +// exit the try body with a caught exception and exit the exception handler normally +case object ExceptionNormalExit extends Action +// exit the try body with a caught exception and exit the exception handler with a 'return' +case object ExceptionReturn extends Action +// exit the try body with a caught exception and exit the exception handler with an uncaught exception +case object ExceptionUncaughtException extends Action + +case class UncaughtException(action: Action) extends RuntimeException +case class CaughtException(action: Action) extends RuntimeException + +object Test extends App { + def test(action: Action, expectException: Boolean = false) { + var gotException = false + val result = try + driver(action) + catch { + case UncaughtException(a) => + gotException = true + a + } + if (gotException) assert(expectException, "Got unexpected exception") + else assert(!expectException, "Did not get expected exception") + + assert(result == action, s"Expected $action but got $result") + println() + } + + def driver(action: Action): Action = { + val result = try { + action match { + case MainNormalExit => + println(s"normal exit $action") + action + case MainReturn => + println(s"return $action") + return action + case MainUncaughtException => + println(s"uncaught exception $action") + throw UncaughtException(action) + case _ => + println(s"caught exception $action") + throw CaughtException(action) + } + } catch { + case CaughtException(action) => action match { + case ExceptionNormalExit => + println(s"normal exit $action") + action + case ExceptionReturn => + println(s"return $action") + return action + case ExceptionUncaughtException => + println(s"uncaught exception $action") + throw UncaughtException(action) + case _ => + sys.error(s"unexpected $action in exception handler") + } + } finally { + println(s"finally $action") + } + println(s"normal flow $action") + result + } + + test(MainNormalExit) + test(MainReturn) + test(MainUncaughtException, true) + test(ExceptionNormalExit) + test(ExceptionReturn) + test(ExceptionUncaughtException, true) +} -- cgit v1.2.3 From 910e5a0ceff9264c27989257f4b793ddf2322f4a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 25 Feb 2013 23:27:42 -0800 Subject: Reconcile definitions of stability. 
--- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 14 +++----------- src/reflect/scala/reflect/internal/Types.scala | 3 ++- 2 files changed, 5 insertions(+), 12 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index c0391448d1..c7a4d44588 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -149,7 +149,7 @@ trait Implicits { class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter) { override def toString = "SearchResult(%s, %s)".format(tree, if (subst.isEmpty) "" else subst) - + def isFailure = false def isAmbiguousFailure = false final def isSuccess = !isFailure @@ -158,7 +158,7 @@ trait Implicits { lazy val SearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) { override def isFailure = true } - + lazy val AmbiguousSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) { override def isFailure = true override def isAmbiguousFailure = true @@ -198,15 +198,7 @@ trait Implicits { tp.isError } - /** Todo reconcile with definition of stability given in Types.scala */ - private def isStable(tp: Type): Boolean = tp match { - case TypeRef(pre, sym, _) => - sym.isPackageClass || - sym.isModuleClass && isStable(pre) /*|| - sym.isAliasType && isStable(tp.normalize)*/ - case _ => tp.isStable - } - def isStablePrefix = isStable(pre) + def isStablePrefix = pre.isStable override def equals(other: Any) = other match { case that: ImplicitInfo => diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index aa32457c10..f7ee4a7e7f 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2105,7 +2105,7 @@ trait Types extends api.Types { self: SymbolTable => class ModuleTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef { require(sym.isModuleClass, sym) private[this] var narrowedCache: Type = _ - override def isStable = true + override def isStable = pre.isStable override def narrow = { if (narrowedCache eq null) narrowedCache = singleType(pre, sym.sourceModule) @@ -2121,6 +2121,7 @@ trait Types extends api.Types { self: SymbolTable => } class PackageTypeRef(pre0: Type, sym0: Symbol) extends ModuleTypeRef(pre0, sym0) { require(sym.isPackageClass, sym) + override def isStable = true override protected def finishPrefix(rest: String) = packagePrefix + rest } class RefinementTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef { -- cgit v1.2.3 From 5f3cd8683d8b2e7429e73c2fa7199232ea7c46ca Mon Sep 17 00:00:00 2001 From: James Iry Date: Mon, 25 Feb 2013 16:30:46 -0800 Subject: SI-7181 Eliminate unnecessary duplication of finally blocks The main body of a try and each exception handler were getting a copy of the finally block for the "normal" flow case (i.e. where they don't throw an uncaught exception or use "return" to exit early). But that's not necessary. With this commit the try body and each exception handler can all jump to the same copy of the finally block on a normal exit. A byte code test is included to ensure we're getting fewer copies of the finally block. inline-ex-handlers.check is updated because the icode is a bit different without the extra finally block copies. 
--- .../scala/tools/nsc/backend/icode/GenICode.scala | 12 +- test/files/jvm/t7181/Foo_1.scala | 26 ++++ test/files/jvm/t7181/Test.scala | 24 ++++ test/files/run/inline-ex-handlers.check | 150 ++++++++++----------- 4 files changed, 129 insertions(+), 83 deletions(-) create mode 100644 test/files/jvm/t7181/Foo_1.scala create mode 100644 test/files/jvm/t7181/Test.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 8881650a81..5438fd8590 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1932,12 +1932,10 @@ abstract class GenICode extends SubComponent { * * body: * [ try body ] - * [ finally body ] * JUMP normalExit * * catch[i]: * [ handler[i] body ] - * [ finally body ] * JUMP normalExit * * catchAll: @@ -1946,6 +1944,7 @@ abstract class GenICode extends SubComponent { * THROW exception * * normalExit: + * [ finally body ] * * each catch[i] will cover body. catchAll will cover both body and each catch[i] * Additional finally copies are created on the emission of every RETURN in the try body and exception handlers. @@ -2012,9 +2011,7 @@ abstract class GenICode extends SubComponent { exhStartCtx.addFinalizer(finalizer, finalizerCtx) loadException(exhStartCtx, exh, tree.pos) val exhEndCtx = handler(exhStartCtx) - // emit finalizer - val exhEndCtx2 = emitFinalizer(exhEndCtx) - exhEndCtx2.bb.closeWith(JUMP(normalExitCtx.bb)) + exhEndCtx.bb.closeWith(JUMP(normalExitCtx.bb)) outerCtx.endHandler() } @@ -2022,14 +2019,13 @@ abstract class GenICode extends SubComponent { if (finalizer != EmptyTree) bodyCtx.addFinalizer(finalizer, finalizerCtx) - var bodyEndCtx = body(bodyCtx) - bodyEndCtx = emitFinalizer(bodyEndCtx) + val bodyEndCtx = body(bodyCtx) outerCtx.bb.closeWith(JUMP(bodyCtx.bb)) bodyEndCtx.bb.closeWith(JUMP(normalExitCtx.bb)) - normalExitCtx + emitFinalizer(normalExitCtx) } } } diff --git a/test/files/jvm/t7181/Foo_1.scala b/test/files/jvm/t7181/Foo_1.scala new file mode 100644 index 0000000000..f9dfdd4442 --- /dev/null +++ b/test/files/jvm/t7181/Foo_1.scala @@ -0,0 +1,26 @@ +class Exception1 extends RuntimeException +class Exception2 extends RuntimeException + +class Foo_1 { + def foo(baz: Baz) { + try { + baz.bar + } catch { + case _: Exception1 => println("exception 1") + case _: Exception2 => println("exception 2") + } finally { + // this should be the only copy of the magic constant 3 + // making it easy to detect copies of this finally block + println(s"finally ${3}") + } + println(s"normal flow") + } +} + +trait Baz { + // does it throw? who knows? 
This way + // I can ensure that no optimization that honors + // separate compilation could ever + // change the exception handling structure + def bar: Unit +} diff --git a/test/files/jvm/t7181/Test.scala b/test/files/jvm/t7181/Test.scala new file mode 100644 index 0000000000..35dba436c1 --- /dev/null +++ b/test/files/jvm/t7181/Test.scala @@ -0,0 +1,24 @@ +import scala.tools.partest.BytecodeTest +import scala.tools.asm +import asm.tree.InsnList +import scala.collection.JavaConverters._ + +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("Foo_1") + val methodNode = getMethod(classNode, "foo") + // there should be 2 copies of the finally block, each with the magic constant 3 + // one for the "normal" exit + // one for the uncaught exception exit + // prior to this PR there would have been 4 since each exception handler would also get a copy + val expected = 2 + val got = countMagicThrees(methodNode.instructions) + assert(got == expected, s"expected $expected but got $got magic threes") + } + + def countMagicThrees(insnList: InsnList): Int = { + def isMagicThree(node: asm.tree.AbstractInsnNode): Boolean = + (node.getOpcode == asm.Opcodes.ICONST_3) + insnList.iterator.asScala.count(isMagicThree) + } +} diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check index f2f0b60687..0a234e2659 100644 --- a/test/files/run/inline-ex-handlers.check +++ b/test/files/run/inline-ex-handlers.check @@ -107,27 +107,27 @@ --- > catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3 619c642 -< blocks: [1,2,3,4,5,6,7,9,10] +< blocks: [1,3,4,5,6,8,9] --- -> blocks: [1,2,3,4,5,6,7,9,10,11,12] +> blocks: [1,3,4,5,6,8,9,10,11] 643c666,667 < 78 THROW(IllegalArgumentException) --- > ? STORE_LOCAL(value e) -> ? JUMP 11 +> ? JUMP 10 644a669,673 -> 11: +> 10: > 81 LOAD_LOCAL(value e) > ? STORE_LOCAL(variable exc1) -> ? JUMP 12 +> ? JUMP 11 > -672c701,702 +669c698,699 < 81 THROW(Exception) --- > ? STORE_LOCAL(variable exc1) -> ? JUMP 12 -688a719,731 -> 12: +> ? JUMP 11 +685a716,728 +> 11: > 83 LOAD_MODULE object Predef > 83 CONSTANT("finally") > 83 CALL_METHOD scala.Predef.println (dynamic) @@ -140,88 +140,88 @@ > 84 LOAD_LOCAL(variable exc1) > 84 THROW(Throwable) > -694c737 -< catch () in ArrayBuffer(4, 6, 7, 9) starting at: 3 +691c734 +< catch () in ArrayBuffer(4, 5, 6, 8) starting at: 3 --- -> catch () in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3 -718c761 +> catch () in ArrayBuffer(4, 5, 6, 8, 10) starting at: 3 +715c758 < locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x -720c763 -< blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25] +717c760 +< blocks: [1,3,4,5,6,9,13,14,15,18,20,21,23,24] --- -> blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25,26,27,28] -744c787,794 +> blocks: [1,3,4,5,6,9,13,14,15,18,20,21,23,24,25,26,27] +741c784,791 < 172 THROW(MyException) --- > ? STORE_LOCAL(value ex6) -> ? JUMP 26 +> ? JUMP 25 > -> 26: +> 25: > 170 LOAD_LOCAL(value ex6) > 170 STORE_LOCAL(value x4) > 170 SCOPE_ENTER value x4 -> 170 JUMP 15 -787,790d836 +> 170 JUMP 14 +781,784d830 < 175 LOAD_LOCAL(value x5) < 175 CALL_METHOD MyException.message (dynamic) < 175 STORE_LOCAL(value message) < 175 SCOPE_ENTER value message -792c838,839 +786c832,833 < 176 LOAD_LOCAL(value message) --- > ? 
LOAD_LOCAL(value x5) > 176 CALL_METHOD MyException.message (dynamic) -796c843,844 +790c837,838 < 177 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 177 CALL_METHOD MyException.message (dynamic) -798c846,847 +792c840,841 < 177 THROW(MyException) --- > ? STORE_LOCAL(value ex6) -> ? JUMP 27 -802c851,852 +> ? JUMP 26 +796c845,846 < 170 THROW(Throwable) --- > ? STORE_LOCAL(value ex6) -> ? JUMP 27 -811a862,867 -> 27: +> ? JUMP 26 +805a856,861 +> 26: > 169 LOAD_LOCAL(value ex6) > 169 STORE_LOCAL(value x4) > 169 SCOPE_ENTER value x4 > 169 JUMP 5 > -822,825d877 +816,819d871 < 180 LOAD_LOCAL(value x5) < 180 CALL_METHOD MyException.message (dynamic) < 180 STORE_LOCAL(value message) < 180 SCOPE_ENTER value message -827c879,880 +821c873,874 < 181 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 181 CALL_METHOD MyException.message (dynamic) -831c884,885 +825c878,879 < 182 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 182 CALL_METHOD MyException.message (dynamic) -833c887,888 +827c881,882 < 182 THROW(MyException) --- > ? STORE_LOCAL(variable exc2) -> ? JUMP 28 -837c892,893 +> ? JUMP 27 +831c886,887 < 169 THROW(Throwable) --- > ? STORE_LOCAL(variable exc2) -> ? JUMP 28 -853a910,922 -> 28: +> ? JUMP 27 +847a904,916 +> 27: > 184 LOAD_MODULE object Predef > 184 CONSTANT("finally") > 184 CALL_METHOD scala.Predef.println (dynamic) @@ -234,23 +234,23 @@ > 185 LOAD_LOCAL(variable exc2) > 185 THROW(Throwable) > -859c928 -< catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24) starting at: 4 +853c922 +< catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23) starting at: 4 --- -> catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24, 26) starting at: 4 -862c931 -< catch () in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24) starting at: 3 +> catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23, 25) starting at: 4 +856c925 +< catch () in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23) starting at: 3 --- -> catch () in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24, 26, 27) starting at: 3 -886c955 +> catch () in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23, 25, 26) starting at: 3 +880c949 < locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value e, value ex6, value x4, value x5, value x -888c957 +882c951 < blocks: [1,2,3,6,7,8,11,13,14,16] --- > blocks: [1,2,3,6,7,8,11,13,14,16,17] -912c981,988 +906c975,982 < 124 THROW(MyException) --- > ? STORE_LOCAL(value ex6) @@ -261,29 +261,29 @@ > 122 STORE_LOCAL(value x4) > 122 SCOPE_ENTER value x4 > 122 JUMP 7 -937,940d1012 +931,934d1006 < 127 LOAD_LOCAL(value x5) < 127 CALL_METHOD MyException.message (dynamic) < 127 STORE_LOCAL(value message) < 127 SCOPE_ENTER value message -942c1014,1015 +936c1008,1009 < 127 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 127 CALL_METHOD MyException.message (dynamic) -971c1044 +965c1038 < catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3 --- > catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3 -995c1068 +989c1062 < locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e --- > locals: value args, variable result, value ex6, value x4, value x5, value x, value e -997c1070 +991c1064 < blocks: [1,2,3,4,5,8,12,13,14,16] --- > blocks: [1,2,3,5,8,12,13,14,16,17] -1021c1094,1103 +1015c1088,1097 < 148 THROW(MyException) --- > ? 
STORE_LOCAL(value ex6) @@ -296,25 +296,25 @@ > 154 LOAD_LOCAL(value x4) > 154 IS_INSTANCE REF(class MyException) > 154 CZJUMP (BOOL)NE ? 5 : 8 -1042,1044d1123 +1036,1038d1117 < 145 JUMP 4 < < 4: -1054,1057d1132 +1048,1051d1126 < 154 LOAD_LOCAL(value x5) < 154 CALL_METHOD MyException.message (dynamic) < 154 STORE_LOCAL(value message) < 154 SCOPE_ENTER value message -1059c1134,1135 +1053c1128,1129 < 154 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 154 CALL_METHOD MyException.message (dynamic) -1276c1352 +1270c1346 < blocks: [1,2,3,4,5,7] --- > blocks: [1,2,3,4,5,7,8] -1300c1376,1383 +1294c1370,1377 < 38 THROW(IllegalArgumentException) --- > ? STORE_LOCAL(value e) @@ -325,20 +325,20 @@ > 42 CONSTANT("IllegalArgumentException") > 42 CALL_METHOD scala.Predef.println (dynamic) > 42 JUMP 2 -1347c1430 +1341c1424 < locals: value args, variable result, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value ex6, value x4, value x5, value x -1349c1432 +1343c1426 < blocks: [1,2,3,4,5,8,10,11,13,14,16] --- > blocks: [1,2,3,5,8,10,11,13,14,16,17] -1373c1456,1457 +1367c1450,1451 < 203 THROW(MyException) --- > ? STORE_LOCAL(value ex6) > ? JUMP 17 -1393c1477,1486 +1387c1471,1480 < 209 THROW(MyException) --- > ? STORE_LOCAL(value ex6) @@ -351,41 +351,41 @@ > 212 LOAD_LOCAL(value x4) > 212 IS_INSTANCE REF(class MyException) > 212 CZJUMP (BOOL)NE ? 5 : 8 -1406,1408d1498 +1400,1402d1492 < 200 JUMP 4 < < 4: -1418,1421d1507 +1412,1415d1501 < 212 LOAD_LOCAL(value x5) < 212 CALL_METHOD MyException.message (dynamic) < 212 STORE_LOCAL(value message) < 212 SCOPE_ENTER value message -1423c1509,1510 +1417c1503,1504 < 213 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 213 CALL_METHOD MyException.message (dynamic) -1467c1554 +1461c1548 < blocks: [1,2,3,4,5,7] --- > blocks: [1,2,3,4,5,7,8] -1491c1578,1579 +1485c1572,1573 < 58 THROW(IllegalArgumentException) --- > ? STORE_LOCAL(value e) > ? JUMP 8 -1492a1581,1586 +1486a1575,1580 > 8: > 62 LOAD_MODULE object Predef > 62 CONSTANT("RuntimeException") > 62 CALL_METHOD scala.Predef.println (dynamic) > 62 JUMP 2 > -1540c1634 +1534c1628 < blocks: [1,2,3,4] --- > blocks: [1,2,3,4,5] -1560c1654,1659 +1554c1648,1653 < 229 THROW(MyException) --- > ? JUMP 5 @@ -394,19 +394,19 @@ > ? LOAD_LOCAL(variable monitor1) > 228 MONITOR_EXIT > 228 THROW(Throwable) -1566c1665 +1560c1659 < ? THROW(Throwable) --- > 228 THROW(Throwable) -1594c1693 +1588c1687 < locals: value args, variable result, variable monitor2, variable monitorResult1 --- > locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1 -1596c1695 +1590c1689 < blocks: [1,2,3,4] --- > blocks: [1,2,3,4,5] -1619c1718,1726 +1613c1712,1720 < 245 THROW(MyException) --- > ? STORE_LOCAL(value exception$1) @@ -418,7 +418,7 @@ > ? LOAD_LOCAL(variable monitor2) > 244 MONITOR_EXIT > 244 THROW(Throwable) -1625c1732 +1619c1726 < ? THROW(Throwable) --- > 244 THROW(Throwable) -- cgit v1.2.3 From 208d6ad0bbbb4f7b5021f96bce14606869a6c899 Mon Sep 17 00:00:00 2001 From: James Iry Date: Wed, 20 Feb 2013 15:52:56 -0800 Subject: SI-7159 Remove unreachable cases in GenICode#adapt This commit removes some unreachable code in GenICode#adapt. It's hard to prove that these cases are unreachable, but they do appear to be and I wasn't able to find a way to get to them. Code archaeology was un-fruitful; they're very old. Which may mean they are legacies from a time when Null and Nothing types weren't fully baked. 
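For context, the adaptation that remains after this cleanup is the Nothing case explained in the comment kept in the diff below: a method whose body has type Nothing still needs an athrow appended so the JVM accepts it. A minimal example of source that exercises that case (illustrative only, not part of this commit):

    object NothingAdaptExample {
      // The declared result type is String but the body has type Nothing;
      // ??? throws, and the code generator appends an athrow so the JVM is
      // satisfied that the method never falls through without a String.
      def f: String = ???

      def main(args: Array[String]): Unit =
        try println(f)
        catch { case _: NotImplementedError => println("caught ???") }
    }
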
--- src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 122972039b..3167289a10 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1038,7 +1038,7 @@ abstract class GenICode extends SubComponent { // A typical example is an overloaded type assigned after typer. log(s"GenICode#adapt($from, $to, $ctx, $pos)") - val conforms = (from <:< to) || (from == NullReference && to == NothingReference) // TODO why would we have null where we expect nothing? + val conforms = (from <:< to) def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos) def checkAssertions() { def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos" @@ -1054,8 +1054,6 @@ abstract class GenICode extends SubComponent { // 6: athrow // So this case tacks on the ahtrow which makes the JVM happy because class Nothing is declared as a subclass of Throwable case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode() - // TODO why do we have this case? It's saying if we have a throwable and a non-throwable is expected then we should emit a cast? Why would we get here? - case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables case _ => // widen subrange types if (from.isIntSizedType && to == LONG) -- cgit v1.2.3 From 04b147e4a0c226526d3192b68f8efa8953b531ea Mon Sep 17 00:00:00 2001 From: James Iry Date: Tue, 26 Feb 2013 17:22:35 -0800 Subject: SI-7159 Prepare to remove erroneous INT <:< LONG in TypeKinds In preparation for dealing with a problem in TypeKinds, this commit does some cleanup of code related to doing coercions. * Comments are added to clarify. * A println when converting between BOOL and anything else is removed and the code is allowed to flow through to an assertion. * Assertions are refactored to use string interpolation. * A few pattern matches were reformulated to equivalent variants In addition, a test is created for SI-107, the bug that necessitated the special case in GenICode#adapt for LONG coercion --- .../scala/tools/nsc/backend/icode/GenICode.scala | 35 ++++++++++++---------- .../scala/tools/nsc/backend/icode/TypeKinds.scala | 17 ++++++++--- .../scala/tools/nsc/backend/jvm/GenASM.scala | 12 +++----- test/files/run/t107.check | 1 + test/files/run/t107.scala | 8 +++++ 5 files changed, 45 insertions(+), 28 deletions(-) create mode 100644 test/files/run/t107.check create mode 100644 test/files/run/t107.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 3167289a10..a76f7caba2 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1038,14 +1038,9 @@ abstract class GenICode extends SubComponent { // A typical example is an overloaded type assigned after typer. 
log(s"GenICode#adapt($from, $to, $ctx, $pos)") - val conforms = (from <:< to) def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos) - def checkAssertions() { - def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos" - debugassert(from != UNIT, msg) - assert(!from.isReferenceType && !to.isReferenceType, msg) - } - if (conforms) from match { + + (from, to) match { // The JVM doesn't have a Nothing equivalent, so it doesn't know that a method of type Nothing can't actually return. So for instance, with // def f: String = ??? // we need @@ -1053,15 +1048,23 @@ abstract class GenICode extends SubComponent { // 3: invokevirtual #29; //Method scala/Predef$.$qmark$qmark$qmark:()Lscala/runtime/Nothing$; // 6: athrow // So this case tacks on the ahtrow which makes the JVM happy because class Nothing is declared as a subclass of Throwable - case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode() - case _ => - // widen subrange types - if (from.isIntSizedType && to == LONG) - coerce(INT, LONG) - } - else to match { - case UNIT => ctx.bb.emit(DROP(from), pos) // value discarding - case _ => checkAssertions() ; coerce(from, to) // other primitive coercions + case (NothingReference, _) => + ctx.bb.emit(THROW(ThrowableClass)) + ctx.bb.enterIgnoreMode() + // this special case is needed because of a special case in TypeKinds that + // says that the int sized primitives are subtypes of LONG + // even though they aren't according to the JVM + case (_, LONG) if from.isIntSizedType => + coerce(INT, LONG) + case _ if (from <:< to) => + () + case (_, UNIT) => + ctx.bb.emit(DROP(from), pos) + // otherwise we'd better be doing a primtive -> primitive coercion or there's a problem + case _ if !from.isRefOrArrayType && !to.isRefOrArrayType => + coerce(from, to) + case _ => + assert(false, s"Can't convert from $from to $to in unit ${unit.source} at $pos") } } diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index 6a392449e0..1f8c765a69 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -88,10 +88,19 @@ trait TypeKinds { self: ICodes => final def isNumericType: Boolean = isIntegralType | isRealType /** Simple subtyping check */ - def <:<(other: TypeKind): Boolean = (this eq other) || (this match { - case BOOL | BYTE | SHORT | CHAR => other == INT || other == LONG - case _ => this eq other - }) + def <:<(other: TypeKind): Boolean = other match { + // On the JVM, BOOL, BYTE, CHAR, SHORT need no coercion to INT + // TODO it's pretty suspect to call this a subtyping relationship + // for instance JVM Arrays are covariant, but Array[Char] is not + // a subtype of Array[Int] on the JVM. However, when I attempted + // to remove it I got verify errors when compiling the library + // under -optimize + case INT => this.isIntSizedType + // this case is even more suspect than the previous because + // BOOL, BYTE, CHAR, SHORT, and INT need conversion to get to LONG + case LONG => this.isIntSizedType || this == LONG + case _ => this eq other + } /** Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) 
*/ def isWideType: Boolean = false diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 91cb1857ac..3830b389ba 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -2626,8 +2626,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { * @param to The type the value will be converted into. */ def emitT2T(from: TypeKind, to: TypeKind) { - assert(isNonUnitValueTK(from), from) - assert(isNonUnitValueTK(to), to) + assert(isNonUnitValueTK(from) && isNonUnitValueTK(to), s"Cannot emit primitive conversion from $from to $to") def pickOne(opcs: Array[Int]) { val chosen = (to: @unchecked) match { @@ -2643,10 +2642,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } if(from == to) { return } - if((from == BOOL) || (to == BOOL)) { - // the only conversion involving BOOL that is allowed is (BOOL -> BOOL) - throw new Error("inconvertible types : " + from.toString() + " -> " + to.toString()) - } + // the only conversion involving BOOL that is allowed is (BOOL -> BOOL) + assert(from != BOOL && to != BOOL, "inconvertible types : $from -> $to") if(from.isIntSizedType) { // BYTE, CHAR, SHORT, and INT. (we're done with BOOL already) @@ -2810,8 +2807,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { case Conversion(src, dst) => debuglog("Converting from: " + src + " to: " + dst) - if (dst == BOOL) { println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line) } - else { emitT2T(src, dst) } + emitT2T(src, dst) case ArrayLength(_) => emit(Opcodes.ARRAYLENGTH) diff --git a/test/files/run/t107.check b/test/files/run/t107.check new file mode 100644 index 0000000000..d00491fd7e --- /dev/null +++ b/test/files/run/t107.check @@ -0,0 +1 @@ +1 diff --git a/test/files/run/t107.scala b/test/files/run/t107.scala new file mode 100644 index 0000000000..ab1b289882 --- /dev/null +++ b/test/files/run/t107.scala @@ -0,0 +1,8 @@ +object Test { + def main(args : Array[String]) : Unit = { + var hash : Long = 0 + val bytes = Array(1.toByte, 2.toByte, 3.toByte) + hash += bytes(0) + Console.println(hash) + } +} \ No newline at end of file -- cgit v1.2.3 From 4124a09379fe1784a6069f5af482bdabdb69a569 Mon Sep 17 00:00:00 2001 From: James Iry Date: Tue, 26 Feb 2013 20:23:59 -0800 Subject: SI-7159 Remove erroneous INT <:< LONG in TypeKinds TypeKinds said INT <:< LONG. But that's not true on the JVM, you need a coercion to move up. And GenICode#adapt was checking for just that special case. This commit removes the INT <:< LONG rule and then removes the special case from GenICode#adapt. 
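A small illustration of why the rule was erroneous (illustrative only, not part of this change): widening an Int to a Long still works at the source level, but on the JVM it is a conversion, an i2l instruction, rather than a subtype relationship; the backend has to emit that conversion explicitly, which is why INT <:< LONG did not belong in TypeKinds.

    object IntToLongExample {
      def main(args: Array[String]): Unit = {
        val i: Int = 3
        val l: Long = i   // compiles via a numeric widening (i2l), not via subtyping
        println(l)
      }
    }
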
--- src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 5 ----- src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala | 3 --- 2 files changed, 8 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index a76f7caba2..2b513a6fa3 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1051,11 +1051,6 @@ abstract class GenICode extends SubComponent { case (NothingReference, _) => ctx.bb.emit(THROW(ThrowableClass)) ctx.bb.enterIgnoreMode() - // this special case is needed because of a special case in TypeKinds that - // says that the int sized primitives are subtypes of LONG - // even though they aren't according to the JVM - case (_, LONG) if from.isIntSizedType => - coerce(INT, LONG) case _ if (from <:< to) => () case (_, UNIT) => diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index 1f8c765a69..266e2b861f 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -96,9 +96,6 @@ trait TypeKinds { self: ICodes => // to remove it I got verify errors when compiling the library // under -optimize case INT => this.isIntSizedType - // this case is even more suspect than the previous because - // BOOL, BYTE, CHAR, SHORT, and INT need conversion to get to LONG - case LONG => this.isIntSizedType || this == LONG case _ => this eq other } -- cgit v1.2.3 From bfd7863406146aa830028ed77f7b0107fc60e5dc Mon Sep 17 00:00:00 2001 From: James Iry Date: Wed, 27 Feb 2013 04:59:47 -0800 Subject: SI-7159 Distinguish between assignability and sub typing in TypeKinds TypeKinds said SHORT <:< INT. But that's not quite true on the JVM. You can assign SHORT to INT, but you can't assign an ARRAY[SHORT] to ARRAY[INT]. Since JVM arrays are covariant it's clear that assignability and subtyping are distinct on the JVM. This commit adds an isAssignable method and moves the rules about the int sized primitives there. ICodeCheckers, ICodeReader, and GenICode are all updated to use isAssignable instead of <:<. 
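A short illustration of the distinction being drawn here (illustrative only, not part of this change): a Short value can be assigned where an Int is expected, but an array of shorts can never stand in for an array of ints, so assignability and subtyping really are different relations on the JVM.

    object AssignableVsSubtype {
      def main(args: Array[String]): Unit = {
        val s: Short = 1
        val i: Int = s                               // fine: a Short value is assignable to an Int
        // val xs: Array[Int] = Array[Short](1, 2)   // does not compile: ARRAY[SHORT] is not ARRAY[INT]
        println(i)
      }
    }
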
--- .../scala/tools/nsc/backend/icode/GenICode.scala | 2 +- .../tools/nsc/backend/icode/ICodeCheckers.scala | 2 +- .../scala/tools/nsc/backend/icode/TypeKinds.scala | 32 ++++++++++++---------- .../tools/nsc/symtab/classfile/ICodeReader.scala | 2 +- 4 files changed, 20 insertions(+), 18 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 2b513a6fa3..61d0baf7b0 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1051,7 +1051,7 @@ abstract class GenICode extends SubComponent { case (NothingReference, _) => ctx.bb.emit(THROW(ThrowableClass)) ctx.bb.enterIgnoreMode() - case _ if (from <:< to) => + case _ if from isAssignabledTo to => () case (_, UNIT) => ctx.bb.emit(DROP(from), pos) diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index fb1ef311d2..49f2d9859d 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -351,7 +351,7 @@ abstract class ICodeCheckers { def typeError(k1: TypeKind, k2: TypeKind) { icodeError("\n expected: " + k1 + "\n found: " + k2) } - def isSubtype(k1: TypeKind, k2: TypeKind) = (k1 <:< k2) || { + def isSubtype(k1: TypeKind, k2: TypeKind) = (k1 isAssignabledTo k2) || { import platform.isMaybeBoxed (k1, k2) match { diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala index 266e2b861f..1875c8c914 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala @@ -88,17 +88,20 @@ trait TypeKinds { self: ICodes => final def isNumericType: Boolean = isIntegralType | isRealType /** Simple subtyping check */ - def <:<(other: TypeKind): Boolean = other match { - // On the JVM, BOOL, BYTE, CHAR, SHORT need no coercion to INT - // TODO it's pretty suspect to call this a subtyping relationship - // for instance JVM Arrays are covariant, but Array[Char] is not - // a subtype of Array[Int] on the JVM. However, when I attempted - // to remove it I got verify errors when compiling the library - // under -optimize + def <:<(other: TypeKind): Boolean + + /** + * this is directly assignable to other if no coercion or + * casting is needed to convert this to other. It's a distinct + * relationship from <:< because on the JVM, BOOL, BYTE, CHAR, + * SHORT need no coercion to INT even though JVM arrays + * are covariant, ARRAY[SHORT] is not a subtype of ARRAY[INT] + */ + final def isAssignabledTo(other: TypeKind): Boolean = other match { case INT => this.isIntSizedType - case _ => this eq other + case _ => this <:< other } - + /** Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) */ def isWideType: Boolean = false @@ -117,6 +120,7 @@ trait TypeKinds { self: ICodes => override def toString = { this.getClass.getName stripSuffix "$" dropWhile (_ != '$') drop 1 } + def <:<(other: TypeKind): Boolean = this eq other } /** @@ -286,7 +290,7 @@ trait TypeKinds { self: ICodes => } /** Checks subtyping relationship. 
*/ - override def <:<(other: TypeKind) = isNothingType || (other match { + def <:<(other: TypeKind) = isNothingType || (other match { case REFERENCE(cls2) => cls.tpe <:< cls2.tpe case ARRAY(_) => cls == NullClass case _ => false @@ -324,7 +328,7 @@ trait TypeKinds { self: ICodes => /** Array subtyping is covariant, as in Java. Necessary for checking * code that interacts with Java. */ - override def <:<(other: TypeKind) = other match { + def <:<(other: TypeKind) = other match { case ARRAY(elem2) => elem <:< elem2 case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent! case _ => false @@ -342,7 +346,7 @@ trait TypeKinds { self: ICodes => } /** Checks subtyping relationship. */ - override def <:<(other: TypeKind) = other match { + def <:<(other: TypeKind) = other match { case BOXED(`kind`) => true case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent! case _ => false @@ -355,6 +359,7 @@ trait TypeKinds { self: ICodes => */ case object ConcatClass extends TypeKind { override def toString = "ConcatClass" + def <:<(other: TypeKind): Boolean = this eq other /** * Approximate `lub`. The common type of two references is @@ -365,9 +370,6 @@ trait TypeKinds { self: ICodes => case REFERENCE(_) => AnyRefReference case _ => uncomparable(other) } - - /** Checks subtyping relationship. */ - override def <:<(other: TypeKind) = this eq other } ////////////////// Conversions ////////////////////////////// diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index 6e99129ee5..703724f003 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -930,7 +930,7 @@ abstract class ICodeReader extends ClassfileParser { locals.get(idx) match { case Some(ls) => - val l = ls find { loc => loc._2 <:< kind } + val l = ls find { loc => loc._2 isAssignabledTo kind } l match { case Some((loc, _)) => loc case None => -- cgit v1.2.3 From b457b6c477dfd5f3517d31bbbd8e2db5bd8386d2 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 1 Mar 2013 01:13:55 -0800 Subject: Refactors AsSeenFromMap to expose extension point. The extension point was the initial motivation but I also tried to bring some clarity to the internals. This is a setup commit for scaladoc and interactive modularization, and also will be followed by a fix for abstract types losing their prefixes (SI-6161.) --- src/reflect/scala/reflect/internal/Symbols.scala | 3 + src/reflect/scala/reflect/internal/Types.scala | 329 ++++++++++++++--------- 2 files changed, 201 insertions(+), 131 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 03419dd576..ff83cb5f26 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -940,6 +940,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => rawowner } + // Like owner, but NoSymbol.owner == NoSymbol instead of throwing an exception. + final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner + // TODO - don't allow the owner to be changed without checking invariants, at least // when under some flag. Define per-phase invariants for owner/owned relationships, // e.g. 
after flatten all classes are owned by package classes, there are lots and diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index f7ee4a7e7f..9cb7a71592 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -736,7 +736,7 @@ trait Types extends api.Types { self: SymbolTable => ) if (trivial) this else { - val m = new AsSeenFromMap(pre.normalize, clazz) + val m = newAsSeenFromMap(pre.normalize, clazz) val tp = m(this) val tp1 = existentialAbstraction(m.capturedParams, tp) @@ -4348,147 +4348,214 @@ trait Types extends api.Types { self: SymbolTable => (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz) ) - /** A map to compute the asSeenFrom method */ - class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap with KeepOnlyTypeConstraints { - var capturedSkolems: List[Symbol] = List() - var capturedParams: List[Symbol] = List() + def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap = + new AsSeenFromMap(pre, clazz) - override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = { - object annotationArgRewriter extends TypeMapTransformer { - private def canRewriteThis(sym: Symbol) = ( - (sym isNonBottomSubClass clazz) - && (pre.widen.typeSymbol isNonBottomSubClass sym) - && (pre.isStable || giveup()) - ) - // what symbol should really be used? - private def newTermSym() = { - val p = pre.typeSymbol - p.owner.newValue(p.name.toTermName, p.pos) setInfo pre - } - /** Rewrite `This` trees in annotation argument trees */ - override def transform(tree: Tree): Tree = super.transform(tree) match { - case This(_) if canRewriteThis(tree.symbol) => gen.mkAttributedQualifier(pre, newTermSym()) - case tree => tree - } - } - annotationArgRewriter.transform(tree) + + /** A map to compute the asSeenFrom method. + */ + class AsSeenFromMap(seenFromPrefix: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints { + // Some example source constructs relevant in asSeenFrom: + // + // object CaptureThis { + // trait X[A] { def f: this.type = this } + // class Y[A] { def f: this.type = this } + // // Created new existential to represent This(CaptureThis.X) seen from CaptureThis.X[B]: type _1.type <: CaptureThis.X[B] with Singleton + // def f1[B] = new X[B] { } + // // TODO - why is the behavior different when it's a class? 
+ // def f2[B] = new Y[B] { } + // } + // class CaptureVal[T] { + // val f: java.util.List[_ <: T] = null + // // Captured existential skolem for type _$1 seen from CaptureVal.this.f.type: type _$1 + // def g = f get 0 + // } + // class ClassParam[T] { + // // AsSeenFromMap(Inner.this.type, class Inner)/classParameterAsSeen(T)#loop(ClassParam.this.type, class ClassParam) + // class Inner(lhs: T) { def f = lhs } + // } + def capturedParams: List[Symbol] = _capturedParams + def capturedSkolems: List[Symbol] = _capturedSkolems + + def apply(tp: Type): Type = tp match { + case tp @ ThisType(_) => thisTypeAsSeen(tp) + case tp @ SingleType(_, sym) => if (sym.isPackageClass) tp else singleTypeAsSeen(tp) + case tp @ TypeRef(_, sym, _) if isTypeParamOfEnclosingClass(sym) => classParameterAsSeen(tp) + case _ => mapOver(tp) + } + + private var _capturedSkolems: List[Symbol] = Nil + private var _capturedParams: List[Symbol] = Nil + private val isStablePrefix = seenFromPrefix.isStable + private def prefixSymbol = seenFromPrefix.widen.typeSymbol + + private def isBaseClassOfEnclosingClass(base: Symbol) = { + def loop(encl: Symbol): Boolean = ( + isPossiblePrefix(encl) + && ((encl isSubClass base) || loop(encl.owner.enclClass)) + ) + // The hasCompleteInfo guard is necessary to avoid cycles during the typing + // of certain classes, notably ones defined inside package objects. + !base.hasCompleteInfo || loop(seenFromClass) } - def stabilize(pre: Type, clazz: Symbol): Type = { + /** Is the symbol a class type parameter from one of the enclosing + * classes, or a base class of one of them? + */ + private def isTypeParamOfEnclosingClass(sym: Symbol): Boolean = ( + sym.isTypeParameter + && sym.owner.isClass + && isBaseClassOfEnclosingClass(sym.owner) + ) + + /** Creates an existential representing a type parameter which appears + * in the prefix of a ThisType. + */ + protected def captureThis(pre: Type, clazz: Symbol): Type = { capturedParams find (_.owner == clazz) match { - case Some(qvar) => qvar.tpe - case _ => + case Some(p) => p.tpe + case _ => val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre) - capturedParams ::= qvar + _capturedParams ::= qvar + debuglog(s"Captured This(${clazz.fullNameString}) seen from $seenFromPrefix: ${qvar.defString}") qvar.tpe } } + protected def captureSkolems(skolems: List[Symbol]) { + for (p <- skolems; if !(capturedSkolems contains p)) { + debuglog(s"Captured $p seen from $seenFromPrefix") + _capturedSkolems ::= p + } + } - def apply(tp: Type): Type = - tp match { - case ThisType(sym) => - def toPrefix(pre: Type, clazz: Symbol): Type = - if (skipPrefixOf(pre, clazz)) tp - else if ((sym isNonBottomSubClass clazz) && - (pre.widen.typeSymbol isNonBottomSubClass sym)) { - val pre1 = pre match { - case SuperType(thistp, _) => thistp - case _ => pre - } - if (!(pre1.isStable || - pre1.typeSymbol.isPackageClass || - pre1.typeSymbol.isModuleClass && pre1.typeSymbol.isStatic)) { - stabilize(pre1, sym) - } else { - pre1 - } - } else { - toPrefix(pre.baseType(clazz).prefix, clazz.owner) - } - toPrefix(pre, clazz) - case SingleType(pre, sym) => - if (sym.isPackageClass) tp // short path - else { - val pre1 = this(pre) - if (pre1 eq pre) tp - else if (pre1.isStable) singleType(pre1, sym) - else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction - } - // AM: Martin, is this description accurate? 
- // walk the owner chain of `clazz` (the original argument to asSeenFrom) until we find the type param's owner (while rewriting pre as we crawl up the owner chain) - // once we're at the owner, extract the information that pre encodes about the type param, - // by minimally subsuming pre to the type instance of the class that owns the type param, - // the type we're looking for is the type instance's type argument at the position corresponding to the type parameter - // optimisation: skip this type parameter if it's not owned by a class, as those params are not influenced by the prefix through which they are seen - // (concretely: type params of anonymous type functions, which currently can only arise from normalising type aliases, are owned by the type alias of which they are the eta-expansion) - // (skolems also aren't affected: they are ruled out by the isTypeParameter check) - case TypeRef(prefix, sym, args) if (sym.isTypeParameter && sym.owner.isClass) => - def toInstance(pre: Type, clazz: Symbol): Type = - if (skipPrefixOf(pre, clazz)) mapOver(tp) - //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary - else { - def throwError = abort("" + tp + sym.locationString + " cannot be instantiated from " + pre.widen) - - val symclazz = sym.owner - if (symclazz == clazz && !pre.widen.isInstanceOf[TypeVar] && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) { - // have to deconst because it may be a Class[T]. - pre.baseType(symclazz).deconst match { - case TypeRef(_, basesym, baseargs) => - - def instParam(ps: List[Symbol], as: List[Type]): Type = - if (ps.isEmpty) { - if (forInteractive) { - val saved = settings.uniqid.value - try { - settings.uniqid.value = true - println("*** stale type parameter: " + tp + sym.locationString + " cannot be instantiated from " + pre.widen) - println("*** confused with params: " + sym + " in " + sym.owner + " not in " + ps + " of " + basesym) - println("*** stacktrace = ") - new Error().printStackTrace() - } finally settings.uniqid.value = saved - instParamRelaxed(basesym.typeParams, baseargs) - } else throwError - } else if (sym eq ps.head) - // @M! don't just replace the whole thing, might be followed by type application - appliedType(as.head, args mapConserve (this)) // @M: was as.head - else instParam(ps.tail, as.tail) - - /** Relaxed version of instParams which matches on names not symbols. - * This is a last fallback in interactive mode because races in calls - * from the IDE to the compiler may in rare cases lead to symbols referring - * to type parameters that are no longer current. - */ - def instParamRelaxed(ps: List[Symbol], as: List[Type]): Type = - if (ps.isEmpty) throwError - else if (sym.name == ps.head.name) - // @M! 
don't just replace the whole thing, might be followed by type application - appliedType(as.head, args mapConserve (this)) // @M: was as.head - else instParamRelaxed(ps.tail, as.tail) - - //Console.println("instantiating " + sym + " from " + basesym + " with " + basesym.typeParams + " and " + baseargs+", pre = "+pre+", symclazz = "+symclazz);//DEBUG - if (sameLength(basesym.typeParams, baseargs)) - instParam(basesym.typeParams, baseargs) - else - if (symclazz.tpe.parents exists typeIsErroneous) - ErrorType // don't be to overzealous with throwing exceptions, see #2641 - else - throw new Error( - "something is wrong (wrong class file?): "+basesym+ - " with type parameters "+ - basesym.typeParams.map(_.name).mkString("[",",","]")+ - " gets applied to arguments "+baseargs.mkString("[",",","]")+", phase = "+phase) - case ExistentialType(tparams, qtpe) => - capturedSkolems = capturedSkolems union tparams - toInstance(qtpe, clazz) - case t => - throwError - } - } else toInstance(pre.baseType(clazz).prefix, clazz.owner) - } - toInstance(pre, clazz) - case _ => + /** Find the type argument in an applied type which corresponds to a type parameter. + * The arguments are required to be related as follows, through intermediary `clazz`. + * An exception will be thrown if this is violated. + * + * @param lhs its symbol is a type parameter of `clazz` + * @param rhs a type application constructed from `clazz` + */ + private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = { + val TypeRef(_, lhsSym, lhsArgs) = lhs + val TypeRef(_, rhsSym, rhsArgs) = rhs + val clazz = lhsSym.safeOwner + require(clazz == rhsSym, s"$lhsSym is not a type parameter of $rhsSym") + + def fail: Type = ( + // don't be too zealous with the exceptions, see #2641 + if (clazz.tpe_*.parents exists typeIsErroneous) ErrorType + else abort(s"something is wrong: cannot make sense of type application\n $lhs\n $rhs") + ) + def loop(params: List[Symbol], args: List[Type]): Type = ( + // didn't find lhsSym amongst the params + if (params.isEmpty || args.isEmpty) fail + else if (params.head eq lhsSym) args.head + else loop(params.tail, args.tail) + ) + // @M! don't just replace the whole thing, might be followed by type application + appliedType(loop(rhsSym.typeParams, rhsArgs), lhsArgs mapConserve this) + } + + // 0) @pre: `classParam` is a class type parameter + // 1) Walk the owner chain of `seenFromClass` until we find the class which owns `classParam` + // 2) Take the base type of the prefix at that point with respect to the owning class + // 3) Solve for the type parameters through correspondence with the type args of the base type + // + // Only class type parameters (and not skolems) are considered, because other type parameters + // are not influenced by the prefix through which they are seen. Note that type params of + // anonymous type functions, which currently can only arise from normalising type aliases, are + // owned by the type alias of which they are the eta-expansion. + private def classParameterAsSeen(classParam: Type): Type = { + val TypeRef(_, tparam, _) = classParam + + def loop(pre: Type, clazz: Symbol): Type = { + // have to deconst because it may be a Class[T] + def nextBase = (pre baseType clazz).deconst + //@M! 
see test pos/tcpoly_return_overriding.scala why mapOver is necessary + if (skipPrefixOf(pre, clazz)) + mapOver(classParam) + else if (!matchesPrefixAndClass(pre, clazz)(tparam.owner)) + loop(nextBase.prefix, clazz.owner) + else nextBase match { + case applied @ TypeRef(_, _, _) => correspondingTypeArgument(classParam, applied) + case ExistentialType(eparams, qtpe) => captureSkolems(eparams) ; loop(qtpe, clazz) + case t => abort(s"$tparam in ${tparam.owner} cannot be instantiated from $seenFromPrefix") + } + } + loop(seenFromPrefix, seenFromClass) + } + + // Does the candidate symbol match the given prefix and class? + private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) = pre.widen match { + case _: TypeVar => false + case pre => (clazz == candidate) && (pre.typeSymbol isSubClass candidate) + } + + // Whether the annotation tree currently being mapped over has had a This(_) node rewritten. + private[this] var wroteAnnotation = false + private object annotationArgRewriter extends TypeMapTransformer { + private def matchesThis(thiz: Symbol) = matchesPrefixAndClass(seenFromPrefix, seenFromClass)(thiz) + + // what symbol should really be used? + private def newThis(): Tree = { + wroteAnnotation = true + val thisSym = prefixSymbol.owner.newValue(prefixSymbol.name.toTermName, prefixSymbol.pos) setInfo seenFromPrefix + gen.mkAttributedQualifier(seenFromPrefix, thisSym) + } + + /** Rewrite `This` trees in annotation argument trees */ + override def transform(tree: Tree): Tree = super.transform(tree) match { + case This(_) if matchesThis(tree.symbol) => newThis() + case tree => tree + } + } + + // This becomes considerably cheaper if we optimize for the common cases: + // where the prefix is stable and where no This nodes are rewritten. If + // either is true, then we don't need to worry about calling giveup. So if + // the prefix is unstable, use a stack variable to indicate whether the tree + // was touched. This takes us to one allocation per AsSeenFromMap rather + // than an allocation on every call to mapOver, and no extra work when the + // tree only has its types remapped. 
+ override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = { + if (isStablePrefix) + annotationArgRewriter transform tree + else { + val saved = wroteAnnotation + wroteAnnotation = false + try annotationArgRewriter transform tree + finally if (wroteAnnotation) giveup() else wroteAnnotation = saved + } + } + + private def thisTypeAsSeen(tp: ThisType): Type = { + def loop(pre: Type, clazz: Symbol): Type = { + val pre1 = pre match { + case SuperType(thistpe, _) => thistpe + case _ => pre + } + if (skipPrefixOf(pre, clazz)) mapOver(tp) + else if (!matchesPrefixAndClass(pre, clazz)(tp.sym)) + loop((pre baseType clazz).prefix, clazz.owner) + else if (pre1.isStable) + pre1 + else + captureThis(pre1, clazz) } + loop(seenFromPrefix, seenFromClass) + } + + private def singleTypeAsSeen(tp: SingleType): Type = { + val SingleType(pre, sym) = tp + + val pre1 = this(pre) + if (pre1 eq pre) tp + else if (pre1.isStable) singleType(pre1, sym) + else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction + } + + override def toString = s"AsSeenFromMap($seenFromPrefix, $seenFromClass)" } /** A base class to compute all substitutions */ -- cgit v1.2.3 From 3b071358a99ea3f90fb08a5a6b0b36c608b06931 Mon Sep 17 00:00:00 2001 From: James Iry Date: Fri, 1 Mar 2013 13:17:00 -0800 Subject: SI-6816 Deprecate -Yeta-expand-keeps-star This commit deprecates the -Yeta-expand-keeps-star flag. It was created in 2.10 to help in the transition from 2.9 but by the time 2.11 comes out it should no longer be necessary. --- src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 3 ++- test/files/neg/eta-expand-star-deprecation.check | 4 ++++ test/files/neg/eta-expand-star-deprecation.flags | 1 + test/files/neg/eta-expand-star-deprecation.scala | 8 ++++++++ 4 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/eta-expand-star-deprecation.check create mode 100644 test/files/neg/eta-expand-star-deprecation.flags create mode 100644 test/files/neg/eta-expand-star-deprecation.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index a5496f829d..2c9c20666d 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -167,7 +167,8 @@ trait ScalaSettings extends AbsScalaSettings val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.") val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.") - val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.") + val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition."). + withDeprecationMessage("This flag is scheduled for removal in 2.12. 
If you have a case where you need this flag then please report a bug.") val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "") val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.") val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes") diff --git a/test/files/neg/eta-expand-star-deprecation.check b/test/files/neg/eta-expand-star-deprecation.check new file mode 100644 index 0000000000..a79f0df76c --- /dev/null +++ b/test/files/neg/eta-expand-star-deprecation.check @@ -0,0 +1,4 @@ +warning: -Yeta-expand-keeps-star is deprecated: This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug. +error: No warnings can be incurred under -Xfatal-warnings. +one warning found +one error found diff --git a/test/files/neg/eta-expand-star-deprecation.flags b/test/files/neg/eta-expand-star-deprecation.flags new file mode 100644 index 0000000000..5ac8b638e4 --- /dev/null +++ b/test/files/neg/eta-expand-star-deprecation.flags @@ -0,0 +1 @@ +-Yeta-expand-keeps-star -deprecation -Xfatal-warnings diff --git a/test/files/neg/eta-expand-star-deprecation.scala b/test/files/neg/eta-expand-star-deprecation.scala new file mode 100644 index 0000000000..5749692522 --- /dev/null +++ b/test/files/neg/eta-expand-star-deprecation.scala @@ -0,0 +1,8 @@ +object Test { + def f[T](xs: T*): Unit = () + def g[T] = f[T] _ + + def main(args: Array[String]): Unit = { + g(1, 2) + } +} -- cgit v1.2.3 From e3b7b5f9be5316fbf7c41599e377daceac4c26e8 Mon Sep 17 00:00:00 2001 From: Michael Thorpe Date: Fri, 1 Mar 2013 13:58:05 +0000 Subject: Require firstKey and lastKey on IntMap to be tail recursive. --- src/library/scala/collection/immutable/IntMap.scala | 3 +++ 1 file changed, 3 insertions(+) (limited to 'src') diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala index ab1faf363e..457d46a0eb 100644 --- a/src/library/scala/collection/immutable/IntMap.scala +++ b/src/library/scala/collection/immutable/IntMap.scala @@ -12,6 +12,7 @@ package immutable import scala.collection.generic.{ CanBuildFrom, BitOperations } import scala.collection.mutable.{ Builder, MapBuilder } +import scala.annotation.tailrec /** Utility class for integer maps. * @author David MacIver @@ -427,6 +428,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] /** * The entry with the lowest key value considered in unsigned order. */ + @tailrec final def firstKey: Int = this match { case Bin(_, _, l, r) => l.firstKey case Tip(k, v) => k @@ -436,6 +438,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] /** * The entry with the highest key value considered in unsigned order. */ + @tailrec final def lastKey: Int = this match { case Bin(_, _, l, r) => r.lastKey case Tip(k, v) => k -- cgit v1.2.3 From e1ab60e60e169a874d7cacc2287ae89238e20b76 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 2 Mar 2013 11:17:40 -0800 Subject: Simplified correspondingTypeArgument based on reviewer feedback. 
--- src/reflect/scala/reflect/internal/Types.scala | 25 ++++++++++--------------- 1 file changed, 10 insertions(+), 15 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 9cb7a71592..4794052e05 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4438,22 +4438,17 @@ trait Types extends api.Types { self: SymbolTable => private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = { val TypeRef(_, lhsSym, lhsArgs) = lhs val TypeRef(_, rhsSym, rhsArgs) = rhs - val clazz = lhsSym.safeOwner - require(clazz == rhsSym, s"$lhsSym is not a type parameter of $rhsSym") + require(lhsSym.safeOwner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym") - def fail: Type = ( - // don't be too zealous with the exceptions, see #2641 - if (clazz.tpe_*.parents exists typeIsErroneous) ErrorType - else abort(s"something is wrong: cannot make sense of type application\n $lhs\n $rhs") - ) - def loop(params: List[Symbol], args: List[Type]): Type = ( - // didn't find lhsSym amongst the params - if (params.isEmpty || args.isEmpty) fail - else if (params.head eq lhsSym) args.head - else loop(params.tail, args.tail) - ) - // @M! don't just replace the whole thing, might be followed by type application - appliedType(loop(rhsSym.typeParams, rhsArgs), lhsArgs mapConserve this) + // Find the type parameter position; we'll use the corresponding argument + val argIndex = rhsSym.typeParams indexOf lhsSym + + if (argIndex >= 0 && argIndex < rhsArgs.length) // @M! don't just replace the whole thing, might be followed by type application + appliedType(rhsArgs(argIndex), lhsArgs mapConserve this) + else if (rhsSym.tpe_*.parents exists typeIsErroneous) // don't be too zealous with the exceptions, see #2641 + ErrorType + else + abort(s"something is wrong: cannot make sense of type application\n $lhs\n $rhs") } // 0) @pre: `classParam` is a class type parameter -- cgit v1.2.3 From 1b9c2f51d0fc37b1f2065a2b7c575cbfcd4665cd Mon Sep 17 00:00:00 2001 From: Dan Rosen Date: Sat, 2 Mar 2013 13:43:30 -0800 Subject: SI-7132 - don't discard Unit type in interpreter --- .../scala/tools/nsc/interpreter/ExprTyper.scala | 12 +++++++----- test/files/run/repl-colon-type.check | 21 +++++++++++---------- test/files/run/repl-colon-type.scala | 4 ++++ 3 files changed, 22 insertions(+), 15 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala index b087547cf8..9edd54b939 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala @@ -49,15 +49,13 @@ trait ExprTyper { // Typing it with a lazy val would give us the right type, but runs // into compiler bugs with things like existentials, so we compile it // behind a def and strip the NullaryMethodType which wraps the expr. 
- val line = "def " + name + " = {\n" + code + "\n}" + val line = "def " + name + " = " + code interpretSynthetic(line) match { case IR.Success => val sym0 = symbolOfTerm(name) // drop NullaryMethodType - val sym = sym0.cloneSymbol setInfo exitingTyper(sym0.info.finalResultType) - if (sym.info.typeSymbol eq UnitClass) NoSymbol - else sym + sym0.cloneSymbol setInfo exitingTyper(sym0.info.finalResultType) case _ => NoSymbol } } @@ -74,7 +72,11 @@ trait ExprTyper { case _ => NoSymbol } } - beQuietDuring(asExpr()) orElse beQuietDuring(asDefn()) + def asError(): Symbol = { + interpretSynthetic(code) + NoSymbol + } + beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError() } private var typeOfExpressionDepth = 0 diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check index 4cd0e1d588..27be3eb67d 100644 --- a/test/files/run/repl-colon-type.check +++ b/test/files/run/repl-colon-type.check @@ -4,12 +4,6 @@ Type :help for more information. scala> scala> :type List[1, 2, 3] -:2: error: identifier expected but integer literal found. - List[1, 2, 3] - ^ -:3: error: ']' expected but '}' found. - } - ^ :1: error: identifier expected but integer literal found. List[1, 2, 3] ^ @@ -44,12 +38,9 @@ scala> :type lazy val f = 5 Int scala> :type protected lazy val f = 5 -:2: error: illegal start of statement (no modifiers allowed here) - protected lazy val f = 5 - ^ :5: error: lazy value f cannot be accessed in object $iw Access to protected value f not permitted because - enclosing object $eval in package $line19 is not a subclass of + enclosing object $eval in package $line13 is not a subclass of object $iw where target is defined lazy val $result = f ^ @@ -221,4 +212,14 @@ PolyType( scala> +scala> // SI-7132 - :type doesn't understand Unit + +scala> :type () +Unit + +scala> :type println("side effect!") +Unit + +scala> + scala> diff --git a/test/files/run/repl-colon-type.scala b/test/files/run/repl-colon-type.scala index c055b215c2..8cf81a6afe 100644 --- a/test/files/run/repl-colon-type.scala +++ b/test/files/run/repl-colon-type.scala @@ -26,6 +26,10 @@ object Test extends ReplTest { |:type -v Nil.combinations _ |:type -v def f[T <: AnyVal] = List[T]().combinations _ |:type -v def f[T, U >: T](x: T, y: List[U]) = x :: y + | + |// SI-7132 - :type doesn't understand Unit + |:type () + |:type println("side effect!") """.stripMargin } -- cgit v1.2.3 From 6f5e525ef985f187a9132ea822a0c4ec753d0525 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 2 Mar 2013 22:56:17 -0800 Subject: Establishes what's up with widening in asSeenFrom. Added explanatory comment. --- src/reflect/scala/reflect/internal/Types.scala | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 4794052e05..15883bb8a9 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4351,7 +4351,6 @@ trait Types extends api.Types { self: SymbolTable => def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap = new AsSeenFromMap(pre, clazz) - /** A map to compute the asSeenFrom method. 
*/ class AsSeenFromMap(seenFromPrefix: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints { @@ -4387,8 +4386,9 @@ trait Types extends api.Types { self: SymbolTable => private var _capturedSkolems: List[Symbol] = Nil private var _capturedParams: List[Symbol] = Nil private val isStablePrefix = seenFromPrefix.isStable - private def prefixSymbol = seenFromPrefix.widen.typeSymbol + // isBaseClassOfEnclosingClassOrInfoIsNotYetComplete would be a more accurate + // but less succinct name. private def isBaseClassOfEnclosingClass(base: Symbol) = { def loop(encl: Symbol): Boolean = ( isPossiblePrefix(encl) @@ -4474,16 +4474,19 @@ trait Types extends api.Types { self: SymbolTable => else nextBase match { case applied @ TypeRef(_, _, _) => correspondingTypeArgument(classParam, applied) case ExistentialType(eparams, qtpe) => captureSkolems(eparams) ; loop(qtpe, clazz) - case t => abort(s"$tparam in ${tparam.owner} cannot be instantiated from $seenFromPrefix") + case t => abort(s"$tparam in ${tparam.owner} cannot be instantiated from ${seenFromPrefix.widen}") } } loop(seenFromPrefix, seenFromClass) } // Does the candidate symbol match the given prefix and class? + // Since pre may be something like ThisType(A) where trait A { self: B => }, + // we have to test the typeSymbol of the widened type, not pre.typeSymbol, or + // B will not be considered. private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) = pre.widen match { case _: TypeVar => false - case pre => (clazz == candidate) && (pre.typeSymbol isSubClass candidate) + case wide => (clazz == candidate) && (wide.typeSymbol isSubClass clazz) } // Whether the annotation tree currently being mapped over has had a This(_) node rewritten. @@ -4494,7 +4497,8 @@ trait Types extends api.Types { self: SymbolTable => // what symbol should really be used? private def newThis(): Tree = { wroteAnnotation = true - val thisSym = prefixSymbol.owner.newValue(prefixSymbol.name.toTermName, prefixSymbol.pos) setInfo seenFromPrefix + val presym = seenFromPrefix.widen.typeSymbol + val thisSym = presym.owner.newValue(presym.name.toTermName, presym.pos) setInfo seenFromPrefix gen.mkAttributedQualifier(seenFromPrefix, thisSym) } @@ -4530,7 +4534,7 @@ trait Types extends api.Types { self: SymbolTable => case _ => pre } if (skipPrefixOf(pre, clazz)) - mapOver(tp) + mapOver(tp) // TODO - is mapOver necessary here? else if (!matchesPrefixAndClass(pre, clazz)(tp.sym)) loop((pre baseType clazz).prefix, clazz.owner) else if (pre1.isStable) -- cgit v1.2.3 From c048669a4796d902174c2c8f7b6fa714cbc0205d Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 3 Mar 2013 11:09:25 -0800 Subject: Renamed type param to be consistent with convention. It's super confusing to see debugging output showing a type constructor called "Coll". The convention in the collections is that CC[A] takes type parameters and Coll is an alias for the applied type. 
--- src/library/scala/collection/TraversableOnce.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) (limited to 'src') diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index 679e8e3e61..fcca2da437 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -383,17 +383,17 @@ object TraversableOnce { new FlattenOps[A](travs map ev) /* Functionality reused in Iterator.CanBuildFrom */ - private[collection] abstract class BufferedCanBuildFrom[A, Coll[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[Coll[_], A, Coll[A]] { - def bufferToColl[B](buff: ArrayBuffer[B]): Coll[B] - def traversableToColl[B](t: GenTraversable[B]): Coll[B] + private[collection] abstract class BufferedCanBuildFrom[A, CC[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[CC[_], A, CC[A]] { + def bufferToColl[B](buff: ArrayBuffer[B]): CC[B] + def traversableToColl[B](t: GenTraversable[B]): CC[B] - def newIterator: Builder[A, Coll[A]] = new ArrayBuffer[A] mapResult bufferToColl + def newIterator: Builder[A, CC[A]] = new ArrayBuffer[A] mapResult bufferToColl /** Creates a new builder on request of a collection. * @param from the collection requesting the builder to be created. * @return the result of invoking the `genericBuilder` method on `from`. */ - def apply(from: Coll[_]): Builder[A, Coll[A]] = from match { + def apply(from: CC[_]): Builder[A, CC[A]] = from match { case xs: generic.GenericTraversableTemplate[_, _] => xs.genericBuilder.asInstanceOf[Builder[A, Traversable[A]]] mapResult { case res => traversableToColl(res.asInstanceOf[GenTraversable[A]]) } -- cgit v1.2.3 From 1bde987ee151d8898963ee503b1e6901226cabce Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 4 Mar 2013 08:34:38 -0800 Subject: Always at least log devWarnings. --- src/compiler/scala/tools/nsc/Global.scala | 2 ++ 1 file changed, 2 insertions(+) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index fea9e72512..4e5f4faf51 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -263,6 +263,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) @inline final override def devWarning(msg: => String) { if (settings.developer.value || settings.debug.value) warning("!!! " + msg) + else + log("!!! " + msg) // such warnings always at least logged } private def elapsedMessage(msg: String, start: Long) = -- cgit v1.2.3 From 305a987da0df292b75a16aae9c698df155af0a8c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 4 Mar 2013 08:46:40 -0800 Subject: Added methods debuglogResult and devWarningResult. Lowering the barriers to sensible logging - these methods are key in avoiding the "too much trouble" syndrome. 
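
A standalone sketch of the call pattern these combinators enable. The println-based stub and
the parsePort example are invented for illustration; inside the compiler the real debuglog and
devWarning are used instead:

    // Stub with the same shape as the new SymbolTable helper:
    def debuglogResult[T](msg: => String)(result: T): T = {
      println(msg + ": " + result)   // the real helper forwards to debuglog
      result
    }

    // Log and return in one expression, with no temporary val just for logging:
    def parsePort(s: String): Int = debuglogResult(s"parsePort($s)")(s.toInt)
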
--- src/reflect/scala/reflect/internal/SymbolTable.scala | 10 ++++++++++ 1 file changed, 10 insertions(+) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 9b5778b9da..03ec59f0fe 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -87,6 +87,16 @@ abstract class SymbolTable extends macros.Universe result } @inline + final private[scala] def debuglogResult[T](msg: => String)(result: T): T = { + debuglog(msg + ": " + result) + result + } + @inline + final private[scala] def devWarningResult[T](msg: => String)(result: T): T = { + devWarning(msg + ": " + result) + result + } + @inline final private[scala] def logResultIf[T](msg: => String, cond: T => Boolean)(result: T): T = { if (cond(result)) log(msg + ": " + result) -- cgit v1.2.3 From c10df64f4d05d5c01c027c4f519715cf7fb44e1e Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 4 Mar 2013 08:34:56 -0800 Subject: Add some logging to sinful typevar methods. These super-mutation-oriented methods should enthusiastically communicate what they are doing, especially when they encounter anything unexpected. None of this work should be taken as an endorsement of any of the worked-upon code. --- .../scala/tools/nsc/typechecker/Contexts.scala | 42 +++++++++++----- .../scala/tools/nsc/typechecker/Infer.scala | 57 ++++++++++------------ .../scala/tools/nsc/typechecker/Typers.scala | 7 +-- 3 files changed, 58 insertions(+), 48 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index eb91251930..26e39d3d1b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -8,6 +8,7 @@ package typechecker import scala.collection.mutable import scala.annotation.tailrec +import scala.reflect.internal.util.shortClassOfInstance /** * @author Martin Odersky @@ -175,6 +176,7 @@ trait Contexts { self: Analyzer => if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this else outer.enclClassOrMethod + def enclosingCaseDef = nextEnclosing(_.tree.isInstanceOf[CaseDef]) def undetparamsString = if (undetparams.isEmpty) "" else undetparams.mkString("undetparams=", ", ", "") @@ -584,23 +586,39 @@ trait Contexts { self: Analyzer => } def pushTypeBounds(sym: Symbol) { + sym.info match { + case tb: TypeBounds => if (!tb.isEmptyBounds) log(s"Saving $sym info=$tb") + case info => devWarning(s"Something other than a TypeBounds seen in pushTypeBounds: $info is a ${shortClassOfInstance(info)}") + } savedTypeBounds ::= ((sym, sym.info)) } def restoreTypeBounds(tp: Type): Type = { - var current = tp - for ((sym, info) <- savedTypeBounds) { - debuglog("resetting " + sym + " to " + info) - sym.info match { - case TypeBounds(lo, hi) if (hi <:< lo && lo <:< hi) => - current = current.instantiateTypeParams(List(sym), List(lo)) -//@M TODO: when higher-kinded types are inferred, probably need a case PolyType(_, TypeBounds(...)) if ... => - case _ => - } - sym.setInfo(info) + def restore(): Type = savedTypeBounds.foldLeft(tp) { case (current, (sym, savedInfo)) => + def bounds_s(tb: TypeBounds) = if (tb.isEmptyBounds) "" else s"TypeBounds(lo=${tb.lo}, hi=${tb.hi})" + //@M TODO: when higher-kinded types are inferred, probably need a case PolyType(_, TypeBounds(...)) if ... 
=> + val tb @ TypeBounds(lo, hi) = sym.info.bounds + val isUnique = lo <:< hi && hi <:< lo + val isPresent = current contains sym + def saved_s = bounds_s(savedInfo.bounds) + def current_s = bounds_s(sym.info.bounds) + + if (isUnique && isPresent) + devWarningResult(s"Preserving inference: ${sym.nameString}=$hi in $current (based on $current_s) before restoring $sym to saved $saved_s")( + current.instantiateTypeParams(List(sym), List(hi)) + ) + else if (isPresent) + devWarningResult(s"Discarding inferred $current_s because it does not uniquely determine $sym in")(current) + else + logResult(s"Discarding inferred $current_s because $sym does not appear in")(current) + } + try restore() + finally { + for ((sym, savedInfo) <- savedTypeBounds) + sym setInfo debuglogResult(s"Discarding inferred $sym=${sym.info}, restoring saved info")(savedInfo) + + savedTypeBounds = Nil } - savedTypeBounds = List() - current } private var implicitsCache: List[List[ImplicitInfo]] = null diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index db3759d65f..a29cc93b6d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1317,15 +1317,18 @@ trait Infer extends Checkable { } } - def instBounds(tvar: TypeVar): (Type, Type) = { - val tparam = tvar.origin.typeSymbol - val instType = toOrigin(tvar.constr.inst) + def instBounds(tvar: TypeVar): TypeBounds = { + val tparam = tvar.origin.typeSymbol + val instType = toOrigin(tvar.constr.inst) + val TypeBounds(lo, hi) = tparam.info.bounds val (loBounds, hiBounds) = - if (instType != NoType && isFullyDefined(instType)) (List(instType), List(instType)) + if (isFullyDefined(instType)) (List(instType), List(instType)) else (tvar.constr.loBounds, tvar.constr.hiBounds) - val lo = lub(tparam.info.bounds.lo :: loBounds map toOrigin) - val hi = glb(tparam.info.bounds.hi :: hiBounds map toOrigin) - (lo, hi) + + TypeBounds( + lub(lo :: loBounds map toOrigin), + glb(hi :: hiBounds map toOrigin) + ) } def isInstantiatable(tvars: List[TypeVar]) = { @@ -1335,33 +1338,25 @@ trait Infer extends Checkable { solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (_ => Variance.Covariant), false) } - // this is quite nasty: it destructively changes the info of the syms of e.g., method type params (see #3692, where the type param T's bounds were set to >: T <: T, so that parts looped) + // this is quite nasty: it destructively changes the info of the syms of e.g., method type params + // (see #3692, where the type param T's bounds were set to > : T <: T, so that parts looped) // the changes are rolled back by restoreTypeBounds, but might be unintentially observed in the mean time def instantiateTypeVar(tvar: TypeVar) { - val tparam = tvar.origin.typeSymbol - if (false && - tvar.constr.inst != NoType && - isFullyDefined(tvar.constr.inst) && - (tparam.info.bounds containsType tvar.constr.inst)) { - context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam) - tparam setInfo tvar.constr.inst - tparam resetFlag DEFERRED - debuglog("new alias of " + tparam + " = " + tparam.info) - } else { - val (lo, hi) = instBounds(tvar) - if (lo <:< hi) { - if (!((lo <:< tparam.info.bounds.lo) && (tparam.info.bounds.hi <:< hi)) // bounds were improved - && tparam != lo.typeSymbolDirect && tparam != hi.typeSymbolDirect) { // don't create illegal cycles - context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam) - tparam setInfo 
TypeBounds(lo, hi) - debuglog("new bounds of " + tparam + " = " + tparam.info) - } else { - debuglog("redundant: "+tparam+" "+tparam.info+"/"+lo+" "+hi) - } - } else { - debuglog("inconsistent: "+tparam+" "+lo+" "+hi) + val tparam = tvar.origin.typeSymbol + val TypeBounds(lo0, hi0) = tparam.info.bounds + val tb @ TypeBounds(lo1, hi1) = instBounds(tvar) + + if (lo1 <:< hi1) { + if (lo1 <:< lo0 && hi0 <:< hi1) // bounds unimproved + log(s"redundant bounds: discarding TypeBounds($lo1, $hi1) for $tparam, no improvement on TypeBounds($lo0, $hi0)") + else if (tparam == lo1.typeSymbolDirect || tparam == hi1.typeSymbolDirect) + log(s"cyclical bounds: discarding TypeBounds($lo1, $hi1) for $tparam because $tparam appears as bounds") + else { + context.enclosingCaseDef pushTypeBounds tparam + tparam setInfo logResult(s"updated bounds: $tparam from ${tparam.info} to")(tb) } } + else log(s"inconsistent bounds: discarding TypeBounds($lo1, $hi1)") } /** Type intersection of simple type tp1 with general type tp2. @@ -1524,7 +1519,7 @@ trait Infer extends Checkable { // todo: missing test case for bests.isEmpty bests match { case best :: Nil => tree setSymbol best setType (pre memberType best) - case best :: competing :: _ if alts0.nonEmpty => + case best :: competing :: _ if alts0.nonEmpty => // SI-6912 Don't give up and leave an OverloadedType on the tree. // Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try // unless an error is issued. We're not issuing an error, in the assumption that it would be diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c40b69bc7a..9680b911e0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2438,16 +2438,13 @@ trait Typers extends Adaptations with Tags { else typed(cdef.guard, BooleanClass.tpe) var body1: Tree = typed(cdef.body, pt) - val contextWithTypeBounds = context.nextEnclosing(_.tree.isInstanceOf[CaseDef]) - if (contextWithTypeBounds.savedTypeBounds.nonEmpty) { - body1 modifyType (contextWithTypeBounds restoreTypeBounds _) - + if (context.enclosingCaseDef.savedTypeBounds.nonEmpty) { + body1 modifyType context.enclosingCaseDef.restoreTypeBounds // insert a cast if something typechecked under the GADT constraints, // but not in real life (i.e., now that's we've reset the method's type skolems' // infos back to their pre-GADT-constraint state) if (isFullyDefined(pt) && !(body1.tpe <:< pt)) body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.dealiasWiden)) - } // body1 = checkNoEscaping.locals(context.scope, pt, body1) -- cgit v1.2.3 From 7edeb2430e4f5f5fa06150f3d79d04ec0ec4d67b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 4 Mar 2013 11:05:23 -0800 Subject: Cleanup in isHKSubType0. Making the mechanisms more apparent. Renamed to isHKSubType, because there is no other. 
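
As a user-level illustration of the conformance question this method decides; the Functor and
accepts names are invented, only the F[X] <: Traversable[X] bound matters:

    object HKExample {
      trait Functor[F[_]]
      def accepts[F[X] <: Traversable[X]](f: Functor[F]): Functor[F] = f

      // Accepted: as a type constructor, List[X] conforms to Traversable[X] for every X,
      // which is the higher-kinded subtype check performed by isHKSubType.
      val ok = accepts(new Functor[List] {})
    }
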
--- src/reflect/scala/reflect/internal/Types.scala | 73 ++++++++++++-------------- 1 file changed, 35 insertions(+), 38 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 15883bb8a9..22ba6d43e9 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2250,7 +2250,7 @@ trait Types extends api.Types { self: SymbolTable => else ErrorType } - // isHKSubType0 introduces synthetic type params so that + // isHKSubType introduces synthetic type params so that // betaReduce can first apply sym.info to typeArgs before calling // asSeenFrom. asSeenFrom then skips synthetic type params, which // are used to reduce HO subtyping to first-order subtyping, but @@ -5746,44 +5746,41 @@ trait Types extends api.Types { self: SymbolTable => case _ => false } - // @assume tp1.isHigherKinded || tp2.isHigherKinded - def isHKSubType0(tp1: Type, tp2: Type, depth: Int): Boolean = ( - tp1.typeSymbol == NothingClass - || - tp2.typeSymbol == AnyClass // @M Any and Nothing are super-type resp. subtype of every well-kinded type - || // @M! normalize reduces higher-kinded case to PolyType's - ((tp1.normalize.withoutAnnotations , tp2.normalize.withoutAnnotations) match { - case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => // @assume tp1.isHigherKinded && tp2.isHigherKinded (as they were both normalized to PolyType) - sameLength(tparams1, tparams2) && { - if (tparams1.head.owner.isMethod) { // fast-path: polymorphic method type -- type params cannot be captured - (tparams1 corresponds tparams2)((p1, p2) => p2.info.substSym(tparams2, tparams1) <:< p1.info) && - res1 <:< res2.substSym(tparams2, tparams1) - } else { // normalized higher-kinded type - //@M for an example of why we need to generate fresh symbols, see neg/tcpoly_ticket2101.scala - val tpsFresh = cloneSymbols(tparams1) - - (tparams1 corresponds tparams2)((p1, p2) => - p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) && - res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh) - - //@M the forall in the previous test could be optimised to the following, - // but not worth the extra complexity since it only shaves 1s from quick.comp - // (List.forall2(tpsFresh/*optimisation*/, tparams2)((p1, p2) => - // p2.info.substSym(tparams2, tpsFresh) <:< p1.info /*optimisation, == (p1 from tparams1).info.substSym(tparams1, tpsFresh)*/) && - // this optimisation holds because inlining cloneSymbols in `val tpsFresh = cloneSymbols(tparams1)` gives: - // val tpsFresh = tparams1 map (_.cloneSymbol) - // for (tpFresh <- tpsFresh) tpFresh.setInfo(tpFresh.info.substSym(tparams1, tpsFresh)) - } - } && annotationsConform(tp1.normalize, tp2.normalize) + private def isPolySubType(tp1: PolyType, tp2: PolyType): Boolean = { + val PolyType(tparams1, res1) = tp1 + val PolyType(tparams2, res2) = tp2 + + sameLength(tparams1, tparams2) && { + // fast-path: polymorphic method type -- type params cannot be captured + val isMethod = tparams1.head.owner.isMethod + //@M for an example of why we need to generate fresh symbols otherwise, see neg/tcpoly_ticket2101.scala + val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1) + def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes) + def sub2(tp: Type) = tp.substSym(tparams2, substitutes) + def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info) - case (PolyType(_, _), MethodType(params, _)) if 
params exists (_.tpe.isWildcard) => - false // don't warn on HasMethodMatching on right hand side + (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2)) + } + } - case (ntp1, ntp2) => - devWarning(s"isHKSubType0($tp1, $tp2, _) is ${tp1.getClass}, ${tp2.getClass}: ($ntp1, $ntp2)") - false // @assume !tp1.isHigherKinded || !tp2.isHigherKinded - // --> thus, cannot be subtypes (Any/Nothing has already been checked) - })) + // @assume tp1.isHigherKinded || tp2.isHigherKinded + def isHKSubType(tp1: Type, tp2: Type, depth: Int): Boolean = { + def isSub(ntp1: Type, ntp2: Type) = (ntp1.withoutAnnotations, ntp2.withoutAnnotations) match { + case (TypeRef(_, AnyClass, _), _) => false // avoid some warnings when Nothing/Any are on the other side + case (_, TypeRef(_, NothingClass, _)) => false + case (pt1: PolyType, pt2: PolyType) => isPolySubType(pt1, pt2) // @assume both .isHigherKinded (both normalized to PolyType) + case (_: PolyType, MethodType(ps, _)) if ps exists (_.tpe.isWildcard) => false // don't warn on HasMethodMatching on right hand side + case _ => // @assume !(both .isHigherKinded) thus cannot be subtypes + def tp_s(tp: Type): String = f"$tp%-20s ${util.shortClassOfInstance(tp)}%s" + devWarning(s"HK subtype check on $tp1 and $tp2, but both don't normalize to polytypes:\n tp1=${tp_s(ntp1)}\n tp2=${tp_s(ntp2)}") + false + } + + ( tp1.typeSymbol == NothingClass // @M Nothing is subtype of every well-kinded type + || tp2.typeSymbol == AnyClass // @M Any is supertype of every well-kinded type (@PP: is it? What about continuations plugin?) + || isSub(tp1.normalize, tp2.normalize) && annotationsConform(tp1, tp2) // @M! normalize reduces higher-kinded case to PolyType's + ) + } def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Int): Boolean = { def isSubArg(t1: Type, t2: Type, variance: Variance) = ( @@ -5801,7 +5798,7 @@ trait Types extends api.Types { self: SymbolTable => if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2 - if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType0(tp1, tp2, depth) + if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType(tp1, tp2, depth) /** First try, on the right: * - unwrap Annotated types, BoundedWildcardTypes, -- cgit v1.2.3 From dc1cd96ffd44ddd47ea1d5be88b4b9e438bd9c3b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 4 Mar 2013 14:47:01 -0800 Subject: Disentangled RangePositions from interactive. This is a stepping stone to having range positions all the time, as well as to modularizing the presentation compiler. It does not enable range positions by default, only places them smoewhere where they can be. 
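
Concretely, callers now go through the Global factory and opt in with -Yrangepos, as the
updated t5603 test below does. A minimal sketch (script/REPL style):

    import scala.tools.nsc.{ Global, Settings }
    import scala.tools.nsc.reporters.ConsoleReporter

    val settings = new Settings()
    settings.Yrangepos.value = true
    // Global.apply hands back a range-position-enabled compiler when -Yrangepos is set.
    val compiler = Global(settings, new ConsoleReporter(settings))
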
--- src/compiler/scala/tools/ant/Scalac.scala | 6 +- src/compiler/scala/tools/nsc/Global.scala | 7 +- src/compiler/scala/tools/nsc/Main.scala | 52 +--- src/compiler/scala/tools/nsc/ast/Positions.scala | 5 - src/compiler/scala/tools/nsc/ast/Trees.scala | 18 -- .../scala/tools/nsc/ast/parser/Scanners.scala | 9 + src/compiler/scala/tools/nsc/doc/DocParser.scala | 7 +- .../scala/tools/nsc/interactive/Global.scala | 4 +- .../tools/nsc/interactive/RangePositions.scala | 279 +------------------- .../scala/tools/nsc/interpreter/IMain.scala | 2 +- src/partest/scala/tools/partest/DirectTest.scala | 5 +- .../scala/tools/partest/nest/CompileManager.scala | 6 +- src/reflect/scala/reflect/internal/Positions.scala | 9 +- .../scala/reflect/internal/RangePositions.scala | 285 +++++++++++++++++++++ src/reflect/scala/reflect/internal/Trees.scala | 17 ++ .../internal/settings/MutableSettings.scala | 2 + .../scala/reflect/internal/util/Position.scala | 43 ---- .../reflect/internal/util/RangePosition.scala | 49 ++++ src/reflect/scala/reflect/runtime/Settings.scala | 2 + test/files/run/t5603.scala | 3 +- 20 files changed, 401 insertions(+), 409 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/RangePositions.scala create mode 100644 src/reflect/scala/reflect/internal/util/RangePosition.scala (limited to 'src') diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala index 2a9567b567..b2cedf6338 100644 --- a/src/compiler/scala/tools/ant/Scalac.scala +++ b/src/compiler/scala/tools/ant/Scalac.scala @@ -19,7 +19,6 @@ import org.apache.tools.ant.util.facade.{FacadeTaskHelper, ImplementationSpecificArgument} import scala.tools.nsc.{Global, Settings, CompilerCommand} -import scala.tools.nsc.interactive.RangePositions import scala.tools.nsc.io.{Path => SPath} import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} @@ -509,10 +508,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared { new Settings(error) protected def newGlobal(settings: Settings, reporter: Reporter) = - if (settings.Yrangepos.value) - new Global(settings, reporter) with RangePositions - else - new Global(settings, reporter) + Global(settings, reporter) /*============================================================================*\ ** The big execute method ** diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index fea9e72512..e438ac4bfb 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -12,6 +12,7 @@ import scala.collection.{ mutable, immutable } import io.{ SourceReader, AbstractFile, Path } import reporters.{ Reporter, ConsoleReporter } import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString, stackTraceHeadString } +import scala.reflect.internal.RangePositions import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile } import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat } import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers } @@ -1694,6 +1695,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def createJavadoc = false } +class RangePositionGlobal(settings0: Settings, reporter0: Reporter) extends Global(settings0, reporter0) with RangePositions + object Global { - def apply(settings: Settings, reporter: Reporter): Global = new Global(settings, reporter) + def apply(settings: Settings, reporter: Reporter): Global = + if 
(settings.Yrangepos.value) new RangePositionGlobal(settings, reporter) + else new Global(settings, reporter) } diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala index 27132f3c51..00c6c37dfd 100644 --- a/src/compiler/scala/tools/nsc/Main.scala +++ b/src/compiler/scala/tools/nsc/Main.scala @@ -2,54 +2,24 @@ * Copyright 2005-2013 LAMP/EPFL * @author Martin Odersky */ - -package scala.tools.nsc - -import java.io.File -import File.pathSeparator -import scala.tools.nsc.io.AbstractFile +package scala.tools +package nsc /** The main class for NSC, a compiler for the programming * language Scala. */ -object Main extends Driver with EvalLoop { - - def resident(compiler: Global) { - loop { line => - val args = line.split(' ').toList - val command = new CompilerCommand(args, new Settings(scalacError)) - compiler.reporter.reset() - new compiler.Run() compile command.files - } +class MainClass extends Driver with EvalLoop { + def resident(compiler: Global): Unit = loop { line => + val command = new CompilerCommand(line split "\\s+" toList, new Settings(scalacError)) + compiler.reporter.reset() + new compiler.Run() compile command.files } - override def processSettingsHook(): Boolean = - if (settings.Yidedebug.value) { - settings.Xprintpos.value = true - settings.Yrangepos.value = true - val compiler = new interactive.Global(settings, reporter) - import compiler.{ reporter => _, _ } - - val sfs = command.files map getSourceFile - val reloaded = new interactive.Response[Unit] - askReload(sfs, reloaded) - - reloaded.get.right.toOption match { - case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0 - case None => reporter.reset() // Causes other compiler errors to be ignored - } - askShutdown() - false - } - else true - - override def newCompiler(): Global = - if (settings.Yrangepos.value) new Global(settings, reporter) with interactive.RangePositions - else Global(settings, reporter) - + override def newCompiler(): Global = Global(settings, reporter) override def doCompile(compiler: Global) { - if (settings.resident.value) - resident(compiler) + if (settings.resident.value) resident(compiler) else super.doCompile(compiler) } } + +object Main extends MainClass { } diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala index e7bd5da9dd..63a2dd0ee7 100644 --- a/src/compiler/scala/tools/nsc/ast/Positions.scala +++ b/src/compiler/scala/tools/nsc/ast/Positions.scala @@ -6,11 +6,6 @@ import scala.reflect.internal.util.{ SourceFile, OffsetPosition } trait Positions extends scala.reflect.internal.Positions { self: Global => - def rangePos(source: SourceFile, start: Int, point: Int, end: Int) = - new OffsetPosition(source, point) - - def validatePositions(tree: Tree) {} - class ValidatingPosAssigner extends PosAssigner { var pos: Position = _ override def traverse(t: Tree) { diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index ab6a400c63..6c5c087d55 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -16,24 +16,6 @@ import scala.reflect.internal.Flags.TRAIT import scala.compat.Platform.EOL trait Trees extends scala.reflect.internal.Trees { self: Global => - - def treeLine(t: Tree): String = - if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1) - else t.summaryString - - def treeStatus(t: Tree, enclosingTree: Tree = 
null) = { - val parent = if (enclosingTree eq null) " " else " P#%5s".format(enclosingTree.id) - - "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.safeLine, parent, t.id, t.pos.show, t.shortClass, treeLine(t)) - } - def treeSymStatus(t: Tree) = { - val line = if (t.pos.isDefined) "line %-4s".format(t.pos.safeLine) else " " - "#%-5s %s %-10s // %s".format(t.id, line, t.shortClass, - if (t.symbol ne NoSymbol) "(" + t.symbol.fullLocationString + ")" - else treeLine(t) - ) - } - // --- additional cases -------------------------------------------------------- /** Only used during parsing */ case class Parens(args: List[Tree]) extends Tree diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index b28d4cd08d..181bba6896 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -1297,7 +1297,16 @@ trait Scanners extends ScannersCommon { class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) { val balance = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0) + /** The source code with braces and line starts annotated with [NN] showing the index */ + private def markedSource = { + val code = unit.source.content + val braces = code.indices filter (idx => "{}\n" contains code(idx)) toSet; + val mapped = code.indices map (idx => if (braces(idx)) s"${code(idx)}[$idx]" else "" + code(idx)) + mapped.mkString("") + } + init() + log(s"ParensAnalyzer for ${unit.source} of length ${unit.source.content.length}\n```\n$markedSource\n```") /** The offset of the first token on this line, or next following line if blank */ diff --git a/src/compiler/scala/tools/nsc/doc/DocParser.scala b/src/compiler/scala/tools/nsc/doc/DocParser.scala index 27c995e1c3..104178a832 100644 --- a/src/compiler/scala/tools/nsc/doc/DocParser.scala +++ b/src/compiler/scala/tools/nsc/doc/DocParser.scala @@ -9,24 +9,19 @@ package doc import reporters._ import scala.reflect.internal.util._ -import interactive.RangePositions import DocParser.Parsed /** A very minimal global customized for extracting `DocDefs`. It stops * right after parsing so it can read `DocDefs` from source code which would * otherwise cause the compiler to go haywire. */ -class DocParser(settings: nsc.Settings, reporter: Reporter) - extends Global(settings, reporter) - with RangePositions { - +class DocParser(settings: nsc.Settings, reporter: Reporter) extends RangePositionGlobal(settings, reporter) { def this(settings: Settings) = this(settings, new ConsoleReporter(settings)) def this() = this(new Settings(Console println _)) // the usual global initialization locally { new Run() } - override def forScaladoc = true override protected def computeInternalPhases() { phasesSet += syntaxAnalyzer } diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index fa1d4a38b9..82eafb4b09 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -21,12 +21,12 @@ import scala.language.implicitConversions /** The main class of the presentation compiler in an interactive environment such as an IDE */ -class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends { +class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends { /* Is the compiler initializing? 
Early def, so that the field is true during the * execution of the super constructor. */ private var initializing = true -} with scala.tools.nsc.Global(settings, _reporter) +} with RangePositionGlobal(settings, _reporter) with CompilerControl with RangePositions with ContextTrees diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala index 5a1a4cbdeb..0af62ad729 100644 --- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala +++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala @@ -2,283 +2,12 @@ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL * @author Martin Odersky */ + package scala.tools.nsc package interactive -import ast.Trees -import ast.Positions -import scala.reflect.internal.util.{SourceFile, Position, RangePosition, NoPosition} -import scala.collection.mutable.ListBuffer - -/** Handling range positions - * atPos, the main method in this trait, will add positions to a tree, - * and will ensure the following properties: - * - * 1. All nodes between the root of the tree and nodes that already have positions - * will be assigned positions. - * 2. No node which already has a position will be assigned a different range; however - * a RangePosition might become a TransparentPosition. - * 3. The position of each assigned node includes the positions of each of its children. - * 4. The positions of all solid descendants of children of an assigned node - * are mutually non-overlapping. - * - * Here, the solid descendant of a node are: - * - * If the node has a TransparentPosition, the solid descendants of all its children - * Otherwise, the singleton consisting of the node itself. - */ -trait RangePositions extends Trees with Positions { -self: scala.tools.nsc.Global => - - case class Range(pos: Position, tree: Tree) { - def isFree = tree == EmptyTree - } - - override def rangePos(source: SourceFile, start: Int, point: Int, end: Int) = - new RangePosition(source, start, point, end) - - /** A position that wraps a set of trees. - * The point of the wrapping position is the point of the default position. - * If some of the trees are ranges, returns a range position enclosing all ranges - * Otherwise returns default position that is either focused or not. - */ - override def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = { - val ranged = trees filter (_.pos.isRange) - if (ranged.isEmpty) if (focus) default.focus else default - else new RangePosition(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max) - } - - /** A position that wraps a non-empty set of trees. - * The point of the wrapping position is the point of the first trees' position. - * If some of the trees are ranges, returns a range position enclosing all ranges - * Otherwise returns first tree's position. - */ - override def wrappingPos(trees: List[Tree]): Position = { - val headpos = trees.head.pos - if (headpos.isDefined) wrappingPos(headpos, trees) else headpos - } - - // -------------- ensuring no overlaps ------------------------------- - - /** Ensure that given tree has no positions that overlap with - * any of the positions of `others`. This is done by - * shortening the range, assigning TransparentPositions - * to some of the nodes in `tree` or focusing on the position. 
- */ - override def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) { - def isOverlapping(pos: Position) = - pos.isRange && (others exists (pos overlaps _.pos)) - if (isOverlapping(tree.pos)) { - val children = tree.children - children foreach (ensureNonOverlapping(_, others, focus)) - if (tree.pos.isOpaqueRange) { - val wpos = wrappingPos(tree.pos, children, focus) - tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos) - } - } - } - - def solidDescendants(tree: Tree): List[Tree] = - if (tree.pos.isTransparent) tree.children flatMap solidDescendants - else List(tree) - - /** A free range from `lo` to `hi` */ - private def free(lo: Int, hi: Int): Range = - Range(new RangePosition(null, lo, lo, hi), EmptyTree) - - /** The maximal free range */ - private lazy val maxFree: Range = free(0, Int.MaxValue) - - /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */ - private def maybeFree(lo: Int, hi: Int) = - if (lo < hi) List(free(lo, hi)) - else List() - - /** Insert `pos` into ranges `rs` if possible; - * otherwise add conflicting trees to `conflicting`. - */ - private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match { - case List() => - assert(conflicting.nonEmpty) - rs - case r :: rs1 => - assert(!t.pos.isTransparent) - if (r.isFree && (r.pos includes t.pos)) { -// println("subdividing "+r+"/"+t.pos) - maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1 - } else { - if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree - r :: insert(rs1, t, conflicting) - } - } - - /** Replace elem `t` of `ts` by `replacement` list. */ - private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] = - if (ts.head == t) replacement ::: ts.tail - else ts.head :: replace(ts.tail, t, replacement) - - /** Does given list of trees have mutually non-overlapping positions? - * pre: None of the trees is transparent - */ - def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = { - var ranges = List(maxFree) - for (ct <- cts) { - if (ct.pos.isOpaqueRange) { - val conflicting = new ListBuffer[Tree] - ranges = insert(ranges, ct, conflicting) - if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct)) - } - } - List() - } - - // -------------- setting positions ------------------------------- - - /** Set position of all children of a node - * @param pos A target position. - * Uses the point of the position as the point of all positions it assigns. - * Uses the start of this position as an Offset position for unpositioed trees - * without children. - * @param trees The children to position. All children must be positionable. - */ - private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try { - for (tree <- trees) { - if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) { - val children = tree.children - if (children.isEmpty) { - tree setPos pos.focus - } else { - setChildrenPos(pos, children) - tree setPos wrappingPos(pos, children) - } - } - } - } catch { - case ex: Exception => - println("error while set children pos "+pos+" of "+trees) - throw ex - } - - /** Position a tree. - * This means: Set position of a node and position all its unpositioned children. 
- */ - override def atPos[T <: Tree](pos: Position)(tree: T): T = { - if (pos.isOpaqueRange) { - if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) { - tree.setPos(pos) - val children = tree.children - if (children.nonEmpty) { - if (children.tail.isEmpty) atPos(pos)(children.head) - else setChildrenPos(pos, children) - } - } - tree - } else { - super.atPos(pos)(tree) - } - } - - // ---------------- Validating positions ---------------------------------- - - override def validatePositions(tree: Tree) { - def reportTree(prefix : String, tree : Tree) { - val source = if (tree.pos.isDefined) tree.pos.source else "" - inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source) - inform("") - inform(treeStatus(tree)) - inform("") - } - - def positionError(msg: String)(body : => Unit) { - inform("======= Position error\n" + msg) - body - inform("\nWhile validating #" + tree.id) - inform(treeStatus(tree)) - inform("\nChildren:") - tree.children map (t => " " + treeStatus(t, tree)) foreach inform - inform("=======") - throw new ValidateException(msg) - } - - def validate(tree: Tree, encltree: Tree): Unit = { - - if (!tree.isEmpty && tree.canHaveAttrs) { - if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value)) - println("[%10s] %s".format("validate", treeStatus(tree, encltree))) - - if (!tree.pos.isDefined) - positionError("Unpositioned tree #"+tree.id) { - inform("%15s %s".format("unpositioned", treeStatus(tree, encltree))) - inform("%15s %s".format("enclosing", treeStatus(encltree))) - encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree)))) - } - if (tree.pos.isRange) { - if (!encltree.pos.isRange) - positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") { - reportTree("Enclosing", encltree) - reportTree("Enclosed", tree) - } - if (!(encltree.pos includes tree.pos)) - positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") { - reportTree("Enclosing", encltree) - reportTree("Enclosed", tree) - } - - findOverlapping(tree.children flatMap solidDescendants) match { - case List() => ; - case xs => { - positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) { - reportTree("Ancestor", tree) - for((x, y) <- xs) { - reportTree("First overlapping", x) - reportTree("Second overlapping", y) - } - } - } - } - } - for (ct <- tree.children flatMap solidDescendants) validate(ct, tree) - } - } - - if (phase.id <= currentRun.typerPhase.id) - validate(tree, tree) - } - - class ValidateException(msg : String) extends Exception(msg) - - // ---------------- Locating trees ---------------------------------- - - /** A locator for trees with given positions. - * Given a position `pos`, locator.apply returns - * the smallest tree that encloses `pos`. 
- */ - class Locator(pos: Position) extends Traverser { - var last: Tree = _ - def locateIn(root: Tree): Tree = { - this.last = EmptyTree - traverse(root) - this.last - } - protected def isEligible(t: Tree) = !t.pos.isTransparent - override def traverse(t: Tree) { - t match { - case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) => - traverse(tt.original) - case _ => - if (t.pos includes pos) { - if (isEligible(t)) last = t - super.traverse(t) - } else t match { - case mdef: MemberDef => - traverseTrees(mdef.mods.annotations) - case _ => - } - } - } - } +@deprecated("Use scala.reflect.internal.RangePositions", "2.11.0") +trait RangePositions extends scala.reflect.internal.RangePositions with ast.Trees with ast.Positions { + self: scala.tools.nsc.Global => - class TypedLocator(pos: Position) extends Locator(pos) { - override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null - } } diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index db54b5a2b1..4d1ceb2818 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -243,7 +243,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends settings.outputDirs setSingleOutput replOutput.dir settings.exposeEmptyPackage.value = true if (settings.Yrangepos.value) - new Global(settings, reporter) with ReplGlobal with interactive.RangePositions { override def toString: String = "" } + new RangePositionGlobal(settings, reporter) with ReplGlobal { override def toString: String = "" } else new Global(settings, reporter) with ReplGlobal { override def toString: String = "" } } diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala index 483cb491a1..3f61062073 100644 --- a/src/partest/scala/tools/partest/DirectTest.scala +++ b/src/partest/scala/tools/partest/DirectTest.scala @@ -42,10 +42,7 @@ abstract class DirectTest extends App { newCompiler(settings) } - def newCompiler(settings: Settings): Global = { - if (settings.Yrangepos.value) new Global(settings, reporter(settings)) with interactive.RangePositions - else new Global(settings, reporter(settings)) - } + def newCompiler(settings: Settings): Global = Global(settings, reporter(settings)) def reporter(settings: Settings): Reporter = new ConsoleReporter(settings) diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala index 0e62f9a022..a8694cc0d6 100644 --- a/src/partest/scala/tools/partest/nest/CompileManager.scala +++ b/src/partest/scala/tools/partest/nest/CompileManager.scala @@ -10,7 +10,6 @@ package nest import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io } import scala.reflect.io.{ Directory, File => SFile, FileOperationException } -import scala.tools.nsc.interactive.RangePositions import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter } import scala.tools.nsc.util.{ ClassPath, FakePos } import scala.tools.nsc.Properties.{ setProp, propOrEmpty } @@ -52,10 +51,7 @@ abstract class SimpleCompiler { class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler { def newGlobal(settings: Settings, reporter: Reporter): Global = - if (settings.Yrangepos.value) - new Global(settings, reporter) with RangePositions - else - new Global(settings, reporter) + Global(settings, reporter) def newGlobal(settings: Settings, 
logWriter: FileWriter): Global = newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter))) diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index f8c670827a..69f6d22538 100644 --- a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -1,6 +1,8 @@ package scala.reflect package internal +import util._ + trait Positions extends api.Positions { self: SymbolTable => type Position = scala.reflect.internal.util.Position @@ -12,7 +14,7 @@ trait Positions extends api.Positions { self: SymbolTable => * If some of the trees are ranges, returns a range position enclosing all ranges * Otherwise returns default position that is either focused or not. */ - def wrappingPos(default: Position, trees: List[Tree]) = wrappingPos(default, trees, true) + def wrappingPos(default: Position, trees: List[Tree]): Position = wrappingPos(default, trees, true) def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = default /** A position that wraps the non-empty set of trees. @@ -30,6 +32,9 @@ trait Positions extends api.Positions { self: SymbolTable => def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, true) } def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {} + def rangePos(source: SourceFile, start: Int, point: Int, end: Int): Position = new OffsetPosition(source, point) + def validatePositions(tree: Tree) {} + trait PosAssigner extends Traverser { var pos: Position } @@ -62,4 +67,4 @@ trait Positions extends api.Positions { self: SymbolTable => posAssigner.traverse(tree) tree } -} \ No newline at end of file +} diff --git a/src/reflect/scala/reflect/internal/RangePositions.scala b/src/reflect/scala/reflect/internal/RangePositions.scala new file mode 100644 index 0000000000..85bbaf3364 --- /dev/null +++ b/src/reflect/scala/reflect/internal/RangePositions.scala @@ -0,0 +1,285 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ + +package scala.reflect +package internal + +import util._ +import scala.collection.mutable.ListBuffer + +/** Handling range positions + * atPos, the main method in this trait, will add positions to a tree, + * and will ensure the following properties: + * + * 1. All nodes between the root of the tree and nodes that already have positions + * will be assigned positions. + * 2. No node which already has a position will be assigned a different range; however + * a RangePosition might become a TransparentPosition. + * 3. The position of each assigned node includes the positions of each of its children. + * 4. The positions of all solid descendants of children of an assigned node + * are mutually non-overlapping. + * + * Here, the solid descendant of a node are: + * + * If the node has a TransparentPosition, the solid descendants of all its children + * Otherwise, the singleton consisting of the node itself. + */ +trait RangePositions extends Trees with Positions { + self: SymbolTable => + + def inform(msg: String): Unit + + case class Range(pos: Position, tree: Tree) { + def isFree = tree == EmptyTree + } + + override def rangePos(source: SourceFile, start: Int, point: Int, end: Int): RangePosition = + new RangePosition(source, start, point, end) + + /** A position that wraps a set of trees. + * The point of the wrapping position is the point of the default position. 
+ * If some of the trees are ranges, returns a range position enclosing all ranges + * Otherwise returns default position that is either focused or not. + */ + override def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = { + val ranged = trees filter (_.pos.isRange) + if (ranged.isEmpty) if (focus) default.focus else default + else new RangePosition(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max) + } + + /** A position that wraps a non-empty set of trees. + * The point of the wrapping position is the point of the first trees' position. + * If some of the trees are ranges, returns a range position enclosing all ranges + * Otherwise returns first tree's position. + */ + override def wrappingPos(trees: List[Tree]): Position = { + val headpos = trees.head.pos + if (headpos.isDefined) wrappingPos(headpos, trees) else headpos + } + + // -------------- ensuring no overlaps ------------------------------- + + /** Ensure that given tree has no positions that overlap with + * any of the positions of `others`. This is done by + * shortening the range, assigning TransparentPositions + * to some of the nodes in `tree` or focusing on the position. + */ + override def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) { + def isOverlapping(pos: Position) = + pos.isRange && (others exists (pos overlaps _.pos)) + if (isOverlapping(tree.pos)) { + val children = tree.children + children foreach (ensureNonOverlapping(_, others, focus)) + if (tree.pos.isOpaqueRange) { + val wpos = wrappingPos(tree.pos, children, focus) + tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos) + } + } + } + + def solidDescendants(tree: Tree): List[Tree] = + if (tree.pos.isTransparent) tree.children flatMap solidDescendants + else List(tree) + + /** A free range from `lo` to `hi` */ + private def free(lo: Int, hi: Int): Range = + Range(new RangePosition(null, lo, lo, hi), EmptyTree) + + /** The maximal free range */ + private lazy val maxFree: Range = free(0, Int.MaxValue) + + /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */ + private def maybeFree(lo: Int, hi: Int) = + if (lo < hi) List(free(lo, hi)) + else List() + + /** Insert `pos` into ranges `rs` if possible; + * otherwise add conflicting trees to `conflicting`. + */ + private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match { + case List() => + assert(conflicting.nonEmpty) + rs + case r :: rs1 => + assert(!t.pos.isTransparent) + if (r.isFree && (r.pos includes t.pos)) { +// println("subdividing "+r+"/"+t.pos) + maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1 + } else { + if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree + r :: insert(rs1, t, conflicting) + } + } + + /** Replace elem `t` of `ts` by `replacement` list. */ + private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] = + if (ts.head == t) replacement ::: ts.tail + else ts.head :: replace(ts.tail, t, replacement) + + /** Does given list of trees have mutually non-overlapping positions? 
+ * pre: None of the trees is transparent + */ + def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = { + var ranges = List(maxFree) + for (ct <- cts) { + if (ct.pos.isOpaqueRange) { + val conflicting = new ListBuffer[Tree] + ranges = insert(ranges, ct, conflicting) + if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct)) + } + } + List() + } + + // -------------- setting positions ------------------------------- + + /** Set position of all children of a node + * @param pos A target position. + * Uses the point of the position as the point of all positions it assigns. + * Uses the start of this position as an Offset position for unpositioed trees + * without children. + * @param trees The children to position. All children must be positionable. + */ + private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try { + for (tree <- trees) { + if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) { + val children = tree.children + if (children.isEmpty) { + tree setPos pos.focus + } else { + setChildrenPos(pos, children) + tree setPos wrappingPos(pos, children) + } + } + } + } catch { + case ex: Exception => + println("error while set children pos "+pos+" of "+trees) + throw ex + } + + /** Position a tree. + * This means: Set position of a node and position all its unpositioned children. + */ + override def atPos[T <: Tree](pos: Position)(tree: T): T = { + if (pos.isOpaqueRange) { + if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) { + tree.setPos(pos) + val children = tree.children + if (children.nonEmpty) { + if (children.tail.isEmpty) atPos(pos)(children.head) + else setChildrenPos(pos, children) + } + } + tree + } else { + super.atPos(pos)(tree) + } + } + + // ---------------- Validating positions ---------------------------------- + + override def validatePositions(tree: Tree) { + def reportTree(prefix : String, tree : Tree) { + val source = if (tree.pos.isDefined) tree.pos.source else "" + inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source) + inform("") + inform(treeStatus(tree)) + inform("") + } + + def positionError(msg: String)(body : => Unit) { + inform("======= Position error\n" + msg) + body + inform("\nWhile validating #" + tree.id) + inform(treeStatus(tree)) + inform("\nChildren:") + tree.children map (t => " " + treeStatus(t, tree)) foreach inform + inform("=======") + throw new ValidateException(msg) + } + + def validate(tree: Tree, encltree: Tree): Unit = { + + if (!tree.isEmpty && tree.canHaveAttrs) { + if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value)) + println("[%10s] %s".format("validate", treeStatus(tree, encltree))) + + if (!tree.pos.isDefined) + positionError("Unpositioned tree #"+tree.id) { + inform("%15s %s".format("unpositioned", treeStatus(tree, encltree))) + inform("%15s %s".format("enclosing", treeStatus(encltree))) + encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree)))) + } + if (tree.pos.isRange) { + if (!encltree.pos.isRange) + positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") { + reportTree("Enclosing", encltree) + reportTree("Enclosed", tree) + } + if (!(encltree.pos includes tree.pos)) + positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") { + reportTree("Enclosing", encltree) + reportTree("Enclosed", tree) + } + + findOverlapping(tree.children flatMap solidDescendants) match { + case List() => ; + case xs 
=> { + positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) { + reportTree("Ancestor", tree) + for((x, y) <- xs) { + reportTree("First overlapping", x) + reportTree("Second overlapping", y) + } + } + } + } + } + for (ct <- tree.children flatMap solidDescendants) validate(ct, tree) + } + } + + if (!isPastTyper) + validate(tree, tree) + } + + class ValidateException(msg : String) extends Exception(msg) + + // ---------------- Locating trees ---------------------------------- + + /** A locator for trees with given positions. + * Given a position `pos`, locator.apply returns + * the smallest tree that encloses `pos`. + */ + class Locator(pos: Position) extends Traverser { + var last: Tree = _ + def locateIn(root: Tree): Tree = { + this.last = EmptyTree + traverse(root) + this.last + } + protected def isEligible(t: Tree) = !t.pos.isTransparent + override def traverse(t: Tree) { + t match { + case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) => + traverse(tt.original) + case _ => + if (t.pos includes pos) { + if (isEligible(t)) last = t + super.traverse(t) + } else t match { + case mdef: MemberDef => + traverseTrees(mdef.mods.annotations) + case _ => + } + } + } + } + + class TypedLocator(pos: Position) extends Locator(pos) { + override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null + } +} diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index ce33fd8408..c00337e578 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -14,6 +14,23 @@ trait Trees extends api.Trees { self: SymbolTable => private[scala] var nodeCount = 0 + protected def treeLine(t: Tree): String = + if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1) + else t.summaryString + + protected def treeStatus(t: Tree, enclosingTree: Tree = null) = { + val parent = if (enclosingTree eq null) " " else " P#%5s".format(enclosingTree.id) + + "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.safeLine, parent, t.id, t.pos.show, t.shortClass, treeLine(t)) + } + protected def treeSymStatus(t: Tree) = { + val line = if (t.pos.isDefined) "line %-4s".format(t.pos.safeLine) else " " + "#%-5s %s %-10s // %s".format(t.id, line, t.shortClass, + if (t.symbol ne NoSymbol) "(" + t.symbol.fullLocationString + ")" + else treeLine(t) + ) + } + abstract class Tree extends TreeContextApiImpl with Attachable with Product { val id = nodeCount // TODO: add to attachment? 
nodeCount += 1 diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index d5ed9dab5b..506edb861e 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -40,6 +40,8 @@ abstract class MutableSettings extends AbsSettings { def verbose: BooleanSetting def uniqid: BooleanSetting def Yshowsymkinds: BooleanSetting + def Yposdebug: BooleanSetting + def Yrangepos: BooleanSetting def Xprintpos: BooleanSetting def Yrecursion: IntSetting def maxClassfileName: IntSetting diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala index f0185372c5..bb8c9e9b26 100644 --- a/src/reflect/scala/reflect/internal/util/Position.scala +++ b/src/reflect/scala/reflect/internal/util/Position.scala @@ -266,46 +266,3 @@ class OffsetPosition(override val source: SourceFile, override val point: Int) e } override def show = "["+point+"]" } - -/** new for position ranges */ -class RangePosition(source: SourceFile, override val start: Int, point: Int, override val end: Int) -extends OffsetPosition(source, point) { - if (start > end) sys.error("bad position: "+show) - override def isRange: Boolean = true - override def isOpaqueRange: Boolean = true - override def startOrPoint: Int = start - override def endOrPoint: Int = end - override def withStart(off: Int) = new RangePosition(source, off, point, end) - override def withEnd(off: Int) = new RangePosition(source, start, point, off) - override def withPoint(off: Int) = new RangePosition(source, start, off, end) - override def withSource(source: SourceFile, shift: Int) = new RangePosition(source, start + shift, point + shift, end + shift) - override def focusStart = new OffsetPosition(source, start) - override def focus = { - if (focusCache eq NoPosition) focusCache = new OffsetPosition(source, point) - focusCache - } - override def focusEnd = new OffsetPosition(source, end) - override def makeTransparent = new TransparentPosition(source, start, point, end) - override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end - override def union(pos: Position): Position = - if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this - - override def toSingleLine: Position = source match { - case bs: BatchSourceFile - if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) => - val pointLine = bs.offsetToLine(point) - new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1)) - case _ => this - } - - override def toString = "RangePosition("+source.file.canonicalPath+", "+start+", "+point+", "+end+")" - override def show = "["+start+":"+end+"]" - private var focusCache: Position = NoPosition -} - -class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int) extends RangePosition(source, start, point, end) { - override def isOpaqueRange: Boolean = false - override def isTransparent = true - override def makeTransparent = this - override def show = "<"+start+":"+end+">" -} diff --git a/src/reflect/scala/reflect/internal/util/RangePosition.scala b/src/reflect/scala/reflect/internal/util/RangePosition.scala new file mode 100644 index 0000000000..3712aa0a52 --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/RangePosition.scala @@ -0,0 +1,49 @@ +/* NSC -- new Scala compiler + * 
Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.reflect.internal.util + +/** new for position ranges */ +class RangePosition(source: SourceFile, override val start: Int, point: Int, override val end: Int) +extends OffsetPosition(source, point) { + if (start > end) sys.error("bad position: "+show) + override def isRange: Boolean = true + override def isOpaqueRange: Boolean = true + override def startOrPoint: Int = start + override def endOrPoint: Int = end + override def withStart(off: Int) = new RangePosition(source, off, point, end) + override def withEnd(off: Int) = new RangePosition(source, start, point, off) + override def withPoint(off: Int) = new RangePosition(source, start, off, end) + override def withSource(source: SourceFile, shift: Int) = new RangePosition(source, start + shift, point + shift, end + shift) + override def focusStart = new OffsetPosition(source, start) + override def focus = { + if (focusCache eq NoPosition) focusCache = new OffsetPosition(source, point) + focusCache + } + override def focusEnd = new OffsetPosition(source, end) + override def makeTransparent = new TransparentPosition(source, start, point, end) + override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end + override def union(pos: Position): Position = + if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this + + override def toSingleLine: Position = source match { + case bs: BatchSourceFile + if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) => + val pointLine = bs.offsetToLine(point) + new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1)) + case _ => this + } + + override def toString = "RangePosition("+source.file.canonicalPath+", "+start+", "+point+", "+end+")" + override def show = "["+start+":"+end+"]" + private var focusCache: Position = NoPosition +} + +class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int) extends RangePosition(source, start, point, end) { + override def isOpaqueRange: Boolean = false + override def isTransparent = true + override def makeTransparent = this + override def show = "<"+start+":"+end+">" +} diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index ba524f4df2..5d58fa96d6 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -35,6 +35,8 @@ private[reflect] class Settings extends MutableSettings { val Xprintpos = new BooleanSetting(false) val Ynotnull = new BooleanSetting(false) val Yshowsymkinds = new BooleanSetting(false) + val Yposdebug = new BooleanSetting(false) + val Yrangepos = new BooleanSetting(false) val debug = new BooleanSetting(false) val deepCloning = new BooleanSetting(false) val explaintypes = new BooleanSetting(false) diff --git a/test/files/run/t5603.scala b/test/files/run/t5603.scala index 60dfd01fee..8c8038a602 100644 --- a/test/files/run/t5603.scala +++ b/test/files/run/t5603.scala @@ -36,7 +36,8 @@ object Test extends DirectTest { val settings = new Settings() settings.Xprintpos.value = true + settings.Yrangepos.value = true val command = new CompilerCommand((CommandLineParser tokenize extraSettings) ++ args.toList, settings) - new Global(command.settings, new ConsoleReporter(settings)) with interactive.RangePositions + Global(command.settings, new ConsoleReporter(settings)) } } -- cgit v1.2.3 From 
6898c9fd6c3ffb975e07cdcc5619f8e771e21ac0 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 4 Mar 2013 14:47:01 -0800 Subject: Eliminated separate RangePositions trait. One of those eternal headaches which probably sounded better on paper. Even before range positions are the default, there's no reason we can't have the range position code available in all globals enabled via settings, just like everything else. --- src/compiler/scala/tools/nsc/Global.scala | 8 +- src/compiler/scala/tools/nsc/doc/DocParser.scala | 2 +- .../scala/tools/nsc/doc/ScaladocGlobal.scala | 4 +- .../scala/tools/nsc/interactive/Global.scala | 6 +- .../tools/nsc/interactive/RangePositions.scala | 4 +- .../scala/tools/nsc/interpreter/IMain.scala | 5 +- src/reflect/scala/reflect/internal/Positions.scala | 266 ++++++++++++++++++- .../scala/reflect/internal/RangePositions.scala | 285 --------------------- .../scala/reflect/runtime/JavaUniverse.scala | 2 +- 9 files changed, 271 insertions(+), 311 deletions(-) delete mode 100644 src/reflect/scala/reflect/internal/RangePositions.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index e438ac4bfb..bed446f8cb 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -12,7 +12,6 @@ import scala.collection.{ mutable, immutable } import io.{ SourceReader, AbstractFile, Path } import reporters.{ Reporter, ConsoleReporter } import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString, stackTraceHeadString } -import scala.reflect.internal.RangePositions import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile } import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat } import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers } @@ -43,6 +42,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // the mirror -------------------------------------------------- override def isCompilerUniverse = true + override val useOffsetPositions = !currentSettings.Yrangepos.value class GlobalMirror extends Roots(NoSymbol) { val universe: self.type = self @@ -1695,10 +1695,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def createJavadoc = false } -class RangePositionGlobal(settings0: Settings, reporter0: Reporter) extends Global(settings0, reporter0) with RangePositions - object Global { - def apply(settings: Settings, reporter: Reporter): Global = - if (settings.Yrangepos.value) new RangePositionGlobal(settings, reporter) - else new Global(settings, reporter) + def apply(settings: Settings, reporter: Reporter): Global = new Global(settings, reporter) } diff --git a/src/compiler/scala/tools/nsc/doc/DocParser.scala b/src/compiler/scala/tools/nsc/doc/DocParser.scala index 104178a832..b753e84426 100644 --- a/src/compiler/scala/tools/nsc/doc/DocParser.scala +++ b/src/compiler/scala/tools/nsc/doc/DocParser.scala @@ -15,7 +15,7 @@ import DocParser.Parsed * right after parsing so it can read `DocDefs` from source code which would * otherwise cause the compiler to go haywire. 
*/ -class DocParser(settings: nsc.Settings, reporter: Reporter) extends RangePositionGlobal(settings, reporter) { +class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) { def this(settings: Settings) = this(settings, new ConsoleReporter(settings)) def this() = this(new Settings(Console println _)) diff --git a/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala index d4777d7800..b8a0637b47 100644 --- a/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala +++ b/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala @@ -91,7 +91,9 @@ trait ScaladocAnalyzer extends Analyzer { } } -class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends Global(settings, reporter) with interactive.RangePositions { +class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends { + override val useOffsetPositions = false +} with Global(settings, reporter) { override protected def computeInternalPhases() { phasesSet += syntaxAnalyzer phasesSet += analyzer.namerFactory diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index 82eafb4b09..2091c63d8e 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -11,7 +11,7 @@ import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet} import scala.util.control.ControlThrowable import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer } import scala.tools.nsc.util.MultiHashMap -import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, RangePosition, NoPosition } +import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition } import scala.tools.nsc.reporters._ import scala.tools.nsc.symtab._ import scala.tools.nsc.typechecker.DivergentImplicit @@ -26,9 +26,9 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") * execution of the super constructor. 
*/ private var initializing = true -} with RangePositionGlobal(settings, _reporter) + override val useOffsetPositions = false +} with scala.tools.nsc.Global(settings, _reporter) with CompilerControl - with RangePositions with ContextTrees with RichCompilationUnits with ScratchPadMaker diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala index 0af62ad729..6288400629 100644 --- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala +++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala @@ -6,8 +6,8 @@ package scala.tools.nsc package interactive -@deprecated("Use scala.reflect.internal.RangePositions", "2.11.0") -trait RangePositions extends scala.reflect.internal.RangePositions with ast.Trees with ast.Positions { +@deprecated("Use scala.reflect.internal.Positions", "2.11.0") +trait RangePositions extends scala.reflect.internal.Positions with ast.Trees with ast.Positions { self: scala.tools.nsc.Global => } diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 4d1ceb2818..120761de4c 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -242,10 +242,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = { settings.outputDirs setSingleOutput replOutput.dir settings.exposeEmptyPackage.value = true - if (settings.Yrangepos.value) - new RangePositionGlobal(settings, reporter) with ReplGlobal { override def toString: String = "" } - else - new Global(settings, reporter) with ReplGlobal { override def toString: String = "" } + new Global(settings, reporter) with ReplGlobal { override def toString: String = "" } } /** Parent classloader. Overridable. */ diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index 69f6d22538..f5aeec63e1 100644 --- a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -2,27 +2,59 @@ package scala.reflect package internal import util._ +import scala.collection.mutable.ListBuffer +/** Handling range positions + * atPos, the main method in this trait, will add positions to a tree, + * and will ensure the following properties: + * + * 1. All nodes between the root of the tree and nodes that already have positions + * will be assigned positions. + * 2. No node which already has a position will be assigned a different range; however + * a RangePosition might become a TransparentPosition. + * 3. The position of each assigned node includes the positions of each of its children. + * 4. The positions of all solid descendants of children of an assigned node + * are mutually non-overlapping. + * + * Here, the solid descendant of a node are: + * + * If the node has a TransparentPosition, the solid descendants of all its children + * Otherwise, the singleton consisting of the node itself. + */ trait Positions extends api.Positions { self: SymbolTable => type Position = scala.reflect.internal.util.Position val NoPosition = scala.reflect.internal.util.NoPosition implicit val PositionTag = ClassTag[Position](classOf[Position]) + def inform(msg: String): Unit + + def useOffsetPositions: Boolean = true + /** A position that wraps a set of trees. 
* The point of the wrapping position is the point of the default position. * If some of the trees are ranges, returns a range position enclosing all ranges * Otherwise returns default position that is either focused or not. */ def wrappingPos(default: Position, trees: List[Tree]): Position = wrappingPos(default, trees, true) - def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = default + def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = { + if (useOffsetPositions) default else { + val ranged = trees filter (_.pos.isRange) + if (ranged.isEmpty) if (focus) default.focus else default + else new RangePosition(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max) + } + } /** A position that wraps the non-empty set of trees. * The point of the wrapping position is the point of the first trees' position. * If some of the trees are non-synthetic, returns a range position enclosing the non-synthetic trees * Otherwise returns a synthetic offset position to point. */ - def wrappingPos(trees: List[Tree]): Position = trees.head.pos + def wrappingPos(trees: List[Tree]): Position = { + val headpos = trees.head.pos + if (useOffsetPositions || !headpos.isDefined) headpos + else wrappingPos(headpos, trees) + } /** Ensure that given tree has no positions that overlap with * any of the positions of `others`. This is done by @@ -30,10 +62,212 @@ trait Positions extends api.Positions { self: SymbolTable => * to some of the nodes in `tree` or focusing on the position. */ def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, true) } - def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {} + def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) { + if (useOffsetPositions) return + + def isOverlapping(pos: Position) = + pos.isRange && (others exists (pos overlaps _.pos)) + + if (isOverlapping(tree.pos)) { + val children = tree.children + children foreach (ensureNonOverlapping(_, others, focus)) + if (tree.pos.isOpaqueRange) { + val wpos = wrappingPos(tree.pos, children, focus) + tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos) + } + } + } + + def rangePos(source: SourceFile, start: Int, point: Int, end: Int): Position = + if (useOffsetPositions) new OffsetPosition(source, point) + else new RangePosition(source, start, point, end) + + def validatePositions(tree: Tree) { + if (useOffsetPositions) return + + def reportTree(prefix : String, tree : Tree) { + val source = if (tree.pos.isDefined) tree.pos.source else "" + inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source) + inform("") + inform(treeStatus(tree)) + inform("") + } + + def positionError(msg: String)(body : => Unit) { + inform("======= Position error\n" + msg) + body + inform("\nWhile validating #" + tree.id) + inform(treeStatus(tree)) + inform("\nChildren:") + tree.children map (t => " " + treeStatus(t, tree)) foreach inform + inform("=======") + throw new ValidateException(msg) + } + + def validate(tree: Tree, encltree: Tree): Unit = { + + if (!tree.isEmpty && tree.canHaveAttrs) { + if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value)) + println("[%10s] %s".format("validate", treeStatus(tree, encltree))) + + if (!tree.pos.isDefined) + positionError("Unpositioned tree #"+tree.id) { + inform("%15s %s".format("unpositioned", treeStatus(tree, encltree))) + inform("%15s 
%s".format("enclosing", treeStatus(encltree))) + encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree)))) + } + if (tree.pos.isRange) { + if (!encltree.pos.isRange) + positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") { + reportTree("Enclosing", encltree) + reportTree("Enclosed", tree) + } + if (!(encltree.pos includes tree.pos)) + positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") { + reportTree("Enclosing", encltree) + reportTree("Enclosed", tree) + } + + findOverlapping(tree.children flatMap solidDescendants) match { + case List() => ; + case xs => { + positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) { + reportTree("Ancestor", tree) + for((x, y) <- xs) { + reportTree("First overlapping", x) + reportTree("Second overlapping", y) + } + } + } + } + } + for (ct <- tree.children flatMap solidDescendants) validate(ct, tree) + } + } + + if (!isPastTyper) + validate(tree, tree) + } + + def solidDescendants(tree: Tree): List[Tree] = + if (tree.pos.isTransparent) tree.children flatMap solidDescendants + else List(tree) + + /** A free range from `lo` to `hi` */ + private def free(lo: Int, hi: Int): Range = + Range(new RangePosition(null, lo, lo, hi), EmptyTree) + + /** The maximal free range */ + private lazy val maxFree: Range = free(0, Int.MaxValue) + + /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */ + private def maybeFree(lo: Int, hi: Int) = + if (lo < hi) List(free(lo, hi)) + else List() + + /** Insert `pos` into ranges `rs` if possible; + * otherwise add conflicting trees to `conflicting`. + */ + private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match { + case List() => + assert(conflicting.nonEmpty) + rs + case r :: rs1 => + assert(!t.pos.isTransparent) + if (r.isFree && (r.pos includes t.pos)) { +// println("subdividing "+r+"/"+t.pos) + maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1 + } else { + if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree + r :: insert(rs1, t, conflicting) + } + } + + /** Replace elem `t` of `ts` by `replacement` list. */ + private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] = + if (ts.head == t) replacement ::: ts.tail + else ts.head :: replace(ts.tail, t, replacement) + + /** Does given list of trees have mutually non-overlapping positions? + * pre: None of the trees is transparent + */ + def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = { + var ranges = List(maxFree) + for (ct <- cts) { + if (ct.pos.isOpaqueRange) { + val conflicting = new ListBuffer[Tree] + ranges = insert(ranges, ct, conflicting) + if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct)) + } + } + List() + } + + /** Set position of all children of a node + * @param pos A target position. + * Uses the point of the position as the point of all positions it assigns. + * Uses the start of this position as an Offset position for unpositioed trees + * without children. + * @param trees The children to position. All children must be positionable. 
+ */ + private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try { + for (tree <- trees) { + if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) { + val children = tree.children + if (children.isEmpty) { + tree setPos pos.focus + } else { + setChildrenPos(pos, children) + tree setPos wrappingPos(pos, children) + } + } + } + } catch { + case ex: Exception => + println("error while set children pos "+pos+" of "+trees) + throw ex + } + + + class ValidateException(msg : String) extends Exception(msg) - def rangePos(source: SourceFile, start: Int, point: Int, end: Int): Position = new OffsetPosition(source, point) - def validatePositions(tree: Tree) {} + + /** A locator for trees with given positions. + * Given a position `pos`, locator.apply returns + * the smallest tree that encloses `pos`. + */ + class Locator(pos: Position) extends Traverser { + var last: Tree = _ + def locateIn(root: Tree): Tree = { + this.last = EmptyTree + traverse(root) + this.last + } + protected def isEligible(t: Tree) = !t.pos.isTransparent + override def traverse(t: Tree) { + t match { + case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) => + traverse(tt.original) + case _ => + if (t.pos includes pos) { + if (isEligible(t)) last = t + super.traverse(t) + } else t match { + case mdef: MemberDef => + traverseTrees(mdef.mods.annotations) + case _ => + } + } + } + } + + case class Range(pos: Position, tree: Tree) { + def isFree = tree == EmptyTree + } + + class TypedLocator(pos: Position) extends Locator(pos) { + override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null + } trait PosAssigner extends Traverser { var pos: Position @@ -62,9 +296,25 @@ trait Positions extends api.Positions { self: SymbolTable => } } + /** Position a tree. + * This means: Set position of a node and position all its unpositioned children. + */ def atPos[T <: Tree](pos: Position)(tree: T): T = { - posAssigner.pos = pos - posAssigner.traverse(tree) - tree + if (useOffsetPositions || !pos.isOpaqueRange) { + posAssigner.pos = pos + posAssigner.traverse(tree) + tree + } + else { + if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) { + tree.setPos(pos) + val children = tree.children + if (children.nonEmpty) { + if (children.tail.isEmpty) atPos(pos)(children.head) + else setChildrenPos(pos, children) + } + } + tree + } } } diff --git a/src/reflect/scala/reflect/internal/RangePositions.scala b/src/reflect/scala/reflect/internal/RangePositions.scala deleted file mode 100644 index 85bbaf3364..0000000000 --- a/src/reflect/scala/reflect/internal/RangePositions.scala +++ /dev/null @@ -1,285 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ - -package scala.reflect -package internal - -import util._ -import scala.collection.mutable.ListBuffer - -/** Handling range positions - * atPos, the main method in this trait, will add positions to a tree, - * and will ensure the following properties: - * - * 1. All nodes between the root of the tree and nodes that already have positions - * will be assigned positions. - * 2. No node which already has a position will be assigned a different range; however - * a RangePosition might become a TransparentPosition. - * 3. The position of each assigned node includes the positions of each of its children. - * 4. The positions of all solid descendants of children of an assigned node - * are mutually non-overlapping. 
- * - * Here, the solid descendant of a node are: - * - * If the node has a TransparentPosition, the solid descendants of all its children - * Otherwise, the singleton consisting of the node itself. - */ -trait RangePositions extends Trees with Positions { - self: SymbolTable => - - def inform(msg: String): Unit - - case class Range(pos: Position, tree: Tree) { - def isFree = tree == EmptyTree - } - - override def rangePos(source: SourceFile, start: Int, point: Int, end: Int): RangePosition = - new RangePosition(source, start, point, end) - - /** A position that wraps a set of trees. - * The point of the wrapping position is the point of the default position. - * If some of the trees are ranges, returns a range position enclosing all ranges - * Otherwise returns default position that is either focused or not. - */ - override def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = { - val ranged = trees filter (_.pos.isRange) - if (ranged.isEmpty) if (focus) default.focus else default - else new RangePosition(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max) - } - - /** A position that wraps a non-empty set of trees. - * The point of the wrapping position is the point of the first trees' position. - * If some of the trees are ranges, returns a range position enclosing all ranges - * Otherwise returns first tree's position. - */ - override def wrappingPos(trees: List[Tree]): Position = { - val headpos = trees.head.pos - if (headpos.isDefined) wrappingPos(headpos, trees) else headpos - } - - // -------------- ensuring no overlaps ------------------------------- - - /** Ensure that given tree has no positions that overlap with - * any of the positions of `others`. This is done by - * shortening the range, assigning TransparentPositions - * to some of the nodes in `tree` or focusing on the position. - */ - override def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) { - def isOverlapping(pos: Position) = - pos.isRange && (others exists (pos overlaps _.pos)) - if (isOverlapping(tree.pos)) { - val children = tree.children - children foreach (ensureNonOverlapping(_, others, focus)) - if (tree.pos.isOpaqueRange) { - val wpos = wrappingPos(tree.pos, children, focus) - tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos) - } - } - } - - def solidDescendants(tree: Tree): List[Tree] = - if (tree.pos.isTransparent) tree.children flatMap solidDescendants - else List(tree) - - /** A free range from `lo` to `hi` */ - private def free(lo: Int, hi: Int): Range = - Range(new RangePosition(null, lo, lo, hi), EmptyTree) - - /** The maximal free range */ - private lazy val maxFree: Range = free(0, Int.MaxValue) - - /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */ - private def maybeFree(lo: Int, hi: Int) = - if (lo < hi) List(free(lo, hi)) - else List() - - /** Insert `pos` into ranges `rs` if possible; - * otherwise add conflicting trees to `conflicting`. 
- */ - private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match { - case List() => - assert(conflicting.nonEmpty) - rs - case r :: rs1 => - assert(!t.pos.isTransparent) - if (r.isFree && (r.pos includes t.pos)) { -// println("subdividing "+r+"/"+t.pos) - maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1 - } else { - if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree - r :: insert(rs1, t, conflicting) - } - } - - /** Replace elem `t` of `ts` by `replacement` list. */ - private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] = - if (ts.head == t) replacement ::: ts.tail - else ts.head :: replace(ts.tail, t, replacement) - - /** Does given list of trees have mutually non-overlapping positions? - * pre: None of the trees is transparent - */ - def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = { - var ranges = List(maxFree) - for (ct <- cts) { - if (ct.pos.isOpaqueRange) { - val conflicting = new ListBuffer[Tree] - ranges = insert(ranges, ct, conflicting) - if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct)) - } - } - List() - } - - // -------------- setting positions ------------------------------- - - /** Set position of all children of a node - * @param pos A target position. - * Uses the point of the position as the point of all positions it assigns. - * Uses the start of this position as an Offset position for unpositioed trees - * without children. - * @param trees The children to position. All children must be positionable. - */ - private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try { - for (tree <- trees) { - if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) { - val children = tree.children - if (children.isEmpty) { - tree setPos pos.focus - } else { - setChildrenPos(pos, children) - tree setPos wrappingPos(pos, children) - } - } - } - } catch { - case ex: Exception => - println("error while set children pos "+pos+" of "+trees) - throw ex - } - - /** Position a tree. - * This means: Set position of a node and position all its unpositioned children. 
- */ - override def atPos[T <: Tree](pos: Position)(tree: T): T = { - if (pos.isOpaqueRange) { - if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) { - tree.setPos(pos) - val children = tree.children - if (children.nonEmpty) { - if (children.tail.isEmpty) atPos(pos)(children.head) - else setChildrenPos(pos, children) - } - } - tree - } else { - super.atPos(pos)(tree) - } - } - - // ---------------- Validating positions ---------------------------------- - - override def validatePositions(tree: Tree) { - def reportTree(prefix : String, tree : Tree) { - val source = if (tree.pos.isDefined) tree.pos.source else "" - inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source) - inform("") - inform(treeStatus(tree)) - inform("") - } - - def positionError(msg: String)(body : => Unit) { - inform("======= Position error\n" + msg) - body - inform("\nWhile validating #" + tree.id) - inform(treeStatus(tree)) - inform("\nChildren:") - tree.children map (t => " " + treeStatus(t, tree)) foreach inform - inform("=======") - throw new ValidateException(msg) - } - - def validate(tree: Tree, encltree: Tree): Unit = { - - if (!tree.isEmpty && tree.canHaveAttrs) { - if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value)) - println("[%10s] %s".format("validate", treeStatus(tree, encltree))) - - if (!tree.pos.isDefined) - positionError("Unpositioned tree #"+tree.id) { - inform("%15s %s".format("unpositioned", treeStatus(tree, encltree))) - inform("%15s %s".format("enclosing", treeStatus(encltree))) - encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree)))) - } - if (tree.pos.isRange) { - if (!encltree.pos.isRange) - positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") { - reportTree("Enclosing", encltree) - reportTree("Enclosed", tree) - } - if (!(encltree.pos includes tree.pos)) - positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") { - reportTree("Enclosing", encltree) - reportTree("Enclosed", tree) - } - - findOverlapping(tree.children flatMap solidDescendants) match { - case List() => ; - case xs => { - positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) { - reportTree("Ancestor", tree) - for((x, y) <- xs) { - reportTree("First overlapping", x) - reportTree("Second overlapping", y) - } - } - } - } - } - for (ct <- tree.children flatMap solidDescendants) validate(ct, tree) - } - } - - if (!isPastTyper) - validate(tree, tree) - } - - class ValidateException(msg : String) extends Exception(msg) - - // ---------------- Locating trees ---------------------------------- - - /** A locator for trees with given positions. - * Given a position `pos`, locator.apply returns - * the smallest tree that encloses `pos`. 
- */ - class Locator(pos: Position) extends Traverser { - var last: Tree = _ - def locateIn(root: Tree): Tree = { - this.last = EmptyTree - traverse(root) - this.last - } - protected def isEligible(t: Tree) = !t.pos.isTransparent - override def traverse(t: Tree) { - t match { - case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) => - traverse(tt.original) - case _ => - if (t.pos includes pos) { - if (isEligible(t)) last = t - super.traverse(t) - } else t match { - case mdef: MemberDef => - traverseTrees(mdef.mods.annotations) - case _ => - } - } - } - } - - class TypedLocator(pos: Position) extends Locator(pos) { - override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null - } -} diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index a12e7d43d4..5467d70cea 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -9,8 +9,8 @@ package runtime */ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self => + def inform(msg: String): Unit = log(msg) def picklerPhase = internal.SomePhase - def forInteractive = false def forScaladoc = false lazy val settings = new Settings -- cgit v1.2.3 From 1666f6e3f4f3959a489007d830484247c1384a74 Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Mon, 4 Mar 2013 19:10:49 +0400 Subject: Since the problem in SI-6758 is fixed, it's ok to move checking for unused imports to Analyzer. This allows the check to be used in the IDE. --- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 14 -------------- .../scala/tools/nsc/typechecker/Contexts.scala | 19 ++++++++----------- .../scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2 ++ test/files/neg/warn-unused-imports.check | 6 +++--- 4 files changed, 13 insertions(+), 28 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index c8b7fcee8f..524f98fb84 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -31,18 +31,6 @@ abstract class Pickler extends SubComponent { def newPhase(prev: Phase): StdPhase = new PicklePhase(prev) class PicklePhase(prev: Phase) extends StdPhase(prev) { - override def run() { - super.run() - // This is run here rather than after typer because I found - // some symbols - usually annotations, possibly others - had not - // yet performed the necessary symbol lookup, leading to - // spurious claims of unusedness. 
- if (settings.lint.value) { - log("Clearing recorded import selectors.") - analyzer.clearUnusedImports() - } - } - def apply(unit: CompilationUnit) { def pickle(tree: Tree) { def add(sym: Symbol, pickle: Pickle) = { @@ -83,8 +71,6 @@ abstract class Pickler extends SubComponent { } pickle(unit.body) - if (settings.lint.value) - analyzer.warnUnusedImports(unit) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 22a28b7895..0ae5c2be03 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -51,20 +51,17 @@ trait Contexts { self: Analyzer => private lazy val allImportInfos = mutable.Map[CompilationUnit, List[ImportInfo]]() withDefaultValue Nil - def clearUnusedImports() { - allUsedSelectors.clear() - allImportInfos.clear() - } def warnUnusedImports(unit: CompilationUnit) = { - val imps = allImportInfos(unit).reverse.distinct - - for (imp <- imps) { - val used = allUsedSelectors(imp) - def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD + for (imps <- allImportInfos.remove(unit)) { + for (imp <- imps.reverse.distinct) { + val used = allUsedSelectors(imp) + def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD - imp.tree.selectors filterNot (s => isMask(s) || used(s)) foreach { sel => - unit.warning(imp posOf sel, "Unused import") + imp.tree.selectors filterNot (s => isMask(s) || used(s)) foreach { sel => + unit.warning(imp posOf sel, "Unused import") + } } + allUsedSelectors --= imps } } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index af484a47e2..20fc44bed1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -486,6 +486,8 @@ trait TypeDiagnostics { } def apply(unit: CompilationUnit) = { + warnUnusedImports(unit) + val p = new UnusedPrivates p traverse unit.body val unused = p.unusedTerms diff --git a/test/files/neg/warn-unused-imports.check b/test/files/neg/warn-unused-imports.check index e61ec267d3..1b938f4fd7 100644 --- a/test/files/neg/warn-unused-imports.check +++ b/test/files/neg/warn-unused-imports.check @@ -6,9 +6,6 @@ warn-unused-imports.scala:13: warning: it is not recommended to define classes/o If possible, define class A in package p2 instead. class A ^ -warn-unused-imports.scala:99: warning: local trait Warn is never used - trait Warn { // warn about unused local trait for good measure - ^ warn-unused-imports.scala:57: warning: Unused import import p1.A // warn ^ @@ -39,6 +36,9 @@ warn-unused-imports.scala:98: warning: Unused import warn-unused-imports.scala:118: warning: Unused import import p1.A // warn ^ +warn-unused-imports.scala:99: warning: local trait Warn is never used + trait Warn { // warn about unused local trait for good measure + ^ error: No warnings can be incurred under -Xfatal-warnings. 13 warnings found one error found -- cgit v1.2.3 From 9a82fc05244503c32c1ae66352114573eac379c3 Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Mon, 4 Mar 2013 23:16:24 +0400 Subject: Remove unused symbols and imports from the library. 
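As a concrete illustration — a minimal, hypothetical sketch, where the `Example` object and the `ListBuffer` import are made up for this note rather than taken from the patch — this is the kind of dead import the commit prunes from the library sources. Nothing in the file refers to `ListBuffer`, so the unused-import check that the previous commit moved into the typechecker would report it as "Unused import" (when the corresponding lint warnings are enabled), matching the expectations in warn-unused-imports.check above:

    package example

    // never referenced below, so it would be flagged as an unused import
    import scala.collection.mutable.ListBuffer

    object Example {
      def twice(x: Int): Int = x * 2
    }
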
--- src/library/scala/Boolean.scala | 2 -- src/library/scala/Double.scala | 2 -- src/library/scala/Unit.scala | 3 --- src/library/scala/collection/LinearSeqOptimized.scala | 1 - src/library/scala/collection/mutable/HashMap.scala | 2 +- src/library/scala/collection/parallel/immutable/ParRange.scala | 1 - src/library/scala/xml/NamespaceBinding.scala | 2 -- 7 files changed, 1 insertion(+), 12 deletions(-) (limited to 'src') diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala index d51afdd931..e43b7d0a82 100644 --- a/src/library/scala/Boolean.scala +++ b/src/library/scala/Boolean.scala @@ -10,8 +10,6 @@ package scala -import scala.language.implicitConversions - /** `Boolean` (equivalent to Java's `boolean` primitive type) is a * subtype of [[scala.AnyVal]]. Instances of `Boolean` are not * represented by an object in the underlying runtime system. diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala index 977ebd19d6..85bf9fe5c5 100644 --- a/src/library/scala/Double.scala +++ b/src/library/scala/Double.scala @@ -10,8 +10,6 @@ package scala -import scala.language.implicitConversions - /** `Double`, a 64-bit IEEE-754 floating point number (equivalent to Java's `double` primitive type) is a * subtype of [[scala.AnyVal]]. Instances of `Double` are not * represented by an object in the underlying runtime system. diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala index 0e59a184d1..01e592ec3c 100644 --- a/src/library/scala/Unit.scala +++ b/src/library/scala/Unit.scala @@ -10,9 +10,6 @@ package scala -import scala.language.implicitConversions - - /** `Unit` is a subtype of [[scala.AnyVal]]. There is only one value of type * `Unit`, `()`, and it is not represented by any object in the underlying * runtime system. 
A method with return type `Unit` is analogous to a Java diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala index ed5f2406e8..48bd4777f0 100755 --- a/src/library/scala/collection/LinearSeqOptimized.scala +++ b/src/library/scala/collection/LinearSeqOptimized.scala @@ -10,7 +10,6 @@ package scala.collection import mutable.ListBuffer import immutable.List -import scala.util.control.Breaks._ import scala.annotation.tailrec /** A template trait for linear sequences of type `LinearSeq[A]` which optimizes diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala index 3cd7f07d83..8755b81ff6 100644 --- a/src/library/scala/collection/mutable/HashMap.scala +++ b/src/library/scala/collection/mutable/HashMap.scala @@ -95,7 +95,7 @@ extends AbstractMap[A, B] def iterator = entriesIterator map {e => (e.key, e.value)} - override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f(e.key, e.value)) + override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f((e.key, e.value))) /* Override to avoid tuple allocation in foreach */ override def keySet: scala.collection.Set[A] = new DefaultKeySet { diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala index 0c9f82ba2a..bf757b6072 100644 --- a/src/library/scala/collection/parallel/immutable/ParRange.scala +++ b/src/library/scala/collection/parallel/immutable/ParRange.scala @@ -12,7 +12,6 @@ import scala.collection.immutable.Range import scala.collection.parallel.Combiner import scala.collection.parallel.SeqSplitter import scala.collection.generic.CanCombineFrom -import scala.collection.parallel.IterableSplitter import scala.collection.Iterator /** Parallel ranges. diff --git a/src/library/scala/xml/NamespaceBinding.scala b/src/library/scala/xml/NamespaceBinding.scala index 3a63d47d4e..32c378f3ef 100644 --- a/src/library/scala/xml/NamespaceBinding.scala +++ b/src/library/scala/xml/NamespaceBinding.scala @@ -38,8 +38,6 @@ case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBindin override def toString(): String = sbToString(buildString(_, TopScope)) - private def shadowRedefined: NamespaceBinding = shadowRedefined(TopScope) - private def shadowRedefined(stop: NamespaceBinding): NamespaceBinding = { def prefixList(x: NamespaceBinding): List[String] = if ((x == null) || (x eq stop)) Nil -- cgit v1.2.3 From fff0f50773b45690d571852b139bd6da7b6a45ae Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 24 Feb 2013 14:44:57 +0100 Subject: Name boolean arguments in src/compiler. What would you prefer? 
adaptToMemberWithArgs(tree, qual, name, mode, false, false) Or: adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = false, saveErrors = false) --- src/compiler/scala/tools/nsc/CompileSocket.scala | 2 +- src/compiler/scala/tools/nsc/CompilerCommand.scala | 8 +-- .../scala/tools/nsc/GenericRunnerCommand.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 4 +- .../scala/tools/nsc/OfflineCompilerCommand.scala | 2 +- src/compiler/scala/tools/nsc/ScalaDoc.scala | 2 +- src/compiler/scala/tools/nsc/ast/DocComments.scala | 2 +- src/compiler/scala/tools/nsc/ast/Printers.scala | 2 +- .../scala/tools/nsc/ast/TreeBrowsers.scala | 4 +- src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 6 +- .../scala/tools/nsc/ast/parser/MarkupParsers.scala | 4 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 70 +++++++++++----------- .../scala/tools/nsc/ast/parser/Scanners.scala | 8 +-- .../tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 6 +- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 14 ++--- .../scala/tools/nsc/backend/icode/GenICode.scala | 12 ++-- .../tools/nsc/backend/icode/ICodeCheckers.scala | 10 ++-- .../backend/icode/analysis/CopyPropagation.scala | 8 +-- .../backend/icode/analysis/DataFlowAnalysis.scala | 2 +- .../scala/tools/nsc/backend/jvm/GenASM.scala | 10 ++-- .../nsc/backend/opt/DeadCodeElimination.scala | 6 +- .../scala/tools/nsc/backend/opt/Inliners.scala | 6 +- .../scala/tools/nsc/dependencies/Changes.scala | 6 +- src/compiler/scala/tools/nsc/doc/DocFactory.scala | 2 +- .../tools/nsc/doc/base/CommentFactoryBase.scala | 28 ++++----- .../scala/tools/nsc/doc/html/HtmlPage.scala | 2 +- .../scala/tools/nsc/doc/html/SyntaxHigh.scala | 26 ++++---- .../scala/tools/nsc/doc/html/page/Template.scala | 32 +++++----- .../html/page/diagram/DotDiagramGenerator.scala | 2 +- .../doc/model/ModelFactoryImplicitSupport.scala | 2 +- .../doc/model/diagram/DiagramDirectiveParser.scala | 4 +- .../scala/tools/nsc/interactive/Global.scala | 10 ++-- .../scala/tools/nsc/interactive/REPL.scala | 2 +- .../tools/nsc/interactive/ScratchPadMaker.scala | 2 +- .../scala/tools/nsc/interactive/tests/Tester.scala | 2 +- .../nsc/interpreter/AbstractFileClassLoader.scala | 6 +- .../scala/tools/nsc/interpreter/ILoop.scala | 20 +++---- .../scala/tools/nsc/interpreter/IMain.scala | 4 +- .../scala/tools/nsc/interpreter/LoopCommands.scala | 4 +- .../scala/tools/nsc/interpreter/TypeStrings.scala | 10 ++-- src/compiler/scala/tools/nsc/io/Jar.scala | 2 +- src/compiler/scala/tools/nsc/io/SourceReader.scala | 2 +- .../scala/tools/nsc/javac/JavaParsers.scala | 12 ++-- .../scala/tools/nsc/javac/JavaScanners.scala | 4 +- .../scala/tools/nsc/reporters/Reporter.scala | 8 +-- .../scala/tools/nsc/settings/MutableSettings.scala | 14 ++--- .../nsc/symtab/classfile/ClassfileParser.scala | 18 +++--- .../tools/nsc/symtab/classfile/ICodeReader.scala | 44 +++++++------- .../scala/tools/nsc/transform/Erasure.scala | 2 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 8 +-- .../scala/tools/nsc/transform/TailCalls.scala | 2 +- .../tools/nsc/transform/TypingTransformers.scala | 2 +- .../scala/tools/nsc/transform/UnCurry.scala | 14 ++--- .../scala/tools/nsc/transform/patmat/Logic.scala | 4 +- .../tools/nsc/transform/patmat/MatchAnalysis.scala | 2 +- .../tools/nsc/transform/patmat/MatchCodeGen.scala | 2 +- .../scala/tools/nsc/typechecker/Contexts.scala | 6 +- .../scala/tools/nsc/typechecker/EtaExpansion.scala | 2 +- .../scala/tools/nsc/typechecker/Implicits.scala | 14 ++--- .../scala/tools/nsc/typechecker/Infer.scala | 12 ++-- 
.../scala/tools/nsc/typechecker/Namers.scala | 2 +- .../tools/nsc/typechecker/SuperAccessors.scala | 10 ++-- .../scala/tools/nsc/typechecker/Tags.scala | 10 ++-- .../tools/nsc/typechecker/TypeDiagnostics.scala | 6 +- .../scala/tools/nsc/typechecker/Typers.scala | 24 ++++---- src/compiler/scala/tools/nsc/util/ClassPath.scala | 10 ++-- .../scala/tools/nsc/util/ScalaClassLoader.scala | 4 +- src/compiler/scala/tools/reflect/package.scala | 2 +- src/compiler/scala/tools/util/Javap.scala | 6 +- src/compiler/scala/tools/util/PathResolver.scala | 2 +- 70 files changed, 302 insertions(+), 302 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala index b9f62f49b3..8087a31b45 100644 --- a/src/compiler/scala/tools/nsc/CompileSocket.scala +++ b/src/compiler/scala/tools/nsc/CompileSocket.scala @@ -186,7 +186,7 @@ class CompileSocket extends CompileOutputCommon { catch { case _: NumberFormatException => None } def getSocket(serverAdr: String): Socket = ( - for ((name, portStr) <- splitWhere(serverAdr, _ == ':', true) ; port <- parseInt(portStr)) yield + for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield getSocket(name, port) ) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr) diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 0462e69f74..f1f5130fb8 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -76,9 +76,9 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { } /** Messages explaining usage and options */ - def usageMsg = createUsageMsg("where possible standard", false, _.isStandard) - def xusageMsg = createUsageMsg("Possible advanced", true, _.isAdvanced) - def yusageMsg = createUsageMsg("Possible private", true, _.isPrivate) + def usageMsg = createUsageMsg("where possible standard", shouldExplain = false, _.isStandard) + def xusageMsg = createUsageMsg("Possible advanced", shouldExplain = true, _.isAdvanced) + def yusageMsg = createUsageMsg("Possible private", shouldExplain = true, _.isPrivate) // If any of these settings is set, the compiler shouldn't start; // an informative message of some sort should be printed instead. 
@@ -122,6 +122,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { case x => List(x) } - settings.processArguments(expandedArguments, true) + settings.processArguments(expandedArguments, processAll = true) } } diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala index c8fd5985c6..e710222285 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala @@ -26,7 +26,7 @@ extends CompilerCommand(args, settings) { // change CompilerCommand behavior override def shouldProcessArguments: Boolean = false - private lazy val (_ok, targetAndArguments) = settings.processArguments(args, false) + private lazy val (_ok, targetAndArguments) = settings.processArguments(args, processAll = false) override def ok = _ok private def guessHowToRun(target: String): GenericRunnerCommand.HowToRun = { if (!ok) Error diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 771d3bb8bb..51fa8f0ab9 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1421,10 +1421,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } if (settings.Xshowcls.isSetByUser) - showDef(splitClassAndPhase(settings.Xshowcls.value, false), false, globalPhase) + showDef(splitClassAndPhase(settings.Xshowcls.value, term = false), declsOnly = false, globalPhase) if (settings.Xshowobj.isSetByUser) - showDef(splitClassAndPhase(settings.Xshowobj.value, true), false, globalPhase) + showDef(splitClassAndPhase(settings.Xshowobj.value, term = true), declsOnly = false, globalPhase) } // Similarly, this will only be created under -Yshow-syms. 
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala index 8c54c4a1b0..2ce2fb3eaa 100644 --- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala @@ -39,7 +39,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext override def cmdName = "fsc" override def usageMsg = ( - createUsageMsg("where possible fsc", false, x => x.isStandard && settings.isFscSpecific(x.name)) + + createUsageMsg("where possible fsc", shouldExplain = false, x => x.isStandard && settings.isFscSpecific(x.name)) + "\n\nStandard scalac options also available:" + createUsageMsg(x => x.isStandard && !settings.isFscSpecific(x.name)) ) diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala index 14b76b53b3..52a0c20a11 100644 --- a/src/compiler/scala/tools/nsc/ScalaDoc.scala +++ b/src/compiler/scala/tools/nsc/ScalaDoc.scala @@ -60,7 +60,7 @@ object ScalaDoc extends ScalaDoc { class Command(arguments: List[String], settings: doc.Settings) extends CompilerCommand(arguments, settings) { override def cmdName = "scaladoc" override def usageMsg = ( - createUsageMsg("where possible scaladoc", false, x => x.isStandard && settings.isScaladocSpecific(x.name)) + + createUsageMsg("where possible scaladoc", shouldExplain = false, x => x.isStandard && settings.isScaladocSpecific(x.name)) + "\n\nStandard scalac options also available:" + createUsageMsg(x => x.isStandard && !settings.isScaladocSpecific(x.name)) ) diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index f86f45fb43..eac43c146a 100755 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -265,7 +265,7 @@ trait DocComments { self: Global => cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2)) case None => reporter.info(sym.pos, "The \"" + getSectionHeader + "\" annotation of the " + sym + - " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", true) + " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", force = true) "" } diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala index bff036e782..7fefb2ce0c 100644 --- a/src/compiler/scala/tools/nsc/ast/Printers.scala +++ b/src/compiler/scala/tools/nsc/ast/Printers.scala @@ -128,7 +128,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global => case Select(qualifier, name) => printTree(qualifier) print(".") - print(quotedName(name, true)) + print(quotedName(name, decode = true)) // target.toString() ==> target.toString case Apply(fn, Nil) => printTree(fn) diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index b73016837d..0077ed0c84 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -169,8 +169,8 @@ abstract class TreeBrowsers { _setExpansionState(root, new TreePath(root.getModel.getRoot)) } - def expandAll(subtree: JTree) = setExpansionState(subtree, true) - def collapseAll(subtree: JTree) = setExpansionState(subtree, false) + def expandAll(subtree: JTree) = setExpansionState(subtree, expand = true) + def collapseAll(subtree: JTree) = setExpansionState(subtree, 
expand = false) /** Create a frame that displays the AST. diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index 1c6bba19b3..7460d1ab31 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -116,8 +116,8 @@ trait TreeDSL { * See ticket #2168 for one illustration of AS vs. AS_ANY. */ def AS(tpe: Type) = gen.mkAsInstanceOf(target, tpe, any = true, wrapInApply = false) - def IS(tpe: Type) = gen.mkIsInstanceOf(target, tpe, true) - def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, false) + def IS(tpe: Type) = gen.mkIsInstanceOf(target, tpe, any = true) + def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, any = false) def TOSTRING() = fn(target, nme.toString_) def GETCLASS() = fn(target, Object_getClass) @@ -251,7 +251,7 @@ trait TreeDSL { def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree) def BLOCK(xs: Tree*) = Block(xs.init.toList, xs.last) def NOT(tree: Tree) = Select(tree, Boolean_not) - def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, makeTupleTerm(xs.toList, true)) + def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, makeTupleTerm(xs.toList, flattenUnary = true)) /** Typed trees from symbols. */ def THIS(sym: Symbol) = gen.mkAttributedThis(sym) diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index e8cef0d9b1..832a9bf63e 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -256,7 +256,7 @@ trait MarkupParsers { val (qname, attrMap) = xTag(()) if (ch == '/') { // empty element xToken("/>") - handle.element(r2p(start, start, curOffset), qname, attrMap, true, new ListBuffer[Tree]) + handle.element(r2p(start, start, curOffset), qname, attrMap, empty = true, new ListBuffer[Tree]) } else { // handle content xToken('>') @@ -270,7 +270,7 @@ trait MarkupParsers { val pos = r2p(start, start, curOffset) qname match { case "xml:group" => handle.group(pos, ts) - case _ => handle.element(pos, qname, attrMap, false, ts) + case _ => handle.element(pos, qname, attrMap, empty = false, ts) } } } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 17c9d7814d..08a6adfded 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -424,13 +424,13 @@ self => placeholderParams match { case vd :: _ => - syntaxError(vd.pos, "unbound placeholder parameter", false) + syntaxError(vd.pos, "unbound placeholder parameter", skipIt = false) placeholderParams = List() case _ => } placeholderTypes match { case td :: _ => - syntaxError(td.pos, "unbound wildcard type", false) + syntaxError(td.pos, "unbound wildcard type", skipIt = false) placeholderTypes = List() case _ => } @@ -537,7 +537,7 @@ self => def accept(token: Int): Int = { val offset = in.offset if (in.token != token) { - syntaxErrorOrIncomplete(expectedMsg(token), false) + syntaxErrorOrIncomplete(expectedMsg(token), skipIt = false) if ((token == RPAREN || token == RBRACE || token == RBRACKET)) if (in.parenBalance(token) + assumedClosingParens(token) < 0) assumedClosingParens(token) += 1 @@ -570,9 +570,9 @@ self => /** Check that type parameter is not by name or repeated. 
*/ def checkNotByNameOrVarargs(tpt: Tree) = { if (treeInfo isByNameParamType tpt) - syntaxError(tpt.pos, "no by-name parameter type allowed here", false) + syntaxError(tpt.pos, "no by-name parameter type allowed here", skipIt = false) else if (treeInfo isRepeatedParamType tpt) - syntaxError(tpt.pos, "no * parameter type allowed here", false) + syntaxError(tpt.pos, "no * parameter type allowed here", skipIt = false) } /** Check that tree is a legal clause of a forSome. */ @@ -581,7 +581,7 @@ self => ValDef(_, _, _, EmptyTree) | EmptyTree => ; case _ => - syntaxError(t.pos, "not a legal existential clause", false) + syntaxError(t.pos, "not a legal existential clause", skipIt = false) } /* -------------- TOKEN CLASSES ------------------------------------------- */ @@ -706,7 +706,7 @@ self => removeAsPlaceholder(name) makeParam(name.toTermName, tpe) case _ => - syntaxError(tree.pos, "not a legal formal parameter", false) + syntaxError(tree.pos, "not a legal formal parameter", skipIt = false) makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.endOrPoint)) } } @@ -714,7 +714,7 @@ self => /** Convert (qual)ident to type identifier. */ def convertToTypeId(tree: Tree): Tree = atPos(tree.pos) { convertToTypeName(tree) getOrElse { - syntaxError(tree.pos, "identifier expected", false) + syntaxError(tree.pos, "identifier expected", skipIt = false) errorTypeTree } } @@ -766,7 +766,7 @@ self => def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) = if (treeInfo.isLeftAssoc(op) != leftAssoc) syntaxError( - offset, "left- and right-associative operators with same precedence may not be mixed", false) + offset, "left- and right-associative operators with same precedence may not be mixed", skipIt = false) def reduceStack(isExpr: Boolean, base: List[OpInfo], top0: Tree, prec: Int, leftAssoc: Boolean): Tree = { var top = top0 @@ -1119,7 +1119,7 @@ self => case FALSE => false case NULL => null case _ => - syntaxErrorOrIncomplete("illegal literal", true) + syntaxErrorOrIncomplete("illegal literal", skipIt = true) null }) } @@ -1140,7 +1140,7 @@ self => else if(in.token == LBRACE) expr() else if(in.token == THIS) { in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY)) } else { - syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected", true) + syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected", skipIt = true) EmptyTree } } @@ -1376,7 +1376,7 @@ self => Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) }) } } else { - syntaxErrorOrIncomplete("`*' expected", true) + syntaxErrorOrIncomplete("`*' expected", skipIt = true) } } else if (in.token == AT) { t = (t /: annotations(skipNewLines = false))(makeAnnotated) @@ -1532,7 +1532,7 @@ self => val cpos = r2p(tstart, tstart, in.lastOffset max tstart) makeNew(parents, self, stats, npos, cpos) case _ => - syntaxErrorOrIncomplete("illegal start of simple expression", true) + syntaxErrorOrIncomplete("illegal start of simple expression", skipIt = true) errorTermTree } simpleExprRest(t, canApply = canApply) @@ -1873,7 +1873,7 @@ self => def simplePattern(): Tree = { // simple diagnostics for this entry point def badStart(): Tree = { - syntaxErrorOrIncomplete("illegal start of simple pattern", true) + syntaxErrorOrIncomplete("illegal start of simple pattern", skipIt = true) errorPatternTree } simplePattern(badStart) @@ -1966,7 +1966,7 @@ self => mods private def addMod(mods: Modifiers, mod: Long, pos: Position): Modifiers = { - if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", false) + if 
(mods hasFlag mod) syntaxError(in.offset, "repeated modifier", skipIt = false) in.nextToken() (mods | mod) withPosition (mod, pos) } @@ -1983,7 +1983,7 @@ self => if (in.token == LBRACKET) { in.nextToken() if (mods.hasAccessBoundary) - syntaxError("duplicate private/protected qualifier", false) + syntaxError("duplicate private/protected qualifier", skipIt = false) result = if (in.token == THIS) { in.nextToken(); mods | Flags.LOCAL } else Modifiers(mods.flags, identForType()) accept(RBRACKET) @@ -2090,7 +2090,7 @@ self => var mods = Modifiers(Flags.PARAM) if (owner.isTypeName) { mods = modifiers() | Flags.PARAMACCESSOR - if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", false) + if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", skipIt = false) in.token match { case v @ (VAL | VAR) => mods = mods withPosition (in.token, tokenRange(in)) @@ -2115,11 +2115,11 @@ self => syntaxError( in.offset, (if (mods.isMutable) "`var'" else "`val'") + - " parameters may not be call-by-name", false) + " parameters may not be call-by-name", skipIt = false) else if (implicitmod != 0) syntaxError( in.offset, - "implicit parameters may not be call-by-name", false) + "implicit parameters may not be call-by-name", skipIt = false) else bynamemod = Flags.BYNAMEPARAM } paramType() @@ -2160,9 +2160,9 @@ self => val result = vds.toList if (owner == nme.CONSTRUCTOR && (result.isEmpty || (result.head take 1 exists (_.mods.isImplicit)))) { in.token match { - case LBRACKET => syntaxError(in.offset, "no type parameters allowed here", false) + case LBRACKET => syntaxError(in.offset, "no type parameters allowed here", skipIt = false) case EOF => incompleteInputError("auxiliary constructor needs non-implicit parameter list") - case _ => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", false) + case _ => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", skipIt = false) } } addEvidenceParams(owner, result, contextBounds) @@ -2373,7 +2373,7 @@ self => */ def defOrDcl(pos: Int, mods: Modifiers): List[Tree] = { if (mods.isLazy && in.token != VAL) - syntaxError("lazy not allowed here. Only vals can be lazy", false) + syntaxError("lazy not allowed here. 
Only vals can be lazy", skipIt = false) in.token match { case VAL => patDefOrDcl(pos, mods withPosition(VAL, tokenRange(in))) @@ -2431,8 +2431,8 @@ self => if (newmods.isDeferred) { trees match { case List(ValDef(_, _, _, EmptyTree)) => - if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", false) - case _ => syntaxError(p.pos, "pattern definition may not be abstract", false) + if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", skipIt = false) + case _ => syntaxError(p.pos, "pattern definition may not be abstract", skipIt = false) } } trees @@ -2598,7 +2598,7 @@ self => case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE => TypeDef(mods | Flags.DEFERRED, name, tparams, typeBounds()) case _ => - syntaxErrorOrIncomplete("`=', `>:', or `<:' expected", true) + syntaxErrorOrIncomplete("`=', `>:', or `<:' expected", skipIt = true) EmptyTree } } @@ -2619,7 +2619,7 @@ self => * }}} */ def tmplDef(pos: Int, mods: Modifiers): Tree = { - if (mods.isLazy) syntaxError("classes cannot be lazy", false) + if (mods.isLazy) syntaxError("classes cannot be lazy", skipIt = false) in.token match { case TRAIT => classDef(pos, (mods | Flags.TRAIT | Flags.ABSTRACT) withPosition (Flags.TRAIT, tokenRange(in))) @@ -2632,7 +2632,7 @@ self => case CASEOBJECT => objectDef(pos, (mods | Flags.CASE) withPosition (Flags.CASE, tokenRange(in.prev /*scanner skips on 'case' to 'object', thus take prev*/))) case _ => - syntaxErrorOrIncomplete("expected start of definition", true) + syntaxErrorOrIncomplete("expected start of definition", skipIt = true) EmptyTree } } @@ -2654,7 +2654,7 @@ self => classContextBounds = contextBoundBuf.toList val tstart = (in.offset :: classContextBounds.map(_.pos.startOrPoint)).min if (!classContextBounds.isEmpty && mods.isTrait) { - syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", false) + syntaxError("traits cannot have type parameters with context bounds `: ...' 
nor view bounds `<% ...'", skipIt = false) classContextBounds = List() } val constrAnnots = constructorAnnotations() @@ -2665,7 +2665,7 @@ self => if (mods.isTrait) { if (settings.YvirtClasses && in.token == SUBTYPE) mods1 |= Flags.DEFERRED } else if (in.token == SUBTYPE) { - syntaxError("classes are not allowed to be virtual", false) + syntaxError("classes are not allowed to be virtual", skipIt = false) } val template = templateOpt(mods1, name, constrMods withAnnotations constrAnnots, vparamss, tstart) if (isInterface(mods1, template.body)) mods1 |= Flags.INTERFACE @@ -2732,7 +2732,7 @@ self => case tdef @ TypeDef(mods, name, tparams, rhs) => List(treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs)) case stat if !stat.isEmpty => - syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", false) + syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", skipIt = false) List() case _ => List() } @@ -2811,7 +2811,7 @@ self => templateBody(isPre = false) } else { if (in.token == LPAREN) { - if (parenMeansSyntaxError) syntaxError(s"traits or objects may not have parameters", true) + if (parenMeansSyntaxError) syntaxError(s"traits or objects may not have parameters", skipIt = true) else abort("unexpected opening parenthesis") } (emptyValDef, List()) @@ -2895,7 +2895,7 @@ self => joinComment(List(topLevelTmplDef)) case _ => if (!isStatSep) - syntaxErrorOrIncomplete("expected class or object definition", true) + syntaxErrorOrIncomplete("expected class or object definition", skipIt = true) Nil }) acceptStatSepOpt() @@ -2954,7 +2954,7 @@ self => } else if (isDefIntro || isModifier || in.token == AT) { stats ++= joinComment(nonLocalDefOrDcl) } else if (!isStatSep) { - syntaxErrorOrIncomplete("illegal start of definition", true) + syntaxErrorOrIncomplete("illegal start of definition", skipIt = true) } acceptStatSepOpt() } @@ -2977,7 +2977,7 @@ self => syntaxErrorOrIncomplete( "illegal start of declaration"+ (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)" - else ""), true) + else ""), skipIt = true) } if (in.token != RBRACE) acceptStatSep() } @@ -3047,7 +3047,7 @@ self => } else { val addendum = if (isModifier) " (no modifiers allowed here)" else "" - syntaxErrorOrIncomplete("illegal start of statement" + addendum, true) + syntaxErrorOrIncomplete("illegal start of statement" + addendum, skipIt = true) } } stats.toList diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 181bba6896..19cf1b5093 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -915,7 +915,7 @@ trait Scanners extends ScannersCommon { } } - def intVal: Long = intVal(false) + def intVal: Long = intVal(negated = false) /** Convert current strVal, base to double value */ @@ -947,7 +947,7 @@ trait Scanners extends ScannersCommon { } } - def floatVal: Double = floatVal(false) + def floatVal: Double = floatVal(negated = false) def checkNoLetter() { if (isIdentifierPart(ch) && ch >= ' ') @@ -1440,7 +1440,7 @@ trait Scanners extends ScannersCommon { while (lin < lineStart.length && column(lineStart(lin)) > lindent) lin += 1 if (lin < lineStart.length) { - val patches1 = insertPatch(patches, BracePatch(lineStart(lin), true)) + val patches1 = insertPatch(patches, BracePatch(lineStart(lin), inserted = true)) 
//println("patch for "+bp+"/"+imbalanceMeasure+"/"+new ParensAnalyzer(unit, patches1).imbalanceMeasure) /*if (improves(patches1))*/ patches1 @@ -1461,7 +1461,7 @@ trait Scanners extends ScannersCommon { else { val patches1 = delete(nested) if (patches1 ne patches) patches1 - else insertPatch(patches, BracePatch(roff, false)) + else insertPatch(patches, BracePatch(roff, inserted = false)) } } delete(bracePairs) diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala index 4329ccefc7..cdcfd0b834 100755 --- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala @@ -132,7 +132,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { case (Some(pre), rest) => (const(pre), const(rest)) case _ => (wild, const(n)) } - mkXML(pos, true, prepat, labpat, null, null, false, args) + mkXML(pos, isPattern = true, prepat, labpat, null, null, empty = false, args) } protected def convertToTextPat(t: Tree): Tree = t match { @@ -168,7 +168,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { } /** Returns (Some(prefix) | None, rest) based on position of ':' */ - def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', true) match { + def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', doDropIndex = true) match { case Some((pre, rest)) => (Some(pre), rest) case _ => (None, name) } @@ -246,7 +246,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) { val body = mkXML( pos.makeTransparent, - false, + isPattern = false, const(pre), const(newlabel), makeSymbolicAttrs, diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index f361daa574..d70b1f4d9c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -133,7 +133,7 @@ abstract class TreeBuilder { def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match { case Nil => Literal(Constant()) case List(tree) if flattenUnary => tree - case _ => makeTuple(trees, false) + case _ => makeTuple(trees, isType = false) } def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match { @@ -143,7 +143,7 @@ abstract class TreeBuilder { } def stripParens(t: Tree) = t match { - case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts, true) } + case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts, flattenUnary = true) } case _ => t } @@ -273,7 +273,7 @@ abstract class TreeBuilder { CaseDef(condition, EmptyTree, Literal(Constant(true))), CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) ) - val matchTree = makeVisitor(cases, false, scrutineeName) + val matchTree = makeVisitor(cases, checkExhaustive = false, scrutineeName) atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil)) } @@ -366,7 +366,7 @@ abstract class TreeBuilder { body) setPos splitpos case None => atPos(splitpos) { - makeVisitor(List(CaseDef(pat, EmptyTree, body)), false) + makeVisitor(List(CaseDef(pat, EmptyTree, body)), checkExhaustive = false) } } } @@ -422,9 +422,9 @@ abstract class TreeBuilder { val ids = (defpat1 :: defpats) map makeValue val rhs1 = makeForYield( List(ValFrom(pos, defpat1, rhs)), - Block(pdefs, atPos(wrappingPos(ids)) { makeTupleTerm(ids, true) }) setPos wrappingPos(pdefs)) 
+ Block(pdefs, atPos(wrappingPos(ids)) { makeTupleTerm(ids, flattenUnary = true) }) setPos wrappingPos(pdefs)) val allpats = (pat :: pats) map (_.duplicate) - val vfrom1 = ValFrom(r2p(pos.startOrPoint, pos.point, rhs1.pos.endOrPoint), atPos(wrappingPos(allpats)) { makeTuple(allpats, false) } , rhs1) + val vfrom1 = ValFrom(r2p(pos.startOrPoint, pos.point, rhs1.pos.endOrPoint), atPos(wrappingPos(allpats)) { makeTuple(allpats, isType = false) } , rhs1) makeFor(mapName, flatMapName, vfrom1 :: rest1, body) case _ => EmptyTree //may happen for erroneous input @@ -533,7 +533,7 @@ abstract class TreeBuilder { rhs1, List( atPos(pat1.pos) { - CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident.apply, true)) + CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident.apply, flattenUnary = true)) } )) } diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 65cd912dda..ed458a4bbe 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -688,7 +688,7 @@ abstract class GenICode extends SubComponent { ctx.bb.emit(DUP(generatedType)) val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx) - val init = CALL_METHOD(ctor, Static(true)) + val init = CALL_METHOD(ctor, Static(onInstance = true)) nw.init = init ctx1.bb.emit(init, tree.pos) ctx1 @@ -760,9 +760,9 @@ abstract class GenICode extends SubComponent { debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember) val invokeStyle = if (sym.isStaticMember) - Static(false) + Static(onInstance = false) else if (sym.isPrivate || sym.isClassConstructor) - Static(true) + Static(onInstance = true) else Dynamic @@ -1261,7 +1261,7 @@ abstract class GenICode extends SubComponent { case List(Literal(Constant("")), arg) => debuglog("Rewriting \"\" + x as String.valueOf(x) for: " + arg) val ctx1 = genLoad(arg, ctx, ObjectReference) - ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(false)), arg.pos) + ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(onInstance = false)), arg.pos) ctx1 case concatenations => debuglog("Lifted string concatenations for " + tree + "\n to: " + concatenations) @@ -1286,7 +1286,7 @@ abstract class GenICode extends SubComponent { } val ctx1 = genLoad(tree, ctx, ObjectReference) - ctx1.bb.emit(CALL_METHOD(hashMethod, Static(false))) + ctx1.bb.emit(CALL_METHOD(hashMethod, Static(onInstance = false))) ctx1 } @@ -1477,7 +1477,7 @@ abstract class GenICode extends SubComponent { val ctx1 = genLoad(l, ctx, ObjectReference) val ctx2 = genLoad(r, ctx1, ObjectReference) ctx2.bb.emitOnly( - CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(false)), + CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(onInstance = false)), CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL) ) } diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala index 49f2d9859d..82fdcbbc04 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala @@ -119,11 +119,11 @@ abstract class ICodeCheckers { clasz = cls for (f1 <- cls.fields ; f2 <- cls.fields ; if f1 < f2) - if (isConfict(f1, f2, false)) + if (isConfict(f1, f2, canOverload = false)) icodeError("Repetitive field name: " + f1.symbol.fullName) for (m1 <- cls.methods ; m2 <- cls.methods ; if m1 < m2) - if 
(isConfict(m1, m2, true)) + if (isConfict(m1, m2, canOverload = true)) icodeError("Repetitive method: " + m1.symbol.fullName) clasz.methods foreach check @@ -321,14 +321,14 @@ abstract class ICodeCheckers { def popStackN(num: Int, instrFn: () => String = defaultInstrPrinter) = { List.range(0, num) map { _ => val res = _popStack - printStackString(false, res, instrFn()) + printStackString(isPush = false, res, instrFn()) res } } def pushStackN(xs: Seq[TypeKind], instrFn: () => String) = { xs foreach { x => stack push x - printStackString(true, x, instrFn()) + printStackString(isPush = true, x, instrFn()) } } @@ -594,7 +594,7 @@ abstract class ICodeCheckers { case x if style.hasInstance => x + 1 case x => x } - if (style == Static(true)) + if (style == Static(onInstance = true)) checkBool(method.isPrivate || method.isConstructor, "Static call to non-private method.") checkStack(paramCount) diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala index 941d200d13..152a11ab1a 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala @@ -308,7 +308,7 @@ abstract class CopyPropagation { case CALL_METHOD(method, style) => style match { case Dynamic => - out = simulateCall(in, method, false) + out = simulateCall(in, method, static = false) case Static(onInstance) => if (onInstance) { @@ -326,12 +326,12 @@ abstract class CopyPropagation { // put the Record back on the stack and remove the 'returned' value out.stack = out.stack.drop(1 + method.info.paramTypes.length) } else - out = simulateCall(in, method, false) + out = simulateCall(in, method, static = false) } else - out = simulateCall(in, method, true) + out = simulateCall(in, method, static = true) case SuperCall(_) => - out = simulateCall(in, method, false) + out = simulateCall(in, method, static = false) } case BOX(tpe) => diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala index 704439e178..ebc2d33a62 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala @@ -79,7 +79,7 @@ trait DataFlowAnalysis[L <: SemiLattice] { val point = worklist.head worklist -= point - out(point) = lattice.lub(point.successors map in.apply, false) // TODO check for exception handlers + out(point) = lattice.lub(point.successors map in.apply, exceptional = false) // TODO check for exception handlers val input = f(point, out(point)) if ((lattice.bottom == in(point)) || input != in(point)) { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 3830b389ba..703922b20a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -1155,8 +1155,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { ) val methodSymbol = definitions.getMember(clasz.symbol.companionModule, androidFieldName) clasz addField new IField(fieldSymbol) - block emit CALL_METHOD(methodSymbol, Static(false)) - block emit STORE_FIELD(fieldSymbol, true) + block emit CALL_METHOD(methodSymbol, Static(onInstance = false)) + block emit STORE_FIELD(fieldSymbol, isStatic = true) } def 
legacyAddCreatorCode(clinit: asm.MethodVisitor) { @@ -1613,7 +1613,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { if (isStaticModule(clasz.symbol)) { // call object's private ctor from static ctor lastBlock emit NEW(REFERENCE(m.symbol.enclClass)) - lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true)) + lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(onInstance = true)) } if (isParcelableClass) { addCreatorCode(lastBlock) } @@ -1621,11 +1621,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { lastBlock emit RETURN(UNIT) lastBlock.close() - method = m + method = m jmethod = clinitMethod jMethodName = CLASS_CONSTRUCTOR_NAME jmethod.visitCode() - genCode(m, false, true) + genCode(m, emitVars = false, isStatic = true) jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments jmethod.visitEnd() diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index 3b94e2bd8d..a9c8104e79 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -114,7 +114,7 @@ abstract class DeadCodeElimination extends SubComponent { for (Pair(i, idx) <- bb.toList.zipWithIndex) { // utility for adding to worklist - def moveToWorkList() = moveToWorkListIf(true) + def moveToWorkList() = moveToWorkListIf(cond = true) // utility for (conditionally) adding to worklist def moveToWorkListIf(cond: Boolean) = @@ -130,7 +130,7 @@ abstract class DeadCodeElimination extends SubComponent { case LOAD_LOCAL(_) => defs = defs + Pair(((bb, idx)), rd.vars) - moveToWorkListIf(false) + moveToWorkListIf(cond = false) case STORE_LOCAL(l) => /* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it @@ -184,7 +184,7 @@ abstract class DeadCodeElimination extends SubComponent { case LOAD_MODULE(sym) if isLoadNeeded(sym) => moveToWorkList() // SI-4859 Module initialization might side-effect. case _ => () - moveToWorkListIf(false) + moveToWorkListIf(cond = false) } rd = rdef.interpret(bb, idx, rd) } diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index d183b3a291..1a73764719 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -480,8 +480,8 @@ abstract class Inliners extends SubComponent { * so that the first TFA that is run afterwards is able to gain more information as compared to a cold-start. 
*/ /*val totalPreInlines = */ { // Val name commented out to emphasize it is never used - val firstRound = preInline(true) - if(firstRound == 0) 0 else (firstRound + preInline(false)) + val firstRound = preInline(isFirstRound = true) + if(firstRound == 0) 0 else (firstRound + preInline(isFirstRound = false)) } staleOut.clear() splicedBlocks.clear() @@ -869,7 +869,7 @@ abstract class Inliners extends SubComponent { r case CALL_METHOD(meth, Static(true)) if meth.isClassConstructor => - CALL_METHOD(meth, Static(true)) + CALL_METHOD(meth, Static(onInstance = true)) case _ => i.clone() } diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala index 531348b451..c341d33a62 100644 --- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala +++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala @@ -52,7 +52,7 @@ abstract class Changes { private val changedTypeParams = new mutable.HashSet[String] private def sameParameterSymbolNames(sym1: Symbol, sym2: Symbol): Boolean = - sameSymbol(sym1, sym2, true) || sym2.encodedName.startsWith(sym1.encodedName + nme.NAME_JOIN_STRING) // see #3140 + sameSymbol(sym1, sym2, simple = true) || sym2.encodedName.startsWith(sym1.encodedName + nme.NAME_JOIN_STRING) // see #3140 private def sameSymbol(sym1: Symbol, sym2: Symbol, simple: Boolean = false): Boolean = if (simple) sym1.encodedName == sym2.encodedName else sym1.fullName == sym2.fullName private def sameFlags(sym1: Symbol, sym2: Symbol): Boolean = @@ -121,7 +121,7 @@ abstract class Changes { case (NullaryMethodType(res1), NullaryMethodType(res2)) => sameType(res1, res2) case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) => - sameTypeParams(tparams1, tparams2)(false) && sameType(res1, res2)(false) + sameTypeParams(tparams1, tparams2)(strict = false) && sameType(res1, res2)(strict = false) case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => sameType(lo1, lo2) && sameType(hi1, hi2) case (BoundedWildcardType(bounds), _) => @@ -174,7 +174,7 @@ abstract class Changes { if ((from.parents zip to.parents) exists { case (t1, t2) => !sameType(t1, t2) }) cs += Changed(toEntity(toSym))(from.parents.zip(to.parents).toString) - if (!sameTypeParams(from.typeParams, to.typeParams)(false)) + if (!sameTypeParams(from.typeParams, to.typeParams)(strict = false)) cs += Changed(toEntity(toSym))(" tparams: " + from.typeParams.zip(to.typeParams)) // new members not yet visited diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala index f203a5eeb7..b4d2adaad4 100644 --- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala @@ -94,7 +94,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor val documentError: PartialFunction[Throwable, Unit] = { case NoCompilerRunException => - reporter.info(null, "No documentation generated with unsucessful compiler run", false) + reporter.info(null, "No documentation generated with unsucessful compiler run", force = false) case _: ClassNotFoundException => () } diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala index a308292811..2064d86860 100755 --- a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -228,11 +228,11 @@ trait CommentFactoryBase { this: MemberLookupBase => case 
CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) => if (!before.trim.isEmpty && !after.trim.isEmpty) - parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, false) + parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = false) else if (!before.trim.isEmpty) - parse0(docBody, tags, lastTagKey, before :: marker :: ls, false) + parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = false) else if (!after.trim.isEmpty) - parse0(docBody, tags, lastTagKey, marker :: after :: ls, true) + parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = true) else lastTagKey match { case Some(key) => val value = @@ -240,18 +240,18 @@ trait CommentFactoryBase { this: MemberLookupBase => case Some(b :: bs) => (b + endOfLine + marker) :: bs case None => oops("lastTagKey set when no tag exists for key") } - parse0(docBody, tags + (key -> value), lastTagKey, ls, true) + parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = true) case None => - parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, true) + parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true) } case CodeBlockEndRegex(before, marker, after) :: ls => if (!before.trim.isEmpty && !after.trim.isEmpty) - parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, true) + parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true) if (!before.trim.isEmpty) - parse0(docBody, tags, lastTagKey, before :: marker :: ls, true) + parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = true) else if (!after.trim.isEmpty) - parse0(docBody, tags, lastTagKey, marker :: after :: ls, false) + parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = false) else lastTagKey match { case Some(key) => val value = @@ -259,9 +259,9 @@ trait CommentFactoryBase { this: MemberLookupBase => case Some(b :: bs) => (b + endOfLine + marker) :: bs case None => oops("lastTagKey set when no tag exists for key") } - parse0(docBody, tags + (key -> value), lastTagKey, ls, false) + parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = false) case None => - parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, false) + parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = false) } case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) => @@ -375,7 +375,7 @@ trait CommentFactoryBase { this: MemberLookupBase => } - parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), false) + parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), inCodeBlock = false) } @@ -451,7 +451,7 @@ trait CommentFactoryBase { this: MemberLookupBase => else { jumpWhitespace() jump(style) - val p = Paragraph(inline(false)) + val p = Paragraph(inline(isInlineEnd = false)) blockEnded("end of list line ") Some(p) } @@ -510,11 +510,11 @@ trait CommentFactoryBase { this: MemberLookupBase => def para(): Block = { val p = if (summaryParsed) - Paragraph(inline(false)) + Paragraph(inline(isInlineEnd = false)) else { val s = summary() val r = - if (checkParaEnded()) List(s) else List(s, inline(false)) + if (checkParaEnded()) List(s) else List(s, inline(isInlineEnd = false)) summaryParsed = true Paragraph(Chain(r)) } diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala index 829df97fc2..229e26d699 100644 --- 
a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala +++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala @@ -123,7 +123,7 @@ abstract class HtmlPage extends Page { thisPage => case Text(text) => scala.xml.Text(text) case Summary(in) => inlineToHtml(in) case HtmlTag(tag) => scala.xml.Unparsed(tag) - case EntityLink(target, link) => linkToHtml(target, link, true) + case EntityLink(target, link) => linkToHtml(target, link, hasLinks = true) } def linkToHtml(text: Inline, link: LinkTo, hasLinks: Boolean) = link match { diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala index db9edd165d..5781e680dd 100644 --- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala +++ b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala @@ -107,14 +107,14 @@ private[html] object SyntaxHigh { case '/' => if (star) { if (level > 0) level -= 1 - if (level == 0) i else multiline(i+1, true) + if (level == 0) i else multiline(i+1, star = true) } else - multiline(i+1, false) + multiline(i+1, star = false) case _ => - multiline(i+1, false) + multiline(i+1, star = false) } } - if (buf(i) == '/') line(i) else multiline(i, true) + if (buf(i) == '/') line(i) else multiline(i, star = true) out.toString } @@ -129,16 +129,16 @@ private[html] object SyntaxHigh { out append ch ch match { case '\\' => - charlit0(i+1, true) + charlit0(i+1, bslash = true) case '\'' if !bslash => i case _ => - if (bslash && '0' <= ch && ch <= '9') charlit0(i+1, true) - else charlit0(i+1, false) + if (bslash && '0' <= ch && ch <= '9') charlit0(i+1, bslash = true) + else charlit0(i+1, bslash = false) } } } - charlit0(j, false) + charlit0(j, bslash = false) out.toString } @@ -150,14 +150,14 @@ private[html] object SyntaxHigh { out append ch ch match { case '\\' => - strlit0(i+1, true) + strlit0(i+1, bslash = true) case '"' if !bslash => i case _ => - strlit0(i+1, false) + strlit0(i+1, bslash = false) } } - strlit0(i, false) + strlit0(i, bslash = false) out.toString } @@ -183,7 +183,7 @@ private[html] object SyntaxHigh { ch match { case 'e' | 'E' => out append ch - expo(i+1, false) + expo(i+1, signed = false) case _ => if (Character.isDigit(ch)) { out append ch @@ -197,7 +197,7 @@ private[html] object SyntaxHigh { ch match { case '+' | '-' if !signed => out append ch - expo(i+1, true) + expo(i+1, signed = true) case _ => if (Character.isDigit(ch)) { out append ch diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala index c482134c4b..63509de4b5 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala @@ -110,8 +110,8 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp

      { displayName }

      - { signature(tpl, true) } - { memberToCommentHtml(tpl, tpl.inTemplate, true) } + { signature(tpl, isSelf = true) } + { memberToCommentHtml(tpl, tpl.inTemplate, isSelf = true) }
      @@ -242,7 +242,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp NodeSeq fromSeq (for (conversion <- (tpl.conversions)) yield

      Inherited by implicit conversion { conversion.conversionShortName } from - { typeToHtml(tpl.resultType, true) } to { typeToHtml(conversion.targetType, true) } + { typeToHtml(tpl.resultType, hasLinks = true) } to { typeToHtml(conversion.targetType, hasLinks = true) }

      ) @@ -284,14 +284,14 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp } def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = { - val memberComment = memberToCommentHtml(mbr, inTpl, false) + val memberComment = memberToCommentHtml(mbr, inTpl, isSelf = false)
    4. - { signature(mbr, false) } + { signature(mbr, isSelf = false) } { memberComment }
    5. } @@ -398,7 +398,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp case Some(conv) =>
      Implicit information
      ++ { - val targetType = typeToHtml(conv.targetType, true) + val targetType = typeToHtml(conv.targetType, hasLinks = true) val conversionMethod = conv.convertorMethod match { case Left(member) => Text(member.name) case Right(name) => Text(name) @@ -424,7 +424,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp }
      - This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, true) } to + This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, hasLinks = true) } to { targetType } performed by method { conversionMethod } in { conversionOwner }. { constraintText }
      @@ -486,7 +486,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp case nte: NonTemplateMemberEntity if nte.isUseCase =>
      Full Signature -
      { signature(nte.useCaseOf.get,true) }
      +
      { signature(nte.useCaseOf.get,isSelf = true) }
      case _ => NodeSeq.Empty } @@ -923,13 +923,13 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp if (tpl.universe.settings.useStupidTypes.value) superTpl match { case dtpl: DocTemplateEntity => - val sig = signature(dtpl, false, true) \ "_" + val sig = signature(dtpl, isSelf = false, isReduced = true) \ "_" sig case tpl: TemplateEntity => Text(tpl.name) } else - typeToHtml(superType, true) + typeToHtml(superType, hasLinks = true) private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match { case ktcc: KnownTypeClassConstraint => @@ -941,21 +941,21 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp context-bounded
      ++ scala.xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++ templateToHtml(tcc.typeClassEntity) ++ scala.xml.Text(")") case impl: ImplicitInScopeConstraint => - scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ scala.xml.Text(" is in scope") + scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, hasLinks = true) ++ scala.xml.Text(" is in scope") case eq: EqualTypeParamConstraint => scala.xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++ - typeToHtml(eq.rhs, true) ++ scala.xml.Text(")") + typeToHtml(eq.rhs, hasLinks = true) ++ scala.xml.Text(")") case bt: BoundedTypeParamConstraint => scala.xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " + bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++ - typeToHtml(bt.lowerBound, true) ++ scala.xml.Text(" <: ") ++ - typeToHtml(bt.upperBound, true) ++ scala.xml.Text(")") + typeToHtml(bt.lowerBound, hasLinks = true) ++ scala.xml.Text(" <: ") ++ + typeToHtml(bt.upperBound, hasLinks = true) ++ scala.xml.Text(")") case lb: LowerBoundedTypeParamConstraint => scala.xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++ - typeToHtml(lb.lowerBound, true) ++ scala.xml.Text(")") + typeToHtml(lb.lowerBound, hasLinks = true) ++ scala.xml.Text(")") case ub: UpperBoundedTypeParamConstraint => scala.xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++ - typeToHtml(ub.upperBound, true) ++ scala.xml.Text(")") + typeToHtml(ub.upperBound, hasLinks = true) ++ scala.xml.Text(")") } } diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala index 92dd05e70a..837d9e6f21 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala @@ -321,7 +321,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator { val result = if (dotOutput != null) { val src = scala.io.Source.fromString(dotOutput) try { - val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false) + val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, preserveWS = false) val doc = cpa.document() if (doc != null) transform(doc.docElem) diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index c00afee064..868c2fc3a4 100644 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -170,7 +170,7 @@ trait ModelFactoryImplicitSupport { val newContext = context.makeImplicit(context.ambiguousErrors) newContext.macrosEnabled = false val newTyper = global.analyzer.newTyper(newContext) - newTyper.silent(_.typed(appliedTree, EXPRmode, WildcardType), false) match { + newTyper.silent(_.typed(appliedTree, EXPRmode, WildcardType), reportAmbiguousErrors = false) match { case global.analyzer.SilentResultValue(t: Tree) => t case global.analyzer.SilentTypeError(err) => diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala index 96bba0498c..6395446d3b 
100644 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala +++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala @@ -63,7 +63,7 @@ trait DiagramDirectiveParser { NoDiagramAtAll if (template.comment.isDefined) - makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, true) + makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, isInheritanceDiagram = true) else defaultFilter } @@ -72,7 +72,7 @@ trait DiagramDirectiveParser { def makeContentDiagramFilter(template: DocTemplateImpl): DiagramFilter = { val defaultFilter = if (template.isPackage || template.isObject) FullDiagram else NoDiagramAtAll if (template.comment.isDefined) - makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, false) + makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, isInheritanceDiagram = false) else defaultFilter } diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index 2091c63d8e..921a07c805 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -746,10 +746,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") try { val tp1 = pre.memberType(alt) onTypeError NoType val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams) - matchesType(tp1, tp2, false) || { + matchesType(tp1, tp2, alwaysMatchSimple = false) || { debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed") val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams) - matchesType(tp1, tp3, false) || { + matchesType(tp1, tp3, alwaysMatchSimple = false) || { debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed") false } @@ -909,8 +909,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") val locals = new Members[ScopeMember] val enclosing = new Members[ScopeMember] def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) = - locals.add(sym, pre, false) { (s, st) => - new ScopeMember(s, st, context.isAccessible(s, pre, false), viaImport) + locals.add(sym, pre, implicitlyAdded = false) { (s, st) => + new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport) } def localsToEnclosing() = { enclosing.addNonShadowed(locals) @@ -1012,7 +1012,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") val vtree = viewApply(view) val vpre = stabilizedType(vtree) for (sym <- vtree.tpe.members) { - addTypeMember(sym, vpre, false, view.tree.symbol) + addTypeMember(sym, vpre, inherited = false, view.tree.symbol) } } //println() diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala index 71dd0d3bbf..04c06b9357 100644 --- a/src/compiler/scala/tools/nsc/interactive/REPL.scala +++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala @@ -107,7 +107,7 @@ object REPL { } def doStructure(file: String) { - comp.askParsedEntered(toSourceFile(file), false, structureResult) + comp.askParsedEntered(toSourceFile(file), keepLoaded = false, structureResult) show(structureResult) } diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala index 8b4c2ce4eb..7af9174704 100644 --- a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala +++ 
b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala @@ -191,7 +191,7 @@ trait ScratchPadMaker { self: Global => * prints its output and all defined values in a comment column. */ protected def instrument(source: SourceFile, line: Int): (String, Array[Char]) = { - val tree = typedTree(source, true) + val tree = typedTree(source, forceReload = true) val endOffset = if (line < 0) source.length else source.lineToOffset(line + 1) val patcher = new Patcher(source.content, new LexicalStructure(source), endOffset) patcher.traverse(tree) diff --git a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala b/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala index 26aabbd3e6..9382d5890f 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala @@ -199,7 +199,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { object Tester { def main(args: Array[String]) { val settings = new Settings() - val (_, filenames) = settings.processArguments(args.toList.tail, true) + val (_, filenames) = settings.processArguments(args.toList.tail, processAll = true) println("filenames = "+filenames) val files = filenames.toArray map (str => new BatchSourceFile(AbstractFile.getFile(str)): SourceFile) new Tester(args(0).toInt, files, settings).run() diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala index 3b272aee32..e909cd945d 100644 --- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala @@ -28,12 +28,12 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) val pathParts = name split '/' for (dirPart <- pathParts.init) { - file = file.lookupName(dirPart, true) + file = file.lookupName(dirPart, directory = true) if (file == null) return null } - file.lookupName(pathParts.last, false) match { + file.lookupName(pathParts.last, directory = false) match { case null => null case file => file } @@ -47,7 +47,7 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) val pathParts = dirNameToPath(name) split '/' for (dirPart <- pathParts) { - file = file.lookupName(dirPart, true) + file = file.lookupName(dirPart, directory = true) if (file == null) return null } diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index d08c9cb36c..2ea255319d 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -145,7 +145,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) case Nil => echo(cmd + ": no such command. 
Type :help for help.") case xs => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?") } - Result(true, None) + Result(keepRunning = true, None) } private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd) private def uniqueCommand(cmd: String): Option[LoopCommand] = { @@ -216,7 +216,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) cmd("load", "", "load and interpret a Scala file", loadCommand), nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand), nullary("power", "enable power user mode", powerCmd), - nullary("quit", "exit the interpreter", () => Result(false, None)), + nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)), nullary("replay", "reset execution and replay all previous commands", replay), nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand), shCommand, @@ -288,8 +288,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) private def typeCommand(line0: String): Result = { line0.trim match { case "" => ":type [-v] " - case s if s startsWith "-v " => intp.typeCommandInternal(s stripPrefix "-v " trim, true) - case s => intp.typeCommandInternal(s, false) + case s if s startsWith "-v " => intp.typeCommandInternal(s stripPrefix "-v " trim, verbose = true) + case s => intp.typeCommandInternal(s, verbose = false) } } @@ -403,7 +403,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) savingReader { savingReplayStack { file applyReader { reader => - in = SimpleReader(reader, out, false) + in = SimpleReader(reader, out, interactive = false) echo("Loading " + file + "...") loop() } @@ -467,7 +467,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) interpretAllFrom(f) shouldReplay = Some(":load " + arg) }) - Result(true, shouldReplay) + Result(keepRunning = true, shouldReplay) } def addClasspath(arg: String): Unit = { @@ -483,7 +483,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def powerCmd(): Result = { if (isReplPower) "Already in power mode." 
- else enablePowerMode(false) + else enablePowerMode(isDuringInit = false) } def enablePowerMode(isDuringInit: Boolean) = { replProps.power setValue true @@ -520,8 +520,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) case _ => ambiguousError(cmd) } } - else if (intp.global == null) Result(false, None) // Notice failure to create compiler - else Result(true, interpretStartingWith(line)) + else if (intp.global == null) Result(keepRunning = false, None) // Notice failure to create compiler + else Result(keepRunning = true, interpretStartingWith(line)) } private def readWhile(cond: String => Boolean) = { @@ -668,7 +668,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) createInterpreter() // sets in to some kind of reader depending on environmental cues - in = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, true)) + in = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, interactive = true)) globalFuture = future { intp.initializeSynchronous() loopPostInit() diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 120761de4c..c54b01dbb0 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -532,8 +532,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends * The return value is whether the line was interpreter successfully, * e.g. that there were no parse errors. */ - def interpret(line: String): IR.Result = interpret(line, false) - def interpretSynthetic(line: String): IR.Result = interpret(line, true) + def interpret(line: String): IR.Result = interpret(line, synthetic = false) + def interpretSynthetic(line: String): IR.Result = interpret(line, synthetic = true) def interpret(line: String, synthetic: Boolean): IR.Result = { def loadAndRunReq(req: Request) = { classLoader.setAsContext() diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala index 0d11020752..4bba27b714 100644 --- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala +++ b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala @@ -39,7 +39,7 @@ trait LoopCommands { // called if no args are given def showUsage(): Result = { "usage is " + usageMsg - Result(true, None) + Result(keepRunning = true, None) } } object LoopCommand { @@ -72,7 +72,7 @@ trait LoopCommands { object Result { // the default result means "keep running, and don't record that line" - val default = Result(true, None) + val default = Result(keepRunning = true, None) // most commands do not want to micromanage the Result, but they might want // to print something to the console, so we accomodate Unit and String returns. 
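All of the hunks in this commit apply one mechanical refactoring: call sites that passed bare true/false literals now name the Boolean parameter (skipIt = false, keepRunning = true, Static(onInstance = true), and so on), so the meaning of each flag is visible without opening the callee. A minimal sketch of the idea, using a hypothetical render method rather than any actual compiler API:

    object NamedArgExample {
      // Hypothetical method, standing in for callees such as syntaxError or Result;
      // the Boolean flag plays the role of skipIt / keepRunning / onInstance.
      def render(tree: String, verbose: Boolean): String =
        if (verbose) s"Tree($tree)" else tree

      // Before: the literal gives no hint what the second argument controls.
      val terse = render("Ident(x)", false)

      // After: naming the parameter documents the call site, mirroring the
      // rewrite of syntaxError(..., false) to syntaxError(..., skipIt = false).
      val labelled = render("Ident(x)", verbose = true)
    }

Named arguments have been part of the language since Scala 2.8, so the rewrite changes no behaviour; it only makes the flags self-documenting at each call site.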
diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala index a73bb80157..3ecd3b9ae4 100644 --- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala @@ -30,10 +30,10 @@ trait StructuredTypeStrings extends DestructureTypes { else elems.mkString(ldelim, mdelim, rdelim) ) } - val NoGrouping = Grouping("", "", "", false) - val ListGrouping = Grouping("(", ", ", ")", false) - val ProductGrouping = Grouping("(", ", ", ")", true) - val BlockGrouping = Grouping(" { ", "; ", "}", false) + val NoGrouping = Grouping("", "", "", labels = false) + val ListGrouping = Grouping("(", ", ", ")", labels = false) + val ProductGrouping = Grouping("(", ", ", ")", labels = true) + val BlockGrouping = Grouping(" { ", "; ", "}", labels = false) private def str(level: Int)(body: => String): String = " " * level + body private def block(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = { @@ -66,7 +66,7 @@ trait StructuredTypeStrings extends DestructureTypes { def nodes: List[TypeNode] def show(indent: Int, showLabel: Boolean): String = maybeBlock(indent, grouping)(mkPrefix(showLabel), nodes) - def show(indent: Int): String = show(indent, true) + def show(indent: Int): String = show(indent, showLabel = true) def show(): String = show(0) def withLabel(l: String): this.type = modifyNameInfo(_.copy(label = l)) diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index 94399f936b..ee3e2b04d1 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -159,7 +159,7 @@ object Jar { private val ZipMagicNumber = List[Byte](80, 75, 3, 4) private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber) - def isJarOrZip(f: Path): Boolean = isJarOrZip(f, true) + def isJarOrZip(f: Path): Boolean = isJarOrZip(f, examineFile = true) def isJarOrZip(f: Path, examineFile: Boolean): Boolean = f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f)) diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala index ece78db2cf..3220c2e2b2 100644 --- a/src/compiler/scala/tools/nsc/io/SourceReader.scala +++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala @@ -74,7 +74,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) { protected def read(bytes: ByteBuffer): Array[Char] = { val decoder: CharsetDecoder = this.decoder.reset() val chars: CharBuffer = this.chars; chars.clear() - terminate(flush(decoder, decode(decoder, bytes, chars, true))) + terminate(flush(decoder, decode(decoder, bytes, chars, endOfInput = true))) } //######################################################################## diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 8f5dca2702..cf40fe90fa 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -178,7 +178,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { JavaScannerConfiguration.token2string(token) + " expected but " + JavaScannerConfiguration.token2string(in.token) + " found." 
- syntaxError(posToReport, msg, true) + syntaxError(posToReport, msg, skipIt = true) } if (in.token == token) in.nextToken() pos @@ -224,7 +224,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { case AppliedTypeTree(_, _) | ExistentialTypeTree(_, _) | SelectFromTypeTree(_, _) => tree case _ => - syntaxError(tree.pos, "identifier expected", false) + syntaxError(tree.pos, "identifier expected", skipIt = false) errorTypeTree } } @@ -259,7 +259,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { case FLOAT => in.nextToken(); TypeTree(FloatClass.tpe) case DOUBLE => in.nextToken(); TypeTree(DoubleClass.tpe) case BOOLEAN => in.nextToken(); TypeTree(BooleanClass.tpe) - case _ => syntaxError("illegal start of type", true); errorTypeTree + case _ => syntaxError("illegal start of type", skipIt = true); errorTypeTree } } @@ -644,7 +644,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { accept(SEMI) val names = buf.toList if (names.length < 2) { - syntaxError(pos, "illegal import", false) + syntaxError(pos, "illegal import", skipIt = false) List() } else { val qual = ((Ident(names.head): Tree) /: names.tail.init) (Select(_, _)) @@ -839,7 +839,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { case INTERFACE => interfaceDecl(mods) case AT => annotationDecl(mods) case CLASS => classDecl(mods) - case _ => in.nextToken(); syntaxError("illegal start of type declaration", true); List(errorTypeTree) + case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree) } /** CompilationUnit ::= [package QualId semi] TopStatSeq @@ -867,7 +867,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { while (in.token != EOF && in.token != RBRACE) { while (in.token == SEMI) in.nextToken() if (in.token != EOF) - buf ++= typeDecl(modifiers(false)) + buf ++= typeDecl(modifiers(inInterface = false)) } accept(EOF) atPos(pos) { diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index ad92b2e742..1d1469f87d 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -63,8 +63,8 @@ trait JavaScanners extends ast.parser.ScannersCommon { def next: AbstractJavaTokenData def intVal(negated: Boolean): Long def floatVal(negated: Boolean): Double - def intVal: Long = intVal(false) - def floatVal: Double = floatVal(false) + def intVal: Long = intVal(negated = false) + def floatVal: Double = floatVal(negated = false) def currentPos: Position } diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala index 817ec47ab3..0544da5d3c 100644 --- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala @@ -58,15 +58,15 @@ abstract class Reporter { /** For sending a message which should not be labeled as a warning/error, * but also shouldn't require -verbose to be visible. */ - def echo(msg: String): Unit = info(NoPosition, msg, true) - def echo(pos: Position, msg: String): Unit = info(pos, msg, true) + def echo(msg: String): Unit = info(NoPosition, msg, force = true) + def echo(pos: Position, msg: String): Unit = info(pos, msg, force = true) /** Informational messages, suppressed unless -verbose or force=true. 
*/ def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force) /** Warnings and errors. */ - def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, false)) - def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, false)) + def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, force = false)) + def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, force = false)) def incompleteInputError(pos: Position, msg: String): Unit = { if (incompleteHandled) incompleteHandler(pos, msg) else error(pos, msg) diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 7a17180724..5fa3594128 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -82,13 +82,13 @@ class MutableSettings(val errorFn: String => Unit) } loop(arguments, Nil) } - def processArgumentString(params: String) = processArguments(splitParams(params), true) + def processArgumentString(params: String) = processArguments(splitParams(params), processAll = true) /** Create a new Settings object, copying all user-set values. */ def copy(): Settings = { val s = new Settings() - s.processArguments(recreateArgs, true) + s.processArguments(recreateArgs, processAll = true) s } @@ -126,7 +126,7 @@ class MutableSettings(val errorFn: String => Unit) // if arg is of form -Xfoo:bar,baz,quux def parseColonArg(s: String): Option[List[String]] = { - val (p, args) = StringOps.splitWhere(s, _ == ':', true) getOrElse (return None) + val (p, args) = StringOps.splitWhere(s, _ == ':', doDropIndex = true) getOrElse (return None) // any non-Nil return value means failure and we return s unmodified tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _) @@ -249,7 +249,7 @@ class MutableSettings(val errorFn: String => Unit) if (dir != null && dir.isDirectory) dir // was: else if (allowJar && dir == null && Path.isJarOrZip(name, false)) - else if (allowJar && dir == null && Jar.isJarOrZip(name, false)) + else if (allowJar && dir == null && Jar.isJarOrZip(name, examineFile = false)) new PlainFile(Path(name)) else throw new FatalError(name + " does not exist or is not a directory") @@ -260,7 +260,7 @@ class MutableSettings(val errorFn: String => Unit) */ def setSingleOutput(outDir: String) { val dst = AbstractFile.getDirectory(outDir) - setSingleOutput(checkDir(dst, outDir, true)) + setSingleOutput(checkDir(dst, outDir, allowJar = true)) } def getSingleOutput: Option[AbstractFile] = singleOutDir @@ -323,12 +323,12 @@ class MutableSettings(val errorFn: String => Unit) case Some(d) => d match { case _: VirtualDirectory | _: io.ZipArchive => Nil - case _ => List(d.lookupPathUnchecked(srcPath, false)) + case _ => List(d.lookupPathUnchecked(srcPath, directory = false)) } case None => (outputs filter (isBelow _).tupled) match { case Nil => Nil - case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false)) + case matches => matches.map(_._1.lookupPathUnchecked(srcPath, directory = false)) } } } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index f0c2b05951..f8930c4ddd 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ 
b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -226,7 +226,7 @@ abstract class ClassfileParser { val (name0, tpe0) = getNameAndType(in.getChar(start + 3), ownerTpe) debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0) - forceMangledName(tpe0.typeSymbol.name, false) + forceMangledName(tpe0.typeSymbol.name, module = false) val (name, tpe) = getNameAndType(in.getChar(start + 3), ownerTpe) // println("new tpe: " + tpe + " at phase: " + phase) @@ -234,21 +234,21 @@ abstract class ClassfileParser { val index = in.getChar(start + 1) val name = getExternalName(in.getChar(starts(index) + 1)) //assert(name.endsWith("$"), "Not a module class: " + name) - f = forceMangledName(name dropRight 1, true) + f = forceMangledName(name dropRight 1, module = true) if (f == NoSymbol) f = rootMirror.getModuleByName(name dropRight 1) } else { val origName = nme.originalName(name) val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol // println("\t" + owner.info.member(name).tpe.widen + " =:= " + tpe) - f = owner.info.findMember(origName, 0, 0, false).suchThat(_.tpe.widen =:= tpe) + f = owner.info.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe.widen =:= tpe) if (f == NoSymbol) - f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe) + f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe) if (f == NoSymbol) { // if it's an impl class, try to find it's static member inside the class if (ownerTpe.typeSymbol.isImplClass) { // println("impl class, member: " + owner.tpe.member(origName) + ": " + owner.tpe.member(origName).tpe) - f = ownerTpe.findMember(origName, 0, 0, false).suchThat(_.tpe =:= tpe) + f = ownerTpe.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe) } else { log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe) f = tpe match { @@ -814,14 +814,14 @@ abstract class ClassfileParser { val tpname = subName(':'.==).toTypeName val s = sym.newTypeParameter(tpname) tparams = tparams + (tpname -> s) - sig2typeBounds(tparams, true) + sig2typeBounds(tparams, skiptvs = true) newTParams += s } index = start while (sig.charAt(index) != '>') { val tpname = subName(':'.==).toTypeName val s = tparams(tpname) - s.setInfo(sig2typeBounds(tparams, false)) + s.setInfo(sig2typeBounds(tparams, skiptvs = false)) } accept('>') } @@ -830,12 +830,12 @@ abstract class ClassfileParser { sym.setInfo(new TypeParamsType(ownTypeParams)) val tpe = if ((sym eq null) || !sym.isClass) - sig2type(tparams, false) + sig2type(tparams, skiptvs = false) else { classTParams = tparams val parents = new ListBuffer[Type]() while (index < end) { - parents += sig2type(tparams, false) // here the variance doesnt'matter + parents += sig2type(tparams, skiptvs = false) // here the variance doesnt'matter } ClassInfoType(parents.toList, instanceScope, sym) } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index 703724f003..7010c9e20a 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -71,7 +71,7 @@ abstract class ICodeReader extends ClassfileParser { } override def parseField() { - val (jflags, sym) = parseMember(true) + val (jflags, sym) = parseMember(field = true) getCode(jflags) addField new IField(sym) skipAttributes() } 
@@ -90,9 +90,9 @@ abstract class ICodeReader extends ClassfileParser { (jflags, NoSymbol) else { val owner = getOwner(jflags) - var sym = owner.info.findMember(name, 0, 0, false).suchThat(old => sameType(old.tpe, tpe)) + var sym = owner.info.findMember(name, 0, 0, stableOnly = false).suchThat(old => sameType(old.tpe, tpe)) if (sym == NoSymbol) - sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe) + sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe) if (sym == NoSymbol) { sym = if (field) owner.newValue(name.toTermName, owner.pos, toScalaFieldFlags(jflags)) else dummySym sym setInfoAndEnter tpe @@ -119,7 +119,7 @@ abstract class ICodeReader extends ClassfileParser { } override def parseMethod() { - val (jflags, sym) = parseMember(false) + val (jflags, sym) = parseMember(field = false) val beginning = in.bp try { if (sym != NoSymbol) { @@ -165,10 +165,10 @@ abstract class ICodeReader extends ClassfileParser { } else if (nme.isModuleName(name)) { val strippedName = nme.stripModuleSuffix(name) - forceMangledName(newTermName(strippedName.decode), true) orElse rootMirror.getModuleByName(strippedName) + forceMangledName(newTermName(strippedName.decode), module = true) orElse rootMirror.getModuleByName(strippedName) } else { - forceMangledName(name, false) + forceMangledName(name, module = false) exitingFlatten(rootMirror.getClassByName(name.toTypeName)) } if (sym.isModule) @@ -466,41 +466,41 @@ abstract class ICodeReader extends ClassfileParser { case JVM.return_ => code.emit(RETURN(UNIT)) case JVM.getstatic => - val field = pool.getMemberSymbol(in.nextChar, true); size += 2 + val field = pool.getMemberSymbol(in.nextChar, static = true); size += 2 if (field.hasModuleFlag) code emit LOAD_MODULE(field) else - code emit LOAD_FIELD(field, true) + code emit LOAD_FIELD(field, isStatic = true) case JVM.putstatic => - val field = pool.getMemberSymbol(in.nextChar, true); size += 2 - code.emit(STORE_FIELD(field, true)) + val field = pool.getMemberSymbol(in.nextChar, static = true); size += 2 + code.emit(STORE_FIELD(field, isStatic = true)) case JVM.getfield => - val field = pool.getMemberSymbol(in.nextChar, false); size += 2 - code.emit(LOAD_FIELD(field, false)) + val field = pool.getMemberSymbol(in.nextChar, static = false); size += 2 + code.emit(LOAD_FIELD(field, isStatic = false)) case JVM.putfield => - val field = pool.getMemberSymbol(in.nextChar, false); size += 2 - code.emit(STORE_FIELD(field, false)) + val field = pool.getMemberSymbol(in.nextChar, static = false); size += 2 + code.emit(STORE_FIELD(field, isStatic = false)) case JVM.invokevirtual => - val m = pool.getMemberSymbol(in.nextChar, false); size += 2 + val m = pool.getMemberSymbol(in.nextChar, static = false); size += 2 code.emit(CALL_METHOD(m, Dynamic)) case JVM.invokeinterface => - val m = pool.getMemberSymbol(in.nextChar, false); size += 4 + val m = pool.getMemberSymbol(in.nextChar, static = false); size += 4 in.skip(2) code.emit(CALL_METHOD(m, Dynamic)) case JVM.invokespecial => - val m = pool.getMemberSymbol(in.nextChar, false); size += 2 - val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(true) + val m = pool.getMemberSymbol(in.nextChar, static = false); size += 2 + val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(onInstance = true) else SuperCall(m.owner.name) code.emit(CALL_METHOD(m, style)) case JVM.invokestatic => - val m = pool.getMemberSymbol(in.nextChar, true); size += 
2 + val m = pool.getMemberSymbol(in.nextChar, static = true); size += 2 if (isBox(m)) code.emit(BOX(toTypeKind(m.info.paramTypes.head))) else if (isUnbox(m)) code.emit(UNBOX(toTypeKind(m.info.resultType))) else - code.emit(CALL_METHOD(m, Static(false))) + code.emit(CALL_METHOD(m, Static(onInstance = false))) case JVM.new_ => code.emit(NEW(REFERENCE(pool.getClassSymbol(in.nextChar)))) @@ -942,7 +942,7 @@ abstract class ICodeReader extends ClassfileParser { } case None => checkValidIndex() - val l = freshLocal(idx, kind, false) + val l = freshLocal(idx, kind, isArg = false) debuglog("Added new local for idx " + idx + ": " + kind) locals += (idx -> List((l, kind))) l @@ -966,7 +966,7 @@ abstract class ICodeReader extends ClassfileParser { * the original method. */ def freshLocal(kind: TypeKind): Local = { count += 1 - freshLocal(maxLocals + count, kind, false) + freshLocal(maxLocals + count, kind, isArg = false) } /** add a method param with the given index. */ diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 141a63d36e..e676abb86a 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -1254,7 +1254,7 @@ abstract class Erasure extends AddInterfaces val tree2 = mixinTransformer.transform(tree1) // debuglog("tree after addinterfaces: \n" + tree2) - newTyper(rootContext(unit, tree, true)).typed(tree2) + newTyper(rootContext(unit, tree, erasedTypes = true)).typed(tree2) } } } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 74459efc92..988e80aa77 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -148,7 +148,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { sym => isConcreteAccessor(sym) && !sym.hasFlag(MIXEDIN) && - matchesType(sym.tpe, member.tpe, true)) + matchesType(sym.tpe, member.tpe, alwaysMatchSimple = true)) } ( bcs.head != member.owner && (hasOverridingAccessor(bcs.head) || isOverriddenAccessor(member, bcs.tail)) @@ -273,7 +273,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val imember = member overriddenSymbol mixinInterface imember overridingSymbol clazz match { case NoSymbol => - if (clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives contains imember) + if (clazz.info.findMember(member.name, 0, lateDEFERRED, stableOnly = false).alternatives contains imember) cloneAndAddMixinMember(mixinInterface, imember).asInstanceOf[TermSymbol] setAlias member case _ => } @@ -855,7 +855,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val bitmapSym = bitmapFor(clazz, offset, lzyVal) val kind = bitmapKind(lzyVal) val mask = maskForOffset(offset, lzyVal, kind) - def cond = mkTest(clazz, mask, bitmapSym, true, kind) + def cond = mkTest(clazz, mask, bitmapSym, equalToZero = true, kind) val nulls = lazyValNullables(lzyVal).toList sortBy (_.id) map nullify def syncBody = init ::: List(mkSetFlag(clazz, offset, lzyVal, kind), UNIT) @@ -882,7 +882,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val mask = maskForOffset(offset, sym, kind) val msg = s"Uninitialized field: ${unit.source}: ${pos.line}" val result = - IF (mkTest(clazz, mask, bitmapSym, false, kind)) . + IF (mkTest(clazz, mask, bitmapSym, equalToZero = false, kind)) . THEN (retVal) . 
ELSE (Throw(NewFromConstructor(UninitializedFieldConstructor, LIT(msg)))) diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index b2d05f98b1..2418698a18 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -392,7 +392,7 @@ abstract class TailCalls extends Transform { finally maybeTail = saved } - def traverseNoTail(tree: Tree) = traverse(tree, false) + def traverseNoTail(tree: Tree) = traverse(tree, maybeTailNew = false) def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail override def traverse(tree: Tree) = tree match { diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala index 73f39225bd..3feadcd9b2 100644 --- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala +++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala @@ -17,7 +17,7 @@ trait TypingTransformers { abstract class TypingTransformer(unit: CompilationUnit) extends Transformer { var localTyper: analyzer.Typer = if (phase.erasedTypes) - erasure.newTyper(erasure.rootContext(unit, EmptyTree, true)).asInstanceOf[analyzer.Typer] + erasure.newTyper(erasure.rootContext(unit, EmptyTree, erasedTypes = true)).asInstanceOf[analyzer.Typer] else analyzer.newTyper(analyzer.rootContext(unit, EmptyTree, true)) protected var curTree: Tree = _ diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 413ef473c3..94ca1206b9 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -436,7 +436,7 @@ abstract class UnCurry extends InfoTransform if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd) - withNeedLift(false) { + withNeedLift(needLift = false) { if (dd.symbol.isClassConstructor) { atOwner(sym) { val rhs1 = (rhs: @unchecked) match { @@ -460,11 +460,11 @@ abstract class UnCurry extends InfoTransform case ValDef(_, _, _, rhs) => if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit) if (!sym.owner.isSourceMethod) - withNeedLift(true) { super.transform(tree) } + withNeedLift(needLift = true) { super.transform(tree) } else super.transform(tree) case UnApply(fn, args) => - val fn1 = withInPattern(false)(transform(fn)) + val fn1 = withInPattern(value = false)(transform(fn)) val args1 = transformTrees(fn.symbol.name match { case nme.unapply => args case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args.length)) @@ -480,13 +480,13 @@ abstract class UnCurry extends InfoTransform } case Assign(_: RefTree, _) => - withNeedLift(true) { super.transform(tree) } + withNeedLift(needLift = true) { super.transform(tree) } case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) => - withNeedLift(true) { super.transform(tree) } + withNeedLift(needLift = true) { super.transform(tree) } case ret @ Return(_) if (isNonLocalReturn(ret)) => - withNeedLift(true) { super.transform(ret) } + withNeedLift(needLift = true) { super.transform(ret) } case Try(_, Nil, _) => // try-finally does not need lifting: lifting is needed only for try-catch @@ -500,7 +500,7 @@ abstract class UnCurry extends InfoTransform else super.transform(tree) case CaseDef(pat, guard, body) => - val pat1 
= withInPattern(true)(transform(pat)) + val pat1 = withInPattern(value = true)(transform(pat)) treeCopy.CaseDef(tree, pat1, transform(guard), transform(body)) case fun @ Function(_, _) => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 499bf1b022..4fc71a7410 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -347,7 +347,7 @@ trait Logic extends Debugging { val TrueF = formula() val FalseF = formula(clause()) def lit(s: Sym) = formula(clause(Lit(s))) - def negLit(s: Sym) = formula(clause(Lit(s, false))) + def negLit(s: Sym) = formula(clause(Lit(s, pos = false))) def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = { def distribute(a: Formula, b: Formula, budget: Int): Formula = @@ -439,7 +439,7 @@ trait Logic extends Debugging { else Nil } val forced = unassigned flatMap { s => - force(Lit(s, true)) ++ force(Lit(s, false)) + force(Lit(s, pos = true)) ++ force(Lit(s, pos = false)) } debug.patmat("forced "+ forced) val negated = negateModel(model) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index ed990105fd..86eb3c9666 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -658,7 +658,7 @@ trait MatchAnalysis extends TreeAndTypeAnalysis { self: PatternMatching => cls match { case ConsClass => ListExample(args()) - case _ if isTupleSymbol(cls) => TupleExample(args(true)) + case _ if isTupleSymbol(cls) => TupleExample(args(brevity = true)) case _ => ConstructorExample(cls, args()) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala index ce19d9cba8..273aebd71e 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala @@ -74,7 +74,7 @@ trait MatchCodeGen { self: PatternMatching => // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly) def _asInstanceOf(b: Symbol, tp: Type): Tree = if (b.info <:< tp) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp) - def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false) + def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, any = true, wrapInApply = false) // duplicated out of frustration with cast generation def mkZero(tp: Type): Tree = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 3e5a61c673..429bd7d682 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -85,8 +85,8 @@ trait Contexts { self: Analyzer => else RootImports.completeList } - def rootContext(unit: CompilationUnit): Context = rootContext(unit, EmptyTree, false) - def rootContext(unit: CompilationUnit, tree: Tree): Context = rootContext(unit, tree, false) + def rootContext(unit: CompilationUnit): Context = rootContext(unit, EmptyTree, erasedTypes = false) + def rootContext(unit: CompilationUnit, tree: Tree): Context = rootContext(unit, tree, 
erasedTypes = false) def rootContext(unit: CompilationUnit, tree: Tree, erasedTypes: Boolean): Context = { var sc = startContext for (sym <- rootImports(unit)) { @@ -443,7 +443,7 @@ trait Contexts { self: Analyzer => else throw new TypeError(pos, msg1) } - def warning(pos: Position, msg: String): Unit = warning(pos, msg, false) + def warning(pos: Position, msg: String): Unit = warning(pos, msg, force = false) def warning(pos: Position, msg: String, force: Boolean) { if (reportErrors || force) unit.warning(pos, msg) else if (bufferErrors) warningsBuffer += ((pos, msg)) diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala index 4fbb788c7b..80dfef6c7b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala +++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala @@ -98,7 +98,7 @@ trait EtaExpansion { self: Analyzer => case TypeApply(fn, args) => treeCopy.TypeApply(tree, liftoutPrefix(fn), args).clearType() case Select(qual, name) => - treeCopy.Select(tree, liftout(qual, false), name).clearType() setSymbol NoSymbol + treeCopy.Select(tree, liftout(qual, byName = false), name).clearType() setSymbol NoSymbol case Ident(name) => tree } diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index b4ec90c53e..2331f82a58 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -34,7 +34,7 @@ trait Implicits { import global.typer.{ printTyping, deindentTyping, indentTyping, printInference } def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult = - inferImplicit(tree, pt, reportAmbiguous, isView, context, true, tree.pos) + inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent = true, tree.pos) def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean): SearchResult = inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, tree.pos) @@ -112,7 +112,7 @@ trait Implicits { val tvars = tpars map (TypeVar untouchable _) val tpSubsted = tp.subst(tpars, tvars) - val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyClass.tpe), true, context.makeImplicit(false)) + val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyClass.tpe), true, context.makeImplicit(reportAmbiguousErrors = false)) search.allImplicitsPoly(tvars) } @@ -630,7 +630,7 @@ trait Implicits { printTyping(ptLine("" + info.sym, "tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr))) val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt), - false, lubDepth(List(itree2.tpe, pt))) + upper = false, lubDepth(List(itree2.tpe, pt))) // #2421: check that we correctly instantiated type parameters outside of the implicit tree: checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ") @@ -1154,7 +1154,7 @@ trait Implicits { /** Re-wraps a type in a manifest before calling inferImplicit on the result */ def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) = - inferImplicit(tree, appliedType(manifestClass, tp), true, false, context).tree + inferImplicit(tree, appliedType(manifestClass, tp), reportAmbiguous = true, isView = false, context).tree def findSubManifest(tp: Type) = 
findManifest(tp, if (full) FullManifestClass else OptManifestClass) def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = { @@ -1289,7 +1289,7 @@ trait Implicits { val failstart = if (Statistics.canEnable) Statistics.startTimer(inscopeFailNanos) else null val succstart = if (Statistics.canEnable) Statistics.startTimer(inscopeSucceedNanos) else null - var result = searchImplicit(context.implicitss, true) + var result = searchImplicit(context.implicitss, isLocal = true) if (result.isFailure) { if (Statistics.canEnable) Statistics.stopTimer(inscopeFailNanos, failstart) @@ -1307,7 +1307,7 @@ trait Implicits { // `materializeImplicit` does some preprocessing for `pt` // is it only meant for manifests/tags or we need to do the same for `implicitsOfExpectedType`? - if (result.isFailure && !wasAmbigious) result = searchImplicit(implicitsOfExpectedType, false) + if (result.isFailure && !wasAmbigious) result = searchImplicit(implicitsOfExpectedType, isLocal = false) if (result.isFailure) { context.updateBuffer(previousErrs) @@ -1326,7 +1326,7 @@ trait Implicits { def allImplicits: List[SearchResult] = { def search(iss: Infoss, isLocal: Boolean) = applicableInfos(iss, isLocal).values - (search(context.implicitss, true) ++ search(implicitsOfExpectedType, false)).toList.filter(_.tree ne EmptyTree) + (search(context.implicitss, isLocal = true) ++ search(implicitsOfExpectedType, isLocal = false)).toList.filter(_.tree ne EmptyTree) } // find all implicits for some type that contains type variables diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index a29cc93b6d..d4f402b747 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -491,7 +491,7 @@ trait Infer extends Checkable { } //println("try to solve "+tvars+" "+tparams) (solvedTypes(tvars, tparams, tparams map varianceInType(varianceType), - false, lubDepth(List(restpe, pt))), tvars) + upper = false, lubDepth(List(restpe, pt))), tvars) } catch { case ex: NoInstance => (null, null) } @@ -667,7 +667,7 @@ trait Infer extends Checkable { } val targs = solvedTypes( tvars, tparams, tparams map varianceInTypes(formals), - false, lubDepth(formals) max lubDepth(argtpes) + upper = false, lubDepth(formals) max lubDepth(argtpes) ) // Can warn about inferring Any/AnyVal as long as they don't appear // explicitly anywhere amongst the formal, argument, result, or expected type. 
@@ -914,7 +914,7 @@ trait Infer extends Checkable { */ private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = { - val silentContext = context.makeSilent(false) + val silentContext = context.makeSilent(reportAmbiguousErrors = false) val typer0 = newTyper(silentContext) val res1 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, pt) if (pt != WildcardType && silentContext.hasErrors) { @@ -1101,7 +1101,7 @@ trait Infer extends Checkable { } def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = { - checkKindBounds0(tparams, targs, pre, owner, true) map { + checkKindBounds0(tparams, targs, pre, owner, explainErrors = true) map { case (targ, tparam, kindErrors) => kindErrors.errorMessage(targ, tparam) } @@ -1263,7 +1263,7 @@ trait Infer extends Checkable { val variances = if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp) else undetparams map varianceInTypes(ctorTp.paramTypes) - val targs = solvedTypes(tvars, undetparams, variances, true, lubDepth(List(resTp, pt))) + val targs = solvedTypes(tvars, undetparams, variances, upper = true, lubDepth(List(resTp, pt))) // checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ") // no checkBounds here. If we enable it, test bug602 fails. // TODO: reinstate checkBounds, return params that fail to meet their bounds to undetparams @@ -1335,7 +1335,7 @@ trait Infer extends Checkable { val tvars1 = tvars map (_.cloneInternal) // Note: right now it's not clear that solving is complete, or how it can be made complete! // So we should come back to this and investigate. - solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (_ => Variance.Covariant), false) + solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (_ => Variance.Covariant), upper = false) } // this is quite nasty: it destructively changes the info of the syms of e.g., method type params diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 3c5e484105..a1bf3a56c3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -502,7 +502,7 @@ trait Namers extends MethodSynthesis { typer.permanentlyHiddenWarning(pos, to0, e.sym) else if (context ne context.enclClass) { val defSym = context.prefix.member(to) filter ( - sym => sym.exists && context.isAccessible(sym, context.prefix, false)) + sym => sym.exists && context.isAccessible(sym, context.prefix, superAccess = false)) defSym andAlso (typer.permanentlyHiddenWarning(pos, to0, _)) } diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index d8cedd119b..1d28add6e0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -224,7 +224,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT transformTemplate case TypeApply(sel @ Select(This(_), name), args) => - mayNeedProtectedAccessor(sel, args, false) + mayNeedProtectedAccessor(sel, args, goToSuper = false) // set a flag for all type parameters with `@specialized` annotation so it can be pickled case typeDef: TypeDef if typeDef.symbol.deSkolemize.hasAnnotation(definitions.SpecializedClass) => @@ -274,7 +274,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT 
Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias) }).asInstanceOf[Select] debuglog("alias replacement: " + tree + " ==> " + result); //debug - localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, true)) + localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, beforeRefChecks = true)) } else { /** * A trait which extends a class and accesses a protected member @@ -302,7 +302,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT ensureAccessor(sel) } else - mayNeedProtectedAccessor(sel, EmptyTree.asList, false) + mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = false) } case Super(_, mix) => @@ -315,7 +315,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT transformSuperSelect(sel) case _ => - mayNeedProtectedAccessor(sel, EmptyTree.asList, true) + mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = true) } } transformSelect @@ -324,7 +324,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, withInvalidOwner(transform(rhs))) case TypeApply(sel @ Select(qual, name), args) => - mayNeedProtectedAccessor(sel, args, true) + mayNeedProtectedAccessor(sel, args, goToSuper = true) case Assign(lhs @ Select(qual, name), rhs) => def transformAssign = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala index 45aa1bcbdb..d2d7f57aef 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala @@ -15,11 +15,11 @@ trait Tags { wrapper(inferImplicit( EmptyTree, taggedTp, - /*reportAmbiguous =*/ true, - /*isView =*/ false, - /*context =*/ context, - /*saveAmbiguousDivergent =*/ true, - /*pos =*/ pos + reportAmbiguous = true, + isView = false, + context, + saveAmbiguousDivergent = true, + pos ).tree) } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 98c5e31ffe..46740cd03c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -236,8 +236,8 @@ trait TypeDiagnostics { val invariant = param.variance.isInvariant if (conforms) Some("") - else if ((arg <:< reqArg) && invariant) mkMsg(true) // covariant relationship - else if ((reqArg <:< arg) && invariant) mkMsg(false) // contravariant relationship + else if ((arg <:< reqArg) && invariant) mkMsg(isSubtype = true) // covariant relationship + else if ((reqArg <:< arg) && invariant) mkMsg(isSubtype = false) // contravariant relationship else None // we assume in other cases our ham-fisted advice will merely serve to confuse } val messages = relationships.flatten @@ -546,7 +546,7 @@ trait TypeDiagnostics { // It is presumed if you are using a -Y option you would really like to hear // the warnings you've requested. 
if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && exprOK) - context.warning(tree.pos, "dead code following this construct", true) + context.warning(tree.pos, "dead code following this construct", force = true) tree } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 9680b911e0..1781dc1932 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -112,7 +112,7 @@ trait Typers extends Adaptations with Tags { override def isCoercible(tp: Type, pt: Type): Boolean = undoLog undo { // #3281 tp.isError || pt.isError || context0.implicitsEnabled && // this condition prevents chains of views - inferView(EmptyTree, tp, pt, false) != EmptyTree + inferView(EmptyTree, tp, pt, reportAmbiguous = false) != EmptyTree } } @@ -136,7 +136,7 @@ trait Typers extends Adaptations with Tags { for(ar <- argResultsBuff) paramTp = paramTp.subst(ar.subst.from, ar.subst.to) - val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, false, context) + val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, isView = false, context) argResultsBuff += res if (res.isSuccess) { @@ -179,7 +179,7 @@ trait Typers extends Adaptations with Tags { } def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree = - inferView(tree, from, to, reportAmbiguous, true) + inferView(tree, from, to, reportAmbiguous, saveErrors = true) /** Infer an implicit conversion (`view`) between two types. * @param tree The tree which needs to be converted. 
@@ -201,7 +201,7 @@ trait Typers extends Adaptations with Tags { case PolyType(_, _) => EmptyTree case _ => def wrapImplicit(from: Type): Tree = { - val result = inferImplicit(tree, functionType(from.withoutAnnotations :: Nil, to), reportAmbiguous, true, context, saveErrors) + val result = inferImplicit(tree, functionType(from.withoutAnnotations :: Nil, to), reportAmbiguous, isView = true, context, saveAmbiguousDivergent = saveErrors) if (result.subst != EmptyTreeTypeSubstituter) { result.subst traverse tree notifyUndetparamsInferred(result.subst.from, result.subst.to) @@ -723,7 +723,7 @@ trait Typers extends Adaptations with Tags { featureTrait.owner.ownerChain.takeWhile(_ != languageFeatureModule.moduleClass).reverse val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name def action(): Boolean = { - def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, true, false, context).isSuccess + def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, reportAmbiguous = true, isView = false, context).isSuccess def hasOption = settings.language.value exists (s => s == featureName || s == "_") val OK = hasImport || hasOption if (!OK) { @@ -1142,7 +1142,7 @@ trait Typers extends Adaptations with Tags { if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) { // (14); the condition prevents chains of views debuglog("inferring view from " + tree.tpe + " to " + pt) - val coercion = inferView(tree, tree.tpe, pt, true) + val coercion = inferView(tree, tree.tpe, pt, reportAmbiguous = true) if (coercion != EmptyTree) { def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe if (settings.logImplicitConv.value) @@ -1320,7 +1320,7 @@ trait Typers extends Adaptations with Tags { reportError } - silent(_.adaptToMember(qual, HasMember(name), false)) orElse (err => + silent(_.adaptToMember(qual, HasMember(name), reportAmbiguous = false)) orElse (err => onError { if (reportAmbiguous) context issue err setError(tree) @@ -3880,7 +3880,7 @@ trait Typers extends Adaptations with Tags { val targs = args map (_.tpe) checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "") if (fun.symbol == Predef_classOf) - typedClassOf(tree, args.head, true) + typedClassOf(tree, args.head, noGen = true) else { if (!isPastTyper && fun.symbol == Any_isInstanceOf && targs.nonEmpty) { val scrutineeType = fun match { @@ -4383,7 +4383,7 @@ trait Typers extends Adaptations with Tags { } def tryTypedArgs(args: List[Tree], mode: Mode): Option[List[Tree]] = { - val c = context.makeSilent(false) + val c = context.makeSilent(reportAmbiguousErrors = false) c.retyping = true try { val res = newTyper(c).typedArgs(args, mode) @@ -4442,7 +4442,7 @@ trait Typers extends Adaptations with Tags { tryTypedArgs(args, forArgMode(fun, mode)) match { case Some(args1) => val qual1 = - if (!pt.isError) adaptToArguments(qual, name, args1, pt, true, true) + if (!pt.isError) adaptToArguments(qual, name, args1, pt, reportAmbiguous = true, saveErrors = true) else qual if (qual1 ne qual) { val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos @@ -4672,7 +4672,7 @@ trait Typers extends Adaptations with Tags { // member. 
Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an // xml member to StringContext, which in turn has an unapply[Seq] method) if (name != nme.CONSTRUCTOR && mode.inExprModeOr(PATTERNmode)) { - val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, true, true) + val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = true, saveErrors = true) if ((qual1 ne qual) && !qual1.isErrorTyped) return typed(treeCopy.Select(tree, qual1, name), mode, pt) } @@ -4757,7 +4757,7 @@ trait Typers extends Adaptations with Tags { case _ if accessibleError.isDefined => // don't adapt constructor, SI-6074 val qual1 = if (name == nme.CONSTRUCTOR) qual - else adaptToMemberWithArgs(tree, qual, name, mode, false, false) + else adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = false, saveErrors = false) if (!qual1.isErrorTyped && (qual1 ne qual)) typed(Select(qual1, name) setPos tree.pos, mode, pt) else diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index f125db3839..0104f5a57f 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -105,17 +105,17 @@ object ClassPath { /** Creators for sub classpaths which preserve this context. */ def sourcesInPath(path: String): List[ClassPath[T]] = - for (file <- expandPath(path, false) ; dir <- Option(AbstractFile getDirectory file)) yield + for (file <- expandPath(path, expandStar = false) ; dir <- Option(AbstractFile getDirectory file)) yield new SourcePath[T](dir, this) def contentsOfDirsInPath(path: String): List[ClassPath[T]] = - for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield + for (dir <- expandPath(path, expandStar = false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield newClassPath(entry) def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] = - classesInPathImpl(path, true).toIndexedSeq + classesInPathImpl(path, expand = true).toIndexedSeq - def classesInPath(path: String) = classesInPathImpl(path, false) + def classesInPath(path: String) = classesInPathImpl(path, expand = false) // Internal private def classesInPathImpl(path: String, expand: Boolean) = @@ -210,7 +210,7 @@ abstract class ClassPath[T] { * Does not support nested classes on .NET */ def findClass(name: String): Option[AnyClassRep] = - splitWhere(name, _ == '.', true) match { + splitWhere(name, _ == '.', doDropIndex = true) match { case Some((pkg, rest)) => val rep = packages find (_.name == pkg) flatMap (_ findClass rest) rep map { diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala index 1d2cc73c6b..3899ef24c7 100644 --- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala +++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala @@ -34,9 +34,9 @@ trait ScalaClassLoader extends JClassLoader { def setAsContext() { setContext(this) } /** Load and link a class with this classloader */ - def tryToLoadClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, false) + def tryToLoadClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, initialize = false) /** Load, link and initialize a class with this classloader */ - def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, true) + def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = 
tryClass(path, initialize = true) private def tryClass[T <: AnyRef](path: String, initialize: Boolean): Option[Class[T]] = catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala index 968b0d0863..1055894121 100644 --- a/src/compiler/scala/tools/reflect/package.scala +++ b/src/compiler/scala/tools/reflect/package.scala @@ -52,7 +52,7 @@ package object reflect { override def hasWarnings = reporter.hasWarnings def display(info: Info): Unit = info.severity match { - case API_INFO => reporter.info(info.pos, info.msg, false) + case API_INFO => reporter.info(info.pos, info.msg, force = false) case API_WARNING => reporter.warning(info.pos, info.msg) case API_ERROR => reporter.error(info.pos, info.msg) } diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala index 181bbedac5..7e984fd1f7 100644 --- a/src/compiler/scala/tools/util/Javap.scala +++ b/src/compiler/scala/tools/util/Javap.scala @@ -125,10 +125,10 @@ class JavapClass( if (res.isDefined && loadable(res.get)) res else None } // try loading translated+suffix - val res = loadableOrNone(false) + val res = loadableOrNone(strip = false) // some synthetics lack a dollar, (e.g., suffix = delayedInit$body) // so as a hack, if prefix$$suffix fails, also try prefix$suffix - if (res.isDefined) res else loadableOrNone(true) + if (res.isDefined) res else loadableOrNone(strip = true) } else None } val p = path.asClassName // scrub any suffix @@ -649,7 +649,7 @@ object JavapClass { val fs = if (isReplish) { def outed(d: AbstractFile, p: Seq[String]): Option[AbstractFile] = { if (p.isEmpty) Option(d) - else Option(d.lookupName(p.head, true)) flatMap (f => outed(f, p.tail)) + else Option(d.lookupName(p.head, directory = true)) flatMap (f => outed(f, p.tail)) } outed(intp.get.replOutput.dir, splat.init) map { d => listFunsInAbsFile(name, member, d) map packaged diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index a6c0f0f5c2..9f1d0317be 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -135,7 +135,7 @@ object PathResolver { } else { val settings = new Settings() - val rest = settings.processArguments(args.toList, false)._2 + val rest = settings.processArguments(args.toList, processAll = false)._2 val pr = new PathResolver(settings) println(" COMMAND: 'scala %s'".format(args.mkString(" "))) println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) -- cgit v1.2.3 From a8d60a6c76d2d5434148c2d7c8f2bf147855252c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 24 Feb 2013 15:04:22 +0100 Subject: Name boolean arguments in src/reflect. 
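Bare boolean literals at a call site give no hint of what they control; naming the argument documents the intent inline without changing behaviour. A minimal sketch of the pattern, using a hypothetical method that stands in for the many call sites touched below:

    object NamedBooleanExample {
      def lookup(name: String, directory: Boolean): Option[String] =
        if (directory) None else Some(name)   // placeholder body, illustration only

      val unclear = lookup("Foo.class", false)             // what does `false` toggle?
      val clearer = lookup("Foo.class", directory = false) // intent visible at the call site
    }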
--- .../scala/reflect/internal/CapturedVariables.scala | 2 +- src/reflect/scala/reflect/internal/Kinds.scala | 2 +- src/reflect/scala/reflect/internal/Positions.scala | 4 ++-- src/reflect/scala/reflect/internal/Printers.scala | 8 +++---- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 26 +++++++++++----------- .../scala/reflect/internal/util/TableDef.scala | 4 ++-- src/reflect/scala/reflect/io/AbstractFile.scala | 4 ++-- src/reflect/scala/reflect/io/File.scala | 2 +- .../scala/reflect/io/VirtualDirectory.scala | 4 ++-- src/reflect/scala/reflect/io/ZipArchive.scala | 4 ++-- 11 files changed, 31 insertions(+), 31 deletions(-) (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/CapturedVariables.scala b/src/reflect/scala/reflect/internal/CapturedVariables.scala index 77909d9157..c262c8474a 100644 --- a/src/reflect/scala/reflect/internal/CapturedVariables.scala +++ b/src/reflect/scala/reflect/internal/CapturedVariables.scala @@ -19,7 +19,7 @@ trait CapturedVariables { self: SymbolTable => /** Convert type of a captured variable to *Ref type. */ def capturedVariableType(vble: Symbol): Type = - capturedVariableType(vble, NoType, false) + capturedVariableType(vble, NoType, erasedTypes = false) /** Convert type of a captured variable to *Ref type. */ diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala index 5d7df8c367..cc8dd16d69 100644 --- a/src/reflect/scala/reflect/internal/Kinds.scala +++ b/src/reflect/scala/reflect/internal/Kinds.scala @@ -86,7 +86,7 @@ trait Kinds { // plan: split into kind inference and subkinding // every Type has a (cached) Kind def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean = - checkKindBounds0(tparams, targs, pre, owner, false).isEmpty + checkKindBounds0(tparams, targs, pre, owner, explainErrors = false).isEmpty /** Check whether `sym1`'s variance conforms to `sym2`'s variance. * diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index f5aeec63e1..cc32a0363f 100644 --- a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -36,7 +36,7 @@ trait Positions extends api.Positions { self: SymbolTable => * If some of the trees are ranges, returns a range position enclosing all ranges * Otherwise returns default position that is either focused or not. */ - def wrappingPos(default: Position, trees: List[Tree]): Position = wrappingPos(default, trees, true) + def wrappingPos(default: Position, trees: List[Tree]): Position = wrappingPos(default, trees, focus = true) def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = { if (useOffsetPositions) default else { val ranged = trees filter (_.pos.isRange) @@ -61,7 +61,7 @@ trait Positions extends api.Positions { self: SymbolTable => * shortening the range, assigning TransparentPositions * to some of the nodes in `tree` or focusing on the position. 
*/ - def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, true) } + def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, focus = true) } def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) { if (useOffsetPositions) return diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 55f7704056..28837c4ae8 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -25,8 +25,8 @@ trait Printers extends api.Printers { self: SymbolTable => if (nme.keywords(term) && term != nme.USCOREkw) "`%s`" format s else s } - def quotedName(name: Name): String = quotedName(name, false) - def quotedName(name: String): String = quotedName(newTermName(name), false) + def quotedName(name: Name): String = quotedName(name, decode = false) + def quotedName(name: String): String = quotedName(newTermName(name), decode = false) private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = { val sym = tree.symbol @@ -43,8 +43,8 @@ trait Printers extends api.Printers { self: SymbolTable => } } - def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true) - def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false) + def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, decoded = true) + def symName(tree: Tree, name: Name) = symNameInternal(tree, name, decoded = false) /** Turns a path into a String, introducing backquotes * as necessary. diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index ff83cb5f26..26ca62c44a 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2192,7 +2192,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => ) /** The setter of this value or getter definition, or NoSymbol if none exists */ - final def setter(base: Symbol): Symbol = setter(base, false) + final def setter(base: Symbol): Symbol = setter(base, hasExpandedName = false) final def setter(base: Symbol, hasExpandedName: Boolean): Symbol = { var sname = nme.getterToSetter(nme.getterName(name.toTermName)) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 22ba6d43e9..365e9a1682 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -705,7 +705,7 @@ trait Types extends api.Types { self: SymbolTable => findMembers(excludedFlags, requiredFlags) def memberBasedOnName(name: Name, excludedFlags: Long): Symbol = - findMember(name, excludedFlags, 0, false) + findMember(name, excludedFlags, 0, stableOnly = false) /** The least type instance of given class which is a supertype * of this type. Example: @@ -925,7 +925,7 @@ trait Types extends api.Types { self: SymbolTable => def matches(that: Type): Boolean = matchesType(this, that, !phase.erasedTypes) /** Same as matches, except that non-method types are always assumed to match. */ - def looselyMatches(that: Type): Boolean = matchesType(this, that, true) + def looselyMatches(that: Type): Boolean = matchesType(this, that, alwaysMatchSimple = true) /** The shortest sorted upwards closed array of types that contains * this type as first element. 
@@ -2844,7 +2844,7 @@ trait Types extends api.Types { self: SymbolTable => val tvars = quantifiedFresh map (tparam => TypeVar(tparam)) val underlying1 = underlying.instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars op(underlying1) && { - solve(tvars, quantifiedFresh, quantifiedFresh map (_ => Invariant), false, depth) && + solve(tvars, quantifiedFresh, quantifiedFresh map (_ => Invariant), upper = false, depth) && isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.constr.inst)) } } @@ -3259,7 +3259,7 @@ trait Types extends api.Types { self: SymbolTable => * (`T` corresponds to @param sym) */ def registerTypeSelection(sym: Symbol, tp: Type): Boolean = { - registerBound(HasTypeMember(sym.name.toTypeName, tp), false) + registerBound(HasTypeMember(sym.name.toTypeName, tp), isLowerBound = false) } private def isSkolemAboveLevel(tp: Type) = tp.typeSymbol match { @@ -5035,7 +5035,7 @@ trait Types extends api.Types { self: SymbolTable => } } else { - var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true) orElse { + var rebind0 = pre.findMember(sym.name, BRIDGE, 0, stableOnly = true) orElse { if (sym.isAliasType) throw missingAliasException devWarning(s"$pre.$sym no longer exist at phase $phase") throw new MissingTypeControl // For build manager and presentation compiler purposes @@ -5840,7 +5840,7 @@ trait Types extends api.Types { self: SymbolTable => case AnnotatedType(_, _, _) | BoundedWildcardType(_) => secondTry case _ => - tv2.registerBound(tp1, true) + tv2.registerBound(tp1, isLowerBound = true) } case _ => secondTry @@ -5858,7 +5858,7 @@ trait Types extends api.Types { self: SymbolTable => case BoundedWildcardType(bounds) => isSubType(tp1.bounds.lo, tp2, depth) case tv @ TypeVar(_,_) => - tv.registerBound(tp2, false) + tv.registerBound(tp2, isLowerBound = false) case ExistentialType(_, _) => try { skolemizationLevel += 1 @@ -6036,7 +6036,7 @@ trait Types extends api.Types { self: SymbolTable => def lastTry = tp2 match { case ExistentialType(_, res2) if alwaysMatchSimple => - matchesType(tp1, res2, true) + matchesType(tp1, res2, alwaysMatchSimple = true) case MethodType(_, _) => false case PolyType(_, _) => @@ -6056,7 +6056,7 @@ trait Types extends api.Types { self: SymbolTable => if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple) else matchesType(tp1, res2, alwaysMatchSimple) case ExistentialType(_, res2) => - alwaysMatchSimple && matchesType(tp1, res2, true) + alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true) case TypeRef(_, sym, Nil) => params1.isEmpty && sym.isModuleClass && matchesType(res1, tp2, alwaysMatchSimple) case _ => @@ -6069,7 +6069,7 @@ trait Types extends api.Types { self: SymbolTable => case NullaryMethodType(res2) => matchesType(res1, res2, alwaysMatchSimple) case ExistentialType(_, res2) => - alwaysMatchSimple && matchesType(tp1, res2, true) + alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true) case TypeRef(_, sym, Nil) if sym.isModuleClass => matchesType(res1, tp2, alwaysMatchSimple) case _ => @@ -6083,7 +6083,7 @@ trait Types extends api.Types { self: SymbolTable => else matchesQuantified(tparams1, tparams2, res1, res2) case ExistentialType(_, res2) => - alwaysMatchSimple && matchesType(tp1, res2, true) + alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true) case _ => false // remember that tparams1.nonEmpty is now an invariant of PolyType } @@ -6092,7 +6092,7 @@ trait Types extends api.Types { self: SymbolTable => case ExistentialType(tparams2, 
res2) => matchesQuantified(tparams1, tparams2, res1, res2) case _ => - if (alwaysMatchSimple) matchesType(res1, tp2, true) + if (alwaysMatchSimple) matchesType(res1, tp2, alwaysMatchSimple = true) else lastTry } case TypeRef(_, sym, Nil) if sym.isModuleClass => @@ -6288,7 +6288,7 @@ trait Types extends api.Types { self: SymbolTable => val columns: List[Column[List[Type]]] = mapWithIndex(sorted) { case ((k, v), idx) => - Column(str(k), (xs: List[Type]) => str(xs(idx)), true) + Column(str(k), (xs: List[Type]) => str(xs(idx)), left = true) } val tableDef = TableDef(columns: _*) diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala index 04ecfe8d76..d57c59757d 100644 --- a/src/reflect/scala/reflect/internal/util/TableDef.scala +++ b/src/reflect/scala/reflect/internal/util/TableDef.scala @@ -19,8 +19,8 @@ class TableDef[T](_cols: Column[T]*) { * if none is specified, a space is used. */ def ~(next: Column[T]) = retThis(cols :+= next) - def >>(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, false) - def <<(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, true) + def >>(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, left = false) + def <<(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, left = true) def >+(sep: String) = retThis(separators += ((cols.size - 1, sep))) /** Below this point should all be considered private/internal. diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index bd6c186825..4d6f14c71d 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -227,7 +227,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] { */ def fileNamed(name: String): AbstractFile = { assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path)) - fileOrSubdirectoryNamed(name, false) + fileOrSubdirectoryNamed(name, isDir = false) } /** @@ -236,7 +236,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] { */ def subdirectoryNamed(name: String): AbstractFile = { assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path)) - fileOrSubdirectoryNamed(name, true) + fileOrSubdirectoryNamed(name, isDir = true) } protected def unsupported(): Nothing = unsupported(null) diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala index c74dc06501..64651dcfbd 100644 --- a/src/reflect/scala/reflect/io/File.scala +++ b/src/reflect/scala/reflect/io/File.scala @@ -72,7 +72,7 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w /** Wraps a BufferedWriter around the result of writer(). 
*/ - def bufferedWriter(): BufferedWriter = bufferedWriter(false) + def bufferedWriter(): BufferedWriter = bufferedWriter(append = false) def bufferedWriter(append: Boolean): BufferedWriter = bufferedWriter(append, creationCodec) def bufferedWriter(append: Boolean, codec: Codec): BufferedWriter = new BufferedWriter(writer(append, codec)) diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala index ae0dd2032c..210167e5c6 100644 --- a/src/reflect/scala/reflect/io/VirtualDirectory.scala +++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala @@ -54,14 +54,14 @@ extends AbstractFile { (files get name filter (_.isDirectory == directory)).orNull override def fileNamed(name: String): AbstractFile = - Option(lookupName(name, false)) getOrElse { + Option(lookupName(name, directory = false)) getOrElse { val newFile = new VirtualFile(name, path+'/'+name) files(name) = newFile newFile } override def subdirectoryNamed(name: String): AbstractFile = - Option(lookupName(name, true)) getOrElse { + Option(lookupName(name, directory = true)) getOrElse { val dir = new VirtualDirectory(name, Some(this)) files(name) = dir dir diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 78fc8d9cc8..5414441e00 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -39,8 +39,8 @@ object ZipArchive { */ def fromURL(url: URL): URLZipArchive = new URLZipArchive(url) - private def dirName(path: String) = splitPath(path, true) - private def baseName(path: String) = splitPath(path, false) + private def dirName(path: String) = splitPath(path, front = true) + private def baseName(path: String) = splitPath(path, front = false) private def splitPath(path0: String, front: Boolean): String = { val isDir = path0.charAt(path0.length - 1) == '/' val path = if (isDir) path0.substring(0, path0.length - 1) else path0 -- cgit v1.2.3 From 9179c887cdf0ebc03c87e306cfa1cb99c5da3a88 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 24 Feb 2013 15:36:02 +0100 Subject: Name boolean arguments in src/library. 
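As in the compiler and reflect sources, only the opaque boolean arguments are named; positional arguments may still precede a named one, so the surrounding calls keep their shape. A sketch whose signature echoes the `Range.count` change below (the real implementation is not reproduced here):

    object NamedBooleanExample {
      // signature mirrors Range.count in the diff below; body is a placeholder
      def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = 0

      val before = count(0, 10, 2, false)                // meaning of `false` not obvious
      val after  = count(0, 10, 2, isInclusive = false)  // leading args positional, boolean named
    }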
--- src/library/scala/Array.scala | 2 +- src/library/scala/collection/SeqLike.scala | 10 +++---- .../scala/collection/concurrent/TrieMap.scala | 6 ++-- .../collection/generic/GenTraversableFactory.scala | 2 +- src/library/scala/collection/immutable/Range.scala | 2 +- .../scala/collection/immutable/RedBlackTree.scala | 34 +++++++++++----------- .../scala/collection/immutable/TreeMap.scala | 4 +-- .../scala/collection/immutable/TreeSet.scala | 4 +-- .../scala/collection/mutable/FlatHashTable.scala | 2 +- src/library/scala/collection/mutable/TreeSet.scala | 2 +- .../collection/parallel/ParIterableLike.scala | 2 +- .../collection/parallel/ParIterableViewLike.scala | 2 +- .../scala/collection/parallel/ParSeqViewLike.scala | 2 +- .../collection/parallel/mutable/ParTrieMap.scala | 2 +- src/library/scala/sys/process/ProcessBuilder.scala | 4 +-- .../scala/sys/process/ProcessBuilderImpl.scala | 30 +++++++++---------- src/library/scala/sys/process/ProcessImpl.scala | 2 +- src/library/scala/xml/Attribute.scala | 2 +- src/library/scala/xml/Equality.scala | 4 +-- src/library/scala/xml/Node.scala | 2 +- src/library/scala/xml/PrettyPrinter.scala | 2 +- src/library/scala/xml/dtd/ElementValidator.scala | 8 ++--- src/library/scala/xml/parsing/MarkupParser.scala | 8 ++--- .../scala/xml/persistent/CachedFileStorage.scala | 4 +-- 24 files changed, 71 insertions(+), 71 deletions(-) (limited to 'src') diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala index 1848127395..6ab82d998e 100644 --- a/src/library/scala/Array.scala +++ b/src/library/scala/Array.scala @@ -399,7 +399,7 @@ object Array extends FallbackArrayBuilding { def range(start: Int, end: Int, step: Int): Array[Int] = { if (step == 0) throw new IllegalArgumentException("zero step") val b = newBuilder[Int] - b.sizeHint(immutable.Range.count(start, end, step, false)) + b.sizeHint(immutable.Range.count(start, end, step, isInclusive = false)) var i = start while (if (step < 0) end < i else i < end) { diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index 307ee3f2a8..a83a6fe6a1 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -335,7 +335,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ if (from > l) -1 else if (tl < 1) clippedFrom else if (l < tl) -1 - else SeqLike.kmpSearch(thisCollection, clippedFrom, l, that.seq, 0, tl, true) + else SeqLike.kmpSearch(thisCollection, clippedFrom, l, that.seq, 0, tl, forward = true) } else { var i = from @@ -372,7 +372,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ if (end < 0) -1 else if (tl < 1) clippedL else if (l < tl) -1 - else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, false) + else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, forward = false) } /** Tests whether this $coll contains a given sequence as a slice. @@ -778,7 +778,7 @@ object SeqLike { case _ => // We had better not index into S directly! 
val iter = S.iterator.drop(m0) - val Wopt = kmpOptimizeWord(W, n0, n1, true) + val Wopt = kmpOptimizeWord(W, n0, n1, forward = true) val T = kmpJumpTable(Wopt, n1-n0) val cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind var largest = 0 @@ -851,7 +851,7 @@ object SeqLike { else if (s1 - s0 < t1 - t0) -1 // Source is too short to find target else { // Nontrivial search - val ans = kmpSearch(source, s0, s1, target, t0, t1, true) + val ans = kmpSearch(source, s0, s1, target, t0, t1, forward = true) if (ans < 0) ans else ans - math.min(slen, sourceOffset) } } @@ -883,7 +883,7 @@ object SeqLike { else if (fixed_s1 - s0 < t1 - t0) -1 // Source is too short to find target else { // Nontrivial search - val ans = kmpSearch(source, s0, fixed_s1, target, t0, t1, false) + val ans = kmpSearch(source, s0, fixed_s1, target, t0, t1, forward = false) if (ans < 0) ans else ans - s0 } } diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 491770dcf6..4eeacd7377 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -41,7 +41,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: TrieMap[K, V]): MainNode[K, V] = if (m eq null) null else { // complete the GCAS val prev = /*READ*/m.prev - val ctr = ct.readRoot(true) + val ctr = ct.readRoot(abort = true) prev match { case null => @@ -723,7 +723,7 @@ extends scala.collection.concurrent.Map[K, V] private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = { val desc = RDCSS_Descriptor(ov, expectedmain, nv) if (CAS_ROOT(ov, desc)) { - RDCSS_Complete(false) + RDCSS_Complete(abort = false) /*READ*/desc.committed } else false } @@ -1027,7 +1027,7 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2) stack(d) = arr1 stackpos(d) = -1 - val it = newIterator(level + 1, ct, false) + val it = newIterator(level + 1, ct, _mustInit = false) it.stack(0) = arr2 it.stackpos(0) = -1 it.depth = 0 diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala index 0b8c9835da..0e1a5534c0 100644 --- a/src/library/scala/collection/generic/GenTraversableFactory.scala +++ b/src/library/scala/collection/generic/GenTraversableFactory.scala @@ -216,7 +216,7 @@ extends GenericCompanion[CC] { if (step == zero) throw new IllegalArgumentException("zero step") val b = newBuilder[T] - b sizeHint immutable.NumericRange.count(start, end, step, false) + b sizeHint immutable.NumericRange.count(start, end, step, isInclusive = false) var i = start while (if (step < zero) end < i else i < end) { b += i diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 480c88ddcf..243e3fcb91 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -332,7 +332,7 @@ object Range { } } def count(start: Int, end: Int, step: Int): Int = - count(start, end, step, false) + count(start, end, step, isInclusive = false) class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { // override def par = new ParRange(this) diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala 
b/src/library/scala/collection/immutable/RedBlackTree.scala index 19414f8e10..37b8ecfbc4 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -48,7 +48,7 @@ object RedBlackTree { * Count all the nodes with keys greater than or equal to the lower bound and less than the upper bound. * The two bounds are optional. */ - def countInRange[A](tree: Tree[A, _], from: Option[A], to:Option[A])(implicit ordering: Ordering[A]) : Int = + def countInRange[A](tree: Tree[A, _], from: Option[A], to:Option[A])(implicit ordering: Ordering[A]) : Int = if (tree eq null) 0 else (from, to) match { // with no bounds use this node's count @@ -61,7 +61,7 @@ object RedBlackTree { // right will all be greater than or equal to the lower bound. So 1 for this node plus // count the subtrees by stripping off the bounds that we don't need any more case _ => 1 + countInRange(tree.left, from, None) + countInRange(tree.right, None, to) - + } def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k)) @@ -252,7 +252,7 @@ object RedBlackTree { if (ordering.lt(tree.key, from)) return doFrom(tree.right, from) val newLeft = doFrom(tree.left, from) if (newLeft eq tree.left) tree - else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false) + else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false) else rebalance(tree, newLeft, tree.right) } private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = { @@ -260,7 +260,7 @@ object RedBlackTree { if (ordering.lt(to, tree.key)) return doTo(tree.left, to) val newRight = doTo(tree.right, to) if (newRight eq tree.right) tree - else if (newRight eq null) upd(tree.left, tree.key, tree.value, false) + else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) else rebalance(tree, tree.left, newRight) } private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { @@ -268,7 +268,7 @@ object RedBlackTree { if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until) val newRight = doUntil(tree.right, until) if (newRight eq tree.right) tree - else if (newRight eq null) upd(tree.left, tree.key, tree.value, false) + else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) else rebalance(tree, tree.left, newRight) } private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { @@ -278,8 +278,8 @@ object RedBlackTree { val newLeft = doFrom(tree.left, from) val newRight = doUntil(tree.right, until) if ((newLeft eq tree.left) && (newRight eq tree.right)) tree - else if (newLeft eq null) upd(newRight, tree.key, tree.value, false) - else if (newRight eq null) upd(newLeft, tree.key, tree.value, false) + else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false) + else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false) else rebalance(tree, newLeft, newRight) } @@ -290,7 +290,7 @@ object RedBlackTree { if (n > count) return doDrop(tree.right, n - count - 1) val newLeft = doDrop(tree.left, n) if (newLeft eq tree.left) tree - else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, false) + else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, 
tree.value, overwrite = false) else rebalance(tree, newLeft, tree.right) } private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { @@ -300,7 +300,7 @@ object RedBlackTree { if (n <= count) return doTake(tree.left, n) val newRight = doTake(tree.right, n - count - 1) if (newRight eq tree.right) tree - else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, false) + else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, overwrite = false) else rebalance(tree, tree.left, newRight) } private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = { @@ -311,8 +311,8 @@ object RedBlackTree { val newLeft = doDrop(tree.left, from) val newRight = doTake(tree.right, until - count - 1) if ((newLeft eq tree.left) && (newRight eq tree.right)) tree - else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, false) - else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, false) + else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, overwrite = false) + else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, overwrite = false) else rebalance(tree, newLeft, newRight) } @@ -501,28 +501,28 @@ object RedBlackTree { } private[this] var index = 0 private[this] var lookahead: Tree[A, B] = start map startFrom getOrElse findLeftMostOrPopOnEmpty(root) - + /** - * Find the leftmost subtree whose key is equal to the given key, or if no such thing, + * Find the leftmost subtree whose key is equal to the given key, or if no such thing, * the leftmost subtree with the key that would be "next" after it according * to the ordering. Along the way build up the iterator's path stack so that "next" * functionality works. */ private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { - @tailrec def find(tree: Tree[A, B]): Tree[A, B] = + @tailrec def find(tree: Tree[A, B]): Tree[A, B] = if (tree eq null) popNext() else find( if (ordering.lteq(key, tree.key)) goLeft(tree) else goRight(tree) - ) + ) find(root) } - + private[this] def goLeft(tree: Tree[A, B]) = { pushNext(tree) tree.left } - + private[this] def goRight(tree: Tree[A, B]) = tree.right } diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 1093177172..5085039da5 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -128,7 +128,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi * @param value the value to be associated with `key` * @return a new $coll with the updated binding */ - override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, true)) + override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, overwrite = true)) /** Add a key/value pair to this map. 
* @tparam B1 type of the value of the new binding, a supertype of `B` @@ -168,7 +168,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi */ def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = { assert(!RB.contains(tree, key)) - new TreeMap(RB.update(tree, key, value, true)) + new TreeMap(RB.update(tree, key, value, overwrite = true)) } def - (key:A): TreeMap[A, B] = diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 26c3d44bbb..e25d16408a 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -109,7 +109,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin * @param elem a new element to add. * @return a new $coll containing `elem` and all the elements of this $coll. */ - def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), false)) + def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), overwrite = false)) /** A new `TreeSet` with the entry added is returned, * assuming that elem is not in the TreeSet. @@ -119,7 +119,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin */ def insert(elem: A): TreeSet[A] = { assert(!RB.contains(tree, elem)) - newSet(RB.update(tree, elem, (), false)) + newSet(RB.update(tree, elem, (), overwrite = false)) } /** Creates a new `TreeSet` with the entry removed. diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala index 2ca12549ef..49a9427588 100644 --- a/src/library/scala/collection/mutable/FlatHashTable.scala +++ b/src/library/scala/collection/mutable/FlatHashTable.scala @@ -230,7 +230,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { private def checkConsistent() { for (i <- 0 until table.length) if (table(i) != null && !containsElem(entryToElem(table(i)))) - assert(false, i+" "+table(i)+" "+table.mkString) + assert(assertion = false, i+" "+table(i)+" "+table.mkString) } diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala index 9113d8221b..147bc85383 100644 --- a/src/library/scala/collection/mutable/TreeSet.scala +++ b/src/library/scala/collection/mutable/TreeSet.scala @@ -67,7 +67,7 @@ class TreeSet[A] private (treeRef: ObjectRef[RB.Tree[A, Null]], from: Option[A], } override def +=(elem: A): this.type = { - treeRef.elem = RB.update(treeRef.elem, elem, null, false) + treeRef.elem = RB.update(treeRef.elem, elem, null, overwrite = false) this } diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index f0b0fd2aa0..961556faff 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -823,7 +823,7 @@ self: ParIterableLike[T, Repr, Sequential] => tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport }) } else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport) - def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false) + def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, inclusive = false) def zipAll[S, U >: T, That](that: 
GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) { val thatseq = that.asParSeq diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala index aaf83e49af..0567e7b396 100644 --- a/src/library/scala/collection/parallel/ParIterableViewLike.scala +++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala @@ -131,7 +131,7 @@ self => override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That] override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That = - newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That] + newZipped(ParRange(0, splitter.remaining, 1, inclusive = false)).asInstanceOf[That] override def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedAllTryParSeq(that, thisElem, thatElem).asInstanceOf[That] diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala index 22773464ed..f3dbe20e67 100644 --- a/src/library/scala/collection/parallel/ParSeqViewLike.scala +++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala @@ -147,7 +147,7 @@ self => override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = newMapped(f).asInstanceOf[That] override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That] override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That = - newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That] + newZipped(ParRange(0, splitter.remaining, 1, inclusive = false)).asInstanceOf[That] override def reverse: This = newReversed.asInstanceOf[This] override def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = reverse.map(f) diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala index 61a50a124d..60f4709a8c 100644 --- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala @@ -136,7 +136,7 @@ extends TrieMapIterator[K, V](lev, ct, mustInit) } def dup = { - val it = newIterator(0, ct, false) + val it = newIterator(0, ct, _mustInit = false) dupTo(it) it.iterated = this.iterated it diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala index 3a86f6dc3c..5d89e45001 100644 --- a/src/library/scala/sys/process/ProcessBuilder.scala +++ b/src/library/scala/sys/process/ProcessBuilder.scala @@ -305,10 +305,10 @@ object ProcessBuilder extends ProcessBuilderImpl { protected def toSource: ProcessBuilder /** Writes the output stream of this process to the given file. */ - def #> (f: File): ProcessBuilder = toFile(f, false) + def #> (f: File): ProcessBuilder = toFile(f, append = false) /** Appends the output stream of this process to the given file. */ - def #>> (f: File): ProcessBuilder = toFile(f, true) + def #>> (f: File): ProcessBuilder = toFile(f, append = true) /** Writes the output stream of this process to the given OutputStream. 
The * argument is call-by-name, so the stream is recreated, written, and closed each diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala index 49fea6f464..91e267d5e4 100644 --- a/src/library/scala/sys/process/ProcessBuilderImpl.scala +++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala @@ -69,7 +69,7 @@ private[process] trait ProcessBuilderImpl { import io._ // spawn threads that process the input, output, and error streams using the functions defined in `io` - val inThread = Spawn(writeInput(process.getOutputStream), true) + val inThread = Spawn(writeInput(process.getOutputStream), daemon = true) val outThread = Spawn(processOutput(process.getInputStream), daemonizeThreads) val errorThread = if (p.redirectErrorStream) Nil @@ -93,26 +93,26 @@ private[process] trait ProcessBuilderImpl { def #&&(other: ProcessBuilder): ProcessBuilder = new AndBuilder(this, other) def ###(other: ProcessBuilder): ProcessBuilder = new SequenceBuilder(this, other) - def run(): Process = run(false) + def run(): Process = run(connectInput = false) def run(connectInput: Boolean): Process = run(BasicIO.standard(connectInput)) - def run(log: ProcessLogger): Process = run(log, false) + def run(log: ProcessLogger): Process = run(log, connectInput = false) def run(log: ProcessLogger, connectInput: Boolean): Process = run(BasicIO(connectInput, log)) - def !! = slurp(None, false) - def !!(log: ProcessLogger) = slurp(Some(log), false) - def !!< = slurp(None, true) - def !!<(log: ProcessLogger) = slurp(Some(log), true) + def !! = slurp(None, withIn = false) + def !!(log: ProcessLogger) = slurp(Some(log), withIn = false) + def !!< = slurp(None, withIn = true) + def !!<(log: ProcessLogger) = slurp(Some(log), withIn = true) - def lines: Stream[String] = lines(false, true, None) - def lines(log: ProcessLogger): Stream[String] = lines(false, true, Some(log)) - def lines_! : Stream[String] = lines(false, false, None) - def lines_!(log: ProcessLogger): Stream[String] = lines(false, false, Some(log)) + def lines: Stream[String] = lines(withInput = false, nonZeroException = true, None) + def lines(log: ProcessLogger): Stream[String] = lines(withInput = false, nonZeroException = true, Some(log)) + def lines_! : Stream[String] = lines(withInput = false, nonZeroException = false, None) + def lines_!(log: ProcessLogger): Stream[String] = lines(withInput = false, nonZeroException = false, Some(log)) - def ! = run(false).exitValue() + def ! = run(connectInput = false).exitValue() def !(io: ProcessIO) = run(io).exitValue() - def !(log: ProcessLogger) = runBuffered(log, false) - def !< = run(true).exitValue() - def !<(log: ProcessLogger) = runBuffered(log, true) + def !(log: ProcessLogger) = runBuffered(log, connectInput = false) + def !< = run(connectInput = true).exitValue() + def !<(log: ProcessLogger) = runBuffered(log, connectInput = true) /** Constructs a new builder which runs this command with all input/output threads marked * as daemon threads. This allows the creation of a long running process while still diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala index bfd3551a65..c64ba246fc 100644 --- a/src/library/scala/sys/process/ProcessImpl.scala +++ b/src/library/scala/sys/process/ProcessImpl.scala @@ -17,7 +17,7 @@ private[process] trait ProcessImpl { /** Runs provided code in a new Thread and returns the Thread instance. 
*/ private[process] object Spawn { - def apply(f: => Unit): Thread = apply(f, false) + def apply(f: => Unit): Thread = apply(f, daemon = false) def apply(f: => Unit, daemon: Boolean): Thread = { val thread = new Thread() { override def run() = { f } } thread.setDaemon(daemon) diff --git a/src/library/scala/xml/Attribute.scala b/src/library/scala/xml/Attribute.scala index 0224913cf6..234281163d 100644 --- a/src/library/scala/xml/Attribute.scala +++ b/src/library/scala/xml/Attribute.scala @@ -94,7 +94,7 @@ abstract trait Attribute extends MetaData { sb append key append '=' val sb2 = new StringBuilder() - Utility.sequenceToXML(value, TopScope, sb2, true) + Utility.sequenceToXML(value, TopScope, sb2, stripComments = true) Utility.appendQuoted(sb2.toString, sb) } } diff --git a/src/library/scala/xml/Equality.scala b/src/library/scala/xml/Equality.scala index 02db22a78a..20f2405967 100644 --- a/src/library/scala/xml/Equality.scala +++ b/src/library/scala/xml/Equality.scala @@ -86,8 +86,8 @@ trait Equality extends scala.Equals { * to maintain a semblance of order. */ override def hashCode() = basisForHashCode.## - override def equals(other: Any) = doComparison(other, false) - final def xml_==(other: Any) = doComparison(other, true) + override def equals(other: Any) = doComparison(other, blithe = false) + final def xml_==(other: Any) = doComparison(other, blithe = true) final def xml_!=(other: Any) = !xml_==(other) /** The "blithe" parameter expresses the caller's unconcerned attitude diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala index dcd4c15969..7b1a97e8f2 100755 --- a/src/library/scala/xml/Node.scala +++ b/src/library/scala/xml/Node.scala @@ -163,7 +163,7 @@ abstract class Node extends NodeSeq { /** * Same as `toString('''false''')`. */ - override def toString(): String = buildString(false) + override def toString(): String = buildString(stripComments = false) /** * Appends qualified name of this node to `StringBuilder`. 
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala index 98807a40a4..720fe79b1d 100755 --- a/src/library/scala/xml/PrettyPrinter.scala +++ b/src/library/scala/xml/PrettyPrinter.scala @@ -147,7 +147,7 @@ class PrettyPrinter(width: Int, step: Int) { case _ => val test = { val sb = new StringBuilder() - Utility.serialize(node, pscope, sb, false) + Utility.serialize(node, pscope, sb, stripComments = false) if (doPreserve(node)) sb.toString else TextBuffer.fromString(sb.toString).toText(0).data } diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala index e73e209daa..ad74acb77e 100644 --- a/src/library/scala/xml/dtd/ElementValidator.scala +++ b/src/library/scala/xml/dtd/ElementValidator.scala @@ -99,21 +99,21 @@ class ElementValidator() extends Function1[Node,Boolean] { */ def check(nodes: Seq[Node]): Boolean = contentModel match { case ANY => true - case EMPTY => getIterable(nodes, false).isEmpty - case PCDATA => getIterable(nodes, true).isEmpty + case EMPTY => getIterable(nodes, skipPCDATA = false).isEmpty + case PCDATA => getIterable(nodes, skipPCDATA = true).isEmpty case MIXED(ContentModel.Alt(branches @ _*)) => // @todo val j = exc.length def find(Key: String): Boolean = branches exists { case ContentModel.Letter(ElemName(Key)) => true ; case _ => false } - getIterable(nodes, true) map (_.name) filterNot find foreach { + getIterable(nodes, skipPCDATA = true) map (_.name) filterNot find foreach { exc ::= MakeValidationException fromUndefinedElement _ } (exc.length == j) // - true if no new exception case _: ELEMENTS => dfa isFinal { - getIterable(nodes, false).foldLeft(0) { (q, e) => + getIterable(nodes, skipPCDATA = false).foldLeft(0) { (q, e) => (dfa delta q).getOrElse(e, throw ValidationException("element %s not allowed here" format e)) } } diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala index 8129165b1b..d289414c26 100755 --- a/src/library/scala/xml/parsing/MarkupParser.scala +++ b/src/library/scala/xml/parsing/MarkupParser.scala @@ -199,11 +199,11 @@ trait MarkupParser extends MarkupParserCommon with TokenTests * // this is a bit more lenient than necessary... * }}} */ def prolog(): (Option[String], Option[String], Option[Boolean]) = - prologOrTextDecl(true) + prologOrTextDecl(isProlog = true) /** prolog, but without standalone */ def textDecl(): (Option[String], Option[String]) = - prologOrTextDecl(false) match { case (x1, x2, _) => (x1, x2) } + prologOrTextDecl(isProlog = false) match { case (x1, x2, _) => (x1, x2) } /** {{{ * [22] prolog ::= XMLDecl? Misc* (doctypedecl Misc*)? 
@@ -799,12 +799,12 @@ trait MarkupParser extends MarkupParserCommon with TokenTests val defdecl: DefaultDecl = ch match { case '\'' | '"' => - DEFAULT(false, xAttributeValue()) + DEFAULT(fixed = false, xAttributeValue()) case '#' => nextch() xName match { - case "FIXED" => xSpace() ; DEFAULT(true, xAttributeValue()) + case "FIXED" => xSpace() ; DEFAULT(fixed = true, xAttributeValue()) case "IMPLIED" => IMPLIED case "REQUIRED" => REQUIRED } diff --git a/src/library/scala/xml/persistent/CachedFileStorage.scala b/src/library/scala/xml/persistent/CachedFileStorage.scala index c0fad30da6..fc510b5f18 100644 --- a/src/library/scala/xml/persistent/CachedFileStorage.scala +++ b/src/library/scala/xml/persistent/CachedFileStorage.scala @@ -76,7 +76,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo log("[load]\nloading "+theFile) val src = Source.fromFile(theFile) log("parsing "+theFile) - val res = ConstructingParser.fromSource(src, false).document().docElem(0) + val res = ConstructingParser.fromSource(src,preserveWS = false).document.docElem(0) switch() log("[load done]") res.child.iterator @@ -94,7 +94,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo // @todo: optimize val storageNode = { nodes.toList } val w = Channels.newWriter(c, "utf-8") - XML.write(w, storageNode, "utf-8", true, null) + XML.write(w, storageNode, "utf-8", xmlDecl = true, doctype = null) log("writing to "+theFile) -- cgit v1.2.3 From e3b36c71b8cd5ef02390651ef64892fcd6606b77 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 4 Mar 2013 13:35:15 +0100 Subject: Carve up Types.scala Step one of a plan to bring some order and thread safety to this neck of the woods. More info: https://gist.github.com/retronym/5081754 https://groups.google.com/forum/?fromgroups=#!topic/scala-internals/MOvmcnbyb_g Note that the sub package is named 'tpe' and not 'Types' to avoid potential problems on case insensitive file systems, given the existing trait 'Types.' --- src/reflect/scala/reflect/internal/Types.scala | 2869 +------------------- .../scala/reflect/internal/tpe/CommonOwners.scala | 50 + .../scala/reflect/internal/tpe/GlbLubs.scala | 592 ++++ .../scala/reflect/internal/tpe/TypeComparers.scala | 617 +++++ .../reflect/internal/tpe/TypeConstraints.scala | 282 ++ .../scala/reflect/internal/tpe/TypeMaps.scala | 1144 ++++++++ .../scala/reflect/internal/tpe/TypeToStrings.scala | 29 + 7 files changed, 2833 insertions(+), 2750 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/tpe/CommonOwners.scala create mode 100644 src/reflect/scala/reflect/internal/tpe/GlbLubs.scala create mode 100644 src/reflect/scala/reflect/internal/tpe/TypeComparers.scala create mode 100644 src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala create mode 100644 src/reflect/scala/reflect/internal/tpe/TypeMaps.scala create mode 100644 src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala (limited to 'src') diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 365e9a1682..a14938eeb5 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -14,7 +14,6 @@ import Flags._ import scala.util.control.ControlThrowable import scala.annotation.tailrec import util.Statistics -import scala.runtime.ObjectRef import util.ThreeValues._ import Variance._ @@ -73,28 +72,33 @@ import Variance._ // only used during erasure of derived value classes. 
*/ -trait Types extends api.Types { self: SymbolTable => +trait Types + extends api.Types + with tpe.TypeComparers + with tpe.TypeToStrings + with tpe.CommonOwners + with tpe.GlbLubs + with tpe.TypeMaps + with tpe.TypeConstraints { self: SymbolTable => + import definitions._ import TypesStats._ private var explainSwitch = false private final val emptySymbolSet = immutable.Set.empty[Symbol] - private final val LogPendingSubTypesThreshold = 50 - private final val LogPendingBaseTypesThreshold = 50 - private final val LogVolatileThreshold = 50 + protected[internal] final val DefaultLogThreshhold = 50 + private final val LogPendingBaseTypesThreshold = DefaultLogThreshhold + private final val LogVolatileThreshold = DefaultLogThreshhold /** A don't care value for the depth parameter in lubs/glbs and related operations. */ - private final val AnyDepth = -3 + protected[internal] final val AnyDepth = -3 /** Decrement depth unless it is a don't care. */ - private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1 + protected[internal] final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1 - private final val printLubs = sys.props contains "scalac.debug.lub" private final val traceTypeVars = sys.props contains "scalac.debug.tvar" private final val breakCycles = settings.breakCycles.value - /** In case anyone wants to turn off lub verification without reverting anything. */ - private final val verifyLubs = true /** In case anyone wants to turn off type parameter bounds being used * to seed type constraints. */ @@ -107,80 +111,6 @@ trait Types extends api.Types { self: SymbolTable => */ var skolemizationLevel = 0 - /** A log of type variable with their original constraints. Used in order - * to undo constraints in the case of isSubType/isSameType failure. - */ - lazy val undoLog = newUndoLog - - protected def newUndoLog = new UndoLog - - class UndoLog extends Clearable { - private type UndoPairs = List[(TypeVar, TypeConstraint)] - //OPT this method is public so we can do `manual inlining` - var log: UndoPairs = List() - - /* - * These two methods provide explicit locking mechanism that is overridden in SynchronizedUndoLog. - * - * The idea behind explicit locking mechanism is that all public methods that access mutable state - * will have to obtain the lock for their entire execution so both reads and writes can be kept in - * right order. Originally, that was achieved by overriding those public methods in - * `SynchronizedUndoLog` which was fine but expensive. The reason is that those public methods take - * thunk as argument and if we keep them non-final there's no way to make them inlined so thunks - * can go away. - * - * By using explicit locking we can achieve inlining. - * - * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting into hot - * places implementation of `undo` or `undoUnless`). This should be changed back to protected - * once inliner is fixed. - */ - def lock(): Unit = () - def unlock(): Unit = () - - // register with the auto-clearing cache manager - perRunCaches.recordCache(this) - - /** Undo all changes to constraints to type variables upto `limit`. 
*/ - //OPT this method is public so we can do `manual inlining` - def undoTo(limit: UndoPairs) { - assertCorrectThread() - while ((log ne limit) && log.nonEmpty) { - val (tv, constr) = log.head - tv.constr = constr - log = log.tail - } - } - - /** No sync necessary, because record should only - * be called from within an undo or undoUnless block, - * which is already synchronized. - */ - private[reflect] def record(tv: TypeVar) = { - log ::= ((tv, tv.constr.cloneInternal)) - } - - def clear() { - lock() - try { - if (settings.debug.value) - self.log("Clearing " + log.size + " entries from the undoLog.") - log = Nil - } finally unlock() - } - - // `block` should not affect constraints on typevars - def undo[T](block: => T): T = { - lock() - try { - val before = log - - try block - finally undoTo(before) - } finally unlock() - } - } - /** A map from lists to compound types that have the given list as parents. * This is used to avoid duplication in the computation of base type sequences and baseClasses. * It makes use of the fact that these two operations depend only on the parents, @@ -3725,72 +3655,7 @@ trait Types extends api.Types { self: SymbolTable => newExistentialType(tparams1, tpe1) } - /** Normalize any type aliases within this type (@see Type#normalize). - * Note that this depends very much on the call to "normalize", not "dealias", - * so it is no longer carries the too-stealthy name "deAlias". - */ - object normalizeAliases extends TypeMap { - def apply(tp: Type): Type = tp match { - case TypeRef(_, sym, _) if sym.isAliasType => - def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp" - mapOver(logResult(msg)(tp.normalize)) - case _ => mapOver(tp) - } - } - - /** Remove any occurrence of type from this type and its parents */ - object dropSingletonType extends TypeMap { - def apply(tp: Type): Type = { - tp match { - case TypeRef(_, SingletonClass, _) => - AnyClass.tpe - case tp1 @ RefinedType(parents, decls) => - parents filter (_.typeSymbol != SingletonClass) match { - case Nil => AnyClass.tpe - case p :: Nil if decls.isEmpty => mapOver(p) - case ps => mapOver(copyRefinedType(tp1, ps, decls)) - } - case tp1 => - mapOver(tp1) - } - } - } - - /** Type with all top-level occurrences of abstract types replaced by their bounds */ - object abstractTypesToBounds extends TypeMap { - def apply(tp: Type): Type = tp match { - case TypeRef(_, sym, _) if sym.isAliasType => apply(tp.dealias) - case TypeRef(_, sym, _) if sym.isAbstractType => apply(tp.bounds.hi) - case rtp @ RefinedType(parents, decls) => copyRefinedType(rtp, parents mapConserve this, decls) - case AnnotatedType(_, _, _) => mapOver(tp) - case _ => tp // no recursion - top level only - } - } - - // Set to true for A* => Seq[A] - // (And it will only rewrite A* in method result types.) - // This is the pre-existing behavior. - // Or false for Seq[A] => Seq[A] - // (It will rewrite A* everywhere but method parameters.) - // This is the specified behavior. - protected def etaExpandKeepsStar = false - /** Turn any T* types into Seq[T] except when - * in method parameter position. 
- */ - object dropIllegalStarTypes extends TypeMap { - def apply(tp: Type): Type = tp match { - case MethodType(params, restpe) => - // Not mapping over params - val restpe1 = apply(restpe) - if (restpe eq restpe1) tp - else MethodType(params, restpe1) - case TypeRef(_, RepeatedParamClass, arg :: Nil) => - seqType(arg) - case _ => - if (etaExpandKeepsStar) tp else mapOver(tp) - } - } // Hash consing -------------------------------------------------------------- @@ -3810,121 +3675,6 @@ trait Types extends api.Types { self: SymbolTable => // Helper Classes --------------------------------------------------------- - /** @PP: Unable to see why these apparently constant types should need vals - * in every TypeConstraint, I lifted them out. - */ - private lazy val numericLoBound = IntClass.tpe - private lazy val numericHiBound = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass) - - /** A class expressing upper and lower bounds constraints of type variables, - * as well as their instantiations. - */ - class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) { - def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType) - def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi)) - def this() = this(List(), List()) - - /* Syncnote: Type constraints are assumed to be used from only one - * thread. They are not exposed in api.Types and are used only locally - * in operations that are exposed from types. Hence, no syncing of any - * variables should be ncessesary. - */ - - /** Guard these lists against AnyClass and NothingClass appearing, - * else loBounds.isEmpty will have different results for an empty - * constraint and one with Nothing as a lower bound. [Actually - * guarding addLoBound/addHiBound somehow broke raw types so it - * only guards against being created with them.] - */ - private var lobounds = lo0 filterNot typeIsNothing - private var hibounds = hi0 filterNot typeIsAny - private var numlo = numlo0 - private var numhi = numhi0 - private var avoidWidening = avoidWidening0 - - def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds - def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds - def avoidWiden: Boolean = avoidWidening - - def addLoBound(tp: Type, isNumericBound: Boolean = false) { - // For some reason which is still a bit fuzzy, we must let Nothing through as - // a lower bound despite the fact that Nothing is always a lower bound. My current - // supposition is that the side-effecting type constraint accumulation mechanism - // depends on these subtype tests being performed to make forward progress when - // there are mutally recursive type vars. - // See pos/t6367 and pos/t6499 for the competing test cases. - val mustConsider = tp.typeSymbol match { - case NothingClass => true - case _ => !(lobounds contains tp) - } - if (mustConsider) { - if (isNumericBound && isNumericValueType(tp)) { - if (numlo == NoType || isNumericSubType(numlo, tp)) - numlo = tp - else if (!isNumericSubType(tp, numlo)) - numlo = numericLoBound - } - else lobounds ::= tp - } - } - - def checkWidening(tp: Type) { - if(tp.isStable) avoidWidening = true - else tp match { - case HasTypeMember(_, _) => avoidWidening = true - case _ => - } - } - - def addHiBound(tp: Type, isNumericBound: Boolean = false) { - // My current test case only demonstrates the need to let Nothing through as - // a lower bound, but I suspect the situation is symmetrical. 
- val mustConsider = tp.typeSymbol match { - case AnyClass => true - case _ => !(hibounds contains tp) - } - if (mustConsider) { - checkWidening(tp) - if (isNumericBound && isNumericValueType(tp)) { - if (numhi == NoType || isNumericSubType(tp, numhi)) - numhi = tp - else if (!isNumericSubType(numhi, tp)) - numhi = numericHiBound - } - else hibounds ::= tp - } - } - - def isWithinBounds(tp: Type): Boolean = - lobounds.forall(_ <:< tp) && - hibounds.forall(tp <:< _) && - (numlo == NoType || (numlo weak_<:< tp)) && - (numhi == NoType || (tp weak_<:< numhi)) - - var inst: Type = NoType // @M reduce visibility? - - def instValid = (inst ne null) && (inst ne NoType) - - def cloneInternal = { - val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi, avoidWidening) - tc.inst = inst - tc - } - - override def toString = { - val boundsStr = { - val lo = loBounds filterNot typeIsNothing - val hi = hiBounds filterNot typeIsAny - val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")")) - val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")")) - - lostr ++ histr mkString ("[", " | ", "]") - } - if (inst eq NoType) boundsStr - else boundsStr + " _= " + inst.safeToString - } - } - class TypeUnwrapper(poly: Boolean, existential: Boolean, annotated: Boolean, nullary: Boolean) extends (Type => Type) { def apply(tp: Type): Type = tp match { case AnnotatedType(_, underlying, _) if annotated => apply(underlying) @@ -3942,246 +3692,6 @@ trait Types extends api.Types { self: SymbolTable => object unwrapToStableClass extends ClassUnwrapper(existential = false) { } object unwrapWrapperTypes extends TypeUnwrapper(true, true, true, true) { } - trait AnnotationFilter extends TypeMap { - def keepAnnotation(annot: AnnotationInfo): Boolean - - override def mapOver(annot: AnnotationInfo) = - if (keepAnnotation(annot)) super.mapOver(annot) - else UnmappableAnnotation - } - - trait KeepOnlyTypeConstraints extends AnnotationFilter { - // filter keeps only type constraint annotations - def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass - } - - // todo. 
move these into scala.reflect.api - - /** A prototype for mapping a function over all possible types - */ - abstract class TypeMap(trackVariance: Boolean) extends (Type => Type) { - def this() = this(trackVariance = false) - def apply(tp: Type): Type - - private[this] var _variance: Variance = if (trackVariance) Covariant else Invariant - - def variance_=(x: Variance) = { assert(trackVariance, this) ; _variance = x } - def variance = _variance - - /** Map this function over given type */ - def mapOver(tp: Type): Type = tp match { - case tr @ TypeRef(pre, sym, args) => - val pre1 = this(pre) - val args1 = ( - if (trackVariance && args.nonEmpty && !variance.isInvariant && sym.typeParams.nonEmpty) - mapOverArgs(args, sym.typeParams) - else - args mapConserve this - ) - if ((pre1 eq pre) && (args1 eq args)) tp - else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1) - case ThisType(_) => tp - case SingleType(pre, sym) => - if (sym.isPackageClass) tp // short path - else { - val pre1 = this(pre) - if (pre1 eq pre) tp - else singleType(pre1, sym) - } - case MethodType(params, result) => - val params1 = flipped(mapOver(params)) - val result1 = this(result) - if ((params1 eq params) && (result1 eq result)) tp - else copyMethodType(tp, params1, result1.substSym(params, params1)) - case PolyType(tparams, result) => - val tparams1 = flipped(mapOver(tparams)) - val result1 = this(result) - if ((tparams1 eq tparams) && (result1 eq result)) tp - else PolyType(tparams1, result1.substSym(tparams, tparams1)) - case NullaryMethodType(result) => - val result1 = this(result) - if (result1 eq result) tp - else NullaryMethodType(result1) - case ConstantType(_) => tp - case SuperType(thistp, supertp) => - val thistp1 = this(thistp) - val supertp1 = this(supertp) - if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp - else SuperType(thistp1, supertp1) - case TypeBounds(lo, hi) => - val lo1 = flipped(this(lo)) - val hi1 = this(hi) - if ((lo1 eq lo) && (hi1 eq hi)) tp - else TypeBounds(lo1, hi1) - case BoundedWildcardType(bounds) => - val bounds1 = this(bounds) - if (bounds1 eq bounds) tp - else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds]) - case rtp @ RefinedType(parents, decls) => - val parents1 = parents mapConserve this - val decls1 = mapOver(decls) - copyRefinedType(rtp, parents1, decls1) - case ExistentialType(tparams, result) => - val tparams1 = mapOver(tparams) - val result1 = this(result) - if ((tparams1 eq tparams) && (result1 eq result)) tp - else newExistentialType(tparams1, result1.substSym(tparams, tparams1)) - case OverloadedType(pre, alts) => - val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre) - if (pre1 eq pre) tp - else OverloadedType(pre1, alts) - case AntiPolyType(pre, args) => - val pre1 = this(pre) - val args1 = args mapConserve this - if ((pre1 eq pre) && (args1 eq args)) tp - else AntiPolyType(pre1, args1) - case tv@TypeVar(_, constr) => - if (constr.instValid) this(constr.inst) - else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty - case NotNullType(tp) => - val tp1 = this(tp) - if (tp1 eq tp) tp - else NotNullType(tp1) - case AnnotatedType(annots, atp, selfsym) => - val annots1 = mapOverAnnotations(annots) - val atp1 = this(atp) - if ((annots1 eq annots) && (atp1 eq atp)) tp - else if (annots1.isEmpty) atp1 - else AnnotatedType(annots1, atp1, selfsym) -/* - case ErrorType => tp - case WildcardType => tp - case NoType => tp - case NoPrefix => tp - case ErasedSingleType(sym) => tp -*/ - case _ => - tp - // throw 
new Error("mapOver inapplicable for " + tp); - } - - def withVariance[T](v: Variance)(body: => T): T = { - val saved = variance - variance = v - try body finally variance = saved - } - @inline final def flipped[T](body: => T): T = { - if (trackVariance) variance = variance.flip - try body - finally if (trackVariance) variance = variance.flip - } - protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] = ( - if (trackVariance) - map2Conserve(args, tparams)((arg, tparam) => withVariance(variance * tparam.variance)(this(arg))) - else - args mapConserve this - ) - /** Applies this map to the symbol's info, setting variance = Invariant - * if necessary when the symbol is an alias. - */ - private def applyToSymbolInfo(sym: Symbol): Type = { - if (trackVariance && !variance.isInvariant && sym.isAliasType) - withVariance(Invariant)(this(sym.info)) - else - this(sym.info) - } - - /** Called by mapOver to determine whether the original symbols can - * be returned, or whether they must be cloned. - */ - protected def noChangeToSymbols(origSyms: List[Symbol]): Boolean = { - @tailrec def loop(syms: List[Symbol]): Boolean = syms match { - case Nil => true - case x :: xs => (x.info eq applyToSymbolInfo(x)) && loop(xs) - } - loop(origSyms) - } - - /** Map this function over given scope */ - def mapOver(scope: Scope): Scope = { - val elems = scope.toList - val elems1 = mapOver(elems) - if (elems1 eq elems) scope - else newScopeWith(elems1: _*) - } - - /** Map this function over given list of symbols */ - def mapOver(origSyms: List[Symbol]): List[Symbol] = { - // fast path in case nothing changes due to map - if (noChangeToSymbols(origSyms)) origSyms - // map is not the identity --> do cloning properly - else cloneSymbolsAndModify(origSyms, TypeMap.this) - } - - def mapOver(annot: AnnotationInfo): AnnotationInfo = { - val AnnotationInfo(atp, args, assocs) = annot - val atp1 = mapOver(atp) - val args1 = mapOverAnnotArgs(args) - // there is no need to rewrite assocs, as they are constants - - if ((args eq args1) && (atp eq atp1)) annot - else if (args1.isEmpty && args.nonEmpty) UnmappableAnnotation // some annotation arg was unmappable - else AnnotationInfo(atp1, args1, assocs) setPos annot.pos - } - - def mapOverAnnotations(annots: List[AnnotationInfo]): List[AnnotationInfo] = { - val annots1 = annots mapConserve mapOver - if (annots1 eq annots) annots - else annots1 filterNot (_ eq UnmappableAnnotation) - } - - /** Map over a set of annotation arguments. If any - * of the arguments cannot be mapped, then return Nil. */ - def mapOverAnnotArgs(args: List[Tree]): List[Tree] = { - val args1 = args mapConserve mapOver - if (args1 contains UnmappableTree) Nil - else args1 - } - - def mapOver(tree: Tree): Tree = - mapOver(tree, () => return UnmappableTree) - - /** Map a tree that is part of an annotation argument. - * If the tree cannot be mapped, then invoke giveup(). - * The default is to transform the tree with - * TypeMapTransformer. - */ - def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = - (new TypeMapTransformer).transform(tree) - - /** This transformer leaves the tree alone except to remap - * its types. 
*/ - class TypeMapTransformer extends Transformer { - override def transform(tree: Tree) = { - val tree1 = super.transform(tree) - val tpe1 = TypeMap.this(tree1.tpe) - if ((tree eq tree1) && (tree.tpe eq tpe1)) - tree - else - tree1.shallowDuplicate.setType(tpe1) - } - } - } - - abstract class TypeTraverser extends TypeMap { - def traverse(tp: Type): Unit - def apply(tp: Type): Type = { traverse(tp); tp } - } - - abstract class TypeTraverserWithResult[T] extends TypeTraverser { - def result: T - def clear(): Unit - } - - abstract class TypeCollector[T](initial: T) extends TypeTraverser { - var result: T = _ - def collect(tp: Type) = { - result = initial - traverse(tp) - result - } - } - /** Repack existential types, otherwise they sometimes get unpacked in the * wrong location (type inference comes up with an unexpected skolem) */ @@ -4227,995 +3737,112 @@ trait Types extends api.Types { self: SymbolTable => && isRawIfWithoutArgs(sym) ) - /** The raw to existential map converts a ''raw type'' to an existential type. - * It is necessary because we might have read a raw type of a - * parameterized Java class from a class file. At the time we read the type - * the corresponding class file might still not be read, so we do not - * know what the type parameters of the type are. Therefore - * the conversion of raw types to existential types might not have taken place - * in ClassFileparser.sigToType (where it is usually done). + def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe))) + + /** + * A more persistent version of `Type#memberType` which does not require + * that the symbol is a direct member of the prefix. + * + * For instance: + * + * {{{ + * class C[T] { + * sealed trait F[A] + * object X { + * object S1 extends F[T] + * } + * class S2 extends F[T] + * } + * object O extends C[Int] { + * def foo(f: F[Int]) = f match {...} // need to enumerate sealed subtypes of the scrutinee here. + * } + * class S3 extends O.F[String] + * + * nestedMemberType(, , ) = O.X.S1.type + * nestedMemberType(, , ) = O.S2.type + * nestedMemberType(, , ) = S3.type + * }}} + * + * @param sym The symbol of the subtype + * @param pre The prefix from which the symbol is seen + * @param owner */ - def rawToExistential = new TypeMap { - private var expanded = immutable.Set[Symbol]() - def apply(tp: Type): Type = tp match { - case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) => - if (expanded contains sym) AnyRefClass.tpe - else try { - expanded += sym - val eparams = mapOver(typeParamsToExistentials(sym)) - existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe))) - } finally { - expanded -= sym + def nestedMemberType(sym: Symbol, pre: Type, owner: Symbol): Type = { + def loop(tp: Type): Type = + if (tp.isTrivial) tp + else if (tp.prefix.typeSymbol isNonBottomSubClass owner) { + val widened = tp match { + case _: ConstantType => tp // Java enum constants: don't widen to the enum type! + case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect. 
} - case _ => - mapOver(tp) - } - } - /*** - *@M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible - object rawToExistentialInJava extends TypeMap { - def apply(tp: Type): Type = tp match { - // any symbol that occurs in a java sig, not just java symbols - // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14 - case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty => - val eparams = typeParamsToExistentials(sym, sym.typeParams) - existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe))) - case _ => - mapOver(tp) + widened asSeenFrom (pre, tp.typeSymbol.owner) } - } - */ + else loop(tp.prefix) memberType tp.typeSymbol - /** Used by existentialAbstraction. - */ - class ExistentialExtrapolation(tparams: List[Symbol]) extends TypeMap(trackVariance = true) { - private val occurCount = mutable.HashMap[Symbol, Int]() - private def countOccs(tp: Type) = { - tp foreach { - case TypeRef(_, sym, _) => - if (tparams contains sym) - occurCount(sym) += 1 - case _ => () - } - } - def extrapolate(tpe: Type): Type = { - tparams foreach (t => occurCount(t) = 0) - countOccs(tpe) - for (tparam <- tparams) - countOccs(tparam.info) + val result = loop(sym.tpeHK) + assert(sym.isTerm || result.typeSymbol == sym, s"($result).typeSymbol = ${result.typeSymbol}; expected ${sym}") + result + } - apply(tpe) - } + class MissingAliasControl extends ControlThrowable + val missingAliasException = new MissingAliasControl + class MissingTypeControl extends ControlThrowable - /** If these conditions all hold: - * 1) we are in covariant (or contravariant) position - * 2) this type occurs exactly once in the existential scope - * 3) the widened upper (or lower) bound of this type contains no references to tparams - * Then we replace this lone occurrence of the type with the widened upper (or lower) bound. - * All other types pass through unchanged. - */ - def apply(tp: Type): Type = { - val tp1 = mapOver(tp) - if (variance.isInvariant) tp1 - else tp1 match { - case TypeRef(pre, sym, args) if tparams contains sym => - val repl = if (variance.isPositive) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo - val count = occurCount(sym) - val containsTypeParam = tparams exists (repl contains _) - def msg = { - val word = if (variance.isPositive) "upper" else "lower" - s"Widened lone occurrence of $tp1 inside existential to $word bound" - } - if (!repl.typeSymbol.isBottomClass && count == 1 && !containsTypeParam) - logResult(msg)(repl) - else - tp1 - case _ => - tp1 - } - } - override def mapOver(tp: Type): Type = tp match { - case SingleType(pre, sym) => - if (sym.isPackageClass) tp // short path - else { - val pre1 = this(pre) - if ((pre1 eq pre) || !pre1.isStable) tp - else singleType(pre1, sym) - } - case _ => super.mapOver(tp) - } +// Helper Methods ------------------------------------------------------------- - // Do not discard the types of existential ident's. The - // symbol of the Ident itself cannot be listed in the - // existential's parameters, so the resulting existential - // type would be ill-formed. - override def mapOver(tree: Tree) = tree match { - case Ident(_) if tree.tpe.isStable => tree - case _ => super.mapOver(tree) - } + /** The maximum allowable depth of lubs or glbs over types `ts`. 
+ */ + def lubDepth(ts: List[Type]): Int = { + val td = typeDepth(ts) + val bd = baseTypeSeqDepth(ts) + lubDepthAdjust(td, td max bd) } - def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe))) - - /** Might the given symbol be important when calculating the prefix - * of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`, - * the result will be `tp` unchanged if `pre` is trivial and `clazz` - * is a symbol such that isPossiblePrefix(clazz) == false. + /** The maximum allowable depth of lubs or glbs over given types, + * as a function over the maximum depth `td` of these types, and + * the maximum depth `bd` of all types in the base type sequences of these types. */ - def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass - - private def skipPrefixOf(pre: Type, clazz: Symbol) = ( - (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz) - ) + private def lubDepthAdjust(td: Int, bd: Int): Int = + if (settings.XfullLubs.value) bd + else if (bd <= 3) bd + else if (bd <= 5) td max (bd - 1) + else if (bd <= 7) td max (bd - 2) + else (td - 1) max (bd - 3) - def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap = - new AsSeenFromMap(pre, clazz) + private def symTypeDepth(syms: List[Symbol]): Int = typeDepth(syms map (_.info)) + private def typeDepth(tps: List[Type]): Int = maxDepth(tps) + private def baseTypeSeqDepth(tps: List[Type]): Int = maxBaseTypeSeqDepth(tps) - /** A map to compute the asSeenFrom method. + /** Is intersection of given types populated? That is, + * for all types tp1, tp2 in intersection + * for all common base classes bc of tp1 and tp2 + * let bt1, bt2 be the base types of tp1, tp2 relative to class bc + * Then: + * bt1 and bt2 have the same prefix, and + * any corresponding non-variant type arguments of bt1 and bt2 are the same */ - class AsSeenFromMap(seenFromPrefix: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints { - // Some example source constructs relevant in asSeenFrom: - // - // object CaptureThis { - // trait X[A] { def f: this.type = this } - // class Y[A] { def f: this.type = this } - // // Created new existential to represent This(CaptureThis.X) seen from CaptureThis.X[B]: type _1.type <: CaptureThis.X[B] with Singleton - // def f1[B] = new X[B] { } - // // TODO - why is the behavior different when it's a class? - // def f2[B] = new Y[B] { } - // } - // class CaptureVal[T] { - // val f: java.util.List[_ <: T] = null - // // Captured existential skolem for type _$1 seen from CaptureVal.this.f.type: type _$1 - // def g = f get 0 - // } - // class ClassParam[T] { - // // AsSeenFromMap(Inner.this.type, class Inner)/classParameterAsSeen(T)#loop(ClassParam.this.type, class ClassParam) - // class Inner(lhs: T) { def f = lhs } - // } - def capturedParams: List[Symbol] = _capturedParams - def capturedSkolems: List[Symbol] = _capturedSkolems - - def apply(tp: Type): Type = tp match { - case tp @ ThisType(_) => thisTypeAsSeen(tp) - case tp @ SingleType(_, sym) => if (sym.isPackageClass) tp else singleTypeAsSeen(tp) - case tp @ TypeRef(_, sym, _) if isTypeParamOfEnclosingClass(sym) => classParameterAsSeen(tp) - case _ => mapOver(tp) - } - - private var _capturedSkolems: List[Symbol] = Nil - private var _capturedParams: List[Symbol] = Nil - private val isStablePrefix = seenFromPrefix.isStable - - // isBaseClassOfEnclosingClassOrInfoIsNotYetComplete would be a more accurate - // but less succinct name. 
- private def isBaseClassOfEnclosingClass(base: Symbol) = { - def loop(encl: Symbol): Boolean = ( - isPossiblePrefix(encl) - && ((encl isSubClass base) || loop(encl.owner.enclClass)) - ) - // The hasCompleteInfo guard is necessary to avoid cycles during the typing - // of certain classes, notably ones defined inside package objects. - !base.hasCompleteInfo || loop(seenFromClass) - } - - /** Is the symbol a class type parameter from one of the enclosing - * classes, or a base class of one of them? - */ - private def isTypeParamOfEnclosingClass(sym: Symbol): Boolean = ( - sym.isTypeParameter - && sym.owner.isClass - && isBaseClassOfEnclosingClass(sym.owner) - ) - - /** Creates an existential representing a type parameter which appears - * in the prefix of a ThisType. - */ - protected def captureThis(pre: Type, clazz: Symbol): Type = { - capturedParams find (_.owner == clazz) match { - case Some(p) => p.tpe - case _ => - val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre) - _capturedParams ::= qvar - debuglog(s"Captured This(${clazz.fullNameString}) seen from $seenFromPrefix: ${qvar.defString}") - qvar.tpe - } - } - protected def captureSkolems(skolems: List[Symbol]) { - for (p <- skolems; if !(capturedSkolems contains p)) { - debuglog(s"Captured $p seen from $seenFromPrefix") - _capturedSkolems ::= p - } - } - - /** Find the type argument in an applied type which corresponds to a type parameter. - * The arguments are required to be related as follows, through intermediary `clazz`. - * An exception will be thrown if this is violated. - * - * @param lhs its symbol is a type parameter of `clazz` - * @param rhs a type application constructed from `clazz` - */ - private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = { - val TypeRef(_, lhsSym, lhsArgs) = lhs - val TypeRef(_, rhsSym, rhsArgs) = rhs - require(lhsSym.safeOwner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym") - - // Find the type parameter position; we'll use the corresponding argument - val argIndex = rhsSym.typeParams indexOf lhsSym - - if (argIndex >= 0 && argIndex < rhsArgs.length) // @M! don't just replace the whole thing, might be followed by type application - appliedType(rhsArgs(argIndex), lhsArgs mapConserve this) - else if (rhsSym.tpe_*.parents exists typeIsErroneous) // don't be too zealous with the exceptions, see #2641 - ErrorType - else - abort(s"something is wrong: cannot make sense of type application\n $lhs\n $rhs") - } - - // 0) @pre: `classParam` is a class type parameter - // 1) Walk the owner chain of `seenFromClass` until we find the class which owns `classParam` - // 2) Take the base type of the prefix at that point with respect to the owning class - // 3) Solve for the type parameters through correspondence with the type args of the base type - // - // Only class type parameters (and not skolems) are considered, because other type parameters - // are not influenced by the prefix through which they are seen. Note that type params of - // anonymous type functions, which currently can only arise from normalising type aliases, are - // owned by the type alias of which they are the eta-expansion. - private def classParameterAsSeen(classParam: Type): Type = { - val TypeRef(_, tparam, _) = classParam - - def loop(pre: Type, clazz: Symbol): Type = { - // have to deconst because it may be a Class[T] - def nextBase = (pre baseType clazz).deconst - //@M! 
see test pos/tcpoly_return_overriding.scala why mapOver is necessary - if (skipPrefixOf(pre, clazz)) - mapOver(classParam) - else if (!matchesPrefixAndClass(pre, clazz)(tparam.owner)) - loop(nextBase.prefix, clazz.owner) - else nextBase match { - case applied @ TypeRef(_, _, _) => correspondingTypeArgument(classParam, applied) - case ExistentialType(eparams, qtpe) => captureSkolems(eparams) ; loop(qtpe, clazz) - case t => abort(s"$tparam in ${tparam.owner} cannot be instantiated from ${seenFromPrefix.widen}") - } - } - loop(seenFromPrefix, seenFromClass) - } - - // Does the candidate symbol match the given prefix and class? - // Since pre may be something like ThisType(A) where trait A { self: B => }, - // we have to test the typeSymbol of the widened type, not pre.typeSymbol, or - // B will not be considered. - private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) = pre.widen match { - case _: TypeVar => false - case wide => (clazz == candidate) && (wide.typeSymbol isSubClass clazz) - } - - // Whether the annotation tree currently being mapped over has had a This(_) node rewritten. - private[this] var wroteAnnotation = false - private object annotationArgRewriter extends TypeMapTransformer { - private def matchesThis(thiz: Symbol) = matchesPrefixAndClass(seenFromPrefix, seenFromClass)(thiz) - - // what symbol should really be used? - private def newThis(): Tree = { - wroteAnnotation = true - val presym = seenFromPrefix.widen.typeSymbol - val thisSym = presym.owner.newValue(presym.name.toTermName, presym.pos) setInfo seenFromPrefix - gen.mkAttributedQualifier(seenFromPrefix, thisSym) - } - - /** Rewrite `This` trees in annotation argument trees */ - override def transform(tree: Tree): Tree = super.transform(tree) match { - case This(_) if matchesThis(tree.symbol) => newThis() - case tree => tree - } - } - - // This becomes considerably cheaper if we optimize for the common cases: - // where the prefix is stable and where no This nodes are rewritten. If - // either is true, then we don't need to worry about calling giveup. So if - // the prefix is unstable, use a stack variable to indicate whether the tree - // was touched. This takes us to one allocation per AsSeenFromMap rather - // than an allocation on every call to mapOver, and no extra work when the - // tree only has its types remapped. - override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = { - if (isStablePrefix) - annotationArgRewriter transform tree - else { - val saved = wroteAnnotation - wroteAnnotation = false - try annotationArgRewriter transform tree - finally if (wroteAnnotation) giveup() else wroteAnnotation = saved - } - } - - private def thisTypeAsSeen(tp: ThisType): Type = { - def loop(pre: Type, clazz: Symbol): Type = { - val pre1 = pre match { - case SuperType(thistpe, _) => thistpe - case _ => pre - } - if (skipPrefixOf(pre, clazz)) - mapOver(tp) // TODO - is mapOver necessary here? 
- else if (!matchesPrefixAndClass(pre, clazz)(tp.sym)) - loop((pre baseType clazz).prefix, clazz.owner) - else if (pre1.isStable) - pre1 - else - captureThis(pre1, clazz) - } - loop(seenFromPrefix, seenFromClass) - } - - private def singleTypeAsSeen(tp: SingleType): Type = { - val SingleType(pre, sym) = tp - - val pre1 = this(pre) - if (pre1 eq pre) tp - else if (pre1.isStable) singleType(pre1, sym) - else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction - } - - override def toString = s"AsSeenFromMap($seenFromPrefix, $seenFromClass)" - } - - /** A base class to compute all substitutions */ - abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap { - assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to) - - /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */ - protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1 - - /** Map target to type, can be tuned by subclasses */ - protected def toType(fromtp: Type, tp: T): Type - - protected def renameBoundSyms(tp: Type): Type = tp match { - case MethodType(ps, restp) => - createFromClonedSymbols(ps, restp)((ps1, tp1) => copyMethodType(tp, ps1, renameBoundSyms(tp1))) - case PolyType(bs, restp) => - createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1))) - case ExistentialType(bs, restp) => - createFromClonedSymbols(bs, restp)(newExistentialType) - case _ => - tp - } - - @tailrec private def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type = ( - if (from.isEmpty) tp - // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from)) - else if (matches(from.head, sym)) toType(tp, to.head) - else subst(tp, sym, from.tail, to.tail) - ) - - def apply(tp0: Type): Type = if (from.isEmpty) tp0 else { - val boundSyms = tp0.boundSyms - val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0 - val tp = mapOver(tp1) - def substFor(sym: Symbol) = subst(tp, sym, from, to) - - tp match { - // @M - // 1) arguments must also be substituted (even when the "head" of the - // applied type has already been substituted) - // example: (subst RBound[RT] from [type RT,type RBound] to - // [type RT&,type RBound&]) = RBound&[RT&] - // 2) avoid loops (which occur because alpha-conversion is - // not performed properly imo) - // e.g. if in class Iterable[a] there is a new Iterable[(a,b)], - // we must replace the a in Iterable[a] by (a,b) - // (must not recurse --> loops) - // 3) replacing m by List in m[Int] should yield List[Int], not just List - case TypeRef(NoPrefix, sym, args) => - val tcon = substFor(sym) - if ((tp eq tcon) || args.isEmpty) tcon - else appliedType(tcon.typeConstructor, args) - case SingleType(NoPrefix, sym) => - substFor(sym) - case _ => - tp - } - } - } - - /** A map to implement the `substSym` method. 
*/ - class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) { - def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2)) - - protected def toType(fromtp: Type, sym: Symbol) = fromtp match { - case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args) - case SingleType(pre, _) => singleType(pre, sym) - } - @tailrec private def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol = ( - if (from.isEmpty) sym - // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from)) - else if (matches(from.head, sym)) to.head - else subst(sym, from.tail, to.tail) - ) - private def substFor(sym: Symbol) = subst(sym, from, to) - - override def apply(tp: Type): Type = ( - if (from.isEmpty) tp - else tp match { - case TypeRef(pre, sym, args) if pre ne NoPrefix => - val newSym = substFor(sym) - // mapOver takes care of subst'ing in args - mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) ) - // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams)) - case SingleType(pre, sym) if pre ne NoPrefix => - val newSym = substFor(sym) - mapOver( if (sym eq newSym) tp else singleType(pre, newSym) ) - case _ => - super.apply(tp) - } - ) - - object mapTreeSymbols extends TypeMapTransformer { - val strictCopy = newStrictTreeCopier - - def termMapsTo(sym: Symbol) = from indexOf sym match { - case -1 => None - case idx => Some(to(idx)) - } - - // if tree.symbol is mapped to another symbol, passes the new symbol into the - // constructor `trans` and sets the symbol and the type on the resulting tree. - def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match { - case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe - case None => tree - } - - // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified. - override def transform(tree: Tree) = { - // super.transform maps symbol references in the types of `tree`. it also copies trees where necessary. - super.transform(tree) match { - case id @ Ident(_) => - transformIfMapped(id)(toSym => - strictCopy.Ident(id, toSym.name)) - - case sel @ Select(qual, name) => - transformIfMapped(sel)(toSym => - strictCopy.Select(sel, qual, toSym.name)) - - case tree => tree - } - } - } - override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = { - mapTreeSymbols.transform(tree) - } - } - - /** A map to implement the `subst` method. */ - class SubstTypeMap(from: List[Symbol], to: List[Type]) - extends SubstMap(from, to) { - protected def toType(fromtp: Type, tp: Type) = tp - - override def mapOver(tree: Tree, giveup: () => Nothing): Tree = { - object trans extends TypeMapTransformer { - override def transform(tree: Tree) = tree match { - case Ident(name) => - from indexOf tree.symbol match { - case -1 => super.transform(tree) - case idx => - val totpe = to(idx) - if (totpe.isStable) tree.duplicate setType totpe - else giveup() - } - case _ => - super.transform(tree) - } - } - trans.transform(tree) - } - } - - /** A map to implement the `substThis` method. 
*/ - class SubstThisMap(from: Symbol, to: Type) extends TypeMap { - def apply(tp: Type): Type = tp match { - case ThisType(sym) if (sym == from) => to - case _ => mapOver(tp) - } - } - - class SubstWildcardMap(from: List[Symbol]) extends TypeMap { - def apply(tp: Type): Type = try { - tp match { - case TypeRef(_, sym, _) if from contains sym => - BoundedWildcardType(sym.info.bounds) - case _ => - mapOver(tp) - } - } catch { - case ex: MalformedType => - WildcardType - } - } - -// dependent method types - object IsDependentCollector extends TypeCollector(false) { - def traverse(tp: Type) { - if (tp.isImmediatelyDependent) result = true - else if (!result) mapOver(tp) - } - } - - object ApproximateDependentMap extends TypeMap { - def apply(tp: Type): Type = - if (tp.isImmediatelyDependent) WildcardType - else mapOver(tp) - } - - /** Note: This map is needed even for non-dependent method types, despite what the name might imply. - */ - class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints { - private val actuals = actuals0.toIndexedSeq - private val existentials = new Array[Symbol](actuals.size) - def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList - - private object StableArg { - def unapply(param: Symbol) = Arg unapply param map actuals filter (tp => - tp.isStable && (tp.typeSymbol != NothingClass) - ) - } - private object Arg { - def unapply(param: Symbol) = Some(params indexOf param) filter (_ >= 0) - } - - def apply(tp: Type): Type = mapOver(tp) match { - // unsound to replace args by unstable actual #3873 - case SingleType(NoPrefix, StableArg(arg)) => arg - // (soundly) expand type alias selections on implicit arguments, - // see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit` - case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) => - val arg = actuals(pid) - val res = typeRef(arg, sym, targs) - if (res.typeSymbolDirect.isAliasType) res.dealias else tp1 - // don't return the original `tp`, which may be different from `tp1`, - // due to dropping annotations - case tp1 => tp1 - } - - /* Return the type symbol for referencing a parameter inside the existential quantifier. - * (Only needed if the actual is unstable.) - */ - private def existentialFor(pid: Int) = { - if (existentials(pid) eq null) { - val param = params(pid) - existentials(pid) = ( - param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags) - setInfo singletonBounds(actuals(pid)) - ) - } - existentials(pid) - } - - //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon) - override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = { - // TODO: this should be simplified; in the stable case, one can - // probably just use an Ident to the tree.symbol. - // - // @PP: That leads to failure here, where stuff no longer has type - // 'String @Annot("stuff")' but 'String @Annot(x)'. - // - // def m(x: String): String @Annot(x) = x - // val stuff = m("stuff") - // - // (TODO cont.) Why an existential in the non-stable case? - // - // @PP: In the following: - // - // def m = { val x = "three" ; val y: String @Annot(x) = x; y } - // - // m is typed as 'String @Annot(x) forSome { val x: String }'. - // - // Both examples are from run/constrained-types.scala. 
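InstantiateDependentMap, shown above, replaces references to a dependent method parameter by the actual argument when that argument is stable, and falls back to an approximation otherwise. A user-level sketch of the behaviour it supports (not part of this patch; `Graph`, `connect` and the demo object are made-up names):

class Graph { class Node }

object DependentDemo extends App {
  // The result type depends on the value parameter `g`.
  def connect(g: Graph)(n: g.Node): g.Node = n

  val g1 = new Graph
  // Stable actual g1: the dependent result type is instantiated to g1.Node.
  val n1: g1.Node = connect(g1)(new g1.Node)
  println(n1)
  // With an unstable actual such as `connect(new Graph)(...)`, the argument cannot
  // be named in the result type, so it is approximated rather than substituted.
}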
- object treeTrans extends Transformer { - override def transform(tree: Tree): Tree = tree.symbol match { - case StableArg(actual) => - gen.mkAttributedQualifier(actual, tree.symbol) - case Arg(pid) => - val sym = existentialFor(pid) - Ident(sym) copyAttrs tree setType typeRef(NoPrefix, sym, Nil) - case _ => - super.transform(tree) - } - } - treeTrans transform arg - } - } - - /** A map to convert every occurrence of a wildcard type to a fresh - * type variable */ - object wildcardToTypeVarMap extends TypeMap { - def apply(tp: Type): Type = tp match { - case WildcardType => - TypeVar(tp, new TypeConstraint) - case BoundedWildcardType(bounds) => - TypeVar(tp, new TypeConstraint(bounds)) - case _ => - mapOver(tp) - } - } - - /** A map to convert every occurrence of a type variable to a wildcard type. */ - object typeVarToOriginMap extends TypeMap { - def apply(tp: Type): Type = tp match { - case TypeVar(origin, _) => origin - case _ => mapOver(tp) - } - } - - /** A map to implement the `contains` method. */ - class ContainsCollector(sym: Symbol) extends TypeCollector(false) { - def traverse(tp: Type) { - if (!result) { - tp.normalize match { - case TypeRef(_, sym1, _) if (sym == sym1) => result = true - case SingleType(_, sym1) if (sym == sym1) => result = true - case _ => mapOver(tp) - } - } - } - - override def mapOver(arg: Tree) = { - for (t <- arg) { - traverse(t.tpe) - if (t.symbol == sym) - result = true - } - arg - } - } - - /** A map to implement the `contains` method. */ - class ContainsTypeCollector(t: Type) extends TypeCollector(false) { - def traverse(tp: Type) { - if (!result) { - if (tp eq t) result = true - else mapOver(tp) - } - } - override def mapOver(arg: Tree) = { - for (t <- arg) - traverse(t.tpe) - - arg - } - } - - /** A map to implement the `filter` method. */ - class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) { - override def collect(tp: Type) = super.collect(tp).reverse - - def traverse(tp: Type) { - if (p(tp)) result ::= tp - mapOver(tp) - } - } - - /** A map to implement the `collect` method. */ - class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) { - override def collect(tp: Type) = super.collect(tp).reverse - - def traverse(tp: Type) { - if (pf.isDefinedAt(tp)) result ::= pf(tp) - mapOver(tp) - } - } - - class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser { - def traverse(tp: Type) { - f(tp) - mapOver(tp) - } - } - - /** A map to implement the `filter` method. */ - class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) { - def traverse(tp: Type) { - if (result.isEmpty) { - if (p(tp)) result = Some(tp) - mapOver(tp) - } - } - } - - /** A map to implement the `contains` method. */ - object ErroneousCollector extends TypeCollector(false) { - def traverse(tp: Type) { - if (!result) { - result = tp.isError - mapOver(tp) - } - } - } - - /** - * A more persistent version of `Type#memberType` which does not require - * that the symbol is a direct member of the prefix. - * - * For instance: - * - * {{{ - * class C[T] { - * sealed trait F[A] - * object X { - * object S1 extends F[T] - * } - * class S2 extends F[T] - * } - * object O extends C[Int] { - * def foo(f: F[Int]) = f match {...} // need to enumerate sealed subtypes of the scrutinee here. 
- * } - * class S3 extends O.F[String] - * - * nestedMemberType(, , ) = O.X.S1.type - * nestedMemberType(, , ) = O.S2.type - * nestedMemberType(, , ) = S3.type - * }}} - * - * @param sym The symbol of the subtype - * @param pre The prefix from which the symbol is seen - * @param owner - */ - def nestedMemberType(sym: Symbol, pre: Type, owner: Symbol): Type = { - def loop(tp: Type): Type = - if (tp.isTrivial) tp - else if (tp.prefix.typeSymbol isNonBottomSubClass owner) { - val widened = tp match { - case _: ConstantType => tp // Java enum constants: don't widen to the enum type! - case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect. - } - widened asSeenFrom (pre, tp.typeSymbol.owner) - } - else loop(tp.prefix) memberType tp.typeSymbol - - val result = loop(sym.tpeHK) - assert(sym.isTerm || result.typeSymbol == sym, s"($result).typeSymbol = ${result.typeSymbol}; expected ${sym}") - result - } - - /** The most deeply nested owner that contains all the symbols - * of thistype or prefixless typerefs/singletype occurrences in given type. - */ - private def commonOwner(t: Type): Symbol = commonOwner(t :: Nil) - - /** The most deeply nested owner that contains all the symbols - * of thistype or prefixless typerefs/singletype occurrences in given list - * of types. - */ - private def commonOwner(tps: List[Type]): Symbol = { - if (tps.isEmpty) NoSymbol - else { - commonOwnerMap.clear() - tps foreach (commonOwnerMap traverse _) - if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol - } - } - - protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj - - protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] { - var result: Symbol = _ - - def clear() { result = null } - - private def register(sym: Symbol) { - // First considered type is the trivial result. 
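The `nestedMemberType` example in the doc comment above corresponds to the following user-level scenario, where each case of a sealed family nested in a generic class must be viewed through the concrete prefix `O`. This is an illustrative sketch, not part of the patch; the demo object and `describe` are made-up names:

class C[T] {
  sealed trait F[A]
  object X { case object S1 extends F[T] }
  class S2 extends F[T]
}
object O extends C[Int]

object NestedMemberDemo extends App {
  // Each case type is seen through the prefix O, i.e. as a subtype of O.F[Int].
  def describe(f: O.F[Int]): String = f match {
    case O.X.S1  => "O.X.S1"
    case _: O.S2 => "O.S2"
  }
  println(describe(new O.S2))
}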
- if ((result eq null) || (sym eq NoSymbol)) - result = sym - else - while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result)) - result = result.owner - } - def traverse(tp: Type) = tp.normalize match { - case ThisType(sym) => register(sym) - case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse - case SingleType(NoPrefix, sym) => register(sym.owner) - case _ => mapOver(tp) - } - } - - private lazy val commonOwnerMapObj = new CommonOwnerMap - - class MissingAliasControl extends ControlThrowable - val missingAliasException = new MissingAliasControl - class MissingTypeControl extends ControlThrowable - - object adaptToNewRunMap extends TypeMap { - - private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = { - if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass) - sym - else if (sym.isModuleClass) { - val sourceModule1 = adaptToNewRun(pre, sym.sourceModule) - - sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse { - val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s" - debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass)) - sym - } - } - else { - var rebind0 = pre.findMember(sym.name, BRIDGE, 0, stableOnly = true) orElse { - if (sym.isAliasType) throw missingAliasException - devWarning(s"$pre.$sym no longer exist at phase $phase") - throw new MissingTypeControl // For build manager and presentation compiler purposes - } - /** The two symbols have the same fully qualified name */ - def corresponds(sym1: Symbol, sym2: Symbol): Boolean = - sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner)) - if (!corresponds(sym.owner, rebind0.owner)) { - debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString) - val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner)) - if (bcs.isEmpty) - assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinementclass it might be a structural type => OK to leave it in. 
- else - rebind0 = pre.baseType(bcs.head).member(sym.name) - debuglog( - "ADAPT2 pre = " + pre + - ", bcs.head = " + bcs.head + - ", sym = " + sym.fullLocationString + - ", rebind = " + rebind0.fullLocationString - ) - } - rebind0.suchThat(sym => sym.isType || sym.isStable) orElse { - debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType) - throw new MalformedType(pre, sym.nameString) - } - } - } - def apply(tp: Type): Type = tp match { - case ThisType(sym) => - try { - val sym1 = adaptToNewRun(sym.owner.thisType, sym) - if (sym1 == sym) tp else ThisType(sym1) - } catch { - case ex: MissingTypeControl => - tp - } - case SingleType(pre, sym) => - if (sym.isPackage) tp - else { - val pre1 = this(pre) - try { - val sym1 = adaptToNewRun(pre1, sym) - if ((pre1 eq pre) && (sym1 eq sym)) tp - else singleType(pre1, sym1) - } catch { - case _: MissingTypeControl => - tp - } - } - case TypeRef(pre, sym, args) => - if (sym.isPackageClass) tp - else { - val pre1 = this(pre) - val args1 = args mapConserve (this) - try { - val sym1 = adaptToNewRun(pre1, sym) - if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) { - tp - } else if (sym1 == NoSymbol) { - devWarning(s"adapt to new run failed: pre=$pre pre1=$pre1 sym=$sym") - tp - } else { - copyTypeRef(tp, pre1, sym1, args1) - } - } catch { - case ex: MissingAliasControl => - apply(tp.dealias) - case _: MissingTypeControl => - tp - } - } - case MethodType(params, restp) => - val restp1 = this(restp) - if (restp1 eq restp) tp - else copyMethodType(tp, params, restp1) - case NullaryMethodType(restp) => - val restp1 = this(restp) - if (restp1 eq restp) tp - else NullaryMethodType(restp1) - case PolyType(tparams, restp) => - val restp1 = this(restp) - if (restp1 eq restp) tp - else PolyType(tparams, restp1) - - // Lukas: we need to check (together) whether we should also include parameter types - // of PolyType and MethodType in adaptToNewRun - - case ClassInfoType(parents, decls, clazz) => - if (clazz.isPackageClass) tp - else { - val parents1 = parents mapConserve (this) - if (parents1 eq parents) tp - else ClassInfoType(parents1, decls, clazz) - } - case RefinedType(parents, decls) => - val parents1 = parents mapConserve (this) - if (parents1 eq parents) tp - else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos) - case SuperType(_, _) => mapOver(tp) - case TypeBounds(_, _) => mapOver(tp) - case TypeVar(_, _) => mapOver(tp) - case AnnotatedType(_,_,_) => mapOver(tp) - case NotNullType(_) => mapOver(tp) - case ExistentialType(_, _) => mapOver(tp) - case _ => tp - } - } - - class SubTypePair(val tp1: Type, val tp2: Type) { - override def hashCode = tp1.hashCode * 41 + tp2.hashCode - override def equals(other: Any) = (this eq other.asInstanceOf[AnyRef]) || (other match { - // suspend TypeVars in types compared by =:=, - // since we don't want to mutate them simply to check whether a subtype test is pending - // in addition to making subtyping "more correct" for type vars, - // it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion) - // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold) - case stp: SubTypePair => - val tvars = List(tp1, stp.tp1, tp2, stp.tp2) flatMap (t => if (t.isGround) Nil else typeVarsInType(t)) - suspendingTypeVars(tvars)(tp1 =:= stp.tp1 && tp2 =:= stp.tp2) - case _ => - false - }) - override def toString = tp1+" <: - 
assert(sym1 == sym2, (sym1, sym2)) - ( pre1 =:= pre2 - && forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) => - // if left-hand argument is a typevar, make it compatible with variance - // this is for more precise pattern matching - // todo: work this in the spec of this method - // also: think what happens if there are embedded typevars? - if (tparam.variance.isInvariant) - arg1 =:= arg2 - else !arg1.isInstanceOf[TypeVar] || { - if (tparam.variance.isContravariant) arg1 <:< arg2 - else arg2 <:< arg1 - } - } - ) - case (et: ExistentialType, _) => - et.withTypeVars(isConsistent(_, tp2)) - case (_, et: ExistentialType) => - et.withTypeVars(isConsistent(tp1, _)) + def isPopulated(tp1: Type, tp2: Type): Boolean = { + def isConsistent(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match { + case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) => + assert(sym1 == sym2, (sym1, sym2)) + ( pre1 =:= pre2 + && forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) => + // if left-hand argument is a typevar, make it compatible with variance + // this is for more precise pattern matching + // todo: work this in the spec of this method + // also: think what happens if there are embedded typevars? + if (tparam.variance.isInvariant) + arg1 =:= arg2 + else !arg1.isInstanceOf[TypeVar] || { + if (tparam.variance.isContravariant) arg1 <:< arg2 + else arg2 <:< arg1 + } + } + ) + case (et: ExistentialType, _) => + et.withTypeVars(isConsistent(_, tp2)) + case (_, et: ExistentialType) => + et.withTypeVars(isConsistent(tp1, _)) } def check(tp1: Type, tp2: Type) = ( @@ -5266,279 +3893,21 @@ trait Types extends api.Types { self: SymbolTable => } } - private var subsametypeRecursions: Int = 0 - - private def isUnifiable(pre1: Type, pre2: Type) = - (beginsWithTypeVarOrIsRefined(pre1) || beginsWithTypeVarOrIsRefined(pre2)) && (pre1 =:= pre2) - - /** Returns true iff we are past phase specialize, - * sym1 and sym2 are two existential skolems with equal names and bounds, - * and pre1 and pre2 are equal prefixes - */ - private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = { - sym1.isExistentialSkolem && sym2.isExistentialSkolem && - sym1.name == sym2.name && - phase.specialized && - sym1.info =:= sym2.info && - pre1 =:= pre2 - } - - private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) = - if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) { - if (settings.debug.value) println(s"new isSubPre $sym: $pre1 <:< $pre2") - true - } else - false - - private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean = - if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2 - else (sym1.name == sym2.name) && isUnifiable(pre1, pre2) - - /** Do `tp1` and `tp2` denote equivalent types? 
*/ - def isSameType(tp1: Type, tp2: Type): Boolean = try { - if (Statistics.canEnable) Statistics.incCounter(sametypeCount) - subsametypeRecursions += 1 - //OPT cutdown on Function0 allocation - //was: -// undoLog undoUnless { -// isSameType1(tp1, tp2) -// } - - undoLog.lock() - try { - val before = undoLog.log - var result = false - try { - result = isSameType1(tp1, tp2) - } - finally if (!result) undoLog.undoTo(before) - result - } - finally undoLog.unlock() - } - finally { - subsametypeRecursions -= 1 - // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866) - // it doesn't help to keep separate recursion counts for the three methods that now share it - // if (subsametypeRecursions == 0) undoLog.clear() - } - - def isDifferentType(tp1: Type, tp2: Type): Boolean = try { - subsametypeRecursions += 1 - undoLog undo { // undo type constraints that arise from operations in this block - !isSameType1(tp1, tp2) - } - } finally { - subsametypeRecursions -= 1 - // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866) - // it doesn't help to keep separate recursion counts for the three methods that now share it - // if (subsametypeRecursions == 0) undoLog.clear() - } - - def isDifferentTypeConstructor(tp1: Type, tp2: Type): Boolean = tp1 match { - case TypeRef(pre1, sym1, _) => - tp2 match { - case TypeRef(pre2, sym2, _) => sym1 != sym2 || isDifferentType(pre1, pre2) - case _ => true - } - case _ => true - } - def normalizePlus(tp: Type) = if (isRawType(tp)) rawToExistential(tp) else tp.normalize /* todo: change to: - def normalizePlus(tp: Type) = tp match { - case TypeRef(pre, sym, List()) => - if (!sym.isInitialized) sym.rawInfo.load(sym) - if (sym.isJavaDefined && !sym.typeParams.isEmpty) rawToExistential(tp) - else tp.normalize - case _ => tp.normalize - } - */ - - private def isSameType1(tp1: Type, tp2: Type): Boolean = { - if ((tp1 eq tp2) || - (tp1 eq ErrorType) || (tp1 eq WildcardType) || - (tp2 eq ErrorType) || (tp2 eq WildcardType)) - true - else if ((tp1 eq NoType) || (tp2 eq NoType)) - false - else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec - tp2.typeSymbol.isPackageClass - else if (tp2 eq NoPrefix) // !! 
I do not see how this would be warranted by the spec - tp1.typeSymbol.isPackageClass - else { - isSameType2(tp1, tp2) || { - val tp1n = normalizePlus(tp1) - val tp2n = normalizePlus(tp2) - ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n) - } - } - } - - def isSameType2(tp1: Type, tp2: Type): Boolean = { - tp1 match { - case tr1: TypeRef => - tp2 match { - case tr2: TypeRef => - return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) && - ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) || - isSameTypes(tr1.args, tr2.args))) || - ((tr1.pre, tr2.pre) match { - case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2) - case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1) - case _ => false - }) - case _: SingleType => - return isSameType2(tp2, tp1) // put singleton type on the left, caught below - case _ => - } - case tt1: ThisType => - tp2 match { - case tt2: ThisType => - if (tt1.sym == tt2.sym) return true - case _ => - } - case st1: SingleType => - tp2 match { - case st2: SingleType => - if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true - case TypeRef(pre2, sym2, Nil) => - if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true - case _ => - } - case ct1: ConstantType => - tp2 match { - case ct2: ConstantType => - return (ct1.value == ct2.value) - case _ => - } - case rt1: RefinedType => - tp2 match { - case rt2: RefinedType => // - def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall { - sym2 => - var e1 = s1.lookupEntry(sym2.name) - (e1 ne null) && { - val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner) - var isEqual = false - while (!isEqual && (e1 ne null)) { - isEqual = e1.sym.info =:= substSym - e1 = s1.lookupNextEntry(e1) - } - isEqual - } - } - //Console.println("is same? 
" + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG - return isSameTypes(rt1.parents, rt2.parents) && { - val decls1 = rt1.decls - val decls2 = rt2.decls - isSubScope(decls1, decls2) && isSubScope(decls2, decls1) - } - case _ => - } - case mt1: MethodType => - tp2 match { - case mt2: MethodType => - return isSameTypes(mt1.paramTypes, mt2.paramTypes) && - mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) && - mt1.isImplicit == mt2.isImplicit - // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe - case _ => - } - case NullaryMethodType(restpe1) => - tp2 match { - // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType - case NullaryMethodType(restpe2) => - return restpe1 =:= restpe2 - case _ => - } - case PolyType(tparams1, res1) => - tp2 match { - case PolyType(tparams2, res2) => -// assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length))) - // @M looks like it might suffer from same problem as #2210 - return ( - (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate - (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && - res1 =:= res2.substSym(tparams2, tparams1) - ) - case _ => - } - case ExistentialType(tparams1, res1) => - tp2 match { - case ExistentialType(tparams2, res2) => - // @M looks like it might suffer from same problem as #2210 - return ( - // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956 - sameLength(tparams1, tparams2) && - (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && - res1 =:= res2.substSym(tparams2, tparams1) - ) - case _ => - } - case TypeBounds(lo1, hi1) => - tp2 match { - case TypeBounds(lo2, hi2) => - return lo1 =:= lo2 && hi1 =:= hi2 - case _ => - } - case BoundedWildcardType(bounds) => - return bounds containsType tp2 - case _ => - } - tp2 match { - case BoundedWildcardType(bounds) => - return bounds containsType tp1 - case _ => - } - tp1 match { - case tv @ TypeVar(_,_) => - return tv.registerTypeEquality(tp2, typeVarLHS = true) - case _ => - } - tp2 match { - case tv @ TypeVar(_,_) => - return tv.registerTypeEquality(tp1, typeVarLHS = false) - case _ => - } - tp1 match { - case _: AnnotatedType => - return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations - case _ => - } - tp2 match { - case _: AnnotatedType => - return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations - case _ => - } - tp1 match { - case _: SingletonType => - tp2 match { - case _: SingletonType => - def chaseDealiasedUnderlying(tp: Type): Type = { - var origin = tp - var next = origin.underlying.dealias - while (next.isInstanceOf[SingletonType]) { - assert(origin ne next, origin) - origin = next - next = origin.underlying.dealias - } - origin - } - val origin1 = chaseDealiasedUnderlying(tp1) - val origin2 = chaseDealiasedUnderlying(tp2) - ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2) - case _ => - false - } - case _ => - false - } + def normalizePlus(tp: Type) = tp match { + case TypeRef(pre, sym, List()) => + if (!sym.isInitialized) sym.rawInfo.load(sym) + if (sym.isJavaDefined && !sym.typeParams.isEmpty) rawToExistential(tp) + else tp.normalize + case _ => tp.normalize } + */ + /** 
Are `tps1` and `tps2` lists of pairwise equivalent types? */ def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _) @@ -5556,64 +3925,9 @@ trait Types extends api.Types { self: SymbolTable => */ final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0 - private val pendingSubTypes = new mutable.HashSet[SubTypePair] private var basetypeRecursions: Int = 0 private val pendingBaseTypes = new mutable.HashSet[Type] - def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth) - - def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try { - subsametypeRecursions += 1 - - //OPT cutdown on Function0 allocation - //was: -// undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars -// if (subsametypeRecursions >= LogPendingSubTypesThreshold) { -// val p = new SubTypePair(tp1, tp2) -// if (pendingSubTypes(p)) -// false -// else -// try { -// pendingSubTypes += p -// isSubType2(tp1, tp2, depth) -// } finally { -// pendingSubTypes -= p -// } -// } else { -// isSubType2(tp1, tp2, depth) -// } -// } - - undoLog.lock() - try { - val before = undoLog.log - var result = false - - try result = { // if subtype test fails, it should not affect constraints on typevars - if (subsametypeRecursions >= LogPendingSubTypesThreshold) { - val p = new SubTypePair(tp1, tp2) - if (pendingSubTypes(p)) - false - else - try { - pendingSubTypes += p - isSubType2(tp1, tp2, depth) - } finally { - pendingSubTypes -= p - } - } else { - isSubType2(tp1, tp2, depth) - } - } finally if (!result) undoLog.undoTo(before) - - result - } finally undoLog.unlock() - } finally { - subsametypeRecursions -= 1 - // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866) - // it doesn't help to keep separate recursion counts for the three methods that now share it - // if (subsametypeRecursions == 0) undoLog.clear() - } /** Does this type have a prefix that begins with a type variable, * or is it a refinement type? 
For type prefixes that fulfil this condition, @@ -5746,42 +4060,6 @@ trait Types extends api.Types { self: SymbolTable => case _ => false } - private def isPolySubType(tp1: PolyType, tp2: PolyType): Boolean = { - val PolyType(tparams1, res1) = tp1 - val PolyType(tparams2, res2) = tp2 - - sameLength(tparams1, tparams2) && { - // fast-path: polymorphic method type -- type params cannot be captured - val isMethod = tparams1.head.owner.isMethod - //@M for an example of why we need to generate fresh symbols otherwise, see neg/tcpoly_ticket2101.scala - val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1) - def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes) - def sub2(tp: Type) = tp.substSym(tparams2, substitutes) - def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info) - - (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2)) - } - } - - // @assume tp1.isHigherKinded || tp2.isHigherKinded - def isHKSubType(tp1: Type, tp2: Type, depth: Int): Boolean = { - def isSub(ntp1: Type, ntp2: Type) = (ntp1.withoutAnnotations, ntp2.withoutAnnotations) match { - case (TypeRef(_, AnyClass, _), _) => false // avoid some warnings when Nothing/Any are on the other side - case (_, TypeRef(_, NothingClass, _)) => false - case (pt1: PolyType, pt2: PolyType) => isPolySubType(pt1, pt2) // @assume both .isHigherKinded (both normalized to PolyType) - case (_: PolyType, MethodType(ps, _)) if ps exists (_.tpe.isWildcard) => false // don't warn on HasMethodMatching on right hand side - case _ => // @assume !(both .isHigherKinded) thus cannot be subtypes - def tp_s(tp: Type): String = f"$tp%-20s ${util.shortClassOfInstance(tp)}%s" - devWarning(s"HK subtype check on $tp1 and $tp2, but both don't normalize to polytypes:\n tp1=${tp_s(ntp1)}\n tp2=${tp_s(ntp2)}") - false - } - - ( tp1.typeSymbol == NothingClass // @M Nothing is subtype of every well-kinded type - || tp2.typeSymbol == AnyClass // @M Any is supertype of every well-kinded type (@PP: is it? What about continuations plugin?) - || isSub(tp1.normalize, tp2.normalize) && annotationsConform(tp1, tp2) // @M! normalize reduces higher-kinded case to PolyType's - ) - } - def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Int): Boolean = { def isSubArg(t1: Type, t2: Type, variance: Variance) = ( (variance.isContravariant || isSubType(t1, t2, depth)) @@ -5791,205 +4069,7 @@ trait Types extends api.Types { self: SymbolTable => corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg) } - /** Does type `tp1` conform to `tp2`? */ - private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = { - if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true - if ((tp1 eq NoType) || (tp2 eq NoType)) return false - if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec - if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass - if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2 - if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType(tp1, tp2, depth) - - /** First try, on the right: - * - unwrap Annotated types, BoundedWildcardTypes, - * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard - * - handle common cases for first-kind TypeRefs on both sides as a fast path. 
- */ - def firstTry = tp2 match { - // fast path: two typerefs, none of them HK - case tr2: TypeRef => - tp1 match { - case tr1: TypeRef => - val sym1 = tr1.sym - val sym2 = tr2.sym - val pre1 = tr1.pre - val pre2 = tr2.pre - (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth) - else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass && - (isUnifiable(pre1, pre2) || - isSameSpecializedSkolem(sym1, sym2, pre1, pre2) || - sym2.isAbstractType && isSubPre(pre1, pre2, sym2)))) && - isSubArgs(tr1.args, tr2.args, sym1.typeParams, depth)) - || - sym2.isClass && { - val base = tr1 baseType sym2 - (base ne tr1) && isSubType(base, tr2, depth) - } - || - thirdTryRef(tr1, tr2)) - case _ => - secondTry - } - case AnnotatedType(_, _, _) => - isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) && - annotationsConform(tp1, tp2) - case BoundedWildcardType(bounds) => - isSubType(tp1, bounds.hi, depth) - case tv2 @ TypeVar(_, constr2) => - tp1 match { - case AnnotatedType(_, _, _) | BoundedWildcardType(_) => - secondTry - case _ => - tv2.registerBound(tp1, isLowerBound = true) - } - case _ => - secondTry - } - - /** Second try, on the left: - * - unwrap AnnotatedTypes, BoundedWildcardTypes, - * - bind typevars, - * - handle existential types by skolemization. - */ - def secondTry = tp1 match { - case AnnotatedType(_, _, _) => - isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) && - annotationsConform(tp1, tp2) - case BoundedWildcardType(bounds) => - isSubType(tp1.bounds.lo, tp2, depth) - case tv @ TypeVar(_,_) => - tv.registerBound(tp2, isLowerBound = false) - case ExistentialType(_, _) => - try { - skolemizationLevel += 1 - isSubType(tp1.skolemizeExistential, tp2, depth) - } finally { - skolemizationLevel -= 1 - } - case _ => - thirdTry - } - - def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = { - val sym2 = tp2.sym - sym2 match { - case NotNullClass => tp1.isNotNull - case SingletonClass => tp1.isStable || fourthTry - case _: ClassSymbol => - if (isRawType(tp2)) - isSubType(tp1, rawToExistential(tp2), depth) - else if (sym2.name == tpnme.REFINE_CLASS_NAME) - isSubType(tp1, sym2.info, depth) - else - fourthTry - case _: TypeSymbol => - if (sym2 hasFlag DEFERRED) { - val tp2a = tp2.bounds.lo - isDifferentTypeConstructor(tp2, tp2a) && - isSubType(tp1, tp2a, depth) || - fourthTry - } else { - isSubType(tp1.normalize, tp2.normalize, depth) - } - case _ => - fourthTry - } - } - - /** Third try, on the right: - * - decompose refined types. - * - handle typerefs, existentials, and notnull types. - * - handle left+right method types, polytypes, typebounds - */ - def thirdTry = tp2 match { - case tr2: TypeRef => - thirdTryRef(tp1, tr2) - case rt2: RefinedType => - (rt2.parents forall (isSubType(tp1, _, depth))) && - (rt2.decls forall (specializesSym(tp1, _, depth))) - case et2: ExistentialType => - et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry - case nn2: NotNullType => - tp1.isNotNull && isSubType(tp1, nn2.underlying, depth) - case mt2: MethodType => - tp1 match { - case mt1 @ MethodType(params1, res1) => - val params2 = mt2.params - val res2 = mt2.resultType - (sameLength(params1, params2) && - mt1.isImplicit == mt2.isImplicit && - matchingParams(params1, params2, mt1.isJava, mt2.isJava) && - isSubType(res1.substSym(params1, params2), res2, depth)) - // TODO: if mt1.params.isEmpty, consider NullaryMethodType? 
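The subtype and equivalence checks above surface in user code as the `<:<` and `=:=` operations on reflected types. A small sketch using the public reflection API (not part of this patch; assumes scala-reflect on the classpath):

import scala.reflect.runtime.universe._

object ConformanceDemo extends App {
  println(typeOf[List[Int]] <:< typeOf[Seq[Int]])     // true: List <: Seq, covariant argument
  println(typeOf[Seq[Int]]  <:< typeOf[List[Int]])    // false
  println(typeOf[Array[Int]] <:< typeOf[Array[Any]])  // false: Array is invariant
  println(typeOf[List[Int]] =:= typeOf[List[Int]])    // true: same type
}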
- case _ => - false - } - case pt2 @ NullaryMethodType(_) => - tp1 match { - // TODO: consider MethodType mt for which mt.params.isEmpty?? - case pt1 @ NullaryMethodType(_) => - isSubType(pt1.resultType, pt2.resultType, depth) - case _ => - false - } - case TypeBounds(lo2, hi2) => - tp1 match { - case TypeBounds(lo1, hi1) => - isSubType(lo2, lo1, depth) && isSubType(hi1, hi2, depth) - case _ => - false - } - case _ => - fourthTry - } - - /** Fourth try, on the left: - * - handle typerefs, refined types, notnull and singleton types. - */ - def fourthTry = tp1 match { - case tr1 @ TypeRef(pre1, sym1, _) => - sym1 match { - case NothingClass => true - case NullClass => - tp2 match { - case TypeRef(_, sym2, _) => - containsNull(sym2) - case _ => - isSingleType(tp2) && isSubType(tp1, tp2.widen, depth) - } - case _: ClassSymbol => - if (isRawType(tp1)) - isSubType(rawToExistential(tp1), tp2, depth) - else if (sym1.isModuleClass) tp2 match { - case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2) - case _ => false - } - else if (sym1.isRefinementClass) - isSubType(sym1.info, tp2, depth) - else false - - case _: TypeSymbol => - if (sym1 hasFlag DEFERRED) { - val tp1a = tp1.bounds.hi - isDifferentTypeConstructor(tp1, tp1a) && isSubType(tp1a, tp2, depth) - } else { - isSubType(tp1.normalize, tp2.normalize, depth) - } - case _ => - false - } - case RefinedType(parents1, _) => - parents1 exists (isSubType(_, tp2, depth)) - case _: SingletonType | _: NotNullType => - isSubType(tp1.underlying, tp2, depth) - case _ => - false - } - - firstTry - } - - private def containsNull(sym: Symbol): Boolean = + protected[internal] def containsNull(sym: Symbol): Boolean = sym.isClass && sym != NothingClass && !(sym isNonBottomSubClass AnyValClass) && !(sym isNonBottomSubClass NotNullClass) @@ -6011,7 +4091,7 @@ trait Types extends api.Types { self: SymbolTable => /** Does member `sym1` of `tp1` have a stronger type * than member `sym2` of `tp2`? */ - private def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = { + protected[internal] def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = { require((sym1 ne NoSymbol) && (sym2 ne NoSymbol), ((tp1, sym1, tp2, sym2, depth))) val info1 = tp1.memberInfo(sym1) val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1) @@ -6145,7 +4225,7 @@ trait Types extends api.Types { self: SymbolTable => */ /** Are `syms1` and `syms2` parameter lists with pairwise equivalent types? */ - private def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match { + protected[internal] def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match { case Nil => syms2.isEmpty case sym1 :: rest1 => @@ -6174,87 +4254,6 @@ trait Types extends api.Types { self: SymbolTable => else x1 :: xs1 } - /** Solve constraint collected in types `tvars`. - * - * @param tvars All type variables to be instantiated. - * @param tparams The type parameters corresponding to `tvars` - * @param variances The variances of type parameters; need to reverse - * solution direction for all contravariant variables. - * @param upper When `true` search for max solution else min. 
- */ - def solve(tvars: List[TypeVar], tparams: List[Symbol], - variances: List[Variance], upper: Boolean): Boolean = - solve(tvars, tparams, variances, upper, AnyDepth) - - def solve(tvars: List[TypeVar], tparams: List[Symbol], - variances: List[Variance], upper: Boolean, depth: Int): Boolean = { - - def solveOne(tvar: TypeVar, tparam: Symbol, variance: Variance) { - if (tvar.constr.inst == NoType) { - val up = if (variance.isContravariant) !upper else upper - tvar.constr.inst = null - val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo - //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound)) - var cyclic = bound contains tparam - foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => { - val ok = (tparam2 != tparam) && ( - (bound contains tparam2) - || up && (tparam2.info.bounds.lo =:= tparam.tpeHK) - || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK) - ) - if (ok) { - if (tvar2.constr.inst eq null) cyclic = true - solveOne(tvar2, tparam2, variance2) - } - }) - if (!cyclic) { - if (up) { - if (bound.typeSymbol != AnyClass) { - log(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)") - tvar addHiBound bound.instantiateTypeParams(tparams, tvars) - } - for (tparam2 <- tparams) - tparam2.info.bounds.lo.dealias match { - case TypeRef(_, `tparam`, _) => - log(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)") - tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars) - case _ => - } - } else { - if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) { - log(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)") - tvar addLoBound bound.instantiateTypeParams(tparams, tvars) - } - for (tparam2 <- tparams) - tparam2.info.bounds.hi.dealias match { - case TypeRef(_, `tparam`, _) => - log(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)") - tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars) - case _ => - } - } - } - tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar - - //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))) - val newInst = ( - if (up) { - if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds) - } else { - if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds) - } - ) - log(s"$tvar setInst $newInst") - tvar setInst newInst - //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG - } - } - - // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info))) - foreach3(tvars, tparams, variances)(solveOne) - tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst)) - } - /** Do type arguments `targs` conform to formal parameters `tparams`? 
*/ def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = { @@ -6267,168 +4266,6 @@ trait Types extends api.Types { self: SymbolTable => def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = tparams map (_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds) -// Lubs and Glbs --------------------------------------------------------- - - private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Int) { - import util.TableDef - import TableDef.Column - def str(tp: Type) = { - if (tp == NoType) "" - else { - val s = ("" + tp).replaceAll("""[\w.]+\.(\w+)""", "$1") - if (s.length < 60) s - else (s take 57) + "..." - } - } - - val sorted = btsMap.toList.sortWith((x, y) => x._1.typeSymbol isLess y._1.typeSymbol) - val maxSeqLength = sorted.map(_._2.size).max - val padded = sorted map (_._2.padTo(maxSeqLength, NoType)) - val transposed = padded.transpose - - val columns: List[Column[List[Type]]] = mapWithIndex(sorted) { - case ((k, v), idx) => - Column(str(k), (xs: List[Type]) => str(xs(idx)), left = true) - } - - val tableDef = TableDef(columns: _*) - val formatted = tableDef.table(transposed) - println("** Depth is " + depth + "\n" + formatted) - } - - /** From a list of types, find any which take type parameters - * where the type parameter bounds contain references to other - * any types in the list (including itself.) - * - * @return List of symbol pairs holding the recursive type - * parameter and the parameter which references it. - */ - def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = { - if (ts.isEmpty) Nil - else { - val sym = ts.head.typeSymbol - require(ts.tail forall (_.typeSymbol == sym), ts) - for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield - p -> in - } - } - - /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix), - * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types: - * - * xs <= ys iff forall y in ys exists x in xs such that x <: y - * - * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor) - * (these type parameters may be referred to by type arguments in the BTS column of those types, - * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params) - * @arg tsBts a matrix whose columns are basetype sequences - * the first row is the original list of types for which we're computing the lub - * (except that type constructors have been applied to their dummyArgs) - * @See baseTypeSeq for a definition of sorted and upwards closed. - */ - private def lubList(ts: List[Type], depth: Int): List[Type] = { - var lubListDepth = 0 - // This catches some recursive situations which would otherwise - // befuddle us, e.g. pos/hklub0.scala - def isHotForTs(xs: List[Type]) = ts exists (_.typeParams == xs.map(_.typeSymbol)) - - def elimHigherOrderTypeParam(tp: Type) = tp match { - case TypeRef(_, _, args) if args.nonEmpty && isHotForTs(args) => - logResult("Retracting dummies from " + tp + " in lublist")(tp.typeConstructor) - case _ => tp - } - // pretypes is a tail-recursion-preserving accumulator. 
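The lub machinery above has a public counterpart: the reflection universe exposes `lub` and `glb` over lists of types. A small sketch (not part of this patch; the printed results are indicative only):

import scala.reflect.runtime.universe._

object LubGlbDemo extends App {
  println(lub(List(typeOf[Int], typeOf[Long])))                // e.g. AnyVal
  println(lub(List(typeOf[List[Int]], typeOf[List[String]])))  // e.g. List[Any]
  println(glb(List(typeOf[Serializable], typeOf[Product])))    // e.g. Product with Serializable
}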
- @annotation.tailrec def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = { - lubListDepth += 1 - - if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) pretypes.reverse - else if (tsBts.tail.isEmpty) pretypes.reverse ++ tsBts.head - else { - // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts. - // Invariant: all symbols "under" (closer to the first row) the frontier - // are smaller (according to _.isLess) than the ones "on and beyond" the frontier - val ts0 = tsBts map (_.head) - - // Is the frontier made up of types with the same symbol? - val isUniformFrontier = (ts0: @unchecked) match { - case t :: ts => ts forall (_.typeSymbol == t.typeSymbol) - } - - // Produce a single type for this frontier by merging the prefixes and arguments of those - // typerefs that share the same symbol: that symbol is the current maximal symbol for which - // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before - // merging, strip targs that refer to bound tparams (when we're computing the lub of type - // constructors.) Also filter out all types that are a subtype of some other type. - if (isUniformFrontier) { - val fbounds = findRecursiveBounds(ts0) map (_._2) - val tcLubList = typeConstructorLubList(ts0) - def isRecursive(tp: Type) = tp.typeSymbol.typeParams exists fbounds.contains - - val ts1 = ts0 map { t => - if (isRecursive(t)) { - tcLubList map (t baseType _.typeSymbol) find (t => !isRecursive(t)) match { - case Some(tp) => logResult(s"Breaking recursion in lublist, substituting weaker type.\n Was: $t\n Now")(tp) - case _ => t - } - } - else t - } - val tails = tsBts map (_.tail) - mergePrefixAndArgs(elimSub(ts1, depth) map elimHigherOrderTypeParam, Covariant, depth) match { - case Some(tp) => loop(tp :: pretypes, tails) - case _ => loop(pretypes, tails) - } - } - else { - // frontier is not uniform yet, move it beyond the current minimal symbol; - // lather, rinSe, repeat - val sym = minSym(ts0) - val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts) - if (printLubs) { - val str = (newtps.zipWithIndex map { case (tps, idx) => - tps.map(" " + _ + "\n").mkString(" (" + idx + ")\n", "", "\n") - }).mkString("") - - println("Frontier(\n" + str + ")") - printLubMatrix((ts zip tsBts).toMap, lubListDepth) - } - - loop(pretypes, newtps) - } - } - } - - val initialBTSes = ts map (_.baseTypeSeq.toList) - if (printLubs) - printLubMatrix((ts zip initialBTSes).toMap, depth) - - loop(Nil, initialBTSes) - } - - /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */ - private def minSym(tps: List[Type]): Symbol = - (tps.head.typeSymbol /: tps.tail) { - (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1 - } - - /** A minimal type list which has a given list of types as its base type sequence */ - def spanningTypes(ts: List[Type]): List[Type] = ts match { - case List() => List() - case first :: rest => - first :: spanningTypes( - rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol))) - } - - /** Eliminate from list of types all elements which are a supertype - * of some other element of the list. 
*/ - private def elimSuper(ts: List[Type]): List[Type] = ts match { - case List() => List() - case List(t) => List(t) - case t :: ts1 => - val rest = elimSuper(ts1 filter (t1 => !(t <:< t1))) - if (rest exists (t1 => t1 <:< t)) rest else t :: rest - } - def elimAnonymousClass(t: Type) = t match { case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass => clazz.classBound.asSeenFrom(pre, clazz.owner) @@ -6436,406 +4273,6 @@ trait Types extends api.Types { self: SymbolTable => t } - /** Eliminate from list of types all elements which are a subtype - * of some other element of the list. */ - private def elimSub(ts: List[Type], depth: Int): List[Type] = { - def elimSub0(ts: List[Type]): List[Type] = ts match { - case List() => List() - case List(t) => List(t) - case t :: ts1 => - val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth)))) - if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest - } - val ts0 = elimSub0(ts) - if (ts0.isEmpty || ts0.tail.isEmpty) ts0 - else { - val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden)) - if (ts1 eq ts0) ts0 - else elimSub(ts1, depth) - } - } - - private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = { - val quantified = ts flatMap { - case ExistentialType(qs, _) => qs - case t => List() - } - def stripType(tp: Type): Type = tp match { - case ExistentialType(_, res) => - res - case tv@TypeVar(_, constr) => - if (tv.instValid) stripType(constr.inst) - else if (tv.untouchable) tv - else abort("trying to do lub/glb of typevar "+tp) - case t => t - } - val strippedTypes = ts mapConserve stripType - (strippedTypes, quantified) - } - - def weakLub(ts: List[Type]) = - if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true) - else if (ts exists typeHasAnnotations) - (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true) - else (lub(ts), false) - - def numericLub(ts: List[Type]) = - ts reduceLeft ((t1, t2) => - if (isNumericSubType(t1, t2)) t2 - else if (isNumericSubType(t2, t1)) t1 - else IntClass.tpe) - - def isWeakSubType(tp1: Type, tp2: Type) = - tp1.deconst.normalize match { - case TypeRef(_, sym1, _) if isNumericValueClass(sym1) => - tp2.deconst.normalize match { - case TypeRef(_, sym2, _) if isNumericValueClass(sym2) => - isNumericSubClass(sym1, sym2) - case tv2 @ TypeVar(_, _) => - tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true) - case _ => - isSubType(tp1, tp2) - } - case tv1 @ TypeVar(_, _) => - tp2.deconst.normalize match { - case TypeRef(_, sym2, _) if isNumericValueClass(sym2) => - tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true) - case _ => - isSubType(tp1, tp2) - } - case _ => - isSubType(tp1, tp2) - } - - /** The isNumericValueType tests appear redundant, but without them - * test/continuations-neg/function3.scala goes into an infinite loop. - * (Even if the calls are to typeSymbolDirect.) - */ - def isNumericSubType(tp1: Type, tp2: Type): Boolean = ( - isNumericValueType(tp1) - && isNumericValueType(tp2) - && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol) - ) - - private val lubResults = new mutable.HashMap[(Int, List[Type]), Type] - private val glbResults = new mutable.HashMap[(Int, List[Type]), Type] - - /** Given a list of types, finds all the base classes they have in - * common, then returns a list of type constructors derived directly - * from the symbols (so any more specific type information is ignored.) 
- * The list is filtered such that every type constructor in the list - * expects the same number of type arguments, which is chosen based - * on the deepest class among the common baseclasses. - */ - def typeConstructorLubList(ts: List[Type]): List[Type] = { - val bcs = ts.flatMap(_.baseClasses).distinct sortWith (_ isLess _) - val tcons = bcs filter (clazz => ts forall (_.typeSymbol isSubClass clazz)) - - tcons map (_.typeConstructor) match { - case Nil => Nil - case t :: ts => t :: ts.filter(_.typeParams.size == t.typeParams.size) - } - } - - def lub(ts: List[Type]): Type = ts match { - case List() => NothingClass.tpe - case List(t) => t - case _ => - if (Statistics.canEnable) Statistics.incCounter(lubCount) - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null - try { - val res = lub(ts, lubDepth(ts)) - // If the number of unapplied type parameters in all incoming - // types is consistent, and the lub does not match that, return - // the type constructor of the calculated lub instead. This - // is because lubbing type constructors tends to result in types - // which have been applied to dummies or Nothing. - ts.map(_.typeParams.size).distinct match { - case x :: Nil if res.typeParams.size != x => - logResult(s"Stripping type args from lub because $res is not consistent with $ts")(res.typeConstructor) - case _ => - res - } - } - finally { - lubResults.clear() - glbResults.clear() - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) - } - } - - /** The least upper bound wrt <:< of a list of types */ - private def lub(ts: List[Type], depth: Int): Type = { - def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match { - case List() => NothingClass.tpe - case List(t) => t - case ts @ PolyType(tparams, _) :: _ => - val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) => - tparam.cloneSymbol.setInfo(glb(bounds, depth))) - PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1))) - case ts @ (mt @ MethodType(params, _)) :: rest => - MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes))) - case ts @ NullaryMethodType(_) :: rest => - NullaryMethodType(lub0(matchingRestypes(ts, Nil))) - case ts @ TypeBounds(_, _) :: rest => - TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth)) - case ts @ AnnotatedType(annots, tpe, _) :: rest => - annotationsLub(lub0(ts map (_.withoutAnnotations)), ts) - case ts => - lubResults get (depth, ts) match { - case Some(lubType) => - lubType - case None => - lubResults((depth, ts)) = AnyClass.tpe - val res = if (depth < 0) AnyClass.tpe else lub1(ts) - lubResults((depth, ts)) = res - res - } - } - def lub1(ts0: List[Type]): Type = { - val (ts, tparams) = stripExistentialsAndTypeVars(ts0) - val lubBaseTypes: List[Type] = lubList(ts, depth) - val lubParents = spanningTypes(lubBaseTypes) - val lubOwner = commonOwner(ts) - val lubBase = intersectionType(lubParents, lubOwner) - val lubType = - if (phase.erasedTypes || depth == 0 ) lubBase - else { - val lubRefined = refinedType(lubParents, lubOwner) - val lubThisType = lubRefined.typeSymbol.thisType - val narrowts = ts map (_.narrow) - def excludeFromLub(sym: Symbol) = ( - sym.isClass - || sym.isConstructor - || !sym.isPublic - || isGetClass(sym) - || sym.isFinal - || narrowts.exists(t => !refines(t, sym)) - ) - def lubsym(proto: Symbol): Symbol = { - val prototp = lubThisType.memberInfo(proto) - val syms = narrowts map (t => - t.nonPrivateMember(proto.name).suchThat(sym => - sym.tpe matches 
prototp.substThis(lubThisType.typeSymbol, t))) - - if (syms contains NoSymbol) NoSymbol - else { - val symtypes = - map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType)) - if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class - proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth))) - else if (symtypes.tail forall (symtypes.head =:= _)) - proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head) - else { - def lubBounds(bnds: List[TypeBounds]): TypeBounds = - TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth))) - lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos) - .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds))) - } - } - } - def refines(tp: Type, sym: Symbol): Boolean = { - val syms = tp.nonPrivateMember(sym.name).alternatives - !syms.isEmpty && (syms forall (alt => - // todo alt != sym is strictly speaking not correct, but without it we lose - // efficiency. - alt != sym && !specializesSym(lubThisType, sym, tp, alt, depth))) - } - // add a refinement symbol for all non-class members of lubBase - // which are refined by every type in ts. - for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) { - try lubsym(sym) andAlso (addMember(lubThisType, lubRefined, _, depth)) - catch { - case ex: NoCommonType => - } - } - if (lubRefined.decls.isEmpty) lubBase - else if (!verifyLubs) lubRefined - else { - // Verify that every given type conforms to the calculated lub. - // In theory this should not be necessary, but higher-order type - // parameters are not handled correctly. - val ok = ts forall { t => - isSubType(t, lubRefined, depth) || { - if (settings.debug.value || printLubs) { - Console.println( - "Malformed lub: " + lubRefined + "\n" + - "Argument " + t + " does not conform. Falling back to " + lubBase - ) - } - false - } - } - // If not, fall back on the more conservative calculation. - if (ok) lubRefined - else lubBase - } - } - // dropIllegalStarTypes is a localized fix for SI-6897. We should probably - // integrate that transformation at a lower level in master, but lubs are - // the likely and maybe only spot they escape, so fixing here for 2.10.1. - existentialAbstraction(tparams, dropIllegalStarTypes(lubType)) - } - if (printLubs) { - println(indent + "lub of " + ts + " at depth "+depth)//debug - indent = indent + " " - assert(indent.length <= 100) - } - if (Statistics.canEnable) Statistics.incCounter(nestedLubCount) - val res = lub0(ts) - if (printLubs) { - indent = indent stripSuffix " " - println(indent + "lub of " + ts + " is " + res)//debug - } - if (ts forall typeIsNotNull) res.notNull else res - } - - val GlbFailure = new Throwable - - /** A global counter for glb calls in the `specializes` query connected to the `addMembers` - * call in `glb`. There's a possible infinite recursion when `specializes` calls - * memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb. - * The counter breaks this recursion after two calls. - * If the recursion is broken, no member is added to the glb. - */ - private var globalGlbDepth = 0 - private final val globalGlbLimit = 2 - - /** The greatest lower bound of a list of types (as determined by `<:<`). 
*/ - def glb(ts: List[Type]): Type = elimSuper(ts) match { - case List() => AnyClass.tpe - case List(t) => t - case ts0 => - if (Statistics.canEnable) Statistics.incCounter(lubCount) - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null - try { - glbNorm(ts0, lubDepth(ts0)) - } finally { - lubResults.clear() - glbResults.clear() - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) - } - } - - private def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match { - case List() => AnyClass.tpe - case List(t) => t - case ts0 => glbNorm(ts0, depth) - } - - /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized - * with regard to `elimSuper`. */ - protected def glbNorm(ts: List[Type], depth: Int): Type = { - def glb0(ts0: List[Type]): Type = ts0 match { - case List() => AnyClass.tpe - case List(t) => t - case ts @ PolyType(tparams, _) :: _ => - val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) => - tparam.cloneSymbol.setInfo(lub(bounds, depth))) - PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth)) - case ts @ (mt @ MethodType(params, _)) :: rest => - MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth)) - case ts @ NullaryMethodType(_) :: rest => - NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth)) - case ts @ TypeBounds(_, _) :: rest => - TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth)) - case ts => - glbResults get (depth, ts) match { - case Some(glbType) => - glbType - case _ => - glbResults((depth, ts)) = NothingClass.tpe - val res = if (depth < 0) NothingClass.tpe else glb1(ts) - glbResults((depth, ts)) = res - res - } - } - def glb1(ts0: List[Type]): Type = { - try { - val (ts, tparams) = stripExistentialsAndTypeVars(ts0) - val glbOwner = commonOwner(ts) - def refinedToParents(t: Type): List[Type] = t match { - case RefinedType(ps, _) => ps flatMap refinedToParents - case _ => List(t) - } - def refinedToDecls(t: Type): List[Scope] = t match { - case RefinedType(ps, decls) => - val dss = ps flatMap refinedToDecls - if (decls.isEmpty) dss else decls :: dss - case _ => List() - } - val ts1 = ts flatMap refinedToParents - val glbBase = intersectionType(ts1, glbOwner) - val glbType = - if (phase.erasedTypes || depth == 0) glbBase - else { - val glbRefined = refinedType(ts1, glbOwner) - val glbThisType = glbRefined.typeSymbol.thisType - def glbsym(proto: Symbol): Symbol = { - val prototp = glbThisType.memberInfo(proto) - val syms = for (t <- ts; - alt <- (t.nonPrivateMember(proto.name).alternatives) - if glbThisType.memberInfo(alt) matches prototp - ) yield alt - val symtypes = syms map glbThisType.memberInfo - assert(!symtypes.isEmpty) - proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted( - if (proto.isTerm) glb(symtypes, decr(depth)) - else { - def isTypeBound(tp: Type) = tp match { - case TypeBounds(_, _) => true - case _ => false - } - def glbBounds(bnds: List[Type]): TypeBounds = { - val lo = lub(bnds map (_.bounds.lo), decr(depth)) - val hi = glb(bnds map (_.bounds.hi), decr(depth)) - if (lo <:< hi) TypeBounds(lo, hi) - else throw GlbFailure - } - val symbounds = symtypes filter isTypeBound - var result: Type = - if (symbounds.isEmpty) - TypeBounds.empty - else glbBounds(symbounds) - for (t <- symtypes if !isTypeBound(t)) - if (result.bounds containsType t) result = t - else throw GlbFailure - result - }) - } - if (globalGlbDepth < globalGlbLimit) - try { - 
globalGlbDepth += 1 - val dss = ts flatMap refinedToDecls - for (ds <- dss; sym <- ds.iterator) - if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth)) - try { - addMember(glbThisType, glbRefined, glbsym(sym), depth) - } catch { - case ex: NoCommonType => - } - } finally { - globalGlbDepth -= 1 - } - if (glbRefined.decls.isEmpty) glbBase else glbRefined - } - existentialAbstraction(tparams, glbType) - } catch { - case GlbFailure => - if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe - else NothingClass.tpe - } - } - // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG - - if (Statistics.canEnable) Statistics.incCounter(nestedLubCount) - val res = glb0(ts) - - // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG - - if (ts exists typeIsNotNull) res.notNull else res - } - /** A list of the typevars in a type. */ def typeVarsInType(tp: Type): List[TypeVar] = { var tvs: List[TypeVar] = Nil @@ -6970,51 +4407,6 @@ trait Types extends api.Types { self: SymbolTable => def inheritsJavaVarArgsMethod(clazz: Symbol) = clazz.thisType.baseClasses exists isJavaVarargsAncestor - /** All types in list must be polytypes with type parameter lists of - * same length as tparams. - * Returns list of list of bounds infos, where corresponding type - * parameters are renamed to tparams. - */ - private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = { - def getBounds(tp: Type): List[Type] = tp match { - case PolyType(tparams1, _) if sameLength(tparams1, tparams) => - tparams1 map (tparam => tparam.info.substSym(tparams1, tparams)) - case tp => - if (tp ne tp.normalize) getBounds(tp.normalize) - else throw new NoCommonType(tps) - } - tps map getBounds - } - - /** All types in list must be polytypes with type parameter lists of - * same length as tparams. - * Returns list of instance types, where corresponding type - * parameters are renamed to tparams. - */ - private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = { - def transformResultType(tp: Type): Type = tp match { - case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) => - restpe.substSym(tparams1, tparams) - case tp => - if (tp ne tp.normalize) transformResultType(tp.normalize) - else throw new NoCommonType(tps) - } - tps map transformResultType - } - - /** All types in list must be method types with equal parameter types. - * Returns list of their result types. - */ - private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] = - tps map { - case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) => - res - case NullaryMethodType(res) if pts.isEmpty => - res - case _ => - throw new NoCommonType(tps) - } - // Errors and Diagnostics ----------------------------------------------------- /** A throwable signalling a type error */ @@ -7039,7 +4431,7 @@ trait Types extends api.Types { self: SymbolTable => } /** The current indentation string for traces */ - private var indent: String = "" + protected[internal] var indent: String = "" /** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. 
*/ protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = { @@ -7105,29 +4497,6 @@ trait Types extends api.Types { self: SymbolTable => "scala.collection.IndexedSeq", "scala.collection.Iterator") - - /** The maximum number of recursions allowed in toString - */ - final val maxTostringRecursions = 50 - - private var tostringRecursions = 0 - - protected def typeToString(tpe: Type): String = - if (tostringRecursions >= maxTostringRecursions) { - devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe)) - if (settings.debug.value) - (new Throwable).printStackTrace - - "..." - } - else - try { - tostringRecursions += 1 - tpe.safeToString - } finally { - tostringRecursions -= 1 - } - // ----- Hoisted closures and convenience methods, for compile time reductions ------- private[scala] val typeIsNotNull = (tp: Type) => tp.isNotNull diff --git a/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala new file mode 100644 index 0000000000..e5ddd8f359 --- /dev/null +++ b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala @@ -0,0 +1,50 @@ +package scala.reflect +package internal +package tpe + +private[internal] trait CommonOwners { + self: SymbolTable => + + /** The most deeply nested owner that contains all the symbols + * of thistype or prefixless typerefs/singletype occurrences in given type. + */ + protected[internal] def commonOwner(t: Type): Symbol = commonOwner(t :: Nil) + + /** The most deeply nested owner that contains all the symbols + * of thistype or prefixless typerefs/singletype occurrences in given list + * of types. + */ + protected[internal] def commonOwner(tps: List[Type]): Symbol = { + if (tps.isEmpty) NoSymbol + else { + commonOwnerMap.clear() + tps foreach (commonOwnerMap traverse _) + if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol + } + } + + protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj + + protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] { + var result: Symbol = _ + + def clear() { result = null } + + private def register(sym: Symbol) { + // First considered type is the trivial result. + if ((result eq null) || (sym eq NoSymbol)) + result = sym + else + while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result)) + result = result.owner + } + def traverse(tp: Type) = tp.normalize match { + case ThisType(sym) => register(sym) + case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse + case SingleType(NoPrefix, sym) => register(sym.owner) + case _ => mapOver(tp) + } + } + + private lazy val commonOwnerMapObj = new CommonOwnerMap +} diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala new file mode 100644 index 0000000000..bdccc75d6d --- /dev/null +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -0,0 +1,592 @@ +package scala.reflect +package internal +package tpe + +import scala.collection.{ mutable } +import util.Statistics +import Variance._ + +private[internal] trait GlbLubs { + self: SymbolTable => + import definitions._ + import TypesStats._ + + private final val printLubs = sys.props contains "scalac.debug.lub" + + /** In case anyone wants to turn off lub verification without reverting anything. 
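The `register` loop in `CommonOwnerMap` above starts from the first symbol it sees and walks the candidate outwards along the owner chain until it also encloses every later symbol, i.e. it computes the least common enclosing owner. A self-contained sketch of that walk, assuming a hypothetical `Sym` with an owner chain (this is not the compiler's `Symbol`):

{{{
// Illustrative model only: a named scope with an optional owner.
final case class Sym(name: String, owner: Option[Sym]) {
  def isNestedIn(other: Sym): Boolean = this == other || owner.exists(_ isNestedIn other)
  def ownerChain: List[Sym]           = this :: owner.map(_.ownerChain).getOrElse(Nil)
}

// Same shape as CommonOwnerMap.register: keep a candidate owner and move it
// outwards until it encloses the next symbol as well.
def commonOwner(syms: List[Sym]): Option[Sym] = syms match {
  case Nil           => None    // plays the role of NoSymbol
  case first :: rest => rest.foldLeft(Option(first)) { (acc, sym) =>
    acc.flatMap(_.ownerChain.find(sym isNestedIn _))
  }
}

val root  = Sym("<root>", None)
val outer = Sym("Outer", Some(root))
val a     = Sym("a", Some(outer))
val b     = Sym("b", Some(outer))
assert(commonOwner(List(a, b))     == Some(outer))
assert(commonOwner(List(a, outer)) == Some(outer))
}}}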
*/ + private final val verifyLubs = true + + + private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Int) { + import util.TableDef + import TableDef.Column + def str(tp: Type) = { + if (tp == NoType) "" + else { + val s = ("" + tp).replaceAll("""[\w.]+\.(\w+)""", "$1") + if (s.length < 60) s + else (s take 57) + "..." + } + } + + val sorted = btsMap.toList.sortWith((x, y) => x._1.typeSymbol isLess y._1.typeSymbol) + val maxSeqLength = sorted.map(_._2.size).max + val padded = sorted map (_._2.padTo(maxSeqLength, NoType)) + val transposed = padded.transpose + + val columns: List[Column[List[Type]]] = mapWithIndex(sorted) { + case ((k, v), idx) => + Column(str(k), (xs: List[Type]) => str(xs(idx)), left = true) + } + + val tableDef = TableDef(columns: _*) + val formatted = tableDef.table(transposed) + println("** Depth is " + depth + "\n" + formatted) + } + + /** From a list of types, find any which take type parameters + * where the type parameter bounds contain references to other + * any types in the list (including itself.) + * + * @return List of symbol pairs holding the recursive type + * parameter and the parameter which references it. + */ + def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = { + if (ts.isEmpty) Nil + else { + val sym = ts.head.typeSymbol + require(ts.tail forall (_.typeSymbol == sym), ts) + for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield + p -> in + } + } + + /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix), + * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types: + * + * xs <= ys iff forall y in ys exists x in xs such that x <: y + * + * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor) + * (these type parameters may be referred to by type arguments in the BTS column of those types, + * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params) + * @arg tsBts a matrix whose columns are basetype sequences + * the first row is the original list of types for which we're computing the lub + * (except that type constructors have been applied to their dummyArgs) + * @See baseTypeSeq for a definition of sorted and upwards closed. + */ + def lubList(ts: List[Type], depth: Int): List[Type] = { + var lubListDepth = 0 + // This catches some recursive situations which would otherwise + // befuddle us, e.g. pos/hklub0.scala + def isHotForTs(xs: List[Type]) = ts exists (_.typeParams == xs.map(_.typeSymbol)) + + def elimHigherOrderTypeParam(tp: Type) = tp match { + case TypeRef(_, _, args) if args.nonEmpty && isHotForTs(args) => + logResult("Retracting dummies from " + tp + " in lublist")(tp.typeConstructor) + case _ => tp + } + // pretypes is a tail-recursion-preserving accumulator. + @annotation.tailrec def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = { + lubListDepth += 1 + + if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) pretypes.reverse + else if (tsBts.tail.isEmpty) pretypes.reverse ++ tsBts.head + else { + // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts. 
+ // Invariant: all symbols "under" (closer to the first row) the frontier + // are smaller (according to _.isLess) than the ones "on and beyond" the frontier + val ts0 = tsBts map (_.head) + + // Is the frontier made up of types with the same symbol? + val isUniformFrontier = (ts0: @unchecked) match { + case t :: ts => ts forall (_.typeSymbol == t.typeSymbol) + } + + // Produce a single type for this frontier by merging the prefixes and arguments of those + // typerefs that share the same symbol: that symbol is the current maximal symbol for which + // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before + // merging, strip targs that refer to bound tparams (when we're computing the lub of type + // constructors.) Also filter out all types that are a subtype of some other type. + if (isUniformFrontier) { + val fbounds = findRecursiveBounds(ts0) map (_._2) + val tcLubList = typeConstructorLubList(ts0) + def isRecursive(tp: Type) = tp.typeSymbol.typeParams exists fbounds.contains + + val ts1 = ts0 map { t => + if (isRecursive(t)) { + tcLubList map (t baseType _.typeSymbol) find (t => !isRecursive(t)) match { + case Some(tp) => logResult(s"Breaking recursion in lublist, substituting weaker type.\n Was: $t\n Now")(tp) + case _ => t + } + } + else t + } + val tails = tsBts map (_.tail) + mergePrefixAndArgs(elimSub(ts1, depth) map elimHigherOrderTypeParam, Covariant, depth) match { + case Some(tp) => loop(tp :: pretypes, tails) + case _ => loop(pretypes, tails) + } + } + else { + // frontier is not uniform yet, move it beyond the current minimal symbol; + // lather, rinSe, repeat + val sym = minSym(ts0) + val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts) + if (printLubs) { + val str = (newtps.zipWithIndex map { case (tps, idx) => + tps.map(" " + _ + "\n").mkString(" (" + idx + ")\n", "", "\n") + }).mkString("") + + println("Frontier(\n" + str + ")") + printLubMatrix((ts zip tsBts).toMap, lubListDepth) + } + + loop(pretypes, newtps) + } + } + } + + val initialBTSes = ts map (_.baseTypeSeq.toList) + if (printLubs) + printLubMatrix((ts zip initialBTSes).toMap, depth) + + loop(Nil, initialBTSes) + } + + /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */ + private def minSym(tps: List[Type]): Symbol = + (tps.head.typeSymbol /: tps.tail) { + (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1 + } + + /** A minimal type list which has a given list of types as its base type sequence */ + def spanningTypes(ts: List[Type]): List[Type] = ts match { + case List() => List() + case first :: rest => + first :: spanningTypes( + rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol))) + } + + /** Eliminate from list of types all elements which are a supertype + * of some other element of the list. */ + private def elimSuper(ts: List[Type]): List[Type] = ts match { + case List() => List() + case List(t) => List(t) + case t :: ts1 => + val rest = elimSuper(ts1 filter (t1 => !(t <:< t1))) + if (rest exists (t1 => t1 <:< t)) rest else t :: rest + } + + /** Eliminate from list of types all elements which are a subtype + * of some other element of the list. 
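`elimSuper` keeps only the minimal elements of its argument (anything that is a supertype of another element is dropped), and `elimSub` that follows is the dual, keeping only the maximal elements; `glb` and `lub` use them to prune their inputs. A standalone sketch of the same pruning over an explicit toy subtype relation (plain strings, not compiler `Type`s, and a simple filter rather than the order-dependent recursion used above):

{{{
// Toy subtyping: String <: AnyRef <: Any and Int <: AnyVal <: Any.
val widerThan = Map(
  "String" -> "AnyRef", "AnyRef" -> "Any",
  "Int"    -> "AnyVal", "AnyVal" -> "Any")
def isSub(a: String, b: String): Boolean =
  a == b || widerThan.get(a).exists(isSub(_, b))

// elimSuper-style: drop every element that is a proper supertype of another one.
def keepMinima(ts: List[String]): List[String] =
  ts.filterNot(t => ts.exists(u => u != t && isSub(u, t)))

// elimSub-style: drop every element that is a proper subtype of another one.
def keepMaxima(ts: List[String]): List[String] =
  ts.filterNot(t => ts.exists(u => u != t && isSub(t, u)))

assert(keepMinima(List("AnyRef", "String", "Int")) == List("String", "Int")) // what glb starts from
assert(keepMaxima(List("AnyRef", "String", "Int")) == List("AnyRef", "Int")) // what lub starts from
}}}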
*/ + private def elimSub(ts: List[Type], depth: Int): List[Type] = { + def elimSub0(ts: List[Type]): List[Type] = ts match { + case List() => List() + case List(t) => List(t) + case t :: ts1 => + val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth)))) + if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest + } + val ts0 = elimSub0(ts) + if (ts0.isEmpty || ts0.tail.isEmpty) ts0 + else { + val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden)) + if (ts1 eq ts0) ts0 + else elimSub(ts1, depth) + } + } + + private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = { + val quantified = ts flatMap { + case ExistentialType(qs, _) => qs + case t => List() + } + def stripType(tp: Type): Type = tp match { + case ExistentialType(_, res) => + res + case tv@TypeVar(_, constr) => + if (tv.instValid) stripType(constr.inst) + else if (tv.untouchable) tv + else abort("trying to do lub/glb of typevar "+tp) + case t => t + } + val strippedTypes = ts mapConserve stripType + (strippedTypes, quantified) + } + + def weakLub(ts: List[Type]) = + if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true) + else if (ts exists typeHasAnnotations) + (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true) + else (lub(ts), false) + + def numericLub(ts: List[Type]) = + ts reduceLeft ((t1, t2) => + if (isNumericSubType(t1, t2)) t2 + else if (isNumericSubType(t2, t1)) t1 + else IntClass.tpe) + + private val lubResults = new mutable.HashMap[(Int, List[Type]), Type] + private val glbResults = new mutable.HashMap[(Int, List[Type]), Type] + + /** Given a list of types, finds all the base classes they have in + * common, then returns a list of type constructors derived directly + * from the symbols (so any more specific type information is ignored.) + * The list is filtered such that every type constructor in the list + * expects the same number of type arguments, which is chosen based + * on the deepest class among the common baseclasses. + */ + def typeConstructorLubList(ts: List[Type]): List[Type] = { + val bcs = ts.flatMap(_.baseClasses).distinct sortWith (_ isLess _) + val tcons = bcs filter (clazz => ts forall (_.typeSymbol isSubClass clazz)) + + tcons map (_.typeConstructor) match { + case Nil => Nil + case t :: ts => t :: ts.filter(_.typeParams.size == t.typeParams.size) + } + } + + def lub(ts: List[Type]): Type = ts match { + case List() => NothingClass.tpe + case List(t) => t + case _ => + if (Statistics.canEnable) Statistics.incCounter(lubCount) + val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null + try { + val res = lub(ts, lubDepth(ts)) + // If the number of unapplied type parameters in all incoming + // types is consistent, and the lub does not match that, return + // the type constructor of the calculated lub instead. This + // is because lubbing type constructors tends to result in types + // which have been applied to dummies or Nothing. 
+ ts.map(_.typeParams.size).distinct match { + case x :: Nil if res.typeParams.size != x => + logResult(s"Stripping type args from lub because $res is not consistent with $ts")(res.typeConstructor) + case _ => + res + } + } + finally { + lubResults.clear() + glbResults.clear() + if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + } + } + + /** The least upper bound wrt <:< of a list of types */ + protected[internal] def lub(ts: List[Type], depth: Int): Type = { + def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match { + case List() => NothingClass.tpe + case List(t) => t + case ts @ PolyType(tparams, _) :: _ => + val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) => + tparam.cloneSymbol.setInfo(glb(bounds, depth))) + PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1))) + case ts @ (mt @ MethodType(params, _)) :: rest => + MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes))) + case ts @ NullaryMethodType(_) :: rest => + NullaryMethodType(lub0(matchingRestypes(ts, Nil))) + case ts @ TypeBounds(_, _) :: rest => + TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth)) + case ts @ AnnotatedType(annots, tpe, _) :: rest => + annotationsLub(lub0(ts map (_.withoutAnnotations)), ts) + case ts => + lubResults get (depth, ts) match { + case Some(lubType) => + lubType + case None => + lubResults((depth, ts)) = AnyClass.tpe + val res = if (depth < 0) AnyClass.tpe else lub1(ts) + lubResults((depth, ts)) = res + res + } + } + def lub1(ts0: List[Type]): Type = { + val (ts, tparams) = stripExistentialsAndTypeVars(ts0) + val lubBaseTypes: List[Type] = lubList(ts, depth) + val lubParents = spanningTypes(lubBaseTypes) + val lubOwner = commonOwner(ts) + val lubBase = intersectionType(lubParents, lubOwner) + val lubType = + if (phase.erasedTypes || depth == 0 ) lubBase + else { + val lubRefined = refinedType(lubParents, lubOwner) + val lubThisType = lubRefined.typeSymbol.thisType + val narrowts = ts map (_.narrow) + def excludeFromLub(sym: Symbol) = ( + sym.isClass + || sym.isConstructor + || !sym.isPublic + || isGetClass(sym) + || sym.isFinal + || narrowts.exists(t => !refines(t, sym)) + ) + def lubsym(proto: Symbol): Symbol = { + val prototp = lubThisType.memberInfo(proto) + val syms = narrowts map (t => + t.nonPrivateMember(proto.name).suchThat(sym => + sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t))) + + if (syms contains NoSymbol) NoSymbol + else { + val symtypes = + map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType)) + if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class + proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth))) + else if (symtypes.tail forall (symtypes.head =:= _)) + proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head) + else { + def lubBounds(bnds: List[TypeBounds]): TypeBounds = + TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth))) + lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos) + .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds))) + } + } + } + def refines(tp: Type, sym: Symbol): Boolean = { + val syms = tp.nonPrivateMember(sym.name).alternatives + !syms.isEmpty && (syms forall (alt => + // todo alt != sym is strictly speaking not correct, but without it we lose + // efficiency. 
+ alt != sym && !specializesSym(lubThisType, sym, tp, alt, depth))) + } + // add a refinement symbol for all non-class members of lubBase + // which are refined by every type in ts. + for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) { + try lubsym(sym) andAlso (addMember(lubThisType, lubRefined, _, depth)) + catch { + case ex: NoCommonType => + } + } + if (lubRefined.decls.isEmpty) lubBase + else if (!verifyLubs) lubRefined + else { + // Verify that every given type conforms to the calculated lub. + // In theory this should not be necessary, but higher-order type + // parameters are not handled correctly. + val ok = ts forall { t => + isSubType(t, lubRefined, depth) || { + if (settings.debug.value || printLubs) { + Console.println( + "Malformed lub: " + lubRefined + "\n" + + "Argument " + t + " does not conform. Falling back to " + lubBase + ) + } + false + } + } + // If not, fall back on the more conservative calculation. + if (ok) lubRefined + else lubBase + } + } + // dropIllegalStarTypes is a localized fix for SI-6897. We should probably + // integrate that transformation at a lower level in master, but lubs are + // the likely and maybe only spot they escape, so fixing here for 2.10.1. + existentialAbstraction(tparams, dropIllegalStarTypes(lubType)) + } + if (printLubs) { + println(indent + "lub of " + ts + " at depth "+depth)//debug + indent = indent + " " + assert(indent.length <= 100) + } + if (Statistics.canEnable) Statistics.incCounter(nestedLubCount) + val res = lub0(ts) + if (printLubs) { + indent = indent stripSuffix " " + println(indent + "lub of " + ts + " is " + res)//debug + } + if (ts forall typeIsNotNull) res.notNull else res + } + + val GlbFailure = new Throwable + + /** A global counter for glb calls in the `specializes` query connected to the `addMembers` + * call in `glb`. There's a possible infinite recursion when `specializes` calls + * memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb. + * The counter breaks this recursion after two calls. + * If the recursion is broken, no member is added to the glb. + */ + private var globalGlbDepth = 0 + private final val globalGlbLimit = 2 + + /** The greatest lower bound of a list of types (as determined by `<:<`). */ + def glb(ts: List[Type]): Type = elimSuper(ts) match { + case List() => AnyClass.tpe + case List(t) => t + case ts0 => + if (Statistics.canEnable) Statistics.incCounter(lubCount) + val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null + try { + glbNorm(ts0, lubDepth(ts0)) + } finally { + lubResults.clear() + glbResults.clear() + if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + } + } + + protected[internal] def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match { + case List() => AnyClass.tpe + case List(t) => t + case ts0 => glbNorm(ts0, depth) + } + + /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized + * with regard to `elimSuper`. 
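For orientation, the user-visible effect of `lub`, `numericLub` and friends is the type inferred for expressions whose branches disagree; a few plain-Scala data points (illustrative only, the comments state the expected inferred types):

{{{
def flip = scala.util.Random.nextBoolean()

val a = if (flip) Some(1) else None   // inferred Option[Int]: lub(Some[Int], None.type)
val b = if (flip) 1 else 2L           // inferred Long:        numericLub(Int, Long)
val c = if (flip) 1 else "one"        // inferred Any:         lub(Int, String)
}}}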
*/ + protected def glbNorm(ts: List[Type], depth: Int): Type = { + def glb0(ts0: List[Type]): Type = ts0 match { + case List() => AnyClass.tpe + case List(t) => t + case ts @ PolyType(tparams, _) :: _ => + val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) => + tparam.cloneSymbol.setInfo(lub(bounds, depth))) + PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth)) + case ts @ (mt @ MethodType(params, _)) :: rest => + MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth)) + case ts @ NullaryMethodType(_) :: rest => + NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth)) + case ts @ TypeBounds(_, _) :: rest => + TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth)) + case ts => + glbResults get (depth, ts) match { + case Some(glbType) => + glbType + case _ => + glbResults((depth, ts)) = NothingClass.tpe + val res = if (depth < 0) NothingClass.tpe else glb1(ts) + glbResults((depth, ts)) = res + res + } + } + def glb1(ts0: List[Type]): Type = { + try { + val (ts, tparams) = stripExistentialsAndTypeVars(ts0) + val glbOwner = commonOwner(ts) + def refinedToParents(t: Type): List[Type] = t match { + case RefinedType(ps, _) => ps flatMap refinedToParents + case _ => List(t) + } + def refinedToDecls(t: Type): List[Scope] = t match { + case RefinedType(ps, decls) => + val dss = ps flatMap refinedToDecls + if (decls.isEmpty) dss else decls :: dss + case _ => List() + } + val ts1 = ts flatMap refinedToParents + val glbBase = intersectionType(ts1, glbOwner) + val glbType = + if (phase.erasedTypes || depth == 0) glbBase + else { + val glbRefined = refinedType(ts1, glbOwner) + val glbThisType = glbRefined.typeSymbol.thisType + def glbsym(proto: Symbol): Symbol = { + val prototp = glbThisType.memberInfo(proto) + val syms = for (t <- ts; + alt <- (t.nonPrivateMember(proto.name).alternatives) + if glbThisType.memberInfo(alt) matches prototp + ) yield alt + val symtypes = syms map glbThisType.memberInfo + assert(!symtypes.isEmpty) + proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted( + if (proto.isTerm) glb(symtypes, decr(depth)) + else { + def isTypeBound(tp: Type) = tp match { + case TypeBounds(_, _) => true + case _ => false + } + def glbBounds(bnds: List[Type]): TypeBounds = { + val lo = lub(bnds map (_.bounds.lo), decr(depth)) + val hi = glb(bnds map (_.bounds.hi), decr(depth)) + if (lo <:< hi) TypeBounds(lo, hi) + else throw GlbFailure + } + val symbounds = symtypes filter isTypeBound + var result: Type = + if (symbounds.isEmpty) + TypeBounds.empty + else glbBounds(symbounds) + for (t <- symtypes if !isTypeBound(t)) + if (result.bounds containsType t) result = t + else throw GlbFailure + result + }) + } + if (globalGlbDepth < globalGlbLimit) + try { + globalGlbDepth += 1 + val dss = ts flatMap refinedToDecls + for (ds <- dss; sym <- ds.iterator) + if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth)) + try { + addMember(glbThisType, glbRefined, glbsym(sym), depth) + } catch { + case ex: NoCommonType => + } + } finally { + globalGlbDepth -= 1 + } + if (glbRefined.decls.isEmpty) glbBase else glbRefined + } + existentialAbstraction(tparams, glbType) + } catch { + case GlbFailure => + if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe + else NothingClass.tpe + } + } + // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG + + if (Statistics.canEnable) Statistics.incCounter(nestedLubCount) + val res = 
glb0(ts) + + // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG + + if (ts exists typeIsNotNull) res.notNull else res + } + + /** All types in list must be polytypes with type parameter lists of + * same length as tparams. + * Returns list of list of bounds infos, where corresponding type + * parameters are renamed to tparams. + */ + private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = { + def getBounds(tp: Type): List[Type] = tp match { + case PolyType(tparams1, _) if sameLength(tparams1, tparams) => + tparams1 map (tparam => tparam.info.substSym(tparams1, tparams)) + case tp => + if (tp ne tp.normalize) getBounds(tp.normalize) + else throw new NoCommonType(tps) + } + tps map getBounds + } + + /** All types in list must be polytypes with type parameter lists of + * same length as tparams. + * Returns list of instance types, where corresponding type + * parameters are renamed to tparams. + */ + private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = { + def transformResultType(tp: Type): Type = tp match { + case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) => + restpe.substSym(tparams1, tparams) + case tp => + if (tp ne tp.normalize) transformResultType(tp.normalize) + else throw new NoCommonType(tps) + } + tps map transformResultType + } + + /** All types in list must be method types with equal parameter types. + * Returns list of their result types. + */ + private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] = + tps map { + case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) => + res + case NullaryMethodType(res) if pts.isEmpty => + res + case _ => + throw new NoCommonType(tps) + } +} diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala new file mode 100644 index 0000000000..82321f61c2 --- /dev/null +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -0,0 +1,617 @@ +package scala.reflect +package internal +package tpe + +import scala.collection.{ mutable } +import Flags._ +import util.Statistics + +trait TypeComparers { + self: SymbolTable => + import definitions._ + import TypesStats._ + + private final val LogPendingSubTypesThreshold = DefaultLogThreshhold + + private val pendingSubTypes = new mutable.HashSet[SubTypePair] + + class SubTypePair(val tp1: Type, val tp2: Type) { + override def hashCode = tp1.hashCode * 41 + tp2.hashCode + override def equals(other: Any) = (this eq other.asInstanceOf[AnyRef]) || (other match { + // suspend TypeVars in types compared by =:=, + // since we don't want to mutate them simply to check whether a subtype test is pending + // in addition to making subtyping "more correct" for type vars, + // it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion) + // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold) + case stp: SubTypePair => + val tvars = List(tp1, stp.tp1, tp2, stp.tp2) flatMap (t => if (t.isGround) Nil else typeVarsInType(t)) + suspendingTypeVars(tvars)(tp1 =:= stp.tp1 && tp2 =:= stp.tp2) + case _ => + false + }) + override def toString = tp1+" <: + tp2 match { + case TypeRef(pre2, sym2, _) => sym1 != sym2 || isDifferentType(pre1, pre2) + case _ => true + } + case _ => true + } + + /** Do `tp1` and `tp2` 
denote equivalent types? */ + def isSameType(tp1: Type, tp2: Type): Boolean = try { + if (Statistics.canEnable) Statistics.incCounter(sametypeCount) + subsametypeRecursions += 1 + //OPT cutdown on Function0 allocation + //was: + // undoLog undoUnless { + // isSameType1(tp1, tp2) + // } + + undoLog.lock() + try { + val before = undoLog.log + var result = false + try { + result = isSameType1(tp1, tp2) + } + finally if (!result) undoLog.undoTo(before) + result + } + finally undoLog.unlock() + } + finally { + subsametypeRecursions -= 1 + // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866) + // it doesn't help to keep separate recursion counts for the three methods that now share it + // if (subsametypeRecursions == 0) undoLog.clear() + } + + private def isSameType1(tp1: Type, tp2: Type): Boolean = { + if ((tp1 eq tp2) || + (tp1 eq ErrorType) || (tp1 eq WildcardType) || + (tp2 eq ErrorType) || (tp2 eq WildcardType)) + true + else if ((tp1 eq NoType) || (tp2 eq NoType)) + false + else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec + tp2.typeSymbol.isPackageClass + else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec + tp1.typeSymbol.isPackageClass + else { + isSameType2(tp1, tp2) || { + val tp1n = normalizePlus(tp1) + val tp2n = normalizePlus(tp2) + ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n) + } + } + } + + def isSameType2(tp1: Type, tp2: Type): Boolean = { + tp1 match { + case tr1: TypeRef => + tp2 match { + case tr2: TypeRef => + return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) && + ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) || + isSameTypes(tr1.args, tr2.args))) || + ((tr1.pre, tr2.pre) match { + case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2) + case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1) + case _ => false + }) + case _: SingleType => + return isSameType2(tp2, tp1) // put singleton type on the left, caught below + case _ => + } + case tt1: ThisType => + tp2 match { + case tt2: ThisType => + if (tt1.sym == tt2.sym) return true + case _ => + } + case st1: SingleType => + tp2 match { + case st2: SingleType => + if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true + case TypeRef(pre2, sym2, Nil) => + if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true + case _ => + } + case ct1: ConstantType => + tp2 match { + case ct2: ConstantType => + return (ct1.value == ct2.value) + case _ => + } + case rt1: RefinedType => + tp2 match { + case rt2: RefinedType => // + def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall { + sym2 => + var e1 = s1.lookupEntry(sym2.name) + (e1 ne null) && { + val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner) + var isEqual = false + while (!isEqual && (e1 ne null)) { + isEqual = e1.sym.info =:= substSym + e1 = s1.lookupNextEntry(e1) + } + isEqual + } + } + //Console.println("is same? 
" + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG + return isSameTypes(rt1.parents, rt2.parents) && { + val decls1 = rt1.decls + val decls2 = rt2.decls + isSubScope(decls1, decls2) && isSubScope(decls2, decls1) + } + case _ => + } + case mt1: MethodType => + tp2 match { + case mt2: MethodType => + return isSameTypes(mt1.paramTypes, mt2.paramTypes) && + mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) && + mt1.isImplicit == mt2.isImplicit + // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe + case _ => + } + case NullaryMethodType(restpe1) => + tp2 match { + // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType + case NullaryMethodType(restpe2) => + return restpe1 =:= restpe2 + case _ => + } + case PolyType(tparams1, res1) => + tp2 match { + case PolyType(tparams2, res2) => + // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length))) + // @M looks like it might suffer from same problem as #2210 + return ( + (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate + (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && + res1 =:= res2.substSym(tparams2, tparams1) + ) + case _ => + } + case ExistentialType(tparams1, res1) => + tp2 match { + case ExistentialType(tparams2, res2) => + // @M looks like it might suffer from same problem as #2210 + return ( + // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956 + sameLength(tparams1, tparams2) && + (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && + res1 =:= res2.substSym(tparams2, tparams1) + ) + case _ => + } + case TypeBounds(lo1, hi1) => + tp2 match { + case TypeBounds(lo2, hi2) => + return lo1 =:= lo2 && hi1 =:= hi2 + case _ => + } + case BoundedWildcardType(bounds) => + return bounds containsType tp2 + case _ => + } + tp2 match { + case BoundedWildcardType(bounds) => + return bounds containsType tp1 + case _ => + } + tp1 match { + case tv @ TypeVar(_,_) => + return tv.registerTypeEquality(tp2, typeVarLHS = true) + case _ => + } + tp2 match { + case tv @ TypeVar(_,_) => + return tv.registerTypeEquality(tp1, typeVarLHS = false) + case _ => + } + tp1 match { + case _: AnnotatedType => + return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations + case _ => + } + tp2 match { + case _: AnnotatedType => + return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations + case _ => + } + tp1 match { + case _: SingletonType => + tp2 match { + case _: SingletonType => + def chaseDealiasedUnderlying(tp: Type): Type = { + var origin = tp + var next = origin.underlying.dealias + while (next.isInstanceOf[SingletonType]) { + assert(origin ne next, origin) + origin = next + next = origin.underlying.dealias + } + origin + } + val origin1 = chaseDealiasedUnderlying(tp1) + val origin2 = chaseDealiasedUnderlying(tp2) + ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2) + case _ => + false + } + case _ => + false + } + } + + def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth) + + def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try { + subsametypeRecursions += 1 + + //OPT cutdown on Function0 allocation + //was: + // undoLog undoUnless 
{ // if subtype test fails, it should not affect constraints on typevars + // if (subsametypeRecursions >= LogPendingSubTypesThreshold) { + // val p = new SubTypePair(tp1, tp2) + // if (pendingSubTypes(p)) + // false + // else + // try { + // pendingSubTypes += p + // isSubType2(tp1, tp2, depth) + // } finally { + // pendingSubTypes -= p + // } + // } else { + // isSubType2(tp1, tp2, depth) + // } + // } + + undoLog.lock() + try { + val before = undoLog.log + var result = false + + try result = { // if subtype test fails, it should not affect constraints on typevars + if (subsametypeRecursions >= LogPendingSubTypesThreshold) { + val p = new SubTypePair(tp1, tp2) + if (pendingSubTypes(p)) + false + else + try { + pendingSubTypes += p + isSubType2(tp1, tp2, depth) + } finally { + pendingSubTypes -= p + } + } else { + isSubType2(tp1, tp2, depth) + } + } finally if (!result) undoLog.undoTo(before) + + result + } finally undoLog.unlock() + } finally { + subsametypeRecursions -= 1 + // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866) + // it doesn't help to keep separate recursion counts for the three methods that now share it + // if (subsametypeRecursions == 0) undoLog.clear() + } + + private def isPolySubType(tp1: PolyType, tp2: PolyType): Boolean = { + val PolyType(tparams1, res1) = tp1 + val PolyType(tparams2, res2) = tp2 + + sameLength(tparams1, tparams2) && { + // fast-path: polymorphic method type -- type params cannot be captured + val isMethod = tparams1.head.owner.isMethod + //@M for an example of why we need to generate fresh symbols otherwise, see neg/tcpoly_ticket2101.scala + val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1) + def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes) + def sub2(tp: Type) = tp.substSym(tparams2, substitutes) + def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info) + + (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2)) + } + } + + // @assume tp1.isHigherKinded || tp2.isHigherKinded + def isHKSubType(tp1: Type, tp2: Type, depth: Int): Boolean = { + def isSub(ntp1: Type, ntp2: Type) = (ntp1.withoutAnnotations, ntp2.withoutAnnotations) match { + case (TypeRef(_, AnyClass, _), _) => false // avoid some warnings when Nothing/Any are on the other side + case (_, TypeRef(_, NothingClass, _)) => false + case (pt1: PolyType, pt2: PolyType) => isPolySubType(pt1, pt2) // @assume both .isHigherKinded (both normalized to PolyType) + case (_: PolyType, MethodType(ps, _)) if ps exists (_.tpe.isWildcard) => false // don't warn on HasMethodMatching on right hand side + case _ => // @assume !(both .isHigherKinded) thus cannot be subtypes + def tp_s(tp: Type): String = f"$tp%-20s ${util.shortClassOfInstance(tp)}%s" + devWarning(s"HK subtype check on $tp1 and $tp2, but both don't normalize to polytypes:\n tp1=${tp_s(ntp1)}\n tp2=${tp_s(ntp2)}") + false + } + + ( tp1.typeSymbol == NothingClass // @M Nothing is subtype of every well-kinded type + || tp2.typeSymbol == AnyClass // @M Any is supertype of every well-kinded type (@PP: is it? What about continuations plugin?) + || isSub(tp1.normalize, tp2.normalize) && annotationsConform(tp1, tp2) // @M! normalize reduces higher-kinded case to PolyType's + ) + } + + /** Does type `tp1` conform to `tp2`? 
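The `pendingSubTypes` guard above is the standard trick for keeping a recursive, possibly cyclic check from diverging: once a pair is under evaluation, a nested query for the same pair answers `false` instead of recursing again. A minimal standalone sketch of that pattern over a hypothetical cyclic graph (not the compiler's types):

{{{
import scala.collection.mutable

// A reachability relation that would loop forever on the A <-> B cycle without the guard.
val edges   = Map("A" -> List("B"), "B" -> List("A", "C"), "C" -> Nil)
val pending = mutable.Set.empty[(String, String)]

def reaches(a: String, c: String): Boolean =
  a == c || {
    val p = (a, c)
    if (pending(p)) false               // the pair is already being decided: cut the cycle
    else {
      pending += p
      try edges.getOrElse(a, Nil).exists(reaches(_, c))
      finally pending -= p
    }
  }

assert(reaches("A", "C"))
assert(!reaches("C", "A"))
}}}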
*/ + private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = { + if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true + if ((tp1 eq NoType) || (tp2 eq NoType)) return false + if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec + if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass + if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2 + if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType(tp1, tp2, depth) + + /** First try, on the right: + * - unwrap Annotated types, BoundedWildcardTypes, + * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard + * - handle common cases for first-kind TypeRefs on both sides as a fast path. + */ + def firstTry = tp2 match { + // fast path: two typerefs, none of them HK + case tr2: TypeRef => + tp1 match { + case tr1: TypeRef => + val sym1 = tr1.sym + val sym2 = tr2.sym + val pre1 = tr1.pre + val pre2 = tr2.pre + (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth) + else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass && + (isUnifiable(pre1, pre2) || + isSameSpecializedSkolem(sym1, sym2, pre1, pre2) || + sym2.isAbstractType && isSubPre(pre1, pre2, sym2)))) && + isSubArgs(tr1.args, tr2.args, sym1.typeParams, depth)) + || + sym2.isClass && { + val base = tr1 baseType sym2 + (base ne tr1) && isSubType(base, tr2, depth) + } + || + thirdTryRef(tr1, tr2)) + case _ => + secondTry + } + case AnnotatedType(_, _, _) => + isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) && + annotationsConform(tp1, tp2) + case BoundedWildcardType(bounds) => + isSubType(tp1, bounds.hi, depth) + case tv2 @ TypeVar(_, constr2) => + tp1 match { + case AnnotatedType(_, _, _) | BoundedWildcardType(_) => + secondTry + case _ => + tv2.registerBound(tp1, isLowerBound = true) + } + case _ => + secondTry + } + + /** Second try, on the left: + * - unwrap AnnotatedTypes, BoundedWildcardTypes, + * - bind typevars, + * - handle existential types by skolemization. + */ + def secondTry = tp1 match { + case AnnotatedType(_, _, _) => + isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) && + annotationsConform(tp1, tp2) + case BoundedWildcardType(bounds) => + isSubType(tp1.bounds.lo, tp2, depth) + case tv @ TypeVar(_,_) => + tv.registerBound(tp2, isLowerBound = false) + case ExistentialType(_, _) => + try { + skolemizationLevel += 1 + isSubType(tp1.skolemizeExistential, tp2, depth) + } finally { + skolemizationLevel -= 1 + } + case _ => + thirdTry + } + + def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = { + val sym2 = tp2.sym + sym2 match { + case NotNullClass => tp1.isNotNull + case SingletonClass => tp1.isStable || fourthTry + case _: ClassSymbol => + if (isRawType(tp2)) + isSubType(tp1, rawToExistential(tp2), depth) + else if (sym2.name == tpnme.REFINE_CLASS_NAME) + isSubType(tp1, sym2.info, depth) + else + fourthTry + case _: TypeSymbol => + if (sym2 hasFlag DEFERRED) { + val tp2a = tp2.bounds.lo + isDifferentTypeConstructor(tp2, tp2a) && + isSubType(tp1, tp2a, depth) || + fourthTry + } else { + isSubType(tp1.normalize, tp2.normalize, depth) + } + case _ => + fourthTry + } + } + + /** Third try, on the right: + * - decompose refined types. + * - handle typerefs, existentials, and notnull types. 
+ * - handle left+right method types, polytypes, typebounds + */ + def thirdTry = tp2 match { + case tr2: TypeRef => + thirdTryRef(tp1, tr2) + case rt2: RefinedType => + (rt2.parents forall (isSubType(tp1, _, depth))) && + (rt2.decls forall (specializesSym(tp1, _, depth))) + case et2: ExistentialType => + et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry + case nn2: NotNullType => + tp1.isNotNull && isSubType(tp1, nn2.underlying, depth) + case mt2: MethodType => + tp1 match { + case mt1 @ MethodType(params1, res1) => + val params2 = mt2.params + val res2 = mt2.resultType + (sameLength(params1, params2) && + mt1.isImplicit == mt2.isImplicit && + matchingParams(params1, params2, mt1.isJava, mt2.isJava) && + isSubType(res1.substSym(params1, params2), res2, depth)) + // TODO: if mt1.params.isEmpty, consider NullaryMethodType? + case _ => + false + } + case pt2 @ NullaryMethodType(_) => + tp1 match { + // TODO: consider MethodType mt for which mt.params.isEmpty?? + case pt1 @ NullaryMethodType(_) => + isSubType(pt1.resultType, pt2.resultType, depth) + case _ => + false + } + case TypeBounds(lo2, hi2) => + tp1 match { + case TypeBounds(lo1, hi1) => + isSubType(lo2, lo1, depth) && isSubType(hi1, hi2, depth) + case _ => + false + } + case _ => + fourthTry + } + + /** Fourth try, on the left: + * - handle typerefs, refined types, notnull and singleton types. + */ + def fourthTry = tp1 match { + case tr1 @ TypeRef(pre1, sym1, _) => + sym1 match { + case NothingClass => true + case NullClass => + tp2 match { + case TypeRef(_, sym2, _) => + containsNull(sym2) + case _ => + isSingleType(tp2) && isSubType(tp1, tp2.widen, depth) + } + case _: ClassSymbol => + if (isRawType(tp1)) + isSubType(rawToExistential(tp1), tp2, depth) + else if (sym1.isModuleClass) tp2 match { + case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2) + case _ => false + } + else if (sym1.isRefinementClass) + isSubType(sym1.info, tp2, depth) + else false + + case _: TypeSymbol => + if (sym1 hasFlag DEFERRED) { + val tp1a = tp1.bounds.hi + isDifferentTypeConstructor(tp1, tp1a) && isSubType(tp1a, tp2, depth) + } else { + isSubType(tp1.normalize, tp2.normalize, depth) + } + case _ => + false + } + case RefinedType(parents1, _) => + parents1 exists (isSubType(_, tp2, depth)) + case _: SingletonType | _: NotNullType => + isSubType(tp1.underlying, tp2, depth) + case _ => + false + } + + firstTry + } + + + def isWeakSubType(tp1: Type, tp2: Type) = + tp1.deconst.normalize match { + case TypeRef(_, sym1, _) if isNumericValueClass(sym1) => + tp2.deconst.normalize match { + case TypeRef(_, sym2, _) if isNumericValueClass(sym2) => + isNumericSubClass(sym1, sym2) + case tv2 @ TypeVar(_, _) => + tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true) + case _ => + isSubType(tp1, tp2) + } + case tv1 @ TypeVar(_, _) => + tp2.deconst.normalize match { + case TypeRef(_, sym2, _) if isNumericValueClass(sym2) => + tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true) + case _ => + isSubType(tp1, tp2) + } + case _ => + isSubType(tp1, tp2) + } + + /** The isNumericValueType tests appear redundant, but without them + * test/continuations-neg/function3.scala goes into an infinite loop. + * (Even if the calls are to typeSymbolDirect.) 
+ */ + def isNumericSubType(tp1: Type, tp2: Type): Boolean = ( + isNumericValueType(tp1) + && isNumericValueType(tp2) + && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol) + ) + +} diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala new file mode 100644 index 0000000000..a002b01f70 --- /dev/null +++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala @@ -0,0 +1,282 @@ +package scala.reflect +package internal +package tpe + +import scala.collection.{ generic } +import generic.Clearable + + +private[internal] trait TypeConstraints { + self: SymbolTable => + import definitions._ + + /** A log of type variable with their original constraints. Used in order + * to undo constraints in the case of isSubType/isSameType failure. + */ + lazy val undoLog = newUndoLog + + protected def newUndoLog = new UndoLog + + class UndoLog extends Clearable { + private type UndoPairs = List[(TypeVar, TypeConstraint)] + //OPT this method is public so we can do `manual inlining` + var log: UndoPairs = List() + + /* + * These two methods provide explicit locking mechanism that is overridden in SynchronizedUndoLog. + * + * The idea behind explicit locking mechanism is that all public methods that access mutable state + * will have to obtain the lock for their entire execution so both reads and writes can be kept in + * right order. Originally, that was achieved by overriding those public methods in + * `SynchronizedUndoLog` which was fine but expensive. The reason is that those public methods take + * thunk as argument and if we keep them non-final there's no way to make them inlined so thunks + * can go away. + * + * By using explicit locking we can achieve inlining. + * + * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting into hot + * places implementation of `undo` or `undoUnless`). This should be changed back to protected + * once inliner is fixed. + */ + def lock(): Unit = () + def unlock(): Unit = () + + // register with the auto-clearing cache manager + perRunCaches.recordCache(this) + + /** Undo all changes to constraints to type variables upto `limit`. */ + //OPT this method is public so we can do `manual inlining` + def undoTo(limit: UndoPairs) { + assertCorrectThread() + while ((log ne limit) && log.nonEmpty) { + val (tv, constr) = log.head + tv.constr = constr + log = log.tail + } + } + + /** No sync necessary, because record should only + * be called from within an undo or undoUnless block, + * which is already synchronized. + */ + private[reflect] def record(tv: TypeVar) = { + log ::= ((tv, tv.constr.cloneInternal)) + } + + def clear() { + lock() + try { + if (settings.debug.value) + self.log("Clearing " + log.size + " entries from the undoLog.") + log = Nil + } finally unlock() + } + + // `block` should not affect constraints on typevars + def undo[T](block: => T): T = { + lock() + try { + val before = log + + try block + finally undoTo(before) + } finally unlock() + } + } + + /** @PP: Unable to see why these apparently constant types should need vals + * in every TypeConstraint, I lifted them out. + */ + private lazy val numericLoBound = IntClass.tpe + private lazy val numericHiBound = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass) + + /** A class expressing upper and lower bounds constraints of type variables, + * as well as their instantiations. 
+ */ + class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) { + def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType) + def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi)) + def this() = this(List(), List()) + + /* Syncnote: Type constraints are assumed to be used from only one + * thread. They are not exposed in api.Types and are used only locally + * in operations that are exposed from types. Hence, no syncing of any + * variables should be ncessesary. + */ + + /** Guard these lists against AnyClass and NothingClass appearing, + * else loBounds.isEmpty will have different results for an empty + * constraint and one with Nothing as a lower bound. [Actually + * guarding addLoBound/addHiBound somehow broke raw types so it + * only guards against being created with them.] + */ + private var lobounds = lo0 filterNot typeIsNothing + private var hibounds = hi0 filterNot typeIsAny + private var numlo = numlo0 + private var numhi = numhi0 + private var avoidWidening = avoidWidening0 + + def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds + def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds + def avoidWiden: Boolean = avoidWidening + + def addLoBound(tp: Type, isNumericBound: Boolean = false) { + // For some reason which is still a bit fuzzy, we must let Nothing through as + // a lower bound despite the fact that Nothing is always a lower bound. My current + // supposition is that the side-effecting type constraint accumulation mechanism + // depends on these subtype tests being performed to make forward progress when + // there are mutally recursive type vars. + // See pos/t6367 and pos/t6499 for the competing test cases. + val mustConsider = tp.typeSymbol match { + case NothingClass => true + case _ => !(lobounds contains tp) + } + if (mustConsider) { + if (isNumericBound && isNumericValueType(tp)) { + if (numlo == NoType || isNumericSubType(numlo, tp)) + numlo = tp + else if (!isNumericSubType(tp, numlo)) + numlo = numericLoBound + } + else lobounds ::= tp + } + } + + def checkWidening(tp: Type) { + if(tp.isStable) avoidWidening = true + else tp match { + case HasTypeMember(_, _) => avoidWidening = true + case _ => + } + } + + def addHiBound(tp: Type, isNumericBound: Boolean = false) { + // My current test case only demonstrates the need to let Nothing through as + // a lower bound, but I suspect the situation is symmetrical. + val mustConsider = tp.typeSymbol match { + case AnyClass => true + case _ => !(hibounds contains tp) + } + if (mustConsider) { + checkWidening(tp) + if (isNumericBound && isNumericValueType(tp)) { + if (numhi == NoType || isNumericSubType(tp, numhi)) + numhi = tp + else if (!isNumericSubType(numhi, tp)) + numhi = numericHiBound + } + else hibounds ::= tp + } + } + + def isWithinBounds(tp: Type): Boolean = + lobounds.forall(_ <:< tp) && + hibounds.forall(tp <:< _) && + (numlo == NoType || (numlo weak_<:< tp)) && + (numhi == NoType || (tp weak_<:< numhi)) + + var inst: Type = NoType // @M reduce visibility? 
+ + def instValid = (inst ne null) && (inst ne NoType) + + def cloneInternal = { + val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi, avoidWidening) + tc.inst = inst + tc + } + + override def toString = { + val boundsStr = { + val lo = loBounds filterNot typeIsNothing + val hi = hiBounds filterNot typeIsAny + val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")")) + val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")")) + + lostr ++ histr mkString ("[", " | ", "]") + } + if (inst eq NoType) boundsStr + else boundsStr + " _= " + inst.safeToString + } + } + + /** Solve constraint collected in types `tvars`. + * + * @param tvars All type variables to be instantiated. + * @param tparams The type parameters corresponding to `tvars` + * @param variances The variances of type parameters; need to reverse + * solution direction for all contravariant variables. + * @param upper When `true` search for max solution else min. + */ + def solve(tvars: List[TypeVar], tparams: List[Symbol], + variances: List[Variance], upper: Boolean): Boolean = + solve(tvars, tparams, variances, upper, AnyDepth) + + def solve(tvars: List[TypeVar], tparams: List[Symbol], + variances: List[Variance], upper: Boolean, depth: Int): Boolean = { + + def solveOne(tvar: TypeVar, tparam: Symbol, variance: Variance) { + if (tvar.constr.inst == NoType) { + val up = if (variance.isContravariant) !upper else upper + tvar.constr.inst = null + val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo + //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound)) + var cyclic = bound contains tparam + foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => { + val ok = (tparam2 != tparam) && ( + (bound contains tparam2) + || up && (tparam2.info.bounds.lo =:= tparam.tpeHK) + || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK) + ) + if (ok) { + if (tvar2.constr.inst eq null) cyclic = true + solveOne(tvar2, tparam2, variance2) + } + }) + if (!cyclic) { + if (up) { + if (bound.typeSymbol != AnyClass) { + log(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)") + tvar addHiBound bound.instantiateTypeParams(tparams, tvars) + } + for (tparam2 <- tparams) + tparam2.info.bounds.lo.dealias match { + case TypeRef(_, `tparam`, _) => + log(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)") + tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars) + case _ => + } + } else { + if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) { + log(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)") + tvar addLoBound bound.instantiateTypeParams(tparams, tvars) + } + for (tparam2 <- tparams) + tparam2.info.bounds.hi.dealias match { + case TypeRef(_, `tparam`, _) => + log(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)") + tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars) + case _ => + } + } + } + tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar + + //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))) + val newInst = ( + if (up) { + if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds) + } else { + if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds) + } + ) + log(s"$tvar setInst $newInst") + tvar setInst newInst + 
//Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG + } + } + + // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info))) + foreach3(tvars, tparams, variances)(solveOne) + tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst)) + } +} diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala new file mode 100644 index 0000000000..51363c0f82 --- /dev/null +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -0,0 +1,1144 @@ +package scala.reflect +package internal +package tpe + +import scala.collection.{ mutable, immutable } +import Flags._ +import scala.annotation.tailrec +import Variance._ + +private[internal] trait TypeMaps { + self: SymbolTable => + import definitions._ + + /** Normalize any type aliases within this type (@see Type#normalize). + * Note that this depends very much on the call to "normalize", not "dealias", + * so it is no longer carries the too-stealthy name "deAlias". + */ + object normalizeAliases extends TypeMap { + def apply(tp: Type): Type = tp match { + case TypeRef(_, sym, _) if sym.isAliasType => + def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp" + mapOver(logResult(msg)(tp.normalize)) + case _ => mapOver(tp) + } + } + + /** Remove any occurrence of type from this type and its parents */ + object dropSingletonType extends TypeMap { + def apply(tp: Type): Type = { + tp match { + case TypeRef(_, SingletonClass, _) => + AnyClass.tpe + case tp1 @ RefinedType(parents, decls) => + parents filter (_.typeSymbol != SingletonClass) match { + case Nil => AnyClass.tpe + case p :: Nil if decls.isEmpty => mapOver(p) + case ps => mapOver(copyRefinedType(tp1, ps, decls)) + } + case tp1 => + mapOver(tp1) + } + } + } + + /** Type with all top-level occurrences of abstract types replaced by their bounds */ + object abstractTypesToBounds extends TypeMap { + def apply(tp: Type): Type = tp match { + case TypeRef(_, sym, _) if sym.isAliasType => apply(tp.dealias) + case TypeRef(_, sym, _) if sym.isAbstractType => apply(tp.bounds.hi) + case rtp @ RefinedType(parents, decls) => copyRefinedType(rtp, parents mapConserve this, decls) + case AnnotatedType(_, _, _) => mapOver(tp) + case _ => tp // no recursion - top level only + } + } + + // Set to true for A* => Seq[A] + // (And it will only rewrite A* in method result types.) + // This is the pre-existing behavior. + // Or false for Seq[A] => Seq[A] + // (It will rewrite A* everywhere but method parameters.) + // This is the specified behavior. + protected def etaExpandKeepsStar = false + + /** Turn any T* types into Seq[T] except when + * in method parameter position. 
+ */ + object dropIllegalStarTypes extends TypeMap { + def apply(tp: Type): Type = tp match { + case MethodType(params, restpe) => + // Not mapping over params + val restpe1 = apply(restpe) + if (restpe eq restpe1) tp + else MethodType(params, restpe1) + case TypeRef(_, RepeatedParamClass, arg :: Nil) => + seqType(arg) + case _ => + if (etaExpandKeepsStar) tp else mapOver(tp) + } + } + + trait AnnotationFilter extends TypeMap { + def keepAnnotation(annot: AnnotationInfo): Boolean + + override def mapOver(annot: AnnotationInfo) = + if (keepAnnotation(annot)) super.mapOver(annot) + else UnmappableAnnotation + } + + trait KeepOnlyTypeConstraints extends AnnotationFilter { + // filter keeps only type constraint annotations + def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass + } + + // todo. move these into scala.reflect.api + + /** A prototype for mapping a function over all possible types + */ + abstract class TypeMap(trackVariance: Boolean) extends (Type => Type) { + def this() = this(trackVariance = false) + def apply(tp: Type): Type + + private[this] var _variance: Variance = if (trackVariance) Covariant else Invariant + + def variance_=(x: Variance) = { assert(trackVariance, this) ; _variance = x } + def variance = _variance + + /** Map this function over given type */ + def mapOver(tp: Type): Type = tp match { + case tr @ TypeRef(pre, sym, args) => + val pre1 = this(pre) + val args1 = ( + if (trackVariance && args.nonEmpty && !variance.isInvariant && sym.typeParams.nonEmpty) + mapOverArgs(args, sym.typeParams) + else + args mapConserve this + ) + if ((pre1 eq pre) && (args1 eq args)) tp + else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1) + case ThisType(_) => tp + case SingleType(pre, sym) => + if (sym.isPackageClass) tp // short path + else { + val pre1 = this(pre) + if (pre1 eq pre) tp + else singleType(pre1, sym) + } + case MethodType(params, result) => + val params1 = flipped(mapOver(params)) + val result1 = this(result) + if ((params1 eq params) && (result1 eq result)) tp + else copyMethodType(tp, params1, result1.substSym(params, params1)) + case PolyType(tparams, result) => + val tparams1 = flipped(mapOver(tparams)) + val result1 = this(result) + if ((tparams1 eq tparams) && (result1 eq result)) tp + else PolyType(tparams1, result1.substSym(tparams, tparams1)) + case NullaryMethodType(result) => + val result1 = this(result) + if (result1 eq result) tp + else NullaryMethodType(result1) + case ConstantType(_) => tp + case SuperType(thistp, supertp) => + val thistp1 = this(thistp) + val supertp1 = this(supertp) + if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp + else SuperType(thistp1, supertp1) + case TypeBounds(lo, hi) => + val lo1 = flipped(this(lo)) + val hi1 = this(hi) + if ((lo1 eq lo) && (hi1 eq hi)) tp + else TypeBounds(lo1, hi1) + case BoundedWildcardType(bounds) => + val bounds1 = this(bounds) + if (bounds1 eq bounds) tp + else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds]) + case rtp @ RefinedType(parents, decls) => + val parents1 = parents mapConserve this + val decls1 = mapOver(decls) + copyRefinedType(rtp, parents1, decls1) + case ExistentialType(tparams, result) => + val tparams1 = mapOver(tparams) + val result1 = this(result) + if ((tparams1 eq tparams) && (result1 eq result)) tp + else newExistentialType(tparams1, result1.substSym(tparams, tparams1)) + case OverloadedType(pre, alts) => + val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre) + if (pre1 eq pre) tp + else OverloadedType(pre1, alts) + case 
AntiPolyType(pre, args) => + val pre1 = this(pre) + val args1 = args mapConserve this + if ((pre1 eq pre) && (args1 eq args)) tp + else AntiPolyType(pre1, args1) + case tv@TypeVar(_, constr) => + if (constr.instValid) this(constr.inst) + else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty + case NotNullType(tp) => + val tp1 = this(tp) + if (tp1 eq tp) tp + else NotNullType(tp1) + case AnnotatedType(annots, atp, selfsym) => + val annots1 = mapOverAnnotations(annots) + val atp1 = this(atp) + if ((annots1 eq annots) && (atp1 eq atp)) tp + else if (annots1.isEmpty) atp1 + else AnnotatedType(annots1, atp1, selfsym) + /* + case ErrorType => tp + case WildcardType => tp + case NoType => tp + case NoPrefix => tp + case ErasedSingleType(sym) => tp + */ + case _ => + tp + // throw new Error("mapOver inapplicable for " + tp); + } + + def withVariance[T](v: Variance)(body: => T): T = { + val saved = variance + variance = v + try body finally variance = saved + } + @inline final def flipped[T](body: => T): T = { + if (trackVariance) variance = variance.flip + try body + finally if (trackVariance) variance = variance.flip + } + protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] = ( + if (trackVariance) + map2Conserve(args, tparams)((arg, tparam) => withVariance(variance * tparam.variance)(this(arg))) + else + args mapConserve this + ) + /** Applies this map to the symbol's info, setting variance = Invariant + * if necessary when the symbol is an alias. + */ + private def applyToSymbolInfo(sym: Symbol): Type = { + if (trackVariance && !variance.isInvariant && sym.isAliasType) + withVariance(Invariant)(this(sym.info)) + else + this(sym.info) + } + + /** Called by mapOver to determine whether the original symbols can + * be returned, or whether they must be cloned. + */ + protected def noChangeToSymbols(origSyms: List[Symbol]): Boolean = { + @tailrec def loop(syms: List[Symbol]): Boolean = syms match { + case Nil => true + case x :: xs => (x.info eq applyToSymbolInfo(x)) && loop(xs) + } + loop(origSyms) + } + + /** Map this function over given scope */ + def mapOver(scope: Scope): Scope = { + val elems = scope.toList + val elems1 = mapOver(elems) + if (elems1 eq elems) scope + else newScopeWith(elems1: _*) + } + + /** Map this function over given list of symbols */ + def mapOver(origSyms: List[Symbol]): List[Symbol] = { + // fast path in case nothing changes due to map + if (noChangeToSymbols(origSyms)) origSyms + // map is not the identity --> do cloning properly + else cloneSymbolsAndModify(origSyms, TypeMap.this) + } + + def mapOver(annot: AnnotationInfo): AnnotationInfo = { + val AnnotationInfo(atp, args, assocs) = annot + val atp1 = mapOver(atp) + val args1 = mapOverAnnotArgs(args) + // there is no need to rewrite assocs, as they are constants + + if ((args eq args1) && (atp eq atp1)) annot + else if (args1.isEmpty && args.nonEmpty) UnmappableAnnotation // some annotation arg was unmappable + else AnnotationInfo(atp1, args1, assocs) setPos annot.pos + } + + def mapOverAnnotations(annots: List[AnnotationInfo]): List[AnnotationInfo] = { + val annots1 = annots mapConserve mapOver + if (annots1 eq annots) annots + else annots1 filterNot (_ eq UnmappableAnnotation) + } + + /** Map over a set of annotation arguments. If any + * of the arguments cannot be mapped, then return Nil. 
*/ + def mapOverAnnotArgs(args: List[Tree]): List[Tree] = { + val args1 = args mapConserve mapOver + if (args1 contains UnmappableTree) Nil + else args1 + } + + def mapOver(tree: Tree): Tree = + mapOver(tree, () => return UnmappableTree) + + /** Map a tree that is part of an annotation argument. + * If the tree cannot be mapped, then invoke giveup(). + * The default is to transform the tree with + * TypeMapTransformer. + */ + def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = + (new TypeMapTransformer).transform(tree) + + /** This transformer leaves the tree alone except to remap + * its types. */ + class TypeMapTransformer extends Transformer { + override def transform(tree: Tree) = { + val tree1 = super.transform(tree) + val tpe1 = TypeMap.this(tree1.tpe) + if ((tree eq tree1) && (tree.tpe eq tpe1)) + tree + else + tree1.shallowDuplicate.setType(tpe1) + } + } + } + + abstract class TypeTraverser extends TypeMap { + def traverse(tp: Type): Unit + def apply(tp: Type): Type = { traverse(tp); tp } + } + + abstract class TypeTraverserWithResult[T] extends TypeTraverser { + def result: T + def clear(): Unit + } + + abstract class TypeCollector[T](initial: T) extends TypeTraverser { + var result: T = _ + def collect(tp: Type) = { + result = initial + traverse(tp) + result + } + } + + /** The raw to existential map converts a ''raw type'' to an existential type. + * It is necessary because we might have read a raw type of a + * parameterized Java class from a class file. At the time we read the type + * the corresponding class file might still not be read, so we do not + * know what the type parameters of the type are. Therefore + * the conversion of raw types to existential types might not have taken place + * in ClassFileparser.sigToType (where it is usually done). + */ + def rawToExistential = new TypeMap { + private var expanded = immutable.Set[Symbol]() + def apply(tp: Type): Type = tp match { + case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) => + if (expanded contains sym) AnyRefClass.tpe + else try { + expanded += sym + val eparams = mapOver(typeParamsToExistentials(sym)) + existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe))) + } finally { + expanded -= sym + } + case _ => + mapOver(tp) + } + } + /*** + *@M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible + object rawToExistentialInJava extends TypeMap { + def apply(tp: Type): Type = tp match { + // any symbol that occurs in a java sig, not just java symbols + // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14 + case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty => + val eparams = typeParamsToExistentials(sym, sym.typeParams) + existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe))) + case _ => + mapOver(tp) + } + } + */ + + /** Used by existentialAbstraction. 
+ */ + class ExistentialExtrapolation(tparams: List[Symbol]) extends TypeMap(trackVariance = true) { + private val occurCount = mutable.HashMap[Symbol, Int]() + private def countOccs(tp: Type) = { + tp foreach { + case TypeRef(_, sym, _) => + if (tparams contains sym) + occurCount(sym) += 1 + case _ => () + } + } + def extrapolate(tpe: Type): Type = { + tparams foreach (t => occurCount(t) = 0) + countOccs(tpe) + for (tparam <- tparams) + countOccs(tparam.info) + + apply(tpe) + } + + /** If these conditions all hold: + * 1) we are in covariant (or contravariant) position + * 2) this type occurs exactly once in the existential scope + * 3) the widened upper (or lower) bound of this type contains no references to tparams + * Then we replace this lone occurrence of the type with the widened upper (or lower) bound. + * All other types pass through unchanged. + */ + def apply(tp: Type): Type = { + val tp1 = mapOver(tp) + if (variance.isInvariant) tp1 + else tp1 match { + case TypeRef(pre, sym, args) if tparams contains sym => + val repl = if (variance.isPositive) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo + val count = occurCount(sym) + val containsTypeParam = tparams exists (repl contains _) + def msg = { + val word = if (variance.isPositive) "upper" else "lower" + s"Widened lone occurrence of $tp1 inside existential to $word bound" + } + if (!repl.typeSymbol.isBottomClass && count == 1 && !containsTypeParam) + logResult(msg)(repl) + else + tp1 + case _ => + tp1 + } + } + override def mapOver(tp: Type): Type = tp match { + case SingleType(pre, sym) => + if (sym.isPackageClass) tp // short path + else { + val pre1 = this(pre) + if ((pre1 eq pre) || !pre1.isStable) tp + else singleType(pre1, sym) + } + case _ => super.mapOver(tp) + } + + // Do not discard the types of existential ident's. The + // symbol of the Ident itself cannot be listed in the + // existential's parameters, so the resulting existential + // type would be ill-formed. + override def mapOver(tree: Tree) = tree match { + case Ident(_) if tree.tpe.isStable => tree + case _ => super.mapOver(tree) + } + } + + /** Might the given symbol be important when calculating the prefix + * of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`, + * the result will be `tp` unchanged if `pre` is trivial and `clazz` + * is a symbol such that isPossiblePrefix(clazz) == false. + */ + def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass + + protected[internal] def skipPrefixOf(pre: Type, clazz: Symbol) = ( + (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz) + ) + + def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap = + new AsSeenFromMap(pre, clazz) + + /** A map to compute the asSeenFrom method. + */ + class AsSeenFromMap(seenFromPrefix: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints { + // Some example source constructs relevant in asSeenFrom: + // + // object CaptureThis { + // trait X[A] { def f: this.type = this } + // class Y[A] { def f: this.type = this } + // // Created new existential to represent This(CaptureThis.X) seen from CaptureThis.X[B]: type _1.type <: CaptureThis.X[B] with Singleton + // def f1[B] = new X[B] { } + // // TODO - why is the behavior different when it's a class? 
+ // def f2[B] = new Y[B] { } + // } + // class CaptureVal[T] { + // val f: java.util.List[_ <: T] = null + // // Captured existential skolem for type _$1 seen from CaptureVal.this.f.type: type _$1 + // def g = f get 0 + // } + // class ClassParam[T] { + // // AsSeenFromMap(Inner.this.type, class Inner)/classParameterAsSeen(T)#loop(ClassParam.this.type, class ClassParam) + // class Inner(lhs: T) { def f = lhs } + // } + def capturedParams: List[Symbol] = _capturedParams + def capturedSkolems: List[Symbol] = _capturedSkolems + + def apply(tp: Type): Type = tp match { + case tp @ ThisType(_) => thisTypeAsSeen(tp) + case tp @ SingleType(_, sym) => if (sym.isPackageClass) tp else singleTypeAsSeen(tp) + case tp @ TypeRef(_, sym, _) if isTypeParamOfEnclosingClass(sym) => classParameterAsSeen(tp) + case _ => mapOver(tp) + } + + private var _capturedSkolems: List[Symbol] = Nil + private var _capturedParams: List[Symbol] = Nil + private val isStablePrefix = seenFromPrefix.isStable + + // isBaseClassOfEnclosingClassOrInfoIsNotYetComplete would be a more accurate + // but less succinct name. + private def isBaseClassOfEnclosingClass(base: Symbol) = { + def loop(encl: Symbol): Boolean = ( + isPossiblePrefix(encl) + && ((encl isSubClass base) || loop(encl.owner.enclClass)) + ) + // The hasCompleteInfo guard is necessary to avoid cycles during the typing + // of certain classes, notably ones defined inside package objects. + !base.hasCompleteInfo || loop(seenFromClass) + } + + /** Is the symbol a class type parameter from one of the enclosing + * classes, or a base class of one of them? + */ + private def isTypeParamOfEnclosingClass(sym: Symbol): Boolean = ( + sym.isTypeParameter + && sym.owner.isClass + && isBaseClassOfEnclosingClass(sym.owner) + ) + + /** Creates an existential representing a type parameter which appears + * in the prefix of a ThisType. + */ + protected def captureThis(pre: Type, clazz: Symbol): Type = { + capturedParams find (_.owner == clazz) match { + case Some(p) => p.tpe + case _ => + val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre) + _capturedParams ::= qvar + debuglog(s"Captured This(${clazz.fullNameString}) seen from $seenFromPrefix: ${qvar.defString}") + qvar.tpe + } + } + protected def captureSkolems(skolems: List[Symbol]) { + for (p <- skolems; if !(capturedSkolems contains p)) { + debuglog(s"Captured $p seen from $seenFromPrefix") + _capturedSkolems ::= p + } + } + + /** Find the type argument in an applied type which corresponds to a type parameter. + * The arguments are required to be related as follows, through intermediary `clazz`. + * An exception will be thrown if this is violated. + * + * @param lhs its symbol is a type parameter of `clazz` + * @param rhs a type application constructed from `clazz` + */ + private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = { + val TypeRef(_, lhsSym, lhsArgs) = lhs + val TypeRef(_, rhsSym, rhsArgs) = rhs + require(lhsSym.safeOwner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym") + + // Find the type parameter position; we'll use the corresponding argument + val argIndex = rhsSym.typeParams indexOf lhsSym + + if (argIndex >= 0 && argIndex < rhsArgs.length) // @M! 
don't just replace the whole thing, might be followed by type application + appliedType(rhsArgs(argIndex), lhsArgs mapConserve this) + else if (rhsSym.tpe_*.parents exists typeIsErroneous) // don't be too zealous with the exceptions, see #2641 + ErrorType + else + abort(s"something is wrong: cannot make sense of type application\n $lhs\n $rhs") + } + + // 0) @pre: `classParam` is a class type parameter + // 1) Walk the owner chain of `seenFromClass` until we find the class which owns `classParam` + // 2) Take the base type of the prefix at that point with respect to the owning class + // 3) Solve for the type parameters through correspondence with the type args of the base type + // + // Only class type parameters (and not skolems) are considered, because other type parameters + // are not influenced by the prefix through which they are seen. Note that type params of + // anonymous type functions, which currently can only arise from normalising type aliases, are + // owned by the type alias of which they are the eta-expansion. + private def classParameterAsSeen(classParam: Type): Type = { + val TypeRef(_, tparam, _) = classParam + + def loop(pre: Type, clazz: Symbol): Type = { + // have to deconst because it may be a Class[T] + def nextBase = (pre baseType clazz).deconst + //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary + if (skipPrefixOf(pre, clazz)) + mapOver(classParam) + else if (!matchesPrefixAndClass(pre, clazz)(tparam.owner)) + loop(nextBase.prefix, clazz.owner) + else nextBase match { + case applied @ TypeRef(_, _, _) => correspondingTypeArgument(classParam, applied) + case ExistentialType(eparams, qtpe) => captureSkolems(eparams) ; loop(qtpe, clazz) + case t => abort(s"$tparam in ${tparam.owner} cannot be instantiated from ${seenFromPrefix.widen}") + } + } + loop(seenFromPrefix, seenFromClass) + } + + // Does the candidate symbol match the given prefix and class? + // Since pre may be something like ThisType(A) where trait A { self: B => }, + // we have to test the typeSymbol of the widened type, not pre.typeSymbol, or + // B will not be considered. + private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) = pre.widen match { + case _: TypeVar => false + case wide => (clazz == candidate) && (wide.typeSymbol isSubClass clazz) + } + + // Whether the annotation tree currently being mapped over has had a This(_) node rewritten. + private[this] var wroteAnnotation = false + private object annotationArgRewriter extends TypeMapTransformer { + private def matchesThis(thiz: Symbol) = matchesPrefixAndClass(seenFromPrefix, seenFromClass)(thiz) + + // what symbol should really be used? + private def newThis(): Tree = { + wroteAnnotation = true + val presym = seenFromPrefix.widen.typeSymbol + val thisSym = presym.owner.newValue(presym.name.toTermName, presym.pos) setInfo seenFromPrefix + gen.mkAttributedQualifier(seenFromPrefix, thisSym) + } + + /** Rewrite `This` trees in annotation argument trees */ + override def transform(tree: Tree): Tree = super.transform(tree) match { + case This(_) if matchesThis(tree.symbol) => newThis() + case tree => tree + } + } + + // This becomes considerably cheaper if we optimize for the common cases: + // where the prefix is stable and where no This nodes are rewritten. If + // either is true, then we don't need to worry about calling giveup. So if + // the prefix is unstable, use a stack variable to indicate whether the tree + // was touched. 
This takes us to one allocation per AsSeenFromMap rather + // than an allocation on every call to mapOver, and no extra work when the + // tree only has its types remapped. + override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = { + if (isStablePrefix) + annotationArgRewriter transform tree + else { + val saved = wroteAnnotation + wroteAnnotation = false + try annotationArgRewriter transform tree + finally if (wroteAnnotation) giveup() else wroteAnnotation = saved + } + } + + private def thisTypeAsSeen(tp: ThisType): Type = { + def loop(pre: Type, clazz: Symbol): Type = { + val pre1 = pre match { + case SuperType(thistpe, _) => thistpe + case _ => pre + } + if (skipPrefixOf(pre, clazz)) + mapOver(tp) // TODO - is mapOver necessary here? + else if (!matchesPrefixAndClass(pre, clazz)(tp.sym)) + loop((pre baseType clazz).prefix, clazz.owner) + else if (pre1.isStable) + pre1 + else + captureThis(pre1, clazz) + } + loop(seenFromPrefix, seenFromClass) + } + + private def singleTypeAsSeen(tp: SingleType): Type = { + val SingleType(pre, sym) = tp + + val pre1 = this(pre) + if (pre1 eq pre) tp + else if (pre1.isStable) singleType(pre1, sym) + else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction + } + + override def toString = s"AsSeenFromMap($seenFromPrefix, $seenFromClass)" + } + + /** A base class to compute all substitutions */ + abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap { + assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to) + + /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */ + protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1 + + /** Map target to type, can be tuned by subclasses */ + protected def toType(fromtp: Type, tp: T): Type + + protected def renameBoundSyms(tp: Type): Type = tp match { + case MethodType(ps, restp) => + createFromClonedSymbols(ps, restp)((ps1, tp1) => copyMethodType(tp, ps1, renameBoundSyms(tp1))) + case PolyType(bs, restp) => + createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1))) + case ExistentialType(bs, restp) => + createFromClonedSymbols(bs, restp)(newExistentialType) + case _ => + tp + } + + @tailrec private def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type = ( + if (from.isEmpty) tp + // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from)) + else if (matches(from.head, sym)) toType(tp, to.head) + else subst(tp, sym, from.tail, to.tail) + ) + + def apply(tp0: Type): Type = if (from.isEmpty) tp0 else { + val boundSyms = tp0.boundSyms + val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0 + val tp = mapOver(tp1) + def substFor(sym: Symbol) = subst(tp, sym, from, to) + + tp match { + // @M + // 1) arguments must also be substituted (even when the "head" of the + // applied type has already been substituted) + // example: (subst RBound[RT] from [type RT,type RBound] to + // [type RT&,type RBound&]) = RBound&[RT&] + // 2) avoid loops (which occur because alpha-conversion is + // not performed properly imo) + // e.g. 
if in class Iterable[a] there is a new Iterable[(a,b)], + // we must replace the a in Iterable[a] by (a,b) + // (must not recurse --> loops) + // 3) replacing m by List in m[Int] should yield List[Int], not just List + case TypeRef(NoPrefix, sym, args) => + val tcon = substFor(sym) + if ((tp eq tcon) || args.isEmpty) tcon + else appliedType(tcon.typeConstructor, args) + case SingleType(NoPrefix, sym) => + substFor(sym) + case _ => + tp + } + } + } + + /** A map to implement the `substSym` method. */ + class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) { + def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2)) + + protected def toType(fromtp: Type, sym: Symbol) = fromtp match { + case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args) + case SingleType(pre, _) => singleType(pre, sym) + } + @tailrec private def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol = ( + if (from.isEmpty) sym + // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from)) + else if (matches(from.head, sym)) to.head + else subst(sym, from.tail, to.tail) + ) + private def substFor(sym: Symbol) = subst(sym, from, to) + + override def apply(tp: Type): Type = ( + if (from.isEmpty) tp + else tp match { + case TypeRef(pre, sym, args) if pre ne NoPrefix => + val newSym = substFor(sym) + // mapOver takes care of subst'ing in args + mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) ) + // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams)) + case SingleType(pre, sym) if pre ne NoPrefix => + val newSym = substFor(sym) + mapOver( if (sym eq newSym) tp else singleType(pre, newSym) ) + case _ => + super.apply(tp) + } + ) + + object mapTreeSymbols extends TypeMapTransformer { + val strictCopy = newStrictTreeCopier + + def termMapsTo(sym: Symbol) = from indexOf sym match { + case -1 => None + case idx => Some(to(idx)) + } + + // if tree.symbol is mapped to another symbol, passes the new symbol into the + // constructor `trans` and sets the symbol and the type on the resulting tree. + def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match { + case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe + case None => tree + } + + // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified. + override def transform(tree: Tree) = { + // super.transform maps symbol references in the types of `tree`. it also copies trees where necessary. + super.transform(tree) match { + case id @ Ident(_) => + transformIfMapped(id)(toSym => + strictCopy.Ident(id, toSym.name)) + + case sel @ Select(qual, name) => + transformIfMapped(sel)(toSym => + strictCopy.Select(sel, qual, toSym.name)) + + case tree => tree + } + } + } + override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = { + mapTreeSymbols.transform(tree) + } + } + + /** A map to implement the `subst` method. 
*/ + class SubstTypeMap(from: List[Symbol], to: List[Type]) + extends SubstMap(from, to) { + protected def toType(fromtp: Type, tp: Type) = tp + + override def mapOver(tree: Tree, giveup: () => Nothing): Tree = { + object trans extends TypeMapTransformer { + override def transform(tree: Tree) = tree match { + case Ident(name) => + from indexOf tree.symbol match { + case -1 => super.transform(tree) + case idx => + val totpe = to(idx) + if (totpe.isStable) tree.duplicate setType totpe + else giveup() + } + case _ => + super.transform(tree) + } + } + trans.transform(tree) + } + } + + /** A map to implement the `substThis` method. */ + class SubstThisMap(from: Symbol, to: Type) extends TypeMap { + def apply(tp: Type): Type = tp match { + case ThisType(sym) if (sym == from) => to + case _ => mapOver(tp) + } + } + + class SubstWildcardMap(from: List[Symbol]) extends TypeMap { + def apply(tp: Type): Type = try { + tp match { + case TypeRef(_, sym, _) if from contains sym => + BoundedWildcardType(sym.info.bounds) + case _ => + mapOver(tp) + } + } catch { + case ex: MalformedType => + WildcardType + } + } + + // dependent method types + object IsDependentCollector extends TypeCollector(false) { + def traverse(tp: Type) { + if (tp.isImmediatelyDependent) result = true + else if (!result) mapOver(tp) + } + } + + object ApproximateDependentMap extends TypeMap { + def apply(tp: Type): Type = + if (tp.isImmediatelyDependent) WildcardType + else mapOver(tp) + } + + /** Note: This map is needed even for non-dependent method types, despite what the name might imply. + */ + class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints { + private val actuals = actuals0.toIndexedSeq + private val existentials = new Array[Symbol](actuals.size) + def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList + + private object StableArg { + def unapply(param: Symbol) = Arg unapply param map actuals filter (tp => + tp.isStable && (tp.typeSymbol != NothingClass) + ) + } + private object Arg { + def unapply(param: Symbol) = Some(params indexOf param) filter (_ >= 0) + } + + def apply(tp: Type): Type = mapOver(tp) match { + // unsound to replace args by unstable actual #3873 + case SingleType(NoPrefix, StableArg(arg)) => arg + // (soundly) expand type alias selections on implicit arguments, + // see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit` + case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) => + val arg = actuals(pid) + val res = typeRef(arg, sym, targs) + if (res.typeSymbolDirect.isAliasType) res.dealias else tp1 + // don't return the original `tp`, which may be different from `tp1`, + // due to dropping annotations + case tp1 => tp1 + } + + /* Return the type symbol for referencing a parameter inside the existential quantifier. + * (Only needed if the actual is unstable.) + */ + private def existentialFor(pid: Int) = { + if (existentials(pid) eq null) { + val param = params(pid) + existentials(pid) = ( + param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags) + setInfo singletonBounds(actuals(pid)) + ) + } + existentials(pid) + } + + //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon) + override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = { + // TODO: this should be simplified; in the stable case, one can + // probably just use an Ident to the tree.symbol. 
+ // + // @PP: That leads to failure here, where stuff no longer has type + // 'String @Annot("stuff")' but 'String @Annot(x)'. + // + // def m(x: String): String @Annot(x) = x + // val stuff = m("stuff") + // + // (TODO cont.) Why an existential in the non-stable case? + // + // @PP: In the following: + // + // def m = { val x = "three" ; val y: String @Annot(x) = x; y } + // + // m is typed as 'String @Annot(x) forSome { val x: String }'. + // + // Both examples are from run/constrained-types.scala. + object treeTrans extends Transformer { + override def transform(tree: Tree): Tree = tree.symbol match { + case StableArg(actual) => + gen.mkAttributedQualifier(actual, tree.symbol) + case Arg(pid) => + val sym = existentialFor(pid) + Ident(sym) copyAttrs tree setType typeRef(NoPrefix, sym, Nil) + case _ => + super.transform(tree) + } + } + treeTrans transform arg + } + } + + /** A map to convert every occurrence of a wildcard type to a fresh + * type variable */ + object wildcardToTypeVarMap extends TypeMap { + def apply(tp: Type): Type = tp match { + case WildcardType => + TypeVar(tp, new TypeConstraint) + case BoundedWildcardType(bounds) => + TypeVar(tp, new TypeConstraint(bounds)) + case _ => + mapOver(tp) + } + } + + /** A map to convert every occurrence of a type variable to a wildcard type. */ + object typeVarToOriginMap extends TypeMap { + def apply(tp: Type): Type = tp match { + case TypeVar(origin, _) => origin + case _ => mapOver(tp) + } + } + + /** A map to implement the `contains` method. */ + class ContainsCollector(sym: Symbol) extends TypeCollector(false) { + def traverse(tp: Type) { + if (!result) { + tp.normalize match { + case TypeRef(_, sym1, _) if (sym == sym1) => result = true + case SingleType(_, sym1) if (sym == sym1) => result = true + case _ => mapOver(tp) + } + } + } + + override def mapOver(arg: Tree) = { + for (t <- arg) { + traverse(t.tpe) + if (t.symbol == sym) + result = true + } + arg + } + } + + /** A map to implement the `contains` method. */ + class ContainsTypeCollector(t: Type) extends TypeCollector(false) { + def traverse(tp: Type) { + if (!result) { + if (tp eq t) result = true + else mapOver(tp) + } + } + override def mapOver(arg: Tree) = { + for (t <- arg) + traverse(t.tpe) + + arg + } + } + + /** A map to implement the `filter` method. */ + class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) { + override def collect(tp: Type) = super.collect(tp).reverse + + def traverse(tp: Type) { + if (p(tp)) result ::= tp + mapOver(tp) + } + } + + /** A map to implement the `collect` method. */ + class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) { + override def collect(tp: Type) = super.collect(tp).reverse + + def traverse(tp: Type) { + if (pf.isDefinedAt(tp)) result ::= pf(tp) + mapOver(tp) + } + } + + class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser { + def traverse(tp: Type) { + f(tp) + mapOver(tp) + } + } + + /** A map to implement the `filter` method. */ + class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) { + def traverse(tp: Type) { + if (result.isEmpty) { + if (p(tp)) result = Some(tp) + mapOver(tp) + } + } + } + + /** A map to implement the `contains` method. 
*/ + object ErroneousCollector extends TypeCollector(false) { + def traverse(tp: Type) { + if (!result) { + result = tp.isError + mapOver(tp) + } + } + } + + object adaptToNewRunMap extends TypeMap { + + private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = { + if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass) + sym + else if (sym.isModuleClass) { + val sourceModule1 = adaptToNewRun(pre, sym.sourceModule) + + sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse { + val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s" + debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass)) + sym + } + } + else { + var rebind0 = pre.findMember(sym.name, BRIDGE, 0, stableOnly = true) orElse { + if (sym.isAliasType) throw missingAliasException + devWarning(s"$pre.$sym no longer exist at phase $phase") + throw new MissingTypeControl // For build manager and presentation compiler purposes + } + /** The two symbols have the same fully qualified name */ + def corresponds(sym1: Symbol, sym2: Symbol): Boolean = + sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner)) + if (!corresponds(sym.owner, rebind0.owner)) { + debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString) + val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner)) + if (bcs.isEmpty) + assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinementclass it might be a structural type => OK to leave it in. + else + rebind0 = pre.baseType(bcs.head).member(sym.name) + debuglog( + "ADAPT2 pre = " + pre + + ", bcs.head = " + bcs.head + + ", sym = " + sym.fullLocationString + + ", rebind = " + rebind0.fullLocationString + ) + } + rebind0.suchThat(sym => sym.isType || sym.isStable) orElse { + debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType) + throw new MalformedType(pre, sym.nameString) + } + } + } + def apply(tp: Type): Type = tp match { + case ThisType(sym) => + try { + val sym1 = adaptToNewRun(sym.owner.thisType, sym) + if (sym1 == sym) tp else ThisType(sym1) + } catch { + case ex: MissingTypeControl => + tp + } + case SingleType(pre, sym) => + if (sym.isPackage) tp + else { + val pre1 = this(pre) + try { + val sym1 = adaptToNewRun(pre1, sym) + if ((pre1 eq pre) && (sym1 eq sym)) tp + else singleType(pre1, sym1) + } catch { + case _: MissingTypeControl => + tp + } + } + case TypeRef(pre, sym, args) => + if (sym.isPackageClass) tp + else { + val pre1 = this(pre) + val args1 = args mapConserve (this) + try { + val sym1 = adaptToNewRun(pre1, sym) + if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) { + tp + } else if (sym1 == NoSymbol) { + devWarning(s"adapt to new run failed: pre=$pre pre1=$pre1 sym=$sym") + tp + } else { + copyTypeRef(tp, pre1, sym1, args1) + } + } catch { + case ex: MissingAliasControl => + apply(tp.dealias) + case _: MissingTypeControl => + tp + } + } + case MethodType(params, restp) => + val restp1 = this(restp) + if (restp1 eq restp) tp + else copyMethodType(tp, params, restp1) + case NullaryMethodType(restp) => + val restp1 = this(restp) + if (restp1 eq restp) tp + else NullaryMethodType(restp1) + case PolyType(tparams, restp) => + val restp1 = this(restp) + if (restp1 eq restp) tp + else PolyType(tparams, restp1) + + // Lukas: we need to 
check (together) whether we should also include parameter types + of PolyType and MethodType in adaptToNewRun + + case ClassInfoType(parents, decls, clazz) => + if (clazz.isPackageClass) tp + else { + val parents1 = parents mapConserve (this) + if (parents1 eq parents) tp + else ClassInfoType(parents1, decls, clazz) + } + case RefinedType(parents, decls) => + val parents1 = parents mapConserve (this) + if (parents1 eq parents) tp + else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos) + case SuperType(_, _) => mapOver(tp) + case TypeBounds(_, _) => mapOver(tp) + case TypeVar(_, _) => mapOver(tp) + case AnnotatedType(_,_,_) => mapOver(tp) + case NotNullType(_) => mapOver(tp) + case ExistentialType(_, _) => mapOver(tp) + case _ => tp + } + } + +} diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala new file mode 100644 index 0000000000..263b0f5a3e --- /dev/null +++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala @@ -0,0 +1,29 @@ +package scala.reflect +package internal +package tpe + +private[internal] trait TypeToStrings { + self: SymbolTable => + + /** The maximum number of recursions allowed in toString + */ + final val maxTostringRecursions = 50 + + private var tostringRecursions = 0 + + protected def typeToString(tpe: Type): String = + if (tostringRecursions >= maxTostringRecursions) { + devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe)) + if (settings.debug.value) + (new Throwable).printStackTrace + + "..." + } + else + try { + tostringRecursions += 1 + tpe.safeToString + } finally { + tostringRecursions -= 1 + } +} -- cgit v1.2.3 From 3f0224c4de5b04f33e3de523c03d418b818af879 Mon Sep 17 00:00:00 2001 From: James Iry Date: Tue, 5 Mar 2013 20:08:46 -0800 Subject: Add option to disable optimization By default we run partest under -optimise. But occasionally we need to test optimizations in isolation. This commit adds a -Ynooptimise flag that turns the optimization flags back off after they've been turned on. A test is included to ensure that -Ynooptimise turns off optimizations, and an existing test is modified to show that optimizations coming after -Ynooptimise on the command line are enabled. --- .../scala/tools/nsc/settings/ScalaSettings.scala | 2 ++ test/files/jvm/nooptimise/Foo_1.flags | 1 + test/files/jvm/nooptimise/Foo_1.scala | 8 ++++++++ test/files/jvm/nooptimise/Test.scala | 23 ++++++++++++++++++++++ test/files/jvm/t7006/Foo_1.flags | 2 +- 5 files changed, 35 insertions(+), 1 deletion(-) create mode 100644 test/files/jvm/nooptimise/Foo_1.flags create mode 100644 test/files/jvm/nooptimise/Foo_1.scala create mode 100644 test/files/jvm/nooptimise/Test.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 2c9c20666d..702071f906 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -43,6 +43,7 @@ trait ScalaSettings extends AbsScalaSettings /** Internal use - syntax enhancements.
*/ private class EnableSettings[T <: BooleanSetting](val s: T) { def enabling(toEnable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toEnable foreach (_.value = s.value)) + def disabling(toDisable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toDisable foreach (_.value = !s.value)) def andThen(f: s.T => Unit): s.type = s withPostSetHook (setting => f(setting.value)) } private implicit def installEnableSettings[T <: BooleanSetting](s: T) = new EnableSettings(s) @@ -194,6 +195,7 @@ trait ScalaSettings extends AbsScalaSettings */ val future = BooleanSetting("-Xfuture", "Turn on future language features.") enabling futureSettings val optimise = BooleanSetting("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize" enabling optimiseSettings + val nooptimise = BooleanSetting("-Ynooptimise", "Clears all the flags set by -optimise. Useful for testing optimizations in isolation.") withAbbreviation "-Ynooptimize" disabling optimise::optimiseSettings val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enabling experimentalSettings // Feature extensions diff --git a/test/files/jvm/nooptimise/Foo_1.flags b/test/files/jvm/nooptimise/Foo_1.flags new file mode 100644 index 0000000000..9686c20775 --- /dev/null +++ b/test/files/jvm/nooptimise/Foo_1.flags @@ -0,0 +1 @@ +-optimise -Ynooptimise \ No newline at end of file diff --git a/test/files/jvm/nooptimise/Foo_1.scala b/test/files/jvm/nooptimise/Foo_1.scala new file mode 100644 index 0000000000..c6f1b06c8e --- /dev/null +++ b/test/files/jvm/nooptimise/Foo_1.scala @@ -0,0 +1,8 @@ +class Foo_1 { + def foo() { + // optimization will remove this magic 3 from appearing in the source + // so -Ynooptimize should prevent that + val x = 3 + + } +} diff --git a/test/files/jvm/nooptimise/Test.scala b/test/files/jvm/nooptimise/Test.scala new file mode 100644 index 0000000000..ec8daa6e16 --- /dev/null +++ b/test/files/jvm/nooptimise/Test.scala @@ -0,0 +1,23 @@ +import scala.tools.partest.BytecodeTest +import scala.tools.asm +import asm.tree.InsnList +import scala.collection.JavaConverters._ + +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("Foo_1") + val methodNode = getMethod(classNode, "foo") + // if optimization didn't run then + // there should be some useless instructions + // with the magic constant 3 + val expected = 1 + val got = countMagicThrees(methodNode.instructions) + assert(got == expected, s"expected $expected but got $got magic threes") + } + + def countMagicThrees(insnList: InsnList): Int = { + def isMagicThree(node: asm.tree.AbstractInsnNode): Boolean = + (node.getOpcode == asm.Opcodes.ICONST_3) + insnList.iterator.asScala.count(isMagicThree) + } +} diff --git a/test/files/jvm/t7006/Foo_1.flags b/test/files/jvm/t7006/Foo_1.flags index 72fe7b1aa0..b723a661a7 100644 --- a/test/files/jvm/t7006/Foo_1.flags +++ b/test/files/jvm/t7006/Foo_1.flags @@ -1 +1 @@ - -Ydead-code -Ydebug -Xfatal-warnings +-Ynooptimise -Ydead-code -Ydebug -Xfatal-warnings -- cgit v1.2.3 From b50a0d811f0fb99ccbc295741e66bab348b3f99e Mon Sep 17 00:00:00 2001 From: James Iry Date: Wed, 27 Feb 2013 12:36:41 -0800 Subject: SI-7006 Prevent unreachable blocks in GenICode This commit makes GenICode prevent the generation of most unreachable blocks. The new unreachable block prevention code can be disabled with a compiler flag. 
Because full unreachable analysis is no longer necessary for normal code it makes the unreachable block analysis run only under -optimise. A test is included to make sure unreachable code doesn't cause issues in code gen. A concrete example will help. def foo(): X = { try return something() catch { case e: Throwable => println(e) throw e } unreachableCode() ] Here unreachableCode() is unreachable but GenICode would create ICode for it and then ASM would turn it into a pile of NOPS. A previous commit added a reachability analysis step to eliminate that unreachable code but that added a bit of time to the compilation process even when optimization was turned off. This commit avoids generating most unreachable ICode in the first place so that full reachability analysis is only needed after doing other optimization work. The new code works by extending a mechanism that was already in place. When GenICode encountered a THROW or RETURN it would put the current block into "ignore" mode so that no further instructions would be written into the block. However, that ignore mode flag was itself ignored when it came to figuring out if follow on blocks should be written. So this commit goes through places like try/catch and if/else and uses the ignore mode of the current block to decide whether to create follow on blocks, or if it already has, to kill by putting them into ignore mode and closing them where they'll be removed from the method's list of active blocks. It's not quite as good as full reachability analysis. In particular because a label def can be emitted before anything that jumps to it, this simple logic is forced to leave label defs alone and that means some of them may be unreachable without being removed. However, in practice it gets close the the benefit of reachability analysis at very nearly no cost. --- .../tools/nsc/backend/icode/BasicBlocks.scala | 32 ++++- .../scala/tools/nsc/backend/icode/GenICode.scala | 117 +++++++++++----- .../scala/tools/nsc/backend/jvm/GenASM.scala | 3 +- .../scala/tools/nsc/settings/ScalaSettings.scala | 1 + test/files/jvm/t7006/Foo_1.flags | 2 +- test/files/jvm/unreachable/Foo_1.flags | 1 + test/files/jvm/unreachable/Foo_1.scala | 110 +++++++++++++++ test/files/jvm/unreachable/Test.scala | 23 ++++ test/files/run/inline-ex-handlers.check | 152 +++++++++++---------- test/files/run/unreachable.scala | 125 +++++++++++++++++ 10 files changed, 451 insertions(+), 115 deletions(-) create mode 100644 test/files/jvm/unreachable/Foo_1.flags create mode 100644 test/files/jvm/unreachable/Foo_1.scala create mode 100644 test/files/jvm/unreachable/Test.scala create mode 100644 test/files/run/unreachable.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index 917fe8b292..d772dcb6c4 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -17,7 +17,7 @@ trait BasicBlocks { self: ICodes => import opcodes._ - import global.{ settings, log, nme } + import global.{ settings, debuglog, log, nme } import nme.isExceptionResultName /** Override Array creation for efficiency (to not go through reflection). 
*/ @@ -383,7 +383,6 @@ trait BasicBlocks { /** Close the block */ def close() { assert(!closed || ignore, this) - assert(instructionList.nonEmpty, "Empty block: " + this) if (ignore && closed) { // redundant `ignore &&` for clarity -- we should never be in state `!ignore && closed` // not doing anything to this block is important... // because the else branch reverses innocent blocks, which is wrong when they're in ignore mode (and closed) @@ -393,9 +392,38 @@ trait BasicBlocks { setFlag(DIRTYSUCCS) instructionList = instructionList.reverse instrs = instructionList.toArray + if (instructionList.isEmpty) { + debuglog(s"Removing empty block $this") + code removeBlock this + } } } + /** + * if cond is true, closes this block, entersIgnoreMode, and removes the block from + * its list of blocks. Used to allow a block to be started and then cancelled when it + * is discovered to be unreachable. + */ + def killIf(cond: Boolean) { + if (!settings.YdisableUnreachablePrevention.value && cond) { + debuglog(s"Killing block $this") + assert(instructionList.isEmpty, s"Killing a non empty block $this") + // only checked under debug because fetching predecessor list is moderately expensive + if (settings.debug.value) + assert(predecessors.isEmpty, s"Killing block $this which is referred to from ${predecessors.mkString}") + + close() + enterIgnoreMode() + } + } + + /** + * Same as killIf but with the logic of the condition reversed + */ + def killUnless(cond: Boolean) { + this killIf !cond + } + def open() { assert(closed, this) closed = false diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index ed458a4bbe..468e2cfd35 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -376,12 +376,14 @@ abstract class GenICode extends SubComponent { "I produce UNIT in a context where " + expectedType + " is expected!") // alternatives may be already closed by a tail-recursive jump + val contReachable = !(thenCtx.bb.ignore && elseCtx.bb.ignore) thenCtx.bb.closeWith(JUMP(contCtx.bb)) elseCtx.bb.closeWith( if (elsep == EmptyTree) JUMP(contCtx.bb) else JUMP(contCtx.bb) setPos tree.pos ) + contCtx.bb killUnless contReachable (contCtx, resKind) } private def genLoadTry(tree: Try, ctx: Context, setGeneratedType: TypeKind => Unit): Context = { @@ -477,7 +479,11 @@ abstract class GenICode extends SubComponent { val resCtx: Context = tree match { case LabelDef(name, params, rhs) => def genLoadLabelDef = { - val ctx1 = ctx.newBlock() + val ctx1 = ctx.newBlock() // note: we cannot kill ctx1 if ctx is in ignore mode because + // label defs can be the target of jumps from other locations. 
+ // that means label defs can lead to unreachable code without + // proper reachability analysis + if (nme.isLoopHeaderLabel(name)) ctx1.bb.loopHeader = true @@ -560,6 +566,7 @@ abstract class GenICode extends SubComponent { // the list, otherwise infinite recursion happens for // finalizers that contain 'return' val fctx = finalizerCtx.newBlock() + fctx.bb killIf ctx1.bb.ignore ctx1.bb.closeWith(JUMP(fctx.bb)) ctx1 = genLoad(f1, fctx, UNIT) } @@ -949,6 +956,8 @@ abstract class GenICode extends SubComponent { debuglog("Generating SWITCH statement.") val ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue) val afterCtx = ctx1.newBlock() + afterCtx.bb killIf ctx1.bb.ignore + var afterCtxReachable = false var caseCtx: Context = null generatedType = toTypeKind(tree.tpe) @@ -959,6 +968,7 @@ abstract class GenICode extends SubComponent { for (caze @ CaseDef(pat, guard, body) <- cases) { assert(guard == EmptyTree, guard) val tmpCtx = ctx1.newBlock() + tmpCtx.bb killIf ctx1.bb.ignore pat match { case Literal(value) => tags = value.intValue :: tags @@ -980,12 +990,15 @@ abstract class GenICode extends SubComponent { } caseCtx = genLoad(body, tmpCtx, generatedType) + afterCtxReachable |= !caseCtx.bb.ignore // close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body) caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos) } + afterCtxReachable |= (default == afterCtx) ctx1.bb.emitOnly( SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos ) + afterCtx.bb killUnless afterCtxReachable afterCtx } genLoadMatch @@ -1342,9 +1355,9 @@ abstract class GenICode extends SubComponent { private def genCond(tree: Tree, ctx: Context, thenCtx: Context, - elseCtx: Context): Unit = - { - def genComparisonOp(l: Tree, r: Tree, code: Int) { + elseCtx: Context): Boolean = + { + def genComparisonOp(l: Tree, r: Tree, code: Int): Boolean = { val op: TestOp = code match { case scalaPrimitives.LT => LT case scalaPrimitives.LE => LE @@ -1360,27 +1373,33 @@ abstract class GenICode extends SubComponent { lazy val nonNullSide = ifOneIsNull(l, r) if (isReferenceEqualityOp(code) && nonNullSide != null) { val ctx1 = genLoad(nonNullSide, ctx, ObjectReference) + val branchesReachable = !ctx1.bb.ignore ctx1.bb.emitOnly( CZJUMP(thenCtx.bb, elseCtx.bb, op, ObjectReference) ) + branchesReachable } else { val kind = getMaxType(l.tpe :: r.tpe :: Nil) var ctx1 = genLoad(l, ctx, kind) ctx1 = genLoad(r, ctx1, kind) + val branchesReachable = !ctx1.bb.ignore ctx1.bb.emitOnly( CJUMP(thenCtx.bb, elseCtx.bb, op, kind) setPos r.pos ) + branchesReachable } } debuglog("Entering genCond with tree: " + tree) // the default emission - def default() = { + def default(): Boolean = { val ctx1 = genLoad(tree, ctx, BOOL) + val branchesReachable = !ctx1.bb.ignore ctx1.bb.closeWith(CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL) setPos tree.pos) + branchesReachable } tree match { @@ -1392,11 +1411,12 @@ abstract class GenICode extends SubComponent { lazy val Select(lhs, _) = fun lazy val rhs = args.head - def genZandOrZor(and: Boolean) = { + def genZandOrZor(and: Boolean): Boolean = { val ctxInterm = ctx.newBlock() - if (and) genCond(lhs, ctx, ctxInterm, elseCtx) + val branchesReachable = if (and) genCond(lhs, ctx, ctxInterm, elseCtx) else genCond(lhs, ctx, thenCtx, ctxInterm) + ctxInterm.bb killUnless branchesReachable genCond(rhs, 
ctxInterm, thenCtx, elseCtx) } @@ -1436,7 +1456,7 @@ abstract class GenICode extends SubComponent { * @param thenCtx target context if the comparison yields true * @param elseCtx target context if the comparison yields false */ - def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Unit = { + def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Boolean = { def getTempLocal = ctx.method.lookupLocal(nme.EQEQ_LOCAL_VAR) getOrElse { ctx.makeLocal(l.pos, AnyRefClass.tpe, nme.EQEQ_LOCAL_VAR.toString) } @@ -1476,26 +1496,40 @@ abstract class GenICode extends SubComponent { val ctx1 = genLoad(l, ctx, ObjectReference) val ctx2 = genLoad(r, ctx1, ObjectReference) + val branchesReachable = !ctx2.bb.ignore ctx2.bb.emitOnly( CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(onInstance = false)), CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL) ) + branchesReachable } else { - if (isNull(l)) + if (isNull(l)) { // null == expr -> expr eq null - genLoad(r, ctx, ObjectReference).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference) - else if (isNull(r)) { + val ctx1 = genLoad(r, ctx, ObjectReference) + val branchesReachable = !ctx1.bb.ignore + ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference) + branchesReachable + } else if (isNull(r)) { // expr == null -> expr eq null - genLoad(l, ctx, ObjectReference).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference) + val ctx1 = genLoad(l, ctx, ObjectReference) + val branchesReachable = !ctx1.bb.ignore + ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference) + branchesReachable } else { val eqEqTempLocal = getTempLocal var ctx1 = genLoad(l, ctx, ObjectReference) - lazy val nonNullCtx = ctx1.newBlock() + val branchesReachable = !ctx1.bb.ignore + lazy val nonNullCtx = { + val block = ctx1.newBlock() + block.bb killUnless branchesReachable + block + } // l == r -> if (l eq null) r eq null else l.equals(r) ctx1 = genLoad(r, ctx1, ObjectReference) val nullCtx = ctx1.newBlock() + nullCtx.bb killUnless branchesReachable ctx1.bb.emitOnly( STORE_LOCAL(eqEqTempLocal) setPos l.pos, @@ -1512,6 +1546,7 @@ abstract class GenICode extends SubComponent { CALL_METHOD(Object_equals, Dynamic), CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL) ) + branchesReachable } } } @@ -1957,6 +1992,7 @@ abstract class GenICode extends SubComponent { val outerCtx = this.dup // context for generating exception handlers, covered by the catch-all finalizer val finalizerCtx = this.dup // context for generating finalizer handler val normalExitCtx = outerCtx.newBlock() // context where flow will go on a "normal" (non-return, non-throw) exit from a try or catch handler + var normalExitReachable = false var tmp: Local = null val kind = toTypeKind(tree.tpe) val guardResult = kind != UNIT && mayCleanStack(finalizer) @@ -1971,6 +2007,7 @@ abstract class GenICode extends SubComponent { def emitFinalizer(ctx: Context): Context = if (!finalizer.isEmpty) { val ctx1 = finalizerCtx.dup.newBlock() + ctx1.bb killIf ctx.bb.ignore ctx.bb.closeWith(JUMP(ctx1.bb)) if (guardResult) { @@ -1986,32 +2023,38 @@ abstract class GenICode extends SubComponent { // Generate the catch-all exception handler that deals with uncaught exceptions coming // from the try or exception handlers. 
It catches the exception, runs the finally code, then rethrows // the exception - if (finalizer != EmptyTree) { - val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers - this.addActiveHandler(exh) // .. and body aswell - val exhStartCtx = finalizerCtx.enterExceptionHandler(exh) - val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc") - loadException(exhStartCtx, exh, finalizer.pos) - exhStartCtx.bb.emit(STORE_LOCAL(exception)) - val exhEndCtx = genLoad(finalizer, exhStartCtx, UNIT) - exhEndCtx.bb.emit(LOAD_LOCAL(exception)) - exhEndCtx.bb.closeWith(THROW(ThrowableClass)) - exhEndCtx.bb.enterIgnoreMode() - finalizerCtx.endHandler() - } - - // Generate each exception handler - for ((sym, kind, handler) <- handlers) { - val exh = this.newExceptionHandler(sym, tree.pos) - val exhStartCtx = outerCtx.enterExceptionHandler(exh) - exhStartCtx.addFinalizer(finalizer, finalizerCtx) - loadException(exhStartCtx, exh, tree.pos) - val exhEndCtx = handler(exhStartCtx) - exhEndCtx.bb.closeWith(JUMP(normalExitCtx.bb)) - outerCtx.endHandler() + if (settings.YdisableUnreachablePrevention.value || !outerCtx.bb.ignore) { + if (finalizer != EmptyTree) { + val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers + this.addActiveHandler(exh) // .. and body aswell + val exhStartCtx = finalizerCtx.enterExceptionHandler(exh) + exhStartCtx.bb killIf outerCtx.bb.ignore + val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc") + loadException(exhStartCtx, exh, finalizer.pos) + exhStartCtx.bb.emit(STORE_LOCAL(exception)) + val exhEndCtx = genLoad(finalizer, exhStartCtx, UNIT) + exhEndCtx.bb.emit(LOAD_LOCAL(exception)) + exhEndCtx.bb.closeWith(THROW(ThrowableClass)) + exhEndCtx.bb.enterIgnoreMode() + finalizerCtx.endHandler() + } + + // Generate each exception handler + for ((sym, kind, handler) <- handlers) { + val exh = this.newExceptionHandler(sym, tree.pos) + val exhStartCtx = outerCtx.enterExceptionHandler(exh) + exhStartCtx.bb killIf outerCtx.bb.ignore + exhStartCtx.addFinalizer(finalizer, finalizerCtx) + loadException(exhStartCtx, exh, tree.pos) + val exhEndCtx = handler(exhStartCtx) + normalExitReachable |= !exhEndCtx.bb.ignore + exhEndCtx.bb.closeWith(JUMP(normalExitCtx.bb)) + outerCtx.endHandler() + } } val bodyCtx = this.newBlock() + bodyCtx.bb killIf outerCtx.bb.ignore if (finalizer != EmptyTree) bodyCtx.addFinalizer(finalizer, finalizerCtx) @@ -2019,6 +2062,8 @@ abstract class GenICode extends SubComponent { outerCtx.bb.closeWith(JUMP(bodyCtx.bb)) + normalExitReachable |= !bodyEndCtx.bb.ignore + normalExitCtx.bb killUnless normalExitReachable bodyEndCtx.bb.closeWith(JUMP(normalExitCtx.bb)) emitFinalizer(normalExitCtx) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 703922b20a..1fb7b11b20 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -3298,7 +3298,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def normalize(m: IMethod) { if(!m.hasCode) { return } collapseJumpOnlyBlocks(m) - elimUnreachableBlocks(m) + if (settings.optimise.value) + elimUnreachableBlocks(m) icodes checkValid m } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 702071f906..2aee9bd4bc 100644 --- 
a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -173,6 +173,7 @@ trait ScalaSettings extends AbsScalaSettings val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "") val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.") val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes") + val YdisableUnreachablePrevention = BooleanSetting("-Ydisable-unreachable-prevention", "Disable the prevention of unreachable blocks in code generation.") val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly() diff --git a/test/files/jvm/t7006/Foo_1.flags b/test/files/jvm/t7006/Foo_1.flags index b723a661a7..37b2116413 100644 --- a/test/files/jvm/t7006/Foo_1.flags +++ b/test/files/jvm/t7006/Foo_1.flags @@ -1 +1 @@ --Ynooptimise -Ydead-code -Ydebug -Xfatal-warnings +-optimise -Ydebug -Xfatal-warnings diff --git a/test/files/jvm/unreachable/Foo_1.flags b/test/files/jvm/unreachable/Foo_1.flags new file mode 100644 index 0000000000..ce6e93b3da --- /dev/null +++ b/test/files/jvm/unreachable/Foo_1.flags @@ -0,0 +1 @@ +-Ynooptimise \ No newline at end of file diff --git a/test/files/jvm/unreachable/Foo_1.scala b/test/files/jvm/unreachable/Foo_1.scala new file mode 100644 index 0000000000..d17421c516 --- /dev/null +++ b/test/files/jvm/unreachable/Foo_1.scala @@ -0,0 +1,110 @@ +class Foo_1 { + def unreachableNormalExit: Int = { + return 42 + 0 + } + + def unreachableIf: Int = { + return 42 + if (util.Random.nextInt % 2 == 0) + 0 + else + 1 + } + + def unreachableIfBranches: Int = { + if (util.Random.nextInt % 2 == 0) + return 42 + else + return 42 + + return 0 + } + + def unreachableOneLegIf: Int = { + if (util.Random.nextInt % 2 == 0) + return 42 + + return 42 + } + + def unreachableLeftBranch: Int = { + val result = if (util.Random.nextInt % 2 == 0) + return 42 + else + 42 + + return result + } + + def unreachableRightBranch: Int = { + val result = if (util.Random.nextInt % 2 == 0) + 42 + else + return 42 + + return result + } + + def unreachableTryCatchFinally: Int = { + return 42 + try { + return 0 + } catch { + case x: Throwable => return 1 + } finally { + return 2 + } + return 3 + } + + def unreachableAfterTry: Int = { + try { + return 42 + } catch { + case x: Throwable => return 2 + } + return 3 + } + + def unreachableAfterCatch: Int = { + try { + error("haha") + } catch { + case x: Throwable => return 42 + } + return 3 + } + + def unreachableAfterFinally: Int = { + try { + return 1 + } catch { + case x: Throwable => return 2 + } finally { + return 42 + } + return 3 + } + + def unreachableSwitch: Int = { + return 42 + val x = util.Random.nextInt % 2 + x match { + case 0 => return 0 + case 1 => return 1 + case _ => error("wtf") + } + 2 + } + + def unreachableAfterSwitch: Int = { + val x = util.Random.nextInt % 2 + x match { + case 0 => return 42 + case 1 => return 41 + x + case _ => error("wtf") + } + 2 + } +} \ No newline at end of file diff --git a/test/files/jvm/unreachable/Test.scala b/test/files/jvm/unreachable/Test.scala new file mode 100644 index 0000000000..3f520eb106 --- /dev/null +++ b/test/files/jvm/unreachable/Test.scala @@ -0,0 +1,23 @@ +import scala.tools.partest.BytecodeTest +import scala.tools.asm +import asm.tree.InsnList +import 
scala.collection.JavaConverters._ + +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("Foo_1") + // Foo_1 is full of unreachable code which if not elimintated + // will result in NOPs as can be confirmed by adding -Ydisable-unreachable-prevention + // to Foo_1.flags + for (methodNode <- classNode.methods.asScala) { + val got = count(methodNode.instructions, asm.Opcodes.NOP) + if (got != 0) println(s"Found $got NOP(s) in ${methodNode.name}") + } + } + + def count(insnList: InsnList, opcode: Int): Int = { + def isNop(node: asm.tree.AbstractInsnNode): Boolean = + (node.getOpcode == opcode) + insnList.iterator.asScala.count(isNop) + } +} \ No newline at end of file diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check index 0a234e2659..abcc8bf42d 100644 --- a/test/files/run/inline-ex-handlers.check +++ b/test/files/run/inline-ex-handlers.check @@ -14,9 +14,9 @@ < < 2: 247c246 -< blocks: [1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,18] +< blocks: [1,2,3,4,5,6,7,8,11,12,13,14,15,16,17,18] --- -> blocks: [1,2,3,4,5,6,8,10,11,12,13,14,15,16,17,18] +> blocks: [1,2,3,4,5,6,8,11,12,13,14,15,16,17,18] 258,260d256 < 92 JUMP 7 < @@ -57,19 +57,18 @@ > ? LOAD_LOCAL(value x5) > 106 CALL_METHOD MyException.message (dynamic) 519c518 -< blocks: [1,2,3,4,6,7,8,9,10] +< blocks: [1,2,3,4,6,7,9,10] --- -> blocks: [1,2,3,4,6,7,8,9,10,11,12,13] -548c547 +> blocks: [1,3,4,6,7,9,10,11,12,13] +548c547,552 < 306 THROW(MyException) --- > ? JUMP 11 -549a549,553 +> > 11: > ? LOAD_LOCAL(variable monitor4) > 305 MONITOR_EXIT > ? JUMP 12 -> 554c558 < ? THROW(Throwable) --- @@ -85,7 +84,13 @@ > 304 MONITOR_EXIT > ? STORE_LOCAL(value t) > ? JUMP 13 -575a587,598 +574c585 +< 310 JUMP 2 +--- +> 300 RETURN(UNIT) +576c587,596 +< 2: +--- > 13: > 310 LOAD_MODULE object Predef > 310 CALL_PRIMITIVE(StartConcat) @@ -96,37 +101,34 @@ > 310 CALL_PRIMITIVE(StringConcat(REF(class String))) > 310 CALL_PRIMITIVE(EndConcat) > 310 CALL_METHOD scala.Predef.println (dynamic) -> 310 JUMP 2 -> -584c607 -< catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6 +584c604 +< catch (Throwable) in ArrayBuffer(7, 9, 10) starting at: 6 --- -> catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6 -587c610 -< catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3 +> catch (Throwable) in ArrayBuffer(7, 9, 10, 11) starting at: 6 +587c607 +< catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10) starting at: 3 --- -> catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3 -619c642 +> catch (Throwable) in ArrayBuffer(4, 6, 7, 9, 10, 11, 12) starting at: 3 +619c639 < blocks: [1,3,4,5,6,8,9] --- > blocks: [1,3,4,5,6,8,9,10,11] -643c666,667 +643c663,669 < 78 THROW(IllegalArgumentException) --- > ? STORE_LOCAL(value e) > ? JUMP 10 -644a669,673 +> > 10: > 81 LOAD_LOCAL(value e) > ? STORE_LOCAL(variable exc1) > ? JUMP 11 -> -669c698,699 +669c695,696 < 81 THROW(Exception) --- > ? STORE_LOCAL(variable exc1) > ? 
JUMP 11 -685a716,728 +685a713,725 > 11: > 83 LOAD_MODULE object Predef > 83 CONSTANT("finally") @@ -140,19 +142,19 @@ > 84 LOAD_LOCAL(variable exc1) > 84 THROW(Throwable) > -691c734 +691c731 < catch () in ArrayBuffer(4, 5, 6, 8) starting at: 3 --- > catch () in ArrayBuffer(4, 5, 6, 8, 10) starting at: 3 -715c758 +715c755 < locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x -717c760 +717c757 < blocks: [1,3,4,5,6,9,13,14,15,18,20,21,23,24] --- > blocks: [1,3,4,5,6,9,13,14,15,18,20,21,23,24,25,26,27] -741c784,791 +741c781,788 < 172 THROW(MyException) --- > ? STORE_LOCAL(value ex6) @@ -163,64 +165,64 @@ > 170 STORE_LOCAL(value x4) > 170 SCOPE_ENTER value x4 > 170 JUMP 14 -781,784d830 +781,784d827 < 175 LOAD_LOCAL(value x5) < 175 CALL_METHOD MyException.message (dynamic) < 175 STORE_LOCAL(value message) < 175 SCOPE_ENTER value message -786c832,833 +786c829,830 < 176 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 176 CALL_METHOD MyException.message (dynamic) -790c837,838 +790c834,835 < 177 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 177 CALL_METHOD MyException.message (dynamic) -792c840,841 +792c837,838 < 177 THROW(MyException) --- > ? STORE_LOCAL(value ex6) > ? JUMP 26 -796c845,846 +796c842,843 < 170 THROW(Throwable) --- > ? STORE_LOCAL(value ex6) > ? JUMP 26 -805a856,861 +805a853,858 > 26: > 169 LOAD_LOCAL(value ex6) > 169 STORE_LOCAL(value x4) > 169 SCOPE_ENTER value x4 > 169 JUMP 5 > -816,819d871 +816,819d868 < 180 LOAD_LOCAL(value x5) < 180 CALL_METHOD MyException.message (dynamic) < 180 STORE_LOCAL(value message) < 180 SCOPE_ENTER value message -821c873,874 +821c870,871 < 181 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 181 CALL_METHOD MyException.message (dynamic) -825c878,879 +825c875,876 < 182 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 182 CALL_METHOD MyException.message (dynamic) -827c881,882 +827c878,879 < 182 THROW(MyException) --- > ? STORE_LOCAL(variable exc2) > ? JUMP 27 -831c886,887 +831c883,884 < 169 THROW(Throwable) --- > ? STORE_LOCAL(variable exc2) > ? JUMP 27 -847a904,916 +847a901,913 > 27: > 184 LOAD_MODULE object Predef > 184 CONSTANT("finally") @@ -234,23 +236,23 @@ > 185 LOAD_LOCAL(variable exc2) > 185 THROW(Throwable) > -853c922 +853c919 < catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23) starting at: 4 --- > catch (Throwable) in ArrayBuffer(13, 14, 15, 18, 20, 21, 23, 25) starting at: 4 -856c925 +856c922 < catch () in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23) starting at: 3 --- > catch () in ArrayBuffer(4, 5, 6, 9, 13, 14, 15, 18, 20, 21, 23, 25, 26) starting at: 3 -880c949 +880c946 < locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value e, value ex6, value x4, value x5, value x -882c951 +882c948 < blocks: [1,2,3,6,7,8,11,13,14,16] --- > blocks: [1,2,3,6,7,8,11,13,14,16,17] -906c975,982 +906c972,979 < 124 THROW(MyException) --- > ? 
STORE_LOCAL(value ex6) @@ -261,29 +263,29 @@ > 122 STORE_LOCAL(value x4) > 122 SCOPE_ENTER value x4 > 122 JUMP 7 -931,934d1006 +931,934d1003 < 127 LOAD_LOCAL(value x5) < 127 CALL_METHOD MyException.message (dynamic) < 127 STORE_LOCAL(value message) < 127 SCOPE_ENTER value message -936c1008,1009 +936c1005,1006 < 127 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 127 CALL_METHOD MyException.message (dynamic) -965c1038 +965c1035 < catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3 --- > catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3 -989c1062 +989c1059 < locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e --- > locals: value args, variable result, value ex6, value x4, value x5, value x, value e -991c1064 +991c1061 < blocks: [1,2,3,4,5,8,12,13,14,16] --- > blocks: [1,2,3,5,8,12,13,14,16,17] -1015c1088,1097 +1015c1085,1094 < 148 THROW(MyException) --- > ? STORE_LOCAL(value ex6) @@ -296,25 +298,25 @@ > 154 LOAD_LOCAL(value x4) > 154 IS_INSTANCE REF(class MyException) > 154 CZJUMP (BOOL)NE ? 5 : 8 -1036,1038d1117 +1036,1038d1114 < 145 JUMP 4 < < 4: -1048,1051d1126 +1048,1051d1123 < 154 LOAD_LOCAL(value x5) < 154 CALL_METHOD MyException.message (dynamic) < 154 STORE_LOCAL(value message) < 154 SCOPE_ENTER value message -1053c1128,1129 +1053c1125,1126 < 154 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 154 CALL_METHOD MyException.message (dynamic) -1270c1346 +1270c1343 < blocks: [1,2,3,4,5,7] --- > blocks: [1,2,3,4,5,7,8] -1294c1370,1377 +1294c1367,1374 < 38 THROW(IllegalArgumentException) --- > ? STORE_LOCAL(value e) @@ -325,20 +327,20 @@ > 42 CONSTANT("IllegalArgumentException") > 42 CALL_METHOD scala.Predef.println (dynamic) > 42 JUMP 2 -1341c1424 +1341c1421 < locals: value args, variable result, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value ex6, value x4, value x5, value x -1343c1426 +1343c1423 < blocks: [1,2,3,4,5,8,10,11,13,14,16] --- > blocks: [1,2,3,5,8,10,11,13,14,16,17] -1367c1450,1451 +1367c1447,1448 < 203 THROW(MyException) --- > ? STORE_LOCAL(value ex6) > ? JUMP 17 -1387c1471,1480 +1387c1468,1477 < 209 THROW(MyException) --- > ? STORE_LOCAL(value ex6) @@ -351,41 +353,41 @@ > 212 LOAD_LOCAL(value x4) > 212 IS_INSTANCE REF(class MyException) > 212 CZJUMP (BOOL)NE ? 5 : 8 -1400,1402d1492 +1400,1402d1489 < 200 JUMP 4 < < 4: -1412,1415d1501 +1412,1415d1498 < 212 LOAD_LOCAL(value x5) < 212 CALL_METHOD MyException.message (dynamic) < 212 STORE_LOCAL(value message) < 212 SCOPE_ENTER value message -1417c1503,1504 +1417c1500,1501 < 213 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 213 CALL_METHOD MyException.message (dynamic) -1461c1548 +1461c1545 < blocks: [1,2,3,4,5,7] --- > blocks: [1,2,3,4,5,7,8] -1485c1572,1573 +1485c1569,1570 < 58 THROW(IllegalArgumentException) --- > ? STORE_LOCAL(value e) > ? JUMP 8 -1486a1575,1580 +1486a1572,1577 > 8: > 62 LOAD_MODULE object Predef > 62 CONSTANT("RuntimeException") > 62 CALL_METHOD scala.Predef.println (dynamic) > 62 JUMP 2 > -1534c1628 -< blocks: [1,2,3,4] +1534c1625 +< blocks: [1,3,4] --- -> blocks: [1,2,3,4,5] -1554c1648,1653 +> blocks: [1,3,4,5] +1554c1645,1650 < 229 THROW(MyException) --- > ? JUMP 5 @@ -394,19 +396,19 @@ > ? LOAD_LOCAL(variable monitor1) > 228 MONITOR_EXIT > 228 THROW(Throwable) -1560c1659 +1560c1656 < ? 
THROW(Throwable) --- > 228 THROW(Throwable) -1588c1687 +1588c1684 < locals: value args, variable result, variable monitor2, variable monitorResult1 --- > locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1 -1590c1689 -< blocks: [1,2,3,4] +1590c1686 +< blocks: [1,3,4] --- -> blocks: [1,2,3,4,5] -1613c1712,1720 +> blocks: [1,3,4,5] +1613c1709,1717 < 245 THROW(MyException) --- > ? STORE_LOCAL(value exception$1) @@ -418,7 +420,7 @@ > ? LOAD_LOCAL(variable monitor2) > 244 MONITOR_EXIT > 244 THROW(Throwable) -1619c1726 +1619c1723 < ? THROW(Throwable) --- > 244 THROW(Throwable) diff --git a/test/files/run/unreachable.scala b/test/files/run/unreachable.scala new file mode 100644 index 0000000000..d3b9f3404f --- /dev/null +++ b/test/files/run/unreachable.scala @@ -0,0 +1,125 @@ +object Test extends App { + def unreachableNormalExit: Int = { + return 42 + 0 + } + + def unreachableIf: Int = { + return 42 + if (util.Random.nextInt % 2 == 0) + 0 + else + 1 + } + + def unreachableIfBranches: Int = { + if (util.Random.nextInt % 2 == 0) + return 42 + else + return 42 + + return 0 + } + + def unreachableOneLegIf: Int = { + if (util.Random.nextInt % 2 == 0) + return 42 + + return 42 + } + + def unreachableLeftBranch: Int = { + val result = if (util.Random.nextInt % 2 == 0) + return 42 + else + 42 + + return result + } + + def unreachableRightBranch: Int = { + val result = if (util.Random.nextInt % 2 == 0) + 42 + else + return 42 + + return result + } + + def unreachableTryCatchFinally: Int = { + return 42 + try { + return 0 + } catch { + case x: Throwable => return 1 + } finally { + return 2 + } + return 3 + } + + def unreachableAfterTry: Int = { + try { + return 42 + } catch { + case x: Throwable => return 2 + } + return 3 + } + + def unreachableAfterCatch: Int = { + try { + error("haha") + } catch { + case x: Throwable => return 42 + } + return 3 + } + + def unreachableAfterFinally: Int = { + try { + return 1 + } catch { + case x: Throwable => return 2 + } finally { + return 42 + } + return 3 + } + + def unreachableSwitch: Int = { + return 42 + val x = util.Random.nextInt % 2 + x match { + case 0 => return 0 + case 1 => return 1 + case _ => error("wtf") + } + 2 + } + + def unreachableAfterSwitch: Int = { + val x = util.Random.nextInt % 2 + x match { + case 0 => return 42 + case 1 => return 41 + x + case _ => error("wtf") + } + 2 + } + + def check(f: Int) = assert(f == 42, s"Expected 42 but got $f") + + check(unreachableNormalExit) + check(unreachableIf) + check(unreachableIfBranches) + check(unreachableOneLegIf) + check(unreachableLeftBranch) + check(unreachableRightBranch) + check(unreachableTryCatchFinally) + check(unreachableAfterTry) + check(unreachableAfterCatch) + check(unreachableAfterFinally) + check(unreachableSwitch) + check(unreachableAfterSwitch) +} \ No newline at end of file -- cgit v1.2.3 From 69109c0ace5e3ac831c3b0a5635f25317d3b28bf Mon Sep 17 00:00:00 2001 From: James Iry Date: Thu, 7 Mar 2013 15:05:35 -0800 Subject: Analyze constants to remove unnecessary branches This commit adds analysis and optimization of constants to remove unnecessary branches. It uses abstract interpretation to determine what constant(s) a particular stack slot or variable might or might not hold at a given spot and uses that knowledge to eliminate branches that cannot be taken. Its primary goal is null check removal, but it also works for other constants. Several tests are modified to include the new optimization phase. Two new tests are added. 
One verifies that branching still works as expected. The other verifies that branches are removed. --- src/compiler/scala/tools/nsc/Global.scala | 10 +- .../nsc/backend/opt/ConstantOptimization.scala | 639 +++++++++++++++++++++ .../scala/tools/nsc/settings/ScalaSettings.scala | 3 +- test/files/jvm/constant-optimization/Foo_1.flags | 1 + test/files/jvm/constant-optimization/Foo_1.scala | 9 + test/files/jvm/constant-optimization/Test.scala | 27 + test/files/neg/t6446-additional.check | 9 +- test/files/neg/t6446-missing.check | 7 +- test/files/neg/t6446-show-phases.check | 7 +- test/files/run/constant-optimization.check | 2 + test/files/run/constant-optimization.scala | 18 + test/files/run/programmatic-main.check | 7 +- 12 files changed, 724 insertions(+), 15 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala create mode 100644 test/files/jvm/constant-optimization/Foo_1.flags create mode 100644 test/files/jvm/constant-optimization/Foo_1.scala create mode 100644 test/files/jvm/constant-optimization/Test.scala create mode 100644 test/files/run/constant-optimization.check create mode 100644 test/files/run/constant-optimization.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 51fa8f0ab9..2156a39da6 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -25,7 +25,7 @@ import transform._ import backend.icode.{ ICodes, GenICode, ICodeCheckers } import backend.{ ScalaPrimitives, Platform, JavaPlatform } import backend.jvm.GenASM -import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination } +import backend.opt.{ Inliners, InlineExceptionHandlers, ConstantOptimization, ClosureElimination, DeadCodeElimination } import backend.icode.analysis._ import scala.language.postfixOps @@ -592,6 +592,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val runsRightAfter = None } with ClosureElimination + // phaseName = "constopt" + object constantOptimization extends { + val global: Global.this.type = Global.this + val runsAfter = List("closelim") + val runsRightAfter = None + } with ConstantOptimization + // phaseName = "dce" object deadCode extends { val global: Global.this.type = Global.this @@ -676,6 +683,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) inliner -> "optimization: do inlining", inlineExceptionHandlers -> "optimization: inline exception handlers", closureElimination -> "optimization: eliminate uncalled closures", + constantOptimization -> "optimization: optimize null and other constants", deadCode -> "optimization: eliminate dead code", terminal -> "The last phase in the compiler chain" ) diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala new file mode 100644 index 0000000000..b3da012e1a --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala @@ -0,0 +1,639 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author James Iry + */ + +package scala.tools.nsc +package backend.opt + +import scala.tools.nsc.backend.icode.analysis.LubException +import scala.annotation.tailrec + +/** + * ConstantOptimization uses abstract interpretation to approximate for + * each instruction what constants a variable or stack slot might hold + * or cannot hold. 
From this it will eliminate unreachable conditionals + * where only one branch is reachable, e.g. to eliminate unnecessary + * null checks. + * + * With some more work it could be extended to + * - cache stable values (final fields, modules) in locals + * - replace the copy propagation in ClosureElilmination + * - fold constants + * - eliminate unnecessary stores and loads + * - propagate knowledge gathered from conditionals for further optimization + */ +abstract class ConstantOptimization extends SubComponent { + import global._ + import icodes._ + import icodes.opcodes._ + + val phaseName = "constopt" + + /** Create a new phase */ + override def newPhase(p: Phase) = new ConstantOptimizationPhase(p) + + /** + * The constant optimization phase. + */ + class ConstantOptimizationPhase(prev: Phase) extends ICodePhase(prev) { + + def name = phaseName + + override def apply(c: IClass) { + if (settings.YconstOptimization.value) { + val analyzer = new ConstantOptimizer + analyzer optimizeClass c + } + } + } + + class ConstantOptimizer { + def optimizeClass(cls: IClass) { + log(s"Analyzing ${cls.methods.size} methods in $cls.") + cls.methods foreach { m => + optimizeMethod(m) + } + } + + def optimizeMethod(m: IMethod) { + if (m.hasCode) { + log(s"Analyzing ${m.symbol}") + val replacementInstructions = interpretMethod(m) + for (block <- m.blocks) { + if (replacementInstructions contains block) { + val instructions = replacementInstructions(block) + block.replaceInstruction(block.lastInstruction, instructions) + } + } + } + } + + /** + * A single possible (or impossible) datum that can be held in Contents + */ + private sealed abstract class Datum + /** + * A constant datum + */ + private case class Const(c: Constant) extends Datum { + def isIntAssignable = c.tag >= BooleanTag && c.tag <= IntTag + def toInt = c.tag match { + case BooleanTag => if (c.booleanValue) 1 else 0 + case _ => c.intValue + } + + /** + * True if this constant has the same representation (and therefore would compare true under eq) as another constant + */ + override def equals(other: Any) = (other match { + case oc @ Const(o) => (this eq oc) || (if (this.isIntAssignable && oc.isIntAssignable) this.toInt == oc.toInt else c.value == o.value) + case _ => false + }) + + /** + * Hash code based on representation of the constant, consistent with equals + */ + override def hashCode = if (c.isIntRange) c.intValue else c.hashCode + + } + /** + * A datum that has been Boxed via a BOX instruction + */ + private case class Boxed(c: Datum) extends Datum + + /** + * The knowledge we have about the abstract state of one location in terms + * of what constants it might or cannot hold. Forms a lower + * lattice where lower elements in the lattice indicate less knowledge. 
+ * + * With the following partial ordering (where '>' indicates more precise knowledge) + * + * Possible(xs) > Possible(xs + y) + * Possible(xs) > Impossible(ys) + * Impossible(xs + y) > Impossible(xs) + * + * and the following merges, which indicate merging knowledge from two paths through + * the code, + * + * // left must be 1 or 2, right must be 2 or 3 then we must have a 1, 2 or 3 + * Possible(xs) merge Possible(ys) => Possible(xs union ys) + * + * // Left says can't be 2 or 3, right says can't be 3 or 4 + * // then it's not 3 (it could be 2 from the right or 4 from the left) + * Impossible(xs) merge Impossible(ys) => Impossible(xs intersect ys) + * + * // Left says it can't be 2 or 3, right says it must be 3 or 4, then + * // it can't be 2 (left rules out 4 and right says 3 is possible) + * Impossible(xs) merge Possible(ys) => Impossible(xs -- ys) + * + * Intuitively, Possible(empty) says that a location can't hold anything, + * it's uninitialized. However, Possible(empty) never appears in the code. + * + * Conversely, Impossible(empty) says nothing is impossible, it could be + * anything. Impossible(empty) is given a synonym UNKNOWN and is used + * for, e.g., the result of an arbitrary method call. + */ + private sealed abstract class Contents { + /** + * Join this Contents with another coming from another path. Join enforces + * the lattice structure. It is symmetrical and never moves upward in the + * lattice + */ + final def merge(other: Contents): Contents = if (this eq other) this else (this, other) match { + case (Possible(possible1), Possible(possible2)) => + Possible(possible1 union possible2) + case (Impossible(impossible1), Impossible(impossible2)) => + Impossible(impossible1 intersect impossible2) + case (Impossible(impossible), Possible(possible)) => + Impossible(impossible -- possible) + case (Possible(possible), Impossible(impossible)) => + Impossible(impossible -- possible) + } + // TODO we could have more fine-grained knowledge, e.g. know that 0 < x < 3. But for now equality/inequality is a good start. + def mightEqual(other: Contents): Boolean + def mightNotEqual(other: Contents): Boolean + } + private def SingleImpossible(x: Datum) = new Impossible(Set(x)) + + /** + * The location is known to have one of a set of values. + */ + private case class Possible(possible: Set[Datum]) extends Contents { + assert(possible.nonEmpty, "Contradiction: had an empty possible set indicating an uninitialized location") + def mightEqual(other: Contents): Boolean = (this eq other) || (other match { + // two Possibles might be equal if they have any possible members in common + case Possible(possible2) => (possible intersect possible2).nonEmpty + // a possible can be equal to an impossible if the impossible doesn't rule + // out all the possibilities + case Impossible(possible2) => (possible -- possible2).nonEmpty + }) + def mightNotEqual(other: Contents): Boolean = (this ne other) && (other match { + // two Possibles might not be equal if either has possible members that the other doesn't + case Possible(possible2) => (possible -- possible2).nonEmpty || (possible2 -- possible).nonEmpty + case Impossible(_) => true + }) + } + private def SinglePossible(x: Datum) = new Possible(Set(x)) + + /** + * The location is known to not have any of a set of values value (e.g null). 
+ */ + private case class Impossible(impossible: Set[Datum]) extends Contents { + def mightEqual(other: Contents): Boolean = (this eq other) || (other match { + case Possible(_) => other mightEqual this + case _ => true + }) + def mightNotEqual(other: Contents): Boolean = (this eq other) || (other match { + case Possible(_) => other mightNotEqual this + case _ => true + }) + } + + /** + * Our entire knowledge about the contents of all variables and the stack. It forms + * a lattice primarily driven by the lattice structure of Contents. + * + * In addition to the rules of contents, State has the following properties: + * - The merge of two sets of locals holds the merges of locals found in the intersection + * of the two sets of locals. Locals not found in a + * locals map are thus possibly uninitialized and attempting to load them results + * in an error. + * - The stack heights of two states must match otherwise it's an error to merge them + * + * State is immutable in order to aid in structure sharing of local maps and stacks + */ + private case class State(locals: Map[Local, Contents], stack: List[Contents]) { + def mergeLocals(olocals: Map[Local, Contents]): Map[Local, Contents] = if (locals eq olocals) locals else Map((for { + key <- (locals.keySet intersect olocals.keySet).toSeq + } yield (key, locals(key) merge olocals(key))): _*) + + def merge(other: State): State = if (this eq other) this else { + @tailrec def mergeStacks(l: List[Contents], r: List[Contents], out: List[Contents]): List[Contents] = (l, r) match { + case (Nil, Nil) => out.reverse + case (l, r) if l eq r => out.reverse ++ l + case (lhead :: ltail, rhead :: rtail) => mergeStacks(ltail, rtail, (lhead merge rhead) :: out) + case _ => sys.error("Mismatched stack heights") + } + + val newLocals = mergeLocals(other.locals) + + val newStack = if (stack eq other.stack) stack else mergeStacks(stack, other.stack, Nil) + State(newLocals, newStack) + } + + /** + * Peek at the top of the stack without modifying it. Error if the stack is empty + */ + def peek(n: Int): Contents = stack(n) + /** + * Push contents onto a stack + */ + def push(contents: Contents): State = this copy (stack = contents :: stack) + /** + * Drop n elements from the stack + */ + def drop(number: Int): State = this copy (stack = stack drop number) + /** + * Store the top of the stack into the specified local. An error if the stack + * is empty + */ + def store(variable: Local): State = { + val contents = stack.head + val newVariables = locals + ((variable, contents)) + new State(newVariables, stack.tail) + } + /** + * Load the specified local onto the top of the stack. An error the the local is uninitialized. 
+ */ + def load(variable: Local): State = { + val contents: Contents = locals.getOrElse(variable, sys.error(s"$variable is not initialized")) + push(contents) + } + /** + * A copy of this State with an empty stack + */ + def cleanStack: State = if (stack.isEmpty) this else this copy (stack = Nil) + } + + // some precomputed constants + private val NULL = Const(Constant(null: Any)) + private val UNKNOWN = Impossible(Set.empty) + private val NOT_NULL = SingleImpossible(NULL) + private val CONST_UNIT = SinglePossible(Const(Constant(()))) + private val CONST_FALSE = SinglePossible(Const(Constant(false))) + private val CONST_ZERO_BYTE = SinglePossible(Const(Constant(0: Byte))) + private val CONST_ZERO_SHORT = SinglePossible(Const(Constant(0: Short))) + private val CONST_ZERO_CHAR = SinglePossible(Const(Constant(0: Char))) + private val CONST_ZERO_INT = SinglePossible(Const(Constant(0: Int))) + private val CONST_ZERO_LONG = SinglePossible(Const(Constant(0: Long))) + private val CONST_ZERO_FLOAT = SinglePossible(Const(Constant(0.0f))) + private val CONST_ZERO_DOUBLE = SinglePossible(Const(Constant(0.0d))) + private val CONST_NULL = SinglePossible(NULL) + + /** + * Given a TypeKind, figure out what '0' for it means in order to interpret CZJUMP + */ + private def getZeroOf(k: TypeKind): Contents = k match { + case UNIT => CONST_UNIT + case BOOL => CONST_FALSE + case BYTE => CONST_ZERO_BYTE + case SHORT => CONST_ZERO_SHORT + case CHAR => CONST_ZERO_CHAR + case INT => CONST_ZERO_INT + case LONG => CONST_ZERO_LONG + case FLOAT => CONST_ZERO_FLOAT + case DOUBLE => CONST_ZERO_DOUBLE + case REFERENCE(_) => CONST_NULL + case ARRAY(_) => CONST_NULL + case BOXED(_) => CONST_NULL + case ConcatClass => abort("no zero of ConcatClass") + } + + // normal locals can't be null, so we use null to mean the magic 'this' local + private val THIS_LOCAL: Local = null + + /** + * interpret a single instruction to find its impact on the abstract state + */ + private def interpretInst(in: State, inst: Instruction): State = inst match { + case THIS(_) => + in load THIS_LOCAL + + case CONSTANT(k) => + in push SinglePossible(Const(k)) + + case LOAD_ARRAY_ITEM(_) => + in drop 2 push UNKNOWN + + case LOAD_LOCAL(local) => + // TODO if a local is known to hold a constant then we can replace this instruction with a push of that constant + in load local + + case LOAD_FIELD(_, isStatic) => + val drops = if (isStatic) 0 else 1 + in drop drops push UNKNOWN + + case LOAD_MODULE(_) => + in push NOT_NULL + + case STORE_ARRAY_ITEM(_) => + in drop 3 + + case STORE_LOCAL(local) => + in store local + + case STORE_THIS(_) => + // if a local is already known to have a constant and we're replacing with the same constant then we can + // replace this with a drop + in store THIS_LOCAL + + case STORE_FIELD(_, isStatic) => + val drops = if (isStatic) 1 else 2 + in drop drops + + case CALL_PRIMITIVE(_) => + in drop inst.consumed push UNKNOWN + + case CALL_METHOD(_, _) => + // TODO we could special case implementations of equals that are known, e.g. 
String#equals + // We could turn Possible(string constants).equals(Possible(string constants) into an eq check + // We could turn nonConstantString.equals(constantString) into constantString.equals(nonConstantString) + // and eliminate the null check that likely precedes this call + val initial = in drop inst.consumed + (0 until inst.produced).foldLeft(initial) { case (know, _) => know push UNKNOWN } + + case BOX(_) => + val value = in peek 0 + // we simulate boxing by, um, boxing the possible/impossible contents + // so if we have Possible(1,2) originally then we'll end up with + // a Possible(Boxed(1), Boxed(2)) + // Similarly, if we know the input is not a 0 then we'll know the + // output is not a Boxed(0) + val newValue = value match { + case Possible(values) => Possible(values map Boxed) + case Impossible(values) => Impossible(values map Boxed) + } + in drop 1 push newValue + + case UNBOX(_) => + val value = in peek 0 + val newValue = value match { + // if we have a Possible, then all the possibilities + // should themselves be Boxes. In that + // case we can merge them to figure out what the UNBOX will produce + case Possible(inners) => + assert(inners.nonEmpty, "Empty possible set indicating an uninitialized location") + val sanitized: Set[Contents] = (inners map { + case Boxed(content) => SinglePossible(content) + case _ => UNKNOWN + }) + sanitized reduce (_ merge _) + // if we have an impossible then the thing that's impossible + // should be a box. We'll unbox that to see what we get + case unknown@Impossible(inners) => + if (inners.isEmpty) { + unknown + } else { + val sanitized: Set[Contents] = (inners map { + case Boxed(content) => SingleImpossible(content) + case _ => UNKNOWN + }) + sanitized reduce (_ merge _) + } + } + in drop 1 push newValue + + case NEW(_) => + in push NOT_NULL + + case CREATE_ARRAY(_, dims) => + in drop dims push NOT_NULL + + case IS_INSTANCE(_) => + // TODO IS_INSTANCE is going to be followed by a C(Z)JUMP + // and if IS_INSTANCE/C(Z)JUMP the branch for "true" can + // know that whatever was checked was not a null + // see the TODO on CJUMP for more information about propagating null + // information + // TODO if the top of stack is guaranteed null then we can eliminate this IS_INSTANCE check and + // replace with a constant false, but how often is a knowable null checked for instanceof? + // TODO we could track type information and statically know to eliminate IS_INSTANCE + // but that's probably not a huge win + in drop 1 push UNKNOWN // it's actually a Possible(true, false) but since the following instruction + // will be a conditional jump comparing to true or false there + // nothing to be gained by being more precise + + case CHECK_CAST(_) => + // TODO we could track type information and statically know to eliminate CHECK_CAST + // but that's probably not a huge win + in + + case DROP(_) => + in drop 1 + + case DUP(_) => + val value = in peek 0 + in push value + + case MONITOR_ENTER() => + in drop 1 + + case MONITOR_EXIT() => + in drop 1 + + case SCOPE_ENTER(_) | SCOPE_EXIT(_) => + in + + case LOAD_EXCEPTION(_) => + in push NOT_NULL + + case JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | RETURN(_) | THROW(_) | SWITCH(_, _) => + dumpClassesAndAbort("Unexpected block ending instruction: " + inst) + } + + /** + * interpret the last instruction of a block which will be jump, a conditional branch, a throw, or a return. + * It will result in a map from target blocks to the input state computed for that block. 
It + * also computes a replacement list of instructions + */ + private def interpretLast(in: State, inst: Instruction): (Map[BasicBlock, State], List[Instruction]) = { + def canSwitch(in1: Contents, tagSet: List[Int]) = { + in1 mightEqual Possible(tagSet.toSet map { tag: Int => Const(Constant(tag)) }) + } + + /** + * common code for interpreting CJUMP and CZJUMP + */ + def interpretConditional(kind: TypeKind, in: State, toDrop: Int, val1: Contents, val2: Contents, success: BasicBlock, failure: BasicBlock, cond: TestOp): (Map[BasicBlock, State], List[Instruction]) = { + // TODO use reaching analysis to update the state in the two branches + // e.g. if the comparison was checking null equality on local x + // then the in the success branch we know x is null and + // on the failure branch we know it is not + // in fact, with copy propagation we could propagate that knowledge + // back through a chain of locations + // + // TODO if we do all that we need to be careful in the + // case that success and failure are the same target block + // because we're using a Map and don't want one possible state to clobber the other + // alternative mayb we should just replace the conditional with a jump if both targets are the same + + def mightEqual = val1 mightEqual val2 + def mightNotEqual = val1 mightNotEqual val2 + def guaranteedEqual = mightEqual && !mightNotEqual + + def succPossible = cond match { + case EQ => mightEqual + case NE => mightNotEqual + case LT | GT => !guaranteedEqual // if the two are guaranteed to be equal then they can't be LT/GT + case LE | GE => true + } + + def failPossible = cond match { + case EQ => mightNotEqual + case NE => mightEqual + case LT | GT => true + case LE | GE => !guaranteedEqual // if the two are guaranteed to be equal then they must be LE/GE + } + + val out = in drop toDrop + + var result = Map[BasicBlock, State]() + if (succPossible) { + result += ((success, out)) + } + + if (failPossible) { + result += ((failure, out)) + } + + if (result.size == 1) (result, List.fill(toDrop)(DROP(kind)) :+ JUMP(result.keySet.head)) + else (result, inst :: Nil) + } + + inst match { + case JUMP(whereto) => + (Map((whereto, in)), inst :: Nil) + + case CJUMP(success, failure, cond, kind) => + val in1 = in peek 0 + val in2 = in peek 1 + interpretConditional(kind, in, 2, in1, in2, success, failure, cond) + + case CZJUMP(success, failure, cond, kind) => + val in1 = in peek 0 + val in2 = getZeroOf(kind) + interpretConditional(kind, in, 1, in1, in2, success, failure, cond) + + case SWITCH(tags, labels) => + val in1 = in peek 0 + val newStuff = tags zip labels filter { case (tagSet, _) => canSwitch(in1, tagSet) } + val (reachableTags, reachableNormalLabels) = (tags zip labels filter { case (tagSet, _) => canSwitch(in1, tagSet) }).unzip + val reachableLabels = if (labels.size > tags.size) { + // if we've got an extra label then it's the default + val defaultLabel = labels.last + // see if the default is reachable by seeing if the input might be out of the set + // of all tags + val allTags = Possible(tags.flatten.toSet map { tag: Int => Const(Constant(tag)) }) + if (in1 mightNotEqual allTags) { + reachableNormalLabels :+ defaultLabel + } else { + reachableNormalLabels + } + } else { + reachableNormalLabels + } + // TODO similar to the comment in interpretConditional, we should update our the State going into each + // branch based on which tag is being matched. 
Also, just like interpretConditional, if target blocks + // are the same we need to merge State rather than clobber + + // alternative, maybe we should simplify the SWITCH to not have same target labels + val newState = in drop 1 + val result = Map(reachableLabels map { label => (label, newState) }: _*) + if (reachableLabels.size == 1) (result, DROP(INT) :: JUMP(reachableLabels.head) :: Nil) + else (result, inst :: Nil) + + // these instructions don't have target blocks + // (exceptions are assumed to be reachable from all instructions) + case RETURN(_) | THROW(_) => + (Map.empty, inst :: Nil) + + case _ => + dumpClassesAndAbort("Unexpected non-block ending instruction: " + inst) + } + } + + /** + * Analyze a single block to find how it transforms an input state into a states for its successor blocks + * Also computes a list of instructions to be used to replace its last instruction + */ + private def interpretBlock(in: State, block: BasicBlock): (Map[BasicBlock, State], Map[BasicBlock, State], List[Instruction]) = { + debuglog(s"interpreting block $block") + // number of instructions excluding the last one + val normalCount = block.size - 1 + + var exceptionState = in.cleanStack + var normalExitState = in + var idx = 0 + while (idx < normalCount) { + val inst = block(idx) + normalExitState = interpretInst(normalExitState, inst) + if (normalExitState.locals ne exceptionState.locals) + exceptionState.copy(locals = exceptionState mergeLocals normalExitState.locals) + idx += 1 + } + + val pairs = block.exceptionSuccessors map { b => (b, exceptionState) } + val exceptionMap = Map(pairs: _*) + + val (normalExitMap, newInstructions) = interpretLast(normalExitState, block.lastInstruction) + + (normalExitMap, exceptionMap, newInstructions) + } + + /** + * Analyze a single method to find replacement instructions + */ + private def interpretMethod(m: IMethod): Map[BasicBlock, List[Instruction]] = { + import scala.collection.mutable.{ Set => MSet, Map => MMap } + + debuglog(s"interpreting method $m") + var iterations = 0 + + // initially we know that 'this' is not null and the params are initialized to some unknown value + val initThis: Iterator[(Local, Contents)] = if (m.isStatic) Iterator.empty else Iterator.single((THIS_LOCAL, NOT_NULL)) + val initOtherLocals: Iterator[(Local, Contents)] = m.params.iterator map { param => (param, UNKNOWN) } + val initialLocals: Map[Local, Contents] = Map((initThis ++ initOtherLocals).toSeq: _*) + val initialState = State(initialLocals, Nil) + + // worklist of basic blocks to process, initially the start block + val worklist = MSet(m.startBlock) + // worklist of exception basic blocks. They're kept in a separate set so they can be + // processed after normal flow basic blocks. 
That's because exception basic blocks + // are more likely to have multiple predecessors and queueing them for later + // increases the chances that they'll only need to be interpreted once + val exceptionlist = MSet[BasicBlock]() + // our current best guess at what the input state is for each block + // initially we only know about the start block + val inputState = MMap[BasicBlock, State]((m.startBlock, initialState)) + + // update the inputState map based on new information from interpreting a block + // When the input state of a block changes, add it back to the work list to be + // reinterpreted + def updateInputStates(outputStates: Map[BasicBlock, State], worklist: MSet[BasicBlock]) { + for ((block, newState) <- outputStates) { + val oldState = inputState get block + val updatedState = oldState map (x => x merge newState) getOrElse newState + if (oldState != Some(updatedState)) { + worklist add block + inputState(block) = updatedState + } + } + } + + // the instructions to be used as the last instructions on each block + val replacements = MMap[BasicBlock, List[Instruction]]() + + while (worklist.nonEmpty || exceptionlist.nonEmpty) { + if (worklist.isEmpty) { + // once the worklist is empty, start processing exception blocks + val block = exceptionlist.head + exceptionlist remove block + worklist add block + } else { + iterations += 1 + val block = worklist.head + worklist remove block + val (normalExitMap, exceptionMap, newInstructions) = interpretBlock(inputState(block), block) + + updateInputStates(normalExitMap, worklist) + updateInputStates(exceptionMap, exceptionlist) + replacements(block) = newInstructions + } + } + + debuglog(s"method $m with ${m.blocks.size} reached fixpoint in $iterations iterations") + replacements.toMap + } + } +} diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 702071f906..757303e335 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -38,7 +38,7 @@ trait ScalaSettings extends AbsScalaSettings protected def futureSettings = List[BooleanSetting]() /** Enabled under -optimise. */ - protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce) + protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce, YconstOptimization) /** Internal use - syntax enhancements. 
*/ private class EnableSettings[T <: BooleanSetting](val s: T) { @@ -128,6 +128,7 @@ trait ScalaSettings extends AbsScalaSettings val check = PhasesSetting ("-Ycheck", "Check the tree at the end of") val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after") val Xcloselim = BooleanSetting ("-Yclosure-elim", "Perform closure elimination.") + val YconstOptimization = BooleanSetting ("-Yconst-opt", "Perform optimization with constant values.") val Ycompacttrees = BooleanSetting ("-Ycompact-trees", "Use compact tree printer when displaying trees.") val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL.") val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination.") diff --git a/test/files/jvm/constant-optimization/Foo_1.flags b/test/files/jvm/constant-optimization/Foo_1.flags new file mode 100644 index 0000000000..86f52af447 --- /dev/null +++ b/test/files/jvm/constant-optimization/Foo_1.flags @@ -0,0 +1 @@ +-Ynooptimise -Yconst-opt \ No newline at end of file diff --git a/test/files/jvm/constant-optimization/Foo_1.scala b/test/files/jvm/constant-optimization/Foo_1.scala new file mode 100644 index 0000000000..cb67ad4e90 --- /dev/null +++ b/test/files/jvm/constant-optimization/Foo_1.scala @@ -0,0 +1,9 @@ +class Foo_1 { + def foo() { + // constant optimization should eliminate all branches + val i = 1 + val x = if (i != 1) null else "good" + val y = if (x == null) "good" else x + "" + println(y) + } +} \ No newline at end of file diff --git a/test/files/jvm/constant-optimization/Test.scala b/test/files/jvm/constant-optimization/Test.scala new file mode 100644 index 0000000000..283aa6f47a --- /dev/null +++ b/test/files/jvm/constant-optimization/Test.scala @@ -0,0 +1,27 @@ + +import scala.tools.partest.BytecodeTest +import scala.tools.asm +import asm.tree.InsnList +import scala.collection.JavaConverters._ + +object Test extends BytecodeTest { + val comparisons = Set(asm.Opcodes.IF_ACMPEQ, asm.Opcodes.IF_ACMPNE, asm.Opcodes.IF_ICMPEQ, asm.Opcodes.IF_ICMPGE, asm.Opcodes.IF_ICMPGT, asm.Opcodes.IF_ICMPLE, + asm.Opcodes.IF_ICMPLT, asm.Opcodes.IF_ICMPNE, asm.Opcodes.IFEQ, asm.Opcodes.IFGE, asm.Opcodes.IFGT, asm.Opcodes.IFLE, asm.Opcodes.IFLT, + asm.Opcodes.IFNE, asm.Opcodes.IFNONNULL, asm.Opcodes.IFNULL) + + def show: Unit = { + val classNode = loadClassNode("Foo_1") + val methodNode = getMethod(classNode, "foo") + // after optimization there should be no comparisons left + val expected = 0 + + val got = countComparisons(methodNode.instructions) + assert(got == expected, s"expected $expected but got $got comparisons") + } + + def countComparisons(insnList: InsnList): Int = { + def isComparison(node: asm.tree.AbstractInsnNode): Boolean = + (comparisons contains node.getOpcode) + insnList.iterator.asScala count isComparison + } +} \ No newline at end of file diff --git a/test/files/neg/t6446-additional.check b/test/files/neg/t6446-additional.check index 53dd383941..24201c07c2 100755 --- a/test/files/neg/t6446-additional.check +++ b/test/files/neg/t6446-additional.check @@ -25,7 +25,8 @@ superaccessors 6 add super accessors in traits and nested classes inliner 23 optimization: do inlining inlinehandlers 24 optimization: inline exception handlers closelim 25 optimization: eliminate uncalled closures - dce 26 optimization: eliminate dead code - jvm 27 generate JVM bytecode - ploogin 28 A sample phase that does so many things it's kind of hard... 
- terminal 29 The last phase in the compiler chain + constopt 26 optimization: optimize null and other constants + dce 27 optimization: eliminate dead code + jvm 28 generate JVM bytecode + ploogin 29 A sample phase that does so many things it's kind of hard... + terminal 30 The last phase in the compiler chain diff --git a/test/files/neg/t6446-missing.check b/test/files/neg/t6446-missing.check index f976bf480e..6e5bdcf07c 100755 --- a/test/files/neg/t6446-missing.check +++ b/test/files/neg/t6446-missing.check @@ -26,6 +26,7 @@ superaccessors 6 add super accessors in traits and nested classes inliner 23 optimization: do inlining inlinehandlers 24 optimization: inline exception handlers closelim 25 optimization: eliminate uncalled closures - dce 26 optimization: eliminate dead code - jvm 27 generate JVM bytecode - terminal 28 The last phase in the compiler chain + constopt 26 optimization: optimize null and other constants + dce 27 optimization: eliminate dead code + jvm 28 generate JVM bytecode + terminal 29 The last phase in the compiler chain diff --git a/test/files/neg/t6446-show-phases.check b/test/files/neg/t6446-show-phases.check index 5bbe43990c..a1bf408506 100644 --- a/test/files/neg/t6446-show-phases.check +++ b/test/files/neg/t6446-show-phases.check @@ -25,6 +25,7 @@ superaccessors 6 add super accessors in traits and nested classes inliner 23 optimization: do inlining inlinehandlers 24 optimization: inline exception handlers closelim 25 optimization: eliminate uncalled closures - dce 26 optimization: eliminate dead code - jvm 27 generate JVM bytecode - terminal 28 The last phase in the compiler chain + constopt 26 optimization: optimize null and other constants + dce 27 optimization: eliminate dead code + jvm 28 generate JVM bytecode + terminal 29 The last phase in the compiler chain diff --git a/test/files/run/constant-optimization.check b/test/files/run/constant-optimization.check new file mode 100644 index 0000000000..090e53ac40 --- /dev/null +++ b/test/files/run/constant-optimization.check @@ -0,0 +1,2 @@ +testBothReachable: good +testOneReachable: good diff --git a/test/files/run/constant-optimization.scala b/test/files/run/constant-optimization.scala new file mode 100644 index 0000000000..86f981e13f --- /dev/null +++ b/test/files/run/constant-optimization.scala @@ -0,0 +1,18 @@ +object Test extends App { + def testBothReachable() { + val i = util.Random.nextInt + val x = if (i % 2 == 0) null else "good" + val y = if (x == null) "good" else x + "" + println(s"testBothReachable: $y") + } + + def testOneReachable() { + val i = 1 + val x = if (i != 1) null else "good" + val y = if (x == null) "good" else x + "" + println(s"testOneReachable: $y") + } + + testBothReachable() + testOneReachable() +} diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check index d472c569d2..61b947214c 100644 --- a/test/files/run/programmatic-main.check +++ b/test/files/run/programmatic-main.check @@ -25,7 +25,8 @@ superaccessors 6 add super accessors in traits and nested classes inliner 23 optimization: do inlining inlinehandlers 24 optimization: inline exception handlers closelim 25 optimization: eliminate uncalled closures - dce 26 optimization: eliminate dead code - jvm 27 generate JVM bytecode - terminal 28 The last phase in the compiler chain + constopt 26 optimization: optimize null and other constants + dce 27 optimization: eliminate dead code + jvm 28 generate JVM bytecode + terminal 29 The last phase in the compiler chain -- cgit v1.2.3 From 
04eac5c4362d7af74302e73272a1a7406968e0ba Mon Sep 17 00:00:00 2001 From: James Iry Date: Thu, 7 Mar 2013 16:40:25 -0800 Subject: SI-7006 Cleanup from code review Minor cleanup from review of https://github.com/scala/scala/pull/2185 * Changed several instances of |= to ||= for better clarity (and bonus savings!) * Documented the return of two methods that compute the reachability of follow-on blocks. --- .../scala/tools/nsc/backend/icode/GenICode.scala | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 468e2cfd35..4f2d248672 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -990,11 +990,11 @@ abstract class GenICode extends SubComponent { } caseCtx = genLoad(body, tmpCtx, generatedType) - afterCtxReachable |= !caseCtx.bb.ignore + afterCtxReachable ||= !caseCtx.bb.ignore // close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body) caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos) } - afterCtxReachable |= (default == afterCtx) + afterCtxReachable ||= (default == afterCtx) ctx1.bb.emitOnly( SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos ) @@ -1357,6 +1357,14 @@ abstract class GenICode extends SubComponent { thenCtx: Context, elseCtx: Context): Boolean = { + /** + * Generate the de-sugared comparison mechanism that will underly an '==' + * + * @param l left-hand side of the '==' + * @param r right-hand side of the '==' + * @param code the comparison operator to use + * @return true if either branch can continue normally to a follow on block, false otherwise + */ def genComparisonOp(l: Tree, r: Tree, code: Int): Boolean = { val op: TestOp = code match { case scalaPrimitives.LT => LT @@ -1455,6 +1463,7 @@ abstract class GenICode extends SubComponent { * @param ctx current context * @param thenCtx target context if the comparison yields true * @param elseCtx target context if the comparison yields false + * @return true if either branch can continue normally to a follow on block, false otherwise */ def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Boolean = { def getTempLocal = ctx.method.lookupLocal(nme.EQEQ_LOCAL_VAR) getOrElse { @@ -2047,7 +2056,7 @@ abstract class GenICode extends SubComponent { exhStartCtx.addFinalizer(finalizer, finalizerCtx) loadException(exhStartCtx, exh, tree.pos) val exhEndCtx = handler(exhStartCtx) - normalExitReachable |= !exhEndCtx.bb.ignore + normalExitReachable ||= !exhEndCtx.bb.ignore exhEndCtx.bb.closeWith(JUMP(normalExitCtx.bb)) outerCtx.endHandler() } @@ -2062,7 +2071,7 @@ abstract class GenICode extends SubComponent { outerCtx.bb.closeWith(JUMP(bodyCtx.bb)) - normalExitReachable |= !bodyEndCtx.bb.ignore + normalExitReachable ||= !bodyEndCtx.bb.ignore normalExitCtx.bb killUnless normalExitReachable bodyEndCtx.bb.closeWith(JUMP(normalExitCtx.bb)) -- cgit v1.2.3 From fd21898db304f45fa12178662c9f1e5b793d6830 Mon Sep 17 00:00:00 2001 From: James Iry Date: Fri, 8 Mar 2013 08:31:57 -0800 Subject: SI-7231 Fix assertion when adapting Null type to Array type GenICode was doing a sanity check when adapting an expression of type Null to something else. It was just doing the wrong one. 
Instead of checking whether the result expression type was a reference type it was checking to see if it was an class reference type. This commit fixes that and adds a test to make sure both forms of adaptation work as expected. --- src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 2 +- test/files/run/t7231.check | 2 ++ test/files/run/t7231.scala | 11 +++++++++++ 3 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t7231.check create mode 100644 test/files/run/t7231.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index ed458a4bbe..d4fa01e2f4 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1025,7 +1025,7 @@ abstract class GenICode extends SubComponent { // this value into a local of type Null and we want the JVM to see that it's // a null value so we don't have to also adapt local loads. if (from == NullReference && to != UNIT && to != ObjectReference && to != AnyRefReference) { - assert(to.isReferenceType, "Attempt to adapt a null to a non reference type $to.") + assert(to.isRefOrArrayType, s"Attempt to adapt a null to a non reference type $to.") // adapt by dropping what we've got and pushing a null which // will convince the JVM we really do have null ctx.bb.emit(DROP(from), pos) diff --git a/test/files/run/t7231.check b/test/files/run/t7231.check new file mode 100644 index 0000000000..c1e4b6c175 --- /dev/null +++ b/test/files/run/t7231.check @@ -0,0 +1,2 @@ +null +null diff --git a/test/files/run/t7231.scala b/test/files/run/t7231.scala new file mode 100644 index 0000000000..7d6bc81f3f --- /dev/null +++ b/test/files/run/t7231.scala @@ -0,0 +1,11 @@ +object Test extends App { + val bar: Null = null + + def foo(x: Array[Int]) = x + def baz(x: String) = x + + // first line was failing + println(foo(bar)) + // this line worked but good to have a double check + println(baz(bar)) +} \ No newline at end of file -- cgit v1.2.3 From 9094822181c398b945b7f30ac1e2b05da9796f53 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 5 Mar 2013 10:09:34 -0800 Subject: Enabling commit for interactive/scaladoc modules. This is a non-behaviorally-changing setup commit which re-routes bits of code through avenues which can more easily be influenced by subclasses of Global. 
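For illustration only (this sketch is not part of the patch): a subclass of
Global can now supply its own scanner and parser through the new factory
hooks. The subclass name and the log calls below are hypothetical; only the
newUnitScanner/newUnitParser signatures and the syntaxAnalyzer import come
from the change itself.

  import scala.tools.nsc.{ Global, Settings }
  import scala.tools.nsc.reporters.Reporter

  // Hypothetical subclass: returns the stock UnitScanner/UnitParser but logs
  // each unit first. An interactive or scaladoc Global could instead hand
  // back its own parser subclasses here, without overriding the parser phase.
  class LoggingGlobal(settings0: Settings, reporter0: Reporter)
      extends Global(settings0, reporter0) {

    import syntaxAnalyzer.{ UnitScanner, UnitParser }

    override def newUnitScanner(unit: CompilationUnit): UnitScanner = {
      log(s"scanning ${unit.source}") // illustrative only
      new UnitScanner(unit)
    }

    override def newUnitParser(unit: CompilationUnit): UnitParser = {
      log(s"parsing ${unit.source}") // illustrative only
      new UnitParser(unit)
    }
  }

The point of routing construction through these overridable methods is that
custom compilers only need to override the factory, not the whole phase.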
--- src/compiler/scala/tools/nsc/Global.scala | 12 ++++-- .../scala/tools/nsc/ast/parser/Parsers.scala | 12 +++--- .../scala/tools/nsc/ast/parser/Scanners.scala | 29 +++++++++------ .../tools/nsc/ast/parser/SyntaxAnalyzer.scala | 4 +- src/compiler/scala/tools/nsc/doc/DocParser.scala | 2 +- .../tools/nsc/interactive/CompilerControl.scala | 2 +- .../tools/nsc/interactive/RangePositions.scala | 1 + .../scala/tools/nsc/javac/JavaScanners.scala | 43 ++++++++-------------- .../scala/tools/nsc/symtab/SymbolLoaders.scala | 34 ++++++++--------- .../scala/tools/nsc/typechecker/Infer.scala | 2 +- .../scala/tools/nsc/typechecker/Namers.scala | 31 +++++++++------- .../scala/tools/nsc/typechecker/Typers.scala | 12 +++--- .../scala/tools/reflect/ToolBoxFactory.scala | 2 +- src/reflect/scala/reflect/internal/Symbols.scala | 22 +++++++---- 14 files changed, 111 insertions(+), 97 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 51fa8f0ab9..bc18b06e2a 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -425,12 +425,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val printInfers = settings.Yinferdebug.value // phaseName = "parser" - object syntaxAnalyzer extends { + lazy val syntaxAnalyzer = new { val global: Global.this.type = Global.this val runsAfter = List[String]() val runsRightAfter = None } with SyntaxAnalyzer + import syntaxAnalyzer.{ UnitScanner, UnitParser } + // !!! I think we're overdue for all these phase objects being lazy vals. // There's no way for a Global subclass to provide a custom typer // despite the existence of a "def newTyper(context: Context): Typer" @@ -1120,9 +1122,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter) warning("there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name)) } - def newUnitParser(code: String) = new syntaxAnalyzer.UnitParser(newCompilationUnit(code)) - def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code)) - def newSourceFile(code: String) = new BatchSourceFile("", code) + def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code)) + def newSourceFile(code: String) = new BatchSourceFile("", code) + def newUnitScanner(unit: CompilationUnit): UnitScanner = new UnitScanner(unit) + def newUnitParser(unit: CompilationUnit): UnitParser = new UnitParser(unit) + def newUnitParser(code: String): UnitParser = newUnitParser(newCompilationUnit(code)) /** A Run is a single execution of the compiler on a sets of units */ diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 08a6adfded..522c45f9fa 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -142,9 +142,9 @@ self => if (source.isSelfContained) () => compilationUnit() else () => scriptBody() - def newScanner = new SourceFileScanner(source) + def newScanner(): Scanner = new SourceFileScanner(source) - val in = newScanner + val in = newScanner() in.init() private val globalFresh = new FreshNameCreator.Default @@ -196,10 +196,9 @@ self => } class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) { + def this(unit: global.CompilationUnit) = this(unit, Nil) - def this(unit: global.CompilationUnit) = this(unit, List()) - - override def 
newScanner = new UnitScanner(unit, patches) + override def newScanner() = new UnitScanner(unit, patches) override def freshTermName(prefix: String): TermName = unit.freshTermName(prefix) override def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix) @@ -219,6 +218,7 @@ self => try body finally smartParsing = saved } + def withPatches(patches: List[BracePatch]): UnitParser = new UnitParser(unit, patches) val syntaxErrors = new ListBuffer[(Int, String)] def showSyntaxErrors() = @@ -244,7 +244,7 @@ self => if (syntaxErrors.isEmpty) firstTry else in.healBraces() match { case Nil => showSyntaxErrors() ; firstTry - case patches => new UnitParser(unit, patches).parse() + case patches => (this withPatches patches).parse() } } } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 19cf1b5093..78041fda08 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -9,7 +9,7 @@ import scala.tools.nsc.util.CharArrayReader import scala.reflect.internal.util._ import scala.reflect.internal.Chars._ import Tokens._ -import scala.annotation.switch +import scala.annotation.{ switch, tailrec } import scala.collection.{ mutable, immutable } import mutable.{ ListBuffer, ArrayBuffer } import scala.xml.Utility.{ isNameStart } @@ -1233,7 +1233,7 @@ trait Scanners extends ScannersCommon { /** A scanner over a given compilation unit */ - class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) { + class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) { def this(unit: CompilationUnit) = this(unit, List()) override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg) @@ -1382,17 +1382,24 @@ trait Scanners extends ScannersCommon { bpbuf += current } } + def bracePairString(bp: BracePair, indent: Int): String = { + val rangeString = { + import bp._ + val lline = line(loff) + val rline = line(roff) + val tokens = List(lline, lindent, rline, rindent) map (n => if (n < 0) "??" 
else "" + n) + "%s:%s to %s:%s".format(tokens: _*) + } + val outer = (" " * indent) + rangeString + val inners = bp.nested map (bracePairString(_, indent + 2)) - def printBP(bp: BracePair, indent: Int) { - println(" "*indent+line(bp.loff)+":"+bp.lindent+" to "+line(bp.roff)+":"+bp.rindent) - if (bp.nested.nonEmpty) - for (bp1 <- bp.nested) { - printBP(bp1, indent + 2) - } + if (inners.isEmpty) outer + else inners.mkString(outer + "\n", "\n", "") } -// println("lineStart = "+lineStart)//DEBUG -// println("bracepairs = ") -// for (bp <- bpbuf.toList) printBP(bp, 0) + def bpString = bpbuf.toList map ("\n" + bracePairString(_, 0)) mkString "" + def startString = lineStart.mkString("line starts: [", ", ", "]") + + log(s"\n$startString\n$bpString") bpbuf.toList } diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala index f1bf590ebf..7cf5a07291 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala @@ -28,8 +28,8 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse if (unit.body == EmptyTree) { unit.body = if (unit.isJava) new JavaUnitParser(unit).parse() - else if (reporter.incompleteHandled) new UnitParser(unit).parse() - else new UnitParser(unit).smartParse() + else if (reporter.incompleteHandled) newUnitParser(unit).parse() + else newUnitParser(unit).smartParse() } if (settings.Yrangepos.value && !reporter.hasErrors) diff --git a/src/compiler/scala/tools/nsc/doc/DocParser.scala b/src/compiler/scala/tools/nsc/doc/DocParser.scala index b753e84426..6dc3e5a62b 100644 --- a/src/compiler/scala/tools/nsc/doc/DocParser.scala +++ b/src/compiler/scala/tools/nsc/doc/DocParser.scala @@ -42,7 +42,7 @@ class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(setti */ def docUnit(code: String) = { val unit = new CompilationUnit(new BatchSourceFile("", code)) - val scanner = new syntaxAnalyzer.UnitParser(unit) + val scanner = newUnitParser(unit) scanner.compilationUnit() } diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala index 523e0d57b7..f84fa161c0 100644 --- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala +++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala @@ -263,7 +263,7 @@ trait CompilerControl { self: Global => * compiler thread. 
*/ def parseTree(source: SourceFile): Tree = { - new UnitParser(new CompilationUnit(source)).parse() + newUnitParser(new CompilationUnit(source)).parse() } /** Asks for a computation to be done quickly on the presentation compiler thread */ diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala index 6288400629..c57e1da184 100644 --- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala +++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala @@ -10,4 +10,5 @@ package interactive trait RangePositions extends scala.reflect.internal.Positions with ast.Trees with ast.Positions { self: scala.tools.nsc.Global => + override def useOffsetPositions = false } diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index 1d1469f87d..3813736535 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -10,7 +10,7 @@ import scala.tools.nsc.util.JavaCharArrayReader import scala.reflect.internal.util._ import scala.reflect.internal.Chars._ import JavaTokens._ -import scala.annotation.switch +import scala.annotation.{ switch, tailrec } import scala.language.implicitConversions // Todo merge these better with Scanners @@ -587,33 +587,20 @@ trait JavaScanners extends ast.parser.ScannersCommon { } } - private def skipComment(): Boolean = { - if (in.ch == '/') { - do { - in.next() - } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU)) - true - } else if (in.ch == '*') { - docBuffer = null - in.next() - val scalaDoc = ("/**", "*/") - if (in.ch == '*' && forScaladoc) - docBuffer = new StringBuilder(scalaDoc._1) - do { - do { - if (in.ch != '*' && in.ch != SU) { - in.next(); putDocChar(in.ch) - } - } while (in.ch != '*' && in.ch != SU) - while (in.ch == '*') { - in.next(); putDocChar(in.ch) - } - } while (in.ch != '/' && in.ch != SU) - if (in.ch == '/') in.next() - else incompleteInputError("unclosed comment") - true - } else { - false + protected def skipComment(): Boolean = { + @tailrec def skipLineComment(): Unit = in.ch match { + case CR | LF | SU => + case _ => in.next; skipLineComment() + } + @tailrec def skipJavaComment(): Unit = in.ch match { + case SU => incompleteInputError("unclosed comment") + case '*' => in.next; if (in.ch == '/') in.next else skipJavaComment() + case _ => in.next; skipJavaComment() + } + in.ch match { + case '/' => in.next ; skipLineComment() ; true + case '*' => in.next ; skipJavaComment() ; true + case _ => false } } diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 5b5118a94f..ffccc11474 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -30,6 +30,18 @@ abstract class SymbolLoaders { member } + protected def signalError(root: Symbol, ex: Throwable) { + if (settings.debug.value) ex.printStackTrace() + // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented + // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects + // that are not in their correct place (see bug for details) + if (!settings.isScaladoc) + globalError(ex.getMessage() match { + case null => "i/o error while loading " + root.name + case msg => "error while loading " + root.name + ", " + msg + }) + } + 
/** Enter class with given `name` into scope of `root` * and give them `completer` as type. */ @@ -168,18 +180,6 @@ abstract class SymbolLoaders { } override def complete(root: Symbol) { - def signalError(ex: Exception) { - ok = false - if (settings.debug.value) ex.printStackTrace() - val msg = ex.getMessage() - // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented - // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects - // that are not in their correct place (see bug for details) - if (!settings.isScaladoc) - globalError( - if (msg eq null) "i/o error while loading " + root.name - else "error while loading " + root.name + ", " + msg) - } try { val start = currentTime val currentphase = phase @@ -189,11 +189,11 @@ abstract class SymbolLoaders { ok = true setSource(root) setSource(root.companionSymbol) // module -> class, class -> module - } catch { - case ex: IOException => - signalError(ex) - case ex: MissingRequirementError => - signalError(ex) + } + catch { + case ex @ (_: IOException | _: MissingRequirementError) => + ok = false + signalError(root, ex) } initRoot(root) if (!root.isPackageClass) initRoot(root.companionSymbol) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index d4f402b747..9f16f65a6a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1694,7 +1694,7 @@ trait Infer extends Checkable { } else if (sym.isOverloaded) { val xs = sym.alternatives - val tparams = new AsSeenFromMap(pre, xs.head.owner) mapOver xs.head.typeParams + val tparams = newAsSeenFromMap(pre, xs.head.owner) mapOver xs.head.typeParams val bounds = tparams map (_.tpeHK) // see e.g., #1236 val tpe = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), xs)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index a1bf3a56c3..777e96da82 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -47,11 +47,23 @@ trait Namers extends MethodSynthesis { private class NormalNamer(context: Context) extends Namer(context) def newNamer(context: Context): Namer = new NormalNamer(context) - def newNamerFor(context: Context, tree: Tree): Namer = - newNamer(context.makeNewScope(tree, tree.symbol)) + def newNamerFor(context: Context, tree: Tree): Namer = newNamer(context.makeNewScope(tree, tree.symbol)) abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { thisNamer => + def saveDefaultGetter(meth: Symbol, default: Symbol) { + if (forInteractive) { + // save the default getters as attachments in the method symbol. if compiling the + // same local block several times (which can happen in interactive mode) we might + // otherwise not find the default symbol, because the second time it the method + // symbol will be re-entered in the scope but the default parameter will not. 
+ meth.attachments.get[DefaultsOfLocalMethodAttachment] match { + case Some(att) => att.defaultGetters += default + case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default)) + } + } + } + import NamerErrorGen._ val typer = newTyper(context) @@ -1297,17 +1309,10 @@ trait Namers extends MethodSynthesis { if (!isConstr) methOwner.resetFlag(INTERFACE) // there's a concrete member now val default = parentNamer.enterSyntheticSym(defaultTree) - if (forInteractive && default.owner.isTerm) { - // save the default getters as attachments in the method symbol. if compiling the - // same local block several times (which can happen in interactive mode) we might - // otherwise not find the default symbol, because the second time it the method - // symbol will be re-entered in the scope but the default parameter will not. - meth.attachments.get[DefaultsOfLocalMethodAttachment] match { - case Some(att) => att.defaultGetters += default - case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default)) - } - } - } else if (baseHasDefault) { + if (default.owner.isTerm) + saveDefaultGetter(meth, default) + } + else if (baseHasDefault) { // the parameter does not have a default itself, but the // corresponding parameter in the base class does. sym.setFlag(DEFAULTPARAM) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 959c5a0eb8..1a3c20c4b9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -86,8 +86,6 @@ trait Typers extends Adaptations with Tags { // that are turned private by typedBlock private final val SYNTHETIC_PRIVATE = TRANS_FLAG - private def isPastTyper = phase.id > currentRun.typerPhase.id - // To enable decent error messages when the typer crashes. // TODO - this only catches trees which go through def typed, // but there are all kinds of back ways - typedClassDef, etc. etc. @@ -98,13 +96,18 @@ trait Typers extends Adaptations with Tags { // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope) // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933) - private def newPatternMatching = !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id) + protected def newPatternMatching = !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id) abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors { import context0.unit import typeDebug.{ ptTree, ptBlock, ptLine } import TyperErrorGen._ + /** (Will be) overridden to false in scaladoc and/or interactive. */ + def canAdaptConstantTypeToLiteral = !forScaladoc && !forInteractive + def canTranslateEmptyListToNil = !forInteractive + def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree + def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = typed(docDef.definition, mode, pt) @@ -3262,7 +3265,7 @@ trait Typers extends Adaptations with Tags { * forced during kind-arity checking, so it is guarded by additional * tests to ensure we're sufficiently far along. 
*/ - if (args.isEmpty && !forInteractive && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply)) + if (args.isEmpty && canTranslateEmptyListToNil && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply)) atPos(tree.pos)(gen.mkNil setType restpe) else constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe)) @@ -5255,7 +5258,6 @@ trait Typers extends Adaptations with Tags { case tree: ApplyDynamic => typedApplyDynamic(tree) case tree: ReferenceToBoxed => typedReferenceToBoxed(tree) case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure) - case tree: Import => assert(forInteractive, "!forInteractive") ; tree setType tree.symbol.tpe // should not happen in normal circumstances. case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree") } } diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index df9d907377..3bde280681 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -283,7 +283,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => val file = new BatchSourceFile("", wrappedCode) val unit = new CompilationUnit(file) phase = run.parserPhase - val parser = new syntaxAnalyzer.UnitParser(unit) + val parser = newUnitParser(unit) val wrappedTree = parser.parse() throwIfErrors() val PackageDef(_, List(ModuleDef(_, _, Template(_, _, _ :: parsed)))) = wrappedTree diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 26ca62c44a..f7a87d2700 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -69,6 +69,20 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ val originalOwner = perRunCaches.newMap[Symbol, Symbol]() + // TODO - don't allow the owner to be changed without checking invariants, at least + // when under some flag. Define per-phase invariants for owner/owned relationships, + // e.g. after flatten all classes are owned by package classes, there are lots and + // lots of these to be declared (or more realistically, discovered.) + protected def saveOriginalOwner(sym: Symbol) { + // don't keep the original owner in presentation compiler runs + // (the map will grow indefinitely, and the only use case is the + // backend). + if (!forInteractive) { + if (originalOwner contains sym) () + else originalOwner(sym) = sym.rawowner + } + } + abstract class SymbolContextApiImpl extends SymbolContextApi { this: Symbol => @@ -948,13 +962,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // e.g. after flatten all classes are owned by package classes, there are lots and // lots of these to be declared (or more realistically, discovered.) def owner_=(owner: Symbol) { - // don't keep the original owner in presentation compiler runs - // (the map will grow indefinitely, and the only use case is the - // backend). 
- if (!forInteractive) { - if (originalOwner contains this) () - else originalOwner(this) = rawowner - } + saveOriginalOwner(this) assert(isCompilerUniverse, "owner_= is not thread-safe; cannot be run in reflexive code") if (traceSymbolActivity) traceSymbols.recordNewSymbolOwner(this, owner) -- cgit v1.2.3 From c6ca941ccc017a8869f4def717cfeb640f965077 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 6 Mar 2013 07:39:19 -0800 Subject: Moved scaladoc sources into separate directory. This change is not externally visible. It moves the scaladoc sources into src/scaladoc and adds an ant target for building them. The compilation products are still packaged into scala-compiler.jar as before, but with a small change to build.xml a separate jar can be created instead. --- build.xml | 52 +- src/compiler/scala/tools/ant/Scaladoc.scala | 695 --- src/compiler/scala/tools/nsc/ScalaDoc.scala | 72 - src/compiler/scala/tools/nsc/doc/DocFactory.scala | 132 - src/compiler/scala/tools/nsc/doc/DocParser.scala | 69 - src/compiler/scala/tools/nsc/doc/Index.scala | 17 - .../scala/tools/nsc/doc/ScaladocGlobal.scala | 106 - src/compiler/scala/tools/nsc/doc/Settings.scala | 368 -- .../scala/tools/nsc/doc/Uncompilable.scala | 51 - src/compiler/scala/tools/nsc/doc/Universe.scala | 16 - .../tools/nsc/doc/base/CommentFactoryBase.scala | 936 --- src/compiler/scala/tools/nsc/doc/base/LinkTo.scala | 15 - .../tools/nsc/doc/base/MemberLookupBase.scala | 206 - .../scala/tools/nsc/doc/base/comment/Body.scala | 89 - .../scala/tools/nsc/doc/base/comment/Comment.scala | 131 - .../scala/tools/nsc/doc/doclet/Generator.scala | 30 - .../scala/tools/nsc/doc/doclet/Indexer.scala | 21 - .../scala/tools/nsc/doc/doclet/Universer.scala | 21 - src/compiler/scala/tools/nsc/doc/html/Doclet.scala | 19 - .../scala/tools/nsc/doc/html/HtmlFactory.scala | 152 - .../scala/tools/nsc/doc/html/HtmlPage.scala | 224 - src/compiler/scala/tools/nsc/doc/html/Page.scala | 102 - .../scala/tools/nsc/doc/html/SyntaxHigh.scala | 286 - .../scala/tools/nsc/doc/html/page/Index.scala | 133 - .../tools/nsc/doc/html/page/IndexScript.scala | 69 - .../tools/nsc/doc/html/page/ReferenceIndex.scala | 58 - .../scala/tools/nsc/doc/html/page/Source.scala | 127 - .../scala/tools/nsc/doc/html/page/Template.scala | 967 --- .../doc/html/page/diagram/DiagramGenerator.scala | 53 - .../nsc/doc/html/page/diagram/DiagramStats.scala | 66 - .../html/page/diagram/DotDiagramGenerator.scala | 506 -- .../nsc/doc/html/page/diagram/DotRunner.scala | 225 - .../tools/nsc/doc/html/resource/lib/arrow-down.png | Bin 6232 -> 0 bytes .../nsc/doc/html/resource/lib/arrow-right.png | Bin 6220 -> 0 bytes .../tools/nsc/doc/html/resource/lib/class.png | Bin 3357 -> 0 bytes .../tools/nsc/doc/html/resource/lib/class_big.png | Bin 7516 -> 0 bytes .../nsc/doc/html/resource/lib/class_diagram.png | Bin 3910 -> 0 bytes .../doc/html/resource/lib/class_to_object_big.png | Bin 9006 -> 0 bytes .../nsc/doc/html/resource/lib/constructorsbg.gif | Bin 1206 -> 0 bytes .../nsc/doc/html/resource/lib/conversionbg.gif | Bin 167 -> 0 bytes .../tools/nsc/doc/html/resource/lib/defbg-blue.gif | Bin 1544 -> 0 bytes .../nsc/doc/html/resource/lib/defbg-green.gif | Bin 1341 -> 0 bytes .../tools/nsc/doc/html/resource/lib/diagrams.css | 143 - .../tools/nsc/doc/html/resource/lib/diagrams.js | 324 - .../nsc/doc/html/resource/lib/filter_box_left.png | Bin 1692 -> 0 bytes .../nsc/doc/html/resource/lib/filter_box_left.psd | Bin 30823 -> 0 bytes .../nsc/doc/html/resource/lib/filter_box_left2.gif | Bin 1462 -> 0 bytes 
.../nsc/doc/html/resource/lib/filter_box_right.png | Bin 1803 -> 0 bytes .../nsc/doc/html/resource/lib/filter_box_right.psd | Bin 31295 -> 0 bytes .../tools/nsc/doc/html/resource/lib/filterbg.gif | Bin 1324 -> 0 bytes .../nsc/doc/html/resource/lib/filterboxbarbg.gif | Bin 1104 -> 0 bytes .../nsc/doc/html/resource/lib/filterboxbarbg.png | Bin 965 -> 0 bytes .../nsc/doc/html/resource/lib/filterboxbg.gif | Bin 1366 -> 0 bytes .../nsc/doc/html/resource/lib/fullcommenttopbg.gif | Bin 1115 -> 0 bytes .../tools/nsc/doc/html/resource/lib/index.css | 338 -- .../scala/tools/nsc/doc/html/resource/lib/index.js | 533 -- .../tools/nsc/doc/html/resource/lib/jquery-ui.js | 6 - .../tools/nsc/doc/html/resource/lib/jquery.js | 2 - .../nsc/doc/html/resource/lib/jquery.layout.js | 5486 ----------------- .../nsc/doc/html/resource/lib/modernizr.custom.js | 4 - .../nsc/doc/html/resource/lib/navigation-li-a.png | Bin 1198 -> 0 bytes .../nsc/doc/html/resource/lib/navigation-li.png | Bin 2441 -> 0 bytes .../tools/nsc/doc/html/resource/lib/object.png | Bin 3356 -> 0 bytes .../tools/nsc/doc/html/resource/lib/object_big.png | Bin 7653 -> 0 bytes .../nsc/doc/html/resource/lib/object_diagram.png | Bin 3903 -> 0 bytes .../doc/html/resource/lib/object_to_class_big.png | Bin 9158 -> 0 bytes .../doc/html/resource/lib/object_to_trait_big.png | Bin 9200 -> 0 bytes .../doc/html/resource/lib/object_to_type_big.png | Bin 9158 -> 0 bytes .../tools/nsc/doc/html/resource/lib/ownderbg2.gif | Bin 1145 -> 0 bytes .../tools/nsc/doc/html/resource/lib/ownerbg.gif | Bin 1118 -> 0 bytes .../tools/nsc/doc/html/resource/lib/ownerbg2.gif | Bin 1145 -> 0 bytes .../tools/nsc/doc/html/resource/lib/package.png | Bin 3335 -> 0 bytes .../nsc/doc/html/resource/lib/package_big.png | Bin 7312 -> 0 bytes .../tools/nsc/doc/html/resource/lib/packagesbg.gif | Bin 1201 -> 0 bytes .../tools/nsc/doc/html/resource/lib/raphael-min.js | 10 - .../tools/nsc/doc/html/resource/lib/ref-index.css | 30 - .../tools/nsc/doc/html/resource/lib/remove.png | Bin 3186 -> 0 bytes .../tools/nsc/doc/html/resource/lib/remove.psd | Bin 28904 -> 0 bytes .../tools/nsc/doc/html/resource/lib/scheduler.js | 71 - .../doc/html/resource/lib/selected-implicits.png | Bin 1150 -> 0 bytes .../html/resource/lib/selected-right-implicits.png | Bin 646 -> 0 bytes .../nsc/doc/html/resource/lib/selected-right.png | Bin 1380 -> 0 bytes .../tools/nsc/doc/html/resource/lib/selected.png | Bin 1864 -> 0 bytes .../nsc/doc/html/resource/lib/selected2-right.png | Bin 1434 -> 0 bytes .../tools/nsc/doc/html/resource/lib/selected2.png | Bin 1965 -> 0 bytes .../nsc/doc/html/resource/lib/signaturebg.gif | Bin 1214 -> 0 bytes .../nsc/doc/html/resource/lib/signaturebg2.gif | Bin 1209 -> 0 bytes .../tools/nsc/doc/html/resource/lib/template.css | 848 --- .../tools/nsc/doc/html/resource/lib/template.js | 466 -- .../nsc/doc/html/resource/lib/tools.tooltip.js | 14 - .../tools/nsc/doc/html/resource/lib/trait.png | Bin 3374 -> 0 bytes .../tools/nsc/doc/html/resource/lib/trait_big.png | Bin 7410 -> 0 bytes .../nsc/doc/html/resource/lib/trait_diagram.png | Bin 3882 -> 0 bytes .../doc/html/resource/lib/trait_to_object_big.png | Bin 8967 -> 0 bytes .../scala/tools/nsc/doc/html/resource/lib/type.png | Bin 1445 -> 0 bytes .../tools/nsc/doc/html/resource/lib/type_big.png | Bin 4236 -> 0 bytes .../nsc/doc/html/resource/lib/type_diagram.png | Bin 1841 -> 0 bytes .../tools/nsc/doc/html/resource/lib/type_tags.ai | 6376 -------------------- .../doc/html/resource/lib/type_to_object_big.png | Bin 4969 -> 0 bytes 
.../tools/nsc/doc/html/resource/lib/typebg.gif | Bin 1206 -> 0 bytes .../tools/nsc/doc/html/resource/lib/unselected.png | Bin 1879 -> 0 bytes .../nsc/doc/html/resource/lib/valuemembersbg.gif | Bin 1206 -> 0 bytes .../tools/nsc/doc/html/resource/lib/versions.txt | 1 - .../scala/tools/nsc/doc/model/CommentFactory.scala | 112 - .../scala/tools/nsc/doc/model/Entity.scala | 601 -- .../tools/nsc/doc/model/IndexModelFactory.scala | 58 - .../scala/tools/nsc/doc/model/MemberLookup.scala | 63 - .../scala/tools/nsc/doc/model/ModelFactory.scala | 1045 ---- .../doc/model/ModelFactoryImplicitSupport.scala | 579 -- .../nsc/doc/model/ModelFactoryTypeSupport.scala | 315 - .../scala/tools/nsc/doc/model/TreeEntity.scala | 27 - .../scala/tools/nsc/doc/model/TreeFactory.scala | 96 - .../scala/tools/nsc/doc/model/TypeEntity.scala | 27 - .../scala/tools/nsc/doc/model/ValueArgument.scala | 20 - .../scala/tools/nsc/doc/model/Visibility.scala | 39 - .../tools/nsc/doc/model/diagram/Diagram.scala | 137 - .../doc/model/diagram/DiagramDirectiveParser.scala | 257 - .../nsc/doc/model/diagram/DiagramFactory.scala | 254 - .../scala/tools/partest/ScaladocModelTest.scala | 203 - src/scaladoc/scala/tools/ant/Scaladoc.scala | 695 +++ src/scaladoc/scala/tools/nsc/ScalaDoc.scala | 72 + src/scaladoc/scala/tools/nsc/doc/DocFactory.scala | 132 + src/scaladoc/scala/tools/nsc/doc/DocParser.scala | 69 + src/scaladoc/scala/tools/nsc/doc/Index.scala | 17 + .../scala/tools/nsc/doc/ScaladocGlobal.scala | 106 + src/scaladoc/scala/tools/nsc/doc/Settings.scala | 368 ++ .../scala/tools/nsc/doc/Uncompilable.scala | 51 + src/scaladoc/scala/tools/nsc/doc/Universe.scala | 16 + .../tools/nsc/doc/base/CommentFactoryBase.scala | 936 +++ src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala | 15 + .../tools/nsc/doc/base/MemberLookupBase.scala | 206 + .../scala/tools/nsc/doc/base/comment/Body.scala | 89 + .../scala/tools/nsc/doc/base/comment/Comment.scala | 131 + .../scala/tools/nsc/doc/doclet/Generator.scala | 30 + .../scala/tools/nsc/doc/doclet/Indexer.scala | 21 + .../scala/tools/nsc/doc/doclet/Universer.scala | 21 + src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala | 19 + .../scala/tools/nsc/doc/html/HtmlFactory.scala | 152 + .../scala/tools/nsc/doc/html/HtmlPage.scala | 224 + src/scaladoc/scala/tools/nsc/doc/html/Page.scala | 102 + .../scala/tools/nsc/doc/html/SyntaxHigh.scala | 286 + .../scala/tools/nsc/doc/html/page/Index.scala | 133 + .../tools/nsc/doc/html/page/IndexScript.scala | 69 + .../tools/nsc/doc/html/page/ReferenceIndex.scala | 58 + .../scala/tools/nsc/doc/html/page/Source.scala | 127 + .../scala/tools/nsc/doc/html/page/Template.scala | 967 +++ .../doc/html/page/diagram/DiagramGenerator.scala | 53 + .../nsc/doc/html/page/diagram/DiagramStats.scala | 66 + .../html/page/diagram/DotDiagramGenerator.scala | 506 ++ .../nsc/doc/html/page/diagram/DotRunner.scala | 225 + .../tools/nsc/doc/html/resource/lib/arrow-down.png | Bin 0 -> 6232 bytes .../nsc/doc/html/resource/lib/arrow-right.png | Bin 0 -> 6220 bytes .../tools/nsc/doc/html/resource/lib/class.png | Bin 0 -> 3357 bytes .../tools/nsc/doc/html/resource/lib/class_big.png | Bin 0 -> 7516 bytes .../nsc/doc/html/resource/lib/class_diagram.png | Bin 0 -> 3910 bytes .../doc/html/resource/lib/class_to_object_big.png | Bin 0 -> 9006 bytes .../nsc/doc/html/resource/lib/constructorsbg.gif | Bin 0 -> 1206 bytes .../nsc/doc/html/resource/lib/conversionbg.gif | Bin 0 -> 167 bytes .../tools/nsc/doc/html/resource/lib/defbg-blue.gif | Bin 0 -> 1544 bytes .../nsc/doc/html/resource/lib/defbg-green.gif | Bin 
0 -> 1341 bytes .../tools/nsc/doc/html/resource/lib/diagrams.css | 143 + .../tools/nsc/doc/html/resource/lib/diagrams.js | 324 + .../nsc/doc/html/resource/lib/filter_box_left.png | Bin 0 -> 1692 bytes .../nsc/doc/html/resource/lib/filter_box_left.psd | Bin 0 -> 30823 bytes .../nsc/doc/html/resource/lib/filter_box_left2.gif | Bin 0 -> 1462 bytes .../nsc/doc/html/resource/lib/filter_box_right.png | Bin 0 -> 1803 bytes .../nsc/doc/html/resource/lib/filter_box_right.psd | Bin 0 -> 31295 bytes .../tools/nsc/doc/html/resource/lib/filterbg.gif | Bin 0 -> 1324 bytes .../nsc/doc/html/resource/lib/filterboxbarbg.gif | Bin 0 -> 1104 bytes .../nsc/doc/html/resource/lib/filterboxbarbg.png | Bin 0 -> 965 bytes .../nsc/doc/html/resource/lib/filterboxbg.gif | Bin 0 -> 1366 bytes .../nsc/doc/html/resource/lib/fullcommenttopbg.gif | Bin 0 -> 1115 bytes .../tools/nsc/doc/html/resource/lib/index.css | 338 ++ .../scala/tools/nsc/doc/html/resource/lib/index.js | 533 ++ .../tools/nsc/doc/html/resource/lib/jquery-ui.js | 6 + .../tools/nsc/doc/html/resource/lib/jquery.js | 2 + .../nsc/doc/html/resource/lib/jquery.layout.js | 5486 +++++++++++++++++ .../nsc/doc/html/resource/lib/modernizr.custom.js | 4 + .../nsc/doc/html/resource/lib/navigation-li-a.png | Bin 0 -> 1198 bytes .../nsc/doc/html/resource/lib/navigation-li.png | Bin 0 -> 2441 bytes .../tools/nsc/doc/html/resource/lib/object.png | Bin 0 -> 3356 bytes .../tools/nsc/doc/html/resource/lib/object_big.png | Bin 0 -> 7653 bytes .../nsc/doc/html/resource/lib/object_diagram.png | Bin 0 -> 3903 bytes .../doc/html/resource/lib/object_to_class_big.png | Bin 0 -> 9158 bytes .../doc/html/resource/lib/object_to_trait_big.png | Bin 0 -> 9200 bytes .../doc/html/resource/lib/object_to_type_big.png | Bin 0 -> 9158 bytes .../tools/nsc/doc/html/resource/lib/ownderbg2.gif | Bin 0 -> 1145 bytes .../tools/nsc/doc/html/resource/lib/ownerbg.gif | Bin 0 -> 1118 bytes .../tools/nsc/doc/html/resource/lib/ownerbg2.gif | Bin 0 -> 1145 bytes .../tools/nsc/doc/html/resource/lib/package.png | Bin 0 -> 3335 bytes .../nsc/doc/html/resource/lib/package_big.png | Bin 0 -> 7312 bytes .../tools/nsc/doc/html/resource/lib/packagesbg.gif | Bin 0 -> 1201 bytes .../tools/nsc/doc/html/resource/lib/raphael-min.js | 10 + .../tools/nsc/doc/html/resource/lib/ref-index.css | 30 + .../tools/nsc/doc/html/resource/lib/remove.png | Bin 0 -> 3186 bytes .../tools/nsc/doc/html/resource/lib/remove.psd | Bin 0 -> 28904 bytes .../tools/nsc/doc/html/resource/lib/scheduler.js | 71 + .../doc/html/resource/lib/selected-implicits.png | Bin 0 -> 1150 bytes .../html/resource/lib/selected-right-implicits.png | Bin 0 -> 646 bytes .../nsc/doc/html/resource/lib/selected-right.png | Bin 0 -> 1380 bytes .../tools/nsc/doc/html/resource/lib/selected.png | Bin 0 -> 1864 bytes .../nsc/doc/html/resource/lib/selected2-right.png | Bin 0 -> 1434 bytes .../tools/nsc/doc/html/resource/lib/selected2.png | Bin 0 -> 1965 bytes .../nsc/doc/html/resource/lib/signaturebg.gif | Bin 0 -> 1214 bytes .../nsc/doc/html/resource/lib/signaturebg2.gif | Bin 0 -> 1209 bytes .../tools/nsc/doc/html/resource/lib/template.css | 848 +++ .../tools/nsc/doc/html/resource/lib/template.js | 466 ++ .../nsc/doc/html/resource/lib/tools.tooltip.js | 14 + .../tools/nsc/doc/html/resource/lib/trait.png | Bin 0 -> 3374 bytes .../tools/nsc/doc/html/resource/lib/trait_big.png | Bin 0 -> 7410 bytes .../nsc/doc/html/resource/lib/trait_diagram.png | Bin 0 -> 3882 bytes .../doc/html/resource/lib/trait_to_object_big.png | Bin 0 -> 8967 bytes 
.../scala/tools/nsc/doc/html/resource/lib/type.png | Bin 0 -> 1445 bytes .../tools/nsc/doc/html/resource/lib/type_big.png | Bin 0 -> 4236 bytes .../nsc/doc/html/resource/lib/type_diagram.png | Bin 0 -> 1841 bytes .../tools/nsc/doc/html/resource/lib/type_tags.ai | 6376 ++++++++++++++++++++ .../doc/html/resource/lib/type_to_object_big.png | Bin 0 -> 4969 bytes .../tools/nsc/doc/html/resource/lib/typebg.gif | Bin 0 -> 1206 bytes .../tools/nsc/doc/html/resource/lib/unselected.png | Bin 0 -> 1879 bytes .../nsc/doc/html/resource/lib/valuemembersbg.gif | Bin 0 -> 1206 bytes .../tools/nsc/doc/html/resource/lib/versions.txt | 1 + .../scala/tools/nsc/doc/model/CommentFactory.scala | 112 + .../scala/tools/nsc/doc/model/Entity.scala | 601 ++ .../tools/nsc/doc/model/IndexModelFactory.scala | 58 + .../scala/tools/nsc/doc/model/MemberLookup.scala | 63 + .../scala/tools/nsc/doc/model/ModelFactory.scala | 1045 ++++ .../doc/model/ModelFactoryImplicitSupport.scala | 579 ++ .../nsc/doc/model/ModelFactoryTypeSupport.scala | 315 + .../scala/tools/nsc/doc/model/TreeEntity.scala | 27 + .../scala/tools/nsc/doc/model/TreeFactory.scala | 96 + .../scala/tools/nsc/doc/model/TypeEntity.scala | 27 + .../scala/tools/nsc/doc/model/ValueArgument.scala | 20 + .../scala/tools/nsc/doc/model/Visibility.scala | 39 + .../tools/nsc/doc/model/diagram/Diagram.scala | 137 + .../doc/model/diagram/DiagramDirectiveParser.scala | 257 + .../nsc/doc/model/diagram/DiagramFactory.scala | 254 + .../scala/tools/partest/ScaladocModelTest.scala | 203 + test/files/run/t5527.check | 99 - test/files/run/t5527.scala | 107 - test/scaladoc/run/t5527.check | 99 + test/scaladoc/run/t5527.scala | 107 + test/scaladoc/scalacheck/IndexScriptTest.scala | 2 +- test/scaladoc/scalacheck/IndexTest.scala | 6 +- 243 files changed, 24708 insertions(+), 24658 deletions(-) delete mode 100644 src/compiler/scala/tools/ant/Scaladoc.scala delete mode 100644 src/compiler/scala/tools/nsc/ScalaDoc.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/DocFactory.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/DocParser.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/Index.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/Settings.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/Uncompilable.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/Universe.scala delete mode 100755 src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala delete mode 100755 src/compiler/scala/tools/nsc/doc/base/LinkTo.scala delete mode 100755 src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala delete mode 100755 src/compiler/scala/tools/nsc/doc/base/comment/Body.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/doclet/Generator.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/doclet/Universer.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/Doclet.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/Page.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/page/Index.scala delete mode 100644 
src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala delete mode 100755 src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/page/Source.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/page/Template.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js delete mode 100755 src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png delete mode 100644 
src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js delete mode 100755 src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt delete mode 100644 src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/Entity.scala delete mode 100755 src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala delete mode 100644 
src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala delete mode 100755 src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/Visibility.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala delete mode 100644 src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala delete mode 100644 src/partest/scala/tools/partest/ScaladocModelTest.scala create mode 100644 src/scaladoc/scala/tools/ant/Scaladoc.scala create mode 100644 src/scaladoc/scala/tools/nsc/ScalaDoc.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/DocFactory.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/DocParser.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/Index.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/Settings.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/Universe.scala create mode 100755 src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala create mode 100755 src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala create mode 100755 src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala create mode 100755 src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/Page.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala create mode 100755 src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png create mode 100644 
src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js create mode 100755 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js create mode 100755 
src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/Entity.scala create mode 100755 src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala create mode 100755 src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala create mode 100644 src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala create mode 100644 
src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala create mode 100644 src/scaladoc/scala/tools/partest/ScaladocModelTest.scala delete mode 100644 test/files/run/t5527.check delete mode 100644 test/files/run/t5527.scala create mode 100644 test/scaladoc/run/t5527.check create mode 100644 test/scaladoc/run/t5527.scala (limited to 'src') diff --git a/build.xml b/build.xml index cab86b91bd..9a685ee9cf 100644 --- a/build.xml +++ b/build.xml @@ -1268,7 +1268,55 @@ QUICK BUILD (QUICK) - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -1407,6 +1455,7 @@ PACKED QUICK BUILD (PACK) + @@ -1912,6 +1961,7 @@ SBT Compiler Interface jvmargs="${scalacfork.jvmargs}"> + diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala deleted file mode 100644 index fd6d637212..0000000000 --- a/src/compiler/scala/tools/ant/Scaladoc.scala +++ /dev/null @@ -1,695 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.tools.ant - -import java.io.File - -import org.apache.tools.ant.Project -import org.apache.tools.ant.types.{Path, Reference} -import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper} - -import scala.tools.nsc.Global -import scala.tools.nsc.doc.Settings -import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} - -/** An Ant task to document Scala code. - * - * This task can take the following parameters as attributes: - * - `srcdir` (mandatory), - * - `srcref`, - * - `destdir`, - * - `classpath`, - * - `classpathref`, - * - `sourcepath`, - * - `sourcepathref`, - * - `bootclasspath`, - * - `bootclasspathref`, - * - `extdirs`, - * - `extdirsref`, - * - `encoding`, - * - `doctitle`, - * - `header`, - * - `footer`, - * - `top`, - * - `bottom`, - * - `addparams`, - * - `deprecation`, - * - `docgenerator`, - * - `docrootcontent`, - * - `unchecked`, - * - `nofail`, - * - `skipPackages`. - * - * It also takes the following parameters as nested elements: - * - `src` (for srcdir), - * - `classpath`, - * - `sourcepath`, - * - `bootclasspath`, - * - `extdirs`. - * - * @author Gilles Dubochet, Stephane Micheloud - */ -class Scaladoc extends ScalaMatchingTask { - - /** The unique Ant file utilities instance to use in this task. */ - private val fileUtils = FileUtils.getFileUtils() - -/*============================================================================*\ -** Ant user-properties ** -\*============================================================================*/ - - abstract class PermissibleValue { - val values: List[String] - def isPermissible(value: String): Boolean = - (value == "") || values.exists(_.startsWith(value)) - } - - /** Defines valid values for the `deprecation` and - * `unchecked` properties. - */ - object Flag extends PermissibleValue { - val values = List("yes", "no", "on", "off") - def getBooleanValue(value: String, flagName: String): Boolean = - if (Flag.isPermissible(value)) - ("yes".equals(value) || "on".equals(value)) - else - buildError("Unknown " + flagName + " flag '" + value + "'") - } - - /** The directories that contain source files to compile. */ - private var origin: Option[Path] = None - /** The directory to put the compiled files in. */ - private var destination: Option[File] = None - - /** The class path to use for this compilation. 
*/ - private var classpath: Option[Path] = None - /** The source path to use for this compilation. */ - private var sourcepath: Option[Path] = None - /** The boot class path to use for this compilation. */ - private var bootclasspath: Option[Path] = None - /** The external extensions path to use for this compilation. */ - private var extdirs: Option[Path] = None - - /** The character encoding of the files to compile. */ - private var encoding: Option[String] = None - - /** The fully qualified name of a doclet class, which will be used to generate the documentation. */ - private var docgenerator: Option[String] = None - - /** The file from which the documentation content of the root package will be taken */ - private var docrootcontent: Option[File] = None - - /** The document title of the generated HTML documentation. */ - private var doctitle: Option[String] = None - - /** The document footer of the generated HTML documentation. */ - private var docfooter: Option[String] = None - - /** The document version, to be added to the title. */ - private var docversion: Option[String] = None - - /** Instruct the compiler to generate links to sources */ - private var docsourceurl: Option[String] = None - - /** Point scaladoc at uncompilable sources. */ - private var docUncompilable: Option[String] = None - - /** Instruct the compiler to use additional parameters */ - private var addParams: String = "" - - /** Instruct the compiler to generate deprecation information. */ - private var deprecation: Boolean = false - - /** Instruct the compiler to generate unchecked information. */ - private var unchecked: Boolean = false - - /** Instruct the ant task not to fail in the event of errors */ - private var nofail: Boolean = false - - /** Instruct the scaladoc tool to document implicit conversions */ - private var docImplicits: Boolean = false - - /** Instruct the scaladoc tool to document all (including impossible) implicit conversions */ - private var docImplicitsShowAll: Boolean = false - - /** Instruct the scaladoc tool to output implicits debugging information */ - private var docImplicitsDebug: Boolean = false - - /** Instruct the scaladoc tool to create diagrams */ - private var docDiagrams: Boolean = false - - /** Instruct the scaladoc tool to output diagram creation debugging information */ - private var docDiagramsDebug: Boolean = false - - /** Instruct the scaladoc tool to use the binary given to create diagrams */ - private var docDiagramsDotPath: Option[String] = None - - /** Instruct the scaladoc to produce textual ouput from html pages, for easy diff-ing */ - private var docRawOutput: Boolean = false - - /** Instruct the scaladoc not to generate prefixes */ - private var docNoPrefixes: Boolean = false - - /** Instruct the scaladoc tool to group similar functions together */ - private var docGroups: Boolean = false - - /** Instruct the scaladoc tool to skip certain packages */ - private var docSkipPackages: String = "" - -/*============================================================================*\ -** Properties setters ** -\*============================================================================*/ - - /** Sets the `srcdir` attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value of `origin`. - */ - def setSrcdir(input: Path) { - if (origin.isEmpty) origin = Some(input) - else origin.get.append(input) - } - - /** Sets the `origin` as a nested src Ant parameter. - * - * @return An origin path to be configured. 
- */ - def createSrc(): Path = { - if (origin.isEmpty) origin = Some(new Path(getProject)) - origin.get.createPath() - } - - /** Sets the `origin` as an external reference Ant parameter. - * - * @param input A reference to an origin path. - */ - def setSrcref(input: Reference) { - createSrc().setRefid(input) - } - - /** Sets the `destdir` attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value of `destination`. - */ - def setDestdir(input: File) { - destination = Some(input) - } - - /** Sets the `classpath` attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value of `classpath`. - */ - def setClasspath(input: Path) { - if (classpath.isEmpty) classpath = Some(input) - else classpath.get.append(input) - } - - /** Sets the `classpath` as a nested classpath Ant parameter. - * - * @return A class path to be configured. - */ - def createClasspath(): Path = { - if (classpath.isEmpty) classpath = Some(new Path(getProject)) - classpath.get.createPath() - } - - /** Sets the `classpath` as an external reference Ant parameter. - * - * @param input A reference to a class path. - */ - def setClasspathref(input: Reference) = - createClasspath().setRefid(input) - - /** Sets the `sourcepath` attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value of `sourcepath`. - */ - def setSourcepath(input: Path) = - if (sourcepath.isEmpty) sourcepath = Some(input) - else sourcepath.get.append(input) - - /** Sets the `sourcepath` as a nested sourcepath Ant parameter. - * - * @return A source path to be configured. - */ - def createSourcepath(): Path = { - if (sourcepath.isEmpty) sourcepath = Some(new Path(getProject)) - sourcepath.get.createPath() - } - - /** Sets the `sourcepath` as an external reference Ant parameter. - * - * @param input A reference to a source path. - */ - def setSourcepathref(input: Reference) = - createSourcepath().setRefid(input) - - /** Sets the `bootclasspath` attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value of `bootclasspath`. - */ - def setBootclasspath(input: Path) = - if (bootclasspath.isEmpty) bootclasspath = Some(input) - else bootclasspath.get.append(input) - - /** Sets the `bootclasspath` as a nested `sourcepath` Ant parameter. - * - * @return A source path to be configured. - */ - def createBootclasspath(): Path = { - if (bootclasspath.isEmpty) bootclasspath = Some(new Path(getProject)) - bootclasspath.get.createPath() - } - - /** Sets the `bootclasspath` as an external reference Ant parameter. - * - * @param input A reference to a source path. - */ - def setBootclasspathref(input: Reference) { - createBootclasspath().setRefid(input) - } - - /** Sets the external extensions path attribute. Used by [[http://ant.apache.org Ant]]. - * - * @param input The value of `extdirs`. - */ - def setExtdirs(input: Path) { - if (extdirs.isEmpty) extdirs = Some(input) - else extdirs.get.append(input) - } - - /** Sets the `extdirs` as a nested sourcepath Ant parameter. - * - * @return An extensions path to be configured. - */ - def createExtdirs(): Path = { - if (extdirs.isEmpty) extdirs = Some(new Path(getProject)) - extdirs.get.createPath() - } - - /** Sets the `extdirs` as an external reference Ant parameter. - * - * @param input A reference to an extensions path. - */ - def setExtdirsref(input: Reference) { - createExtdirs().setRefid(input) - } - - /** Sets the `encoding` attribute. Used by Ant. - * - * @param input The value of `encoding`. 
- */ - def setEncoding(input: String) { - encoding = Some(input) - } - - /** Sets the `docgenerator` attribute. - * - * @param input A fully qualified class name of a doclet. - */ - def setDocgenerator(input: String) { - docgenerator = Some(input) - } - - /** - * Sets the `docrootcontent` attribute. - * - * @param input The file from which the documentation content of the root - * package will be taken. - */ - def setDocrootcontent(input : File) { - docrootcontent = Some(input) - } - - /** Sets the `docversion` attribute. - * - * @param input The value of `docversion`. - */ - def setDocversion(input: String) { - docversion = Some(input) - } - - /** Sets the `docsourceurl` attribute. - * - * @param input The value of `docsourceurl`. - */ - def setDocsourceurl(input: String) { - docsourceurl = Some(input) - } - - /** Sets the `doctitle` attribute. - * - * @param input The value of `doctitle`. - */ - def setDoctitle(input: String) { - doctitle = Some(input) - } - - /** Sets the `docfooter` attribute. - * - * @param input The value of `docfooter`. - */ - def setDocfooter(input: String) { - docfooter = Some(input) - } - - /** Set the `addparams` info attribute. - * - * @param input The value for `addparams`. - */ - def setAddparams(input: String) { - addParams = input - } - - /** Set the `deprecation` info attribute. - * - * @param input One of the flags `yes/no` or `on/off`. - */ - def setDeprecation(input: String) { - if (Flag.isPermissible(input)) - deprecation = "yes".equals(input) || "on".equals(input) - else - buildError("Unknown deprecation flag '" + input + "'") - } - - /** Set the `unchecked` info attribute. - * - * @param input One of the flags `yes/no` or `on/off`. - */ - def setUnchecked(input: String) { - if (Flag.isPermissible(input)) - unchecked = "yes".equals(input) || "on".equals(input) - else - buildError("Unknown unchecked flag '" + input + "'") - } - - def setDocUncompilable(input: String) { - docUncompilable = Some(input) - } - - /** Set the `nofail` info attribute. - * - * @param input One of the flags `yes/no` or `on/off`. Default if no/off. - */ - def setNoFail(input: String) = - nofail = Flag.getBooleanValue(input, "nofail") - - /** Set the `implicits` info attribute. - * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ - def setImplicits(input: String) = - docImplicits = Flag.getBooleanValue(input, "implicits") - - /** Set the `implicitsShowAll` info attribute to enable scaladoc to show all implicits, including those impossible to - * convert to from the default scope - * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ - def setImplicitsShowAll(input: String) = - docImplicitsShowAll = Flag.getBooleanValue(input, "implicitsShowAll") - - /** Set the `implicitsDebug` info attribute so scaladoc outputs implicit conversion debug information - * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ - def setImplicitsDebug(input: String) = - docImplicitsDebug = Flag.getBooleanValue(input, "implicitsDebug") - - /** Set the `diagrams` bit so Scaladoc adds diagrams to the documentation - * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ - def setDiagrams(input: String) = - docDiagrams = Flag.getBooleanValue(input, "diagrams") - - /** Set the `diagramsDebug` bit so Scaladoc outputs diagram building debug information - * @param input One of the flags `yes/no` or `on/off`. Default if no/off. 
*/ - def setDiagramsDebug(input: String) = - docDiagramsDebug = Flag.getBooleanValue(input, "diagramsDebug") - - /** Set the `diagramsDotPath` attribute to the path where graphviz dot can be found (including the binary file name, - * eg: /usr/bin/dot) */ - def setDiagramsDotPath(input: String) = - docDiagramsDotPath = Some(input) - - /** Set the `rawOutput` bit so Scaladoc also outputs text from each html file - * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ - def setRawOutput(input: String) = - docRawOutput = Flag.getBooleanValue(input, "rawOutput") - - /** Set the `noPrefixes` bit to prevent Scaladoc from generating prefixes in - * front of types -- may lead to confusion, but significantly speeds up the generation. - * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ - def setNoPrefixes(input: String) = - docNoPrefixes = Flag.getBooleanValue(input, "noPrefixes") - - /** Instruct the scaladoc tool to group similar functions together */ - def setGroups(input: String) = - docGroups = Flag.getBooleanValue(input, "groups") - - /** Instruct the scaladoc tool to skip certain packages. - * @param input A colon-delimited list of fully qualified package names that will be skipped from scaladoc. - */ - def setSkipPackages(input: String) = - docSkipPackages = input - -/*============================================================================*\ -** Properties getters ** -\*============================================================================*/ - - /** Gets the value of the `classpath` attribute in a - * Scala-friendly form. - * - * @return The class path as a list of files. - */ - private def getClasspath: List[File] = - if (classpath.isEmpty) buildError("Member 'classpath' is empty.") - else classpath.get.list().toList map nameToFile - - /** Gets the value of the `origin` attribute in a Scala-friendly - * form. - * - * @return The origin path as a list of files. - */ - private def getOrigin: List[File] = - if (origin.isEmpty) buildError("Member 'origin' is empty.") - else origin.get.list().toList map nameToFile - - /** Gets the value of the `destination` attribute in a - * Scala-friendly form. - * - * @return The destination as a file. - */ - private def getDestination: File = - if (destination.isEmpty) buildError("Member 'destination' is empty.") - else existing(getProject resolveFile destination.get.toString) - - /** Gets the value of the `sourcepath` attribute in a - * Scala-friendly form. - * - * @return The source path as a list of files. - */ - private def getSourcepath: List[File] = - if (sourcepath.isEmpty) buildError("Member 'sourcepath' is empty.") - else sourcepath.get.list().toList map nameToFile - - /** Gets the value of the `bootclasspath` attribute in a - * Scala-friendly form. - * - * @return The boot class path as a list of files. - */ - private def getBootclasspath: List[File] = - if (bootclasspath.isEmpty) buildError("Member 'bootclasspath' is empty.") - else bootclasspath.get.list().toList map nameToFile - - /** Gets the value of the `extdirs` attribute in a - * Scala-friendly form. - * - * @return The extensions path as a list of files. 
- */ - private def getExtdirs: List[File] = - if (extdirs.isEmpty) buildError("Member 'extdirs' is empty.") - else extdirs.get.list().toList map nameToFile - -/*============================================================================*\ -** Compilation and support methods ** -\*============================================================================*/ - - /** This is forwarding method to circumvent bug #281 in Scala 2. Remove when - * bug has been corrected. - */ - override protected def getDirectoryScanner(baseDir: java.io.File) = - super.getDirectoryScanner(baseDir) - - /** Transforms a string name into a file relative to the provided base - * directory. - * - * @param base A file pointing to the location relative to which the name - * will be resolved. - * @param name A relative or absolute path to the file as a string. - * @return A file created from the name and the base file. - */ - private def nameToFile(base: File)(name: String): File = - existing(fileUtils.resolveFile(base, name)) - - /** Transforms a string name into a file relative to the build root - * directory. - * - * @param name A relative or absolute path to the file as a string. - * @return A file created from the name. - */ - private def nameToFile(name: String): File = - existing(getProject resolveFile name) - - /** Tests if a file exists and prints a warning in case it doesn't. Always - * returns the file, even if it doesn't exist. - * - * @param file A file to test for existance. - * @return The same file. - */ - private def existing(file: File): File = { - if (!file.exists()) - log("Element '" + file.toString + "' does not exist.", - Project.MSG_WARN) - file - } - - /** Transforms a path into a Scalac-readable string. - * - * @param path A path to convert. - * @return A string-representation of the path like `a.jar:b.jar`. - */ - private def asString(path: List[File]): String = - path.map(asString).mkString("", File.pathSeparator, "") - - /** Transforms a file into a Scalac-readable string. - * - * @param file A file to convert. - * @return A string-representation of the file like `/x/k/a.scala`. - */ - private def asString(file: File): String = - file.getAbsolutePath() - -/*============================================================================*\ -** The big execute method ** -\*============================================================================*/ - - /** Initializes settings and source files */ - protected def initialize: Pair[Settings, List[File]] = { - // Tests if all mandatory attributes are set and valid. - if (origin.isEmpty) buildError("Attribute 'srcdir' is not set.") - if (getOrigin.isEmpty) buildError("Attribute 'srcdir' is not set.") - if (!destination.isEmpty && !destination.get.isDirectory()) - buildError("Attribute 'destdir' does not refer to an existing directory.") - if (destination.isEmpty) destination = Some(getOrigin.head) - - val mapper = new GlobPatternMapper() - mapper setTo "*.html" - mapper setFrom "*.scala" - - // Scans source directories to build up a compile lists. - // If force is false, only files were the .class file in destination is - // older than the .scala file will be used. 
- val sourceFiles: List[File] = - for { - originDir <- getOrigin - originFile <- { - val includedFiles = - getDirectoryScanner(originDir).getIncludedFiles() - val list = includedFiles.toList - if (list.length > 0) - log( - "Documenting " + list.length + " source file" + - (if (list.length > 1) "s" else "") + - (" to " + getDestination.toString) - ) - else - log("No files selected for documentation", Project.MSG_VERBOSE) - - list - } - } yield { - log(originFile, Project.MSG_DEBUG) - nameToFile(originDir)(originFile) - } - - def decodeEscapes(s: String): String = { - // In Ant script characters '<' and '>' must be encoded when - // used in attribute values, e.g. for attributes "doctitle", "header", .. - // in task Scaladoc you may write: - // doctitle="<div>Scala</div>" - // so we have to decode them here. - s.replaceAll("<", "<").replaceAll(">",">") - .replaceAll("&", "&").replaceAll(""", "\"") - } - - // Builds-up the compilation settings for Scalac with the existing Ant - // parameters. - val docSettings = new Settings(buildError) - docSettings.outdir.value = asString(destination.get) - if (!classpath.isEmpty) - docSettings.classpath.value = asString(getClasspath) - if (!sourcepath.isEmpty) - docSettings.sourcepath.value = asString(getSourcepath) - /*else if (origin.get.size() > 0) - settings.sourcepath.value = origin.get.list()(0)*/ - if (!bootclasspath.isEmpty) - docSettings.bootclasspath.value = asString(getBootclasspath) - if (!extdirs.isEmpty) docSettings.extdirs.value = asString(getExtdirs) - if (!encoding.isEmpty) docSettings.encoding.value = encoding.get - if (!doctitle.isEmpty) docSettings.doctitle.value = decodeEscapes(doctitle.get) - if (!docfooter.isEmpty) docSettings.docfooter.value = decodeEscapes(docfooter.get) - if (!docversion.isEmpty) docSettings.docversion.value = decodeEscapes(docversion.get) - if (!docsourceurl.isEmpty) docSettings.docsourceurl.value = decodeEscapes(docsourceurl.get) - if (!docUncompilable.isEmpty) docSettings.docUncompilable.value = decodeEscapes(docUncompilable.get) - - docSettings.deprecation.value = deprecation - docSettings.unchecked.value = unchecked - docSettings.docImplicits.value = docImplicits - docSettings.docImplicitsDebug.value = docImplicitsDebug - docSettings.docImplicitsShowAll.value = docImplicitsShowAll - docSettings.docDiagrams.value = docDiagrams - docSettings.docDiagramsDebug.value = docDiagramsDebug - docSettings.docRawOutput.value = docRawOutput - docSettings.docNoPrefixes.value = docNoPrefixes - docSettings.docGroups.value = docGroups - docSettings.docSkipPackages.value = docSkipPackages - if(!docDiagramsDotPath.isEmpty) docSettings.docDiagramsDotPath.value = docDiagramsDotPath.get - - if (!docgenerator.isEmpty) docSettings.docgenerator.value = docgenerator.get - if (!docrootcontent.isEmpty) docSettings.docRootContent.value = docrootcontent.get.getAbsolutePath() - log("Scaladoc params = '" + addParams + "'", Project.MSG_DEBUG) - - docSettings processArgumentString addParams - Pair(docSettings, sourceFiles) - } - - def safeBuildError(message: String): Unit = if (nofail) log(message) else buildError(message) - - /** Performs the compilation. 
*/ - override def execute() = { - val Pair(docSettings, sourceFiles) = initialize - val reporter = new ConsoleReporter(docSettings) - try { - val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings) - docProcessor.document(sourceFiles.map (_.toString)) - if (reporter.ERROR.count > 0) - safeBuildError( - "Document failed with " + - reporter.ERROR.count + " error" + - (if (reporter.ERROR.count > 1) "s" else "") + - "; see the documenter error output for details.") - else if (reporter.WARNING.count > 0) - log( - "Document succeeded with " + - reporter.WARNING.count + " warning" + - (if (reporter.WARNING.count > 1) "s" else "") + - "; see the documenter output for details.") - reporter.printSummary() - } catch { - case exception: Throwable => - exception.printStackTrace() - val msg = Option(exception.getMessage) getOrElse "no error message provided" - safeBuildError(s"Document failed because of an internal documenter error ($msg); see the error output for details.") - } - } -} diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala deleted file mode 100644 index 52a0c20a11..0000000000 --- a/src/compiler/scala/tools/nsc/ScalaDoc.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* scaladoc, a documentation generator for Scala - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky - * @author Geoffrey Washburn - */ - -package scala.tools.nsc - -import java.io.File.pathSeparator -import scala.tools.nsc.doc.DocFactory -import scala.tools.nsc.reporters.ConsoleReporter -import scala.reflect.internal.util.FakePos - -/** The main class for scaladoc, a front-end for the Scala compiler - * that generates documentation from source files. - */ -class ScalaDoc { - val versionMsg = "Scaladoc %s -- %s".format(Properties.versionString, Properties.copyrightString) - - def process(args: Array[String]): Boolean = { - var reporter: ConsoleReporter = null - val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"), - msg => reporter.printMessage(msg)) - reporter = new ConsoleReporter(docSettings) { - // need to do this so that the Global instance doesn't trash all the - // symbols just because there was an error - override def hasErrors = false - } - val command = new ScalaDoc.Command(args.toList, docSettings) - def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty - - if (docSettings.version.value) - reporter.echo(versionMsg) - else if (docSettings.Xhelp.value) - reporter.echo(command.xusageMsg) - else if (docSettings.Yhelp.value) - reporter.echo(command.yusageMsg) - else if (docSettings.showPlugins.value) - reporter.warning(null, "Plugins are not available when using Scaladoc") - else if (docSettings.showPhases.value) - reporter.warning(null, "Phases are restricted when using Scaladoc") - else if (docSettings.help.value || !hasFiles) - reporter.echo(command.usageMsg) - else - try { new DocFactory(reporter, docSettings) document command.files } - catch { - case ex @ FatalError(msg) => - if (docSettings.debug.value) ex.printStackTrace() - reporter.error(null, "fatal error: " + msg) - } - finally reporter.printSummary() - - // not much point in returning !reporter.hasErrors when it has - // been overridden with constant false. 
- true - } -} - -object ScalaDoc extends ScalaDoc { - class Command(arguments: List[String], settings: doc.Settings) extends CompilerCommand(arguments, settings) { - override def cmdName = "scaladoc" - override def usageMsg = ( - createUsageMsg("where possible scaladoc", shouldExplain = false, x => x.isStandard && settings.isScaladocSpecific(x.name)) + - "\n\nStandard scalac options also available:" + - createUsageMsg(x => x.isStandard && !settings.isScaladocSpecific(x.name)) - ) - } - - def main(args: Array[String]): Unit = sys exit { - if (process(args)) 0 else 1 - } -} diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala deleted file mode 100644 index b4d2adaad4..0000000000 --- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala +++ /dev/null @@ -1,132 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda - */ - -package scala.tools.nsc -package doc - -import scala.util.control.ControlThrowable -import reporters.Reporter -import scala.reflect.internal.util.BatchSourceFile - -/** A documentation processor controls the process of generating Scala - * documentation, which is as follows. - * - * * A simplified compiler instance (with only the front-end phases enabled) - * * is created, and additional `sourceless` comments are registered. - * * Documentable files are compiled, thereby filling the compiler's symbol table. - * * A documentation model is extracted from the post-compilation symbol table. - * * A generator is used to transform the model into the correct final format (HTML). - * - * A processor contains a single compiler instantiated from the processor's - * `settings`. Each call to `document` uses the same compiler instance with - * the same symbol table. In particular, this implies that the scaladoc site - * obtained from a call to `run` will contain documentation about files compiled - * during previous calls to the same processor's `run` method. - * - * @param reporter The reporter to which both documentation and compilation errors will be reported. - * @param settings The settings to be used by the documenter and compiler for generating documentation. - * - * @author Gilles Dubochet */ -class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor => - /** The unique compiler instance used by this processor and constructed from its `settings`. */ - object compiler extends ScaladocGlobal(settings, reporter) - - /** Creates a scaladoc site for all symbols defined in this call's `source`, - * as well as those defined in `sources` of previous calls to the same processor. - * @param source The list of paths (relative to the compiler's source path, - * or absolute) of files to document or the source code. 
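For orientation, the relocated front-end keeps the scala.tools.nsc.ScalaDoc entry point, so it can still be driven programmatically as well as from the command line. A minimal sketch, not part of this patch; the flags, title and file name are placeholders:

    object RunScaladoc {
      def main(args: Array[String]): Unit = {
        // Equivalent to: scaladoc -d target/api -doc-title "My Library" Example.scala
        // (the output directory must already exist; all values here are illustrative)
        val ok = scala.tools.nsc.ScalaDoc.process(
          Array("-d", "target/api", "-doc-title", "My Library", "Example.scala"))
        sys.exit(if (ok) 0 else 1)   // mirrors ScalaDoc.main; note that process above currently always returns true
      }
    }
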
*/ - def makeUniverse(source: Either[List[String], String]): Option[Universe] = { - assert(settings.docformat.value == "html") - source match { - case Left(files) => - new compiler.Run() compile files - case Right(sourceCode) => - new compiler.Run() compileSources List(new BatchSourceFile("newSource", sourceCode)) - } - - if (reporter.hasErrors) - return None - - val extraTemplatesToDocument: Set[compiler.Symbol] = { - if (settings.docUncompilable.isDefault) Set() - else { - val uncompilable = new { - val global: compiler.type = compiler - val settings = processor.settings - } with Uncompilable { } - - compiler.docComments ++= uncompilable.comments - docdbg("" + uncompilable) - - uncompilable.templates - } - } - - val modelFactory = ( - new { override val global: compiler.type = compiler } - with model.ModelFactory(compiler, settings) - with model.ModelFactoryImplicitSupport - with model.ModelFactoryTypeSupport - with model.diagram.DiagramFactory - with model.CommentFactory - with model.TreeFactory - with model.MemberLookup { - override def templateShouldDocument(sym: compiler.Symbol, inTpl: DocTemplateImpl) = - extraTemplatesToDocument(sym) || super.templateShouldDocument(sym, inTpl) - } - ) - - modelFactory.makeModel match { - case Some(madeModel) => - if (!settings.scaladocQuietRun) - println("model contains " + modelFactory.templatesCount + " documentable templates") - Some(madeModel) - case None => - if (!settings.scaladocQuietRun) - println("no documentable class found in compilation units") - None - } - } - - object NoCompilerRunException extends ControlThrowable { } - - val documentError: PartialFunction[Throwable, Unit] = { - case NoCompilerRunException => - reporter.info(null, "No documentation generated with unsucessful compiler run", force = false) - case _: ClassNotFoundException => - () - } - - /** Generate document(s) for all `files` containing scaladoc documenataion. - * @param files The list of paths (relative to the compiler's source path, or absolute) of files to document. */ - def document(files: List[String]) { - def generate() = { - import doclet._ - val docletClass = Class.forName(settings.docgenerator.value) // default is html.Doclet - val docletInstance = docletClass.newInstance().asInstanceOf[Generator] - - docletInstance match { - case universer: Universer => - val universe = makeUniverse(Left(files)) getOrElse { throw NoCompilerRunException } - universer setUniverse universe - - docletInstance match { - case indexer: Indexer => indexer setIndex model.IndexModelFactory.makeIndex(universe) - case _ => () - } - case _ => () - } - docletInstance.generate() - } - - try generate() - catch documentError - } - - private[doc] def docdbg(msg: String) { - if (settings.Ydocdebug.value) - println(msg) - } -} diff --git a/src/compiler/scala/tools/nsc/doc/DocParser.scala b/src/compiler/scala/tools/nsc/doc/DocParser.scala deleted file mode 100644 index 6dc3e5a62b..0000000000 --- a/src/compiler/scala/tools/nsc/doc/DocParser.scala +++ /dev/null @@ -1,69 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools -package nsc -package doc - -import reporters._ -import scala.reflect.internal.util._ -import DocParser.Parsed - -/** A very minimal global customized for extracting `DocDefs`. It stops - * right after parsing so it can read `DocDefs` from source code which would - * otherwise cause the compiler to go haywire. 
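The factory removed above is the same one the Ant task's execute() drives; a compressed sketch of that flow, with placeholder paths, for readers who want to call it directly (it assumes the output directory exists):

    import scala.tools.nsc.doc.{ DocFactory, Settings }
    import scala.tools.nsc.reporters.ConsoleReporter

    val docSettings = new Settings(msg => Console.err.println(msg))
    docSettings.outdir.value = "target/api"        // placeholder destination for the generated HTML
    val reporter = new ConsoleReporter(docSettings)
    new DocFactory(reporter, docSettings) document List("Example.scala")
    reporter.printSummary()
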
- */ -class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) { - def this(settings: Settings) = this(settings, new ConsoleReporter(settings)) - def this() = this(new Settings(Console println _)) - - // the usual global initialization - locally { new Run() } - - override protected def computeInternalPhases() { - phasesSet += syntaxAnalyzer - } - - /** Returns a list of `DocParser.Parseds`, which hold the DocDefs found - * in the given code along with the surrounding trees. - */ - def docDefs(code: String) = { - def loop(enclosing: List[Tree], tree: Tree): List[Parsed] = tree match { - case x: PackageDef => x.stats flatMap (t => loop(enclosing :+ x, t)) - case x: DocDef => new Parsed(enclosing, x) :: loop(enclosing :+ x.definition, x.definition) - case x => x.children flatMap (t => loop(enclosing, t)) - } - loop(Nil, docUnit(code)) - } - - /** A compilation unit containing parsed source. - */ - def docUnit(code: String) = { - val unit = new CompilationUnit(new BatchSourceFile("", code)) - val scanner = newUnitParser(unit) - - scanner.compilationUnit() - } -} - -/** Since the DocParser's whole reason for existing involves trashing a - * global, it is designed to bottle up general `Global#Tree` types rather - * than path dependent ones. The recipient will have to deal. - */ -object DocParser { - type Tree = Global#Tree - type DefTree = Global#DefTree - type DocDef = Global#DocDef - type Name = Global#Name - - class Parsed(val enclosing: List[Tree], val docDef: DocDef) { - def nameChain: List[Name] = (enclosing :+ docDef.definition) collect { case x: DefTree => x.name } - def raw: String = docDef.comment.raw - - override def toString = ( - nameChain.init.map(x => if (x.isTypeName) x + "#" else x + ".").mkString + nameChain.last - ) - } -} diff --git a/src/compiler/scala/tools/nsc/doc/Index.scala b/src/compiler/scala/tools/nsc/doc/Index.scala deleted file mode 100644 index f9b9eecdb3..0000000000 --- a/src/compiler/scala/tools/nsc/doc/Index.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky - */ - -package scala.tools.nsc.doc - -import scala.collection._ - - -trait Index { - - type SymbolMap = SortedMap[String, SortedSet[model.MemberEntity]] - - def firstLetterIndex: Map[Char, SymbolMap] - -} diff --git a/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala deleted file mode 100644 index 021e59a879..0000000000 --- a/src/compiler/scala/tools/nsc/doc/ScaladocGlobal.scala +++ /dev/null @@ -1,106 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package doc - -import scala.util.control.ControlThrowable -import reporters.Reporter -import typechecker.Analyzer -import scala.reflect.internal.util.BatchSourceFile - -trait ScaladocAnalyzer extends Analyzer { - val global : Global // generally, a ScaladocGlobal - import global._ - - override def newTyper(context: Context): ScaladocTyper = new ScaladocTyper(context) - - class ScaladocTyper(context0: Context) extends Typer(context0) { - private def unit = context.unit - - override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = { - val sym = docDef.symbol - - if ((sym ne null) && (sym ne NoSymbol)) { - val comment = docDef.comment - fillDocComment(sym, comment) - val typer1 = newTyper(context.makeNewScope(docDef, context.owner)) - for (useCase <- comment.useCases) { - typer1.silent(_ => typer1 
defineUseCases useCase) match { - case SilentTypeError(err) => - unit.warning(useCase.pos, err.errMsg) - case _ => - } - for (useCaseSym <- useCase.defined) { - if (sym.name != useCaseSym.name) - unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode) - } - } - } - - super.typedDocDef(docDef, mode, pt) - } - - def defineUseCases(useCase: UseCase): List[Symbol] = { - def stringParser(str: String): syntaxAnalyzer.Parser = { - val file = new BatchSourceFile(context.unit.source.file, str) { - override def positionInUltimateSource(pos: Position) = { - pos.withSource(context.unit.source, useCase.pos.start) - } - } - val unit = new CompilationUnit(file) - new syntaxAnalyzer.UnitParser(unit) - } - - val trees = stringParser(useCase.body+";").nonLocalDefOrDcl - val enclClass = context.enclClass.owner - - def defineAlias(name: Name) = ( - if (context.scope.lookup(name) == NoSymbol) { - lookupVariable(name.toString.substring(1), enclClass) foreach { repl => - silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt => - val alias = enclClass.newAliasType(name.toTypeName, useCase.pos) - val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias) - val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe))) - alias setInfo newInfo - context.scope.enter(alias) - } - } - } - ) - - for (tree <- trees; t <- tree) - t match { - case Ident(name) if name startsWith '$' => defineAlias(name) - case _ => - } - - useCase.aliases = context.scope.toList - namer.enterSyms(trees) - typedStats(trees, NoSymbol) - useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) - - if (settings.debug.value) - useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) - - useCase.defined - } - } -} - -class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends { - override val useOffsetPositions = false -} with Global(settings, reporter) { - override protected def computeInternalPhases() { - phasesSet += syntaxAnalyzer - phasesSet += analyzer.namerFactory - phasesSet += analyzer.packageObjects - phasesSet += analyzer.typerFactory - } - override def forScaladoc = true - override lazy val analyzer = new { - val global: ScaladocGlobal.this.type = ScaladocGlobal.this - } with ScaladocAnalyzer -} diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala deleted file mode 100644 index 90b94e1336..0000000000 --- a/src/compiler/scala/tools/nsc/doc/Settings.scala +++ /dev/null @@ -1,368 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky - */ - -package scala.tools.nsc -package doc - -import java.io.File -import scala.language.postfixOps - -/** An extended version of compiler settings, with additional Scaladoc-specific options. - * @param error A function that prints a string to the appropriate error stream - * @param printMsg A function that prints the string, without any extra boilerplate of error */ -class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) { - - /** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always - * `html`. 
*/ - val docformat = ChoiceSetting ( - "-doc-format", - "format", - "Selects in which format documentation is rendered", - List("html"), - "html" - ) - - /** A setting that defines the overall title of the documentation, typically the name of the library being - * documented. ''Note:'' This setting is currently not used. */ - val doctitle = StringSetting ( - "-doc-title", - "title", - "The overall name of the Scaladoc site", - "" - ) - - /** A setting that defines the overall version number of the documentation, typically the version of the library being - * documented. ''Note:'' This setting is currently not used. */ - val docversion = StringSetting ( - "-doc-version", - "version", - "An optional version number, to be appended to the title", - "" - ) - - val docfooter = StringSetting ( - "-doc-footer", - "footer", - "A footer on every ScalaDoc page, by default the EPFL/Typesafe copyright notice. Can be overridden with a custom footer.", - "" - ) - - val docUncompilable = StringSetting ( - "-doc-no-compile", - "path", - "A directory containing sources which should be parsed, no more (e.g. AnyRef.scala)", - "" - ) - - lazy val uncompilableFiles = docUncompilable.value match { - case "" => Nil - case path => io.Directory(path).deepFiles filter (_ hasExtension "scala") toList - } - - /** A setting that defines a URL to be concatenated with source locations and show a link to source files. - * If needed the sourcepath option can be used to exclude undesired initial part of the link to sources */ - val docsourceurl = StringSetting ( - "-doc-source-url", - "url", - "A URL pattern used to build links to template sources; use variables, for example: ?{TPL_NAME} ('Seq'), ?{TPL_OWNER} ('scala.collection'), ?{FILE_PATH} ('scala/collection/Seq')", - "" - ) - - val docExternalDoc = MultiStringSetting ( - "-doc-external-doc", - "external-doc", - "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies." - ) - - val useStupidTypes = BooleanSetting ( - "-Yuse-stupid-types", - "Print the types of inherited members as seen from their original definition context. Hint: you don't want to do that!" - ) - - val docgenerator = StringSetting ( - "-doc-generator", - "class-name", - "The fully qualified name of a doclet class, which will be used to generate the documentation", - "scala.tools.nsc.doc.html.Doclet" - ) - - val docRootContent = PathSetting ( - "-doc-root-content", - "The file from which the root package documentation should be imported.", - "" - ) - - val docImplicits = BooleanSetting ( - "-implicits", - "Document members inherited by implicit conversions." - ) - - val docImplicitsDebug = BooleanSetting ( - "-implicits-debug", - "Show debugging information for members inherited by implicit conversions." - ) - - val docImplicitsShowAll = BooleanSetting ( - "-implicits-show-all", - "Show members inherited by implicit conversions that are impossible in the default scope. " + - "(for example conversions that require Numeric[String] to be in scope)" - ) - - val docImplicitsSoundShadowing = BooleanSetting ( - "-implicits-sound-shadowing", - "Use a sound implicit shadowing calculation. Note: this interacts badly with usecases, so " + - "only use it if you haven't defined usecase for implicitly inherited members." - ) - - val docImplicitsHide = MultiStringSetting ( - "-implicits-hide", - "implicit(s)", - "Hide the members inherited by the given comma separated, fully qualified implicit conversions. Add dot (.) to include default conversions." 
- ) - - val docDiagrams = BooleanSetting ( - "-diagrams", - "Create inheritance diagrams for classes, traits and packages." - ) - - val docDiagramsDebug = BooleanSetting ( - "-diagrams-debug", - "Show debugging information for the diagram creation process." - ) - - val docDiagramsDotPath = PathSetting ( - "-diagrams-dot-path", - "The path to the dot executable used to generate the inheritance diagrams. Eg: /usr/bin/dot", - "dot" // by default, just pick up the system-wide dot - ) - - /** The maxium nuber of normal classes to show in the diagram */ - val docDiagramsMaxNormalClasses = IntSetting( - "-diagrams-max-classes", - "The maximum number of superclasses or subclasses to show in a diagram", - 15, - None, - _ => None - ) - - /** The maxium nuber of implcit classes to show in the diagram */ - val docDiagramsMaxImplicitClasses = IntSetting( - "-diagrams-max-implicits", - "The maximum number of implicitly converted classes to show in a diagram", - 10, - None, - _ => None - ) - - val docDiagramsDotTimeout = IntSetting( - "-diagrams-dot-timeout", - "The timeout before the graphviz dot util is forcefully closed, in seconds (default: 10)", - 10, - None, - _ => None - ) - - val docDiagramsDotRestart = IntSetting( - "-diagrams-dot-restart", - "The number of times to restart a malfunctioning dot process before disabling diagrams (default: 5)", - 5, - None, - _ => None - ) - - val docRawOutput = BooleanSetting ( - "-raw-output", - "For each html file, create another .html.raw file containing only the text. (can be used for quickly diffing two scaladoc outputs)" - ) - - val docNoPrefixes = BooleanSetting ( - "-no-prefixes", - "Prevents generating prefixes in types, possibly creating ambiguous references, but significantly speeding up scaladoc." - ) - - val docNoLinkWarnings = BooleanSetting ( - "-no-link-warnings", - "Avoid warnings for ambiguous and incorrect links." - ) - - val docSkipPackages = StringSetting ( - "-skip-packages", - ":...:", - "A colon-delimited list of fully qualified package names that will be skipped from scaladoc.", - "" - ) - - val docExpandAllTypes = BooleanSetting ( - "-expand-all-types", - "Expand all type aliases and abstract types into full template pages. (locally this can be done with the @template annotation)" - ) - - val docExternalUrls = MultiStringSetting ( - "-external-urls", - "externalUrl(s)", - "(deprecated) comma-separated list of package_names=doc_URL for external dependencies, where package names are ':'-separated" - ) - - val docGroups = BooleanSetting ( - "-groups", - "Group similar functions together (based on the @group annotation)" - ) - - // Somewhere slightly before r18708 scaladoc stopped building unless the - // self-type check was suppressed. I hijacked the slotted-for-removal-anyway - // suppress-vt-warnings option and renamed it for this purpose. - noSelfCheck.value = true - - // For improved help output. 
- def scaladocSpecific = Set[Settings#Setting]( - docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes, - docDiagrams, docDiagramsDebug, docDiagramsDotPath, - docDiagramsDotTimeout, docDiagramsDotRestart, - docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide, - docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses, - docNoPrefixes, docNoLinkWarnings, docRawOutput, docSkipPackages, - docExpandAllTypes, docGroups - ) - val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name) - - override def isScaladoc = true - - // set by the testsuite, when checking test output - var scaladocQuietRun = false - - lazy val skipPackageNames = - if (docSkipPackages.value == "") - Set[String]() - else - docSkipPackages.value.toLowerCase.split(':').toSet - - def skipPackage(qname: String) = - skipPackageNames(qname.toLowerCase) - - lazy val hiddenImplicits: Set[String] = { - if (docImplicitsHide.value.isEmpty) hardcoded.commonConversionTargets - else docImplicitsHide.value.toSet flatMap { name: String => - if(name == ".") hardcoded.commonConversionTargets - else Set(name) - } - } - - def appendIndex(url: String): String = { - val index = "/index.html" - if (url.endsWith(index)) url else url + index - } - - // Deprecated together with 'docExternalUrls' option. - lazy val extUrlPackageMapping: Map[String, String] = (Map.empty[String, String] /: docExternalUrls.value) { - case (map, binding) => - val idx = binding indexOf "=" - val pkgs = binding substring (0, idx) split ":" - val url = appendIndex(binding substring (idx + 1)) - map ++ (pkgs map (_ -> url)) - } - - lazy val extUrlMapping: Map[String, String] = docExternalDoc.value flatMap { s => - val idx = s.indexOf("#") - if (idx > 0) { - val (first, last) = s.splitAt(idx) - Some(new File(first).getAbsolutePath -> appendIndex(last.substring(1))) - } else { - error(s"Illegal -doc-external-doc option; expected a pair with '#' separator, found: '$s'") - None - } - } toMap - - /** - * This is the hardcoded area of Scaladoc. This is where "undesirable" stuff gets eliminated. I know it's not pretty, - * but ultimately scaladoc has to be useful. :) - */ - object hardcoded { - - /** The common context bounds and some humanly explanations. 
Feel free to add more explanations - * `.scala.package.Numeric` is the type class - * `tparam` is the name of the type parameter it gets (this only describes type classes with 1 type param) - * the function result should be a humanly-understandable description of the type class - */ - val knownTypeClasses: Map[String, String => String] = Map() + - ("scala.math.Numeric" -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) + - ("scala.math.Integral" -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) + - ("scala.math.Fractional" -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) + - ("scala.reflect.Manifest" -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) + - ("scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) + - ("scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) + - ("scala.reflect.ClassTag" -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) + - ("scala.reflect.api.TypeTags.WeakTypeTag" -> ((tparam: String) => tparam + " is accompanied by an WeakTypeTag, which is a runtime representation of its type that survives erasure")) + - ("scala.reflect.api.TypeTags.TypeTag" -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure")) - - /** - * Set of classes to exclude from index and diagrams - * TODO: Should be configurable - */ - def isExcluded(qname: String) = { - ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") || - qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction") - ) && !( - qname == "scala.Tuple1" || qname == "scala.Tuple2" || - qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" || - qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" || - qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" || - qname == "scala.runtime.AbstractFunction2" - ) - ) - } - - /** Common conversion targets that affect any class in Scala */ - val commonConversionTargets = Set( - "scala.Predef.StringFormat", - "scala.Predef.StringAdd", - "scala.Predef.ArrowAssoc", - "scala.Predef.Ensuring", - "scala.collection.TraversableOnce.alternateImplicit") - - /** There's a reason all these are specialized by hand but documenting each of them is beyond the point */ - val arraySkipConversions = List( - "scala.Predef.refArrayOps", - "scala.Predef.intArrayOps", - "scala.Predef.doubleArrayOps", - "scala.Predef.longArrayOps", - "scala.Predef.floatArrayOps", - "scala.Predef.charArrayOps", - "scala.Predef.byteArrayOps", - "scala.Predef.shortArrayOps", - "scala.Predef.booleanArrayOps", - "scala.Predef.unitArrayOps", - "scala.LowPriorityImplicits.wrapRefArray", - "scala.LowPriorityImplicits.wrapIntArray", - "scala.LowPriorityImplicits.wrapDoubleArray", - "scala.LowPriorityImplicits.wrapLongArray", - "scala.LowPriorityImplicits.wrapFloatArray", - "scala.LowPriorityImplicits.wrapCharArray", - 
"scala.LowPriorityImplicits.wrapByteArray", - "scala.LowPriorityImplicits.wrapShortArray", - "scala.LowPriorityImplicits.wrapBooleanArray", - "scala.LowPriorityImplicits.wrapUnitArray", - "scala.LowPriorityImplicits.genericWrapArray") - - // included as names as here we don't have access to a Global with Definitions :( - def valueClassList = List("unit", "boolean", "byte", "short", "char", "int", "long", "float", "double") - def valueClassFilterPrefixes = List("scala.LowPriorityImplicits", "scala.Predef") - - /** Dirty, dirty, dirty hack: the value params conversions can all kick in -- and they are disambiguated by priority - * but showing priority in scaladoc would make no sense -- so we have to manually remove the conversions that we - * know will never get a chance to kick in. Anyway, DIRTY DIRTY DIRTY! */ - def valueClassFilter(value: String, conversionName: String): Boolean = { - val valueName = value.toLowerCase - val otherValues = valueClassList.filterNot(_ == valueName) - - for (prefix <- valueClassFilterPrefixes) - if (conversionName.startsWith(prefix)) - for (otherValue <- otherValues) - if (conversionName.startsWith(prefix + "." + otherValue)) - return false - - true - } - } -} diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala deleted file mode 100644 index 9447e36610..0000000000 --- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package doc -import scala.language.implicitConversions -import scala.language.postfixOps - -/** Some glue between DocParser (which reads source files which can't be compiled) - * and the scaladoc model. 
- */ -trait Uncompilable { - val global: Global - val settings: Settings - - import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, DocComment, NoSymbol } - import global.definitions.AnyRefClass - import global.rootMirror.RootClass - - private implicit def translateName(name: Global#Name) = - if (name.isTypeName) newTypeName("" + name) else newTermName("" + name) - - def docSymbol(p: DocParser.Parsed) = p.nameChain.foldLeft(RootClass: Symbol)(_.tpe member _) - def docDefs(code: String) = new DocParser(settings, reporter) docDefs code - def docPairs(code: String) = docDefs(code) map (p => (docSymbol(p), new DocComment(p.raw))) - - lazy val pairs = files flatMap { f => - val comments = docPairs(f.slurp()) - if (settings.verbose.value) - inform("Found %d doc comments in parse-only file %s: %s".format(comments.size, f, comments.map(_._1).mkString(", "))) - - comments - } - def files = settings.uncompilableFiles - def symbols = pairs map (_._1) - def templates = symbols filter (x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */) toSet - def comments = { - if (settings.debug.value || settings.verbose.value) - inform("Found %d uncompilable files: %s".format(files.size, files mkString ", ")) - - if (pairs.isEmpty) - warning("no doc comments read from " + settings.docUncompilable.value) - - pairs - } - override def toString = pairs.size + " uncompilable symbols:\n" + ( - symbols filterNot (_ == NoSymbol) map (x => " " + x.owner.fullName + " " + x.defString) mkString "\n" - ) -} diff --git a/src/compiler/scala/tools/nsc/doc/Universe.scala b/src/compiler/scala/tools/nsc/doc/Universe.scala deleted file mode 100644 index 11520c810e..0000000000 --- a/src/compiler/scala/tools/nsc/doc/Universe.scala +++ /dev/null @@ -1,16 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky - */ - -package scala.tools.nsc.doc - -/** - * Class to hold common dependencies across Scaladoc classes. - * @author Pedro Furlanetto - * @author Gilles Dubochet - */ -trait Universe { - def settings: Settings - def rootPackage: model.Package -} diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala deleted file mode 100755 index 2064d86860..0000000000 --- a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ /dev/null @@ -1,936 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Manohar Jonnalagedda - */ - -package scala.tools.nsc -package doc -package base - -import base.comment._ -import scala.collection._ -import scala.util.matching.Regex -import scala.reflect.internal.util.Position -import scala.language.postfixOps - -/** The comment parser transforms raw comment strings into `Comment` objects. - * Call `parse` to run the parser. Note that the parser is stateless and - * should only be built once for a given Scaladoc run. 
- * - * @author Manohar Jonnalagedda - * @author Gilles Dubochet */ -trait CommentFactoryBase { this: MemberLookupBase => - - val global: Global - import global.{ reporter, Symbol } - - /* Creates comments with necessary arguments */ - def createComment ( - body0: Option[Body] = None, - authors0: List[Body] = List.empty, - see0: List[Body] = List.empty, - result0: Option[Body] = None, - throws0: Map[String,Body] = Map.empty, - valueParams0: Map[String,Body] = Map.empty, - typeParams0: Map[String,Body] = Map.empty, - version0: Option[Body] = None, - since0: Option[Body] = None, - todo0: List[Body] = List.empty, - deprecated0: Option[Body] = None, - note0: List[Body] = List.empty, - example0: List[Body] = List.empty, - constructor0: Option[Body] = None, - source0: Option[String] = None, - inheritDiagram0: List[String] = List.empty, - contentDiagram0: List[String] = List.empty, - group0: Option[Body] = None, - groupDesc0: Map[String,Body] = Map.empty, - groupNames0: Map[String,Body] = Map.empty, - groupPrio0: Map[String,Body] = Map.empty - ) : Comment = new Comment{ - val body = if(body0 isDefined) body0.get else Body(Seq.empty) - val authors = authors0 - val see = see0 - val result = result0 - val throws = throws0 - val valueParams = valueParams0 - val typeParams = typeParams0 - val version = version0 - val since = since0 - val todo = todo0 - val deprecated = deprecated0 - val note = note0 - val example = example0 - val constructor = constructor0 - val inheritDiagram = inheritDiagram0 - val contentDiagram = contentDiagram0 - val groupDesc = groupDesc0 - val group = - group0 match { - case Some(Body(List(Paragraph(Chain(List(Summary(Text(groupId)))))))) => Some(groupId.toString.trim) - case _ => None - } - val groupPrio = groupPrio0 flatMap { - case (group, body) => - try { - body match { - case Body(List(Paragraph(Chain(List(Summary(Text(prio))))))) => List(group -> prio.trim.toInt) - case _ => List() - } - } catch { - case _: java.lang.NumberFormatException => List() - } - } - val groupNames = groupNames0 flatMap { - case (group, body) => - try { - body match { - case Body(List(Paragraph(Chain(List(Summary(Text(name))))))) if (!name.trim.contains("\n")) => List(group -> (name.trim)) - case _ => List() - } - } catch { - case _: java.lang.NumberFormatException => List() - } - } - - } - - private val endOfText = '\u0003' - private val endOfLine = '\u000A' - - /** Something that should not have happened, happened, and Scaladoc should exit. */ - private def oops(msg: String): Nothing = - throw FatalError("program logic: " + msg) - - /** The body of a line, dropping the (optional) start star-marker, - * one leading whitespace and all trailing whitespace. */ - private val CleanCommentLine = - new Regex("""(?:\s*\*\s?)?(.*)""") - - /** Dangerous HTML tags that should be replaced by something safer, - * such as wiki syntax, or that should be dropped. */ - private val DangerousTags = - new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|""") - - /** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string - * if it cannot be salvaged. 
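
As an aside, a trimmed-down standalone sketch of how DangerousTags and the htmlReplacement table defined just below rewrite block-level HTML into wiki syntax (not part of this patch; the regex here omits the HTML-comment alternative of the original):

import scala.util.matching.Regex

object DangerousTagsDemo {
  val DangerousTags = new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>""")

  // Subset of the replacement table below: headings become wiki '=' markers,
  // paragraph openers become blank lines, everything else is dropped.
  def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
    case "p" | "div" => "\n\n"
    case "h1"        => "\n= "
    case "/h1"       => " =\n"
    case "li"        => "\n * - "
    case _           => ""
  }

  def main(args: Array[String]): Unit = {
    val raw = "<h1>Searching</h1><p>Sorted sequences only.</p>"
    // The heading turns into "= Searching =", the paragraph into plain text on its own lines.
    println(DangerousTags.replaceAllIn(raw, m => htmlReplacement(m)))
  }
}
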
*/ - private def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match { - case "p" | "div" => "\n\n" - case "h1" => "\n= " - case "/h1" => " =\n" - case "h2" => "\n== " - case "/h2" => " ==\n" - case "h3" => "\n=== " - case "/h3" => " ===\n" - case "h4" | "h5" | "h6" => "\n==== " - case "/h4" | "/h5" | "/h6" => " ====\n" - case "li" => "\n * - " - case _ => "" - } - - /** Javadoc tags that should be replaced by something useful, such as wiki - * syntax, or that should be dropped. */ - private val JavadocTags = - new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""") - - /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */ - private def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match { - case "code" => "`" + mtch.group(2) + "`" - case "docRoot" => "" - case "inheritDoc" => "" - case "link" => "`" + mtch.group(2) + "`" - case "linkplain" => "`" + mtch.group(2) + "`" - case "literal" => mtch.group(2) - case "value" => "`" + mtch.group(2) + "`" - case _ => "" - } - - /** Safe HTML tags that can be kept. */ - private val SafeTags = - new Regex("""((&\w+;)|(&#\d+;)|(]*)?/?>))""") - - private val safeTagMarker = '\u000E' - - /** A Scaladoc tag not linked to a symbol and not followed by text */ - private val SingleTagRegex = - new Regex("""\s*@(\S+)\s*""") - - /** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */ - private val SimpleTagRegex = - new Regex("""\s*@(\S+)\s+(.*)""") - - /** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name - * of the symbol, and the rest of the line. */ - private val SymbolTagRegex = - new Regex("""\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""") - - /** The start of a scaladoc code block */ - private val CodeBlockStartRegex = - new Regex("""(.*?)((?:\{\{\{)|(?:\u000E]*)?>\u000E))(.*)""") - - /** The end of a scaladoc code block */ - private val CodeBlockEndRegex = - new Regex("""(.*?)((?:\}\}\})|(?:\u000E
    \u000E))(.*)""") - - /** A key used for a tag map. The key is built from the name of the tag and - * from the linked symbol if the tag has one. - * Equality on tag keys is structural. */ - private sealed abstract class TagKey { - def name: String - } - - private final case class SimpleTagKey(name: String) extends TagKey - private final case class SymbolTagKey(name: String, symbol: String) extends TagKey - - /** Parses a raw comment string into a `Comment` object. - * @param comment The expanded comment string (including start and end markers) to be parsed. - * @param src The raw comment source string. - * @param pos The position of the comment in source. */ - protected def parseAtSymbol(comment: String, src: String, pos: Position, siteOpt: Option[Symbol] = None): Comment = { - /** The cleaned raw comment as a list of lines. Cleaning removes comment - * start and end markers, line start markers and unnecessary whitespace. */ - def clean(comment: String): List[String] = { - def cleanLine(line: String): String = { - //replaceAll removes trailing whitespaces - line.replaceAll("""\s+$""", "") match { - case CleanCommentLine(ctl) => ctl - case tl => tl - } - } - val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/") - val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) }) - val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) }) - val markedTagComment = - SafeTags.replaceAllIn(javadoclessComment, { mtch => - java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker) - }) - markedTagComment.lines.toList map (cleanLine(_)) - } - - /** Parses a comment (in the form of a list of lines) to a `Comment` - * instance, recursively on lines. To do so, it splits the whole comment - * into main body and tag bodies, then runs the `WikiParser` on each body - * before creating the comment instance. - * - * @param docBody The body of the comment parsed until now. - * @param tags All tags parsed until now. - * @param lastTagKey The last parsed tag, or `None` if the tag section hasn't started. Lines that are not tagged - * are part of the previous tag or, if none exists, of the body. - * @param remaining The lines that must still recursively be parsed. - * @param inCodeBlock Whether the next line is part of a code block (in which no tags must be read). 
*/ - def parse0 ( - docBody: StringBuilder, - tags: Map[TagKey, List[String]], - lastTagKey: Option[TagKey], - remaining: List[String], - inCodeBlock: Boolean - ): Comment = remaining match { - - case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) => - if (!before.trim.isEmpty && !after.trim.isEmpty) - parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = false) - else if (!before.trim.isEmpty) - parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = false) - else if (!after.trim.isEmpty) - parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = true) - else lastTagKey match { - case Some(key) => - val value = - ((tags get key): @unchecked) match { - case Some(b :: bs) => (b + endOfLine + marker) :: bs - case None => oops("lastTagKey set when no tag exists for key") - } - parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = true) - case None => - parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true) - } - - case CodeBlockEndRegex(before, marker, after) :: ls => - if (!before.trim.isEmpty && !after.trim.isEmpty) - parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true) - if (!before.trim.isEmpty) - parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = true) - else if (!after.trim.isEmpty) - parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = false) - else lastTagKey match { - case Some(key) => - val value = - ((tags get key): @unchecked) match { - case Some(b :: bs) => (b + endOfLine + marker) :: bs - case None => oops("lastTagKey set when no tag exists for key") - } - parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = false) - case None => - parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = false) - } - - case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) => - val key = SymbolTagKey(name, sym) - val value = body :: tags.getOrElse(key, Nil) - parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) - - case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) => - val key = SimpleTagKey(name) - val value = body :: tags.getOrElse(key, Nil) - parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) - - case SingleTagRegex(name) :: ls if (!inCodeBlock) => - val key = SimpleTagKey(name) - val value = "" :: tags.getOrElse(key, Nil) - parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) - - case line :: ls if (lastTagKey.isDefined) => - val key = lastTagKey.get - val value = - ((tags get key): @unchecked) match { - case Some(b :: bs) => (b + endOfLine + line) :: bs - case None => oops("lastTagKey set when no tag exists for key") - } - parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock) - - case line :: ls => - if (docBody.length > 0) docBody append endOfLine - docBody append line - parse0(docBody, tags, lastTagKey, ls, inCodeBlock) - - case Nil => - // Take the {inheritance, content} diagram keys aside, as it doesn't need any parsing - val inheritDiagramTag = SimpleTagKey("inheritanceDiagram") - val contentDiagramTag = SimpleTagKey("contentDiagram") - - val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match { - case Some(list) => list - case None => List.empty - } - - val contentDiagramText: List[String] = tags.get(contentDiagramTag) match { - case Some(list) => list - case None => List.empty - } - - val stripTags=List(inheritDiagramTag, contentDiagramTag, 
SimpleTagKey("template"), SimpleTagKey("documentable")) - val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1)) - - val bodyTags: mutable.Map[TagKey, List[Body]] = - mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(_, pos, siteOpt))} toSeq: _*) - - def oneTag(key: SimpleTagKey): Option[Body] = - ((bodyTags remove key): @unchecked) match { - case Some(r :: rs) => - if (!rs.isEmpty) reporter.warning(pos, "Only one '@" + key.name + "' tag is allowed") - Some(r) - case None => None - } - - def allTags(key: SimpleTagKey): List[Body] = - (bodyTags remove key) getOrElse Nil - - def allSymsOneTag(key: TagKey): Map[String, Body] = { - val keys: Seq[SymbolTagKey] = - bodyTags.keys.toSeq flatMap { - case stk: SymbolTagKey if (stk.name == key.name) => Some(stk) - case stk: SimpleTagKey if (stk.name == key.name) => - reporter.warning(pos, "Tag '@" + stk.name + "' must be followed by a symbol name") - None - case _ => None - } - val pairs: Seq[(String, Body)] = - for (key <- keys) yield { - val bs = (bodyTags remove key).get - if (bs.length > 1) - reporter.warning(pos, "Only one '@" + key.name + "' tag for symbol " + key.symbol + " is allowed") - (key.symbol, bs.head) - } - Map.empty[String, Body] ++ pairs - } - - val com = createComment ( - body0 = Some(parseWikiAtSymbol(docBody.toString, pos, siteOpt)), - authors0 = allTags(SimpleTagKey("author")), - see0 = allTags(SimpleTagKey("see")), - result0 = oneTag(SimpleTagKey("return")), - throws0 = allSymsOneTag(SimpleTagKey("throws")), - valueParams0 = allSymsOneTag(SimpleTagKey("param")), - typeParams0 = allSymsOneTag(SimpleTagKey("tparam")), - version0 = oneTag(SimpleTagKey("version")), - since0 = oneTag(SimpleTagKey("since")), - todo0 = allTags(SimpleTagKey("todo")), - deprecated0 = oneTag(SimpleTagKey("deprecated")), - note0 = allTags(SimpleTagKey("note")), - example0 = allTags(SimpleTagKey("example")), - constructor0 = oneTag(SimpleTagKey("constructor")), - source0 = Some(clean(src).mkString("\n")), - inheritDiagram0 = inheritDiagramText, - contentDiagram0 = contentDiagramText, - group0 = oneTag(SimpleTagKey("group")), - groupDesc0 = allSymsOneTag(SimpleTagKey("groupdesc")), - groupNames0 = allSymsOneTag(SimpleTagKey("groupname")), - groupPrio0 = allSymsOneTag(SimpleTagKey("groupprio")) - ) - - for ((key, _) <- bodyTags) - reporter.warning(pos, "Tag '@" + key.name + "' is not recognised") - - com - - } - - parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), inCodeBlock = false) - - } - - /** Parses a string containing wiki syntax into a `Comment` object. - * Note that the string is assumed to be clean: - * - Removed Scaladoc start and end markers. - * - Removed start-of-line star and one whitespace afterwards (if present). - * - Removed all end-of-line whitespace. - * - Only `endOfLine` is used to mark line endings. 
*/ - def parseWikiAtSymbol(string: String, pos: Position, siteOpt: Option[Symbol]): Body = new WikiParser(string, pos, siteOpt).document() - - /** TODO - * - * @author Ingo Maier - * @author Manohar Jonnalagedda - * @author Gilles Dubochet */ - protected final class WikiParser(val buffer: String, pos: Position, siteOpt: Option[Symbol]) extends CharReader(buffer) { wiki => - var summaryParsed = false - - def document(): Body = { - val blocks = new mutable.ListBuffer[Block] - while (char != endOfText) - blocks += block() - Body(blocks.toList) - } - - /* BLOCKS */ - - /** {{{ block ::= code | title | hrule | para }}} */ - def block(): Block = { - if (checkSkipInitWhitespace("{{{")) - code() - else if (checkSkipInitWhitespace('=')) - title() - else if (checkSkipInitWhitespace("----")) - hrule() - else if (checkList) - listBlock - else { - para() - } - } - - /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc - * Characters used to build lists and their constructors */ - protected val listStyles = Map[String, (Seq[Block] => Block)]( // TODO Should this be defined at some list companion? - "- " -> ( UnorderedList(_) ), - "1. " -> ( OrderedList(_,"decimal") ), - "I. " -> ( OrderedList(_,"upperRoman") ), - "i. " -> ( OrderedList(_,"lowerRoman") ), - "A. " -> ( OrderedList(_,"upperAlpha") ), - "a. " -> ( OrderedList(_,"lowerAlpha") ) - ) - - /** Checks if the current line is formed with more than one space and one the listStyles */ - def checkList = - (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) }) - - /** {{{ - * nListBlock ::= nLine { mListBlock } - * nLine ::= nSpc listStyle para '\n' - * }}} - * Where n and m stand for the number of spaces. When `m > n`, a new list is nested. */ - def listBlock: Block = { - - /** Consumes one list item block and returns it, or None if the block is - * not a list or a different list. */ - def listLine(indent: Int, style: String): Option[Block] = - if (countWhitespace > indent && checkList) - Some(listBlock) - else if (countWhitespace != indent || !checkSkipInitWhitespace(style)) - None - else { - jumpWhitespace() - jump(style) - val p = Paragraph(inline(isInlineEnd = false)) - blockEnded("end of list line ") - Some(p) - } - - /** Consumes all list item blocks (possibly with nested lists) of the - * same list and returns the list block. */ - def listLevel(indent: Int, style: String): Block = { - val lines = mutable.ListBuffer.empty[Block] - var line: Option[Block] = listLine(indent, style) - while (line.isDefined) { - lines += line.get - line = listLine(indent, style) - } - val constructor = listStyles(style) - constructor(lines) - } - - val indent = countWhitespace - val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head) - listLevel(indent, style) - } - - def code(): Block = { - jumpWhitespace() - jump("{{{") - val str = readUntil("}}}") - if (char == endOfText) - reportError(pos, "unclosed code block") - else - jump("}}}") - blockEnded("code block") - Code(normalizeIndentation(str)) - } - - /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) 
'\n' }}} */ - def title(): Block = { - jumpWhitespace() - val inLevel = repeatJump('=') - val text = inline(check("=" * inLevel)) - val outLevel = repeatJump('=', inLevel) - if (inLevel != outLevel) - reportError(pos, "unbalanced or unclosed heading") - blockEnded("heading") - Title(text, inLevel) - } - - /** {{{ hrule ::= "----" { '-' } '\n' }}} */ - def hrule(): Block = { - jumpWhitespace() - repeatJump('-') - blockEnded("horizontal rule") - HorizontalRule() - } - - /** {{{ para ::= inline '\n' }}} */ - def para(): Block = { - val p = - if (summaryParsed) - Paragraph(inline(isInlineEnd = false)) - else { - val s = summary() - val r = - if (checkParaEnded()) List(s) else List(s, inline(isInlineEnd = false)) - summaryParsed = true - Paragraph(Chain(r)) - } - while (char == endOfLine && char != endOfText) - nextChar() - p - } - - /* INLINES */ - - val OPEN_TAG = "^<([A-Za-z]+)( [^>]*)?(/?)>$".r - val CLOSE_TAG = "^$".r - private def readHTMLFrom(begin: HtmlTag): String = { - val list = mutable.ListBuffer.empty[String] - val stack = mutable.ListBuffer.empty[String] - - begin.close match { - case Some(HtmlTag(CLOSE_TAG(s))) => - stack += s - case _ => - return "" - } - - do { - val str = readUntil { char == safeTagMarker || char == endOfText } - nextChar() - - list += str - - str match { - case OPEN_TAG(s, _, standalone) => { - if (standalone != "/") { - stack += s - } - } - case CLOSE_TAG(s) => { - if (s == stack.last) { - stack.remove(stack.length-1) - } - } - case _ => ; - } - } while (stack.length > 0 && char != endOfText) - - list mkString "" - } - - def inline(isInlineEnd: => Boolean): Inline = { - - def inline0(): Inline = { - if (char == safeTagMarker) { - val tag = htmlTag() - HtmlTag(tag.data + readHTMLFrom(tag)) - } - else if (check("'''")) bold() - else if (check("''")) italic() - else if (check("`")) monospace() - else if (check("__")) underline() - else if (check("^")) superscript() - else if (check(",,")) subscript() - else if (check("[[")) link() - else { - val str = readUntil { char == safeTagMarker || check("''") || char == '`' || check("__") || char == '^' || check(",,") || check("[[") || isInlineEnd || checkParaEnded || char == endOfLine } - Text(str) - } - } - - val inlines: List[Inline] = { - val iss = mutable.ListBuffer.empty[Inline] - iss += inline0() - while (!isInlineEnd && !checkParaEnded) { - val skipEndOfLine = if (char == endOfLine) { - nextChar() - true - } else { - false - } - - val current = inline0() - (iss.last, current) match { - case (Text(t1), Text(t2)) if skipEndOfLine => - iss.update(iss.length - 1, Text(t1 + endOfLine + t2)) - case (i1, i2) if skipEndOfLine => - iss ++= List(Text(endOfLine.toString), i2) - case _ => iss += current - } - } - iss.toList - } - - inlines match { - case Nil => Text("") - case i :: Nil => i - case is => Chain(is) - } - - } - - def htmlTag(): HtmlTag = { - jump(safeTagMarker) - val read = readUntil(safeTagMarker) - if (char != endOfText) jump(safeTagMarker) - HtmlTag(read) - } - - def bold(): Inline = { - jump("'''") - val i = inline(check("'''")) - jump("'''") - Bold(i) - } - - def italic(): Inline = { - jump("''") - val i = inline(check("''")) - jump("''") - Italic(i) - } - - def monospace(): Inline = { - jump("`") - val i = inline(check("`")) - jump("`") - Monospace(i) - } - - def underline(): Inline = { - jump("__") - val i = inline(check("__")) - jump("__") - Underline(i) - } - - def superscript(): Inline = { - jump("^") - val i = inline(check("^")) - if (jump("^")) { - Superscript(i) - } else { - Chain(Seq(Text("^"), 
i)) - } - } - - def subscript(): Inline = { - jump(",,") - val i = inline(check(",,")) - jump(",,") - Subscript(i) - } - - def summary(): Inline = { - val i = inline(check(".")) - Summary( - if (jump(".")) - Chain(List(i, Text("."))) - else - i - ) - } - - def link(): Inline = { - val SchemeUri = """([a-z]+:.*)""".r - jump("[[") - val parens = 2 + repeatJump('[') - val start = "[" * parens - val stop = "]" * parens - //println("link with " + parens + " matching parens") - val target = readUntil { check(stop) || check(" ") } - val title = - if (!check(stop)) Some({ - jump(" ") - inline(check(stop)) - }) - else None - jump(stop) - - (target, title) match { - case (SchemeUri(uri), optTitle) => - Link(uri, optTitle getOrElse Text(uri)) - case (qualName, optTitle) => - makeEntityLink(optTitle getOrElse Text(target), pos, target, siteOpt) - } - } - - /* UTILITY */ - - /** {{{ eol ::= { whitespace } '\n' }}} */ - def blockEnded(blockType: String): Unit = { - if (char != endOfLine && char != endOfText) { - reportError(pos, "no additional content on same line after " + blockType) - jumpUntil(endOfLine) - } - while (char == endOfLine) - nextChar() - } - - /** - * Eliminates the (common) leading spaces in all lines, based on the first line - * For indented pieces of code, it reduces the indent to the least whitespace prefix: - * {{{ - * indented example - * another indented line - * if (condition) - * then do something; - * ^ this is the least whitespace prefix - * }}} - */ - def normalizeIndentation(_code: String): String = { - - val code = _code.trim - var maxSkip = Integer.MAX_VALUE - var crtSkip = 0 - var wsArea = true - var index = 0 - var firstLine = true - var emptyLine = true - - while (index < code.length) { - code(index) match { - case ' ' => - if (wsArea) - crtSkip += 1 - case c => - wsArea = (c == '\n') - maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip - crtSkip = if (c == '\n') 0 else crtSkip - firstLine = if (c == '\n') false else firstLine - emptyLine = if (c == '\n') true else false - } - index += 1 - } - - if (maxSkip == 0) - code - else { - index = 0 - val builder = new StringBuilder - while (index < code.length) { - builder.append(code(index)) - if (code(index) == '\n') { - // we want to skip as many spaces are available, if there are less spaces (like on empty lines, do not - // over-consume them) - index += 1 - val limit = index + maxSkip - while ((index < code.length) && (code(index) == ' ') && index < limit) - index += 1 - } - else - index += 1 - } - builder.toString - } - } - - def checkParaEnded(): Boolean = { - (char == endOfText) || - ((char == endOfLine) && { - val poff = offset - nextChar() // read EOL - val ok = { - checkSkipInitWhitespace(endOfLine) || - checkSkipInitWhitespace('=') || - checkSkipInitWhitespace("{{{") || - checkList || - checkSkipInitWhitespace('\u003D') - } - offset = poff - ok - }) - } - - def reportError(pos: Position, message: String) { - reporter.warning(pos, message) - } - } - - protected sealed class CharReader(buffer: String) { reader => - - var offset: Int = 0 - def char: Char = - if (offset >= buffer.length) endOfText else buffer charAt offset - - final def nextChar() { - offset += 1 - } - - final def check(chars: String): Boolean = { - val poff = offset - val ok = jump(chars) - offset = poff - ok - } - - def checkSkipInitWhitespace(c: Char): Boolean = { - val poff = offset - jumpWhitespace() - val ok = jump(c) - offset = poff - ok - } - - def checkSkipInitWhitespace(chars: String): Boolean = { - 
val poff = offset - jumpWhitespace() - val (ok0, chars0) = - if (chars.charAt(0) == ' ') - (offset > poff, chars substring 1) - else - (true, chars) - val ok = ok0 && jump(chars0) - offset = poff - ok - } - - def countWhitespace: Int = { - var count = 0 - val poff = offset - while (isWhitespace(char) && char != endOfText) { - nextChar() - count += 1 - } - offset = poff - count - } - - /* JUMPERS */ - - /** jumps a character and consumes it - * @return true only if the correct character has been jumped */ - final def jump(ch: Char): Boolean = { - if (char == ch) { - nextChar() - true - } - else false - } - - /** jumps all the characters in chars, consuming them in the process. - * @return true only if the correct characters have been jumped */ - final def jump(chars: String): Boolean = { - var index = 0 - while (index < chars.length && char == chars.charAt(index) && char != endOfText) { - nextChar() - index += 1 - } - index == chars.length - } - - final def repeatJump(c: Char, max: Int = Int.MaxValue): Int = { - var count = 0 - while (jump(c) && count < max) - count += 1 - count - } - - final def jumpUntil(ch: Char): Int = { - var count = 0 - while (char != ch && char != endOfText) { - nextChar() - count += 1 - } - count - } - - final def jumpUntil(pred: => Boolean): Int = { - var count = 0 - while (!pred && char != endOfText) { - nextChar() - count += 1 - } - count - } - - def jumpWhitespace() = jumpUntil(!isWhitespace(char)) - - /* READERS */ - - final def readUntil(c: Char): String = { - withRead { - while (char != c && char != endOfText) { - nextChar() - } - } - } - - final def readUntil(chars: String): String = { - assert(chars.length > 0) - withRead { - val c = chars.charAt(0) - while (!check(chars) && char != endOfText) { - nextChar() - while (char != c && char != endOfText) - nextChar() - } - } - } - - final def readUntil(pred: => Boolean): String = { - withRead { - while (char != endOfText && !pred) { - nextChar() - } - } - } - - private def withRead(read: => Unit): String = { - val start = offset - read - buffer.substring(start, offset) - } - - - /* CHARS CLASSES */ - - def isWhitespace(c: Char) = c == ' ' || c == '\t' - - } - -} diff --git a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala b/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala deleted file mode 100755 index c11179800c..0000000000 --- a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala +++ /dev/null @@ -1,15 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - */ - -package scala.tools.nsc -package doc -package base - -import scala.collection._ - -sealed trait LinkTo -final case class LinkToMember[Mbr, Tpl](mbr: Mbr, tpl: Tpl) extends LinkTo -final case class LinkToTpl[Tpl](tpl: Tpl) extends LinkTo -final case class LinkToExternal(name: String, url: String) extends LinkTo -final case class Tooltip(name: String) extends LinkTo diff --git a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala deleted file mode 100755 index 8d80333195..0000000000 --- a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala +++ /dev/null @@ -1,206 +0,0 @@ -package scala.tools.nsc -package doc -package base - -import comment._ - -/** This trait extracts all required information for documentation from compilation units. 
- * The base trait has been extracted to allow getting light-weight documentation - * for a particular symbol in the IDE.*/ -trait MemberLookupBase { - - val global: Global - import global._ - - def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] - def chooseLink(links: List[LinkTo]): LinkTo - def toString(link: LinkTo): String - def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] - def warnNoLink: Boolean - - import global._ - import rootMirror.{RootPackage, EmptyPackage} - - private def isRoot(s: Symbol) = s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass - - def makeEntityLink(title: Inline, pos: Position, query: String, siteOpt: Option[Symbol]) = - new EntityLink(title) { lazy val link = memberLookup(pos, query, siteOpt) } - - private var showExplanation = true - private def explanation: String = - if (showExplanation) { - showExplanation = false - """ - |Quick crash course on using Scaladoc links - |========================================== - |Disambiguating terms and types: Prefix terms with '$' and types with '!' in case both names are in use: - | - [[scala.collection.immutable.List!.apply class List's apply method]] and - | - [[scala.collection.immutable.List$.apply object List's apply method]] - |Disambiguating overloaded members: If a term is overloaded, you can indicate the first part of its signature followed by *: - | - [[[scala.collection.immutable.List$.fill[A](Int)(⇒A):List[A]* Fill with a single parameter]]] - | - [[[scala.collection.immutable.List$.fill[A](Int,Int)(⇒A):List[List[A]]* Fill with a two parameters]]] - |Notes: - | - you can use any number of matching square brackets to avoid interference with the signature - | - you can use \\. to escape dots in prefixes (don't forget to use * at the end to match the signature!) - | - you can use \\# to escape hashes, otherwise they will be considered as delimiters, like dots.""".stripMargin - } else "" - - def memberLookup(pos: Position, query: String, siteOpt: Option[Symbol]): LinkTo = { - var members = breakMembers(query) - - // (1) First look in the root package, as most of the links are qualified - val fromRoot = lookupInRootPackage(pos, members) - - // (2) Or recursively go into each containing template. 
- val fromParents = siteOpt.fold(Stream.empty[Symbol]) { s => - Stream.iterate(s)(_.owner) - }.takeWhile (!isRoot(_)).map { - lookupInTemplate(pos, members, _) - } - - val syms = (fromRoot +: fromParents) find (!_.isEmpty) getOrElse Nil - - val links = syms flatMap { case (sym, site) => internalLink(sym, site) } match { - case Nil => - // (3) Look at external links - syms.flatMap { case (sym, owner) => - // reconstruct the original link - def linkName(sym: Symbol) = { - def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.isPackage) "$" else "") - val packageSuffix = if (sym.isPackage) ".package" else "" - - sym.ownerChain.reverse.filterNot(isRoot(_)).map(nameString(_)).mkString(".") + packageSuffix - } - - if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage) - findExternalLink(sym, linkName(sym)) - else if (owner.isClass || owner.isModule || owner.isTrait || owner.isPackage) - findExternalLink(sym, linkName(owner) + "@" + externalSignature(sym)) - else - None - } - case links => links - } - links match { - case Nil => - if (warnNoLink) - reporter.warning(pos, "Could not find any member to link for \"" + query + "\".") - // (4) if we still haven't found anything, create a tooltip - Tooltip(query) - case List(l) => l - case links => - val chosen = chooseLink(links) - def linkToString(link: LinkTo) = { - val chosenInfo = - if (link == chosen) " [chosen]" else "" - toString(link) + chosenInfo + "\n" - } - if (warnNoLink) { - val allLinks = links.map(linkToString).mkString - reporter.warning(pos, - s"""The link target \"$query\" is ambiguous. Several members fit the target: - |$allLinks - |$explanation""".stripMargin) - } - chosen - } - } - - private sealed trait SearchStrategy - private case object BothTypeAndTerm extends SearchStrategy - private case object OnlyType extends SearchStrategy - private case object OnlyTerm extends SearchStrategy - - private def lookupInRootPackage(pos: Position, members: List[String]) = - lookupInTemplate(pos, members, EmptyPackage) ::: lookupInTemplate(pos, members, RootPackage) - - private def lookupInTemplate(pos: Position, members: List[String], container: Symbol): List[(Symbol, Symbol)] = { - // Maintaining compatibility with previous links is a bit tricky here: - // we have a preference for term names for all terms except for the last, where we prefer a class: - // How to do this: - // - at each step we do a DFS search with the prefered strategy - // - if the search doesn't return any members, we backtrack on the last decision - // * we look for terms with the last member's name - // * we look for types with the same name, all the way up - val result = members match { - case Nil => Nil - case mbrName::Nil => - var syms = lookupInTemplate(pos, mbrName, container, OnlyType) map ((_, container)) - if (syms.isEmpty) - syms = lookupInTemplate(pos, mbrName, container, OnlyTerm) map ((_, container)) - syms - - case tplName::rest => - def completeSearch(syms: List[Symbol]) = - syms flatMap (lookupInTemplate(pos, rest, _)) - - completeSearch(lookupInTemplate(pos, tplName, container, OnlyTerm)) match { - case Nil => completeSearch(lookupInTemplate(pos, tplName, container, OnlyType)) - case syms => syms - } - } - //println("lookupInTemplate(" + members + ", " + container + ") => " + result) - result - } - - private def lookupInTemplate(pos: Position, member: String, container: Symbol, strategy: SearchStrategy): List[Symbol] = { - val name = member.stripSuffix("$").stripSuffix("!").stripSuffix("*") - def signatureMatch(sym: 
Symbol): Boolean = externalSignature(sym).startsWith(name) - - // We need to cleanup the bogus classes created by the .class file parser. For example, [[scala.Predef]] resolves - // to (bogus) class scala.Predef loaded by the class loader -- which we need to eliminate by looking at the info - // and removing NoType classes - def cleanupBogusClasses(syms: List[Symbol]) = { syms.filter(_.info != NoType) } - - def syms(name: Name) = container.info.nonPrivateMember(name.encodedName).alternatives - def termSyms = cleanupBogusClasses(syms(newTermName(name))) - def typeSyms = cleanupBogusClasses(syms(newTypeName(name))) - - val result = if (member.endsWith("$")) - termSyms - else if (member.endsWith("!")) - typeSyms - else if (member.endsWith("*")) - cleanupBogusClasses(container.info.nonPrivateDecls) filter signatureMatch - else - strategy match { - case BothTypeAndTerm => termSyms ::: typeSyms - case OnlyType => typeSyms - case OnlyTerm => termSyms - } - - //println("lookupInTemplate(" + member + ", " + container + ") => " + result) - result - } - - private def breakMembers(query: String): List[String] = { - // Okay, how does this work? Well: you split on . but you don't want to split on \. => thus the ugly regex - // query.split((?<=[^\\\\])\\.).map(_.replaceAll("\\.")) - // The same code, just faster: - var members = List[String]() - var index = 0 - var last_index = 0 - val length = query.length - while (index < length) { - if ((query.charAt(index) == '.' || query.charAt(index) == '#') && - ((index == 0) || (query.charAt(index-1) != '\\'))) { - - val member = query.substring(last_index, index).replaceAll("\\\\([#\\.])", "$1") - // we want to allow javadoc-style links [[#member]] -- which requires us to remove empty members from the first - // elemnt in the list - if ((member != "") || (!members.isEmpty)) - members ::= member - last_index = index + 1 - } - index += 1 - } - if (last_index < length) - members ::= query.substring(last_index, length).replaceAll("\\\\\\.", ".") - members.reverse - } - - def externalSignature(sym: Symbol) = { - sym.info // force it, otherwise we see lazy types - (sym.nameString + sym.signatureString).replaceAll("\\s", "") - } -} diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala deleted file mode 100755 index 2a07547de2..0000000000 --- a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala +++ /dev/null @@ -1,89 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Manohar Jonnalagedda - */ - -package scala.tools.nsc -package doc -package base -package comment - -import scala.collection._ - -/** A body of text. A comment has a single body, which is composed of - * at least one block. Inside every body is exactly one summary (see - * [[scala.tools.nsc.doc.model.comment.Summary]]). */ -final case class Body(blocks: Seq[Block]) { - - /** The summary text of the comment body. 
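
A Body is just a tree of the block and inline nodes defined below, so the summary extraction can be exercised by hand; a small standalone sketch (not part of this patch, assuming the scala.tools.nsc.doc.base.comment package is on the classpath):

import scala.tools.nsc.doc.base.comment._

object BodySummaryDemo {
  def main(args: Array[String]): Unit = {
    // The comment parser wraps the first sentence in a Summary node; we do the same by hand.
    val body = Body(Seq(
      Paragraph(Chain(List(
        Summary(Text("Searches this sorted sequence for an element.")),
        Text(" Uses binary search on indexed sequences.")))),
      Code("coll.search(3)")
    ))
    println(body.summary)  // Some(Text(Searches this sorted sequence for an element.))
  }
}
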
*/ - lazy val summary: Option[Inline] = { - def summaryInBlock(block: Block): Seq[Inline] = block match { - case Title(text, _) => summaryInInline(text) - case Paragraph(text) => summaryInInline(text) - case UnorderedList(items) => items flatMap summaryInBlock - case OrderedList(items, _) => items flatMap summaryInBlock - case DefinitionList(items) => items.values.toSeq flatMap summaryInBlock - case _ => Nil - } - def summaryInInline(text: Inline): Seq[Inline] = text match { - case Summary(text) => List(text) - case Chain(items) => items flatMap summaryInInline - case Italic(text) => summaryInInline(text) - case Bold(text) => summaryInInline(text) - case Underline(text) => summaryInInline(text) - case Superscript(text) => summaryInInline(text) - case Subscript(text) => summaryInInline(text) - case Link(_, title) => summaryInInline(title) - case _ => Nil - } - (blocks flatMap { summaryInBlock(_) }).toList match { - case Nil => None - case inline :: Nil => Some(inline) - case inlines => Some(Chain(inlines)) - } - } -} - -/** A block-level element of text, such as a paragraph or code block. */ -sealed abstract class Block - -final case class Title(text: Inline, level: Int) extends Block -final case class Paragraph(text: Inline) extends Block -final case class Code(data: String) extends Block -final case class UnorderedList(items: Seq[Block]) extends Block -final case class OrderedList(items: Seq[Block], style: String) extends Block -final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block -final case class HorizontalRule() extends Block - -/** An section of text inside a block, possibly with formatting. */ -sealed abstract class Inline - -final case class Chain(items: Seq[Inline]) extends Inline -final case class Italic(text: Inline) extends Inline -final case class Bold(text: Inline) extends Inline -final case class Underline(text: Inline) extends Inline -final case class Superscript(text: Inline) extends Inline -final case class Subscript(text: Inline) extends Inline -final case class Link(target: String, title: Inline) extends Inline -final case class Monospace(text: Inline) extends Inline -final case class Text(text: String) extends Inline -abstract class EntityLink(val title: Inline) extends Inline { def link: LinkTo } -object EntityLink { - def apply(title: Inline, linkTo: LinkTo) = new EntityLink(title) { def link: LinkTo = linkTo } - def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link)) -} -final case class HtmlTag(data: String) extends Inline { - def canClose(open: HtmlTag) = { - open.data.stripPrefix("<") == data.stripPrefix(" - list foreach scan - case tag: HtmlTag => { - if (stack.length > 0 && tag.canClose(stack.last)) { - stack.remove(stack.length-1) - } else { - tag.close match { - case Some(t) => - stack += t - case None => - ; - } - } - } - case _ => - ; - } - } - scan(inline) - Chain(List(inline) ++ stack.reverse) - } - - /** A shorter version of the body. Usually, this is the first sentence of the body. */ - def short: Inline = { - body.summary match { - case Some(s) => - closeHtmlTags(s) - case _ => - Text("") - } - } - - /** A list of authors. The empty list is used when no author is defined. */ - def authors: List[Body] - - /** A list of other resources to see, including links to other entities or - * to external documentation. The empty list is used when no other resource - * is mentionned. */ - def see: List[Body] - - /** A description of the result of the entity. 
Typically, this provides additional - * information on the domain of the result, contractual post-conditions, etc. */ - def result: Option[Body] - - /** A map of exceptions that the entity can throw when accessed, and a - * description of what they mean. */ - def throws: Map[String, Body] - - /** A map of value parameters, and a description of what they are. Typically, - * this provides additional information on the domain of the parameters, - * contractual pre-conditions, etc. */ - def valueParams: Map[String, Body] - - /** A map of type parameters, and a description of what they are. Typically, - * this provides additional information on the domain of the parameters. */ - def typeParams: Map[String, Body] - - /** The version number of the entity. There is no formatting or further - * meaning attached to this value. */ - def version: Option[Body] - - /** A version number of a containing entity where this member-entity was introduced. */ - def since: Option[Body] - - /** An annotation as to expected changes on this entity. */ - def todo: List[Body] - - /** Whether the entity is deprecated. Using the `@deprecated` Scala attribute - * is prefereable to using this Scaladoc tag. */ - def deprecated: Option[Body] - - /** An additional note concerning the contract of the entity. */ - def note: List[Body] - - /** A usage example related to the entity. */ - def example: List[Body] - - /** A description for the primary constructor */ - def constructor: Option[Body] - - /** A set of diagram directives for the inheritance diagram */ - def inheritDiagram: List[String] - - /** A set of diagram directives for the content diagram */ - def contentDiagram: List[String] - - /** The group this member is part of */ - def group: Option[String] - - /** Member group descriptions */ - def groupDesc: Map[String,Body] - - /** Member group names (overriding the short tag) */ - def groupNames: Map[String,String] - - /** Member group priorities */ - def groupPrio: Map[String,Int] - - override def toString = - body.toString + "\n" + - (authors map ("@author " + _.toString)).mkString("\n") + - (result map ("@return " + _.toString)).mkString("\n") + - (version map ("@version " + _.toString)).mkString -} diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala b/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala deleted file mode 100644 index 42b56aa927..0000000000 --- a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala +++ /dev/null @@ -1,30 +0,0 @@ -package scala.tools.nsc.doc -package doclet - -import scala.collection._ - -/** Custom Scaladoc generators must implement the `Generator` class. A custom generator can be selected in Scaladoc - * using the `-doc-generator` command line option. - * The `Generator` class does not provide data about the documented code. A number of data provider traits can be used - * to configure what data is actually available to the generator: - * - A `Universer` provides a `Universe` data structure representing the interfaces and comments of the documented - * program. - * - An `Indexer` provides precalculated indexing information about a universe. - * To implement this class only requires defining method `generateImpl`. */ -abstract class Generator { - - /** A series of tests that must be true before generation can be done. This is used by data provider traits to - * confirm that they have been correctly initialised before allowing generation to proceed. 
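
Putting these doclet traits together, a user-provided generator only needs to mix them in and define generateImpl; a hedged sketch (not part of this patch; the package and class names are made up, and the use of the model API is an illustrative assumption):

package mydocs

import scala.tools.nsc.doc.doclet.{ Generator, Indexer, Universer }
import scala.tools.nsc.doc.model.DocTemplateEntity

class ListingDoclet extends Generator with Universer with Indexer {
  // Runs only after the Universer/Indexer checks have confirmed universe and index are set.
  def generateImpl() {
    val names = universe.rootPackage.templates collect { case d: DocTemplateEntity => d.qualifiedName }
    names.sorted foreach println
  }
}
// selected on the command line with: scaladoc -doc-generator mydocs.ListingDoclet <source files>
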
*/ - protected val checks: mutable.Set[()=>Boolean] = - mutable.Set.empty[()=>Boolean] - - /** Outputs documentation (as a side effect). */ - def generate(): Unit = { - assert(checks forall { check => check() }) - generateImpl() - } - - /** Outputs documentation (as a side effect). This method is called only if all `checks` are true. */ - protected def generateImpl(): Unit - -} diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala b/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala deleted file mode 100644 index 0cdd47182f..0000000000 --- a/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala +++ /dev/null @@ -1,21 +0,0 @@ -package scala.tools.nsc -package doc -package doclet - -/** A `Generator` may implement the `Indexer` trait to gain access to pre-calculated indexing information */ -trait Indexer extends Generator with Universer { - - protected var indexField: Index = null - - def index: Index = indexField - - def setIndex(i: Index) { - assert(indexField == null) - indexField = i - } - - checks += { () => - indexField != null - } - -} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala b/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala deleted file mode 100644 index ee8b7809e5..0000000000 --- a/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala +++ /dev/null @@ -1,21 +0,0 @@ -package scala.tools.nsc -package doc -package doclet - -/** A `Generator` may implement the `Universer` trait to gain access to a model of the documented program */ -trait Universer extends Generator { - - protected var universeField: Universe = null - - def universe: Universe = universeField - - def setUniverse(u: Universe) { - assert(universeField == null) - universeField = u - } - - checks += { () => - universeField != null - } - -} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala b/src/compiler/scala/tools/nsc/doc/html/Doclet.scala deleted file mode 100644 index 21c5f6bb67..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala +++ /dev/null @@ -1,19 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda - */ - -package scala.tools.nsc.doc -package html - -import doclet._ - -/** The default doclet used by the scaladoc command line tool - * when no user-provided doclet is provided. */ -class Doclet extends Generator with Universer with Indexer { - - def generateImpl() { - new html.HtmlFactory(universe, index).generate() - } - -} diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala deleted file mode 100644 index d721a96ad7..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala +++ /dev/null @@ -1,152 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda - */ - -package scala.tools.nsc -package doc -package html - -import model._ -import java.io.{ File => JFile } -import io.{ Streamable, Directory } -import scala.collection._ -import page.diagram._ - -import html.page.diagram.DiagramGenerator - -/** A class that can generate Scaladoc sites to some fixed root folder. - * @author David Bernard - * @author Gilles Dubochet */ -class HtmlFactory(val universe: doc.Universe, index: doc.Index) { - - /** The character encoding to be used for generated Scaladoc sites. - * This value is currently always UTF-8. 
*/ - def encoding: String = "UTF-8" - - def siteRoot: JFile = new JFile(universe.settings.outdir.value) - - def libResources = List( - "index.js", - "jquery-ui.js", - "jquery.js", - "jquery.layout.js", - "scheduler.js", - "diagrams.js", - "template.js", - "tools.tooltip.js", - "modernizr.custom.js", - - "index.css", - "ref-index.css", - "template.css", - "diagrams.css", - - "class.png", - "class_big.png", - "class_diagram.png", - "object.png", - "object_big.png", - "object_diagram.png", - "package.png", - "package_big.png", - "trait.png", - "trait_big.png", - "trait_diagram.png", - "type.png", - "type_big.png", - "type_diagram.png", - - "class_to_object_big.png", - "object_to_class_big.png", - "trait_to_object_big.png", - "object_to_trait_big.png", - "type_to_object_big.png", - "object_to_type_big.png", - - "arrow-down.png", - "arrow-right.png", - "filter_box_left.png", - "filter_box_left2.gif", - "filter_box_right.png", - "filterbg.gif", - "filterboxbarbg.gif", - "filterboxbg.gif", - - "constructorsbg.gif", - "defbg-blue.gif", - "defbg-green.gif", - "filterboxbarbg.png", - "fullcommenttopbg.gif", - "ownderbg2.gif", - "ownerbg.gif", - "ownerbg2.gif", - "packagesbg.gif", - "signaturebg.gif", - "signaturebg2.gif", - "typebg.gif", - "conversionbg.gif", - "valuemembersbg.gif", - - "navigation-li-a.png", - "navigation-li.png", - "remove.png", - "selected-right.png", - "selected.png", - "selected2-right.png", - "selected2.png", - "selected-right-implicits.png", - "selected-implicits.png", - "unselected.png" - ) - - /** Generates the Scaladoc site for a model into the site root. - * A scaladoc site is a set of HTML and related files - * that document a model extracted from a compiler run. - */ - def generate() { - - def copyResource(subPath: String) { - val bytes = new Streamable.Bytes { - val p = "/scala/tools/nsc/doc/html/resource/" + subPath - val inputStream = getClass.getResourceAsStream(p) - assert(inputStream != null, p) - }.toByteArray() - val dest = Directory(siteRoot) / subPath - dest.parent.createDirectory() - val out = dest.toFile.bufferedOutput() - try out.write(bytes, 0, bytes.length) - finally out.close() - } - - DiagramGenerator.initialize(universe.settings) - - libResources foreach (s => copyResource("lib/" + s)) - - new page.Index(universe, index) writeFor this - new page.IndexScript(universe, index) writeFor this - - writeTemplates(_ writeFor this) - - for (letter <- index.firstLetterIndex) { - new html.page.ReferenceIndex(letter._1, index, universe) writeFor this - } - - DiagramGenerator.cleanup() - } - - def writeTemplates(writeForThis: HtmlPage => Unit) { - val written = mutable.HashSet.empty[DocTemplateEntity] - val diagramGenerator: DiagramGenerator = new DotDiagramGenerator(universe.settings) - - def writeTemplate(tpl: DocTemplateEntity) { - if (!(written contains tpl)) { - writeForThis(new page.Template(universe, diagramGenerator, tpl)) - written += tpl - tpl.templates collect { case d: DocTemplateEntity => d } map writeTemplate - } - } - - writeTemplate(universe.rootPackage) - } -} diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala deleted file mode 100644 index 229e26d699..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala +++ /dev/null @@ -1,224 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda - */ - -package scala.tools.nsc -package doc -package html - -import base._ -import base.comment._ -import model._ - 
-import scala.xml.NodeSeq -import scala.xml.dtd.{DocType, PublicID} -import scala.collection._ -import java.io.Writer - -/** An html page that is part of a Scaladoc site. - * @author David Bernard - * @author Gilles Dubochet */ -abstract class HtmlPage extends Page { thisPage => - /** The title of this page. */ - protected def title: String - - /** The page description */ - protected def description: String = - // unless overwritten, will display the title in a spaced format, keeping - and . - title.replaceAll("[^a-zA-Z0-9\\.\\-]+", " ").replaceAll("\\-+", " - ").replaceAll(" +", " ") - - /** The page keywords */ - protected def keywords: String = - // unless overwritten, same as description, minus the " - " - description.replaceAll(" - ", " ") - - /** Additional header elements (links, scripts, meta tags, etc.) required for this page. */ - protected def headers: NodeSeq - - /** The body of this page. */ - def body: NodeSeq - - def writeFor(site: HtmlFactory) { - val doctype = - DocType("html", PublicID("-//W3C//DTD XHTML 1.1//EN", "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"), Nil) - val html = - - - { title } - - - \n") - w.write(doctype.toString + "\n") - w.write(xml.Xhtml.toXhtml(html)) - } - - if (site.universe.settings.docRawOutput.value) - writeFile(site, ".raw") { - // we're only interested in the body, as this will go into the diff - _.write(body.text) - } - - //XML.save(pageFile.getPath, html, site.encoding, xmlDecl = false, doctype = doctype) - } - - /** Transforms an optional comment into an styled HTML tree representing its body if it is defined, or into an empty - * node sequence if it is not. */ - def commentToHtml(comment: Option[Comment]): NodeSeq = - (comment map (commentToHtml(_))) getOrElse NodeSeq.Empty - - /** Transforms a comment into an styled HTML tree representing its body. */ - def commentToHtml(comment: Comment): NodeSeq = - bodyToHtml(comment.body) - - def bodyToHtml(body: Body): NodeSeq = - body.blocks flatMap (blockToHtml(_)) - - def blockToHtml(block: Block): NodeSeq = block match { - case Title(in, 1) =>

<h3>{ inlineToHtml(in) }</h3>
-    case Title(in, 2) => <h4>{ inlineToHtml(in) }</h4>
-    case Title(in, 3) => <h5>{ inlineToHtml(in) }</h5>
-    case Title(in, _) => <h6>{ inlineToHtml(in) }</h6>
-    case Paragraph(in) => <p>{ inlineToHtml(in) }</p>
-    case Code(data) =>
-      <pre>{ SyntaxHigh(data) }</pre> //<pre>{ scala.xml.Text(data) }</pre>
-    case UnorderedList(items) =>
-      <ul>{ listItemsToHtml(items) }</ul>
-    case OrderedList(items, listStyle) =>
-      <ol class={ listStyle }>{ listItemsToHtml(items) }</ol>
-    case DefinitionList(items) =>
-      <dl>{items map { case (t, d) => <dt>{ inlineToHtml(t) }</dt><dd>{ blockToHtml(d) }</dd> } }</dl>
-    case HorizontalRule() =>
-      <hr/>
-  }
-
-  def listItemsToHtml(items: Seq[Block]) =
-    items.foldLeft(xml.NodeSeq.Empty){ (xmlList, item) =>
-      item match {
-        case OrderedList(_, _) | UnorderedList(_) => // html requires sub ULs to be put into the last LI
-          xmlList.init ++ <li>{ xmlList.last.child ++ blockToHtml(item) }</li>
-        case Paragraph(inline) =>
-          xmlList :+ <li>{ inlineToHtml(inline) }</li> // LIs are blocks, no need to use Ps
-        case block =>
-          xmlList :+ <li>{ blockToHtml(block) }</li>
  10. - } - } - - def inlineToHtml(inl: Inline): NodeSeq = inl match { - case Chain(items) => items flatMap (inlineToHtml(_)) - case Italic(in) => { inlineToHtml(in) } - case Bold(in) => { inlineToHtml(in) } - case Underline(in) => { inlineToHtml(in) } - case Superscript(in) => { inlineToHtml(in) } - case Subscript(in) => { inlineToHtml(in) } - case Link(raw, title) => { inlineToHtml(title) } - case Monospace(in) => { inlineToHtml(in) } - case Text(text) => scala.xml.Text(text) - case Summary(in) => inlineToHtml(in) - case HtmlTag(tag) => scala.xml.Unparsed(tag) - case EntityLink(target, link) => linkToHtml(target, link, hasLinks = true) - } - - def linkToHtml(text: Inline, link: LinkTo, hasLinks: Boolean) = link match { - case LinkToTpl(dtpl: TemplateEntity) => - if (hasLinks) - { inlineToHtml(text) } - else - { inlineToHtml(text) } - case LinkToMember(mbr: MemberEntity, inTpl: TemplateEntity) => - if (hasLinks) - { inlineToHtml(text) } - else - { inlineToHtml(text) } - case Tooltip(tooltip) => - { inlineToHtml(text) } - case LinkToExternal(name, url) => - { inlineToHtml(text) } - case _ => - inlineToHtml(text) - } - - def typeToHtml(tpes: List[model.TypeEntity], hasLinks: Boolean): NodeSeq = tpes match { - case Nil => - NodeSeq.Empty - case List(tpe) => - typeToHtml(tpe, hasLinks) - case tpe :: rest => - typeToHtml(tpe, hasLinks) ++ scala.xml.Text(" with ") ++ typeToHtml(rest, hasLinks) - } - - def typeToHtml(tpe: model.TypeEntity, hasLinks: Boolean): NodeSeq = { - val string = tpe.name - def toLinksOut(inPos: Int, starts: List[Int]): NodeSeq = { - if (starts.isEmpty && (inPos == string.length)) - NodeSeq.Empty - else if (starts.isEmpty) - scala.xml.Text(string.slice(inPos, string.length)) - else if (inPos == starts.head) - toLinksIn(inPos, starts) - else { - scala.xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts) - } - } - def toLinksIn(inPos: Int, starts: List[Int]): NodeSeq = { - val (link, width) = tpe.refEntity(inPos) - val text = comment.Text(string.slice(inPos, inPos + width)) - linkToHtml(text, link, hasLinks) ++ toLinksOut(inPos + width, starts.tail) - } - if (hasLinks) - toLinksOut(0, tpe.refEntity.keySet.toList) - else - scala.xml.Text(string) - } - - def typesToHtml(tpess: List[model.TypeEntity], hasLinks: Boolean, sep: NodeSeq): NodeSeq = tpess match { - case Nil => NodeSeq.Empty - case tpe :: Nil => typeToHtml(tpe, hasLinks) - case tpe :: tpes => typeToHtml(tpe, hasLinks) ++ sep ++ typesToHtml(tpes, hasLinks, sep) - } - - def hasPage(e: DocTemplateEntity) = { - e.isPackage || e.isTrait || e.isClass || e.isObject || e.isCaseClass - } - - /** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */ - def templateToHtml(tpl: TemplateEntity, name: String = null) = tpl match { - case dTpl: DocTemplateEntity => - if (hasPage(dTpl)) { - { if (name eq null) dTpl.name else name } - } else { - scala.xml.Text(if (name eq null) dTpl.name else name) - } - case ndTpl: NoDocTemplate => - scala.xml.Text(if (name eq null) ndTpl.name else name) - } - - /** Returns the HTML code that represents the templates in `tpls` as a list of hyperlinked names. 
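Since `toLinksOut`/`toLinksIn` above are a little dense, a hypothetical walk-through (the type, offsets and links are made up): for a `TypeEntity` whose `name` is `Option[Int]` and whose `refEntity` map (offset -> (link, length)) contains `0 -> (optionLink, 6)` and `7 -> (intLink, 3)`, the traversal alternates between linked and plain segments:

{{{
// toLinksOut(0, List(0, 7))
//   toLinksIn(0, _)         emits linkToHtml("Option", optionLink)   // slice(0, 6)
//   toLinksOut(6, List(7))  emits Text("[")                          // slice(6, 7)
//   toLinksIn(7, _)         emits linkToHtml("Int", intLink)         // slice(7, 10)
//   toLinksOut(10, Nil)     emits Text("]")                          // slice(10, 11)
// Only the referenced names become hyperlinks; the surrounding punctuation stays plain text.
}}}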
*/ - def templatesToHtml(tplss: List[TemplateEntity], sep: NodeSeq): NodeSeq = tplss match { - case Nil => NodeSeq.Empty - case tpl :: Nil => templateToHtml(tpl) - case tpl :: tpls => templateToHtml(tpl) ++ sep ++ templatesToHtml(tpls, sep) - } - - /** Returns the _big image name corresponding to the DocTemplate Entity (upper left icon) */ - def docEntityKindToBigImage(ety: DocTemplateEntity) = - if (ety.isTrait && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "trait_to_object_big.png" - else if (ety.isTrait) "trait_big.png" - else if (ety.isClass && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "class_to_object_big.png" - else if (ety.isClass) "class_big.png" - else if ((ety.isAbstractType || ety.isAliasType) && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "type_to_object_big.png" - else if ((ety.isAbstractType || ety.isAliasType)) "type_big.png" - else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isClass) "object_to_class_big.png" - else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isTrait) "object_to_trait_big.png" - else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && (ety.companion.get.isAbstractType || ety.companion.get.isAliasType)) "object_to_trait_big.png" - else if (ety.isObject) "object_big.png" - else if (ety.isPackage) "package_big.png" - else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not -} diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/compiler/scala/tools/nsc/doc/html/Page.scala deleted file mode 100644 index 91939cf3de..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/Page.scala +++ /dev/null @@ -1,102 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda - */ - -package scala.tools.nsc.doc.html - -import scala.tools.nsc.doc.model._ -import java.io.{FileOutputStream, File} -import scala.reflect.NameTransformer -import java.nio.channels.Channels -import java.io.Writer - -abstract class Page { - thisPage => - - /** The path of this page, relative to the API site. `path.tail` is a list - * of folder names leading to this page (from closest package to - * one-above-root package), `path.head` is the file name of this page. - * Note that `path` has a length of at least one. */ - def path: List[String] - - def absoluteLinkTo(path: List[String]) = path.reverse.mkString("/") - - def createFileOutputStream(site: HtmlFactory, suffix: String = "") = { - val file = new File(site.siteRoot, absoluteLinkTo(thisPage.path) + suffix) - val folder = file.getParentFile - if (! folder.exists) { - folder.mkdirs - } - new FileOutputStream(file.getPath) - } - - def writeFile(site: HtmlFactory, suffix: String = "")(fn: Writer => Unit) = { - val fos = createFileOutputStream(site, suffix) - val w = Channels.newWriter(fos.getChannel, site.encoding) - try { - fn(w) - } - finally { - w.close() - fos.close() - } - } - - /** Writes this page as a file. The file's location is relative to the - * generator's site root, and the encoding is also defined by the generator. 
- * @param site The generator that is writing this page. */ - def writeFor(site: HtmlFactory): Unit - - def kindToString(mbr: MemberEntity) = - mbr match { - case c: Class => if (c.isCaseClass) "case class" else "class" - case _: Trait => "trait" - case _: Package => "package" - case _: Object => "object" - case _: AbstractType => "type" - case _: AliasType => "type" - case _: Constructor => "new" - case v: Def => "def" - case v: Val if (v.isLazyVal) => "lazy val" - case v: Val if (v.isVal) => "val" - case v: Val if (v.isVar) => "var" - case _ => sys.error("Cannot create kind for: " + mbr + " of class " + mbr.getClass) - } - - def templateToPath(tpl: TemplateEntity): List[String] = { - def doName(tpl: TemplateEntity): String = - (if (tpl.inPackageObject) "package$$" else "") + NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "") - def downPacks(pack: Package): List[String] = - if (pack.isRootPackage) Nil else (doName(pack) :: downPacks(pack.inTemplate)) - def downInner(nme: String, tpl: TemplateEntity): (String, Package) = { - tpl.inTemplate match { - case inPkg: Package => (nme + ".html", inPkg) - case inTpl => downInner(doName(inTpl) + "$" + nme, inTpl) - } - } - val (file, pack) = - tpl match { - case p: Package => ("package.html", p) - case _ => downInner(doName(tpl), tpl) - } - file :: downPacks(pack) - } - - /** A relative link from this page to some destination class entity. - * @param destClass The class or object entity that the link will point to. */ - def relativeLinkTo(destClass: TemplateEntity): String = - relativeLinkTo(templateToPath(destClass)) - - /** A relative link from this page to some destination path. - * @param destPath The path that the link will point to. */ - def relativeLinkTo(destPath: List[String]): String = { - def relativize(from: List[String], to: List[String]): List[String] = (from, to) match { - case (f :: fs, t :: ts) if (f == t) => // both paths are identical to that point - relativize(fs, ts) - case (fss, tss) => - List.fill(fss.length - 1)("..") ::: tss - } - relativize(thisPage.path.reverse, destPath.reverse).mkString("/") - } -} diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala deleted file mode 100644 index 5781e680dd..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala +++ /dev/null @@ -1,286 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2010-2013 LAMP/EPFL - * @author Stephane Micheloud - */ - -package scala.tools.nsc.doc.html - -import scala.xml.NodeSeq - -/** Highlight the syntax of Scala code appearing in a `{{{` wiki block - * (see method `HtmlPage.blockToHtml`). 
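To make the path and link scheme concrete, a small worked example (the entities are hypothetical; the values follow `templateToPath` and `relativeLinkTo` as defined above):

{{{
// templateToPath puts the file name first, then the packages up to the root;
// objects get a trailing '$' (e.g. "List$.html") and packages map to "package.html".
val seqPage  = List("Seq.html", "collection", "scala")                 // scala.collection.Seq
val listPage = List("List.html", "immutable", "collection", "scala")   // scala.collection.immutable.List

// relativeLinkTo reverses both paths and drops the common prefix ("scala", "collection"):
//  - from the Seq page only its file name is left, so no ".." is needed:
//      link to listPage == "immutable/List.html"
//  - from the List page the folder "immutable" is left, so one ".." is emitted:
//      link to seqPage  == "../Seq.html"
}}}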
- * - * @author Stephane Micheloud - * @version 1.0 - */ -private[html] object SyntaxHigh { - - /** Reserved words, sorted alphabetically - * (see [[scala.reflect.internal.StdNames]]) */ - val reserved = Array( - "abstract", "case", "catch", "class", "def", - "do", "else", "extends", "false", "final", "finally", - "for", "if", "implicit", "import", "lazy", "match", - "new", "null", "object", "override", "package", - "private", "protected", "return", "sealed", "super", - "this", "throw", "trait", "true", "try", "type", - "val", "var", "while", "with", "yield") - - /** Annotations, sorted alphabetically */ - val annotations = Array( - "BeanProperty", "SerialVersionUID", - "beanGetter", "beanSetter", "bridge", - "deprecated", "deprecatedName", "deprecatedOverriding", "deprecatedInheritance", - "elidable", "field", "getter", "inline", - "migration", "native", "noinline", "param", - "remote", "setter", "specialized", "strictfp", "switch", - "tailrec", "throws", "transient", - "unchecked", "uncheckedStable", "uncheckedVariance", - "varargs", "volatile") - - /** Standard library classes/objects, sorted alphabetically */ - val standards = Array ( - "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Array", - "Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest", - "Console", "Double", "Enumeration", "Float", "Function", "Int", - "List", "Long", "Manifest", "Map", - "NoManifest", "None", "Nothing", "Null", "Object", "Option", "OptManifest", - "Pair", "Predef", - "Seq", "Set", "Short", "Some", "String", "Symbol", - "Triple", "TypeTag", "Unit") - - def apply(data: String): NodeSeq = { - val buf = data.getBytes - val out = new StringBuilder - - def compare(offset: Int, key: String): Int = { - var i = offset - var j = 0 - val l = key.length - while (i < buf.length && j < l) { - val bch = buf(i).toChar - val kch = key charAt j - if (bch < kch) return -1 - else if (bch > kch) return 1 - i += 1 - j += 1 - } - if (j < l) -1 - else if (i < buf.length && - ('A' <= buf(i) && buf(i) <= 'Z' || - 'a' <= buf(i) && buf(i) <= 'z' || - '0' <= buf(i) && buf(i) <= '9' || - buf(i) == '_')) 1 - else 0 - } - - def lookup(a: Array[String], i: Int): Int = { - var lo = 0 - var hi = a.length - 1 - while (lo <= hi) { - val m = (hi + lo) / 2 - val d = compare(i, a(m)) - if (d < 0) hi = m - 1 - else if (d > 0) lo = m + 1 - else return m - } - -1 - } - - def comment(i: Int): String = { - val out = new StringBuilder("/") - def line(i: Int): Int = - if (i == buf.length || buf(i) == '\n') i - else { - out append buf(i).toChar - line(i+1) - } - var level = 0 - def multiline(i: Int, star: Boolean): Int = { - if (i == buf.length) return i - val ch = buf(i).toChar - out append ch - ch match { - case '*' => - if (star) level += 1 - multiline(i+1, !star) - case '/' => - if (star) { - if (level > 0) level -= 1 - if (level == 0) i else multiline(i+1, star = true) - } else - multiline(i+1, star = false) - case _ => - multiline(i+1, star = false) - } - } - if (buf(i) == '/') line(i) else multiline(i, star = true) - out.toString - } - - /* e.g. 
`val endOfLine = '\u000A'`*/ - def charlit(j: Int): String = { - val out = new StringBuilder("'") - def charlit0(i: Int, bslash: Boolean): Int = { - if (i == buf.length) i - else if (i > j+6) { out setLength 0; j } - else { - val ch = buf(i).toChar - out append ch - ch match { - case '\\' => - charlit0(i+1, bslash = true) - case '\'' if !bslash => - i - case _ => - if (bslash && '0' <= ch && ch <= '9') charlit0(i+1, bslash = true) - else charlit0(i+1, bslash = false) - } - } - } - charlit0(j, bslash = false) - out.toString - } - - def strlit(i: Int): String = { - val out = new StringBuilder("\"") - def strlit0(i: Int, bslash: Boolean): Int = { - if (i == buf.length) return i - val ch = buf(i).toChar - out append ch - ch match { - case '\\' => - strlit0(i+1, bslash = true) - case '"' if !bslash => - i - case _ => - strlit0(i+1, bslash = false) - } - } - strlit0(i, bslash = false) - out.toString - } - - def numlit(i: Int): String = { - val out = new StringBuilder - def intg(i: Int): Int = { - if (i == buf.length) return i - val ch = buf(i).toChar - ch match { - case '.' => - out append ch - frac(i+1) - case _ => - if (Character.isDigit(ch)) { - out append ch - intg(i+1) - } else i - } - } - def frac(i: Int): Int = { - if (i == buf.length) return i - val ch = buf(i).toChar - ch match { - case 'e' | 'E' => - out append ch - expo(i+1, signed = false) - case _ => - if (Character.isDigit(ch)) { - out append ch - frac(i+1) - } else i - } - } - def expo(i: Int, signed: Boolean): Int = { - if (i == buf.length) return i - val ch = buf(i).toChar - ch match { - case '+' | '-' if !signed => - out append ch - expo(i+1, signed = true) - case _ => - if (Character.isDigit(ch)) { - out append ch - expo(i+1, signed) - } else i - } - } - intg(i) - out.toString - } - - def parse(pre: String, i: Int): Int = { - out append pre - if (i == buf.length) return i - buf(i) match { - case '\n' => - parse("\n", i+1) - case ' ' => - parse(" ", i+1) - case '&' => - parse("&", i+1) - case '<' if i+1 < buf.length => - val ch = buf(i+1).toChar - if (ch == '-' || ch == ':' || ch == '%') - parse("<"+ch+"", i+2) - else - parse("<", i+1) - case '>' => - if (i+1 < buf.length && buf(i+1) == ':') - parse(">:", i+2) - else - parse(">", i+1) - case '=' => - if (i+1 < buf.length && buf(i+1) == '>') - parse("=>", i+2) - else - parse(buf(i).toChar.toString, i+1) - case '/' => - if (i+1 < buf.length && (buf(i+1) == '/' || buf(i+1) == '*')) { - val c = comment(i+1) - parse(""+c+"", i+c.length) - } else - parse(buf(i).toChar.toString, i+1) - case '\'' => - val s = charlit(i+1) - if (s.length > 0) - parse(""+s+"", i+s.length) - else - parse(buf(i).toChar.toString, i+1) - case '"' => - val s = strlit(i+1) - parse(""+s+"", i+s.length) - case '@' => - val k = lookup(annotations, i+1) - if (k >= 0) - parse("@"+annotations(k)+"", i+annotations(k).length+1) - else - parse(buf(i).toChar.toString, i+1) - case _ => - if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1).toChar))) { - if (Character.isDigit(buf(i)) || - (buf(i) == '.' 
&& i + 1 < buf.length && Character.isDigit(buf(i+1)))) { - val s = numlit(i) - parse(""+s+"", i+s.length) - } else { - val k = lookup(reserved, i) - if (k >= 0) - parse(""+reserved(k)+"", i+reserved(k).length) - else { - val k = lookup(standards, i) - if (k >= 0) - parse(""+standards(k)+"", i+standards(k).length) - else - parse(buf(i).toChar.toString, i+1) - } - } - } else - parse(buf(i).toChar.toString, i+1) - } - i - } - - parse("", 0) - scala.xml.Unparsed(out.toString) - } -} diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala deleted file mode 100644 index c034647320..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala +++ /dev/null @@ -1,133 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda - */ - -package scala.tools.nsc -package doc -package html -package page - -import model._ -import scala.collection._ -import scala.xml._ - -class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage { - - def path = List("index.html") - - def title = { - val s = universe.settings - ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) + - ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" ) - } - - val headers = - - - - - - - - - - val body = - -
    - { browser } -
    - ':""),e._keyEvent=!1,B},_generateMonthYearHeader:function(e,t,n,r,i,s,o,u){var a=this._get(e,"changeMonth"),f=this._get(e,"changeYear"),l=this._get(e,"showMonthAfterYear"),c='
    ',h="";if(s||!a)h+=''+o[t]+"";else{var p=r&&r.getFullYear()==n,d=i&&i.getFullYear()==n;h+='"}l||(c+=h+(s||!a||!f?" ":""));if(!e.yearshtml){e.yearshtml="";if(s||!f)c+=''+n+"";else{var m=this._get(e,"yearRange").split(":"),g=(new Date).getFullYear(),y=function(e){var t=e.match(/c[+-].*/)?n+parseInt(e.substring(1),10):e.match(/[+-].*/)?g+parseInt(e,10):parseInt(e,10);return isNaN(t)?g:t},b=y(m[0]),w=Math.max(b,y(m[1]||""));b=r?Math.max(b,r.getFullYear()):b,w=i?Math.min(w,i.getFullYear()):w,e.yearshtml+='",c+=e.yearshtml,e.yearshtml=null}}return c+=this._get(e,"yearSuffix"),l&&(c+=(s||!a||!f?" ":"")+h),c+="
    ",c},_adjustInstDate:function(e,t,n){var r=e.drawYear+(n=="Y"?t:0),i=e.drawMonth+(n=="M"?t:0),s=Math.min(e.selectedDay,this._getDaysInMonth(r,i))+(n=="D"?t:0),o=this._restrictMinMax(e,this._daylightSavingAdjust(new Date(r,i,s)));e.selectedDay=o.getDate(),e.drawMonth=e.selectedMonth=o.getMonth(),e.drawYear=e.selectedYear=o.getFullYear(),(n=="M"||n=="Y")&&this._notifyChange(e)},_restrictMinMax:function(e,t){var n=this._getMinMaxDate(e,"min"),r=this._getMinMaxDate(e,"max"),i=n&&tr?r:i,i},_notifyChange:function(e){var t=this._get(e,"onChangeMonthYear");t&&t.apply(e.input?e.input[0]:null,[e.selectedYear,e.selectedMonth+1,e])},_getNumberOfMonths:function(e){var t=this._get(e,"numberOfMonths");return t==null?[1,1]:typeof t=="number"?[1,t]:t},_getMinMaxDate:function(e,t){return this._determineDate(e,this._get(e,t+"Date"),null)},_getDaysInMonth:function(e,t){return 32-this._daylightSavingAdjust(new Date(e,t,32)).getDate()},_getFirstDayOfMonth:function(e,t){return(new Date(e,t,1)).getDay()},_canAdjustMonth:function(e,t,n,r){var i=this._getNumberOfMonths(e),s=this._daylightSavingAdjust(new Date(n,r+(t<0?t:i[0]*i[1]),1));return t<0&&s.setDate(this._getDaysInMonth(s.getFullYear(),s.getMonth())),this._isInRange(e,s)},_isInRange:function(e,t){var n=this._getMinMaxDate(e,"min"),r=this._getMinMaxDate(e,"max");return(!n||t.getTime()>=n.getTime())&&(!r||t.getTime()<=r.getTime())},_getFormatConfig:function(e){var t=this._get(e,"shortYearCutoff");return t=typeof t!="string"?t:(new Date).getFullYear()%100+parseInt(t,10),{shortYearCutoff:t,dayNamesShort:this._get(e,"dayNamesShort"),dayNames:this._get(e,"dayNames"),monthNamesShort:this._get(e,"monthNamesShort"),monthNames:this._get(e,"monthNames")}},_formatDate:function(e,t,n,r){t||(e.currentDay=e.selectedDay,e.currentMonth=e.selectedMonth,e.currentYear=e.selectedYear);var i=t?typeof t=="object"?t:this._daylightSavingAdjust(new Date(r,n,t)):this._daylightSavingAdjust(new Date(e.currentYear,e.currentMonth,e.currentDay));return this.formatDate(this._get(e,"dateFormat"),i,this._getFormatConfig(e))}}),$.fn.datepicker=function(e){if(!this.length)return this;$.datepicker.initialized||($(document).mousedown($.datepicker._checkExternalClick).find(document.body).append($.datepicker.dpDiv),$.datepicker.initialized=!0);var t=Array.prototype.slice.call(arguments,1);return typeof e!="string"||e!="isDisabled"&&e!="getDate"&&e!="widget"?e=="option"&&arguments.length==2&&typeof arguments[1]=="string"?$.datepicker["_"+e+"Datepicker"].apply($.datepicker,[this[0]].concat(t)):this.each(function(){typeof e=="string"?$.datepicker["_"+e+"Datepicker"].apply($.datepicker,[this].concat(t)):$.datepicker._attachDatepicker(this,e)}):$.datepicker["_"+e+"Datepicker"].apply($.datepicker,[this[0]].concat(t))},$.datepicker=new Datepicker,$.datepicker.initialized=!1,$.datepicker.uuid=(new Date).getTime(),$.datepicker.version="1.9.0",window["DP_jQuery_"+dpuuid]=$})(jQuery);(function(e,t){var n="ui-dialog ui-widget ui-widget-content ui-corner-all ",r={buttons:!0,height:!0,maxHeight:!0,maxWidth:!0,minHeight:!0,minWidth:!0,width:!0},i={maxHeight:!0,maxWidth:!0,minHeight:!0,minWidth:!0};e.widget("ui.dialog",{version:"1.9.0",options:{autoOpen:!0,buttons:{},closeOnEscape:!0,closeText:"close",dialogClass:"",draggable:!0,hide:null,height:"auto",maxHeight:!1,maxWidth:!1,minHeight:150,minWidth:150,modal:!1,position:{my:"center",at:"center",of:window,collision:"fit",using:function(t){var 
n=e(this).css(t).offset().top;n<0&&e(this).css("top",t.top-n)}},resizable:!0,show:null,stack:!0,title:"",width:300,zIndex:1e3},_create:function(){this.originalTitle=this.element.attr("title"),typeof this.originalTitle!="string"&&(this.originalTitle=""),this.oldPosition={parent:this.element.parent(),index:this.element.parent().children().index(this.element)},this.options.title=this.options.title||this.originalTitle;var t=this,r=this.options,i=r.title||" ",s=(this.uiDialog=e("
    ")).addClass(n+r.dialogClass).css({display:"none",outline:0,zIndex:r.zIndex}).attr("tabIndex",-1).keydown(function(n){r.closeOnEscape&&!n.isDefaultPrevented()&&n.keyCode&&n.keyCode===e.ui.keyCode.ESCAPE&&(t.close(n),n.preventDefault())}).mousedown(function(e){t.moveToTop(!1,e)}).appendTo("body"),o=this.element.show().removeAttr("title").addClass("ui-dialog-content ui-widget-content").appendTo(s),u=(this.uiDialogTitlebar=e("
    ")).addClass("ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix").prependTo(s),a=e("").addClass("ui-dialog-titlebar-close ui-corner-all").attr("role","button").click(function(e){e.preventDefault(),t.close(e)}).appendTo(u),f=(this.uiDialogTitlebarCloseText=e("")).addClass("ui-icon ui-icon-closethick").text(r.closeText).appendTo(a),l=e("").uniqueId().addClass("ui-dialog-title").html(i).prependTo(u),c=(this.uiDialogButtonPane=e("
    ")).addClass("ui-dialog-buttonpane ui-widget-content ui-helper-clearfix"),h=(this.uiButtonSet=e("
    ")).addClass("ui-dialog-buttonset").appendTo(c);s.attr({role:"dialog","aria-labelledby":l.attr("id")}),u.find("*").add(u).disableSelection(),this._hoverable(a),this._focusable(a),r.draggable&&e.fn.draggable&&this._makeDraggable(),r.resizable&&e.fn.resizable&&this._makeResizable(),this._createButtons(r.buttons),this._isOpen=!1,e.fn.bgiframe&&s.bgiframe(),this._on(s,{keydown:function(t){if(!r.modal||t.keyCode!==e.ui.keyCode.TAB)return;var n=e(":tabbable",s),i=n.filter(":first"),o=n.filter(":last");if(t.target===o[0]&&!t.shiftKey)return i.focus(1),!1;if(t.target===i[0]&&t.shiftKey)return o.focus(1),!1}})},_init:function(){this.options.autoOpen&&this.open()},_destroy:function(){var e,t=this.oldPosition;this.overlay&&this.overlay.destroy(),this.uiDialog.hide(),this.element.removeClass("ui-dialog-content ui-widget-content").hide().appendTo("body"),this.uiDialog.remove(),this.originalTitle&&this.element.attr("title",this.originalTitle),e=t.parent.children().eq(t.index),e.length&&e[0]!==this.element[0]?e.before(this.element):t.parent.append(this.element)},widget:function(){return this.uiDialog},close:function(t){var n=this,r,i;if(!this._isOpen)return;if(!1===this._trigger("beforeClose",t))return;return this._isOpen=!1,this.overlay&&this.overlay.destroy(),this.options.hide?this.uiDialog.hide(this.options.hide,function(){n._trigger("close",t)}):(this.uiDialog.hide(),this._trigger("close",t)),e.ui.dialog.overlay.resize(),this.options.modal&&(r=0,e(".ui-dialog").each(function(){this!==n.uiDialog[0]&&(i=e(this).css("z-index"),isNaN(i)||(r=Math.max(r,i)))}),e.ui.dialog.maxZ=r),this},isOpen:function(){return this._isOpen},moveToTop:function(t,n){var r=this.options,i;return r.modal&&!t||!r.stack&&!r.modal?this._trigger("focus",n):(r.zIndex>e.ui.dialog.maxZ&&(e.ui.dialog.maxZ=r.zIndex),this.overlay&&(e.ui.dialog.maxZ+=1,e.ui.dialog.overlay.maxZ=e.ui.dialog.maxZ,this.overlay.$el.css("z-index",e.ui.dialog.overlay.maxZ)),i={scrollTop:this.element.scrollTop(),scrollLeft:this.element.scrollLeft()},e.ui.dialog.maxZ+=1,this.uiDialog.css("z-index",e.ui.dialog.maxZ),this.element.attr(i),this._trigger("focus",n),this)},open:function(){if(this._isOpen)return;var t,n=this.options,r=this.uiDialog;return this._size(),this._position(n.position),r.show(n.show),this.overlay=n.modal?new e.ui.dialog.overlay(this):null,this.moveToTop(!0),t=this.element.find(":tabbable"),t.length||(t=this.uiDialogButtonPane.find(":tabbable"),t.length||(t=r)),t.eq(0).focus(),this._isOpen=!0,this._trigger("open"),this},_createButtons:function(t){var n,r,i=this,s=!1;this.uiDialogButtonPane.remove(),this.uiButtonSet.empty(),typeof t=="object"&&t!==null&&e.each(t,function(){return!(s=!0)}),s?(e.each(t,function(t,n){n=e.isFunction(n)?{click:n,text:t}:n;var r=e("
    ').css({width:this.offsetWidth+"px",height:this.offsetHeight+"px",position:"absolute",opacity:"0.001",zIndex:1e3}).css(e(this).offset()).appendTo("body")}),!0):!1)},_mouseStart:function(t){var n=this.options;return this.helper=this._createHelper(t),this.helper.addClass("ui-draggable-dragging"),this._cacheHelperProportions(),e.ui.ddmanager&&(e.ui.ddmanager.current=this),this._cacheMargins(),this.cssPosition=this.helper.css("position"),this.scrollParent=this.helper.scrollParent(),this.offset=this.positionAbs=this.element.offset(),this.offset={top:this.offset.top-this.margins.top,left:this.offset.left-this.margins.left},e.extend(this.offset,{click:{left:t.pageX-this.offset.left,top:t.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()}),this.originalPosition=this.position=this._generatePosition(t),this.originalPageX=t.pageX,this.originalPageY=t.pageY,n.cursorAt&&this._adjustOffsetFromHelper(n.cursorAt),n.containment&&this._setContainment(),this._trigger("start",t)===!1?(this._clear(),!1):(this._cacheHelperProportions(),e.ui.ddmanager&&!n.dropBehaviour&&e.ui.ddmanager.prepareOffsets(this,t),this._mouseDrag(t,!0),e.ui.ddmanager&&e.ui.ddmanager.dragStart(this,t),!0)},_mouseDrag:function(t,n){this.position=this._generatePosition(t),this.positionAbs=this._convertPositionTo("absolute");if(!n){var r=this._uiHash();if(this._trigger("drag",t,r)===!1)return this._mouseUp({}),!1;this.position=r.position}if(!this.options.axis||this.options.axis!="y")this.helper[0].style.left=this.position.left+"px";if(!this.options.axis||this.options.axis!="x")this.helper[0].style.top=this.position.top+"px";return e.ui.ddmanager&&e.ui.ddmanager.drag(this,t),!1},_mouseStop:function(t){var n=!1;e.ui.ddmanager&&!this.options.dropBehaviour&&(n=e.ui.ddmanager.drop(this,t)),this.dropped&&(n=this.dropped,this.dropped=!1);var r=this.element[0],i=!1;while(r&&(r=r.parentNode))r==document&&(i=!0);if(!i&&this.options.helper==="original")return!1;if(this.options.revert=="invalid"&&!n||this.options.revert=="valid"&&n||this.options.revert===!0||e.isFunction(this.options.revert)&&this.options.revert.call(this.element,n)){var s=this;e(this.helper).animate(this.originalPosition,parseInt(this.options.revertDuration,10),function(){s._trigger("stop",t)!==!1&&s._clear()})}else this._trigger("stop",t)!==!1&&this._clear();return!1},_mouseUp:function(t){return e("div.ui-draggable-iframeFix").each(function(){this.parentNode.removeChild(this)}),e.ui.ddmanager&&e.ui.ddmanager.dragStop(this,t),e.ui.mouse.prototype._mouseUp.call(this,t)},cancel:function(){return this.helper.is(".ui-draggable-dragging")?this._mouseUp({}):this._clear(),this},_getHandle:function(t){var n=!this.options.handle||!e(this.options.handle,this.element).length?!0:!1;return e(this.options.handle,this.element).find("*").andSelf().each(function(){this==t.target&&(n=!0)}),n},_createHelper:function(t){var n=this.options,r=e.isFunction(n.helper)?e(n.helper.apply(this.element[0],[t])):n.helper=="clone"?this.element.clone().removeAttr("id"):this.element;return r.parents("body").length||r.appendTo(n.appendTo=="parent"?this.element[0].parentNode:n.appendTo),r[0]!=this.element[0]&&!/(fixed|absolute)/.test(r.css("position"))&&r.css("position","absolute"),r},_adjustOffsetFromHelper:function(t){typeof t=="string"&&(t=t.split(" ")),e.isArray(t)&&(t={left:+t[0],top:+t[1]||0}),"left"in t&&(this.offset.click.left=t.left+this.margins.left),"right"in t&&(this.offset.click.left=this.helperProportions.width-t.right+this.margins.left),"top"in 
t&&(this.offset.click.top=t.top+this.margins.top),"bottom"in t&&(this.offset.click.top=this.helperProportions.height-t.bottom+this.margins.top)},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var t=this.offsetParent.offset();this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&e.contains(this.scrollParent[0],this.offsetParent[0])&&(t.left+=this.scrollParent.scrollLeft(),t.top+=this.scrollParent.scrollTop());if(this.offsetParent[0]==document.body||this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&e.browser.msie)t={top:0,left:0};return{top:t.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:t.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var e=this.element.position();return{top:e.top-(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:e.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}return{top:0,left:0}},_cacheMargins:function(){this.margins={left:parseInt(this.element.css("marginLeft"),10)||0,top:parseInt(this.element.css("marginTop"),10)||0,right:parseInt(this.element.css("marginRight"),10)||0,bottom:parseInt(this.element.css("marginBottom"),10)||0}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var t=this.options;t.containment=="parent"&&(t.containment=this.helper[0].parentNode);if(t.containment=="document"||t.containment=="window")this.containment=[t.containment=="document"?0:e(window).scrollLeft()-this.offset.relative.left-this.offset.parent.left,t.containment=="document"?0:e(window).scrollTop()-this.offset.relative.top-this.offset.parent.top,(t.containment=="document"?0:e(window).scrollLeft())+e(t.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(t.containment=="document"?0:e(window).scrollTop())+(e(t.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top];if(!/^(document|window|parent)$/.test(t.containment)&&t.containment.constructor!=Array){var n=e(t.containment),r=n[0];if(!r)return;var i=n.offset(),s=e(r).css("overflow")!="hidden";this.containment=[(parseInt(e(r).css("borderLeftWidth"),10)||0)+(parseInt(e(r).css("paddingLeft"),10)||0),(parseInt(e(r).css("borderTopWidth"),10)||0)+(parseInt(e(r).css("paddingTop"),10)||0),(s?Math.max(r.scrollWidth,r.offsetWidth):r.offsetWidth)-(parseInt(e(r).css("borderLeftWidth"),10)||0)-(parseInt(e(r).css("paddingRight"),10)||0)-this.helperProportions.width-this.margins.left-this.margins.right,(s?Math.max(r.scrollHeight,r.offsetHeight):r.offsetHeight)-(parseInt(e(r).css("borderTopWidth"),10)||0)-(parseInt(e(r).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top-this.margins.bottom],this.relative_container=n}else t.containment.constructor==Array&&(this.containment=t.containment)},_convertPositionTo:function(t,n){n||(n=this.position);var 
r=t=="absolute"?1:-1,i=this.options,s=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,o=/(html|body)/i.test(s[0].tagName);return{top:n.top+this.offset.relative.top*r+this.offset.parent.top*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():o?0:s.scrollTop())*r,left:n.left+this.offset.relative.left*r+this.offset.parent.left*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():o?0:s.scrollLeft())*r}},_generatePosition:function(t){var n=this.options,r=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,i=/(html|body)/i.test(r[0].tagName),s=t.pageX,o=t.pageY;if(this.originalPosition){var u;if(this.containment){if(this.relative_container){var a=this.relative_container.offset();u=[this.containment[0]+a.left,this.containment[1]+a.top,this.containment[2]+a.left,this.containment[3]+a.top]}else u=this.containment;t.pageX-this.offset.click.leftu[2]&&(s=u[2]+this.offset.click.left),t.pageY-this.offset.click.top>u[3]&&(o=u[3]+this.offset.click.top)}if(n.grid){var f=n.grid[1]?this.originalPageY+Math.round((o-this.originalPageY)/n.grid[1])*n.grid[1]:this.originalPageY;o=u?f-this.offset.click.topu[3]?f-this.offset.click.topu[2]?l-this.offset.click.left=0;l--){var c=r.snapElements[l].left,h=c+r.snapElements[l].width,p=r.snapElements[l].top,d=p+r.snapElements[l].height;if(!(c-s=l&&o<=c||u>=l&&u<=c||oc)&&(i>=a&&i<=f||s>=a&&s<=f||if);default:return!1}},e.ui.ddmanager={current:null,droppables:{"default":[]},prepareOffsets:function(t,n){var r=e.ui.ddmanager.droppables[t.options.scope]||[],i=n?n.type:null,s=(t.currentItem||t.element).find(":data(droppable)").andSelf();e:for(var o=0;oe?0:r.max")[0],c,h=t.each;l.style.cssText="background-color:rgba(1,1,1,.5)",f.rgba=l.style.backgroundColor.indexOf("rgba")>-1,h(u,function(e,t){t.cache="_"+e,t.props.alpha={idx:3,type:"percent",def:1}}),o.fn=t.extend(o.prototype,{parse:function(r,i,s,a){if(r===n)return this._rgba=[null,null,null,null],this;if(r.jquery||r.nodeType)r=t(r).css(i),i=n;var f=this,l=t.type(r),v=this._rgba=[],m;i!==n&&(r=[r,i,s,a],l="array");if(l==="string")return this.parse(d(r)||c._default);if(l==="array")return h(u.rgba.props,function(e,t){v[t.idx]=p(r[t.idx],t)}),this;if(l==="object")return r instanceof o?h(u,function(e,t){r[t.cache]&&(f[t.cache]=r[t.cache].slice())}):h(u,function(t,n){var i=n.cache;h(n.props,function(e,t){if(!f[i]&&n.to){if(e==="alpha"||r[e]==null)return;f[i]=n.to(f._rgba)}f[i][t.idx]=p(r[e],t,!0)}),f[i]&&e.inArray(null,f[i].slice(0,3))<0&&(f[i][3]=1,n.from&&(f._rgba=n.from(f[i])))}),this},is:function(e){var t=o(e),n=!0,r=this;return h(u,function(e,i){var s,o=t[i.cache];return o&&(s=r[i.cache]||i.to&&i.to(r._rgba)||[],h(i.props,function(e,t){if(o[t.idx]!=null)return n=o[t.idx]===s[t.idx],n})),n}),n},_space:function(){var e=[],t=this;return h(u,function(n,r){t[r.cache]&&e.push(n)}),e.pop()},transition:function(e,t){var n=o(e),r=n._space(),i=u[r],s=this.alpha()===0?o("transparent"):this,f=s[i.cache]||i.to(s._rgba),l=f.slice();return n=n[i.cache],h(i.props,function(e,r){var i=r.idx,s=f[i],o=n[i],u=a[r.type]||{};if(o===null)return;s===null?l[i]=o:(u.mod&&(o-s>u.mod/2?s+=u.mod:s-o>u.mod/2&&(s-=u.mod)),l[i]=p((o-s)*t+s,r))}),this[r](l)},blend:function(e){if(this._rgba[3]===1)return this;var n=this._rgba.slice(),r=n.pop(),i=o(e)._rgba;return o(t.map(n,function(e,t){return(1-r)*i[t]+r*e}))},toRgbaString:function(){var 
e="rgba(",n=t.map(this._rgba,function(e,t){return e==null?t>2?1:0:e});return n[3]===1&&(n.pop(),e="rgb("),e+n.join()+")"},toHslaString:function(){var e="hsla(",n=t.map(this.hsla(),function(e,t){return e==null&&(e=t>2?1:0),t&&t<3&&(e=Math.round(e*100)+"%"),e});return n[3]===1&&(n.pop(),e="hsl("),e+n.join()+")"},toHexString:function(e){var n=this._rgba.slice(),r=n.pop();return e&&n.push(~~(r*255)),"#"+t.map(n,function(e,t){return e=(e||0).toString(16),e.length===1?"0"+e:e}).join("")},toString:function(){return this._rgba[3]===0?"transparent":this.toRgbaString()}}),o.fn.parse.prototype=o.fn,u.hsla.to=function(e){if(e[0]==null||e[1]==null||e[2]==null)return[null,null,null,e[3]];var t=e[0]/255,n=e[1]/255,r=e[2]/255,i=e[3],s=Math.max(t,n,r),o=Math.min(t,n,r),u=s-o,a=s+o,f=a*.5,l,c;return o===s?l=0:t===s?l=60*(n-r)/u+360:n===s?l=60*(r-t)/u+120:l=60*(t-n)/u+240,f===0||f===1?c=f:f<=.5?c=u/a:c=u/(2-a),[Math.round(l)%360,c,f,i==null?1:i]},u.hsla.from=function(e){if(e[0]==null||e[1]==null||e[2]==null)return[null,null,null,e[3]];var t=e[0]/360,n=e[1],r=e[2],i=e[3],s=r<=.5?r*(1+n):r+n-r*n,o=2*r-s,u,a,f;return[Math.round(v(o,s,t+1/3)*255),Math.round(v(o,s,t)*255),Math.round(v(o,s,t-1/3)*255),i]},h(u,function(e,r){var s=r.props,u=r.cache,a=r.to,f=r.from;o.fn[e]=function(e){a&&!this[u]&&(this[u]=a(this._rgba));if(e===n)return this[u].slice();var r,i=t.type(e),l=i==="array"||i==="object"?e:arguments,c=this[u].slice();return h(s,function(e,t){var n=l[i==="object"?e:t.idx];n==null&&(n=c[t.idx]),c[t.idx]=p(n,t)}),f?(r=o(f(c)),r[u]=c,r):o(c)},h(s,function(n,r){if(o.fn[n])return;o.fn[n]=function(s){var o=t.type(s),u=n==="alpha"?this._hsla?"hsla":"rgba":e,a=this[u](),f=a[r.idx],l;return o==="undefined"?f:(o==="function"&&(s=s.call(this,f),o=t.type(s)),s==null&&r.empty?this:(o==="string"&&(l=i.exec(s),l&&(s=f+parseFloat(l[2])*(l[1]==="+"?1:-1))),a[r.idx]=s,this[u](a)))}})}),h(r,function(e,n){t.cssHooks[n]={set:function(e,r){var i,s,u="";if(t.type(r)!=="string"||(i=d(r))){r=o(i||r);if(!f.rgba&&r._rgba[3]!==1){s=n==="backgroundColor"?e.parentNode:e;while((u===""||u==="transparent")&&s&&s.style)try{u=t.css(s,"backgroundColor"),s=s.parentNode}catch(a){}r=r.blend(u&&u!=="transparent"?u:"_default")}r=r.toRgbaString()}try{e.style[n]=r}catch(r){}}},t.fx.step[n]=function(e){e.colorInit||(e.start=o(e.elem,n),e.end=o(e.end),e.colorInit=!0),t.cssHooks[n].set(e.elem,e.start.transition(e.end,e.pos))}}),t.cssHooks.borderColor={expand:function(e){var t={};return h(["Top","Right","Bottom","Left"],function(n,r){t["border"+r+"Color"]=e}),t}},c=t.Color.names={aqua:"#00ffff",black:"#000000",blue:"#0000ff",fuchsia:"#ff00ff",gray:"#808080",green:"#008000",lime:"#00ff00",maroon:"#800000",navy:"#000080",olive:"#808000",purple:"#800080",red:"#ff0000",silver:"#c0c0c0",teal:"#008080",white:"#ffffff",yellow:"#ffff00",transparent:[null,null,null,0],_default:"#ffffff"}}(jQuery),function(){function i(){var t=this.ownerDocument.defaultView?this.ownerDocument.defaultView.getComputedStyle(this,null):this.currentStyle,n={},r,i,s;if(t&&t.length&&t[0]&&t[t[0]]){s=t.length;while(s--)r=t[s],typeof t[r]=="string"&&(n[e.camelCase(r)]=t[r])}else for(r in t)typeof t[r]=="string"&&(n[r]=t[r]);return n}function s(t,n){var i={},s,o;for(s in n)o=n[s],t[s]!==o&&!r[s]&&(e.fx.step[s]||!isNaN(parseFloat(o)))&&(i[s]=o);return i}var 
n=["add","remove","toggle"],r={border:1,borderBottom:1,borderColor:1,borderLeft:1,borderRight:1,borderTop:1,borderWidth:1,margin:1,padding:1};e.each(["borderLeftStyle","borderRightStyle","borderBottomStyle","borderTopStyle"],function(t,n){e.fx.step[n]=function(e){if(e.end!=="none"&&!e.setAttr||e.pos===1&&!e.setAttr)jQuery.style(e.elem,n,e.end),e.setAttr=!0}}),e.effects.animateClass=function(t,r,o,u){var a=e.speed(r,o,u);return this.queue(function(){var r=e(this),o=r.attr("class")||"",u,f=a.children?r.find("*").andSelf():r;f=f.map(function(){var t=e(this);return{el:t,start:i.call(this)}}),u=function(){e.each(n,function(e,n){t[n]&&r[n+"Class"](t[n])})},u(),f=f.map(function(){return this.end=i.call(this.el[0]),this.diff=s(this.start,this.end),this}),r.attr("class",o),f=f.map(function(){var t=this,n=e.Deferred(),r=jQuery.extend({},a,{queue:!1,complete:function(){n.resolve(t)}});return this.el.animate(this.diff,r),n.promise()}),e.when.apply(e,f.get()).done(function(){u(),e.each(arguments,function(){var t=this.el;e.each(this.diff,function(e){t.css(e,"")})}),a.complete.call(r[0])})})},e.fn.extend({_addClass:e.fn.addClass,addClass:function(t,n,r,i){return n?e.effects.animateClass.call(this,{add:t},n,r,i):this._addClass(t)},_removeClass:e.fn.removeClass,removeClass:function(t,n,r,i){return n?e.effects.animateClass.call(this,{remove:t},n,r,i):this._removeClass(t)},_toggleClass:e.fn.toggleClass,toggleClass:function(n,r,i,s,o){return typeof r=="boolean"||r===t?i?e.effects.animateClass.call(this,r?{add:n}:{remove:n},i,s,o):this._toggleClass(n,r):e.effects.animateClass.call(this,{toggle:n},r,i,s)},switchClass:function(t,n,r,i,s){return e.effects.animateClass.call(this,{add:n,remove:t},r,i,s)}})}(),function(){function i(n,r,i,s){e.isPlainObject(n)&&(r=n,n=n.effect),n={effect:n},r===t&&(r={}),e.isFunction(r)&&(s=r,i=null,r={});if(typeof r=="number"||e.fx.speeds[r])s=i,i=r,r={};return e.isFunction(i)&&(s=i,i=null),r&&e.extend(n,r),i=i||r.duration,n.duration=e.fx.off?0:typeof i=="number"?i:i in e.fx.speeds?e.fx.speeds[i]:e.fx.speeds._default,n.complete=s||r.complete,n}function s(t){return!t||typeof t=="number"||e.fx.speeds[t]?!0:typeof t=="string"&&!e.effects.effect[t]?n&&e.effects[t]?!1:!0:!1}e.extend(e.effects,{version:"1.9.0",save:function(e,t){for(var n=0;n
    ").addClass("ui-effects-wrapper").css({fontSize:"100%",background:"transparent",border:"none",margin:0,padding:0}),i={width:t.width(),height:t.height()},s=document.activeElement;try{s.id}catch(o){s=document.body}return t.wrap(r),(t[0]===s||e.contains(t[0],s))&&e(s).focus(),r=t.parent(),t.css("position")==="static"?(r.css({position:"relative"}),t.css({position:"relative"})):(e.extend(n,{position:t.css("position"),zIndex:t.css("z-index")}),e.each(["top","left","bottom","right"],function(e,r){n[r]=t.css(r),isNaN(parseInt(n[r],10))&&(n[r]="auto")}),t.css({position:"relative",top:0,left:0,right:"auto",bottom:"auto"})),t.css(i),r.css(n).show()},removeWrapper:function(t){var n=document.activeElement;return t.parent().is(".ui-effects-wrapper")&&(t.parent().replaceWith(t),(t[0]===n||e.contains(t[0],n))&&e(n).focus()),t},setTransition:function(t,n,r,i){return i=i||{},e.each(n,function(e,n){var s=t.cssUnit(n);s[0]>0&&(i[n]=s[0]*r+s[1])}),i}}),e.fn.extend({effect:function(t,r,s,o){function h(t){function s(){e.isFunction(r)&&r.call(n[0]),e.isFunction(t)&&t()}var n=e(this),r=u.complete,i=u.mode;(n.is(":hidden")?i==="hide":i==="show")?s():l.call(n[0],u,s)}var u=i.apply(this,arguments),a=u.mode,f=u.queue,l=e.effects.effect[u.effect],c=!l&&n&&e.effects[u.effect];return e.fx.off||!l&&!c?a?this[a](u.duration,u.complete):this.each(function(){u.complete&&u.complete.call(this)}):l?f===!1?this.each(h):this.queue(f||"fx",h):c.call(this,{options:u,duration:u.duration,callback:u.complete,mode:u.mode})},_show:e.fn.show,show:function(e){if(s(e))return this._show.apply(this,arguments);var t=i.apply(this,arguments);return t.mode="show",this.effect.call(this,t)},_hide:e.fn.hide,hide:function(e){if(s(e))return this._hide.apply(this,arguments);var t=i.apply(this,arguments);return t.mode="hide",this.effect.call(this,t)},__toggle:e.fn.toggle,toggle:function(t){if(s(t)||typeof t=="boolean"||e.isFunction(t))return this.__toggle.apply(this,arguments);var n=i.apply(this,arguments);return n.mode="toggle",this.effect.call(this,n)},cssUnit:function(t){var n=this.css(t),r=[];return e.each(["em","px","%","pt"],function(e,t){n.indexOf(t)>0&&(r=[parseFloat(n),t])}),r}})}(),function(){var t={};e.each(["Quad","Cubic","Quart","Quint","Expo"],function(e,n){t[n]=function(t){return Math.pow(t,e+2)}}),e.extend(t,{Sine:function(e){return 1-Math.cos(e*Math.PI/2)},Circ:function(e){return 1-Math.sqrt(1-e*e)},Elastic:function(e){return e===0||e===1?e:-Math.pow(2,8*(e-1))*Math.sin(((e-1)*80-7.5)*Math.PI/15)},Back:function(e){return e*e*(3*e-2)},Bounce:function(e){var t,n=4;while(e<((t=Math.pow(2,--n))-1)/11);return 1/Math.pow(4,3-n)-7.5625*Math.pow((t*3-2)/22-e,2)}}),e.each(t,function(t,n){e.easing["easeIn"+t]=n,e.easing["easeOut"+t]=function(e){return 1-n(1-e)},e.easing["easeInOut"+t]=function(e){return e<.5?n(e*2)/2:1-n(e*-2+2)/2}})}()}(jQuery);(function(e,t){var n=/up|down|vertical/,r=/up|left|vertical|horizontal/;e.effects.effect.blind=function(t,i){var 
s=e(this),o=["position","top","bottom","left","right","height","width"],u=e.effects.setMode(s,t.mode||"hide"),a=t.direction||"up",f=n.test(a),l=f?"height":"width",c=f?"top":"left",h=r.test(a),p={},d=u==="show",v,m,g;s.parent().is(".ui-effects-wrapper")?e.effects.save(s.parent(),o):e.effects.save(s,o),s.show(),v=e.effects.createWrapper(s).css({overflow:"hidden"}),m=v[l](),g=parseFloat(v.css(c))||0,p[l]=d?m:0,h||(s.css(f?"bottom":"right",0).css(f?"top":"left","auto").css({position:"absolute"}),p[c]=d?g:m+g),d&&(v.css(l,0),h||v.css(c,g+m)),v.animate(p,{duration:t.duration,easing:t.easing,queue:!1,complete:function(){u==="hide"&&s.hide(),e.effects.restore(s,o),e.effects.removeWrapper(s),i()}})}})(jQuery);(function(e,t){e.effects.effect.bounce=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"effect"),o=s==="hide",u=s==="show",a=t.direction||"up",f=t.distance,l=t.times||5,c=l*2+(u||o?1:0),h=t.duration/c,p=t.easing,d=a==="up"||a==="down"?"top":"left",v=a==="up"||a==="left",m,g,y,b=r.queue(),w=b.length;(u||o)&&i.push("opacity"),e.effects.save(r,i),r.show(),e.effects.createWrapper(r),f||(f=r[d==="top"?"outerHeight":"outerWidth"]()/3),u&&(y={opacity:1},y[d]=0,r.css("opacity",0).css(d,v?-f*2:f*2).animate(y,h,p)),o&&(f/=Math.pow(2,l-1)),y={},y[d]=0;for(m=0;m1&&b.splice.apply(b,[1,0].concat(b.splice(w,c+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.clip=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=t.direction||"vertical",a=u==="vertical",f=a?"height":"width",l=a?"top":"left",c={},h,p,d;e.effects.save(r,i),r.show(),h=e.effects.createWrapper(r).css({overflow:"hidden"}),p=r[0].tagName==="IMG"?h:r,d=p[f](),o&&(p.css(f,0),p.css(l,d/2)),c[f]=o?d:0,c[l]=o?0:d/2,p.animate(c,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){o||r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.drop=function(t,n){var r=e(this),i=["position","top","bottom","left","right","opacity","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=t.direction||"left",a=u==="up"||u==="down"?"top":"left",f=u==="up"||u==="left"?"pos":"neg",l={opacity:o?1:0},c;e.effects.save(r,i),r.show(),e.effects.createWrapper(r),c=t.distance||r[a==="top"?"outerHeight":"outerWidth"](!0)/2,o&&r.css("opacity",0).css(a,f==="pos"?-c:c),l[a]=(o?f==="pos"?"+=":"-=":f==="pos"?"-=":"+=")+c,r.animate(l,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.explode=function(t,n){function y(){c.push(this),c.length===r*i&&b()}function b(){s.css({visibility:"visible"}),e(c).remove(),u||s.hide(),n()}var r=t.pieces?Math.round(Math.sqrt(t.pieces)):3,i=r,s=e(this),o=e.effects.setMode(s,t.mode||"hide"),u=o==="show",a=s.show().css("visibility","hidden").offset(),f=Math.ceil(s.outerWidth()/i),l=Math.ceil(s.outerHeight()/r),c=[],h,p,d,v,m,g;for(h=0;h
    ").css({position:"absolute",visibility:"visible",left:-p*f,top:-h*l}).parent().addClass("ui-effects-explode").css({position:"absolute",overflow:"hidden",width:f,height:l,left:d+(u?m*f:0),top:v+(u?g*l:0),opacity:u?0:1}).animate({left:d+(u?0:m*f),top:v+(u?0:g*l),opacity:u?1:0},t.duration||500,t.easing,y)}}})(jQuery);(function(e,t){e.effects.effect.fade=function(t,n){var r=e(this),i=e.effects.setMode(r,t.mode||"toggle");r.animate({opacity:i},{queue:!1,duration:t.duration,easing:t.easing,complete:n})}})(jQuery);(function(e,t){e.effects.effect.fold=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=s==="hide",a=t.size||15,f=/([0-9]+)%/.exec(a),l=!!t.horizFirst,c=o!==l,h=c?["width","height"]:["height","width"],p=t.duration/2,d,v,m={},g={};e.effects.save(r,i),r.show(),d=e.effects.createWrapper(r).css({overflow:"hidden"}),v=c?[d.width(),d.height()]:[d.height(),d.width()],f&&(a=parseInt(f[1],10)/100*v[u?0:1]),o&&d.css(l?{height:0,width:a}:{height:a,width:0}),m[h[0]]=o?v[0]:a,g[h[1]]=o?v[1]:0,d.animate(m,p,t.easing).animate(g,p,t.easing,function(){u&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()})}})(jQuery);(function(e,t){e.effects.effect.highlight=function(t,n){var r=e(this),i=["backgroundImage","backgroundColor","opacity"],s=e.effects.setMode(r,t.mode||"show"),o={backgroundColor:r.css("backgroundColor")};s==="hide"&&(o.opacity=0),e.effects.save(r,i),r.show().css({backgroundImage:"none",backgroundColor:t.color||"#ffff99"}).animate(o,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),n()}})}})(jQuery);(function(e,t){e.effects.effect.pulsate=function(t,n){var r=e(this),i=e.effects.setMode(r,t.mode||"show"),s=i==="show",o=i==="hide",u=s||i==="hide",a=(t.times||5)*2+(u?1:0),f=t.duration/a,l=0,c=r.queue(),h=c.length,p;if(s||!r.is(":visible"))r.css("opacity",0).show(),l=1;for(p=1;p1&&c.splice.apply(c,[1,0].concat(c.splice(h,a+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.puff=function(t,n){var r=e(this),i=e.effects.setMode(r,t.mode||"hide"),s=i==="hide",o=parseInt(t.percent,10)||150,u=o/100,a={height:r.height(),width:r.width()};e.extend(t,{effect:"scale",queue:!1,fade:!0,mode:i,complete:n,percent:s?o:100,from:s?a:{height:a.height*u,width:a.width*u}}),r.effect(t)},e.effects.effect.scale=function(t,n){var r=e(this),i=e.extend(!0,{},t),s=e.effects.setMode(r,t.mode||"effect"),o=parseInt(t.percent,10)||(parseInt(t.percent,10)===0?0:s==="hide"?0:100),u=t.direction||"both",a=t.origin,f={height:r.height(),width:r.width(),outerHeight:r.outerHeight(),outerWidth:r.outerWidth()},l={y:u!=="horizontal"?o/100:1,x:u!=="vertical"?o/100:1};i.effect="size",i.queue=!1,i.complete=n,s!=="effect"&&(i.origin=a||["middle","center"],i.restore=!0),i.from=t.from||(s==="show"?{height:0,width:0}:f),i.to={height:f.height*l.y,width:f.width*l.x,outerHeight:f.outerHeight*l.y,outerWidth:f.outerWidth*l.x},i.fade&&(s==="show"&&(i.from.opacity=0,i.to.opacity=1),s==="hide"&&(i.from.opacity=1,i.to.opacity=0)),r.effect(i)},e.effects.effect.size=function(t,n){var 
r=e(this),i=["position","top","bottom","left","right","width","height","overflow","opacity"],s=["position","top","bottom","left","right","overflow","opacity"],o=["width","height","overflow"],u=["fontSize"],a=["borderTopWidth","borderBottomWidth","paddingTop","paddingBottom"],f=["borderLeftWidth","borderRightWidth","paddingLeft","paddingRight"],l=e.effects.setMode(r,t.mode||"effect"),c=t.restore||l!=="effect",h=t.scale||"both",p=t.origin||["middle","center"],d,v,m,g=r.css("position");l==="show"&&r.show(),d={height:r.height(),width:r.width(),outerHeight:r.outerHeight(),outerWidth:r.outerWidth()},r.from=t.from||d,r.to=t.to||d,m={from:{y:r.from.height/d.height,x:r.from.width/d.width},to:{y:r.to.height/d.height,x:r.to.width/d.width}};if(h==="box"||h==="both")m.from.y!==m.to.y&&(i=i.concat(a),r.from=e.effects.setTransition(r,a,m.from.y,r.from),r.to=e.effects.setTransition(r,a,m.to.y,r.to)),m.from.x!==m.to.x&&(i=i.concat(f),r.from=e.effects.setTransition(r,f,m.from.x,r.from),r.to=e.effects.setTransition(r,f,m.to.x,r.to));(h==="content"||h==="both")&&m.from.y!==m.to.y&&(i=i.concat(u),r.from=e.effects.setTransition(r,u,m.from.y,r.from),r.to=e.effects.setTransition(r,u,m.to.y,r.to)),e.effects.save(r,c?i:s),r.show(),e.effects.createWrapper(r),r.css("overflow","hidden").css(r.from),p&&(v=e.effects.getBaseline(p,d),r.from.top=(d.outerHeight-r.outerHeight())*v.y,r.from.left=(d.outerWidth-r.outerWidth())*v.x,r.to.top=(d.outerHeight-r.to.outerHeight)*v.y,r.to.left=(d.outerWidth-r.to.outerWidth)*v.x),r.css(r.from);if(h==="content"||h==="both")a=a.concat(["marginTop","marginBottom"]).concat(u),f=f.concat(["marginLeft","marginRight"]),o=i.concat(a).concat(f),r.find("*[width]").each(function(){var n=e(this),r={height:n.height(),width:n.width()};c&&e.effects.save(n,o),n.from={height:r.height*m.from.y,width:r.width*m.from.x},n.to={height:r.height*m.to.y,width:r.width*m.to.x},m.from.y!==m.to.y&&(n.from=e.effects.setTransition(n,a,m.from.y,n.from),n.to=e.effects.setTransition(n,a,m.to.y,n.to)),m.from.x!==m.to.x&&(n.from=e.effects.setTransition(n,f,m.from.x,n.from),n.to=e.effects.setTransition(n,f,m.to.x,n.to)),n.css(n.from),n.animate(n.to,t.duration,t.easing,function(){c&&e.effects.restore(n,o)})});r.animate(r.to,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){r.to.opacity===0&&r.css("opacity",r.from.opacity),l==="hide"&&r.hide(),e.effects.restore(r,c?i:s),c||(g==="static"?r.css({position:"relative",top:r.to.top,left:r.to.left}):e.each(["top","left"],function(e,t){r.css(t,function(t,n){var i=parseInt(n,10),s=e?r.to.left:r.to.top;return n==="auto"?s+"px":i+s+"px"})})),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.shake=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"effect"),o=t.direction||"left",u=t.distance||20,a=t.times||3,f=a*2+1,l=Math.round(t.duration/f),c=o==="up"||o==="down"?"top":"left",h=o==="up"||o==="left",p={},d={},v={},m,g=r.queue(),y=g.length;e.effects.save(r,i),r.show(),e.effects.createWrapper(r),p[c]=(h?"-=":"+=")+u,d[c]=(h?"+=":"-=")+u*2,v[c]=(h?"-=":"+=")+u*2,r.animate(p,l,t.easing);for(m=1;m1&&g.splice.apply(g,[1,0].concat(g.splice(y,f+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.slide=function(t,n){var 
r=e(this),i=["position","top","bottom","left","right","width","height"],s=e.effects.setMode(r,t.mode||"show"),o=s==="show",u=t.direction||"left",a=u==="up"||u==="down"?"top":"left",f=u==="up"||u==="left",l,c={};e.effects.save(r,i),r.show(),l=t.distance||r[a==="top"?"outerHeight":"outerWidth"](!0),e.effects.createWrapper(r).css({overflow:"hidden"}),o&&r.css(a,f?isNaN(l)?"-"+l:-l:l),c[a]=(o?f?"+=":"-=":f?"-=":"+=")+l,r.animate(c,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.transfer=function(t,n){var r=e(this),i=e(t.to),s=i.css("position")==="fixed",o=e("body"),u=s?o.scrollTop():0,a=s?o.scrollLeft():0,f=i.offset(),l={top:f.top-u,left:f.left-a,height:i.innerHeight(),width:i.innerWidth()},c=r.offset(),h=e('
    ').appendTo(document.body).addClass(t.className).css({top:c.top-u,left:c.left-a,height:r.innerHeight(),width:r.innerWidth(),position:s?"fixed":"absolute"}).animate(l,t.duration,t.easing,function(){h.remove(),n()})}})(jQuery);(function(e,t){var n=!1;e.widget("ui.menu",{version:"1.9.0",defaultElement:"
      ",delay:300,options:{icons:{submenu:"ui-icon-carat-1-e"},menus:"ul",position:{my:"left top",at:"right top"},role:"menu",blur:null,focus:null,select:null},_create:function(){this.activeMenu=this.element,this.element.uniqueId().addClass("ui-menu ui-widget ui-widget-content ui-corner-all").toggleClass("ui-menu-icons",!!this.element.find(".ui-icon").length).attr({role:this.options.role,tabIndex:0}).bind("click"+this.eventNamespace,e.proxy(function(e){this.options.disabled&&e.preventDefault()},this)),this.options.disabled&&this.element.addClass("ui-state-disabled").attr("aria-disabled","true"),this._on({"mousedown .ui-menu-item > a":function(e){e.preventDefault()},"click .ui-state-disabled > a":function(e){e.preventDefault()},"click .ui-menu-item:has(a)":function(t){var r=e(t.target).closest(".ui-menu-item");!n&&r.not(".ui-state-disabled").length&&(n=!0,this.select(t),r.has(".ui-menu").length?this.expand(t):this.element.is(":focus")||(this.element.trigger("focus",[!0]),this.active&&this.active.parents(".ui-menu").length===1&&clearTimeout(this.timer)))},"mouseenter .ui-menu-item":function(t){var n=e(t.currentTarget);n.siblings().children(".ui-state-active").removeClass("ui-state-active"),this.focus(t,n)},mouseleave:"collapseAll","mouseleave .ui-menu":"collapseAll",focus:function(e,t){var n=this.active||this.element.children(".ui-menu-item").eq(0);t||this.focus(e,n)},blur:function(t){this._delay(function(){e.contains(this.element[0],this.document[0].activeElement)||this.collapseAll(t)})},keydown:"_keydown"}),this.refresh(),this._on(this.document,{click:function(t){e(t.target).closest(".ui-menu").length||this.collapseAll(t),n=!1}})},_destroy:function(){this.element.removeAttr("aria-activedescendant").find(".ui-menu").andSelf().removeClass("ui-menu ui-widget ui-widget-content ui-corner-all ui-menu-icons").removeAttr("role").removeAttr("tabIndex").removeAttr("aria-labelledby").removeAttr("aria-expanded").removeAttr("aria-hidden").removeAttr("aria-disabled").removeUniqueId().show(),this.element.find(".ui-menu-item").removeClass("ui-menu-item").removeAttr("role").removeAttr("aria-disabled").children("a").removeUniqueId().removeClass("ui-corner-all ui-state-hover").removeAttr("tabIndex").removeAttr("role").removeAttr("aria-haspopup").children().each(function(){var t=e(this);t.data("ui-menu-submenu-carat")&&t.remove()}),this.element.find(".ui-menu-divider").removeClass("ui-menu-divider ui-widget-content")},_keydown:function(t){function a(e){return e.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g,"\\$&")}var n,r,i,s,o,u=!0;switch(t.keyCode){case e.ui.keyCode.PAGE_UP:this.previousPage(t);break;case e.ui.keyCode.PAGE_DOWN:this.nextPage(t);break;case e.ui.keyCode.HOME:this._move("first","first",t);break;case e.ui.keyCode.END:this._move("last","last",t);break;case e.ui.keyCode.UP:this.previous(t);break;case e.ui.keyCode.DOWN:this.next(t);break;case e.ui.keyCode.LEFT:this.collapse(t);break;case e.ui.keyCode.RIGHT:this.active&&!this.active.is(".ui-state-disabled")&&this.expand(t);break;case e.ui.keyCode.ENTER:case e.ui.keyCode.SPACE:this._activate(t);break;case e.ui.keyCode.ESCAPE:this.collapse(t);break;default:u=!1,r=this.previousFilter||"",i=String.fromCharCode(t.keyCode),s=!1,clearTimeout(this.filterTimer),i===r?s=!0:i=r+i,o=new RegExp("^"+a(i),"i"),n=this.activeMenu.children(".ui-menu-item").filter(function(){return o.test(e(this).children("a").text())}),n=s&&n.index(this.active.next())!==-1?this.active.nextAll(".ui-menu-item"):n,n.length||(i=String.fromCharCode(t.keyCode),o=new 
RegExp("^"+a(i),"i"),n=this.activeMenu.children(".ui-menu-item").filter(function(){return o.test(e(this).children("a").text())})),n.length?(this.focus(t,n),n.length>1?(this.previousFilter=i,this.filterTimer=this._delay(function(){delete this.previousFilter},1e3)):delete this.previousFilter):delete this.previousFilter}u&&t.preventDefault()},_activate:function(e){this.active.is(".ui-state-disabled")||(this.active.children("a[aria-haspopup='true']").length?this.expand(e):this.select(e))},refresh:function(){var t,n=this.options.icons.submenu,r=this.element.find(this.options.menus+":not(.ui-menu)").addClass("ui-menu ui-widget ui-widget-content ui-corner-all").hide().attr({role:this.options.role,"aria-hidden":"true","aria-expanded":"false"});t=r.add(this.element),t.children(":not(.ui-menu-item):has(a)").addClass("ui-menu-item").attr("role","presentation").children("a").uniqueId().addClass("ui-corner-all").attr({tabIndex:-1,role:this._itemRole()}),t.children(":not(.ui-menu-item)").each(function(){var t=e(this);/[^\-—–\s]/.test(t.text())||t.addClass("ui-widget-content ui-menu-divider")}),t.children(".ui-state-disabled").attr("aria-disabled","true"),r.each(function(){var t=e(this),r=t.prev("a"),i=e("").addClass("ui-menu-icon ui-icon "+n).data("ui-menu-submenu-carat",!0);r.attr("aria-haspopup","true").prepend(i),t.attr("aria-labelledby",r.attr("id"))}),this.active&&!e.contains(this.element[0],this.active[0])&&this.blur()},_itemRole:function(){return{menu:"menuitem",listbox:"option"}[this.options.role]},focus:function(e,t){var n,r;this.blur(e,e&&e.type==="focus"),this._scrollIntoView(t),this.active=t.first(),r=this.active.children("a").addClass("ui-state-focus"),this.options.role&&this.element.attr("aria-activedescendant",r.attr("id")),this.active.parent().closest(".ui-menu-item").children("a:first").addClass("ui-state-active"),e&&e.type==="keydown"?this._close():this.timer=this._delay(function(){this._close()},this.delay),n=t.children(".ui-menu"),n.length&&/^mouse/.test(e.type)&&this._startOpening(n),this.activeMenu=t.parent(),this._trigger("focus",e,{item:t})},_scrollIntoView:function(t){var n,r,i,s,o,u;this._hasScroll()&&(n=parseFloat(e.css(this.activeMenu[0],"borderTopWidth"))||0,r=parseFloat(e.css(this.activeMenu[0],"paddingTop"))||0,i=t.offset().top-this.activeMenu.offset().top-n-r,s=this.activeMenu.scrollTop(),o=this.activeMenu.height(),u=t.height(),i<0?this.activeMenu.scrollTop(s+i):i+u>o&&this.activeMenu.scrollTop(s+i-o+u))},blur:function(e,t){t||clearTimeout(this.timer);if(!this.active)return;this.active.children("a").removeClass("ui-state-focus"),this.active=null,this._trigger("blur",e,{item:this.active})},_startOpening:function(e){clearTimeout(this.timer);if(e.attr("aria-hidden")!=="true")return;this.timer=this._delay(function(){this._close(),this._open(e)},this.delay)},_open:function(t){var n=e.extend({of:this.active},this.options.position);clearTimeout(this.timer),this.element.find(".ui-menu").not(t.parents(".ui-menu")).hide().attr("aria-hidden","true"),t.show().removeAttr("aria-hidden").attr("aria-expanded","true").position(n)},collapseAll:function(t,n){clearTimeout(this.timer),this.timer=this._delay(function(){var 
r=n?this.element:e(t&&t.target).closest(this.element.find(".ui-menu"));r.length||(r=this.element),this._close(r),this.blur(t),this.activeMenu=r},this.delay)},_close:function(e){e||(e=this.active?this.active.parent():this.element),e.find(".ui-menu").hide().attr("aria-hidden","true").attr("aria-expanded","false").end().find("a.ui-state-active").removeClass("ui-state-active")},collapse:function(e){var t=this.active&&this.active.parent().closest(".ui-menu-item",this.element);t&&t.length&&(this._close(),this.focus(e,t))},expand:function(e){var t=this.active&&this.active.children(".ui-menu ").children(".ui-menu-item").first();t&&t.length&&(this._open(t.parent()),this._delay(function(){this.focus(e,t)}))},next:function(e){this._move("next","first",e)},previous:function(e){this._move("prev","last",e)},isFirstItem:function(){return this.active&&!this.active.prevAll(".ui-menu-item").length},isLastItem:function(){return this.active&&!this.active.nextAll(".ui-menu-item").length},_move:function(e,t,n){var r;this.active&&(e==="first"||e==="last"?r=this.active[e==="first"?"prevAll":"nextAll"](".ui-menu-item").eq(-1):r=this.active[e+"All"](".ui-menu-item").eq(0));if(!r||!r.length||!this.active)r=this.activeMenu.children(".ui-menu-item")[t]();this.focus(n,r)},nextPage:function(t){var n,r,i;if(!this.active){this.next(t);return}if(this.isLastItem())return;this._hasScroll()?(r=this.active.offset().top,i=this.element.height(),this.active.nextAll(".ui-menu-item").each(function(){return n=e(this),n.offset().top-r-i<0}),this.focus(t,n)):this.focus(t,this.activeMenu.children(".ui-menu-item")[this.active?"last":"first"]())},previousPage:function(t){var n,r,i;if(!this.active){this.next(t);return}if(this.isFirstItem())return;this._hasScroll()?(r=this.active.offset().top,i=this.element.height(),this.active.prevAll(".ui-menu-item").each(function(){return n=e(this),n.offset().top-r+i>0}),this.focus(t,n)):this.focus(t,this.activeMenu.children(".ui-menu-item").first())},_hasScroll:function(){return this.element.outerHeight()
    ").appendTo(this.element),this.oldValue=this._value(),this._refreshValue()},_destroy:function(){this.element.removeClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").removeAttr("role").removeAttr("aria-valuemin").removeAttr("aria-valuemax").removeAttr("aria-valuenow"),this.valueDiv.remove()},value:function(e){return e===t?this._value():(this._setOption("value",e),this)},_setOption:function(e,t){e==="value"&&(this.options.value=t,this._refreshValue(),this._value()===this.options.max&&this._trigger("complete")),this._super(e,t)},_value:function(){var e=this.options.value;return typeof e!="number"&&(e=0),Math.min(this.options.max,Math.max(this.min,e))},_percentage:function(){return 100*this._value()/this.options.max},_refreshValue:function(){var e=this.value(),t=this._percentage();this.oldValue!==e&&(this.oldValue=e,this._trigger("change")),this.valueDiv.toggle(e>this.min).toggleClass("ui-corner-right",e===this.options.max).width(t.toFixed(0)+"%"),this.element.attr("aria-valuenow",e)}})})(jQuery);(function(e,t){e.widget("ui.resizable",e.ui.mouse,{version:"1.9.0",widgetEventPrefix:"resize",options:{alsoResize:!1,animate:!1,animateDuration:"slow",animateEasing:"swing",aspectRatio:!1,autoHide:!1,containment:!1,ghost:!1,grid:!1,handles:"e,s,se",helper:!1,maxHeight:null,maxWidth:null,minHeight:10,minWidth:10,zIndex:1e3},_create:function(){var t=this,n=this.options;this.element.addClass("ui-resizable"),e.extend(this,{_aspectRatio:!!n.aspectRatio,aspectRatio:n.aspectRatio,originalElement:this.element,_proportionallyResizeElements:[],_helper:n.helper||n.ghost||n.animate?n.helper||"ui-resizable-helper":null}),this.element[0].nodeName.match(/canvas|textarea|input|select|button|img/i)&&(this.element.wrap(e('
    ').css({position:this.element.css("position"),width:this.element.outerWidth(),height:this.element.outerHeight(),top:this.element.css("top"),left:this.element.css("left")})),this.element=this.element.parent().data("resizable",this.element.data("resizable")),this.elementIsWrapper=!0,this.element.css({marginLeft:this.originalElement.css("marginLeft"),marginTop:this.originalElement.css("marginTop"),marginRight:this.originalElement.css("marginRight"),marginBottom:this.originalElement.css("marginBottom")}),this.originalElement.css({marginLeft:0,marginTop:0,marginRight:0,marginBottom:0}),this.originalResizeStyle=this.originalElement.css("resize"),this.originalElement.css("resize","none"),this._proportionallyResizeElements.push(this.originalElement.css({position:"static",zoom:1,display:"block"})),this.originalElement.css({margin:this.originalElement.css("margin")}),this._proportionallyResize()),this.handles=n.handles||(e(".ui-resizable-handle",this.element).length?{n:".ui-resizable-n",e:".ui-resizable-e",s:".ui-resizable-s",w:".ui-resizable-w",se:".ui-resizable-se",sw:".ui-resizable-sw",ne:".ui-resizable-ne",nw:".ui-resizable-nw"}:"e,s,se");if(this.handles.constructor==String){this.handles=="all"&&(this.handles="n,e,s,w,se,sw,ne,nw");var r=this.handles.split(",");this.handles={};for(var i=0;i
    ');u.css({zIndex:n.zIndex}),"se"==s&&u.addClass("ui-icon ui-icon-gripsmall-diagonal-se"),this.handles[s]=".ui-resizable-"+s,this.element.append(u)}}this._renderAxis=function(t){t=t||this.element;for(var n in this.handles){this.handles[n].constructor==String&&(this.handles[n]=e(this.handles[n],this.element).show());if(this.elementIsWrapper&&this.originalElement[0].nodeName.match(/textarea|input|select|button/i)){var r=e(this.handles[n],this.element),i=0;i=/sw|ne|nw|se|n|s/.test(n)?r.outerHeight():r.outerWidth();var s=["padding",/ne|nw|n/.test(n)?"Top":/se|sw|s/.test(n)?"Bottom":/^e$/.test(n)?"Right":"Left"].join("");t.css(s,i),this._proportionallyResize()}if(!e(this.handles[n]).length)continue}},this._renderAxis(this.element),this._handles=e(".ui-resizable-handle",this.element).disableSelection(),this._handles.mouseover(function(){if(!t.resizing){if(this.className)var e=this.className.match(/ui-resizable-(se|sw|ne|nw|n|e|s|w)/i);t.axis=e&&e[1]?e[1]:"se"}}),n.autoHide&&(this._handles.hide(),e(this.element).addClass("ui-resizable-autohide").mouseenter(function(){if(n.disabled)return;e(this).removeClass("ui-resizable-autohide"),t._handles.show()}).mouseleave(function(){if(n.disabled)return;t.resizing||(e(this).addClass("ui-resizable-autohide"),t._handles.hide())})),this._mouseInit()},_destroy:function(){this._mouseDestroy();var t=function(t){e(t).removeClass("ui-resizable ui-resizable-disabled ui-resizable-resizing").removeData("resizable").removeData("ui-resizable").unbind(".resizable").find(".ui-resizable-handle").remove()};if(this.elementIsWrapper){t(this.element);var n=this.element;n.after(this.originalElement.css({position:n.css("position"),width:n.outerWidth(),height:n.outerHeight(),top:n.css("top"),left:n.css("left")})).remove()}return this.originalElement.css("resize",this.originalResizeStyle),t(this.originalElement),this},_mouseCapture:function(t){var n=!1;for(var r in this.handles)e(this.handles[r])[0]==t.target&&(n=!0);return!this.options.disabled&&n},_mouseStart:function(t){var r=this.options,i=this.element.position(),s=this.element;this.resizing=!0,this.documentScroll={top:e(document).scrollTop(),left:e(document).scrollLeft()},(s.is(".ui-draggable")||/absolute/.test(s.css("position")))&&s.css({position:"absolute",top:i.top,left:i.left}),this._renderProxy();var o=n(this.helper.css("left")),u=n(this.helper.css("top"));r.containment&&(o+=e(r.containment).scrollLeft()||0,u+=e(r.containment).scrollTop()||0),this.offset=this.helper.offset(),this.position={left:o,top:u},this.size=this._helper?{width:s.outerWidth(),height:s.outerHeight()}:{width:s.width(),height:s.height()},this.originalSize=this._helper?{width:s.outerWidth(),height:s.outerHeight()}:{width:s.width(),height:s.height()},this.originalPosition={left:o,top:u},this.sizeDiff={width:s.outerWidth()-s.width(),height:s.outerHeight()-s.height()},this.originalMousePosition={left:t.pageX,top:t.pageY},this.aspectRatio=typeof r.aspectRatio=="number"?r.aspectRatio:this.originalSize.width/this.originalSize.height||1;var a=e(".ui-resizable-"+this.axis).css("cursor");return e("body").css("cursor",a=="auto"?this.axis+"-resize":a),s.addClass("ui-resizable-resizing"),this._propagate("start",t),!0},_mouseDrag:function(e){var t=this.helper,n=this.options,r={},i=this,s=this.originalMousePosition,o=this.axis,u=e.pageX-s.left||0,a=e.pageY-s.top||0,f=this._change[o];if(!f)return!1;var l=f.apply(this,[e,u,a]);this._updateVirtualBoundaries(e.shiftKey);if(this._aspectRatio||e.shiftKey)l=this._updateRatio(l,e);return 
l=this._respectSize(l,e),this._propagate("resize",e),t.css({top:this.position.top+"px",left:this.position.left+"px",width:this.size.width+"px",height:this.size.height+"px"}),!this._helper&&this._proportionallyResizeElements.length&&this._proportionallyResize(),this._updateCache(l),this._trigger("resize",e,this.ui()),!1},_mouseStop:function(t){this.resizing=!1;var n=this.options,r=this;if(this._helper){var i=this._proportionallyResizeElements,s=i.length&&/textarea/i.test(i[0].nodeName),o=s&&e.ui.hasScroll(i[0],"left")?0:r.sizeDiff.height,u=s?0:r.sizeDiff.width,a={width:r.helper.width()-u,height:r.helper.height()-o},f=parseInt(r.element.css("left"),10)+(r.position.left-r.originalPosition.left)||null,l=parseInt(r.element.css("top"),10)+(r.position.top-r.originalPosition.top)||null;n.animate||this.element.css(e.extend(a,{top:l,left:f})),r.helper.height(r.size.height),r.helper.width(r.size.width),this._helper&&!n.animate&&this._proportionallyResize()}return e("body").css("cursor","auto"),this.element.removeClass("ui-resizable-resizing"),this._propagate("stop",t),this._helper&&this.helper.remove(),!1},_updateVirtualBoundaries:function(e){var t=this.options,n,i,s,o,u;u={minWidth:r(t.minWidth)?t.minWidth:0,maxWidth:r(t.maxWidth)?t.maxWidth:Infinity,minHeight:r(t.minHeight)?t.minHeight:0,maxHeight:r(t.maxHeight)?t.maxHeight:Infinity};if(this._aspectRatio||e)n=u.minHeight*this.aspectRatio,s=u.minWidth/this.aspectRatio,i=u.maxHeight*this.aspectRatio,o=u.maxWidth/this.aspectRatio,n>u.minWidth&&(u.minWidth=n),s>u.minHeight&&(u.minHeight=s),ie.width,l=r(e.height)&&i.minHeight&&i.minHeight>e.height;f&&(e.width=i.minWidth),l&&(e.height=i.minHeight),u&&(e.width=i.maxWidth),a&&(e.height=i.maxHeight);var c=this.originalPosition.left+this.originalSize.width,h=this.position.top+this.size.height,p=/sw|nw|w/.test(o),d=/nw|ne|n/.test(o);f&&p&&(e.left=c-i.minWidth),u&&p&&(e.left=c-i.maxWidth),l&&d&&(e.top=h-i.minHeight),a&&d&&(e.top=h-i.maxHeight);var v=!e.width&&!e.height;return v&&!e.left&&e.top?e.top=null:v&&!e.top&&e.left&&(e.left=null),e},_proportionallyResize:function(){var t=this.options;if(!this._proportionallyResizeElements.length)return;var n=this.helper||this.element;for(var r=0;r');var r=e.browser.msie&&e.browser.version<7,i=r?1:0,s=r?2:-1;this.helper.addClass(this._helper).css({width:this.element.outerWidth()+s,height:this.element.outerHeight()+s,position:"absolute",left:this.elementOffset.left-i+"px",top:this.elementOffset.top-i+"px",zIndex:++n.zIndex}),this.helper.appendTo("body").disableSelection()}else this.helper=this.element},_change:{e:function(e,t,n){return{width:this.originalSize.width+t}},w:function(e,t,n){var r=this.options,i=this.originalSize,s=this.originalPosition;return{left:s.left+t,width:i.width-t}},n:function(e,t,n){var r=this.options,i=this.originalSize,s=this.originalPosition;return{top:s.top+n,height:i.height-n}},s:function(e,t,n){return{height:this.originalSize.height+n}},se:function(t,n,r){return e.extend(this._change.s.apply(this,arguments),this._change.e.apply(this,[t,n,r]))},sw:function(t,n,r){return e.extend(this._change.s.apply(this,arguments),this._change.w.apply(this,[t,n,r]))},ne:function(t,n,r){return e.extend(this._change.n.apply(this,arguments),this._change.e.apply(this,[t,n,r]))},nw:function(t,n,r){return 
e.extend(this._change.n.apply(this,arguments),this._change.w.apply(this,[t,n,r]))}},_propagate:function(t,n){e.ui.plugin.call(this,t,[n,this.ui()]),t!="resize"&&this._trigger(t,n,this.ui())},plugins:{},ui:function(){return{originalElement:this.originalElement,element:this.element,helper:this.helper,position:this.position,size:this.size,originalSize:this.originalSize,originalPosition:this.originalPosition}}}),e.ui.plugin.add("resizable","alsoResize",{start:function(t,n){var r=e(this).data("resizable"),i=r.options,s=function(t){e(t).each(function(){var t=e(this);t.data("resizable-alsoresize",{width:parseInt(t.width(),10),height:parseInt(t.height(),10),left:parseInt(t.css("left"),10),top:parseInt(t.css("top"),10)})})};typeof i.alsoResize=="object"&&!i.alsoResize.parentNode?i.alsoResize.length?(i.alsoResize=i.alsoResize[0],s(i.alsoResize)):e.each(i.alsoResize,function(e){s(e)}):s(i.alsoResize)},resize:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.originalSize,o=r.originalPosition,u={height:r.size.height-s.height||0,width:r.size.width-s.width||0,top:r.position.top-o.top||0,left:r.position.left-o.left||0},a=function(t,r){e(t).each(function(){var t=e(this),i=e(this).data("resizable-alsoresize"),s={},o=r&&r.length?r:t.parents(n.originalElement[0]).length?["width","height"]:["width","height","top","left"];e.each(o,function(e,t){var n=(i[t]||0)+(u[t]||0);n&&n>=0&&(s[t]=n||null)}),t.css(s)})};typeof i.alsoResize=="object"&&!i.alsoResize.nodeType?e.each(i.alsoResize,function(e,t){a(e,t)}):a(i.alsoResize)},stop:function(t,n){e(this).removeData("resizable-alsoresize")}}),e.ui.plugin.add("resizable","animate",{stop:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r._proportionallyResizeElements,o=s.length&&/textarea/i.test(s[0].nodeName),u=o&&e.ui.hasScroll(s[0],"left")?0:r.sizeDiff.height,a=o?0:r.sizeDiff.width,f={width:r.size.width-a,height:r.size.height-u},l=parseInt(r.element.css("left"),10)+(r.position.left-r.originalPosition.left)||null,c=parseInt(r.element.css("top"),10)+(r.position.top-r.originalPosition.top)||null;r.element.animate(e.extend(f,c&&l?{top:c,left:l}:{}),{duration:i.animateDuration,easing:i.animateEasing,step:function(){var n={width:parseInt(r.element.css("width"),10),height:parseInt(r.element.css("height"),10),top:parseInt(r.element.css("top"),10),left:parseInt(r.element.css("left"),10)};s&&s.length&&e(s[0]).css({width:n.width,height:n.height}),r._updateCache(n),r._propagate("resize",t)}})}}),e.ui.plugin.add("resizable","containment",{start:function(t,r){var i=e(this).data("resizable"),s=i.options,o=i.element,u=s.containment,a=u instanceof e?u.get(0):/parent/.test(u)?o.parent().get(0):u;if(!a)return;i.containerElement=e(a);if(/document/.test(u)||u==document)i.containerOffset={left:0,top:0},i.containerPosition={left:0,top:0},i.parentData={element:e(document),left:0,top:0,width:e(document).width(),height:e(document).height()||document.body.parentNode.scrollHeight};else{var f=e(a),l=[];e(["Top","Right","Left","Bottom"]).each(function(e,t){l[e]=n(f.css("padding"+t))}),i.containerOffset=f.offset(),i.containerPosition=f.position(),i.containerSize={height:f.innerHeight()-l[3],width:f.innerWidth()-l[1]};var c=i.containerOffset,h=i.containerSize.height,p=i.containerSize.width,d=e.ui.hasScroll(a,"left")?a.scrollWidth:p,v=e.ui.hasScroll(a)?a.scrollHeight:h;i.parentData={element:a,left:c.left,top:c.top,width:d,height:v}}},resize:function(t,n){var 
r=e(this).data("resizable"),i=r.options,s=r.containerSize,o=r.containerOffset,u=r.size,a=r.position,f=r._aspectRatio||t.shiftKey,l={top:0,left:0},c=r.containerElement;c[0]!=document&&/static/.test(c.css("position"))&&(l=o),a.left<(r._helper?o.left:0)&&(r.size.width=r.size.width+(r._helper?r.position.left-o.left:r.position.left-l.left),f&&(r.size.height=r.size.width/r.aspectRatio),r.position.left=i.helper?o.left:0),a.top<(r._helper?o.top:0)&&(r.size.height=r.size.height+(r._helper?r.position.top-o.top:r.position.top),f&&(r.size.width=r.size.height*r.aspectRatio),r.position.top=r._helper?o.top:0),r.offset.left=r.parentData.left+r.position.left,r.offset.top=r.parentData.top+r.position.top;var h=Math.abs((r._helper?r.offset.left-l.left:r.offset.left-l.left)+r.sizeDiff.width),p=Math.abs((r._helper?r.offset.top-l.top:r.offset.top-o.top)+r.sizeDiff.height),d=r.containerElement.get(0)==r.element.parent().get(0),v=/relative|absolute/.test(r.containerElement.css("position"));d&&v&&(h-=r.parentData.left),h+r.size.width>=r.parentData.width&&(r.size.width=r.parentData.width-h,f&&(r.size.height=r.size.width/r.aspectRatio)),p+r.size.height>=r.parentData.height&&(r.size.height=r.parentData.height-p,f&&(r.size.width=r.size.height*r.aspectRatio))},stop:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.position,o=r.containerOffset,u=r.containerPosition,a=r.containerElement,f=e(r.helper),l=f.offset(),c=f.outerWidth()-r.sizeDiff.width,h=f.outerHeight()-r.sizeDiff.height;r._helper&&!i.animate&&/relative/.test(a.css("position"))&&e(this).css({left:l.left-u.left-o.left,width:c,height:h}),r._helper&&!i.animate&&/static/.test(a.css("position"))&&e(this).css({left:l.left-u.left-o.left,width:c,height:h})}}),e.ui.plugin.add("resizable","ghost",{start:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.size;r.ghost=r.originalElement.clone(),r.ghost.css({opacity:.25,display:"block",position:"relative",height:s.height,width:s.width,margin:0,left:0,top:0}).addClass("ui-resizable-ghost").addClass(typeof i.ghost=="string"?i.ghost:""),r.ghost.appendTo(r.helper)},resize:function(t,n){var r=e(this).data("resizable"),i=r.options;r.ghost&&r.ghost.css({position:"relative",height:r.size.height,width:r.size.width})},stop:function(t,n){var r=e(this).data("resizable"),i=r.options;r.ghost&&r.helper&&r.helper.get(0).removeChild(r.ghost.get(0))}}),e.ui.plugin.add("resizable","grid",{resize:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.size,o=r.originalSize,u=r.originalPosition,a=r.axis,f=i._aspectRatio||t.shiftKey;i.grid=typeof i.grid=="number"?[i.grid,i.grid]:i.grid;var l=Math.round((s.width-o.width)/(i.grid[0]||1))*(i.grid[0]||1),c=Math.round((s.height-o.height)/(i.grid[1]||1))*(i.grid[1]||1);/^(se|s|e)$/.test(a)?(r.size.width=o.width+l,r.size.height=o.height+c):/^(ne)$/.test(a)?(r.size.width=o.width+l,r.size.height=o.height+c,r.position.top=u.top-c):/^(sw)$/.test(a)?(r.size.width=o.width+l,r.size.height=o.height+c,r.position.left=u.left-l):(r.size.width=o.width+l,r.size.height=o.height+c,r.position.top=u.top-c,r.position.left=u.left-l)}});var n=function(e){return parseInt(e,10)||0},r=function(e){return!isNaN(parseInt(e,10))}})(jQuery);(function(e,t){e.widget("ui.selectable",e.ui.mouse,{version:"1.9.0",options:{appendTo:"body",autoRefresh:!0,distance:0,filter:"*",tolerance:"touch"},_create:function(){var t=this;this.element.addClass("ui-selectable"),this.dragged=!1;var n;this.refresh=function(){n=e(t.options.filter,t.element[0]),n.addClass("ui-selectee"),n.each(function(){var 
t=e(this),n=t.offset();e.data(this,"selectable-item",{element:this,$element:t,left:n.left,top:n.top,right:n.left+t.outerWidth(),bottom:n.top+t.outerHeight(),startselected:!1,selected:t.hasClass("ui-selected"),selecting:t.hasClass("ui-selecting"),unselecting:t.hasClass("ui-unselecting")})})},this.refresh(),this.selectees=n.addClass("ui-selectee"),this._mouseInit(),this.helper=e("
    ")},_destroy:function(){this.selectees.removeClass("ui-selectee").removeData("selectable-item"),this.element.removeClass("ui-selectable ui-selectable-disabled"),this._mouseDestroy()},_mouseStart:function(t){var n=this;this.opos=[t.pageX,t.pageY];if(this.options.disabled)return;var r=this.options;this.selectees=e(r.filter,this.element[0]),this._trigger("start",t),e(r.appendTo).append(this.helper),this.helper.css({left:t.clientX,top:t.clientY,width:0,height:0}),r.autoRefresh&&this.refresh(),this.selectees.filter(".ui-selected").each(function(){var r=e.data(this,"selectable-item");r.startselected=!0,!t.metaKey&&!t.ctrlKey&&(r.$element.removeClass("ui-selected"),r.selected=!1,r.$element.addClass("ui-unselecting"),r.unselecting=!0,n._trigger("unselecting",t,{unselecting:r.element}))}),e(t.target).parents().andSelf().each(function(){var r=e.data(this,"selectable-item");if(r){var i=!t.metaKey&&!t.ctrlKey||!r.$element.hasClass("ui-selected");return r.$element.removeClass(i?"ui-unselecting":"ui-selected").addClass(i?"ui-selecting":"ui-unselecting"),r.unselecting=!i,r.selecting=i,r.selected=i,i?n._trigger("selecting",t,{selecting:r.element}):n._trigger("unselecting",t,{unselecting:r.element}),!1}})},_mouseDrag:function(t){var n=this;this.dragged=!0;if(this.options.disabled)return;var r=this.options,i=this.opos[0],s=this.opos[1],o=t.pageX,u=t.pageY;if(i>o){var a=o;o=i,i=a}if(s>u){var a=u;u=s,s=a}return this.helper.css({left:i,top:s,width:o-i,height:u-s}),this.selectees.each(function(){var a=e.data(this,"selectable-item");if(!a||a.element==n.element[0])return;var f=!1;r.tolerance=="touch"?f=!(a.left>o||a.rightu||a.bottomi&&a.rights&&a.bottom").appendTo(this.element).addClass("ui-slider-range ui-widget-header"+(r.range==="min"||r.range==="max"?" ui-slider-range-"+r.range:"")));for(t=i.length;tn&&(i=n,s=e(this),o=t)}),c.range===!0&&this.values(1)===c.min&&(o+=1,s=e(this.handles[o])),u=this._start(t,o),u===!1?!1:(this._mouseSliding=!0,this._handleIndex=o,s.addClass("ui-state-active").focus(),a=s.offset(),f=!e(t.target).parents().andSelf().is(".ui-slider-handle"),this._clickOffset=f?{left:0,top:0}:{left:t.pageX-a.left-s.width()/2,top:t.pageY-a.top-s.height()/2-(parseInt(s.css("borderTopWidth"),10)||0)-(parseInt(s.css("borderBottomWidth"),10)||0)+(parseInt(s.css("marginTop"),10)||0)},this.handles.hasClass("ui-state-hover")||this._slide(t,o,r),this._animateOff=!0,!0))},_mouseStart:function(e){return!0},_mouseDrag:function(e){var t={x:e.pageX,y:e.pageY},n=this._normValueFromMouse(t);return this._slide(e,this._handleIndex,n),!1},_mouseStop:function(e){return this.handles.removeClass("ui-state-active"),this._mouseSliding=!1,this._stop(e,this._handleIndex),this._change(e,this._handleIndex),this._handleIndex=null,this._clickOffset=null,this._animateOff=!1,!1},_detectOrientation:function(){this.orientation=this.options.orientation==="vertical"?"vertical":"horizontal"},_normValueFromMouse:function(e){var t,n,r,i,s;return this.orientation==="horizontal"?(t=this.elementSize.width,n=e.x-this.elementOffset.left-(this._clickOffset?this._clickOffset.left:0)):(t=this.elementSize.height,n=e.y-this.elementOffset.top-(this._clickOffset?this._clickOffset.top:0)),r=n/t,r>1&&(r=1),r<0&&(r=0),this.orientation==="vertical"&&(r=1-r),i=this._valueMax()-this._valueMin(),s=this._valueMin()+r*i,this._trimAlignValue(s)},_start:function(e,t){var n={handle:this.handles[t],value:this.value()};return 
this.options.values&&this.options.values.length&&(n.value=this.values(t),n.values=this.values()),this._trigger("start",e,n)},_slide:function(e,t,n){var r,i,s;this.options.values&&this.options.values.length?(r=this.values(t?0:1),this.options.values.length===2&&this.options.range===!0&&(t===0&&n>r||t===1&&n1){this.options.values[t]=this._trimAlignValue(n),this._refreshValue(),this._change(null,t);return}if(!arguments.length)return this._values();if(!e.isArray(arguments[0]))return this.options.values&&this.options.values.length?this._values(t):this.value();r=this.options.values,i=arguments[0];for(s=0;s=this._valueMax())return this._valueMax();var t=this.options.step>0?this.options.step:1,n=(e-this._valueMin())%t,r=e-n;return Math.abs(n)*2>=t&&(r+=n>0?t:-t),parseFloat(r.toFixed(5))},_valueMin:function(){return this.options.min},_valueMax:function(){return this.options.max},_refreshValue:function(){var t,n,r,i,s,o=this.options.range,u=this.options,a=this,f=this._animateOff?!1:u.animate,l={};this.options.values&&this.options.values.length?this.handles.each(function(r,i){n=(a.values(r)-a._valueMin())/(a._valueMax()-a._valueMin())*100,l[a.orientation==="horizontal"?"left":"bottom"]=n+"%",e(this).stop(1,1)[f?"animate":"css"](l,u.animate),a.options.range===!0&&(a.orientation==="horizontal"?(r===0&&a.range.stop(1,1)[f?"animate":"css"]({left:n+"%"},u.animate),r===1&&a.range[f?"animate":"css"]({width:n-t+"%"},{queue:!1,duration:u.animate})):(r===0&&a.range.stop(1,1)[f?"animate":"css"]({bottom:n+"%"},u.animate),r===1&&a.range[f?"animate":"css"]({height:n-t+"%"},{queue:!1,duration:u.animate}))),t=n}):(r=this.value(),i=this._valueMin(),s=this._valueMax(),n=s!==i?(r-i)/(s-i)*100:0,l[this.orientation==="horizontal"?"left":"bottom"]=n+"%",this.handle.stop(1,1)[f?"animate":"css"](l,u.animate),o==="min"&&this.orientation==="horizontal"&&this.range.stop(1,1)[f?"animate":"css"]({width:n+"%"},u.animate),o==="max"&&this.orientation==="horizontal"&&this.range[f?"animate":"css"]({width:100-n+"%"},{queue:!1,duration:u.animate}),o==="min"&&this.orientation==="vertical"&&this.range.stop(1,1)[f?"animate":"css"]({height:n+"%"},u.animate),o==="max"&&this.orientation==="vertical"&&this.range[f?"animate":"css"]({height:100-n+"%"},{queue:!1,duration:u.animate}))}})})(jQuery);(function(e,t){e.widget("ui.sortable",e.ui.mouse,{version:"1.9.0",widgetEventPrefix:"sort",ready:!1,options:{appendTo:"parent",axis:!1,connectWith:!1,containment:!1,cursor:"auto",cursorAt:!1,dropOnEmpty:!0,forcePlaceholderSize:!1,forceHelperSize:!1,grid:!1,handle:!1,helper:"original",items:"> *",opacity:!1,placeholder:!1,revert:!1,scroll:!0,scrollSensitivity:20,scrollSpeed:20,scope:"default",tolerance:"intersect",zIndex:1e3},_create:function(){var e=this.options;this.containerCache={},this.element.addClass("ui-sortable"),this.refresh(),this.floating=this.items.length?e.axis==="x"||/left|right/.test(this.items[0].item.css("float"))||/inline|table-cell/.test(this.items[0].item.css("display")):!1,this.offset=this.element.offset(),this._mouseInit(),this.ready=!0},_destroy:function(){this.element.removeClass("ui-sortable ui-sortable-disabled"),this._mouseDestroy();for(var e=this.items.length-1;e>=0;e--)this.items[e].item.removeData(this.widgetName+"-item");return this},_setOption:function(t,n){t==="disabled"?(this.options[t]=n,this.widget().toggleClass("ui-sortable-disabled",!!n)):e.Widget.prototype._setOption.apply(this,arguments)},_mouseCapture:function(t,n){var 
r=this;if(this.reverting)return!1;if(this.options.disabled||this.options.type=="static")return!1;this._refreshItems(t);var i=null,s=e(t.target).parents().each(function(){if(e.data(this,r.widgetName+"-item")==r)return i=e(this),!1});e.data(t.target,r.widgetName+"-item")==r&&(i=e(t.target));if(!i)return!1;if(this.options.handle&&!n){var o=!1;e(this.options.handle,i).find("*").andSelf().each(function(){this==t.target&&(o=!0)});if(!o)return!1}return this.currentItem=i,this._removeCurrentsFromItems(),!0},_mouseStart:function(t,n,r){var i=this.options;this.currentContainer=this,this.refreshPositions(),this.helper=this._createHelper(t),this._cacheHelperProportions(),this._cacheMargins(),this.scrollParent=this.helper.scrollParent(),this.offset=this.currentItem.offset(),this.offset={top:this.offset.top-this.margins.top,left:this.offset.left-this.margins.left},e.extend(this.offset,{click:{left:t.pageX-this.offset.left,top:t.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()}),this.helper.css("position","absolute"),this.cssPosition=this.helper.css("position"),this.originalPosition=this._generatePosition(t),this.originalPageX=t.pageX,this.originalPageY=t.pageY,i.cursorAt&&this._adjustOffsetFromHelper(i.cursorAt),this.domPosition={prev:this.currentItem.prev()[0],parent:this.currentItem.parent()[0]},this.helper[0]!=this.currentItem[0]&&this.currentItem.hide(),this._createPlaceholder(),i.containment&&this._setContainment(),i.cursor&&(e("body").css("cursor")&&(this._storedCursor=e("body").css("cursor")),e("body").css("cursor",i.cursor)),i.opacity&&(this.helper.css("opacity")&&(this._storedOpacity=this.helper.css("opacity")),this.helper.css("opacity",i.opacity)),i.zIndex&&(this.helper.css("zIndex")&&(this._storedZIndex=this.helper.css("zIndex")),this.helper.css("zIndex",i.zIndex)),this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML"&&(this.overflowOffset=this.scrollParent.offset()),this._trigger("start",t,this._uiHash()),this._preserveHelperProportions||this._cacheHelperProportions();if(!r)for(var s=this.containers.length-1;s>=0;s--)this.containers[s]._trigger("activate",t,this._uiHash(this));return e.ui.ddmanager&&(e.ui.ddmanager.current=this),e.ui.ddmanager&&!i.dropBehaviour&&e.ui.ddmanager.prepareOffsets(this,t),this.dragging=!0,this.helper.addClass("ui-sortable-helper"),this._mouseDrag(t),!0},_mouseDrag:function(t){this.position=this._generatePosition(t),this.positionAbs=this._convertPositionTo("absolute"),this.lastPositionAbs||(this.lastPositionAbs=this.positionAbs);if(this.options.scroll){var n=this.options,r=!1;this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML"?(this.overflowOffset.top+this.scrollParent[0].offsetHeight-t.pageY=0;i--){var s=this.items[i],o=s.item[0],u=this._intersectsWithPointer(s);if(!u)continue;if(s.instance!==this.currentContainer)continue;if(o!=this.currentItem[0]&&this.placeholder[u==1?"next":"prev"]()[0]!=o&&!e.contains(this.placeholder[0],o)&&(this.options.type=="semi-dynamic"?!e.contains(this.element[0],o):!0)){this.direction=u==1?"down":"up";if(this.options.tolerance!="pointer"&&!this._intersectsWithSides(s))break;this._rearrange(t,s),this._trigger("change",t,this._uiHash());break}}return this._contactContainers(t),e.ui.ddmanager&&e.ui.ddmanager.drag(this,t),this._trigger("sort",t,this._uiHash()),this.lastPositionAbs=this.positionAbs,!1},_mouseStop:function(t,n){if(!t)return;e.ui.ddmanager&&!this.options.dropBehaviour&&e.ui.ddmanager.drop(this,t);if(this.options.revert){var 
r=this,i=this.placeholder.offset();this.reverting=!0,e(this.helper).animate({left:i.left-this.offset.parent.left-this.margins.left+(this.offsetParent[0]==document.body?0:this.offsetParent[0].scrollLeft),top:i.top-this.offset.parent.top-this.margins.top+(this.offsetParent[0]==document.body?0:this.offsetParent[0].scrollTop)},parseInt(this.options.revert,10)||500,function(){r._clear(t)})}else this._clear(t,n);return!1},cancel:function(){if(this.dragging){this._mouseUp({target:null}),this.options.helper=="original"?this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper"):this.currentItem.show();for(var t=this.containers.length-1;t>=0;t--)this.containers[t]._trigger("deactivate",null,this._uiHash(this)),this.containers[t].containerCache.over&&(this.containers[t]._trigger("out",null,this._uiHash(this)),this.containers[t].containerCache.over=0)}return this.placeholder&&(this.placeholder[0].parentNode&&this.placeholder[0].parentNode.removeChild(this.placeholder[0]),this.options.helper!="original"&&this.helper&&this.helper[0].parentNode&&this.helper.remove(),e.extend(this,{helper:null,dragging:!1,reverting:!1,_noFinalSort:null}),this.domPosition.prev?e(this.domPosition.prev).after(this.currentItem):e(this.domPosition.parent).prepend(this.currentItem)),this},serialize:function(t){var n=this._getItemsAsjQuery(t&&t.connected),r=[];return t=t||{},e(n).each(function(){var n=(e(t.item||this).attr(t.attribute||"id")||"").match(t.expression||/(.+)[-=_](.+)/);n&&r.push((t.key||n[1]+"[]")+"="+(t.key&&t.expression?n[1]:n[2]))}),!r.length&&t.key&&r.push(t.key+"="),r.join("&")},toArray:function(t){var n=this._getItemsAsjQuery(t&&t.connected),r=[];return t=t||{},n.each(function(){r.push(e(t.item||this).attr(t.attribute||"id")||"")}),r},_intersectsWith:function(e){var t=this.positionAbs.left,n=t+this.helperProportions.width,r=this.positionAbs.top,i=r+this.helperProportions.height,s=e.left,o=s+e.width,u=e.top,a=u+e.height,f=this.offset.click.top,l=this.offset.click.left,c=r+f>u&&r+fs&&t+le[this.floating?"width":"height"]?c:s0?"down":"up")},_getDragHorizontalDirection:function(){var e=this.positionAbs.left-this.lastPositionAbs.left;return e!=0&&(e>0?"right":"left")},refresh:function(e){return this._refreshItems(e),this.refreshPositions(),this},_connectWith:function(){var e=this.options;return e.connectWith.constructor==String?[e.connectWith]:e.connectWith},_getItemsAsjQuery:function(t){var n=[],r=[],i=this._connectWith();if(i&&t)for(var s=i.length-1;s>=0;s--){var o=e(i[s]);for(var u=o.length-1;u>=0;u--){var a=e.data(o[u],this.widgetName);a&&a!=this&&!a.options.disabled&&r.push([e.isFunction(a.options.items)?a.options.items.call(a.element):e(a.options.items,a.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"),a])}}r.push([e.isFunction(this.options.items)?this.options.items.call(this.element,null,{options:this.options,item:this.currentItem}):e(this.options.items,this.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"),this]);for(var s=r.length-1;s>=0;s--)r[s][0].each(function(){n.push(this)});return e(n)},_removeCurrentsFromItems:function(){var e=this.currentItem.find(":data("+this.widgetName+"-item)");for(var t=0;t=0;s--){var o=e(i[s]);for(var u=o.length-1;u>=0;u--){var a=e.data(o[u],this.widgetName);a&&a!=this&&!a.options.disabled&&(r.push([e.isFunction(a.options.items)?a.options.items.call(a.element[0],t,{item:this.currentItem}):e(a.options.items,a.element),a]),this.containers.push(a))}}for(var s=r.length-1;s>=0;s--){var f=r[s][1],l=r[s][0];for(var 
u=0,c=l.length;u=0;n--){var r=this.items[n];if(r.instance!=this.currentContainer&&this.currentContainer&&r.item[0]!=this.currentItem[0])continue;var i=this.options.toleranceElement?e(this.options.toleranceElement,r.item):r.item;t||(r.width=i.outerWidth(),r.height=i.outerHeight());var s=i.offset();r.left=s.left,r.top=s.top}if(this.options.custom&&this.options.custom.refreshContainers)this.options.custom.refreshContainers.call(this);else for(var n=this.containers.length-1;n>=0;n--){var s=this.containers[n].element.offset();this.containers[n].containerCache.left=s.left,this.containers[n].containerCache.top=s.top,this.containers[n].containerCache.width=this.containers[n].element.outerWidth(),this.containers[n].containerCache.height=this.containers[n].element.outerHeight()}return this},_createPlaceholder:function(t){t=t||this;var n=t.options;if(!n.placeholder||n.placeholder.constructor==String){var r=n.placeholder;n.placeholder={element:function(){var n=e(document.createElement(t.currentItem[0].nodeName)).addClass(r||t.currentItem[0].className+" ui-sortable-placeholder").removeClass("ui-sortable-helper")[0];return r||(n.style.visibility="hidden"),n},update:function(e,i){if(r&&!n.forcePlaceholderSize)return;i.height()||i.height(t.currentItem.innerHeight()-parseInt(t.currentItem.css("paddingTop")||0,10)-parseInt(t.currentItem.css("paddingBottom")||0,10)),i.width()||i.width(t.currentItem.innerWidth()-parseInt(t.currentItem.css("paddingLeft")||0,10)-parseInt(t.currentItem.css("paddingRight")||0,10))}}}t.placeholder=e(n.placeholder.element.call(t.element,t.currentItem)),t.currentItem.after(t.placeholder),n.placeholder.update(t,t.placeholder)},_contactContainers:function(t){var n=null,r=null;for(var i=this.containers.length-1;i>=0;i--){if(e.contains(this.currentItem[0],this.containers[i].element[0]))continue;if(this._intersectsWith(this.containers[i].containerCache)){if(n&&e.contains(this.containers[i].element[0],n.element[0]))continue;n=this.containers[i],r=i}else this.containers[i].containerCache.over&&(this.containers[i]._trigger("out",t,this._uiHash(this)),this.containers[i].containerCache.over=0)}if(!n)return;if(this.containers.length===1)this.containers[r]._trigger("over",t,this._uiHash(this)),this.containers[r].containerCache.over=1;else if(this.currentContainer!=this.containers[r]){var s=1e4,o=null,u=this.positionAbs[this.containers[r].floating?"left":"top"];for(var a=this.items.length-1;a>=0;a--){if(!e.contains(this.containers[r].element[0],this.items[a].item[0]))continue;var f=this.containers[r].floating?this.items[a].item.offset().left:this.items[a].item.offset().top;Math.abs(f-u)0?"down":"up")}if(!o&&!this.options.dropOnEmpty)return;this.currentContainer=this.containers[r],o?this._rearrange(t,o,null,!0):this._rearrange(t,null,this.containers[r].element,!0),this._trigger("change",t,this._uiHash()),this.containers[r]._trigger("change",t,this._uiHash(this)),this.options.placeholder.update(this.currentContainer,this.placeholder),this.containers[r]._trigger("over",t,this._uiHash(this)),this.containers[r].containerCache.over=1}},_createHelper:function(t){var n=this.options,r=e.isFunction(n.helper)?e(n.helper.apply(this.element[0],[t,this.currentItem])):n.helper=="clone"?this.currentItem.clone():this.currentItem;return 
r.parents("body").length||e(n.appendTo!="parent"?n.appendTo:this.currentItem[0].parentNode)[0].appendChild(r[0]),r[0]==this.currentItem[0]&&(this._storedCSS={width:this.currentItem[0].style.width,height:this.currentItem[0].style.height,position:this.currentItem.css("position"),top:this.currentItem.css("top"),left:this.currentItem.css("left")}),(r[0].style.width==""||n.forceHelperSize)&&r.width(this.currentItem.width()),(r[0].style.height==""||n.forceHelperSize)&&r.height(this.currentItem.height()),r},_adjustOffsetFromHelper:function(t){typeof t=="string"&&(t=t.split(" ")),e.isArray(t)&&(t={left:+t[0],top:+t[1]||0}),"left"in t&&(this.offset.click.left=t.left+this.margins.left),"right"in t&&(this.offset.click.left=this.helperProportions.width-t.right+this.margins.left),"top"in t&&(this.offset.click.top=t.top+this.margins.top),"bottom"in t&&(this.offset.click.top=this.helperProportions.height-t.bottom+this.margins.top)},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var t=this.offsetParent.offset();this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&e.contains(this.scrollParent[0],this.offsetParent[0])&&(t.left+=this.scrollParent.scrollLeft(),t.top+=this.scrollParent.scrollTop());if(this.offsetParent[0]==document.body||this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&e.browser.msie)t={top:0,left:0};return{top:t.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:t.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var e=this.currentItem.position();return{top:e.top-(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:e.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}return{top:0,left:0}},_cacheMargins:function(){this.margins={left:parseInt(this.currentItem.css("marginLeft"),10)||0,top:parseInt(this.currentItem.css("marginTop"),10)||0}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var t=this.options;t.containment=="parent"&&(t.containment=this.helper[0].parentNode);if(t.containment=="document"||t.containment=="window")this.containment=[0-this.offset.relative.left-this.offset.parent.left,0-this.offset.relative.top-this.offset.parent.top,e(t.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(e(t.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top];if(!/^(document|window|parent)$/.test(t.containment)){var n=e(t.containment)[0],r=e(t.containment).offset(),i=e(n).css("overflow")!="hidden";this.containment=[r.left+(parseInt(e(n).css("borderLeftWidth"),10)||0)+(parseInt(e(n).css("paddingLeft"),10)||0)-this.margins.left,r.top+(parseInt(e(n).css("borderTopWidth"),10)||0)+(parseInt(e(n).css("paddingTop"),10)||0)-this.margins.top,r.left+(i?Math.max(n.scrollWidth,n.offsetWidth):n.offsetWidth)-(parseInt(e(n).css("borderLeftWidth"),10)||0)-(parseInt(e(n).css("paddingRight"),10)||0)-this.helperProportions.width-this.margins.left,r.top+(i?Math.max(n.scrollHeight,n.offsetHeight):n.offsetHeight)-(parseInt(e(n).css("borderTopWidth"),10)||0)-(parseInt(e(n).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top]}},_convertPositionTo:function(t,n){n||(n=this.position);var 
r=t=="absolute"?1:-1,i=this.options,s=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,o=/(html|body)/i.test(s[0].tagName);return{top:n.top+this.offset.relative.top*r+this.offset.parent.top*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():o?0:s.scrollTop())*r,left:n.left+this.offset.relative.left*r+this.offset.parent.left*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():o?0:s.scrollLeft())*r}},_generatePosition:function(t){var n=this.options,r=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,i=/(html|body)/i.test(r[0].tagName);this.cssPosition=="relative"&&(this.scrollParent[0]==document||this.scrollParent[0]==this.offsetParent[0])&&(this.offset.relative=this._getRelativeOffset());var s=t.pageX,o=t.pageY;if(this.originalPosition){this.containment&&(t.pageX-this.offset.click.leftthis.containment[2]&&(s=this.containment[2]+this.offset.click.left),t.pageY-this.offset.click.top>this.containment[3]&&(o=this.containment[3]+this.offset.click.top));if(n.grid){var u=this.originalPageY+Math.round((o-this.originalPageY)/n.grid[1])*n.grid[1];o=this.containment?u-this.offset.click.topthis.containment[3]?u-this.offset.click.topthis.containment[2]?a-this.offset.click.left=0;i--)n||r.push(function(e){return function(t){e._trigger("deactivate",t,this._uiHash(this))}}.call(this,this.containers[i])),this.containers[i].containerCache.over&&(r.push(function(e){return function(t){e._trigger("out",t,this._uiHash(this))}}.call(this,this.containers[i])),this.containers[i].containerCache.over=0);this._storedCursor&&e("body").css("cursor",this._storedCursor),this._storedOpacity&&this.helper.css("opacity",this._storedOpacity),this._storedZIndex&&this.helper.css("zIndex",this._storedZIndex=="auto"?"":this._storedZIndex),this.dragging=!1;if(this.cancelHelperRemoval){if(!n){this._trigger("beforeStop",t,this._uiHash());for(var i=0;i",widgetEventPrefix:"spin",options:{culture:null,icons:{down:"ui-icon-triangle-1-s",up:"ui-icon-triangle-1-n"},incremental:!0,max:null,min:null,numberFormat:null,page:10,step:1,change:null,spin:null,start:null,stop:null},_create:function(){this._setOption("max",this.options.max),this._setOption("min",this.options.min),this._setOption("step",this.options.step),this._value(this.element.val(),!0),this._draw(),this._on(this._events),this._refresh(),this._on(this.window,{beforeunload:function(){this.element.removeAttr("autocomplete")}})},_getCreateOptions:function(){var t={},n=this.element;return e.each(["min","max","step"],function(e,r){var i=n.attr(r);i!==undefined&&i.length&&(t[r]=i)}),t},_events:{keydown:function(e){this._start(e)&&this._keydown(e)&&e.preventDefault()},keyup:"_stop",focus:function(){this.uiSpinner.addClass("ui-state-active"),this.previous=this.element.val()},blur:function(e){if(this.cancelBlur){delete this.cancelBlur;return}this._refresh(),this.uiSpinner.removeClass("ui-state-active"),this.previous!==this.element.val()&&this._trigger("change",e)},mousewheel:function(e,t){if(!t)return;if(!this.spinning&&!this._start(e))return!1;this._spin((t>0?1:-1)*this.options.step,e),clearTimeout(this.mousewheelTimer),this.mousewheelTimer=this._delay(function(){this.spinning&&this._stop(e)},100),e.preventDefault()},"mousedown .ui-spinner-button":function(t){function r(){var 
e=this.element[0]===this.document[0].activeElement;e||(this.element.focus(),this.previous=n,this._delay(function(){this.previous=n}))}var n;n=this.element[0]===this.document[0].activeElement?this.previous:this.element.val(),t.preventDefault(),r.call(this),this.cancelBlur=!0,this._delay(function(){delete this.cancelBlur,r.call(this)});if(this._start(t)===!1)return;this._repeat(null,e(t.currentTarget).hasClass("ui-spinner-up")?1:-1,t)},"mouseup .ui-spinner-button":"_stop","mouseenter .ui-spinner-button":function(t){if(!e(t.currentTarget).hasClass("ui-state-active"))return;if(this._start(t)===!1)return!1;this._repeat(null,e(t.currentTarget).hasClass("ui-spinner-up")?1:-1,t)},"mouseleave .ui-spinner-button":"_stop"},_draw:function(){var e=this.uiSpinner=this.element.addClass("ui-spinner-input").attr("autocomplete","off").wrap(this._uiSpinnerHtml()).parent().append(this._buttonHtml());this._hoverable(e),this.element.attr("role","spinbutton"),this.buttons=e.find(".ui-spinner-button").attr("tabIndex",-1).button().removeClass("ui-corner-all"),this.buttons.height()>Math.ceil(e.height()*.5)&&e.height()>0&&e.height(e.height()),this.options.disabled&&this.disable()},_keydown:function(t){var n=this.options,r=e.ui.keyCode;switch(t.keyCode){case r.UP:return this._repeat(null,1,t),!0;case r.DOWN:return this._repeat(null,-1,t),!0;case r.PAGE_UP:return this._repeat(null,n.page,t),!0;case r.PAGE_DOWN:return this._repeat(null,-n.page,t),!0}return!1},_uiSpinnerHtml:function(){return""},_buttonHtml:function(){return""+""+""+""+""},_start:function(e){return!this.spinning&&this._trigger("start",e)===!1?!1:(this.counter||(this.counter=1),this.spinning=!0,!0)},_repeat:function(e,t,n){e=e||500,clearTimeout(this.timer),this.timer=this._delay(function(){this._repeat(40,t,n)},e),this._spin(t*this.options.step,n)},_spin:function(e,t){var n=this.value()||0;this.counter||(this.counter=1),n=this._adjustValue(n+e*this._increment(this.counter));if(!this.spinning||this._trigger("spin",t,{value:n})!==!1)this._value(n),this.counter++},_increment:function(t){var n=this.options.incremental;return n?e.isFunction(n)?n(t):Math.floor(t*t*t/5e4-t*t/500+17*t/200+1):1},_precision:function(){var e=this._precisionOf(this.options.step);return this.options.min!==null&&(e=Math.max(e,this._precisionOf(this.options.min))),e},_precisionOf:function(e){var t=e.toString(),n=t.indexOf(".");return n===-1?0:t.length-n-1},_adjustValue:function(e){var t,n,r=this.options;return t=r.min!==null?r.min:0,n=e-t,n=Math.round(n/r.step)*r.step,e=t+n,e=parseFloat(e.toFixed(this._precision())),r.max!==null&&e>r.max?r.max:r.min!==null&&e1&&e.href.replace(r,"")===location.href.replace(r,"")}var n=0,r=/#.*$/;e.widget("ui.tabs",{version:"1.9.0",delay:300,options:{active:null,collapsible:!1,event:"click",heightStyle:"content",hide:null,show:null,activate:null,beforeActivate:null,beforeLoad:null,load:null},_create:function(){var t,n=this,r=this.options,i=r.active;this.running=!1,this.element.addClass("ui-tabs ui-widget ui-widget-content ui-corner-all").toggleClass("ui-tabs-collapsible",r.collapsible).delegate(".ui-tabs-nav > li","mousedown"+this.eventNamespace,function(t){e(this).is(".ui-state-disabled")&&t.preventDefault()}).delegate(".ui-tabs-anchor","focus"+this.eventNamespace,function(){e(this).closest("li").is(".ui-state-disabled")&&this.blur()}),this._processTabs();if(i===null){location.hash&&this.anchors.each(function(e,t){if(t.hash===location.hash)return 
i=e,!1}),i===null&&(i=this.tabs.filter(".ui-tabs-active").index());if(i===null||i===-1)i=this.tabs.length?0:!1}i!==!1&&(i=this.tabs.index(this.tabs.eq(i)),i===-1&&(i=r.collapsible?!1:0)),r.active=i,!r.collapsible&&r.active===!1&&this.anchors.length&&(r.active=0),e.isArray(r.disabled)&&(r.disabled=e.unique(r.disabled.concat(e.map(this.tabs.filter(".ui-state-disabled"),function(e){return n.tabs.index(e)}))).sort()),this.options.active!==!1&&this.anchors.length?this.active=this._findActive(this.options.active):this.active=e(),this._refresh(),this.active.length&&this.load(r.active)},_getCreateEventData:function(){return{tab:this.active,panel:this.active.length?this._getPanelForTab(this.active):e()}},_tabKeydown:function(t){var n=e(this.document[0].activeElement).closest("li"),r=this.tabs.index(n),i=!0;if(this._handlePageNav(t))return;switch(t.keyCode){case e.ui.keyCode.RIGHT:case e.ui.keyCode.DOWN:r++;break;case e.ui.keyCode.UP:case e.ui.keyCode.LEFT:i=!1,r--;break;case e.ui.keyCode.END:r=this.anchors.length-1;break;case e.ui.keyCode.HOME:r=0;break;case e.ui.keyCode.SPACE:t.preventDefault(),clearTimeout(this.activating),this._activate(r);return;case e.ui.keyCode.ENTER:t.preventDefault(),clearTimeout(this.activating),this._activate(r===this.options.active?!1:r);return;default:return}t.preventDefault(),clearTimeout(this.activating),r=this._focusNextTab(r,i),t.ctrlKey||(n.attr("aria-selected","false"),this.tabs.eq(r).attr("aria-selected","true"),this.activating=this._delay(function(){this.option("active",r)},this.delay))},_panelKeydown:function(t){if(this._handlePageNav(t))return;t.ctrlKey&&t.keyCode===e.ui.keyCode.UP&&(t.preventDefault(),this.active.focus())},_handlePageNav:function(t){if(t.altKey&&t.keyCode===e.ui.keyCode.PAGE_UP)return this._activate(this._focusNextTab(this.options.active-1,!1)),!0;if(t.altKey&&t.keyCode===e.ui.keyCode.PAGE_DOWN)return this._activate(this._focusNextTab(this.options.active+1,!0)),!0},_findNextTab:function(t,n){function i(){return t>r&&(t=0),t<0&&(t=r),t}var r=this.tabs.length-1;while(e.inArray(i(),this.options.disabled)!==-1)t=n?t+1:t-1;return t},_focusNextTab:function(e,t){return e=this._findNextTab(e,t),this.tabs.eq(e).focus(),e},_setOption:function(e,t){if(e==="active"){this._activate(t);return}if(e==="disabled"){this._setupDisabled(t);return}this._super(e,t),e==="collapsible"&&(this.element.toggleClass("ui-tabs-collapsible",t),!t&&this.options.active===!1&&this._activate(0)),e==="event"&&this._setupEvents(t),e==="heightStyle"&&this._setupHeightStyle(t)},_tabId:function(e){return e.attr("aria-controls")||"ui-tabs-"+i()},_sanitizeSelector:function(e){return e?e.replace(/[!"$%&'()*+,.\/:;<=>?@\[\]\^`{|}~]/g,"\\$&"):""},refresh:function(){var t,n=this.options,r=this.tablist.children(":has(a[href])");n.disabled=e.map(r.filter(".ui-state-disabled"),function(e){return 
r.index(e)}),this._processTabs(),n.active===!1||!this.anchors.length?(n.active=!1,this.active=e()):this.active.length&&!e.contains(this.tablist[0],this.active[0])?this.tabs.length===n.disabled.length?(n.active=!1,this.active=e()):this._activate(this._findNextTab(Math.max(0,n.active-1),!1)):n.active=this.tabs.index(this.active),this._refresh()},_refresh:function(){this._setupDisabled(this.options.disabled),this._setupEvents(this.options.event),this._setupHeightStyle(this.options.heightStyle),this.tabs.not(this.active).attr({"aria-selected":"false",tabIndex:-1}),this.panels.not(this._getPanelForTab(this.active)).hide().attr({"aria-expanded":"false","aria-hidden":"true"}),this.active.length?(this.active.addClass("ui-tabs-active ui-state-active").attr({"aria-selected":"true",tabIndex:0}),this._getPanelForTab(this.active).show().attr({"aria-expanded":"true","aria-hidden":"false"})):this.tabs.eq(0).attr("tabIndex",0)},_processTabs:function(){var t=this;this.tablist=this._getList().addClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all").attr("role","tablist"),this.tabs=this.tablist.find("> li:has(a[href])").addClass("ui-state-default ui-corner-top").attr({role:"tab",tabIndex:-1}),this.anchors=this.tabs.map(function(){return e("a",this)[0]}).addClass("ui-tabs-anchor").attr({role:"presentation",tabIndex:-1}),this.panels=e(),this.anchors.each(function(n,r){var i,o,u,a=e(r).uniqueId().attr("id"),f=e(r).closest("li"),l=f.attr("aria-controls");s(r)?(i=r.hash,o=t.element.find(t._sanitizeSelector(i))):(u=t._tabId(f),i="#"+u,o=t.element.find(i),o.length||(o=t._createPanel(u),o.insertAfter(t.panels[n-1]||t.tablist)),o.attr("aria-live","polite")),o.length&&(t.panels=t.panels.add(o)),l&&f.data("ui-tabs-aria-controls",l),f.attr({"aria-controls":i.substring(1),"aria-labelledby":a}),o.attr("aria-labelledby",a)}),this.panels.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").attr("role","tabpanel")},_getList:function(){return this.element.find("ol,ul").eq(0)},_createPanel:function(t){return e("
    ").attr("id",t).addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").data("ui-tabs-destroy",!0)},_setupDisabled:function(t){e.isArray(t)&&(t.length?t.length===this.anchors.length&&(t=!0):t=!1);for(var n=0,r;r=this.tabs[n];n++)t===!0||e.inArray(n,t)!==-1?e(r).addClass("ui-state-disabled").attr("aria-disabled","true"):e(r).removeClass("ui-state-disabled").removeAttr("aria-disabled");this.options.disabled=t},_setupEvents:function(t){var n={click:function(e){e.preventDefault()}};t&&e.each(t.split(" "),function(e,t){n[t]="_eventHandler"}),this._off(this.anchors.add(this.tabs).add(this.panels)),this._on(this.anchors,n),this._on(this.tabs,{keydown:"_tabKeydown"}),this._on(this.panels,{keydown:"_panelKeydown"}),this._focusable(this.tabs),this._hoverable(this.tabs)},_setupHeightStyle:function(t){var n,r,i=this.element.parent();t==="fill"?(e.support.minHeight||(r=i.css("overflow"),i.css("overflow","hidden")),n=i.height(),this.element.siblings(":visible").each(function(){var t=e(this),r=t.css("position");if(r==="absolute"||r==="fixed")return;n-=t.outerHeight(!0)}),r&&i.css("overflow",r),this.element.children().not(this.panels).each(function(){n-=e(this).outerHeight(!0)}),this.panels.each(function(){e(this).height(Math.max(0,n-e(this).innerHeight()+e(this).height()))}).css("overflow","auto")):t==="auto"&&(n=0,this.panels.each(function(){n=Math.max(n,e(this).height("").height())}).height(n))},_eventHandler:function(t){var n=this.options,r=this.active,i=e(t.currentTarget),s=i.closest("li"),o=s[0]===r[0],u=o&&n.collapsible,a=u?e():this._getPanelForTab(s),f=r.length?this._getPanelForTab(r):e(),l={oldTab:r,oldPanel:f,newTab:u?e():s,newPanel:a};t.preventDefault();if(s.hasClass("ui-state-disabled")||s.hasClass("ui-tabs-loading")||this.running||o&&!n.collapsible||this._trigger("beforeActivate",t,l)===!1)return;n.active=u?!1:this.tabs.index(s),this.active=o?e():s,this.xhr&&this.xhr.abort(),!f.length&&!a.length&&e.error("jQuery UI Tabs: Mismatching fragment identifier."),a.length&&this.load(this.tabs.index(s),t),this._toggle(t,l)},_toggle:function(t,n){function o(){r.running=!1,r._trigger("activate",t,n)}function u(){n.newTab.closest("li").addClass("ui-tabs-active ui-state-active"),i.length&&r.options.show?r._show(i,r.options.show,o):(i.show(),o())}var r=this,i=n.newPanel,s=n.oldPanel;this.running=!0,s.length&&this.options.hide?this._hide(s,this.options.hide,function(){n.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),u()}):(n.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),s.hide(),u()),s.attr({"aria-expanded":"false","aria-hidden":"true"}),n.oldTab.attr("aria-selected","false"),i.length&&s.length?n.oldTab.attr("tabIndex",-1):i.length&&this.tabs.filter(function(){return e(this).attr("tabIndex")===0}).attr("tabIndex",-1),i.attr({"aria-expanded":"true","aria-hidden":"false"}),n.newTab.attr({"aria-selected":"true",tabIndex:0})},_activate:function(t){var n,r=this._findActive(t);if(r[0]===this.active[0])return;r.length||(r=this.active),n=r.find(".ui-tabs-anchor")[0],this._eventHandler({target:n,currentTarget:n,preventDefault:e.noop})},_findActive:function(t){return t===!1?e():this.tabs.eq(t)},_getIndex:function(e){return typeof e=="string"&&(e=this.anchors.index(this.anchors.filter("[href$='"+e+"']"))),e},_destroy:function(){this.xhr&&this.xhr.abort(),this.element.removeClass("ui-tabs ui-widget ui-widget-content ui-corner-all ui-tabs-collapsible"),this.tablist.removeClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header 
ui-corner-all").removeAttr("role"),this.anchors.removeClass("ui-tabs-anchor").removeAttr("role").removeAttr("tabIndex").removeData("href.tabs").removeData("load.tabs").removeUniqueId(),this.tabs.add(this.panels).each(function(){e.data(this,"ui-tabs-destroy")?e(this).remove():e(this).removeClass("ui-state-default ui-state-active ui-state-disabled ui-corner-top ui-corner-bottom ui-widget-content ui-tabs-active ui-tabs-panel").removeAttr("tabIndex").removeAttr("aria-live").removeAttr("aria-busy").removeAttr("aria-selected").removeAttr("aria-labelledby").removeAttr("aria-hidden").removeAttr("aria-expanded").removeAttr("role")}),this.tabs.each(function(){var t=e(this),n=t.data("ui-tabs-aria-controls");n?t.attr("aria-controls",n):t.removeAttr("aria-controls")}),this.options.heightStyle!=="content"&&this.panels.css("height","")},enable:function(n){var r=this.options.disabled;if(r===!1)return;n===t?r=!1:(n=this._getIndex(n),e.isArray(r)?r=e.map(r,function(e){return e!==n?e:null}):r=e.map(this.tabs,function(e,t){return t!==n?t:null})),this._setupDisabled(r)},disable:function(n){var r=this.options.disabled;if(r===!0)return;if(n===t)r=!0;else{n=this._getIndex(n);if(e.inArray(n,r)!==-1)return;e.isArray(r)?r=e.merge([n],r).sort():r=[n]}this._setupDisabled(r)},load:function(t,n){t=this._getIndex(t);var r=this,i=this.tabs.eq(t),o=i.find(".ui-tabs-anchor"),u=this._getPanelForTab(i),a={tab:i,panel:u};if(s(o[0]))return;this.xhr=e.ajax(this._ajaxSettings(o,n,a)),this.xhr&&this.xhr.statusText!=="canceled"&&(i.addClass("ui-tabs-loading"),u.attr("aria-busy","true"),this.xhr.success(function(e){setTimeout(function(){u.html(e),r._trigger("load",n,a)},1)}).complete(function(e,t){setTimeout(function(){t==="abort"&&r.panels.stop(!1,!0),i.removeClass("ui-tabs-loading"),u.removeAttr("aria-busy"),e===r.xhr&&delete r.xhr},1)}))},_ajaxSettings:function(t,n,r){var i=this;return{url:t.attr("href"),beforeSend:function(t,s){return i._trigger("beforeLoad",n,e.extend({jqXHR:t,ajaxSettings:s},r))}}},_getPanelForTab:function(t){var n=e(t).attr("aria-controls");return this.element.find(this._sanitizeSelector("#"+n))}}),e.uiBackCompat!==!1&&(e.ui.tabs.prototype._ui=function(e,t){return{tab:e,panel:t,index:this.anchors.index(e)}},e.widget("ui.tabs",e.ui.tabs,{url:function(e,t){this.anchors.eq(e).attr("href",t)}}),e.widget("ui.tabs",e.ui.tabs,{options:{ajaxOptions:null,cache:!1},_create:function(){this._super();var t=this;this._on({tabsbeforeload:function(n,r){if(e.data(r.tab[0],"cache.tabs")){n.preventDefault();return}r.jqXHR.success(function(){t.options.cache&&e.data(r.tab[0],"cache.tabs",!0)})}})},_ajaxSettings:function(t,n,r){var i=this.options.ajaxOptions;return e.extend({},i,{error:function(e,t,n){try{i.error(e,t,r.tab.closest("li").index(),r.tab[0])}catch(n){}}},this._superApply(arguments))},_setOption:function(e,t){e==="cache"&&t===!1&&this.anchors.removeData("cache.tabs"),this._super(e,t)},_destroy:function(){this.anchors.removeData("cache.tabs"),this._super()},url:function(e,t){this.anchors.eq(e).removeData("cache.tabs"),this._superApply(arguments)}}),e.widget("ui.tabs",e.ui.tabs,{abort:function(){this.xhr&&this.xhr.abort()}}),e.widget("ui.tabs",e.ui.tabs,{options:{spinner:"Loading…"},_create:function(){this._super(),this._on({tabsbeforeload:function(e,t){if(e.target!==this.element[0]||!this.options.spinner)return;var n=t.tab.find("span"),r=n.html();n.html(this.options.spinner),t.jqXHR.complete(function(){n.html(r)})}})}}),e.widget("ui.tabs",e.ui.tabs,{options:{enable:null,disable:null},enable:function(t){var 
n=this.options,r;if(t&&n.disabled===!0||e.isArray(n.disabled)&&e.inArray(t,n.disabled)!==-1)r=!0;this._superApply(arguments),r&&this._trigger("enable",null,this._ui(this.anchors[t],this.panels[t]))},disable:function(t){var n=this.options,r;if(t&&n.disabled===!1||e.isArray(n.disabled)&&e.inArray(t,n.disabled)===-1)r=!0;this._superApply(arguments),r&&this._trigger("disable",null,this._ui(this.anchors[t],this.panels[t]))}}),e.widget("ui.tabs",e.ui.tabs,{options:{add:null,remove:null,tabTemplate:"
  11. #{label}
  12. "},add:function(n,r,i){i===t&&(i=this.anchors.length);var s,o,u=this.options,a=e(u.tabTemplate.replace(/#\{href\}/g,n).replace(/#\{label\}/g,r)),f=n.indexOf("#")?this._tabId(a):n.replace("#","");return a.addClass("ui-state-default ui-corner-top").data("ui-tabs-destroy",!0),a.attr("aria-controls",f),s=i>=this.tabs.length,o=this.element.find("#"+f),o.length||(o=this._createPanel(f),s?i>0?o.insertAfter(this.panels.eq(-1)):o.appendTo(this.element):o.insertBefore(this.panels[i])),o.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").hide(),s?a.appendTo(this.tablist):a.insertBefore(this.tabs[i]),u.disabled=e.map(u.disabled,function(e){return e>=i?++e:e}),this.refresh(),this.tabs.length===1&&u.active===!1&&this.option("active",0),this._trigger("add",null,this._ui(this.anchors[i],this.panels[i])),this},remove:function(t){t=this._getIndex(t);var n=this.options,r=this.tabs.eq(t).remove(),i=this._getPanelForTab(r).remove();return r.hasClass("ui-tabs-active")&&this.anchors.length>2&&this._activate(t+(t+1=t?--e:e}),this.refresh(),this._trigger("remove",null,this._ui(r.find("a")[0],i[0])),this}}),e.widget("ui.tabs",e.ui.tabs,{length:function(){return this.anchors.length}}),e.widget("ui.tabs",e.ui.tabs,{options:{idPrefix:"ui-tabs-"},_tabId:function(t){var n=t.is("li")?t.find("a[href]"):t;return n=n[0],e(n).closest("li").attr("aria-controls")||n.title&&n.title.replace(/\s/g,"_").replace(/[^\w\u00c0-\uFFFF\-]/g,"")||this.options.idPrefix+i()}}),e.widget("ui.tabs",e.ui.tabs,{options:{panelTemplate:"
    "},_createPanel:function(t){return e(this.options.panelTemplate).attr("id",t).addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").data("ui-tabs-destroy",!0)}}),e.widget("ui.tabs",e.ui.tabs,{_create:function(){var e=this.options;e.active===null&&e.selected!==t&&(e.active=e.selected===-1?!1:e.selected),this._super(),e.selected=e.active,e.selected===!1&&(e.selected=-1)},_setOption:function(e,t){if(e!=="selected")return this._super(e,t);var n=this.options;this._super("active",t===-1?!1:t),n.selected=n.active,n.selected===!1&&(n.selected=-1)},_eventHandler:function(e){this._superApply(arguments),this.options.selected=this.options.active,this.options.selected===!1&&(this.options.selected=-1)}}),e.widget("ui.tabs",e.ui.tabs,{options:{show:null,select:null},_create:function(){this._super(),this.options.active!==!1&&this._trigger("show",null,this._ui(this.active.find(".ui-tabs-anchor")[0],this._getPanelForTab(this.active)[0]))},_trigger:function(e,t,n){var r=this._superApply(arguments);return r?(e==="beforeActivate"&&n.newTab.length?r=this._super("select",t,{tab:n.newTab.find(".ui-tabs-anchor")[0],panel:n.newPanel[0],index:n.newTab.closest("li").index()}):e==="activate"&&n.newTab.length&&(r=this._super("show",t,{tab:n.newTab.find(".ui-tabs-anchor")[0],panel:n.newPanel[0],index:n.newTab.closest("li").index()})),r):!1}}),e.widget("ui.tabs",e.ui.tabs,{select:function(e){e=this._getIndex(e);if(e===-1){if(!this.options.collapsible||this.options.selected===-1)return;e=this.options.selected}this.anchors.eq(e).trigger(this.options.event+this.eventNamespace)}}),function(){var t=0;e.widget("ui.tabs",e.ui.tabs,{options:{cookie:null},_create:function(){var e=this.options,t;e.active==null&&e.cookie&&(t=parseInt(this._cookie(),10),t===-1&&(t=!1),e.active=t),this._super()},_cookie:function(n){var r=[this.cookie||(this.cookie=this.options.cookie.name||"ui-tabs-"+ ++t)];return arguments.length&&(r.push(n===!1?-1:n),r.push(this.options.cookie)),e.cookie.apply(null,r)},_refresh:function(){this._super(),this.options.cookie&&this._cookie(this.options.active,this.options.cookie)},_eventHandler:function(e){this._superApply(arguments),this.options.cookie&&this._cookie(this.options.active,this.options.cookie)},_destroy:function(){this._super(),this.options.cookie&&this._cookie(null,this.options.cookie)}})}(),e.widget("ui.tabs",e.ui.tabs,{_trigger:function(t,n,r){var i=e.extend({},r);return t==="load"&&(i.panel=i.panel[0],i.tab=i.tab.find(".ui-tabs-anchor")[0]),this._super(t,n,i)}}),e.widget("ui.tabs",e.ui.tabs,{options:{fx:null},_getFx:function(){var t,n,r=this.options.fx;return r&&(e.isArray(r)?(t=r[0],n=r[1]):t=n=r),r?{show:n,hide:t}:null},_toggle:function(e,t){function o(){n.running=!1,n._trigger("activate",e,t)}function u(){t.newTab.closest("li").addClass("ui-tabs-active ui-state-active"),r.length&&s.show?r.animate(s.show,s.show.duration,function(){o()}):(r.show(),o())}var n=this,r=t.newPanel,i=t.oldPanel,s=this._getFx();if(!s)return this._super(e,t);n.running=!0,i.length&&s.hide?i.animate(s.hide,s.hide.duration,function(){t.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),u()}):(t.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),i.hide(),u())}}))})(jQuery);(function(e){function n(t,n){var r=(t.attr("aria-describedby")||"").split(/\s+/);r.push(n),t.data("ui-tooltip-id",n).attr("aria-describedby",e.trim(r.join(" ")))}function r(t){var 
n=t.data("ui-tooltip-id"),r=(t.attr("aria-describedby")||"").split(/\s+/),i=e.inArray(n,r);i!==-1&&r.splice(i,1),t.removeData("ui-tooltip-id"),r=e.trim(r.join(" ")),r?t.attr("aria-describedby",r):t.removeAttr("aria-describedby")}var t=0;e.widget("ui.tooltip",{version:"1.9.0",options:{content:function(){return e(this).attr("title")},hide:!0,items:"[title]",position:{my:"left+15 center",at:"right center",collision:"flipfit flipfit"},show:!0,tooltipClass:null,track:!1,close:null,open:null},_create:function(){this._on({mouseover:"open",focusin:"open"}),this.tooltips={}},_setOption:function(t,n){var r=this;if(t==="disabled"){this[n?"_disable":"_enable"](),this.options[t]=n;return}this._super(t,n),t==="content"&&e.each(this.tooltips,function(e,t){r._updateContent(t)})},_disable:function(){var t=this;e.each(this.tooltips,function(n,r){var i=e.Event("blur");i.target=i.currentTarget=r[0],t.close(i,!0)}),this.element.find(this.options.items).andSelf().each(function(){var t=e(this);t.is("[title]")&&t.data("ui-tooltip-title",t.attr("title")).attr("title","")})},_enable:function(){this.element.find(this.options.items).andSelf().each(function(){var t=e(this);t.data("ui-tooltip-title")&&t.attr("title",t.data("ui-tooltip-title"))})},open:function(t){var n=e(t?t.target:this.element).closest(this.options.items);if(!n.length)return;if(this.options.track&&n.data("ui-tooltip-id")){this._find(n).position(e.extend({of:n},this.options.position)),this._off(this.document,"mousemove");return}n.attr("title")&&n.data("ui-tooltip-title",n.attr("title")),n.data("tooltip-open",!0),this._updateContent(n,t)},_updateContent:function(e,t){var n,r=this.options.content,i=this;if(typeof r=="string")return this._open(t,e,r);n=r.call(e[0],function(n){if(!e.data("tooltip-open"))return;i._delay(function(){this._open(t,e,n)})}),n&&this._open(t,e,n)},_open:function(t,r,i){function u(e){o.of=e,s.position(o)}var s,o;if(!i)return;s=this._find(r);if(s.length){s.find(".ui-tooltip-content").html(i);return}r.is("[title]")&&(t&&t.type==="mouseover"?r.attr("title",""):r.removeAttr("title")),s=this._tooltip(r),n(r,s.attr("id")),s.find(".ui-tooltip-content").html(i),this.options.track&&t&&/^mouse/.test(t.originalEvent.type)?(o=e.extend({},this.options.position),this._on(this.document,{mousemove:u}),u(t)):s.position(e.extend({of:r},this.options.position)),s.hide(),this._show(s,this.options.show),this._trigger("open",t,{tooltip:s}),this._on(r,{mouseleave:"close",focusout:"close",keyup:function(t){if(t.keyCode===e.ui.keyCode.ESCAPE){var n=e.Event(t);n.currentTarget=r[0],this.close(n,!0)}}})},close:function(t,n){var i=this,s=e(t?t.currentTarget:this.element),o=this._find(s);if(this.closing)return;if(!n&&t&&t.type!=="focusout"&&this.document[0].activeElement===s[0])return;s.data("ui-tooltip-title")&&s.attr("title",s.data("ui-tooltip-title")),r(s),o.stop(!0),this._hide(o,this.options.hide,function(){e(this).remove(),delete i.tooltips[this.id]}),s.removeData("tooltip-open"),this._off(s,"mouseleave focusout keyup"),this._off(this.document,"mousemove"),this.closing=!0,this._trigger("close",t,{tooltip:o}),this.closing=!1},_tooltip:function(n){var r="ui-tooltip-"+t++,i=e("
    ").attr({id:r,role:"tooltip"}).addClass("ui-tooltip ui-widget ui-corner-all ui-widget-content "+(this.options.tooltipClass||""));return e("
    ").addClass("ui-tooltip-content").appendTo(i),i.appendTo(this.document[0].body),e.fn.bgiframe&&i.bgiframe(),this.tooltips[r]=n,i},_find:function(t){var n=t.data("ui-tooltip-id");return n?e("#"+n):e()},_destroy:function(){var t=this;e.each(this.tooltips,function(n,r){var i=e.Event("blur");i.target=i.currentTarget=r[0],t.close(i,!0),e("#"+n).remove(),r.data("ui-tooltip-title")&&(r.attr("title",r.data("ui-tooltip-title")),r.removeData("ui-tooltip-title"))})}})})(jQuery); \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js deleted file mode 100644 index bc3fbc81b2..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js +++ /dev/null @@ -1,2 +0,0 @@ -/*! jQuery v1.8.2 jquery.com | jquery.org/license */ -(function(a,b){function G(a){var b=F[a]={};return p.each(a.split(s),function(a,c){b[c]=!0}),b}function J(a,c,d){if(d===b&&a.nodeType===1){var e="data-"+c.replace(I,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:+d+""===d?+d:H.test(d)?p.parseJSON(d):d}catch(f){}p.data(a,c,d)}else d=b}return d}function K(a){var b;for(b in a){if(b==="data"&&p.isEmptyObject(a[b]))continue;if(b!=="toJSON")return!1}return!0}function ba(){return!1}function bb(){return!0}function bh(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function bi(a,b){do a=a[b];while(a&&a.nodeType!==1);return a}function bj(a,b,c){b=b||0;if(p.isFunction(b))return p.grep(a,function(a,d){var e=!!b.call(a,d,a);return e===c});if(b.nodeType)return p.grep(a,function(a,d){return a===b===c});if(typeof b=="string"){var d=p.grep(a,function(a){return a.nodeType===1});if(be.test(b))return p.filter(b,d,!c);b=p.filter(b,d)}return p.grep(a,function(a,d){return p.inArray(a,b)>=0===c})}function bk(a){var b=bl.split("|"),c=a.createDocumentFragment();if(c.createElement)while(b.length)c.createElement(b.pop());return c}function bC(a,b){return a.getElementsByTagName(b)[0]||a.appendChild(a.ownerDocument.createElement(b))}function bD(a,b){if(b.nodeType!==1||!p.hasData(a))return;var c,d,e,f=p._data(a),g=p._data(b,f),h=f.events;if(h){delete g.handle,g.events={};for(c in h)for(d=0,e=h[c].length;d").appendTo(e.body),c=b.css("display");b.remove();if(c==="none"||c===""){bI=e.body.appendChild(bI||p.extend(e.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!bJ||!bI.createElement)bJ=(bI.contentWindow||bI.contentDocument).document,bJ.write(""),bJ.close();b=bJ.body.appendChild(bJ.createElement(a)),c=bH(b,"display"),e.body.removeChild(bI)}return bS[a]=c,c}function ci(a,b,c,d){var e;if(p.isArray(b))p.each(b,function(b,e){c||ce.test(a)?d(a,e):ci(a+"["+(typeof e=="object"?b:"")+"]",e,c,d)});else if(!c&&p.type(b)==="object")for(e in b)ci(a+"["+e+"]",b[e],c,d);else d(a,b)}function cz(a){return function(b,c){typeof b!="string"&&(c=b,b="*");var d,e,f,g=b.toLowerCase().split(s),h=0,i=g.length;if(p.isFunction(c))for(;h)[^>]*$|#([\w\-]*)$)/,v=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,w=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,y=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,z=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,A=/^-ms-/,B=/-([\da-z])/gi,C=function(a,b){return(b+"").toUpperCase()},D=function(){e.addEventListener?(e.removeEventListener("DOMContentLoaded",D,!1),p.ready()):e.readyState==="complete"&&(e.detachEvent("onreadystatechange",D),p.ready())},E={};p.fn=p.prototype={constructor:p,init:function(a,c,d){var f,g,h,i;if(!a)return this;if(a.nodeType)return 
this.context=this[0]=a,this.length=1,this;if(typeof a=="string"){a.charAt(0)==="<"&&a.charAt(a.length-1)===">"&&a.length>=3?f=[null,a,null]:f=u.exec(a);if(f&&(f[1]||!c)){if(f[1])return c=c instanceof p?c[0]:c,i=c&&c.nodeType?c.ownerDocument||c:e,a=p.parseHTML(f[1],i,!0),v.test(f[1])&&p.isPlainObject(c)&&this.attr.call(a,c,!0),p.merge(this,a);g=e.getElementById(f[2]);if(g&&g.parentNode){if(g.id!==f[2])return d.find(a);this.length=1,this[0]=g}return this.context=e,this.selector=a,this}return!c||c.jquery?(c||d).find(a):this.constructor(c).find(a)}return p.isFunction(a)?d.ready(a):(a.selector!==b&&(this.selector=a.selector,this.context=a.context),p.makeArray(a,this))},selector:"",jquery:"1.8.2",length:0,size:function(){return this.length},toArray:function(){return k.call(this)},get:function(a){return a==null?this.toArray():a<0?this[this.length+a]:this[a]},pushStack:function(a,b,c){var d=p.merge(this.constructor(),a);return d.prevObject=this,d.context=this.context,b==="find"?d.selector=this.selector+(this.selector?" ":"")+c:b&&(d.selector=this.selector+"."+b+"("+c+")"),d},each:function(a,b){return p.each(this,a,b)},ready:function(a){return p.ready.promise().done(a),this},eq:function(a){return a=+a,a===-1?this.slice(a):this.slice(a,a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(k.apply(this,arguments),"slice",k.call(arguments).join(","))},map:function(a){return this.pushStack(p.map(this,function(b,c){return a.call(b,c,b)}))},end:function(){return this.prevObject||this.constructor(null)},push:j,sort:[].sort,splice:[].splice},p.fn.init.prototype=p.fn,p.extend=p.fn.extend=function(){var a,c,d,e,f,g,h=arguments[0]||{},i=1,j=arguments.length,k=!1;typeof h=="boolean"&&(k=h,h=arguments[1]||{},i=2),typeof h!="object"&&!p.isFunction(h)&&(h={}),j===i&&(h=this,--i);for(;i0)return;d.resolveWith(e,[p]),p.fn.trigger&&p(e).trigger("ready").off("ready")},isFunction:function(a){return p.type(a)==="function"},isArray:Array.isArray||function(a){return p.type(a)==="array"},isWindow:function(a){return a!=null&&a==a.window},isNumeric:function(a){return!isNaN(parseFloat(a))&&isFinite(a)},type:function(a){return a==null?String(a):E[m.call(a)]||"object"},isPlainObject:function(a){if(!a||p.type(a)!=="object"||a.nodeType||p.isWindow(a))return!1;try{if(a.constructor&&!n.call(a,"constructor")&&!n.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}var d;for(d in a);return d===b||n.call(a,d)},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},error:function(a){throw new Error(a)},parseHTML:function(a,b,c){var d;return!a||typeof a!="string"?null:(typeof b=="boolean"&&(c=b,b=0),b=b||e,(d=v.exec(a))?[b.createElement(d[1])]:(d=p.buildFragment([a],b,c?null:[]),p.merge([],(d.cacheable?p.clone(d.fragment):d.fragment).childNodes)))},parseJSON:function(b){if(!b||typeof b!="string")return null;b=p.trim(b);if(a.JSON&&a.JSON.parse)return a.JSON.parse(b);if(w.test(b.replace(y,"@").replace(z,"]").replace(x,"")))return(new Function("return "+b))();p.error("Invalid JSON: "+b)},parseXML:function(c){var d,e;if(!c||typeof c!="string")return null;try{a.DOMParser?(e=new DOMParser,d=e.parseFromString(c,"text/xml")):(d=new ActiveXObject("Microsoft.XMLDOM"),d.async="false",d.loadXML(c))}catch(f){d=b}return(!d||!d.documentElement||d.getElementsByTagName("parsererror").length)&&p.error("Invalid XML: "+c),d},noop:function(){},globalEval:function(b){b&&r.test(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return 
a.replace(A,"ms-").replace(B,C)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,c,d){var e,f=0,g=a.length,h=g===b||p.isFunction(a);if(d){if(h){for(e in a)if(c.apply(a[e],d)===!1)break}else for(;f0&&a[0]&&a[i-1]||i===0||p.isArray(a));if(j)for(;h-1)i.splice(c,1),e&&(c<=g&&g--,c<=h&&h--)}),this},has:function(a){return p.inArray(a,i)>-1},empty:function(){return i=[],this},disable:function(){return i=j=c=b,this},disabled:function(){return!i},lock:function(){return j=b,c||l.disable(),this},locked:function(){return!j},fireWith:function(a,b){return b=b||[],b=[a,b.slice?b.slice():b],i&&(!d||j)&&(e?j.push(b):k(b)),this},fire:function(){return l.fireWith(this,arguments),this},fired:function(){return!!d}};return l},p.extend({Deferred:function(a){var b=[["resolve","done",p.Callbacks("once memory"),"resolved"],["reject","fail",p.Callbacks("once memory"),"rejected"],["notify","progress",p.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return p.Deferred(function(c){p.each(b,function(b,d){var f=d[0],g=a[b];e[d[1]](p.isFunction(g)?function(){var a=g.apply(this,arguments);a&&p.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f+"With"](this===e?c:this,[a])}:c[f])}),a=null}).promise()},promise:function(a){return a!=null?p.extend(a,d):d}},e={};return d.pipe=d.then,p.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[a^1][2].disable,b[2][2].lock),e[f[0]]=g.fire,e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=k.call(arguments),d=c.length,e=d!==1||a&&p.isFunction(a.promise)?d:0,f=e===1?a:p.Deferred(),g=function(a,b,c){return function(d){b[a]=this,c[a]=arguments.length>1?k.call(arguments):d,c===h?f.notifyWith(b,c):--e||f.resolveWith(b,c)}},h,i,j;if(d>1){h=new Array(d),i=new Array(d),j=new Array(d);for(;b
    a",c=n.getElementsByTagName("*"),d=n.getElementsByTagName("a")[0],d.style.cssText="top:1px;float:left;opacity:.5";if(!c||!c.length)return{};f=e.createElement("select"),g=f.appendChild(e.createElement("option")),h=n.getElementsByTagName("input")[0],b={leadingWhitespace:n.firstChild.nodeType===3,tbody:!n.getElementsByTagName("tbody").length,htmlSerialize:!!n.getElementsByTagName("link").length,style:/top/.test(d.getAttribute("style")),hrefNormalized:d.getAttribute("href")==="/a",opacity:/^0.5/.test(d.style.opacity),cssFloat:!!d.style.cssFloat,checkOn:h.value==="on",optSelected:g.selected,getSetAttribute:n.className!=="t",enctype:!!e.createElement("form").enctype,html5Clone:e.createElement("nav").cloneNode(!0).outerHTML!=="<:nav>",boxModel:e.compatMode==="CSS1Compat",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},h.checked=!0,b.noCloneChecked=h.cloneNode(!0).checked,f.disabled=!0,b.optDisabled=!g.disabled;try{delete n.test}catch(o){b.deleteExpando=!1}!n.addEventListener&&n.attachEvent&&n.fireEvent&&(n.attachEvent("onclick",m=function(){b.noCloneEvent=!1}),n.cloneNode(!0).fireEvent("onclick"),n.detachEvent("onclick",m)),h=e.createElement("input"),h.value="t",h.setAttribute("type","radio"),b.radioValue=h.value==="t",h.setAttribute("checked","checked"),h.setAttribute("name","t"),n.appendChild(h),i=e.createDocumentFragment(),i.appendChild(n.lastChild),b.checkClone=i.cloneNode(!0).cloneNode(!0).lastChild.checked,b.appendChecked=h.checked,i.removeChild(h),i.appendChild(n);if(n.attachEvent)for(k in{submit:!0,change:!0,focusin:!0})j="on"+k,l=j in n,l||(n.setAttribute(j,"return;"),l=typeof n[j]=="function"),b[k+"Bubbles"]=l;return p(function(){var c,d,f,g,h="padding:0;margin:0;border:0;display:block;overflow:hidden;",i=e.getElementsByTagName("body")[0];if(!i)return;c=e.createElement("div"),c.style.cssText="visibility:hidden;border:0;width:0;height:0;position:static;top:0;margin-top:1px",i.insertBefore(c,i.firstChild),d=e.createElement("div"),c.appendChild(d),d.innerHTML="
    t
    ",f=d.getElementsByTagName("td"),f[0].style.cssText="padding:0;margin:0;border:0;display:none",l=f[0].offsetHeight===0,f[0].style.display="",f[1].style.display="none",b.reliableHiddenOffsets=l&&f[0].offsetHeight===0,d.innerHTML="",d.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",b.boxSizing=d.offsetWidth===4,b.doesNotIncludeMarginInBodyOffset=i.offsetTop!==1,a.getComputedStyle&&(b.pixelPosition=(a.getComputedStyle(d,null)||{}).top!=="1%",b.boxSizingReliable=(a.getComputedStyle(d,null)||{width:"4px"}).width==="4px",g=e.createElement("div"),g.style.cssText=d.style.cssText=h,g.style.marginRight=g.style.width="0",d.style.width="1px",d.appendChild(g),b.reliableMarginRight=!parseFloat((a.getComputedStyle(g,null)||{}).marginRight)),typeof d.style.zoom!="undefined"&&(d.innerHTML="",d.style.cssText=h+"width:1px;padding:1px;display:inline;zoom:1",b.inlineBlockNeedsLayout=d.offsetWidth===3,d.style.display="block",d.style.overflow="visible",d.innerHTML="
    ",d.firstChild.style.width="5px",b.shrinkWrapBlocks=d.offsetWidth!==3,c.style.zoom=1),i.removeChild(c),c=d=f=g=null}),i.removeChild(n),c=d=f=g=h=i=n=null,b}();var H=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,I=/([A-Z])/g;p.extend({cache:{},deletedIds:[],uuid:0,expando:"jQuery"+(p.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(a){return a=a.nodeType?p.cache[a[p.expando]]:a[p.expando],!!a&&!K(a)},data:function(a,c,d,e){if(!p.acceptData(a))return;var f,g,h=p.expando,i=typeof c=="string",j=a.nodeType,k=j?p.cache:a,l=j?a[h]:a[h]&&h;if((!l||!k[l]||!e&&!k[l].data)&&i&&d===b)return;l||(j?a[h]=l=p.deletedIds.pop()||p.guid++:l=h),k[l]||(k[l]={},j||(k[l].toJSON=p.noop));if(typeof c=="object"||typeof c=="function")e?k[l]=p.extend(k[l],c):k[l].data=p.extend(k[l].data,c);return f=k[l],e||(f.data||(f.data={}),f=f.data),d!==b&&(f[p.camelCase(c)]=d),i?(g=f[c],g==null&&(g=f[p.camelCase(c)])):g=f,g},removeData:function(a,b,c){if(!p.acceptData(a))return;var d,e,f,g=a.nodeType,h=g?p.cache:a,i=g?a[p.expando]:p.expando;if(!h[i])return;if(b){d=c?h[i]:h[i].data;if(d){p.isArray(b)||(b in d?b=[b]:(b=p.camelCase(b),b in d?b=[b]:b=b.split(" ")));for(e=0,f=b.length;e1,null,!1))},removeData:function(a){return this.each(function(){p.removeData(this,a)})}}),p.extend({queue:function(a,b,c){var d;if(a)return b=(b||"fx")+"queue",d=p._data(a,b),c&&(!d||p.isArray(c)?d=p._data(a,b,p.makeArray(c)):d.push(c)),d||[]},dequeue:function(a,b){b=b||"fx";var c=p.queue(a,b),d=c.length,e=c.shift(),f=p._queueHooks(a,b),g=function(){p.dequeue(a,b)};e==="inprogress"&&(e=c.shift(),d--),e&&(b==="fx"&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return p._data(a,c)||p._data(a,c,{empty:p.Callbacks("once memory").add(function(){p.removeData(a,b+"queue",!0),p.removeData(a,c,!0)})})}}),p.fn.extend({queue:function(a,c){var d=2;return typeof a!="string"&&(c=a,a="fx",d--),arguments.length1)},removeAttr:function(a){return this.each(function(){p.removeAttr(this,a)})},prop:function(a,b){return p.access(this,p.prop,a,b,arguments.length>1)},removeProp:function(a){return a=p.propFix[a]||a,this.each(function(){try{this[a]=b,delete this[a]}catch(c){}})},addClass:function(a){var b,c,d,e,f,g,h;if(p.isFunction(a))return this.each(function(b){p(this).addClass(a.call(this,b,this.className))});if(a&&typeof a=="string"){b=a.split(s);for(c=0,d=this.length;c=0)d=d.replace(" "+c[f]+" "," ");e.className=a?p.trim(d):""}}}return this},toggleClass:function(a,b){var c=typeof a,d=typeof b=="boolean";return p.isFunction(a)?this.each(function(c){p(this).toggleClass(a.call(this,c,this.className,b),b)}):this.each(function(){if(c==="string"){var e,f=0,g=p(this),h=b,i=a.split(s);while(e=i[f++])h=d?h:!g.hasClass(e),g[h?"addClass":"removeClass"](e)}else if(c==="undefined"||c==="boolean")this.className&&p._data(this,"__className__",this.className),this.className=this.className||a===!1?"":p._data(this,"__className__")||""})},hasClass:function(a){var b=" "+a+" ",c=0,d=this.length;for(;c=0)return!0;return!1},val:function(a){var c,d,e,f=this[0];if(!arguments.length){if(f)return c=p.valHooks[f.type]||p.valHooks[f.nodeName.toLowerCase()],c&&"get"in c&&(d=c.get(f,"value"))!==b?d:(d=f.value,typeof d=="string"?d.replace(P,""):d==null?"":d);return}return e=p.isFunction(a),this.each(function(d){var f,g=p(this);if(this.nodeType!==1)return;e?f=a.call(this,d,g.val()):f=a,f==null?f="":typeof 
f=="number"?f+="":p.isArray(f)&&(f=p.map(f,function(a){return a==null?"":a+""})),c=p.valHooks[this.type]||p.valHooks[this.nodeName.toLowerCase()];if(!c||!("set"in c)||c.set(this,f,"value")===b)this.value=f})}}),p.extend({valHooks:{option:{get:function(a){var b=a.attributes.value;return!b||b.specified?a.value:a.text}},select:{get:function(a){var b,c,d,e,f=a.selectedIndex,g=[],h=a.options,i=a.type==="select-one";if(f<0)return null;c=i?f:0,d=i?f+1:h.length;for(;c=0}),c.length||(a.selectedIndex=-1),c}}},attrFn:{},attr:function(a,c,d,e){var f,g,h,i=a.nodeType;if(!a||i===3||i===8||i===2)return;if(e&&p.isFunction(p.fn[c]))return p(a)[c](d);if(typeof a.getAttribute=="undefined")return p.prop(a,c,d);h=i!==1||!p.isXMLDoc(a),h&&(c=c.toLowerCase(),g=p.attrHooks[c]||(T.test(c)?M:L));if(d!==b){if(d===null){p.removeAttr(a,c);return}return g&&"set"in g&&h&&(f=g.set(a,d,c))!==b?f:(a.setAttribute(c,d+""),d)}return g&&"get"in g&&h&&(f=g.get(a,c))!==null?f:(f=a.getAttribute(c),f===null?b:f)},removeAttr:function(a,b){var c,d,e,f,g=0;if(b&&a.nodeType===1){d=b.split(s);for(;g=0}})});var V=/^(?:textarea|input|select)$/i,W=/^([^\.]*|)(?:\.(.+)|)$/,X=/(?:^|\s)hover(\.\S+|)\b/,Y=/^key/,Z=/^(?:mouse|contextmenu)|click/,$=/^(?:focusinfocus|focusoutblur)$/,_=function(a){return p.event.special.hover?a:a.replace(X,"mouseenter$1 mouseleave$1")};p.event={add:function(a,c,d,e,f){var g,h,i,j,k,l,m,n,o,q,r;if(a.nodeType===3||a.nodeType===8||!c||!d||!(g=p._data(a)))return;d.handler&&(o=d,d=o.handler,f=o.selector),d.guid||(d.guid=p.guid++),i=g.events,i||(g.events=i={}),h=g.handle,h||(g.handle=h=function(a){return typeof p!="undefined"&&(!a||p.event.triggered!==a.type)?p.event.dispatch.apply(h.elem,arguments):b},h.elem=a),c=p.trim(_(c)).split(" ");for(j=0;j=0&&(s=s.slice(0,-1),i=!0),s.indexOf(".")>=0&&(t=s.split("."),s=t.shift(),t.sort());if((!f||p.event.customEvent[s])&&!p.event.global[s])return;c=typeof c=="object"?c[p.expando]?c:new p.Event(s,c):new p.Event(s),c.type=s,c.isTrigger=!0,c.exclusive=i,c.namespace=t.join("."),c.namespace_re=c.namespace?new RegExp("(^|\\.)"+t.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,m=s.indexOf(":")<0?"on"+s:"";if(!f){h=p.cache;for(j in h)h[j].events&&h[j].events[s]&&p.event.trigger(c,d,h[j].handle.elem,!0);return}c.result=b,c.target||(c.target=f),d=d!=null?p.makeArray(d):[],d.unshift(c),n=p.event.special[s]||{};if(n.trigger&&n.trigger.apply(f,d)===!1)return;q=[[f,n.bindType||s]];if(!g&&!n.noBubble&&!p.isWindow(f)){r=n.delegateType||s,k=$.test(r+s)?f:f.parentNode;for(l=f;k;k=k.parentNode)q.push([k,r]),l=k;l===(f.ownerDocument||e)&&q.push([l.defaultView||l.parentWindow||a,r])}for(j=0;j=0:p.find(m,this,null,[f]).length),h[m]&&j.push(l);j.length&&u.push({elem:f,matches:j})}o.length>q&&u.push({elem:this,matches:o.slice(q)});for(d=0;d0?this.on(b,null,a,c):this.trigger(b)},Y.test(b)&&(p.event.fixHooks[b]=p.event.keyHooks),Z.test(b)&&(p.event.fixHooks[b]=p.event.mouseHooks)}),function(a,b){function bc(a,b,c,d){c=c||[],b=b||r;var e,f,i,j,k=b.nodeType;if(!a||typeof a!="string")return c;if(k!==1&&k!==9)return[];i=g(b);if(!i&&!d)if(e=P.exec(a))if(j=e[1]){if(k===9){f=b.getElementById(j);if(!f||!f.parentNode)return c;if(f.id===j)return c.push(f),c}else if(b.ownerDocument&&(f=b.ownerDocument.getElementById(j))&&h(b,f)&&f.id===j)return c.push(f),c}else{if(e[2])return w.apply(c,x.call(b.getElementsByTagName(a),0)),c;if((j=e[3])&&_&&b.getElementsByClassName)return w.apply(c,x.call(b.getElementsByClassName(j),0)),c}return bp(a.replace(L,"$1"),b,c,d,i)}function bd(a){return function(b){var 
c=b.nodeName.toLowerCase();return c==="input"&&b.type===a}}function be(a){return function(b){var c=b.nodeName.toLowerCase();return(c==="input"||c==="button")&&b.type===a}}function bf(a){return z(function(b){return b=+b,z(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function bg(a,b,c){if(a===b)return c;var d=a.nextSibling;while(d){if(d===b)return-1;d=d.nextSibling}return 1}function bh(a,b){var c,d,f,g,h,i,j,k=C[o][a];if(k)return b?0:k.slice(0);h=a,i=[],j=e.preFilter;while(h){if(!c||(d=M.exec(h)))d&&(h=h.slice(d[0].length)),i.push(f=[]);c=!1;if(d=N.exec(h))f.push(c=new q(d.shift())),h=h.slice(c.length),c.type=d[0].replace(L," ");for(g in e.filter)(d=W[g].exec(h))&&(!j[g]||(d=j[g](d,r,!0)))&&(f.push(c=new q(d.shift())),h=h.slice(c.length),c.type=g,c.matches=d);if(!c)break}return b?h.length:h?bc.error(a):C(a,i).slice(0)}function bi(a,b,d){var e=b.dir,f=d&&b.dir==="parentNode",g=u++;return b.first?function(b,c,d){while(b=b[e])if(f||b.nodeType===1)return a(b,c,d)}:function(b,d,h){if(!h){var i,j=t+" "+g+" ",k=j+c;while(b=b[e])if(f||b.nodeType===1){if((i=b[o])===k)return b.sizset;if(typeof i=="string"&&i.indexOf(j)===0){if(b.sizset)return b}else{b[o]=k;if(a(b,d,h))return b.sizset=!0,b;b.sizset=!1}}}else while(b=b[e])if(f||b.nodeType===1)if(a(b,d,h))return b}}function bj(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function bk(a,b,c,d,e){var f,g=[],h=0,i=a.length,j=b!=null;for(;h-1},h,!0),m=[function(a,c,d){return!g&&(d||c!==l)||((b=c).nodeType?j(a,c,d):k(a,c,d))}];for(;i1&&bj(m),i>1&&a.slice(0,i-1).join("").replace(L,"$1"),c,i0,f=a.length>0,g=function(h,i,j,k,m){var n,o,p,q=[],s=0,u="0",x=h&&[],y=m!=null,z=l,A=h||f&&e.find.TAG("*",m&&i.parentNode||i),B=t+=z==null?1:Math.E;y&&(l=i!==r&&i,c=g.el);for(;(n=A[u])!=null;u++){if(f&&n){for(o=0;p=a[o];o++)if(p(n,i,j)){k.push(n);break}y&&(t=B,c=++g.el)}d&&((n=!p&&n)&&s--,h&&x.push(n))}s+=u;if(d&&u!==s){for(o=0;p=b[o];o++)p(x,q,i,j);if(h){if(s>0)while(u--)!x[u]&&!q[u]&&(q[u]=v.call(k));q=bk(q)}w.apply(k,q),y&&!h&&q.length>0&&s+b.length>1&&bc.uniqueSort(k)}return y&&(t=B,l=z),x};return g.el=0,d?z(g):g}function bo(a,b,c,d){var e=0,f=b.length;for(;e2&&(j=h[0]).type==="ID"&&b.nodeType===9&&!f&&e.relative[h[1].type]){b=e.find.ID(j.matches[0].replace(V,""),b,f)[0];if(!b)return c;a=a.slice(h.shift().length)}for(g=W.POS.test(a)?-1:h.length-1;g>=0;g--){j=h[g];if(e.relative[k=j.type])break;if(l=e.find[k])if(d=l(j.matches[0].replace(V,""),R.test(h[0].type)&&b.parentNode||b,f)){h.splice(g,1),a=d.length&&h.join("");if(!a)return w.apply(c,x.call(d,0)),c;break}}}return i(a,m)(d,b,f,c,R.test(a)),c}function bq(){}var c,d,e,f,g,h,i,j,k,l,m=!0,n="undefined",o=("sizcache"+Math.random()).replace(".",""),q=String,r=a.document,s=r.documentElement,t=0,u=0,v=[].pop,w=[].push,x=[].slice,y=[].indexOf||function(a){var b=0,c=this.length;for(;be.cacheLength&&delete a[b.shift()],a[c]=d},a)},B=A(),C=A(),D=A(),E="[\\x20\\t\\r\\n\\f]",F="(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+",G=F.replace("w","w#"),H="([*^$|!~]?=)",I="\\["+E+"*("+F+")"+E+"*(?:"+H+E+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+G+")|)|)"+E+"*\\]",J=":("+F+")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:"+I+")|[^:]|\\\\.)*|.*))\\)|)",K=":(even|odd|eq|gt|lt|nth|first|last)(?:\\("+E+"*((?:-\\d)?\\d*)"+E+"*\\)|)(?=[^-]|$)",L=new RegExp("^"+E+"+|((?:^|[^\\\\])(?:\\\\.)*)"+E+"+$","g"),M=new RegExp("^"+E+"*,"+E+"*"),N=new RegExp("^"+E+"*([\\x20\\t\\r\\n\\f>+~])"+E+"*"),O=new 
RegExp(J),P=/^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/,Q=/^:not/,R=/[\x20\t\r\n\f]*[+~]/,S=/:not\($/,T=/h\d/i,U=/input|select|textarea|button/i,V=/\\(?!\\)/g,W={ID:new RegExp("^#("+F+")"),CLASS:new RegExp("^\\.("+F+")"),NAME:new RegExp("^\\[name=['\"]?("+F+")['\"]?\\]"),TAG:new RegExp("^("+F.replace("w","w*")+")"),ATTR:new RegExp("^"+I),PSEUDO:new RegExp("^"+J),POS:new RegExp(K,"i"),CHILD:new RegExp("^:(only|nth|first|last)-child(?:\\("+E+"*(even|odd|(([+-]|)(\\d*)n|)"+E+"*(?:([+-]|)"+E+"*(\\d+)|))"+E+"*\\)|)","i"),needsContext:new RegExp("^"+E+"*[>+~]|"+K,"i")},X=function(a){var b=r.createElement("div");try{return a(b)}catch(c){return!1}finally{b=null}},Y=X(function(a){return a.appendChild(r.createComment("")),!a.getElementsByTagName("*").length}),Z=X(function(a){return a.innerHTML="",a.firstChild&&typeof a.firstChild.getAttribute!==n&&a.firstChild.getAttribute("href")==="#"}),$=X(function(a){a.innerHTML="";var b=typeof a.lastChild.getAttribute("multiple");return b!=="boolean"&&b!=="string"}),_=X(function(a){return a.innerHTML="",!a.getElementsByClassName||!a.getElementsByClassName("e").length?!1:(a.lastChild.className="e",a.getElementsByClassName("e").length===2)}),ba=X(function(a){a.id=o+0,a.innerHTML="
    ",s.insertBefore(a,s.firstChild);var b=r.getElementsByName&&r.getElementsByName(o).length===2+r.getElementsByName(o+0).length;return d=!r.getElementById(o),s.removeChild(a),b});try{x.call(s.childNodes,0)[0].nodeType}catch(bb){x=function(a){var b,c=[];for(;b=this[a];a++)c.push(b);return c}}bc.matches=function(a,b){return bc(a,null,null,b)},bc.matchesSelector=function(a,b){return bc(b,null,null,[a]).length>0},f=bc.getText=function(a){var b,c="",d=0,e=a.nodeType;if(e){if(e===1||e===9||e===11){if(typeof a.textContent=="string")return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=f(a)}else if(e===3||e===4)return a.nodeValue}else for(;b=a[d];d++)c+=f(b);return c},g=bc.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?b.nodeName!=="HTML":!1},h=bc.contains=s.contains?function(a,b){var c=a.nodeType===9?a.documentElement:a,d=b&&b.parentNode;return a===d||!!(d&&d.nodeType===1&&c.contains&&c.contains(d))}:s.compareDocumentPosition?function(a,b){return b&&!!(a.compareDocumentPosition(b)&16)}:function(a,b){while(b=b.parentNode)if(b===a)return!0;return!1},bc.attr=function(a,b){var c,d=g(a);return d||(b=b.toLowerCase()),(c=e.attrHandle[b])?c(a):d||$?a.getAttribute(b):(c=a.getAttributeNode(b),c?typeof a[b]=="boolean"?a[b]?b:null:c.specified?c.value:null:null)},e=bc.selectors={cacheLength:50,createPseudo:z,match:W,attrHandle:Z?{}:{href:function(a){return a.getAttribute("href",2)},type:function(a){return a.getAttribute("type")}},find:{ID:d?function(a,b,c){if(typeof b.getElementById!==n&&!c){var d=b.getElementById(a);return d&&d.parentNode?[d]:[]}}:function(a,c,d){if(typeof c.getElementById!==n&&!d){var e=c.getElementById(a);return e?e.id===a||typeof e.getAttributeNode!==n&&e.getAttributeNode("id").value===a?[e]:b:[]}},TAG:Y?function(a,b){if(typeof b.getElementsByTagName!==n)return b.getElementsByTagName(a)}:function(a,b){var c=b.getElementsByTagName(a);if(a==="*"){var d,e=[],f=0;for(;d=c[f];f++)d.nodeType===1&&e.push(d);return e}return c},NAME:ba&&function(a,b){if(typeof b.getElementsByName!==n)return b.getElementsByName(name)},CLASS:_&&function(a,b,c){if(typeof b.getElementsByClassName!==n&&!c)return b.getElementsByClassName(a)}},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(V,""),a[3]=(a[4]||a[5]||"").replace(V,""),a[2]==="~="&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),a[1]==="nth"?(a[2]||bc.error(a[0]),a[3]=+(a[3]?a[4]+(a[5]||1):2*(a[2]==="even"||a[2]==="odd")),a[4]=+(a[6]+a[7]||a[2]==="odd")):a[2]&&bc.error(a[0]),a},PSEUDO:function(a){var b,c;if(W.CHILD.test(a[0]))return null;if(a[3])a[2]=a[3];else if(b=a[4])O.test(b)&&(c=bh(b,!0))&&(c=b.indexOf(")",b.length-c)-b.length)&&(b=b.slice(0,c),a[0]=a[0].slice(0,c)),a[2]=b;return a.slice(0,3)}},filter:{ID:d?function(a){return a=a.replace(V,""),function(b){return b.getAttribute("id")===a}}:function(a){return a=a.replace(V,""),function(b){var c=typeof b.getAttributeNode!==n&&b.getAttributeNode("id");return c&&c.value===a}},TAG:function(a){return a==="*"?function(){return!0}:(a=a.replace(V,"").toLowerCase(),function(b){return b.nodeName&&b.nodeName.toLowerCase()===a})},CLASS:function(a){var b=B[o][a];return b||(b=B(a,new RegExp("(^|"+E+")"+a+"("+E+"|$)"))),function(a){return b.test(a.className||typeof a.getAttribute!==n&&a.getAttribute("class")||"")}},ATTR:function(a,b,c){return function(d,e){var f=bc.attr(d,a);return 
f==null?b==="!=":b?(f+="",b==="="?f===c:b==="!="?f!==c:b==="^="?c&&f.indexOf(c)===0:b==="*="?c&&f.indexOf(c)>-1:b==="$="?c&&f.substr(f.length-c.length)===c:b==="~="?(" "+f+" ").indexOf(c)>-1:b==="|="?f===c||f.substr(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d){return a==="nth"?function(a){var b,e,f=a.parentNode;if(c===1&&d===0)return!0;if(f){e=0;for(b=f.firstChild;b;b=b.nextSibling)if(b.nodeType===1){e++;if(a===b)break}}return e-=d,e===c||e%c===0&&e/c>=0}:function(b){var c=b;switch(a){case"only":case"first":while(c=c.previousSibling)if(c.nodeType===1)return!1;if(a==="first")return!0;c=b;case"last":while(c=c.nextSibling)if(c.nodeType===1)return!1;return!0}}},PSEUDO:function(a,b){var c,d=e.pseudos[a]||e.setFilters[a.toLowerCase()]||bc.error("unsupported pseudo: "+a);return d[o]?d(b):d.length>1?(c=[a,a,"",b],e.setFilters.hasOwnProperty(a.toLowerCase())?z(function(a,c){var e,f=d(a,b),g=f.length;while(g--)e=y.call(a,f[g]),a[e]=!(c[e]=f[g])}):function(a){return d(a,0,c)}):d}},pseudos:{not:z(function(a){var b=[],c=[],d=i(a.replace(L,"$1"));return d[o]?z(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)if(f=g[h])a[h]=!(b[h]=f)}):function(a,e,f){return b[0]=a,d(b,null,f,c),!c.pop()}}),has:z(function(a){return function(b){return bc(a,b).length>0}}),contains:z(function(a){return function(b){return(b.textContent||b.innerText||f(b)).indexOf(a)>-1}}),enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&!!a.checked||b==="option"&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},parent:function(a){return!e.pseudos.empty(a)},empty:function(a){var b;a=a.firstChild;while(a){if(a.nodeName>"@"||(b=a.nodeType)===3||b===4)return!1;a=a.nextSibling}return!0},header:function(a){return T.test(a.nodeName)},text:function(a){var b,c;return a.nodeName.toLowerCase()==="input"&&(b=a.type)==="text"&&((c=a.getAttribute("type"))==null||c.toLowerCase()===b)},radio:bd("radio"),checkbox:bd("checkbox"),file:bd("file"),password:bd("password"),image:bd("image"),submit:be("submit"),reset:be("reset"),button:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&a.type==="button"||b==="button"},input:function(a){return U.test(a.nodeName)},focus:function(a){var b=a.ownerDocument;return a===b.activeElement&&(!b.hasFocus||b.hasFocus())&&(!!a.type||!!a.href)},active:function(a){return a===a.ownerDocument.activeElement},first:bf(function(a,b,c){return[0]}),last:bf(function(a,b,c){return[b-1]}),eq:bf(function(a,b,c){return[c<0?c+b:c]}),even:bf(function(a,b,c){for(var d=0;d=0;)a.push(d);return a}),gt:bf(function(a,b,c){for(var d=c<0?c+b:c;++d",a.querySelectorAll("[selected]").length||e.push("\\["+E+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),a.querySelectorAll(":checked").length||e.push(":checked")}),X(function(a){a.innerHTML="

    ",a.querySelectorAll("[test^='']").length&&e.push("[*^$]="+E+"*(?:\"\"|'')"),a.innerHTML="",a.querySelectorAll(":enabled").length||e.push(":enabled",":disabled")}),e=new RegExp(e.join("|")),bp=function(a,d,f,g,h){if(!g&&!h&&(!e||!e.test(a))){var i,j,k=!0,l=o,m=d,n=d.nodeType===9&&a;if(d.nodeType===1&&d.nodeName.toLowerCase()!=="object"){i=bh(a),(k=d.getAttribute("id"))?l=k.replace(c,"\\$&"):d.setAttribute("id",l),l="[id='"+l+"'] ",j=i.length;while(j--)i[j]=l+i[j].join("");m=R.test(a)&&d.parentNode||d,n=i.join(",")}if(n)try{return w.apply(f,x.call(m.querySelectorAll(n),0)),f}catch(p){}finally{k||d.removeAttribute("id")}}return b(a,d,f,g,h)},h&&(X(function(b){a=h.call(b,"div");try{h.call(b,"[test!='']:sizzle"),f.push("!=",J)}catch(c){}}),f=new RegExp(f.join("|")),bc.matchesSelector=function(b,c){c=c.replace(d,"='$1']");if(!g(b)&&!f.test(c)&&(!e||!e.test(c)))try{var i=h.call(b,c);if(i||a||b.document&&b.document.nodeType!==11)return i}catch(j){}return bc(c,null,null,[b]).length>0})}(),e.pseudos.nth=e.pseudos.eq,e.filters=bq.prototype=e.pseudos,e.setFilters=new bq,bc.attr=p.attr,p.find=bc,p.expr=bc.selectors,p.expr[":"]=p.expr.pseudos,p.unique=bc.uniqueSort,p.text=bc.getText,p.isXMLDoc=bc.isXML,p.contains=bc.contains}(a);var bc=/Until$/,bd=/^(?:parents|prev(?:Until|All))/,be=/^.[^:#\[\.,]*$/,bf=p.expr.match.needsContext,bg={children:!0,contents:!0,next:!0,prev:!0};p.fn.extend({find:function(a){var b,c,d,e,f,g,h=this;if(typeof a!="string")return p(a).filter(function(){for(b=0,c=h.length;b0)for(e=d;e=0:p.filter(a,this).length>0:this.filter(a).length>0)},closest:function(a,b){var c,d=0,e=this.length,f=[],g=bf.test(a)||typeof a!="string"?p(a,b||this.context):0;for(;d-1:p.find.matchesSelector(c,a)){f.push(c);break}c=c.parentNode}}return f=f.length>1?p.unique(f):f,this.pushStack(f,"closest",a)},index:function(a){return a?typeof a=="string"?p.inArray(this[0],p(a)):p.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.prevAll().length:-1},add:function(a,b){var c=typeof a=="string"?p(a,b):p.makeArray(a&&a.nodeType?[a]:a),d=p.merge(this.get(),c);return this.pushStack(bh(c[0])||bh(d[0])?d:p.unique(d))},addBack:function(a){return this.add(a==null?this.prevObject:this.prevObject.filter(a))}}),p.fn.andSelf=p.fn.addBack,p.each({parent:function(a){var b=a.parentNode;return b&&b.nodeType!==11?b:null},parents:function(a){return p.dir(a,"parentNode")},parentsUntil:function(a,b,c){return p.dir(a,"parentNode",c)},next:function(a){return bi(a,"nextSibling")},prev:function(a){return bi(a,"previousSibling")},nextAll:function(a){return p.dir(a,"nextSibling")},prevAll:function(a){return p.dir(a,"previousSibling")},nextUntil:function(a,b,c){return p.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return p.dir(a,"previousSibling",c)},siblings:function(a){return p.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return p.sibling(a.firstChild)},contents:function(a){return p.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:p.merge([],a.childNodes)}},function(a,b){p.fn[a]=function(c,d){var e=p.map(this,b,c);return bc.test(a)||(d=c),d&&typeof d=="string"&&(e=p.filter(d,e)),e=this.length>1&&!bg[a]?p.unique(e):e,this.length>1&&bd.test(a)&&(e=e.reverse()),this.pushStack(e,a,k.call(arguments).join(","))}}),p.extend({filter:function(a,b,c){return c&&(a=":not("+a+")"),b.length===1?p.find.matchesSelector(b[0],a)?[b[0]]:[]:p.find.matches(a,b)},dir:function(a,c,d){var e=[],f=a[c];while(f&&f.nodeType!==9&&(d===b||f.nodeType!==1||!p(f).is(d)))f.nodeType===1&&e.push(f),f=f[c];return 
e},sibling:function(a,b){var c=[];for(;a;a=a.nextSibling)a.nodeType===1&&a!==b&&c.push(a);return c}});var bl="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",bm=/ jQuery\d+="(?:null|\d+)"/g,bn=/^\s+/,bo=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,bp=/<([\w:]+)/,bq=/]","i"),bv=/^(?:checkbox|radio)$/,bw=/checked\s*(?:[^=]|=\s*.checked.)/i,bx=/\/(java|ecma)script/i,by=/^\s*\s*$/g,bz={option:[1,""],legend:[1,"
    ","
    "],thead:[1,"","
    "],tr:[2,"","
    "],td:[3,"","
    "],col:[2,"","
    "],area:[1,"",""],_default:[0,"",""]},bA=bk(e),bB=bA.appendChild(e.createElement("div"));bz.optgroup=bz.option,bz.tbody=bz.tfoot=bz.colgroup=bz.caption=bz.thead,bz.th=bz.td,p.support.htmlSerialize||(bz._default=[1,"X
    ","
    "]),p.fn.extend({text:function(a){return p.access(this,function(a){return a===b?p.text(this):this.empty().append((this[0]&&this[0].ownerDocument||e).createTextNode(a))},null,a,arguments.length)},wrapAll:function(a){if(p.isFunction(a))return this.each(function(b){p(this).wrapAll(a.call(this,b))});if(this[0]){var b=p(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&a.firstChild.nodeType===1)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){return p.isFunction(a)?this.each(function(b){p(this).wrapInner(a.call(this,b))}):this.each(function(){var b=p(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=p.isFunction(a);return this.each(function(c){p(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){p.nodeName(this,"body")||p(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(a){(this.nodeType===1||this.nodeType===11)&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,!0,function(a){(this.nodeType===1||this.nodeType===11)&&this.insertBefore(a,this.firstChild)})},before:function(){if(!bh(this[0]))return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this)});if(arguments.length){var a=p.clean(arguments);return this.pushStack(p.merge(a,this),"before",this.selector)}},after:function(){if(!bh(this[0]))return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this.nextSibling)});if(arguments.length){var a=p.clean(arguments);return this.pushStack(p.merge(this,a),"after",this.selector)}},remove:function(a,b){var c,d=0;for(;(c=this[d])!=null;d++)if(!a||p.filter(a,[c]).length)!b&&c.nodeType===1&&(p.cleanData(c.getElementsByTagName("*")),p.cleanData([c])),c.parentNode&&c.parentNode.removeChild(c);return this},empty:function(){var a,b=0;for(;(a=this[b])!=null;b++){a.nodeType===1&&p.cleanData(a.getElementsByTagName("*"));while(a.firstChild)a.removeChild(a.firstChild)}return this},clone:function(a,b){return a=a==null?!1:a,b=b==null?a:b,this.map(function(){return p.clone(this,a,b)})},html:function(a){return p.access(this,function(a){var c=this[0]||{},d=0,e=this.length;if(a===b)return c.nodeType===1?c.innerHTML.replace(bm,""):b;if(typeof a=="string"&&!bs.test(a)&&(p.support.htmlSerialize||!bu.test(a))&&(p.support.leadingWhitespace||!bn.test(a))&&!bz[(bp.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(bo,"<$1>");try{for(;d1&&typeof j=="string"&&bw.test(j))return this.each(function(){p(this).domManip(a,c,d)});if(p.isFunction(j))return this.each(function(e){var f=p(this);a[0]=j.call(this,e,c?f.html():b),f.domManip(a,c,d)});if(this[0]){e=p.buildFragment(a,this,k),g=e.fragment,f=g.firstChild,g.childNodes.length===1&&(g=f);if(f){c=c&&p.nodeName(f,"tr");for(h=e.cacheable||l-1;i0?this.clone(!0):this).get(),p(g[e])[b](d),f=f.concat(d);return this.pushStack(f,a,g.selector)}}),p.extend({clone:function(a,b,c){var d,e,f,g;p.support.html5Clone||p.isXMLDoc(a)||!bu.test("<"+a.nodeName+">")?g=a.cloneNode(!0):(bB.innerHTML=a.outerHTML,bB.removeChild(g=bB.firstChild));if((!p.support.noCloneEvent||!p.support.noCloneChecked)&&(a.nodeType===1||a.nodeType===11)&&!p.isXMLDoc(a)){bE(a,g),d=bF(a),e=bF(g);for(f=0;d[f];++f)e[f]&&bE(d[f],e[f])}if(b){bD(a,g);if(c){d=bF(a),e=bF(g);for(f=0;d[f];++f)bD(d[f],e[f])}}return d=e=null,g},clean:function(a,b,c,d){var f,g,h,i,j,k,l,m,n,o,q,r,s=b===e&&bA,t=[];if(!b||typeof 
b.createDocumentFragment=="undefined")b=e;for(f=0;(h=a[f])!=null;f++){typeof h=="number"&&(h+="");if(!h)continue;if(typeof h=="string")if(!br.test(h))h=b.createTextNode(h);else{s=s||bk(b),l=b.createElement("div"),s.appendChild(l),h=h.replace(bo,"<$1>"),i=(bp.exec(h)||["",""])[1].toLowerCase(),j=bz[i]||bz._default,k=j[0],l.innerHTML=j[1]+h+j[2];while(k--)l=l.lastChild;if(!p.support.tbody){m=bq.test(h),n=i==="table"&&!m?l.firstChild&&l.firstChild.childNodes:j[1]===""&&!m?l.childNodes:[];for(g=n.length-1;g>=0;--g)p.nodeName(n[g],"tbody")&&!n[g].childNodes.length&&n[g].parentNode.removeChild(n[g])}!p.support.leadingWhitespace&&bn.test(h)&&l.insertBefore(b.createTextNode(bn.exec(h)[0]),l.firstChild),h=l.childNodes,l.parentNode.removeChild(l)}h.nodeType?t.push(h):p.merge(t,h)}l&&(h=l=s=null);if(!p.support.appendChecked)for(f=0;(h=t[f])!=null;f++)p.nodeName(h,"input")?bG(h):typeof h.getElementsByTagName!="undefined"&&p.grep(h.getElementsByTagName("input"),bG);if(c){q=function(a){if(!a.type||bx.test(a.type))return d?d.push(a.parentNode?a.parentNode.removeChild(a):a):c.appendChild(a)};for(f=0;(h=t[f])!=null;f++)if(!p.nodeName(h,"script")||!q(h))c.appendChild(h),typeof h.getElementsByTagName!="undefined"&&(r=p.grep(p.merge([],h.getElementsByTagName("script")),q),t.splice.apply(t,[f+1,0].concat(r)),f+=r.length)}return t},cleanData:function(a,b){var c,d,e,f,g=0,h=p.expando,i=p.cache,j=p.support.deleteExpando,k=p.event.special;for(;(e=a[g])!=null;g++)if(b||p.acceptData(e)){d=e[h],c=d&&i[d];if(c){if(c.events)for(f in c.events)k[f]?p.event.remove(e,f):p.removeEvent(e,f,c.handle);i[d]&&(delete i[d],j?delete e[h]:e.removeAttribute?e.removeAttribute(h):e[h]=null,p.deletedIds.push(d))}}}}),function(){var a,b;p.uaMatch=function(a){a=a.toLowerCase();var b=/(chrome)[ \/]([\w.]+)/.exec(a)||/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||a.indexOf("compatible")<0&&/(mozilla)(?:.*? 
rv:([\w.]+)|)/.exec(a)||[];return{browser:b[1]||"",version:b[2]||"0"}},a=p.uaMatch(g.userAgent),b={},a.browser&&(b[a.browser]=!0,b.version=a.version),b.chrome?b.webkit=!0:b.webkit&&(b.safari=!0),p.browser=b,p.sub=function(){function a(b,c){return new a.fn.init(b,c)}p.extend(!0,a,this),a.superclass=this,a.fn=a.prototype=this(),a.fn.constructor=a,a.sub=this.sub,a.fn.init=function c(c,d){return d&&d instanceof p&&!(d instanceof a)&&(d=a(d)),p.fn.init.call(this,c,d,b)},a.fn.init.prototype=a.fn;var b=a(e);return a}}();var bH,bI,bJ,bK=/alpha\([^)]*\)/i,bL=/opacity=([^)]*)/,bM=/^(top|right|bottom|left)$/,bN=/^(none|table(?!-c[ea]).+)/,bO=/^margin/,bP=new RegExp("^("+q+")(.*)$","i"),bQ=new RegExp("^("+q+")(?!px)[a-z%]+$","i"),bR=new RegExp("^([-+])=("+q+")","i"),bS={},bT={position:"absolute",visibility:"hidden",display:"block"},bU={letterSpacing:0,fontWeight:400},bV=["Top","Right","Bottom","Left"],bW=["Webkit","O","Moz","ms"],bX=p.fn.toggle;p.fn.extend({css:function(a,c){return p.access(this,function(a,c,d){return d!==b?p.style(a,c,d):p.css(a,c)},a,c,arguments.length>1)},show:function(){return b$(this,!0)},hide:function(){return b$(this)},toggle:function(a,b){var c=typeof a=="boolean";return p.isFunction(a)&&p.isFunction(b)?bX.apply(this,arguments):this.each(function(){(c?a:bZ(this))?p(this).show():p(this).hide()})}}),p.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=bH(a,"opacity");return c===""?"1":c}}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":p.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,c,d,e){if(!a||a.nodeType===3||a.nodeType===8||!a.style)return;var f,g,h,i=p.camelCase(c),j=a.style;c=p.cssProps[i]||(p.cssProps[i]=bY(j,i)),h=p.cssHooks[c]||p.cssHooks[i];if(d===b)return h&&"get"in h&&(f=h.get(a,!1,e))!==b?f:j[c];g=typeof d,g==="string"&&(f=bR.exec(d))&&(d=(f[1]+1)*f[2]+parseFloat(p.css(a,c)),g="number");if(d==null||g==="number"&&isNaN(d))return;g==="number"&&!p.cssNumber[i]&&(d+="px");if(!h||!("set"in h)||(d=h.set(a,d,e))!==b)try{j[c]=d}catch(k){}},css:function(a,c,d,e){var f,g,h,i=p.camelCase(c);return c=p.cssProps[i]||(p.cssProps[i]=bY(a.style,i)),h=p.cssHooks[c]||p.cssHooks[i],h&&"get"in h&&(f=h.get(a,!0,e)),f===b&&(f=bH(a,c)),f==="normal"&&c in bU&&(f=bU[c]),d||e!==b?(g=parseFloat(f),d||p.isNumeric(g)?g||0:f):f},swap:function(a,b,c){var d,e,f={};for(e in b)f[e]=a.style[e],a.style[e]=b[e];d=c.call(a);for(e in b)a.style[e]=f[e];return d}}),a.getComputedStyle?bH=function(b,c){var d,e,f,g,h=a.getComputedStyle(b,null),i=b.style;return h&&(d=h[c],d===""&&!p.contains(b.ownerDocument,b)&&(d=p.style(b,c)),bQ.test(d)&&bO.test(c)&&(e=i.width,f=i.minWidth,g=i.maxWidth,i.minWidth=i.maxWidth=i.width=d,d=h.width,i.width=e,i.minWidth=f,i.maxWidth=g)),d}:e.documentElement.currentStyle&&(bH=function(a,b){var c,d,e=a.currentStyle&&a.currentStyle[b],f=a.style;return e==null&&f&&f[b]&&(e=f[b]),bQ.test(e)&&!bM.test(b)&&(c=f.left,d=a.runtimeStyle&&a.runtimeStyle.left,d&&(a.runtimeStyle.left=a.currentStyle.left),f.left=b==="fontSize"?"1em":e,e=f.pixelLeft+"px",f.left=c,d&&(a.runtimeStyle.left=d)),e===""?"auto":e}),p.each(["height","width"],function(a,b){p.cssHooks[b]={get:function(a,c,d){if(c)return a.offsetWidth===0&&bN.test(bH(a,"display"))?p.swap(a,bT,function(){return cb(a,b,d)}):cb(a,b,d)},set:function(a,c,d){return b_(a,c,d?ca(a,b,d,p.support.boxSizing&&p.css(a,"boxSizing")==="border-box"):0)}}}),p.support.opacity||(p.cssHooks.opacity={get:function(a,b){return 
bL.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=p.isNumeric(b)?"alpha(opacity="+b*100+")":"",f=d&&d.filter||c.filter||"";c.zoom=1;if(b>=1&&p.trim(f.replace(bK,""))===""&&c.removeAttribute){c.removeAttribute("filter");if(d&&!d.filter)return}c.filter=bK.test(f)?f.replace(bK,e):f+" "+e}}),p(function(){p.support.reliableMarginRight||(p.cssHooks.marginRight={get:function(a,b){return p.swap(a,{display:"inline-block"},function(){if(b)return bH(a,"marginRight")})}}),!p.support.pixelPosition&&p.fn.position&&p.each(["top","left"],function(a,b){p.cssHooks[b]={get:function(a,c){if(c){var d=bH(a,b);return bQ.test(d)?p(a).position()[b]+"px":d}}}})}),p.expr&&p.expr.filters&&(p.expr.filters.hidden=function(a){return a.offsetWidth===0&&a.offsetHeight===0||!p.support.reliableHiddenOffsets&&(a.style&&a.style.display||bH(a,"display"))==="none"},p.expr.filters.visible=function(a){return!p.expr.filters.hidden(a)}),p.each({margin:"",padding:"",border:"Width"},function(a,b){p.cssHooks[a+b]={expand:function(c){var d,e=typeof c=="string"?c.split(" "):[c],f={};for(d=0;d<4;d++)f[a+bV[d]+b]=e[d]||e[d-2]||e[0];return f}},bO.test(a)||(p.cssHooks[a+b].set=b_)});var cd=/%20/g,ce=/\[\]$/,cf=/\r?\n/g,cg=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,ch=/^(?:select|textarea)/i;p.fn.extend({serialize:function(){return p.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?p.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ch.test(this.nodeName)||cg.test(this.type))}).map(function(a,b){var c=p(this).val();return c==null?null:p.isArray(c)?p.map(c,function(a,c){return{name:b.name,value:a.replace(cf,"\r\n")}}):{name:b.name,value:c.replace(cf,"\r\n")}}).get()}}),p.param=function(a,c){var d,e=[],f=function(a,b){b=p.isFunction(b)?b():b==null?"":b,e[e.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};c===b&&(c=p.ajaxSettings&&p.ajaxSettings.traditional);if(p.isArray(a)||a.jquery&&!p.isPlainObject(a))p.each(a,function(){f(this.name,this.value)});else for(d in a)ci(d,a[d],c,f);return e.join("&").replace(cd,"+")};var cj,ck,cl=/#.*$/,cm=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,cn=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,co=/^(?:GET|HEAD)$/,cp=/^\/\//,cq=/\?/,cr=/)<[^<]*)*<\/script>/gi,cs=/([?&])_=[^&]*/,ct=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,cu=p.fn.load,cv={},cw={},cx=["*/"]+["*"];try{ck=f.href}catch(cy){ck=e.createElement("a"),ck.href="",ck=ck.href}cj=ct.exec(ck.toLowerCase())||[],p.fn.load=function(a,c,d){if(typeof a!="string"&&cu)return cu.apply(this,arguments);if(!this.length)return this;var e,f,g,h=this,i=a.indexOf(" ");return i>=0&&(e=a.slice(i,a.length),a=a.slice(0,i)),p.isFunction(c)?(d=c,c=b):c&&typeof c=="object"&&(f="POST"),p.ajax({url:a,type:f,dataType:"html",data:c,complete:function(a,b){d&&h.each(d,g||[a.responseText,b,a])}}).done(function(a){g=arguments,h.html(e?p("
    ").append(a.replace(cr,"")).find(e):a)}),this},p.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){p.fn[b]=function(a){return this.on(b,a)}}),p.each(["get","post"],function(a,c){p[c]=function(a,d,e,f){return p.isFunction(d)&&(f=f||e,e=d,d=b),p.ajax({type:c,url:a,data:d,success:e,dataType:f})}}),p.extend({getScript:function(a,c){return p.get(a,b,c,"script")},getJSON:function(a,b,c){return p.get(a,b,c,"json")},ajaxSetup:function(a,b){return b?cB(a,p.ajaxSettings):(b=a,a=p.ajaxSettings),cB(a,b),a},ajaxSettings:{url:ck,isLocal:cn.test(cj[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":cx},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a.String,"text html":!0,"text json":p.parseJSON,"text xml":p.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:cz(cv),ajaxTransport:cz(cw),ajax:function(a,c){function y(a,c,f,i){var k,s,t,u,w,y=c;if(v===2)return;v=2,h&&clearTimeout(h),g=b,e=i||"",x.readyState=a>0?4:0,f&&(u=cC(l,x,f));if(a>=200&&a<300||a===304)l.ifModified&&(w=x.getResponseHeader("Last-Modified"),w&&(p.lastModified[d]=w),w=x.getResponseHeader("Etag"),w&&(p.etag[d]=w)),a===304?(y="notmodified",k=!0):(k=cD(l,u),y=k.state,s=k.data,t=k.error,k=!t);else{t=y;if(!y||a)y="error",a<0&&(a=0)}x.status=a,x.statusText=(c||y)+"",k?o.resolveWith(m,[s,y,x]):o.rejectWith(m,[x,y,t]),x.statusCode(r),r=b,j&&n.trigger("ajax"+(k?"Success":"Error"),[x,l,k?s:t]),q.fireWith(m,[x,y]),j&&(n.trigger("ajaxComplete",[x,l]),--p.active||p.event.trigger("ajaxStop"))}typeof a=="object"&&(c=a,a=b),c=c||{};var d,e,f,g,h,i,j,k,l=p.ajaxSetup({},c),m=l.context||l,n=m!==l&&(m.nodeType||m instanceof p)?p(m):p.event,o=p.Deferred(),q=p.Callbacks("once memory"),r=l.statusCode||{},t={},u={},v=0,w="canceled",x={readyState:0,setRequestHeader:function(a,b){if(!v){var c=a.toLowerCase();a=u[c]=u[c]||a,t[a]=b}return this},getAllResponseHeaders:function(){return v===2?e:null},getResponseHeader:function(a){var c;if(v===2){if(!f){f={};while(c=cm.exec(e))f[c[1].toLowerCase()]=c[2]}c=f[a.toLowerCase()]}return c===b?null:c},overrideMimeType:function(a){return v||(l.mimeType=a),this},abort:function(a){return a=a||w,g&&g.abort(a),y(0,a),this}};o.promise(x),x.success=x.done,x.error=x.fail,x.complete=q.add,x.statusCode=function(a){if(a){var b;if(v<2)for(b in a)r[b]=[r[b],a[b]];else b=a[x.status],x.always(b)}return this},l.url=((a||l.url)+"").replace(cl,"").replace(cp,cj[1]+"//"),l.dataTypes=p.trim(l.dataType||"*").toLowerCase().split(s),l.crossDomain==null&&(i=ct.exec(l.url.toLowerCase())||!1,l.crossDomain=i&&i.join(":")+(i[3]?"":i[1]==="http:"?80:443)!==cj.join(":")+(cj[3]?"":cj[1]==="http:"?80:443)),l.data&&l.processData&&typeof l.data!="string"&&(l.data=p.param(l.data,l.traditional)),cA(cv,l,c,x);if(v===2)return x;j=l.global,l.type=l.type.toUpperCase(),l.hasContent=!co.test(l.type),j&&p.active++===0&&p.event.trigger("ajaxStart");if(!l.hasContent){l.data&&(l.url+=(cq.test(l.url)?"&":"?")+l.data,delete l.data),d=l.url;if(l.cache===!1){var 
z=p.now(),A=l.url.replace(cs,"$1_="+z);l.url=A+(A===l.url?(cq.test(l.url)?"&":"?")+"_="+z:"")}}(l.data&&l.hasContent&&l.contentType!==!1||c.contentType)&&x.setRequestHeader("Content-Type",l.contentType),l.ifModified&&(d=d||l.url,p.lastModified[d]&&x.setRequestHeader("If-Modified-Since",p.lastModified[d]),p.etag[d]&&x.setRequestHeader("If-None-Match",p.etag[d])),x.setRequestHeader("Accept",l.dataTypes[0]&&l.accepts[l.dataTypes[0]]?l.accepts[l.dataTypes[0]]+(l.dataTypes[0]!=="*"?", "+cx+"; q=0.01":""):l.accepts["*"]);for(k in l.headers)x.setRequestHeader(k,l.headers[k]);if(!l.beforeSend||l.beforeSend.call(m,x,l)!==!1&&v!==2){w="abort";for(k in{success:1,error:1,complete:1})x[k](l[k]);g=cA(cw,l,c,x);if(!g)y(-1,"No Transport");else{x.readyState=1,j&&n.trigger("ajaxSend",[x,l]),l.async&&l.timeout>0&&(h=setTimeout(function(){x.abort("timeout")},l.timeout));try{v=1,g.send(t,y)}catch(B){if(v<2)y(-1,B);else throw B}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var cE=[],cF=/\?/,cG=/(=)\?(?=&|$)|\?\?/,cH=p.now();p.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=cE.pop()||p.expando+"_"+cH++;return this[a]=!0,a}}),p.ajaxPrefilter("json jsonp",function(c,d,e){var f,g,h,i=c.data,j=c.url,k=c.jsonp!==!1,l=k&&cG.test(j),m=k&&!l&&typeof i=="string"&&!(c.contentType||"").indexOf("application/x-www-form-urlencoded")&&cG.test(i);if(c.dataTypes[0]==="jsonp"||l||m)return f=c.jsonpCallback=p.isFunction(c.jsonpCallback)?c.jsonpCallback():c.jsonpCallback,g=a[f],l?c.url=j.replace(cG,"$1"+f):m?c.data=i.replace(cG,"$1"+f):k&&(c.url+=(cF.test(j)?"&":"?")+c.jsonp+"="+f),c.converters["script json"]=function(){return h||p.error(f+" was not called"),h[0]},c.dataTypes[0]="json",a[f]=function(){h=arguments},e.always(function(){a[f]=g,c[f]&&(c.jsonpCallback=d.jsonpCallback,cE.push(f)),h&&p.isFunction(g)&&g(h[0]),h=g=b}),"script"}),p.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(a){return p.globalEval(a),a}}}),p.ajaxPrefilter("script",function(a){a.cache===b&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),p.ajaxTransport("script",function(a){if(a.crossDomain){var c,d=e.head||e.getElementsByTagName("head")[0]||e.documentElement;return{send:function(f,g){c=e.createElement("script"),c.async="async",a.scriptCharset&&(c.charset=a.scriptCharset),c.src=a.url,c.onload=c.onreadystatechange=function(a,e){if(e||!c.readyState||/loaded|complete/.test(c.readyState))c.onload=c.onreadystatechange=null,d&&c.parentNode&&d.removeChild(c),c=b,e||g(200,"success")},d.insertBefore(c,d.firstChild)},abort:function(){c&&c.onload(0,1)}}}});var cI,cJ=a.ActiveXObject?function(){for(var a in cI)cI[a](0,1)}:!1,cK=0;p.ajaxSettings.xhr=a.ActiveXObject?function(){return!this.isLocal&&cL()||cM()}:cL,function(a){p.extend(p.support,{ajax:!!a,cors:!!a&&"withCredentials"in a})}(p.ajaxSettings.xhr()),p.support.ajax&&p.ajaxTransport(function(c){if(!c.crossDomain||p.support.cors){var d;return{send:function(e,f){var g,h,i=c.xhr();c.username?i.open(c.type,c.url,c.async,c.username,c.password):i.open(c.type,c.url,c.async);if(c.xhrFields)for(h in c.xhrFields)i[h]=c.xhrFields[h];c.mimeType&&i.overrideMimeType&&i.overrideMimeType(c.mimeType),!c.crossDomain&&!e["X-Requested-With"]&&(e["X-Requested-With"]="XMLHttpRequest");try{for(h in e)i.setRequestHeader(h,e[h])}catch(j){}i.send(c.hasContent&&c.data||null),d=function(a,e){var 
h,j,k,l,m;try{if(d&&(e||i.readyState===4)){d=b,g&&(i.onreadystatechange=p.noop,cJ&&delete cI[g]);if(e)i.readyState!==4&&i.abort();else{h=i.status,k=i.getAllResponseHeaders(),l={},m=i.responseXML,m&&m.documentElement&&(l.xml=m);try{l.text=i.responseText}catch(a){}try{j=i.statusText}catch(n){j=""}!h&&c.isLocal&&!c.crossDomain?h=l.text?200:404:h===1223&&(h=204)}}}catch(o){e||f(-1,o)}l&&f(h,j,l,k)},c.async?i.readyState===4?setTimeout(d,0):(g=++cK,cJ&&(cI||(cI={},p(a).unload(cJ)),cI[g]=d),i.onreadystatechange=d):d()},abort:function(){d&&d(0,1)}}}});var cN,cO,cP=/^(?:toggle|show|hide)$/,cQ=new RegExp("^(?:([-+])=|)("+q+")([a-z%]*)$","i"),cR=/queueHooks$/,cS=[cY],cT={"*":[function(a,b){var c,d,e=this.createTween(a,b),f=cQ.exec(b),g=e.cur(),h=+g||0,i=1,j=20;if(f){c=+f[2],d=f[3]||(p.cssNumber[a]?"":"px");if(d!=="px"&&h){h=p.css(e.elem,a,!0)||c||1;do i=i||".5",h=h/i,p.style(e.elem,a,h+d);while(i!==(i=e.cur()/g)&&i!==1&&--j)}e.unit=d,e.start=h,e.end=f[1]?h+(f[1]+1)*c:c}return e}]};p.Animation=p.extend(cW,{tweener:function(a,b){p.isFunction(a)?(b=a,a=["*"]):a=a.split(" ");var c,d=0,e=a.length;for(;d-1,j={},k={},l,m;i?(k=e.position(),l=k.top,m=k.left):(l=parseFloat(g)||0,m=parseFloat(h)||0),p.isFunction(b)&&(b=b.call(a,c,f)),b.top!=null&&(j.top=b.top-f.top+l),b.left!=null&&(j.left=b.left-f.left+m),"using"in b?b.using.call(a,j):e.css(j)}},p.fn.extend({position:function(){if(!this[0])return;var a=this[0],b=this.offsetParent(),c=this.offset(),d=c_.test(b[0].nodeName)?{top:0,left:0}:b.offset();return c.top-=parseFloat(p.css(a,"marginTop"))||0,c.left-=parseFloat(p.css(a,"marginLeft"))||0,d.top+=parseFloat(p.css(b[0],"borderTopWidth"))||0,d.left+=parseFloat(p.css(b[0],"borderLeftWidth"))||0,{top:c.top-d.top,left:c.left-d.left}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||e.body;while(a&&!c_.test(a.nodeName)&&p.css(a,"position")==="static")a=a.offsetParent;return a||e.body})}}),p.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(a,c){var d=/Y/.test(c);p.fn[a]=function(e){return p.access(this,function(a,e,f){var g=da(a);if(f===b)return g?c in g?g[c]:g.document.documentElement[e]:a[e];g?g.scrollTo(d?p(g).scrollLeft():f,d?f:p(g).scrollTop()):a[e]=f},a,e,arguments.length,null)}}),p.each({Height:"height",Width:"width"},function(a,c){p.each({padding:"inner"+a,content:c,"":"outer"+a},function(d,e){p.fn[e]=function(e,f){var g=arguments.length&&(d||typeof e!="boolean"),h=d||(e===!0||f===!0?"margin":"border");return p.access(this,function(c,d,e){var f;return p.isWindow(c)?c.document.documentElement["client"+a]:c.nodeType===9?(f=c.documentElement,Math.max(c.body["scroll"+a],f["scroll"+a],c.body["offset"+a],f["offset"+a],f["client"+a])):e===b?p.css(c,d,e,h):p.style(c,d,e,h)},c,g?e:b,g,null)}})}),a.jQuery=a.$=p,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return p})})(window); \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js deleted file mode 100644 index 4dd48675b7..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js +++ /dev/null @@ -1,5486 +0,0 @@ -/** - * @preserve jquery.layout 1.3.0 - Release Candidate 30.62 - * $Date: 2012-08-04 08:00:00 (Thu, 23 Aug 2012) $ - * $Rev: 303006 $ - * - * Copyright (c) 2012 - * Fabrizio Balliano (http://www.fabrizioballiano.net) - * Kevin Dalman (http://allpro.net) - * - * Dual licensed under the GPL 
(http://www.gnu.org/licenses/gpl.html) - * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses. - * - * Changelog: http://layout.jquery-dev.net/changelog.cfm#1.3.0.rc30.62 - * NOTE: This is a short-term release to patch a couple of bugs. - * These bugs are listed as officially fixed in RC30.7, which will be released shortly. - * - * Docs: http://layout.jquery-dev.net/documentation.html - * Tips: http://layout.jquery-dev.net/tips.html - * Help: http://groups.google.com/group/jquery-ui-layout - */ - -/* JavaDoc Info: http://code.google.com/closure/compiler/docs/js-for-compiler.html - * {!Object} non-nullable type (never NULL) - * {?string} nullable type (sometimes NULL) - default for {Object} - * {number=} optional parameter - * {*} ALL types - */ - -// NOTE: For best readability, view with a fixed-width font and tabs equal to 4-chars - -;(function ($) { - -// alias Math methods - used a lot! -var min = Math.min -, max = Math.max -, round = Math.floor - -, isStr = function (v) { return $.type(v) === "string"; } - -, runPluginCallbacks = function (Instance, a_fn) { - if ($.isArray(a_fn)) - for (var i=0, c=a_fn.length; i
    ').appendTo("body"); - var d = { width: $c.width() - $c[0].clientWidth, height: $c.height() - $c[0].clientHeight }; - $c.remove(); - window.scrollbarWidth = d.width; - window.scrollbarHeight = d.height; - return dim.match(/^(width|height)$/) ? d[dim] : d; - } - - - /** - * Returns hash container 'display' and 'visibility' - * - * @see $.swap() - swaps CSS, runs callback, resets CSS - */ -, showInvisibly: function ($E, force) { - if ($E && $E.length && (force || $E.css('display') === "none")) { // only if not *already hidden* - var s = $E[0].style - // save ONLY the 'style' props because that is what we must restore - , CSS = { display: s.display || '', visibility: s.visibility || '' }; - // show element 'invisibly' so can be measured - $E.css({ display: "block", visibility: "hidden" }); - return CSS; - } - return {}; - } - - /** - * Returns data for setting size of an element (container or a pane). - * - * @see _create(), onWindowResize() for container, plus others for pane - * @return JSON Returns a hash of all dimensions: top, bottom, left, right, outerWidth, innerHeight, etc - */ -, getElementDimensions: function ($E) { - var - d = {} // dimensions hash - , x = d.css = {} // CSS hash - , i = {} // TEMP insets - , b, p // TEMP border, padding - , N = $.layout.cssNum - , off = $E.offset() - ; - d.offsetLeft = off.left; - d.offsetTop = off.top; - - $.each("Left,Right,Top,Bottom".split(","), function (idx, e) { // e = edge - b = x["border" + e] = $.layout.borderWidth($E, e); - p = x["padding"+ e] = $.layout.cssNum($E, "padding"+e); - i[e] = b + p; // total offset of content from outer side - d["inset"+ e] = p; // eg: insetLeft = paddingLeft - }); - - d.offsetWidth = $E.innerWidth(); // offsetWidth is used in calc when doing manual resize - d.offsetHeight = $E.innerHeight(); // ditto - d.outerWidth = $E.outerWidth(); - d.outerHeight = $E.outerHeight(); - d.innerWidth = max(0, d.outerWidth - i.Left - i.Right); - d.innerHeight = max(0, d.outerHeight - i.Top - i.Bottom); - - x.width = $E.width(); - x.height = $E.height(); - x.top = N($E,"top",true); - x.bottom = N($E,"bottom",true); - x.left = N($E,"left",true); - x.right = N($E,"right",true); - - //d.visible = $E.is(":visible");// && x.width > 0 && x.height > 0; - - return d; - } - -, getElementCSS: function ($E, list) { - var - CSS = {} - , style = $E[0].style - , props = list.split(",") - , sides = "Top,Bottom,Left,Right".split(",") - , attrs = "Color,Style,Width".split(",") - , p, s, a, i, j, k - ; - for (i=0; i < props.length; i++) { - p = props[i]; - if (p.match(/(border|padding|margin)$/)) - for (j=0; j < 4; j++) { - s = sides[j]; - if (p === "border") - for (k=0; k < 3; k++) { - a = attrs[k]; - CSS[p+s+a] = style[p+s+a]; - } - else - CSS[p+s] = style[p+s]; - } - else - CSS[p] = style[p]; - }; - return CSS - } - - /** - * Return the innerWidth for the current browser/doctype - * - * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles() - * @param {Array.} $E Must pass a jQuery object - first element is processed - * @param {number=} outerWidth (optional) Can pass a width, allowing calculations BEFORE element is resized - * @return {number} Returns the innerWidth of the elem by subtracting padding and borders - */ -, cssWidth: function ($E, outerWidth) { - // a 'calculated' outerHeight can be passed so borders and/or padding are removed if needed - if (outerWidth <= 0) return 0; - - if (!$.layout.browser.boxModel) return outerWidth; - - // strip border and padding from outerWidth to get CSS Width - var b = 
$.layout.borderWidth - , n = $.layout.cssNum - , W = outerWidth - - b($E, "Left") - - b($E, "Right") - - n($E, "paddingLeft") - - n($E, "paddingRight"); - - return max(0,W); - } - - /** - * Return the innerHeight for the current browser/doctype - * - * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles() - * @param {Array.} $E Must pass a jQuery object - first element is processed - * @param {number=} outerHeight (optional) Can pass a width, allowing calculations BEFORE element is resized - * @return {number} Returns the innerHeight of the elem by subtracting padding and borders - */ -, cssHeight: function ($E, outerHeight) { - // a 'calculated' outerHeight can be passed so borders and/or padding are removed if needed - if (outerHeight <= 0) return 0; - - if (!$.layout.browser.boxModel) return outerHeight; - - // strip border and padding from outerHeight to get CSS Height - var b = $.layout.borderWidth - , n = $.layout.cssNum - , H = outerHeight - - b($E, "Top") - - b($E, "Bottom") - - n($E, "paddingTop") - - n($E, "paddingBottom"); - - return max(0,H); - } - - /** - * Returns the 'current CSS numeric value' for a CSS property - 0 if property does not exist - * - * @see Called by many methods - * @param {Array.} $E Must pass a jQuery object - first element is processed - * @param {string} prop The name of the CSS property, eg: top, width, etc. - * @param {boolean=} [allowAuto=false] true = return 'auto' if that is value; false = return 0 - * @return {(string|number)} Usually used to get an integer value for position (top, left) or size (height, width) - */ -, cssNum: function ($E, prop, allowAuto) { - if (!$E.jquery) $E = $($E); - var CSS = $.layout.showInvisibly($E) - , p = $.css($E[0], prop, true) - , v = allowAuto && p=="auto" ? p : (parseInt(p, 10) || 0); - $E.css( CSS ); // RESET - return v; - } - -, borderWidth: function (el, side) { - if (el.jquery) el = el[0]; - var b = "border"+ side.substr(0,1).toUpperCase() + side.substr(1); // left => Left - return $.css(el, b+"Style", true) === "none" ? 0 : (parseInt($.css(el, b+"Width", true), 10) || 0); - } - - /** - * Mouse-tracking utility - FUTURE REFERENCE - * - * init: if (!window.mouse) { - * window.mouse = { x: 0, y: 0 }; - * $(document).mousemove( $.layout.trackMouse ); - * } - * - * @param {Object} evt - * -, trackMouse: function (evt) { - window.mouse = { x: evt.clientX, y: evt.clientY }; - } - */ - - /** - * SUBROUTINE for preventPrematureSlideClose option - * - * @param {Object} evt - * @param {Object=} el - */ -, isMouseOverElem: function (evt, el) { - var - $E = $(el || this) - , d = $E.offset() - , T = d.top - , L = d.left - , R = L + $E.outerWidth() - , B = T + $E.outerHeight() - , x = evt.pageX // evt.clientX ? - , y = evt.pageY // evt.clientY ? 
- ; - // if X & Y are < 0, probably means is over an open SELECT - return ($.layout.browser.msie && x < 0 && y < 0) || ((x >= L && x <= R) && (y >= T && y <= B)); - } - - /** - * Message/Logging Utility - * - * @example $.layout.msg("My message"); // log text - * @example $.layout.msg("My message", true); // alert text - * @example $.layout.msg({ foo: "bar" }, "Title"); // log hash-data, with custom title - * @example $.layout.msg({ foo: "bar" }, true, "Title", { sort: false }); -OR- - * @example $.layout.msg({ foo: "bar" }, "Title", { sort: false, display: true }); // alert hash-data - * - * @param {(Object|string)} info String message OR Hash/Array - * @param {(Boolean|string|Object)=} [popup=false] True means alert-box - can be skipped - * @param {(Object|string)=} [debugTitle=""] Title for Hash data - can be skipped - * @param {Object=} [debugOpts] Extra options for debug output - */ -, msg: function (info, popup, debugTitle, debugOpts) { - if ($.isPlainObject(info) && window.debugData) { - if (typeof popup === "string") { - debugOpts = debugTitle; - debugTitle = popup; - } - else if (typeof debugTitle === "object") { - debugOpts = debugTitle; - debugTitle = null; - } - var t = debugTitle || "log( )" - , o = $.extend({ sort: false, returnHTML: false, display: false }, debugOpts); - if (popup === true || o.display) - debugData( info, t, o ); - else if (window.console) - console.log(debugData( info, t, o )); - } - else if (popup) - alert(info); - else if (window.console) - console.log(info); - else { - var id = "#layoutLogger" - , $l = $(id); - if (!$l.length) - $l = createLog(); - $l.children("ul").append('
'+ info.replace(/\/g,">") +'
'); - } - - function createLog () { - var pos = $.support.fixedPosition ? 'fixed' : 'absolute' - , $e = $('
    ' - + '
    ' - + 'XLayout console.log
    ' - + '
      ' - + '
      ' - ).appendTo("body"); - $e.css('left', $(window).width() - $e.outerWidth() - 5) - if ($.ui.draggable) $e.draggable({ handle: ':first-child' }); - return $e; - }; - } - -}; - -// DEFAULT OPTIONS -$.layout.defaults = { -/* - * LAYOUT & LAYOUT-CONTAINER OPTIONS - * - none of these options are applicable to individual panes - */ - name: "" // Not required, but useful for buttons and used for the state-cookie -, containerSelector: "" // ONLY used when specifying a childOptions - to find container-element that is NOT directly-nested -, containerClass: "ui-layout-container" // layout-container element -, scrollToBookmarkOnLoad: true // after creating a layout, scroll to bookmark in URL (.../page.htm#myBookmark) -, resizeWithWindow: true // bind thisLayout.resizeAll() to the window.resize event -, resizeWithWindowDelay: 200 // delay calling resizeAll because makes window resizing very jerky -, resizeWithWindowMaxDelay: 0 // 0 = none - force resize every XX ms while window is being resized -, onresizeall_start: null // CALLBACK when resizeAll() STARTS - NOT pane-specific -, onresizeall_end: null // CALLBACK when resizeAll() ENDS - NOT pane-specific -, onload_start: null // CALLBACK when Layout inits - after options initialized, but before elements -, onload_end: null // CALLBACK when Layout inits - after EVERYTHING has been initialized -, onunload_start: null // CALLBACK when Layout is destroyed OR onWindowUnload -, onunload_end: null // CALLBACK when Layout is destroyed OR onWindowUnload -, initPanes: true // false = DO NOT initialize the panes onLoad - will init later -, showErrorMessages: true // enables fatal error messages to warn developers of common errors -, showDebugMessages: false // display console-and-alert debug msgs - IF this Layout version _has_ debugging code! -// Changing this zIndex value will cause other zIndex values to automatically change -, zIndex: null // the PANE zIndex - resizers and masks will be +1 -// DO NOT CHANGE the zIndex values below unless you clearly understand their relationships -, zIndexes: { // set _default_ z-index values here... - pane_normal: 0 // normal z-index for panes - , content_mask: 1 // applied to overlays used to mask content INSIDE panes during resizing - , resizer_normal: 2 // normal z-index for resizer-bars - , pane_sliding: 100 // applied to *BOTH* the pane and its resizer when a pane is 'slid open' - , pane_animate: 1000 // applied to the pane when being animated - not applied to the resizer - , resizer_drag: 10000 // applied to the CLONED resizer-bar when being 'dragged' - } -, errors: { - pane: "pane" // description of "layout pane element" - used only in error messages - , selector: "selector" // description of "jQuery-selector" - used only in error messages - , addButtonError: "Error Adding Button \n\nInvalid " - , containerMissing: "UI Layout Initialization Error\n\nThe specified layout-container does not exist." - , centerPaneMissing: "UI Layout Initialization Error\n\nThe center-pane element does not exist.\n\nThe center-pane is a required element." - , noContainerHeight: "UI Layout Initialization Warning\n\nThe layout-container \"CONTAINER\" has no height.\n\nTherefore the layout is 0-height and hence 'invisible'!" - , callbackError: "UI Layout Callback Error\n\nThe EVENT callback is not a valid function." 
- } -/* - * PANE DEFAULT SETTINGS - * - settings under the 'panes' key become the default settings for *all panes* - * - ALL pane-options can also be set specifically for each panes, which will override these 'default values' - */ -, panes: { // default options for 'all panes' - will be overridden by 'per-pane settings' - applyDemoStyles: false // NOTE: renamed from applyDefaultStyles for clarity - , closable: true // pane can open & close - , resizable: true // when open, pane can be resized - , slidable: true // when closed, pane can 'slide open' over other panes - closes on mouse-out - , initClosed: false // true = init pane as 'closed' - , initHidden: false // true = init pane as 'hidden' - no resizer-bar/spacing - // SELECTORS - //, paneSelector: "" // MUST be pane-specific - jQuery selector for pane - , contentSelector: ".ui-layout-content" // INNER div/element to auto-size so only it scrolls, not the entire pane! - , contentIgnoreSelector: ".ui-layout-ignore" // element(s) to 'ignore' when measuring 'content' - , findNestedContent: false // true = $P.find(contentSelector), false = $P.children(contentSelector) - // GENERIC ROOT-CLASSES - for auto-generated classNames - , paneClass: "ui-layout-pane" // Layout Pane - , resizerClass: "ui-layout-resizer" // Resizer Bar - , togglerClass: "ui-layout-toggler" // Toggler Button - , buttonClass: "ui-layout-button" // CUSTOM Buttons - eg: '[ui-layout-button]-toggle/-open/-close/-pin' - // ELEMENT SIZE & SPACING - //, size: 100 // MUST be pane-specific -initial size of pane - , minSize: 0 // when manually resizing a pane - , maxSize: 0 // ditto, 0 = no limit - , spacing_open: 6 // space between pane and adjacent panes - when pane is 'open' - , spacing_closed: 6 // ditto - when pane is 'closed' - , togglerLength_open: 50 // Length = WIDTH of toggler button on north/south sides - HEIGHT on east/west sides - , togglerLength_closed: 50 // 100% OR -1 means 'full height/width of resizer bar' - 0 means 'hidden' - , togglerAlign_open: "center" // top/left, bottom/right, center, OR... - , togglerAlign_closed: "center" // 1 => nn = offset from top/left, -1 => -nn == offset from bottom/right - , togglerContent_open: "" // text or HTML to put INSIDE the toggler - , togglerContent_closed: "" // ditto - // RESIZING OPTIONS - , resizerDblClickToggle: true // - , autoResize: true // IF size is 'auto' or a percentage, then recalc 'pixel size' whenever the layout resizes - , autoReopen: true // IF a pane was auto-closed due to noRoom, reopen it when there is room? 
False = leave it closed - , resizerDragOpacity: 1 // option for ui.draggable - //, resizerCursor: "" // MUST be pane-specific - cursor when over resizer-bar - , maskContents: false // true = add DIV-mask over-or-inside this pane so can 'drag' over IFRAMES - , maskObjects: false // true = add IFRAME-mask over-or-inside this pane to cover objects/applets - content-mask will overlay this mask - , maskZindex: null // will override zIndexes.content_mask if specified - not applicable to iframe-panes - , resizingGrid: false // grid size that the resizers will snap-to during resizing, eg: [20,20] - , livePaneResizing: false // true = LIVE Resizing as resizer is dragged - , liveContentResizing: false // true = re-measure header/footer heights as resizer is dragged - , liveResizingTolerance: 1 // how many px change before pane resizes, to control performance - // SLIDING OPTIONS - , sliderCursor: "pointer" // cursor when resizer-bar will trigger 'sliding' - , slideTrigger_open: "click" // click, dblclick, mouseenter - , slideTrigger_close: "mouseleave"// click, mouseleave - , slideDelay_open: 300 // applies only for mouseenter event - 0 = instant open - , slideDelay_close: 300 // applies only for mouseleave event (300ms is the minimum!) - , hideTogglerOnSlide: false // when pane is slid-open, should the toggler show? - , preventQuickSlideClose: $.layout.browser.webkit // Chrome triggers slideClosed as it is opening - , preventPrematureSlideClose: false // handle incorrect mouseleave trigger, like when over a SELECT-list in IE - // PANE-SPECIFIC TIPS & MESSAGES - , tips: { - Open: "Open" // eg: "Open Pane" - , Close: "Close" - , Resize: "Resize" - , Slide: "Slide Open" - , Pin: "Pin" - , Unpin: "Un-Pin" - , noRoomToOpen: "Not enough room to show this panel." // alert if user tries to open a pane that cannot - , minSizeWarning: "Panel has reached its minimum size" // displays in browser statusbar - , maxSizeWarning: "Panel has reached its maximum size" // ditto - } - // HOT-KEYS & MISC - , showOverflowOnHover: false // will bind allowOverflow() utility to pane.onMouseOver - , enableCursorHotkey: true // enabled 'cursor' hotkeys - //, customHotkey: "" // MUST be pane-specific - EITHER a charCode OR a character - , customHotkeyModifier: "SHIFT" // either 'SHIFT', 'CTRL' or 'CTRL+SHIFT' - NOT 'ALT' - // PANE ANIMATION - // NOTE: fxSss_open, fxSss_close & fxSss_size options (eg: fxName_open) are auto-generated if not passed - , fxName: "slide" // ('none' or blank), slide, drop, scale -- only relevant to 'open' & 'close', NOT 'size' - , fxSpeed: null // slow, normal, fast, 200, nnn - if passed, will OVERRIDE fxSettings.duration - , fxSettings: {} // can be passed, eg: { easing: "easeOutBounce", duration: 1500 } - , fxOpacityFix: true // tries to fix opacity in IE to restore anti-aliasing after animation - , animatePaneSizing: false // true = animate resizing after dragging resizer-bar OR sizePane() is called - /* NOTE: Action-specific FX options are auto-generated from the options above if not specifically set: - fxName_open: "slide" // 'Open' pane animation - fnName_close: "slide" // 'Close' pane animation - fxName_size: "slide" // 'Size' pane animation - when animatePaneSizing = true - fxSpeed_open: null - fxSpeed_close: null - fxSpeed_size: null - fxSettings_open: {} - fxSettings_close: {} - fxSettings_size: {} - */ - // CHILD/NESTED LAYOUTS - , childOptions: null // Layout-options for nested/child layout - even {} is valid as options - , initChildLayout: true // true = child layout will be created as 
soon as _this_ layout completes initialization - , destroyChildLayout: true // true = destroy child-layout if this pane is destroyed - , resizeChildLayout: true // true = trigger child-layout.resizeAll() when this pane is resized - // EVENT TRIGGERING - , triggerEventsOnLoad: false // true = trigger onopen OR onclose callbacks when layout initializes - , triggerEventsDuringLiveResize: true // true = trigger onresize callback REPEATEDLY if livePaneResizing==true - // PANE CALLBACKS - , onshow_start: null // CALLBACK when pane STARTS to Show - BEFORE onopen/onhide_start - , onshow_end: null // CALLBACK when pane ENDS being Shown - AFTER onopen/onhide_end - , onhide_start: null // CALLBACK when pane STARTS to Close - BEFORE onclose_start - , onhide_end: null // CALLBACK when pane ENDS being Closed - AFTER onclose_end - , onopen_start: null // CALLBACK when pane STARTS to Open - , onopen_end: null // CALLBACK when pane ENDS being Opened - , onclose_start: null // CALLBACK when pane STARTS to Close - , onclose_end: null // CALLBACK when pane ENDS being Closed - , onresize_start: null // CALLBACK when pane STARTS being Resized ***FOR ANY REASON*** - , onresize_end: null // CALLBACK when pane ENDS being Resized ***FOR ANY REASON*** - , onsizecontent_start: null // CALLBACK when sizing of content-element STARTS - , onsizecontent_end: null // CALLBACK when sizing of content-element ENDS - , onswap_start: null // CALLBACK when pane STARTS to Swap - , onswap_end: null // CALLBACK when pane ENDS being Swapped - , ondrag_start: null // CALLBACK when pane STARTS being ***MANUALLY*** Resized - , ondrag_end: null // CALLBACK when pane ENDS being ***MANUALLY*** Resized - } -/* - * PANE-SPECIFIC SETTINGS - * - options listed below MUST be specified per-pane - they CANNOT be set under 'panes' - * - all options under the 'panes' key can also be set specifically for any pane - * - most options under the 'panes' key apply only to 'border-panes' - NOT the the center-pane - */ -, north: { - paneSelector: ".ui-layout-north" - , size: "auto" // eg: "auto", "30%", .30, 200 - , resizerCursor: "n-resize" // custom = url(myCursor.cur) - , customHotkey: "" // EITHER a charCode (43) OR a character ("o") - } -, south: { - paneSelector: ".ui-layout-south" - , size: "auto" - , resizerCursor: "s-resize" - , customHotkey: "" - } -, east: { - paneSelector: ".ui-layout-east" - , size: 200 - , resizerCursor: "e-resize" - , customHotkey: "" - } -, west: { - paneSelector: ".ui-layout-west" - , size: 200 - , resizerCursor: "w-resize" - , customHotkey: "" - } -, center: { - paneSelector: ".ui-layout-center" - , minWidth: 0 - , minHeight: 0 - } -}; - -$.layout.optionsMap = { - // layout/global options - NOT pane-options - layout: ("stateManagement,effects,zIndexes,errors," - + "name,zIndex,scrollToBookmarkOnLoad,showErrorMessages," - + "resizeWithWindow,resizeWithWindowDelay,resizeWithWindowMaxDelay," - + "onresizeall,onresizeall_start,onresizeall_end,onload,onunload").split(",") -// borderPanes: [ ALL options that are NOT specified as 'layout' ] - // default.panes options that apply to the center-pane (most options apply _only_ to border-panes) -, center: ("paneClass,contentSelector,contentIgnoreSelector,findNestedContent,applyDemoStyles,triggerEventsOnLoad," - + "showOverflowOnHover,maskContents,maskObjects,liveContentResizing," - + "childOptions,initChildLayout,resizeChildLayout,destroyChildLayout," - + "onresize,onresize_start,onresize_end,onsizecontent,onsizecontent_start,onsizecontent_end").split(",") - // options that MUST be 
specifically set 'per-pane' - CANNOT set in the panes (defaults) key -, noDefault: ("paneSelector,resizerCursor,customHotkey").split(",") -}; - -/** - * Processes options passed in converts flat-format data into subkey (JSON) format - * In flat-format, subkeys are _currently_ separated with 2 underscores, like north__optName - * Plugins may also call this method so they can transform their own data - * - * @param {!Object} hash Data/options passed by user - may be a single level or nested levels - * @return {Object} Returns hash of minWidth & minHeight - */ -$.layout.transformData = function (hash) { - var json = { panes: {}, center: {} } // init return object - , data, branch, optKey, keys, key, val, i, c; - - if (typeof hash !== "object") return json; // no options passed - - // convert all 'flat-keys' to 'sub-key' format - for (optKey in hash) { - branch = json; - data = $.layout.optionsMap.layout; - val = hash[ optKey ]; - keys = optKey.split("__"); // eg: west__size or north__fxSettings__duration - c = keys.length - 1; - // convert underscore-delimited to subkeys - for (i=0; i <= c; i++) { - key = keys[i]; - if (i === c) - branch[key] = val; - else if (!branch[key]) - branch[key] = {}; // create the subkey - // recurse to sub-key for next loop - if not done - branch = branch[key]; - } - } - - return json; -}; - -// INTERNAL CONFIG DATA - DO NOT CHANGE THIS! -$.layout.backwardCompatibility = { - // data used by renameOldOptions() - map: { - // OLD Option Name: NEW Option Name - applyDefaultStyles: "applyDemoStyles" - , resizeNestedLayout: "resizeChildLayout" - , resizeWhileDragging: "livePaneResizing" - , resizeContentWhileDragging: "liveContentResizing" - , triggerEventsWhileDragging: "triggerEventsDuringLiveResize" - , maskIframesOnResize: "maskContents" - , useStateCookie: "stateManagement.enabled" - , "cookie.autoLoad": "stateManagement.autoLoad" - , "cookie.autoSave": "stateManagement.autoSave" - , "cookie.keys": "stateManagement.stateKeys" - , "cookie.name": "stateManagement.cookie.name" - , "cookie.domain": "stateManagement.cookie.domain" - , "cookie.path": "stateManagement.cookie.path" - , "cookie.expires": "stateManagement.cookie.expires" - , "cookie.secure": "stateManagement.cookie.secure" - // OLD Language options - , noRoomToOpenTip: "tips.noRoomToOpen" - , togglerTip_open: "tips.Close" // open = Close - , togglerTip_closed: "tips.Open" // closed = Open - , resizerTip: "tips.Resize" - , sliderTip: "tips.Slide" - } - -/** -* @param {Object} opts -*/ -, renameOptions: function (opts) { - var map = $.layout.backwardCompatibility.map - , oldData, newData, value - ; - for (var itemPath in map) { - oldData = getBranch( itemPath ); - value = oldData.branch[ oldData.key ]; - if (value !== undefined) { - newData = getBranch( map[itemPath], true ); - newData.branch[ newData.key ] = value; - delete oldData.branch[ oldData.key ]; - } - } - - /** - * @param {string} path - * @param {boolean=} [create=false] Create path if does not exist - */ - function getBranch (path, create) { - var a = path.split(".") // split keys into array - , c = a.length - 1 - , D = { branch: opts, key: a[c] } // init branch at top & set key (last item) - , i = 0, k, undef; - for (; i 0) { - if (autoHide && $E.data('autoHidden') && $E.innerHeight() > 0) { - $E.show().data('autoHidden', false); - if (!browser.mozilla) // FireFox refreshes iframes - IE does not - // make hidden, then visible to 'refresh' display after animation - $E.css(_c.hidden).css(_c.visible); - } - } - else if (autoHide && 
!$E.data('autoHidden')) - $E.hide().data('autoHidden', true); - } - - /** - * @param {(string|!Object)} el - * @param {number=} outerHeight - * @param {boolean=} [autoHide=false] - */ -, setOuterHeight = function (el, outerHeight, autoHide) { - var $E = el, h; - if (isStr(el)) $E = $Ps[el]; // west - else if (!el.jquery) $E = $(el); - h = cssH($E, outerHeight); - $E.css({ height: h, visibility: "visible" }); // may have been 'hidden' by sizeContent - if (h > 0 && $E.innerWidth() > 0) { - if (autoHide && $E.data('autoHidden')) { - $E.show().data('autoHidden', false); - if (!browser.mozilla) // FireFox refreshes iframes - IE does not - $E.css(_c.hidden).css(_c.visible); - } - } - else if (autoHide && !$E.data('autoHidden')) - $E.hide().data('autoHidden', true); - } - - /** - * @param {(string|!Object)} el - * @param {number=} outerSize - * @param {boolean=} [autoHide=false] - */ -, setOuterSize = function (el, outerSize, autoHide) { - if (_c[pane].dir=="horz") // pane = north or south - setOuterHeight(el, outerSize, autoHide); - else // pane = east or west - setOuterWidth(el, outerSize, autoHide); - } - - - /** - * Converts any 'size' params to a pixel/integer size, if not already - * If 'auto' or a decimal/percentage is passed as 'size', a pixel-size is calculated - * - /** - * @param {string} pane - * @param {(string|number)=} size - * @param {string=} [dir] - * @return {number} - */ -, _parseSize = function (pane, size, dir) { - if (!dir) dir = _c[pane].dir; - - if (isStr(size) && size.match(/%/)) - size = (size === '100%') ? -1 : parseInt(size, 10) / 100; // convert % to decimal - - if (size === 0) - return 0; - else if (size >= 1) - return parseInt(size, 10); - - var o = options, avail = 0; - if (dir=="horz") // north or south or center.minHeight - avail = sC.innerHeight - ($Ps.north ? o.north.spacing_open : 0) - ($Ps.south ? o.south.spacing_open : 0); - else if (dir=="vert") // east or west or center.minWidth - avail = sC.innerWidth - ($Ps.west ? o.west.spacing_open : 0) - ($Ps.east ? o.east.spacing_open : 0); - - if (size === -1) // -1 == 100% - return avail; - else if (size > 0) // percentage, eg: .25 - return round(avail * size); - else if (pane=="center") - return 0; - else { // size < 0 || size=='auto' || size==Missing || size==Invalid - // auto-size the pane - var dim = (dir === "horz" ? "height" : "width") - , $P = $Ps[pane] - , $C = dim === 'height' ? $Cs[pane] : false - , vis = $.layout.showInvisibly($P) // show pane invisibly if hidden - , szP = $P.css(dim) // SAVE current pane size - , szC = $C ? $C.css(dim) : 0 // SAVE current content size - ; - $P.css(dim, "auto"); - if ($C) $C.css(dim, "auto"); - size = (dim === "height") ? $P.outerHeight() : $P.outerWidth(); // MEASURE - $P.css(dim, szP).css(vis); // RESET size & visibility - if ($C) $C.css(dim, szC); - return size; - } - } - - /** - * Calculates current 'size' (outer-width or outer-height) of a border-pane - optionally with 'pane-spacing' added - * - * @param {(string|!Object)} pane - * @param {boolean=} [inclSpace=false] - * @return {number} Returns EITHER Width for east/west panes OR Height for north/south panes - */ -, getPaneSize = function (pane, inclSpace) { - var - $P = $Ps[pane] - , o = options[pane] - , s = state[pane] - , oSp = (inclSpace ? o.spacing_open : 0) - , cSp = (inclSpace ? 
o.spacing_closed : 0) - ; - if (!$P || s.isHidden) - return 0; - else if (s.isClosed || (s.isSliding && inclSpace)) - return cSp; - else if (_c[pane].dir === "horz") - return $P.outerHeight() + oSp; - else // dir === "vert" - return $P.outerWidth() + oSp; - } - - /** - * Calculate min/max pane dimensions and limits for resizing - * - * @param {string} pane - * @param {boolean=} [slide=false] - */ -, setSizeLimits = function (pane, slide) { - if (!isInitialized()) return; - var - o = options[pane] - , s = state[pane] - , c = _c[pane] - , dir = c.dir - , side = c.side.toLowerCase() - , type = c.sizeType.toLowerCase() - , isSliding = (slide != undefined ? slide : s.isSliding) // only open() passes 'slide' param - , $P = $Ps[pane] - , paneSpacing = o.spacing_open - // measure the pane on the *opposite side* from this pane - , altPane = _c.oppositeEdge[pane] - , altS = state[altPane] - , $altP = $Ps[altPane] - , altPaneSize = (!$altP || altS.isVisible===false || altS.isSliding ? 0 : (dir=="horz" ? $altP.outerHeight() : $altP.outerWidth())) - , altPaneSpacing = ((!$altP || altS.isHidden ? 0 : options[altPane][ altS.isClosed !== false ? "spacing_closed" : "spacing_open" ]) || 0) - // limitSize prevents this pane from 'overlapping' opposite pane - , containerSize = (dir=="horz" ? sC.innerHeight : sC.innerWidth) - , minCenterDims = cssMinDims("center") - , minCenterSize = dir=="horz" ? max(options.center.minHeight, minCenterDims.minHeight) : max(options.center.minWidth, minCenterDims.minWidth) - // if pane is 'sliding', then ignore center and alt-pane sizes - because 'overlays' them - , limitSize = (containerSize - paneSpacing - (isSliding ? 0 : (_parseSize("center", minCenterSize, dir) + altPaneSize + altPaneSpacing))) - , minSize = s.minSize = max( _parseSize(pane, o.minSize), cssMinDims(pane).minSize ) - , maxSize = s.maxSize = min( (o.maxSize ? _parseSize(pane, o.maxSize) : 100000), limitSize ) - , r = s.resizerPosition = {} // used to set resizing limits - , top = sC.insetTop - , left = sC.insetLeft - , W = sC.innerWidth - , H = sC.innerHeight - , rW = o.spacing_open // subtract resizer-width to get top/left position for south/east - ; - switch (pane) { - case "north": r.min = top + minSize; - r.max = top + maxSize; - break; - case "west": r.min = left + minSize; - r.max = left + maxSize; - break; - case "south": r.min = top + H - maxSize - rW; - r.max = top + H - minSize - rW; - break; - case "east": r.min = left + W - maxSize - rW; - r.max = left + W - minSize - rW; - break; - }; - } - - /** - * Returns data for setting the size/position of center pane. 
Also used to set Height for east/west panes - * - * @return JSON Returns a hash of all dimensions: top, bottom, left, right, (outer) width and (outer) height - */ -, calcNewCenterPaneDims = function () { - var d = { - top: getPaneSize("north", true) // true = include 'spacing' value for pane - , bottom: getPaneSize("south", true) - , left: getPaneSize("west", true) - , right: getPaneSize("east", true) - , width: 0 - , height: 0 - }; - - // NOTE: sC = state.container - // calc center-pane outer dimensions - d.width = sC.innerWidth - d.left - d.right; // outerWidth - d.height = sC.innerHeight - d.bottom - d.top; // outerHeight - // add the 'container border/padding' to get final positions relative to the container - d.top += sC.insetTop; - d.bottom += sC.insetBottom; - d.left += sC.insetLeft; - d.right += sC.insetRight; - - return d; - } - - - /** - * @param {!Object} el - * @param {boolean=} [allStates=false] - */ -, getHoverClasses = function (el, allStates) { - var - $El = $(el) - , type = $El.data("layoutRole") - , pane = $El.data("layoutEdge") - , o = options[pane] - , root = o[type +"Class"] - , _pane = "-"+ pane // eg: "-west" - , _open = "-open" - , _closed = "-closed" - , _slide = "-sliding" - , _hover = "-hover " // NOTE the trailing space - , _state = $El.hasClass(root+_closed) ? _closed : _open - , _alt = _state === _closed ? _open : _closed - , classes = (root+_hover) + (root+_pane+_hover) + (root+_state+_hover) + (root+_pane+_state+_hover) - ; - if (allStates) // when 'removing' classes, also remove alternate-state classes - classes += (root+_alt+_hover) + (root+_pane+_alt+_hover); - - if (type=="resizer" && $El.hasClass(root+_slide)) - classes += (root+_slide+_hover) + (root+_pane+_slide+_hover); - - return $.trim(classes); - } -, addHover = function (evt, el) { - var $E = $(el || this); - if (evt && $E.data("layoutRole") === "toggler") - evt.stopPropagation(); // prevent triggering 'slide' on Resizer-bar - $E.addClass( getHoverClasses($E) ); - } -, removeHover = function (evt, el) { - var $E = $(el || this); - $E.removeClass( getHoverClasses($E, true) ); - } - -, onResizerEnter = function (evt) { // ALSO called by toggler.mouseenter - if ($.fn.disableSelection) - $("body").disableSelection(); - } -, onResizerLeave = function (evt, el) { - var - e = el || this // el is only passed when called by the timer - , pane = $(e).data("layoutEdge") - , name = pane +"ResizerLeave" - ; - timer.clear(pane+"_openSlider"); // cancel slideOpen timer, if set - timer.clear(name); // cancel enableSelection timer - may re/set below - // this method calls itself on a timer because it needs to allow - // enough time for dragging to kick-in and set the isResizing flag - // dragging has a 100ms delay set, so this delay must be >100 - if (!el) // 1st call - mouseleave event - timer.set(name, function(){ onResizerLeave(evt, e); }, 200); - // if user is resizing, then dragStop will enableSelection(), so can skip it here - else if (!state[pane].isResizing && $.fn.enableSelection) // 2nd call - by timer - $("body").enableSelection(); - } - -/* - * ########################### - * INITIALIZATION METHODS - * ########################### - */ - - /** - * Initialize the layout - called automatically whenever an instance of layout is created - * - * @see none - triggered onInit - * @return mixed true = fully initialized | false = panes not initialized (yet) | 'cancel' = abort - */ -, _create = function () { - // initialize config/options - initOptions(); - var o = options; - - // TEMP state so isInitialized 
returns true during init process - state.creatingLayout = true; - - // init plugins for this layout, if there are any (eg: stateManagement) - runPluginCallbacks( Instance, $.layout.onCreate ); - - // options & state have been initialized, so now run beforeLoad callback - // onload will CANCEL layout creation if it returns false - if (false === _runCallbacks("onload_start")) - return 'cancel'; - - // initialize the container element - _initContainer(); - - // bind hotkey function - keyDown - if required - initHotkeys(); - - // bind window.onunload - $(window).bind("unload."+ sID, unload); - - // init plugins for this layout, if there are any (eg: customButtons) - runPluginCallbacks( Instance, $.layout.onLoad ); - - // if layout elements are hidden, then layout WILL NOT complete initialization! - // initLayoutElements will set initialized=true and run the onload callback IF successful - if (o.initPanes) _initLayoutElements(); - - delete state.creatingLayout; - - return state.initialized; - } - - /** - * Initialize the layout IF not already - * - * @see All methods in Instance run this test - * @return boolean true = layoutElements have been initialized | false = panes are not initialized (yet) - */ -, isInitialized = function () { - if (state.initialized || state.creatingLayout) return true; // already initialized - else return _initLayoutElements(); // try to init panes NOW - } - - /** - * Initialize the layout - called automatically whenever an instance of layout is created - * - * @see _create() & isInitialized - * @return An object pointer to the instance created - */ -, _initLayoutElements = function (retry) { - // initialize config/options - var o = options; - - // CANNOT init panes inside a hidden container! - if (!$N.is(":visible")) { - // handle Chrome bug where popup window 'has no height' - // if layout is BODY element, try again in 50ms - // SEE: http://layout.jquery-dev.net/samples/test_popup_window.html - if ( !retry && browser.webkit && $N[0].tagName === "BODY" ) - setTimeout(function(){ _initLayoutElements(true); }, 50); - return false; - } - - // a center pane is required, so make sure it exists - if (!getPane("center").length) { - return _log( o.errors.centerPaneMissing ); - } - - // TEMP state so isInitialized returns true during init process - state.creatingLayout = true; - - // update Container dims - $.extend(sC, elDims( $N )); - - // initialize all layout elements - initPanes(); // size & position panes - calls initHandles() - which calls initResizable() - - if (o.scrollToBookmarkOnLoad) { - var l = self.location; - if (l.hash) l.replace( l.hash ); // scrollTo Bookmark - } - - // check to see if this layout 'nested' inside a pane - if (Instance.hasParentLayout) - o.resizeWithWindow = false; - // bind resizeAll() for 'this layout instance' to window.resize event - else if (o.resizeWithWindow) - $(window).bind("resize."+ sID, windowResize); - - delete state.creatingLayout; - state.initialized = true; - - // init plugins for this layout, if there are any - runPluginCallbacks( Instance, $.layout.onReady ); - - // now run the onload callback, if exists - _runCallbacks("onload_end"); - - return true; // elements initialized successfully - } - - /** - * Initialize nested layouts - called when _initLayoutElements completes - * - * NOT CURRENTLY USED - * - * @see _initLayoutElements - * @return An object pointer to the instance created - */ -, _initChildLayouts = function () { - $.each(_c.allPanes, function (idx, pane) { - if (options[pane].initChildLayout) - createChildLayout( 
pane ); - }); - } - - /** - * Initialize nested layouts for a specific pane - can optionally pass layout-options - * - * @see _initChildLayouts - * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west - * @param {Object=} [opts] Layout-options - if passed, will OVERRRIDE options[pane].childOptions - * @return An object pointer to the layout instance created - or null - */ -, createChildLayout = function (evt_or_pane, opts) { - var pane = evtPane.call(this, evt_or_pane) - , $P = $Ps[pane] - , C = children - ; - if ($P) { - var $C = $Cs[pane] - , o = opts || options[pane].childOptions - , d = "layout" - // determine which element is supposed to be the 'child container' - // if pane has a 'containerSelector' OR a 'content-div', use those instead of the pane - , $Cont = o.containerSelector ? $P.find( o.containerSelector ) : ($C || $P) - , containerFound = $Cont.length - // see if a child-layout ALREADY exists on this element - , child = containerFound ? (C[pane] = $Cont.data(d) || null) : null - ; - // if no layout exists, but childOptions are set, try to create the layout now - if (!child && containerFound && o) - child = C[pane] = $Cont.eq(0).layout(o) || null; - if (child) - child.hasParentLayout = true; // set parent-flag in child - } - Instance[pane].child = C[pane]; // ALWAYS set pane-object pointer, even if null - } - -, windowResize = function () { - var delay = Number(options.resizeWithWindowDelay); - if (delay < 10) delay = 100; // MUST have a delay! - // resizing uses a delay-loop because the resize event fires repeatly - except in FF, but delay anyway - timer.clear("winResize"); // if already running - timer.set("winResize", function(){ - timer.clear("winResize"); - timer.clear("winResizeRepeater"); - var dims = elDims( $N ); - // only trigger resizeAll() if container has changed size - if (dims.innerWidth !== sC.innerWidth || dims.innerHeight !== sC.innerHeight) - resizeAll(); - }, delay); - // ALSO set fixed-delay timer, if not already running - if (!timer.data["winResizeRepeater"]) setWindowResizeRepeater(); - } - -, setWindowResizeRepeater = function () { - var delay = Number(options.resizeWithWindowMaxDelay); - if (delay > 0) - timer.set("winResizeRepeater", function(){ setWindowResizeRepeater(); resizeAll(); }, delay); - } - -, unload = function () { - var o = options; - - _runCallbacks("onunload_start"); - - // trigger plugin callabacks for this layout (eg: stateManagement) - runPluginCallbacks( Instance, $.layout.onUnload ); - - _runCallbacks("onunload_end"); - } - - /** - * Validate and initialize container CSS and events - * - * @see _create() - */ -, _initContainer = function () { - var - N = $N[0] - , tag = sC.tagName = N.tagName - , id = sC.id = N.id - , cls = sC.className = N.className - , o = options - , name = o.name - , fullPage= (tag === "BODY") - , props = "overflow,position,margin,padding,border" - , css = "layoutCSS" - , CSS = {} - , hid = "hidden" // used A LOT! - // see if this container is a 'pane' inside an outer-layout - , parent = $N.data("parentLayout") // parent-layout Instance - , pane = $N.data("layoutEdge") // pane-name in parent-layout - , isChild = parent && pane - ; - // sC -> state.container - sC.selector = $N.selector.split(".slice")[0]; - sC.ref = (o.name ? o.name +' layout / ' : '') + tag + (id ? "#"+id : cls ? 
'.['+cls+']' : ''); // used in messages - - $N .data({ - layout: Instance - , layoutContainer: sID // FLAG to indicate this is a layout-container - contains unique internal ID - }) - .addClass(o.containerClass) - ; - var layoutMethods = { - destroy: '' - , initPanes: '' - , resizeAll: 'resizeAll' - , resize: 'resizeAll' - }; - // loop hash and bind all methods - include layoutID namespacing - for (name in layoutMethods) { - $N.bind("layout"+ name.toLowerCase() +"."+ sID, Instance[ layoutMethods[name] || name ]); - } - - // if this container is another layout's 'pane', then set child/parent pointers - if (isChild) { - // update parent flag - Instance.hasParentLayout = true; - // set pointers to THIS child-layout (Instance) in parent-layout - // NOTE: parent.PANE.child is an ALIAS to parent.children.PANE - parent[pane].child = parent.children[pane] = $N.data("layout"); - } - - // SAVE original container CSS for use in destroy() - if (!$N.data(css)) { - // handle props like overflow different for BODY & HTML - has 'system default' values - if (fullPage) { - CSS = $.extend( elCSS($N, props), { - height: $N.css("height") - , overflow: $N.css("overflow") - , overflowX: $N.css("overflowX") - , overflowY: $N.css("overflowY") - }); - // ALSO SAVE CSS - var $H = $("html"); - $H.data(css, { - height: "auto" // FF would return a fixed px-size! - , overflow: $H.css("overflow") - , overflowX: $H.css("overflowX") - , overflowY: $H.css("overflowY") - }); - } - else // handle props normally for non-body elements - CSS = elCSS($N, props+",top,bottom,left,right,width,height,overflow,overflowX,overflowY"); - - $N.data(css, CSS); - } - - try { // format html/body if this is a full page layout - if (fullPage) { - $("html").css({ - height: "100%" - , overflow: hid - , overflowX: hid - , overflowY: hid - }); - $("body").css({ - position: "relative" - , height: "100%" - , overflow: hid - , overflowX: hid - , overflowY: hid - , margin: 0 - , padding: 0 // TODO: test whether body-padding could be handled? - , border: "none" // a body-border creates problems because it cannot be measured! - }); - - // set current layout-container dimensions - $.extend(sC, elDims( $N )); - } - else { // set required CSS for overflow and position - // ENSURE container will not 'scroll' - CSS = { overflow: hid, overflowX: hid, overflowY: hid } - var - p = $N.css("position") - , h = $N.css("height") - ; - // if this is a NESTED layout, then container/outer-pane ALREADY has position and height - if (!isChild) { - if (!p || !p.match(/fixed|absolute|relative/)) - CSS.position = "relative"; // container MUST have a 'position' - /* - if (!h || h=="auto") - CSS.height = "100%"; // container MUST have a 'height' - */ - } - $N.css( CSS ); - - // set current layout-container dimensions - if ( $N.is(":visible") ) { - $.extend(sC, elDims( $N )); - if (sC.innerHeight < 1) - _log( o.errors.noContainerHeight.replace(/CONTAINER/, sC.ref) ); - } - } - } catch (ex) {} - } - - /** - * Bind layout hotkeys - if options enabled - * - * @see _create() and addPane() - * @param {string=} [panes=""] The edge(s) to process - */ -, initHotkeys = function (panes) { - panes = panes ? 
panes.split(",") : _c.borderPanes; - // bind keyDown to capture hotkeys, if option enabled for ANY pane - $.each(panes, function (i, pane) { - var o = options[pane]; - if (o.enableCursorHotkey || o.customHotkey) { - $(document).bind("keydown."+ sID, keyDown); // only need to bind this ONCE - return false; // BREAK - binding was done - } - }); - } - - /** - * Build final OPTIONS data - * - * @see _create() - */ -, initOptions = function () { - var data, d, pane, key, val, i, c, o; - - // reprocess user's layout-options to have correct options sub-key structure - opts = $.layout.transformData( opts ); // panes = default subkey - - // auto-rename old options for backward compatibility - opts = $.layout.backwardCompatibility.renameAllOptions( opts ); - - // if user-options has 'panes' key (pane-defaults), clean it... - if (!$.isEmptyObject(opts.panes)) { - // REMOVE any pane-defaults that MUST be set per-pane - data = $.layout.optionsMap.noDefault; - for (i=0, c=data.length; i 0) { - z.pane_normal = zo; - z.content_mask = max(zo+1, z.content_mask); // MIN = +1 - z.resizer_normal = max(zo+2, z.resizer_normal); // MIN = +2 - } - - // DELETE 'panes' key now that we are done - values were copied to EACH pane - delete options.panes; - - - function createFxOptions ( pane ) { - var o = options[pane] - , d = options.panes; - // ensure fxSettings key to avoid errors - if (!o.fxSettings) o.fxSettings = {}; - if (!d.fxSettings) d.fxSettings = {}; - - $.each(["_open","_close","_size"], function (i,n) { - var - sName = "fxName"+ n - , sSpeed = "fxSpeed"+ n - , sSettings = "fxSettings"+ n - // recalculate fxName according to specificity rules - , fxName = o[sName] = - o[sName] // options.west.fxName_open - || d[sName] // options.panes.fxName_open - || o.fxName // options.west.fxName - || d.fxName // options.panes.fxName - || "none" // MEANS $.layout.defaults.panes.fxName == "" || false || null || 0 - ; - // validate fxName to ensure is valid effect - MUST have effect-config data in options.effects - if (fxName === "none" || !$.effects || !$.effects[fxName] || !options.effects[fxName]) - fxName = o[sName] = "none"; // effect not loaded OR unrecognized fxName - - // set vars for effects subkeys to simplify logic - var fx = options.effects[fxName] || {} // effects.slide - , fx_all = fx.all || null // effects.slide.all - , fx_pane = fx[pane] || null // effects.slide.west - ; - // create fxSpeed[_open|_close|_size] - o[sSpeed] = - o[sSpeed] // options.west.fxSpeed_open - || d[sSpeed] // options.west.fxSpeed_open - || o.fxSpeed // options.west.fxSpeed - || d.fxSpeed // options.panes.fxSpeed - || null // DEFAULT - let fxSetting.duration control speed - ; - // create fxSettings[_open|_close|_size] - o[sSettings] = $.extend( - true - , {} - , fx_all // effects.slide.all - , fx_pane // effects.slide.west - , d.fxSettings // options.panes.fxSettings - , o.fxSettings // options.west.fxSettings - , d[sSettings] // options.panes.fxSettings_open - , o[sSettings] // options.west.fxSettings_open - ); - }); - - // DONE creating action-specific-settings for this pane, - // so DELETE generic options - are no longer meaningful - delete o.fxName; - delete o.fxSpeed; - delete o.fxSettings; - } - } - - /** - * Initialize module objects, styling, size and position for all panes - * - * @see _initElements() - * @param {string} pane The pane to process - */ -, getPane = function (pane) { - var sel = options[pane].paneSelector - if (sel.substr(0,1)==="#") // ID selector - // NOTE: elements selected 'by ID' DO NOT have to be 'children' 
- return $N.find(sel).eq(0); - else { // class or other selector - var $P = $N.children(sel).eq(0); - // look for the pane nested inside a 'form' element - return $P.length ? $P : $N.children("form:first").children(sel).eq(0); - } - } - -, initPanes = function (evt) { - // stopPropagation if called by trigger("layoutinitpanes") - use evtPane utility - evtPane(evt); - - // NOTE: do north & south FIRST so we can measure their height - do center LAST - $.each(_c.allPanes, function (idx, pane) { - addPane( pane, true ); - }); - - // init the pane-handles NOW in case we have to hide or close the pane below - initHandles(); - - // now that all panes have been initialized and initially-sized, - // make sure there is really enough space available for each pane - $.each(_c.borderPanes, function (i, pane) { - if ($Ps[pane] && state[pane].isVisible) { // pane is OPEN - setSizeLimits(pane); - makePaneFit(pane); // pane may be Closed, Hidden or Resized by makePaneFit() - } - }); - // size center-pane AGAIN in case we 'closed' a border-pane in loop above - sizeMidPanes("center"); - - // Chrome/Webkit sometimes fires callbacks BEFORE it completes resizing! - // Before RC30.3, there was a 10ms delay here, but that caused layout - // to load asynchrously, which is BAD, so try skipping delay for now - - // process pane contents and callbacks, and init/resize child-layout if exists - $.each(_c.allPanes, function (i, pane) { - var o = options[pane]; - if ($Ps[pane]) { - if (state[pane].isVisible) { // pane is OPEN - sizeContent(pane); - // trigger pane.onResize if triggerEventsOnLoad = true - if (o.triggerEventsOnLoad) - _runCallbacks("onresize_end", pane); - else // automatic if onresize called, otherwise call it specifically - // resize child - IF inner-layout already exists (created before this layout) - resizeChildLayout(pane); - } - // init childLayout - even if pane is not visible - if (o.initChildLayout && o.childOptions) - createChildLayout(pane); - } - }); - } - - /** - * Add a pane to the layout - subroutine of initPanes() - * - * @see initPanes() - * @param {string} pane The pane to process - * @param {boolean=} [force=false] Size content after init - */ -, addPane = function (pane, force) { - if (!force && !isInitialized()) return; - var - o = options[pane] - , s = state[pane] - , c = _c[pane] - , fx = s.fx - , dir = c.dir - , spacing = o.spacing_open || 0 - , isCenter = (pane === "center") - , CSS = {} - , $P = $Ps[pane] - , size, minSize, maxSize - ; - // if pane-pointer already exists, remove the old one first - if ($P) - removePane( pane, false, true, false ); - else - $Cs[pane] = false; // init - - $P = $Ps[pane] = getPane(pane); - if (!$P.length) { - $Ps[pane] = false; // logic - return; - } - - // SAVE original Pane CSS - if (!$P.data("layoutCSS")) { - var props = "position,top,left,bottom,right,width,height,overflow,zIndex,display,backgroundColor,padding,margin,border"; - $P.data("layoutCSS", elCSS($P, props)); - } - - // create alias for pane data in Instance - initHandles will add more - Instance[pane] = { name: pane, pane: $Ps[pane], content: $Cs[pane], options: options[pane], state: state[pane], child: children[pane] }; - - // add classes, attributes & events - $P .data({ - parentLayout: Instance // pointer to Layout Instance - , layoutPane: Instance[pane] // NEW pointer to pane-alias-object - , layoutEdge: pane - , layoutRole: "pane" - }) - .css(c.cssReq).css("zIndex", options.zIndexes.pane_normal) - .css(o.applyDemoStyles ? 
c.cssDemo : {}) // demo styles - .addClass( o.paneClass +" "+ o.paneClass+"-"+pane ) // default = "ui-layout-pane ui-layout-pane-west" - may be a dupe of 'paneSelector' - .bind("mouseenter."+ sID, addHover ) - .bind("mouseleave."+ sID, removeHover ) - ; - var paneMethods = { - hide: '' - , show: '' - , toggle: '' - , close: '' - , open: '' - , slideOpen: '' - , slideClose: '' - , slideToggle: '' - , size: 'sizePane' - , sizePane: 'sizePane' - , sizeContent: '' - , sizeHandles: '' - , enableClosable: '' - , disableClosable: '' - , enableSlideable: '' - , disableSlideable: '' - , enableResizable: '' - , disableResizable: '' - , swapPanes: 'swapPanes' - , swap: 'swapPanes' - , move: 'swapPanes' - , removePane: 'removePane' - , remove: 'removePane' - , createChildLayout: '' - , resizeChildLayout: '' - , resizeAll: 'resizeAll' - , resizeLayout: 'resizeAll' - } - , name; - // loop hash and bind all methods - include layoutID namespacing - for (name in paneMethods) { - $P.bind("layoutpane"+ name.toLowerCase() +"."+ sID, Instance[ paneMethods[name] || name ]); - } - - // see if this pane has a 'scrolling-content element' - initContent(pane, false); // false = do NOT sizeContent() - called later - - if (!isCenter) { - // call _parseSize AFTER applying pane classes & styles - but before making visible (if hidden) - // if o.size is auto or not valid, then MEASURE the pane and use that as its 'size' - size = s.size = _parseSize(pane, o.size); - minSize = _parseSize(pane,o.minSize) || 1; - maxSize = _parseSize(pane,o.maxSize) || 100000; - if (size > 0) size = max(min(size, maxSize), minSize); - - // state for border-panes - s.isClosed = false; // true = pane is closed - s.isSliding = false; // true = pane is currently open by 'sliding' over adjacent panes - s.isResizing= false; // true = pane is in process of being resized - s.isHidden = false; // true = pane is hidden - no spacing, resizer or toggler is visible! 
- - // array for 'pin buttons' whose classNames are auto-updated on pane-open/-close - if (!s.pins) s.pins = []; - } - // states common to ALL panes - s.tagName = $P[0].tagName; - s.edge = pane; // useful if pane is (or about to be) 'swapped' - easy find out where it is (or is going) - s.noRoom = false; // true = pane 'automatically' hidden due to insufficient room - will unhide automatically - s.isVisible = true; // false = pane is invisible - closed OR hidden - simplify logic - - // set css-position to account for container borders & padding - switch (pane) { - case "north": CSS.top = sC.insetTop; - CSS.left = sC.insetLeft; - CSS.right = sC.insetRight; - break; - case "south": CSS.bottom = sC.insetBottom; - CSS.left = sC.insetLeft; - CSS.right = sC.insetRight; - break; - case "west": CSS.left = sC.insetLeft; // top, bottom & height set by sizeMidPanes() - break; - case "east": CSS.right = sC.insetRight; // ditto - break; - case "center": // top, left, width & height set by sizeMidPanes() - } - - if (dir === "horz") // north or south pane - CSS.height = cssH($P, size); - else if (dir === "vert") // east or west pane - CSS.width = cssW($P, size); - //else if (isCenter) {} - - $P.css(CSS); // apply size -- top, bottom & height will be set by sizeMidPanes - if (dir != "horz") sizeMidPanes(pane, true); // true = skipCallback - - // close or hide the pane if specified in settings - if (o.initClosed && o.closable && !o.initHidden) - close(pane, true, true); // true, true = force, noAnimation - else if (o.initHidden || o.initClosed) - hide(pane); // will be completely invisible - no resizer or spacing - else if (!s.noRoom) - // make the pane visible - in case was initially hidden - $P.css("display","block"); - // ELSE setAsOpen() - called later by initHandles() - - // RESET visibility now - pane will appear IF display:block - $P.css("visibility","visible"); - - // check option for auto-handling of pop-ups & drop-downs - if (o.showOverflowOnHover) - $P.hover( allowOverflow, resetOverflow ); - - // if manually adding a pane AFTER layout initialization, then... - if (state.initialized) { - initHandles( pane ); - initHotkeys( pane ); - resizeAll(); // will sizeContent if pane is visible - if (s.isVisible) { // pane is OPEN - if (o.triggerEventsOnLoad) - _runCallbacks("onresize_end", pane); - else // automatic if onresize called, otherwise call it specifically - // resize child - IF inner-layout already exists (created before this layout) - resizeChildLayout(pane); // a previously existing childLayout - } - if (o.initChildLayout && o.childOptions) - createChildLayout(pane); - } - } - - /** - * Initialize module objects, styling, size and position for all resize bars and toggler buttons - * - * @see _create() - * @param {string=} [panes=""] The edge(s) to process - */ -, initHandles = function (panes) { - panes = panes ? panes.split(",") : _c.borderPanes; - - // create toggler DIVs for each pane, and set object pointers for them, eg: $R.north = north toggler DIV - $.each(panes, function (i, pane) { - var $P = $Ps[pane]; - $Rs[pane] = false; // INIT - $Ts[pane] = false; - if (!$P) return; // pane does not exist - skip - - var - o = options[pane] - , s = state[pane] - , c = _c[pane] - , paneId = o.paneSelector.substr(0,1) === "#" ? o.paneSelector.substr(1) : "" - , rClass = o.resizerClass - , tClass = o.togglerClass - , side = c.side.toLowerCase() - , spacing = (s.isVisible ? o.spacing_open : o.spacing_closed) - , _pane = "-"+ pane // used for classNames - , _state = (s.isVisible ? 
"-open" : "-closed") // used for classNames - , I = Instance[pane] - // INIT RESIZER BAR - , $R = I.resizer = $Rs[pane] = $("
      ") - // INIT TOGGLER BUTTON - , $T = I.toggler = (o.closable ? $Ts[pane] = $("
      ") : false) - ; - - //if (s.isVisible && o.resizable) ... handled by initResizable - if (!s.isVisible && o.slidable) - $R.attr("title", o.tips.Slide).css("cursor", o.sliderCursor); - - $R // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "paneLeft-resizer" - .attr("id", paneId ? paneId +"-resizer" : "" ) - .data({ - parentLayout: Instance - , layoutPane: Instance[pane] // NEW pointer to pane-alias-object - , layoutEdge: pane - , layoutRole: "resizer" - }) - .css(_c.resizers.cssReq).css("zIndex", options.zIndexes.resizer_normal) - .css(o.applyDemoStyles ? _c.resizers.cssDemo : {}) // add demo styles - .addClass(rClass +" "+ rClass+_pane) - .hover(addHover, removeHover) // ALWAYS add hover-classes, even if resizing is not enabled - handle with CSS instead - .hover(onResizerEnter, onResizerLeave) // ALWAYS NEED resizer.mouseleave to balance toggler.mouseenter - .appendTo($N) // append DIV to container - ; - - if ($T) { - $T // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "#paneLeft-toggler" - .attr("id", paneId ? paneId +"-toggler" : "" ) - .data({ - parentLayout: Instance - , layoutPane: Instance[pane] // NEW pointer to pane-alias-object - , layoutEdge: pane - , layoutRole: "toggler" - }) - .css(_c.togglers.cssReq) // add base/required styles - .css(o.applyDemoStyles ? _c.togglers.cssDemo : {}) // add demo styles - .addClass(tClass +" "+ tClass+_pane) - .hover(addHover, removeHover) // ALWAYS add hover-classes, even if toggling is not enabled - handle with CSS instead - .bind("mouseenter", onResizerEnter) // NEED toggler.mouseenter because mouseenter MAY NOT fire on resizer - .appendTo($R) // append SPAN to resizer DIV - ; - // ADD INNER-SPANS TO TOGGLER - if (o.togglerContent_open) // ui-layout-open - $(""+ o.togglerContent_open +"") - .data({ - layoutEdge: pane - , layoutRole: "togglerContent" - }) - .data("layoutRole", "togglerContent") - .data("layoutEdge", pane) - .addClass("content content-open") - .css("display","none") - .appendTo( $T ) - //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-open instead! - ; - if (o.togglerContent_closed) // ui-layout-closed - $(""+ o.togglerContent_closed +"") - .data({ - layoutEdge: pane - , layoutRole: "togglerContent" - }) - .addClass("content content-closed") - .css("display","none") - .appendTo( $T ) - //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-closed instead! - ; - // ADD TOGGLER.click/.hover - enableClosable(pane); - } - - // add Draggable events - initResizable(pane); - - // ADD CLASSNAMES & SLIDE-BINDINGS - eg: class="resizer resizer-west resizer-open" - if (s.isVisible) - setAsOpen(pane); // onOpen will be called, but NOT onResize - else { - setAsClosed(pane); // onClose will be called - bindStartSlidingEvent(pane, true); // will enable events IF option is set - } - - }); - - // SET ALL HANDLE DIMENSIONS - sizeHandles(); - } - - - /** - * Initialize scrolling ui-layout-content div - if exists - * - * @see initPane() - or externally after an Ajax injection - * @param {string} [pane] The pane to process - * @param {boolean=} [resize=true] Size content after init - */ -, initContent = function (pane, resize) { - if (!isInitialized()) return; - var - o = options[pane] - , sel = o.contentSelector - , I = Instance[pane] - , $P = $Ps[pane] - , $C - ; - if (sel) $C = I.content = $Cs[pane] = (o.findNestedContent) - ? 
$P.find(sel).eq(0) // match 1-element only - : $P.children(sel).eq(0) - ; - if ($C && $C.length) { - $C.data("layoutRole", "content"); - // SAVE original Pane CSS - if (!$C.data("layoutCSS")) - $C.data("layoutCSS", elCSS($C, "height")); - $C.css( _c.content.cssReq ); - if (o.applyDemoStyles) { - $C.css( _c.content.cssDemo ); // add padding & overflow: auto to content-div - $P.css( _c.content.cssDemoPane ); // REMOVE padding/scrolling from pane - } - state[pane].content = {}; // init content state - if (resize !== false) sizeContent(pane); - // sizeContent() is called AFTER init of all elements - } - else - I.content = $Cs[pane] = false; - } - - - /** - * Add resize-bars to all panes that specify it in options - * -dependancy: $.fn.resizable - will skip if not found - * - * @see _create() - * @param {string=} [panes=""] The edge(s) to process - */ -, initResizable = function (panes) { - var draggingAvailable = $.layout.plugins.draggable - , side // set in start() - ; - panes = panes ? panes.split(",") : _c.borderPanes; - - $.each(panes, function (idx, pane) { - var o = options[pane]; - if (!draggingAvailable || !$Ps[pane] || !o.resizable) { - o.resizable = false; - return true; // skip to next - } - - var s = state[pane] - , z = options.zIndexes - , c = _c[pane] - , side = c.dir=="horz" ? "top" : "left" - , opEdge = _c.oppositeEdge[pane] - , masks = pane +",center,"+ opEdge + (c.dir=="horz" ? ",west,east" : "") - , $P = $Ps[pane] - , $R = $Rs[pane] - , base = o.resizerClass - , lastPos = 0 // used when live-resizing - , r, live // set in start because may change - // 'drag' classes are applied to the ORIGINAL resizer-bar while dragging is in process - , resizerClass = base+"-drag" // resizer-drag - , resizerPaneClass = base+"-"+pane+"-drag" // resizer-north-drag - // 'helper' class is applied to the CLONED resizer-bar while it is being dragged - , helperClass = base+"-dragging" // resizer-dragging - , helperPaneClass = base+"-"+pane+"-dragging" // resizer-north-dragging - , helperLimitClass = base+"-dragging-limit" // resizer-drag - , helperPaneLimitClass = base+"-"+pane+"-dragging-limit" // resizer-north-drag - , helperClassesSet = false // logic var - ; - - if (!s.isClosed) - $R.attr("title", o.tips.Resize) - .css("cursor", o.resizerCursor); // n-resize, s-resize, etc - - $R.draggable({ - containment: $N[0] // limit resizing to layout container - , axis: (c.dir=="horz" ? "y" : "x") // limit resizing to horz or vert axis - , delay: 0 - , distance: 1 - , grid: o.resizingGrid - // basic format for helper - style it using class: .ui-draggable-dragging - , helper: "clone" - , opacity: o.resizerDragOpacity - , addClasses: false // avoid ui-state-disabled class when disabled - //, iframeFix: o.draggableIframeFix // TODO: consider using when bug is fixed - , zIndex: z.resizer_drag - - , start: function (e, ui) { - // REFRESH options & state pointers in case we used swapPanes - o = options[pane]; - s = state[pane]; - // re-read options - live = o.livePaneResizing; - - // ondrag_start callback - will CANCEL hide if returns false - // TODO: dragging CANNOT be cancelled like this, so see if there is a way? 
- if (false === _runCallbacks("ondrag_start", pane)) return false; - - s.isResizing = true; // prevent pane from closing while resizing - timer.clear(pane+"_closeSlider"); // just in case already triggered - - // SET RESIZER LIMITS - used in drag() - setSizeLimits(pane); // update pane/resizer state - r = s.resizerPosition; - lastPos = ui.position[ side ] - - $R.addClass( resizerClass +" "+ resizerPaneClass ); // add drag classes - helperClassesSet = false; // reset logic var - see drag() - - // DISABLE TEXT SELECTION (probably already done by resizer.mouseOver) - $('body').disableSelection(); - - // MASK PANES CONTAINING IFRAMES, APPLETS OR OTHER TROUBLESOME ELEMENTS - showMasks( masks ); - } - - , drag: function (e, ui) { - if (!helperClassesSet) { // can only add classes after clone has been added to the DOM - //$(".ui-draggable-dragging") - ui.helper - .addClass( helperClass +" "+ helperPaneClass ) // add helper classes - .css({ right: "auto", bottom: "auto" }) // fix dir="rtl" issue - .children().css("visibility","hidden") // hide toggler inside dragged resizer-bar - ; - helperClassesSet = true; - // draggable bug!? RE-SET zIndex to prevent E/W resize-bar showing through N/S pane! - if (s.isSliding) $Ps[pane].css("zIndex", z.pane_sliding); - } - // CONTAIN RESIZER-BAR TO RESIZING LIMITS - var limit = 0; - if (ui.position[side] < r.min) { - ui.position[side] = r.min; - limit = -1; - } - else if (ui.position[side] > r.max) { - ui.position[side] = r.max; - limit = 1; - } - // ADD/REMOVE dragging-limit CLASS - if (limit) { - ui.helper.addClass( helperLimitClass +" "+ helperPaneLimitClass ); // at dragging-limit - window.defaultStatus = (limit>0 && pane.match(/(north|west)/)) || (limit<0 && pane.match(/(south|east)/)) ? o.tips.maxSizeWarning : o.tips.minSizeWarning; - } - else { - ui.helper.removeClass( helperLimitClass +" "+ helperPaneLimitClass ); // not at dragging-limit - window.defaultStatus = ""; - } - // DYNAMICALLY RESIZE PANES IF OPTION ENABLED - // won't trigger unless resizer has actually moved! 
- if (live && Math.abs(ui.position[side] - lastPos) >= o.liveResizingTolerance) { - lastPos = ui.position[side]; - resizePanes(e, ui, pane) - } - } - - , stop: function (e, ui) { - $('body').enableSelection(); // RE-ENABLE TEXT SELECTION - window.defaultStatus = ""; // clear 'resizing limit' message from statusbar - $R.removeClass( resizerClass +" "+ resizerPaneClass ); // remove drag classes from Resizer - s.isResizing = false; - resizePanes(e, ui, pane, true, masks); // true = resizingDone - } - - }); - }); - - /** - * resizePanes - * - * Sub-routine called from stop() - and drag() if livePaneResizing - * - * @param {!Object} evt - * @param {!Object} ui - * @param {string} pane - * @param {boolean=} [resizingDone=false] - */ - var resizePanes = function (evt, ui, pane, resizingDone, masks) { - var dragPos = ui.position - , c = _c[pane] - , o = options[pane] - , s = state[pane] - , resizerPos - ; - switch (pane) { - case "north": resizerPos = dragPos.top; break; - case "west": resizerPos = dragPos.left; break; - case "south": resizerPos = sC.offsetHeight - dragPos.top - o.spacing_open; break; - case "east": resizerPos = sC.offsetWidth - dragPos.left - o.spacing_open; break; - }; - // remove container margin from resizer position to get the pane size - var newSize = resizerPos - sC["inset"+ c.side]; - - // Disable OR Resize Mask(s) created in drag.start - if (!resizingDone) { - // ensure we meet liveResizingTolerance criteria - if (Math.abs(newSize - s.size) < o.liveResizingTolerance) - return; // SKIP resize this time - // resize the pane - manualSizePane(pane, newSize, false, true); // true = noAnimation - sizeMasks(); // resize all visible masks - } - else { // resizingDone - // ondrag_end callback - if (false !== _runCallbacks("ondrag_end", pane)) - manualSizePane(pane, newSize, false, true); // true = noAnimation - hideMasks(); // hide all masks, which include panes with 'content/iframe-masks' - if (s.isSliding && masks) // RE-SHOW only 'object-masks' so objects won't show through sliding pane - showMasks( masks, true ); // true = onlyForObjects - } - }; - } - - /** - * sizeMask - * - * Needed to overlay a DIV over an IFRAME-pane because mask CANNOT be *inside* the pane - * Called when mask created, and during livePaneResizing - */ -, sizeMask = function () { - var $M = $(this) - , pane = $M.data("layoutMask") // eg: "west" - , s = state[pane] - ; - // only masks over an IFRAME-pane need manual resizing - if (s.tagName == "IFRAME" && s.isVisible) // no need to mask closed/hidden panes - $M.css({ - top: s.offsetTop - , left: s.offsetLeft - , width: s.outerWidth - , height: s.outerHeight - }); - /* ALT Method... - var $P = $Ps[pane]; - $M.css( $P.position() ).css({ width: $P[0].offsetWidth, height: $P[0].offsetHeight }); - */ - } -, sizeMasks = function () { - $Ms.each( sizeMask ); // resize all 'visible' masks - } - -, showMasks = function (panes, onlyForObjects) { - var a = panes ? panes.split(",") : $.layout.config.allPanes - , z = options.zIndexes - , o, s; - $.each(a, function(i,p){ - s = state[p]; - o = options[p]; - if (s.isVisible && ( (!onlyForObjects && o.maskContents) || o.maskObjects )) { - getMasks(p).each(function(){ - sizeMask.call(this); - this.style.zIndex = s.isSliding ? 
z.pane_sliding+1 : z.pane_normal+1 - this.style.display = "block"; - }); - } - }); - } - -, hideMasks = function () { - // ensure no pane is resizing - could be a timing issue - var skip; - $.each( $.layout.config.borderPanes, function(i,p){ - if (state[p].isResizing) { - skip = true; - return false; // BREAK - } - }); - if (!skip) - $Ms.hide(); // hide ALL masks - } - -, getMasks = function (pane) { - var $Masks = $([]) - , $M, i = 0, c = $Ms.length - ; - for (; i CSS - if (sC.tagName === "BODY" && ($N = $("html")).data(css)) // RESET CSS - $N.css( $N.data(css) ).removeData(css); - - // trigger plugins for this layout, if there are any - runPluginCallbacks( Instance, $.layout.onDestroy ); - - // trigger state-management and onunload callback - unload(); - - // clear the Instance of everything except for container & options (so could recreate) - // RE-CREATE: myLayout = myLayout.container.layout( myLayout.options ); - for (n in Instance) - if (!n.match(/^(container|options)$/)) delete Instance[ n ]; - // add a 'destroyed' flag to make it easy to check - Instance.destroyed = true; - - // if this is a child layout, CLEAR the child-pointer in the parent - /* for now the pointer REMAINS, but with only container, options and destroyed keys - if (parentPane) { - var layout = parentPane.pane.data("parentLayout"); - parentPane.child = layout.children[ parentPane.name ] = null; - } - */ - - return Instance; // for coding convenience - } - - /** - * Remove a pane from the layout - subroutine of destroy() - * - * @see destroy() - * @param {string|Object} evt_or_pane The pane to process - * @param {boolean=} [remove=false] Remove the DOM element? - * @param {boolean=} [skipResize=false] Skip calling resizeAll()? - * @param {boolean=} [destroyChild=true] Destroy Child-layouts? If not passed, obeys options setting - */ -, removePane = function (evt_or_pane, remove, skipResize, destroyChild) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) - , $P = $Ps[pane] - , $C = $Cs[pane] - , $R = $Rs[pane] - , $T = $Ts[pane] - ; - // NOTE: elements can still exist even after remove() - // so check for missing data(), which is cleared by removed() - if ($P && $.isEmptyObject( $P.data() )) $P = false; - if ($C && $.isEmptyObject( $C.data() )) $C = false; - if ($R && $.isEmptyObject( $R.data() )) $R = false; - if ($T && $.isEmptyObject( $T.data() )) $T = false; - - if ($P) $P.stop(true, true); - - // check for a child layout - var o = options[pane] - , s = state[pane] - , d = "layout" - , css = "layoutCSS" - , child = children[pane] || ($P ? $P.data(d) : 0) || ($C ? $C.data(d) : 0) || null - , destroy = destroyChild !== undefined ? 
destroyChild : o.destroyChildLayout - ; - - // FIRST destroy the child-layout(s) - if (destroy && child && !child.destroyed) { - child.destroy(true); // tell child-layout to destroy ALL its child-layouts too - if (child.destroyed) // destroy was successful - child = null; // clear pointer for logic below - } - - if ($P && remove && !child) - $P.remove(); - else if ($P && $P[0]) { - // create list of ALL pane-classes that need to be removed - var root = o.paneClass // default="ui-layout-pane" - , pRoot = root +"-"+ pane // eg: "ui-layout-pane-west" - , _open = "-open" - , _sliding= "-sliding" - , _closed = "-closed" - , classes = [ root, root+_open, root+_closed, root+_sliding, // generic classes - pRoot, pRoot+_open, pRoot+_closed, pRoot+_sliding ] // pane-specific classes - ; - $.merge(classes, getHoverClasses($P, true)); // ADD hover-classes - // remove all Layout classes from pane-element - $P .removeClass( classes.join(" ") ) // remove ALL pane-classes - .removeData("parentLayout") - .removeData("layoutPane") - .removeData("layoutRole") - .removeData("layoutEdge") - .removeData("autoHidden") // in case set - .unbind("."+ sID) // remove ALL Layout events - // TODO: remove these extra unbind commands when jQuery is fixed - //.unbind("mouseenter"+ sID) - //.unbind("mouseleave"+ sID) - ; - // do NOT reset CSS if this pane/content is STILL the container of a nested layout! - // the nested layout will reset its 'container' CSS when/if it is destroyed - if ($C && $C.data(d)) { - // a content-div may not have a specific width, so give it one to contain the Layout - $C.width( $C.width() ); - child.resizeAll(); // now resize the Layout - } - else if ($C) - $C.css( $C.data(css) ).removeData(css).removeData("layoutRole"); - // remove pane AFTER content in case there was a nested layout - if (!$P.data(d)) - $P.css( $P.data(css) ).removeData(css); - } - - // REMOVE pane resizer and toggler elements - if ($T) $T.remove(); - if ($R) $R.remove(); - - // CLEAR all pointers and state data - Instance[pane] = $Ps[pane] = $Cs[pane] = $Rs[pane] = $Ts[pane] = children[pane] = false; - s = { removed: true }; - - if (!skipResize) - resizeAll(); - } - - -/* - * ########################### - * ACTION METHODS - * ########################### - */ - -, _hidePane = function (pane) { - var $P = $Ps[pane] - , o = options[pane] - , s = $P[0].style - ; - if (o.useOffscreenClose) { - if (!$P.data(_c.offscreenReset)) - $P.data(_c.offscreenReset, { left: s.left, right: s.right }); - $P.css( _c.offscreenCSS ); - } - else - $P.hide().removeData(_c.offscreenReset); - } - -, _showPane = function (pane) { - var $P = $Ps[pane] - , o = options[pane] - , off = _c.offscreenCSS - , old = $P.data(_c.offscreenReset) - , s = $P[0].style - ; - $P .show() // ALWAYS show, just in case - .removeData(_c.offscreenReset); - if (o.useOffscreenClose && old) { - if (s.left == off.left) - s.left = old.left; - if (s.right == off.right) - s.right = old.right; - } - } - - - /** - * Completely 'hides' a pane, including its spacing - as if it does not exist - * The pane is not actually 'removed' from the source, so can use 'show' to un-hide it - * - * @param {string|Object} evt_or_pane The pane being hidden, ie: north, south, east, or west - * @param {boolean=} [noAnimation=false] - */ -, hide = function (evt_or_pane, noAnimation) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) - , o = options[pane] - , s = state[pane] - , $P = $Ps[pane] - , $R = $Rs[pane] - ; - if (!$P || s.isHidden) return; // pane does not exist OR is 
already hidden - - // onhide_start callback - will CANCEL hide if returns false - if (state.initialized && false === _runCallbacks("onhide_start", pane)) return; - - s.isSliding = false; // just in case - - // now hide the elements - if ($R) $R.hide(); // hide resizer-bar - if (!state.initialized || s.isClosed) { - s.isClosed = true; // to trigger open-animation on show() - s.isHidden = true; - s.isVisible = false; - if (!state.initialized) - _hidePane(pane); // no animation when loading page - sizeMidPanes(_c[pane].dir === "horz" ? "" : "center"); - if (state.initialized || o.triggerEventsOnLoad) - _runCallbacks("onhide_end", pane); - } - else { - s.isHiding = true; // used by onclose - close(pane, false, noAnimation); // adjust all panes to fit - } - } - - /** - * Show a hidden pane - show as 'closed' by default unless openPane = true - * - * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west - * @param {boolean=} [openPane=false] - * @param {boolean=} [noAnimation=false] - * @param {boolean=} [noAlert=false] - */ -, show = function (evt_or_pane, openPane, noAnimation, noAlert) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) - , o = options[pane] - , s = state[pane] - , $P = $Ps[pane] - , $R = $Rs[pane] - ; - if (!$P || !s.isHidden) return; // pane does not exist OR is not hidden - - // onshow_start callback - will CANCEL show if returns false - if (false === _runCallbacks("onshow_start", pane)) return; - - s.isSliding = false; // just in case - s.isShowing = true; // used by onopen/onclose - //s.isHidden = false; - will be set by open/close - if not cancelled - - // now show the elements - //if ($R) $R.show(); - will be shown by open/close - if (openPane === false) - close(pane, true); // true = force - else - open(pane, false, noAnimation, noAlert); // adjust all panes to fit - } - - - /** - * Toggles a pane open/closed by calling either open or close - * - * @param {string|Object} evt_or_pane The pane being toggled, ie: north, south, east, or west - * @param {boolean=} [slide=false] - */ -, toggle = function (evt_or_pane, slide) { - if (!isInitialized()) return; - var evt = evtObj(evt_or_pane) - , pane = evtPane.call(this, evt_or_pane) - , s = state[pane] - ; - if (evt) // called from to $R.dblclick OR triggerPaneEvent - evt.stopImmediatePropagation(); - if (s.isHidden) - show(pane); // will call 'open' after unhiding it - else if (s.isClosed) - open(pane, !!slide); - else - close(pane); - } - - - /** - * Utility method used during init or other auto-processes - * - * @param {string} pane The pane being closed - * @param {boolean=} [setHandles=false] - */ -, _closePane = function (pane, setHandles) { - var - $P = $Ps[pane] - , s = state[pane] - ; - _hidePane(pane); - s.isClosed = true; - s.isVisible = false; - // UNUSED: if (setHandles) setAsClosed(pane, true); // true = force - } - - /** - * Close the specified pane (animation optional), and resize all other panes as needed - * - * @param {string|Object} evt_or_pane The pane being closed, ie: north, south, east, or west - * @param {boolean=} [force=false] - * @param {boolean=} [noAnimation=false] - * @param {boolean=} [skipCallback=false] - */ -, close = function (evt_or_pane, force, noAnimation, skipCallback) { - var pane = evtPane.call(this, evt_or_pane); - // if pane has been initialized, but NOT the complete layout, close pane instantly - if (!state.initialized && $Ps[pane]) { - _closePane(pane); // INIT pane as closed - return; - } - if (!isInitialized()) 
return; - - var - $P = $Ps[pane] - , $R = $Rs[pane] - , $T = $Ts[pane] - , o = options[pane] - , s = state[pane] - , c = _c[pane] - , doFX, isShowing, isHiding, wasSliding; - - // QUEUE in case another action/animation is in progress - $N.queue(function( queueNext ){ - - if ( !$P - || (!o.closable && !s.isShowing && !s.isHiding) // invalid request // (!o.resizable && !o.closable) ??? - || (!force && s.isClosed && !s.isShowing) // already closed - ) return queueNext(); - - // onclose_start callback - will CANCEL hide if returns false - // SKIP if just 'showing' a hidden pane as 'closed' - var abort = !s.isShowing && false === _runCallbacks("onclose_start", pane); - - // transfer logic vars to temp vars - isShowing = s.isShowing; - isHiding = s.isHiding; - wasSliding = s.isSliding; - // now clear the logic vars (REQUIRED before aborting) - delete s.isShowing; - delete s.isHiding; - - if (abort) return queueNext(); - - doFX = !noAnimation && !s.isClosed && (o.fxName_close != "none"); - s.isMoving = true; - s.isClosed = true; - s.isVisible = false; - // update isHidden BEFORE sizing panes - if (isHiding) s.isHidden = true; - else if (isShowing) s.isHidden = false; - - if (s.isSliding) // pane is being closed, so UNBIND trigger events - bindStopSlidingEvents(pane, false); // will set isSliding=false - else // resize panes adjacent to this one - sizeMidPanes(_c[pane].dir === "horz" ? "" : "center", false); // false = NOT skipCallback - - // if this pane has a resizer bar, move it NOW - before animation - setAsClosed(pane); - - // CLOSE THE PANE - if (doFX) { // animate the close - // mask panes with objects - var masks = "center"+ (c.dir=="horz" ? ",west,east" : ""); - showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true - lockPaneForFX(pane, true); // need to set left/top so animation will work - $P.hide( o.fxName_close, o.fxSettings_close, o.fxSpeed_close, function () { - lockPaneForFX(pane, false); // undo - if (s.isClosed) close_2(); - queueNext(); - }); - } - else { // hide the pane without animation - _hidePane(pane); - close_2(); - queueNext(); - }; - }); - - // SUBROUTINE - function close_2 () { - s.isMoving = false; - bindStartSlidingEvent(pane, true); // will enable if o.slidable = true - - // if opposite-pane was autoClosed, see if it can be autoOpened now - var altPane = _c.oppositeEdge[pane]; - if (state[ altPane ].noRoom) { - setSizeLimits( altPane ); - makePaneFit( altPane ); - } - - // hide any masks shown while closing - hideMasks(); - - if (!skipCallback && (state.initialized || o.triggerEventsOnLoad)) { - // onclose callback - UNLESS just 'showing' a hidden pane as 'closed' - if (!isShowing) _runCallbacks("onclose_end", pane); - // onhide OR onshow callback - if (isShowing) _runCallbacks("onshow_end", pane); - if (isHiding) _runCallbacks("onhide_end", pane); - } - } - } - - /** - * @param {string} pane The pane just closed, ie: north, south, east, or west - */ -, setAsClosed = function (pane) { - var - $P = $Ps[pane] - , $R = $Rs[pane] - , $T = $Ts[pane] - , o = options[pane] - , s = state[pane] - , side = _c[pane].side.toLowerCase() - , inset = "inset"+ _c[pane].side - , rClass = o.resizerClass - , tClass = o.togglerClass - , _pane = "-"+ pane // used for classNames - , _open = "-open" - , _sliding= "-sliding" - , _closed = "-closed" - ; - $R - .css(side, sC[inset]) // move the resizer - .removeClass( rClass+_open +" "+ rClass+_pane+_open ) - .removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding ) - .addClass( rClass+_closed +" "+ rClass+_pane+_closed 
) - .unbind("dblclick."+ sID) - ; - // DISABLE 'resizing' when closed - do this BEFORE bindStartSlidingEvent? - if (o.resizable && $.layout.plugins.draggable) - $R - .draggable("disable") - .removeClass("ui-state-disabled") // do NOT apply disabled styling - not suitable here - .css("cursor", "default") - .attr("title","") - ; - - // if pane has a toggler button, adjust that too - if ($T) { - $T - .removeClass( tClass+_open +" "+ tClass+_pane+_open ) - .addClass( tClass+_closed +" "+ tClass+_pane+_closed ) - .attr("title", o.tips.Open) // may be blank - ; - // toggler-content - if exists - $T.children(".content-open").hide(); - $T.children(".content-closed").css("display","block"); - } - - // sync any 'pin buttons' - syncPinBtns(pane, false); - - if (state.initialized) { - // resize 'length' and position togglers for adjacent panes - sizeHandles(); - } - } - - /** - * Open the specified pane (animation optional), and resize all other panes as needed - * - * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west - * @param {boolean=} [slide=false] - * @param {boolean=} [noAnimation=false] - * @param {boolean=} [noAlert=false] - */ -, open = function (evt_or_pane, slide, noAnimation, noAlert) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) - , $P = $Ps[pane] - , $R = $Rs[pane] - , $T = $Ts[pane] - , o = options[pane] - , s = state[pane] - , c = _c[pane] - , doFX, isShowing - ; - // QUEUE in case another action/animation is in progress - $N.queue(function( queueNext ){ - - if ( !$P - || (!o.resizable && !o.closable && !s.isShowing) // invalid request - || (s.isVisible && !s.isSliding) // already open - ) return queueNext(); - - // pane can ALSO be unhidden by just calling show(), so handle this scenario - if (s.isHidden && !s.isShowing) { - queueNext(); // call before show() because it needs the queue free - show(pane, true); - return; - } - - if (o.autoResize && s.size != o.size) // resize pane to original size set in options - sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation - else - // make sure there is enough space available to open the pane - setSizeLimits(pane, slide); - - // onopen_start callback - will CANCEL open if returns false - var cbReturn = _runCallbacks("onopen_start", pane); - - if (cbReturn === "abort") - return queueNext(); - - // update pane-state again in case options were changed in onopen_start - if (cbReturn !== "NC") // NC = "No Callback" - setSizeLimits(pane, slide); - - if (s.minSize > s.maxSize) { // INSUFFICIENT ROOM FOR PANE TO OPEN! 
- syncPinBtns(pane, false); // make sure pin-buttons are reset - if (!noAlert && o.tips.noRoomToOpen) - alert(o.tips.noRoomToOpen); - return queueNext(); // ABORT - } - - if (slide) // START Sliding - will set isSliding=true - bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane - else if (s.isSliding) // PIN PANE (stop sliding) - open pane 'normally' instead - bindStopSlidingEvents(pane, false); // UNBIND trigger events - will set isSliding=false - else if (o.slidable) - bindStartSlidingEvent(pane, false); // UNBIND trigger events - - s.noRoom = false; // will be reset by makePaneFit if 'noRoom' - makePaneFit(pane); - - // transfer logic var to temp var - isShowing = s.isShowing; - // now clear the logic var - delete s.isShowing; - - doFX = !noAnimation && s.isClosed && (o.fxName_open != "none"); - s.isMoving = true; - s.isVisible = true; - s.isClosed = false; - // update isHidden BEFORE sizing panes - WHY??? Old? - if (isShowing) s.isHidden = false; - - if (doFX) { // ANIMATE - // mask panes with objects - var masks = "center"+ (c.dir=="horz" ? ",west,east" : ""); - if (s.isSliding) masks += ","+ _c.oppositeEdge[pane]; - showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true - lockPaneForFX(pane, true); // need to set left/top so animation will work - $P.show( o.fxName_open, o.fxSettings_open, o.fxSpeed_open, function() { - lockPaneForFX(pane, false); // undo - if (s.isVisible) open_2(); // continue - queueNext(); - }); - } - else { // no animation - _showPane(pane);// just show pane and... - open_2(); // continue - queueNext(); - }; - }); - - // SUBROUTINE - function open_2 () { - s.isMoving = false; - - // cure iframe display issues - _fixIframe(pane); - - // NOTE: if isSliding, then other panes are NOT 'resized' - if (!s.isSliding) { // resize all panes adjacent to this one - hideMasks(); // remove any masks shown while opening - sizeMidPanes(_c[pane].dir=="vert" ? "center" : "", false); // false = NOT skipCallback - } - - // set classes, position handles and execute callbacks... 
- setAsOpen(pane); - }; - - } - - /** - * @param {string} pane The pane just opened, ie: north, south, east, or west - * @param {boolean=} [skipCallback=false] - */ -, setAsOpen = function (pane, skipCallback) { - var - $P = $Ps[pane] - , $R = $Rs[pane] - , $T = $Ts[pane] - , o = options[pane] - , s = state[pane] - , side = _c[pane].side.toLowerCase() - , inset = "inset"+ _c[pane].side - , rClass = o.resizerClass - , tClass = o.togglerClass - , _pane = "-"+ pane // used for classNames - , _open = "-open" - , _closed = "-closed" - , _sliding= "-sliding" - ; - $R - .css(side, sC[inset] + getPaneSize(pane)) // move the resizer - .removeClass( rClass+_closed +" "+ rClass+_pane+_closed ) - .addClass( rClass+_open +" "+ rClass+_pane+_open ) - ; - if (s.isSliding) - $R.addClass( rClass+_sliding +" "+ rClass+_pane+_sliding ) - else // in case 'was sliding' - $R.removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding ) - - if (o.resizerDblClickToggle) - $R.bind("dblclick", toggle ); - removeHover( 0, $R ); // remove hover classes - if (o.resizable && $.layout.plugins.draggable) - $R .draggable("enable") - .css("cursor", o.resizerCursor) - .attr("title", o.tips.Resize); - else if (!s.isSliding) - $R.css("cursor", "default"); // n-resize, s-resize, etc - - // if pane also has a toggler button, adjust that too - if ($T) { - $T .removeClass( tClass+_closed +" "+ tClass+_pane+_closed ) - .addClass( tClass+_open +" "+ tClass+_pane+_open ) - .attr("title", o.tips.Close); // may be blank - removeHover( 0, $T ); // remove hover classes - // toggler-content - if exists - $T.children(".content-closed").hide(); - $T.children(".content-open").css("display","block"); - } - - // sync any 'pin buttons' - syncPinBtns(pane, !s.isSliding); - - // update pane-state dimensions - BEFORE resizing content - $.extend(s, elDims($P)); - - if (state.initialized) { - // resize resizer & toggler sizes for all panes - sizeHandles(); - // resize content every time pane opens - to be sure - sizeContent(pane, true); // true = remeasure headers/footers, even if 'pane.isMoving' - } - - if (!skipCallback && (state.initialized || o.triggerEventsOnLoad) && $P.is(":visible")) { - // onopen callback - _runCallbacks("onopen_end", pane); - // onshow callback - TODO: should this be here? - if (s.isShowing) _runCallbacks("onshow_end", pane); - - // ALSO call onresize because layout-size *may* have changed while pane was closed - if (state.initialized) - _runCallbacks("onresize_end", pane); - } - - // TODO: Somehow sizePane("north") is being called after this point??? - } - - - /** - * slideOpen / slideClose / slideToggle - * - * Pass-though methods for sliding - */ -, slideOpen = function (evt_or_pane) { - if (!isInitialized()) return; - var evt = evtObj(evt_or_pane) - , pane = evtPane.call(this, evt_or_pane) - , s = state[pane] - , delay = options[pane].slideDelay_open - ; - // prevent event from triggering on NEW resizer binding created below - if (evt) evt.stopImmediatePropagation(); - - if (s.isClosed && evt && evt.type === "mouseenter" && delay > 0) - // trigger = mouseenter - use a delay - timer.set(pane+"_openSlider", open_NOW, delay); - else - open_NOW(); // will unbind events if is already open - - /** - * SUBROUTINE for timed open - */ - function open_NOW () { - if (!s.isClosed) // skip if no longer closed! 
- bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane - else if (!s.isMoving) - open(pane, true); // true = slide - open() will handle binding - }; - } - -, slideClose = function (evt_or_pane) { - if (!isInitialized()) return; - var evt = evtObj(evt_or_pane) - , pane = evtPane.call(this, evt_or_pane) - , o = options[pane] - , s = state[pane] - , delay = s.isMoving ? 1000 : 300 // MINIMUM delay - option may override - ; - if (s.isClosed || s.isResizing) - return; // skip if already closed OR in process of resizing - else if (o.slideTrigger_close === "click") - close_NOW(); // close immediately onClick - else if (o.preventQuickSlideClose && s.isMoving) - return; // handle Chrome quick-close on slide-open - else if (o.preventPrematureSlideClose && evt && $.layout.isMouseOverElem(evt, $Ps[pane])) - return; // handle incorrect mouseleave trigger, like when over a SELECT-list in IE - else if (evt) // trigger = mouseleave - use a delay - // 1 sec delay if 'opening', else .3 sec - timer.set(pane+"_closeSlider", close_NOW, max(o.slideDelay_close, delay)); - else // called programically - close_NOW(); - - /** - * SUBROUTINE for timed close - */ - function close_NOW () { - if (s.isClosed) // skip 'close' if already closed! - bindStopSlidingEvents(pane, false); // UNBIND trigger events - TODO: is this needed here? - else if (!s.isMoving) - close(pane); // close will handle unbinding - }; - } - - /** - * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west - */ -, slideToggle = function (evt_or_pane) { - var pane = evtPane.call(this, evt_or_pane); - toggle(pane, true); - } - - - /** - * Must set left/top on East/South panes so animation will work properly - * - * @param {string} pane The pane to lock, 'east' or 'south' - any other is ignored! - * @param {boolean} doLock true = set left/top, false = remove - */ -, lockPaneForFX = function (pane, doLock) { - var $P = $Ps[pane] - , s = state[pane] - , o = options[pane] - , z = options.zIndexes - ; - if (doLock) { - $P.css({ zIndex: z.pane_animate }); // overlay all elements during animation - if (pane=="south") - $P.css({ top: sC.insetTop + sC.innerHeight - $P.outerHeight() }); - else if (pane=="east") - $P.css({ left: sC.insetLeft + sC.innerWidth - $P.outerWidth() }); - } - else { // animation DONE - RESET CSS - // TODO: see if this can be deleted. It causes a quick-close when sliding in Chrome - $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) }); - if (pane=="south") - $P.css({ top: "auto" }); - // if pane is positioned 'off-screen', then DO NOT screw with it! - else if (pane=="east" && !$P.css("left").match(/\-99999/)) - $P.css({ left: "auto" }); - // fix anti-aliasing in IE - only needed for animations that change opacity - if (browser.msie && o.fxOpacityFix && o.fxName_open != "slide" && $P.css("filter") && $P.css("opacity") == 1) - $P[0].style.removeAttribute('filter'); - } - } - - - /** - * Toggle sliding functionality of a specific pane on/off by adding removing 'slide open' trigger - * - * @see open(), close() - * @param {string} pane The pane to enable/disable, 'north', 'south', etc. - * @param {boolean} enable Enable or Disable sliding? 
- */ -, bindStartSlidingEvent = function (pane, enable) { - var o = options[pane] - , $P = $Ps[pane] - , $R = $Rs[pane] - , evtName = o.slideTrigger_open.toLowerCase() - ; - if (!$R || (enable && !o.slidable)) return; - - // make sure we have a valid event - if (evtName.match(/mouseover/)) - evtName = o.slideTrigger_open = "mouseenter"; - else if (!evtName.match(/(click|dblclick|mouseenter)/)) - evtName = o.slideTrigger_open = "click"; - - $R - // add or remove event - [enable ? "bind" : "unbind"](evtName +'.'+ sID, slideOpen) - // set the appropriate cursor & title/tip - .css("cursor", enable ? o.sliderCursor : "default") - .attr("title", enable ? o.tips.Slide : "") - ; - } - - /** - * Add or remove 'mouseleave' events to 'slide close' when pane is 'sliding' open or closed - * Also increases zIndex when pane is sliding open - * See bindStartSlidingEvent for code to control 'slide open' - * - * @see slideOpen(), slideClose() - * @param {string} pane The pane to process, 'north', 'south', etc. - * @param {boolean} enable Enable or Disable events? - */ -, bindStopSlidingEvents = function (pane, enable) { - var o = options[pane] - , s = state[pane] - , c = _c[pane] - , z = options.zIndexes - , evtName = o.slideTrigger_close.toLowerCase() - , action = (enable ? "bind" : "unbind") - , $P = $Ps[pane] - , $R = $Rs[pane] - ; - s.isSliding = enable; // logic - timer.clear(pane+"_closeSlider"); // just in case - - // remove 'slideOpen' event from resizer - // ALSO will raise the zIndex of the pane & resizer - if (enable) bindStartSlidingEvent(pane, false); - - // RE/SET zIndex - increases when pane is sliding-open, resets to normal when not - $P.css("zIndex", enable ? z.pane_sliding : z.pane_normal); - $R.css("zIndex", enable ? z.pane_sliding+2 : z.resizer_normal); // NOTE: mask = pane_sliding+1 - - // make sure we have a valid event - if (!evtName.match(/(click|mouseleave)/)) - evtName = o.slideTrigger_close = "mouseleave"; // also catches 'mouseout' - - // add/remove slide triggers - $R[action](evtName, slideClose); // base event on resize - // need extra events for mouseleave - if (evtName === "mouseleave") { - // also close on pane.mouseleave - $P[action]("mouseleave."+ sID, slideClose); - // cancel timer when mouse moves between 'pane' and 'resizer' - $R[action]("mouseenter."+ sID, cancelMouseOut); - $P[action]("mouseenter."+ sID, cancelMouseOut); - } - - if (!enable) - timer.clear(pane+"_closeSlider"); - else if (evtName === "click" && !o.resizable) { - // IF pane is not resizable (which already has a cursor and tip) - // then set the a cursor & title/tip on resizer when sliding - $R.css("cursor", enable ? o.sliderCursor : "default"); - $R.attr("title", enable ? o.tips.Close : ""); // use Toggler-tip, eg: "Close Pane" - } - - // SUBROUTINE for mouseleave timer clearing - function cancelMouseOut (evt) { - timer.clear(pane+"_closeSlider"); - evt.stopPropagation(); - } - } - - - /** - * Hides/closes a pane if there is insufficient room - reverses this when there is room again - * MUST have already called setSizeLimits() before calling this method - * - * @param {string} pane The pane being resized - * @param {boolean=} [isOpening=false] Called from onOpen? - * @param {boolean=} [skipCallback=false] Should the onresize callback be run? 
- * @param {boolean=} [force=false] - */ -, makePaneFit = function (pane, isOpening, skipCallback, force) { - var - o = options[pane] - , s = state[pane] - , c = _c[pane] - , $P = $Ps[pane] - , $R = $Rs[pane] - , isSidePane = c.dir==="vert" - , hasRoom = false - ; - // special handling for center & east/west panes - if (pane === "center" || (isSidePane && s.noVerticalRoom)) { - // see if there is enough room to display the pane - // ERROR: hasRoom = s.minHeight <= s.maxHeight && (isSidePane || s.minWidth <= s.maxWidth); - hasRoom = (s.maxHeight >= 0); - if (hasRoom && s.noRoom) { // previously hidden due to noRoom, so show now - _showPane(pane); - if ($R) $R.show(); - s.isVisible = true; - s.noRoom = false; - if (isSidePane) s.noVerticalRoom = false; - _fixIframe(pane); - } - else if (!hasRoom && !s.noRoom) { // not currently hidden, so hide now - _hidePane(pane); - if ($R) $R.hide(); - s.isVisible = false; - s.noRoom = true; - } - } - - // see if there is enough room to fit the border-pane - if (pane === "center") { - // ignore center in this block - } - else if (s.minSize <= s.maxSize) { // pane CAN fit - hasRoom = true; - if (s.size > s.maxSize) // pane is too big - shrink it - sizePane(pane, s.maxSize, skipCallback, force, true); // true = noAnimation - else if (s.size < s.minSize) // pane is too small - enlarge it - sizePane(pane, s.minSize, skipCallback, force, true); - // need s.isVisible because new pseudoClose method keeps pane visible, but off-screen - else if ($R && s.isVisible && $P.is(":visible")) { - // make sure resizer-bar is positioned correctly - // handles situation where nested layout was 'hidden' when initialized - var side = c.side.toLowerCase() - , pos = s.size + sC["inset"+ c.side] - ; - if ($.layout.cssNum($R, side) != pos) $R.css( side, pos ); - } - - // if was previously hidden due to noRoom, then RESET because NOW there is room - if (s.noRoom) { - // s.noRoom state will be set by open or show - if (s.wasOpen && o.closable) { - if (o.autoReopen) - open(pane, false, true, true); // true = noAnimation, true = noAlert - else // leave the pane closed, so just update state - s.noRoom = false; - } - else - show(pane, s.wasOpen, true, true); // true = noAnimation, true = noAlert - } - } - else { // !hasRoom - pane CANNOT fit - if (!s.noRoom) { // pane not set as noRoom yet, so hide or close it now... - s.noRoom = true; // update state - s.wasOpen = !s.isClosed && !s.isSliding; - if (s.isClosed){} // SKIP - else if (o.closable) // 'close' if possible - close(pane, true, true); // true = force, true = noAnimation - else // 'hide' pane if cannot just be closed - hide(pane, true); // true = noAnimation - } - } - } - - - /** - * sizePane / manualSizePane - * sizePane is called only by internal methods whenever a pane needs to be resized - * manualSizePane is an exposed flow-through method allowing extra code when pane is 'manually resized' - * - * @param {string|Object} evt_or_pane The pane being resized - * @param {number} size The *desired* new size for this pane - will be validated - * @param {boolean=} [skipCallback=false] Should the onresize callback be run? - * @param {boolean=} [noAnimation=false] - */ -, manualSizePane = function (evt_or_pane, size, skipCallback, noAnimation) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) - , o = options[pane] - , s = state[pane] - // if resizing callbacks have been delayed and resizing is now DONE, force resizing to complete... 
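
The noRoom handling above is driven by a few pane options; a minimal sketch, with illustrative values, of the ones makePaneFit() consults (closable decides whether the pane is closed or merely hidden when space runs out, autoReopen whether it comes back automatically).

    var myLayout = $("#container").layout({
        west__minSize:    100    // below this the pane no longer "fits" and makePaneFit() kicks in
    ,   west__closable:   true   // close (rather than hide) the pane when there is no room
    ,   west__autoReopen: true   // re-open it automatically once there is room again
    });
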
- , forceResize = o.livePaneResizing && !s.isResizing - ; - // ANY call to manualSizePane disables autoResize - ie, percentage sizing - o.autoResize = false; - // flow-through... - sizePane(pane, size, skipCallback, forceResize, noAnimation); // will animate resize if option enabled - } - - /** - * @param {string|Object} evt_or_pane The pane being resized - * @param {number} size The *desired* new size for this pane - will be validated - * @param {boolean=} [skipCallback=false] Should the onresize callback be run? - * @param {boolean=} [force=false] Force resizing even if does not seem necessary - * @param {boolean=} [noAnimation=false] - */ -, sizePane = function (evt_or_pane, size, skipCallback, force, noAnimation) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) // probably NEVER called from event? - , o = options[pane] - , s = state[pane] - , $P = $Ps[pane] - , $R = $Rs[pane] - , side = _c[pane].side.toLowerCase() - , dimName = _c[pane].sizeType.toLowerCase() - , inset = "inset"+ _c[pane].side - , skipResizeWhileDragging = s.isResizing && !o.triggerEventsDuringLiveResize - , doFX = noAnimation !== true && o.animatePaneSizing - , oldSize, newSize - ; - // QUEUE in case another action/animation is in progress - $N.queue(function( queueNext ){ - // calculate 'current' min/max sizes - setSizeLimits(pane); // update pane-state - oldSize = s.size; - size = _parseSize(pane, size); // handle percentages & auto - size = max(size, _parseSize(pane, o.minSize)); - size = min(size, s.maxSize); - if (size < s.minSize) { // not enough room for pane! - queueNext(); // call before makePaneFit() because it needs the queue free - makePaneFit(pane, false, skipCallback); // will hide or close pane - return; - } - - // IF newSize is same as oldSize, then nothing to do - abort - if (!force && size === oldSize) - return queueNext(); - - // onresize_start callback CANNOT cancel resizing because this would break the layout! - if (!skipCallback && state.initialized && s.isVisible) - _runCallbacks("onresize_start", pane); - - // resize the pane, and make sure its visible - newSize = cssSize(pane, size); - - if (doFX && $P.is(":visible")) { // ANIMATE - var fx = $.layout.effects.size[pane] || $.layout.effects.size.all - , easing = o.fxSettings_size.easing || fx.easing - , z = options.zIndexes - , props = {}; - props[ dimName ] = newSize +'px'; - s.isMoving = true; - // overlay all elements during animation - $P.css({ zIndex: z.pane_animate }) - .show().animate( props, o.fxSpeed_size, easing, function(){ - // reset zIndex after animation - $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) }); - s.isMoving = false; - sizePane_2(); // continue - queueNext(); - }); - } - else { // no animation - $P.css( dimName, newSize ); // resize pane - // if pane is visible, then - if ($P.is(":visible")) - sizePane_2(); // continue - else { - // pane is NOT VISIBLE, so just update state data... - // when pane is *next opened*, it will have the new size - s.size = size; // update state.size - $.extend(s, elDims($P)); // update state dimensions - } - queueNext(); - }; - - }); - - // SUBROUTINE - function sizePane_2 () { - /* Panes are sometimes not sized precisely in some browsers!? - * This code will resize the pane up to 3 times to nudge the pane to the correct size - */ - var actual = dimName==='width' ? 
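
A quick sketch of the public resize entry point set up above, assuming myLayout is an initialized Instance; the pane names and sizes are illustrative. The exposed sizePane maps to manualSizePane, which first turns off autoResize (percentage auto-sizing) and then flows through to the internal sizePane.

    myLayout.sizePane("west", 250);      // desired outer-size in pixels; validated against min/max
    myLayout.sizePane("south", "20%");   // percentages and "auto" are resolved by _parseSize()
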
$P.outerWidth() : $P.outerHeight() - , tries = [{ - pane: pane - , count: 1 - , target: size - , actual: actual - , correct: (size === actual) - , attempt: size - , cssSize: newSize - }] - , lastTry = tries[0] - , thisTry = {} - , msg = 'Inaccurate size after resizing the '+ pane +'-pane.' - ; - while ( !lastTry.correct ) { - thisTry = { pane: pane, count: lastTry.count+1, target: size }; - - if (lastTry.actual > size) - thisTry.attempt = max(0, lastTry.attempt - (lastTry.actual - size)); - else // lastTry.actual < size - thisTry.attempt = max(0, lastTry.attempt + (size - lastTry.actual)); - - thisTry.cssSize = cssSize(pane, thisTry.attempt); - $P.css( dimName, thisTry.cssSize ); - - thisTry.actual = dimName=='width' ? $P.outerWidth() : $P.outerHeight(); - thisTry.correct = (size === thisTry.actual); - - // log attempts and alert the user of this *non-fatal error* (if showDebugMessages) - if ( tries.length === 1) { - _log(msg, false, true); - _log(lastTry, false, true); - } - _log(thisTry, false, true); - // after 4 tries, is as close as its gonna get! - if (tries.length > 3) break; - - tries.push( thisTry ); - lastTry = tries[ tries.length - 1 ]; - } - // END TESTING CODE - - // update pane-state dimensions - s.size = size; - $.extend(s, elDims($P)); - - if (s.isVisible && $P.is(":visible")) { - // reposition the resizer-bar - if ($R) $R.css( side, size + sC[inset] ); - // resize the content-div - sizeContent(pane); - } - - if (!skipCallback && !skipResizeWhileDragging && state.initialized && s.isVisible) - _runCallbacks("onresize_end", pane); - - // resize all the adjacent panes, and adjust their toggler buttons - // when skipCallback passed, it means the controlling method will handle 'other panes' - if (!skipCallback) { - // also no callback if live-resize is in progress and NOT triggerEventsDuringLiveResize - if (!s.isSliding) sizeMidPanes(_c[pane].dir=="horz" ? "" : "center", skipResizeWhileDragging, force); - sizeHandles(); - } - - // if opposite-pane was autoClosed, see if it can be autoOpened now - var altPane = _c.oppositeEdge[pane]; - if (size < oldSize && state[ altPane ].noRoom) { - setSizeLimits( altPane ); - makePaneFit( altPane, false, skipCallback ); - } - - // DEBUG - ALERT user/developer so they know there was a sizing problem - if (tries.length > 1) - _log(msg +'\nSee the Error Console for details.', true, true); - } - } - - /** - * @see initPanes(), sizePane(), resizeAll(), open(), close(), hide() - * @param {Array.|string} panes The pane(s) being resized, comma-delmited string - * @param {boolean=} [skipCallback=false] Should the onresize callback be run? - * @param {boolean=} [force=false] - */ -, sizeMidPanes = function (panes, skipCallback, force) { - panes = (panes ? 
panes : "east,west,center").split(","); - - $.each(panes, function (i, pane) { - if (!$Ps[pane]) return; // NO PANE - skip - var - o = options[pane] - , s = state[pane] - , $P = $Ps[pane] - , $R = $Rs[pane] - , isCenter= (pane=="center") - , hasRoom = true - , CSS = {} - , newCenter = calcNewCenterPaneDims() - ; - // update pane-state dimensions - $.extend(s, elDims($P)); - - if (pane === "center") { - if (!force && s.isVisible && newCenter.width === s.outerWidth && newCenter.height === s.outerHeight) - return true; // SKIP - pane already the correct size - // set state for makePaneFit() logic - $.extend(s, cssMinDims(pane), { - maxWidth: newCenter.width - , maxHeight: newCenter.height - }); - CSS = newCenter; - // convert OUTER width/height to CSS width/height - CSS.width = cssW($P, CSS.width); - // NEW - allow pane to extend 'below' visible area rather than hide it - CSS.height = cssH($P, CSS.height); - hasRoom = CSS.width >= 0 && CSS.height >= 0; // height >= 0 = ALWAYS TRUE NOW - // during layout init, try to shrink east/west panes to make room for center - if (!state.initialized && o.minWidth > s.outerWidth) { - var - reqPx = o.minWidth - s.outerWidth - , minE = options.east.minSize || 0 - , minW = options.west.minSize || 0 - , sizeE = state.east.size - , sizeW = state.west.size - , newE = sizeE - , newW = sizeW - ; - if (reqPx > 0 && state.east.isVisible && sizeE > minE) { - newE = max( sizeE-minE, sizeE-reqPx ); - reqPx -= sizeE-newE; - } - if (reqPx > 0 && state.west.isVisible && sizeW > minW) { - newW = max( sizeW-minW, sizeW-reqPx ); - reqPx -= sizeW-newW; - } - // IF we found enough extra space, then resize the border panes as calculated - if (reqPx === 0) { - if (sizeE && sizeE != minE) - sizePane('east', newE, true, force, true); // true = skipCallback/noAnimation - initPanes will handle when done - if (sizeW && sizeW != minW) - sizePane('west', newW, true, force, true); - // now start over! 
- sizeMidPanes('center', skipCallback, force); - return; // abort this loop - } - } - } - else { // for east and west, set only the height, which is same as center height - // set state.min/maxWidth/Height for makePaneFit() logic - if (s.isVisible && !s.noVerticalRoom) - $.extend(s, elDims($P), cssMinDims(pane)) - if (!force && !s.noVerticalRoom && newCenter.height === s.outerHeight) - return true; // SKIP - pane already the correct size - // east/west have same top, bottom & height as center - CSS.top = newCenter.top; - CSS.bottom = newCenter.bottom; - // NEW - allow pane to extend 'below' visible area rather than hide it - CSS.height = cssH($P, newCenter.height); - s.maxHeight = CSS.height; - hasRoom = (s.maxHeight >= 0); // ALWAYS TRUE NOW - if (!hasRoom) s.noVerticalRoom = true; // makePaneFit() logic - } - - if (hasRoom) { - // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized - if (!skipCallback && state.initialized) - _runCallbacks("onresize_start", pane); - - $P.css(CSS); // apply the CSS to pane - if (pane !== "center") - sizeHandles(pane); // also update resizer length - if (s.noRoom && !s.isClosed && !s.isHidden) - makePaneFit(pane); // will re-open/show auto-closed/hidden pane - if (s.isVisible) { - $.extend(s, elDims($P)); // update pane dimensions - if (state.initialized) sizeContent(pane); // also resize the contents, if exists - } - } - else if (!s.noRoom && s.isVisible) // no room for pane - makePaneFit(pane); // will hide or close pane - - if (!s.isVisible) - return true; // DONE - next pane - - /* - * Extra CSS for IE6 or IE7 in Quirks-mode - add 'width' to NORTH/SOUTH panes - * Normally these panes have only 'left' & 'right' positions so pane auto-sizes - * ALSO required when pane is an IFRAME because will NOT default to 'full width' - * TODO: Can I use width:100% for a north/south iframe? - * TODO: Sounds like a job for $P.outerWidth( sC.innerWidth ) SETTER METHOD - */ - if (pane === "center") { // finished processing midPanes - var fix = browser.isIE6 || !browser.boxModel; - if ($Ps.north && (fix || state.north.tagName=="IFRAME")) - $Ps.north.css("width", cssW($Ps.north, sC.innerWidth)); - if ($Ps.south && (fix || state.south.tagName=="IFRAME")) - $Ps.south.css("width", cssW($Ps.south, sC.innerWidth)); - } - - // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized - if (!skipCallback && state.initialized) - _runCallbacks("onresize_end", pane); - }); - } - - - /** - * @see window.onresize(), callbacks or custom code - */ -, resizeAll = function (evt) { - // stopPropagation if called by trigger("layoutdestroy") - use evtPane utility - evtPane(evt); - - if (!state.initialized) { - _initLayoutElements(); - return; // no need to resize since we just initialized! 
- } - var oldW = sC.innerWidth - , oldH = sC.innerHeight - ; - // cannot size layout when 'container' is hidden or collapsed - if (!$N.is(":visible") ) return; - $.extend(state.container, elDims( $N )); // UPDATE container dimensions - if (!sC.outerHeight) return; - - // onresizeall_start will CANCEL resizing if returns false - // state.container has already been set, so user can access this info for calcuations - if (false === _runCallbacks("onresizeall_start")) return false; - - var // see if container is now 'smaller' than before - shrunkH = (sC.innerHeight < oldH) - , shrunkW = (sC.innerWidth < oldW) - , $P, o, s, dir - ; - // NOTE special order for sizing: S-N-E-W - $.each(["south","north","east","west"], function (i, pane) { - if (!$Ps[pane]) return; // no pane - SKIP - s = state[pane]; - o = options[pane]; - dir = _c[pane].dir; - - if (o.autoResize && s.size != o.size) // resize pane to original size set in options - sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation - else { - setSizeLimits(pane); - makePaneFit(pane, false, true, true); // true=skipCallback/forceResize - } - }); - - sizeMidPanes("", true, true); // true=skipCallback, true=forceResize - sizeHandles(); // reposition the toggler elements - - // trigger all individual pane callbacks AFTER layout has finished resizing - o = options; // reuse alias - $.each(_c.allPanes, function (i, pane) { - $P = $Ps[pane]; - if (!$P) return; // SKIP - if (state[pane].isVisible) // undefined for non-existent panes - _runCallbacks("onresize_end", pane); // callback - if exists - }); - - _runCallbacks("onresizeall_end"); - //_triggerLayoutEvent(pane, 'resizeall'); - } - - /** - * Whenever a pane resizes or opens that has a nested layout, trigger resizeAll - * - * @param {string|Object} evt_or_pane The pane just resized or opened - */ -, resizeChildLayout = function (evt_or_pane) { - var pane = evtPane.call(this, evt_or_pane); - if (!options[pane].resizeChildLayout) return; - var $P = $Ps[pane] - , $C = $Cs[pane] - , d = "layout" - , P = Instance[pane] - , L = children[pane] - ; - // user may have manually set EITHER instance pointer, so handle that - if (P.child && !L) { - // have to reverse the pointers! - var el = P.child.container; - L = children[pane] = (el ? el.data(d) : 0) || null; // set pointer _directly_ to layout instance - } - - // if a layout-pointer exists, see if child has been destroyed - if (L && L.destroyed) - L = children[pane] = null; // clear child pointers - // no child layout pointer is set - see if there is a child layout NOW - if (!L) L = children[pane] = $P.data(d) || ($C ? $C.data(d) : 0) || null; // set/update child pointers - - // ALWAYS refresh the pane.child alias - P.child = children[pane]; - - if (L) L.resizeAll(); - } - - - /** - * IF pane has a content-div, then resize all elements inside pane to fit pane-height - * - * @param {string|Object} evt_or_panes The pane(s) being resized - * @param {boolean=} [remeasure=false] Should the content (header/footer) be remeasured? - */ -, sizeContent = function (evt_or_panes, remeasure) { - if (!isInitialized()) return; - - var panes = evtPane.call(this, evt_or_panes); - panes = panes ? 
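
A sketch of the nested-layout case handled by resizeChildLayout above; the option value and child options are illustrative. With the option enabled, resizing or opening the parent pane cascades a resizeAll() into the layout found inside it.

    var outer = $("#container").layout({ west__resizeChildLayout: true });
    var inner = outer.panes.west.layout({ /* child layout options */ });
    // resizeAll() can also be called directly, e.g. after un-hiding the container:
    outer.resizeAll();
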
panes.split(",") : _c.allPanes; - - $.each(panes, function (idx, pane) { - var - $P = $Ps[pane] - , $C = $Cs[pane] - , o = options[pane] - , s = state[pane] - , m = s.content // m = measurements - ; - if (!$P || !$C || !$P.is(":visible")) return true; // NOT VISIBLE - skip - - // if content-element was REMOVED, update OR remove the pointer - if (!$C.length) { - initContent(pane, false); // false = do NOT sizeContent() - already there! - if (!$C) return; // no replacement element found - pointer have been removed - } - - // onsizecontent_start will CANCEL resizing if returns false - if (false === _runCallbacks("onsizecontent_start", pane)) return; - - // skip re-measuring offsets if live-resizing - if ((!s.isMoving && !s.isResizing) || o.liveContentResizing || remeasure || m.top == undefined) { - _measure(); - // if any footers are below pane-bottom, they may not measure correctly, - // so allow pane overflow and re-measure - if (m.hiddenFooters > 0 && $P.css("overflow") === "hidden") { - $P.css("overflow", "visible"); - _measure(); // remeasure while overflowing - $P.css("overflow", "hidden"); - } - } - // NOTE: spaceAbove/Below *includes* the pane paddingTop/Bottom, but not pane.borders - var newH = s.innerHeight - (m.spaceAbove - s.css.paddingTop) - (m.spaceBelow - s.css.paddingBottom); - - if (!$C.is(":visible") || m.height != newH) { - // size the Content element to fit new pane-size - will autoHide if not enough room - setOuterHeight($C, newH, true); // true=autoHide - m.height = newH; // save new height - }; - - if (state.initialized) - _runCallbacks("onsizecontent_end", pane); - - function _below ($E) { - return max(s.css.paddingBottom, (parseInt($E.css("marginBottom"), 10) || 0)); - }; - - function _measure () { - var - ignore = options[pane].contentIgnoreSelector - , $Fs = $C.nextAll().not(ignore || ':lt(0)') // not :lt(0) = ALL - , $Fs_vis = $Fs.filter(':visible') - , $F = $Fs_vis.filter(':last') - ; - m = { - top: $C[0].offsetTop - , height: $C.outerHeight() - , numFooters: $Fs.length - , hiddenFooters: $Fs.length - $Fs_vis.length - , spaceBelow: 0 // correct if no content footer ($E) - } - m.spaceAbove = m.top; // just for state - not used in calc - m.bottom = m.top + m.height; - if ($F.length) - //spaceBelow = (LastFooter.top + LastFooter.height) [footerBottom] - Content.bottom + max(LastFooter.marginBottom, pane.paddingBotom) - m.spaceBelow = ($F[0].offsetTop + $F.outerHeight()) - m.bottom + _below($F); - else // no footer - check marginBottom on Content element itself - m.spaceBelow = _below($C); - }; - }); - } - - - /** - * Called every time a pane is opened, closed, or resized to slide the togglers to 'center' and adjust their length if necessary - * - * @see initHandles(), open(), close(), resizeAll() - * @param {string|Object} evt_or_panes The pane(s) being resized - */ -, sizeHandles = function (evt_or_panes) { - var panes = evtPane.call(this, evt_or_panes) - panes = panes ? panes.split(",") : _c.borderPanes; - - $.each(panes, function (i, pane) { - var - o = options[pane] - , s = state[pane] - , $P = $Ps[pane] - , $R = $Rs[pane] - , $T = $Ts[pane] - , $TC - ; - if (!$P || !$R) return; - - var - dir = _c[pane].dir - , _state = (s.isClosed ? 
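
A sketch of the content-measuring hooks above; the selector is an illustrative value for contentIgnoreSelector, which tells _measure() which siblings of the content-div to skip when computing header/footer space.

    var myLayout = $("#container").layout({
        center__contentIgnoreSelector: ".skip-me"   // illustrative; these siblings are ignored when measuring
    });
    // after injecting new header/footer markup into the pane, force a re-measure:
    myLayout.sizeContent("center", true);           // true = remeasure offsets
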
"_closed" : "_open") - , spacing = o["spacing"+ _state] - , togAlign = o["togglerAlign"+ _state] - , togLen = o["togglerLength"+ _state] - , paneLen - , left - , offset - , CSS = {} - ; - - if (spacing === 0) { - $R.hide(); - return; - } - else if (!s.noRoom && !s.isHidden) // skip if resizer was hidden for any reason - $R.show(); // in case was previously hidden - - // Resizer Bar is ALWAYS same width/height of pane it is attached to - if (dir === "horz") { // north/south - //paneLen = $P.outerWidth(); // s.outerWidth || - paneLen = sC.innerWidth; // handle offscreen-panes - s.resizerLength = paneLen; - left = $.layout.cssNum($P, "left") - $R.css({ - width: cssW($R, paneLen) // account for borders & padding - , height: cssH($R, spacing) // ditto - , left: left > -9999 ? left : sC.insetLeft // handle offscreen-panes - }); - } - else { // east/west - paneLen = $P.outerHeight(); // s.outerHeight || - s.resizerLength = paneLen; - $R.css({ - height: cssH($R, paneLen) // account for borders & padding - , width: cssW($R, spacing) // ditto - , top: sC.insetTop + getPaneSize("north", true) // TODO: what if no North pane? - //, top: $.layout.cssNum($Ps["center"], "top") - }); - } - - // remove hover classes - removeHover( o, $R ); - - if ($T) { - if (togLen === 0 || (s.isSliding && o.hideTogglerOnSlide)) { - $T.hide(); // always HIDE the toggler when 'sliding' - return; - } - else - $T.show(); // in case was previously hidden - - if (!(togLen > 0) || togLen === "100%" || togLen > paneLen) { - togLen = paneLen; - offset = 0; - } - else { // calculate 'offset' based on options.PANE.togglerAlign_open/closed - if (isStr(togAlign)) { - switch (togAlign) { - case "top": - case "left": offset = 0; - break; - case "bottom": - case "right": offset = paneLen - togLen; - break; - case "middle": - case "center": - default: offset = round((paneLen - togLen) / 2); // 'default' catches typos - } - } - else { // togAlign = number - var x = parseInt(togAlign, 10); // - if (togAlign >= 0) offset = x; - else offset = paneLen - togLen + x; // NOTE: x is negative! 
- } - } - - if (dir === "horz") { // north/south - var width = cssW($T, togLen); - $T.css({ - width: width // account for borders & padding - , height: cssH($T, spacing) // ditto - , left: offset // TODO: VERIFY that toggler positions correctly for ALL values - , top: 0 - }); - // CENTER the toggler content SPAN - $T.children(".content").each(function(){ - $TC = $(this); - $TC.css("marginLeft", round((width-$TC.outerWidth())/2)); // could be negative - }); - } - else { // east/west - var height = cssH($T, togLen); - $T.css({ - height: height // account for borders & padding - , width: cssW($T, spacing) // ditto - , top: offset // POSITION the toggler - , left: 0 - }); - // CENTER the toggler content SPAN - $T.children(".content").each(function(){ - $TC = $(this); - $TC.css("marginTop", round((height-$TC.outerHeight())/2)); // could be negative - }); - } - - // remove ALL hover classes - removeHover( 0, $T ); - } - - // DONE measuring and sizing this resizer/toggler, so can be 'hidden' now - if (!state.initialized && (o.initHidden || s.noRoom)) { - $R.hide(); - if ($T) $T.hide(); - } - }); - } - - - /** - * @param {string|Object} evt_or_pane - */ -, enableClosable = function (evt_or_pane) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) - , $T = $Ts[pane] - , o = options[pane] - ; - if (!$T) return; - o.closable = true; - $T .bind("click."+ sID, function(evt){ evt.stopPropagation(); toggle(pane); }) - .css("visibility", "visible") - .css("cursor", "pointer") - .attr("title", state[pane].isClosed ? o.tips.Open : o.tips.Close) // may be blank - .show(); - } - /** - * @param {string|Object} evt_or_pane - * @param {boolean=} [hide=false] - */ -, disableClosable = function (evt_or_pane, hide) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) - , $T = $Ts[pane] - ; - if (!$T) return; - options[pane].closable = false; - // is closable is disable, then pane MUST be open! - if (state[pane].isClosed) open(pane, false, true); - $T .unbind("."+ sID) - .css("visibility", hide ? 
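
The resizer and toggler geometry computed by sizeHandles() above comes from a handful of pane options; a sketch with illustrative values (the _open and _closed variants apply to the corresponding pane state).

    var myLayout = $("#container").layout({
        west__spacing_open:       6         // resizer-bar thickness while the pane is open
    ,   west__spacing_closed:     10        // a thicker bar when closed makes an easier click target
    ,   west__togglerLength_open: 50        // px; 0 hides the toggler, "100%" spans the full pane length
    ,   west__togglerAlign_open:  "center"  // "top"/"bottom"/"left"/"right"/"center", or a pixel offset
    });
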
"hidden" : "visible") // instead of hide(), which creates logic issues - .css("cursor", "default") - .attr("title", ""); - } - - - /** - * @param {string|Object} evt_or_pane - */ -, enableSlidable = function (evt_or_pane) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) - , $R = $Rs[pane] - ; - if (!$R || !$R.data('draggable')) return; - options[pane].slidable = true; - if (state[pane].isClosed) - bindStartSlidingEvent(pane, true); - } - /** - * @param {string|Object} evt_or_pane - */ -, disableSlidable = function (evt_or_pane) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) - , $R = $Rs[pane] - ; - if (!$R) return; - options[pane].slidable = false; - if (state[pane].isSliding) - close(pane, false, true); - else { - bindStartSlidingEvent(pane, false); - $R .css("cursor", "default") - .attr("title", ""); - removeHover(null, $R[0]); // in case currently hovered - } - } - - - /** - * @param {string|Object} evt_or_pane - */ -, enableResizable = function (evt_or_pane) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) - , $R = $Rs[pane] - , o = options[pane] - ; - if (!$R || !$R.data('draggable')) return; - o.resizable = true; - $R.draggable("enable"); - if (!state[pane].isClosed) - $R .css("cursor", o.resizerCursor) - .attr("title", o.tips.Resize); - } - /** - * @param {string|Object} evt_or_pane - */ -, disableResizable = function (evt_or_pane) { - if (!isInitialized()) return; - var pane = evtPane.call(this, evt_or_pane) - , $R = $Rs[pane] - ; - if (!$R || !$R.data('draggable')) return; - options[pane].resizable = false; - $R .draggable("disable") - .css("cursor", "default") - .attr("title", ""); - removeHover(null, $R[0]); // in case currently hovered - } - - - /** - * Move a pane from source-side (eg, west) to target-side (eg, east) - * If pane exists on target-side, move that to source-side, ie, 'swap' the panes - * - * @param {string|Object} evt_or_pane1 The pane/edge being swapped - * @param {string} pane2 ditto - */ -, swapPanes = function (evt_or_pane1, pane2) { - if (!isInitialized()) return; - var pane1 = evtPane.call(this, evt_or_pane1); - // change state.edge NOW so callbacks can know where pane is headed... - state[pane1].edge = pane2; - state[pane2].edge = pane1; - // run these even if NOT state.initialized - if (false === _runCallbacks("onswap_start", pane1) - || false === _runCallbacks("onswap_start", pane2) - ) { - state[pane1].edge = pane1; // reset - state[pane2].edge = pane2; - return; - } - - var - oPane1 = copy( pane1 ) - , oPane2 = copy( pane2 ) - , sizes = {} - ; - sizes[pane1] = oPane1 ? oPane1.state.size : 0; - sizes[pane2] = oPane2 ? 
oPane2.state.size : 0; - - // clear pointers & state - $Ps[pane1] = false; - $Ps[pane2] = false; - state[pane1] = {}; - state[pane2] = {}; - - // ALWAYS remove the resizer & toggler elements - if ($Ts[pane1]) $Ts[pane1].remove(); - if ($Ts[pane2]) $Ts[pane2].remove(); - if ($Rs[pane1]) $Rs[pane1].remove(); - if ($Rs[pane2]) $Rs[pane2].remove(); - $Rs[pane1] = $Rs[pane2] = $Ts[pane1] = $Ts[pane2] = false; - - // transfer element pointers and data to NEW Layout keys - move( oPane1, pane2 ); - move( oPane2, pane1 ); - - // cleanup objects - oPane1 = oPane2 = sizes = null; - - // make panes 'visible' again - if ($Ps[pane1]) $Ps[pane1].css(_c.visible); - if ($Ps[pane2]) $Ps[pane2].css(_c.visible); - - // fix any size discrepancies caused by swap - resizeAll(); - - // run these even if NOT state.initialized - _runCallbacks("onswap_end", pane1); - _runCallbacks("onswap_end", pane2); - - return; - - function copy (n) { // n = pane - var - $P = $Ps[n] - , $C = $Cs[n] - ; - return !$P ? false : { - pane: n - , P: $P ? $P[0] : false - , C: $C ? $C[0] : false - , state: $.extend(true, {}, state[n]) - , options: $.extend(true, {}, options[n]) - } - }; - - function move (oPane, pane) { - if (!oPane) return; - var - P = oPane.P - , C = oPane.C - , oldPane = oPane.pane - , c = _c[pane] - , side = c.side.toLowerCase() - , inset = "inset"+ c.side - // save pane-options that should be retained - , s = $.extend(true, {}, state[pane]) - , o = options[pane] - // RETAIN side-specific FX Settings - more below - , fx = { resizerCursor: o.resizerCursor } - , re, size, pos - ; - $.each("fxName,fxSpeed,fxSettings".split(","), function (i, k) { - fx[k +"_open"] = o[k +"_open"]; - fx[k +"_close"] = o[k +"_close"]; - fx[k +"_size"] = o[k +"_size"]; - }); - - // update object pointers and attributes - $Ps[pane] = $(P) - .data({ - layoutPane: Instance[pane] // NEW pointer to pane-alias-object - , layoutEdge: pane - }) - .css(_c.hidden) - .css(c.cssReq) - ; - $Cs[pane] = C ? $(C) : false; - - // set options and state - options[pane] = $.extend(true, {}, oPane.options, fx); - state[pane] = $.extend(true, {}, oPane.state); - - // change classNames on the pane, eg: ui-layout-pane-east ==> ui-layout-pane-west - re = new RegExp(o.paneClass +"-"+ oldPane, "g"); - P.className = P.className.replace(re, o.paneClass +"-"+ pane); - - // ALWAYS regenerate the resizer & toggler elements - initHandles(pane); // create the required resizer & toggler - - // if moving to different orientation, then keep 'target' pane size - if (c.dir != _c[oldPane].dir) { - size = sizes[pane] || 0; - setSizeLimits(pane); // update pane-state - size = max(size, state[pane].minSize); - // use manualSizePane to disable autoResize - not useful after panes are swapped - manualSizePane(pane, size, true, true); // true/true = skipCallback/noAnimation - } - else // move the resizer here - $Rs[pane].css(side, sC[inset] + (state[pane].isVisible ? 
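
A few one-line sketches of the runtime controls defined above, assuming myLayout is an initialized Instance; the pane names are illustrative.

    myLayout.disableResizable("south");   // lock the south pane at its current size
    myLayout.enableSlidable("west");      // allow a closed west pane to slide open again
    myLayout.disableClosable("east");     // also force-opens the pane if it is currently closed
    myLayout.swapPanes("west", "east");   // swap the two edge panes (elements, options and state)
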
getPaneSize(pane) : 0)); - - - // ADD CLASSNAMES & SLIDE-BINDINGS - if (oPane.state.isVisible && !s.isVisible) - setAsOpen(pane, true); // true = skipCallback - else { - setAsClosed(pane); - bindStartSlidingEvent(pane, true); // will enable events IF option is set - } - - // DESTROY the object - oPane = null; - }; - } - - - /** - * INTERNAL method to sync pin-buttons when pane is opened or closed - * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes - * - * @see open(), setAsOpen(), setAsClosed() - * @param {string} pane These are the params returned to callbacks by layout() - * @param {boolean} doPin True means set the pin 'down', False means 'up' - */ -, syncPinBtns = function (pane, doPin) { - if ($.layout.plugins.buttons) - $.each(state[pane].pins, function (i, selector) { - $.layout.buttons.setPinState(Instance, $(selector), pane, doPin); - }); - } - -; // END var DECLARATIONS - - /** - * Capture keys when enableCursorHotkey - toggle pane if hotkey pressed - * - * @see document.keydown() - */ - function keyDown (evt) { - if (!evt) return true; - var code = evt.keyCode; - if (code < 33) return true; // ignore special keys: ENTER, TAB, etc - - var - PANE = { - 38: "north" // Up Cursor - $.ui.keyCode.UP - , 40: "south" // Down Cursor - $.ui.keyCode.DOWN - , 37: "west" // Left Cursor - $.ui.keyCode.LEFT - , 39: "east" // Right Cursor - $.ui.keyCode.RIGHT - } - , ALT = evt.altKey // no worky! - , SHIFT = evt.shiftKey - , CTRL = evt.ctrlKey - , CURSOR = (CTRL && code >= 37 && code <= 40) - , o, k, m, pane - ; - - if (CURSOR && options[PANE[code]].enableCursorHotkey) // valid cursor-hotkey - pane = PANE[code]; - else if (CTRL || SHIFT) // check to see if this matches a custom-hotkey - $.each(_c.borderPanes, function (i, p) { // loop each pane to check its hotkey - o = options[p]; - k = o.customHotkey; - m = o.customHotkeyModifier; // if missing or invalid, treated as "CTRL+SHIFT" - if ((SHIFT && m=="SHIFT") || (CTRL && m=="CTRL") || (CTRL && SHIFT)) { // Modifier matches - if (k && code === (isNaN(k) || k <= 9 ? k.toUpperCase().charCodeAt(0) : k)) { // Key matches - pane = p; - return false; // BREAK - } - } - }); - - // validate pane - if (!pane || !$Ps[pane] || !options[pane].closable || state[pane].isHidden) - return true; - - toggle(pane); - - evt.stopPropagation(); - evt.returnValue = false; // CANCEL key - return false; - }; - - -/* - * ###################################### - * UTILITY METHODS - * called externally or by initButtons - * ###################################### - */ - - /** - * Change/reset a pane overflow setting & zIndex to allow popups/drop-downs to work - * - * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event - */ - function allowOverflow (el) { - if (!isInitialized()) return; - if (this && this.tagName) el = this; // BOUND to element - var $P; - if (isStr(el)) - $P = $Ps[el]; - else if ($(el).data("layoutRole")) - $P = $(el); - else - $(el).parents().each(function(){ - if ($(this).data("layoutRole")) { - $P = $(this); - return false; // BREAK - } - }); - if (!$P || !$P.length) return; // INVALID - - var - pane = $P.data("layoutEdge") - , s = state[pane] - ; - - // if pane is already raised, then reset it before doing it again! 
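
A sketch of the options the keyDown() handler above reads; the key choices are illustrative. CTRL plus an arrow key toggles a pane when enableCursorHotkey is set, and customHotkey adds an extra binding for that pane.

    var myLayout = $("#container").layout({
        west__enableCursorHotkey:    true    // CTRL+LeftArrow toggles the west pane
    ,   south__customHotkey:         "S"     // a letter (or a numeric keyCode)
    ,   south__customHotkeyModifier: "CTRL"  // "CTRL", "SHIFT"; anything else is treated as CTRL+SHIFT
    });
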
- // this would happen if allowOverflow is attached to BOTH the pane and an element - if (s.cssSaved) - resetOverflow(pane); // reset previous CSS before continuing - - // if pane is raised by sliding or resizing, or its closed, then abort - if (s.isSliding || s.isResizing || s.isClosed) { - s.cssSaved = false; - return; - } - - var - newCSS = { zIndex: (options.zIndexes.resizer_normal + 1) } - , curCSS = {} - , of = $P.css("overflow") - , ofX = $P.css("overflowX") - , ofY = $P.css("overflowY") - ; - // determine which, if any, overflow settings need to be changed - if (of != "visible") { - curCSS.overflow = of; - newCSS.overflow = "visible"; - } - if (ofX && !ofX.match(/(visible|auto)/)) { - curCSS.overflowX = ofX; - newCSS.overflowX = "visible"; - } - if (ofY && !ofY.match(/(visible|auto)/)) { - curCSS.overflowY = ofX; - newCSS.overflowY = "visible"; - } - - // save the current overflow settings - even if blank! - s.cssSaved = curCSS; - - // apply new CSS to raise zIndex and, if necessary, make overflow 'visible' - $P.css( newCSS ); - - // make sure the zIndex of all other panes is normal - $.each(_c.allPanes, function(i, p) { - if (p != pane) resetOverflow(p); - }); - - }; - /** - * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event - */ - function resetOverflow (el) { - if (!isInitialized()) return; - if (this && this.tagName) el = this; // BOUND to element - var $P; - if (isStr(el)) - $P = $Ps[el]; - else if ($(el).data("layoutRole")) - $P = $(el); - else - $(el).parents().each(function(){ - if ($(this).data("layoutRole")) { - $P = $(this); - return false; // BREAK - } - }); - if (!$P || !$P.length) return; // INVALID - - var - pane = $P.data("layoutEdge") - , s = state[pane] - , CSS = s.cssSaved || {} - ; - // reset the zIndex - if (!s.isSliding && !s.isResizing) - $P.css("zIndex", options.zIndexes.pane_normal); - - // reset Overflow - if necessary - $P.css( CSS ); - - // clear var - s.cssSaved = false; - }; - -/* - * ##################### - * CREATE/RETURN LAYOUT - * ##################### - */ - - // validate that container exists - var $N = $(this).eq(0); // FIRST matching Container element - if (!$N.length) { - return _log( options.errors.containerMissing ); - }; - - // Users retrieve Instance of a layout with: $N.layout() OR $N.data("layout") - // return the Instance-pointer if layout has already been initialized - if ($N.data("layoutContainer") && $N.data("layout")) - return $N.data("layout"); // cached pointer - - // init global vars - var - $Ps = {} // Panes x5 - set in initPanes() - , $Cs = {} // Content x5 - set in initPanes() - , $Rs = {} // Resizers x4 - set in initHandles() - , $Ts = {} // Togglers x4 - set in initHandles() - , $Ms = $([]) // Masks - up to 2 masks per pane (IFRAME + DIV) - // aliases for code brevity - , sC = state.container // alias for easy access to 'container dimensions' - , sID = state.id // alias for unique layout ID/namespace - eg: "layout435" - ; - - // create Instance object to expose data & option Properties, and primary action Methods - var Instance = { - // layout data - options: options // property - options hash - , state: state // property - dimensions hash - // object pointers - , container: $N // property - object pointers for layout container - , panes: $Ps // property - object pointers for ALL Panes: panes.north, panes.center - , contents: $Cs // property - object pointers for ALL Content: contents.north, contents.center - , resizers: $Rs // property - object pointers for ALL Resizers, eg: 
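
A sketch of the overflow helpers above, which raise a pane's zIndex and make its overflow visible so drop-downs and menus can escape the pane edges; the selector is illustrative. They accept a pane name, or can be bound directly to events on an element inside a pane.

    myLayout.allowOverflow("north");   // raise the north pane while a menu is open
    myLayout.resetOverflow("north");   // restore the saved overflow/zIndex afterwards
    // bound form; inside the handlers 'this' is the hovered element:
    $("#menuBar").hover(myLayout.allowOverflow, myLayout.resetOverflow);
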
resizers.north - , togglers: $Ts // property - object pointers for ALL Togglers, eg: togglers.north - // border-pane open/close - , hide: hide // method - ditto - , show: show // method - ditto - , toggle: toggle // method - pass a 'pane' ("north", "west", etc) - , open: open // method - ditto - , close: close // method - ditto - , slideOpen: slideOpen // method - ditto - , slideClose: slideClose // method - ditto - , slideToggle: slideToggle // method - ditto - // pane actions - , setSizeLimits: setSizeLimits // method - pass a 'pane' - update state min/max data - , _sizePane: sizePane // method -intended for user by plugins only! - , sizePane: manualSizePane // method - pass a 'pane' AND an 'outer-size' in pixels or percent, or 'auto' - , sizeContent: sizeContent // method - pass a 'pane' - , swapPanes: swapPanes // method - pass TWO 'panes' - will swap them - , showMasks: showMasks // method - pass a 'pane' OR list of panes - default = all panes with mask option set - , hideMasks: hideMasks // method - ditto' - // pane element methods - , initContent: initContent // method - ditto - , addPane: addPane // method - pass a 'pane' - , removePane: removePane // method - pass a 'pane' to remove from layout, add 'true' to delete the pane-elem - , createChildLayout: createChildLayout// method - pass a 'pane' and (optional) layout-options (OVERRIDES options[pane].childOptions - // special pane option setting - , enableClosable: enableClosable // method - pass a 'pane' - , disableClosable: disableClosable // method - ditto - , enableSlidable: enableSlidable // method - ditto - , disableSlidable: disableSlidable // method - ditto - , enableResizable: enableResizable // method - ditto - , disableResizable: disableResizable// method - ditto - // utility methods for panes - , allowOverflow: allowOverflow // utility - pass calling element (this) - , resetOverflow: resetOverflow // utility - ditto - // layout control - , destroy: destroy // method - no parameters - , initPanes: isInitialized // method - no parameters - , resizeAll: resizeAll // method - no parameters - // callback triggering - , runCallbacks: _runCallbacks // method - pass evtName & pane (if a pane-event), eg: trigger("onopen", "west") - // alias collections of options, state and children - created in addPane and extended elsewhere - , hasParentLayout: false // set by initContainer() - , children: children // pointers to child-layouts, eg: Instance.children["west"] - , north: false // alias group: { name: pane, pane: $Ps[pane], options: options[pane], state: state[pane], child: children[pane] } - , south: false // ditto - , west: false // ditto - , east: false // ditto - , center: false // ditto - }; - - // create the border layout NOW - if (_create() === 'cancel') // onload_start callback returned false to CANCEL layout creation - return null; - else // true OR false -- if layout-elements did NOT init (hidden or do not exist), can auto-init later - return Instance; // return the Instance object - -} - - -/* OLD versions of jQuery only set $.support.boxModel after page is loaded - * so if this is IE, use support.boxModel to test for quirks-mode (ONLY IE changes boxModel). 
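
A sketch of how the Instance object assembled above is usually obtained and used; the selector and option are illustrative.

    var myLayout = $("body").layout({ west__size: 200 });   // creates the layout and returns its Instance
    // the Instance is cached on the container, so later code can simply do:
    $("body").data("layout").toggle("west");
    $("body").layout().open("south");                       // re-calling .layout() returns the cached pointer
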
- */ -$(function(){ - var b = $.layout.browser; - if (b.msie) b.boxModel = $.support.boxModel; -}); - - -/** - * jquery.layout.state 1.0 - * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $ - * - * Copyright (c) 2010 - * Kevin Dalman (http://allpro.net) - * - * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html) - * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses. - * - * @dependancies: UI Layout 1.3.0.rc30.1 or higher - * @dependancies: $.ui.cookie (above) - * - * @support: http://groups.google.com/group/jquery-ui-layout - */ -/* - * State-management options stored in options.stateManagement, which includes a .cookie hash - * Default options saves ALL KEYS for ALL PANES, ie: pane.size, pane.isClosed, pane.isHidden - * - * // STATE/COOKIE OPTIONS - * @example $(el).layout({ - stateManagement: { - enabled: true - , stateKeys: "east.size,west.size,east.isClosed,west.isClosed" - , cookie: { name: "appLayout", path: "/" } - } - }) - * @example $(el).layout({ stateManagement__enabled: true }) // enable auto-state-management using cookies - * @example $(el).layout({ stateManagement__cookie: { name: "appLayout", path: "/" } }) - * @example $(el).layout({ stateManagement__cookie__name: "appLayout", stateManagement__cookie__path: "/" }) - * - * // STATE/COOKIE METHODS - * @example myLayout.saveCookie( "west.isClosed,north.size,south.isHidden", {expires: 7} ); - * @example myLayout.loadCookie(); - * @example myLayout.deleteCookie(); - * @example var JSON = myLayout.readState(); // CURRENT Layout State - * @example var JSON = myLayout.readCookie(); // SAVED Layout State (from cookie) - * @example var JSON = myLayout.state.stateData; // LAST LOADED Layout State (cookie saved in layout.state hash) - * - * CUSTOM STATE-MANAGEMENT (eg, saved in a database) - * @example var JSON = myLayout.readState( "west.isClosed,north.size,south.isHidden" ); - * @example myLayout.loadState( JSON ); - */ - -/** - * UI COOKIE UTILITY - * - * A $.cookie OR $.ui.cookie namespace *should be standard*, but until then... - * This creates $.ui.cookie so Layout does not need the cookie.jquery.js plugin - * NOTE: This utility is REQUIRED by the layout.state plugin - * - * Cookie methods in Layout are created as part of State Management - */ -if (!$.ui) $.ui = {}; -$.ui.cookie = { - - // cookieEnabled is not in DOM specs, but DOES works in all browsers,including IE6 - acceptsCookies: !!navigator.cookieEnabled - -, read: function (name) { - var - c = document.cookie - , cs = c ? c.split(';') : [] - , pair // loop var - ; - for (var i=0, n=cs.length; i < n; i++) { - pair = $.trim(cs[i]).split('='); // name=value pair - if (pair[0] == name) // found the layout cookie - return decodeURIComponent(pair[1]); - - } - return null; - } - -, write: function (name, val, cookieOpts) { - var - params = '' - , date = '' - , clear = false - , o = cookieOpts || {} - , x = o.expires - ; - if (x && x.toUTCString) - date = x; - else if (x === null || typeof x === 'number') { - date = new Date(); - if (x > 0) - date.setDate(date.getDate() + x); - else { - date.setFullYear(1970); - clear = true; - } - } - if (date) params += ';expires='+ date.toUTCString(); - if (o.path) params += ';path='+ o.path; - if (o.domain) params += ';domain='+ o.domain; - if (o.secure) params += ';secure'; - document.cookie = name +'='+ (clear ? 
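
A sketch of the standalone cookie utility defined above (also aliased below to $.cookie when that plugin is not loaded); the cookie name and value are illustrative.

    $.ui.cookie.write("appLayout", "some-value", { expires: 7, path: "/" });   // keep for 7 days
    var saved = $.ui.cookie.read("appLayout");                                 // null if not found
    $.ui.cookie.clear("appLayout");                                            // writes an already-expired cookie
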
"" : encodeURIComponent( val )) + params; // write or clear cookie - } - -, clear: function (name) { - $.ui.cookie.write(name, '', {expires: -1}); - } - -}; -// if cookie.jquery.js is not loaded, create an alias to replicate it -// this may be useful to other plugins or code dependent on that plugin -if (!$.cookie) $.cookie = function (k, v, o) { - var C = $.ui.cookie; - if (v === null) - C.clear(k); - else if (v === undefined) - return C.read(k); - else - C.write(k, v, o); -}; - - -// tell Layout that the state plugin is available -$.layout.plugins.stateManagement = true; - -// Add State-Management options to layout.defaults -$.layout.config.optionRootKeys.push("stateManagement"); -$.layout.defaults.stateManagement = { - enabled: false // true = enable state-management, even if not using cookies -, autoSave: true // Save a state-cookie when page exits? -, autoLoad: true // Load the state-cookie when Layout inits? - // List state-data to save - must be pane-specific -, stateKeys: "north.size,south.size,east.size,west.size,"+ - "north.isClosed,south.isClosed,east.isClosed,west.isClosed,"+ - "north.isHidden,south.isHidden,east.isHidden,west.isHidden" -, cookie: { - name: "" // If not specified, will use Layout.name, else just "Layout" - , domain: "" // blank = current domain - , path: "" // blank = current page, '/' = entire website - , expires: "" // 'days' to keep cookie - leave blank for 'session cookie' - , secure: false - } -}; -// Set stateManagement as a layout-option, NOT a pane-option -$.layout.optionsMap.layout.push("stateManagement"); - -/* - * State Management methods - */ -$.layout.state = { - - /** - * Get the current layout state and save it to a cookie - * - * myLayout.saveCookie( keys, cookieOpts ) - * - * @param {Object} inst - * @param {(string|Array)=} keys - * @param {Object=} cookieOpts - */ - saveCookie: function (inst, keys, cookieOpts) { - var o = inst.options - , oS = o.stateManagement - , oC = $.extend(true, {}, oS.cookie, cookieOpts || null) - , data = inst.state.stateData = inst.readState( keys || oS.stateKeys ) // read current panes-state - ; - $.ui.cookie.write( oC.name || o.name || "Layout", $.layout.state.encodeJSON(data), oC ); - return $.extend(true, {}, data); // return COPY of state.stateData data - } - - /** - * Remove the state cookie - * - * @param {Object} inst - */ -, deleteCookie: function (inst) { - var o = inst.options; - $.ui.cookie.clear( o.stateManagement.cookie.name || o.name || "Layout" ); - } - - /** - * Read & return data from the cookie - as JSON - * - * @param {Object} inst - */ -, readCookie: function (inst) { - var o = inst.options; - var c = $.ui.cookie.read( o.stateManagement.cookie.name || o.name || "Layout" ); - // convert cookie string back to a hash and return it - return c ? 
$.layout.state.decodeJSON(c) : {}; - } - - /** - * Get data from the cookie and USE IT to loadState - * - * @param {Object} inst - */ -, loadCookie: function (inst) { - var c = $.layout.state.readCookie(inst); // READ the cookie - if (c) { - inst.state.stateData = $.extend(true, {}, c); // SET state.stateData - inst.loadState(c); // LOAD the retrieved state - } - return c; - } - - /** - * Update layout options from the cookie, if one exists - * - * @param {Object} inst - * @param {Object=} stateData - * @param {boolean=} animate - */ -, loadState: function (inst, stateData, animate) { - stateData = $.layout.transformData( stateData ); // panes = default subkey - if ($.isEmptyObject( stateData )) return; - $.extend(true, inst.options, stateData); // update layout options - // if layout has already been initialized, then UPDATE layout state - if (inst.state.initialized) { - var pane, vis, o, s, h, c - , noAnimate = (animate===false) - ; - $.each($.layout.config.borderPanes, function (idx, pane) { - state = inst.state[pane]; - o = stateData[ pane ]; - if (typeof o != 'object') return; // no key, continue - s = o.size; - c = o.initClosed; - h = o.initHidden; - vis = state.isVisible; - // resize BEFORE opening - if (!vis) - inst.sizePane(pane, s, false, false); - if (h === true) inst.hide(pane, noAnimate); - else if (c === false) inst.open (pane, false, noAnimate); - else if (c === true) inst.close(pane, false, noAnimate); - else if (h === false) inst.show (pane, false, noAnimate); - // resize AFTER any other actions - if (vis) - inst.sizePane(pane, s, false, noAnimate); // animate resize if option passed - }); - }; - } - - /** - * Get the *current layout state* and return it as a hash - * - * @param {Object=} inst - * @param {(string|Array)=} keys - */ -, readState: function (inst, keys) { - var - data = {} - , alt = { isClosed: 'initClosed', isHidden: 'initHidden' } - , state = inst.state - , panes = $.layout.config.allPanes - , pair, pane, key, val - ; - if (!keys) keys = inst.options.stateManagement.stateKeys; // if called by user - if ($.isArray(keys)) keys = keys.join(","); - // convert keys to an array and change delimiters from '__' to '.' - keys = keys.replace(/__/g, ".").split(','); - // loop keys and create a data hash - for (var i=0, n=keys.length; i < n; i++) { - pair = keys[i].split("."); - pane = pair[0]; - key = pair[1]; - if ($.inArray(pane, panes) < 0) continue; // bad pane! - val = state[ pane ][ key ]; - if (val == undefined) continue; - if (key=="isClosed" && state[pane]["isSliding"]) - val = true; // if sliding, then *really* isClosed - ( data[pane] || (data[pane]={}) )[ alt[key] ? alt[key] : key ] = val; - } - return data; - } - - /** - * Stringify a JSON hash so can save in a cookie or db-field - */ -, encodeJSON: function (JSON) { - return parse(JSON); - function parse (h) { - var D=[], i=0, k, v, t; // k = key, v = value - for (k in h) { - v = h[k]; - t = typeof v; - if (t == 'string') // STRING - add quotes - v = '"'+ v +'"'; - else if (t == 'object') // SUB-KEY - recurse into it - v = parse(v); - D[i++] = '"'+ k +'":'+ v; - } - return '{'+ D.join(',') +'}'; - }; - } - - /** - * Convert stringified JSON back to a hash object - * @see $.parseJSON(), adding in jQuery 1.4.1 - */ -, decodeJSON: function (str) { - try { return $.parseJSON ? 
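
Two sketches of the state-management paths implemented above: cookie-based persistence driven by options, and the custom path (for example a database) built on readState/loadState. The cookie name, state keys, and the /save-layout endpoint are illustrative.

    // cookie-based: autoLoad on init and autoSave on unload once enabled
    var myLayout = $("body").layout({
        stateManagement__enabled:      true
    ,   stateManagement__cookie__name: "appLayout"
    ,   stateManagement__stateKeys:    "west.size,west.isClosed"
    });
    myLayout.saveCookie("west.size,west.isClosed", { expires: 7 });   // explicit save

    // custom persistence via readState/loadState:
    var text = myLayout.encodeJSON( myLayout.readState() );
    $.post("/save-layout", { state: text });                          // illustrative endpoint
    // later, re-apply a previously saved string:
    myLayout.loadState( myLayout.decodeJSON(text) );
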
$.parseJSON(str) : window["eval"]("("+ str +")") || {}; } - catch (e) { return {}; } - } - - -, _create: function (inst) { - var _ = $.layout.state; - // ADD State-Management plugin methods to inst - $.extend( inst, { - // readCookie - update options from cookie - returns hash of cookie data - readCookie: function () { return _.readCookie(inst); } - // deleteCookie - , deleteCookie: function () { _.deleteCookie(inst); } - // saveCookie - optionally pass keys-list and cookie-options (hash) - , saveCookie: function (keys, cookieOpts) { return _.saveCookie(inst, keys, cookieOpts); } - // loadCookie - readCookie and use to loadState() - returns hash of cookie data - , loadCookie: function () { return _.loadCookie(inst); } - // loadState - pass a hash of state to use to update options - , loadState: function (stateData, animate) { _.loadState(inst, stateData, animate); } - // readState - returns hash of current layout-state - , readState: function (keys) { return _.readState(inst, keys); } - // add JSON utility methods too... - , encodeJSON: _.encodeJSON - , decodeJSON: _.decodeJSON - }); - - // init state.stateData key, even if plugin is initially disabled - inst.state.stateData = {}; - - // read and load cookie-data per options - var oS = inst.options.stateManagement; - if (oS.enabled) { - if (oS.autoLoad) // update the options from the cookie - inst.loadCookie(); - else // don't modify options - just store cookie data in state.stateData - inst.state.stateData = inst.readCookie(); - } - } - -, _unload: function (inst) { - var oS = inst.options.stateManagement; - if (oS.enabled) { - if (oS.autoSave) // save a state-cookie automatically - inst.saveCookie(); - else // don't save a cookie, but do store state-data in state.stateData key - inst.state.stateData = inst.readState(); - } - } - -}; - -// add state initialization method to Layout's onCreate array of functions -$.layout.onCreate.push( $.layout.state._create ); -$.layout.onUnload.push( $.layout.state._unload ); - - - - -/** - * jquery.layout.buttons 1.0 - * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $ - * - * Copyright (c) 2010 - * Kevin Dalman (http://allpro.net) - * - * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html) - * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses. 
- * - * @dependancies: UI Layout 1.3.0.rc30.1 or higher - * - * @support: http://groups.google.com/group/jquery-ui-layout - * - * Docs: [ to come ] - * Tips: [ to come ] - */ - -// tell Layout that the state plugin is available -$.layout.plugins.buttons = true; - -// Add buttons options to layout.defaults -$.layout.defaults.autoBindCustomButtons = false; -// Specify autoBindCustomButtons as a layout-option, NOT a pane-option -$.layout.optionsMap.layout.push("autoBindCustomButtons"); - -/* - * Button methods - */ -$.layout.buttons = { - - /** - * Searches for .ui-layout-button-xxx elements and auto-binds them as layout-buttons - * - * @see _create() - * - * @param {Object} inst Layout Instance object - */ - init: function (inst) { - var pre = "ui-layout-button-" - , layout = inst.options.name || "" - , name; - $.each("toggle,open,close,pin,toggle-slide,open-slide".split(","), function (i, action) { - $.each($.layout.config.borderPanes, function (ii, pane) { - $("."+pre+action+"-"+pane).each(function(){ - // if button was previously 'bound', data.layoutName was set, but is blank if layout has no 'name' - name = $(this).data("layoutName") || $(this).attr("layoutName"); - if (name == undefined || name === layout) - inst.bindButton(this, action, pane); - }); - }); - }); - } - - /** - * Helper function to validate params received by addButton utilities - * - * Two classes are added to the element, based on the buttonClass... - * The type of button is appended to create the 2nd className: - * - ui-layout-button-pin // action btnClass - * - ui-layout-button-pin-west // action btnClass + pane - * - ui-layout-button-toggle - * - ui-layout-button-open - * - ui-layout-button-close - * - * @param {Object} inst Layout Instance object - * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" - * @param {string} pane Name of the pane the button is for: 'north', 'south', etc. - * - * @return {Array.} If both params valid, the element matching 'selector' in a jQuery wrapper - otherwise returns null - */ -, get: function (inst, selector, pane, action) { - var $E = $(selector) - , o = inst.options - , err = o.errors.addButtonError - ; - if (!$E.length) { // element not found - $.layout.msg(err +" "+ o.errors.selector +": "+ selector, true); - } - else if ($.inArray(pane, $.layout.config.borderPanes) < 0) { // invalid 'pane' sepecified - $.layout.msg(err +" "+ o.errors.pane +": "+ pane, true); - $E = $(""); // NO BUTTON - } - else { // VALID - var btn = o[pane].buttonClass +"-"+ action; - $E .addClass( btn +" "+ btn +"-"+ pane ) - .data("layoutName", o.name); // add layout identifier - even if blank! - } - return $E; - } - - - /** - * NEW syntax for binding layout-buttons - will eventually replace addToggle, addOpen, etc. 
- * - * @param {Object} inst Layout Instance object - * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" - * @param {string} action - * @param {string} pane - */ -, bind: function (inst, selector, action, pane) { - var _ = $.layout.buttons; - switch (action.toLowerCase()) { - case "toggle": _.addToggle (inst, selector, pane); break; - case "open": _.addOpen (inst, selector, pane); break; - case "close": _.addClose (inst, selector, pane); break; - case "pin": _.addPin (inst, selector, pane); break; - case "toggle-slide": _.addToggle (inst, selector, pane, true); break; - case "open-slide": _.addOpen (inst, selector, pane, true); break; - } - return inst; - } - - /** - * Add a custom Toggler button for a pane - * - * @param {Object} inst Layout Instance object - * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" - * @param {string} pane Name of the pane the button is for: 'north', 'south', etc. - * @param {boolean=} slide true = slide-open, false = pin-open - */ -, addToggle: function (inst, selector, pane, slide) { - $.layout.buttons.get(inst, selector, pane, "toggle") - .click(function(evt){ - inst.toggle(pane, !!slide); - evt.stopPropagation(); - }); - return inst; - } - - /** - * Add a custom Open button for a pane - * - * @param {Object} inst Layout Instance object - * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" - * @param {string} pane Name of the pane the button is for: 'north', 'south', etc. - * @param {boolean=} slide true = slide-open, false = pin-open - */ -, addOpen: function (inst, selector, pane, slide) { - $.layout.buttons.get(inst, selector, pane, "open") - .attr("title", inst.options[pane].tips.Open) - .click(function (evt) { - inst.open(pane, !!slide); - evt.stopPropagation(); - }); - return inst; - } - - /** - * Add a custom Close button for a pane - * - * @param {Object} inst Layout Instance object - * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" - * @param {string} pane Name of the pane the button is for: 'north', 'south', etc. - */ -, addClose: function (inst, selector, pane) { - $.layout.buttons.get(inst, selector, pane, "close") - .attr("title", inst.options[pane].tips.Close) - .click(function (evt) { - inst.close(pane); - evt.stopPropagation(); - }); - return inst; - } - - /** - * Add a custom Pin button for a pane - * - * Four classes are added to the element, based on the paneClass for the associated pane... - * Assuming the default paneClass and the pin is 'up', these classes are added for a west-pane pin: - * - ui-layout-pane-pin - * - ui-layout-pane-west-pin - * - ui-layout-pane-pin-up - * - ui-layout-pane-west-pin-up - * - * @param {Object} inst Layout Instance object - * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" - * @param {string} pane Name of the pane the pin is for: 'north', 'south', etc. 
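
A sketch of the two ways custom buttons get wired to a layout by the plugin above; the element IDs are illustrative and the class names follow the ui-layout-button-{action}-{pane} convention scanned by init().

    // explicit binding after the layout is created:
    myLayout.bindButton("#toggleWest", "toggle", "west");
    // or rely on the class-name convention and let init() find buttons at load time:
    //   <span class="ui-layout-button-toggle-west"></span>
    $("body").layout({ autoBindCustomButtons: true });
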
- */ -, addPin: function (inst, selector, pane) { - var _ = $.layout.buttons - , $E = _.get(inst, selector, pane, "pin"); - if ($E.length) { - var s = inst.state[pane]; - $E.click(function (evt) { - _.setPinState(inst, $(this), pane, (s.isSliding || s.isClosed)); - if (s.isSliding || s.isClosed) inst.open( pane ); // change from sliding to open - else inst.close( pane ); // slide-closed - evt.stopPropagation(); - }); - // add up/down pin attributes and classes - _.setPinState(inst, $E, pane, (!s.isClosed && !s.isSliding)); - // add this pin to the pane data so we can 'sync it' automatically - // PANE.pins key is an array so we can store multiple pins for each pane - s.pins.push( selector ); // just save the selector string - } - return inst; - } - - /** - * Change the class of the pin button to make it look 'up' or 'down' - * - * @see addPin(), syncPins() - * - * @param {Object} inst Layout Instance object - * @param {Array.} $Pin The pin-span element in a jQuery wrapper - * @param {string} pane These are the params returned to callbacks by layout() - * @param {boolean} doPin true = set the pin 'down', false = set it 'up' - */ -, setPinState: function (inst, $Pin, pane, doPin) { - var updown = $Pin.attr("pin"); - if (updown && doPin === (updown=="down")) return; // already in correct state - var - o = inst.options[pane] - , pin = o.buttonClass +"-pin" - , side = pin +"-"+ pane - , UP = pin +"-up "+ side +"-up" - , DN = pin +"-down "+side +"-down" - ; - $Pin - .attr("pin", doPin ? "down" : "up") // logic - .attr("title", doPin ? o.tips.Unpin : o.tips.Pin) - .removeClass( doPin ? UP : DN ) - .addClass( doPin ? DN : UP ) - ; - } - - /** - * INTERNAL function to sync 'pin buttons' when pane is opened or closed - * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes - * - * @see open(), close() - * - * @param {Object} inst Layout Instance object - * @param {string} pane These are the params returned to callbacks by layout() - * @param {boolean} doPin True means set the pin 'down', False means 'up' - */ -, syncPinBtns: function (inst, pane, doPin) { - // REAL METHOD IS _INSIDE_ LAYOUT - THIS IS HERE JUST FOR REFERENCE - $.each(inst.state[pane].pins, function (i, selector) { - $.layout.buttons.setPinState(inst, $(selector), pane, doPin); - }); - } - - -, _load: function (inst) { - var _ = $.layout.buttons; - // ADD Button methods to Layout Instance - // Note: sel = jQuery Selector string - $.extend( inst, { - bindButton: function (sel, action, pane) { return _.bind(inst, sel, action, pane); } - // DEPRECATED METHODS - , addToggleBtn: function (sel, pane, slide) { return _.addToggle(inst, sel, pane, slide); } - , addOpenBtn: function (sel, pane, slide) { return _.addOpen(inst, sel, pane, slide); } - , addCloseBtn: function (sel, pane) { return _.addClose(inst, sel, pane); } - , addPinBtn: function (sel, pane) { return _.addPin(inst, sel, pane); } - }); - - // init state array to hold pin-buttons - for (var i=0; i<4; i++) { - var pane = $.layout.config.borderPanes[i]; - inst.state[pane].pins = []; - } - - // auto-init buttons onLoad if option is enabled - if ( inst.options.autoBindCustomButtons ) - _.init(inst); - } - -, _unload: function (inst) { - // TODO: unbind all buttons??? 
- } - -}; - -// add initialization method to Layout's onLoad array of functions -$.layout.onLoad.push( $.layout.buttons._load ); -//$.layout.onUnload.push( $.layout.buttons._unload ); - - - -/** - * jquery.layout.browserZoom 1.0 - * $Date: 2011-12-29 08:00:00 (Thu, 29 Dec 2011) $ - * - * Copyright (c) 2012 - * Kevin Dalman (http://allpro.net) - * - * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html) - * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses. - * - * @dependancies: UI Layout 1.3.0.rc30.1 or higher - * - * @support: http://groups.google.com/group/jquery-ui-layout - * - * @todo: Extend logic to handle other problematic zooming in browsers - * @todo: Add hotkey/mousewheel bindings to _instantly_ respond to these zoom event - */ - -// tell Layout that the plugin is available -$.layout.plugins.browserZoom = true; - -$.layout.defaults.browserZoomCheckInterval = 1000; -$.layout.optionsMap.layout.push("browserZoomCheckInterval"); - -/* - * browserZoom methods - */ -$.layout.browserZoom = { - - _init: function (inst) { - // abort if browser does not need this check - if ($.layout.browserZoom.ratio() !== false) - $.layout.browserZoom._setTimer(inst); - } - -, _setTimer: function (inst) { - // abort if layout destroyed or browser does not need this check - if (inst.destroyed) return; - var o = inst.options - , s = inst.state - // don't need check if inst has parentLayout, but check occassionally in case parent destroyed! - // MINIMUM 100ms interval, for performance - , ms = inst.hasParentLayout ? 5000 : Math.max( o.browserZoomCheckInterval, 100 ) - ; - // set the timer - setTimeout(function(){ - if (inst.destroyed || !o.resizeWithWindow) return; - var d = $.layout.browserZoom.ratio(); - if (d !== s.browserZoom) { - s.browserZoom = d; - inst.resizeAll(); - } - // set a NEW timeout - $.layout.browserZoom._setTimer(inst); - } - , ms ); - } - -, ratio: function () { - var w = window - , s = screen - , d = document - , dE = d.documentElement || d.body - , b = $.layout.browser - , v = b.version - , r, sW, cW - ; - // we can ignore all browsers that fire window.resize event onZoom - if ((b.msie && v > 8) - || !b.msie - ) return false; // don't need to track zoom - - if (s.deviceXDPI) - return calc(s.deviceXDPI, s.systemXDPI); - // everything below is just for future reference! 
- if (b.webkit && (r = d.body.getBoundingClientRect)) - return calc((r.left - r.right), d.body.offsetWidth); - if (b.webkit && (sW = w.outerWidth)) - return calc(sW, w.innerWidth); - if ((sW = s.width) && (cW = dE.clientWidth)) - return calc(sW, cW); - return false; // no match, so cannot - or don't need to - track zoom - - function calc (x,y) { return (parseInt(x,10) / parseInt(y,10) * 100).toFixed(); } - } - -}; -// add initialization method to Layout's onLoad array of functions -$.layout.onReady.push( $.layout.browserZoom._init ); - - - -})( jQuery ); \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js deleted file mode 100644 index 4688d633fe..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js +++ /dev/null @@ -1,4 +0,0 @@ -/* Modernizr 2.5.3 (Custom Build) | MIT & BSD - * Build: http://www.modernizr.com/download/#-inlinesvg - */ -;window.Modernizr=function(a,b,c){function u(a){i.cssText=a}function v(a,b){return u(prefixes.join(a+";")+(b||""))}function w(a,b){return typeof a===b}function x(a,b){return!!~(""+a).indexOf(b)}function y(a,b,d){for(var e in a){var f=b[a[e]];if(f!==c)return d===!1?a[e]:w(f,"function")?f.bind(d||b):f}return!1}var d="2.5.3",e={},f=b.documentElement,g="modernizr",h=b.createElement(g),i=h.style,j,k={}.toString,l={svg:"http://www.w3.org/2000/svg"},m={},n={},o={},p=[],q=p.slice,r,s={}.hasOwnProperty,t;!w(s,"undefined")&&!w(s.call,"undefined")?t=function(a,b){return s.call(a,b)}:t=function(a,b){return b in a&&w(a.constructor.prototype[b],"undefined")},Function.prototype.bind||(Function.prototype.bind=function(b){var c=this;if(typeof c!="function")throw new TypeError;var d=q.call(arguments,1),e=function(){if(this instanceof e){var a=function(){};a.prototype=c.prototype;var f=new a,g=c.apply(f,d.concat(q.call(arguments)));return Object(g)===g?g:f}return c.apply(b,d.concat(q.call(arguments)))};return e}),m.inlinesvg=function(){var a=b.createElement("div");return a.innerHTML="",(a.firstChild&&a.firstChild.namespaceURI)==l.svg};for(var z in m)t(m,z)&&(r=z.toLowerCase(),e[r]=m[z](),p.push((e[r]?"":"no-")+r));return u(""),h=j=null,e._version=d,e}(this,this.document); \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png deleted file mode 100644 index 9b32288e04..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png deleted file mode 100644 index fd0ad06e81..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png deleted file mode 100644 index ad312793ea..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png deleted file mode 100644 index 67ffca79de..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png and /dev/null differ diff --git 
a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png deleted file mode 100644 index 6e9f2f743f..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png deleted file mode 100644 index 7502942eb6..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png deleted file mode 100644 index c777bfce8d..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png deleted file mode 100644 index 7502942eb6..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif deleted file mode 100644 index 848dd5963a..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif deleted file mode 100644 index 34a04249ee..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif deleted file mode 100644 index 2ed33b0aa4..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png deleted file mode 100644 index 6ea17ac320..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png deleted file mode 100644 index 529aa93188..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif deleted file mode 100644 index 00c3378a2a..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js deleted file mode 100644 index d30dbad858..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js +++ /dev/null @@ -1,10 +0,0 @@ -// ┌────────────────────────────────────────────────────────────────────┐ \\ -// │ Raphaël 2.1.0 - JavaScript Vector Library │ \\ -// 
├────────────────────────────────────────────────────────────────────┤ \\ -// │ Copyright © 2008-2012 Dmitry Baranovskiy (http://raphaeljs.com) │ \\ -// │ Copyright © 2008-2012 Sencha Labs (http://sencha.com) │ \\ -// ├────────────────────────────────────────────────────────────────────┤ \\ -// │ Licensed under the MIT (http://raphaeljs.com/license.html) license.│ \\ -// └────────────────────────────────────────────────────────────────────┘ \\ - -(function(a){var b="0.3.4",c="hasOwnProperty",d=/[\.\/]/,e="*",f=function(){},g=function(a,b){return a-b},h,i,j={n:{}},k=function(a,b){var c=j,d=i,e=Array.prototype.slice.call(arguments,2),f=k.listeners(a),l=0,m=!1,n,o=[],p={},q=[],r=h,s=[];h=a,i=0;for(var t=0,u=f.length;tf*b.top){e=b.percents[y],p=b.percents[y-1]||0,t=t/b.top*(e-p),o=b.percents[y+1],j=b.anim[e];break}f&&d.attr(b.anim[b.percents[y]])}if(!!j){if(!k){for(var A in j)if(j[g](A))if(U[g](A)||d.paper.customAttributes[g](A)){u[A]=d.attr(A),u[A]==null&&(u[A]=T[A]),v[A]=j[A];switch(U[A]){case C:w[A]=(v[A]-u[A])/t;break;case"colour":u[A]=a.getRGB(u[A]);var B=a.getRGB(v[A]);w[A]={r:(B.r-u[A].r)/t,g:(B.g-u[A].g)/t,b:(B.b-u[A].b)/t};break;case"path":var D=bR(u[A],v[A]),E=D[1];u[A]=D[0],w[A]=[];for(y=0,z=u[A].length;yd)return d;while(cf?c=e:d=e,e=(d-c)/2+c}return e}function n(a,b){var c=o(a,b);return((l*c+k)*c+j)*c}function m(a){return((i*a+h)*a+g)*a}var g=3*b,h=3*(d-b)-g,i=1-g-h,j=3*c,k=3*(e-c)-j,l=1-j-k;return n(a,1/(200*f))}function cq(){return this.x+q+this.y+q+this.width+" × "+this.height}function cp(){return this.x+q+this.y}function cb(a,b,c,d,e,f){a!=null?(this.a=+a,this.b=+b,this.c=+c,this.d=+d,this.e=+e,this.f=+f):(this.a=1,this.b=0,this.c=0,this.d=1,this.e=0,this.f=0)}function bH(b,c,d){b=a._path2curve(b),c=a._path2curve(c);var e,f,g,h,i,j,k,l,m,n,o=d?0:[];for(var p=0,q=b.length;p=0&&y<=1&&A>=0&&A<=1&&(d?n++:n.push({x:x.x,y:x.y,t1:y,t2:A}))}}return n}function bF(a,b){return bG(a,b,1)}function bE(a,b){return bG(a,b)}function bD(a,b,c,d,e,f,g,h){if(!(x(a,c)x(e,g)||x(b,d)x(f,h))){var i=(a*d-b*c)*(e-g)-(a-c)*(e*h-f*g),j=(a*d-b*c)*(f-h)-(b-d)*(e*h-f*g),k=(a-c)*(f-h)-(b-d)*(e-g);if(!k)return;var l=i/k,m=j/k,n=+l.toFixed(2),o=+m.toFixed(2);if(n<+y(a,c).toFixed(2)||n>+x(a,c).toFixed(2)||n<+y(e,g).toFixed(2)||n>+x(e,g).toFixed(2)||o<+y(b,d).toFixed(2)||o>+x(b,d).toFixed(2)||o<+y(f,h).toFixed(2)||o>+x(f,h).toFixed(2))return;return{x:l,y:m}}}function bC(a,b,c,d,e,f,g,h,i){if(!(i<0||bB(a,b,c,d,e,f,g,h)n)k/=2,l+=(m1?1:i<0?0:i;var j=i/2,k=12,l=[-0.1252,.1252,-0.3678,.3678,-0.5873,.5873,-0.7699,.7699,-0.9041,.9041,-0.9816,.9816],m=[.2491,.2491,.2335,.2335,.2032,.2032,.1601,.1601,.1069,.1069,.0472,.0472],n=0;for(var o=0;od;d+=2){var f=[{x:+a[d-2],y:+a[d-1]},{x:+a[d],y:+a[d+1]},{x:+a[d+2],y:+a[d+3]},{x:+a[d+4],y:+a[d+5]}];b?d?e-4==d?f[3]={x:+a[0],y:+a[1]}:e-2==d&&(f[2]={x:+a[0],y:+a[1]},f[3]={x:+a[2],y:+a[3]}):f[0]={x:+a[e-2],y:+a[e-1]}:e-4==d?f[3]=f[2]:d||(f[0]={x:+a[d],y:+a[d+1]}),c.push(["C",(-f[0].x+6*f[1].x+f[2].x)/6,(-f[0].y+6*f[1].y+f[2].y)/6,(f[1].x+6*f[2].x-f[3].x)/6,(f[1].y+6*f[2].y-f[3].y)/6,f[2].x,f[2].y])}return c}function bx(){return this.hex}function bv(a,b,c){function d(){var e=Array.prototype.slice.call(arguments,0),f=e.join("␀"),h=d.cache=d.cache||{},i=d.count=d.count||[];if(h[g](f)){bu(i,f);return c?c(h[f]):h[f]}i.length>=1e3&&delete h[i.shift()],i.push(f),h[f]=a[m](b,e);return c?c(h[f]):h[f]}return d}function bu(a,b){for(var c=0,d=a.length;c',bl=bk.firstChild,bl.style.behavior="url(#default#VML)";if(!bl||typeof bl.adj!="object")return 
a.type=p;bk=null}a.svg=!(a.vml=a.type=="VML"),a._Paper=j,a.fn=k=j.prototype=a.prototype,a._id=0,a._oid=0,a.is=function(a,b){b=v.call(b);if(b=="finite")return!M[g](+a);if(b=="array")return a instanceof Array;return b=="null"&&a===null||b==typeof a&&a!==null||b=="object"&&a===Object(a)||b=="array"&&Array.isArray&&Array.isArray(a)||H.call(a).slice(8,-1).toLowerCase()==b},a.angle=function(b,c,d,e,f,g){if(f==null){var h=b-d,i=c-e;if(!h&&!i)return 0;return(180+w.atan2(-i,-h)*180/B+360)%360}return a.angle(b,c,f,g)-a.angle(d,e,f,g)},a.rad=function(a){return a%360*B/180},a.deg=function(a){return a*180/B%360},a.snapTo=function(b,c,d){d=a.is(d,"finite")?d:10;if(a.is(b,E)){var e=b.length;while(e--)if(z(b[e]-c)<=d)return b[e]}else{b=+b;var f=c%b;if(fb-d)return c-f+b}return c};var bn=a.createUUID=function(a,b){return function(){return"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(a,b).toUpperCase()}}(/[xy]/g,function(a){var b=w.random()*16|0,c=a=="x"?b:b&3|8;return c.toString(16)});a.setWindow=function(b){eve("raphael.setWindow",a,h.win,b),h.win=b,h.doc=h.win.document,a._engine.initWin&&a._engine.initWin(h.win)};var bo=function(b){if(a.vml){var c=/^\s+|\s+$/g,d;try{var e=new ActiveXObject("htmlfile");e.write(""),e.close(),d=e.body}catch(f){d=createPopup().document.body}var g=d.createTextRange();bo=bv(function(a){try{d.style.color=r(a).replace(c,p);var b=g.queryCommandValue("ForeColor");b=(b&255)<<16|b&65280|(b&16711680)>>>16;return"#"+("000000"+b.toString(16)).slice(-6)}catch(e){return"none"}})}else{var i=h.doc.createElement("i");i.title="Raphaël Colour Picker",i.style.display="none",h.doc.body.appendChild(i),bo=bv(function(a){i.style.color=a;return h.doc.defaultView.getComputedStyle(i,p).getPropertyValue("color")})}return bo(b)},bp=function(){return"hsb("+[this.h,this.s,this.b]+")"},bq=function(){return"hsl("+[this.h,this.s,this.l]+")"},br=function(){return this.hex},bs=function(b,c,d){c==null&&a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b&&(d=b.b,c=b.g,b=b.r);if(c==null&&a.is(b,D)){var e=a.getRGB(b);b=e.r,c=e.g,d=e.b}if(b>1||c>1||d>1)b/=255,c/=255,d/=255;return[b,c,d]},bt=function(b,c,d,e){b*=255,c*=255,d*=255;var f={r:b,g:c,b:d,hex:a.rgb(b,c,d),toString:br};a.is(e,"finite")&&(f.opacity=e);return f};a.color=function(b){var c;a.is(b,"object")&&"h"in b&&"s"in b&&"b"in b?(c=a.hsb2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):a.is(b,"object")&&"h"in b&&"s"in b&&"l"in b?(c=a.hsl2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):(a.is(b,"string")&&(b=a.getRGB(b)),a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b?(c=a.rgb2hsl(b),b.h=c.h,b.s=c.s,b.l=c.l,c=a.rgb2hsb(b),b.v=c.b):(b={hex:"none"},b.r=b.g=b.b=b.h=b.s=b.v=b.l=-1)),b.toString=br;return b},a.hsb2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"b"in a&&(c=a.b,b=a.s,a=a.h,d=a.o),a*=360;var e,f,g,h,i;a=a%360/60,i=c*b,h=i*(1-z(a%2-1)),e=f=g=c-i,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.hsl2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"l"in a&&(c=a.l,b=a.s,a=a.h);if(a>1||b>1||c>1)a/=360,b/=100,c/=100;a*=360;var e,f,g,h,i;a=a%360/60,i=2*b*(c<.5?c:1-c),h=i*(1-z(a%2-1)),e=f=g=c-i/2,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.rgb2hsb=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g;f=x(a,b,c),g=f-y(a,b,c),d=g==0?null:f==a?(b-c)/g:f==b?(c-a)/g+2:(a-b)/g+4,d=(d+360)%6*60/360,e=g==0?0:g/f;return{h:d,s:e,b:f,toString:bp}},a.rgb2hsl=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var 
d,e,f,g,h,i;g=x(a,b,c),h=y(a,b,c),i=g-h,d=i==0?null:g==a?(b-c)/i:g==b?(c-a)/i+2:(a-b)/i+4,d=(d+360)%6*60/360,f=(g+h)/2,e=i==0?0:f<.5?i/(2*f):i/(2-2*f);return{h:d,s:e,l:f,toString:bq}},a._path2string=function(){return this.join(",").replace(Y,"$1")};var bw=a._preload=function(a,b){var c=h.doc.createElement("img");c.style.cssText="position:absolute;left:-9999em;top:-9999em",c.onload=function(){b.call(this),this.onload=null,h.doc.body.removeChild(this)},c.onerror=function(){h.doc.body.removeChild(this)},h.doc.body.appendChild(c),c.src=a};a.getRGB=bv(function(b){if(!b||!!((b=r(b)).indexOf("-")+1))return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx};if(b=="none")return{r:-1,g:-1,b:-1,hex:"none",toString:bx};!X[g](b.toLowerCase().substring(0,2))&&b.charAt()!="#"&&(b=bo(b));var c,d,e,f,h,i,j,k=b.match(L);if(k){k[2]&&(f=R(k[2].substring(5),16),e=R(k[2].substring(3,5),16),d=R(k[2].substring(1,3),16)),k[3]&&(f=R((i=k[3].charAt(3))+i,16),e=R((i=k[3].charAt(2))+i,16),d=R((i=k[3].charAt(1))+i,16)),k[4]&&(j=k[4][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),k[1].toLowerCase().slice(0,4)=="rgba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100));if(k[5]){j=k[5][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsb2rgb(d,e,f,h)}if(k[6]){j=k[6][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsla"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsl2rgb(d,e,f,h)}k={r:d,g:e,b:f,toString:bx},k.hex="#"+(16777216|f|e<<8|d<<16).toString(16).slice(1),a.is(h,"finite")&&(k.opacity=h);return k}return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx}},a),a.hsb=bv(function(b,c,d){return a.hsb2rgb(b,c,d).hex}),a.hsl=bv(function(b,c,d){return a.hsl2rgb(b,c,d).hex}),a.rgb=bv(function(a,b,c){return"#"+(16777216|c|b<<8|a<<16).toString(16).slice(1)}),a.getColor=function(a){var b=this.getColor.start=this.getColor.start||{h:0,s:1,b:a||.75},c=this.hsb2rgb(b.h,b.s,b.b);b.h+=.075,b.h>1&&(b.h=0,b.s-=.2,b.s<=0&&(this.getColor.start={h:0,s:1,b:b.b}));return c.hex},a.getColor.reset=function(){delete this.start},a.parsePathString=function(b){if(!b)return null;var c=bz(b);if(c.arr)return bJ(c.arr);var d={a:7,c:6,h:1,l:2,m:2,r:4,q:4,s:4,t:2,v:1,z:0},e=[];a.is(b,E)&&a.is(b[0],E)&&(e=bJ(b)),e.length||r(b).replace(Z,function(a,b,c){var f=[],g=b.toLowerCase();c.replace(_,function(a,b){b&&f.push(+b)}),g=="m"&&f.length>2&&(e.push([b][n](f.splice(0,2))),g="l",b=b=="m"?"l":"L");if(g=="r")e.push([b][n](f));else while(f.length>=d[g]){e.push([b][n](f.splice(0,d[g])));if(!d[g])break}}),e.toString=a._path2string,c.arr=bJ(e);return e},a.parseTransformString=bv(function(b){if(!b)return null;var c={r:3,s:4,t:2,m:6},d=[];a.is(b,E)&&a.is(b[0],E)&&(d=bJ(b)),d.length||r(b).replace($,function(a,b,c){var e=[],f=v.call(b);c.replace(_,function(a,b){b&&e.push(+b)}),d.push([b][n](e))}),d.toString=a._path2string;return d});var bz=function(a){var b=bz.ps=bz.ps||{};b[a]?b[a].sleep=100:b[a]={sleep:100},setTimeout(function(){for(var c in b)b[g](c)&&c!=a&&(b[c].sleep--,!b[c].sleep&&delete b[c])});return b[a]};a.findDotsAtSegment=function(a,b,c,d,e,f,g,h,i){var 
j=1-i,k=A(j,3),l=A(j,2),m=i*i,n=m*i,o=k*a+l*3*i*c+j*3*i*i*e+n*g,p=k*b+l*3*i*d+j*3*i*i*f+n*h,q=a+2*i*(c-a)+m*(e-2*c+a),r=b+2*i*(d-b)+m*(f-2*d+b),s=c+2*i*(e-c)+m*(g-2*e+c),t=d+2*i*(f-d)+m*(h-2*f+d),u=j*a+i*c,v=j*b+i*d,x=j*e+i*g,y=j*f+i*h,z=90-w.atan2(q-s,r-t)*180/B;(q>s||r=a.x&&b<=a.x2&&c>=a.y&&c<=a.y2},a.isBBoxIntersect=function(b,c){var d=a.isPointInsideBBox;return d(c,b.x,b.y)||d(c,b.x2,b.y)||d(c,b.x,b.y2)||d(c,b.x2,b.y2)||d(b,c.x,c.y)||d(b,c.x2,c.y)||d(b,c.x,c.y2)||d(b,c.x2,c.y2)||(b.xc.x||c.xb.x)&&(b.yc.y||c.yb.y)},a.pathIntersection=function(a,b){return bH(a,b)},a.pathIntersectionNumber=function(a,b){return bH(a,b,1)},a.isPointInsidePath=function(b,c,d){var e=a.pathBBox(b);return a.isPointInsideBBox(e,c,d)&&bH(b,[["M",c,d],["H",e.x2+10]],1)%2==1},a._removedFactory=function(a){return function(){eve("raphael.log",null,"Raphaël: you are calling to method “"+a+"” of removed object",a)}};var bI=a.pathBBox=function(a){var b=bz(a);if(b.bbox)return b.bbox;if(!a)return{x:0,y:0,width:0,height:0,x2:0,y2:0};a=bR(a);var c=0,d=0,e=[],f=[],g;for(var h=0,i=a.length;h1&&(v=w.sqrt(v),c=v*c,d=v*d);var x=c*c,y=d*d,A=(f==g?-1:1)*w.sqrt(z((x*y-x*u*u-y*t*t)/(x*u*u+y*t*t))),C=A*c*u/d+(a+h)/2,D=A*-d*t/c+(b+i)/2,E=w.asin(((b-D)/d).toFixed(9)),F=w.asin(((i-D)/d).toFixed(9));E=aF&&(E=E-B*2),!g&&F>E&&(F=F-B*2)}else E=j[0],F=j[1],C=j[2],D=j[3];var G=F-E;if(z(G)>k){var H=F,I=h,J=i;F=E+k*(g&&F>E?1:-1),h=C+c*w.cos(F),i=D+d*w.sin(F),m=bO(h,i,c,d,e,0,g,I,J,[F,H,C,D])}G=F-E;var K=w.cos(E),L=w.sin(E),M=w.cos(F),N=w.sin(F),O=w.tan(G/4),P=4/3*c*O,Q=4/3*d*O,R=[a,b],S=[a+P*L,b-Q*K],T=[h+P*N,i-Q*M],U=[h,i];S[0]=2*R[0]-S[0],S[1]=2*R[1]-S[1];if(j)return[S,T,U][n](m);m=[S,T,U][n](m).join()[s](",");var V=[];for(var W=0,X=m.length;W"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y)),i=f-2*d+b-(h-2*f+d),j=2*(d-b)-2*(f-d),k=b-d,l=(-j+w.sqrt(j*j-4*i*k))/2/i,n=(-j-w.sqrt(j*j-4*i*k))/2/i,z(l)>"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y));return{min:{x:y[m](0,p),y:y[m](0,o)},max:{x:x[m](0,p),y:x[m](0,o)}}}),bR=a._path2curve=bv(function(a,b){var c=!b&&bz(a);if(!b&&c.curve)return bJ(c.curve);var d=bL(a),e=b&&bL(b),f={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},g={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},h=function(a,b){var c,d;if(!a)return["C",b.x,b.y,b.x,b.y,b.x,b.y];!(a[0]in{T:1,Q:1})&&(b.qx=b.qy=null);switch(a[0]){case"M":b.X=a[1],b.Y=a[2];break;case"A":a=["C"][n](bO[m](0,[b.x,b.y][n](a.slice(1))));break;case"S":c=b.x+(b.x-(b.bx||b.x)),d=b.y+(b.y-(b.by||b.y)),a=["C",c,d][n](a.slice(1));break;case"T":b.qx=b.x+(b.x-(b.qx||b.x)),b.qy=b.y+(b.y-(b.qy||b.y)),a=["C"][n](bN(b.x,b.y,b.qx,b.qy,a[1],a[2]));break;case"Q":b.qx=a[1],b.qy=a[2],a=["C"][n](bN(b.x,b.y,a[1],a[2],a[3],a[4]));break;case"L":a=["C"][n](bM(b.x,b.y,a[1],a[2]));break;case"H":a=["C"][n](bM(b.x,b.y,a[1],b.y));break;case"V":a=["C"][n](bM(b.x,b.y,b.x,a[1]));break;case"Z":a=["C"][n](bM(b.x,b.y,b.X,b.Y))}return a},i=function(a,b){if(a[b].length>7){a[b].shift();var c=a[b];while(c.length)a.splice(b++,0,["C"][n](c.splice(0,6)));a.splice(b,1),l=x(d.length,e&&e.length||0)}},j=function(a,b,c,f,g){a&&b&&a[g][0]=="M"&&b[g][0]!="M"&&(b.splice(g,0,["M",f.x,f.y]),c.bx=0,c.by=0,c.x=a[g][1],c.y=a[g][2],l=x(d.length,e&&e.length||0))};for(var 
k=0,l=x(d.length,e&&e.length||0);ke){if(c&&!l.start){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n),k+=["C"+m.start.x,m.start.y,m.m.x,m.m.y,m.x,m.y];if(f)return k;l.start=k,k=["M"+m.x,m.y+"C"+m.n.x,m.n.y,m.end.x,m.end.y,i[5],i[6]].join(),n+=j,g=+i[5],h=+i[6];continue}if(!b&&!c){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n);return{x:m.x,y:m.y,alpha:m.alpha}}}n+=j,g=+i[5],h=+i[6]}k+=i.shift()+i}l.end=k,m=b?n:c?l:a.findDotsAtSegment(g,h,i[0],i[1],i[2],i[3],i[4],i[5],1),m.alpha&&(m={x:m.x,y:m.y,alpha:m.alpha});return m}},cu=ct(1),cv=ct(),cw=ct(0,1);a.getTotalLength=cu,a.getPointAtLength=cv,a.getSubpath=function(a,b,c){if(this.getTotalLength(a)-c<1e-6)return cw(a,b).end;var d=cw(a,c,1);return b?cw(d,b).end:d},cl.getTotalLength=function(){if(this.type=="path"){if(this.node.getTotalLength)return this.node.getTotalLength();return cu(this.attrs.path)}},cl.getPointAtLength=function(a){if(this.type=="path")return cv(this.attrs.path,a)},cl.getSubpath=function(b,c){if(this.type=="path")return a.getSubpath(this.attrs.path,b,c)};var cx=a.easing_formulas={linear:function(a){return a},"<":function(a){return A(a,1.7)},">":function(a){return A(a,.48)},"<>":function(a){var b=.48-a/1.04,c=w.sqrt(.1734+b*b),d=c-b,e=A(z(d),1/3)*(d<0?-1:1),f=-c-b,g=A(z(f),1/3)*(f<0?-1:1),h=e+g+.5;return(1-h)*3*h*h+h*h*h},backIn:function(a){var b=1.70158;return a*a*((b+1)*a-b)},backOut:function(a){a=a-1;var b=1.70158;return a*a*((b+1)*a+b)+1},elastic:function(a){if(a==!!a)return a;return A(2,-10*a)*w.sin((a-.075)*2*B/.3)+1},bounce:function(a){var b=7.5625,c=2.75,d;a<1/c?d=b*a*a:a<2/c?(a-=1.5/c,d=b*a*a+.75):a<2.5/c?(a-=2.25/c,d=b*a*a+.9375):(a-=2.625/c,d=b*a*a+.984375);return d}};cx.easeIn=cx["ease-in"]=cx["<"],cx.easeOut=cx["ease-out"]=cx[">"],cx.easeInOut=cx["ease-in-out"]=cx["<>"],cx["back-in"]=cx.backIn,cx["back-out"]=cx.backOut;var cy=[],cz=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(a){setTimeout(a,16)},cA=function(){var b=+(new Date),c=0;for(;c1&&!d.next){for(s in k)k[g](s)&&(r[s]=d.totalOrigin[s]);d.el.attr(r),cE(d.anim,d.el,d.anim.percents[0],null,d.totalOrigin,d.repeat-1)}d.next&&!d.stop&&cE(d.anim,d.el,d.next,null,d.totalOrigin,d.repeat)}}a.svg&&m&&m.paper&&m.paper.safari(),cy.length&&cz(cA)},cB=function(a){return a>255?255:a<0?0:a};cl.animateWith=function(b,c,d,e,f,g){var h=this;if(h.removed){g&&g.call(h);return h}var i=d instanceof cD?d:a.animation(d,e,f,g),j,k;cE(i,h,i.percents[0],null,h.attr());for(var l=0,m=cy.length;l.5)*2-1;i(m-.5,2)+i(n-.5,2)>.25&&(n=f.sqrt(.25-i(m-.5,2))*e+.5)&&n!=.5&&(n=n.toFixed(5)-1e-5*e)}return l}),e=e.split(/\s*\-\s*/);if(j=="linear"){var t=e.shift();t=-d(t);if(isNaN(t))return null;var u=[0,0,f.cos(a.rad(t)),f.sin(a.rad(t))],v=1/(g(h(u[2]),h(u[3]))||1);u[2]*=v,u[3]*=v,u[2]<0&&(u[0]=-u[2],u[2]=0),u[3]<0&&(u[1]=-u[3],u[3]=0)}var w=a._parseDots(e);if(!w)return null;k=k.replace(/[\(\)\s,\xb0#]/g,"_"),b.gradient&&k!=b.gradient.id&&(p.defs.removeChild(b.gradient),delete b.gradient);if(!b.gradient){s=q(j+"Gradient",{id:k}),b.gradient=s,q(s,j=="radial"?{fx:m,fy:n}:{x1:u[0],y1:u[1],x2:u[2],y2:u[3],gradientTransform:b.matrix.invert()}),p.defs.appendChild(s);for(var x=0,y=w.length;x1?G.opacity/100:G.opacity});case"stroke":G=a.getRGB(p),i.setAttribute(o,G.hex),o=="stroke"&&G[b]("opacity")&&q(i,{"stroke-opacity":G.opacity>1?G.opacity/100:G.opacity}),o=="stroke"&&d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in 
d._.arrows&&t(d,d._.arrows.endString,1));break;case"gradient":(d.type=="circle"||d.type=="ellipse"||c(p).charAt()!="r")&&r(d,p);break;case"opacity":k.gradient&&!k[b]("stroke-opacity")&&q(i,{"stroke-opacity":p>1?p/100:p});case"fill-opacity":if(k.gradient){H=a._g.doc.getElementById(i.getAttribute("fill").replace(/^url\(#|\)$/g,l)),H&&(I=H.getElementsByTagName("stop"),q(I[I.length-1],{"stop-opacity":p}));break};default:o=="font-size"&&(p=e(p,10)+"px");var J=o.replace(/(\-.)/g,function(a){return a.substring(1).toUpperCase()});i.style[J]=p,d._.dirty=1,i.setAttribute(o,p)}}y(d,f),i.style.visibility=m},x=1.2,y=function(d,f){if(d.type=="text"&&!!(f[b]("text")||f[b]("font")||f[b]("font-size")||f[b]("x")||f[b]("y"))){var g=d.attrs,h=d.node,i=h.firstChild?e(a._g.doc.defaultView.getComputedStyle(h.firstChild,l).getPropertyValue("font-size"),10):10;if(f[b]("text")){g.text=f.text;while(h.firstChild)h.removeChild(h.firstChild);var j=c(f.text).split("\n"),k=[],m;for(var n=0,o=j.length;n"));var $=X.getBoundingClientRect();t.W=m.w=($.right-$.left)/Y,t.H=m.h=($.bottom-$.top)/Y,t.X=m.x,t.Y=m.y+t.H/2,("x"in i||"y"in i)&&(t.path.v=a.format("m{0},{1}l{2},{1}",f(m.x*u),f(m.y*u),f(m.x*u)+1));var _=["x","y","text","font","font-family","font-weight","font-style","font-size"];for(var ba=0,bb=_.length;ba.25&&(c=e.sqrt(.25-i(b-.5,2))*((c>.5)*2-1)+.5),m=b+n+c);return o}),f=f.split(/\s*\-\s*/);if(l=="linear"){var p=f.shift();p=-d(p);if(isNaN(p))return null}var q=a._parseDots(f);if(!q)return null;b=b.shape||b.node;if(q.length){b.removeChild(g),g.on=!0,g.method="none",g.color=q[0].color,g.color2=q[q.length-1].color;var r=[];for(var s=0,t=q.length;s')}}catch(c){F=function(a){return b.createElement("<"+a+' xmlns="urn:schemas-microsoft.com:vml" class="rvml">')}}},a._engine.initWin(a._g.win),a._engine.create=function(){var b=a._getContainer.apply(0,arguments),c=b.container,d=b.height,e,f=b.width,g=b.x,h=b.y;if(!c)throw new Error("VML container not found.");var i=new a._Paper,j=i.canvas=a._g.doc.createElement("div"),k=j.style;g=g||0,h=h||0,f=f||512,d=d||342,i.width=f,i.height=d,f==+f&&(f+="px"),d==+d&&(d+="px"),i.coordsize=u*1e3+n+u*1e3,i.coordorigin="0 0",i.span=a._g.doc.createElement("span"),i.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;",j.appendChild(i.span),k.cssText=a.format("top:0;left:0;width:{0};height:{1};display:inline-block;position:relative;clip:rect(0 {0} {1} 0);overflow:hidden",f,d),c==1?(a._g.doc.body.appendChild(j),k.left=g+"px",k.top=h+"px",k.position="absolute"):c.firstChild?c.insertBefore(j,c.firstChild):c.appendChild(j),i.renderfix=function(){};return i},a.prototype.clear=function(){a.eve("raphael.clear",this),this.canvas.innerHTML=o,this.span=a._g.doc.createElement("span"),this.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;display:inline;",this.canvas.appendChild(this.span),this.bottom=this.top=null},a.prototype.remove=function(){a.eve("raphael.remove",this),this.canvas.parentNode.removeChild(this.canvas);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null;return!0};var G=a.st;for(var H in E)E[b](H)&&!G[b](H)&&(G[H]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a].apply(c,b)})}}(H))}(window.Raphael) \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css deleted file mode 100755 index 7d64b9c5c5..0000000000 --- 
a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css +++ /dev/null @@ -1,30 +0,0 @@ -body { - font-size: 10pt; - font-family: Arial, sans-serif; -} - -a { - color:#315479; -} - -.letters { - width:100%; - text-align:center; - margin:0.6em; - padding:0.1em; - border-bottom:1px solid gray; -} - -.entry { - border-bottom: 1px solid lightgray; - padding: 5px 0 8px; -} - -.name { - /* background-color:#E5E5E5; */ -} - -.occurrences { - margin-left: 1em; - margin-top: 5px; -} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png deleted file mode 100644 index 4625f9df74..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd deleted file mode 100644 index 3764f82ccb..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js deleted file mode 100644 index 4417f5b438..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js +++ /dev/null @@ -1,71 +0,0 @@ -// © 2010 EPFL/LAMP -// code by Gilles Dubochet - -function Scheduler() { - var scheduler = this; - var resolution = 0; - this.timeout = undefined; - this.queues = new Array(0); // an array of work pacakges indexed by index in the labels table. - this.labels = new Array(0); // an indexed array of labels indexed by priority. This should be short. - this.label = function(name, priority) { - this.name = name; - this.priority = priority; - } - this.work = function(fn, self, args) { - this.fn = fn; - this.self = self; - this.args = args; - } - this.addLabel = function(name, priority) { - var idx = 0; - while (idx < scheduler.queues.length && scheduler.labels[idx].priority <= priority) { idx = idx + 1; } - scheduler.labels.splice(idx, 0, new scheduler.label(name, priority)); - scheduler.queues.splice(idx, 0, new Array(0)); - } - this.clearLabel = function(name) { - var idx = 0; - while (idx < scheduler.queues.length && scheduler.labels[idx].name != name) { idx = idx + 1; } - if (idx < scheduler.queues.length && scheduler.labels[i].name == name) { - scheduler.labels.splice(idx, 1); - scheduler.queues.splice(idx, 1); - } - } - this.nextWork = function() { - var fn = undefined; - var idx = 0; - while (idx < scheduler.queues.length && scheduler.queues[idx].length == 0) { idx = idx + 1; } - if (idx < scheduler.queues.length && scheduler.queues[idx].length > 0) { - var fn = scheduler.queues[idx].shift(); - } - return fn; - } - this.add = function(labelName, fn, self, args) { - var doWork = function() { - scheduler.timeout = setTimeout(function() { - var work = scheduler.nextWork(); - if (work != undefined) { - if (work.args == undefined) { work.args = new Array(0); } - work.fn.apply(work.self, work.args); - doWork(); - } - else { - scheduler.timeout = undefined; - } - }, resolution); - } - var idx = 0; - while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; } - if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) { - scheduler.queues[idx].push(new scheduler.work(fn, self, args)); - if (scheduler.timeout == undefined) doWork(); - } - else throw("queue for add is 
non existant"); - } - this.clear = function(labelName) { - var idx = 0; - while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; } - if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) { - scheduler.queues[idx] = new Array(); - } - } -}; diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png deleted file mode 100644 index bc29efb3e6..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png deleted file mode 100644 index 8313f4975b..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png deleted file mode 100644 index 04eda2f307..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png deleted file mode 100644 index c89765239e..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png deleted file mode 100644 index bf984ef0ba..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png deleted file mode 100644 index a790bb1169..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif deleted file mode 100644 index b6ac4415e4..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif deleted file mode 100644 index 9aae5ba0aa..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css deleted file mode 100644 index b066027f04..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css +++ /dev/null @@ -1,848 +0,0 @@ -/* Reset */ - -html, body, div, span, object, iframe, -h1, h2, h3, h4, h5, h6, p, blockquote, pre, -a, abbr, acronym, address, code, pre, -del, dfn, em, img, q, dl, dt, dd, ol, ul, li, -fieldset, form, label, legend, input, -table, caption, tbody, tfoot, thead, tr, th, td { - margin: 0; - padding: 0; - border: 0; - font-weight: inherit; - font-style: inherit; - font-size: 100%; - font-family: inherit; - vertical-align: 
baseline; -} - -table { border-collapse: separate; border-spacing: 0; } -caption, th, td { text-align: left; font-weight: normal; } -table, td, th { vertical-align: middle; } - -blockquote:before, blockquote:after, q:before, q:after { content: ""; } -blockquote, q { quotes: none; } - -a img { border: none; } - -input { border-width: 0px; } - -/* Page */ - -body { - font-family: Arial, sans-serif; - font-size: 10pt; -} - -#footer { - font-size: 9pt; - text-align: center; - color: #858484; - bottom: 0; - width: 100%; - height: 20px; -} - -a[href] { - text-decoration: underline; - color: #315479; -} - -a[href]:hover { - text-decoration: none; -} - -#types ol li > p { - margin-top: 5px; -} - -#types ol li:last-child { - margin-bottom: 5px; -} - -/* -#definition { - padding: 6px 0 6px 6px; - min-height: 59px; - color: white; -} -*/ - -#definition { - display: block-inline; - padding: 5px 0px; - height: 61px; -} - -#definition > img { - float: left; - padding-right: 6px; - padding-left: 5px; -} - -#definition > a > img { - float: left; - padding-right: 6px; - padding-left: 5px; -} - -#definition p + h1 { - margin-top: 3px; -} - -#definition > h1 { -/* padding: 12px 0 12px 6px;*/ - color: white; - text-shadow: 3px black; - text-shadow: black 0px 2px 0px; - font-size: 24pt; - display: inline-block; - overflow: hidden; - margin-top: 10px; -} - -#definition h1 > a { - color: #ffffff; - font-size: 24pt; - text-shadow: black 0px 2px 0px; -/* text-shadow: black 0px 0px 0px;*/ -text-decoration: none; -} - -#definition #owner { - color: #ffffff; - margin-top: 4px; - font-size: 10pt; - overflow: hidden; -} - -#definition #owner > a { - color: #ffffff; -} - -#definition #owner > a:hover { - text-decoration: none; -} - -#signature { - background-image:url('signaturebg2.gif'); - background-color: #d7d7d7; - min-height: 18px; - background-repeat:repeat-x; - font-size: 11.5pt; -/* margin-bottom: 10px;*/ - padding: 8px; -} - -#signature > span.modifier_kind { - display: inline; - float: left; - text-align: left; - width: auto; - position: static; - text-shadow: 2px white; - text-shadow: white 0px 1px 0px; -} - -#signature > span.symbol { - text-align: left; - display: inline; - padding-left: 0.7em; - text-shadow: 2px white; - text-shadow: white 0px 1px 0px; -} - -/* Linear super types and known subclasses */ -.hiddenContent { - display: none; -} - -.toggleContainer .toggle { - cursor: pointer; - padding-left: 15px; - background: url("arrow-right.png") no-repeat 0 3px transparent; -} - -.toggleContainer .toggle.open { - background: url("arrow-down.png") no-repeat 0 3px transparent; -} - -.toggleContainer .hiddenContent { - margin-top: 5px; -} - -.value #definition { - background-color: #2C475C; /* blue */ - background-image:url('defbg-blue.gif'); - background-repeat:repeat-x; -} - -.type #definition { - background-color: #316555; /* green */ - background-image:url('defbg-green.gif'); - background-repeat:repeat-x; -} - -#template { - margin-bottom: 50px; -} - -h3 { - color: white; - padding: 5px 10px; - font-size: 12pt; - font-weight: bold; - text-shadow: black 1px 1px 0px; -} - -dl.attributes > dt { - display: block; - float: left; - font-style: italic; -} - -dl.attributes > dt.implicit { - font-weight: bold; - color: darkgreen; -} - -dl.attributes > dd { - display: block; - padding-left: 10em; - margin-bottom: 5px; -} - -#template .values > h3 { - background: #2C475C url("valuemembersbg.gif") repeat-x bottom left; /* grayish blue */ - height: 18px; -} - -#values ol li:last-child { - margin-bottom: 5px; -} - 
-#template .types > h3 { - background: #316555 url("typebg.gif") repeat-x bottom left; /* green */ - height: 18px; -} - -#constructors > h3 { - background: #4f504f url("constructorsbg.gif") repeat-x bottom left; /* gray */ - height: 18px; -} - -#inheritedMembers > div.parent > h3 { - background: #dadada url("constructorsbg.gif") repeat-x bottom left; /* gray */ - height: 17px; - font-style: italic; - font-size: 12pt; -} - -#inheritedMembers > div.parent > h3 * { - color: white; -} - -#inheritedMembers > div.conversion > h3 { - background: #dadada url("conversionbg.gif") repeat-x bottom left; /* gray */ - height: 17px; - font-style: italic; - font-size: 12pt; -} - -#inheritedMembers > div.conversion > h3 * { - color: white; -} - -#groupedMembers > div.group > h3 { - background: #dadada url("typebg.gif") repeat-x bottom left; /* green */ - height: 17px; - font-size: 12pt; -} - -#groupedMembers > div.group > h3 * { - color: white; -} - - -/* Member cells */ - -div.members > ol { - background-color: white; - list-style: none -} - -div.members > ol > li { - display: block; - border-bottom: 1px solid gray; - padding: 5px 0 6px; - margin: 0 10px; - position: relative; -} - -div.members > ol > li:last-child { - border: 0; - padding: 5px 0 5px; -} - -/* Member signatures */ - -#tooltip { - background: #EFD5B5; - border: 1px solid gray; - color: black; - display: none; - padding: 5px; - position: absolute; -} - -.signature { - font-family: monospace; - font-size: 10pt; - line-height: 18px; - clear: both; - display: block; - text-shadow: 2px white; - text-shadow: white 0px 1px 0px; -} - -.signature .modifier_kind { - position: absolute; - text-align: right; - width: 14em; -} - -.signature > a > .symbol > .name { - text-decoration: underline; -} - -.signature > a:hover > .symbol > .name { - text-decoration: none; -} - -.signature > a { - text-decoration: none; -} - -.signature > .symbol { - display: block; - padding-left: 14.7em; -} - -.signature .name { - display: inline-block; - font-weight: bold; -} - -.signature .symbol > .implicit { - display: inline-block; - font-weight: bold; - text-decoration: underline; - color: darkgreen; -} - -.signature .symbol .shadowed { - color: darkseagreen; -} - -.signature .symbol .params > .implicit { - font-style: italic; -} - -.signature .symbol .deprecated { - text-decoration: line-through; -} - -.signature .symbol .params .default { - font-style: italic; -} - -#template .signature.closed { - background: url("arrow-right.png") no-repeat 0 5px transparent; - cursor: pointer; -} - -#template .signature.opened { - background: url("arrow-down.png") no-repeat 0 5px transparent; - cursor: pointer; -} - -#template .values .signature .name { - color: darkblue; -} - -#template .types .signature .name { - color: darkgreen; -} - -.full-signature-usecase h4 span { - font-size: 10pt; -} - -.full-signature-usecase > #signature { - padding-top: 0px; -} - -#template .full-signature-usecase > .signature.closed { - background: none; -} - -#template .full-signature-usecase > .signature.opened { - background: none; -} - -.full-signature-block { - padding: 5px 0 0; - border-top: 1px solid #EBEBEB; - margin-top: 5px; - margin-bottom: 5px; -} - - -/* Comments text formating */ - -.cmt {} - -.cmt p { - margin: 0.7em 0; -} - -.cmt p:first-child { - margin-top: 0; -} - -.cmt p:last-child { - margin-bottom: 0; -} - -.cmt h3, -.cmt h4, -.cmt h5, -.cmt h6 { - margin-bottom: 0.7em; - margin-top: 1.4em; - display: block; - text-align: left; - font-weight: bold; -} - -.cmt h3 { - font-size: 
14pt; -} - -.cmt h4 { - font-size: 13pt; -} - -.cmt h5 { - font-size: 12pt; -} - -.cmt h6 { - font-size: 11pt; -} - -.cmt pre { - padding: 5px; - border: 1px solid #ddd; - background-color: #eee; - margin: 5px 0; - display: block; - font-family: monospace; -} - -.cmt pre span.ano { - color: blue; -} - -.cmt pre span.cmt { - color: green; -} - -.cmt pre span.kw { - font-weight: bold; -} - -.cmt pre span.lit { - color: #c71585; -} - -.cmt pre span.num { - color: #1e90ff; /* dodgerblue */ -} - -.cmt pre span.std { - color: #008080; /* teal */ -} - -.cmt ul { - display: block; - list-style: circle; - padding-left: 20px; -} - -.cmt ol { - display: block; - padding-left:20px; -} - -.cmt ol.decimal { - list-style: decimal; -} - -.cmt ol.lowerAlpha { - list-style: lower-alpha; -} - -.cmt ol.upperAlpha { - list-style: upper-alpha; -} - -.cmt ol.lowerRoman { - list-style: lower-roman; -} - -.cmt ol.upperRoman { - list-style: upper-roman; -} - -.cmt li { - display: list-item; -} - -.cmt code { - font-family: monospace; -} - -.cmt a { - font-style: bold; -} - -.cmt em, .cmt i { - font-style: italic; -} - -.cmt strong, .cmt b { - font-weight: bold; -} - -/* Comments structured layout */ - -.group > div.comment { - padding-top: 5px; - padding-bottom: 5px; - padding-right: 5px; - padding-left: 5px; - border: 1px solid #ddd; - background-color: #eeeee; - margin-top:5px; - margin-bottom:5px; - margin-right:5px; - margin-left:5px; - display: block; -} - -p.comment { - display: block; - margin-left: 14.7em; - margin-top: 5px; -} - -.shortcomment { - display: block; - margin: 5px 10px; -} - -div.fullcommenttop { - padding: 10px 10px; - background-image:url('fullcommenttopbg.gif'); - background-repeat:repeat-x; -} - -div.fullcomment { - margin: 5px 10px; -} - -#template div.fullcommenttop, -#template div.fullcomment { - display:none; - margin: 5px 0 0 14.7em; -} - -#template .shortcomment { - margin: 5px 0 0 14.7em; - padding: 0; -} - -div.fullcomment .block { - padding: 5px 0 0; - border-top: 1px solid #EBEBEB; - margin-top: 5px; - overflow: hidden; -} - -div.fullcommenttop .block { - padding: 5px 0 0; - border-top: 1px solid #EBEBEB; - margin-top: 5px; - margin-bottom: 5px -} - -div.fullcomment div.block ol li p, -div.fullcomment div.block ol li { - display:inline -} - -div.fullcomment .block > h5 { - font-style: italic; - font-weight: normal; - display: inline-block; -} - -div.fullcomment .comment { - margin: 5px 0 10px; -} - -div.fullcommenttop .comment:last-child, -div.fullcomment .comment:last-child { - margin-bottom: 0; -} - -div.fullcommenttop dl.paramcmts { - margin-bottom: 0.8em; - padding-bottom: 0.8em; -} - -div.fullcommenttop dl.paramcmts > dt, -div.fullcomment dl.paramcmts > dt { - display: block; - float: left; - font-weight: bold; - min-width: 70px; -} - -div.fullcommenttop dl.paramcmts > dd, -div.fullcomment dl.paramcmts > dd { - display: block; - padding-left: 10px; - margin-bottom: 5px; - margin-left: 70px; -} - -/* Members filter tool */ - -#textfilter { - position: relative; - display: block; - height: 20px; - margin-bottom: 5px; -} - -#textfilter > .pre { - display: block; - position: absolute; - top: 0; - left: 0; - height: 23px; - width: 21px; - background: url("filter_box_left.png"); -} - -#textfilter > .input { - display: block; - position: absolute; - top: 0; - right: 20px; - left: 20px; -} - -#textfilter > .input > input { - height: 20px; - padding: 1px; - font-weight: bold; - color: #000000; - background: #ffffff url("filterboxbarbg.png") repeat-x top left; - width: 100%; -} - 
-#textfilter > .post { - display: block; - position: absolute; - top: 0; - right: 0; - height: 23px; - width: 21px; - background: url("filter_box_right.png"); -} - -#mbrsel { - padding: 5px 10px; - background-color: #ededee; /* light gray */ - background-image:url('filterboxbg.gif'); - background-repeat:repeat-x; - font-size: 9.5pt; - display: block; - margin-top: 1em; -/* margin-bottom: 1em; */ -} - -#mbrsel > div { - margin-bottom: 5px; -} - -#mbrsel > div:last-child { - margin-bottom: 0; -} - -#mbrsel > div > span.filtertype { - padding: 4px; - margin-right: 5px; - float: left; - display: inline-block; - color: #000000; - font-weight: bold; - text-shadow: white 0px 1px 0px; - width: 4.5em; -} - -#mbrsel > div > ol { - display: inline-block; -} - -#mbrsel > div > a { - position:relative; - top: -8px; - font-size: 11px; - text-shadow: #ffffff 0 1px 0; -} - -#mbrsel > div > ol#linearization { - display: table; - margin-left: 70px; -} - -#mbrsel > div > ol#linearization > li.in { - text-decoration: none; - float: left; - padding-right: 10px; - margin-right: 5px; - background: url(selected-right.png) no-repeat; - background-position: right 0px; -} - -#mbrsel > div > ol#linearization > li.in > span{ - color: #404040; - float: left; - padding: 1px 0 1px 10px; - background: url(selected.png) no-repeat; - background-position: 0px 0px; - text-shadow: #ffffff 0 1px 0; -} - -#mbrsel > div > ol#implicits { - display: table; - margin-left: 70px; -} - -#mbrsel > div > ol#implicits > li.in { - text-decoration: none; - float: left; - padding-right: 10px; - margin-right: 5px; - background: url(selected-right-implicits.png) no-repeat; - background-position: right 0px; -} - -#mbrsel > div > ol#implicits > li.in > span{ - color: #404040; - float: left; - padding: 1px 0 1px 10px; - background: url(selected-implicits.png) no-repeat; - background-position: 0px 0px; - text-shadow: #ffffff 0 1px 0; -} - -#mbrsel > div > ol > li { -/* padding: 3px 10px;*/ - line-height: 16pt; - display: inline-block; - cursor: pointer; -} - -#mbrsel > div > ol > li.in { - text-decoration: none; - float: left; - padding-right: 10px; - margin-right: 5px; - background: url(selected-right.png) no-repeat; - background-position: right 0px; -} - -#mbrsel > div > ol > li.in > span{ - color: #404040; - float: left; - padding: 1px 0 1px 10px; - background: url(selected.png) no-repeat; - background-position: 0px 0px; - text-shadow: #ffffff 0 1px 0; -} - -#mbrsel > div > ol > li.out { - text-decoration: none; - float: left; - padding-right: 10px; - margin-right: 5px; -} - -#mbrsel > div > ol > li.out > span{ - color: #747474; -/* background-color: #999; */ - float: left; - padding: 1px 0 1px 10px; -/* background: url(unselected.png) no-repeat;*/ - background-position: 0px -1px; - text-shadow: #ffffff 0 1px 0; -} -/* -#mbrsel .hideall { - color: #4C4C4C; - line-height: 16px; - font-weight: bold; -} - -#mbrsel .hideall span { - color: #4C4C4C; - font-weight: bold; -} - -#mbrsel .showall { - color: #4C4C4C; - line-height: 16px; - font-weight: bold; -} - -#mbrsel .showall span { - color: #4C4C4C; - font-weight: bold; -}*/ - -.badge { - display: inline-block; - padding: 2px 4px; - font-size: 11.844px; - font-weight: bold; - line-height: 14px; - color: #ffffff; - text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); - white-space: nowrap; - vertical-align: baseline; - background-color: #999999; - padding-right: 9px; - padding-left: 9px; - -webkit-border-radius: 9px; - -moz-border-radius: 9px; - border-radius: 9px; -} - -.badge-red { - background-color: 
#b94a48; -} diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js deleted file mode 100644 index 6d1caf6d50..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js +++ /dev/null @@ -1,466 +0,0 @@ -// © 2009–2010 EPFL/LAMP -// code by Gilles Dubochet with contributions by Pedro Furlanetto - -$(document).ready(function(){ - - // Escapes special characters and returns a valid jQuery selector - function escapeJquery(str){ - return str.replace(/([;&,\.\+\*\~':"\!\^#$%@\[\]\(\)=>\|])/g, '\\$1'); - } - - // highlight and jump to selected member - if (window.location.hash) { - var temp = window.location.hash.replace('#', ''); - var elem = '#'+escapeJquery(temp); - - window.scrollTo(0, 0); - $(elem).parent().effect("highlight", {color: "#FFCC85"}, 3000); - $('html,body').animate({scrollTop:$(elem).parent().offset().top}, 1000); - } - - var isHiddenClass = function (name) { - return name == 'scala.Any' || - name == 'scala.AnyRef'; - }; - - var isHidden = function (elem) { - return $(elem).attr("data-hidden") == 'true'; - }; - - $("#linearization li:gt(0)").filter(function(){ - return isHiddenClass($(this).attr("name")); - }).removeClass("in").addClass("out"); - - $("#implicits li").filter(function(){ - return isHidden(this); - }).removeClass("in").addClass("out"); - - // Pre-filter members - filter(); - - // Member filter box - var input = $("#textfilter input"); - input.bind("keyup", function(event) { - - switch ( event.keyCode ) { - - case 27: // escape key - input.val(""); - filter(true); - break; - - case 38: // up - input.val(""); - filter(false); - window.scrollTo(0, $("body").offset().top); - input.focus(); - break; - - case 33: //page up - input.val(""); - filter(false); - break; - - case 34: //page down - input.val(""); - filter(false); - break; - - default: - window.scrollTo(0, $("#mbrsel").offset().top); - filter(true); - break; - - } - }); - input.focus(function(event) { - input.select(); - }); - $("#textfilter > .post").click(function() { - $("#textfilter input").attr("value", ""); - filter(); - }); - $(document).keydown(function(event) { - - if (event.keyCode == 9) { // tab - $("#index-input", window.parent.document).focus(); - input.attr("value", ""); - return false; - } - }); - - $("#linearization li").click(function(){ - if ($(this).hasClass("in")) { - $(this).removeClass("in"); - $(this).addClass("out"); - } - else if ($(this).hasClass("out")) { - $(this).removeClass("out"); - $(this).addClass("in"); - }; - filter(); - }); - - $("#implicits li").click(function(){ - if ($(this).hasClass("in")) { - $(this).removeClass("in"); - $(this).addClass("out"); - } - else if ($(this).hasClass("out")) { - $(this).removeClass("out"); - $(this).addClass("in"); - }; - filter(); - }); - - $("#mbrsel > div[id=ancestors] > ol > li.hideall").click(function() { - $("#linearization li.in").removeClass("in").addClass("out"); - $("#linearization li:first").removeClass("out").addClass("in"); - $("#implicits li.in").removeClass("in").addClass("out"); - - if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.showall").hasClass("in")) { - $(this).removeClass("out").addClass("in"); - $("#mbrsel > div[id=ancestors] > ol > li.showall").removeClass("in").addClass("out"); - } - - filter(); - }) - $("#mbrsel > div[id=ancestors] > ol > li.showall").click(function() { - var filteredLinearization = - $("#linearization li.out").filter(function() { - return ! 
isHiddenClass($(this).attr("name")); - }); - filteredLinearization.removeClass("out").addClass("in"); - - var filteredImplicits = - $("#implicits li.out").filter(function() { - return ! isHidden(this); - }); - filteredImplicits.removeClass("out").addClass("in"); - - if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.hideall").hasClass("in")) { - $(this).removeClass("out").addClass("in"); - $("#mbrsel > div[id=ancestors] > ol > li.hideall").removeClass("in").addClass("out"); - } - - filter(); - }); - $("#visbl > ol > li.public").click(function() { - if ($(this).hasClass("out")) { - $(this).removeClass("out").addClass("in"); - $("#visbl > ol > li.all").removeClass("in").addClass("out"); - filter(); - }; - }) - $("#visbl > ol > li.all").click(function() { - if ($(this).hasClass("out")) { - $(this).removeClass("out").addClass("in"); - $("#visbl > ol > li.public").removeClass("in").addClass("out"); - filter(); - }; - }); - $("#order > ol > li.alpha").click(function() { - if ($(this).hasClass("out")) { - orderAlpha(); - }; - }) - $("#order > ol > li.inherit").click(function() { - if ($(this).hasClass("out")) { - orderInherit(); - }; - }); - $("#order > ol > li.group").click(function() { - if ($(this).hasClass("out")) { - orderGroup(); - }; - }); - $("#groupedMembers").hide(); - - initInherit(); - - // Create tooltips - $(".extype").add(".defval").tooltip({ - tip: "#tooltip", - position:"top center", - predelay: 500, - onBeforeShow: function(ev) { - $(this.getTip()).text(this.getTrigger().attr("name")); - } - }); - - /* Add toggle arrows */ - //var docAllSigs = $("#template li").has(".fullcomment").find(".signature"); - // trying to speed things up a little bit - var docAllSigs = $("#template li[fullComment=yes] .signature"); - - function commentToggleFct(signature){ - var parent = signature.parent(); - var shortComment = $(".shortcomment", parent); - var fullComment = $(".fullcomment", parent); - var vis = $(":visible", fullComment); - signature.toggleClass("closed").toggleClass("opened"); - if (vis.length > 0) { - shortComment.slideDown(100); - fullComment.slideUp(100); - } - else { - shortComment.slideUp(100); - fullComment.slideDown(100); - } - }; - docAllSigs.addClass("closed"); - docAllSigs.click(function() { - commentToggleFct($(this)); - }); - - /* Linear super types and known subclasses */ - function toggleShowContentFct(e){ - e.toggleClass("open"); - var content = $(".hiddenContent", e.parent().get(0)); - if (content.is(':visible')) { - content.slideUp(100); - } - else { - content.slideDown(100); - } - }; - - $(".toggle:not(.diagram-link)").click(function() { - toggleShowContentFct($(this)); - }); - - // Set parent window title - windowTitle(); - - if ($("#order > ol > li.group").length == 1) { orderGroup(); }; -}); - -function orderAlpha() { - $("#order > ol > li.alpha").removeClass("out").addClass("in"); - $("#order > ol > li.inherit").removeClass("in").addClass("out"); - $("#order > ol > li.group").removeClass("in").addClass("out"); - $("#template > div.parent").hide(); - $("#template > div.conversion").hide(); - $("#mbrsel > div[id=ancestors]").show(); - filter(); -}; - -function orderInherit() { - $("#order > ol > li.inherit").removeClass("out").addClass("in"); - $("#order > ol > li.alpha").removeClass("in").addClass("out"); - $("#order > ol > li.group").removeClass("in").addClass("out"); - $("#template > div.parent").show(); - $("#template > div.conversion").show(); - $("#mbrsel > div[id=ancestors]").hide(); - filter(); -}; - -function orderGroup() { - 
$("#order > ol > li.group").removeClass("out").addClass("in"); - $("#order > ol > li.alpha").removeClass("in").addClass("out"); - $("#order > ol > li.inherit").removeClass("in").addClass("out"); - $("#template > div.parent").hide(); - $("#template > div.conversion").hide(); - $("#mbrsel > div[id=ancestors]").show(); - filter(); -}; - -/** Prepares the DOM for inheritance-based display. To do so it will: - * - hide all statically-generated parents headings; - * - copy all members from the value and type members lists (flat members) to corresponding lists nested below the - * parent headings (inheritance-grouped members); - * - initialises a control variable used by the filter method to control whether filtering happens on flat members - * or on inheritance-grouped members. */ -function initInherit() { - // inheritParents is a map from fully-qualified names to the DOM node of parent headings. - var inheritParents = new Object(); - var groupParents = new Object(); - $("#inheritedMembers > div.parent").each(function(){ - inheritParents[$(this).attr("name")] = $(this); - }); - $("#inheritedMembers > div.conversion").each(function(){ - inheritParents[$(this).attr("name")] = $(this); - }); - $("#groupedMembers > div.group").each(function(){ - groupParents[$(this).attr("name")] = $(this); - }); - - $("#types > ol > li").each(function(){ - var mbr = $(this); - this.mbrText = mbr.find("> .fullcomment .cmt").text(); - var qualName = mbr.attr("name"); - var owner = qualName.slice(0, qualName.indexOf("#")); - var name = qualName.slice(qualName.indexOf("#") + 1); - var inheritParent = inheritParents[owner]; - if (inheritParent != undefined) { - var types = $("> .types > ol", inheritParent); - if (types.length == 0) { - inheritParent.append("

<div class='types members'><h3>Type Members</h3><ol></ol></div>
        "); - types = $("> .types > ol", inheritParent); - } - var clone = mbr.clone(); - clone[0].mbrText = this.mbrText; - types.append(clone); - } - var group = mbr.attr("group") - var groupParent = groupParents[group]; - if (groupParent != undefined) { - var types = $("> .types > ol", groupParent); - if (types.length == 0) { - groupParent.append("
          "); - types = $("> .types > ol", groupParent); - } - var clone = mbr.clone(); - clone[0].mbrText = this.mbrText; - types.append(clone); - } - }); - - $("#values > ol > li").each(function(){ - var mbr = $(this); - this.mbrText = mbr.find("> .fullcomment .cmt").text(); - var qualName = mbr.attr("name"); - var owner = qualName.slice(0, qualName.indexOf("#")); - var name = qualName.slice(qualName.indexOf("#") + 1); - var inheritParent = inheritParents[owner]; - if (inheritParent != undefined) { - var values = $("> .values > ol", inheritParent); - if (values.length == 0) { - inheritParent.append("

<div class='values members'><h3>Value Members</h3><ol></ol></div>
            "); - values = $("> .values > ol", inheritParent); - } - var clone = mbr.clone(); - clone[0].mbrText = this.mbrText; - values.append(clone); - } - var group = mbr.attr("group") - var groupParent = groupParents[group]; - if (groupParent != undefined) { - var values = $("> .values > ol", groupParent); - if (values.length == 0) { - groupParent.append("
              "); - values = $("> .values > ol", groupParent); - } - var clone = mbr.clone(); - clone[0].mbrText = this.mbrText; - values.append(clone); - } - }); - $("#inheritedMembers > div.parent").each(function() { - if ($("> div.members", this).length == 0) { $(this).remove(); }; - }); - $("#inheritedMembers > div.conversion").each(function() { - if ($("> div.members", this).length == 0) { $(this).remove(); }; - }); - $("#groupedMembers > div.group").each(function() { - if ($("> div.members", this).length == 0) { $(this).remove(); }; - }); -}; - -/* filter used to take boolean scrollToMember */ -function filter() { - var query = $.trim($("#textfilter input").val()).toLowerCase(); - query = query.replace(/[-[\]{}()*+?.,\\^$|#]/g, "\\$&").replace(/\s+/g, "|"); - var queryRegExp = new RegExp(query, "i"); - var privateMembersHidden = $("#visbl > ol > li.public").hasClass("in"); - var orderingAlphabetic = $("#order > ol > li.alpha").hasClass("in"); - var orderingInheritance = $("#order > ol > li.inherit").hasClass("in"); - var orderingGroups = $("#order > ol > li.group").hasClass("in"); - var hiddenSuperclassElementsLinearization = orderingInheritance ? $("#linearization > li:gt(0)") : $("#linearization > li.out"); - var hiddenSuperclassesLinearization = hiddenSuperclassElementsLinearization.map(function() { - return $(this).attr("name"); - }).get(); - var hiddenSuperclassElementsImplicits = orderingInheritance ? $("#implicits > li") : $("#implicits > li.out"); - var hiddenSuperclassesImplicits = hiddenSuperclassElementsImplicits.map(function() { - return $(this).attr("name"); - }).get(); - - var hideInheritedMembers; - - if (orderingAlphabetic) { - $("#allMembers").show(); - $("#inheritedMembers").hide(); - $("#groupedMembers").hide(); - hideInheritedMembers = true; - $("#allMembers > .members").each(filterFunc); - } else if (orderingGroups) { - $("#groupedMembers").show(); - $("#inheritedMembers").hide(); - $("#allMembers").hide(); - hideInheritedMembers = true; - $("#groupedMembers > .group > .members").each(filterFunc); - $("#groupedMembers > div.group").each(function() { - $(this).show(); - if ($("> div.members", this).not(":hidden").length == 0) { - $(this).hide(); - } else { - $(this).show(); - } - }); - } else if (orderingInheritance) { - $("#inheritedMembers").show(); - $("#groupedMembers").hide(); - $("#allMembers").hide(); - hideInheritedMembers = false; - $("#inheritedMembers > .parent > .members").each(filterFunc); - $("#inheritedMembers > .conversion > .members").each(filterFunc); - } - - - function filterFunc() { - var membersVisible = false; - var members = $(this); - members.find("> ol > li").each(function() { - var mbr = $(this); - if (privateMembersHidden && mbr.attr("visbl") == "prt") { - mbr.hide(); - return; - } - var name = mbr.attr("name"); - // Owner filtering must not happen in "inherited from" member lists - if (hideInheritedMembers) { - var ownerIndex = name.indexOf("#"); - if (ownerIndex < 0) { - ownerIndex = name.lastIndexOf("."); - } - var owner = name.slice(0, ownerIndex); - for (var i = 0; i < hiddenSuperclassesLinearization.length; i++) { - if (hiddenSuperclassesLinearization[i] == owner) { - mbr.hide(); - return; - } - }; - for (var i = 0; i < hiddenSuperclassesImplicits.length; i++) { - if (hiddenSuperclassesImplicits[i] == owner) { - mbr.hide(); - return; - } - }; - } - if (query && !(queryRegExp.test(name) || queryRegExp.test(this.mbrText))) { - mbr.hide(); - return; - } - mbr.show(); - membersVisible = true; - }); - - if (membersVisible) - 
members.show(); - else - members.hide(); - }; - - return false; -}; - -function windowTitle() -{ - try { - parent.document.title=document.title; - } - catch(e) { - // Chrome doesn't allow settings the parent's title when - // used on the local file system. - } -}; diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js deleted file mode 100644 index 0af34eca4c..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js +++ /dev/null @@ -1,14 +0,0 @@ -/* - * tools.tooltip 1.1.3 - Tooltips done right. - * - * Copyright (c) 2009 Tero Piirainen - * http://flowplayer.org/tools/tooltip.html - * - * Dual licensed under MIT and GPL 2+ licenses - * http://www.opensource.org/licenses - * - * Launch : November 2008 - * Date: ${date} - * Revision: ${revision} - */ -(function(c){var d=[];c.tools=c.tools||{};c.tools.tooltip={version:"1.1.3",conf:{effect:"toggle",fadeOutSpeed:"fast",tip:null,predelay:0,delay:30,opacity:1,lazy:undefined,position:["top","center"],offset:[0,0],cancelDefault:true,relative:false,oneInstance:true,events:{def:"mouseover,mouseout",input:"focus,blur",widget:"focus mouseover,blur mouseout",tooltip:"mouseover,mouseout"},api:false},addEffect:function(e,g,f){b[e]=[g,f]}};var b={toggle:[function(e){var f=this.getConf(),g=this.getTip(),h=f.opacity;if(h<1){g.css({opacity:h})}g.show();e.call()},function(e){this.getTip().hide();e.call()}],fade:[function(e){this.getTip().fadeIn(this.getConf().fadeInSpeed,e)},function(e){this.getTip().fadeOut(this.getConf().fadeOutSpeed,e)}]};function a(f,g){var p=this,k=c(this);f.data("tooltip",p);var l=f.next();if(g.tip){l=c(g.tip);if(l.length>1){l=f.nextAll(g.tip).eq(0);if(!l.length){l=f.parent().nextAll(g.tip).eq(0)}}}function o(u){var t=g.relative?f.position().top:f.offset().top,s=g.relative?f.position().left:f.offset().left,v=g.position[0];t-=l.outerHeight()-g.offset[0];s+=f.outerWidth()+g.offset[1];var q=l.outerHeight()+f.outerHeight();if(v=="center"){t+=q/2}if(v=="bottom"){t+=q}v=g.position[1];var r=l.outerWidth()+f.outerWidth();if(v=="center"){s-=r/2}if(v=="left"){s-=r}return{top:t,left:s}}var i=f.is(":input"),e=i&&f.is(":checkbox, :radio, select, :button"),h=f.attr("type"),n=g.events[h]||g.events[i?(e?"widget":"input"):"def"];n=n.split(/,\s*/);if(n.length!=2){throw"Tooltip: bad events configuration for "+h}f.bind(n[0],function(r){if(g.oneInstance){c.each(d,function(){this.hide()})}var q=l.data("trigger");if(q&&q[0]!=this){l.hide().stop(true,true)}r.target=this;p.show(r);n=g.events.tooltip.split(/,\s*/);l.bind(n[0],function(){p.show(r)});if(n[1]){l.bind(n[1],function(){p.hide(r)})}});f.bind(n[1],function(q){p.hide(q)});if(!c.browser.msie&&!i&&!g.predelay){f.mousemove(function(){if(!p.isShown()){f.triggerHandler("mouseover")}})}if(g.opacity<1){l.css("opacity",g.opacity)}var m=0,j=f.attr("title");if(j&&g.cancelDefault){f.removeAttr("title");f.data("title",j)}c.extend(p,{show:function(r){if(r){f=c(r.target)}clearTimeout(l.data("timer"));if(l.is(":animated")||l.is(":visible")){return p}function q(){l.data("trigger",f);var t=o(r);if(g.tip&&j){l.html(f.data("title"))}r=r||c.Event();r.type="onBeforeShow";k.trigger(r,[t]);if(r.isDefaultPrevented()){return p}t=o(r);l.css({position:"absolute",top:t.top,left:t.left});var s=b[g.effect];if(!s){throw'Nonexistent effect "'+g.effect+'"'}s[0].call(p,function(){r.type="onShow";k.trigger(r)})}if(g.predelay){clearTimeout(m);m=setTimeout(q,g.predelay)}else{q()}return 
p},hide:function(r){clearTimeout(l.data("timer"));clearTimeout(m);if(!l.is(":visible")){return}function q(){r=r||c.Event();r.type="onBeforeHide";k.trigger(r);if(r.isDefaultPrevented()){return}b[g.effect][1].call(p,function(){r.type="onHide";k.trigger(r)})}if(g.delay&&r){l.data("timer",setTimeout(q,g.delay))}else{q()}return p},isShown:function(){return l.is(":visible, :animated")},getConf:function(){return g},getTip:function(){return l},getTrigger:function(){return f},bind:function(q,r){k.bind(q,r);return p},onHide:function(q){return this.bind("onHide",q)},onBeforeShow:function(q){return this.bind("onBeforeShow",q)},onShow:function(q){return this.bind("onShow",q)},onBeforeHide:function(q){return this.bind("onBeforeHide",q)},unbind:function(q){k.unbind(q);return p}});c.each(g,function(q,r){if(c.isFunction(r)){p.bind(q,r)}})}c.prototype.tooltip=function(e){var f=this.eq(typeof e=="number"?e:0).data("tooltip");if(f){return f}var g=c.extend(true,{},c.tools.tooltip.conf);if(c.isFunction(e)){e={onBeforeShow:e}}else{if(typeof e=="string"){e={tip:e}}}e=c.extend(true,g,e);if(typeof e.position=="string"){e.position=e.position.split(/,?\s/)}if(e.lazy!==false&&(e.lazy===true||this.length>20)){this.one("mouseover",function(h){f=new a(c(this),e);f.show(h);d.push(f)})}else{this.each(function(){f=new a(c(this),e);d.push(f)})}return e.api?f:this}})(jQuery); \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png deleted file mode 100644 index fb961a2eda..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png deleted file mode 100644 index 625d9251cb..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png deleted file mode 100644 index 88983254ce..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png deleted file mode 100644 index d0cd7fd512..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png deleted file mode 100644 index 6c6e1fe2f5..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png deleted file mode 100644 index 04c8794e92..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png deleted file mode 100644 index d8152529fd..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png and /dev/null differ diff --git 
a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai deleted file mode 100644 index 3b5c47c9e3..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai and /dev/null differ
0.427 scn -q 1 0 0 1 193.0991 461.0352 cm -0 0 m --3.078 0.794 -4.478 1.111 -8.263 0.96 c --8.263 1.243 l --4.866 1.243 -1.61 0.638 1.402 -0.47 c -0.981 -0.329 0.425 -0.126 0 0 c -f -Q -0.133 0.216 0.412 scn -q 1 0 0 1 189.0669 461.958 cm -0 0 m --2.557 0.263 -2.657 0.273 -4.231 0.228 c --4.231 0.32 l --2.431 0.32 -0.671 0.15 1.035 -0.174 c -0.724 -0.122 0.312 -0.042 0 0 c -f -Q -0.125 0.208 0.396 scn -q 1 0 0 1 184.8359 462.2749 cm -0 0 m -0.335 0.003 0.669 -0.002 1.001 -0.014 c -0.701 -0.01 0.211 -0.214 0 0 c -f -Q - endstream endobj 1352 0 obj <> endobj 1340 0 obj <> endobj 1341 0 obj <>/XObject<>>>/Subtype/Form>>stream -q -315.165 487.275 m -315.165 492.275 l -318.477 492.275 321.168 489.593 321.168 486.272 c -321.168 482.96 318.477 480.278 315.165 480.278 c -311.853 480.278 309.171 482.96 309.171 486.272 c -309.171 489.593 311.853 492.275 315.165 492.275 c -315.165 487.275 l -314.621 487.278 314.17 486.83 314.171 486.272 c -314.168 485.727 314.619 485.276 315.165 485.278 c -315.715 485.275 316.172 485.733 316.168 486.272 c -316.17 486.824 315.713 487.279 315.165 487.275 c -W n -q -1 w 4 M 0 j 0 J []0 d -/GS0 gs -0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do -Q -Q - endstream endobj 1353 0 obj <> endobj 1354 0 obj <>/ExtGState<>>>/Subtype/Form>>stream -/CS0 cs 0.2 0.325 0.624 scn -/GS0 gs -q 1 0 0 1 315.165 487.2754 cm -0 0 m -0 5 l -3.312 5 6.003 2.318 6.003 -1.003 c -6.003 -4.315 3.312 -6.997 0 -6.997 c --3.312 -6.997 -5.994 -4.315 -5.994 -1.003 c --5.994 2.318 -3.312 5 0 5 c -0 0 l --0.544 0.003 -0.995 -0.445 -0.994 -1.003 c --0.997 -1.549 -0.546 -2 0 -1.997 c -0.55 -2 1.007 -1.542 1.003 -1.003 c -1.005 -0.451 0.548 0.003 0 0 c -f -Q -q 1 0 0 1 315.165 488.1997 cm -0 0 m --0.013 -0.041 -0.073 -0.074 -0.082 -0.115 c --0.11 -0.248 -0.02 -0.425 0 -0.559 c -0 -0.924 l --0.544 -0.921 -0.995 -1.37 -0.994 -1.927 c --0.997 -2.473 -0.546 -2.924 0 -2.921 c -0.55 -2.924 1.007 -2.467 1.003 -1.927 c -1.005 -1.375 0.548 -0.921 0 -0.924 c -0 -0.559 l -0.034 -0.556 0.079 -0.552 0.113 -0.549 c -0.142 -0.549 0.183 -0.536 0.209 -0.548 c -1.045 -1.475 l -1.44 -2.16 1.79 -2.114 1.805 -2.112 c -2.058 -2.072 3.187 -0.623 1.901 0.191 c -1.597 0.384 1.274 0.411 1.13 0.396 c -0 0 l -0 4.076 l -3.312 4.076 6.003 1.394 6.003 -1.927 c -6.003 -5.239 3.312 -7.921 0 -7.921 c --3.312 -7.921 -5.994 -5.239 -5.994 -1.927 c --5.994 1.394 -3.312 4.076 0 4.076 c -0 0 l -f -Q -0.196 0.318 0.612 scn -q 1 0 0 1 315.165 488.3418 cm -0 0 m --0.03 -0.092 -0.163 -0.17 -0.184 -0.265 c --0.221 -0.432 -0.125 -0.677 -0.186 -0.837 c --0.186 -0.838 -0.235 -0.941 -0.399 -1.048 c --1.15 -1.539 -1.036 -2.16 -0.983 -2.339 c --0.8 -2.96 -0.143 -3.262 0.452 -2.998 c -0.652 -2.908 0.791 -2.771 0.873 -2.69 c -1.144 -2.423 1.548 -2.625 1.836 -2.417 c -2.431 -1.985 2.564 -1.604 2.628 -1.42 c -2.85 -0.787 2.46 0.134 1.627 0.371 c -0.853 0.592 0.002 0.008 0 0 c -0 3.934 l -3.312 3.934 6.003 1.251 6.003 -2.069 c -6.003 -5.381 3.312 -8.063 0 -8.063 c --3.312 -8.063 -5.994 -5.381 -5.994 -2.069 c --5.994 1.251 -3.312 3.934 0 3.934 c -0 0 l -f -Q -0.192 0.31 0.596 scn -q 1 0 0 1 315.165 488.4824 cm -0 0 m --0.294 -0.832 -1.287 -1.354 -1.07 -2.414 c --0.931 -3.09 -0.167 -3.555 0.649 -3.164 c -1.049 -2.972 1.516 -2.957 1.889 -2.695 c -2.243 -2.445 2.625 -2.13 2.762 -1.679 c -3.159 -0.375 2.125 0.264 1.73 0.385 c -0.831 0.662 0.003 0.008 0 0 c -0 3.793 l -3.312 3.793 6.003 1.111 6.003 -2.21 c -6.003 -5.522 3.312 -8.204 0 -8.204 c --3.312 -8.204 -5.994 -5.522 -5.994 -2.21 c --5.994 1.111 -3.312 3.793 0 3.793 c -0 0 l -f -Q -0.188 0.302 0.58 scn -q 1 0 0 1 315.165 
488.6216 cm -0 0 m --0.352 -0.867 -1.375 -1.438 -1.138 -2.566 c --1.017 -3.142 -0.345 -3.804 0.713 -3.398 c -2.483 -2.719 2.628 -2.663 2.945 -1.783 c -2.951 -1.768 3.406 -0.235 2.053 0.317 c -0.863 0.802 0.004 0.01 0 0 c -0 3.654 l -3.312 3.654 6.003 0.972 6.003 -2.349 c -6.003 -5.661 3.312 -8.343 0 -8.343 c --3.312 -8.343 -5.994 -5.661 -5.994 -2.349 c --5.994 0.972 -3.312 3.654 0 3.654 c -0 0 l -f -Q -0.18 0.294 0.569 scn -q 1 0 0 1 315.165 488.7588 cm -0 0 m --0.192 -0.416 -0.582 -0.691 -0.789 -1.097 c --0.793 -1.105 -1.082 -1.703 -1.083 -1.706 c --1.253 -2.111 -1.282 -2.441 -1.181 -2.81 c --1.118 -3.036 -0.72 -4.135 0.985 -3.564 c -5.022 -2.213 2.486 0.225 2.452 0.247 c -1.442 0.897 0.101 0.219 0 0 c -0 3.517 l -3.312 3.517 6.003 0.834 6.003 -2.486 c -6.003 -5.798 3.312 -8.48 0 -8.48 c --3.312 -8.48 -5.994 -5.798 -5.994 -2.486 c --5.994 0.834 -3.312 3.517 0 3.517 c -0 0 l -f -Q -0.176 0.286 0.553 scn -q 1 0 0 1 315.165 488.9116 cm -0 0 m --0.013 -0.025 -0.053 -0.04 -0.076 -0.057 c --0.432 -0.327 -0.719 -0.611 -1.164 -1.801 c --1.234 -1.99 -1.448 -2.564 -1.178 -3.156 c --0.778 -4.031 0.18 -4.2 1.671 -3.658 c -3.876 -2.856 3.991 -0.38 2.341 0.402 c -1.366 0.864 0.123 0.248 0 0 c -0 3.364 l -3.312 3.364 6.003 0.682 6.003 -2.639 c -6.003 -5.951 3.312 -8.633 0 -8.633 c --3.312 -8.633 -5.994 -5.951 -5.994 -2.639 c --5.994 0.682 -3.312 3.364 0 3.364 c -0 0 l -f -Q -0.173 0.278 0.541 scn -q 1 0 0 1 315.165 489.1035 cm -0 0 m --0.034 -0.068 -0.142 -0.105 -0.202 -0.15 c --0.734 -0.546 -0.993 -1.253 -1.244 -1.936 c --1.353 -2.232 -1.496 -2.812 -1.238 -3.374 c --0.612 -4.739 1.248 -4.146 1.803 -3.932 c -4.138 -3.031 4.265 -0.308 2.51 0.419 c -1.108 1 0.006 0.012 0 0 c -0 3.172 l -3.312 3.172 6.003 0.49 6.003 -2.831 c -6.003 -6.143 3.312 -8.825 0 -8.825 c --3.312 -8.825 -5.994 -6.143 -5.994 -2.831 c --5.994 0.49 -3.312 3.172 0 3.172 c -0 0 l -f -Q -0.169 0.275 0.525 scn -q 1 0 0 1 315.165 489.291 cm -0 0 m --0.037 -0.069 -0.152 -0.103 -0.217 -0.147 c --0.48 -0.327 -0.918 -0.951 -1.084 -1.383 c --1.402 -2.209 -1.592 -2.802 -1.342 -3.486 c --1.138 -4.046 -0.487 -4.899 1.578 -4.322 c -4.081 -3.623 4.628 -0.763 2.992 0.316 c -1.701 1.167 0.079 0.149 0 0 c -0 2.984 l -3.312 2.984 6.003 0.302 6.003 -3.019 c -6.003 -6.331 3.312 -9.013 0 -9.013 c --3.312 -9.013 -5.994 -6.331 -5.994 -3.019 c --5.994 0.302 -3.312 2.984 0 2.984 c -0 0 l -f -Q -0.165 0.267 0.51 scn -q 1 0 0 1 315.165 489.4751 cm -0 0 m --0.175 -0.316 -0.541 -0.436 -0.745 -0.721 c --1.04 -1.133 -1.134 -1.367 -1.233 -1.614 c --1.283 -1.739 -1.712 -2.854 -1.439 -3.598 c --0.844 -5.219 1.105 -4.774 1.689 -4.6 c -4.424 -3.78 5.002 -0.76 3.22 0.385 c -1.946 1.202 0.234 0.424 0 0 c -0 2.8 l -3.312 2.8 6.003 0.118 6.003 -3.203 c -6.003 -6.515 3.312 -9.197 0 -9.197 c --3.312 -9.197 -5.994 -6.515 -5.994 -3.203 c --5.994 0.118 -3.312 2.8 0 2.8 c -0 0 l -f -Q -0.161 0.259 0.498 scn -q 1 0 0 1 315.165 489.7065 cm -0 0 m --0.06 -0.132 -0.265 -0.21 -0.385 -0.291 c --0.751 -0.537 -1.207 -1.436 -1.319 -1.735 c --1.402 -1.96 -1.802 -3.124 -1.467 -3.945 c --0.712 -5.795 1.956 -4.866 1.982 -4.855 c -5.299 -3.58 5.174 -0.371 3.116 0.573 c -1.411 1.355 0.007 0.017 0 0 c -0 2.569 l -3.312 2.569 6.003 -0.113 6.003 -3.434 c -6.003 -6.746 3.312 -9.428 0 -9.428 c --3.312 -9.428 -5.994 -6.746 -5.994 -3.434 c --5.994 -0.113 -3.312 2.569 0 2.569 c -0 0 l -f -Q -0.153 0.251 0.482 scn -q 1 0 0 1 315.165 489.9888 cm -0 0 m --0.04 -0.083 -0.167 -0.135 -0.239 -0.193 c --0.739 -0.597 -1.12 -1.159 -1.404 -1.909 c --1.678 -2.633 -1.751 -3.637 -1.568 -4.146 c --0.856 -6.124 1.88 
-5.306 1.908 -5.297 c -5.872 -3.969 5.347 -0.495 3.422 0.519 c -1.628 1.464 0.058 0.122 0 0 c -0 2.287 l -3.312 2.287 6.003 -0.396 6.003 -3.716 c -6.003 -7.028 3.312 -9.71 0 -9.71 c --3.312 -9.71 -5.994 -7.028 -5.994 -3.716 c --5.994 -0.396 -3.312 2.287 0 2.287 c -0 0 l -f -Q -0.149 0.243 0.467 scn -q 1 0 0 1 315.165 490.2749 cm -0 0 m --0.045 -0.106 -0.209 -0.167 -0.302 -0.235 c --0.485 -0.372 -1.122 -0.935 -1.618 -2.443 c --1.723 -2.761 -1.897 -3.881 -1.538 -4.677 c --1.024 -5.812 0.792 -6.206 2.512 -5.554 c -6.336 -4.105 5.75 -0.288 3.153 0.723 c -1.353 1.423 0.007 0.017 0 0 c -0 2 l -3.312 2 6.003 -0.682 6.003 -4.002 c -6.003 -7.314 3.312 -9.997 0 -9.997 c --3.312 -9.997 -5.994 -7.314 -5.994 -4.002 c --5.994 -0.682 -3.312 2 0 2 c -0 0 l -f -Q -0.145 0.235 0.455 scn -q 1 0 0 1 315.165 490.6582 cm -0 0 m --0.163 -0.361 -0.541 -0.515 -0.777 -0.805 c --0.945 -1.011 -1.046 -1.259 -1.201 -1.474 c --1.269 -1.568 -1.409 -1.763 -1.714 -2.734 c --2.048 -3.798 -1.784 -4.665 -1.597 -5.087 c --1.005 -6.421 1.188 -6.695 2.68 -6.041 c -8.251 -3.594 4.333 0.165 2.965 0.677 c -1.252 1.319 0.007 0.016 0 0 c -0 1.617 l -3.312 1.617 6.003 -1.065 6.003 -4.386 c -6.003 -7.698 3.312 -10.38 0 -10.38 c --3.312 -10.38 -5.994 -7.698 -5.994 -4.386 c --5.994 -1.065 -3.312 1.617 0 1.617 c -0 0 l -f -Q -0.141 0.227 0.439 scn -q 1 0 0 1 315.165 491.083 cm -0 0 m --0.128 -0.296 -0.441 -0.404 -0.637 -0.631 c --0.787 -0.804 -0.891 -1.009 -1.028 -1.191 c --1.149 -1.351 -1.614 -2.354 -1.616 -2.362 c --2.165 -3.906 -2.034 -4.643 -1.834 -5.161 c --0.959 -7.42 1.653 -7.023 2.585 -6.679 c -3.892 -6.198 6.61 -5.196 5.552 -2.522 c -5.843 -3.227 6.003 -4 6.003 -4.811 c -6.003 -8.123 3.312 -10.805 0 -10.805 c --3.312 -10.805 -5.994 -8.123 -5.994 -4.811 c --5.994 -1.49 -3.312 1.192 0 1.192 c -0 0 l -f -Q -0.137 0.22 0.427 scn -q 1 0 0 1 315.165 491.5479 cm -0 0 m --0.037 -0.078 -0.154 -0.129 -0.22 -0.185 c --1.232 -1.033 -1.806 -2.828 -1.83 -2.904 c --2.22 -4.142 -2.232 -5.159 -1.867 -5.927 c --0.58 -8.633 3.354 -7.149 3.394 -7.134 c -4.44 -6.729 6.193 -6.052 5.898 -4.154 c -5.967 -4.518 6.003 -4.892 6.003 -5.275 c -6.003 -8.587 3.312 -11.27 0 -11.27 c --3.312 -11.27 -5.994 -8.587 -5.994 -5.275 c --5.994 -1.955 -3.312 0.728 0 0.728 c -0 0 l -f -Q -0.133 0.216 0.412 scn -q 1 0 0 1 315.165 491.9907 cm -0 0 m --0.038 -0.067 -0.155 -0.091 -0.221 -0.13 c --1.146 -0.672 -1.618 -2.109 -1.997 -3.263 c --2.003 -3.281 -2.538 -5.073 -2.065 -6.285 c --1.01 -8.991 2.93 -7.989 3.097 -7.945 c -4.317 -7.624 5.989 -7.184 6.001 -5.584 c -6.002 -5.628 6.003 -5.673 6.003 -5.718 c -6.003 -9.03 3.312 -11.712 0 -11.712 c --3.312 -11.712 -5.994 -9.03 -5.994 -5.718 c --5.994 -2.397 -3.312 0.285 0 0.285 c -0 0 l -f -Q -0.125 0.208 0.396 scn -q 1 0 0 1 315.165 492.2632 cm -0 0 m --0.043 -0.052 -0.154 -0.029 -0.221 -0.042 c --0.695 -0.132 -1.346 -0.69 -1.729 -1.732 c --2.601 -4.102 -2.422 -5.693 -2.305 -6.268 c --1.773 -8.88 1.72 -8.614 1.755 -8.61 c -4.215 -8.37 5.7 -8.226 5.951 -6.783 c -5.562 -9.72 3.043 -11.985 0 -11.985 c --3.312 -11.985 -5.994 -9.303 -5.994 -5.991 c --5.994 -2.67 -3.312 0.012 0 0.012 c -0 0 l -f -Q -0.122 0.2 0.384 scn -q 1 0 0 1 314.2603 492.1987 cm -0 0 m --1.727 -0.587 -1.739 -4.385 -1.738 -4.546 c --1.734 -6.483 -1.193 -7.61 0.017 -8.2 c -1.798 -9.069 6.085 -9.361 6.66 -7.637 c -5.921 -10.115 3.622 -11.92 0.905 -11.92 c --2.407 -11.92 -5.089 -9.238 -5.089 -5.926 c --5.089 -2.857 -2.798 -0.333 0.165 0.032 c -0.115 0.022 0.048 0.013 0 0 c -f -Q -0.118 0.192 0.369 scn -q 1 0 0 1 312.9341 491.7764 cm -0 0 m --1.086 -0.961 -0.817 -4.853 
-0.535 -5.61 c -0.431 -8.208 2.403 -8.585 3.207 -8.626 c -4.27 -8.681 5.298 -9.068 6.378 -8.967 c -6.691 -8.938 7.264 -8.802 7.584 -8.218 c -6.592 -10.165 4.566 -11.498 2.231 -11.498 c --1.081 -11.498 -3.763 -8.816 -3.763 -5.504 c --3.763 -2.812 -2 -0.54 0.432 0.225 c -0.372 0.2 0.292 0.168 0.231 0.144 c -0.161 0.102 0.062 0.054 0 0 c -f -Q -0.204 0.333 0.639 scn -q 1 0 0 1 316.7451 486.4531 cm -0 0 m --0.091 0.065 -0.091 0.065 -0.52 0.593 c --0.662 0.769 -0.836 0.916 -0.974 1.096 c --1.233 1.432 -1.232 1.599 -1.232 1.6 c --1.226 1.62 -0.028 2.446 0.591 1.368 c -1.026 0.611 0.245 -0.132 0.233 -0.134 c -0.153 -0.145 0.065 -0.047 0 0 c -f -Q -0.141 0.227 0.439 scn -q 1 0 0 1 317.7354 491.6665 cm -0 0 m --1.294 0.462 -2.254 -0.325 -2.57 -0.583 c --2.57 0.609 l --1.403 0.609 -0.313 0.276 0.609 -0.301 c -0.52 -0.251 0.4 -0.185 0.31 -0.134 c -0.217 -0.094 0.095 -0.034 0 0 c -f -Q -0.208 0.337 0.655 scn -q 1 0 0 1 316.7852 486.708 cm -0 0 m --0.336 0.357 l --0.473 0.528 -0.628 0.683 -0.758 0.858 c --0.977 1.152 -1.021 1.271 -1.02 1.277 c --1.015 1.292 -0.028 1.706 0.328 0.955 c -0.588 0.409 0.173 -0.121 0.167 -0.122 c -0.106 -0.133 0.047 -0.04 0 0 c -f -Q -0.137 0.22 0.427 scn -q 1 0 0 1 316.9321 491.998 cm -0 0 m --0.649 0.12 -1.161 -0.01 -1.767 -0.45 c --1.767 0.277 l --1.039 0.277 -0.34 0.147 0.306 -0.09 c -0.223 -0.065 0.111 -0.031 0.028 -0.006 c -0.02 -0.004 0.008 -0.001 0 0 c -f -Q -0.216 0.345 0.667 scn -q 1 0 0 1 316.7891 486.9756 cm -0 0 m --0.004 0.004 -0.536 0.578 -0.712 0.865 c --0.569 0.878 -0.483 0.886 -0.265 0.812 c --0.18 0.784 -0.084 0.701 -0.026 0.633 c -0.032 0.564 0.089 0.451 0.102 0.362 c -0.133 0.142 0.096 0.015 0.073 -0.061 c -0.051 -0.042 0.021 -0.02 0 0 c -f -Q -0.133 0.216 0.412 scn -q 1 0 0 1 316.0703 492.1978 cm -0 0 m --0.314 -0.005 -0.486 -0.009 -0.905 -0.207 c --0.905 0.078 l --0.519 0.078 -0.142 0.041 0.224 -0.028 c -0.157 -0.02 0.067 -0.003 0 0 c -f -Q -0.125 0.208 0.396 scn -q 1 0 0 1 315.165 492.2632 cm -0 0 m -0 0.012 l -0.072 0.012 0.144 0.011 0.215 0.008 c -0.15 0.006 0.046 -0.044 0 0 c -f -Q - endstream endobj 1355 0 obj <> endobj 1338 0 obj <> endobj 1339 0 obj <>/XObject<>>>/Subtype/Form>>stream -q -323.67 445.774 m -323.67 461.774 l -339.132 461.774 351.669 449.237 351.669 433.775 c -351.669 418.313 339.132 405.776 323.67 405.776 c -308.199 405.776 295.671 418.313 295.671 433.775 c -295.671 449.237 308.199 461.774 323.67 461.774 c -323.67 445.774 l -317.055 445.784 311.661 440.386 311.671 433.775 c -311.661 427.165 317.055 421.767 323.67 421.776 c -330.277 421.766 335.68 427.168 335.669 433.775 c -335.68 440.383 330.277 445.785 323.67 445.774 c -W n -q -/GS0 gs -0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do -Q -Q - endstream endobj 1356 0 obj <> endobj 1357 0 obj <>/ExtGState<>>>/Subtype/Form>>stream -/CS0 cs 0.208 0.337 0.655 scn -/GS0 gs -q 1 0 0 1 323.6699 445.7744 cm -0 0 m -0 16 l -15.462 16 27.999 3.463 27.999 -11.999 c -27.999 -27.461 15.462 -39.998 0 -39.998 c --15.471 -39.998 -27.999 -27.461 -27.999 -11.999 c --27.999 3.463 -15.471 16 0 16 c -0 0 l --6.615 0.009 -12.009 -5.389 -11.999 -11.999 c --12.009 -18.609 -6.615 -24.007 0 -23.998 c -6.607 -24.009 12.01 -18.606 11.999 -11.999 c -12.01 -5.392 6.607 0.011 0 0 c -f -Q -q 1 0 0 1 323.6699 450.936 cm -0 0 m -0 -0.46 l -0 -5.162 l --6.615 -5.152 -12.009 -10.55 -11.999 -17.161 c --12.009 -23.771 -6.615 -29.169 0 -29.16 c -6.607 -29.17 12.01 -23.768 11.999 -17.161 c -12.01 -10.553 6.607 -5.151 0 -5.162 c -0 -0.46 l -0.316 -0.687 0.738 -0.99 1.054 -1.216 c -3.814 -3.66 7.459 -4.866 10 -7.615 c -12.018 -9.799 
13.458 -12.46 14.279 -15.526 c -15.091 -18.561 16.901 -19.341 16.918 -19.343 c -18.873 -19.537 24.733 -10.481 17.857 -2.239 c -10.881 6.124 0.77 1.958 0 0 c -0 10.838 l -15.462 10.838 27.999 -1.699 27.999 -17.161 c -27.999 -32.623 15.462 -45.16 0 -45.16 c --15.471 -45.16 -27.999 -32.623 -27.999 -17.161 c --27.999 -1.699 -15.471 10.838 0 10.838 c -0 0 l -f -Q -0.204 0.333 0.639 scn -q 1 0 0 1 323.6699 452.7832 cm -0 0 m --0.297 -0.712 -1.488 -1.167 -1.738 -1.898 c --1.989 -2.637 -2.005 -3.871 -1.531 -4.492 c --1.227 -4.891 -0.45 -4.943 0 -5.165 c -0 -7.009 l --6.615 -7 -12.009 -12.397 -11.999 -19.008 c --12.009 -25.618 -6.615 -31.016 0 -31.007 c -6.607 -31.018 12.01 -25.615 11.999 -19.008 c -12.01 -12.4 6.607 -6.998 0 -7.009 c -0 -5.165 l -0.338 -5.198 0.788 -5.242 1.126 -5.275 c -2.249 -5.474 12.142 -7.557 13.761 -19.535 c -14.172 -22.508 l -14.637 -23.083 15.725 -23.499 16.46 -23.421 c -20.584 -22.986 26.414 -9.565 15.896 -1.31 c -7.945 4.929 0.035 0.084 0 0 c -0 8.991 l -15.462 8.991 27.999 -3.546 27.999 -19.008 c -27.999 -34.47 15.462 -47.007 0 -47.007 c --15.471 -47.007 -27.999 -34.47 -27.999 -19.008 c --27.999 -3.546 -15.471 8.991 0 8.991 c -0 0 l -f -Q -0.2 0.325 0.624 scn -q 1 0 0 1 323.6699 453.9038 cm -0 0 m --0.627 -1.11 -1.868 -1.524 -2.71 -2.39 c --4.768 -4.502 -4.451 -6.209 -4.444 -6.223 c --4.359 -6.387 -4.359 -6.387 0 -7.407 c -0 -8.129 l --6.615 -8.12 -12.009 -13.518 -11.999 -20.128 c --12.009 -26.739 -6.615 -32.137 0 -32.127 c -6.607 -32.138 12.01 -26.736 11.999 -20.128 c -12.01 -13.521 6.607 -8.119 0 -8.129 c -0 -7.407 l -0.312 -7.427 0.727 -7.454 1.039 -7.474 c -5.586 -8.118 13.154 -12.018 12.674 -22.547 c -12.56 -25.06 12.663 -26.477 12.982 -26.758 c -14.311 -27.928 23.356 -23.682 22.629 -14.041 c -21.27 3.998 1.142 2.018 0 0 c -0 7.871 l -15.462 7.871 27.999 -4.667 27.999 -20.128 c -27.999 -35.59 15.462 -48.127 0 -48.127 c --15.471 -48.127 -27.999 -35.59 -27.999 -20.128 c --27.999 -4.667 -15.471 7.871 0 7.871 c -0 0 l -f -Q -0.196 0.318 0.612 scn -q 1 0 0 1 323.6699 454.8291 cm -0 0 m --0.223 -0.378 -0.896 -0.494 -1.28 -0.706 c --3.988 -2.198 -4.356 -2.882 -7.222 -8.202 c --10.979 -15.406 l --12.035 -17.648 -12.409 -19.972 -12.123 -22.51 c --11.368 -29.204 -4.441 -35.039 3.701 -32.831 c -16.504 -28.45 l -19.64 -26.383 21.524 -23.889 22.614 -20.364 c -24.61 -13.907 21.812 -4.74 13.674 -0.575 c -6.261 3.219 0.029 0.049 0 0 c -0 6.945 l -15.462 6.945 27.999 -5.592 27.999 -21.054 c -27.999 -36.516 15.462 -49.053 0 -49.053 c --15.471 -49.053 -27.999 -36.516 -27.999 -21.054 c --27.999 -5.592 -15.471 6.945 0 6.945 c -0 0 l -f -Q -0.192 0.31 0.596 scn -q 1 0 0 1 323.6699 455.6289 cm -0 0 m --11.795 -5.181 -18.994 -27.783 -4.636 -33.729 c -5.806 -38.053 30.469 -28.935 22.345 -10.09 c -19.107 -2.58 10.176 3.509 0 0 c -0 6.146 l -15.462 6.146 27.999 -6.392 27.999 -21.854 c -27.999 -37.315 15.462 -49.853 0 -49.853 c --15.471 -49.853 -27.999 -37.315 -27.999 -21.854 c --27.999 -6.392 -15.471 6.146 0 6.146 c -0 0 l -f -Q -0.188 0.302 0.58 scn -q 1 0 0 1 323.6699 456.3296 cm -0 0 m --0.26 -0.393 -1.011 -0.429 -1.444 -0.612 c --4.284 -1.815 -7.534 -4.967 -9.349 -8.277 c --13.499 -15.843 -13.758 -21.083 -13.244 -24.145 c --12.335 -29.557 -7.256 -38.113 6.018 -35.852 c -29.65 -31.827 27.567 -10.229 15.691 -2.187 c -7.726 3.206 0.039 0.058 0 0 c -0 5.445 l -15.462 5.445 27.999 -7.092 27.999 -22.554 c -27.999 -38.016 15.462 -50.553 0 -50.553 c --15.471 -50.553 -27.999 -38.016 -27.999 -22.554 c --27.999 -7.092 -15.471 5.445 0 5.445 c -0 0 l -f -Q -0.18 0.294 0.569 scn -q 1 0 0 1 323.6699 
456.9956 cm -0 0 m --0.271 -0.397 -1.043 -0.41 -1.49 -0.586 c --3.112 -1.224 -7.251 -3.368 -10.636 -9.471 c --11.688 -11.366 -15.022 -18.08 -13.796 -24.877 c --12.453 -32.323 -5.461 -39.361 6.714 -37.217 c -28.943 -33.303 28.97 -11.254 15.609 -2.3 c -7.857 2.895 0.038 0.056 0 0 c -0 4.779 l -15.462 4.779 27.999 -7.758 27.999 -23.22 c -27.999 -38.682 15.462 -51.219 0 -51.219 c --15.471 -51.219 -27.999 -38.682 -27.999 -23.22 c --27.999 -7.758 -15.471 4.779 0 4.779 c -0 0 l -f -Q -0.176 0.286 0.553 scn -q 1 0 0 1 323.6699 457.6064 cm -0 0 m --0.285 -0.403 -1.086 -0.384 -1.551 -0.549 c --2.515 -0.89 -7.505 -2.918 -11.143 -9.4 c --12.539 -11.886 -15.644 -18.437 -14.343 -25.553 c --13.275 -31.396 -7.567 -40.711 7.05 -38.566 c -28.064 -35.482 30.902 -13.127 16.17 -2.838 c -7.979 2.883 0.04 0.057 0 0 c -0 4.168 l -15.462 4.168 27.999 -8.369 27.999 -23.831 c -27.999 -39.293 15.462 -51.83 0 -51.83 c --15.471 -51.83 -27.999 -39.293 -27.999 -23.831 c --27.999 -8.369 -15.471 4.168 0 4.168 c -0 0 l -f -Q -0.173 0.278 0.541 scn -q 1 0 0 1 323.6699 458.1792 cm -0 0 m --0.295 -0.407 -1.114 -0.365 -1.591 -0.521 c --3.039 -0.995 -8.059 -3.066 -11.891 -9.807 c --12.952 -11.675 -16.307 -18.377 -14.887 -26.189 c --13.692 -32.762 -6.813 -41.823 7.243 -39.848 c -28.687 -36.834 31.471 -13.847 16.374 -3.144 c -8.08 2.737 0.041 0.056 0 0 c -0 3.595 l -15.462 3.595 27.999 -8.942 27.999 -24.404 c -27.999 -39.866 15.462 -52.403 0 -52.403 c --15.471 -52.403 -27.999 -39.866 -27.999 -24.404 c --27.999 -8.942 -15.471 3.595 0 3.595 c -0 0 l -f -Q -0.169 0.275 0.525 scn -q 1 0 0 1 323.6699 458.7163 cm -0 0 m --0.327 -0.44 -1.225 -0.369 -1.749 -0.527 c --5.521 -1.665 -9.768 -5.259 -12.076 -9.267 c --15.396 -15.033 -16.523 -20.929 -15.426 -26.791 c --13.856 -35.175 -5.227 -43.009 7.675 -41.011 c -29.382 -37.65 31.673 -13.956 16.092 -3.122 c -8.188 2.374 0.041 0.052 0 0 c -0 3.058 l -15.462 3.058 27.999 -9.479 27.999 -24.941 c -27.999 -40.403 15.462 -52.94 0 -52.94 c --15.471 -52.94 -27.999 -40.403 -27.999 -24.941 c --27.999 -9.479 -15.471 3.058 0 3.058 c -0 0 l -f -Q -0.165 0.267 0.51 scn -q 1 0 0 1 323.6699 459.2314 cm -0 0 m --0.315 -0.414 -1.17 -0.321 -1.672 -0.458 c --5.63 -1.542 -10.189 -5.222 -12.512 -9.206 c --13.797 -11.409 -17.707 -18.115 -15.958 -27.369 c --14.312 -36.085 -5.369 -44.227 7.962 -42.147 c -29.823 -38.738 32.256 -15.066 16.713 -3.752 c -8.241 2.415 0.041 0.054 0 0 c -0 2.543 l -15.462 2.543 27.999 -9.994 27.999 -25.456 c -27.999 -40.918 15.462 -53.455 0 -53.455 c --15.471 -53.455 -27.999 -40.918 -27.999 -25.456 c --27.999 -9.994 -15.471 2.543 0 2.543 c -0 0 l -f -Q -0.161 0.259 0.498 scn -q 1 0 0 1 323.6699 459.7041 cm -0 0 m --0.326 -0.417 -1.198 -0.297 -1.711 -0.424 c --5.006 -1.24 -10.024 -4.173 -13.32 -9.752 c --16.644 -15.378 -17.708 -21.484 -16.484 -27.903 c --14.771 -36.889 -5.522 -45.311 8.242 -43.22 c -29.813 -39.944 32.242 -15.421 16.845 -4.05 c -8.507 2.107 0.042 0.053 0 0 c -0 2.07 l -15.462 2.07 27.999 -10.467 27.999 -25.929 c -27.999 -41.391 15.462 -53.928 0 -53.928 c --15.471 -53.928 -27.999 -41.391 -27.999 -25.929 c --27.999 -10.467 -15.471 2.07 0 2.07 c -0 0 l -f -Q -0.153 0.251 0.482 scn -q 1 0 0 1 323.6699 460.144 cm -0 0 m --0.165 -0.201 -0.596 -0.119 -0.852 -0.169 c --6.632 -1.32 -11.089 -5.48 -13.333 -8.99 c --17.824 -16.015 -17.96 -22.678 -17.283 -27.031 c --15.529 -38.309 -5.353 -45.633 6.914 -44.447 c -29.053 -42.307 33.213 -18.564 18.588 -5.674 c -9.722 2.142 0.051 0.062 0 0 c -0 1.63 l -15.462 1.63 27.999 -10.907 27.999 -26.369 c -27.999 -41.831 15.462 -54.368 0 -54.368 c 
--15.471 -54.368 -27.999 -41.831 -27.999 -26.369 c --27.999 -10.907 -15.471 1.63 0 1.63 c -0 0 l -f -Q -0.149 0.243 0.467 scn -q 1 0 0 1 323.6699 460.5547 cm -0 0 m --0.345 -0.419 -1.243 -0.245 -1.776 -0.35 c --5.454 -1.074 -10.584 -3.985 -13.756 -8.856 c --18.476 -16.104 -18.606 -22.976 -17.885 -27.465 c --16.272 -37.503 -7.101 -46.92 7.31 -45.498 c -29.575 -43.3 33.52 -19.115 18.666 -5.998 c -9.679 1.938 0.05 0.061 0 0 c -0 1.22 l -15.462 1.22 27.999 -11.317 27.999 -26.779 c -27.999 -42.241 15.462 -54.778 0 -54.778 c --15.471 -54.778 -27.999 -42.241 -27.999 -26.779 c --27.999 -11.317 -15.471 1.22 0 1.22 c -0 0 l -f -Q -0.145 0.235 0.455 scn -q 1 0 0 1 323.6699 460.9102 cm -0 0 m --0.359 -0.424 -1.28 -0.213 -1.828 -0.305 c --2.573 -0.429 -9.242 -1.712 -14.038 -8.521 c --19.338 -16.045 -19.04 -23.601 -18.666 -26.5 c --16.79 -41.035 -4.557 -47.119 6.015 -46.621 c -29.237 -45.525 34.039 -19.966 18.705 -6.311 c -9.693 1.714 0.05 0.059 0 0 c -0 0.864 l -15.462 0.864 27.999 -11.673 27.999 -27.135 c -27.999 -42.597 15.462 -55.134 0 -55.134 c --15.471 -55.134 -27.999 -42.597 -27.999 -27.135 c --27.999 -11.673 -15.471 0.864 0 0.864 c -0 0 l -f -Q -0.141 0.227 0.439 scn -q 1 0 0 1 323.6699 461.2358 cm -0 0 m --0.366 -0.422 -1.291 -0.183 -1.844 -0.262 c --5.618 -0.797 -11.206 -3.577 -14.557 -8.414 c --20.527 -17.033 -19.484 -25.013 -19.142 -27.635 c --17.325 -41.544 -4.721 -48.297 6.215 -47.587 c -22.825 -46.511 31.838 -32.41 25.896 -16.796 c -27.251 -20.083 27.999 -23.685 27.999 -27.46 c -27.999 -42.922 15.462 -55.459 0 -55.459 c --15.471 -55.459 -27.999 -42.922 -27.999 -27.46 c --27.999 -11.999 -15.471 0.539 0 0.539 c -0 0 l -f -Q -0.137 0.22 0.427 scn -q 1 0 0 1 323.6699 461.4912 cm -0 0 m --0.38 -0.425 -1.323 -0.147 -1.89 -0.211 c --3.742 -0.417 -10.186 -1.632 -15.337 -8.604 c --20.121 -15.077 -20.496 -23.224 -19.964 -27.016 c --18.071 -40.5 -7.311 -49.138 6.811 -48.512 c -13.567 -48.212 30.458 -42.954 27.513 -22.495 c -27.832 -24.187 27.999 -25.932 27.999 -27.716 c -27.999 -43.178 15.462 -55.715 0 -55.715 c --15.471 -55.715 -27.999 -43.178 -27.999 -27.716 c --27.999 -12.254 -15.471 0.283 0 0.283 c -0 0 l -f -Q -0.133 0.216 0.412 scn -q 1 0 0 1 323.6699 461.6821 cm -0 0 m --0.389 -0.422 -1.334 -0.109 -1.906 -0.156 c --5.864 -0.48 -11.765 -2.986 -15.37 -7.721 c --21.457 -15.717 -21.121 -23.997 -20.694 -27.186 c --18.848 -40.99 -7.359 -50.367 6.621 -49.484 c -16.365 -48.868 27.809 -42.685 27.992 -27.284 c -27.997 -27.491 27.999 -27.699 27.999 -27.907 c -27.999 -43.369 15.462 -55.906 0 -55.906 c --15.471 -55.906 -27.999 -43.369 -27.999 -27.907 c --27.999 -12.445 -15.471 0.092 0 0.092 c -0 0 l -f -Q -0.125 0.208 0.396 scn -q 1 0 0 1 323.6699 461.771 cm -0 0 m --0.403 -0.423 -1.362 -0.067 -1.946 -0.096 c --5.655 -0.278 -11.174 -1.795 -16.41 -7.986 c --19.422 -11.547 -22.258 -18.903 -21.583 -25.522 c --19.025 -50.59 4.157 -50.418 5.143 -50.399 c -17.394 -50.156 25.847 -43.167 27.756 -31.704 c -25.941 -45.413 14.205 -55.995 0 -55.995 c --15.471 -55.995 -27.999 -43.458 -27.999 -27.996 c --27.999 -12.534 -15.471 0.003 0 0.003 c -0 0 l -f -Q -0.122 0.2 0.384 scn -q 1 0 0 1 319.437 461.4541 cm -0 0 m --22.531 -4.549 -23.531 -35.025 -6.331 -46.258 c -6.847 -54.864 25.642 -52.17 31.071 -35.682 c -27.627 -47.245 16.914 -55.678 4.233 -55.678 c --11.238 -55.678 -23.766 -43.141 -23.766 -27.679 c --23.766 -13.386 -13.062 -1.593 0.777 0.109 c -0.544 0.077 0.232 0.04 0 0 c -f -Q -0.118 0.192 0.369 scn -q 1 0 0 1 311.6421 458.9941 cm -0 0 m --16.565 -9.064 -17.346 -40.196 9.317 -48.713 c -16.643 -51.053 30.634 
-50.189 36.991 -37.91 c -32.363 -46.995 22.921 -53.218 12.028 -53.218 c --3.443 -53.218 -15.971 -40.681 -15.971 -25.219 c --15.971 -12.684 -7.737 -2.07 3.624 1.498 c -3.099 1.309 2.397 1.056 1.872 0.866 c -1.309 0.609 0.542 0.297 0 0 c -f -Q -0.216 0.345 0.667 scn -q 1 0 0 1 339.5962 435.5991 cm -0 0 m --1.706 2.422 -2.871 5.192 -4.806 7.466 c --5.581 8.375 -6.334 9.141 -7.046 9.74 c --7.103 9.788 -12.699 14.577 -12.705 14.929 c --12.707 15.035 -10.925 16.753 -10.74 16.825 c --10.058 17.086 -7.544 17.231 -6.875 17.166 c --5.111 16.992 -2.438 16.241 0.275 13.649 c -3.79 10.293 4.269 6.382 4.332 5.263 c -4.608 0.362 1.816 -1.553 1.125 -1.426 c -0.589 -1.328 0.314 -0.445 0 0 c -f -Q -0.22 0.353 0.682 scn -q 1 0 0 1 339.7305 438.0928 cm -0 0 m --1.97 2.883 -3.055 4.471 -4.87 6.595 c --5.072 6.832 -5.375 7.116 -5.591 7.34 c --5.844 7.601 -6.16 7.969 -6.419 8.224 c --6.913 8.711 -7.551 9.382 -8.074 9.839 c --9.724 11.281 -9.908 11.547 -9.911 11.595 c --9.914 11.657 -8.495 13.252 -8.295 13.411 c --8.132 13.541 -7.808 13.456 -7.601 13.433 c --5.32 13.184 -2.962 12.927 -0.476 10.566 c -2.531 7.709 2.783 5.143 2.904 3.909 c -2.938 3.565 2.929 0.875 2.709 0.41 c -2.675 0.337 0.707 -0.875 0.645 -0.861 c -0.33 -0.793 0.182 -0.267 0 0 c -f -Q -0.224 0.361 0.694 scn -q 1 0 0 1 338.8154 441.6221 cm -0 0 m --0.737 0.235 -1.076 1.45 -1.576 2.04 c --3.148 3.894 -3.148 3.894 -3.897 4.678 c --4.212 5.008 -4.84 5.354 -4.922 5.803 c --4.014 7.981 l --3.953 8.007 -1.427 7.15 0.33 5.083 c -1.631 3.552 2.397 0.755 2.281 0.574 c -1.906 -0.01 0.699 -0.197 0.037 0.011 c -0.026 0.014 0.011 -0.003 0 0 c -f -Q -0.141 0.227 0.439 scn -q 1 0 0 1 335.7192 459.0469 cm -0 0 m --5.275 2.417 -9.403 2.407 -12.049 2.189 c --12.049 2.728 l --6.604 2.728 -1.522 1.173 2.777 -1.517 c -2.232 -1.205 1.506 -0.789 0.961 -0.477 c -0.673 -0.334 0.292 -0.134 0 0 c -f -Q -0.137 0.22 0.427 scn -q 1 0 0 1 331.9331 460.5313 cm -0 0 m --3.078 0.794 -4.478 1.111 -8.263 0.96 c --8.263 1.243 l --4.866 1.243 -1.61 0.638 1.402 -0.47 c -0.981 -0.329 0.425 -0.126 0 0 c -f -Q -0.133 0.216 0.412 scn -q 1 0 0 1 327.9009 461.4541 cm -0 0 m --1.314 0.178 -2.48 0.278 -4.231 0.228 c --4.231 0.32 l --2.431 0.32 -0.671 0.15 1.035 -0.174 c -0.724 -0.122 0.312 -0.042 0 0 c -f -Q -0.125 0.208 0.396 scn -q 1 0 0 1 323.6699 461.771 cm -0 0 m -0.335 0.003 0.669 -0.002 1.001 -0.014 c -0.701 -0.01 0.211 -0.214 0 0 c -f -Q - endstream endobj 1358 0 obj <> endobj 1336 0 obj <> endobj 1337 0 obj <>/XObject<>>>/Subtype/Form>>stream -q -327.999 212.271 m -327.999 217.271 l -331.311 217.271 334.002 214.59 334.002 211.277 c -334.002 207.966 331.311 205.274 327.999 205.274 c -324.687 205.274 321.996 207.966 321.996 211.277 c -321.996 214.59 324.687 217.271 327.999 217.271 c -327.999 212.271 l -327.449 212.274 326.992 211.817 326.996 211.277 c -326.991 210.734 327.456 210.27 327.999 210.274 c -328.542 210.27 329.007 210.734 329.002 211.277 c -329.006 211.817 328.549 212.274 327.999 212.271 c -W n -q -1 w 4 M 0 j 0 J []0 d -/GS0 gs -0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do -Q -Q - endstream endobj 1359 0 obj <> endobj 1360 0 obj <>/ExtGState<>>>/Subtype/Form>>stream -/CS0 cs 0.216 0.631 0.792 scn -/GS0 gs -q 1 0 0 1 327.999 212.2715 cm -0 0 m -0 5 l -3.312 5 6.003 2.318 6.003 -0.994 c -6.003 -4.306 3.312 -6.997 0 -6.997 c --3.312 -6.997 -6.003 -4.306 -6.003 -0.994 c --6.003 2.318 -3.312 5 0 5 c -0 0 l --0.55 0.003 -1.007 -0.454 -1.003 -0.994 c --1.008 -1.537 -0.543 -2.002 0 -1.997 c -0.543 -2.002 1.008 -1.537 1.003 -0.994 c -1.007 -0.454 0.55 0.003 0 0 c -f -Q -q 1 0 0 1 327.999 
213.1963 cm -0 0 m --0.013 -0.041 -0.073 -0.074 -0.083 -0.115 c --0.111 -0.248 -0.02 -0.426 0 -0.56 c -0 -0.925 l --0.55 -0.922 -1.007 -1.379 -1.003 -1.919 c --1.008 -2.462 -0.543 -2.927 0 -2.922 c -0.543 -2.927 1.008 -2.462 1.003 -1.919 c -1.007 -1.379 0.55 -0.922 0 -0.925 c -0 -0.56 l -0.034 -0.557 0.079 -0.553 0.113 -0.55 c -0.142 -0.55 0.184 -0.536 0.21 -0.549 c -1.046 -1.473 l -1.441 -2.153 1.79 -2.106 1.805 -2.104 c -2.057 -2.064 3.185 -0.619 1.901 0.191 c -1.598 0.383 1.275 0.409 1.132 0.396 c -0 0 l -0 4.075 l -3.312 4.075 6.003 1.394 6.003 -1.919 c -6.003 -5.23 3.312 -7.922 0 -7.922 c --3.312 -7.922 -6.003 -5.23 -6.003 -1.919 c --6.003 1.394 -3.312 4.075 0 4.075 c -0 0 l -f -Q -0.208 0.616 0.776 scn -q 1 0 0 1 327.999 213.3379 cm -0 0 m --0.03 -0.092 -0.164 -0.17 -0.185 -0.265 c --0.222 -0.433 -0.125 -0.678 -0.188 -0.838 c --0.188 -0.839 -0.237 -0.941 -0.404 -1.049 c --1.156 -1.538 -1.044 -2.153 -0.992 -2.33 c --0.81 -2.948 -0.137 -3.26 0.449 -2.997 c -0.649 -2.907 0.789 -2.769 0.872 -2.687 c -1.143 -2.418 1.548 -2.618 1.836 -2.409 c -2.434 -1.976 2.571 -1.584 2.629 -1.416 c -2.851 -0.784 2.461 0.135 1.628 0.371 c -0.853 0.591 0.002 0.008 0 0 c -0 3.934 l -3.312 3.934 6.003 1.252 6.003 -2.061 c -6.003 -5.372 3.312 -8.063 0 -8.063 c --3.312 -8.063 -6.003 -5.372 -6.003 -2.061 c --6.003 1.252 -3.312 3.934 0 3.934 c -0 0 l -f -Q -0.204 0.604 0.757 scn -q 1 0 0 1 327.999 213.4785 cm -0 0 m --0.294 -0.83 -1.296 -1.345 -1.079 -2.404 c --0.955 -3.01 -0.239 -3.591 0.647 -3.163 c -1.047 -2.97 1.515 -2.951 1.888 -2.688 c -2.104 -2.536 2.607 -2.182 2.763 -1.673 c -3.16 -0.374 2.125 0.264 1.731 0.385 c -0.831 0.661 0.003 0.009 0 0 c -0 3.793 l -3.312 3.793 6.003 1.111 6.003 -2.201 c -6.003 -5.513 3.312 -8.204 0 -8.204 c --3.312 -8.204 -6.003 -5.513 -6.003 -2.201 c --6.003 1.111 -3.312 3.793 0 3.793 c -0 0 l -f -Q -0.2 0.588 0.741 scn -q 1 0 0 1 327.999 213.6182 cm -0 0 m --0.352 -0.866 -1.383 -1.428 -1.146 -2.558 c --1.025 -3.14 -0.35 -3.809 0.711 -3.398 c -2.484 -2.712 2.629 -2.655 2.946 -1.777 c -2.952 -1.763 3.406 -0.234 2.053 0.316 c -0.838 0.812 0.004 0.01 0 0 c -0 3.653 l -3.312 3.653 6.003 0.972 6.003 -2.341 c -6.003 -5.652 3.312 -8.344 0 -8.344 c --3.312 -8.344 -6.003 -5.652 -6.003 -2.341 c --6.003 0.972 -3.312 3.653 0 3.653 c -0 0 l -f -Q -0.196 0.573 0.722 scn -q 1 0 0 1 327.999 213.7549 cm -0 0 m --0.193 -0.417 -0.585 -0.691 -0.795 -1.098 c --1.093 -1.707 l --1.262 -2.105 -1.291 -2.433 -1.189 -2.801 c --1.126 -3.029 -0.725 -4.141 0.983 -3.563 c -5.011 -2.2 2.486 0.226 2.453 0.247 c -1.442 0.896 0.101 0.219 0 0 c -0 3.517 l -3.312 3.517 6.003 0.835 6.003 -2.478 c -6.003 -5.789 3.312 -8.48 0 -8.48 c --3.312 -8.48 -6.003 -5.789 -6.003 -2.478 c --6.003 0.835 -3.312 3.517 0 3.517 c -0 0 l -f -Q -0.188 0.561 0.702 scn -q 1 0 0 1 327.999 213.9082 cm -0 0 m --0.013 -0.025 -0.053 -0.04 -0.076 -0.058 c --0.364 -0.275 -0.691 -0.521 -1.173 -1.803 c --1.243 -1.988 -1.457 -2.555 -1.186 -3.148 c --0.781 -4.033 0.18 -4.204 1.671 -3.654 c -3.863 -2.846 3.98 -0.373 2.341 0.401 c -1.366 0.862 0.123 0.247 0 0 c -0 3.363 l -3.312 3.363 6.003 0.682 6.003 -2.631 c -6.003 -5.942 3.312 -8.634 0 -8.634 c --3.312 -8.634 -6.003 -5.942 -6.003 -2.631 c --6.003 0.682 -3.312 3.363 0 3.363 c -0 0 l -f -Q -0.184 0.545 0.686 scn -q 1 0 0 1 327.999 214.0996 cm -0 0 m --0.034 -0.067 -0.142 -0.105 -0.203 -0.15 c --0.702 -0.521 -0.962 -1.182 -1.171 -1.711 c --1.281 -1.991 -1.54 -2.648 -1.288 -3.269 c --0.891 -4.246 0.088 -4.488 1.621 -3.988 c -4.051 -3.195 4.189 -0.578 2.798 0.287 c -1.588 1.039 0.134 0.266 0 0 c -0 
3.172 l -3.312 3.172 6.003 0.49 6.003 -2.822 c -6.003 -6.134 3.312 -8.825 0 -8.825 c --3.312 -8.825 -6.003 -6.134 -6.003 -2.822 c --6.003 0.49 -3.312 3.172 0 3.172 c -0 0 l -f -Q -0.18 0.529 0.667 scn -q 1 0 0 1 327.999 214.2871 cm -0 0 m --0.037 -0.069 -0.152 -0.104 -0.217 -0.147 c --0.454 -0.309 -0.887 -0.883 -1.091 -1.383 c --1.28 -1.846 -1.632 -2.707 -1.384 -3.387 c --0.994 -4.454 0.002 -4.769 1.578 -4.319 c -4.069 -3.61 4.619 -0.754 2.993 0.316 c -1.701 1.166 0.079 0.148 0 0 c -0 2.984 l -3.312 2.984 6.003 0.303 6.003 -3.01 c -6.003 -6.321 3.312 -9.013 0 -9.013 c --3.312 -9.013 -6.003 -6.321 -6.003 -3.01 c --6.003 0.303 -3.312 2.984 0 2.984 c -0 0 l -f -Q -0.176 0.518 0.651 scn -q 1 0 0 1 327.999 214.4717 cm -0 0 m --0.176 -0.317 -0.542 -0.437 -0.748 -0.722 c --1.049 -1.139 -1.146 -1.381 -1.241 -1.614 c --1.291 -1.738 -1.721 -2.847 -1.448 -3.589 c --0.846 -5.228 1.105 -4.775 1.689 -4.598 c -4.413 -3.769 4.993 -0.751 3.22 0.385 c -1.946 1.2 0.234 0.423 0 0 c -0 2.8 l -3.312 2.8 6.003 0.118 6.003 -3.194 c -6.003 -6.506 3.312 -9.197 0 -9.197 c --3.312 -9.197 -6.003 -6.506 -6.003 -3.194 c --6.003 0.118 -3.312 2.8 0 2.8 c -0 0 l -f -Q -0.169 0.502 0.631 scn -q 1 0 0 1 327.999 214.7031 cm -0 0 m --0.06 -0.133 -0.265 -0.211 -0.386 -0.291 c --0.759 -0.541 -1.229 -1.474 -1.327 -1.735 c --1.444 -2.049 -1.803 -3.136 -1.475 -3.938 c --0.713 -5.804 1.956 -4.863 1.982 -4.853 c -5.283 -3.568 5.162 -0.364 3.116 0.573 c -1.411 1.354 0.007 0.017 0 0 c -0 2.568 l -3.312 2.568 6.003 -0.113 6.003 -3.426 c -6.003 -6.737 3.312 -9.429 0 -9.429 c --3.312 -9.429 -6.003 -6.737 -6.003 -3.426 c --6.003 -0.113 -3.312 2.568 0 2.568 c -0 0 l -f -Q -0.165 0.486 0.612 scn -q 1 0 0 1 327.999 214.9854 cm -0 0 m --0.04 -0.083 -0.167 -0.135 -0.239 -0.193 c --0.736 -0.594 -1.131 -1.171 -1.412 -1.908 c --1.719 -2.715 -1.736 -3.694 -1.577 -4.139 c --0.858 -6.132 1.881 -5.304 1.908 -5.295 c -5.598 -4.044 5.76 -0.555 3.075 0.691 c -1.838 1.266 0.163 0.34 0 0 c -0 2.286 l -3.312 2.286 6.003 -0.396 6.003 -3.708 c -6.003 -7.02 3.312 -9.711 0 -9.711 c --3.312 -9.711 -6.003 -7.02 -6.003 -3.708 c --6.003 -0.396 -3.312 2.286 0 2.286 c -0 0 l -f -Q -0.161 0.475 0.596 scn -q 1 0 0 1 327.999 215.2715 cm -0 0 m --0.045 -0.106 -0.21 -0.167 -0.302 -0.236 c --0.487 -0.373 -1.13 -0.938 -1.627 -2.442 c --1.764 -2.854 -1.88 -3.932 -1.545 -4.67 c --1.027 -5.814 0.793 -6.21 2.513 -5.55 c -6.314 -4.092 5.733 -0.28 3.153 0.723 c -1.353 1.422 0.007 0.017 0 0 c -0 2 l -3.312 2 6.003 -0.682 6.003 -3.994 c -6.003 -7.306 3.312 -9.997 0 -9.997 c --3.312 -9.997 -6.003 -7.306 -6.003 -3.994 c --6.003 -0.682 -3.312 2 0 2 c -0 0 l -f -Q -0.157 0.459 0.576 scn -q 1 0 0 1 327.999 215.6543 cm -0 0 m --0.163 -0.361 -0.542 -0.515 -0.779 -0.805 c --0.948 -1.011 -1.05 -1.26 -1.205 -1.475 c --1.369 -1.701 -1.472 -1.983 -1.723 -2.733 c --2.048 -3.703 -1.823 -4.541 -1.66 -4.953 c --1.229 -6.046 0.416 -6.786 2.422 -6.135 c -7.014 -4.645 5.816 -0.744 3.286 0.54 c -1.422 1.485 0.008 0.019 0 0 c -0 1.617 l -3.312 1.617 6.003 -1.064 6.003 -4.377 c -6.003 -7.688 3.312 -10.38 0 -10.38 c --3.312 -10.38 -6.003 -7.688 -6.003 -4.377 c --6.003 -1.064 -3.312 1.617 0 1.617 c -0 0 l -f -Q -0.149 0.443 0.561 scn -q 1 0 0 1 327.999 216.0791 cm -0 0 m --0.128 -0.296 -0.442 -0.404 -0.638 -0.631 c --0.788 -0.804 -0.893 -1.01 -1.031 -1.191 c --1.148 -1.346 -1.62 -2.353 -1.623 -2.36 c --2.172 -3.895 -2.053 -4.608 -1.843 -5.151 c --0.961 -7.428 1.653 -7.023 2.586 -6.676 c -3.891 -6.189 6.606 -5.178 5.553 -2.521 c -5.843 -3.224 6.003 -3.994 6.003 -4.802 c -6.003 -8.113 3.312 -10.805 0 
-10.805 c --3.312 -10.805 -6.003 -8.113 -6.003 -4.802 c --6.003 -1.489 -3.312 1.192 0 1.192 c -0 0 l -f -Q -0.145 0.431 0.541 scn -q 1 0 0 1 327.999 216.5439 cm -0 0 m --0.037 -0.078 -0.154 -0.129 -0.22 -0.185 c --1.238 -1.037 -1.832 -2.884 -1.837 -2.902 c --2.426 -4.76 -2.011 -5.632 -1.875 -5.918 c --0.597 -8.6 3.355 -7.144 3.396 -7.129 c -4.441 -6.72 6.192 -6.035 5.899 -4.15 c -5.967 -4.512 6.003 -4.885 6.003 -5.267 c -6.003 -8.578 3.312 -11.27 0 -11.27 c --3.312 -11.27 -6.003 -8.578 -6.003 -5.267 c --6.003 -1.954 -3.312 0.728 0 0.728 c -0 0 l -f -Q -0.141 0.416 0.522 scn -q 1 0 0 1 327.999 216.9863 cm -0 0 m --0.038 -0.066 -0.155 -0.09 -0.221 -0.129 c --1.15 -0.674 -1.646 -2.172 -2.007 -3.267 c --2.013 -3.283 -2.546 -5.064 -2.073 -6.276 c --1.009 -9.004 3.058 -7.952 3.099 -7.941 c -4.318 -7.615 5.989 -7.169 6.001 -5.576 c -6.002 -5.62 6.003 -5.664 6.003 -5.709 c -6.003 -9.021 3.312 -11.712 0 -11.712 c --3.312 -11.712 -6.003 -9.021 -6.003 -5.709 c --6.003 -2.396 -3.312 0.285 0 0.285 c -0 0 l -f -Q -0.137 0.4 0.506 scn -q 1 0 0 1 327.999 217.2598 cm -0 0 m --0.043 -0.053 -0.154 -0.029 -0.221 -0.042 c --0.696 -0.133 -1.348 -0.689 -1.732 -1.73 c --2.577 -4.014 -2.459 -5.548 -2.314 -6.259 c --1.864 -8.468 0.843 -8.703 1.755 -8.611 c -4.299 -8.355 5.7 -8.214 5.951 -6.775 c -5.562 -9.713 3.043 -11.985 0 -11.985 c --3.312 -11.985 -6.003 -9.294 -6.003 -5.982 c --6.003 -2.67 -3.312 0.012 0 0.012 c -0 0 l -f -Q -0.129 0.388 0.486 scn -q 1 0 0 1 327.0938 217.1953 cm -0 0 m --1.738 -0.59 -1.75 -4.505 -1.75 -4.545 c --1.745 -7.049 -0.739 -7.83 0.017 -8.199 c -1.798 -9.07 6.085 -9.361 6.66 -7.631 c -5.921 -10.109 3.622 -11.921 0.905 -11.921 c --2.407 -11.921 -5.098 -9.229 -5.098 -5.918 c --5.098 -2.856 -2.799 -0.334 0.165 0.031 c -0.115 0.021 0.049 0.013 0 0 c -f -Q -0.125 0.373 0.471 scn -q 1 0 0 1 325.7642 216.7715 cm -0 0 m --1.064 -0.938 -0.813 -4.867 -0.541 -5.6 c -0.429 -8.205 2.405 -8.584 3.209 -8.627 c -4.272 -8.682 5.299 -9.067 6.379 -8.965 c -6.692 -8.936 7.266 -8.798 7.587 -8.212 c -6.594 -10.16 4.569 -11.497 2.235 -11.497 c --1.077 -11.497 -3.768 -8.806 -3.768 -5.494 c --3.768 -2.81 -2.001 -0.54 0.432 0.225 c -0.372 0.2 0.292 0.168 0.231 0.144 c -0.161 0.102 0.061 0.054 0 0 c -f -Q -0.22 0.647 0.812 scn -q 1 0 0 1 329.5791 211.4561 cm -0 0 m --0.095 0.068 -0.095 0.068 -0.519 0.587 c --0.661 0.762 -0.834 0.909 -0.973 1.089 c --1.125 1.286 -1.231 1.594 y --1.226 1.612 -0.029 2.438 0.592 1.362 c -1.027 0.609 0.245 -0.131 0.233 -0.133 c -0.153 -0.144 0.065 -0.047 0 0 c -f -Q -0.149 0.443 0.561 scn -q 1 0 0 1 330.5688 216.6631 cm -0 0 m --1.295 0.462 -2.254 -0.325 -2.57 -0.584 c --2.57 0.608 l --1.402 0.608 -0.311 0.274 0.612 -0.302 c -0.522 -0.252 0.402 -0.186 0.312 -0.136 c -0.219 -0.095 0.096 -0.034 0 0 c -f -Q -0.224 0.659 0.831 scn -q 1 0 0 1 329.6191 211.708 cm -0 0 m --0.335 0.354 l --0.472 0.524 -0.626 0.68 -0.757 0.854 c --0.976 1.148 -1.021 1.268 -1.019 1.272 c --1.014 1.287 -0.028 1.7 0.33 0.952 c -0.591 0.409 0.174 -0.12 0.167 -0.121 c -0.106 -0.131 0.048 -0.039 0 0 c -f -Q -0.145 0.431 0.541 scn -q 1 0 0 1 329.7661 216.9941 cm -0 0 m --0.649 0.12 -1.161 -0.01 -1.767 -0.45 c --1.767 0.277 l --1.038 0.277 -0.339 0.147 0.307 -0.091 c -0.224 -0.065 0.112 -0.031 0.029 -0.007 c -0.02 -0.005 0.009 -0.002 0 0 c -f -Q -0.227 0.675 0.847 scn -q 1 0 0 1 329.623 211.9746 cm -0 0 m --0.004 0.004 -0.533 0.572 -0.71 0.861 c --0.568 0.874 -0.482 0.883 -0.264 0.809 c --0.18 0.78 -0.083 0.699 -0.025 0.631 c -0.033 0.563 0.091 0.45 0.104 0.361 c -0.135 0.141 0.099 0.019 0.074 -0.063 c -0.052 -0.044 
0.021 -0.021 0 0 c -f -Q -0.141 0.416 0.522 scn -q 1 0 0 1 328.9043 217.1943 cm -0 0 m --0.314 -0.006 -0.487 -0.009 -0.905 -0.208 c --0.905 0.077 l --0.519 0.077 -0.142 0.041 0.225 -0.029 c -0.157 -0.021 0.068 -0.004 0 0 c -f -Q -0.137 0.4 0.506 scn -q 1 0 0 1 327.999 217.2598 cm -0 0 m -0 0.012 l -0.072 0.012 0.144 0.011 0.215 0.008 c -0.15 0.006 0.046 -0.045 0 0 c -f -Q - endstream endobj 1361 0 obj <> endobj 1334 0 obj <> endobj 1335 0 obj <>/XObject<>>>/Subtype/Form>>stream -q -334.002 303.277 m -334.002 319.277 l -349.464 319.277 362.001 306.74 362.001 291.278 c -362.001 275.808 349.464 263.279 334.002 263.279 c -318.54 263.279 306.003 275.808 306.003 291.278 c -306.003 306.74 318.54 319.277 334.002 319.277 c -334.002 303.277 l -327.395 303.288 321.992 297.886 322.003 291.278 c -321.994 284.663 327.392 279.27 334.002 279.279 c -340.612 279.27 346.01 284.663 346.001 291.278 c -346.012 297.886 340.609 303.288 334.002 303.277 c -W n -q -/GS0 gs -0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do -Q -Q - endstream endobj 1362 0 obj <> endobj 1363 0 obj <>/ExtGState<>>>/Subtype/Form>>stream -/CS0 cs 0.259 0.565 0.682 scn -/GS0 gs -q 1 0 0 1 334.002 303.2773 cm -0 0 m -0 16 l -15.462 16 27.999 3.463 27.999 -11.999 c -27.999 -27.47 15.462 -39.998 0 -39.998 c --15.462 -39.998 -27.999 -27.47 -27.999 -11.999 c --27.999 3.463 -15.462 16 0 16 c -0 0 l --6.607 0.011 -12.01 -5.392 -11.999 -11.999 c --12.008 -18.614 -6.61 -24.008 0 -23.998 c -6.61 -24.008 12.008 -18.614 11.999 -11.999 c -12.01 -5.392 6.607 0.011 0 0 c -f -Q -q 1 0 0 1 334.002 308.4409 cm -0 0 m -0 -0.468 l -0 -5.164 l --6.607 -5.153 -12.01 -10.555 -11.999 -17.163 c --12.008 -23.778 -6.61 -29.171 0 -29.162 c -6.61 -29.171 12.008 -23.778 11.999 -17.163 c -12.01 -10.555 6.607 -5.153 0 -5.164 c -0 -0.468 l -0.316 -0.694 0.738 -0.996 1.055 -1.223 c -3.817 -3.661 7.459 -4.869 10 -7.617 c -12.018 -9.8 13.458 -12.461 14.279 -15.528 c -15.076 -18.506 16.901 -19.345 16.917 -19.347 c -18.874 -19.542 24.734 -10.485 17.857 -2.241 c -10.879 6.124 0.769 1.958 0 0 c -0 10.836 l -15.462 10.836 27.999 -1.701 27.999 -17.163 c -27.999 -32.633 15.462 -45.162 0 -45.162 c --15.462 -45.162 -27.999 -32.633 -27.999 -17.163 c --27.999 -1.701 -15.462 10.836 0 10.836 c -0 0 l -f -Q -0.255 0.553 0.667 scn -q 1 0 0 1 334.002 310.2881 cm -0 0 m --0.296 -0.712 -1.487 -1.168 -1.735 -1.898 c --1.987 -2.638 -2.003 -3.873 -1.53 -4.494 c --1.227 -4.893 -0.45 -4.945 0 -5.167 c -0 -7.011 l --6.607 -7 -12.01 -12.402 -11.999 -19.01 c --12.008 -25.625 -6.61 -31.019 0 -31.009 c -6.61 -31.019 12.008 -25.625 11.999 -19.01 c -12.01 -12.402 6.607 -7 0 -7.011 c -0 -5.167 l -0.338 -5.201 0.788 -5.245 1.126 -5.278 c -2.249 -5.476 12.144 -7.557 13.761 -19.537 c -14.171 -22.514 l -14.636 -23.089 15.724 -23.505 16.459 -23.428 c -20.584 -22.992 26.416 -9.568 15.896 -1.312 c -7.943 4.929 0.035 0.084 0 0 c -0 8.989 l -15.462 8.989 27.999 -3.548 27.999 -19.01 c -27.999 -34.48 15.462 -47.009 0 -47.009 c --15.462 -47.009 -27.999 -34.48 -27.999 -19.01 c --27.999 -3.548 -15.462 8.989 0 8.989 c -0 0 l -f -Q -0.247 0.541 0.651 scn -q 1 0 0 1 334.002 311.4072 cm -0 0 m --0.627 -1.109 -1.866 -1.525 -2.708 -2.391 c --4.764 -4.503 -4.447 -6.209 -4.44 -6.223 c --4.355 -6.386 -4.355 -6.386 0 -7.408 c -0 -8.13 l --6.607 -8.119 -12.01 -13.521 -11.999 -20.129 c --12.008 -26.744 -6.61 -32.138 0 -32.128 c -6.61 -32.138 12.008 -26.744 11.999 -20.129 c -12.01 -13.521 6.607 -8.119 0 -8.13 c -0 -7.408 l -0.312 -7.428 0.727 -7.455 1.039 -7.475 c -5.587 -8.118 13.155 -12.018 12.674 -22.55 c -12.559 -25.063 12.663 -26.479 12.981 
-26.762 c -14.31 -27.933 23.356 -23.69 22.629 -14.042 c -21.27 4.006 1.142 2.02 0 0 c -0 7.87 l -15.462 7.87 27.999 -4.667 27.999 -20.129 c -27.999 -35.6 15.462 -48.128 0 -48.128 c --15.462 -48.128 -27.999 -35.6 -27.999 -20.129 c --27.999 -4.667 -15.462 7.87 0 7.87 c -0 0 l -f -Q -0.243 0.529 0.639 scn -q 1 0 0 1 334.002 312.3325 cm -0 0 m --0.223 -0.377 -0.896 -0.494 -1.279 -0.706 c --3.984 -2.198 -4.352 -2.882 -7.218 -8.204 c --10.978 -15.407 l --12.034 -17.649 -12.409 -19.973 -12.123 -22.511 c --11.368 -29.203 -4.44 -35.038 3.702 -32.832 c -16.504 -28.455 l -19.639 -26.388 21.523 -23.893 22.614 -20.364 c -24.61 -13.908 21.812 -4.74 13.674 -0.575 c -6.26 3.219 0.029 0.049 0 0 c -0 6.945 l -15.462 6.945 27.999 -5.592 27.999 -21.054 c -27.999 -36.525 15.462 -49.053 0 -49.053 c --15.462 -49.053 -27.999 -36.525 -27.999 -21.054 c --27.999 -5.592 -15.462 6.945 0 6.945 c -0 0 l -f -Q -0.235 0.518 0.624 scn -q 1 0 0 1 334.002 313.1323 cm -0 0 m --0.174 -0.267 -0.682 -0.3 -0.974 -0.428 c --3.27 -1.438 -6.363 -4.313 -7.593 -6.58 c --13.39 -17.263 -13 -20.654 -12.686 -23.379 c --12.044 -28.943 -6.306 -36.331 3.976 -34.516 c -34.376 -29.152 23.202 -7.033 15.417 -1.844 c -7.621 3.352 0.038 0.059 0 0 c -0 6.145 l -15.462 6.145 27.999 -6.392 27.999 -21.854 c -27.999 -37.325 15.462 -49.853 0 -49.853 c --15.462 -49.853 -27.999 -37.325 -27.999 -21.854 c --27.999 -6.392 -15.462 6.145 0 6.145 c -0 0 l -f -Q -0.231 0.506 0.608 scn -q 1 0 0 1 334.002 313.833 cm -0 0 m --0.26 -0.393 -1.01 -0.429 -1.443 -0.612 c --4.281 -1.817 -7.531 -4.969 -9.346 -8.278 c --13.499 -15.849 -13.757 -21.087 -13.243 -24.146 c --12.334 -29.559 -7.254 -38.113 6.021 -35.853 c -29.652 -31.827 27.567 -10.229 15.691 -2.188 c -7.725 3.206 0.039 0.058 0 0 c -0 5.444 l -15.462 5.444 27.999 -7.093 27.999 -22.555 c -27.999 -38.025 15.462 -50.554 0 -50.554 c --15.462 -50.554 -27.999 -38.025 -27.999 -22.555 c --27.999 -7.093 -15.462 5.444 0 5.444 c -0 0 l -f -Q -0.227 0.494 0.592 scn -q 1 0 0 1 334.002 314.499 cm -0 0 m --0.27 -0.397 -1.042 -0.411 -1.488 -0.586 c --3.111 -1.225 -7.25 -3.37 -10.633 -9.471 c --11.685 -11.368 -15.021 -18.085 -13.796 -24.878 c --12.453 -32.322 -5.461 -39.359 6.715 -37.218 c -28.949 -33.308 28.975 -11.258 15.609 -2.301 c -7.856 2.895 0.038 0.056 0 0 c -0 4.778 l -15.462 4.778 27.999 -7.759 27.999 -23.221 c -27.999 -38.691 15.462 -51.22 0 -51.22 c --15.462 -51.22 -27.999 -38.691 -27.999 -23.221 c --27.999 -7.759 -15.462 4.778 0 4.778 c -0 0 l -f -Q -0.22 0.478 0.576 scn -q 1 0 0 1 334.002 315.1099 cm -0 0 m --0.285 -0.403 -1.085 -0.384 -1.55 -0.549 c --2.14 -0.758 -7.426 -2.783 -11.14 -9.4 c --12.536 -11.888 -15.643 -18.441 -14.343 -25.554 c --13.275 -31.396 -7.567 -40.71 7.05 -38.567 c -28.067 -35.485 30.905 -13.13 16.17 -2.838 c -7.979 2.883 0.04 0.057 0 0 c -0 4.167 l -15.462 4.167 27.999 -8.37 27.999 -23.832 c -27.999 -39.302 15.462 -51.831 0 -51.831 c --15.462 -51.831 -27.999 -39.302 -27.999 -23.832 c --27.999 -8.37 -15.462 4.167 0 4.167 c -0 0 l -f -Q -0.216 0.467 0.565 scn -q 1 0 0 1 334.002 315.6826 cm -0 0 m --0.294 -0.407 -1.113 -0.365 -1.59 -0.521 c --3.037 -0.996 -8.057 -3.068 -11.887 -9.807 c --12.95 -11.677 -16.306 -18.383 -14.886 -26.191 c --13.691 -32.763 -6.811 -41.823 7.247 -39.848 c -28.69 -36.835 31.472 -13.848 16.374 -3.144 c -8.08 2.736 0.041 0.056 0 0 c -0 3.595 l -15.462 3.595 27.999 -8.942 27.999 -24.404 c -27.999 -39.875 15.462 -52.403 0 -52.403 c --15.462 -52.403 -27.999 -39.875 -27.999 -24.404 c --27.999 -8.942 -15.462 3.595 0 3.595 c -0 0 l -f -Q -0.208 0.455 0.549 scn -q 1 0 0 1 334.002 
-SP(00JXhJVX2y^UqfA5q(/h< fK4?W¬iYh?Uؤ p;+ruUglҝG&OdkY1 C&&PoY fS"M.2Q;ibަƇͼtԨWXH2G9AUE;`Θz=Dmp̶Uܜeж4E}:٨R稱cyFK~ w@_ӹj,*mF/UN"#mUB#iYB@屚稸dU[styEj^i;ٯvhyԋ=ml -3-WFY6챚fYuzQY?4V EePOu[k7֪L8;!n'Dh%P5ݼD, -(v\ -jt9>9(ZInŕDS^, N_++{)KhǜD9F" `F6&f%k6@DKvK|heC2:x^\'zRȌDE-DKpGϢ/j>JDhɸ,hn$l_E;1%I2>\(.doNW/ɚe3ATǪapɅ(=ݮs/$0- -?qaXmuH 7`{Ǿ}=h6Ymf[NbBֻm:{5-Y]ht&>mu\# 0b12^Kr}&9?QBt d]G,#ErmcKL#o0'~0Qd݃YRƞ@(vG EDo.F3w4@}4K=JFK6xf lUf'"mMFK ZHj %é؁a}Tfl9 r^fnU7;e0CZZYHhpuS1 -@%lrS {q|"cCI)FHE2Ez(fuʈaIx@P]DYdS>ԬaD# ŊO"z/̱200b* vQ'Ey ?AP⩛Q0?r6'.Fxcp,tcSQBcGa[|Peuґh Y.p\%b[XL;ƒ[r.iM=zd5͎ -%OqZ.d^M`5!cha԰1(kAD#b -ZP5ȹz9mk|qs#HM!I5HFnɛYڑjo4 lU-G%6F*O a Du+* ->Da4dA<:A4Eб"ø7OfլXmֹV)qQSևײ88GrVAn,rbB~riBD fBzU&dQDiYLf:la -ϫq%uЫM8$c -a%8Z,r$)D V5s#EQ{Tӓ"704OUx=qj'|ѭj^PR(O-EHۿOG3<|T (j`) -P?**\%'R9S2]#SX `3_تЎtX>O,W by6/J؅h<"6^d:yc5ݭi(5>k-\1}ܢ.)0kȯ*v#KU[~}o5 P EV)߇ `x'[FQCt)J>&ĤV?kz-\| +.ƈ'iO ->\>,@1}( DB/T*bCL!ȇvZT߆ -FpGfj(&|@߅TM)fŲuǂS ]#dd,"'Ɨ&Nrp2'_MKZdj'uZ25$c8Ub6uDw7HƧ$a`U%\/ra^SĺJܗ^Pjiv496vHZeQǖ3h!%L$9QŖ"W=|i- -uR"I=$q2dd$ƻnWHz >LL BoJ} ҔՔ K@nUԡЇ;54-qB́G*'+WU7\݂nAἺߠ\BhL3E[d[|5}+n.e\j Щʹ ,J=z)ΥGdqV99sW)e>ҫd N"]&A{@5qFomHO**o[^bx0 !RE˘Ywheu#s& D,)_H? Yk.F ->!&gRRͮhv{!TëG{N)me]\TӅjGhܶS"S2]},$Kxk/,FQ譀"\1ݨb{7$;!*)CػQAGm$Mͥ\diO9Z[ftNJGm>G6:' -6 -hē\ @s[]hnk<T4Rکf ʲQqx3eJb|X]f4K i~9Xh6$B>dsBn_UtAޤU*C:OSFpu%J~c1QQz=FEolYQ`\;ZS;43P'*JOށŘ3w&%z0D)U٣^|.*ԢmkTx#n{荁K>_Hɕ},VzoT=zB~($Ue#^# .ԸnORp]Ӹ}!3< $2KR xX2{¾P)Et +6r)(-&.l`*%r ~!OpK 95<@CjOiR7 Q TlJLQҚhL{|[qʡJ -CX!Ϯ8ӫn>OK^Ư?{RvihI?YIyM4=O D ?Ka]-p 8O6^,뫓>-*i}~/掜}߸ZXO/l]m*IN 2beT4QX9;o'jl`#r:*ƅ@Ch>S:DjBWOa3 -& -\tS84 t)@Y*4J-G#K͌#eȧ(=[ڧŵA4 RBɤNYI~;w|Vǭ -34WJ&/[ZUU&#d09 +qtz Q$ *@MN~HnzWUw ^[!^˞m*ń#j85}Ul,F'ֽ"$Ag;2K3J ͖J/EֶX=9* We~!F6ħ6BdTrndlI0ivԓg_I!{TKX+m6ɼ$.ɰ c͠&J CCX|GdW&:`2I&%3 OAv 1ɔ^|2Eϙ&X -#Rҵ -'$I>ƞ$xMu핣y{ -Dc$Ny:W9xkm#%ЙÊm!c-R!<; N"g X]ĵ(d7#ME ;QU 1w-nh/\|t9* E?g\]R\`e\ͬ:6u*g+Xφ!P0yo Uk9]e܅YJYL{rE囈& R&X@];@~.6F "g]1:P:&<:6"E/<31Bsf ?8uмyf=9^|<#L\,p*(`/>|s$vk`Qӓ-=ʿh-n$q4;Xҁ"HV`>dNGhovpn0{1XEOӰD,O!0n;$SK%TmT -Bl6mz剋 ;͏]FL58PKfv%oc{ :e].b>''S6%I8Bʃa-Y#vژE4{˪,1Ѯav+IW8U`[M(LH?خ)NJK_^ ӆNDc -XF<`:7)m * -PwJ'2X*A*k`q?cVd8.ńi-8'ZP2+ͫvSS "L.f:+-'cPjj!_mQP[ 0)b8}!bRe sf'VdZEBs\dA] HfCJ0xqɝv5U:*EP5`FP}B.4 RQm - M{z9cr`zh.$8aMɞzW/jbk΍[ِ19z|JsTVcs)Zޱ&'t8d}~:U2Be6duEPDep R.$j" rã7j9uz *QT85]uG7AGr+!>3g4@8xisj$e[~8E>^"Pp@| *q>9bkI9T& 1#U[i}dQ g6`e3VLU ʱ~'" h TB&ς -Q3S* Y of爆5hp՗;0Ȯq~eA7v"R;ЦIou_^k$AwaYrXdmݣ`s2fQ2+qAAޔE};r -D/-yl1àtW#@wل듫L3%O -F^O#JP`4͜cz#dOp3$] -AXr˦,쳬9Q'qv5al}\IAuBeRқtAzx T:xWuJ Nr0*t;" -#Q:c4; |pw,*9kl}*gKϝ7T2 . 
-wS7;އ/Xהt;Ft U.t]= EFw۝bO8:Vĵ{WWҸyA~|k]'樏Kp)EJL)'d\JAbs(]KAMS _^R>nϿBČ -lH'ngae2& - XL > -@]4X){j5y->Z,R'J%+^uэDVHBp۴80t'2H^L:b/JW[-qM( -a#&]HЍ -v@B -#Gg26S(r -va -\.g73'c} 3jupK!+#6 gMB PANԈ"'/ Vik]:;c V4]B|_}'OpnWA[Xqc<7{:Y+`CsF4j}БyRY"$p }PzNU((᚝#tu@-d`M -D5tBր/tc -nJ%~xH @UT;eT`"u %f`F8@#;&R0nz -nT@돽|ap;swp -T:%Vvm~R g@|kbS&~p[Ëjj<{#VI!?f]ƭ1521LشO9^$P R\›ik2 -zŁC_fz A\*p#DLGUxqрT/h?}r4 #@:|=cc~$>3]7A{[ss1Ya?98TbǸk$L 5E#+f8#'ۛziɃcޙjA;NG$9Z'%vרF+1Jl4哲݃n^8 >z5w:yW,>g5:1/-.b9ˈ"HÊEI34Pyϛ&MO1/C i$E[<\e҃4a# .˶rL[A`oy^'@v1P6exq~?uX/Vgnj[%MnW K?6HdBa㷺teG4 B+f?{~]GHWšH娤́BnѿOYZ-1 ՁI&OX؄ZjzUw S6}O:aL"\)dʲҺ\Dܶ^?o(z`-N65:Yb8?\1]a%H4F2!:fݛwJ*צAGV 10jNHleuM):;Dfс8˸*#y t4=s|FY -O7G(@8ec:.C:v7Gs|?8\~4Y*qjN4鼪 fP !SYn xniZ/Rp\kf{=vh:Z|aa#GKq{ux#h>?FQ_7lhيF1vsyߏK:<vDc '3y11I1 aDj)7/p3B?/fa?{ @Hhe&¯ہ8tNJ2?qs*iEPrH߉BjR#*FFy)2Z o YkS -~jՅA:dfе|C - ::2BC/N-Jh8Ÿ.KPU"3nsІŭ>SL'ϝB}j x2߶:oy1"bݰƕ91b9_͝:tI YWhBΏKm͏J@UְGS691>AGc36[;rS^ü]ɥwLSt$ BWBygs6  / 7Dxn9Vɠy?y̜z/€i4kPGH.~iGC PBe4ԁܥJ r񫽗ӑ|P#1M*"RHAJ(b+@QHS5OCܜ сBP[IQySu`S?$9!IQCa/8+C<^D%o,G Gf_v ?jǚ>yv[p7ҿN-c/@Em±u%R8F1P7,*eH9QEѫrAw-cɆedP)F: 'S -}j0TW ؘ⨍"SA -UWKSpKuӍ' v r:#)0{bb@Uf8P;:^,?{^WaJ 0# sPa:O(Fq1#ݤH jO+ZjdujKDr r^NR9VO?{& - @a~'2bc &wiO†΋/mn4}5jN= x]"D'} -W{ڍ^qwب*Ve*@^28y̦.ZU%P^(KiJָ6UA6NSwȳ&lD4mW|%9wJ2.>Q\19:es/@gI%$7$Yje\bYZMƪ-qP/6|KK:,L4_ʔO&,4 m!Q\D9/H+.*$!XkA -[5LL=&tΫnL+S݀G|NOՓ9NC>'՝O.CT~c*O 4۪`9  ᴆ$.p~C$^mYw=lG7Nf)BxQ$}_ -s ]𱄶/Q(vrb"դI%(%E Hd=O0BBqF~c]} (KshӒl?թlNаT뼙UujoXޤaU"IT.WrRE"=B]>\(4KBM=OvgG*<ct-x I1UFAQPs';ܳ dڿ]Z&{e -MmQՇD@ =pK $cpdxުqÐbI ^CހraKھ7/d/4e9/K M $ JѶeF`HiVLiVk& uv=sևГU:}Gd@HX @Ӭ 6)蓃 -,*O:.c-˪k(+_̛ͯ(q` 6O^]Ne g^uQvO raQJb}+~p -̃\ ^AV; CM~KQGҍ̈́g'R⻋jTfEueW=,GeSIIJNCk?q|&,[uղN(,QoTP2m!QAz(hC0Q+pyH^RJ&ix$". kha*NHssl'7$zRcQZJwUUx,a :']sFE50-$ ˂j6l<إ7liX= -U!m+G9!f̼j$Aq9RwLMwF>&"hQgn 5YנHlR@iN'e\reav1#:X.U;XoTkb :%H$ྦྷ o![{_W -K} ?2#^LNM":9A5+bʺ'd!֪rOA1C#)*7,d>:&k]1)&HGBD; +;tC/Yo[C}Nf-~g{^GDXjQœˠ.'kH$ -i_`KG,x:p0e(MH eeIwkCe8J*<*]IB%4ufd -Q6 Œ_,Y$ˬcp$G( MxkOgK`m70 0N#WWݫ - AW̘}%cüaǥ))+Q$$`޹OEv(R0qL d+RэGRzyJqQP^EԻ#e`LKNJ$"NK?s -@ -LhBBw"w -K8;=<& LU,t/xʎ+j MPewGYmP,IzZYG62S^+s<1]bRSw;m LCtAnMc% Ēr_/JB&uATrvUG;)Ba_ -y_瘠O>7za0EdIR1BsNXaܜҾ{rn\_ -ۣ>*p ]?D*^;E*d@&̓QCKâCjcUFp g\YXu&ȺdBގG80 ̀ov.Dqv4HQ:"9h -Ua\n&OuY>"wLCr_Ol>W^rրlL֑-4?.DҼH@ >SX 9yZFR%4[ H9Ro&P0-\MrT24i ݠaQ0^Y2X/BC!G \:IV42"$Am\pa/f`ڧ |O"䚳.uHK8AXGDbbJAmR04φ/`)^7L;u|]sw-H%-Q7"<#ss\"| %m5>JL$e,4@='I&_H";@EСR% K0,t96(С섇0N'K,r\Tlb%VN[Mk蒦I$뱝pE+RSDfs*TI~QX,sc))4\src'ͳŧ -TqI$P@I+XNfi䩆s P0+n҂>:FjH-ut3HWj )`ЯqO&$AT.N03Bk5]ŴH:ۣVN/3/8zv>gȚm.QUg( 16[b0V4pwA.tJ@^T7m 9 A]x<ݻP>ȓm7žn. .G2(-VVLO\n-ɇ{\'H>#) -Wq3?K`*JRS!GRҊ-{f1p Dy!i|FS -#>t#GVRLP2>f[[.k%"_ΐ#8w'kXQJGjf8U+Jeb$bLˎ%_@)r.{Ծ;bń{ pMA9P6 pG=!d2ς+%8bCt*,:jǚ-m!ފi f'ؤb=M>s{sqMŬ`Gq4 Aʎa ޹z>L$;&or/刁H+dgza&#&T`9ƒ1)Y:ԐvJUِRt>o֠ EMȎo F" Hd/:$FsCX9fIM̛7Yf14>2{CBAYӴ+d$#pOVm{׳uB,X]BܥYiS % Jg@ W:EE2m߻x*R,aˮX-O~+3fnUUa;1ȉ*W9<SR*2cA "P@fipyT^2[&5s}wT#[g6BR!٤ڔpslo,oC04w'{|O\aPx%O"*q8@kȃgnol7`!d$;igpj[DF@d{ '̭S͜VD2Ë.&G[,q!CIi;;pa>_O >Alphz%ԣ߬D<%JPH,gHY V >EY< -39 N7W -vFpRxnj #y֥XF"";^ҫ)<ƧûI3r,)IУLCd" ;A dЛ9*^jqq]ijyyۨ"~(%0ֈ0I!ǘ3W3IA"XkZE->GP`YK@0+m5-\3d*n+?VJ\Js+4!yC E#"#D1[B3-Qމo?'(\꿵qc%j2[P.&}^j]ij86܌PŶH͏C<ޫd]УFz]ZC lz'xx0t[ BcAKl 4sD+ଡ଼qVnXeUF% 5(u! xb¹ : gRrJŘaQ HD8Ksx<1 -kgեHz.aWWcƃ f_HW̮:O*BPF@2_&[z=3'@vEgQH?$&[xw{"glQOM㔍Ivq)o)ڙ+a,Ni[pA7c'%$m< zeh"lܣx:b/B<4%N8W f]B!n) #K&iUڃ5ŭ;nYyQ6e-]&WS|v݁SfT﹛wE*r]x^#1X_d|.Y6 xj0>lY! 
TC&)B;m9XсH?QZ hgPI^Mt`0׫r;2nj?l5(&LRAȜvY !pPҦ6= fi _/̪ВB 4eOf P ȰddK+ .r6o aDrrJ(I1g{VPj@ӱx_ْtf< 9TA@؞DDE>`ERo/c9Бl03*V j!:Ϻ.31M˚wT: 5Se`Qn Wڎc9GφjE81Ka3Ot6@!s[S!S(QѳL6p8vsHqDzYp AxH g(VsYLÙ<*S n-!ZY/K[rHx>pPסؖ"V,ː)XH9}3 19qCs8>u#>D3ҥY]ϟ]Q9H(kڏہ@L mb\Z6–ApC=[=)|'x Rh 0s F6`$M1< PHZ<@Xn: ϔ$xFU@E'w D3 ҥmYkh\z6*%v0ƥẘ(z~gbjm5Si͎`0d^72pyOXbE;=7@3rdڥhƦŴ(hm=fO809@iYkS mzrD5' 1a^];x_??'G''~s~pruq'?ʏ8ϫ>.ϯ__=ξ| 곳ۯ>''(xO>9ާO}r..ngn/n7WgWg/N>ƻ胓:t6~5-s}lw: ,Rjֱ.u7~4kejN{w=x< 7;ߙyr?mq}r-ɯ/ᬊBmDn2.0_巌4ΡoIG!|tLwƯ |v -ow 5\X3-R-Iƫ{OȪl/?xG|uWͰy+D.>67盋GO/_bmm=:G?}۳//}r\O~_|znzD\~~qE <'*G)?N体AN'G>0zOG['2//-& -_f>Q2rY;2.ٷo_&x`ηGd;Nի}sqgkO<ӓw?O/^~u{sv{}˗aCs܅_߾΍og'䓋o.^D~gп6/ }/o+ERZB>{v ~ؗz'/_Nc7\㫯#n^|?g_>vio~o.n^9Lx󯾾}uܟ_}mxqOOr >/^F?6 ;yϰo< o}OУ~OɮJ -sy\ (OLRt%Q7DA9 䦜ZG_WyUvcrmHZ]W),A$Y}G_WA}]GWUytU]C{*ˣ<*ʣ誼cZWyU}G_WY}G_WyU}}Ǵʣ<*ʣ]]GWUytU]Ǵ;ӟΏݏTȕ&tbm')eAm\~<OwO W\'4m['[O{<޹CXx-cğpWbJ Ϸm_ת͡%/|.z?5nf6['56V8xn=mȹzjǬ<@y0쫋O7Kb\`;첯a1Oodzء_<|bءFT]:߇8X+ؾ=x̿7BKGo ͍;=U'FϾѶONLMs*A^#y|۷yo3omm7^{oyc :f׵ԄsKEL%9V~f*h94IϢ:dK-hho.^ #N]4&3nZet}o..Dmv5-˃YťyՓ?EOZ"7\-mqR`ӱ?uIp>y:`i\S=87t쫋37לʛr-R8]S|389j֓̚l/~o~=?dkcUj]z{s S_ۄ^W:o_zgn*Za1;̎k񹭕yՌS; -X7c[rq Vmeަgϗo~MŶW瓃)Y[xzO3n-[zksn< wXh~7N6d.meiǁ9kS~׃~Vkovܽl(.,Krs{"lXE\u;٩Z䔶8Mmrqe泗)M8iabr?z\ wW:%{jE,Qlר壍u$Ixmڶߓ9aa't?Mjq26X;`&h~2A:|OsZ{^D aq=6nvL!ǒ3qh恣nWmm04u/i&W[;XUQO -ˀ{Xx7hr}0E'Q9vy|烧EԲ|e~x`hkmi%ofAkVo4T9ǡU*n1>fe4=?L&H=|MnNv/|/}XJjK=j,v^SrJZ>e.Ҵ.Gu a$΄#>y3,NL;l]+Ð37U-Γiഭ:_q=OX Ƶ0M ~FyOkKci7+G=б܆&+౭Ͳ :eR:)EVVm<2vގ_tIܶ[ˬj5 ۫gIr zZ'_f4.ϻc g/޼!9fCnݾξxGzۿ vߟ{ut[^ߧC~Jmׯ_"W8зsy[ӑ<,y\ćP [l.G[$mkof>.ΎyQPnNɎO<$e]ʧz4ŶWw,b=ā3Ju{<{*8~Pc{`9O_B$'omkϷ|mz\´|~L51~zv{wVـbؓۓ={~{>Bu~qqӳ^}uq/ z“o^}ftngn|j=99r%Bc[?ޜ]zyvsqun?;y5~2폽<{yqrkGBFOnTAG~v۝R}׷/_ߞ|~jsū~_\;xQϷO^`oﯮ +]g__yhunmUz :$cmws0il{wF}8\Χ. fg3O>~}{=& 6s ūl72_?o_okg:͛=޶_g7Y*N뫋wob?w﹬: -? ssNΜ߳mp6f; --ZV,CRGdM_r߼X$=c]OP@y:C1{zOi4 U=C2=p7_rgoEa5y<^׿lD6y$ʵKF<rE=m3l3K׳aWNm!ﰚ&x[o  cD,t>?@uMoXoH?+iR`gdm r体bݹ_+ôNDv^٬K"9ZOWaΩ`5-a8LZ_׬.o[5ݼ{jVo+0P|ړ8`|_4wпV-Ƙ-k0R4YL$o= Ǒۜpw"d 'Bi,-6U 5ρ[3?=g)OQ}WI -Sm,˔v;dD \)ء=ĐUM%šDS”AdӀA=۟hx9!HKxE"nDDOM5Y?*zOe}_˯<.wa0.s b Lw { kh9aVNC0nhꦪOs>dj6l⢓@I1n'g݋kHlx?МDS:PlxO_4f7(tGm! E+Ӄ\zopd uѩ.%mR%>'!t8 YV,/|v;D6m)a6r6SU~Bl4YCIE P㴱d.l2ϋRKn(,w%|Kn~^Z_Gm{@C3{-9Z?z--zؑb ۝ |/Br ,Wό(M.)n6_@2'n|J޾$eoVüsWEڰO3ӁqD]_}?EëAtdvzw,ltmV o Oa+{p/t{6=, *[ҢWo,n\R2,F>nB/%dz8%'x!/i͘/"hi f&@_:cKoF,}t3#^5ajʤ/du@yvVDFo8j~<΄QfG; A[0𾄗BO^ 1? ouĀ+3E@uyY==|nn sl}wx:qbsUA[ĆH;cb9._@sӂJW\k˨aY#ԭOˮMǼd}zvSg|Ѫ! uz!6݉E|9;ܻMޟpfۊ}-w,mdkm6_y7>\r`E2j=zMrlaٱX>N)9<Zr.=6B䪾Q'KW,:}$ o6Q= ߄bh AL1/AIJ+=G슫.ĩ -dA1@Dvċ;R t -f n$p#D"{t*HBuV"xAR豇G5ޢHnBXEJ*W)Ok;UG9:Hg\,Vݍ 29 ȤQ"Q,f:FeI!;_$r6O`Kɑ&Wcy*P5V]-r-_nw2Te䞂Z#=yH=#I_Ur|uV륊,W彼~mm Ӌna';yO:ScɓXK! 
-#RhדXuV{ۣpzg7(#BtX!ST\knRP~FE[U+ޣ\1.ϊSز?2TW*TʋRm/Ar?J'R)Ie){ԨNep *&_R37JʱOU.ٕr3ܔk҄"[PV!oj-գ*,*\Wh>ԢU):nNgBz\Lw*d6vԛYYeR*[WJ֮*jd*jdUf AUƖC;=4nFmS4jR;}_ԱN3u -kQ=ꅫWrSƴ4.˧ xoi@*jjJ덧Ylcʵ|5%; iÃIܗ-bHl:'Oz9ef)tޤZD(miC-JGڟt;sF{2BV̪1e5% Yy\_sϹ m1ܪkH#ZZ}mHA^ -QW[h:.ÊrMx–` -`KELHpD.YYF9|oƖr.p̫ -M1I|B>7>w'y~1Gh~TMW+}ȥCٌlM-, /0ۻp7^752G|NI2 >oآtC[Vݱ1RaSk |:A =?hըؓ}hg7觉C UX0W87]Q1Ok}b[}^+G 3mѰxn\UuFp]F?O;}072f>\lnݴ[fu<k5Fa&f=رVR A2411G+GU7 y LΠ.)gN'rb!(`x7 m1K5 '"UegRrfAķɴrtxFuWJ'1t|6X?:BA\2:X n G8\YqɵONzeA,Śl&'$c[WIv]s@j 2b^ĒtPDƲ5Ͻ嶛Ok(xz?}9,[i=*ܧځ\B냙Ntr9]:Q3P\Z4j1[24$zrC\71ݜTwَIs@<c+S@Jgf=0i~>ܪb"+b@`I9'aJ4yHj b vK]v)[gphlRòٔ$Z)R'4XLpt{k#ҘݥTb Q -Q&lZ nMs)X'ՠْ ~A'^$9[> o?o1T4`N#V ;l26?`T1ب@!@|lGC7D؛%`,N -&9`)HO $]CJ|(!V$ç04)ưs$KA*Ac,DVDpAr&|M AbTWspYsc9)!L|zDJ%y1$l{f.ϒc$V)rc|DT a3M|lavoSRj^u֬h%CJDyCxLJ'HJjFD 9}H$X!1!$D+.U[$>j@)2 쑣0FvG֍ M!Sb&2ך.ⷔ}JUQ0'VKb |*Xpͬ^dc.50]Ao}~{@zACN  L,jtb"&vcA)E +` G<Ŝ9(w3fz|4 yd۽xf%y.2qw2@NM;K+TkPm֕u -- -a.F#^bjn9dȱr#&[B`%1M 5+GC1ztb}?ےЦ_Ԅe80Jm1-<i3|rNԔ~~J,=+_Fm4oݎwisg5gRfY1|guC!;+SgnWK p5`Lk`rGaRpcI:M([ -5{ í²`6.:o =w^ڽqԡVf$/Wxltq+rѶX6?a@NOa@E/@|.ߤ6| T -^F<؛q.bjWϐLv<^bR6SHˬ&5% b.d(_u'N$ #:< -G{uTQ@% omi- -:=Q=ۚ tu>=yC?o[᎟D׿M(*6n6RH{d0˸_r }yRj(iٲ?4ߤ+$ދ/s#e|YE*maq">=^٩6Va:AjD)e[1R50_sK:4/o5EOҤS &9k󽹩D.,tiic֋a=^kꥐ!^"Wn޴2_;[I?ny\,tyi2n"LƁ} D?bq8%șh&o(0&1̵`sT^5 (̊URy(^^<_mE 2\\40<"+c|? M$,?`![5 n9^ D?= aɪ*) . -v/LjKQh0$=aA__=?dǷu}6m2xksn}͠ R.%8.pK|zD☠5FUq1>'PťP5;knR\s>iIWvW1-%Zwc䒏d5q -&$t@n/WpO~TdԈdU z31d/YcsևŬ?xP~|"5-MJQ z< -SAhVWd(m"<-L"| Cb5z'ז%:m(dyWǵqE%5e -Nc!z/!O*4ᇎ8:kx0ZuQbWq\B%dJmK 1-fGݳ6lFe" ܷ\%1pb^|$|Mnmw{e~IKLHBNrH]lG@4=#-Y{ͧPEm:WGےr|qe#H] - - m,PYI$f5^ɔ&H^['TH/N9mNB9VB:$ ?Ҥ%km 0)mCpțmpNsT^Um\3@R73| u}/؁e5}E~΅ -/Ҫ_mDW>P;FKu"ϦnGEPHhB c&-14gGx$Gz 6IUݢbHpPy43\ic_F~9H0m VvݒX2H=@w܆!tbr6uHb4g|Q@#X Zt3 -RHa=gHoՒHcmk- K=,Y*XI.iؚ5r(ƮųRB<Ǔ=Dj =Ý34MW6q+?G5e?CSjp >mӕ%5Bo'D/x́Jgtx~PyԦLM餸O9s)$%'JB}rޭwta(Vd{礔&:1#O{/x:p+\KǔM+O(ࡁ۠KeSi)NjNmJtOd.4c ˭"6'ԙ$}?穾VqS*SQR;.C-3u7NɟO;.KٮJacO 1pĀUO)#(|J.r:X2R9,{,6Xg~lD\-| -:8$Oo#%nrJ%<.sH RD~{9VD -UT錉Q>22T*HB1䖅T&W&Z6A/HXXc "8B_ $4DJ4RYhMQc{GO|&BvR n=s t)NDsE%-b`̽(%-Hԩ04!vxZ3BGͬF9P&~ 57!z&\lE>@?B@w .x$qO@̫9>T33+>cK9Q&LŔ@% -7ݝ:NcJ*67aKƉȐMש7KÛn`S-8'HH-}EtPu2T /_԰xefpo_A^BP}wb$Ӫb)ƖhoȎÌ':fA!WAbB1 lUy ˿*c=G܏}2ѴiURmɥNP/a#xi -]]uLo ,N)b\vh$e x#$q@SV%{}ǂ{-dw KǪL6_h`$ky_=.H8K4Wn!ye(5RB:Q -%ybO7`e> !Yo1,@2TP{17_@9$$a^Ñf B!0G (5g52njRuֆW'=IcՔb.a-^< }nN|]-gֳp>PZcAyxpjCq]HHcm҅C>1HPât%} CEIJݷVX5'}YgE(&ܽ$"4IZ"q@φY$r%8KAy)n2>,ɯlLx5&Ӷ##@=fWoDHgLMb$0=|Kr)(L.]Z=gA-wY"}yb0o.AGyS=gq2~C'AG/ "%2̡Ϡcf ~C'ADz>ȡϠce ~C'A'-Vx-:vTn9tO A,>̡Ϡ#9t"f%5gm v%H/NZ}Z<.Mc5:sE:^#%l3o -ĀÒ6>"*sN8"$"r%%98$ɚ(}[GAcA >W -$&OB$8c2Mhۆ%JvO72!z嚾rClV4`#X*Qo9Īr{'v#ooyY}{@,=:{8x0 x{h;VIAK0 a)q^]n7y`Sӄ`M3J!ԩ_</,Ɏ`Ο<>UN3OC`_=;B){ endstream endobj 1390 0 obj <>stream -)~HiIK_QZm#Ss'K-}vKϗ52m`F;%{x|/eJ'{F ^]qT_ZMeS^d!i&=XE-Ob@WȞ+;wy:ʯH{W^$=Iጨ+6=.h qONǕf7#2.Al?D^̯R$~:uRXww8_2% "2ߋϤ$"3Gd7~y<"8Wy"2qi90E,ל,,aaYX>7.XzfItt7[U^ۇfOyu1RGE2_AT1V/ö=Wz&Qsޅ&}'bƙ:|M>)pY'vB{p0j -s#`X憎ǽẅv>[ڱpwp\_Lh[f}YDP3B33ec $>F'^;D8RH \O'Dh0N8 |ޙQFij{:>",C:Ut`1e>J]"8{"/}6`{G J"$f^X6-cW ֨5<ǰ`N/GFU"1 ],Ա|(!TbFX>2x;72Ra72R!i@s\bs}4CFZ#5exrRطS'z -;/_O}ܒM1q T8DD2$'?H㏏Ib!S0OK|Z Ur#<^ERzAPNIE dLf7mK\`d%n D9ʣNn*{tG=V3(q*x)fUFx-TJDW5x-neñN\LIlkGFX-$mlGΜ-ܽ~rXX‘[ߓcaT>ʍױ:Ic_LS:Az߬$WvN %JroEẘ#/!;!8>꾽*6"u|k H&:#>'}$…J-@'M60fZ˦x?̇t|1Azʦ←_dR#MǷOЕ_˦˥cɦ˥xl:Xf~5ϸaN6_.md=wi~Mgogi鸠^U6ml:$'l_Ql:eV;l:/dl;#_ʦ{_MG V6ݫLd 4 [W7 Wz.Rɭ!1b8ղ}.].Ϸ [tիߢM%IHҰgNq$UH.ޘU @*,%FN"eD&bl(l\8]=(l.{T+tq;=WMߔ.$̝P&D:I$'Ӆ>d(ʌhJREHQ6xܑo/ -I<:fpϕ=\O<׎(\0V* -u/OFgD:)'e3 qH/XDq RG0difO/M3PB=]P^| -zy~Li(Lډn̈́J[!uK#+ɮ7V )7Su.;LRbP nbmuX[V@+ "̘K5>R*vZ 3OPzBgW2S7oI<ݾU<( oK^ -U 
k$So-$OUPΪ%$2ë,ÆDI++jbm*܉e>~ FUJD*}O2pK -wb^a+If?}X;f*9{mȤ:dm%r3Ə`~a7{8OwD@8_[b/rd 当Η }!!u&1IJ:8z -&%=Sc^Ә|IncU4RV$&up5WJb:Ak'h ÐV׸IV1迼[r)jo_qm ^M 3UEV8 |lIór\ - %]pSDNOLK:lZ#&pic e]n]Grj-R - ̭9JssJH] -QFt 7AT&ˊR'J"6&uu U'* ٳ/cWZF:_TDpMj*Vq8CI<6 A*fNJv2B"|JK;:,D@Ee$Lh=UfA˺]d7-$~Yfߢ ?8|W^]q"|}hguU@X_8WvכLZĎuqCvD=>t`9o~R[›%=ճ,x&[9V޷9`t2KŎ_Z.1SՌUf*F)!yϕH*૒/y%_'HfN]c*3o{UEGͮ*Qe.Aw.?z:oQ, {J)2 Pj{H$N(59N@ gJ$ֿd=^@Hgw>|_e -t)_IyT^퓢|=E'E^UdNQ>2gD@( -^>(p}oT|̨KS^ Wn7XOPc wC {oWPYvU?֝]oXu^~xU'XO5##~t/ru]?qz|:q((''GuPxϻOuD^[Oܒ7|o<$.K9I| _of"}]?6owHCϣ$1{cC{J#coMT]c}I{B]?~oU.GZ>}2<GuC̪~B[OxGA'NLuxQO^8u~dS'~Ӄ~Y]?qÐ^?'KiɭdIɊ]O"m7YI'Ɇs&7pDk']?q XRyCGF+O|pZ7*rt%U8+~𞓶;ѻ7OƦ(T4hXŹA]`BZ nAi16ņBPd{tϫ NciձS̒Z-Bz|EPǟR勧iweirL=yeʚΈ3tbM=<]Um^pBٌj؏Vn|@0k#{Wij]tn:]ǡ7Qk kb"myjvuĂDz1 f,Ir>L_tKL˚>h66MW~O{1/,6U2_|$'yYQ]4 UŮDJd4jyIBٌlM!3I'"7Q{"'jQg_:M#g⌤9:k|tMb|" -hJU5/3?UlJf>dۇFM 835V Sۃ?QEwl9'\ښ`ҮtC,F^IIlvOV ;qW'}_B(@u"nMYT:m5W{GZuxg>2|4px`vAA/ND7 T Jkʈ+ u$ ꍯf 8&=(ƇF)J9ш8V,ebfkkB6Ҥ6K꧐& -={v_blQxƣ`# 'LׅmW|d!-4DW=G*(rgZt߄:kuuݬM;mQӌ c$g >5oaCK~F yixr8ֱi@~G.9 >Ŷ]daK0_r.165 %F|Wݧt'>.UML c)l1pA'D;=F[sh(BN,=_,|`4-6>˗񼩫Ga`Y./cz,= "Uwz]+oucJ?ukz3bvDN>B-@Ko \3wLZ`75N ղP`u[Am{ Glє 5N!aQ>QymM4b3n((l^}2zDaڧ:f|azkc{L"Hf钪 ]Sb9yӈHÇqX6J,.o{1(@cQC|~.Dg*4Owr@d^09Sw.0CZaqj|Ћ<23BgW#k G0 .A] d@@\a\PsܭUI \I/̺ %wo\5dsE\5l\z%MTtg{ b tLvcaloN{AƔ 2ԊVZRVC!i:TOnym#[t}6{] &mu"GKd]% -W~WAէ L_Пp@?Jݳ}=Nd]Wo\Ejp - _NTIhm*qO~7@rj$ReBùqH;{mwwxeD#-R^1Rt0i`G!A n|a2 -$M N,OxW}NP~2 "L+@J"0>3/Ї#c3G ֭dژQmlw[>2?D)s +*Ѩ Nл;c\Dqxb~D9̯HQn-{kNg~̯`F^׺1kr`bxc96̯\5\2W%P(+z2;2U̎L]Nmkc?1Qp߰)_sY%I߽ }5FUIeI0`ĊR=H`+'y|#(d$B\9';FC1@ޔyu؆3oҮ.j FޑX N6%1"մ֒Px:2jjux AOL"a1'B Fr^d69 \v$7YTv*[l -ão>2ؐp?AD|ȢG *:-PiGi13=s-Ƿ8$yϰH.}SEHs>mCBM'|pP&Uv!37gB -=~t @mسNN h(td(J& TEc|~OV&X=#y'7`RM$W -w 0Lb7M`kRğm- --H~{,8c -gflk*ƭn8:KB64؆SJUf._4w8Y@`q:~Xa+{"vtf3hyup`6 uG_˖m&3S]Ϯ>f^؝") C`W - &c ՗y7|RaCzkܩZF0g|Nz14P2eQ0ȵ2 Op]5rRzBp3;w<~'?Zy\{~yZ͓*N} v}|xéo~;^+Gpu.|zpm wK5n\7jOowy O*c;'^ډkϟMA@r?gNhx -/^*|ߟݿt!o} _K?\L+{?p6~f;7|xr^_tÝ­8pҲW>`,@lKߺBۯݹ%N!'2OOy04tWXݯ? nf;g߿{`;/ pm^p\(W>v Lwi~~cg}r+ԥKUv蒙' BO;! -=k دt! -؛wCOs!ߛ{M̂M oμ[߽w7wݽy[>/;[wn^ۜ9+W{7omՑ?+%W13bc.|_}jf0TNyI, nx-^% +.f!7ɗ_p/տ_˕>q;_\§={9&_ ԿW׃ӺovЭd NׄO}Aڙ^=[}WUWO/>o;_~^nI|ް0"{'S>S~没K___;ɸ|lelfއc_YIkJ[|e?׻S,krfm_û~Q?VW!_P =G?]{A~}יV+c -HaƘ&]Y=s{W&'{S{ԋ?C~>Tf™oᵯt7+:9Nv'4]]GOO|anϗzև ~8 ? etsšH+ -^_Mc~q׾eWϿ/W7=Ë׿?ޅ.w'ᇡHߍ_O/}䪃o40?hg}vQ>ktCW}{u)g.\/ɗ&0p'N_T_ -yo|G_ї.*i,JŃGT>-7?W0aXq 9FɃgsΝ~Z8ؘ7=D}W/+K'['}RWNw>ǕgqM៯a5^]wt? _x_>>pgĽC7Qd - ٻ)ܩ;і\>wx8S`z*gKmC|헏}ezk/Pg;7^}ލE?usyZ-rv;첸\gyeܭ3[l|aƻ6L }`u+|u_y'm׼O[Yƻ-.uVe ;տ%˯'zlp*ם~-GSqٶ+q| la;bbz!笙rH$|`}ƌ{w1'sKm!@·~\;i!ѽ'x->y!uy&*Z>!u-x _IvSlOFaEpfn͇펈 #_rV_uO}ωO~.?Xp?+*O&X.PTa0#)PTa(x0wUx_]=$d=q~/.\g@>>db],d]$pēBO_K BY'/%م2cO_x(ev -?~@Y:_^dE7~/Y%>Y_/v[GʣhVK᥷^毒~}pզk0 2oURF-p #Ss!MNŸO=~_דwOm^{?qeߗ7W}W׿ǪyS>Zxܣcĝ~Ƴh|ބr_~u}\]h%Ni?|K둫|X9jޜNqߞXC|Í>\*Jͮ|\ĉWB,~{76"|KE_. c9=T)pϜ  s܍ ^wܾ 1q'Y~$:(sii>CҀ(N^$r_ < U|yر()D_{cϧܼi/W=7OCx<^ǔZ]aT/yy u>mx/?]W*l`b`0!yݜm!'[sH6g[BPn|wݛW~341O=141O?1{GzlNl`,}"#$H6{(6?d~ꝏ$0G -zS5}D E؜:QG?ٛeNPrS(dH : >VQ CU^pkA;./Vcur-QS=eS i{/3 ,G蘄}(b95B B^;5Jrk(nf";Rg~eO{[MGjI-V#Û;hLxQs(G|  \RqMՒ=bkL[Ӱ"/EƜJS6Fq/ #_3?c- NR#D0b@V}(ZluǸCU'+)Te$;jEvȓfu۸0UhbF]K+=4sY${ǒU -4ǎ' ѱ~1 CtlIR@T9oǐyx*zOᚨJߔ*Mt@ ė@po7!dIqdBu X!KȊ@J6;]k_<-<<bFl^vg)VҨKfQec $O[Rӹ˭-GaaBr|ɘ(6"0r0Ɉ$@<GKJ#*=Vyuq.󋝗[7wXsO嫦2O]#D@ohk ]k1yV"~:piѩ m[,(2Xl/KÙ\-Ў>W/j0ٺD"1Tf֟Hdk`:*7…cbc΋U0AXݦ@yMN"Q4W -h@jUU jkD)bHH^y01,_ٌT" $suNeZ!rB1F 꺋fyv3'uʍ“jCû3qY5x. P9^әYl9"pCqHP2ȂX%cYP1TZPMmZq_BŽs6bdh>hx"I~Z 5AL-WG*Ngt5c,h SK4V):}qc\N7fs8 . 
9iS08k9(-JV3~ ˋez O](rc MW'AD@8"qm3ME$&əB"p$Gpրr*l5NΆcr@+PL R aJjnw?E'w,D̉޴ٚed+cՋ$ xZX"?SN HpZ- NU@'8>4ӧ+.3!*IN*\ׇ_az1g`-Х'B|yrN"abwXŴv_rUTCZ8);$\(tLr۔]&@oتSނsܰy\~B[/j0}J灟.SQ`@)*EUt$١[ɯbOoЫ3LYEP]gG =kNB_BغٚedWcՋ$'2ڃ @]&=]@5*vX@\ -slb|+L.Ūao5$J4Wvi&GlzQEԵa`jE+$߄P3̕K] *w^[iآfNXS(G`$TxxQ&}R|2FkDU@ x:;SzjTi?b@g*Js^)VӋ*@i2WnzyZkmpY1fI7f4Kփl:9 *q<hf;ed#b$LC.f8-)]VM˩|3!e{ ihrLpZbrPF1 J:؝CO 3Z~)}H4x0ࠨSJVhˁ@hXI\,yw ;4,0% Isi#6ߘ?)Ïi*cJ?XNJV6UI濩)09/tC0Y;t{"fBG!9GT4e !\OGXp|yN(N|&,.kF{NY%r@ob}@FMZ3AJ`D\Eu6M -RFB@N'$(0H NQ=Srp04Keb P%G$jIиjmcիF݃ n,wFP%MŹYD(`Q^ +=H5RrR?pQ#fDxB"F֐# H1/(PVj 6F3 1>lkIV1Gq4`4=^ zmw"<_ԹU1GZ<ZS\21Ψ>s2jAP$HJzepq($eT䴬ivgrKbt!o1f.sɰֻ&k'c$M`} UʇcpV -ALR&,NGW>{cJ)oNb!c$cl$JaɔI:-]!G EO_$ z 2aI& M" -ebA'R U' BCKHИ]8;p޺)) ~3t4c^'Jl'y5T@V NKJ6ծ(`u|A9R&m gF QL"!DYr fg9$Gbź1&kW -$Q]o~1"Lp=~ -sQf|(t(G@q $H>(h~<~-ܑ&!F7sfT0^aO,XVGYw?(TLA`m]05XNw`(yI#3X;#3'Nc =YAF&LuY=һMF5\D2IܡOȧr* zEFހd8aj#{NHĤyyԜaԊ4(s?C |s5\}vY?1;JԷ'UZr VV'ӣ#Ǣ_!ZD$G@VOp^9]8d;HBkW͑b'4/)+4&b\*-yT+bBTz|37fHk iI5u騑=[VJbE^<0٫$K{ƶChPE!c9Wrm -2Ŵ6 -'P#t 5"GH2 Fb2#J [<U#]yމ -mN̓ғ"R3Fne6ǵMfID'.T60wI -?x>^YVRꘀ0.~1FkQw(;~Nέ֓ 0jvd0P?Xux֨?u0RY??B-YrUM$XC]jbվѥ̕Y]o}x7ܾ;~}V%ςNV iߞݑ7Uyv:kӝ該;"8dh ]ݤ-CjTj~CV{(\:[?ɳ7}qk/l.~孃P Ld5ׯ}݃[/xx ߹^{S9s޽;oܺ}٬g޿uCxw~[??#io|`+wo?}w|>{_~g7t\ٜ7s} 9twxpܺy/~=Xa˵wT6Uog杽x?'ݓ@I`2lOV7r)Ðw涌Ͳfڠ:G̋yf815J;GB\93; nj2F\ U_ޱf -i9(d!g1*2$9a7׍=.Yk^EOqIW'  #8}bE&2Ɋ<1bg\]eO.?bu}5݄ꪎ/-߇*}7A%9M)}:QYF~_-`N'*;_1(hx(5bx/'~G;bPj$^R%B ?xz[j:2x/TΛNԼ~Wqed9<ռ|;9ЉhY?bi:ra2aHA]e_ (]ZBSvYͭ} 8gCfh%11w=eҶjx]333άQ5HhCa+lOjqD!&Y^+2GuaHs|g2_N 9` -`6_ۋݍSN#vbeHŨqI|&0R!r3YCg&*QT<Iwj,eJN/(qSaQ#1Ɋ8jnYCo9zNB>.dkd: Ol -W<} MM!@ʎRn1㣌9Q(1> -02=Z7'@Q̊%h9l|B~鏚VEimABRC082PCUs -drYEڇy8T{MO9Mh%¤eU f >?*/[2akzb%JxUa9~UT'YdP&'aY &= ݝVLiSRz=u빪BUR,dRL&U V.a(9do ˆ94|EJĈbMg(DHT%b tRsZV_d]B}q>c)K|廬|lCYiˢ316S)APί jMK}\eTF`7.j}t,b= K2SJ~jr -5tzcSY+sl΁s+.ܗ1SW e,]mG*vᝁ-_-CB73d2Ӝ@&51rQ W Ʉy7٭N [8hU -(G|aaS,jiI֍ȇ'xgyG9 9znEVqV$mF!fш4pQwB]ꙢkVToM.'# Yb ds]SmXW՛РU6HR;iNeAAU6t.}R #8[kf=s`3|\-7]qsb:❶~JzMuJ{=mK,JOh\ZESWg̃l\?Kb]Vuמ 8^1[314CH3+##Z*Y` FԿb &PN08 2RNN~j|7=Y6*;j-lTDK[r0ꀭ0 U47JMfaz -UBR2PmCP MIX/܄Y^~egU(ZKSMa%޼*[SPXmt/%K9GV$K/Jם1^g?l}(ϩl{0y0~Mipe#}#mIѷ7;o+#OUcr܊ )ї$8}Wp^kr(q(t?sw3!VE7YO?>} ;0y^V=&w9V>lV9c$I_X4?j4/"yLW\!LmQP4m4:5`Y#Z|n+%jrV5[T[qbdiAG ΄`͇E~^Gڵ^s ήOah.ha4 /#N?0Rqb 2/ Ҍ)l}|B`-摅HM*Q gAjq6S{3 dCfun]WMhmCnedyT#qk14E{J)nŒ6 WZ, 80T̔,1^>1d™Xׁ1ЃYtyi9\ S(Ez!eeL{ `RtY.ZewgqwމWwo^ӭOn}u.Gh܈""r1x޿q[dGI߹zʅkq\+ؒg{]G#ߖoPl&=kexG+q~ ruۃgggG΄)OI6Hz ɓj%+J$-a U":CG먅]/Zk xK;d 4f}8ZLJ5AKN3hjCH@Om6~(Et֓AiG1Wre:=Sg7 nR<]lh(*8T|d5ev.X)NuqM[&h'bҦ0,OØev:HT,jj:/27"Y73R }Ri lr>Ffw'dk -R14$gҔIk QSt:b^č^ uSLS@=?d|׮RW]OѢ+!BJ({if;c]4pR؂OvoxAS0Ed!Y:XHwaR$S$Ï3-Cả\* 3"e[4#2UB fJ@.KXt)v"CA;c.T]zEYj4ieԠeʈ+}|d Nj[ĐrP ޞ;/ KJmbԌ~k2I%c \򄯖'1!UyUr1Tbs1Q ߨnU. RD_i}F1[:fdF)bn,+Krk/)Kv4#&"[!Tx"r')ƇO)i>W6fmdu%3 -U-MpiCLXs4ӲǢF}ВXvh;gGC`#;/ %E  ȿŸ!Lڵ=iRDOc w!`؊kxV6.&, - bd<`c. Lѻ+K:*|,҃2³e 6.wm 5(={QV @#賴t6:K~մjNn}r2 whFY$He:"Q3ؚڱmNCޣ9/!1 C…x@{vjY1/ݭ.zK[:f)Sri<]衻Ta'٨3VdVU- tܵ(d2}8-D8c mdQhH8f7MоR3 w囊ژKqJ.660ϠU6$faǪ 2aFcfRr+6SM-_|mX~1%IA_c @FJViZg4-% ]U.yMbN*qE!TKTlƖϚ$yF e*-|xovMi1k GLOw8bS` -/#&(cR&D0fDHc}}Y{M^`?,r1[:gmY촼q0dVH52$5*:y-wePCf tJh_ RjWϧ*k#DYq e4\WDdv Bx͘V'zCXKӁ t*sW]x:GK[T5k+&YZ]q+SؑYUY!7U3Tu7Sd2tߐYB3hkFZ -.3[jtD4S -tl9HS>=wAYt6$y, ]-j6Y2p!Am g|]ϘC>UvbFш^3,ͧ*A֏Immm-X>FlZ)h4ĬSg|h)C\ck@ڰ,BNSYgH -owfʬXhV?"DZKM)r٥hr6*=A@5յ'n@w -Nu@@KǜcL y̑dS&3jmWx(("s C Kws MJg*ZLpt>eM*H 50%VFY˄xU-PLvF3ʟ)ȃ+Ƃ!2@a6%iΚYy kӃ#l72hVPy5+gԀgڎ:#`ig7oKͯIqK-jxlg/!rgm4ssVwhngXB4 G5Y},_ hdl0߉@^+b4b$@BIl=*0)X] =`45G:VrZe0b}&:. 
}f|gl -ް%poKݥ-U7,`ץY&cƫ7,lp_K!%ЉՃA3VZ+ -\K*Tj1ّ~QRCbZ$J5zvV=ޏ`Jl c6i"Dׄr`a^Fq@jʾ@Ǫ&}@IZJRg(O~ \\i6"xrJvBYPGH|0*\,\l4naErT֋8@.h]kh=+ -1ֲ7o`*S S͂d `1TF)V"(l5Y*T}-Ѻ~j#ʭiɤҵk3HUa@T# - ,Hq+($,.y0\ -kPz4]j'~*Vr%2CPZ**C &6tԘOD^P?f\([mŘ((H÷`Ql6hWŕJt}kЬ[* *\I!?b>t毶&fvAp,#QFC -Rzu4z> 3QfK;ғ7XA[ᢵ(x54d24Ü(?qhq@I,o0)w W;ONVH=#w)ZYc$y0d7š*&Hֆ-E-~X`2`O:Z>u},w LSLPt4ea$1+6VҜ?db@S@&A!j=24 GT# "%hj48v酮;d !vVv;6b^9S@]`| ˺L8 `;e? -@ 0طͮp(&vÆO(n, %`CRjZl5 rᤥk׸U0h+tC}2aPTهWESu1jh1FXqhy kD:fPf rɒV;K9ٖV% Xeh!8P lIU4MN[Rig8C`&Md+  Rt+Fw&C.eK3 0Ҁj9G2rzuR,pA.uq*}2G ʄRu(*Bz\L3]?Cߢ0Z"I1XO%?E眷rpbVyXベKlIoS1FȳȮB$2 >\JMdF+ۆhGׂl8VkJ',,+<Ql -&hV['rȿ2Q41hRYI˛kncRiZ1~e_#`!/;sCqSz~m Rd%ӎ3D=;+PL4tcI l=eC9[#G󒢉FMrM 8j+[׮*3 -tF@X =bFrvIR}3/gZĔ%r>oa>>?/6K"Ղ:j;l竪4(u-WܷGFkFu8糪h5QK6heuuO,!wiHkhɼl`O0.A<82 s@}nA߮n\"N7$'`+l 9|1Z#KCkleٽ)bQ"g麒 bt7"]Yc NV摓EO+Xp|)Mrg۬f{ aXXjc:Iqk Z[&c -C&qq>;=`Q>К3i'{Z"/C=g5?nMJ~f]>\JWW1eh<˸|7:h;|h9|!t >C(>z#;5Ţ P_')[Dx͓7Ga|&Ml5awM@Wv G09T'!ϥ˂e!ƺڡpe1e1e1e1'b罏9{a&Dz(1}$5]>3T֗1 F|>iݾ.hvIۙIZI~VINlEɒHSHǘuپ:D5K Eoh_Y-iہՐߝa ?=;xziw,fƲEZM]}̃\9Slyۨ.l-qP٧RC{L[mݕ-%~!Y -A~kKՈǺ Yu>@sp#^-ؿ59mq]7jPXcFjlU~? ^yjùtA"w!"D6 VdA^sZ( \Z-u.ȟƉ2̨UWSaug뢵NM%0dUxU=/#:09Ged6E֙@ !Q իII4i.H& -~VT(vZ1hvff - L򅜖y45!)n`^ L2*gY{|Q{Rl5aGTOe92PWtxYՊQDu1I`T21RP:--n;Ӵ@1rmff%?:qYi%?ה:VRke9tBu3Ԭ7,0mZshe6tO]bP4.Uf.7SFCc.pZ5)8MЬfXQA;sX:B Βa^#ϧŖin;n̨+1 - - PetIȻeް #- 5c5NZ>2Z%eXkxd6M i+hpn\_h6i$ YRsH`4BYŷ - [2om:>u,)-Y}AmQJemQBB I&)rͪk^`|ݙ^C1D[̌XQbhtm@$;Q(<Y.WҤ.Xz`Nk3B܌CL'Vȣq|MuU1< iZBS2S#8a,:m'v>eT4'PP$Z_GWfMg\- ÀnFȴfB>xg.C}amVYk';"hdgjӲ0>s˜AHCQhBkQi]E VA+C& 蝲Q)k~x#Z؆뇵TGwoIdFuƨ5S4 -6VBȢi[^??F=֜Q(y|c3RՄ5#BYa;]DT4md?(*p`qij'R0 ͡6#Hh!$֑:D(CA쵔Ccój%?YZHȚ^:\1>u 0m(Gy3q,"lo#V?[l2J2E {_a7lrzD mm$(nU #I94 R"B heQ{:*wN}60)t[#guF"nļ}b^ZfkcQdW [,ի)cewJȖϛKzDbQDېūBRrĿ\HΨ;ΜqY mLjj(lzʷS} ~5xL9,T4 5@=Z Y(a{DipU̟2ʫmM%P-thB@iF+F ^} ZV8@6u%b>#}eNfEَ U-V huQ6^+HBocAkaG]fG -1IӠ#N=c#F !k:2x_!rGi=n JbMv\`z@JƝ!T;Sg H2! K^ޮ-M]1a8ǔ: ]oJdX~K+떵C-DQ -!]چ`*샷TN-]QضJRIxYL-`..ȹ<فŠOfd*9v4K0Us?i2n1pH6UZIz"Z9siv2mD@^*ڈMoŏ1egd-2u\ NZUI V˺d5}V$-x_0ꕤ2ŸYi~T蒳~a3C}s\@[)k̵ӻu!``[Jg̣3jlU5c1f)$ZB#oϾxwG1%EKe'i)2Q#9d<]Jnm/p5kڞTdAYpZ48V08Uj4˥s4%``@oWYYʺRDZB!>GN-_ Q;Z֏!;ny<͝h<jxѴ3=8pvP{:kuh+|]b䰝lePV_SMh(sRY5稁 grnrm G|9h$΂7_cӬOCì7cN 0!b'^e n$mV5=XbHI fFbneVi W-}}_sv\Le lbX$3--3鱦`݊3-Zr\UE48Ϊ{5<-,zsp\Ioc|qkЬ96>L-Mbut9Kcj3l#i)f»}.8;phքf -Q3T 4~tǪbjH]`Eth̃Ldc#i1,<'clAKZg' 4|^CQgMLט?0ꈏQ2w&5$Aߦ,?y`oMi"Ct+T[أؿM%uecmԝk<Xc#hl^ o1 ''GmQi5[88o$y)3`UhNS,8C}yH#jXqʒ*&bFU#cǒ&b$0ZEcwfdz_|}1b=lnlmeu#,7\adAy2kE%lӗ} +2?Wj#Ppz/W/,Q~#`9QxQPXhS+ϡ[6;U;8<5tu٣PLRήnI1imEȺsUXHY}eg2J5k?Fe0=l n ="J]KVO@`c:\l 'ͣT4nѬ=zlBGqXvs{'^/޽yO>rrBWd ^b{7꣤\vr\-g{AB Y^ҥ5K}VIexwD72?>sDESD1D@1~MϦȨU~p^"h4nC)kV HRNEJ۵2otI5/&4oG6{=јn1*f^GY ,~1ԪjckO'vBYX-=G=L w| ȧ*,f״l:o3G7́0.yw棽#E;s4gcan?-cQ5Σ՘Ί9wwr<<<<=变 FEP #e#$+& h7S -Ϥ|ʢD{;rb4mU&WiϣeN45,11~`=zyCT -Y/}I,j!/LS -~~ezo] jx[8Z@:zuL"D3ǿuh c3%7 &t -PlT^T< #֑Wf3.]xg9:=x|M[:`yKRuJMHF9h Z(N`&h=>c -\چ%JJҏLV ULj -ëAQ^fnܡ0*P1h( E=/t7j϶ڈ,*_63mXX*s}wYtn}^=H?~QIe#Dye*ʉWM꺆M -GHb8,f!AZ7 ћWY t9`t8 p: 7I{:voo˚|8託ߝid&ǜW퀭 ʴCJ4;X "Y>ɍ#|8(,ЍV\fa/Ssh(/˓Ԁ8 EYOW8Stc+Zn?#Nׅ?~Ћ1}Z/ke'g0nt,,.*BĨa KJ+#N9M py1C:ihxbv8:&@xAG bѭ <6ax."X 4r(Fay2;;v2x[#}hPQ;>3Xρu>j (#ŷW,0L^6hclC'2mK(m9~diĂ`ѼBQ~tk;!n޷);b_S3/PKуie3%ji2  ;ZYY5}v3XYPE-d4#VW)sQgY5Y8*7Cqth4QSYFA>o#snֵƬ5vQƞ@^9Q!^Uu8APqQPIHŞE!A$|]j-) *A E!-,cUf+dLP##Xe, 2v2z}B*s;;hcNH A6|t[()1GaT09u@0y5UjS=L*` ;\9@p~ӆW|?uJ`~iǣz#m@1a"lAz[Ar256dA"MѰH.a5j֑j,Yazq9!,񭫚v wuʧh|q/ς/E2qOmSZ8P}oR RГܜ$ uBl 9033zʁ` PZk(.h}ϪQ ݃;QT/]M6? 
-%((ԍB `8:Q0,vGiP] 蝄?]Db' K7dѢO /1;jX$r6x\6@حM'%8 =k&&u'G%"^Oj*k7pYQU(A!*]ŒZVFT'j l~ww)o2v1PBwQqMa7SvO"hZPxzSlCzwY?5޿gw wQFG$L2-'r~X"u;\%>`-qF ԶI -491X8I,f@{YZVz3o`"c5(yW{CDp&g -pKwt"C+>A3]Daŭ(m`"@0I)`d'G\|?mhS^aCq%k}K(mKý27AQP :&@=2a!a٦f -5PU2!ƪjɚT|+nXxd(@)t]8p@9Wr9m#{@1#.Zw\szLO/Y(nj=GPSo_ΩCtW9AaϦ!Xpe*( e9Q劑-6d6*!wM&ºv1t aU|σ:kH+,ˊ5/MK[.P|jV -0{j$ =,<"#>eGn!OgO/`0H Z6ђM뜦)lE⬨#L8묊f -kos!&G~ -U6<ǒLö>Y @u9ea*I}XlfD@ ݜ}[PULoA$t*XiO~( dڃ)4p`5L=y)PaL*5sm/ѮA@,u c̳$>"J@ejA97䀿Or-GQeF1?|?=fu3*G'*i8Rc0TQW8 XaŰʱTG67Ⱥ`FaY=į̀))TBlTVpQs=\eN/"svEbNb{ n 6ϣٺPKMC n14^/5EfZ\e[1fkWWķ| VFk+lo"Bֵ]ruQ)u - -`) !|gM -~erӴ),gڝCShBkZlU~SVHqe` EJ5̯ + 9W)7y]E{32]Þ+{n-.krR64O`<  a^ z#AUfpL/E1sIJ)T)KhyV-B'%fRM4F0c2DՓvf)e,6/&zYEgM @'K*ZAl+nCY ˿YݑRo_a]E0KS \!?W9a<*CY8ƳS{fusXk<4ڻdo%x{ّP3xJv',e^иA0;؈\pAKv6CU pR( ZEv̎,MX ҙZ 2fs ߆NTOy0Im, ̞ 0+EȽ.40L Xi)JZSObKɞ*g.O/.-ҰD3^fgX0Zb$BOGN(z,1I^ȨduRĀf u_8 \X:iwI4:BD40DiS+Q4ZDa@=W儲ULPӚ^ - TVw&§dVÄT5,.ǎ} ɾ,wa5[ɑׅSJV*6]=L2zMY* ’M;,CɘjRQܸTe -)j7$/11k>p5JMQUњ1J\6Xz%Vْ{qQ$K,"tP ͒+EQZSEw9Թֶ4 z]"U`ދ5x~L ?gYtR_ 7~XݻȜ54ku,cs7UTqMQ9ףz\a)JRjŷ 8I8Q=Ȃ e3 _d߼Ѥz) 7y=xGi鵚 I=)4 dд%#oDCQ#<$(]׶l:XIx8dXW%$")SSwО;n}[ *7Iȗ,Z_b>iΔ~en}J)psyO*9p8FY-T7gJjĀ3 _j)> U^=,IbX? -Dd%kMUzBw/Qt ʯ97mEA^z@g^_/~«{;}d/1X^,+h~3|~y/f*y7} yP؅%!?$тk"8Y5B_˞XxR38܄ۄ {+p/Wj!fD,J:%+.&Ђ#,WA]zs $ 6#Ɲrť 5b4k6g}$xNתOl@pAr:zmZMs$TA'lEfv$g+%1kPEKg%)K%- kXaDTp(=LU8w/xk(peú޲pu hIe3/wdn0DAB\4|EEQu6Ƣ:o6 u`]'/)I\ :dU. -qMi+: EWDUZz8~֘ -sy}^lmqC2{ (bhx +c%HgVڰe W9=XVu$0w^Pyz=JUL ?dtxc K}kFߦ}aH@jlwW*:bkda8ʷj8 Z}h0whf7˟/C6r7V DA6R3]ܶNRO/񺃚_ @E"7;=e+Gs#8N1PE;|[3^pR|#|m1Bdvz@@Z:yJIr*l`n6 sVm03Uu| ֩MR1wG+]OL/#!u*XV$B)z -(ڦde -7h\s&dCu%H#M4>~޽!eHH$lhE--%Dx" cl.'Ś~iL8%qDph*ɖ(޵s5m -t E\Th^m8Hn'mTˡcN ;e#l(Ӛz/yw/xO,xo|5urLWu=cƧ4./f"d(,-GrsZ`tڮ^Շu8m$wdV'UƑ(ޗ+y˧ջb<^OHh=~KHt_&ke 0ϷSΈoma,N 1͜8ݶYd{Q-NlE_Ҹudy 0FBGm\⺫bClfk3|H4HVRqS|v=q|c=;[0. sޱؽPSdҒ-Yd!ns6e+V] Rr^Q[Bd#0ϰ.B<?De 8k|\8*`Q-v![r7">?޹QtZ! "֘^`ATeg r8VAqm -,)8$ sT,q@)Xb2 f9RJӬ#MYF yAI2R\JvYM_ۀޭe^O^fc5 ]6 SP"1ƫbK[ RdZ:TŃ|Ȇꇇ,܁jT=):OGņS5vUaD}[(s7Hqn'p1ֱWl!&;=u>6  -XتB,pkP5]4]hR})ec+>Տ cT*4L U+0qe βi>PHn Q\I\+ N3?%LM0aLYܚn$a䛶C6}SsC<*ppe:Av^'n-q%TUΠ_&UqWuaFmu}??r%< JzM>LuS8SmXNg~ Eg+갣WU5/bܜ3_6:8g0f\D[LjH$bU83}gDY - ?k7!odlف1Yܟd:VC=^Gfc !jY|J -dR\|껿]ec^ -h̨hX/Se!t=;N]FXFJ;9cE\+z"DK^E[a7G}hgaSUS!_9#S k:(!$mXmSmP^+u|<@`d!l+Iц Q&Si9rs0Au-Dփ܊P1v4:gu+}?PFrrqV}A萁l,'^DN݌fisZ?8&ûmO!^//^ې11vәA-Sa<Âk-g d"dɪi%:#wWq焅 |K)vYc,mrMJC7GtLbʸ*eM}/6._.ԃ{76G#\c-Xsz;NN"@MA'W }ڵ6+ e(amՂJT}%]`}Q_rUZxbjh39HkRp8(Jo֪qxDCpCgf{aȢߩ ƓG5 -{I1P+V;duZVWk_se*Y$8ruz?^7jEn3{Z?\_ׂዄ,|8Gs'Z3êjYw. 5^JϮ fa\}N;ޟ -]$DXceXu?8*pL^9]1`]2NTEGj2;֍d޲MTH24Rn ."Sc&ZDx*8Tjq]7Y8]t_8ݵn[iq& l#(!-,*hɼ?W6o4n=N}P|UI3Od*@9.9au[vdIdC%:z> ]ZwdL![HtkmZO; b: +.?{E??~۷o?J,-x?',(b~o]Wx/Ov\??qc(.v9DT5X`({ vO5D{yPM0oNЁq ?Zof~ -IRh*Qɶqqz(+x0KP닻dn"KT崅oAaؒ)2<Lv\7on?k~X7o^W_yF/_yA܅c -&yY<;x7'Ϣ)__^_7߽$-mD&fdYFqhKlsJa5D^u(_.|`]}˱9<=XŰNд1te@9,$ThP2? h`g}@ SL!v\`CE/x5"L6VmrÙ]--Lս^rZ}F;Ջ^u4j@zosΩ3)Af0@4`/X'}V;/,Rhb2m;hy4'GdDmU 5Do{`ﺪxpg!sj]ku7C=X6.OfJ(#}6E}nmKvGly4F%3']E\e*K#-[-X!>& -w({2E`zA#>DzcMg)eýLPg;-W~Z/ۃ_/bBiE=ƳaIXȉL5m̖Z26g|Z9s4a="XcJW'%~8PxM^S@?Jr+tY:̍qP\ԠxSci&d[ƹ bYYrp{yt*AYsl17Ε%.LRX(hIQlFȁ8UJ~_$,4h}筸(q>s΃(b02q -i" ћIwtH@T8ΦF7S γsoӺ -^,ަg{9lᙽ$+Zu4{/g"jae(NÌ35C˃N(2)ȣl?zꝪe,:HSKiE_ 2T%ˢtJi͘qh -N4Q?kPݜ*6p邢a#|6Ž[~;cQOo僖DB~8(#P;Yo^fjkLBȌ:,5^`B] D57mdmtҜlȸ:]DBʎIWZ;AqBsln֋>ʝսق=HkЎ)ʮ8H6xKb遪M9* P1s%A&PVTZ!J% ! 
-XWN)qZP&j$"6D;S1à_wlNjgs9l^":$/#igiWPM:QSR~OL$-k1>;t`W-eZ -v,)@$R8{TW3obA,3`QrG}d "PG8=FW@ ˃{7 pD0*?"튥cBZqbE$Xm)>[K6F\K7==Πe2ӊ>zFc/cI4AT`aAw!~ԯ-A Ԝ|5l=g󊍙p^Ņ2jaqvifd?]RK]q -Tjgt &[>WTf'ŒDHiˈP}oEl0bPqJ -Pvad1l9( ~Tbm%ۊ3C*>qW"%X%;[UȯށPjWdl#r⨏C9q>.ai݅Ruqn'W|:=٫h:CYad7q,ѭ`aS5jɼ@;}~ cJaSbMQ2xxAYR-,_iB(IwgaEjrn|vxj򸢐΅), 5W_MnZcILu'S &C屺9ݔ 1qї%gl@9]ث; @&vSzCe8="YU(M,0Q*!9%T}ɜQ˰b^fD)1y80XϢߥ) nUdPHƆ7BIC}7SӁstjVg -Qg^OEi f^\jI\n֩@+װ5oXۤ7갨W0.*v6 Zm+&4_NKEGNj\fwP''ռIo7o|fpIT~0Vv,8JȽH!&*ϮbU=ΩzUqcr6k34}[l4mw -zZwuDe.‡boEVDV}627|vmODg:; -И6QeYE_ /@ Fʬ̳>cV-$RStkH|&f|AmPh߾a0dFZlxdXv 7ȮwhW0D?_a=囂mLͬ7|2s] >9>E"c g3@۬Qm!z|WPiNyK䠻s"`I}2+@ =ȢfvRG._IL\͠x*xRqH'KqʶdeS`$N=w2i ay%$CX^YMrT-I[ECrys(ho Y)޿_ݏْ_[ۏZ?2͛W7핝MWp -|/WphA3@B2'MMƒhr|L#Q#(}W-8|E*_$+.q}ݦݦߦܦyu]S]%T'Z8}gjv0~,\:cCu5:p(.5?emrܼ#QXAc##PzPW{?3)4X 0 Q4i0z,5n*S8?Hq> -uzW䥖848MP݌.ܚ{fH!,M'Zf5_3yLnQxtԶ7I']t߯aϷ~4_F_8 -$r wp`*W5yzU@fǨN@.}gx@lwXTt}}W[7v0i3}+)]NK{?}^ݢuKum -eKRu "zۺ1:0R5ۥm ꎙI}h9Y,5c(I%^ Je!f2Kmo,FudںG%M;f|ؾ_ӃnO=L[6jR@'LHQZ,4&&w0H.gV6=۰&fBhOp5ɜZ;Ts;;`Smײ+.'_@Xd=w5g뮉߄񱳟eN&@=ҪL:˕U}Ҹ_ӃnO=W j0QM[vIM1muD{CԍjeYTx(q5sYuϴzZk$\%ə;\wuhC{MD c&P$׹nNy*RL rl*Zzuu6vC~Lcz;p =Z&]xʑJ|B7͓a4 ˩a kvyCwIo<oYqS+TF(Υ6q]wt(/ه-)Ƥۙe`\+".,6V :E`/K&p\@WS=/0Ju*DzuOۿ$gHmf V8NIҪe[{i;\$0UU-G|6Q~6jwf"++cq,^)͂)S7xv]e;ԓdtK=?8a[+6fI``LWܑlOPb*sě+ #c\iǬ&{ێ͚I4 _] Óޙ^? sxK*hO3=<.ӚAjn >&͛W\G2*BQq?QI iD=mV^Dz{Y[P)J]LLض -$<)KMEyƥM^^2}PbɓW2ɾ" ?%.;.Q⺩=H q9>QTq}7%^`Flդ"O-Qf + M<8?U˜WIp.!iR&Dy6Ez0sDO4̅U:k6@ٳ&1D+fp8?̪l{)YާSK9(,Z/fuk%KZ ,-40Ț$.䶕/bW]y~Ľ ޣfC|IenYv&ssOG~CD*G(N&bP~0μA5YD -,O<č-M8ǵmqo0YD?cEp>*A  - ym+a~r'GS(3Th㬪.⵾h=>$Y:sļIQtME횺!f] .d ̤4⃇C%Tz@e 3l:[k -J9)'lI u5C[+{[V;MY ښK X8BDХ.ej)DͬaUjʺ&o*U SӪR բ3$}ͱ"W\a,GCFql<ͷ1(K(}xxT)<3V]QV1Bqm1i(}$(J%is6WRuYb1| ̞mm->a}>360k#s0[6pSs]!Xxjr,Ţ(~n^ jca݁5%7#`7Nr[(Vjʼn^fVئt'npq/-?(aOmW[(ITLJJ):%^Ӳ%[7sٟ[K0cs̸Aqb dѝy i5Gϼ,Z̢r!#MY LCcv+yOsXXWm>֞٢"O[u}%B>RB20CtҸbrh6.{ӱ e!'?vq r Q}XzE'Nzߨ݆>dQ~_JԻLEMGcTL,6҂Zlˊ,}DrlI?}2X؞mև֯=n?1|՘q2I-Lu>˷YUMFk4/ͅ=DY?eN:|21 ܡEъ=i)|o/v -)[u4n̵4/&niHߴq}MSRӢ]iho}nև=_l}go~ǯ~훟_ꟿo~_Oo~o_<__?? 
9>  endstream endobj 15 0 obj <> endobj 27 0 obj <> endobj 37 0 obj <> endobj 65 0 obj <> endobj 78 0 obj <> endobj 90 0 obj <> endobj 116 0 obj <> endobj 129 0 obj <> endobj 141 0 obj <> endobj 167 0 obj <> endobj 180 0 obj <> endobj 192 0 obj <> endobj 218 0 obj <> endobj 237 0 obj <> endobj 255 0 obj <> endobj 287 0 obj <> endobj 306 0 obj <> endobj 324 0 obj <> endobj 356 0 obj <> endobj 375 0 obj <> endobj 393 0 obj <> endobj 425 0 obj <> endobj 444 0 obj <> endobj 462 0 obj <> endobj 480 0 obj <> endobj 515 0 obj <> endobj 534 0 obj <> endobj 552 0 obj <> endobj 570 0 obj <> endobj 605 0 obj <> endobj 624 0 obj <> endobj 642 0 obj <> endobj 660 0 obj <> endobj 695 0 obj <> endobj 699 0 obj <> endobj 718 0 obj <> endobj 735 0 obj <> endobj 753 0 obj <> endobj 785 0 obj <> endobj 789 0 obj <> endobj 808 0 obj <> endobj 825 0 obj <> endobj 843 0 obj <> endobj 878 0 obj <> endobj 882 0 obj <> endobj 901 0 obj <> endobj 918 0 obj <> endobj 936 0 obj <> endobj 971 0 obj <> endobj 975 0 obj <> endobj 994 0 obj <> endobj 1011 0 obj <> endobj 1029 0 obj <> endobj 1056 0 obj <> endobj 1057 0 obj <> endobj 1058 0 obj <> endobj 1059 0 obj <> endobj 1060 0 obj <> endobj 1138 0 obj <> endobj 1139 0 obj <> endobj 1140 0 obj <> endobj 1141 0 obj <> endobj 1142 0 obj <> endobj 1143 0 obj <> endobj 1223 0 obj <> endobj 1224 0 obj <> endobj 1225 0 obj <> endobj 1226 0 obj <> endobj 1227 0 obj <> endobj 1228 0 obj <> endobj 1296 0 obj [/View/Design] endobj 1297 0 obj <>>> endobj 1294 0 obj [/View/Design] endobj 1295 0 obj <>>> endobj 1292 0 obj [/View/Design] endobj 1293 0 obj <>>> endobj 1290 0 obj [/View/Design] endobj 1291 0 obj <>>> endobj 1288 0 obj [/View/Design] endobj 1289 0 obj <>>> endobj 1286 0 obj [/View/Design] endobj 1287 0 obj <>>> endobj 1211 0 obj [/View/Design] endobj 1212 0 obj <>>> endobj 1209 0 obj [/View/Design] endobj 1210 0 obj <>>> endobj 1207 0 obj [/View/Design] endobj 1208 0 obj <>>> endobj 1205 0 obj [/View/Design] endobj 1206 0 obj <>>> endobj 1203 0 obj [/View/Design] endobj 1204 0 obj <>>> endobj 1201 0 obj [/View/Design] endobj 1202 0 obj <>>> endobj 1126 0 obj [/View/Design] endobj 1127 0 obj <>>> endobj 1124 0 obj [/View/Design] endobj 1125 0 obj <>>> endobj 1122 0 obj [/View/Design] endobj 1123 0 obj <>>> endobj 1120 0 obj [/View/Design] endobj 1121 0 obj <>>> endobj 1118 0 obj [/View/Design] endobj 1119 0 obj <>>> endobj 1030 0 obj [/View/Design] endobj 1031 0 obj <>>> endobj 1012 0 obj [/View/Design] endobj 1013 0 obj <>>> endobj 995 0 obj [/View/Design] endobj 996 0 obj <>>> endobj 976 0 obj [/View/Design] endobj 977 0 obj <>>> endobj 972 0 obj [/View/Design] endobj 973 0 obj <>>> endobj 937 0 obj [/View/Design] endobj 938 0 obj <>>> endobj 919 0 obj [/View/Design] endobj 920 0 obj <>>> endobj 902 0 obj [/View/Design] endobj 903 0 obj <>>> endobj 883 0 obj [/View/Design] endobj 884 0 obj <>>> endobj 879 0 obj [/View/Design] endobj 880 0 obj <>>> endobj 844 0 obj [/View/Design] endobj 845 0 obj <>>> endobj 826 0 obj [/View/Design] endobj 827 0 obj <>>> endobj 809 0 obj [/View/Design] endobj 810 0 obj <>>> endobj 790 0 obj [/View/Design] endobj 791 0 obj <>>> endobj 786 0 obj [/View/Design] endobj 787 0 obj <>>> endobj 754 0 obj [/View/Design] endobj 755 0 obj <>>> endobj 736 0 obj [/View/Design] endobj 737 0 obj <>>> endobj 719 0 obj [/View/Design] endobj 720 0 obj <>>> endobj 700 0 obj [/View/Design] endobj 701 0 obj <>>> endobj 696 0 obj [/View/Design] endobj 697 0 obj <>>> endobj 661 0 obj [/View/Design] endobj 662 0 obj <>>> endobj 643 0 obj [/View/Design] 
endobj 644 0 obj <>>> endobj 625 0 obj [/View/Design] endobj 626 0 obj <>>> endobj 606 0 obj [/View/Design] endobj 607 0 obj <>>> endobj 571 0 obj [/View/Design] endobj 572 0 obj <>>> endobj 553 0 obj [/View/Design] endobj 554 0 obj <>>> endobj 535 0 obj [/View/Design] endobj 536 0 obj <>>> endobj 516 0 obj [/View/Design] endobj 517 0 obj <>>> endobj 481 0 obj [/View/Design] endobj 482 0 obj <>>> endobj 463 0 obj [/View/Design] endobj 464 0 obj <>>> endobj 445 0 obj [/View/Design] endobj 446 0 obj <>>> endobj 426 0 obj [/View/Design] endobj 427 0 obj <>>> endobj 394 0 obj [/View/Design] endobj 395 0 obj <>>> endobj 376 0 obj [/View/Design] endobj 377 0 obj <>>> endobj 357 0 obj [/View/Design] endobj 358 0 obj <>>> endobj 325 0 obj [/View/Design] endobj 326 0 obj <>>> endobj 307 0 obj [/View/Design] endobj 308 0 obj <>>> endobj 288 0 obj [/View/Design] endobj 289 0 obj <>>> endobj 256 0 obj [/View/Design] endobj 257 0 obj <>>> endobj 238 0 obj [/View/Design] endobj 239 0 obj <>>> endobj 219 0 obj [/View/Design] endobj 220 0 obj <>>> endobj 193 0 obj [/View/Design] endobj 194 0 obj <>>> endobj 181 0 obj [/View/Design] endobj 182 0 obj <>>> endobj 168 0 obj [/View/Design] endobj 169 0 obj <>>> endobj 142 0 obj [/View/Design] endobj 143 0 obj <>>> endobj 130 0 obj [/View/Design] endobj 131 0 obj <>>> endobj 117 0 obj [/View/Design] endobj 118 0 obj <>>> endobj 91 0 obj [/View/Design] endobj 92 0 obj <>>> endobj 79 0 obj [/View/Design] endobj 80 0 obj <>>> endobj 66 0 obj [/View/Design] endobj 67 0 obj <>>> endobj 38 0 obj [/View/Design] endobj 39 0 obj <>>> endobj 28 0 obj [/View/Design] endobj 29 0 obj <>>> endobj 16 0 obj [/View/Design] endobj 17 0 obj <>>> endobj 1314 0 obj [1313 0 R 1312 0 R 1311 0 R 1310 0 R 1309 0 R 1308 0 R] endobj 1391 0 obj <> endobj xref 0 1392 0000000003 65535 f -0000000016 00000 n -0000046044 00000 n -0000000004 00000 f -0000000006 00000 f -0000046095 00000 n -0000000007 00000 f -0000000008 00000 f -0000000009 00000 f -0000000010 00000 f -0000000011 00000 f -0000000012 00000 f -0000000013 00000 f -0000000014 00000 f -0000000018 00000 f -0000359516 00000 n -0000372837 00000 n -0000372868 00000 n -0000000019 00000 f -0000000020 00000 f -0000000021 00000 f -0000000022 00000 f -0000000023 00000 f -0000000024 00000 f -0000000025 00000 f -0000000026 00000 f -0000000030 00000 f -0000359585 00000 n -0000372721 00000 n -0000372752 00000 n -0000000031 00000 f -0000000032 00000 f -0000000033 00000 f -0000000034 00000 f -0000000035 00000 f -0000000036 00000 f -0000000040 00000 f -0000359656 00000 n -0000372605 00000 n -0000372636 00000 n -0000000041 00000 f -0000000042 00000 f -0000000043 00000 f -0000000044 00000 f -0000000045 00000 f -0000000046 00000 f -0000000047 00000 f -0000000048 00000 f -0000000049 00000 f -0000000050 00000 f -0000000051 00000 f -0000000052 00000 f -0000000053 00000 f -0000000054 00000 f -0000000055 00000 f -0000000056 00000 f -0000000057 00000 f -0000000058 00000 f -0000000059 00000 f -0000000060 00000 f -0000000061 00000 f -0000000062 00000 f -0000000063 00000 f -0000000064 00000 f -0000000068 00000 f -0000359726 00000 n -0000372489 00000 n -0000372520 00000 n -0000000069 00000 f -0000000070 00000 f -0000000071 00000 f -0000000072 00000 f -0000000073 00000 f -0000000074 00000 f -0000000075 00000 f -0000000076 00000 f -0000000077 00000 f -0000000081 00000 f -0000359795 00000 n -0000372373 00000 n -0000372404 00000 n -0000000082 00000 f -0000000083 00000 f -0000000084 00000 f -0000000085 00000 f -0000000086 00000 f -0000000087 00000 f -0000000088 00000 
f -0000000089 00000 f -0000000093 00000 f -0000359866 00000 n -0000372257 00000 n -0000372288 00000 n -0000000094 00000 f -0000000095 00000 f -0000000096 00000 f -0000000097 00000 f -0000000098 00000 f -0000000099 00000 f -0000000100 00000 f -0000000101 00000 f -0000000102 00000 f -0000000103 00000 f -0000000104 00000 f -0000000105 00000 f -0000000106 00000 f -0000000107 00000 f -0000000108 00000 f -0000000109 00000 f -0000000110 00000 f -0000000111 00000 f -0000000112 00000 f -0000000113 00000 f -0000000114 00000 f -0000000115 00000 f -0000000119 00000 f -0000359936 00000 n -0000372139 00000 n -0000372171 00000 n -0000000120 00000 f -0000000121 00000 f -0000000122 00000 f -0000000123 00000 f -0000000124 00000 f -0000000125 00000 f -0000000126 00000 f -0000000127 00000 f -0000000128 00000 f -0000000132 00000 f -0000360008 00000 n -0000372021 00000 n -0000372053 00000 n -0000000133 00000 f -0000000134 00000 f -0000000135 00000 f -0000000136 00000 f -0000000137 00000 f -0000000138 00000 f -0000000139 00000 f -0000000140 00000 f -0000000144 00000 f -0000360082 00000 n -0000371903 00000 n -0000371935 00000 n -0000000145 00000 f -0000000146 00000 f -0000000147 00000 f -0000000148 00000 f -0000000149 00000 f -0000000150 00000 f -0000000151 00000 f -0000000152 00000 f -0000000153 00000 f -0000000154 00000 f -0000000155 00000 f -0000000156 00000 f -0000000157 00000 f -0000000158 00000 f -0000000159 00000 f -0000000160 00000 f -0000000161 00000 f -0000000162 00000 f -0000000163 00000 f -0000000164 00000 f -0000000165 00000 f -0000000166 00000 f -0000000170 00000 f -0000360155 00000 n -0000371785 00000 n -0000371817 00000 n -0000000171 00000 f -0000000172 00000 f -0000000173 00000 f -0000000174 00000 f -0000000175 00000 f -0000000176 00000 f -0000000177 00000 f -0000000178 00000 f -0000000179 00000 f -0000000183 00000 f -0000360227 00000 n -0000371667 00000 n -0000371699 00000 n -0000000184 00000 f -0000000185 00000 f -0000000186 00000 f -0000000187 00000 f -0000000188 00000 f -0000000189 00000 f -0000000190 00000 f -0000000191 00000 f -0000000195 00000 f -0000360301 00000 n -0000371549 00000 n -0000371581 00000 n -0000000196 00000 f -0000000197 00000 f -0000000198 00000 f -0000000199 00000 f -0000000200 00000 f -0000000201 00000 f -0000000202 00000 f -0000000203 00000 f -0000000204 00000 f -0000000205 00000 f -0000000206 00000 f -0000000207 00000 f -0000000208 00000 f -0000000209 00000 f -0000000210 00000 f -0000000211 00000 f -0000000212 00000 f -0000000213 00000 f -0000000214 00000 f -0000000215 00000 f -0000000216 00000 f -0000000217 00000 f -0000000221 00000 f -0000360374 00000 n -0000371431 00000 n -0000371463 00000 n -0000000222 00000 f -0000000223 00000 f -0000000224 00000 f -0000000225 00000 f -0000000226 00000 f -0000000227 00000 f -0000000228 00000 f -0000000229 00000 f -0000000230 00000 f -0000000231 00000 f -0000000232 00000 f -0000000233 00000 f -0000000234 00000 f -0000000235 00000 f -0000000236 00000 f -0000000240 00000 f -0000360446 00000 n -0000371313 00000 n -0000371345 00000 n -0000000241 00000 f -0000000242 00000 f -0000000243 00000 f -0000000244 00000 f -0000000245 00000 f -0000000246 00000 f -0000000247 00000 f -0000000248 00000 f -0000000249 00000 f -0000000250 00000 f -0000000251 00000 f -0000000252 00000 f -0000000253 00000 f -0000000254 00000 f -0000000258 00000 f -0000360520 00000 n -0000371195 00000 n -0000371227 00000 n -0000000259 00000 f -0000000260 00000 f -0000000261 00000 f -0000000262 00000 f -0000000263 00000 f -0000000264 00000 f -0000000265 00000 f -0000000266 
00000 f -0000000267 00000 f -0000000268 00000 f -0000000269 00000 f -0000000270 00000 f -0000000271 00000 f -0000000272 00000 f -0000000273 00000 f -0000000274 00000 f -0000000275 00000 f -0000000276 00000 f -0000000277 00000 f -0000000278 00000 f -0000000279 00000 f -0000000280 00000 f -0000000281 00000 f -0000000282 00000 f -0000000283 00000 f -0000000284 00000 f -0000000285 00000 f -0000000286 00000 f -0000000290 00000 f -0000360593 00000 n -0000371077 00000 n -0000371109 00000 n -0000000291 00000 f -0000000292 00000 f -0000000293 00000 f -0000000294 00000 f -0000000295 00000 f -0000000296 00000 f -0000000297 00000 f -0000000298 00000 f -0000000299 00000 f -0000000300 00000 f -0000000301 00000 f -0000000302 00000 f -0000000303 00000 f -0000000304 00000 f -0000000305 00000 f -0000000309 00000 f -0000360665 00000 n -0000370959 00000 n -0000370991 00000 n -0000000310 00000 f -0000000311 00000 f -0000000312 00000 f -0000000313 00000 f -0000000314 00000 f -0000000315 00000 f -0000000316 00000 f -0000000317 00000 f -0000000318 00000 f -0000000319 00000 f -0000000320 00000 f -0000000321 00000 f -0000000322 00000 f -0000000323 00000 f -0000000327 00000 f -0000360739 00000 n -0000370841 00000 n -0000370873 00000 n -0000000328 00000 f -0000000329 00000 f -0000000330 00000 f -0000000331 00000 f -0000000332 00000 f -0000000333 00000 f -0000000334 00000 f -0000000335 00000 f -0000000336 00000 f -0000000337 00000 f -0000000338 00000 f -0000000339 00000 f -0000000340 00000 f -0000000341 00000 f -0000000342 00000 f -0000000343 00000 f -0000000344 00000 f -0000000345 00000 f -0000000346 00000 f -0000000347 00000 f -0000000348 00000 f -0000000349 00000 f -0000000350 00000 f -0000000351 00000 f -0000000352 00000 f -0000000353 00000 f -0000000354 00000 f -0000000355 00000 f -0000000359 00000 f -0000360812 00000 n -0000370723 00000 n -0000370755 00000 n -0000000360 00000 f -0000000361 00000 f -0000000362 00000 f -0000000363 00000 f -0000000364 00000 f -0000000365 00000 f -0000000366 00000 f -0000000367 00000 f -0000000368 00000 f -0000000369 00000 f -0000000370 00000 f -0000000371 00000 f -0000000372 00000 f -0000000373 00000 f -0000000374 00000 f -0000000378 00000 f -0000360884 00000 n -0000370605 00000 n -0000370637 00000 n -0000000379 00000 f -0000000380 00000 f -0000000381 00000 f -0000000382 00000 f -0000000383 00000 f -0000000384 00000 f -0000000385 00000 f -0000000386 00000 f -0000000387 00000 f -0000000388 00000 f -0000000389 00000 f -0000000390 00000 f -0000000391 00000 f -0000000392 00000 f -0000000396 00000 f -0000360958 00000 n -0000370487 00000 n -0000370519 00000 n -0000000397 00000 f -0000000398 00000 f -0000000399 00000 f -0000000400 00000 f -0000000401 00000 f -0000000402 00000 f -0000000403 00000 f -0000000404 00000 f -0000000405 00000 f -0000000406 00000 f -0000000407 00000 f -0000000408 00000 f -0000000409 00000 f -0000000410 00000 f -0000000411 00000 f -0000000412 00000 f -0000000413 00000 f -0000000414 00000 f -0000000415 00000 f -0000000416 00000 f -0000000417 00000 f -0000000418 00000 f -0000000419 00000 f -0000000420 00000 f -0000000421 00000 f -0000000422 00000 f -0000000423 00000 f -0000000424 00000 f -0000000428 00000 f -0000361031 00000 n -0000370369 00000 n -0000370401 00000 n -0000000429 00000 f -0000000430 00000 f -0000000431 00000 f -0000000432 00000 f -0000000433 00000 f -0000000434 00000 f -0000000435 00000 f -0000000436 00000 f -0000000437 00000 f -0000000438 00000 f -0000000439 00000 f -0000000440 00000 f -0000000441 00000 f -0000000442 00000 f -0000000443 00000 f 
-0000000447 00000 f -0000361103 00000 n -0000370251 00000 n -0000370283 00000 n -0000000448 00000 f -0000000449 00000 f -0000000450 00000 f -0000000451 00000 f -0000000452 00000 f -0000000453 00000 f -0000000454 00000 f -0000000455 00000 f -0000000456 00000 f -0000000457 00000 f -0000000458 00000 f -0000000459 00000 f -0000000460 00000 f -0000000461 00000 f -0000000465 00000 f -0000361175 00000 n -0000370133 00000 n -0000370165 00000 n -0000000466 00000 f -0000000467 00000 f -0000000468 00000 f -0000000469 00000 f -0000000470 00000 f -0000000471 00000 f -0000000472 00000 f -0000000473 00000 f -0000000474 00000 f -0000000475 00000 f -0000000476 00000 f -0000000477 00000 f -0000000478 00000 f -0000000479 00000 f -0000000483 00000 f -0000361249 00000 n -0000370015 00000 n -0000370047 00000 n -0000000484 00000 f -0000000485 00000 f -0000000486 00000 f -0000000487 00000 f -0000000488 00000 f -0000000489 00000 f -0000000490 00000 f -0000000491 00000 f -0000000492 00000 f -0000000493 00000 f -0000000494 00000 f -0000000495 00000 f -0000000496 00000 f -0000000497 00000 f -0000000498 00000 f -0000000499 00000 f -0000000500 00000 f -0000000501 00000 f -0000000502 00000 f -0000000503 00000 f -0000000504 00000 f -0000000505 00000 f -0000000506 00000 f -0000000507 00000 f -0000000508 00000 f -0000000509 00000 f -0000000510 00000 f -0000000511 00000 f -0000000512 00000 f -0000000513 00000 f -0000000514 00000 f -0000000518 00000 f -0000361322 00000 n -0000369897 00000 n -0000369929 00000 n -0000000519 00000 f -0000000520 00000 f -0000000521 00000 f -0000000522 00000 f -0000000523 00000 f -0000000524 00000 f -0000000525 00000 f -0000000526 00000 f -0000000527 00000 f -0000000528 00000 f -0000000529 00000 f -0000000530 00000 f -0000000531 00000 f -0000000532 00000 f -0000000533 00000 f -0000000537 00000 f -0000361394 00000 n -0000369779 00000 n -0000369811 00000 n -0000000538 00000 f -0000000539 00000 f -0000000540 00000 f -0000000541 00000 f -0000000542 00000 f -0000000543 00000 f -0000000544 00000 f -0000000545 00000 f -0000000546 00000 f -0000000547 00000 f -0000000548 00000 f -0000000549 00000 f -0000000550 00000 f -0000000551 00000 f -0000000555 00000 f -0000361466 00000 n -0000369661 00000 n -0000369693 00000 n -0000000556 00000 f -0000000557 00000 f -0000000558 00000 f -0000000559 00000 f -0000000560 00000 f -0000000561 00000 f -0000000562 00000 f -0000000563 00000 f -0000000564 00000 f -0000000565 00000 f -0000000566 00000 f -0000000567 00000 f -0000000568 00000 f -0000000569 00000 f -0000000573 00000 f -0000361540 00000 n -0000369543 00000 n -0000369575 00000 n -0000000574 00000 f -0000000575 00000 f -0000000576 00000 f -0000000577 00000 f -0000000578 00000 f -0000000579 00000 f -0000000580 00000 f -0000000581 00000 f -0000000582 00000 f -0000000583 00000 f -0000000584 00000 f -0000000585 00000 f -0000000586 00000 f -0000000587 00000 f -0000000588 00000 f -0000000589 00000 f -0000000590 00000 f -0000000591 00000 f -0000000592 00000 f -0000000593 00000 f -0000000594 00000 f -0000000595 00000 f -0000000596 00000 f -0000000597 00000 f -0000000598 00000 f -0000000599 00000 f -0000000600 00000 f -0000000601 00000 f -0000000602 00000 f -0000000603 00000 f -0000000604 00000 f -0000000608 00000 f -0000361613 00000 n -0000369425 00000 n -0000369457 00000 n -0000000609 00000 f -0000000610 00000 f -0000000611 00000 f -0000000612 00000 f -0000000613 00000 f -0000000614 00000 f -0000000615 00000 f -0000000616 00000 f -0000000617 00000 f -0000000618 00000 f -0000000619 00000 f -0000000620 00000 f -0000000621 
00000 n -0000149568 00000 n -0000137318 00000 n -0000137382 00000 n -0000122718 00000 n -0000122782 00000 n -0000110491 00000 n -0000110555 00000 n -0000095974 00000 n -0000096038 00000 n -0000083742 00000 n -0000083806 00000 n -0000069074 00000 n -0000069138 00000 n -0000054106 00000 n -0000054170 00000 n -0000054920 00000 n -0000054984 00000 n -0000066206 00000 n -0000066424 00000 n -0000069865 00000 n -0000069929 00000 n -0000083678 00000 n -0000084551 00000 n -0000084615 00000 n -0000095910 00000 n -0000096756 00000 n -0000096820 00000 n -0000110427 00000 n -0000111301 00000 n -0000111365 00000 n -0000122654 00000 n -0000123502 00000 n -0000123566 00000 n -0000137254 00000 n -0000138131 00000 n -0000138195 00000 n -0000149440 00000 n -0000150263 00000 n -0000150327 00000 n -0000163946 00000 n -0000165076 00000 n -0000165109 00000 n -0000164956 00000 n -0000164989 00000 n -0000164836 00000 n -0000164869 00000 n -0000164716 00000 n -0000164749 00000 n -0000164596 00000 n -0000164629 00000 n -0000164476 00000 n -0000164509 00000 n -0000165488 00000 n -0000165798 00000 n -0000167207 00000 n -0000167437 00000 n -0000168418 00000 n -0000175955 00000 n -0000241545 00000 n -0000307135 00000 n -0000373027 00000 n -trailer <<483E7E53040A4057B79E46A3828ABB5E>]>> startxref 373165 %%EOF \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png deleted file mode 100644 index ef2615bacc..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif deleted file mode 100644 index 2fcc77b2e8..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png deleted file mode 100644 index d5ac639405..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif deleted file mode 100644 index 2a949311d7..0000000000 Binary files a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif and /dev/null differ diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt b/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt deleted file mode 100644 index 17d1caeb66..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt +++ /dev/null @@ -1 +0,0 @@ -jquery=1.4.2 diff --git a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala deleted file mode 100644 index 574d6b04f8..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala +++ /dev/null @@ -1,112 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Manohar Jonnalagedda - */ - -package scala.tools.nsc -package doc -package model - -import base.comment._ - -import reporters.Reporter -import scala.collection._ -import scala.reflect.internal.util.{NoPosition, Position} -import scala.language.postfixOps - -/** The comment parser transforms raw comment strings into 
`Comment` objects. - * Call `parse` to run the parser. Note that the parser is stateless and - * should only be built once for a given Scaladoc run. - * - * @author Manohar Jonnalagedda - * @author Gilles Dubochet */ -trait CommentFactory extends base.CommentFactoryBase { - thisFactory: ModelFactory with CommentFactory with MemberLookup => - - val global: Global - import global.{ reporter, definitions, Symbol } - - protected val commentCache = mutable.HashMap.empty[(Symbol, TemplateImpl), Comment] - - def addCommentBody(sym: Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): Symbol = { - commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None) - sym - } - - def comment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl): Option[Comment] = { - val key = (sym, inTpl) - if (commentCache isDefinedAt key) - Some(commentCache(key)) - else { - val c = defineComment(sym, currentTpl, inTpl) - if (c isDefined) commentCache += (sym, inTpl) -> c.get - c - } - } - - /** A comment is usualy created by the parser, however for some special - * cases we have to give some `inTpl` comments (parent class for example) - * to the comment of the symbol. - * This function manages some of those cases : Param accessor and Primary constructor */ - def defineComment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl):Option[Comment] = { - - //param accessor case - // We just need the @param argument, we put it into the body - if( sym.isParamAccessor && - inTpl.comment.isDefined && - inTpl.comment.get.valueParams.isDefinedAt(sym.encodedName)) { - val comContent = Some(inTpl.comment.get.valueParams(sym.encodedName)) - Some(createComment(body0 = comContent)) - } - - // Primary constructor case - // We need some content of the class definition : @constructor for the body, - // @param and @deprecated, we can add some more if necessary - else if (sym.isPrimaryConstructor && inTpl.comment.isDefined ) { - val tplComment = inTpl.comment.get - // If there is nothing to put into the comment there is no need to create it - if(tplComment.constructor.isDefined || - tplComment.throws != Map.empty || - tplComment.valueParams != Map.empty || - tplComment.typeParams != Map.empty || - tplComment.deprecated.isDefined - ) - Some(createComment( body0 = tplComment.constructor, - throws0 = tplComment.throws, - valueParams0 = tplComment.valueParams, - typeParams0 = tplComment.typeParams, - deprecated0 = tplComment.deprecated - )) - else None - } - - //other comment cases - // parse function will make the comment - else { - val rawComment = global.expandedDocComment(sym, inTpl.sym).trim - if (rawComment != "") { - val tplOpt = if (currentTpl.isDefined) currentTpl else Some(inTpl) - val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym), tplOpt) - Some(c) - } - else None - } - - } - - protected def parse(comment: String, src: String, pos: Position, inTplOpt: Option[DocTemplateImpl] = None): Comment = { - assert(!inTplOpt.isDefined || inTplOpt.get != null) - parseAtSymbol(comment, src, pos, inTplOpt map (_.sym)) - } - - /** Parses a string containing wiki syntax into a `Comment` object. - * Note that the string is assumed to be clean: - * - Removed Scaladoc start and end markers. - * - Removed start-of-line star and one whitespace afterwards (if present). - * - Removed all end-of-line whitespace. - * - Only `endOfLine` is used to mark line endings. 
*/ - def parseWiki(string: String, pos: Position, inTplOpt: Option[DocTemplateImpl]): Body = { - assert(!inTplOpt.isDefined || inTplOpt.get != null) - parseWikiAtSymbol(string,pos, inTplOpt map (_.sym)) - } -} diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala deleted file mode 100644 index 924f203a59..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala +++ /dev/null @@ -1,601 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Manohar Jonnalagedda - * @author Gilles Dubochet - */ - -package scala.tools.nsc -package doc -package model - -import scala.collection._ -import base.comment._ -import diagram._ - -/** An entity in a Scaladoc universe. Entities are declarations in the program and correspond to symbols in the - * compiler. Entities model the following Scala concepts: - * - classes and traits; - * - objects and package; - * - constructors; - * - methods; - * - values, lazy values, and variables; - * - abstract type members and type aliases; - * - type and value parameters; - * - annotations. */ -trait Entity { - /** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName` - * instead. */ - def name : String - - /** The qualified name of the entity. This is this entity's name preceded by the qualified name of the template - * of which this entity is a member. The qualified name is unique to this entity. */ - def qualifiedName: String - - /** The template of which this entity is a member. */ - def inTemplate: TemplateEntity - - /** The list of entities such that each is a member of the entity that follows it; the first entity is always this - * entity, the last the root package entity. */ - def toRoot: List[Entity] - - /** The qualified name of this entity. */ - override def toString = qualifiedName - - /** The Scaladoc universe of which this entity is a member. */ - def universe: Universe - - /** The annotations attached to this entity, if any. */ - def annotations: List[Annotation] - - /** The kind of the entity */ - def kind: String - - /** Whether or not the template was defined in a package object */ - def inPackageObject: Boolean - - /** Indicates whether this entity lives in the types namespace (classes, traits, abstract/alias types) */ - def isType: Boolean -} - -object Entity { - private def isDeprecated(x: Entity) = x match { - case x: MemberEntity => x.deprecation.isDefined - case _ => false - } - /** Ordering deprecated things last. */ - implicit lazy val EntityOrdering: Ordering[Entity] = - Ordering[(Boolean, String)] on (x => (isDeprecated(x), x.name)) -} - -/** A template, which is either a class, trait, object or package. Depending on whether documentation is available - * or not, the template will be modeled as a [scala.tools.nsc.doc.model.NoDocTemplate] or a - * [scala.tools.nsc.doc.model.DocTemplateEntity]. */ -trait TemplateEntity extends Entity { - - /** Whether this template is a package (including the root package). */ - def isPackage: Boolean - - /** Whether this template is the root package. */ - def isRootPackage: Boolean - - /** Whether this template is a trait. */ - def isTrait: Boolean - - /** Whether this template is a class. */ - def isClass: Boolean - - /** Whether this template is an object. */ - def isObject: Boolean - - /** Whether documentation is available for this template. */ - def isDocTemplate: Boolean - - /** Whether this template is a case class. 
*/ - def isCaseClass: Boolean - - /** The self-type of this template, if it differs from the template type. */ - def selfType : Option[TypeEntity] -} - - -/** An entity that is a member of a template. All entities, including templates, are member of another entity - * except for parameters and annotations. Note that all members of a template are modelled, including those that are - * inherited and not declared locally. */ -trait MemberEntity extends Entity { - - /** The comment attached to this member, if any. */ - def comment: Option[Comment] - - /** The group this member is from */ - def group: String - - /** The template of which this entity is a member. */ - def inTemplate: DocTemplateEntity - - /** The list of entities such that each is a member of the entity that follows it; the first entity is always this - * member, the last the root package entity. */ - def toRoot: List[MemberEntity] - - /** The templates in which this member has been declared. The first element of the list is the template that contains - * the currently active declaration of this member, subsequent elements are declarations that have been overriden. If - * the first element is equal to `inTemplate`, the member is declared locally, if not, it has been inherited. All - * elements of this list are in the linearization of `inTemplate`. */ - def inDefinitionTemplates: List[TemplateEntity] - - /** The qualified name of the member in its currently active declaration template. */ - def definitionName: String - - /** The visibility of this member. Note that members with restricted visibility may not be modeled in some - * universes. */ - def visibility: Visibility - - /** The flags that have been set for this entity. The following flags are supported: `implicit`, `sealed`, `abstract`, - * and `final`. */ - def flags: List[Paragraph] - - /** Some deprecation message if this member is deprecated, or none otherwise. */ - def deprecation: Option[Body] - - /** Some migration warning if this member has a migration annotation, or none otherwise. */ - def migration: Option[Body] - - /** For members representing values: the type of the value returned by this member; for members - * representing types: the type itself. */ - def resultType: TypeEntity - - /** Whether this member is a method. */ - def isDef: Boolean - - /** Whether this member is a value (this excludes lazy values). */ - def isVal: Boolean - - /** Whether this member is a lazy value. */ - def isLazyVal: Boolean - - /** Whether this member is a variable. */ - def isVar: Boolean - - /** Whether this member is a constructor. */ - def isConstructor: Boolean - - /** Whether this member is an alias type. */ - def isAliasType: Boolean - - /** Whether this member is an abstract type. */ - def isAbstractType: Boolean - - /** Whether this member is abstract. */ - def isAbstract: Boolean - - /** If this symbol is a use case, the useCaseOf will contain the member it was derived from, containing the full - * signature and the complete parameter descriptions. 
*/ - def useCaseOf: Option[MemberEntity] - - /** If this member originates from an implicit conversion, we set the implicit information to the correct origin */ - def byConversion: Option[ImplicitConversion] - - /** The identity of this member, used for linking */ - def signature: String - - /** Compatibility signature, will be removed from future versions */ - def signatureCompat: String - - /** Indicates whether the member is inherited by implicit conversion */ - def isImplicitlyInherited: Boolean - - /** Indicates whether there is another member with the same name in the template that will take precendence */ - def isShadowedImplicit: Boolean - - /** Indicates whether there are other implicitly inherited members that have similar signatures (and thus they all - * become ambiguous) */ - def isAmbiguousImplicit: Boolean - - /** Indicates whether the implicitly inherited member is shadowed or ambiguous in its template */ - def isShadowedOrAmbiguousImplicit: Boolean -} - -object MemberEntity { - // Oh contravariance, contravariance, wherefore art thou contravariance? - // Note: the above works for both the commonly misunderstood meaning of the line and the real one. - implicit lazy val MemberEntityOrdering: Ordering[MemberEntity] = Entity.EntityOrdering on (x => x) -} - -/** An entity that is parameterized by types */ -trait HigherKinded { - - /** The type parameters of this entity. */ - def typeParams: List[TypeParam] -} - - -/** A template (class, trait, object or package) which is referenced in the universe, but for which no further - * documentation is available. Only templates for which a source file is given are documented by Scaladoc. */ -trait NoDocTemplate extends TemplateEntity { - def kind = - if (isClass) "class" - else if (isTrait) "trait" - else if (isObject) "object" - else "" -} - -/** An inherited template that was not documented in its original owner - example: - * in classpath: trait T { class C } -- T (and implicitly C) are not documented - * in the source: trait U extends T -- C appears in U as a MemberTemplateImpl - * -- that is, U has a member for it but C doesn't get its own page */ -trait MemberTemplateEntity extends TemplateEntity with MemberEntity with HigherKinded { - - /** The value parameters of this case class, or an empty list if this class is not a case class. As case class value - * parameters cannot be curried, the outer list has exactly one element. */ - def valueParams: List[List[ValueParam]] - - /** The direct super-type of this template - e.g: {{{class A extends B[C[Int]] with D[E]}}} will have two direct parents: class B and D - NOTE: we are dropping the refinement here! */ - def parentTypes: List[(TemplateEntity, TypeEntity)] -} - -/** A template (class, trait, object or package) for which documentation is available. Only templates for which - * a source file is given are documented by Scaladoc. */ -trait DocTemplateEntity extends MemberTemplateEntity { - - /** The list of templates such that each is a member of the template that follows it; the first template is always - * this template, the last the root package entity. */ - def toRoot: List[DocTemplateEntity] - - /** The source file in which the current template is defined and the line where the definition starts, if they exist. - * A source file exists for all templates, except for those that are generated synthetically by Scaladoc. */ - def inSource: Option[(io.AbstractFile, Int)] - - /** An HTTP address at which the source of this template is available, if it is available. 
An address is available - * only if the `docsourceurl` setting has been set. */ - def sourceUrl: Option[java.net.URL] - - /** All class, trait and object templates which are part of this template's linearization, in lineratization order. - * This template's linearization contains all of its direct and indirect super-classes and super-traits. */ - def linearizationTemplates: List[TemplateEntity] - - /** All instantiated types which are part of this template's linearization, in lineratization order. - * This template's linearization contains all of its direct and indirect super-types. */ - def linearizationTypes: List[TypeEntity] - - /** All class, trait and object templates for which this template is a direct or indirect super-class or super-trait. - * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */ - def allSubClasses: List[DocTemplateEntity] - - /** All class, trait and object templates for which this template is a *direct* super-class or super-trait. - * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */ - def directSubClasses: List[DocTemplateEntity] - - /** All members of this template. If this template is a package, only templates for which documentation is available - * in the universe (`DocTemplateEntity`) are listed. */ - def members: List[MemberEntity] - - /** All templates that are members of this template. If this template is a package, only templates for which - * documentation is available in the universe (`DocTemplateEntity`) are listed. */ - def templates: List[TemplateEntity with MemberEntity] - - /** All methods that are members of this template. */ - def methods: List[Def] - - /** All values, lazy values and variables that are members of this template. */ - def values: List[Val] - - /** All abstract types that are members of this template. */ - def abstractTypes: List[AbstractType] - - /** All type aliases that are members of this template. */ - def aliasTypes: List[AliasType] - - /** The primary constructor of this class, if it has been defined. */ - def primaryConstructor: Option[Constructor] - - /** All constructors of this class, including the primary constructor. */ - def constructors: List[Constructor] - - /** The companion of this template, or none. If a class and an object are defined as a pair of the same name, the - * other entity of the pair is the companion. 
*/ - def companion: Option[DocTemplateEntity] - - /** The implicit conversions this template (class or trait, objects and packages are not affected) */ - def conversions: List[ImplicitConversion] - - /** The shadowing information for the implicitly added members */ - def implicitsShadowing: Map[MemberEntity, ImplicitMemberShadowing] - - /** Classes that can be implcitly converted to this class */ - def incomingImplicitlyConvertedClasses: List[(DocTemplateEntity, ImplicitConversion)] - - /** Classes to which this class can be implicitly converted to - NOTE: Some classes might not be included in the scaladoc run so they will be NoDocTemplateEntities */ - def outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversion)] - - /** If this template takes place in inheritance and implicit conversion relations, it will be shown in this diagram */ - def inheritanceDiagram: Option[Diagram] - - /** If this template contains other templates, such as classes and traits, they will be shown in this diagram */ - def contentDiagram: Option[Diagram] - - /** Returns the group description taken either from this template or its linearizationTypes */ - def groupDescription(group: String): Option[Body] - - /** Returns the group description taken either from this template or its linearizationTypes */ - def groupPriority(group: String): Int - - /** Returns the group description taken either from this template or its linearizationTypes */ - def groupName(group: String): String -} - -/** A trait template. */ -trait Trait extends MemberTemplateEntity { - def kind = "trait" -} - -/** A class template. */ -trait Class extends MemberTemplateEntity { - override def kind = "class" -} - -/** An object template. */ -trait Object extends MemberTemplateEntity { - def kind = "object" -} - -/** A package template. A package is in the universe if it is declared as a package object, or if it - * contains at least one template. */ -trait Package extends DocTemplateEntity { - - /** The package of which this package is a member. */ - def inTemplate: Package - - /** The package such that each is a member of the package that follows it; the first package is always this - * package, the last the root package. */ - def toRoot: List[Package] - - /** All packages that are member of this package. */ - def packages: List[Package] - - override def kind = "package" -} - - -/** The root package, which contains directly or indirectly all members in the universe. A universe - * contains exactly one root package. */ -trait RootPackage extends Package - - -/** A non-template member (method, value, lazy value, variable, constructor, alias type, and abstract type). */ -trait NonTemplateMemberEntity extends MemberEntity { - /** Whether this member is a use case. A use case is a member which does not exist in the documented code. - * It corresponds to a real member, and provides a simplified, yet compatible signature for that member. */ - def isUseCase: Boolean -} - - -/** A method (`def`) of a template. */ -trait Def extends NonTemplateMemberEntity with HigherKinded { - - /** The value parameters of this method. Each parameter block of a curried method is an element of the list. - * Each parameter block is a list of value parameters. */ - def valueParams : List[List[ValueParam]] - - def kind = "method" -} - - -/** A constructor of a class. */ -trait Constructor extends NonTemplateMemberEntity { - - /** Whether this is the primary constructor of a class. 
The primary constructor is defined syntactically as part of - * the declaration of the class. */ - def isPrimary: Boolean - - /** The value parameters of this constructor. As constructors cannot be curried, the outer list has exactly one - * element. */ - def valueParams : List[List[ValueParam]] - - def kind = "constructor" -} - - -/** A value (`val`), lazy val (`lazy val`) or variable (`var`) of a template. */ -trait Val extends NonTemplateMemberEntity { - def kind = "[lazy] value/variable" -} - - -/** An abstract type member of a template. */ -trait AbstractType extends MemberTemplateEntity with HigherKinded { - - /** The lower bound for this abstract type, if it has been defined. */ - def lo: Option[TypeEntity] - - /** The upper bound for this abstract type, if it has been defined. */ - def hi: Option[TypeEntity] - - def kind = "abstract type" -} - - -/** An type alias of a template. */ -trait AliasType extends MemberTemplateEntity with HigherKinded { - - /** The type aliased by this type alias. */ - def alias: TypeEntity - - def kind = "type alias" -} - - -/** A parameter to an entity. */ -trait ParameterEntity { - - def name: String -} - - -/** A type parameter to a class, trait, or method. */ -trait TypeParam extends ParameterEntity with HigherKinded { - - /** The variance of this type parameter. Valid values are "+", "-", and the empty string. */ - def variance: String - - /** The lower bound for this type parameter, if it has been defined. */ - def lo: Option[TypeEntity] - - /** The upper bound for this type parameter, if it has been defined. */ - def hi: Option[TypeEntity] -} - - -/** A value parameter to a constructor or method. */ -trait ValueParam extends ParameterEntity { - - /** The type of this value parameter. */ - def resultType: TypeEntity - - /** The devault value of this value parameter, if it has been defined. */ - def defaultValue: Option[TreeEntity] - - /** Whether this value parameter is implicit. */ - def isImplicit: Boolean -} - - -/** An annotation to an entity. */ -trait Annotation extends Entity { - - /** The class of this annotation. */ - def annotationClass: TemplateEntity - - /** The arguments passed to the constructor of the annotation class. 
*/ - def arguments: List[ValueArgument] - - def kind = "annotation" -} - -/** A trait that signals the member results from an implicit conversion */ -trait ImplicitConversion { - - /** The source of the implicit conversion*/ - def source: DocTemplateEntity - - /** The result type after the conversion */ - def targetType: TypeEntity - - /** The components of the implicit conversion type parents */ - def targetTypeComponents: List[(TemplateEntity, TypeEntity)] - - /** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise) */ - def convertorMethod: Either[MemberEntity, String] - - /** A short name of the convertion */ - def conversionShortName: String - - /** A qualified name uniquely identifying the convertion (currently: the conversion method's qualified name) */ - def conversionQualifiedName: String - - /** The entity that performed the conversion */ - def convertorOwner: TemplateEntity - - /** The constraints that the transformations puts on the type parameters */ - def constraints: List[Constraint] - - /** The members inherited by this implicit conversion */ - def members: List[MemberEntity] - - /** Is this a hidden implicit conversion (as specified in the settings) */ - def isHiddenConversion: Boolean -} - -/** Shadowing captures the information that the member is shadowed by some other members - * There are two cases of implicitly added member shadowing: - * 1) shadowing from a original class member (the class already has that member) - * in this case, it won't be possible to call the member directly, the type checker will fail attempting to adapt - * the call arguments (or if they fit it will call the original class' method) - * 2) shadowing from other possible implicit conversions () - * this will result in an ambiguous implicit converion error - */ -trait ImplicitMemberShadowing { - /** The members that shadow the current entry use .inTemplate to get to the template name */ - def shadowingMembers: List[MemberEntity] - - /** The members that ambiguate this implicit conversion - Note: for ambiguatingMembers you have the following invariant: - assert(ambiguatingMembers.foreach(_.byConversion.isDefined) */ - def ambiguatingMembers: List[MemberEntity] - - def isShadowed: Boolean = !shadowingMembers.isEmpty - def isAmbiguous: Boolean = !ambiguatingMembers.isEmpty -} - -/** A trait that encapsulates a constraint necessary for implicit conversion */ -trait Constraint - -/** A constraint involving a type parameter which must be in scope */ -trait ImplicitInScopeConstraint extends Constraint { - /** The type of the implicit value required */ - def implicitType: TypeEntity - - /** toString for debugging */ - override def toString = "an implicit _: " + implicitType.name + " must be in scope" -} - -trait TypeClassConstraint extends ImplicitInScopeConstraint with TypeParamConstraint { - /** Type class name */ - def typeClassEntity: TemplateEntity - - /** toString for debugging */ - override def toString = typeParamName + " is a class of type " + typeClassEntity.qualifiedName + " (" + - typeParamName + ": " + typeClassEntity.name + ")" -} - -trait KnownTypeClassConstraint extends TypeClassConstraint { - /** Type explanation, takes the type parameter name and generates the explanation */ - def typeExplanation: (String) => String - - /** toString for debugging */ - override def toString = typeExplanation(typeParamName) + " (" + typeParamName + ": " + typeClassEntity.name + ")" -} - -/** A constraint involving a type parameter */ -trait 
TypeParamConstraint extends Constraint { - /** The type parameter involved */ - def typeParamName: String -} - -trait EqualTypeParamConstraint extends TypeParamConstraint { - /** The rhs */ - def rhs: TypeEntity - /** toString for debugging */ - override def toString = typeParamName + " is " + rhs.name + " (" + typeParamName + " =:= " + rhs.name + ")" -} - -trait BoundedTypeParamConstraint extends TypeParamConstraint { - /** The lower bound */ - def lowerBound: TypeEntity - - /** The upper bound */ - def upperBound: TypeEntity - - /** toString for debugging */ - override def toString = typeParamName + " is a superclass of " + lowerBound.name + " and a subclass of " + - upperBound.name + " (" + typeParamName + " >: " + lowerBound.name + " <: " + upperBound.name + ")" -} - -trait LowerBoundedTypeParamConstraint extends TypeParamConstraint { - /** The lower bound */ - def lowerBound: TypeEntity - - /** toString for debugging */ - override def toString = typeParamName + " is a superclass of " + lowerBound.name + " (" + typeParamName + " >: " + - lowerBound.name + ")" -} - -trait UpperBoundedTypeParamConstraint extends TypeParamConstraint { - /** The lower bound */ - def upperBound: TypeEntity - - /** toString for debugging */ - override def toString = typeParamName + " is a subclass of " + upperBound.name + " (" + typeParamName + " <: " + - upperBound.name + ")" -} diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala deleted file mode 100755 index 1272906df5..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Pedro Furlanetto - */ - -package scala.tools.nsc -package doc -package model - -import scala.collection._ - -object IndexModelFactory { - - def makeIndex(universe: Universe): Index = new Index { - - lazy val firstLetterIndex: Map[Char, SymbolMap] = { - - object result extends mutable.HashMap[Char,SymbolMap] { - - /* symbol name ordering */ - implicit def orderingMap = math.Ordering.String - - def addMember(d: MemberEntity) = { - val firstLetter = { - val ch = d.name.head.toLower - if(ch.isLetterOrDigit) ch else '_' - } - val letter = this.get(firstLetter).getOrElse { - immutable.SortedMap[String, SortedSet[MemberEntity]]() - } - val members = letter.get(d.name).getOrElse { - SortedSet.empty[MemberEntity](Ordering.by { _.toString }) - } + d - this(firstLetter) = letter + (d.name -> members) - } - } - - //@scala.annotation.tailrec // TODO - def gather(owner: DocTemplateEntity): Unit = - for(m <- owner.members if m.inDefinitionTemplates.isEmpty || m.inDefinitionTemplates.head == owner) - m match { - case tpl: DocTemplateEntity => - result.addMember(tpl) - gather(tpl) - case non: MemberEntity if !non.isConstructor => - result.addMember(non) - case x @ _ => - } - - gather(universe.rootPackage) - - result.toMap - - } - - } - -} diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala deleted file mode 100644 index 23259a4ae8..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala +++ /dev/null @@ -1,63 +0,0 @@ -package scala.tools.nsc -package doc -package model - -import base._ - -/** This trait extracts all required information for documentation from compilation units */ -trait MemberLookup extends base.MemberLookupBase { - thisFactory: ModelFactory => - - import global._ - import 
definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass } - - override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] = - findTemplateMaybe(sym) match { - case Some(tpl) => Some(LinkToTpl(tpl)) - case None => - findTemplateMaybe(site) flatMap { inTpl => - inTpl.members find (_.asInstanceOf[EntityImpl].sym == sym) map (LinkToMember(_, inTpl)) - } - } - - override def chooseLink(links: List[LinkTo]): LinkTo = { - val mbrs = links.collect { - case lm@LinkToMember(mbr: MemberEntity, _) => (mbr, lm) - } - if (mbrs.isEmpty) - links.head - else - mbrs.min(Ordering[MemberEntity].on[(MemberEntity, LinkTo)](_._1))._2 - } - - override def toString(link: LinkTo) = link match { - case LinkToTpl(tpl: EntityImpl) => tpl.sym.toString - case LinkToMember(mbr: EntityImpl, inTpl: EntityImpl) => - mbr.sym.signatureString + " in " + inTpl.sym.toString - case _ => link.toString - } - - override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = { - val sym1 = - if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass - else if (sym.isPackage) - /* Get package object which has associatedFile ne null */ - sym.info.member(newTermName("package")) - else sym - Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src => - val path = src.path - settings.extUrlMapping get path map { url => - LinkToExternal(name, url + "#" + name) - } - } orElse { - // Deprecated option. - settings.extUrlPackageMapping find { - case (pkg, _) => name startsWith pkg - } map { - case (_, url) => LinkToExternal(name, url + "#" + name) - } - } - } - - override def warnNoLink = !settings.docNoLinkWarnings.value -} diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala deleted file mode 100644 index 1df725636a..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala +++ /dev/null @@ -1,1045 +0,0 @@ -/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */ - -package scala.tools.nsc -package doc -package model - -import base._ -import base.comment._ -import diagram._ - -import scala.collection._ -import scala.util.matching.Regex - -import symtab.Flags - -import io._ - -import model.{ RootPackage => RootPackageEntity } - -/** This trait extracts all required information for documentation from compilation units */ -class ModelFactory(val global: Global, val settings: doc.Settings) { - thisFactory: ModelFactory - with ModelFactoryImplicitSupport - with ModelFactoryTypeSupport - with DiagramFactory - with CommentFactory - with TreeFactory - with MemberLookup => - - import global._ - import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass } - import rootMirror.{ RootPackage, RootClass, EmptyPackage } - - // Defaults for member grouping, that may be overridden by the template - val defaultGroup = "Ungrouped" - val defaultGroupName = "Ungrouped" - val defaultGroupDesc = None - val defaultGroupPriority = 1000 - - def templatesCount = docTemplatesCache.count(_._2.isDocTemplate) - droppedPackages.size - - private var _modelFinished = false - def modelFinished: Boolean = _modelFinished - private var universe: Universe = null - - def makeModel: Option[Universe] = { - val universe = new Universe { thisUniverse => - thisFactory.universe = thisUniverse - val settings = thisFactory.settings - val rootPackage = modelCreation.createRootPackage - } - _modelFinished = true - // complete the links between model 
entities, everthing that couldn't have been done before - universe.rootPackage.completeModel() - - Some(universe) filter (_.rootPackage != null) - } - - // state: - var ids = 0 - private val droppedPackages = mutable.Set[PackageImpl]() - protected val docTemplatesCache = new mutable.LinkedHashMap[Symbol, DocTemplateImpl] - protected val noDocTemplatesCache = new mutable.LinkedHashMap[Symbol, NoDocTemplateImpl] - def packageDropped(tpl: DocTemplateImpl) = tpl match { - case p: PackageImpl => droppedPackages(p) - case _ => false - } - - def optimize(str: String): String = - if (str.length < 16) str.intern else str - - /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */ - - abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity { - val name = optimize(sym.nameString) - val universe = thisFactory.universe - - // Debugging: - // assert(id != 36, sym + " " + sym.getClass) - //println("Creating entity #" + id + " [" + kind + " " + qualifiedName + "] for sym " + sym.kindString + " " + sym.ownerChain.reverse.map(_.name).mkString(".")) - - def inTemplate: TemplateImpl = inTpl - def toRoot: List[EntityImpl] = this :: inTpl.toRoot - def qualifiedName = name - def annotations = sym.annotations.map(makeAnnotation) - def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject - def isType = sym.name.isTypeName - } - - trait TemplateImpl extends EntityImpl with TemplateEntity { - override def qualifiedName: String = - if (inTemplate == null || inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name) - def isPackage = sym.isPackage - def isTrait = sym.isTrait - def isClass = sym.isClass && !sym.isTrait - def isObject = sym.isModule && !sym.isPackage - def isCaseClass = sym.isCaseClass - def isRootPackage = false - def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this)) - } - - abstract class MemberImpl(sym: Symbol, inTpl: DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity { - lazy val comment = { - // If the current tpl is a DocTemplate, we consider itself as the root for resolving link targets (instead of the - // package the class is in) -- so people can refer to methods directly [[foo]], instead of using [[MyClass.foo]] - // in the doc comment of MyClass - val thisTpl = this match { - case d: DocTemplateImpl => Some(d) - case _ => None - } - if (inTpl != null) thisFactory.comment(sym, thisTpl, inTpl) else None - } - def group = comment flatMap (_.group) getOrElse defaultGroup - override def inTemplate = inTpl - override def toRoot: List[MemberImpl] = this :: inTpl.toRoot - def inDefinitionTemplates = - if (inTpl == null) - List(makeRootPackage) - else - makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) }) - def visibility = { - if (sym.isPrivateLocal) PrivateInInstance() - else if (sym.isProtectedLocal) ProtectedInInstance() - else { - val qual = - if (sym.hasAccessBoundary) - Some(makeTemplate(sym.privateWithin)) - else None - if (sym.isPrivate) PrivateInTemplate(inTpl) - else if (sym.isProtected) ProtectedInTemplate(qual getOrElse inTpl) - else qual match { - case Some(q) => PrivateInTemplate(q) - case None => Public() - } - } - } - def flags = { - val fgs = mutable.ListBuffer.empty[Paragraph] - if (sym.isImplicit) fgs += Paragraph(Text("implicit")) - if (sym.isSealed) fgs += Paragraph(Text("sealed")) - if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract")) - /* 
Resetting the DEFERRED flag is a little trick here for refined types: (example from scala.collections) - * {{{ - * implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] { - * def isParallel = ... - * }}} - * the type the method returns is TraversableOps, which has all-abstract symbols. But in reality, it couldn't have - * any abstract terms, otherwise it would fail compilation. So we reset the DEFERRED flag. */ - if (!sym.isTrait && (sym hasFlag Flags.DEFERRED) && (!isImplicitlyInherited)) fgs += Paragraph(Text("abstract")) - if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final")) - fgs.toList - } - def deprecation = - if (sym.isDeprecated) - Some((sym.deprecationMessage, sym.deprecationVersion) match { - case (Some(msg), Some(ver)) => parseWiki("''(Since version " + ver + ")'' " + msg, NoPosition, Some(inTpl)) - case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl)) - case (None, Some(ver)) => parseWiki("''(Since version " + ver + ")''", NoPosition, Some(inTpl)) - case (None, None) => Body(Nil) - }) - else - comment flatMap { _.deprecated } - def migration = - if(sym.hasMigrationAnnotation) - Some((sym.migrationMessage, sym.migrationVersion) match { - case (Some(msg), Some(ver)) => parseWiki("''(Changed in version " + ver + ")'' " + msg, NoPosition, Some(inTpl)) - case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl)) - case (None, Some(ver)) => parseWiki("''(Changed in version " + ver + ")''", NoPosition, Some(inTpl)) - case (None, None) => Body(Nil) - }) - else - None - - def resultType = { - def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone - case PolyType(_, res) => resultTpe(res) - case MethodType(_, res) => resultTpe(res) - case NullaryMethodType(res) => resultTpe(res) - case _ => tpe - } - val tpe = byConversion.fold(sym.tpe) (_.toType memberInfo sym) - makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym) - } - def isDef = false - def isVal = false - def isLazyVal = false - def isVar = false - def isConstructor = false - def isAliasType = false - def isAbstractType = false - def isAbstract = - // for the explanation of conversion == null see comment on flags - ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (!isImplicitlyInherited)) || - sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic - - def signature = externalSignature(sym) - lazy val signatureCompat = { - - def defParams(mbr: Any): String = mbr match { - case d: MemberEntity with Def => - val paramLists: List[String] = - if (d.valueParams.isEmpty) Nil - else d.valueParams map (ps => ps map (_.resultType.name) mkString ("(",",",")")) - paramLists.mkString - case _ => "" - } - - def tParams(mbr: Any): String = mbr match { - case hk: HigherKinded if !hk.typeParams.isEmpty => - def boundsToString(hi: Option[TypeEntity], lo: Option[TypeEntity]): String = { - def bound0(bnd: Option[TypeEntity], pre: String): String = bnd match { - case None => "" - case Some(tpe) => pre ++ tpe.toString - } - bound0(hi, "<:") ++ bound0(lo, ">:") - } - "[" + hk.typeParams.map(tp => tp.variance + tp.name + tParams(tp) + boundsToString(tp.hi, tp.lo)).mkString(", ") + "]" - case _ => "" - } - - (name + tParams(this) + defParams(this) +":"+ resultType.name).replaceAll("\\s","") // no spaces allowed, they break links - } - // these only apply for NonTemplateMemberEntities - def useCaseOf: Option[MemberEntity] = None - def byConversion: 
Option[ImplicitConversionImpl] = None - def isImplicitlyInherited = false - def isShadowedImplicit = false - def isAmbiguousImplicit = false - def isShadowedOrAmbiguousImplicit = false - } - - /** A template that is not documented at all. The class is instantiated during lookups, to indicate that the class - * exists, but should not be documented (either it's not included in the source or it's not visible) - */ - class NoDocTemplateImpl(sym: Symbol, inTpl: TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with NoDocTemplate { - assert(modelFinished, this) - assert(!(noDocTemplatesCache isDefinedAt sym), (sym, noDocTemplatesCache(sym))) - noDocTemplatesCache += (sym -> this) - def isDocTemplate = false - } - - /** An inherited template that was not documented in its original owner - example: - * in classpath: trait T { class C } -- T (and implicitly C) are not documented - * in the source: trait U extends T -- C appears in U as a MemberTemplateImpl -- that is, U has a member for it - * but C doesn't get its own page - */ - abstract class MemberTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with MemberTemplateEntity { - // no templates cache for this class, each owner gets its own instance - def isDocTemplate = false - lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name) - def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */ - - def parentTypes = - if (sym.isPackage || sym == AnyClass) List() else { - val tps = (this match { - case a: AliasType => sym.tpe.dealias.parents - case a: AbstractType => sym.info.bounds match { - case TypeBounds(lo, RefinedType(parents, decls)) => parents - case TypeBounds(lo, hi) => hi :: Nil - case _ => Nil - } - case _ => sym.tpe.parents - }) map { _.asSeenFrom(sym.thisType, sym) } - makeParentTypes(RefinedType(tps, EmptyScope), Some(this), inTpl) - } - } - - /** The instantiation of `TemplateImpl` triggers the creation of the following entities: - * All ancestors of the template and all non-package members. - */ - abstract class DocTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberTemplateImpl(sym, inTpl) with DocTemplateEntity { - assert(!modelFinished, (sym, inTpl)) - assert(!(docTemplatesCache isDefinedAt sym), sym) - docTemplatesCache += (sym -> this) - - if (settings.verbose.value) - inform("Creating doc template for " + sym) - - override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot - - protected def inSourceFromSymbol(symbol: Symbol) = - if (symbol.sourceFile != null && ! 
symbol.isSynthetic) - Some((symbol.sourceFile, symbol.pos.line)) - else - None - - def inSource = inSourceFromSymbol(sym) - - def sourceUrl = { - def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/") - val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/" - - if (!settings.docsourceurl.isDefault) - inSource map { case (file, _) => - val filePath = fixPath(file.path).replaceFirst("^" + assumedSourceRoot, "").stripSuffix(".scala") - val tplOwner = this.inTemplate.qualifiedName - val tplName = this.name - val patches = new Regex("""€\{(FILE_PATH|TPL_OWNER|TPL_NAME)\}""") - def substitute(name: String): String = name match { - case "FILE_PATH" => filePath - case "TPL_OWNER" => tplOwner - case "TPL_NAME" => tplName - } - val patchedString = patches.replaceAllIn(settings.docsourceurl.value, m => java.util.regex.Matcher.quoteReplacement(substitute(m.group(1))) ) - new java.net.URL(patchedString) - } - else None - } - - protected def linearizationFromSymbol(symbol: Symbol): List[(TemplateEntity, TypeEntity)] = { - symbol.ancestors map { ancestor => - val typeEntity = makeType(symbol.info.baseType(ancestor), this) - val tmplEntity = makeTemplate(ancestor) match { - case tmpl: DocTemplateImpl => tmpl registerSubClass this ; tmpl - case tmpl => tmpl - } - (tmplEntity, typeEntity) - } - } - - lazy val linearization = linearizationFromSymbol(sym) - def linearizationTemplates = linearization map { _._1 } - def linearizationTypes = linearization map { _._2 } - - /* Subclass cache */ - private lazy val subClassesCache = ( - if (sym == AnyRefClass) null - else mutable.ListBuffer[DocTemplateEntity]() - ) - def registerSubClass(sc: DocTemplateEntity): Unit = { - if (subClassesCache != null) - subClassesCache += sc - } - def allSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList - def directSubClasses = allSubClasses.filter(_.parentTypes.map(_._1).contains(this)) - - /* Implcitly convertible class cache */ - private var implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null - def registerImplicitlyConvertibleClass(dtpl: DocTemplateImpl, conv: ImplicitConversionImpl): Unit = { - if (implicitlyConvertibleClassesCache == null) - implicitlyConvertibleClassesCache = mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)]() - implicitlyConvertibleClassesCache += ((dtpl, conv)) - } - - def incomingImplicitlyConvertedClasses: List[(DocTemplateImpl, ImplicitConversionImpl)] = - if (implicitlyConvertibleClassesCache == null) - List() - else - implicitlyConvertibleClassesCache.toList - - // the implicit conversions are generated eagerly, but the members generated by implicit conversions are added - // lazily, on completeModel - val conversions: List[ImplicitConversionImpl] = - if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil - - // members as given by the compiler - lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this)).toList - - // the inherited templates (classes, traits or objects) - val memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this)) - // the direct members (methods, values, vars, types and directly contained templates) - val memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_)) - // the members generated by the symbols in memberSymsEager - val ownMembers = (memberSymsEager.flatMap(makeMember(_, None, this))) - - // all the members that are documentented PLUS the members inherited 
by implicit conversions - var members: List[MemberImpl] = ownMembers - - def templates = members collect { case c: TemplateEntity with MemberEntity => c } - def methods = members collect { case d: Def => d } - def values = members collect { case v: Val => v } - def abstractTypes = members collect { case t: AbstractType => t } - def aliasTypes = members collect { case t: AliasType => t } - - /** - * This is the final point in the core model creation: no DocTemplates are created after the model has finished, but - * inherited templates and implicit members are added to the members at this point. - */ - def completeModel(): Unit = { - // DFS completion - // since alias types and abstract types have no own members, there's no reason for them to call completeModel - if (!sym.isAliasType && !sym.isAbstractType) - for (member <- members) - member match { - case d: DocTemplateImpl => d.completeModel() - case _ => - } - - members :::= memberSymsLazy.map(modelCreation.createLazyTemplateMember(_, this)) - - // compute linearization to register subclasses - linearization - outgoingImplicitlyConvertedClasses - - // the members generated by the symbols in memberSymsEager PLUS the members from the usecases - val allMembers = ownMembers ::: ownMembers.flatMap(_.useCaseOf.map(_.asInstanceOf[MemberImpl])).distinct - implicitsShadowing = makeShadowingTable(allMembers, conversions, this) - // finally, add the members generated by implicit conversions - members :::= conversions.flatMap(_.memberImpls) - } - - var implicitsShadowing = Map[MemberEntity, ImplicitMemberShadowing]() - - lazy val outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversionImpl)] = - conversions flatMap (conv => - if (!implicitExcluded(conv.conversionQualifiedName)) - conv.targetTypeComponents map { - case (template, tpe) => - template match { - case d: DocTemplateImpl if (d != this) => d.registerImplicitlyConvertibleClass(this, conv) - case _ => // nothing - } - (template, tpe, conv) - } - else List() - ) - - override def isDocTemplate = true - private[this] lazy val companionSymbol = - if (sym.isAliasType || sym.isAbstractType) { - inTpl.sym.info.member(sym.name.toTermName) match { - case NoSymbol => NoSymbol - case s => - s.info match { - case ot: OverloadedType => - NoSymbol - case _ => - // that's to navigate from val Foo: FooExtractor to FooExtractor :) - s.info.resultType.typeSymbol - } - } - } - else - sym.companionSymbol - - def companion = - companionSymbol match { - case NoSymbol => None - case comSym if !isEmptyJavaObject(comSym) && (comSym.isClass || comSym.isModule) => - makeTemplate(comSym) match { - case d: DocTemplateImpl => Some(d) - case _ => None - } - case _ => None - } - - def constructors: List[MemberImpl with Constructor] = if (isClass) members collect { case d: Constructor => d } else Nil - def primaryConstructor: Option[MemberImpl with Constructor] = if (isClass) constructors find { _.isPrimary } else None - override def valueParams = - // we don't want params on a class (non case class) signature - if (isCaseClass) primaryConstructor match { - case Some(const) => const.sym.paramss map (_ map (makeValueParam(_, this))) - case None => List() - } - else List.empty - - // These are generated on-demand, make sure you don't call them more than once - def inheritanceDiagram = makeInheritanceDiagram(this) - def contentDiagram = makeContentDiagram(this) - - def groupSearch[T](extractor: Comment => Option[T]): Option[T] = { - val comments = comment +: linearizationTemplates.collect { case 
dtpl: DocTemplateImpl => dtpl.comment } - comments.flatten.map(extractor).flatten.headOption orElse { - Option(inTpl) flatMap (_.groupSearch(extractor)) - } - } - - def groupDescription(group: String): Option[Body] = groupSearch(_.groupDesc.get(group)) orElse { if (group == defaultGroup) defaultGroupDesc else None } - def groupPriority(group: String): Int = groupSearch(_.groupPrio.get(group)) getOrElse { if (group == defaultGroup) defaultGroupPriority else 0 } - def groupName(group: String): String = groupSearch(_.groupNames.get(group)) getOrElse { if (group == defaultGroup) defaultGroupName else group } - } - - abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package { - override def inTemplate = inTpl - override def toRoot: List[PackageImpl] = this :: inTpl.toRoot - override lazy val (inSource, linearization) = { - val representive = sym.info.members.find { - s => s.isPackageObject - } getOrElse sym - (inSourceFromSymbol(representive), linearizationFromSymbol(representive)) - } - def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p } - } - - abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity - - abstract class NonTemplateMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl], - override val useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl) - extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity { - override lazy val comment = { - val inRealTpl = - conversion.fold(Option(inTpl)) { conv => - /* Variable precendence order for implicitly added members: Take the variable defifinitions from ... - * 1. the target of the implicit conversion - * 2. the definition template (owner) - * 3. the current template - */ - findTemplateMaybe(conv.toType.typeSymbol) filterNot (_ == makeRootPackage) orElse ( - findTemplateMaybe(sym.owner) filterNot (_ == makeRootPackage) orElse Option(inTpl) - ) - } - inRealTpl flatMap (thisFactory.comment(sym, None, _)) - } - - override def inDefinitionTemplates = useCaseOf.fold(super.inDefinitionTemplates)(_.inDefinitionTemplates) - - override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name) - lazy val definitionName = { - val qualifiedName = conversion.fold(inDefinitionTemplates.head.qualifiedName)(_.conversionQualifiedName) - optimize(qualifiedName + "#" + name) - } - def isUseCase = useCaseOf.isDefined - override def byConversion: Option[ImplicitConversionImpl] = conversion - override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined } - override def isShadowedImplicit = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isShadowed).getOrElse(false) - override def isAmbiguousImplicit = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isAmbiguous).getOrElse(false) - override def isShadowedOrAmbiguousImplicit = isShadowedImplicit || isAmbiguousImplicit - } - - abstract class NonTemplateParamMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl], - useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl) - extends NonTemplateMemberImpl(sym, conversion, useCaseOf, inTpl) { - def valueParams = { - val info = conversion.fold(sym.info)(_.toType memberInfo sym) - info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) => - if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl) - }} - } - } - - abstract class ParameterImpl(val sym: Symbol, val inTpl: TemplateImpl) extends 
ParameterEntity { - val name = optimize(sym.nameString) - } - - private trait AliasImpl { - def sym: Symbol - def inTpl: TemplateImpl - def alias = makeTypeInTemplateContext(sym.tpe.dealias, inTpl, sym) - } - - private trait TypeBoundsImpl { - def sym: Symbol - def inTpl: TemplateImpl - def lo = sym.info.bounds match { - case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass => - Some(makeTypeInTemplateContext(appliedType(lo, sym.info.typeParams map {_.tpe}), inTpl, sym)) - case _ => None - } - def hi = sym.info.bounds match { - case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass => - Some(makeTypeInTemplateContext(appliedType(hi, sym.info.typeParams map {_.tpe}), inTpl, sym)) - case _ => None - } - } - - trait HigherKindedImpl extends HigherKinded { - def sym: Symbol - def inTpl: TemplateImpl - def typeParams = - sym.typeParams map (makeTypeParam(_, inTpl)) - } - /* ============== MAKER METHODS ============== */ - - /** This method makes it easier to work with the different kinds of symbols created by scalac by stripping down the - * package object abstraction and placing members directly in the package. - * - * Here's the explanation of what we do. The code: - * - * package foo { - * object `package` { - * class Bar - * } - * } - * - * will yield this Symbol structure: - * +---------+ (2) - * | | - * +---------------+ +---------- v ------- | ---+ +--------+ (2) - * | package foo#1 <---(1)---- module class foo#2 | | | | - * +---------------+ | +------------------ | -+ | +------------------- v ---+ | - * | | package object foo#3 <-----(1)---- module class package#4 | | - * | +----------------------+ | | +---------------------+ | | - * +--------------------------+ | | class package$Bar#5 | | | - * | +----------------- | -+ | | - * +------------------- | ---+ | - * | | - * +--------+ - * (1) sourceModule - * (2) you get out of owners with .owner - * - * and normalizeTemplate(Bar.owner) will get us the package, instead of the module class of the package object. - */ - def normalizeTemplate(aSym: Symbol): Symbol = aSym match { - case null | rootMirror.EmptyPackage | NoSymbol => - normalizeTemplate(RootPackage) - case ObjectClass => - normalizeTemplate(AnyRefClass) - case _ if aSym.isPackageObject => - normalizeTemplate(aSym.owner) - case _ if aSym.isModuleClass => - normalizeTemplate(aSym.sourceModule) - case _ => - aSym - } - - /** - * These are all model construction methods. Please do not use them directly, they are calling each other recursively - * starting from makeModel. On the other hand, makeTemplate, makeAnnotation, makeMember, makeType should only be used - * after the model was created (modelFinished=true) otherwise assertions will start failing. - */ - object modelCreation { - - def createRootPackage: PackageImpl = docTemplatesCache.get(RootPackage) match { - case Some(root: PackageImpl) => root - case _ => modelCreation.createTemplate(RootPackage, null) match { - case Some(root: PackageImpl) => root - case _ => sys.error("Scaladoc: Unable to create root package!") - } - } - - /** - * Create a template, either a package, class, trait or object - */ - def createTemplate(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = { - // don't call this after the model finished! 
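The normalizeTemplate cases above flatten the package-object indirection so that, as the diagram explains, a class defined inside `object \`package\`` appears to live directly in the enclosing package. A minimal, self-contained sketch of the same idea, using a hypothetical SymRef model in place of compiler Symbols (all names below are illustrative, not scaladoc API):

{{{
sealed trait SymRef { def name: String; def owner: SymRef }
case class Pkg(name: String, owner: SymRef) extends SymRef
case class PkgObject(name: String, owner: SymRef) extends SymRef
case class ModuleClass(name: String, sourceModule: SymRef, owner: SymRef) extends SymRef
case class Cls(name: String, owner: SymRef) extends SymRef
case object Root extends SymRef { val name = "_root_"; val owner = this }

object NormalizeDemo extends App {
  // Hypothetical analogue of normalizeTemplate: strip package objects and
  // module classes until we reach the entity that actually gets a doc page.
  def normalize(ref: SymRef): SymRef = ref match {
    case PkgObject(_, owner)             => normalize(owner)        // `package` object -> enclosing package
    case ModuleClass(_, sourceModule, _) => normalize(sourceModule) // module class -> its source module
    case other                           => other
  }

  val foo     = Pkg("foo", Root)
  val fooObj  = PkgObject("package", foo)
  val fooObjC = ModuleClass("package$", fooObj, foo)
  val bar     = Cls("Bar", fooObjC)

  // Bar's owner is the package object's module class (#4 in the diagram above),
  // but normalization reports the package foo itself.
  assert(normalize(bar.owner) == foo)
  println(normalize(bar.owner).name) // foo
}
}}}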
- assert(!modelFinished, (aSym, inTpl)) - - def createRootPackageComment: Option[Comment] = - if(settings.docRootContent.isDefault) None - else { - import Streamable._ - Path(settings.docRootContent.value) match { - case f : File => { - val rootComment = closing(f.inputStream())(is => parse(slurp(is), "", NoPosition, Option(inTpl))) - Some(rootComment) - } - case _ => None - } - } - - def createDocTemplate(bSym: Symbol, inTpl: DocTemplateImpl): DocTemplateImpl = { - assert(!modelFinished, (bSym, inTpl)) // only created BEFORE the model is finished - if (bSym.isAliasType && bSym != AnyRefClass) - new DocTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { override def isAliasType = true } - else if (bSym.isAbstractType) - new DocTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType { override def isAbstractType = true } - else if (bSym.isModule) - new DocTemplateImpl(bSym, inTpl) with Object {} - else if (bSym.isTrait) - new DocTemplateImpl(bSym, inTpl) with Trait {} - else if (bSym.isClass || bSym == AnyRefClass) - new DocTemplateImpl(bSym, inTpl) with Class {} - else - sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a documentable template.") - } - - val bSym = normalizeTemplate(aSym) - if (docTemplatesCache isDefinedAt bSym) - return Some(docTemplatesCache(bSym)) - - /* Three cases of templates: - * (1) root package -- special cased for bootstrapping - * (2) package - * (3) class/object/trait - */ - if (bSym == RootPackage) // (1) - Some(new RootPackageImpl(bSym) { - override lazy val comment = createRootPackageComment - override val name = "root" - override def inTemplate = this - override def toRoot = this :: Nil - override def qualifiedName = "_root_" - override def isRootPackage = true - override lazy val memberSyms = - (bSym.info.members ++ EmptyPackage.info.members).toList filter { s => - s != EmptyPackage && s != RootPackage - } - }) - else if (bSym.isPackage) // (2) - if (settings.skipPackage(makeQualifiedName(bSym))) - None - else - inTpl match { - case inPkg: PackageImpl => - val pack = new PackageImpl(bSym, inPkg) {} - // Used to check package pruning works: - //println(pack.qualifiedName) - if (pack.templates.filter(_.isDocTemplate).isEmpty && pack.memberSymsLazy.isEmpty) { - droppedPackages += pack - None - } else - Some(pack) - case _ => - sys.error("'" + bSym + "' must be in a package") - } - else { - // no class inheritance at this point - assert(inOriginalOwner(bSym, inTpl), bSym + " in " + inTpl) - Some(createDocTemplate(bSym, inTpl)) - } - } - - /** - * After the model is completed, no more DocTemplateEntities are created. 
- * Therefore any symbol that still appears is: - * - MemberTemplateEntity (created here) - * - NoDocTemplateEntity (created in makeTemplate) - */ - def createLazyTemplateMember(aSym: Symbol, inTpl: DocTemplateImpl): MemberImpl = { - - // Code is duplicate because the anonymous classes are created statically - def createNoDocMemberTemplate(bSym: Symbol, inTpl: DocTemplateImpl): MemberTemplateImpl = { - assert(modelFinished) // only created AFTER the model is finished - if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule)) - new MemberTemplateImpl(bSym, inTpl) with Object {} - else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait)) - new MemberTemplateImpl(bSym, inTpl) with Trait {} - else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass)) - new MemberTemplateImpl(bSym, inTpl) with Class {} - else - sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a member template.") - } - - assert(modelFinished) - val bSym = normalizeTemplate(aSym) - - if (docTemplatesCache isDefinedAt bSym) - docTemplatesCache(bSym) - else - docTemplatesCache.get(bSym.owner) match { - case Some(inTpl) => - val mbrs = inTpl.members.collect({ case mbr: MemberImpl if mbr.sym == bSym => mbr }) - assert(mbrs.length == 1) - mbrs.head - case _ => - // move the class completely to the new location - createNoDocMemberTemplate(bSym, inTpl) - } - } - } - - def makeRootPackage: PackageImpl = docTemplatesCache(RootPackage).asInstanceOf[PackageImpl] - - // TODO: Should be able to override the type - def makeMember(aSym: Symbol, conversion: Option[ImplicitConversionImpl], inTpl: DocTemplateImpl): List[MemberImpl] = { - - def makeMember0(bSym: Symbol, useCaseOf: Option[MemberImpl]): Option[MemberImpl] = { - if (bSym.isGetter && bSym.isLazy) - Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val { - override lazy val comment = // The analyser does not duplicate the lazy val's DocDef when it introduces its accessor. - thisFactory.comment(bSym.accessed, None, inTpl.asInstanceOf[DocTemplateImpl]) // This hack should be removed after analyser is fixed. - override def isLazyVal = true - }) - else if (bSym.isGetter && bSym.accessed.isMutable) - Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val { - override def isVar = true - }) - else if (bSym.isMethod && !bSym.hasAccessorFlag && !bSym.isConstructor && !bSym.isModule) { - val cSym = { // This unsightly hack closes issue #4086. 
- if (bSym == definitions.Object_synchronized) { - val cSymInfo = (bSym.info: @unchecked) match { - case PolyType(ts, MethodType(List(bp), mt)) => - val cp = bp.cloneSymbol.setPos(bp.pos).setInfo(definitions.byNameType(bp.info)) - PolyType(ts, MethodType(List(cp), mt)) - } - bSym.cloneSymbol.setPos(bSym.pos).setInfo(cSymInfo) - } - else bSym - } - Some(new NonTemplateParamMemberImpl(cSym, conversion, useCaseOf, inTpl) with HigherKindedImpl with Def { - override def isDef = true - }) - } - else if (bSym.isConstructor) - if (conversion.isDefined) - None // don't list constructors inherted by implicit conversion - else - Some(new NonTemplateParamMemberImpl(bSym, conversion, useCaseOf, inTpl) with Constructor { - override def isConstructor = true - def isPrimary = sym.isPrimaryConstructor - }) - else if (bSym.isGetter) // Scala field accessor or Java field - Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val { - override def isVal = true - }) - else if (bSym.isAbstractType && !typeShouldDocument(bSym, inTpl)) - Some(new MemberTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType { - override def isAbstractType = true - }) - else if (bSym.isAliasType && !typeShouldDocument(bSym, inTpl)) - Some(new MemberTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { - override def isAliasType = true - }) - else if (!modelFinished && (bSym.isPackage || templateShouldDocument(bSym, inTpl))) - modelCreation.createTemplate(bSym, inTpl) - else - None - } - - if (!localShouldDocument(aSym) || aSym.isModuleClass || aSym.isPackageObject || aSym.isMixinConstructor) - Nil - else { - val allSyms = useCases(aSym, inTpl.sym) map { case (bSym, bComment, bPos) => - docComments.put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898 - bSym - } - - val member = makeMember0(aSym, None) - if (allSyms.isEmpty) - member.toList - else - // Use cases replace the original definitions - SI-5054 - allSyms flatMap { makeMember0(_, member) } - } - } - - def findMember(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = { - normalizeTemplate(aSym.owner) - inTpl.members.find(_.sym == aSym) - } - - def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = { - assert(modelFinished) - docTemplatesCache.get(normalizeTemplate(aSym)).filterNot(packageDropped(_)) - } - - def makeTemplate(aSym: Symbol): TemplateImpl = makeTemplate(aSym, None) - - def makeTemplate(aSym: Symbol, inTpl: Option[TemplateImpl]): TemplateImpl = { - assert(modelFinished) - - def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = - noDocTemplatesCache getOrElse (aSym, new NoDocTemplateImpl(aSym, inTpl)) - - findTemplateMaybe(aSym) getOrElse { - val bSym = normalizeTemplate(aSym) - makeNoDocTemplate(bSym, inTpl getOrElse makeTemplate(bSym.owner)) - } - } - - /** */ - def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = { - val aSym = annot.symbol - new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation { - lazy val annotationClass = - makeTemplate(annot.symbol) - val arguments = { // lazy - def annotArgs = annot.args match { - case Nil => annot.assocs collect { case (_, LiteralAnnotArg(const)) => Literal(const) } - case xs => xs - } - def noParams = annotArgs map (_ => None) - - val params: List[Option[ValueParam]] = annotationClass match { - case aClass: DocTemplateEntity with Class => - (aClass.primaryConstructor map { _.valueParams.head }) match { - case Some(vps) => vps map { 
Some(_) } - case _ => noParams - } - case _ => noParams - } - assert(params.length == annotArgs.length, (params, annotArgs)) - - params zip annotArgs flatMap { case (param, arg) => - makeTree(arg) map { tree => - new ValueArgument { - def parameter = param - def value = tree - } - } - } - } - } - } - - /** */ - def makeTypeParam(aSym: Symbol, inTpl: TemplateImpl): TypeParam = - new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with TypeParam { - def variance: String = { - if (sym hasFlag Flags.COVARIANT) "+" - else if (sym hasFlag Flags.CONTRAVARIANT) "-" - else "" - } - } - - /** */ - def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl): ValueParam = { - makeValueParam(aSym, inTpl, aSym.nameString) - } - - - /** */ - def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl, newName: String): ValueParam = - new ParameterImpl(aSym, inTpl) with ValueParam { - override val name = newName - def defaultValue = - if (aSym.hasDefault) { - // units.filter should return only one element - (currentRun.units filter (_.source.file == aSym.sourceFile)).toList match { - case List(unit) => - // SI-4922 `sym == aSym` is insufficent if `aSym` is a clone of symbol - // of the parameter in the tree, as can happen with type parametric methods. - def isCorrespondingParam(sym: Symbol) = ( - sym != null && - sym != NoSymbol && - sym.owner == aSym.owner && - sym.name == aSym.name && - sym.isParamWithDefault - ) - (unit.body find (t => isCorrespondingParam(t.symbol))) match { - case Some(ValDef(_,_,_,rhs)) => makeTree(rhs) - case _ => None - } - case _ => None - } - } - else None - def resultType = - makeTypeInTemplateContext(aSym.tpe, inTpl, aSym) - def isImplicit = aSym.isImplicit - } - - /** */ - def makeTypeInTemplateContext(aType: Type, inTpl: TemplateImpl, dclSym: Symbol): TypeEntity = { - def ownerTpl(sym: Symbol): Symbol = - if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner) - val tpe = - if (thisFactory.settings.useStupidTypes.value) aType else { - def ownerTpl(sym: Symbol): Symbol = - if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner) - val fixedSym = if (inTpl.sym.isModule) inTpl.sym.moduleClass else inTpl.sym - aType.asSeenFrom(fixedSym.thisType, ownerTpl(dclSym)) - } - makeType(tpe, inTpl) - } - - /** Get the types of the parents of the current class, ignoring the refinements */ - def makeParentTypes(aType: Type, tpl: Option[MemberTemplateImpl], inTpl: TemplateImpl): List[(TemplateEntity, TypeEntity)] = aType match { - case RefinedType(parents, defs) => - val ignoreParents = Set[Symbol](AnyClass, AnyRefClass, ObjectClass) - val filtParents = - // we don't want to expose too many links to AnyRef, that will just be redundant information - tpl match { - case Some(tpl) if (!tpl.sym.isModule && parents.length < 2) || (tpl.sym == AnyValClass) || (tpl.sym == AnyRefClass) || (tpl.sym == AnyClass) => parents - case _ => parents.filterNot((p: Type) => ignoreParents(p.typeSymbol)) - } - - /** Returns: - * - a DocTemplate if the type's symbol is documented - * - a NoDocTemplateMember if the type's symbol is not documented in its parent but in another template - * - a NoDocTemplate if the type's symbol is not documented at all */ - def makeTemplateOrMemberTemplate(parent: Type): TemplateImpl = { - def noDocTemplate = makeTemplate(parent.typeSymbol) - findTemplateMaybe(parent.typeSymbol) match { - case Some(tpl) => tpl - case None => parent match { - case TypeRef(pre, sym, args) => - findTemplateMaybe(pre.typeSymbol) match { - 
case Some(tpl) => findMember(parent.typeSymbol, tpl).collect({case t: TemplateImpl => t}).getOrElse(noDocTemplate) - case None => noDocTemplate - } - case _ => noDocTemplate - } - } - } - - filtParents.map(parent => { - val templateEntity = makeTemplateOrMemberTemplate(parent) - val typeEntity = makeType(parent, inTpl) - (templateEntity, typeEntity) - }) - case _ => - List((makeTemplate(aType.typeSymbol), makeType(aType, inTpl))) - } - - def makeQualifiedName(sym: Symbol, relativeTo: Option[Symbol] = None): String = { - val stop = relativeTo map (_.ownerChain.toSet) getOrElse Set[Symbol]() - var sym1 = sym - val path = new StringBuilder() - // var path = List[Symbol]() - - while ((sym1 != NoSymbol) && (path.isEmpty || !stop(sym1))) { - val sym1Norm = normalizeTemplate(sym1) - if (!sym1.sourceModule.isPackageObject && sym1Norm != RootPackage) { - if (path.length != 0) - path.insert(0, ".") - path.insert(0, sym1Norm.nameString) - // path::= sym1Norm - } - sym1 = sym1.owner - } - - optimize(path.toString) - //path.mkString(".") - } - - def inOriginalOwner(aSym: Symbol, inTpl: TemplateImpl): Boolean = - normalizeTemplate(aSym.owner) == normalizeTemplate(inTpl.sym) - - def templateShouldDocument(aSym: Symbol, inTpl: DocTemplateImpl): Boolean = - (aSym.isTrait || aSym.isClass || aSym.isModule || typeShouldDocument(aSym, inTpl)) && - localShouldDocument(aSym) && - !isEmptyJavaObject(aSym) && - // either it's inside the original owner or we can document it later: - (!inOriginalOwner(aSym, inTpl) || (aSym.isPackageClass || (aSym.sourceFile != null))) - - def membersShouldDocument(sym: Symbol, inTpl: TemplateImpl) = { - // pruning modules that shouldn't be documented - // Why Symbol.isInitialized? Well, because we need to avoid exploring all the space available to scaladoc - // from the classpath -- scaladoc is a hog, it will explore everything starting from the root package unless we - // somehow prune the tree. And isInitialized is a good heuristic for prunning -- if the package was not explored - // during typer and refchecks, it's not necessary for the current application and there's no need to explore it. - (!sym.isModule || sym.moduleClass.isInitialized) && - // documenting only public and protected members - localShouldDocument(sym) && - // Only this class's constructors are part of its members, inherited constructors are not. - (!sym.isConstructor || sym.owner == inTpl.sym) && - // If the @bridge annotation overrides a normal member, show it - !isPureBridge(sym) - } - - def isEmptyJavaObject(aSym: Symbol): Boolean = - aSym.isModule && aSym.isJavaDefined && - aSym.info.members.exists(s => localShouldDocument(s) && (!s.isConstructor || s.owner == aSym)) - - def localShouldDocument(aSym: Symbol): Boolean = - !aSym.isPrivate && (aSym.isProtected || aSym.privateWithin == NoSymbol) && !aSym.isSynthetic - - /** Filter '@bridge' methods only if *they don't override non-bridge methods*. 
See SI-5373 for details */ - def isPureBridge(sym: Symbol) = sym.isBridge && sym.allOverriddenSymbols.forall(_.isBridge) - - // the classes that are excluded from the index should also be excluded from the diagrams - def classExcluded(clazz: TemplateEntity): Boolean = settings.hardcoded.isExcluded(clazz.qualifiedName) - - // the implicit conversions that are excluded from the pages should not appear in the diagram - def implicitExcluded(convertorMethod: String): Boolean = settings.hiddenImplicits(convertorMethod) - - // whether or not to create a page for an {abstract,alias} type - def typeShouldDocument(bSym: Symbol, inTpl: DocTemplateImpl) = - (settings.docExpandAllTypes.value && (bSym.sourceFile != null)) || - (bSym.isAliasType || bSym.isAbstractType) && - { val rawComment = global.expandedDocComment(bSym, inTpl.sym) - rawComment.contains("@template") || rawComment.contains("@documentable") } -} - diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala deleted file mode 100644 index 868c2fc3a4..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ /dev/null @@ -1,579 +0,0 @@ -/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL - * - * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them. - * - * @author Vlad Ureche - * @author Adriaan Moors - */ - -package scala.tools.nsc -package doc -package model - -import scala.collection._ -import symtab.Flags - -/** - * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them. - * - * Let's take this as an example: - * {{{ - * object Test { - * class A - * - * class B { - * def foo = 1 - * } - * - * class C extends B { - * def bar = 2 - * class implicit - * } - * - * D def conv(a: A) = new C - * } - * }}} - * - * Overview: - * - scaladoc-ing the above classes, `A` will get two more methods: foo and bar, over its default methods - * - the nested classes (specifically `D` above), abstract types, type aliases and constructor members are not added to - * `A` (see makeMember0 in ModelFactory, last 3 cases) - * - the members added by implicit conversion are always listed under the implicit conversion, not under the class they - * actually come from (`foo` will be listed as coming from the implicit conversion to `C` instead of `B`) - see - * `definitionName` in MemberImpl - * - * Internals: - * TODO: Give an overview here - */ -trait ModelFactoryImplicitSupport { - thisFactory: ModelFactory with ModelFactoryTypeSupport with CommentFactory with TreeFactory => - - import global._ - import global.analyzer._ - import global.definitions._ - import settings.hardcoded - - // debugging: - val DEBUG: Boolean = settings.docImplicitsDebug.value - val ERROR: Boolean = true // currently we show all errors - @inline final def debug(msg: => String) = if (DEBUG) settings.printMsg(msg) - @inline final def error(msg: => String) = if (ERROR) settings.printMsg(msg) - - /** This is a flag that indicates whether to eliminate implicits that cannot be satisfied within the current scope. - * For example, if an implicit conversion requires that there is a Numeric[T] in scope: - * {{{ - * class A[T] - * class B extends A[Int] - * class C extends A[String] - * implicit def enrichA[T: Numeric](a: A[T]): D - * }}} - * For B, no constraints are generated as Numeric[Int] is already in the default scope. 
On the other hand, for the - * conversion from C to D, depending on -implicits-show-all, the conversion can: - * - not be generated at all, since there's no Numeric[String] in scope (if ran without -implicits-show-all) - * - generated with a *weird* constraint, Numeric[String] as the user might add it by hand (if flag is enabled) - */ - class ImplicitNotFound(tpe: Type) extends Exception("No implicit of type " + tpe + " found in scope.") - - /* ============== MAKER METHODS ============== */ - - /** - * Make the implicit conversion objects - * - * A word about the scope of the implicit conversions: currently we look at a very basic context composed of the - * default Scala imports (Predef._ for example) and the companion object of the current class, if one exists. In the - * future we might want to extend this to more complex scopes. - */ - def makeImplicitConversions(sym: Symbol, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] = - // Nothing and Null are somewhat special -- they can be transformed by any implicit conversion available in scope. - // But we don't want that, so we'll simply refuse to find implicit conversions on for Nothing and Null - if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil - else { - val context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit) - - val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams) - var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl)) - // also keep empty conversions, so they appear in diagrams - // conversions = conversions.filter(!_.members.isEmpty) - - // Filter out specialized conversions from array - if (sym == ArrayClass) - conversions = conversions.filterNot((conv: ImplicitConversionImpl) => - hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName)) - - // Filter out non-sensical conversions from value types - if (isPrimitiveValueType(sym.tpe_*)) - conversions = conversions.filter((ic: ImplicitConversionImpl) => - hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName)) - - // Put the visible conversions in front - val (ownConversions, commonConversions) = - conversions.partition(!_.isHiddenConversion) - - ownConversions ::: commonConversions - } - - /** makeImplicitConversion performs the heavier lifting to get the implicit listing: - * - for each possible conversion function (also called view) - * * figures out the final result of the view (to what is our class transformed?) - * * figures out the necessary constraints on the type parameters (such as T <: Int) and the context (such as Numeric[T]) - * * lists all inherited members - * - * What? in details: - * - say we start from a class A[T1, T2, T3, T4] - * - we have an implicit function (view) in scope: - * def enrichA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): EnrichedA - * - A is converted to EnrichedA ONLY if a couple of constraints are satisfied: - * * T1 must be equal to Int - * * T2 must be equal to Foo[Bar[X]] - * * T3 must be upper bounded by Long - * * there must be evidence of Numeric[T4] and a TypeTag[T4] within scope - * - the final type is EnrichedA and A therefore inherits a couple of members from enrichA - * - * How? 
- * some notes: - * - Scala's type inference will want to solve all type parameters down to actual types, but we only want constraints - * to maintain generality - * - therefore, allViewsFrom wraps type parameters into "untouchable" type variables that only gather constraints, - * but are never solved down to a type - * - these must be reverted back to the type parameters and the constraints must be extracted and simplified (this is - * done by the uniteConstraints and boundedTParamsConstraints. Be sure to check them out - * - we also need to transform implicit parameters in the view's signature into constraints, such that Numeric[T4] - * appears as a constraint - */ - def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] = - if (result.tree == EmptyTree) Nil - else { - // `result` will contain the type of the view (= implicit conversion method) - // the search introduces untouchable type variables, but we want to get back to type parameters - val viewFullType = result.tree.tpe - // set the previously implicit parameters to being explicit - - val (viewSimplifiedType, viewImplicitTypes) = removeImplicitParameters(viewFullType) - - // TODO: Isolate this corner case :) - Predef.<%< and put it in the testsuite - if (viewSimplifiedType.params.length != 1) { - // This is known to be caused by the `<%<` object in Predef: - // {{{ - // sealed abstract class <%<[-From, +To] extends (From => To) with Serializable - // object <%< { - // implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x} - // } - // }}} - // so we just won't generate an implicit conversion for implicit methods that only take implicit parameters - return Nil - } - - // type the view application so we get the exact type of the result (not the formal type) - val viewTree = result.tree.setType(viewSimplifiedType) - val appliedTree = new ApplyImplicitView(viewTree, List(Ident("") setType viewTree.tpe.paramTypes.head)) - val appliedTreeTyped: Tree = { - val newContext = context.makeImplicit(context.ambiguousErrors) - newContext.macrosEnabled = false - val newTyper = global.analyzer.newTyper(newContext) - newTyper.silent(_.typed(appliedTree, EXPRmode, WildcardType), reportAmbiguousErrors = false) match { - - case global.analyzer.SilentResultValue(t: Tree) => t - case global.analyzer.SilentTypeError(err) => - global.reporter.warning(sym.pos, err.toString) - return Nil - } - } - - // now we have the final type: - val toType = wildcardToNothing(typeVarToOriginOrWildcard(appliedTreeTyped.tpe.finalResultType)) - - try { - // Transform bound constraints into scaladoc constraints - val implParamConstraints = makeImplicitConstraints(viewImplicitTypes, sym, context, inTpl) - val boundsConstraints = makeBoundedConstraints(sym.typeParams, constrs, inTpl) - // TODO: no substitution constraints appear in the library and compiler scaladoc. Maybe they can be removed? 
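The enrichA example in the comment above is the crux: the view only applies at a use site where every constraint can be satisfied, and those are exactly the constraints scaladoc renders (or uses to drop the conversion). A small, standalone illustration in plain Scala; the class and method names are made up to mirror the comment, not taken from the library:

{{{
import scala.language.implicitConversions

class A[T1, T2, T3, T4]
class EnrichedA { def extra: Int = 42 }

object Views {
  // Mirrors the shape of the view in the comment: T1/T2 are fixed,
  // T3 is upper-bounded, and evidence of Numeric[T4] is required.
  implicit def enrichA[T3 <: Long, T4: Numeric](a: A[Int, String, T3, T4]): EnrichedA =
    new EnrichedA
}

object ViewDemo extends App {
  import Views._

  val ok = new A[Int, String, Long, Double]
  println(ok.extra) // compiles: Int and String match, Long <: Long, Numeric[Double] is in scope

  // val bad = new A[Int, String, Long, String]
  // bad.extra      // would not compile: no Numeric[String] in the default scope,
  //                // which is exactly the kind of constraint discussed above
}
}}}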
- val substConstraints = makeSubstitutionConstraints(result.subst, inTpl) - val constraints = implParamConstraints ::: boundsConstraints ::: substConstraints - - List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl)) - } catch { - case i: ImplicitNotFound => - //println(" Eliminating: " + toType) - Nil - } - } - - def makeImplicitConstraints(types: List[Type], sym: Symbol, context: Context, inTpl: DocTemplateImpl): List[Constraint] = - types.flatMap((tpe:Type) => { - // TODO: Before creating constraints, map typeVarToOriginOrWildcard on the implicitTypes - val implType = typeVarToOriginOrWildcard(tpe) - val qualifiedName = makeQualifiedName(implType.typeSymbol) - - var available: Option[Boolean] = None - - // see: https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/gm_fr0RKzC4 - // - // println(implType + " => " + implType.isTrivial) - // var tpes: List[Type] = List(implType) - // while (!tpes.isEmpty) { - // val tpe = tpes.head - // tpes = tpes.tail - // tpe match { - // case TypeRef(pre, sym, args) => - // tpes = pre :: args ::: tpes - // println(tpe + " => " + tpe.isTrivial) - // case _ => - // println(tpe + " (of type" + tpe.getClass + ") => " + tpe.isTrivial) - // } - // } - // println("\n") - - // look for type variables in the type. If there are none, we can decide if the implicit is there or not - if (implType.isTrivial) { - try { - context.flushBuffer() /* any errors here should not prevent future findings */ - // TODO: Not sure this is the right thing to do -- seems similar to what scalac should be doing - val context2 = context.make(context.unit, context.tree, sym.owner, context.scope, context.imports) - val search = inferImplicit(EmptyTree, tpe, false, false, context2, false) - context.flushBuffer() /* any errors here should not prevent future findings */ - - available = Some(search.tree != EmptyTree) - } catch { - case _: TypeError => - } - } - - available match { - case Some(true) => - Nil - case Some(false) if (!settings.docImplicitsShowAll.value) => - // if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String]) - throw new ImplicitNotFound(implType) - case _ => - val typeParamNames = sym.typeParams.map(_.name) - - // TODO: This is maybe the worst hack I ever did - it's as dirty as hell, but it seems to work, so until I - // learn more about symbols, it'll have to do. 
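The availability check below asks the same question a user would ask with `implicitly`: can this implicit be summoned in the current scope? A trivial sketch of that distinction (the Numeric instances are the real standard-library ones, everything else is illustrative):

{{{
object AvailabilityDemo extends App {
  // Numeric[Int] is found in the default implicit scope, so a constraint on it
  // is satisfied silently and need not be shown.
  println(implicitly[Numeric[Int]])

  // Numeric[String] is not available; the line below would not compile.
  // Scaladoc either drops such a conversion or, with -implicits-show-all,
  // keeps it and displays the unsatisfied constraint.
  // implicitly[Numeric[String]]
}
}}}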
- implType match { - case TypeRef(pre, sym, List(TypeRef(NoPrefix, targ, Nil))) if (typeParamNames contains targ.name) => - hardcoded.knownTypeClasses.get(qualifiedName) match { - case Some(explanation) => - List(new KnownTypeClassConstraint { - val typeParamName = targ.nameString - lazy val typeExplanation = explanation - lazy val typeClassEntity = makeTemplate(sym) - lazy val implicitType: TypeEntity = makeType(implType, inTpl) - }) - case None => - List(new TypeClassConstraint { - val typeParamName = targ.nameString - lazy val typeClassEntity = makeTemplate(sym) - lazy val implicitType: TypeEntity = makeType(implType, inTpl) - }) - } - case _ => - List(new ImplicitInScopeConstraint{ - lazy val implicitType: TypeEntity = makeType(implType, inTpl) - }) - } - } - }) - - def makeSubstitutionConstraints(subst: TreeTypeSubstituter, inTpl: DocTemplateImpl): List[Constraint] = - (subst.from zip subst.to) map { - case (from, to) => - new EqualTypeParamConstraint { - error("Scaladoc implicits: Unexpected type substitution constraint from: " + from + " to: " + to) - val typeParamName = from.toString - val rhs = makeType(to, inTpl) - } - } - - def makeBoundedConstraints(tparams: List[Symbol], constrs: List[TypeConstraint], inTpl: DocTemplateImpl): List[Constraint] = - (tparams zip constrs) flatMap { - case (tparam, constr) => { - uniteConstraints(constr) match { - case (loBounds, upBounds) => (loBounds filter (_ != NothingClass.tpe), upBounds filter (_ != AnyClass.tpe)) match { - case (Nil, Nil) => - Nil - case (List(lo), List(up)) if (lo == up) => - List(new EqualTypeParamConstraint { - val typeParamName = tparam.nameString - lazy val rhs = makeType(lo, inTpl) - }) - case (List(lo), List(up)) => - List(new BoundedTypeParamConstraint { - val typeParamName = tparam.nameString - lazy val lowerBound = makeType(lo, inTpl) - lazy val upperBound = makeType(up, inTpl) - }) - case (List(lo), Nil) => - List(new LowerBoundedTypeParamConstraint { - val typeParamName = tparam.nameString - lazy val lowerBound = makeType(lo, inTpl) - }) - case (Nil, List(up)) => - List(new UpperBoundedTypeParamConstraint { - val typeParamName = tparam.nameString - lazy val upperBound = makeType(up, inTpl) - }) - case other => - // this is likely an error on the lub/glb side - error("Scaladoc implicits: Error computing lub/glb for: " + (tparam, constr) + ":\n" + other) - Nil - } - } - } - } - - /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */ - - class ImplicitConversionImpl( - val sym: Symbol, - val convSym: Symbol, - val toType: Type, - val constrs: List[Constraint], - inTpl: DocTemplateImpl) - extends ImplicitConversion { - - def source: DocTemplateEntity = inTpl - - def targetType: TypeEntity = makeType(toType, inTpl) - - def convertorOwner: TemplateEntity = - if (convSym != NoSymbol) - makeTemplate(convSym.owner) - else { - error("Scaladoc implicits: " + toString + " = NoSymbol!") - makeRootPackage - } - - def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl) - - def convertorMethod: Either[MemberEntity, String] = { - var convertor: MemberEntity = null - - convertorOwner match { - case doc: DocTemplateImpl => - val convertors = members.collect { case m: MemberImpl if m.sym == convSym => m } - if (convertors.length == 1) - convertor = convertors.head - case _ => - } - if (convertor ne null) - Left(convertor) - else - Right(convSym.nameString) - } - - def conversionShortName = convSym.nameString - - def conversionQualifiedName = makeQualifiedName(convSym) - 
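makeBoundedConstraints below reduces each type parameter's unified bounds to one of a handful of constraint kinds. A simplified, self-contained mirror of that case analysis, with strings standing in for types and hypothetical case classes standing in for the scaladoc constraint entities:

{{{
object BoundConstraintDemo extends App {
  sealed trait Constraint
  case class Equal(tparam: String, rhs: String)              extends Constraint
  case class Bounded(tparam: String, lo: String, hi: String) extends Constraint
  case class LowerBounded(tparam: String, lo: String)        extends Constraint
  case class UpperBounded(tparam: String, hi: String)        extends Constraint

  // Bounds equal to Nothing/Any carry no information and are filtered out first,
  // just like the filters on loBounds/upBounds in the original code.
  def classify(tparam: String, lo: Option[String], hi: Option[String]): List[Constraint] =
    (lo.filter(_ != "Nothing"), hi.filter(_ != "Any")) match {
      case (None, None)                 => Nil
      case (Some(l), Some(h)) if l == h => List(Equal(tparam, l))
      case (Some(l), Some(h))           => List(Bounded(tparam, l, h))
      case (Some(l), None)              => List(LowerBounded(tparam, l))
      case (None, Some(h))              => List(UpperBounded(tparam, h))
    }

  println(classify("T3", Some("Nothing"), Some("Long"))) // List(UpperBounded(T3,Long))
  println(classify("T1", Some("Int"), Some("Int")))      // List(Equal(T1,Int))
  println(classify("T2", Some("Nothing"), Some("Any")))  // List()
}
}}}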
- lazy val constraints: List[Constraint] = constrs - - lazy val memberImpls: List[MemberImpl] = { - // Obtain the members inherited by the implicit conversion - val memberSyms = toType.members.filter(implicitShouldDocument(_)).toList - - // Debugging part :) - debug(sym.nameString + "\n" + "=" * sym.nameString.length()) - debug(" * conversion " + convSym + " from " + sym.tpe + " to " + toType) - - debug(" -> full type: " + toType) - if (constraints.length != 0) { - debug(" -> constraints: ") - constraints foreach { constr => debug(" - " + constr) } - } - debug(" -> members:") - memberSyms foreach (sym => debug(" - "+ sym.decodedName +" : " + sym.info)) - debug("") - - memberSyms.flatMap({ aSym => - // we can't just pick up nodes from the original template, although that would be very convenient: - // they need the byConversion field to be attached to themselves and the types to be transformed by - // asSeenFrom - - // at the same time, the member itself is in the inTpl, not in the new template -- but should pick up - // variables from the old template. Ugly huh? We'll always create the member inTpl, but it will change - // the template when expanding variables in the comment :) - makeMember(aSym, Some(this), inTpl) - }) - } - - lazy val members: List[MemberEntity] = memberImpls - - def isHiddenConversion = settings.hiddenImplicits(conversionQualifiedName) - - override def toString = "Implcit conversion from " + sym.tpe + " to " + toType + " done by " + convSym - } - - /* ========================= HELPER METHODS ========================== */ - /** - * Computes the shadowing table for all the members in the implicit conversions - * @param members All template's members, including usecases and full signature members - * @param convs All the conversions the template takes part in - * @param inTpl the usual :) - */ - def makeShadowingTable(members: List[MemberImpl], - convs: List[ImplicitConversionImpl], - inTpl: DocTemplateImpl): Map[MemberEntity, ImplicitMemberShadowing] = { - assert(modelFinished) - - val shadowingTable = mutable.Map[MemberEntity, ImplicitMemberShadowing]() - val membersByName: Map[Name, List[MemberImpl]] = members.groupBy(_.sym.name) - val convsByMember = (Map.empty[MemberImpl, ImplicitConversionImpl] /: convs) { - case (map, conv) => map ++ conv.memberImpls.map (_ -> conv) - } - - for (conv <- convs) { - val otherConvMembers: Map[Name, List[MemberImpl]] = convs filterNot (_ == conv) flatMap (_.memberImpls) groupBy (_.sym.name) - - for (member <- conv.memberImpls) { - val sym1 = member.sym - val tpe1 = conv.toType.memberInfo(sym1) - - // check if it's shadowed by a member in the original class. - val shadowed = membersByName.get(sym1.name).toList.flatten filter { other => - !settings.docImplicitsSoundShadowing.value || !isDistinguishableFrom(tpe1, inTpl.sym.info.memberInfo(other.sym)) - } - - // check if it's shadowed by another conversion. 
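The shadowing recorded below is observable in plain Scala: a member added by an implicit conversion never wins against a member the class already defines, so scaladoc flags it as shadowed instead of advertising it as usable. A minimal illustration (all class and method names are invented for the example):

{{{
import scala.language.implicitConversions

class Base    { def describe: String = "from Base" }
class Wrapper { def describe: String = "from Wrapper" }

object Enrich {
  implicit def baseToWrapper(b: Base): Wrapper = new Wrapper
}

object ShadowDemo extends App {
  import Enrich._
  // The direct member wins; Wrapper.describe is unreachable through the
  // conversion, i.e. it is "shadowed" in the sense recorded by the table below.
  println((new Base).describe) // prints "from Base"
}
}}}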
- val ambiguous = otherConvMembers.get(sym1.name).toList.flatten filter { other => - val tpe2 = convsByMember(other).toType.memberInfo(other.sym) - !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1) - } - - // we finally have the shadowing info - if (!shadowed.isEmpty || !ambiguous.isEmpty) { - val shadowing = new ImplicitMemberShadowing { - def shadowingMembers: List[MemberEntity] = shadowed - def ambiguatingMembers: List[MemberEntity] = ambiguous - } - - shadowingTable += (member -> shadowing) - } - } - } - - shadowingTable.toMap - } - - - /** - * uniteConstraints takes a TypeConstraint instance and simplifies the constraints inside - * - * Normally TypeConstraint contains multiple lower and upper bounds, and we want to reduce this to a lower and an - * upper bound. Here are a couple of catches we need to be aware of: - * - before finding a view (implicit method in scope that maps class A[T1,T2,.. Tn] to something else) the type - * parameters are transformed into "untouchable" type variables so that type inference does not attempt to - * fully solve them down to a type but rather constrains them on both sides just enough for the view to be - * applicable -- now, we want to transform those type variables back to the original type parameters - * - some of the bounds fail type inference and therefore refer to Nothing => when performing unification (lub, glb) - * they start looking ugly => we (unsoundly) transform Nothing to WildcardType so we fool the unification algorithms - * into thinking there's nothing there - * - we don't want the wildcard types surviving the unification so we replace them back to Nothings - */ - def uniteConstraints(constr: TypeConstraint): (List[Type], List[Type]) = - try { - (List(wildcardToNothing(lub(constr.loBounds map typeVarToOriginOrWildcard))), - List(wildcardToNothing(glb(constr.hiBounds map typeVarToOriginOrWildcard)))) - } catch { - // does this actually ever happen? 
(probably when type vars occur in the bounds) - case x: Throwable => (constr.loBounds.distinct, constr.hiBounds.distinct) - } - - /** - * Make implicits explicit - Not used curently - */ - // object implicitToExplicit extends TypeMap { - // def apply(tp: Type): Type = mapOver(tp) match { - // case MethodType(params, resultType) => - // MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType) - // case other => - // other - // } - // } - - /** - * removeImplicitParameters transforms implicit parameters from the view result type into constraints and - * returns the simplified type of the view - * - * for the example view: - * implicit def enrichMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T] - * the implicit view result type is: - * (a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T] - * and the simplified type will be: - * MyClass[T] => EnrichedMyClass[T] - */ - def removeImplicitParameters(viewType: Type): (Type, List[Type]) = { - - val params = viewType.paramss.flatten - val (normalParams, implParams) = params.partition(!_.isImplicit) - val simplifiedType = MethodType(normalParams, viewType.finalResultType) - val implicitTypes = implParams.map(_.tpe) - - (simplifiedType, implicitTypes) - } - - /** - * typeVarsToOriginOrWildcard transforms the "untouchable" type variables into either their origins (the original - * type parameters) or into wildcard types if nothing matches - */ - object typeVarToOriginOrWildcard extends TypeMap { - def apply(tp: Type): Type = mapOver(tp) match { - case tv: TypeVar => - if (tv.constr.inst.typeSymbol == NothingClass) - WildcardType - else - tv.origin //appliedType(tv.origin.typeConstructor, tv.typeArgs map this) - case other => - if (other.typeSymbol == NothingClass) - WildcardType - else - other - } - } - - /** - * wildcardToNothing transforms wildcard types back to Nothing - */ - object wildcardToNothing extends TypeMap { - def apply(tp: Type): Type = mapOver(tp) match { - case WildcardType => - NothingClass.tpe - case other => - other - } - } - - /** implicitShouldDocument decides whether a member inherited by implicit conversion should be documented */ - def implicitShouldDocument(aSym: Symbol): Boolean = { - // We shouldn't document: - // - constructors - // - common methods (in Any, AnyRef, Object) as they are automatically removed - // - private and protected members (not accessible following an implicit conversion) - // - members starting with _ (usually reserved for internal stuff) - localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != AnyValClass) && - (aSym.owner != AnyClass) && (aSym.owner != ObjectClass) && - (!aSym.isProtected) && (!aSym.isPrivate) && (!aSym.name.startsWith("_")) && - (aSym.isMethod || aSym.isGetter || aSym.isSetter) && - (aSym.nameString != "getClass") - } - - /* To put it very bluntly: checks if you can call implicitly added method with t1 when t2 is already there in the - * class. 
We suppose the name of the two members coincides - * - * The trick here is that the resultType does not matter - the condition for removal it that paramss have the same - * structure (A => B => C may not override (A, B) => C) and that all the types involved are - * of the implcit conversion's member are subtypes of the parent members' parameters */ - def isDistinguishableFrom(t1: Type, t2: Type): Boolean = { - // Vlad: I tried using matches but it's not exactly what we need: - // (p: AnyRef)AnyRef matches ((t: String)AnyRef returns false -- but we want that to be true - // !(t1 matches t2) - if (t1.paramss.map(_.length) == t2.paramss.map(_.length)) { - for ((t1p, t2p) <- t1.paramss.flatten zip t2.paramss.flatten) - if (!isSubType(t1 memberInfo t1p, t2 memberInfo t2p)) - return true // if on the corresponding parameter you give a type that is in t1 but not in t2 - // def foo(a: Either[Int, Double]): Int = 3 - // def foo(b: Left[T1]): Int = 6 - // a.foo(Right(4.5d)) prints out 3 :) - false - } else true // the member structure is different foo(3, 5) vs foo(3)(5) - } -} diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala deleted file mode 100644 index 99e9059d79..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala +++ /dev/null @@ -1,315 +0,0 @@ -/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */ - -package scala.tools.nsc -package doc -package model - -import base._ -import diagram._ - -import scala.collection._ - -/** This trait extracts all required information for documentation from compilation units */ -trait ModelFactoryTypeSupport { - thisFactory: ModelFactory - with ModelFactoryImplicitSupport - with ModelFactoryTypeSupport - with DiagramFactory - with CommentFactory - with TreeFactory - with MemberLookup => - - import global._ - import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass } - - protected val typeCache = new mutable.LinkedHashMap[Type, TypeEntity] - - /** */ - def makeType(aType: Type, inTpl: TemplateImpl): TypeEntity = { - def createTypeEntity = new TypeEntity { - private var nameBuffer = new StringBuilder - private var refBuffer = new immutable.TreeMap[Int, (LinkTo, Int)] - private def appendTypes0(types: List[Type], sep: String): Unit = types match { - case Nil => - case tp :: Nil => - appendType0(tp) - case tp :: tps => - appendType0(tp) - nameBuffer append sep - appendTypes0(tps, sep) - } - - private def appendType0(tpe: Type): Unit = tpe match { - /* Type refs */ - case tp: TypeRef if definitions.isFunctionType(tp) => - val args = tp.normalize.typeArgs - nameBuffer append '(' - appendTypes0(args.init, ", ") - nameBuffer append ") ⇒ " - appendType0(args.last) - case tp: TypeRef if definitions.isScalaRepeatedParamType(tp) => - appendType0(tp.args.head) - nameBuffer append '*' - case tp: TypeRef if definitions.isByNameParamType(tp) => - nameBuffer append "⇒ " - appendType0(tp.args.head) - case tp: TypeRef if definitions.isTupleType(tp) => - val args = tp.normalize.typeArgs - nameBuffer append '(' - appendTypes0(args, ", ") - nameBuffer append ')' - case TypeRef(pre, aSym, targs) => - val preSym = pre.widen.typeSymbol - - // SI-3314/SI-4888: Classes, Traits and Types can be inherited from a template to another: - // class Enum { abstract class Value } - // class Day extends Enum { object Mon extends Value /*...*/ } - // ===> in such cases we have two options: - // (0) if there's no inheritance 
taking place (Enum#Value) we can link to the template directly - // (1) if we generate the doc template for Day, we can link to the correct member - // (2) If the symbol comes from an external library for which we know the documentation URL, point to it. - // (3) if we don't generate the doc template, we should at least indicate the correct prefix in the tooltip - val bSym = normalizeTemplate(aSym) - val owner = - if ((preSym != NoSymbol) && /* it needs a prefix */ - (preSym != bSym.owner) && /* prefix is different from owner */ - (aSym == bSym)) /* normalization doesn't play tricks on us */ - preSym - else - bSym.owner - - val link = - findTemplateMaybe(bSym) match { - case Some(bTpl) if owner == bSym.owner => - // (0) the owner's class is linked AND has a template - lovely - bTpl match { - case dtpl: DocTemplateEntity => new LinkToTpl(dtpl) - case _ => new Tooltip(bTpl.qualifiedName) - } - case _ => - val oTpl = findTemplateMaybe(owner) - (oTpl, oTpl flatMap (findMember(bSym, _))) match { - case (Some(oTpl), Some(bMbr)) => - // (1) the owner's class - LinkToMember(bMbr, oTpl) - case _ => - val name = makeQualifiedName(bSym) - if (!bSym.owner.isPackage) - Tooltip(name) - else - findExternalLink(bSym, name).getOrElse ( - // (3) if we couldn't find neither the owner nor external URL to link to, show a tooltip with the qualified name - Tooltip(name) - ) - } - } - - // SI-4360 Showing prefixes when necessary - // We check whether there's any directly accessible type with the same name in the current template OR if the - // type is inherited from one template to another. There may be multiple symbols with the same name in scope, - // but we won't show the prefix if our symbol is among them, only if *it's not* -- that's equal to showing - // the prefix only for ambiguous references, not for overloaded ones. - def needsPrefix: Boolean = { - if ((owner != bSym.owner || preSym.isRefinementClass) && (normalizeTemplate(owner) != inTpl.sym)) - return true - // don't get tricked into prefixng method type params and existentials: - // I tried several tricks BUT adding the method for which I'm creating the type => that simply won't scale, - // as ValueParams are independent of their parent member, and I really don't want to add this information to - // all terms, as we're already over the allowed memory footprint - if (aSym.isTypeParameterOrSkolem || aSym.isExistentiallyBound /* existential or existential skolem */) - return false - - for (tpl <- inTpl.sym.ownerChain) { - tpl.info.member(bSym.name) match { - case NoSymbol => - // No syms with that name, look further inside the owner chain - case sym => - // Symbol found -- either the correct symbol, another one OR an overloaded alternative - if (sym == bSym) - return false - else sym.info match { - case OverloadedType(owner, alternatives) => - return alternatives.contains(bSym) - case _ => - return true - } - } - } - // if it's not found in the owner chain, we can safely leave out the prefix - false - } - - val prefix = - if (!settings.docNoPrefixes.value && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) { - if (!owner.isRefinementClass) { - val qName = makeQualifiedName(owner, Some(inTpl.sym)) - if (qName != "") qName + "." 
else "" - } - else { - nameBuffer append "(" - appendType0(pre) - nameBuffer append ")#" - "" // we already appended the prefix - } - } else "" - - //DEBUGGING: - //if (makeQualifiedName(bSym) == "pack1.A") println("needsPrefix(" + bSym + ", " + owner + ", " + inTpl.qualifiedName + ") => " + needsPrefix + " and prefix=" + prefix) - - val name = prefix + bSym.nameString - val pos0 = nameBuffer.length - refBuffer += pos0 -> ((link, name.length)) - nameBuffer append name - - if (!targs.isEmpty) { - nameBuffer append '[' - appendTypes0(targs, ", ") - nameBuffer append ']' - } - /* Refined types */ - case RefinedType(parents, defs) => - val ignoreParents = Set[Symbol](AnyClass, ObjectClass) - val filtParents = parents filterNot (x => ignoreParents(x.typeSymbol)) match { - case Nil => parents - case ps => ps - } - appendTypes0(filtParents, " with ") - // XXX Still todo: properly printing refinements. - // Since I didn't know how to go about displaying a multi-line type, I went with - // printing single method refinements (which should be the most common) and printing - // the number of members if there are more. - defs.toList match { - case Nil => () - case x :: Nil => nameBuffer append (" { " + x.defString + " }") - case xs => nameBuffer append (" { ... /* %d definitions in type refinement */ }" format xs.size) - } - /* Eval-by-name types */ - case NullaryMethodType(result) => - nameBuffer append '⇒' - appendType0(result) - - /* Polymorphic types */ - case PolyType(tparams, result) => assert(tparams.nonEmpty) - def typeParamsToString(tps: List[Symbol]): String = if (tps.isEmpty) "" else - tps.map{tparam => - tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams) - }.mkString("[", ", ", "]") - nameBuffer append typeParamsToString(tparams) - appendType0(result) - - case et@ExistentialType(quantified, underlying) => - - def appendInfoStringReduced(sym: Symbol, tp: Type): Unit = { - if (sym.isType && !sym.isAliasType && !sym.isClass) { - tp match { - case PolyType(tparams, _) => - nameBuffer append "[" - appendTypes0(tparams.map(_.tpe), ", ") - nameBuffer append "]" - case _ => - } - tp.resultType match { - case rt @ TypeBounds(_, _) => - appendType0(rt) - case rt => - nameBuffer append " <: " - appendType0(rt) - } - } else { - // fallback to the Symbol infoString - nameBuffer append sym.infoString(tp) - } - } - - def appendClauses = { - nameBuffer append " forSome {" - var first = true - for (sym <- quantified) { - if (!first) { nameBuffer append ", " } else first = false - if (sym.isSingletonExistential) { - nameBuffer append "val " - nameBuffer append tpnme.dropSingletonName(sym.name) - nameBuffer append ": " - appendType0(dropSingletonType(sym.info.bounds.hi)) - } else { - if (sym.flagString != "") nameBuffer append (sym.flagString + " ") - if (sym.keyString != "") nameBuffer append (sym.keyString + " ") - nameBuffer append sym.varianceString - nameBuffer append sym.nameString - appendInfoStringReduced(sym, sym.info) - } - } - nameBuffer append "}" - } - - underlying match { - case TypeRef(pre, sym, args) if et.isRepresentableWithWildcards => - appendType0(typeRef(pre, sym, Nil)) - nameBuffer append "[" - var first = true - val qset = quantified.toSet - for (arg <- args) { - if (!first) { nameBuffer append ", " } else first = false - arg match { - case TypeRef(_, sym, _) if (qset contains sym) => - nameBuffer append "_" - appendInfoStringReduced(sym, sym.info) - case arg => - appendType0(arg) - } - } - nameBuffer append "]" - case MethodType(_, _) | NullaryMethodType(_) | 
PolyType(_, _) => - nameBuffer append "(" - appendType0(underlying) - nameBuffer append ")" - appendClauses - case _ => - appendType0(underlying) - appendClauses - } - - case tb@TypeBounds(lo, hi) => - if (tb.lo != TypeBounds.empty.lo) { - nameBuffer append " >: " - appendType0(lo) - } - if (tb.hi != TypeBounds.empty.hi) { - nameBuffer append " <: " - appendType0(hi) - } - // case tpen: ThisType | SingleType | SuperType => - // if (tpen.isInstanceOf[ThisType] && tpen.asInstanceOf[ThisType].sym.isEffectiveRoot) { - // appendType0 typeRef(NoPrefix, sym, Nil) - // } else { - // val underlying = - // val pre = underlying.typeSymbol.skipPackageObject - // if (pre.isOmittablePrefix) pre.fullName + ".type" - // else prefixString + "type" - case tpen@ThisType(sym) => - appendType0(typeRef(NoPrefix, sym, Nil)) - nameBuffer append ".this" - if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type" - case tpen@SuperType(thistpe, supertpe) => - nameBuffer append "super[" - appendType0(supertpe) - nameBuffer append "]" - case tpen@SingleType(pre, sym) => - appendType0(typeRef(pre, sym, Nil)) - if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type" - case tpen => - nameBuffer append tpen.toString - } - appendType0(aType) - val refEntity = refBuffer - val name = optimize(nameBuffer.toString) - nameBuffer = null - } - - // SI-4360: Entity caching depends on both the type AND the template it's in, as the prefixes might change for the - // same type based on the template the type is shown in. - if (settings.docNoPrefixes.value) - typeCache.getOrElseUpdate(aType, createTypeEntity) - else createTypeEntity - } -} diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala b/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala deleted file mode 100644 index 5b4ec4a40b..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Chris James - */ - -package scala.tools.nsc -package doc -package model - -import scala.collection._ - - -/** A fragment of code. */ -abstract class TreeEntity { - - /** The human-readable representation of this abstract syntax tree. */ - def expression: String - - /** Maps which parts of this syntax tree's name reference entities. The map is indexed by the position of the first - * character that reference some entity, and contains the entity and the position of the last referenced - * character. The referenced character ranges do not to overlap or nest. The map is sorted by position. */ - def refEntity: SortedMap[Int, (Entity, Int)] - - /** The human-readable representation of this abstract syntax tree. 
*/ - override def toString = expression - -} diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala deleted file mode 100755 index b972649194..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala +++ /dev/null @@ -1,96 +0,0 @@ -package scala.tools.nsc -package doc -package model - -import scala.collection._ -import scala.reflect.internal.util.{RangePosition, OffsetPosition, SourceFile} - -/** The goal of this trait is , using makeTree, - * to browse a tree to - * 1- have the String of the complete tree (tree.expression) - * 2- fill references to create hyperLinks later in html.pageTemplate - * - * It is applied in ModelFactory => makeTree - * - */ - -trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory => - - val global: Global - import global._ - - def makeTree(rhs: Tree): Option[TreeEntity] = { - - val expr = new StringBuilder - var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end) - - rhs.pos match { - case pos: RangePosition => { - val source: SourceFile = pos.source - val firstIndex = pos.startOrPoint - val lastIndex = pos.endOrPoint - - assert(firstIndex < lastIndex, "Invalid position indices for tree " + rhs + " (" + firstIndex + ", " + lastIndex + ")") - expr.appendAll(source.content, firstIndex, lastIndex - firstIndex) - - val traverser = new Traverser { - - /** Finds the Entity on which we will later create a link on, - * stores it in tree.refs with its position - */ - def makeLink(rhs: Tree){ - val start = pos.startOrPoint - firstIndex - val end = pos.endOrPoint - firstIndex - if(start != end) { - var asym = rhs.symbol - if (asym.isClass) makeTemplate(asym) match{ - case docTmpl: DocTemplateImpl => - refs += ((start, (docTmpl,end))) - case _ => - } - else if (asym.isTerm && asym.owner.isClass){ - if (asym.isSetter) asym = asym.getter(asym.owner) - makeTemplate(asym.owner) match { - case docTmpl: DocTemplateImpl => - val mbrs: Option[MemberImpl] = findMember(asym, docTmpl) - mbrs foreach { mbr => refs += ((start, (mbr,end))) } - case _ => - } - } - } - } - /** - * Goes through the tree and makes links when a Select occurs, - * The case of New(_) is ignored because the object we want to create a link on - * will be reached with recursivity and we don't want a link on the "new" string - * If a link is not created, its case is probably not defined in here - */ - override def traverse(tree: Tree) = tree match { - case Select(qualifier, name) => - qualifier match { - case New(_) => - case _ => makeLink(tree) - } - traverse(qualifier) - case Ident(_) => makeLink(tree) - case _ => - super.traverse(tree) - } - } - - traverser.traverse(rhs) - - Some(new TreeEntity { - val expression = expr.toString - val refEntity = refs - }) - } - case pos: OffsetPosition => - Some(new TreeEntity { - val expression = rhs.toString - val refEntity = new immutable.TreeMap[Int, (Entity, Int)] - }) - case _ => None - } - } -} diff --git a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala b/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala deleted file mode 100644 index cf5c1fb3fb..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Manohar Jonnalagedda - */ - -package scala.tools.nsc -package doc -package model - -import scala.collection._ - -/** A type. 
Note that types and templates contain the same information only for the simplest types. For example, a type - * defines how a template's type parameters are instantiated (as in `List[Cow]`), what the template's prefix is - * (as in `johnsFarm.Cow`), and supports compound or structural types. */ -abstract class TypeEntity { - - /** The human-readable representation of this type. */ - def name: String - - /** Maps which parts of this type's name reference entities. The map is indexed by the position of the first - * character that reference some entity, and contains the entity and the position of the last referenced - * character. The referenced character ranges do not to overlap or nest. The map is sorted by position. */ - def refEntity: SortedMap[Int, (base.LinkTo, Int)] - - /** The human-readable representation of this type. */ - override def toString = name -} diff --git a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala b/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala deleted file mode 100644 index f712869a4b..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Gilles Dubochet - */ - -package scala.tools.nsc -package doc -package model - - -/** A value that is passed as an argument to a value parameter. */ -trait ValueArgument { - - /** The parameter as argument to which this value is passed, if it is known. */ - def parameter: Option[ValueParam] - - /** The expression that calculates the value. */ - def value: TreeEntity - -} diff --git a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala b/src/compiler/scala/tools/nsc/doc/model/Visibility.scala deleted file mode 100644 index 22580805aa..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Gilles Dubochet - */ - -package scala.tools.nsc -package doc -package model - -/** An type that represents visibility of members. */ -sealed trait Visibility { - def isProtected: Boolean = false - def isPublic: Boolean = false -} - -/** The visibility of `private[this]` members. */ -case class PrivateInInstance() extends Visibility - -/** The visibility of `protected[this]` members. */ -case class ProtectedInInstance() extends Visibility { - override def isProtected = true -} - -/** The visibility of `private[owner]` members. An unqualified private members - * is encoded with `owner` equal to the members's `inTemplate`. */ -case class PrivateInTemplate(owner: TemplateEntity) extends Visibility - -/** The visibility of `protected[owner]` members. An unqualified protected - * members is encoded with `owner` equal to the members's `inTemplate`. - * Note that whilst the member is visible in any template owned by `owner`, - * it is only visible in subclasses of the member's `inTemplate`. */ -case class ProtectedInTemplate(owner: TemplateEntity) extends Visibility { - override def isProtected = true -} - -/** The visibility of public members. 
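A minimal sketch (the `render` helper is hypothetical and not part of the scaladoc model) of how this `Visibility` hierarchy, together with the `Public` case declared just below, maps back to Scala modifier syntax; `qualifiedName` is the existing `TemplateEntity` member:

{{{
// Hypothetical pretty-printer for the Visibility ADT shown above.
// Unqualified private/protected members are encoded with owner == inTemplate,
// so they render here with an explicit qualifier.
def render(v: Visibility): String = v match {
  case PrivateInInstance()        => "private[this]"
  case ProtectedInInstance()      => "protected[this]"
  case PrivateInTemplate(owner)   => "private[" + owner.qualifiedName + "]"
  case ProtectedInTemplate(owner) => "protected[" + owner.qualifiedName + "]"
  case Public()                   => ""
}
}}}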
*/ -case class Public() extends Visibility { - override def isPublic = true -} diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala deleted file mode 100644 index 150b293b81..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala +++ /dev/null @@ -1,137 +0,0 @@ -package scala.tools.nsc.doc -package model -package diagram - -import model._ - -/** - * The diagram base classes - * - * @author Damien Obrist - * @author Vlad Ureche - */ -abstract class Diagram { - def nodes: List[Node] - def edges: List[(Node, List[Node])] - def isContentDiagram = false // Implemented by ContentDiagram - def isInheritanceDiagram = false // Implemented by InheritanceDiagram - def depthInfo: DepthInfo -} - -case class ContentDiagram(nodes:List[/*Class*/Node], edges:List[(Node, List[Node])]) extends Diagram { - override def isContentDiagram = true - lazy val depthInfo = new ContentDiagramDepth(this) -} - -/** A class diagram */ -case class InheritanceDiagram(thisNode: ThisNode, - superClasses: List[/*Class*/Node], - subClasses: List[/*Class*/Node], - incomingImplicits: List[ImplicitNode], - outgoingImplicits: List[ImplicitNode]) extends Diagram { - def nodes = thisNode :: superClasses ::: subClasses ::: incomingImplicits ::: outgoingImplicits - def edges = (thisNode -> (superClasses ::: outgoingImplicits)) :: - (subClasses ::: incomingImplicits).map(_ -> List(thisNode)) - - override def isInheritanceDiagram = true - lazy val depthInfo = new DepthInfo { - def maxDepth = 3 - } -} - -trait DepthInfo { - /** Gives the maximum depth */ - def maxDepth: Int -} - -abstract class Node { - def name = tpe.name - def tpe: TypeEntity - def tpl: Option[TemplateEntity] - /** shortcut to get a DocTemplateEntity */ - def doctpl: Option[DocTemplateEntity] = tpl match { - case Some(tpl) => tpl match { - case d: DocTemplateEntity => Some(d) - case _ => None - } - case _ => None - } - /* shortcuts to find the node type without matching */ - def isThisNode = false - def isNormalNode = false - def isClassNode = if (tpl.isDefined) (tpl.get.isClass || tpl.get.qualifiedName == "scala.AnyRef") else false - def isTraitNode = if (tpl.isDefined) tpl.get.isTrait else false - def isObjectNode= if (tpl.isDefined) tpl.get.isObject else false - def isTypeNode = if (doctpl.isDefined) doctpl.get.isAbstractType || doctpl.get.isAliasType else false - def isOtherNode = !(isClassNode || isTraitNode || isObjectNode || isTypeNode) - def isImplicitNode = false - def isOutsideNode = false - def tooltip: Option[String] -} - -// different matchers, allowing you to use the pattern matcher against any node -// NOTE: A ThisNode or ImplicitNode can at the same time be ClassNode/TraitNode/OtherNode, not exactly according to -// case class specification -- thus a complete match would be: -// node match { -// case ThisNode(tpe, _) => /* case for this node, you can still use .isClass, .isTrait and .isOther */ -// case ImplicitNode(tpe, _) => /* case for an implicit node, you can still use .isClass, .isTrait and .isOther */ -// case _ => node match { -// case ClassNode(tpe, _) => /* case for a non-this, non-implicit Class node */ -// case TraitNode(tpe, _) => /* case for a non-this, non-implicit Trait node */ -// case OtherNode(tpe, _) => /* case for a non-this, non-implicit Other node */ -// } -// } -object Node { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = Some((n.tpe, n.tpl)) } -object ClassNode { def unapply(n: Node): 
Option[(TypeEntity, Option[TemplateEntity])] = if (n.isClassNode) Some((n.tpe, n.tpl)) else None } -object TraitNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTraitNode) Some((n.tpe, n.tpl)) else None } -object TypeNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTypeNode) Some((n.tpe, n.tpl)) else None } -object ObjectNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isObjectNode) Some((n.tpe, n.tpl)) else None } -object OutsideNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOutsideNode) Some((n.tpe, n.tpl)) else None } -object OtherNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOtherNode) Some((n.tpe, n.tpl)) else None } - - - -/** The node for the current class */ -case class ThisNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isThisNode = true } - -/** The usual node */ -case class NormalNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isNormalNode = true } - -/** A class or trait the thisnode can be converted to by an implicit conversion - * TODO: I think it makes more sense to use the tpe links to templates instead of the TemplateEntity for implicit nodes - * since some implicit conversions convert the class to complex types that cannot be represented as a single tmeplate - */ -case class ImplicitNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isImplicitNode = true } - -/** An outside node is shown in packages when a class from a different package makes it to the package diagram due to - * its relation to a class in the template (see @contentDiagram hideInheritedNodes annotation) */ -case class OutsideNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isOutsideNode = true } - - -// Computing and offering node depth information -class ContentDiagramDepth(pack: ContentDiagram) extends DepthInfo { - private[this] var _maxDepth = 0 - private[this] var _nodeDepth = Map[Node, Int]() - private[this] var seedNodes = Set[Node]() - private[this] val invertedEdges: Map[Node, List[Node]] = - pack.edges.flatMap({case (node: Node, outgoing: List[Node]) => outgoing.map((_, node))}).groupBy(_._1).map({case (k, values) => (k, values.map(_._2))}).withDefaultValue(Nil) - private[this] val directEdges: Map[Node, List[Node]] = pack.edges.toMap.withDefaultValue(Nil) - - // seed base nodes, to minimize noise - they can't all have parents, else there would only be cycles - seedNodes ++= pack.nodes.filter(directEdges(_).isEmpty) - - while (!seedNodes.isEmpty) { - var newSeedNodes = Set[Node]() - for (node <- seedNodes) { - val depth = 1 + (-1 :: directEdges(node).map(_nodeDepth.getOrElse(_, -1))).max - if (depth != _nodeDepth.getOrElse(node, -1)) { - _nodeDepth += (node -> depth) - newSeedNodes ++= invertedEdges(node) - if (depth > _maxDepth) _maxDepth = depth - } - } - seedNodes = newSeedNodes - } - - val maxDepth = _maxDepth -} diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala deleted file mode 100644 index 6395446d3b..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala +++ /dev/null @@ -1,257 +0,0 @@ -package 
scala.tools.nsc.doc -package model -package diagram - -import model._ -import java.util.regex.{Pattern, Matcher} -import scala.util.matching.Regex - -/** - * This trait takes care of parsing @{inheritance, content}Diagram annotations - * - * @author Damien Obrist - * @author Vlad Ureche - */ -trait DiagramDirectiveParser { - this: ModelFactory with DiagramFactory with CommentFactory with TreeFactory => - - import this.global.definitions.AnyRefClass - - ///// DIAGRAM FILTERS ////////////////////////////////////////////////////////////////////////////////////////////// - - /** - * The DiagramFilter trait directs the diagram engine about the way the diagram should be displayed - * - * Vlad: There's an explanation I owe to people using diagrams and not finding a way to hide a specific class from - * all diagrams at once. So why did I choose to allow you to only control the diagrams at class level? So, the - * reason is you would break the separate scaladoc compilation: - * If you have an "@diagram hideMyClass" annotation in class A and you run scaladoc on it along with its subclass B - * A will not appear in B's diagram. But if you scaladoc only on B, A's comment will not be parsed and the - * instructions to hide class A from all diagrams will not be available. Thus I prefer to force you to control the - * diagrams of each class locally. The problem does not appear with scalac, as scalac stores all its necessary - * information (like scala signatures) serialized in the .class file. But we couldn't store doc comments in the class - * file, could we? (Turns out we could, but that's another story) - * - * Any flaming for this decision should go to scala-internals@googlegroups.com - */ - trait DiagramFilter { - /** A flag to hide the diagram completely */ - def hideDiagram: Boolean - /** Hide incoming implicit conversions (for type hierarchy diagrams) */ - def hideIncomingImplicits: Boolean - /** Hide outgoing implicit conversions (for type hierarchy diagrams) */ - def hideOutgoingImplicits: Boolean - /** Hide superclasses (for type hierarchy diagrams) */ - def hideSuperclasses: Boolean - /** Hide subclasses (for type hierarchy diagrams) */ - def hideSubclasses: Boolean - /** Show related classes from other objects/traits/packages (for content diagrams) */ - def hideInheritedNodes: Boolean - /** Hide a node from the diagram */ - def hideNode(clazz: Node): Boolean - /** Hide an edge from the diagram */ - def hideEdge(clazz1: Node, clazz2: Node): Boolean - } - - /** Main entry point into this trait: generate the filter for inheritance diagrams */ - def makeInheritanceDiagramFilter(template: DocTemplateImpl): DiagramFilter = { - - val defaultFilter = - if (template.isClass || template.isTrait || template.sym == AnyRefClass) - FullDiagram - else - NoDiagramAtAll - - if (template.comment.isDefined) - makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, isInheritanceDiagram = true) - else - defaultFilter - } - - /** Main entry point into this trait: generate the filter for content diagrams */ - def makeContentDiagramFilter(template: DocTemplateImpl): DiagramFilter = { - val defaultFilter = if (template.isPackage || template.isObject) FullDiagram else NoDiagramAtAll - if (template.comment.isDefined) - makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, isInheritanceDiagram = false) - else - defaultFilter - } - - protected var tFilter = 0l - protected var tModel = 0l - - /** Show the entire diagram, no filtering */ - case object FullDiagram extends 
DiagramFilter { - val hideDiagram: Boolean = false - val hideIncomingImplicits: Boolean = false - val hideOutgoingImplicits: Boolean = false - val hideSuperclasses: Boolean = false - val hideSubclasses: Boolean = false - val hideInheritedNodes: Boolean = false - def hideNode(clazz: Node): Boolean = false - def hideEdge(clazz1: Node, clazz2: Node): Boolean = false - } - - /** Hide the diagram completely, no need for special filtering */ - case object NoDiagramAtAll extends DiagramFilter { - val hideDiagram: Boolean = true - val hideIncomingImplicits: Boolean = true - val hideOutgoingImplicits: Boolean = true - val hideSuperclasses: Boolean = true - val hideSubclasses: Boolean = true - val hideInheritedNodes: Boolean = true - def hideNode(clazz: Node): Boolean = true - def hideEdge(clazz1: Node, clazz2: Node): Boolean = true - } - - /** The AnnotationDiagramFilter trait directs the diagram engine according to an annotation - * TODO: Should document the annotation, for now see parseDiagramAnnotation in ModelFactory.scala */ - case class AnnotationDiagramFilter(hideDiagram: Boolean, - hideIncomingImplicits: Boolean, - hideOutgoingImplicits: Boolean, - hideSuperclasses: Boolean, - hideSubclasses: Boolean, - hideInheritedNodes: Boolean, - hideNodesFilter: List[Pattern], - hideEdgesFilter: List[(Pattern, Pattern)]) extends DiagramFilter { - - private[this] def getName(n: Node): String = - if (n.tpl.isDefined) - n.tpl.get.qualifiedName - else - n.name - - def hideNode(clazz: Node): Boolean = { - val qualifiedName = getName(clazz) - for (hideFilter <- hideNodesFilter) - if (hideFilter.matcher(qualifiedName).matches) { - // println(hideFilter + ".matcher(" + qualifiedName + ").matches = " + hideFilter.matcher(qualifiedName).matches) - return true - } - false - } - - def hideEdge(clazz1: Node, clazz2: Node): Boolean = { - val clazz1Name = getName(clazz1) - val clazz2Name = getName(clazz2) - for ((clazz1Filter, clazz2Filter) <- hideEdgesFilter) { - if (clazz1Filter.matcher(clazz1Name).matches && - clazz2Filter.matcher(clazz2Name).matches) { - // println(clazz1Filter + ".matcher(" + clazz1Name + ").matches = " + clazz1Filter.matcher(clazz1Name).matches) - // println(clazz2Filter + ".matcher(" + clazz2Name + ").matches = " + clazz2Filter.matcher(clazz2Name).matches) - return true - } - } - false - } - } - - // TODO: This could certainly be improved -- right now the only regex is *, but there's no way to match a single identifier - private val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\"" - private val NodeSpecPattern = Pattern.compile(NodeSpecRegex) - private val EdgeSpecRegex = "\\(" + NodeSpecRegex + "\\s*\\->\\s*" + NodeSpecRegex + "\\)" - // And the composed regexes: - private val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$") - private val HideEdgesRegex = new Regex("^hideEdges(\\s*" + EdgeSpecRegex + ")+$") - - private def makeDiagramFilter(template: DocTemplateImpl, - directives: List[String], - defaultFilter: DiagramFilter, - isInheritanceDiagram: Boolean): DiagramFilter = directives match { - - // if there are no specific diagram directives, return the default filter (either FullDiagram or NoDiagramAtAll) - case Nil => - defaultFilter - - // compute the exact filters. 
By including the annotation, the diagram is autmatically added - case _ => - tFilter -= System.currentTimeMillis - var hideDiagram0: Boolean = false - var hideIncomingImplicits0: Boolean = false - var hideOutgoingImplicits0: Boolean = false - var hideSuperclasses0: Boolean = false - var hideSubclasses0: Boolean = false - var hideInheritedNodes0: Boolean = false - var hideNodesFilter0: List[Pattern] = Nil - var hideEdgesFilter0: List[(Pattern, Pattern)] = Nil - - def warning(message: String) = { - // we need the position from the package object (well, ideally its comment, but yeah ...) - val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym - assert((sym != global.NoSymbol) || (sym == global.rootMirror.RootPackage)) - global.reporter.warning(sym.pos, message) - } - - def preparePattern(className: String) = - "^" + className.stripPrefix("\"").stripSuffix("\"").replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*") + "$" - - // separate entries: - val entries = directives.foldRight("")(_ + " " + _).split(",").map(_.trim) - for (entry <- entries) - entry match { - case "hideDiagram" => - hideDiagram0 = true - case "hideIncomingImplicits" if isInheritanceDiagram => - hideIncomingImplicits0 = true - case "hideOutgoingImplicits" if isInheritanceDiagram => - hideOutgoingImplicits0 = true - case "hideSuperclasses" if isInheritanceDiagram => - hideSuperclasses0 = true - case "hideSubclasses" if isInheritanceDiagram => - hideSubclasses0 = true - case "hideInheritedNodes" if !isInheritanceDiagram => - hideInheritedNodes0 = true - case HideNodesRegex(last) => - val matcher = NodeSpecPattern.matcher(entry) - while (matcher.find()) { - val classPattern = Pattern.compile(preparePattern(matcher.group())) - hideNodesFilter0 ::= classPattern - } - case HideEdgesRegex(last) => - val matcher = NodeSpecPattern.matcher(entry) - while (matcher.find()) { - val class1Pattern = Pattern.compile(preparePattern(matcher.group())) - assert(matcher.find()) // it's got to be there, just matched it! 
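To make the directive parsing above concrete, here is a self-contained sketch (the input string is hypothetical; the regex and `preparePattern` definitions are copied verbatim from the code above) of how a `hideNodes` entry is decomposed into class-name patterns:

{{{
import java.util.regex.Pattern
import scala.util.matching.Regex

// Copied from DiagramDirectiveParser above; only the entry string is made up.
val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\""
val NodeSpecPattern = Pattern.compile(NodeSpecRegex)
val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$")
def preparePattern(className: String) =
  "^" + className.stripPrefix("\"").stripSuffix("\"").replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*") + "$"

val entry = "hideNodes \"scala.*\" \"java.lang.Object\""
entry match {
  case HideNodesRegex(_) =>
    val matcher = NodeSpecPattern.matcher(entry)
    while (matcher.find())
      println(preparePattern(matcher.group())) // prints ^scala\..*$ then ^java\.lang\.Object$
  case _ =>
    println("not a hideNodes directive")
}
}}}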
- val class2Pattern = Pattern.compile(preparePattern(matcher.group())) - hideEdgesFilter0 ::= ((class1Pattern, class2Pattern)) - } - case "" => - // don't need to do anything about it - case _ => - warning("Could not understand diagram annotation in " + template.kind + " " + template.qualifiedName + - ": unmatched entry \"" + entry + "\".\n" + - " This could be because:\n" + - " - you forgot to separate entries by commas\n" + - " - you used a tag that is not allowed in the current context (like @contentDiagram hideSuperclasses)\n"+ - " - you did not use one of the allowed tags (see docs.scala-lang.org for scaladoc annotations)") - } - val result = - if (hideDiagram0) - NoDiagramAtAll - else if ((hideNodesFilter0.isEmpty) && - (hideEdgesFilter0.isEmpty) && - (hideIncomingImplicits0 == false) && - (hideOutgoingImplicits0 == false) && - (hideSuperclasses0 == false) && - (hideSubclasses0 == false) && - (hideInheritedNodes0 == false) && - (hideDiagram0 == false)) - FullDiagram - else - AnnotationDiagramFilter( - hideDiagram = hideDiagram0, - hideIncomingImplicits = hideIncomingImplicits0, - hideOutgoingImplicits = hideOutgoingImplicits0, - hideSuperclasses = hideSuperclasses0, - hideSubclasses = hideSubclasses0, - hideInheritedNodes = hideInheritedNodes0, - hideNodesFilter = hideNodesFilter0, - hideEdgesFilter = hideEdgesFilter0) - - if (settings.docDiagramsDebug.value && result != NoDiagramAtAll && result != FullDiagram) - settings.printMsg(template.kind + " " + template.qualifiedName + " filter: " + result) - tFilter += System.currentTimeMillis - - result - } -} diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala deleted file mode 100644 index ebac25bbe4..0000000000 --- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala +++ /dev/null @@ -1,254 +0,0 @@ -package scala.tools.nsc.doc -package model -package diagram - -import model._ - -// statistics -import html.page.diagram.DiagramStats - -import scala.collection.immutable.SortedMap - -/** - * This trait takes care of generating the diagram for classes and packages - * - * @author Damien Obrist - * @author Vlad Ureche - */ -trait DiagramFactory extends DiagramDirectiveParser { - this: ModelFactory with ModelFactoryTypeSupport with DiagramFactory with CommentFactory with TreeFactory => - - import this.global.definitions._ - import this.global._ - - // the following can used for hardcoding different relations into the diagram, for bootstrapping purposes - def aggregationNode(text: String) = - NormalNode(new TypeEntity { val name = text; val refEntity = SortedMap[Int, (base.LinkTo, Int)]() }, None)() - - /** Create the inheritance diagram for this template */ - def makeInheritanceDiagram(tpl: DocTemplateImpl): Option[Diagram] = { - - tFilter = 0 - tModel = -System.currentTimeMillis - - // the diagram filter - val diagramFilter = makeInheritanceDiagramFilter(tpl) - - def implicitTooltip(from: DocTemplateEntity, to: TemplateEntity, conv: ImplicitConversion) = - Some(from.qualifiedName + " can be implicitly converted to " + conv.targetType + " by the implicit method " - + conv.conversionShortName + " in " + conv.convertorOwner.kind + " " + conv.convertorOwner.qualifiedName) - - val result = - if (diagramFilter == NoDiagramAtAll) - None - else { - // the main node - val thisNode = ThisNode(tpl.resultType, Some(tpl))(Some(tpl.qualifiedName + " (this " + tpl.kind + ")")) - - // superclasses - val superclasses: List[Node] = - 
tpl.parentTypes.collect { - case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))() - }.reverse - - // incoming implcit conversions - lazy val incomingImplicitNodes = tpl.incomingImplicitlyConvertedClasses.map { - case (incomingTpl, conv) => - ImplicitNode(makeType(incomingTpl.sym.tpe, tpl), Some(incomingTpl))(implicitTooltip(from=incomingTpl, to=tpl, conv=conv)) - } - - // subclasses - var subclasses: List[Node] = - tpl.directSubClasses.collect { - case d: TemplateImpl if !classExcluded(d) => NormalNode(makeType(d.sym.tpe, tpl), Some(d))() - }.sortBy(_.tpl.get.name)(implicitly[Ordering[String]].reverse) - - // outgoing implicit coversions - lazy val outgoingImplicitNodes = tpl.outgoingImplicitlyConvertedClasses.map { - case (outgoingTpl, outgoingType, conv) => - ImplicitNode(outgoingType, Some(outgoingTpl))(implicitTooltip(from=tpl, to=tpl, conv=conv)) - } - - // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams. - // Currently, it's possible to leave nodes and edges out, but there's no way to create new nodes and edges - // The implementation would need to add the annotations and the logic to select nodes (or create new ones) - // and add edges to the diagram -- I bet it wouldn't take too long for someone to do it (one or two days - // at most) and it would be a great add to the diagrams. - if (tpl.sym == AnyRefClass) - subclasses = List(aggregationNode("All user-defined classes and traits")) - - val filteredSuperclasses = if (diagramFilter.hideSuperclasses) Nil else superclasses - val filteredIncomingImplicits = if (diagramFilter.hideIncomingImplicits) Nil else incomingImplicitNodes - val filteredSubclasses = if (diagramFilter.hideSubclasses) Nil else subclasses - val filteredImplicitOutgoingNodes = if (diagramFilter.hideOutgoingImplicits) Nil else outgoingImplicitNodes - - // final diagram filter - filterDiagram(InheritanceDiagram(thisNode, filteredSuperclasses.reverse, filteredSubclasses.reverse, filteredIncomingImplicits, filteredImplicitOutgoingNodes), diagramFilter) - } - - tModel += System.currentTimeMillis - DiagramStats.addFilterTime(tFilter) - DiagramStats.addModelTime(tModel-tFilter) - - result - } - - /** Create the content diagram for this template */ - def makeContentDiagram(pack: DocTemplateImpl): Option[Diagram] = { - - tFilter = 0 - tModel = -System.currentTimeMillis - - // the diagram filter - val diagramFilter = makeContentDiagramFilter(pack) - - val result = - if (diagramFilter == NoDiagramAtAll) - None - else { - var mapNodes = Map[TemplateEntity, Node]() - var nodesShown = Set[TemplateEntity]() - var edgesAll = List[(TemplateEntity, List[TemplateEntity])]() - - // classes is the entire set of classes and traits in the package, they are the superset of nodes in the diagram - // we collect classes, traits and objects without a companion, which are usually used as values(e.g. scala.None) - val nodesAll = pack.members collect { - case d: TemplateEntity if ((!diagramFilter.hideInheritedNodes) || (d.inTemplate == pack)) => d - } - - // for each node, add its subclasses - for (node <- nodesAll if !classExcluded(node)) { - node match { - case dnode: MemberTemplateImpl => - var superClasses = dnode.parentTypes.map(_._1).filter(nodesAll.contains(_)) - - // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to add nodes to diagrams. 
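For orientation, a rough sketch (hypothetical class names, reusing the `aggregationNode` helper defined at the top of this trait as a stub node factory) of the edge shape `InheritanceDiagram` derives from node lists like the ones assembled above:

{{{
// Hypothetical: a template C with superclass S, subclass U and one outgoing
// implicit view to I. aggregationNode builds stub NormalNodes with fresh types.
val c = ThisNode(aggregationNode("C").tpe, None)()
val s = aggregationNode("S")
val u = aggregationNode("U")
val i = ImplicitNode(aggregationNode("I").tpe, None)()
val diagram = InheritanceDiagram(c, List(s), List(u), Nil, List(i))
diagram.edges // == List(c -> List(s, i), u -> List(c))
}}}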
- if (pack.sym == ScalaPackage) - if (dnode.sym == NullClass) - superClasses = List(makeTemplate(AnyRefClass)) - else if (dnode.sym == NothingClass) - superClasses = (List(NullClass) ::: ScalaValueClasses).map(makeTemplate(_)) - - if (!superClasses.isEmpty) { - nodesShown += dnode - nodesShown ++= superClasses - } - edgesAll ::= dnode -> superClasses - case _ => - } - - mapNodes += node -> ( - if (node.inTemplate == pack && (node.isDocTemplate || node.isAbstractType || node.isAliasType)) - NormalNode(node.resultType, Some(node))() - else - OutsideNode(node.resultType, Some(node))() - ) - } - - if (nodesShown.isEmpty) - None - else { - val nodes = nodesAll.filter(nodesShown.contains(_)).flatMap(mapNodes.get(_)) - val edges = edgesAll.map(pair => (mapNodes(pair._1), pair._2.map(mapNodes(_)))).filterNot(pair => pair._2.isEmpty) - val diagram = - // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams. - if (pack.sym == ScalaPackage) { - // Tried it, but it doesn't look good: - // var anyRefSubtypes: List[Node] = List(mapNodes(makeTemplate(AnyRefClass))) - // var dirty = true - // do { - // val length = anyRefSubtypes.length - // anyRefSubtypes :::= edges.collect { case p: (Node, List[Node]) if p._2.exists(anyRefSubtypes.contains(_)) => p._1 } - // anyRefSubtypes = anyRefSubtypes.distinct - // dirty = (anyRefSubtypes.length != length) - // } while (dirty) - // println(anyRefSubtypes) - val anyRefSubtypes = Nil - val allAnyRefTypes = aggregationNode("All AnyRef subtypes") - val nullTemplate = makeTemplate(NullClass) - if (nullTemplate.isDocTemplate) - ContentDiagram(allAnyRefTypes::nodes, (mapNodes(nullTemplate), allAnyRefTypes::anyRefSubtypes)::edges.filterNot(_._1.tpl == Some(nullTemplate))) - else - ContentDiagram(nodes, edges) - } else - ContentDiagram(nodes, edges) - - filterDiagram(diagram, diagramFilter) - } - } - - tModel += System.currentTimeMillis - DiagramStats.addFilterTime(tFilter) - DiagramStats.addModelTime(tModel-tFilter) - - result - } - - /** Diagram filtering logic */ - private def filterDiagram(diagram: Diagram, diagramFilter: DiagramFilter): Option[Diagram] = { - tFilter -= System.currentTimeMillis - - val result = - if (diagramFilter == FullDiagram) - Some(diagram) - else if (diagramFilter == NoDiagramAtAll) - None - else { - // Final diagram, with the filtered nodes and edges - diagram match { - case InheritanceDiagram(thisNode, _, _, _, _) if diagramFilter.hideNode(thisNode) => - None - - case InheritanceDiagram(thisNode, superClasses, subClasses, incomingImplicits, outgoingImplicits) => - - def hideIncoming(node: Node): Boolean = - diagramFilter.hideNode(node) || diagramFilter.hideEdge(node, thisNode) - - def hideOutgoing(node: Node): Boolean = - diagramFilter.hideNode(node) || diagramFilter.hideEdge(thisNode, node) - - // println(thisNode) - // println(superClasses.map(cl => "super: " + cl + " " + hideOutgoing(cl)).mkString("\n")) - // println(subClasses.map(cl => "sub: " + cl + " " + hideIncoming(cl)).mkString("\n")) - Some(InheritanceDiagram(thisNode, - superClasses.filterNot(hideOutgoing(_)), - subClasses.filterNot(hideIncoming(_)), - incomingImplicits.filterNot(hideIncoming(_)), - outgoingImplicits.filterNot(hideOutgoing(_)))) - - case ContentDiagram(nodes0, edges0) => - // Filter out all edges that: - // (1) are sources of hidden classes - // (2) are manually hidden by the user - // (3) are destinations of hidden classes - val edges: List[(Node, List[Node])] = - diagram.edges.flatMap({ - case (source, dests) if 
!diagramFilter.hideNode(source) => - val dests2 = dests.collect({ case dest if (!(diagramFilter.hideEdge(source, dest) || diagramFilter.hideNode(dest))) => dest }) - if (dests2 != Nil) - List((source, dests2)) - else - Nil - case _ => Nil - }) - - // Only show the the non-isolated nodes - // TODO: Decide if we really want to hide package members, I'm not sure that's a good idea (!!!) - // TODO: Does .distinct cause any stability issues? - val sourceNodes = edges.map(_._1) - val sinkNodes = edges.map(_._2).flatten - val nodes = (sourceNodes ::: sinkNodes).distinct - Some(ContentDiagram(nodes, edges)) - } - } - - tFilter += System.currentTimeMillis - - // eliminate all empty diagrams - if (result.isDefined && result.get.edges.forall(_._2.isEmpty)) - None - else - result - } - -} diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala deleted file mode 100644 index 3db9f18484..0000000000 --- a/src/partest/scala/tools/partest/ScaladocModelTest.scala +++ /dev/null @@ -1,203 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Vlad Ureche - */ - -package scala.tools.partest - -import scala.tools.nsc._ -import scala.tools.nsc.util.CommandLineParser -import scala.tools.nsc.doc.{Settings, DocFactory, Universe} -import scala.tools.nsc.doc.model._ -import scala.tools.nsc.doc.model.diagram._ -import scala.tools.nsc.doc.base.comment._ -import scala.tools.nsc.reporters.ConsoleReporter - -/** A class for testing scaladoc model generation - * - you need to specify the code in the `code` method - * - you need to override the testModel method to test the model - * - you may specify extra parameters to send to scaladoc in `scaladocSettings` - * {{{ - import scala.tools.nsc.doc.model._ - import scala.tools.partest.ScaladocModelTest - - object Test extends ScaladocModelTest { - - override def code = """ ... """ // or override def resourceFile = ".scala" (from test/scaladoc/resources) - def scaladocSettings = " ... 
" - def testModel(rootPackage: Package) = { - // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s)) - import access._ - - // just need to check the member exists, access methods will throw an error if there's a problem - rootPackage._package("scala")._package("test")._class("C")._method("foo") - } - } - * }}} - */ -abstract class ScaladocModelTest extends DirectTest { - - /** Override this to give scaladoc command line parameters */ - def scaladocSettings: String - - /** Override this to test the model */ - def testModel(root: Package): Unit - - /** Override to feed a file in resources to scaladoc*/ - def resourceFile: String = null - - /** Override to feed code into scaladoc */ - override def code = - if (resourceFile ne null) - io.File(resourcePath + "/" + resourceFile).slurp() - else - sys.error("Scaladoc Model Test: You need to give a file or some code to feed to scaladoc!") - - def resourcePath = io.Directory(sys.props("partest.cwd") + "/../resources") - - // Implementation follows: - override def extraSettings: String = "-usejavacp" - - override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - - try { - // 1 - compile with scaladoc and get the model out - val universe = model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}) - // 2 - check the model generated - testModel(universe.rootPackage) - println("Done.") - } catch { - case e: Exception => - println(e) - e.printStackTrace - } - // set err back to the real err handler - System.setErr(prevErr) - } - - private[this] var settings: Settings = null - - // create a new scaladoc compiler - private[this] def newDocFactory: DocFactory = { - settings = new Settings(_ => ()) - settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! 
- val args = extraSettings + " " + scaladocSettings - new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think - val docFact = new DocFactory(new ConsoleReporter(settings), settings) - docFact - } - - // compile with scaladoc and output the result - def model: Option[Universe] = newDocFactory.makeUniverse(Right(code)) - - // so we don't get the newSettings warning - override def isDebug = false - - - // finally, enable easy navigation inside the entities - object access { - - implicit class TemplateAccess(tpl: DocTemplateEntity) { - def _class(name: String): DocTemplateEntity = getTheFirst(_classes(name), tpl.qualifiedName + ".class(" + name + ")") - def _classes(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: DocTemplateEntity with Class => c}) - - def _classMbr(name: String): MemberTemplateEntity = getTheFirst(_classesMbr(name), tpl.qualifiedName + ".classMember(" + name + ")") - def _classesMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: MemberTemplateEntity if c.isClass => c}) - - def _trait(name: String): DocTemplateEntity = getTheFirst(_traits(name), tpl.qualifiedName + ".trait(" + name + ")") - def _traits(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: DocTemplateEntity with Trait => t}) - - def _traitMbr(name: String): MemberTemplateEntity = getTheFirst(_traitsMbr(name), tpl.qualifiedName + ".traitMember(" + name + ")") - def _traitsMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: MemberTemplateEntity if t.isTrait => t}) - - def _object(name: String): DocTemplateEntity = getTheFirst(_objects(name), tpl.qualifiedName + ".object(" + name + ")") - def _objects(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: DocTemplateEntity with Object => o}) - - def _objectMbr(name: String): MemberTemplateEntity = getTheFirst(_objectsMbr(name), tpl.qualifiedName + ".objectMember(" + name + ")") - def _objectsMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: MemberTemplateEntity if o.isObject => o}) - - def _method(name: String): Def = getTheFirst(_methods(name), tpl.qualifiedName + ".method(" + name + ")") - def _methods(name: String): List[Def] = tpl.methods.filter(_.name == name) - - def _value(name: String): Val = getTheFirst(_values(name), tpl.qualifiedName + ".value(" + name + ")") - def _values(name: String): List[Val] = tpl.values.filter(_.name == name) - - def _conversion(name: String): ImplicitConversion = getTheFirst(_conversions(name), tpl.qualifiedName + ".conversion(" + name + ")") - def _conversions(name: String): List[ImplicitConversion] = tpl.conversions.filter(_.conversionQualifiedName == name) - - def _absType(name: String): MemberEntity = getTheFirst(_absTypes(name), tpl.qualifiedName + ".abstractType(" + name + ")") - def _absTypes(name: String): List[MemberEntity] = tpl.members.filter(mbr => mbr.name == name && mbr.isAbstractType) - - def _absTypeTpl(name: String): DocTemplateEntity = getTheFirst(_absTypeTpls(name), tpl.qualifiedName + ".abstractType(" + name + ")") - def _absTypeTpls(name: String): List[DocTemplateEntity] = tpl.members.collect({ case dtpl: DocTemplateEntity with AbstractType if dtpl.name == name => dtpl }) - - def _aliasType(name: String): MemberEntity = getTheFirst(_aliasTypes(name), tpl.qualifiedName + ".aliasType(" + 
name + ")") - def _aliasTypes(name: String): List[MemberEntity] = tpl.members.filter(mbr => mbr.name == name && mbr.isAliasType) - - def _aliasTypeTpl(name: String): DocTemplateEntity = getTheFirst(_aliasTypeTpls(name), tpl.qualifiedName + ".aliasType(" + name + ")") - def _aliasTypeTpls(name: String): List[DocTemplateEntity] = tpl.members.collect({ case dtpl: DocTemplateEntity with AliasType if dtpl.name == name => dtpl }) - } - - trait WithMembers { - def members: List[MemberEntity] - def _member(name: String): MemberEntity = getTheFirst(_members(name), this.toString + ".member(" + name + ")") - def _members(name: String): List[MemberEntity] = members.filter(_.name == name) - } - implicit class PackageAccess(pack: Package) extends TemplateAccess(pack) { - def _package(name: String): Package = getTheFirst(_packages(name), pack.qualifiedName + ".package(" + name + ")") - def _packages(name: String): List[Package] = pack.packages.filter(_.name == name) - } - implicit class DocTemplateEntityMembers(val underlying: DocTemplateEntity) extends WithMembers { - def members = underlying.members - } - implicit class ImplicitConversionMembers(val underlying: ImplicitConversion) extends WithMembers { - def members = underlying.members - } - - def getTheFirst[T](list: List[T], expl: String): T = list.length match { - case 1 => list.head - case 0 => sys.error("Error getting " + expl + ": No such element.") - case _ => sys.error("Error getting " + expl + ": " + list.length + " elements with this name. " + - "All elements in list: [" + list.map({ - case ent: Entity => ent.kind + " " + ent.qualifiedName - case other => other.toString - }).mkString(", ") + "]") - } - - def extractCommentText(c: Any) = { - def extractText(body: Any): String = body match { - case s: String => s - case s: Seq[_] => s.toList.map(extractText(_)).mkString - case p: Product => p.productIterator.toList.map(extractText(_)).mkString - case _ => "" - } - c match { - case c: Comment => - extractText(c.body) - case b: Body => - extractText(b) - } - } - - def countLinks(c: Comment, p: EntityLink => Boolean) = { - def countLinks(body: Any): Int = body match { - case el: EntityLink if p(el) => 1 - case s: Seq[_] => s.toList.map(countLinks(_)).sum - case p: Product => p.productIterator.toList.map(countLinks(_)).sum - case _ => 0 - } - countLinks(c.body) - } - - def testDiagram(doc: DocTemplateEntity, diag: Option[Diagram], nodes: Int, edges: Int) = { - assert(diag.isDefined, doc.qualifiedName + " diagram missing") - assert(diag.get.nodes.length == nodes, - doc.qualifiedName + "'s diagram: node count " + diag.get.nodes.length + " == " + nodes) - assert(diag.get.edges.map(_._2.length).sum == edges, - doc.qualifiedName + "'s diagram: edge count " + diag.get.edges.length + " == " + edges) - } - } -} diff --git a/src/scaladoc/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala new file mode 100644 index 0000000000..fd6d637212 --- /dev/null +++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala @@ -0,0 +1,695 @@ +/* __ *\ +** ________ ___ / / ___ Scala Ant Tasks ** +** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + +package scala.tools.ant + +import java.io.File + +import org.apache.tools.ant.Project +import org.apache.tools.ant.types.{Path, Reference} +import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper} + +import scala.tools.nsc.Global +import scala.tools.nsc.doc.Settings +import 
scala.tools.nsc.reporters.{Reporter, ConsoleReporter} + +/** An Ant task to document Scala code. + * + * This task can take the following parameters as attributes: + * - `srcdir` (mandatory), + * - `srcref`, + * - `destdir`, + * - `classpath`, + * - `classpathref`, + * - `sourcepath`, + * - `sourcepathref`, + * - `bootclasspath`, + * - `bootclasspathref`, + * - `extdirs`, + * - `extdirsref`, + * - `encoding`, + * - `doctitle`, + * - `header`, + * - `footer`, + * - `top`, + * - `bottom`, + * - `addparams`, + * - `deprecation`, + * - `docgenerator`, + * - `docrootcontent`, + * - `unchecked`, + * - `nofail`, + * - `skipPackages`. + * + * It also takes the following parameters as nested elements: + * - `src` (for srcdir), + * - `classpath`, + * - `sourcepath`, + * - `bootclasspath`, + * - `extdirs`. + * + * @author Gilles Dubochet, Stephane Micheloud + */ +class Scaladoc extends ScalaMatchingTask { + + /** The unique Ant file utilities instance to use in this task. */ + private val fileUtils = FileUtils.getFileUtils() + +/*============================================================================*\ +** Ant user-properties ** +\*============================================================================*/ + + abstract class PermissibleValue { + val values: List[String] + def isPermissible(value: String): Boolean = + (value == "") || values.exists(_.startsWith(value)) + } + + /** Defines valid values for the `deprecation` and + * `unchecked` properties. + */ + object Flag extends PermissibleValue { + val values = List("yes", "no", "on", "off") + def getBooleanValue(value: String, flagName: String): Boolean = + if (Flag.isPermissible(value)) + ("yes".equals(value) || "on".equals(value)) + else + buildError("Unknown " + flagName + " flag '" + value + "'") + } + + /** The directories that contain source files to compile. */ + private var origin: Option[Path] = None + /** The directory to put the compiled files in. */ + private var destination: Option[File] = None + + /** The class path to use for this compilation. */ + private var classpath: Option[Path] = None + /** The source path to use for this compilation. */ + private var sourcepath: Option[Path] = None + /** The boot class path to use for this compilation. */ + private var bootclasspath: Option[Path] = None + /** The external extensions path to use for this compilation. */ + private var extdirs: Option[Path] = None + + /** The character encoding of the files to compile. */ + private var encoding: Option[String] = None + + /** The fully qualified name of a doclet class, which will be used to generate the documentation. */ + private var docgenerator: Option[String] = None + + /** The file from which the documentation content of the root package will be taken */ + private var docrootcontent: Option[File] = None + + /** The document title of the generated HTML documentation. */ + private var doctitle: Option[String] = None + + /** The document footer of the generated HTML documentation. */ + private var docfooter: Option[String] = None + + /** The document version, to be added to the title. */ + private var docversion: Option[String] = None + + /** Instruct the compiler to generate links to sources */ + private var docsourceurl: Option[String] = None + + /** Point scaladoc at uncompilable sources. */ + private var docUncompilable: Option[String] = None + + /** Instruct the compiler to use additional parameters */ + private var addParams: String = "" + + /** Instruct the compiler to generate deprecation information. 
*/ + private var deprecation: Boolean = false + + /** Instruct the compiler to generate unchecked information. */ + private var unchecked: Boolean = false + + /** Instruct the ant task not to fail in the event of errors */ + private var nofail: Boolean = false + + /** Instruct the scaladoc tool to document implicit conversions */ + private var docImplicits: Boolean = false + + /** Instruct the scaladoc tool to document all (including impossible) implicit conversions */ + private var docImplicitsShowAll: Boolean = false + + /** Instruct the scaladoc tool to output implicits debugging information */ + private var docImplicitsDebug: Boolean = false + + /** Instruct the scaladoc tool to create diagrams */ + private var docDiagrams: Boolean = false + + /** Instruct the scaladoc tool to output diagram creation debugging information */ + private var docDiagramsDebug: Boolean = false + + /** Instruct the scaladoc tool to use the binary given to create diagrams */ + private var docDiagramsDotPath: Option[String] = None + + /** Instruct the scaladoc to produce textual ouput from html pages, for easy diff-ing */ + private var docRawOutput: Boolean = false + + /** Instruct the scaladoc not to generate prefixes */ + private var docNoPrefixes: Boolean = false + + /** Instruct the scaladoc tool to group similar functions together */ + private var docGroups: Boolean = false + + /** Instruct the scaladoc tool to skip certain packages */ + private var docSkipPackages: String = "" + +/*============================================================================*\ +** Properties setters ** +\*============================================================================*/ + + /** Sets the `srcdir` attribute. Used by [[http://ant.apache.org Ant]]. + * + * @param input The value of `origin`. + */ + def setSrcdir(input: Path) { + if (origin.isEmpty) origin = Some(input) + else origin.get.append(input) + } + + /** Sets the `origin` as a nested src Ant parameter. + * + * @return An origin path to be configured. + */ + def createSrc(): Path = { + if (origin.isEmpty) origin = Some(new Path(getProject)) + origin.get.createPath() + } + + /** Sets the `origin` as an external reference Ant parameter. + * + * @param input A reference to an origin path. + */ + def setSrcref(input: Reference) { + createSrc().setRefid(input) + } + + /** Sets the `destdir` attribute. Used by [[http://ant.apache.org Ant]]. + * + * @param input The value of `destination`. + */ + def setDestdir(input: File) { + destination = Some(input) + } + + /** Sets the `classpath` attribute. Used by [[http://ant.apache.org Ant]]. + * + * @param input The value of `classpath`. + */ + def setClasspath(input: Path) { + if (classpath.isEmpty) classpath = Some(input) + else classpath.get.append(input) + } + + /** Sets the `classpath` as a nested classpath Ant parameter. + * + * @return A class path to be configured. + */ + def createClasspath(): Path = { + if (classpath.isEmpty) classpath = Some(new Path(getProject)) + classpath.get.createPath() + } + + /** Sets the `classpath` as an external reference Ant parameter. + * + * @param input A reference to a class path. + */ + def setClasspathref(input: Reference) = + createClasspath().setRefid(input) + + /** Sets the `sourcepath` attribute. Used by [[http://ant.apache.org Ant]]. + * + * @param input The value of `sourcepath`. 
+ */ + def setSourcepath(input: Path) = + if (sourcepath.isEmpty) sourcepath = Some(input) + else sourcepath.get.append(input) + + /** Sets the `sourcepath` as a nested sourcepath Ant parameter. + * + * @return A source path to be configured. + */ + def createSourcepath(): Path = { + if (sourcepath.isEmpty) sourcepath = Some(new Path(getProject)) + sourcepath.get.createPath() + } + + /** Sets the `sourcepath` as an external reference Ant parameter. + * + * @param input A reference to a source path. + */ + def setSourcepathref(input: Reference) = + createSourcepath().setRefid(input) + + /** Sets the `bootclasspath` attribute. Used by [[http://ant.apache.org Ant]]. + * + * @param input The value of `bootclasspath`. + */ + def setBootclasspath(input: Path) = + if (bootclasspath.isEmpty) bootclasspath = Some(input) + else bootclasspath.get.append(input) + + /** Sets the `bootclasspath` as a nested `sourcepath` Ant parameter. + * + * @return A source path to be configured. + */ + def createBootclasspath(): Path = { + if (bootclasspath.isEmpty) bootclasspath = Some(new Path(getProject)) + bootclasspath.get.createPath() + } + + /** Sets the `bootclasspath` as an external reference Ant parameter. + * + * @param input A reference to a source path. + */ + def setBootclasspathref(input: Reference) { + createBootclasspath().setRefid(input) + } + + /** Sets the external extensions path attribute. Used by [[http://ant.apache.org Ant]]. + * + * @param input The value of `extdirs`. + */ + def setExtdirs(input: Path) { + if (extdirs.isEmpty) extdirs = Some(input) + else extdirs.get.append(input) + } + + /** Sets the `extdirs` as a nested sourcepath Ant parameter. + * + * @return An extensions path to be configured. + */ + def createExtdirs(): Path = { + if (extdirs.isEmpty) extdirs = Some(new Path(getProject)) + extdirs.get.createPath() + } + + /** Sets the `extdirs` as an external reference Ant parameter. + * + * @param input A reference to an extensions path. + */ + def setExtdirsref(input: Reference) { + createExtdirs().setRefid(input) + } + + /** Sets the `encoding` attribute. Used by Ant. + * + * @param input The value of `encoding`. + */ + def setEncoding(input: String) { + encoding = Some(input) + } + + /** Sets the `docgenerator` attribute. + * + * @param input A fully qualified class name of a doclet. + */ + def setDocgenerator(input: String) { + docgenerator = Some(input) + } + + /** + * Sets the `docrootcontent` attribute. + * + * @param input The file from which the documentation content of the root + * package will be taken. + */ + def setDocrootcontent(input : File) { + docrootcontent = Some(input) + } + + /** Sets the `docversion` attribute. + * + * @param input The value of `docversion`. + */ + def setDocversion(input: String) { + docversion = Some(input) + } + + /** Sets the `docsourceurl` attribute. + * + * @param input The value of `docsourceurl`. + */ + def setDocsourceurl(input: String) { + docsourceurl = Some(input) + } + + /** Sets the `doctitle` attribute. + * + * @param input The value of `doctitle`. + */ + def setDoctitle(input: String) { + doctitle = Some(input) + } + + /** Sets the `docfooter` attribute. + * + * @param input The value of `docfooter`. + */ + def setDocfooter(input: String) { + docfooter = Some(input) + } + + /** Set the `addparams` info attribute. + * + * @param input The value for `addparams`. + */ + def setAddparams(input: String) { + addParams = input + } + + /** Set the `deprecation` info attribute. + * + * @param input One of the flags `yes/no` or `on/off`. 
+ */ + def setDeprecation(input: String) { + if (Flag.isPermissible(input)) + deprecation = "yes".equals(input) || "on".equals(input) + else + buildError("Unknown deprecation flag '" + input + "'") + } + + /** Set the `unchecked` info attribute. + * + * @param input One of the flags `yes/no` or `on/off`. + */ + def setUnchecked(input: String) { + if (Flag.isPermissible(input)) + unchecked = "yes".equals(input) || "on".equals(input) + else + buildError("Unknown unchecked flag '" + input + "'") + } + + def setDocUncompilable(input: String) { + docUncompilable = Some(input) + } + + /** Set the `nofail` info attribute. + * + * @param input One of the flags `yes/no` or `on/off`. Default if no/off. + */ + def setNoFail(input: String) = + nofail = Flag.getBooleanValue(input, "nofail") + + /** Set the `implicits` info attribute. + * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ + def setImplicits(input: String) = + docImplicits = Flag.getBooleanValue(input, "implicits") + + /** Set the `implicitsShowAll` info attribute to enable scaladoc to show all implicits, including those impossible to + * convert to from the default scope + * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ + def setImplicitsShowAll(input: String) = + docImplicitsShowAll = Flag.getBooleanValue(input, "implicitsShowAll") + + /** Set the `implicitsDebug` info attribute so scaladoc outputs implicit conversion debug information + * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ + def setImplicitsDebug(input: String) = + docImplicitsDebug = Flag.getBooleanValue(input, "implicitsDebug") + + /** Set the `diagrams` bit so Scaladoc adds diagrams to the documentation + * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ + def setDiagrams(input: String) = + docDiagrams = Flag.getBooleanValue(input, "diagrams") + + /** Set the `diagramsDebug` bit so Scaladoc outputs diagram building debug information + * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ + def setDiagramsDebug(input: String) = + docDiagramsDebug = Flag.getBooleanValue(input, "diagramsDebug") + + /** Set the `diagramsDotPath` attribute to the path where graphviz dot can be found (including the binary file name, + * eg: /usr/bin/dot) */ + def setDiagramsDotPath(input: String) = + docDiagramsDotPath = Some(input) + + /** Set the `rawOutput` bit so Scaladoc also outputs text from each html file + * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ + def setRawOutput(input: String) = + docRawOutput = Flag.getBooleanValue(input, "rawOutput") + + /** Set the `noPrefixes` bit to prevent Scaladoc from generating prefixes in + * front of types -- may lead to confusion, but significantly speeds up the generation. + * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */ + def setNoPrefixes(input: String) = + docNoPrefixes = Flag.getBooleanValue(input, "noPrefixes") + + /** Instruct the scaladoc tool to group similar functions together */ + def setGroups(input: String) = + docGroups = Flag.getBooleanValue(input, "groups") + + /** Instruct the scaladoc tool to skip certain packages. + * @param input A colon-delimited list of fully qualified package names that will be skipped from scaladoc. 
+ */ + def setSkipPackages(input: String) = + docSkipPackages = input + +/*============================================================================*\ +** Properties getters ** +\*============================================================================*/ + + /** Gets the value of the `classpath` attribute in a + * Scala-friendly form. + * + * @return The class path as a list of files. + */ + private def getClasspath: List[File] = + if (classpath.isEmpty) buildError("Member 'classpath' is empty.") + else classpath.get.list().toList map nameToFile + + /** Gets the value of the `origin` attribute in a Scala-friendly + * form. + * + * @return The origin path as a list of files. + */ + private def getOrigin: List[File] = + if (origin.isEmpty) buildError("Member 'origin' is empty.") + else origin.get.list().toList map nameToFile + + /** Gets the value of the `destination` attribute in a + * Scala-friendly form. + * + * @return The destination as a file. + */ + private def getDestination: File = + if (destination.isEmpty) buildError("Member 'destination' is empty.") + else existing(getProject resolveFile destination.get.toString) + + /** Gets the value of the `sourcepath` attribute in a + * Scala-friendly form. + * + * @return The source path as a list of files. + */ + private def getSourcepath: List[File] = + if (sourcepath.isEmpty) buildError("Member 'sourcepath' is empty.") + else sourcepath.get.list().toList map nameToFile + + /** Gets the value of the `bootclasspath` attribute in a + * Scala-friendly form. + * + * @return The boot class path as a list of files. + */ + private def getBootclasspath: List[File] = + if (bootclasspath.isEmpty) buildError("Member 'bootclasspath' is empty.") + else bootclasspath.get.list().toList map nameToFile + + /** Gets the value of the `extdirs` attribute in a + * Scala-friendly form. + * + * @return The extensions path as a list of files. + */ + private def getExtdirs: List[File] = + if (extdirs.isEmpty) buildError("Member 'extdirs' is empty.") + else extdirs.get.list().toList map nameToFile + +/*============================================================================*\ +** Compilation and support methods ** +\*============================================================================*/ + + /** This is forwarding method to circumvent bug #281 in Scala 2. Remove when + * bug has been corrected. + */ + override protected def getDirectoryScanner(baseDir: java.io.File) = + super.getDirectoryScanner(baseDir) + + /** Transforms a string name into a file relative to the provided base + * directory. + * + * @param base A file pointing to the location relative to which the name + * will be resolved. + * @param name A relative or absolute path to the file as a string. + * @return A file created from the name and the base file. + */ + private def nameToFile(base: File)(name: String): File = + existing(fileUtils.resolveFile(base, name)) + + /** Transforms a string name into a file relative to the build root + * directory. + * + * @param name A relative or absolute path to the file as a string. + * @return A file created from the name. + */ + private def nameToFile(name: String): File = + existing(getProject resolveFile name) + + /** Tests if a file exists and prints a warning in case it doesn't. Always + * returns the file, even if it doesn't exist. + * + * @param file A file to test for existance. + * @return The same file. 
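+   *          For example (hypothetical file name), `existing(new File("missing.jar"))`
+   *          only logs a warning and still returns that `File`.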
+ */ + private def existing(file: File): File = { + if (!file.exists()) + log("Element '" + file.toString + "' does not exist.", + Project.MSG_WARN) + file + } + + /** Transforms a path into a Scalac-readable string. + * + * @param path A path to convert. + * @return A string-representation of the path like `a.jar:b.jar`. + */ + private def asString(path: List[File]): String = + path.map(asString).mkString("", File.pathSeparator, "") + + /** Transforms a file into a Scalac-readable string. + * + * @param file A file to convert. + * @return A string-representation of the file like `/x/k/a.scala`. + */ + private def asString(file: File): String = + file.getAbsolutePath() + +/*============================================================================*\ +** The big execute method ** +\*============================================================================*/ + + /** Initializes settings and source files */ + protected def initialize: Pair[Settings, List[File]] = { + // Tests if all mandatory attributes are set and valid. + if (origin.isEmpty) buildError("Attribute 'srcdir' is not set.") + if (getOrigin.isEmpty) buildError("Attribute 'srcdir' is not set.") + if (!destination.isEmpty && !destination.get.isDirectory()) + buildError("Attribute 'destdir' does not refer to an existing directory.") + if (destination.isEmpty) destination = Some(getOrigin.head) + + val mapper = new GlobPatternMapper() + mapper setTo "*.html" + mapper setFrom "*.scala" + + // Scans source directories to build up a compile lists. + // If force is false, only files were the .class file in destination is + // older than the .scala file will be used. + val sourceFiles: List[File] = + for { + originDir <- getOrigin + originFile <- { + val includedFiles = + getDirectoryScanner(originDir).getIncludedFiles() + val list = includedFiles.toList + if (list.length > 0) + log( + "Documenting " + list.length + " source file" + + (if (list.length > 1) "s" else "") + + (" to " + getDestination.toString) + ) + else + log("No files selected for documentation", Project.MSG_VERBOSE) + + list + } + } yield { + log(originFile, Project.MSG_DEBUG) + nameToFile(originDir)(originFile) + } + + def decodeEscapes(s: String): String = { + // In Ant script characters '<' and '>' must be encoded when + // used in attribute values, e.g. for attributes "doctitle", "header", .. + // in task Scaladoc you may write: + // doctitle="<div>Scala</div>" + // so we have to decode them here. + s.replaceAll("<", "<").replaceAll(">",">") + .replaceAll("&", "&").replaceAll(""", "\"") + } + + // Builds-up the compilation settings for Scalac with the existing Ant + // parameters. 
+ val docSettings = new Settings(buildError) + docSettings.outdir.value = asString(destination.get) + if (!classpath.isEmpty) + docSettings.classpath.value = asString(getClasspath) + if (!sourcepath.isEmpty) + docSettings.sourcepath.value = asString(getSourcepath) + /*else if (origin.get.size() > 0) + settings.sourcepath.value = origin.get.list()(0)*/ + if (!bootclasspath.isEmpty) + docSettings.bootclasspath.value = asString(getBootclasspath) + if (!extdirs.isEmpty) docSettings.extdirs.value = asString(getExtdirs) + if (!encoding.isEmpty) docSettings.encoding.value = encoding.get + if (!doctitle.isEmpty) docSettings.doctitle.value = decodeEscapes(doctitle.get) + if (!docfooter.isEmpty) docSettings.docfooter.value = decodeEscapes(docfooter.get) + if (!docversion.isEmpty) docSettings.docversion.value = decodeEscapes(docversion.get) + if (!docsourceurl.isEmpty) docSettings.docsourceurl.value = decodeEscapes(docsourceurl.get) + if (!docUncompilable.isEmpty) docSettings.docUncompilable.value = decodeEscapes(docUncompilable.get) + + docSettings.deprecation.value = deprecation + docSettings.unchecked.value = unchecked + docSettings.docImplicits.value = docImplicits + docSettings.docImplicitsDebug.value = docImplicitsDebug + docSettings.docImplicitsShowAll.value = docImplicitsShowAll + docSettings.docDiagrams.value = docDiagrams + docSettings.docDiagramsDebug.value = docDiagramsDebug + docSettings.docRawOutput.value = docRawOutput + docSettings.docNoPrefixes.value = docNoPrefixes + docSettings.docGroups.value = docGroups + docSettings.docSkipPackages.value = docSkipPackages + if(!docDiagramsDotPath.isEmpty) docSettings.docDiagramsDotPath.value = docDiagramsDotPath.get + + if (!docgenerator.isEmpty) docSettings.docgenerator.value = docgenerator.get + if (!docrootcontent.isEmpty) docSettings.docRootContent.value = docrootcontent.get.getAbsolutePath() + log("Scaladoc params = '" + addParams + "'", Project.MSG_DEBUG) + + docSettings processArgumentString addParams + Pair(docSettings, sourceFiles) + } + + def safeBuildError(message: String): Unit = if (nofail) log(message) else buildError(message) + + /** Performs the compilation. 
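+   *
+   *  An illustrative build-file invocation (attribute values are hypothetical, attribute
+   *  names follow the setters above):
+   *  `<scaladoc srcdir="src" destdir="build/api" doctitle="My Library"/>`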
*/ + override def execute() = { + val Pair(docSettings, sourceFiles) = initialize + val reporter = new ConsoleReporter(docSettings) + try { + val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings) + docProcessor.document(sourceFiles.map (_.toString)) + if (reporter.ERROR.count > 0) + safeBuildError( + "Document failed with " + + reporter.ERROR.count + " error" + + (if (reporter.ERROR.count > 1) "s" else "") + + "; see the documenter error output for details.") + else if (reporter.WARNING.count > 0) + log( + "Document succeeded with " + + reporter.WARNING.count + " warning" + + (if (reporter.WARNING.count > 1) "s" else "") + + "; see the documenter output for details.") + reporter.printSummary() + } catch { + case exception: Throwable => + exception.printStackTrace() + val msg = Option(exception.getMessage) getOrElse "no error message provided" + safeBuildError(s"Document failed because of an internal documenter error ($msg); see the error output for details.") + } + } +} diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala new file mode 100644 index 0000000000..52a0c20a11 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala @@ -0,0 +1,72 @@ +/* scaladoc, a documentation generator for Scala + * Copyright 2005-2013 LAMP/EPFL + * @author Martin Odersky + * @author Geoffrey Washburn + */ + +package scala.tools.nsc + +import java.io.File.pathSeparator +import scala.tools.nsc.doc.DocFactory +import scala.tools.nsc.reporters.ConsoleReporter +import scala.reflect.internal.util.FakePos + +/** The main class for scaladoc, a front-end for the Scala compiler + * that generates documentation from source files. + */ +class ScalaDoc { + val versionMsg = "Scaladoc %s -- %s".format(Properties.versionString, Properties.copyrightString) + + def process(args: Array[String]): Boolean = { + var reporter: ConsoleReporter = null + val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"), + msg => reporter.printMessage(msg)) + reporter = new ConsoleReporter(docSettings) { + // need to do this so that the Global instance doesn't trash all the + // symbols just because there was an error + override def hasErrors = false + } + val command = new ScalaDoc.Command(args.toList, docSettings) + def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty + + if (docSettings.version.value) + reporter.echo(versionMsg) + else if (docSettings.Xhelp.value) + reporter.echo(command.xusageMsg) + else if (docSettings.Yhelp.value) + reporter.echo(command.yusageMsg) + else if (docSettings.showPlugins.value) + reporter.warning(null, "Plugins are not available when using Scaladoc") + else if (docSettings.showPhases.value) + reporter.warning(null, "Phases are restricted when using Scaladoc") + else if (docSettings.help.value || !hasFiles) + reporter.echo(command.usageMsg) + else + try { new DocFactory(reporter, docSettings) document command.files } + catch { + case ex @ FatalError(msg) => + if (docSettings.debug.value) ex.printStackTrace() + reporter.error(null, "fatal error: " + msg) + } + finally reporter.printSummary() + + // not much point in returning !reporter.hasErrors when it has + // been overridden with constant false. 
+ true + } +} + +object ScalaDoc extends ScalaDoc { + class Command(arguments: List[String], settings: doc.Settings) extends CompilerCommand(arguments, settings) { + override def cmdName = "scaladoc" + override def usageMsg = ( + createUsageMsg("where possible scaladoc", shouldExplain = false, x => x.isStandard && settings.isScaladocSpecific(x.name)) + + "\n\nStandard scalac options also available:" + + createUsageMsg(x => x.isStandard && !settings.isScaladocSpecific(x.name)) + ) + } + + def main(args: Array[String]): Unit = sys exit { + if (process(args)) 0 else 1 + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala new file mode 100644 index 0000000000..b4d2adaad4 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala @@ -0,0 +1,132 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author David Bernard, Manohar Jonnalagedda + */ + +package scala.tools.nsc +package doc + +import scala.util.control.ControlThrowable +import reporters.Reporter +import scala.reflect.internal.util.BatchSourceFile + +/** A documentation processor controls the process of generating Scala + * documentation, which is as follows. + * + * * A simplified compiler instance (with only the front-end phases enabled) + * * is created, and additional `sourceless` comments are registered. + * * Documentable files are compiled, thereby filling the compiler's symbol table. + * * A documentation model is extracted from the post-compilation symbol table. + * * A generator is used to transform the model into the correct final format (HTML). + * + * A processor contains a single compiler instantiated from the processor's + * `settings`. Each call to `document` uses the same compiler instance with + * the same symbol table. In particular, this implies that the scaladoc site + * obtained from a call to `run` will contain documentation about files compiled + * during previous calls to the same processor's `run` method. + * + * @param reporter The reporter to which both documentation and compilation errors will be reported. + * @param settings The settings to be used by the documenter and compiler for generating documentation. + * + * @author Gilles Dubochet */ +class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor => + /** The unique compiler instance used by this processor and constructed from its `settings`. */ + object compiler extends ScaladocGlobal(settings, reporter) + + /** Creates a scaladoc site for all symbols defined in this call's `source`, + * as well as those defined in `sources` of previous calls to the same processor. + * @param source The list of paths (relative to the compiler's source path, + * or absolute) of files to document or the source code. 
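+   *
+   *  A minimal, hypothetical use, given a `reporter` and doc `settings`
+   *  (the file name is made up):
+   *  {{{
+   *  val factory  = new DocFactory(reporter, settings)
+   *  val universe = factory.makeUniverse(Left(List("src/Example.scala")))
+   *  }}}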
*/ + def makeUniverse(source: Either[List[String], String]): Option[Universe] = { + assert(settings.docformat.value == "html") + source match { + case Left(files) => + new compiler.Run() compile files + case Right(sourceCode) => + new compiler.Run() compileSources List(new BatchSourceFile("newSource", sourceCode)) + } + + if (reporter.hasErrors) + return None + + val extraTemplatesToDocument: Set[compiler.Symbol] = { + if (settings.docUncompilable.isDefault) Set() + else { + val uncompilable = new { + val global: compiler.type = compiler + val settings = processor.settings + } with Uncompilable { } + + compiler.docComments ++= uncompilable.comments + docdbg("" + uncompilable) + + uncompilable.templates + } + } + + val modelFactory = ( + new { override val global: compiler.type = compiler } + with model.ModelFactory(compiler, settings) + with model.ModelFactoryImplicitSupport + with model.ModelFactoryTypeSupport + with model.diagram.DiagramFactory + with model.CommentFactory + with model.TreeFactory + with model.MemberLookup { + override def templateShouldDocument(sym: compiler.Symbol, inTpl: DocTemplateImpl) = + extraTemplatesToDocument(sym) || super.templateShouldDocument(sym, inTpl) + } + ) + + modelFactory.makeModel match { + case Some(madeModel) => + if (!settings.scaladocQuietRun) + println("model contains " + modelFactory.templatesCount + " documentable templates") + Some(madeModel) + case None => + if (!settings.scaladocQuietRun) + println("no documentable class found in compilation units") + None + } + } + + object NoCompilerRunException extends ControlThrowable { } + + val documentError: PartialFunction[Throwable, Unit] = { + case NoCompilerRunException => + reporter.info(null, "No documentation generated with unsucessful compiler run", force = false) + case _: ClassNotFoundException => + () + } + + /** Generate document(s) for all `files` containing scaladoc documenataion. + * @param files The list of paths (relative to the compiler's source path, or absolute) of files to document. */ + def document(files: List[String]) { + def generate() = { + import doclet._ + val docletClass = Class.forName(settings.docgenerator.value) // default is html.Doclet + val docletInstance = docletClass.newInstance().asInstanceOf[Generator] + + docletInstance match { + case universer: Universer => + val universe = makeUniverse(Left(files)) getOrElse { throw NoCompilerRunException } + universer setUniverse universe + + docletInstance match { + case indexer: Indexer => indexer setIndex model.IndexModelFactory.makeIndex(universe) + case _ => () + } + case _ => () + } + docletInstance.generate() + } + + try generate() + catch documentError + } + + private[doc] def docdbg(msg: String) { + if (settings.Ydocdebug.value) + println(msg) + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala new file mode 100644 index 0000000000..6dc3e5a62b --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala @@ -0,0 +1,69 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools +package nsc +package doc + +import reporters._ +import scala.reflect.internal.util._ +import DocParser.Parsed + +/** A very minimal global customized for extracting `DocDefs`. It stops + * right after parsing so it can read `DocDefs` from source code which would + * otherwise cause the compiler to go haywire. 
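+ *
+ *  A minimal, hypothetical use; `sourceWithDocComments` stands for any source
+ *  string containing doc comments:
+ *  {{{
+ *  val parser = new DocParser()
+ *  val parsed = parser.docDefs(sourceWithDocComments)
+ *  parsed foreach (p => println(p.nameChain))
+ *  }}}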
+ */ +class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) { + def this(settings: Settings) = this(settings, new ConsoleReporter(settings)) + def this() = this(new Settings(Console println _)) + + // the usual global initialization + locally { new Run() } + + override protected def computeInternalPhases() { + phasesSet += syntaxAnalyzer + } + + /** Returns a list of `DocParser.Parseds`, which hold the DocDefs found + * in the given code along with the surrounding trees. + */ + def docDefs(code: String) = { + def loop(enclosing: List[Tree], tree: Tree): List[Parsed] = tree match { + case x: PackageDef => x.stats flatMap (t => loop(enclosing :+ x, t)) + case x: DocDef => new Parsed(enclosing, x) :: loop(enclosing :+ x.definition, x.definition) + case x => x.children flatMap (t => loop(enclosing, t)) + } + loop(Nil, docUnit(code)) + } + + /** A compilation unit containing parsed source. + */ + def docUnit(code: String) = { + val unit = new CompilationUnit(new BatchSourceFile("", code)) + val scanner = newUnitParser(unit) + + scanner.compilationUnit() + } +} + +/** Since the DocParser's whole reason for existing involves trashing a + * global, it is designed to bottle up general `Global#Tree` types rather + * than path dependent ones. The recipient will have to deal. + */ +object DocParser { + type Tree = Global#Tree + type DefTree = Global#DefTree + type DocDef = Global#DocDef + type Name = Global#Name + + class Parsed(val enclosing: List[Tree], val docDef: DocDef) { + def nameChain: List[Name] = (enclosing :+ docDef.definition) collect { case x: DefTree => x.name } + def raw: String = docDef.comment.raw + + override def toString = ( + nameChain.init.map(x => if (x.isTypeName) x + "#" else x + ".").mkString + nameChain.last + ) + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/Index.scala b/src/scaladoc/scala/tools/nsc/doc/Index.scala new file mode 100644 index 0000000000..f9b9eecdb3 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/Index.scala @@ -0,0 +1,17 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc.doc + +import scala.collection._ + + +trait Index { + + type SymbolMap = SortedMap[String, SortedSet[model.MemberEntity]] + + def firstLetterIndex: Map[Char, SymbolMap] + +} diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala new file mode 100644 index 0000000000..021e59a879 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala @@ -0,0 +1,106 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc +package doc + +import scala.util.control.ControlThrowable +import reporters.Reporter +import typechecker.Analyzer +import scala.reflect.internal.util.BatchSourceFile + +trait ScaladocAnalyzer extends Analyzer { + val global : Global // generally, a ScaladocGlobal + import global._ + + override def newTyper(context: Context): ScaladocTyper = new ScaladocTyper(context) + + class ScaladocTyper(context0: Context) extends Typer(context0) { + private def unit = context.unit + + override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = { + val sym = docDef.symbol + + if ((sym ne null) && (sym ne NoSymbol)) { + val comment = docDef.comment + fillDocComment(sym, comment) + val typer1 = newTyper(context.makeNewScope(docDef, context.owner)) + for (useCase <- comment.useCases) { + typer1.silent(_ => typer1 
defineUseCases useCase) match { + case SilentTypeError(err) => + unit.warning(useCase.pos, err.errMsg) + case _ => + } + for (useCaseSym <- useCase.defined) { + if (sym.name != useCaseSym.name) + unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode) + } + } + } + + super.typedDocDef(docDef, mode, pt) + } + + def defineUseCases(useCase: UseCase): List[Symbol] = { + def stringParser(str: String): syntaxAnalyzer.Parser = { + val file = new BatchSourceFile(context.unit.source.file, str) { + override def positionInUltimateSource(pos: Position) = { + pos.withSource(context.unit.source, useCase.pos.start) + } + } + val unit = new CompilationUnit(file) + new syntaxAnalyzer.UnitParser(unit) + } + + val trees = stringParser(useCase.body+";").nonLocalDefOrDcl + val enclClass = context.enclClass.owner + + def defineAlias(name: Name) = ( + if (context.scope.lookup(name) == NoSymbol) { + lookupVariable(name.toString.substring(1), enclClass) foreach { repl => + silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt => + val alias = enclClass.newAliasType(name.toTypeName, useCase.pos) + val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias) + val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe))) + alias setInfo newInfo + context.scope.enter(alias) + } + } + } + ) + + for (tree <- trees; t <- tree) + t match { + case Ident(name) if name startsWith '$' => defineAlias(name) + case _ => + } + + useCase.aliases = context.scope.toList + namer.enterSyms(trees) + typedStats(trees, NoSymbol) + useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) + + if (settings.debug.value) + useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) + + useCase.defined + } + } +} + +class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends { + override val useOffsetPositions = false +} with Global(settings, reporter) { + override protected def computeInternalPhases() { + phasesSet += syntaxAnalyzer + phasesSet += analyzer.namerFactory + phasesSet += analyzer.packageObjects + phasesSet += analyzer.typerFactory + } + override def forScaladoc = true + override lazy val analyzer = new { + val global: ScaladocGlobal.this.type = ScaladocGlobal.this + } with ScaladocAnalyzer +} diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala new file mode 100644 index 0000000000..90b94e1336 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala @@ -0,0 +1,368 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc +package doc + +import java.io.File +import scala.language.postfixOps + +/** An extended version of compiler settings, with additional Scaladoc-specific options. + * @param error A function that prints a string to the appropriate error stream + * @param printMsg A function that prints the string, without any extra boilerplate of error */ +class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) { + + /** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always + * `html`. 
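+   *  On the command line this corresponds to `-doc-format:html`, currently the only accepted choice.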
*/ + val docformat = ChoiceSetting ( + "-doc-format", + "format", + "Selects in which format documentation is rendered", + List("html"), + "html" + ) + + /** A setting that defines the overall title of the documentation, typically the name of the library being + * documented. ''Note:'' This setting is currently not used. */ + val doctitle = StringSetting ( + "-doc-title", + "title", + "The overall name of the Scaladoc site", + "" + ) + + /** A setting that defines the overall version number of the documentation, typically the version of the library being + * documented. ''Note:'' This setting is currently not used. */ + val docversion = StringSetting ( + "-doc-version", + "version", + "An optional version number, to be appended to the title", + "" + ) + + val docfooter = StringSetting ( + "-doc-footer", + "footer", + "A footer on every ScalaDoc page, by default the EPFL/Typesafe copyright notice. Can be overridden with a custom footer.", + "" + ) + + val docUncompilable = StringSetting ( + "-doc-no-compile", + "path", + "A directory containing sources which should be parsed, no more (e.g. AnyRef.scala)", + "" + ) + + lazy val uncompilableFiles = docUncompilable.value match { + case "" => Nil + case path => io.Directory(path).deepFiles filter (_ hasExtension "scala") toList + } + + /** A setting that defines a URL to be concatenated with source locations and show a link to source files. + * If needed the sourcepath option can be used to exclude undesired initial part of the link to sources */ + val docsourceurl = StringSetting ( + "-doc-source-url", + "url", + "A URL pattern used to build links to template sources; use variables, for example: ?{TPL_NAME} ('Seq'), ?{TPL_OWNER} ('scala.collection'), ?{FILE_PATH} ('scala/collection/Seq')", + "" + ) + + val docExternalDoc = MultiStringSetting ( + "-doc-external-doc", + "external-doc", + "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies." + ) + + val useStupidTypes = BooleanSetting ( + "-Yuse-stupid-types", + "Print the types of inherited members as seen from their original definition context. Hint: you don't want to do that!" + ) + + val docgenerator = StringSetting ( + "-doc-generator", + "class-name", + "The fully qualified name of a doclet class, which will be used to generate the documentation", + "scala.tools.nsc.doc.html.Doclet" + ) + + val docRootContent = PathSetting ( + "-doc-root-content", + "The file from which the root package documentation should be imported.", + "" + ) + + val docImplicits = BooleanSetting ( + "-implicits", + "Document members inherited by implicit conversions." + ) + + val docImplicitsDebug = BooleanSetting ( + "-implicits-debug", + "Show debugging information for members inherited by implicit conversions." + ) + + val docImplicitsShowAll = BooleanSetting ( + "-implicits-show-all", + "Show members inherited by implicit conversions that are impossible in the default scope. " + + "(for example conversions that require Numeric[String] to be in scope)" + ) + + val docImplicitsSoundShadowing = BooleanSetting ( + "-implicits-sound-shadowing", + "Use a sound implicit shadowing calculation. Note: this interacts badly with usecases, so " + + "only use it if you haven't defined usecase for implicitly inherited members." + ) + + val docImplicitsHide = MultiStringSetting ( + "-implicits-hide", + "implicit(s)", + "Hide the members inherited by the given comma separated, fully qualified implicit conversions. Add dot (.) to include default conversions." 
+  )
+
+  val docDiagrams = BooleanSetting (
+    "-diagrams",
+    "Create inheritance diagrams for classes, traits and packages."
+  )
+
+  val docDiagramsDebug = BooleanSetting (
+    "-diagrams-debug",
+    "Show debugging information for the diagram creation process."
+  )
+
+  val docDiagramsDotPath = PathSetting (
+    "-diagrams-dot-path",
+    "The path to the dot executable used to generate the inheritance diagrams. E.g.: /usr/bin/dot",
+    "dot" // by default, just pick up the system-wide dot
+  )
+
+  /** The maximum number of normal classes to show in the diagram */
+  val docDiagramsMaxNormalClasses = IntSetting(
+    "-diagrams-max-classes",
+    "The maximum number of superclasses or subclasses to show in a diagram",
+    15,
+    None,
+    _ => None
+  )
+
+  /** The maximum number of implicit classes to show in the diagram */
+  val docDiagramsMaxImplicitClasses = IntSetting(
+    "-diagrams-max-implicits",
+    "The maximum number of implicitly converted classes to show in a diagram",
+    10,
+    None,
+    _ => None
+  )
+
+  val docDiagramsDotTimeout = IntSetting(
+    "-diagrams-dot-timeout",
+    "The timeout before the graphviz dot utility is forcefully closed, in seconds (default: 10)",
+    10,
+    None,
+    _ => None
+  )
+
+  val docDiagramsDotRestart = IntSetting(
+    "-diagrams-dot-restart",
+    "The number of times to restart a malfunctioning dot process before disabling diagrams (default: 5)",
+    5,
+    None,
+    _ => None
+  )
+
+  val docRawOutput = BooleanSetting (
+    "-raw-output",
+    "For each html file, create another .html.raw file containing only the text. (can be used for quickly diffing two scaladoc outputs)"
+  )
+
+  val docNoPrefixes = BooleanSetting (
+    "-no-prefixes",
+    "Prevents generating prefixes in types, possibly creating ambiguous references, but significantly speeding up scaladoc."
+  )
+
+  val docNoLinkWarnings = BooleanSetting (
+    "-no-link-warnings",
+    "Avoid warnings for ambiguous and incorrect links."
+  )
+
+  val docSkipPackages = StringSetting (
+    "-skip-packages",
+    ":...:",
+    "A colon-delimited list of fully qualified package names that will be skipped from scaladoc.",
+    ""
+  )
+
+  val docExpandAllTypes = BooleanSetting (
+    "-expand-all-types",
+    "Expand all type aliases and abstract types into full template pages. (locally this can be done with the @template annotation)"
+  )
+
+  val docExternalUrls = MultiStringSetting (
+    "-external-urls",
+    "externalUrl(s)",
+    "(deprecated) comma-separated list of package_names=doc_URL for external dependencies, where package names are ':'-separated"
+  )
+
+  val docGroups = BooleanSetting (
+    "-groups",
+    "Group similar functions together (based on the @group annotation)"
+  )
+
+  // Somewhere slightly before r18708 scaladoc stopped building unless the
+  // self-type check was suppressed. I hijacked the slotted-for-removal-anyway
+  // suppress-vt-warnings option and renamed it for this purpose.
+  noSelfCheck.value = true
+
+  // For improved help output.
+ def scaladocSpecific = Set[Settings#Setting]( + docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes, + docDiagrams, docDiagramsDebug, docDiagramsDotPath, + docDiagramsDotTimeout, docDiagramsDotRestart, + docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide, + docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses, + docNoPrefixes, docNoLinkWarnings, docRawOutput, docSkipPackages, + docExpandAllTypes, docGroups + ) + val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name) + + override def isScaladoc = true + + // set by the testsuite, when checking test output + var scaladocQuietRun = false + + lazy val skipPackageNames = + if (docSkipPackages.value == "") + Set[String]() + else + docSkipPackages.value.toLowerCase.split(':').toSet + + def skipPackage(qname: String) = + skipPackageNames(qname.toLowerCase) + + lazy val hiddenImplicits: Set[String] = { + if (docImplicitsHide.value.isEmpty) hardcoded.commonConversionTargets + else docImplicitsHide.value.toSet flatMap { name: String => + if(name == ".") hardcoded.commonConversionTargets + else Set(name) + } + } + + def appendIndex(url: String): String = { + val index = "/index.html" + if (url.endsWith(index)) url else url + index + } + + // Deprecated together with 'docExternalUrls' option. + lazy val extUrlPackageMapping: Map[String, String] = (Map.empty[String, String] /: docExternalUrls.value) { + case (map, binding) => + val idx = binding indexOf "=" + val pkgs = binding substring (0, idx) split ":" + val url = appendIndex(binding substring (idx + 1)) + map ++ (pkgs map (_ -> url)) + } + + lazy val extUrlMapping: Map[String, String] = docExternalDoc.value flatMap { s => + val idx = s.indexOf("#") + if (idx > 0) { + val (first, last) = s.splitAt(idx) + Some(new File(first).getAbsolutePath -> appendIndex(last.substring(1))) + } else { + error(s"Illegal -doc-external-doc option; expected a pair with '#' separator, found: '$s'") + None + } + } toMap + + /** + * This is the hardcoded area of Scaladoc. This is where "undesirable" stuff gets eliminated. I know it's not pretty, + * but ultimately scaladoc has to be useful. :) + */ + object hardcoded { + + /** The common context bounds and some humanly explanations. 
Feel free to add more explanations + * `.scala.package.Numeric` is the type class + * `tparam` is the name of the type parameter it gets (this only describes type classes with 1 type param) + * the function result should be a humanly-understandable description of the type class + */ + val knownTypeClasses: Map[String, String => String] = Map() + + ("scala.math.Numeric" -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) + + ("scala.math.Integral" -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) + + ("scala.math.Fractional" -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) + + ("scala.reflect.Manifest" -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) + + ("scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) + + ("scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) + + ("scala.reflect.ClassTag" -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) + + ("scala.reflect.api.TypeTags.WeakTypeTag" -> ((tparam: String) => tparam + " is accompanied by an WeakTypeTag, which is a runtime representation of its type that survives erasure")) + + ("scala.reflect.api.TypeTags.TypeTag" -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure")) + + /** + * Set of classes to exclude from index and diagrams + * TODO: Should be configurable + */ + def isExcluded(qname: String) = { + ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") || + qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction") + ) && !( + qname == "scala.Tuple1" || qname == "scala.Tuple2" || + qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" || + qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" || + qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" || + qname == "scala.runtime.AbstractFunction2" + ) + ) + } + + /** Common conversion targets that affect any class in Scala */ + val commonConversionTargets = Set( + "scala.Predef.StringFormat", + "scala.Predef.StringAdd", + "scala.Predef.ArrowAssoc", + "scala.Predef.Ensuring", + "scala.collection.TraversableOnce.alternateImplicit") + + /** There's a reason all these are specialized by hand but documenting each of them is beyond the point */ + val arraySkipConversions = List( + "scala.Predef.refArrayOps", + "scala.Predef.intArrayOps", + "scala.Predef.doubleArrayOps", + "scala.Predef.longArrayOps", + "scala.Predef.floatArrayOps", + "scala.Predef.charArrayOps", + "scala.Predef.byteArrayOps", + "scala.Predef.shortArrayOps", + "scala.Predef.booleanArrayOps", + "scala.Predef.unitArrayOps", + "scala.LowPriorityImplicits.wrapRefArray", + "scala.LowPriorityImplicits.wrapIntArray", + "scala.LowPriorityImplicits.wrapDoubleArray", + "scala.LowPriorityImplicits.wrapLongArray", + "scala.LowPriorityImplicits.wrapFloatArray", + "scala.LowPriorityImplicits.wrapCharArray", + 
"scala.LowPriorityImplicits.wrapByteArray", + "scala.LowPriorityImplicits.wrapShortArray", + "scala.LowPriorityImplicits.wrapBooleanArray", + "scala.LowPriorityImplicits.wrapUnitArray", + "scala.LowPriorityImplicits.genericWrapArray") + + // included as names as here we don't have access to a Global with Definitions :( + def valueClassList = List("unit", "boolean", "byte", "short", "char", "int", "long", "float", "double") + def valueClassFilterPrefixes = List("scala.LowPriorityImplicits", "scala.Predef") + + /** Dirty, dirty, dirty hack: the value params conversions can all kick in -- and they are disambiguated by priority + * but showing priority in scaladoc would make no sense -- so we have to manually remove the conversions that we + * know will never get a chance to kick in. Anyway, DIRTY DIRTY DIRTY! */ + def valueClassFilter(value: String, conversionName: String): Boolean = { + val valueName = value.toLowerCase + val otherValues = valueClassList.filterNot(_ == valueName) + + for (prefix <- valueClassFilterPrefixes) + if (conversionName.startsWith(prefix)) + for (otherValue <- otherValues) + if (conversionName.startsWith(prefix + "." + otherValue)) + return false + + true + } + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala new file mode 100644 index 0000000000..9447e36610 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala @@ -0,0 +1,51 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc +package doc +import scala.language.implicitConversions +import scala.language.postfixOps + +/** Some glue between DocParser (which reads source files which can't be compiled) + * and the scaladoc model. 
+ */ +trait Uncompilable { + val global: Global + val settings: Settings + + import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, DocComment, NoSymbol } + import global.definitions.AnyRefClass + import global.rootMirror.RootClass + + private implicit def translateName(name: Global#Name) = + if (name.isTypeName) newTypeName("" + name) else newTermName("" + name) + + def docSymbol(p: DocParser.Parsed) = p.nameChain.foldLeft(RootClass: Symbol)(_.tpe member _) + def docDefs(code: String) = new DocParser(settings, reporter) docDefs code + def docPairs(code: String) = docDefs(code) map (p => (docSymbol(p), new DocComment(p.raw))) + + lazy val pairs = files flatMap { f => + val comments = docPairs(f.slurp()) + if (settings.verbose.value) + inform("Found %d doc comments in parse-only file %s: %s".format(comments.size, f, comments.map(_._1).mkString(", "))) + + comments + } + def files = settings.uncompilableFiles + def symbols = pairs map (_._1) + def templates = symbols filter (x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */) toSet + def comments = { + if (settings.debug.value || settings.verbose.value) + inform("Found %d uncompilable files: %s".format(files.size, files mkString ", ")) + + if (pairs.isEmpty) + warning("no doc comments read from " + settings.docUncompilable.value) + + pairs + } + override def toString = pairs.size + " uncompilable symbols:\n" + ( + symbols filterNot (_ == NoSymbol) map (x => " " + x.owner.fullName + " " + x.defString) mkString "\n" + ) +} diff --git a/src/scaladoc/scala/tools/nsc/doc/Universe.scala b/src/scaladoc/scala/tools/nsc/doc/Universe.scala new file mode 100644 index 0000000000..11520c810e --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/Universe.scala @@ -0,0 +1,16 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc.doc + +/** + * Class to hold common dependencies across Scaladoc classes. + * @author Pedro Furlanetto + * @author Gilles Dubochet + */ +trait Universe { + def settings: Settings + def rootPackage: model.Package +} diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala new file mode 100755 index 0000000000..2064d86860 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -0,0 +1,936 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Manohar Jonnalagedda + */ + +package scala.tools.nsc +package doc +package base + +import base.comment._ +import scala.collection._ +import scala.util.matching.Regex +import scala.reflect.internal.util.Position +import scala.language.postfixOps + +/** The comment parser transforms raw comment strings into `Comment` objects. + * Call `parse` to run the parser. Note that the parser is stateless and + * should only be built once for a given Scaladoc run. 
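+ *  For instance (illustrative description), a raw comment containing a summary
+ *  sentence and a `@param x` tag yields a `Comment` whose `body` holds the
+ *  summary and whose `valueParams` maps `"x"` to the tag's body.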
+ * + * @author Manohar Jonnalagedda + * @author Gilles Dubochet */ +trait CommentFactoryBase { this: MemberLookupBase => + + val global: Global + import global.{ reporter, Symbol } + + /* Creates comments with necessary arguments */ + def createComment ( + body0: Option[Body] = None, + authors0: List[Body] = List.empty, + see0: List[Body] = List.empty, + result0: Option[Body] = None, + throws0: Map[String,Body] = Map.empty, + valueParams0: Map[String,Body] = Map.empty, + typeParams0: Map[String,Body] = Map.empty, + version0: Option[Body] = None, + since0: Option[Body] = None, + todo0: List[Body] = List.empty, + deprecated0: Option[Body] = None, + note0: List[Body] = List.empty, + example0: List[Body] = List.empty, + constructor0: Option[Body] = None, + source0: Option[String] = None, + inheritDiagram0: List[String] = List.empty, + contentDiagram0: List[String] = List.empty, + group0: Option[Body] = None, + groupDesc0: Map[String,Body] = Map.empty, + groupNames0: Map[String,Body] = Map.empty, + groupPrio0: Map[String,Body] = Map.empty + ) : Comment = new Comment{ + val body = if(body0 isDefined) body0.get else Body(Seq.empty) + val authors = authors0 + val see = see0 + val result = result0 + val throws = throws0 + val valueParams = valueParams0 + val typeParams = typeParams0 + val version = version0 + val since = since0 + val todo = todo0 + val deprecated = deprecated0 + val note = note0 + val example = example0 + val constructor = constructor0 + val inheritDiagram = inheritDiagram0 + val contentDiagram = contentDiagram0 + val groupDesc = groupDesc0 + val group = + group0 match { + case Some(Body(List(Paragraph(Chain(List(Summary(Text(groupId)))))))) => Some(groupId.toString.trim) + case _ => None + } + val groupPrio = groupPrio0 flatMap { + case (group, body) => + try { + body match { + case Body(List(Paragraph(Chain(List(Summary(Text(prio))))))) => List(group -> prio.trim.toInt) + case _ => List() + } + } catch { + case _: java.lang.NumberFormatException => List() + } + } + val groupNames = groupNames0 flatMap { + case (group, body) => + try { + body match { + case Body(List(Paragraph(Chain(List(Summary(Text(name))))))) if (!name.trim.contains("\n")) => List(group -> (name.trim)) + case _ => List() + } + } catch { + case _: java.lang.NumberFormatException => List() + } + } + + } + + private val endOfText = '\u0003' + private val endOfLine = '\u000A' + + /** Something that should not have happened, happened, and Scaladoc should exit. */ + private def oops(msg: String): Nothing = + throw FatalError("program logic: " + msg) + + /** The body of a line, dropping the (optional) start star-marker, + * one leading whitespace and all trailing whitespace. */ + private val CleanCommentLine = + new Regex("""(?:\s*\*\s?)?(.*)""") + + /** Dangerous HTML tags that should be replaced by something safer, + * such as wiki syntax, or that should be dropped. */ + private val DangerousTags = + new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|""") + + /** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string + * if it cannot be salvaged. 
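+   *  For example, `<h2>` and `</h2>` are rewritten to the wiki title markers
+   *  `== ` and ` ==` (see the cases below).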
*/ + private def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match { + case "p" | "div" => "\n\n" + case "h1" => "\n= " + case "/h1" => " =\n" + case "h2" => "\n== " + case "/h2" => " ==\n" + case "h3" => "\n=== " + case "/h3" => " ===\n" + case "h4" | "h5" | "h6" => "\n==== " + case "/h4" | "/h5" | "/h6" => " ====\n" + case "li" => "\n * - " + case _ => "" + } + + /** Javadoc tags that should be replaced by something useful, such as wiki + * syntax, or that should be dropped. */ + private val JavadocTags = + new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""") + + /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */ + private def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match { + case "code" => "`" + mtch.group(2) + "`" + case "docRoot" => "" + case "inheritDoc" => "" + case "link" => "`" + mtch.group(2) + "`" + case "linkplain" => "`" + mtch.group(2) + "`" + case "literal" => mtch.group(2) + case "value" => "`" + mtch.group(2) + "`" + case _ => "" + } + + /** Safe HTML tags that can be kept. */ + private val SafeTags = + new Regex("""((&\w+;)|(&#\d+;)|(]*)?/?>))""") + + private val safeTagMarker = '\u000E' + + /** A Scaladoc tag not linked to a symbol and not followed by text */ + private val SingleTagRegex = + new Regex("""\s*@(\S+)\s*""") + + /** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */ + private val SimpleTagRegex = + new Regex("""\s*@(\S+)\s+(.*)""") + + /** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name + * of the symbol, and the rest of the line. */ + private val SymbolTagRegex = + new Regex("""\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""") + + /** The start of a scaladoc code block */ + private val CodeBlockStartRegex = + new Regex("""(.*?)((?:\{\{\{)|(?:\u000E]*)?>\u000E))(.*)""") + + /** The end of a scaladoc code block */ + private val CodeBlockEndRegex = + new Regex("""(.*?)((?:\}\}\})|(?:\u000E\u000E))(.*)""") + + /** A key used for a tag map. The key is built from the name of the tag and + * from the linked symbol if the tag has one. + * Equality on tag keys is structural. */ + private sealed abstract class TagKey { + def name: String + } + + private final case class SimpleTagKey(name: String) extends TagKey + private final case class SymbolTagKey(name: String, symbol: String) extends TagKey + + /** Parses a raw comment string into a `Comment` object. + * @param comment The expanded comment string (including start and end markers) to be parsed. + * @param src The raw comment source string. + * @param pos The position of the comment in source. */ + protected def parseAtSymbol(comment: String, src: String, pos: Position, siteOpt: Option[Symbol] = None): Comment = { + /** The cleaned raw comment as a list of lines. Cleaning removes comment + * start and end markers, line start markers and unnecessary whitespace. 
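+     *  For example, an input line of the form `   * some text   ` is reduced to `some text`.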
*/ + def clean(comment: String): List[String] = { + def cleanLine(line: String): String = { + //replaceAll removes trailing whitespaces + line.replaceAll("""\s+$""", "") match { + case CleanCommentLine(ctl) => ctl + case tl => tl + } + } + val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/") + val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) }) + val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) }) + val markedTagComment = + SafeTags.replaceAllIn(javadoclessComment, { mtch => + java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker) + }) + markedTagComment.lines.toList map (cleanLine(_)) + } + + /** Parses a comment (in the form of a list of lines) to a `Comment` + * instance, recursively on lines. To do so, it splits the whole comment + * into main body and tag bodies, then runs the `WikiParser` on each body + * before creating the comment instance. + * + * @param docBody The body of the comment parsed until now. + * @param tags All tags parsed until now. + * @param lastTagKey The last parsed tag, or `None` if the tag section hasn't started. Lines that are not tagged + * are part of the previous tag or, if none exists, of the body. + * @param remaining The lines that must still recursively be parsed. + * @param inCodeBlock Whether the next line is part of a code block (in which no tags must be read). */ + def parse0 ( + docBody: StringBuilder, + tags: Map[TagKey, List[String]], + lastTagKey: Option[TagKey], + remaining: List[String], + inCodeBlock: Boolean + ): Comment = remaining match { + + case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) => + if (!before.trim.isEmpty && !after.trim.isEmpty) + parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = false) + else if (!before.trim.isEmpty) + parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = false) + else if (!after.trim.isEmpty) + parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = true) + else lastTagKey match { + case Some(key) => + val value = + ((tags get key): @unchecked) match { + case Some(b :: bs) => (b + endOfLine + marker) :: bs + case None => oops("lastTagKey set when no tag exists for key") + } + parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = true) + case None => + parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true) + } + + case CodeBlockEndRegex(before, marker, after) :: ls => + if (!before.trim.isEmpty && !after.trim.isEmpty) + parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true) + if (!before.trim.isEmpty) + parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = true) + else if (!after.trim.isEmpty) + parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = false) + else lastTagKey match { + case Some(key) => + val value = + ((tags get key): @unchecked) match { + case Some(b :: bs) => (b + endOfLine + marker) :: bs + case None => oops("lastTagKey set when no tag exists for key") + } + parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = false) + case None => + parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = false) + } + + case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) => + val key = SymbolTagKey(name, sym) + val value = body :: tags.getOrElse(key, Nil) + parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) + + case 
SimpleTagRegex(name, body) :: ls if (!inCodeBlock) => + val key = SimpleTagKey(name) + val value = body :: tags.getOrElse(key, Nil) + parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) + + case SingleTagRegex(name) :: ls if (!inCodeBlock) => + val key = SimpleTagKey(name) + val value = "" :: tags.getOrElse(key, Nil) + parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock) + + case line :: ls if (lastTagKey.isDefined) => + val key = lastTagKey.get + val value = + ((tags get key): @unchecked) match { + case Some(b :: bs) => (b + endOfLine + line) :: bs + case None => oops("lastTagKey set when no tag exists for key") + } + parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock) + + case line :: ls => + if (docBody.length > 0) docBody append endOfLine + docBody append line + parse0(docBody, tags, lastTagKey, ls, inCodeBlock) + + case Nil => + // Take the {inheritance, content} diagram keys aside, as it doesn't need any parsing + val inheritDiagramTag = SimpleTagKey("inheritanceDiagram") + val contentDiagramTag = SimpleTagKey("contentDiagram") + + val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match { + case Some(list) => list + case None => List.empty + } + + val contentDiagramText: List[String] = tags.get(contentDiagramTag) match { + case Some(list) => list + case None => List.empty + } + + val stripTags=List(inheritDiagramTag, contentDiagramTag, SimpleTagKey("template"), SimpleTagKey("documentable")) + val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1)) + + val bodyTags: mutable.Map[TagKey, List[Body]] = + mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(_, pos, siteOpt))} toSeq: _*) + + def oneTag(key: SimpleTagKey): Option[Body] = + ((bodyTags remove key): @unchecked) match { + case Some(r :: rs) => + if (!rs.isEmpty) reporter.warning(pos, "Only one '@" + key.name + "' tag is allowed") + Some(r) + case None => None + } + + def allTags(key: SimpleTagKey): List[Body] = + (bodyTags remove key) getOrElse Nil + + def allSymsOneTag(key: TagKey): Map[String, Body] = { + val keys: Seq[SymbolTagKey] = + bodyTags.keys.toSeq flatMap { + case stk: SymbolTagKey if (stk.name == key.name) => Some(stk) + case stk: SimpleTagKey if (stk.name == key.name) => + reporter.warning(pos, "Tag '@" + stk.name + "' must be followed by a symbol name") + None + case _ => None + } + val pairs: Seq[(String, Body)] = + for (key <- keys) yield { + val bs = (bodyTags remove key).get + if (bs.length > 1) + reporter.warning(pos, "Only one '@" + key.name + "' tag for symbol " + key.symbol + " is allowed") + (key.symbol, bs.head) + } + Map.empty[String, Body] ++ pairs + } + + val com = createComment ( + body0 = Some(parseWikiAtSymbol(docBody.toString, pos, siteOpt)), + authors0 = allTags(SimpleTagKey("author")), + see0 = allTags(SimpleTagKey("see")), + result0 = oneTag(SimpleTagKey("return")), + throws0 = allSymsOneTag(SimpleTagKey("throws")), + valueParams0 = allSymsOneTag(SimpleTagKey("param")), + typeParams0 = allSymsOneTag(SimpleTagKey("tparam")), + version0 = oneTag(SimpleTagKey("version")), + since0 = oneTag(SimpleTagKey("since")), + todo0 = allTags(SimpleTagKey("todo")), + deprecated0 = oneTag(SimpleTagKey("deprecated")), + note0 = allTags(SimpleTagKey("note")), + example0 = allTags(SimpleTagKey("example")), + constructor0 = oneTag(SimpleTagKey("constructor")), + source0 = Some(clean(src).mkString("\n")), + inheritDiagram0 = inheritDiagramText, + contentDiagram0 = contentDiagramText, + group0 = 
oneTag(SimpleTagKey("group")), + groupDesc0 = allSymsOneTag(SimpleTagKey("groupdesc")), + groupNames0 = allSymsOneTag(SimpleTagKey("groupname")), + groupPrio0 = allSymsOneTag(SimpleTagKey("groupprio")) + ) + + for ((key, _) <- bodyTags) + reporter.warning(pos, "Tag '@" + key.name + "' is not recognised") + + com + + } + + parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), inCodeBlock = false) + + } + + /** Parses a string containing wiki syntax into a `Comment` object. + * Note that the string is assumed to be clean: + * - Removed Scaladoc start and end markers. + * - Removed start-of-line star and one whitespace afterwards (if present). + * - Removed all end-of-line whitespace. + * - Only `endOfLine` is used to mark line endings. */ + def parseWikiAtSymbol(string: String, pos: Position, siteOpt: Option[Symbol]): Body = new WikiParser(string, pos, siteOpt).document() + + /** TODO + * + * @author Ingo Maier + * @author Manohar Jonnalagedda + * @author Gilles Dubochet */ + protected final class WikiParser(val buffer: String, pos: Position, siteOpt: Option[Symbol]) extends CharReader(buffer) { wiki => + var summaryParsed = false + + def document(): Body = { + val blocks = new mutable.ListBuffer[Block] + while (char != endOfText) + blocks += block() + Body(blocks.toList) + } + + /* BLOCKS */ + + /** {{{ block ::= code | title | hrule | para }}} */ + def block(): Block = { + if (checkSkipInitWhitespace("{{{")) + code() + else if (checkSkipInitWhitespace('=')) + title() + else if (checkSkipInitWhitespace("----")) + hrule() + else if (checkList) + listBlock + else { + para() + } + } + + /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc + * Characters used to build lists and their constructors */ + protected val listStyles = Map[String, (Seq[Block] => Block)]( // TODO Should this be defined at some list companion? + "- " -> ( UnorderedList(_) ), + "1. " -> ( OrderedList(_,"decimal") ), + "I. " -> ( OrderedList(_,"upperRoman") ), + "i. " -> ( OrderedList(_,"lowerRoman") ), + "A. " -> ( OrderedList(_,"upperAlpha") ), + "a. " -> ( OrderedList(_,"lowerAlpha") ) + ) + + /** Checks if the current line is formed with more than one space and one the listStyles */ + def checkList = + (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) }) + + /** {{{ + * nListBlock ::= nLine { mListBlock } + * nLine ::= nSpc listStyle para '\n' + * }}} + * Where n and m stand for the number of spaces. When `m > n`, a new list is nested. */ + def listBlock: Block = { + + /** Consumes one list item block and returns it, or None if the block is + * not a list or a different list. */ + def listLine(indent: Int, style: String): Option[Block] = + if (countWhitespace > indent && checkList) + Some(listBlock) + else if (countWhitespace != indent || !checkSkipInitWhitespace(style)) + None + else { + jumpWhitespace() + jump(style) + val p = Paragraph(inline(isInlineEnd = false)) + blockEnded("end of list line ") + Some(p) + } + + /** Consumes all list item blocks (possibly with nested lists) of the + * same list and returns the list block. 
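+     *  For example (illustrative), consecutive lines ` - first` and ` - second`
+     *  at the same indentation are gathered into a single `UnorderedList` block.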
*/ + def listLevel(indent: Int, style: String): Block = { + val lines = mutable.ListBuffer.empty[Block] + var line: Option[Block] = listLine(indent, style) + while (line.isDefined) { + lines += line.get + line = listLine(indent, style) + } + val constructor = listStyles(style) + constructor(lines) + } + + val indent = countWhitespace + val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head) + listLevel(indent, style) + } + + def code(): Block = { + jumpWhitespace() + jump("{{{") + val str = readUntil("}}}") + if (char == endOfText) + reportError(pos, "unclosed code block") + else + jump("}}}") + blockEnded("code block") + Code(normalizeIndentation(str)) + } + + /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */ + def title(): Block = { + jumpWhitespace() + val inLevel = repeatJump('=') + val text = inline(check("=" * inLevel)) + val outLevel = repeatJump('=', inLevel) + if (inLevel != outLevel) + reportError(pos, "unbalanced or unclosed heading") + blockEnded("heading") + Title(text, inLevel) + } + + /** {{{ hrule ::= "----" { '-' } '\n' }}} */ + def hrule(): Block = { + jumpWhitespace() + repeatJump('-') + blockEnded("horizontal rule") + HorizontalRule() + } + + /** {{{ para ::= inline '\n' }}} */ + def para(): Block = { + val p = + if (summaryParsed) + Paragraph(inline(isInlineEnd = false)) + else { + val s = summary() + val r = + if (checkParaEnded()) List(s) else List(s, inline(isInlineEnd = false)) + summaryParsed = true + Paragraph(Chain(r)) + } + while (char == endOfLine && char != endOfText) + nextChar() + p + } + + /* INLINES */ + + val OPEN_TAG = "^<([A-Za-z]+)( [^>]*)?(/?)>$".r + val CLOSE_TAG = "^$".r + private def readHTMLFrom(begin: HtmlTag): String = { + val list = mutable.ListBuffer.empty[String] + val stack = mutable.ListBuffer.empty[String] + + begin.close match { + case Some(HtmlTag(CLOSE_TAG(s))) => + stack += s + case _ => + return "" + } + + do { + val str = readUntil { char == safeTagMarker || char == endOfText } + nextChar() + + list += str + + str match { + case OPEN_TAG(s, _, standalone) => { + if (standalone != "/") { + stack += s + } + } + case CLOSE_TAG(s) => { + if (s == stack.last) { + stack.remove(stack.length-1) + } + } + case _ => ; + } + } while (stack.length > 0 && char != endOfText) + + list mkString "" + } + + def inline(isInlineEnd: => Boolean): Inline = { + + def inline0(): Inline = { + if (char == safeTagMarker) { + val tag = htmlTag() + HtmlTag(tag.data + readHTMLFrom(tag)) + } + else if (check("'''")) bold() + else if (check("''")) italic() + else if (check("`")) monospace() + else if (check("__")) underline() + else if (check("^")) superscript() + else if (check(",,")) subscript() + else if (check("[[")) link() + else { + val str = readUntil { char == safeTagMarker || check("''") || char == '`' || check("__") || char == '^' || check(",,") || check("[[") || isInlineEnd || checkParaEnded || char == endOfLine } + Text(str) + } + } + + val inlines: List[Inline] = { + val iss = mutable.ListBuffer.empty[Inline] + iss += inline0() + while (!isInlineEnd && !checkParaEnded) { + val skipEndOfLine = if (char == endOfLine) { + nextChar() + true + } else { + false + } + + val current = inline0() + (iss.last, current) match { + case (Text(t1), Text(t2)) if skipEndOfLine => + iss.update(iss.length - 1, Text(t1 + endOfLine + t2)) + case (i1, i2) if skipEndOfLine => + iss ++= List(Text(endOfLine.toString), i2) + case _ => iss += current + } + } + iss.toList + } + + inlines match { + case Nil => 
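// Illustration (hypothetical wiki source, not from this patch): applied to the text
//   '''bold''' and `code`
// inline(isInlineEnd = false) yields
//   Chain(List(Bold(Text("bold")), Text(" and "), Monospace(Text("code"))))
// since each markup pair ('''...''', ''...'', `...`, __...__, ^...^, ,,...,,, [[...]]) maps to
// the corresponding Inline node and plain runs of characters become Text.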
Text("") + case i :: Nil => i + case is => Chain(is) + } + + } + + def htmlTag(): HtmlTag = { + jump(safeTagMarker) + val read = readUntil(safeTagMarker) + if (char != endOfText) jump(safeTagMarker) + HtmlTag(read) + } + + def bold(): Inline = { + jump("'''") + val i = inline(check("'''")) + jump("'''") + Bold(i) + } + + def italic(): Inline = { + jump("''") + val i = inline(check("''")) + jump("''") + Italic(i) + } + + def monospace(): Inline = { + jump("`") + val i = inline(check("`")) + jump("`") + Monospace(i) + } + + def underline(): Inline = { + jump("__") + val i = inline(check("__")) + jump("__") + Underline(i) + } + + def superscript(): Inline = { + jump("^") + val i = inline(check("^")) + if (jump("^")) { + Superscript(i) + } else { + Chain(Seq(Text("^"), i)) + } + } + + def subscript(): Inline = { + jump(",,") + val i = inline(check(",,")) + jump(",,") + Subscript(i) + } + + def summary(): Inline = { + val i = inline(check(".")) + Summary( + if (jump(".")) + Chain(List(i, Text("."))) + else + i + ) + } + + def link(): Inline = { + val SchemeUri = """([a-z]+:.*)""".r + jump("[[") + val parens = 2 + repeatJump('[') + val start = "[" * parens + val stop = "]" * parens + //println("link with " + parens + " matching parens") + val target = readUntil { check(stop) || check(" ") } + val title = + if (!check(stop)) Some({ + jump(" ") + inline(check(stop)) + }) + else None + jump(stop) + + (target, title) match { + case (SchemeUri(uri), optTitle) => + Link(uri, optTitle getOrElse Text(uri)) + case (qualName, optTitle) => + makeEntityLink(optTitle getOrElse Text(target), pos, target, siteOpt) + } + } + + /* UTILITY */ + + /** {{{ eol ::= { whitespace } '\n' }}} */ + def blockEnded(blockType: String): Unit = { + if (char != endOfLine && char != endOfText) { + reportError(pos, "no additional content on same line after " + blockType) + jumpUntil(endOfLine) + } + while (char == endOfLine) + nextChar() + } + + /** + * Eliminates the (common) leading spaces in all lines, based on the first line + * For indented pieces of code, it reduces the indent to the least whitespace prefix: + * {{{ + * indented example + * another indented line + * if (condition) + * then do something; + * ^ this is the least whitespace prefix + * }}} + */ + def normalizeIndentation(_code: String): String = { + + val code = _code.trim + var maxSkip = Integer.MAX_VALUE + var crtSkip = 0 + var wsArea = true + var index = 0 + var firstLine = true + var emptyLine = true + + while (index < code.length) { + code(index) match { + case ' ' => + if (wsArea) + crtSkip += 1 + case c => + wsArea = (c == '\n') + maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip + crtSkip = if (c == '\n') 0 else crtSkip + firstLine = if (c == '\n') false else firstLine + emptyLine = if (c == '\n') true else false + } + index += 1 + } + + if (maxSkip == 0) + code + else { + index = 0 + val builder = new StringBuilder + while (index < code.length) { + builder.append(code(index)) + if (code(index) == '\n') { + // we want to skip as many spaces are available, if there are less spaces (like on empty lines, do not + // over-consume them) + index += 1 + val limit = index + maxSkip + while ((index < code.length) && (code(index) == ' ') && index < limit) + index += 1 + } + else + index += 1 + } + builder.toString + } + } + + def checkParaEnded(): Boolean = { + (char == endOfText) || + ((char == endOfLine) && { + val poff = offset + nextChar() // read EOL + val ok = { + checkSkipInitWhitespace(endOfLine) || + 
checkSkipInitWhitespace('=') || + checkSkipInitWhitespace("{{{") || + checkList || + checkSkipInitWhitespace('\u003D') + } + offset = poff + ok + }) + } + + def reportError(pos: Position, message: String) { + reporter.warning(pos, message) + } + } + + protected sealed class CharReader(buffer: String) { reader => + + var offset: Int = 0 + def char: Char = + if (offset >= buffer.length) endOfText else buffer charAt offset + + final def nextChar() { + offset += 1 + } + + final def check(chars: String): Boolean = { + val poff = offset + val ok = jump(chars) + offset = poff + ok + } + + def checkSkipInitWhitespace(c: Char): Boolean = { + val poff = offset + jumpWhitespace() + val ok = jump(c) + offset = poff + ok + } + + def checkSkipInitWhitespace(chars: String): Boolean = { + val poff = offset + jumpWhitespace() + val (ok0, chars0) = + if (chars.charAt(0) == ' ') + (offset > poff, chars substring 1) + else + (true, chars) + val ok = ok0 && jump(chars0) + offset = poff + ok + } + + def countWhitespace: Int = { + var count = 0 + val poff = offset + while (isWhitespace(char) && char != endOfText) { + nextChar() + count += 1 + } + offset = poff + count + } + + /* JUMPERS */ + + /** jumps a character and consumes it + * @return true only if the correct character has been jumped */ + final def jump(ch: Char): Boolean = { + if (char == ch) { + nextChar() + true + } + else false + } + + /** jumps all the characters in chars, consuming them in the process. + * @return true only if the correct characters have been jumped */ + final def jump(chars: String): Boolean = { + var index = 0 + while (index < chars.length && char == chars.charAt(index) && char != endOfText) { + nextChar() + index += 1 + } + index == chars.length + } + + final def repeatJump(c: Char, max: Int = Int.MaxValue): Int = { + var count = 0 + while (jump(c) && count < max) + count += 1 + count + } + + final def jumpUntil(ch: Char): Int = { + var count = 0 + while (char != ch && char != endOfText) { + nextChar() + count += 1 + } + count + } + + final def jumpUntil(pred: => Boolean): Int = { + var count = 0 + while (!pred && char != endOfText) { + nextChar() + count += 1 + } + count + } + + def jumpWhitespace() = jumpUntil(!isWhitespace(char)) + + /* READERS */ + + final def readUntil(c: Char): String = { + withRead { + while (char != c && char != endOfText) { + nextChar() + } + } + } + + final def readUntil(chars: String): String = { + assert(chars.length > 0) + withRead { + val c = chars.charAt(0) + while (!check(chars) && char != endOfText) { + nextChar() + while (char != c && char != endOfText) + nextChar() + } + } + } + + final def readUntil(pred: => Boolean): String = { + withRead { + while (char != endOfText && !pred) { + nextChar() + } + } + } + + private def withRead(read: => Unit): String = { + val start = offset + read + buffer.substring(start, offset) + } + + + /* CHARS CLASSES */ + + def isWhitespace(c: Char) = c == ' ' || c == '\t' + + } + +} diff --git a/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala new file mode 100755 index 0000000000..c11179800c --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala @@ -0,0 +1,15 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + */ + +package scala.tools.nsc +package doc +package base + +import scala.collection._ + +sealed trait LinkTo +final case class LinkToMember[Mbr, Tpl](mbr: Mbr, tpl: Tpl) extends LinkTo +final case class LinkToTpl[Tpl](tpl: Tpl) extends LinkTo +final case class 
LinkToExternal(name: String, url: String) extends LinkTo +final case class Tooltip(name: String) extends LinkTo diff --git a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala new file mode 100755 index 0000000000..8d80333195 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala @@ -0,0 +1,206 @@ +package scala.tools.nsc +package doc +package base + +import comment._ + +/** This trait extracts all required information for documentation from compilation units. + * The base trait has been extracted to allow getting light-weight documentation + * for a particular symbol in the IDE.*/ +trait MemberLookupBase { + + val global: Global + import global._ + + def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] + def chooseLink(links: List[LinkTo]): LinkTo + def toString(link: LinkTo): String + def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] + def warnNoLink: Boolean + + import global._ + import rootMirror.{RootPackage, EmptyPackage} + + private def isRoot(s: Symbol) = s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass + + def makeEntityLink(title: Inline, pos: Position, query: String, siteOpt: Option[Symbol]) = + new EntityLink(title) { lazy val link = memberLookup(pos, query, siteOpt) } + + private var showExplanation = true + private def explanation: String = + if (showExplanation) { + showExplanation = false + """ + |Quick crash course on using Scaladoc links + |========================================== + |Disambiguating terms and types: Prefix terms with '$' and types with '!' in case both names are in use: + | - [[scala.collection.immutable.List!.apply class List's apply method]] and + | - [[scala.collection.immutable.List$.apply object List's apply method]] + |Disambiguating overloaded members: If a term is overloaded, you can indicate the first part of its signature followed by *: + | - [[[scala.collection.immutable.List$.fill[A](Int)(⇒A):List[A]* Fill with a single parameter]]] + | - [[[scala.collection.immutable.List$.fill[A](Int,Int)(⇒A):List[List[A]]* Fill with a two parameters]]] + |Notes: + | - you can use any number of matching square brackets to avoid interference with the signature + | - you can use \\. to escape dots in prefixes (don't forget to use * at the end to match the signature!) + | - you can use \\# to escape hashes, otherwise they will be considered as delimiters, like dots.""".stripMargin + } else "" + + def memberLookup(pos: Position, query: String, siteOpt: Option[Symbol]): LinkTo = { + var members = breakMembers(query) + + // (1) First look in the root package, as most of the links are qualified + val fromRoot = lookupInRootPackage(pos, members) + + // (2) Or recursively go into each containing template. 
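// For example (hypothetical site): a link written inside scala.collection.immutable.List is
// also tried against List, then immutable, then collection, then scala (the owner chain
// built below, cut off at the root package) before falling back to external links (3) or a
// plain Tooltip (4).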
+ val fromParents = siteOpt.fold(Stream.empty[Symbol]) { s => + Stream.iterate(s)(_.owner) + }.takeWhile (!isRoot(_)).map { + lookupInTemplate(pos, members, _) + } + + val syms = (fromRoot +: fromParents) find (!_.isEmpty) getOrElse Nil + + val links = syms flatMap { case (sym, site) => internalLink(sym, site) } match { + case Nil => + // (3) Look at external links + syms.flatMap { case (sym, owner) => + // reconstruct the original link + def linkName(sym: Symbol) = { + def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.isPackage) "$" else "") + val packageSuffix = if (sym.isPackage) ".package" else "" + + sym.ownerChain.reverse.filterNot(isRoot(_)).map(nameString(_)).mkString(".") + packageSuffix + } + + if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage) + findExternalLink(sym, linkName(sym)) + else if (owner.isClass || owner.isModule || owner.isTrait || owner.isPackage) + findExternalLink(sym, linkName(owner) + "@" + externalSignature(sym)) + else + None + } + case links => links + } + links match { + case Nil => + if (warnNoLink) + reporter.warning(pos, "Could not find any member to link for \"" + query + "\".") + // (4) if we still haven't found anything, create a tooltip + Tooltip(query) + case List(l) => l + case links => + val chosen = chooseLink(links) + def linkToString(link: LinkTo) = { + val chosenInfo = + if (link == chosen) " [chosen]" else "" + toString(link) + chosenInfo + "\n" + } + if (warnNoLink) { + val allLinks = links.map(linkToString).mkString + reporter.warning(pos, + s"""The link target \"$query\" is ambiguous. Several members fit the target: + |$allLinks + |$explanation""".stripMargin) + } + chosen + } + } + + private sealed trait SearchStrategy + private case object BothTypeAndTerm extends SearchStrategy + private case object OnlyType extends SearchStrategy + private case object OnlyTerm extends SearchStrategy + + private def lookupInRootPackage(pos: Position, members: List[String]) = + lookupInTemplate(pos, members, EmptyPackage) ::: lookupInTemplate(pos, members, RootPackage) + + private def lookupInTemplate(pos: Position, members: List[String], container: Symbol): List[(Symbol, Symbol)] = { + // Maintaining compatibility with previous links is a bit tricky here: + // we have a preference for term names for all terms except for the last, where we prefer a class: + // How to do this: + // - at each step we do a DFS search with the prefered strategy + // - if the search doesn't return any members, we backtrack on the last decision + // * we look for terms with the last member's name + // * we look for types with the same name, all the way up + val result = members match { + case Nil => Nil + case mbrName::Nil => + var syms = lookupInTemplate(pos, mbrName, container, OnlyType) map ((_, container)) + if (syms.isEmpty) + syms = lookupInTemplate(pos, mbrName, container, OnlyTerm) map ((_, container)) + syms + + case tplName::rest => + def completeSearch(syms: List[Symbol]) = + syms flatMap (lookupInTemplate(pos, rest, _)) + + completeSearch(lookupInTemplate(pos, tplName, container, OnlyTerm)) match { + case Nil => completeSearch(lookupInTemplate(pos, tplName, container, OnlyType)) + case syms => syms + } + } + //println("lookupInTemplate(" + members + ", " + container + ") => " + result) + result + } + + private def lookupInTemplate(pos: Position, member: String, container: Symbol, strategy: SearchStrategy): List[Symbol] = { + val name = member.stripSuffix("$").stripSuffix("!").stripSuffix("*") + def signatureMatch(sym: 
Symbol): Boolean = externalSignature(sym).startsWith(name) + + // We need to cleanup the bogus classes created by the .class file parser. For example, [[scala.Predef]] resolves + // to (bogus) class scala.Predef loaded by the class loader -- which we need to eliminate by looking at the info + // and removing NoType classes + def cleanupBogusClasses(syms: List[Symbol]) = { syms.filter(_.info != NoType) } + + def syms(name: Name) = container.info.nonPrivateMember(name.encodedName).alternatives + def termSyms = cleanupBogusClasses(syms(newTermName(name))) + def typeSyms = cleanupBogusClasses(syms(newTypeName(name))) + + val result = if (member.endsWith("$")) + termSyms + else if (member.endsWith("!")) + typeSyms + else if (member.endsWith("*")) + cleanupBogusClasses(container.info.nonPrivateDecls) filter signatureMatch + else + strategy match { + case BothTypeAndTerm => termSyms ::: typeSyms + case OnlyType => typeSyms + case OnlyTerm => termSyms + } + + //println("lookupInTemplate(" + member + ", " + container + ") => " + result) + result + } + + private def breakMembers(query: String): List[String] = { + // Okay, how does this work? Well: you split on . but you don't want to split on \. => thus the ugly regex + // query.split((?<=[^\\\\])\\.).map(_.replaceAll("\\.")) + // The same code, just faster: + var members = List[String]() + var index = 0 + var last_index = 0 + val length = query.length + while (index < length) { + if ((query.charAt(index) == '.' || query.charAt(index) == '#') && + ((index == 0) || (query.charAt(index-1) != '\\'))) { + + val member = query.substring(last_index, index).replaceAll("\\\\([#\\.])", "$1") + // we want to allow javadoc-style links [[#member]] -- which requires us to remove empty members from the first + // elemnt in the list + if ((member != "") || (!members.isEmpty)) + members ::= member + last_index = index + 1 + } + index += 1 + } + if (last_index < length) + members ::= query.substring(last_index, length).replaceAll("\\\\\\.", ".") + members.reverse + } + + def externalSignature(sym: Symbol) = { + sym.info // force it, otherwise we see lazy types + (sym.nameString + sym.signatureString).replaceAll("\\s", "") + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala new file mode 100755 index 0000000000..2a07547de2 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala @@ -0,0 +1,89 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Manohar Jonnalagedda + */ + +package scala.tools.nsc +package doc +package base +package comment + +import scala.collection._ + +/** A body of text. A comment has a single body, which is composed of + * at least one block. Inside every body is exactly one summary (see + * [[scala.tools.nsc.doc.model.comment.Summary]]). */ +final case class Body(blocks: Seq[Block]) { + + /** The summary text of the comment body. 
*/ + lazy val summary: Option[Inline] = { + def summaryInBlock(block: Block): Seq[Inline] = block match { + case Title(text, _) => summaryInInline(text) + case Paragraph(text) => summaryInInline(text) + case UnorderedList(items) => items flatMap summaryInBlock + case OrderedList(items, _) => items flatMap summaryInBlock + case DefinitionList(items) => items.values.toSeq flatMap summaryInBlock + case _ => Nil + } + def summaryInInline(text: Inline): Seq[Inline] = text match { + case Summary(text) => List(text) + case Chain(items) => items flatMap summaryInInline + case Italic(text) => summaryInInline(text) + case Bold(text) => summaryInInline(text) + case Underline(text) => summaryInInline(text) + case Superscript(text) => summaryInInline(text) + case Subscript(text) => summaryInInline(text) + case Link(_, title) => summaryInInline(title) + case _ => Nil + } + (blocks flatMap { summaryInBlock(_) }).toList match { + case Nil => None + case inline :: Nil => Some(inline) + case inlines => Some(Chain(inlines)) + } + } +} + +/** A block-level element of text, such as a paragraph or code block. */ +sealed abstract class Block + +final case class Title(text: Inline, level: Int) extends Block +final case class Paragraph(text: Inline) extends Block +final case class Code(data: String) extends Block +final case class UnorderedList(items: Seq[Block]) extends Block +final case class OrderedList(items: Seq[Block], style: String) extends Block +final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block +final case class HorizontalRule() extends Block + +/** An section of text inside a block, possibly with formatting. */ +sealed abstract class Inline + +final case class Chain(items: Seq[Inline]) extends Inline +final case class Italic(text: Inline) extends Inline +final case class Bold(text: Inline) extends Inline +final case class Underline(text: Inline) extends Inline +final case class Superscript(text: Inline) extends Inline +final case class Subscript(text: Inline) extends Inline +final case class Link(target: String, title: Inline) extends Inline +final case class Monospace(text: Inline) extends Inline +final case class Text(text: String) extends Inline +abstract class EntityLink(val title: Inline) extends Inline { def link: LinkTo } +object EntityLink { + def apply(title: Inline, linkTo: LinkTo) = new EntityLink(title) { def link: LinkTo = linkTo } + def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link)) +} +final case class HtmlTag(data: String) extends Inline { + def canClose(open: HtmlTag) = { + open.data.stripPrefix("<") == data.stripPrefix(" + list foreach scan + case tag: HtmlTag => { + if (stack.length > 0 && tag.canClose(stack.last)) { + stack.remove(stack.length-1) + } else { + tag.close match { + case Some(t) => + stack += t + case None => + ; + } + } + } + case _ => + ; + } + } + scan(inline) + Chain(List(inline) ++ stack.reverse) + } + + /** A shorter version of the body. Usually, this is the first sentence of the body. */ + def short: Inline = { + body.summary match { + case Some(s) => + closeHtmlTags(s) + case _ => + Text("") + } + } + + /** A list of authors. The empty list is used when no author is defined. */ + def authors: List[Body] + + /** A list of other resources to see, including links to other entities or + * to external documentation. The empty list is used when no other resource + * is mentionned. */ + def see: List[Body] + + /** A description of the result of the entity. 
Typically, this provides additional + * information on the domain of the result, contractual post-conditions, etc. */ + def result: Option[Body] + + /** A map of exceptions that the entity can throw when accessed, and a + * description of what they mean. */ + def throws: Map[String, Body] + + /** A map of value parameters, and a description of what they are. Typically, + * this provides additional information on the domain of the parameters, + * contractual pre-conditions, etc. */ + def valueParams: Map[String, Body] + + /** A map of type parameters, and a description of what they are. Typically, + * this provides additional information on the domain of the parameters. */ + def typeParams: Map[String, Body] + + /** The version number of the entity. There is no formatting or further + * meaning attached to this value. */ + def version: Option[Body] + + /** A version number of a containing entity where this member-entity was introduced. */ + def since: Option[Body] + + /** An annotation as to expected changes on this entity. */ + def todo: List[Body] + + /** Whether the entity is deprecated. Using the `@deprecated` Scala attribute + * is prefereable to using this Scaladoc tag. */ + def deprecated: Option[Body] + + /** An additional note concerning the contract of the entity. */ + def note: List[Body] + + /** A usage example related to the entity. */ + def example: List[Body] + + /** A description for the primary constructor */ + def constructor: Option[Body] + + /** A set of diagram directives for the inheritance diagram */ + def inheritDiagram: List[String] + + /** A set of diagram directives for the content diagram */ + def contentDiagram: List[String] + + /** The group this member is part of */ + def group: Option[String] + + /** Member group descriptions */ + def groupDesc: Map[String,Body] + + /** Member group names (overriding the short tag) */ + def groupNames: Map[String,String] + + /** Member group priorities */ + def groupPrio: Map[String,Int] + + override def toString = + body.toString + "\n" + + (authors map ("@author " + _.toString)).mkString("\n") + + (result map ("@return " + _.toString)).mkString("\n") + + (version map ("@version " + _.toString)).mkString +} diff --git a/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala new file mode 100644 index 0000000000..42b56aa927 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala @@ -0,0 +1,30 @@ +package scala.tools.nsc.doc +package doclet + +import scala.collection._ + +/** Custom Scaladoc generators must implement the `Generator` class. A custom generator can be selected in Scaladoc + * using the `-doc-generator` command line option. + * The `Generator` class does not provide data about the documented code. A number of data provider traits can be used + * to configure what data is actually available to the generator: + * - A `Universer` provides a `Universe` data structure representing the interfaces and comments of the documented + * program. + * - An `Indexer` provides precalculated indexing information about a universe. + * To implement this class only requires defining method `generateImpl`. */ +abstract class Generator { + + /** A series of tests that must be true before generation can be done. This is used by data provider traits to + * confirm that they have been correctly initialised before allowing generation to proceed. 
*/ + protected val checks: mutable.Set[()=>Boolean] = + mutable.Set.empty[()=>Boolean] + + /** Outputs documentation (as a side effect). */ + def generate(): Unit = { + assert(checks forall { check => check() }) + generateImpl() + } + + /** Outputs documentation (as a side effect). This method is called only if all `checks` are true. */ + protected def generateImpl(): Unit + +} diff --git a/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala new file mode 100644 index 0000000000..0cdd47182f --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala @@ -0,0 +1,21 @@ +package scala.tools.nsc +package doc +package doclet + +/** A `Generator` may implement the `Indexer` trait to gain access to pre-calculated indexing information */ +trait Indexer extends Generator with Universer { + + protected var indexField: Index = null + + def index: Index = indexField + + def setIndex(i: Index) { + assert(indexField == null) + indexField = i + } + + checks += { () => + indexField != null + } + +} \ No newline at end of file diff --git a/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala new file mode 100644 index 0000000000..ee8b7809e5 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala @@ -0,0 +1,21 @@ +package scala.tools.nsc +package doc +package doclet + +/** A `Generator` may implement the `Universer` trait to gain access to a model of the documented program */ +trait Universer extends Generator { + + protected var universeField: Universe = null + + def universe: Universe = universeField + + def setUniverse(u: Universe) { + assert(universeField == null) + universeField = u + } + + checks += { () => + universeField != null + } + +} \ No newline at end of file diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala new file mode 100644 index 0000000000..21c5f6bb67 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala @@ -0,0 +1,19 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author David Bernard, Manohar Jonnalagedda + */ + +package scala.tools.nsc.doc +package html + +import doclet._ + +/** The default doclet used by the scaladoc command line tool + * when no user-provided doclet is provided. */ +class Doclet extends Generator with Universer with Indexer { + + def generateImpl() { + new html.HtmlFactory(universe, index).generate() + } + +} diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala new file mode 100644 index 0000000000..d721a96ad7 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala @@ -0,0 +1,152 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author David Bernard, Manohar Jonnalagedda + */ + +package scala.tools.nsc +package doc +package html + +import model._ +import java.io.{ File => JFile } +import io.{ Streamable, Directory } +import scala.collection._ +import page.diagram._ + +import html.page.diagram.DiagramGenerator + +/** A class that can generate Scaladoc sites to some fixed root folder. + * @author David Bernard + * @author Gilles Dubochet */ +class HtmlFactory(val universe: doc.Universe, index: doc.Index) { + + /** The character encoding to be used for generated Scaladoc sites. + * This value is currently always UTF-8. 
*/ + def encoding: String = "UTF-8" + + def siteRoot: JFile = new JFile(universe.settings.outdir.value) + + def libResources = List( + "index.js", + "jquery-ui.js", + "jquery.js", + "jquery.layout.js", + "scheduler.js", + "diagrams.js", + "template.js", + "tools.tooltip.js", + "modernizr.custom.js", + + "index.css", + "ref-index.css", + "template.css", + "diagrams.css", + + "class.png", + "class_big.png", + "class_diagram.png", + "object.png", + "object_big.png", + "object_diagram.png", + "package.png", + "package_big.png", + "trait.png", + "trait_big.png", + "trait_diagram.png", + "type.png", + "type_big.png", + "type_diagram.png", + + "class_to_object_big.png", + "object_to_class_big.png", + "trait_to_object_big.png", + "object_to_trait_big.png", + "type_to_object_big.png", + "object_to_type_big.png", + + "arrow-down.png", + "arrow-right.png", + "filter_box_left.png", + "filter_box_left2.gif", + "filter_box_right.png", + "filterbg.gif", + "filterboxbarbg.gif", + "filterboxbg.gif", + + "constructorsbg.gif", + "defbg-blue.gif", + "defbg-green.gif", + "filterboxbarbg.png", + "fullcommenttopbg.gif", + "ownderbg2.gif", + "ownerbg.gif", + "ownerbg2.gif", + "packagesbg.gif", + "signaturebg.gif", + "signaturebg2.gif", + "typebg.gif", + "conversionbg.gif", + "valuemembersbg.gif", + + "navigation-li-a.png", + "navigation-li.png", + "remove.png", + "selected-right.png", + "selected.png", + "selected2-right.png", + "selected2.png", + "selected-right-implicits.png", + "selected-implicits.png", + "unselected.png" + ) + + /** Generates the Scaladoc site for a model into the site root. + * A scaladoc site is a set of HTML and related files + * that document a model extracted from a compiler run. + */ + def generate() { + + def copyResource(subPath: String) { + val bytes = new Streamable.Bytes { + val p = "/scala/tools/nsc/doc/html/resource/" + subPath + val inputStream = getClass.getResourceAsStream(p) + assert(inputStream != null, p) + }.toByteArray() + val dest = Directory(siteRoot) / subPath + dest.parent.createDirectory() + val out = dest.toFile.bufferedOutput() + try out.write(bytes, 0, bytes.length) + finally out.close() + } + + DiagramGenerator.initialize(universe.settings) + + libResources foreach (s => copyResource("lib/" + s)) + + new page.Index(universe, index) writeFor this + new page.IndexScript(universe, index) writeFor this + + writeTemplates(_ writeFor this) + + for (letter <- index.firstLetterIndex) { + new html.page.ReferenceIndex(letter._1, index, universe) writeFor this + } + + DiagramGenerator.cleanup() + } + + def writeTemplates(writeForThis: HtmlPage => Unit) { + val written = mutable.HashSet.empty[DocTemplateEntity] + val diagramGenerator: DiagramGenerator = new DotDiagramGenerator(universe.settings) + + def writeTemplate(tpl: DocTemplateEntity) { + if (!(written contains tpl)) { + writeForThis(new page.Template(universe, diagramGenerator, tpl)) + written += tpl + tpl.templates collect { case d: DocTemplateEntity => d } map writeTemplate + } + } + + writeTemplate(universe.rootPackage) + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala new file mode 100644 index 0000000000..229e26d699 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala @@ -0,0 +1,224 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author David Bernard, Manohar Jonnalagedda + */ + +package scala.tools.nsc +package doc +package html + +import base._ +import base.comment._ +import model._ + 
+import scala.xml.NodeSeq +import scala.xml.dtd.{DocType, PublicID} +import scala.collection._ +import java.io.Writer + +/** An html page that is part of a Scaladoc site. + * @author David Bernard + * @author Gilles Dubochet */ +abstract class HtmlPage extends Page { thisPage => + /** The title of this page. */ + protected def title: String + + /** The page description */ + protected def description: String = + // unless overwritten, will display the title in a spaced format, keeping - and . + title.replaceAll("[^a-zA-Z0-9\\.\\-]+", " ").replaceAll("\\-+", " - ").replaceAll(" +", " ") + + /** The page keywords */ + protected def keywords: String = + // unless overwritten, same as description, minus the " - " + description.replaceAll(" - ", " ") + + /** Additional header elements (links, scripts, meta tags, etc.) required for this page. */ + protected def headers: NodeSeq + + /** The body of this page. */ + def body: NodeSeq + + def writeFor(site: HtmlFactory) { + val doctype = + DocType("html", PublicID("-//W3C//DTD XHTML 1.1//EN", "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"), Nil) + val html = + + + { title } + + + \n") + w.write(doctype.toString + "\n") + w.write(xml.Xhtml.toXhtml(html)) + } + + if (site.universe.settings.docRawOutput.value) + writeFile(site, ".raw") { + // we're only interested in the body, as this will go into the diff + _.write(body.text) + } + + //XML.save(pageFile.getPath, html, site.encoding, xmlDecl = false, doctype = doctype) + } + + /** Transforms an optional comment into an styled HTML tree representing its body if it is defined, or into an empty + * node sequence if it is not. */ + def commentToHtml(comment: Option[Comment]): NodeSeq = + (comment map (commentToHtml(_))) getOrElse NodeSeq.Empty + + /** Transforms a comment into an styled HTML tree representing its body. */ + def commentToHtml(comment: Comment): NodeSeq = + bodyToHtml(comment.body) + + def bodyToHtml(body: Body): NodeSeq = + body.blocks flatMap (blockToHtml(_)) + + def blockToHtml(block: Block): NodeSeq = block match { + case Title(in, 1) =>

+ <h3>{ inlineToHtml(in) }</h3>
+ case Title(in, 2) =>
+ <h4>{ inlineToHtml(in) }</h4>
+ case Title(in, 3) =>
+ <h5>{ inlineToHtml(in) }</h5>
+ case Title(in, _) =>
+ <h6>{ inlineToHtml(in) }</h6>
+ case Paragraph(in) =>
+ <p>{ inlineToHtml(in) }</p>
+ case Code(data) =>
+ <pre>{ SyntaxHigh(data) }</pre> //<pre>{ scala.xml.Text(data) }</pre>
+ case UnorderedList(items) =>
+ <ul>{ listItemsToHtml(items) }</ul>
+ case OrderedList(items, listStyle) =>
+ <ol class={ listStyle }>{ listItemsToHtml(items) }</ol>
+ case DefinitionList(items) =>
+ <dl>{items map { case (t, d) => <dt>{ inlineToHtml(t) }</dt><dd>{ blockToHtml(d) }</dd> } }</dl>
+ case HorizontalRule() =>
+ <hr/>
              + } + + def listItemsToHtml(items: Seq[Block]) = + items.foldLeft(xml.NodeSeq.Empty){ (xmlList, item) => + item match { + case OrderedList(_, _) | UnorderedList(_) => // html requires sub ULs to be put into the last LI + xmlList.init ++
+ <li>{ xmlList.last.child ++ blockToHtml(item) }</li>
+ case Paragraph(inline) =>
+ xmlList :+ <li>{ inlineToHtml(inline) }</li> // LIs are blocks, no need to use Ps
+ case block =>
+ xmlList :+ <li>{ blockToHtml(block) }</li>
            6. + } + } + + def inlineToHtml(inl: Inline): NodeSeq = inl match { + case Chain(items) => items flatMap (inlineToHtml(_)) + case Italic(in) => { inlineToHtml(in) } + case Bold(in) => { inlineToHtml(in) } + case Underline(in) => { inlineToHtml(in) } + case Superscript(in) => { inlineToHtml(in) } + case Subscript(in) => { inlineToHtml(in) } + case Link(raw, title) => { inlineToHtml(title) } + case Monospace(in) => { inlineToHtml(in) } + case Text(text) => scala.xml.Text(text) + case Summary(in) => inlineToHtml(in) + case HtmlTag(tag) => scala.xml.Unparsed(tag) + case EntityLink(target, link) => linkToHtml(target, link, hasLinks = true) + } + + def linkToHtml(text: Inline, link: LinkTo, hasLinks: Boolean) = link match { + case LinkToTpl(dtpl: TemplateEntity) => + if (hasLinks) + { inlineToHtml(text) } + else + { inlineToHtml(text) } + case LinkToMember(mbr: MemberEntity, inTpl: TemplateEntity) => + if (hasLinks) + { inlineToHtml(text) } + else + { inlineToHtml(text) } + case Tooltip(tooltip) => + { inlineToHtml(text) } + case LinkToExternal(name, url) => + { inlineToHtml(text) } + case _ => + inlineToHtml(text) + } + + def typeToHtml(tpes: List[model.TypeEntity], hasLinks: Boolean): NodeSeq = tpes match { + case Nil => + NodeSeq.Empty + case List(tpe) => + typeToHtml(tpe, hasLinks) + case tpe :: rest => + typeToHtml(tpe, hasLinks) ++ scala.xml.Text(" with ") ++ typeToHtml(rest, hasLinks) + } + + def typeToHtml(tpe: model.TypeEntity, hasLinks: Boolean): NodeSeq = { + val string = tpe.name + def toLinksOut(inPos: Int, starts: List[Int]): NodeSeq = { + if (starts.isEmpty && (inPos == string.length)) + NodeSeq.Empty + else if (starts.isEmpty) + scala.xml.Text(string.slice(inPos, string.length)) + else if (inPos == starts.head) + toLinksIn(inPos, starts) + else { + scala.xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts) + } + } + def toLinksIn(inPos: Int, starts: List[Int]): NodeSeq = { + val (link, width) = tpe.refEntity(inPos) + val text = comment.Text(string.slice(inPos, inPos + width)) + linkToHtml(text, link, hasLinks) ++ toLinksOut(inPos + width, starts.tail) + } + if (hasLinks) + toLinksOut(0, tpe.refEntity.keySet.toList) + else + scala.xml.Text(string) + } + + def typesToHtml(tpess: List[model.TypeEntity], hasLinks: Boolean, sep: NodeSeq): NodeSeq = tpess match { + case Nil => NodeSeq.Empty + case tpe :: Nil => typeToHtml(tpe, hasLinks) + case tpe :: tpes => typeToHtml(tpe, hasLinks) ++ sep ++ typesToHtml(tpes, hasLinks, sep) + } + + def hasPage(e: DocTemplateEntity) = { + e.isPackage || e.isTrait || e.isClass || e.isObject || e.isCaseClass + } + + /** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */ + def templateToHtml(tpl: TemplateEntity, name: String = null) = tpl match { + case dTpl: DocTemplateEntity => + if (hasPage(dTpl)) { + { if (name eq null) dTpl.name else name } + } else { + scala.xml.Text(if (name eq null) dTpl.name else name) + } + case ndTpl: NoDocTemplate => + scala.xml.Text(if (name eq null) ndTpl.name else name) + } + + /** Returns the HTML code that represents the templates in `tpls` as a list of hyperlinked names. 
*/ + def templatesToHtml(tplss: List[TemplateEntity], sep: NodeSeq): NodeSeq = tplss match { + case Nil => NodeSeq.Empty + case tpl :: Nil => templateToHtml(tpl) + case tpl :: tpls => templateToHtml(tpl) ++ sep ++ templatesToHtml(tpls, sep) + } + + /** Returns the _big image name corresponding to the DocTemplate Entity (upper left icon) */ + def docEntityKindToBigImage(ety: DocTemplateEntity) = + if (ety.isTrait && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "trait_to_object_big.png" + else if (ety.isTrait) "trait_big.png" + else if (ety.isClass && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "class_to_object_big.png" + else if (ety.isClass) "class_big.png" + else if ((ety.isAbstractType || ety.isAliasType) && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "type_to_object_big.png" + else if ((ety.isAbstractType || ety.isAliasType)) "type_big.png" + else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isClass) "object_to_class_big.png" + else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isTrait) "object_to_trait_big.png" + else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && (ety.companion.get.isAbstractType || ety.companion.get.isAliasType)) "object_to_trait_big.png" + else if (ety.isObject) "object_big.png" + else if (ety.isPackage) "package_big.png" + else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not +} diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Page.scala b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala new file mode 100644 index 0000000000..91939cf3de --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala @@ -0,0 +1,102 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author David Bernard, Manohar Jonnalagedda + */ + +package scala.tools.nsc.doc.html + +import scala.tools.nsc.doc.model._ +import java.io.{FileOutputStream, File} +import scala.reflect.NameTransformer +import java.nio.channels.Channels +import java.io.Writer + +abstract class Page { + thisPage => + + /** The path of this page, relative to the API site. `path.tail` is a list + * of folder names leading to this page (from closest package to + * one-above-root package), `path.head` is the file name of this page. + * Note that `path` has a length of at least one. */ + def path: List[String] + + def absoluteLinkTo(path: List[String]) = path.reverse.mkString("/") + + def createFileOutputStream(site: HtmlFactory, suffix: String = "") = { + val file = new File(site.siteRoot, absoluteLinkTo(thisPage.path) + suffix) + val folder = file.getParentFile + if (! folder.exists) { + folder.mkdirs + } + new FileOutputStream(file.getPath) + } + + def writeFile(site: HtmlFactory, suffix: String = "")(fn: Writer => Unit) = { + val fos = createFileOutputStream(site, suffix) + val w = Channels.newWriter(fos.getChannel, site.encoding) + try { + fn(w) + } + finally { + w.close() + fos.close() + } + } + + /** Writes this page as a file. The file's location is relative to the + * generator's site root, and the encoding is also defined by the generator. 
+ * @param site The generator that is writing this page. */ + def writeFor(site: HtmlFactory): Unit + + def kindToString(mbr: MemberEntity) = + mbr match { + case c: Class => if (c.isCaseClass) "case class" else "class" + case _: Trait => "trait" + case _: Package => "package" + case _: Object => "object" + case _: AbstractType => "type" + case _: AliasType => "type" + case _: Constructor => "new" + case v: Def => "def" + case v: Val if (v.isLazyVal) => "lazy val" + case v: Val if (v.isVal) => "val" + case v: Val if (v.isVar) => "var" + case _ => sys.error("Cannot create kind for: " + mbr + " of class " + mbr.getClass) + } + + def templateToPath(tpl: TemplateEntity): List[String] = { + def doName(tpl: TemplateEntity): String = + (if (tpl.inPackageObject) "package$$" else "") + NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "") + def downPacks(pack: Package): List[String] = + if (pack.isRootPackage) Nil else (doName(pack) :: downPacks(pack.inTemplate)) + def downInner(nme: String, tpl: TemplateEntity): (String, Package) = { + tpl.inTemplate match { + case inPkg: Package => (nme + ".html", inPkg) + case inTpl => downInner(doName(inTpl) + "$" + nme, inTpl) + } + } + val (file, pack) = + tpl match { + case p: Package => ("package.html", p) + case _ => downInner(doName(tpl), tpl) + } + file :: downPacks(pack) + } + + /** A relative link from this page to some destination class entity. + * @param destClass The class or object entity that the link will point to. */ + def relativeLinkTo(destClass: TemplateEntity): String = + relativeLinkTo(templateToPath(destClass)) + + /** A relative link from this page to some destination path. + * @param destPath The path that the link will point to. */ + def relativeLinkTo(destPath: List[String]): String = { + def relativize(from: List[String], to: List[String]): List[String] = (from, to) match { + case (f :: fs, t :: ts) if (f == t) => // both paths are identical to that point + relativize(fs, ts) + case (fss, tss) => + List.fill(fss.length - 1)("..") ::: tss + } + relativize(thisPage.path.reverse, destPath.reverse).mkString("/") + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala new file mode 100644 index 0000000000..5781e680dd --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala @@ -0,0 +1,286 @@ +/* NSC -- new Scala compiler + * Copyright 2010-2013 LAMP/EPFL + * @author Stephane Micheloud + */ + +package scala.tools.nsc.doc.html + +import scala.xml.NodeSeq + +/** Highlight the syntax of Scala code appearing in a `{{{` wiki block + * (see method `HtmlPage.blockToHtml`). 
+ * + * @author Stephane Micheloud + * @version 1.0 + */ +private[html] object SyntaxHigh { + + /** Reserved words, sorted alphabetically + * (see [[scala.reflect.internal.StdNames]]) */ + val reserved = Array( + "abstract", "case", "catch", "class", "def", + "do", "else", "extends", "false", "final", "finally", + "for", "if", "implicit", "import", "lazy", "match", + "new", "null", "object", "override", "package", + "private", "protected", "return", "sealed", "super", + "this", "throw", "trait", "true", "try", "type", + "val", "var", "while", "with", "yield") + + /** Annotations, sorted alphabetically */ + val annotations = Array( + "BeanProperty", "SerialVersionUID", + "beanGetter", "beanSetter", "bridge", + "deprecated", "deprecatedName", "deprecatedOverriding", "deprecatedInheritance", + "elidable", "field", "getter", "inline", + "migration", "native", "noinline", "param", + "remote", "setter", "specialized", "strictfp", "switch", + "tailrec", "throws", "transient", + "unchecked", "uncheckedStable", "uncheckedVariance", + "varargs", "volatile") + + /** Standard library classes/objects, sorted alphabetically */ + val standards = Array ( + "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Array", + "Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest", + "Console", "Double", "Enumeration", "Float", "Function", "Int", + "List", "Long", "Manifest", "Map", + "NoManifest", "None", "Nothing", "Null", "Object", "Option", "OptManifest", + "Pair", "Predef", + "Seq", "Set", "Short", "Some", "String", "Symbol", + "Triple", "TypeTag", "Unit") + + def apply(data: String): NodeSeq = { + val buf = data.getBytes + val out = new StringBuilder + + def compare(offset: Int, key: String): Int = { + var i = offset + var j = 0 + val l = key.length + while (i < buf.length && j < l) { + val bch = buf(i).toChar + val kch = key charAt j + if (bch < kch) return -1 + else if (bch > kch) return 1 + i += 1 + j += 1 + } + if (j < l) -1 + else if (i < buf.length && + ('A' <= buf(i) && buf(i) <= 'Z' || + 'a' <= buf(i) && buf(i) <= 'z' || + '0' <= buf(i) && buf(i) <= '9' || + buf(i) == '_')) 1 + else 0 + } + + def lookup(a: Array[String], i: Int): Int = { + var lo = 0 + var hi = a.length - 1 + while (lo <= hi) { + val m = (hi + lo) / 2 + val d = compare(i, a(m)) + if (d < 0) hi = m - 1 + else if (d > 0) lo = m + 1 + else return m + } + -1 + } + + def comment(i: Int): String = { + val out = new StringBuilder("/") + def line(i: Int): Int = + if (i == buf.length || buf(i) == '\n') i + else { + out append buf(i).toChar + line(i+1) + } + var level = 0 + def multiline(i: Int, star: Boolean): Int = { + if (i == buf.length) return i + val ch = buf(i).toChar + out append ch + ch match { + case '*' => + if (star) level += 1 + multiline(i+1, !star) + case '/' => + if (star) { + if (level > 0) level -= 1 + if (level == 0) i else multiline(i+1, star = true) + } else + multiline(i+1, star = false) + case _ => + multiline(i+1, star = false) + } + } + if (buf(i) == '/') line(i) else multiline(i, star = true) + out.toString + } + + /* e.g. 
`val endOfLine = '\u000A'`*/ + def charlit(j: Int): String = { + val out = new StringBuilder("'") + def charlit0(i: Int, bslash: Boolean): Int = { + if (i == buf.length) i + else if (i > j+6) { out setLength 0; j } + else { + val ch = buf(i).toChar + out append ch + ch match { + case '\\' => + charlit0(i+1, bslash = true) + case '\'' if !bslash => + i + case _ => + if (bslash && '0' <= ch && ch <= '9') charlit0(i+1, bslash = true) + else charlit0(i+1, bslash = false) + } + } + } + charlit0(j, bslash = false) + out.toString + } + + def strlit(i: Int): String = { + val out = new StringBuilder("\"") + def strlit0(i: Int, bslash: Boolean): Int = { + if (i == buf.length) return i + val ch = buf(i).toChar + out append ch + ch match { + case '\\' => + strlit0(i+1, bslash = true) + case '"' if !bslash => + i + case _ => + strlit0(i+1, bslash = false) + } + } + strlit0(i, bslash = false) + out.toString + } + + def numlit(i: Int): String = { + val out = new StringBuilder + def intg(i: Int): Int = { + if (i == buf.length) return i + val ch = buf(i).toChar + ch match { + case '.' => + out append ch + frac(i+1) + case _ => + if (Character.isDigit(ch)) { + out append ch + intg(i+1) + } else i + } + } + def frac(i: Int): Int = { + if (i == buf.length) return i + val ch = buf(i).toChar + ch match { + case 'e' | 'E' => + out append ch + expo(i+1, signed = false) + case _ => + if (Character.isDigit(ch)) { + out append ch + frac(i+1) + } else i + } + } + def expo(i: Int, signed: Boolean): Int = { + if (i == buf.length) return i + val ch = buf(i).toChar + ch match { + case '+' | '-' if !signed => + out append ch + expo(i+1, signed = true) + case _ => + if (Character.isDigit(ch)) { + out append ch + expo(i+1, signed) + } else i + } + } + intg(i) + out.toString + } + + def parse(pre: String, i: Int): Int = { + out append pre + if (i == buf.length) return i + buf(i) match { + case '\n' => + parse("\n", i+1) + case ' ' => + parse(" ", i+1) + case '&' => + parse("&", i+1) + case '<' if i+1 < buf.length => + val ch = buf(i+1).toChar + if (ch == '-' || ch == ':' || ch == '%') + parse("<"+ch+"", i+2) + else + parse("<", i+1) + case '>' => + if (i+1 < buf.length && buf(i+1) == ':') + parse(">:", i+2) + else + parse(">", i+1) + case '=' => + if (i+1 < buf.length && buf(i+1) == '>') + parse("=>", i+2) + else + parse(buf(i).toChar.toString, i+1) + case '/' => + if (i+1 < buf.length && (buf(i+1) == '/' || buf(i+1) == '*')) { + val c = comment(i+1) + parse(""+c+"", i+c.length) + } else + parse(buf(i).toChar.toString, i+1) + case '\'' => + val s = charlit(i+1) + if (s.length > 0) + parse(""+s+"", i+s.length) + else + parse(buf(i).toChar.toString, i+1) + case '"' => + val s = strlit(i+1) + parse(""+s+"", i+s.length) + case '@' => + val k = lookup(annotations, i+1) + if (k >= 0) + parse("@"+annotations(k)+"", i+annotations(k).length+1) + else + parse(buf(i).toChar.toString, i+1) + case _ => + if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1).toChar))) { + if (Character.isDigit(buf(i)) || + (buf(i) == '.' 
&& i + 1 < buf.length && Character.isDigit(buf(i+1)))) { + val s = numlit(i) + parse(""+s+"", i+s.length) + } else { + val k = lookup(reserved, i) + if (k >= 0) + parse(""+reserved(k)+"", i+reserved(k).length) + else { + val k = lookup(standards, i) + if (k >= 0) + parse(""+standards(k)+"", i+standards(k).length) + else + parse(buf(i).toChar.toString, i+1) + } + } + } else + parse(buf(i).toChar.toString, i+1) + } + i + } + + parse("", 0) + scala.xml.Unparsed(out.toString) + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala new file mode 100644 index 0000000000..c034647320 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala @@ -0,0 +1,133 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author David Bernard, Manohar Jonnalagedda + */ + +package scala.tools.nsc +package doc +package html +package page + +import model._ +import scala.collection._ +import scala.xml._ + +class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage { + + def path = List("index.html") + + def title = { + val s = universe.settings + ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) + + ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" ) + } + + val headers = + + + + + + + + + + val body = + +
              + + + + +
              + { browser } +
              + ':""),e._keyEvent=!1,B},_generateMonthYearHeader:function(e,t,n,r,i,s,o,u){var a=this._get(e,"changeMonth"),f=this._get(e,"changeYear"),l=this._get(e,"showMonthAfterYear"),c='
              ',h="";if(s||!a)h+=''+o[t]+"";else{var p=r&&r.getFullYear()==n,d=i&&i.getFullYear()==n;h+='"}l||(c+=h+(s||!a||!f?" ":""));if(!e.yearshtml){e.yearshtml="";if(s||!f)c+=''+n+"";else{var m=this._get(e,"yearRange").split(":"),g=(new Date).getFullYear(),y=function(e){var t=e.match(/c[+-].*/)?n+parseInt(e.substring(1),10):e.match(/[+-].*/)?g+parseInt(e,10):parseInt(e,10);return isNaN(t)?g:t},b=y(m[0]),w=Math.max(b,y(m[1]||""));b=r?Math.max(b,r.getFullYear()):b,w=i?Math.min(w,i.getFullYear()):w,e.yearshtml+='",c+=e.yearshtml,e.yearshtml=null}}return c+=this._get(e,"yearSuffix"),l&&(c+=(s||!a||!f?" ":"")+h),c+="
              ",c},_adjustInstDate:function(e,t,n){var r=e.drawYear+(n=="Y"?t:0),i=e.drawMonth+(n=="M"?t:0),s=Math.min(e.selectedDay,this._getDaysInMonth(r,i))+(n=="D"?t:0),o=this._restrictMinMax(e,this._daylightSavingAdjust(new Date(r,i,s)));e.selectedDay=o.getDate(),e.drawMonth=e.selectedMonth=o.getMonth(),e.drawYear=e.selectedYear=o.getFullYear(),(n=="M"||n=="Y")&&this._notifyChange(e)},_restrictMinMax:function(e,t){var n=this._getMinMaxDate(e,"min"),r=this._getMinMaxDate(e,"max"),i=n&&tr?r:i,i},_notifyChange:function(e){var t=this._get(e,"onChangeMonthYear");t&&t.apply(e.input?e.input[0]:null,[e.selectedYear,e.selectedMonth+1,e])},_getNumberOfMonths:function(e){var t=this._get(e,"numberOfMonths");return t==null?[1,1]:typeof t=="number"?[1,t]:t},_getMinMaxDate:function(e,t){return this._determineDate(e,this._get(e,t+"Date"),null)},_getDaysInMonth:function(e,t){return 32-this._daylightSavingAdjust(new Date(e,t,32)).getDate()},_getFirstDayOfMonth:function(e,t){return(new Date(e,t,1)).getDay()},_canAdjustMonth:function(e,t,n,r){var i=this._getNumberOfMonths(e),s=this._daylightSavingAdjust(new Date(n,r+(t<0?t:i[0]*i[1]),1));return t<0&&s.setDate(this._getDaysInMonth(s.getFullYear(),s.getMonth())),this._isInRange(e,s)},_isInRange:function(e,t){var n=this._getMinMaxDate(e,"min"),r=this._getMinMaxDate(e,"max");return(!n||t.getTime()>=n.getTime())&&(!r||t.getTime()<=r.getTime())},_getFormatConfig:function(e){var t=this._get(e,"shortYearCutoff");return t=typeof t!="string"?t:(new Date).getFullYear()%100+parseInt(t,10),{shortYearCutoff:t,dayNamesShort:this._get(e,"dayNamesShort"),dayNames:this._get(e,"dayNames"),monthNamesShort:this._get(e,"monthNamesShort"),monthNames:this._get(e,"monthNames")}},_formatDate:function(e,t,n,r){t||(e.currentDay=e.selectedDay,e.currentMonth=e.selectedMonth,e.currentYear=e.selectedYear);var i=t?typeof t=="object"?t:this._daylightSavingAdjust(new Date(r,n,t)):this._daylightSavingAdjust(new Date(e.currentYear,e.currentMonth,e.currentDay));return this.formatDate(this._get(e,"dateFormat"),i,this._getFormatConfig(e))}}),$.fn.datepicker=function(e){if(!this.length)return this;$.datepicker.initialized||($(document).mousedown($.datepicker._checkExternalClick).find(document.body).append($.datepicker.dpDiv),$.datepicker.initialized=!0);var t=Array.prototype.slice.call(arguments,1);return typeof e!="string"||e!="isDisabled"&&e!="getDate"&&e!="widget"?e=="option"&&arguments.length==2&&typeof arguments[1]=="string"?$.datepicker["_"+e+"Datepicker"].apply($.datepicker,[this[0]].concat(t)):this.each(function(){typeof e=="string"?$.datepicker["_"+e+"Datepicker"].apply($.datepicker,[this].concat(t)):$.datepicker._attachDatepicker(this,e)}):$.datepicker["_"+e+"Datepicker"].apply($.datepicker,[this[0]].concat(t))},$.datepicker=new Datepicker,$.datepicker.initialized=!1,$.datepicker.uuid=(new Date).getTime(),$.datepicker.version="1.9.0",window["DP_jQuery_"+dpuuid]=$})(jQuery);(function(e,t){var n="ui-dialog ui-widget ui-widget-content ui-corner-all ",r={buttons:!0,height:!0,maxHeight:!0,maxWidth:!0,minHeight:!0,minWidth:!0,width:!0},i={maxHeight:!0,maxWidth:!0,minHeight:!0,minWidth:!0};e.widget("ui.dialog",{version:"1.9.0",options:{autoOpen:!0,buttons:{},closeOnEscape:!0,closeText:"close",dialogClass:"",draggable:!0,hide:null,height:"auto",maxHeight:!1,maxWidth:!1,minHeight:150,minWidth:150,modal:!1,position:{my:"center",at:"center",of:window,collision:"fit",using:function(t){var 
n=e(this).css(t).offset().top;n<0&&e(this).css("top",t.top-n)}},resizable:!0,show:null,stack:!0,title:"",width:300,zIndex:1e3},_create:function(){this.originalTitle=this.element.attr("title"),typeof this.originalTitle!="string"&&(this.originalTitle=""),this.oldPosition={parent:this.element.parent(),index:this.element.parent().children().index(this.element)},this.options.title=this.options.title||this.originalTitle;var t=this,r=this.options,i=r.title||" ",s=(this.uiDialog=e("
              ")).addClass(n+r.dialogClass).css({display:"none",outline:0,zIndex:r.zIndex}).attr("tabIndex",-1).keydown(function(n){r.closeOnEscape&&!n.isDefaultPrevented()&&n.keyCode&&n.keyCode===e.ui.keyCode.ESCAPE&&(t.close(n),n.preventDefault())}).mousedown(function(e){t.moveToTop(!1,e)}).appendTo("body"),o=this.element.show().removeAttr("title").addClass("ui-dialog-content ui-widget-content").appendTo(s),u=(this.uiDialogTitlebar=e("
              ")).addClass("ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix").prependTo(s),a=e("").addClass("ui-dialog-titlebar-close ui-corner-all").attr("role","button").click(function(e){e.preventDefault(),t.close(e)}).appendTo(u),f=(this.uiDialogTitlebarCloseText=e("")).addClass("ui-icon ui-icon-closethick").text(r.closeText).appendTo(a),l=e("").uniqueId().addClass("ui-dialog-title").html(i).prependTo(u),c=(this.uiDialogButtonPane=e("
              ")).addClass("ui-dialog-buttonpane ui-widget-content ui-helper-clearfix"),h=(this.uiButtonSet=e("
              ")).addClass("ui-dialog-buttonset").appendTo(c);s.attr({role:"dialog","aria-labelledby":l.attr("id")}),u.find("*").add(u).disableSelection(),this._hoverable(a),this._focusable(a),r.draggable&&e.fn.draggable&&this._makeDraggable(),r.resizable&&e.fn.resizable&&this._makeResizable(),this._createButtons(r.buttons),this._isOpen=!1,e.fn.bgiframe&&s.bgiframe(),this._on(s,{keydown:function(t){if(!r.modal||t.keyCode!==e.ui.keyCode.TAB)return;var n=e(":tabbable",s),i=n.filter(":first"),o=n.filter(":last");if(t.target===o[0]&&!t.shiftKey)return i.focus(1),!1;if(t.target===i[0]&&t.shiftKey)return o.focus(1),!1}})},_init:function(){this.options.autoOpen&&this.open()},_destroy:function(){var e,t=this.oldPosition;this.overlay&&this.overlay.destroy(),this.uiDialog.hide(),this.element.removeClass("ui-dialog-content ui-widget-content").hide().appendTo("body"),this.uiDialog.remove(),this.originalTitle&&this.element.attr("title",this.originalTitle),e=t.parent.children().eq(t.index),e.length&&e[0]!==this.element[0]?e.before(this.element):t.parent.append(this.element)},widget:function(){return this.uiDialog},close:function(t){var n=this,r,i;if(!this._isOpen)return;if(!1===this._trigger("beforeClose",t))return;return this._isOpen=!1,this.overlay&&this.overlay.destroy(),this.options.hide?this.uiDialog.hide(this.options.hide,function(){n._trigger("close",t)}):(this.uiDialog.hide(),this._trigger("close",t)),e.ui.dialog.overlay.resize(),this.options.modal&&(r=0,e(".ui-dialog").each(function(){this!==n.uiDialog[0]&&(i=e(this).css("z-index"),isNaN(i)||(r=Math.max(r,i)))}),e.ui.dialog.maxZ=r),this},isOpen:function(){return this._isOpen},moveToTop:function(t,n){var r=this.options,i;return r.modal&&!t||!r.stack&&!r.modal?this._trigger("focus",n):(r.zIndex>e.ui.dialog.maxZ&&(e.ui.dialog.maxZ=r.zIndex),this.overlay&&(e.ui.dialog.maxZ+=1,e.ui.dialog.overlay.maxZ=e.ui.dialog.maxZ,this.overlay.$el.css("z-index",e.ui.dialog.overlay.maxZ)),i={scrollTop:this.element.scrollTop(),scrollLeft:this.element.scrollLeft()},e.ui.dialog.maxZ+=1,this.uiDialog.css("z-index",e.ui.dialog.maxZ),this.element.attr(i),this._trigger("focus",n),this)},open:function(){if(this._isOpen)return;var t,n=this.options,r=this.uiDialog;return this._size(),this._position(n.position),r.show(n.show),this.overlay=n.modal?new e.ui.dialog.overlay(this):null,this.moveToTop(!0),t=this.element.find(":tabbable"),t.length||(t=this.uiDialogButtonPane.find(":tabbable"),t.length||(t=r)),t.eq(0).focus(),this._isOpen=!0,this._trigger("open"),this},_createButtons:function(t){var n,r,i=this,s=!1;this.uiDialogButtonPane.remove(),this.uiButtonSet.empty(),typeof t=="object"&&t!==null&&e.each(t,function(){return!(s=!0)}),s?(e.each(t,function(t,n){n=e.isFunction(n)?{click:n,text:t}:n;var r=e("
              ').css({width:this.offsetWidth+"px",height:this.offsetHeight+"px",position:"absolute",opacity:"0.001",zIndex:1e3}).css(e(this).offset()).appendTo("body")}),!0):!1)},_mouseStart:function(t){var n=this.options;return this.helper=this._createHelper(t),this.helper.addClass("ui-draggable-dragging"),this._cacheHelperProportions(),e.ui.ddmanager&&(e.ui.ddmanager.current=this),this._cacheMargins(),this.cssPosition=this.helper.css("position"),this.scrollParent=this.helper.scrollParent(),this.offset=this.positionAbs=this.element.offset(),this.offset={top:this.offset.top-this.margins.top,left:this.offset.left-this.margins.left},e.extend(this.offset,{click:{left:t.pageX-this.offset.left,top:t.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()}),this.originalPosition=this.position=this._generatePosition(t),this.originalPageX=t.pageX,this.originalPageY=t.pageY,n.cursorAt&&this._adjustOffsetFromHelper(n.cursorAt),n.containment&&this._setContainment(),this._trigger("start",t)===!1?(this._clear(),!1):(this._cacheHelperProportions(),e.ui.ddmanager&&!n.dropBehaviour&&e.ui.ddmanager.prepareOffsets(this,t),this._mouseDrag(t,!0),e.ui.ddmanager&&e.ui.ddmanager.dragStart(this,t),!0)},_mouseDrag:function(t,n){this.position=this._generatePosition(t),this.positionAbs=this._convertPositionTo("absolute");if(!n){var r=this._uiHash();if(this._trigger("drag",t,r)===!1)return this._mouseUp({}),!1;this.position=r.position}if(!this.options.axis||this.options.axis!="y")this.helper[0].style.left=this.position.left+"px";if(!this.options.axis||this.options.axis!="x")this.helper[0].style.top=this.position.top+"px";return e.ui.ddmanager&&e.ui.ddmanager.drag(this,t),!1},_mouseStop:function(t){var n=!1;e.ui.ddmanager&&!this.options.dropBehaviour&&(n=e.ui.ddmanager.drop(this,t)),this.dropped&&(n=this.dropped,this.dropped=!1);var r=this.element[0],i=!1;while(r&&(r=r.parentNode))r==document&&(i=!0);if(!i&&this.options.helper==="original")return!1;if(this.options.revert=="invalid"&&!n||this.options.revert=="valid"&&n||this.options.revert===!0||e.isFunction(this.options.revert)&&this.options.revert.call(this.element,n)){var s=this;e(this.helper).animate(this.originalPosition,parseInt(this.options.revertDuration,10),function(){s._trigger("stop",t)!==!1&&s._clear()})}else this._trigger("stop",t)!==!1&&this._clear();return!1},_mouseUp:function(t){return e("div.ui-draggable-iframeFix").each(function(){this.parentNode.removeChild(this)}),e.ui.ddmanager&&e.ui.ddmanager.dragStop(this,t),e.ui.mouse.prototype._mouseUp.call(this,t)},cancel:function(){return this.helper.is(".ui-draggable-dragging")?this._mouseUp({}):this._clear(),this},_getHandle:function(t){var n=!this.options.handle||!e(this.options.handle,this.element).length?!0:!1;return e(this.options.handle,this.element).find("*").andSelf().each(function(){this==t.target&&(n=!0)}),n},_createHelper:function(t){var n=this.options,r=e.isFunction(n.helper)?e(n.helper.apply(this.element[0],[t])):n.helper=="clone"?this.element.clone().removeAttr("id"):this.element;return r.parents("body").length||r.appendTo(n.appendTo=="parent"?this.element[0].parentNode:n.appendTo),r[0]!=this.element[0]&&!/(fixed|absolute)/.test(r.css("position"))&&r.css("position","absolute"),r},_adjustOffsetFromHelper:function(t){typeof t=="string"&&(t=t.split(" ")),e.isArray(t)&&(t={left:+t[0],top:+t[1]||0}),"left"in t&&(this.offset.click.left=t.left+this.margins.left),"right"in t&&(this.offset.click.left=this.helperProportions.width-t.right+this.margins.left),"top"in 
t&&(this.offset.click.top=t.top+this.margins.top),"bottom"in t&&(this.offset.click.top=this.helperProportions.height-t.bottom+this.margins.top)},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var t=this.offsetParent.offset();this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&e.contains(this.scrollParent[0],this.offsetParent[0])&&(t.left+=this.scrollParent.scrollLeft(),t.top+=this.scrollParent.scrollTop());if(this.offsetParent[0]==document.body||this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&e.browser.msie)t={top:0,left:0};return{top:t.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:t.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var e=this.element.position();return{top:e.top-(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:e.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}return{top:0,left:0}},_cacheMargins:function(){this.margins={left:parseInt(this.element.css("marginLeft"),10)||0,top:parseInt(this.element.css("marginTop"),10)||0,right:parseInt(this.element.css("marginRight"),10)||0,bottom:parseInt(this.element.css("marginBottom"),10)||0}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var t=this.options;t.containment=="parent"&&(t.containment=this.helper[0].parentNode);if(t.containment=="document"||t.containment=="window")this.containment=[t.containment=="document"?0:e(window).scrollLeft()-this.offset.relative.left-this.offset.parent.left,t.containment=="document"?0:e(window).scrollTop()-this.offset.relative.top-this.offset.parent.top,(t.containment=="document"?0:e(window).scrollLeft())+e(t.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(t.containment=="document"?0:e(window).scrollTop())+(e(t.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top];if(!/^(document|window|parent)$/.test(t.containment)&&t.containment.constructor!=Array){var n=e(t.containment),r=n[0];if(!r)return;var i=n.offset(),s=e(r).css("overflow")!="hidden";this.containment=[(parseInt(e(r).css("borderLeftWidth"),10)||0)+(parseInt(e(r).css("paddingLeft"),10)||0),(parseInt(e(r).css("borderTopWidth"),10)||0)+(parseInt(e(r).css("paddingTop"),10)||0),(s?Math.max(r.scrollWidth,r.offsetWidth):r.offsetWidth)-(parseInt(e(r).css("borderLeftWidth"),10)||0)-(parseInt(e(r).css("paddingRight"),10)||0)-this.helperProportions.width-this.margins.left-this.margins.right,(s?Math.max(r.scrollHeight,r.offsetHeight):r.offsetHeight)-(parseInt(e(r).css("borderTopWidth"),10)||0)-(parseInt(e(r).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top-this.margins.bottom],this.relative_container=n}else t.containment.constructor==Array&&(this.containment=t.containment)},_convertPositionTo:function(t,n){n||(n=this.position);var 
r=t=="absolute"?1:-1,i=this.options,s=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,o=/(html|body)/i.test(s[0].tagName);return{top:n.top+this.offset.relative.top*r+this.offset.parent.top*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():o?0:s.scrollTop())*r,left:n.left+this.offset.relative.left*r+this.offset.parent.left*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():o?0:s.scrollLeft())*r}},_generatePosition:function(t){var n=this.options,r=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,i=/(html|body)/i.test(r[0].tagName),s=t.pageX,o=t.pageY;if(this.originalPosition){var u;if(this.containment){if(this.relative_container){var a=this.relative_container.offset();u=[this.containment[0]+a.left,this.containment[1]+a.top,this.containment[2]+a.left,this.containment[3]+a.top]}else u=this.containment;t.pageX-this.offset.click.leftu[2]&&(s=u[2]+this.offset.click.left),t.pageY-this.offset.click.top>u[3]&&(o=u[3]+this.offset.click.top)}if(n.grid){var f=n.grid[1]?this.originalPageY+Math.round((o-this.originalPageY)/n.grid[1])*n.grid[1]:this.originalPageY;o=u?f-this.offset.click.topu[3]?f-this.offset.click.topu[2]?l-this.offset.click.left=0;l--){var c=r.snapElements[l].left,h=c+r.snapElements[l].width,p=r.snapElements[l].top,d=p+r.snapElements[l].height;if(!(c-s=l&&o<=c||u>=l&&u<=c||oc)&&(i>=a&&i<=f||s>=a&&s<=f||if);default:return!1}},e.ui.ddmanager={current:null,droppables:{"default":[]},prepareOffsets:function(t,n){var r=e.ui.ddmanager.droppables[t.options.scope]||[],i=n?n.type:null,s=(t.currentItem||t.element).find(":data(droppable)").andSelf();e:for(var o=0;oe?0:r.max")[0],c,h=t.each;l.style.cssText="background-color:rgba(1,1,1,.5)",f.rgba=l.style.backgroundColor.indexOf("rgba")>-1,h(u,function(e,t){t.cache="_"+e,t.props.alpha={idx:3,type:"percent",def:1}}),o.fn=t.extend(o.prototype,{parse:function(r,i,s,a){if(r===n)return this._rgba=[null,null,null,null],this;if(r.jquery||r.nodeType)r=t(r).css(i),i=n;var f=this,l=t.type(r),v=this._rgba=[],m;i!==n&&(r=[r,i,s,a],l="array");if(l==="string")return this.parse(d(r)||c._default);if(l==="array")return h(u.rgba.props,function(e,t){v[t.idx]=p(r[t.idx],t)}),this;if(l==="object")return r instanceof o?h(u,function(e,t){r[t.cache]&&(f[t.cache]=r[t.cache].slice())}):h(u,function(t,n){var i=n.cache;h(n.props,function(e,t){if(!f[i]&&n.to){if(e==="alpha"||r[e]==null)return;f[i]=n.to(f._rgba)}f[i][t.idx]=p(r[e],t,!0)}),f[i]&&e.inArray(null,f[i].slice(0,3))<0&&(f[i][3]=1,n.from&&(f._rgba=n.from(f[i])))}),this},is:function(e){var t=o(e),n=!0,r=this;return h(u,function(e,i){var s,o=t[i.cache];return o&&(s=r[i.cache]||i.to&&i.to(r._rgba)||[],h(i.props,function(e,t){if(o[t.idx]!=null)return n=o[t.idx]===s[t.idx],n})),n}),n},_space:function(){var e=[],t=this;return h(u,function(n,r){t[r.cache]&&e.push(n)}),e.pop()},transition:function(e,t){var n=o(e),r=n._space(),i=u[r],s=this.alpha()===0?o("transparent"):this,f=s[i.cache]||i.to(s._rgba),l=f.slice();return n=n[i.cache],h(i.props,function(e,r){var i=r.idx,s=f[i],o=n[i],u=a[r.type]||{};if(o===null)return;s===null?l[i]=o:(u.mod&&(o-s>u.mod/2?s+=u.mod:s-o>u.mod/2&&(s-=u.mod)),l[i]=p((o-s)*t+s,r))}),this[r](l)},blend:function(e){if(this._rgba[3]===1)return this;var n=this._rgba.slice(),r=n.pop(),i=o(e)._rgba;return o(t.map(n,function(e,t){return(1-r)*i[t]+r*e}))},toRgbaString:function(){var 
e="rgba(",n=t.map(this._rgba,function(e,t){return e==null?t>2?1:0:e});return n[3]===1&&(n.pop(),e="rgb("),e+n.join()+")"},toHslaString:function(){var e="hsla(",n=t.map(this.hsla(),function(e,t){return e==null&&(e=t>2?1:0),t&&t<3&&(e=Math.round(e*100)+"%"),e});return n[3]===1&&(n.pop(),e="hsl("),e+n.join()+")"},toHexString:function(e){var n=this._rgba.slice(),r=n.pop();return e&&n.push(~~(r*255)),"#"+t.map(n,function(e,t){return e=(e||0).toString(16),e.length===1?"0"+e:e}).join("")},toString:function(){return this._rgba[3]===0?"transparent":this.toRgbaString()}}),o.fn.parse.prototype=o.fn,u.hsla.to=function(e){if(e[0]==null||e[1]==null||e[2]==null)return[null,null,null,e[3]];var t=e[0]/255,n=e[1]/255,r=e[2]/255,i=e[3],s=Math.max(t,n,r),o=Math.min(t,n,r),u=s-o,a=s+o,f=a*.5,l,c;return o===s?l=0:t===s?l=60*(n-r)/u+360:n===s?l=60*(r-t)/u+120:l=60*(t-n)/u+240,f===0||f===1?c=f:f<=.5?c=u/a:c=u/(2-a),[Math.round(l)%360,c,f,i==null?1:i]},u.hsla.from=function(e){if(e[0]==null||e[1]==null||e[2]==null)return[null,null,null,e[3]];var t=e[0]/360,n=e[1],r=e[2],i=e[3],s=r<=.5?r*(1+n):r+n-r*n,o=2*r-s,u,a,f;return[Math.round(v(o,s,t+1/3)*255),Math.round(v(o,s,t)*255),Math.round(v(o,s,t-1/3)*255),i]},h(u,function(e,r){var s=r.props,u=r.cache,a=r.to,f=r.from;o.fn[e]=function(e){a&&!this[u]&&(this[u]=a(this._rgba));if(e===n)return this[u].slice();var r,i=t.type(e),l=i==="array"||i==="object"?e:arguments,c=this[u].slice();return h(s,function(e,t){var n=l[i==="object"?e:t.idx];n==null&&(n=c[t.idx]),c[t.idx]=p(n,t)}),f?(r=o(f(c)),r[u]=c,r):o(c)},h(s,function(n,r){if(o.fn[n])return;o.fn[n]=function(s){var o=t.type(s),u=n==="alpha"?this._hsla?"hsla":"rgba":e,a=this[u](),f=a[r.idx],l;return o==="undefined"?f:(o==="function"&&(s=s.call(this,f),o=t.type(s)),s==null&&r.empty?this:(o==="string"&&(l=i.exec(s),l&&(s=f+parseFloat(l[2])*(l[1]==="+"?1:-1))),a[r.idx]=s,this[u](a)))}})}),h(r,function(e,n){t.cssHooks[n]={set:function(e,r){var i,s,u="";if(t.type(r)!=="string"||(i=d(r))){r=o(i||r);if(!f.rgba&&r._rgba[3]!==1){s=n==="backgroundColor"?e.parentNode:e;while((u===""||u==="transparent")&&s&&s.style)try{u=t.css(s,"backgroundColor"),s=s.parentNode}catch(a){}r=r.blend(u&&u!=="transparent"?u:"_default")}r=r.toRgbaString()}try{e.style[n]=r}catch(r){}}},t.fx.step[n]=function(e){e.colorInit||(e.start=o(e.elem,n),e.end=o(e.end),e.colorInit=!0),t.cssHooks[n].set(e.elem,e.start.transition(e.end,e.pos))}}),t.cssHooks.borderColor={expand:function(e){var t={};return h(["Top","Right","Bottom","Left"],function(n,r){t["border"+r+"Color"]=e}),t}},c=t.Color.names={aqua:"#00ffff",black:"#000000",blue:"#0000ff",fuchsia:"#ff00ff",gray:"#808080",green:"#008000",lime:"#00ff00",maroon:"#800000",navy:"#000080",olive:"#808000",purple:"#800080",red:"#ff0000",silver:"#c0c0c0",teal:"#008080",white:"#ffffff",yellow:"#ffff00",transparent:[null,null,null,0],_default:"#ffffff"}}(jQuery),function(){function i(){var t=this.ownerDocument.defaultView?this.ownerDocument.defaultView.getComputedStyle(this,null):this.currentStyle,n={},r,i,s;if(t&&t.length&&t[0]&&t[t[0]]){s=t.length;while(s--)r=t[s],typeof t[r]=="string"&&(n[e.camelCase(r)]=t[r])}else for(r in t)typeof t[r]=="string"&&(n[r]=t[r]);return n}function s(t,n){var i={},s,o;for(s in n)o=n[s],t[s]!==o&&!r[s]&&(e.fx.step[s]||!isNaN(parseFloat(o)))&&(i[s]=o);return i}var 
n=["add","remove","toggle"],r={border:1,borderBottom:1,borderColor:1,borderLeft:1,borderRight:1,borderTop:1,borderWidth:1,margin:1,padding:1};e.each(["borderLeftStyle","borderRightStyle","borderBottomStyle","borderTopStyle"],function(t,n){e.fx.step[n]=function(e){if(e.end!=="none"&&!e.setAttr||e.pos===1&&!e.setAttr)jQuery.style(e.elem,n,e.end),e.setAttr=!0}}),e.effects.animateClass=function(t,r,o,u){var a=e.speed(r,o,u);return this.queue(function(){var r=e(this),o=r.attr("class")||"",u,f=a.children?r.find("*").andSelf():r;f=f.map(function(){var t=e(this);return{el:t,start:i.call(this)}}),u=function(){e.each(n,function(e,n){t[n]&&r[n+"Class"](t[n])})},u(),f=f.map(function(){return this.end=i.call(this.el[0]),this.diff=s(this.start,this.end),this}),r.attr("class",o),f=f.map(function(){var t=this,n=e.Deferred(),r=jQuery.extend({},a,{queue:!1,complete:function(){n.resolve(t)}});return this.el.animate(this.diff,r),n.promise()}),e.when.apply(e,f.get()).done(function(){u(),e.each(arguments,function(){var t=this.el;e.each(this.diff,function(e){t.css(e,"")})}),a.complete.call(r[0])})})},e.fn.extend({_addClass:e.fn.addClass,addClass:function(t,n,r,i){return n?e.effects.animateClass.call(this,{add:t},n,r,i):this._addClass(t)},_removeClass:e.fn.removeClass,removeClass:function(t,n,r,i){return n?e.effects.animateClass.call(this,{remove:t},n,r,i):this._removeClass(t)},_toggleClass:e.fn.toggleClass,toggleClass:function(n,r,i,s,o){return typeof r=="boolean"||r===t?i?e.effects.animateClass.call(this,r?{add:n}:{remove:n},i,s,o):this._toggleClass(n,r):e.effects.animateClass.call(this,{toggle:n},r,i,s)},switchClass:function(t,n,r,i,s){return e.effects.animateClass.call(this,{add:n,remove:t},r,i,s)}})}(),function(){function i(n,r,i,s){e.isPlainObject(n)&&(r=n,n=n.effect),n={effect:n},r===t&&(r={}),e.isFunction(r)&&(s=r,i=null,r={});if(typeof r=="number"||e.fx.speeds[r])s=i,i=r,r={};return e.isFunction(i)&&(s=i,i=null),r&&e.extend(n,r),i=i||r.duration,n.duration=e.fx.off?0:typeof i=="number"?i:i in e.fx.speeds?e.fx.speeds[i]:e.fx.speeds._default,n.complete=s||r.complete,n}function s(t){return!t||typeof t=="number"||e.fx.speeds[t]?!0:typeof t=="string"&&!e.effects.effect[t]?n&&e.effects[t]?!1:!0:!1}e.extend(e.effects,{version:"1.9.0",save:function(e,t){for(var n=0;n
              ").addClass("ui-effects-wrapper").css({fontSize:"100%",background:"transparent",border:"none",margin:0,padding:0}),i={width:t.width(),height:t.height()},s=document.activeElement;try{s.id}catch(o){s=document.body}return t.wrap(r),(t[0]===s||e.contains(t[0],s))&&e(s).focus(),r=t.parent(),t.css("position")==="static"?(r.css({position:"relative"}),t.css({position:"relative"})):(e.extend(n,{position:t.css("position"),zIndex:t.css("z-index")}),e.each(["top","left","bottom","right"],function(e,r){n[r]=t.css(r),isNaN(parseInt(n[r],10))&&(n[r]="auto")}),t.css({position:"relative",top:0,left:0,right:"auto",bottom:"auto"})),t.css(i),r.css(n).show()},removeWrapper:function(t){var n=document.activeElement;return t.parent().is(".ui-effects-wrapper")&&(t.parent().replaceWith(t),(t[0]===n||e.contains(t[0],n))&&e(n).focus()),t},setTransition:function(t,n,r,i){return i=i||{},e.each(n,function(e,n){var s=t.cssUnit(n);s[0]>0&&(i[n]=s[0]*r+s[1])}),i}}),e.fn.extend({effect:function(t,r,s,o){function h(t){function s(){e.isFunction(r)&&r.call(n[0]),e.isFunction(t)&&t()}var n=e(this),r=u.complete,i=u.mode;(n.is(":hidden")?i==="hide":i==="show")?s():l.call(n[0],u,s)}var u=i.apply(this,arguments),a=u.mode,f=u.queue,l=e.effects.effect[u.effect],c=!l&&n&&e.effects[u.effect];return e.fx.off||!l&&!c?a?this[a](u.duration,u.complete):this.each(function(){u.complete&&u.complete.call(this)}):l?f===!1?this.each(h):this.queue(f||"fx",h):c.call(this,{options:u,duration:u.duration,callback:u.complete,mode:u.mode})},_show:e.fn.show,show:function(e){if(s(e))return this._show.apply(this,arguments);var t=i.apply(this,arguments);return t.mode="show",this.effect.call(this,t)},_hide:e.fn.hide,hide:function(e){if(s(e))return this._hide.apply(this,arguments);var t=i.apply(this,arguments);return t.mode="hide",this.effect.call(this,t)},__toggle:e.fn.toggle,toggle:function(t){if(s(t)||typeof t=="boolean"||e.isFunction(t))return this.__toggle.apply(this,arguments);var n=i.apply(this,arguments);return n.mode="toggle",this.effect.call(this,n)},cssUnit:function(t){var n=this.css(t),r=[];return e.each(["em","px","%","pt"],function(e,t){n.indexOf(t)>0&&(r=[parseFloat(n),t])}),r}})}(),function(){var t={};e.each(["Quad","Cubic","Quart","Quint","Expo"],function(e,n){t[n]=function(t){return Math.pow(t,e+2)}}),e.extend(t,{Sine:function(e){return 1-Math.cos(e*Math.PI/2)},Circ:function(e){return 1-Math.sqrt(1-e*e)},Elastic:function(e){return e===0||e===1?e:-Math.pow(2,8*(e-1))*Math.sin(((e-1)*80-7.5)*Math.PI/15)},Back:function(e){return e*e*(3*e-2)},Bounce:function(e){var t,n=4;while(e<((t=Math.pow(2,--n))-1)/11);return 1/Math.pow(4,3-n)-7.5625*Math.pow((t*3-2)/22-e,2)}}),e.each(t,function(t,n){e.easing["easeIn"+t]=n,e.easing["easeOut"+t]=function(e){return 1-n(1-e)},e.easing["easeInOut"+t]=function(e){return e<.5?n(e*2)/2:1-n(e*-2+2)/2}})}()}(jQuery);(function(e,t){var n=/up|down|vertical/,r=/up|left|vertical|horizontal/;e.effects.effect.blind=function(t,i){var 
s=e(this),o=["position","top","bottom","left","right","height","width"],u=e.effects.setMode(s,t.mode||"hide"),a=t.direction||"up",f=n.test(a),l=f?"height":"width",c=f?"top":"left",h=r.test(a),p={},d=u==="show",v,m,g;s.parent().is(".ui-effects-wrapper")?e.effects.save(s.parent(),o):e.effects.save(s,o),s.show(),v=e.effects.createWrapper(s).css({overflow:"hidden"}),m=v[l](),g=parseFloat(v.css(c))||0,p[l]=d?m:0,h||(s.css(f?"bottom":"right",0).css(f?"top":"left","auto").css({position:"absolute"}),p[c]=d?g:m+g),d&&(v.css(l,0),h||v.css(c,g+m)),v.animate(p,{duration:t.duration,easing:t.easing,queue:!1,complete:function(){u==="hide"&&s.hide(),e.effects.restore(s,o),e.effects.removeWrapper(s),i()}})}})(jQuery);(function(e,t){e.effects.effect.bounce=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"effect"),o=s==="hide",u=s==="show",a=t.direction||"up",f=t.distance,l=t.times||5,c=l*2+(u||o?1:0),h=t.duration/c,p=t.easing,d=a==="up"||a==="down"?"top":"left",v=a==="up"||a==="left",m,g,y,b=r.queue(),w=b.length;(u||o)&&i.push("opacity"),e.effects.save(r,i),r.show(),e.effects.createWrapper(r),f||(f=r[d==="top"?"outerHeight":"outerWidth"]()/3),u&&(y={opacity:1},y[d]=0,r.css("opacity",0).css(d,v?-f*2:f*2).animate(y,h,p)),o&&(f/=Math.pow(2,l-1)),y={},y[d]=0;for(m=0;m1&&b.splice.apply(b,[1,0].concat(b.splice(w,c+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.clip=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=t.direction||"vertical",a=u==="vertical",f=a?"height":"width",l=a?"top":"left",c={},h,p,d;e.effects.save(r,i),r.show(),h=e.effects.createWrapper(r).css({overflow:"hidden"}),p=r[0].tagName==="IMG"?h:r,d=p[f](),o&&(p.css(f,0),p.css(l,d/2)),c[f]=o?d:0,c[l]=o?0:d/2,p.animate(c,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){o||r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.drop=function(t,n){var r=e(this),i=["position","top","bottom","left","right","opacity","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=t.direction||"left",a=u==="up"||u==="down"?"top":"left",f=u==="up"||u==="left"?"pos":"neg",l={opacity:o?1:0},c;e.effects.save(r,i),r.show(),e.effects.createWrapper(r),c=t.distance||r[a==="top"?"outerHeight":"outerWidth"](!0)/2,o&&r.css("opacity",0).css(a,f==="pos"?-c:c),l[a]=(o?f==="pos"?"+=":"-=":f==="pos"?"-=":"+=")+c,r.animate(l,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.explode=function(t,n){function y(){c.push(this),c.length===r*i&&b()}function b(){s.css({visibility:"visible"}),e(c).remove(),u||s.hide(),n()}var r=t.pieces?Math.round(Math.sqrt(t.pieces)):3,i=r,s=e(this),o=e.effects.setMode(s,t.mode||"hide"),u=o==="show",a=s.show().css("visibility","hidden").offset(),f=Math.ceil(s.outerWidth()/i),l=Math.ceil(s.outerHeight()/r),c=[],h,p,d,v,m,g;for(h=0;h
              ").css({position:"absolute",visibility:"visible",left:-p*f,top:-h*l}).parent().addClass("ui-effects-explode").css({position:"absolute",overflow:"hidden",width:f,height:l,left:d+(u?m*f:0),top:v+(u?g*l:0),opacity:u?0:1}).animate({left:d+(u?0:m*f),top:v+(u?0:g*l),opacity:u?1:0},t.duration||500,t.easing,y)}}})(jQuery);(function(e,t){e.effects.effect.fade=function(t,n){var r=e(this),i=e.effects.setMode(r,t.mode||"toggle");r.animate({opacity:i},{queue:!1,duration:t.duration,easing:t.easing,complete:n})}})(jQuery);(function(e,t){e.effects.effect.fold=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"hide"),o=s==="show",u=s==="hide",a=t.size||15,f=/([0-9]+)%/.exec(a),l=!!t.horizFirst,c=o!==l,h=c?["width","height"]:["height","width"],p=t.duration/2,d,v,m={},g={};e.effects.save(r,i),r.show(),d=e.effects.createWrapper(r).css({overflow:"hidden"}),v=c?[d.width(),d.height()]:[d.height(),d.width()],f&&(a=parseInt(f[1],10)/100*v[u?0:1]),o&&d.css(l?{height:0,width:a}:{height:a,width:0}),m[h[0]]=o?v[0]:a,g[h[1]]=o?v[1]:0,d.animate(m,p,t.easing).animate(g,p,t.easing,function(){u&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()})}})(jQuery);(function(e,t){e.effects.effect.highlight=function(t,n){var r=e(this),i=["backgroundImage","backgroundColor","opacity"],s=e.effects.setMode(r,t.mode||"show"),o={backgroundColor:r.css("backgroundColor")};s==="hide"&&(o.opacity=0),e.effects.save(r,i),r.show().css({backgroundImage:"none",backgroundColor:t.color||"#ffff99"}).animate(o,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),n()}})}})(jQuery);(function(e,t){e.effects.effect.pulsate=function(t,n){var r=e(this),i=e.effects.setMode(r,t.mode||"show"),s=i==="show",o=i==="hide",u=s||i==="hide",a=(t.times||5)*2+(u?1:0),f=t.duration/a,l=0,c=r.queue(),h=c.length,p;if(s||!r.is(":visible"))r.css("opacity",0).show(),l=1;for(p=1;p1&&c.splice.apply(c,[1,0].concat(c.splice(h,a+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.puff=function(t,n){var r=e(this),i=e.effects.setMode(r,t.mode||"hide"),s=i==="hide",o=parseInt(t.percent,10)||150,u=o/100,a={height:r.height(),width:r.width()};e.extend(t,{effect:"scale",queue:!1,fade:!0,mode:i,complete:n,percent:s?o:100,from:s?a:{height:a.height*u,width:a.width*u}}),r.effect(t)},e.effects.effect.scale=function(t,n){var r=e(this),i=e.extend(!0,{},t),s=e.effects.setMode(r,t.mode||"effect"),o=parseInt(t.percent,10)||(parseInt(t.percent,10)===0?0:s==="hide"?0:100),u=t.direction||"both",a=t.origin,f={height:r.height(),width:r.width(),outerHeight:r.outerHeight(),outerWidth:r.outerWidth()},l={y:u!=="horizontal"?o/100:1,x:u!=="vertical"?o/100:1};i.effect="size",i.queue=!1,i.complete=n,s!=="effect"&&(i.origin=a||["middle","center"],i.restore=!0),i.from=t.from||(s==="show"?{height:0,width:0}:f),i.to={height:f.height*l.y,width:f.width*l.x,outerHeight:f.outerHeight*l.y,outerWidth:f.outerWidth*l.x},i.fade&&(s==="show"&&(i.from.opacity=0,i.to.opacity=1),s==="hide"&&(i.from.opacity=1,i.to.opacity=0)),r.effect(i)},e.effects.effect.size=function(t,n){var 
r=e(this),i=["position","top","bottom","left","right","width","height","overflow","opacity"],s=["position","top","bottom","left","right","overflow","opacity"],o=["width","height","overflow"],u=["fontSize"],a=["borderTopWidth","borderBottomWidth","paddingTop","paddingBottom"],f=["borderLeftWidth","borderRightWidth","paddingLeft","paddingRight"],l=e.effects.setMode(r,t.mode||"effect"),c=t.restore||l!=="effect",h=t.scale||"both",p=t.origin||["middle","center"],d,v,m,g=r.css("position");l==="show"&&r.show(),d={height:r.height(),width:r.width(),outerHeight:r.outerHeight(),outerWidth:r.outerWidth()},r.from=t.from||d,r.to=t.to||d,m={from:{y:r.from.height/d.height,x:r.from.width/d.width},to:{y:r.to.height/d.height,x:r.to.width/d.width}};if(h==="box"||h==="both")m.from.y!==m.to.y&&(i=i.concat(a),r.from=e.effects.setTransition(r,a,m.from.y,r.from),r.to=e.effects.setTransition(r,a,m.to.y,r.to)),m.from.x!==m.to.x&&(i=i.concat(f),r.from=e.effects.setTransition(r,f,m.from.x,r.from),r.to=e.effects.setTransition(r,f,m.to.x,r.to));(h==="content"||h==="both")&&m.from.y!==m.to.y&&(i=i.concat(u),r.from=e.effects.setTransition(r,u,m.from.y,r.from),r.to=e.effects.setTransition(r,u,m.to.y,r.to)),e.effects.save(r,c?i:s),r.show(),e.effects.createWrapper(r),r.css("overflow","hidden").css(r.from),p&&(v=e.effects.getBaseline(p,d),r.from.top=(d.outerHeight-r.outerHeight())*v.y,r.from.left=(d.outerWidth-r.outerWidth())*v.x,r.to.top=(d.outerHeight-r.to.outerHeight)*v.y,r.to.left=(d.outerWidth-r.to.outerWidth)*v.x),r.css(r.from);if(h==="content"||h==="both")a=a.concat(["marginTop","marginBottom"]).concat(u),f=f.concat(["marginLeft","marginRight"]),o=i.concat(a).concat(f),r.find("*[width]").each(function(){var n=e(this),r={height:n.height(),width:n.width()};c&&e.effects.save(n,o),n.from={height:r.height*m.from.y,width:r.width*m.from.x},n.to={height:r.height*m.to.y,width:r.width*m.to.x},m.from.y!==m.to.y&&(n.from=e.effects.setTransition(n,a,m.from.y,n.from),n.to=e.effects.setTransition(n,a,m.to.y,n.to)),m.from.x!==m.to.x&&(n.from=e.effects.setTransition(n,f,m.from.x,n.from),n.to=e.effects.setTransition(n,f,m.to.x,n.to)),n.css(n.from),n.animate(n.to,t.duration,t.easing,function(){c&&e.effects.restore(n,o)})});r.animate(r.to,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){r.to.opacity===0&&r.css("opacity",r.from.opacity),l==="hide"&&r.hide(),e.effects.restore(r,c?i:s),c||(g==="static"?r.css({position:"relative",top:r.to.top,left:r.to.left}):e.each(["top","left"],function(e,t){r.css(t,function(t,n){var i=parseInt(n,10),s=e?r.to.left:r.to.top;return n==="auto"?s+"px":i+s+"px"})})),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.shake=function(t,n){var r=e(this),i=["position","top","bottom","left","right","height","width"],s=e.effects.setMode(r,t.mode||"effect"),o=t.direction||"left",u=t.distance||20,a=t.times||3,f=a*2+1,l=Math.round(t.duration/f),c=o==="up"||o==="down"?"top":"left",h=o==="up"||o==="left",p={},d={},v={},m,g=r.queue(),y=g.length;e.effects.save(r,i),r.show(),e.effects.createWrapper(r),p[c]=(h?"-=":"+=")+u,d[c]=(h?"+=":"-=")+u*2,v[c]=(h?"-=":"+=")+u*2,r.animate(p,l,t.easing);for(m=1;m1&&g.splice.apply(g,[1,0].concat(g.splice(y,f+1))),r.dequeue()}})(jQuery);(function(e,t){e.effects.effect.slide=function(t,n){var 
r=e(this),i=["position","top","bottom","left","right","width","height"],s=e.effects.setMode(r,t.mode||"show"),o=s==="show",u=t.direction||"left",a=u==="up"||u==="down"?"top":"left",f=u==="up"||u==="left",l,c={};e.effects.save(r,i),r.show(),l=t.distance||r[a==="top"?"outerHeight":"outerWidth"](!0),e.effects.createWrapper(r).css({overflow:"hidden"}),o&&r.css(a,f?isNaN(l)?"-"+l:-l:l),c[a]=(o?f?"+=":"-=":f?"-=":"+=")+l,r.animate(c,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){s==="hide"&&r.hide(),e.effects.restore(r,i),e.effects.removeWrapper(r),n()}})}})(jQuery);(function(e,t){e.effects.effect.transfer=function(t,n){var r=e(this),i=e(t.to),s=i.css("position")==="fixed",o=e("body"),u=s?o.scrollTop():0,a=s?o.scrollLeft():0,f=i.offset(),l={top:f.top-u,left:f.left-a,height:i.innerHeight(),width:i.innerWidth()},c=r.offset(),h=e('
              ').appendTo(document.body).addClass(t.className).css({top:c.top-u,left:c.left-a,height:r.innerHeight(),width:r.innerWidth(),position:s?"fixed":"absolute"}).animate(l,t.duration,t.easing,function(){h.remove(),n()})}})(jQuery);(function(e,t){var n=!1;e.widget("ui.menu",{version:"1.9.0",defaultElement:"
                ",delay:300,options:{icons:{submenu:"ui-icon-carat-1-e"},menus:"ul",position:{my:"left top",at:"right top"},role:"menu",blur:null,focus:null,select:null},_create:function(){this.activeMenu=this.element,this.element.uniqueId().addClass("ui-menu ui-widget ui-widget-content ui-corner-all").toggleClass("ui-menu-icons",!!this.element.find(".ui-icon").length).attr({role:this.options.role,tabIndex:0}).bind("click"+this.eventNamespace,e.proxy(function(e){this.options.disabled&&e.preventDefault()},this)),this.options.disabled&&this.element.addClass("ui-state-disabled").attr("aria-disabled","true"),this._on({"mousedown .ui-menu-item > a":function(e){e.preventDefault()},"click .ui-state-disabled > a":function(e){e.preventDefault()},"click .ui-menu-item:has(a)":function(t){var r=e(t.target).closest(".ui-menu-item");!n&&r.not(".ui-state-disabled").length&&(n=!0,this.select(t),r.has(".ui-menu").length?this.expand(t):this.element.is(":focus")||(this.element.trigger("focus",[!0]),this.active&&this.active.parents(".ui-menu").length===1&&clearTimeout(this.timer)))},"mouseenter .ui-menu-item":function(t){var n=e(t.currentTarget);n.siblings().children(".ui-state-active").removeClass("ui-state-active"),this.focus(t,n)},mouseleave:"collapseAll","mouseleave .ui-menu":"collapseAll",focus:function(e,t){var n=this.active||this.element.children(".ui-menu-item").eq(0);t||this.focus(e,n)},blur:function(t){this._delay(function(){e.contains(this.element[0],this.document[0].activeElement)||this.collapseAll(t)})},keydown:"_keydown"}),this.refresh(),this._on(this.document,{click:function(t){e(t.target).closest(".ui-menu").length||this.collapseAll(t),n=!1}})},_destroy:function(){this.element.removeAttr("aria-activedescendant").find(".ui-menu").andSelf().removeClass("ui-menu ui-widget ui-widget-content ui-corner-all ui-menu-icons").removeAttr("role").removeAttr("tabIndex").removeAttr("aria-labelledby").removeAttr("aria-expanded").removeAttr("aria-hidden").removeAttr("aria-disabled").removeUniqueId().show(),this.element.find(".ui-menu-item").removeClass("ui-menu-item").removeAttr("role").removeAttr("aria-disabled").children("a").removeUniqueId().removeClass("ui-corner-all ui-state-hover").removeAttr("tabIndex").removeAttr("role").removeAttr("aria-haspopup").children().each(function(){var t=e(this);t.data("ui-menu-submenu-carat")&&t.remove()}),this.element.find(".ui-menu-divider").removeClass("ui-menu-divider ui-widget-content")},_keydown:function(t){function a(e){return e.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g,"\\$&")}var n,r,i,s,o,u=!0;switch(t.keyCode){case e.ui.keyCode.PAGE_UP:this.previousPage(t);break;case e.ui.keyCode.PAGE_DOWN:this.nextPage(t);break;case e.ui.keyCode.HOME:this._move("first","first",t);break;case e.ui.keyCode.END:this._move("last","last",t);break;case e.ui.keyCode.UP:this.previous(t);break;case e.ui.keyCode.DOWN:this.next(t);break;case e.ui.keyCode.LEFT:this.collapse(t);break;case e.ui.keyCode.RIGHT:this.active&&!this.active.is(".ui-state-disabled")&&this.expand(t);break;case e.ui.keyCode.ENTER:case e.ui.keyCode.SPACE:this._activate(t);break;case e.ui.keyCode.ESCAPE:this.collapse(t);break;default:u=!1,r=this.previousFilter||"",i=String.fromCharCode(t.keyCode),s=!1,clearTimeout(this.filterTimer),i===r?s=!0:i=r+i,o=new RegExp("^"+a(i),"i"),n=this.activeMenu.children(".ui-menu-item").filter(function(){return o.test(e(this).children("a").text())}),n=s&&n.index(this.active.next())!==-1?this.active.nextAll(".ui-menu-item"):n,n.length||(i=String.fromCharCode(t.keyCode),o=new 
RegExp("^"+a(i),"i"),n=this.activeMenu.children(".ui-menu-item").filter(function(){return o.test(e(this).children("a").text())})),n.length?(this.focus(t,n),n.length>1?(this.previousFilter=i,this.filterTimer=this._delay(function(){delete this.previousFilter},1e3)):delete this.previousFilter):delete this.previousFilter}u&&t.preventDefault()},_activate:function(e){this.active.is(".ui-state-disabled")||(this.active.children("a[aria-haspopup='true']").length?this.expand(e):this.select(e))},refresh:function(){var t,n=this.options.icons.submenu,r=this.element.find(this.options.menus+":not(.ui-menu)").addClass("ui-menu ui-widget ui-widget-content ui-corner-all").hide().attr({role:this.options.role,"aria-hidden":"true","aria-expanded":"false"});t=r.add(this.element),t.children(":not(.ui-menu-item):has(a)").addClass("ui-menu-item").attr("role","presentation").children("a").uniqueId().addClass("ui-corner-all").attr({tabIndex:-1,role:this._itemRole()}),t.children(":not(.ui-menu-item)").each(function(){var t=e(this);/[^\-—–\s]/.test(t.text())||t.addClass("ui-widget-content ui-menu-divider")}),t.children(".ui-state-disabled").attr("aria-disabled","true"),r.each(function(){var t=e(this),r=t.prev("a"),i=e("").addClass("ui-menu-icon ui-icon "+n).data("ui-menu-submenu-carat",!0);r.attr("aria-haspopup","true").prepend(i),t.attr("aria-labelledby",r.attr("id"))}),this.active&&!e.contains(this.element[0],this.active[0])&&this.blur()},_itemRole:function(){return{menu:"menuitem",listbox:"option"}[this.options.role]},focus:function(e,t){var n,r;this.blur(e,e&&e.type==="focus"),this._scrollIntoView(t),this.active=t.first(),r=this.active.children("a").addClass("ui-state-focus"),this.options.role&&this.element.attr("aria-activedescendant",r.attr("id")),this.active.parent().closest(".ui-menu-item").children("a:first").addClass("ui-state-active"),e&&e.type==="keydown"?this._close():this.timer=this._delay(function(){this._close()},this.delay),n=t.children(".ui-menu"),n.length&&/^mouse/.test(e.type)&&this._startOpening(n),this.activeMenu=t.parent(),this._trigger("focus",e,{item:t})},_scrollIntoView:function(t){var n,r,i,s,o,u;this._hasScroll()&&(n=parseFloat(e.css(this.activeMenu[0],"borderTopWidth"))||0,r=parseFloat(e.css(this.activeMenu[0],"paddingTop"))||0,i=t.offset().top-this.activeMenu.offset().top-n-r,s=this.activeMenu.scrollTop(),o=this.activeMenu.height(),u=t.height(),i<0?this.activeMenu.scrollTop(s+i):i+u>o&&this.activeMenu.scrollTop(s+i-o+u))},blur:function(e,t){t||clearTimeout(this.timer);if(!this.active)return;this.active.children("a").removeClass("ui-state-focus"),this.active=null,this._trigger("blur",e,{item:this.active})},_startOpening:function(e){clearTimeout(this.timer);if(e.attr("aria-hidden")!=="true")return;this.timer=this._delay(function(){this._close(),this._open(e)},this.delay)},_open:function(t){var n=e.extend({of:this.active},this.options.position);clearTimeout(this.timer),this.element.find(".ui-menu").not(t.parents(".ui-menu")).hide().attr("aria-hidden","true"),t.show().removeAttr("aria-hidden").attr("aria-expanded","true").position(n)},collapseAll:function(t,n){clearTimeout(this.timer),this.timer=this._delay(function(){var 
r=n?this.element:e(t&&t.target).closest(this.element.find(".ui-menu"));r.length||(r=this.element),this._close(r),this.blur(t),this.activeMenu=r},this.delay)},_close:function(e){e||(e=this.active?this.active.parent():this.element),e.find(".ui-menu").hide().attr("aria-hidden","true").attr("aria-expanded","false").end().find("a.ui-state-active").removeClass("ui-state-active")},collapse:function(e){var t=this.active&&this.active.parent().closest(".ui-menu-item",this.element);t&&t.length&&(this._close(),this.focus(e,t))},expand:function(e){var t=this.active&&this.active.children(".ui-menu ").children(".ui-menu-item").first();t&&t.length&&(this._open(t.parent()),this._delay(function(){this.focus(e,t)}))},next:function(e){this._move("next","first",e)},previous:function(e){this._move("prev","last",e)},isFirstItem:function(){return this.active&&!this.active.prevAll(".ui-menu-item").length},isLastItem:function(){return this.active&&!this.active.nextAll(".ui-menu-item").length},_move:function(e,t,n){var r;this.active&&(e==="first"||e==="last"?r=this.active[e==="first"?"prevAll":"nextAll"](".ui-menu-item").eq(-1):r=this.active[e+"All"](".ui-menu-item").eq(0));if(!r||!r.length||!this.active)r=this.activeMenu.children(".ui-menu-item")[t]();this.focus(n,r)},nextPage:function(t){var n,r,i;if(!this.active){this.next(t);return}if(this.isLastItem())return;this._hasScroll()?(r=this.active.offset().top,i=this.element.height(),this.active.nextAll(".ui-menu-item").each(function(){return n=e(this),n.offset().top-r-i<0}),this.focus(t,n)):this.focus(t,this.activeMenu.children(".ui-menu-item")[this.active?"last":"first"]())},previousPage:function(t){var n,r,i;if(!this.active){this.next(t);return}if(this.isFirstItem())return;this._hasScroll()?(r=this.active.offset().top,i=this.element.height(),this.active.prevAll(".ui-menu-item").each(function(){return n=e(this),n.offset().top-r+i>0}),this.focus(t,n)):this.focus(t,this.activeMenu.children(".ui-menu-item").first())},_hasScroll:function(){return this.element.outerHeight()
              ").appendTo(this.element),this.oldValue=this._value(),this._refreshValue()},_destroy:function(){this.element.removeClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").removeAttr("role").removeAttr("aria-valuemin").removeAttr("aria-valuemax").removeAttr("aria-valuenow"),this.valueDiv.remove()},value:function(e){return e===t?this._value():(this._setOption("value",e),this)},_setOption:function(e,t){e==="value"&&(this.options.value=t,this._refreshValue(),this._value()===this.options.max&&this._trigger("complete")),this._super(e,t)},_value:function(){var e=this.options.value;return typeof e!="number"&&(e=0),Math.min(this.options.max,Math.max(this.min,e))},_percentage:function(){return 100*this._value()/this.options.max},_refreshValue:function(){var e=this.value(),t=this._percentage();this.oldValue!==e&&(this.oldValue=e,this._trigger("change")),this.valueDiv.toggle(e>this.min).toggleClass("ui-corner-right",e===this.options.max).width(t.toFixed(0)+"%"),this.element.attr("aria-valuenow",e)}})})(jQuery);(function(e,t){e.widget("ui.resizable",e.ui.mouse,{version:"1.9.0",widgetEventPrefix:"resize",options:{alsoResize:!1,animate:!1,animateDuration:"slow",animateEasing:"swing",aspectRatio:!1,autoHide:!1,containment:!1,ghost:!1,grid:!1,handles:"e,s,se",helper:!1,maxHeight:null,maxWidth:null,minHeight:10,minWidth:10,zIndex:1e3},_create:function(){var t=this,n=this.options;this.element.addClass("ui-resizable"),e.extend(this,{_aspectRatio:!!n.aspectRatio,aspectRatio:n.aspectRatio,originalElement:this.element,_proportionallyResizeElements:[],_helper:n.helper||n.ghost||n.animate?n.helper||"ui-resizable-helper":null}),this.element[0].nodeName.match(/canvas|textarea|input|select|button|img/i)&&(this.element.wrap(e('
              ').css({position:this.element.css("position"),width:this.element.outerWidth(),height:this.element.outerHeight(),top:this.element.css("top"),left:this.element.css("left")})),this.element=this.element.parent().data("resizable",this.element.data("resizable")),this.elementIsWrapper=!0,this.element.css({marginLeft:this.originalElement.css("marginLeft"),marginTop:this.originalElement.css("marginTop"),marginRight:this.originalElement.css("marginRight"),marginBottom:this.originalElement.css("marginBottom")}),this.originalElement.css({marginLeft:0,marginTop:0,marginRight:0,marginBottom:0}),this.originalResizeStyle=this.originalElement.css("resize"),this.originalElement.css("resize","none"),this._proportionallyResizeElements.push(this.originalElement.css({position:"static",zoom:1,display:"block"})),this.originalElement.css({margin:this.originalElement.css("margin")}),this._proportionallyResize()),this.handles=n.handles||(e(".ui-resizable-handle",this.element).length?{n:".ui-resizable-n",e:".ui-resizable-e",s:".ui-resizable-s",w:".ui-resizable-w",se:".ui-resizable-se",sw:".ui-resizable-sw",ne:".ui-resizable-ne",nw:".ui-resizable-nw"}:"e,s,se");if(this.handles.constructor==String){this.handles=="all"&&(this.handles="n,e,s,w,se,sw,ne,nw");var r=this.handles.split(",");this.handles={};for(var i=0;i
              ');u.css({zIndex:n.zIndex}),"se"==s&&u.addClass("ui-icon ui-icon-gripsmall-diagonal-se"),this.handles[s]=".ui-resizable-"+s,this.element.append(u)}}this._renderAxis=function(t){t=t||this.element;for(var n in this.handles){this.handles[n].constructor==String&&(this.handles[n]=e(this.handles[n],this.element).show());if(this.elementIsWrapper&&this.originalElement[0].nodeName.match(/textarea|input|select|button/i)){var r=e(this.handles[n],this.element),i=0;i=/sw|ne|nw|se|n|s/.test(n)?r.outerHeight():r.outerWidth();var s=["padding",/ne|nw|n/.test(n)?"Top":/se|sw|s/.test(n)?"Bottom":/^e$/.test(n)?"Right":"Left"].join("");t.css(s,i),this._proportionallyResize()}if(!e(this.handles[n]).length)continue}},this._renderAxis(this.element),this._handles=e(".ui-resizable-handle",this.element).disableSelection(),this._handles.mouseover(function(){if(!t.resizing){if(this.className)var e=this.className.match(/ui-resizable-(se|sw|ne|nw|n|e|s|w)/i);t.axis=e&&e[1]?e[1]:"se"}}),n.autoHide&&(this._handles.hide(),e(this.element).addClass("ui-resizable-autohide").mouseenter(function(){if(n.disabled)return;e(this).removeClass("ui-resizable-autohide"),t._handles.show()}).mouseleave(function(){if(n.disabled)return;t.resizing||(e(this).addClass("ui-resizable-autohide"),t._handles.hide())})),this._mouseInit()},_destroy:function(){this._mouseDestroy();var t=function(t){e(t).removeClass("ui-resizable ui-resizable-disabled ui-resizable-resizing").removeData("resizable").removeData("ui-resizable").unbind(".resizable").find(".ui-resizable-handle").remove()};if(this.elementIsWrapper){t(this.element);var n=this.element;n.after(this.originalElement.css({position:n.css("position"),width:n.outerWidth(),height:n.outerHeight(),top:n.css("top"),left:n.css("left")})).remove()}return this.originalElement.css("resize",this.originalResizeStyle),t(this.originalElement),this},_mouseCapture:function(t){var n=!1;for(var r in this.handles)e(this.handles[r])[0]==t.target&&(n=!0);return!this.options.disabled&&n},_mouseStart:function(t){var r=this.options,i=this.element.position(),s=this.element;this.resizing=!0,this.documentScroll={top:e(document).scrollTop(),left:e(document).scrollLeft()},(s.is(".ui-draggable")||/absolute/.test(s.css("position")))&&s.css({position:"absolute",top:i.top,left:i.left}),this._renderProxy();var o=n(this.helper.css("left")),u=n(this.helper.css("top"));r.containment&&(o+=e(r.containment).scrollLeft()||0,u+=e(r.containment).scrollTop()||0),this.offset=this.helper.offset(),this.position={left:o,top:u},this.size=this._helper?{width:s.outerWidth(),height:s.outerHeight()}:{width:s.width(),height:s.height()},this.originalSize=this._helper?{width:s.outerWidth(),height:s.outerHeight()}:{width:s.width(),height:s.height()},this.originalPosition={left:o,top:u},this.sizeDiff={width:s.outerWidth()-s.width(),height:s.outerHeight()-s.height()},this.originalMousePosition={left:t.pageX,top:t.pageY},this.aspectRatio=typeof r.aspectRatio=="number"?r.aspectRatio:this.originalSize.width/this.originalSize.height||1;var a=e(".ui-resizable-"+this.axis).css("cursor");return e("body").css("cursor",a=="auto"?this.axis+"-resize":a),s.addClass("ui-resizable-resizing"),this._propagate("start",t),!0},_mouseDrag:function(e){var t=this.helper,n=this.options,r={},i=this,s=this.originalMousePosition,o=this.axis,u=e.pageX-s.left||0,a=e.pageY-s.top||0,f=this._change[o];if(!f)return!1;var l=f.apply(this,[e,u,a]);this._updateVirtualBoundaries(e.shiftKey);if(this._aspectRatio||e.shiftKey)l=this._updateRatio(l,e);return 
l=this._respectSize(l,e),this._propagate("resize",e),t.css({top:this.position.top+"px",left:this.position.left+"px",width:this.size.width+"px",height:this.size.height+"px"}),!this._helper&&this._proportionallyResizeElements.length&&this._proportionallyResize(),this._updateCache(l),this._trigger("resize",e,this.ui()),!1},_mouseStop:function(t){this.resizing=!1;var n=this.options,r=this;if(this._helper){var i=this._proportionallyResizeElements,s=i.length&&/textarea/i.test(i[0].nodeName),o=s&&e.ui.hasScroll(i[0],"left")?0:r.sizeDiff.height,u=s?0:r.sizeDiff.width,a={width:r.helper.width()-u,height:r.helper.height()-o},f=parseInt(r.element.css("left"),10)+(r.position.left-r.originalPosition.left)||null,l=parseInt(r.element.css("top"),10)+(r.position.top-r.originalPosition.top)||null;n.animate||this.element.css(e.extend(a,{top:l,left:f})),r.helper.height(r.size.height),r.helper.width(r.size.width),this._helper&&!n.animate&&this._proportionallyResize()}return e("body").css("cursor","auto"),this.element.removeClass("ui-resizable-resizing"),this._propagate("stop",t),this._helper&&this.helper.remove(),!1},_updateVirtualBoundaries:function(e){var t=this.options,n,i,s,o,u;u={minWidth:r(t.minWidth)?t.minWidth:0,maxWidth:r(t.maxWidth)?t.maxWidth:Infinity,minHeight:r(t.minHeight)?t.minHeight:0,maxHeight:r(t.maxHeight)?t.maxHeight:Infinity};if(this._aspectRatio||e)n=u.minHeight*this.aspectRatio,s=u.minWidth/this.aspectRatio,i=u.maxHeight*this.aspectRatio,o=u.maxWidth/this.aspectRatio,n>u.minWidth&&(u.minWidth=n),s>u.minHeight&&(u.minHeight=s),ie.width,l=r(e.height)&&i.minHeight&&i.minHeight>e.height;f&&(e.width=i.minWidth),l&&(e.height=i.minHeight),u&&(e.width=i.maxWidth),a&&(e.height=i.maxHeight);var c=this.originalPosition.left+this.originalSize.width,h=this.position.top+this.size.height,p=/sw|nw|w/.test(o),d=/nw|ne|n/.test(o);f&&p&&(e.left=c-i.minWidth),u&&p&&(e.left=c-i.maxWidth),l&&d&&(e.top=h-i.minHeight),a&&d&&(e.top=h-i.maxHeight);var v=!e.width&&!e.height;return v&&!e.left&&e.top?e.top=null:v&&!e.top&&e.left&&(e.left=null),e},_proportionallyResize:function(){var t=this.options;if(!this._proportionallyResizeElements.length)return;var n=this.helper||this.element;for(var r=0;r');var r=e.browser.msie&&e.browser.version<7,i=r?1:0,s=r?2:-1;this.helper.addClass(this._helper).css({width:this.element.outerWidth()+s,height:this.element.outerHeight()+s,position:"absolute",left:this.elementOffset.left-i+"px",top:this.elementOffset.top-i+"px",zIndex:++n.zIndex}),this.helper.appendTo("body").disableSelection()}else this.helper=this.element},_change:{e:function(e,t,n){return{width:this.originalSize.width+t}},w:function(e,t,n){var r=this.options,i=this.originalSize,s=this.originalPosition;return{left:s.left+t,width:i.width-t}},n:function(e,t,n){var r=this.options,i=this.originalSize,s=this.originalPosition;return{top:s.top+n,height:i.height-n}},s:function(e,t,n){return{height:this.originalSize.height+n}},se:function(t,n,r){return e.extend(this._change.s.apply(this,arguments),this._change.e.apply(this,[t,n,r]))},sw:function(t,n,r){return e.extend(this._change.s.apply(this,arguments),this._change.w.apply(this,[t,n,r]))},ne:function(t,n,r){return e.extend(this._change.n.apply(this,arguments),this._change.e.apply(this,[t,n,r]))},nw:function(t,n,r){return 
e.extend(this._change.n.apply(this,arguments),this._change.w.apply(this,[t,n,r]))}},_propagate:function(t,n){e.ui.plugin.call(this,t,[n,this.ui()]),t!="resize"&&this._trigger(t,n,this.ui())},plugins:{},ui:function(){return{originalElement:this.originalElement,element:this.element,helper:this.helper,position:this.position,size:this.size,originalSize:this.originalSize,originalPosition:this.originalPosition}}}),e.ui.plugin.add("resizable","alsoResize",{start:function(t,n){var r=e(this).data("resizable"),i=r.options,s=function(t){e(t).each(function(){var t=e(this);t.data("resizable-alsoresize",{width:parseInt(t.width(),10),height:parseInt(t.height(),10),left:parseInt(t.css("left"),10),top:parseInt(t.css("top"),10)})})};typeof i.alsoResize=="object"&&!i.alsoResize.parentNode?i.alsoResize.length?(i.alsoResize=i.alsoResize[0],s(i.alsoResize)):e.each(i.alsoResize,function(e){s(e)}):s(i.alsoResize)},resize:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.originalSize,o=r.originalPosition,u={height:r.size.height-s.height||0,width:r.size.width-s.width||0,top:r.position.top-o.top||0,left:r.position.left-o.left||0},a=function(t,r){e(t).each(function(){var t=e(this),i=e(this).data("resizable-alsoresize"),s={},o=r&&r.length?r:t.parents(n.originalElement[0]).length?["width","height"]:["width","height","top","left"];e.each(o,function(e,t){var n=(i[t]||0)+(u[t]||0);n&&n>=0&&(s[t]=n||null)}),t.css(s)})};typeof i.alsoResize=="object"&&!i.alsoResize.nodeType?e.each(i.alsoResize,function(e,t){a(e,t)}):a(i.alsoResize)},stop:function(t,n){e(this).removeData("resizable-alsoresize")}}),e.ui.plugin.add("resizable","animate",{stop:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r._proportionallyResizeElements,o=s.length&&/textarea/i.test(s[0].nodeName),u=o&&e.ui.hasScroll(s[0],"left")?0:r.sizeDiff.height,a=o?0:r.sizeDiff.width,f={width:r.size.width-a,height:r.size.height-u},l=parseInt(r.element.css("left"),10)+(r.position.left-r.originalPosition.left)||null,c=parseInt(r.element.css("top"),10)+(r.position.top-r.originalPosition.top)||null;r.element.animate(e.extend(f,c&&l?{top:c,left:l}:{}),{duration:i.animateDuration,easing:i.animateEasing,step:function(){var n={width:parseInt(r.element.css("width"),10),height:parseInt(r.element.css("height"),10),top:parseInt(r.element.css("top"),10),left:parseInt(r.element.css("left"),10)};s&&s.length&&e(s[0]).css({width:n.width,height:n.height}),r._updateCache(n),r._propagate("resize",t)}})}}),e.ui.plugin.add("resizable","containment",{start:function(t,r){var i=e(this).data("resizable"),s=i.options,o=i.element,u=s.containment,a=u instanceof e?u.get(0):/parent/.test(u)?o.parent().get(0):u;if(!a)return;i.containerElement=e(a);if(/document/.test(u)||u==document)i.containerOffset={left:0,top:0},i.containerPosition={left:0,top:0},i.parentData={element:e(document),left:0,top:0,width:e(document).width(),height:e(document).height()||document.body.parentNode.scrollHeight};else{var f=e(a),l=[];e(["Top","Right","Left","Bottom"]).each(function(e,t){l[e]=n(f.css("padding"+t))}),i.containerOffset=f.offset(),i.containerPosition=f.position(),i.containerSize={height:f.innerHeight()-l[3],width:f.innerWidth()-l[1]};var c=i.containerOffset,h=i.containerSize.height,p=i.containerSize.width,d=e.ui.hasScroll(a,"left")?a.scrollWidth:p,v=e.ui.hasScroll(a)?a.scrollHeight:h;i.parentData={element:a,left:c.left,top:c.top,width:d,height:v}}},resize:function(t,n){var 
r=e(this).data("resizable"),i=r.options,s=r.containerSize,o=r.containerOffset,u=r.size,a=r.position,f=r._aspectRatio||t.shiftKey,l={top:0,left:0},c=r.containerElement;c[0]!=document&&/static/.test(c.css("position"))&&(l=o),a.left<(r._helper?o.left:0)&&(r.size.width=r.size.width+(r._helper?r.position.left-o.left:r.position.left-l.left),f&&(r.size.height=r.size.width/r.aspectRatio),r.position.left=i.helper?o.left:0),a.top<(r._helper?o.top:0)&&(r.size.height=r.size.height+(r._helper?r.position.top-o.top:r.position.top),f&&(r.size.width=r.size.height*r.aspectRatio),r.position.top=r._helper?o.top:0),r.offset.left=r.parentData.left+r.position.left,r.offset.top=r.parentData.top+r.position.top;var h=Math.abs((r._helper?r.offset.left-l.left:r.offset.left-l.left)+r.sizeDiff.width),p=Math.abs((r._helper?r.offset.top-l.top:r.offset.top-o.top)+r.sizeDiff.height),d=r.containerElement.get(0)==r.element.parent().get(0),v=/relative|absolute/.test(r.containerElement.css("position"));d&&v&&(h-=r.parentData.left),h+r.size.width>=r.parentData.width&&(r.size.width=r.parentData.width-h,f&&(r.size.height=r.size.width/r.aspectRatio)),p+r.size.height>=r.parentData.height&&(r.size.height=r.parentData.height-p,f&&(r.size.width=r.size.height*r.aspectRatio))},stop:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.position,o=r.containerOffset,u=r.containerPosition,a=r.containerElement,f=e(r.helper),l=f.offset(),c=f.outerWidth()-r.sizeDiff.width,h=f.outerHeight()-r.sizeDiff.height;r._helper&&!i.animate&&/relative/.test(a.css("position"))&&e(this).css({left:l.left-u.left-o.left,width:c,height:h}),r._helper&&!i.animate&&/static/.test(a.css("position"))&&e(this).css({left:l.left-u.left-o.left,width:c,height:h})}}),e.ui.plugin.add("resizable","ghost",{start:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.size;r.ghost=r.originalElement.clone(),r.ghost.css({opacity:.25,display:"block",position:"relative",height:s.height,width:s.width,margin:0,left:0,top:0}).addClass("ui-resizable-ghost").addClass(typeof i.ghost=="string"?i.ghost:""),r.ghost.appendTo(r.helper)},resize:function(t,n){var r=e(this).data("resizable"),i=r.options;r.ghost&&r.ghost.css({position:"relative",height:r.size.height,width:r.size.width})},stop:function(t,n){var r=e(this).data("resizable"),i=r.options;r.ghost&&r.helper&&r.helper.get(0).removeChild(r.ghost.get(0))}}),e.ui.plugin.add("resizable","grid",{resize:function(t,n){var r=e(this).data("resizable"),i=r.options,s=r.size,o=r.originalSize,u=r.originalPosition,a=r.axis,f=i._aspectRatio||t.shiftKey;i.grid=typeof i.grid=="number"?[i.grid,i.grid]:i.grid;var l=Math.round((s.width-o.width)/(i.grid[0]||1))*(i.grid[0]||1),c=Math.round((s.height-o.height)/(i.grid[1]||1))*(i.grid[1]||1);/^(se|s|e)$/.test(a)?(r.size.width=o.width+l,r.size.height=o.height+c):/^(ne)$/.test(a)?(r.size.width=o.width+l,r.size.height=o.height+c,r.position.top=u.top-c):/^(sw)$/.test(a)?(r.size.width=o.width+l,r.size.height=o.height+c,r.position.left=u.left-l):(r.size.width=o.width+l,r.size.height=o.height+c,r.position.top=u.top-c,r.position.left=u.left-l)}});var n=function(e){return parseInt(e,10)||0},r=function(e){return!isNaN(parseInt(e,10))}})(jQuery);(function(e,t){e.widget("ui.selectable",e.ui.mouse,{version:"1.9.0",options:{appendTo:"body",autoRefresh:!0,distance:0,filter:"*",tolerance:"touch"},_create:function(){var t=this;this.element.addClass("ui-selectable"),this.dragged=!1;var n;this.refresh=function(){n=e(t.options.filter,t.element[0]),n.addClass("ui-selectee"),n.each(function(){var 
t=e(this),n=t.offset();e.data(this,"selectable-item",{element:this,$element:t,left:n.left,top:n.top,right:n.left+t.outerWidth(),bottom:n.top+t.outerHeight(),startselected:!1,selected:t.hasClass("ui-selected"),selecting:t.hasClass("ui-selecting"),unselecting:t.hasClass("ui-unselecting")})})},this.refresh(),this.selectees=n.addClass("ui-selectee"),this._mouseInit(),this.helper=e("
              ")},_destroy:function(){this.selectees.removeClass("ui-selectee").removeData("selectable-item"),this.element.removeClass("ui-selectable ui-selectable-disabled"),this._mouseDestroy()},_mouseStart:function(t){var n=this;this.opos=[t.pageX,t.pageY];if(this.options.disabled)return;var r=this.options;this.selectees=e(r.filter,this.element[0]),this._trigger("start",t),e(r.appendTo).append(this.helper),this.helper.css({left:t.clientX,top:t.clientY,width:0,height:0}),r.autoRefresh&&this.refresh(),this.selectees.filter(".ui-selected").each(function(){var r=e.data(this,"selectable-item");r.startselected=!0,!t.metaKey&&!t.ctrlKey&&(r.$element.removeClass("ui-selected"),r.selected=!1,r.$element.addClass("ui-unselecting"),r.unselecting=!0,n._trigger("unselecting",t,{unselecting:r.element}))}),e(t.target).parents().andSelf().each(function(){var r=e.data(this,"selectable-item");if(r){var i=!t.metaKey&&!t.ctrlKey||!r.$element.hasClass("ui-selected");return r.$element.removeClass(i?"ui-unselecting":"ui-selected").addClass(i?"ui-selecting":"ui-unselecting"),r.unselecting=!i,r.selecting=i,r.selected=i,i?n._trigger("selecting",t,{selecting:r.element}):n._trigger("unselecting",t,{unselecting:r.element}),!1}})},_mouseDrag:function(t){var n=this;this.dragged=!0;if(this.options.disabled)return;var r=this.options,i=this.opos[0],s=this.opos[1],o=t.pageX,u=t.pageY;if(i>o){var a=o;o=i,i=a}if(s>u){var a=u;u=s,s=a}return this.helper.css({left:i,top:s,width:o-i,height:u-s}),this.selectees.each(function(){var a=e.data(this,"selectable-item");if(!a||a.element==n.element[0])return;var f=!1;r.tolerance=="touch"?f=!(a.left>o||a.rightu||a.bottomi&&a.rights&&a.bottom").appendTo(this.element).addClass("ui-slider-range ui-widget-header"+(r.range==="min"||r.range==="max"?" ui-slider-range-"+r.range:"")));for(t=i.length;tn&&(i=n,s=e(this),o=t)}),c.range===!0&&this.values(1)===c.min&&(o+=1,s=e(this.handles[o])),u=this._start(t,o),u===!1?!1:(this._mouseSliding=!0,this._handleIndex=o,s.addClass("ui-state-active").focus(),a=s.offset(),f=!e(t.target).parents().andSelf().is(".ui-slider-handle"),this._clickOffset=f?{left:0,top:0}:{left:t.pageX-a.left-s.width()/2,top:t.pageY-a.top-s.height()/2-(parseInt(s.css("borderTopWidth"),10)||0)-(parseInt(s.css("borderBottomWidth"),10)||0)+(parseInt(s.css("marginTop"),10)||0)},this.handles.hasClass("ui-state-hover")||this._slide(t,o,r),this._animateOff=!0,!0))},_mouseStart:function(e){return!0},_mouseDrag:function(e){var t={x:e.pageX,y:e.pageY},n=this._normValueFromMouse(t);return this._slide(e,this._handleIndex,n),!1},_mouseStop:function(e){return this.handles.removeClass("ui-state-active"),this._mouseSliding=!1,this._stop(e,this._handleIndex),this._change(e,this._handleIndex),this._handleIndex=null,this._clickOffset=null,this._animateOff=!1,!1},_detectOrientation:function(){this.orientation=this.options.orientation==="vertical"?"vertical":"horizontal"},_normValueFromMouse:function(e){var t,n,r,i,s;return this.orientation==="horizontal"?(t=this.elementSize.width,n=e.x-this.elementOffset.left-(this._clickOffset?this._clickOffset.left:0)):(t=this.elementSize.height,n=e.y-this.elementOffset.top-(this._clickOffset?this._clickOffset.top:0)),r=n/t,r>1&&(r=1),r<0&&(r=0),this.orientation==="vertical"&&(r=1-r),i=this._valueMax()-this._valueMin(),s=this._valueMin()+r*i,this._trimAlignValue(s)},_start:function(e,t){var n={handle:this.handles[t],value:this.value()};return 
this.options.values&&this.options.values.length&&(n.value=this.values(t),n.values=this.values()),this._trigger("start",e,n)},_slide:function(e,t,n){var r,i,s;this.options.values&&this.options.values.length?(r=this.values(t?0:1),this.options.values.length===2&&this.options.range===!0&&(t===0&&n>r||t===1&&n1){this.options.values[t]=this._trimAlignValue(n),this._refreshValue(),this._change(null,t);return}if(!arguments.length)return this._values();if(!e.isArray(arguments[0]))return this.options.values&&this.options.values.length?this._values(t):this.value();r=this.options.values,i=arguments[0];for(s=0;s=this._valueMax())return this._valueMax();var t=this.options.step>0?this.options.step:1,n=(e-this._valueMin())%t,r=e-n;return Math.abs(n)*2>=t&&(r+=n>0?t:-t),parseFloat(r.toFixed(5))},_valueMin:function(){return this.options.min},_valueMax:function(){return this.options.max},_refreshValue:function(){var t,n,r,i,s,o=this.options.range,u=this.options,a=this,f=this._animateOff?!1:u.animate,l={};this.options.values&&this.options.values.length?this.handles.each(function(r,i){n=(a.values(r)-a._valueMin())/(a._valueMax()-a._valueMin())*100,l[a.orientation==="horizontal"?"left":"bottom"]=n+"%",e(this).stop(1,1)[f?"animate":"css"](l,u.animate),a.options.range===!0&&(a.orientation==="horizontal"?(r===0&&a.range.stop(1,1)[f?"animate":"css"]({left:n+"%"},u.animate),r===1&&a.range[f?"animate":"css"]({width:n-t+"%"},{queue:!1,duration:u.animate})):(r===0&&a.range.stop(1,1)[f?"animate":"css"]({bottom:n+"%"},u.animate),r===1&&a.range[f?"animate":"css"]({height:n-t+"%"},{queue:!1,duration:u.animate}))),t=n}):(r=this.value(),i=this._valueMin(),s=this._valueMax(),n=s!==i?(r-i)/(s-i)*100:0,l[this.orientation==="horizontal"?"left":"bottom"]=n+"%",this.handle.stop(1,1)[f?"animate":"css"](l,u.animate),o==="min"&&this.orientation==="horizontal"&&this.range.stop(1,1)[f?"animate":"css"]({width:n+"%"},u.animate),o==="max"&&this.orientation==="horizontal"&&this.range[f?"animate":"css"]({width:100-n+"%"},{queue:!1,duration:u.animate}),o==="min"&&this.orientation==="vertical"&&this.range.stop(1,1)[f?"animate":"css"]({height:n+"%"},u.animate),o==="max"&&this.orientation==="vertical"&&this.range[f?"animate":"css"]({height:100-n+"%"},{queue:!1,duration:u.animate}))}})})(jQuery);(function(e,t){e.widget("ui.sortable",e.ui.mouse,{version:"1.9.0",widgetEventPrefix:"sort",ready:!1,options:{appendTo:"parent",axis:!1,connectWith:!1,containment:!1,cursor:"auto",cursorAt:!1,dropOnEmpty:!0,forcePlaceholderSize:!1,forceHelperSize:!1,grid:!1,handle:!1,helper:"original",items:"> *",opacity:!1,placeholder:!1,revert:!1,scroll:!0,scrollSensitivity:20,scrollSpeed:20,scope:"default",tolerance:"intersect",zIndex:1e3},_create:function(){var e=this.options;this.containerCache={},this.element.addClass("ui-sortable"),this.refresh(),this.floating=this.items.length?e.axis==="x"||/left|right/.test(this.items[0].item.css("float"))||/inline|table-cell/.test(this.items[0].item.css("display")):!1,this.offset=this.element.offset(),this._mouseInit(),this.ready=!0},_destroy:function(){this.element.removeClass("ui-sortable ui-sortable-disabled"),this._mouseDestroy();for(var e=this.items.length-1;e>=0;e--)this.items[e].item.removeData(this.widgetName+"-item");return this},_setOption:function(t,n){t==="disabled"?(this.options[t]=n,this.widget().toggleClass("ui-sortable-disabled",!!n)):e.Widget.prototype._setOption.apply(this,arguments)},_mouseCapture:function(t,n){var 
r=this;if(this.reverting)return!1;if(this.options.disabled||this.options.type=="static")return!1;this._refreshItems(t);var i=null,s=e(t.target).parents().each(function(){if(e.data(this,r.widgetName+"-item")==r)return i=e(this),!1});e.data(t.target,r.widgetName+"-item")==r&&(i=e(t.target));if(!i)return!1;if(this.options.handle&&!n){var o=!1;e(this.options.handle,i).find("*").andSelf().each(function(){this==t.target&&(o=!0)});if(!o)return!1}return this.currentItem=i,this._removeCurrentsFromItems(),!0},_mouseStart:function(t,n,r){var i=this.options;this.currentContainer=this,this.refreshPositions(),this.helper=this._createHelper(t),this._cacheHelperProportions(),this._cacheMargins(),this.scrollParent=this.helper.scrollParent(),this.offset=this.currentItem.offset(),this.offset={top:this.offset.top-this.margins.top,left:this.offset.left-this.margins.left},e.extend(this.offset,{click:{left:t.pageX-this.offset.left,top:t.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()}),this.helper.css("position","absolute"),this.cssPosition=this.helper.css("position"),this.originalPosition=this._generatePosition(t),this.originalPageX=t.pageX,this.originalPageY=t.pageY,i.cursorAt&&this._adjustOffsetFromHelper(i.cursorAt),this.domPosition={prev:this.currentItem.prev()[0],parent:this.currentItem.parent()[0]},this.helper[0]!=this.currentItem[0]&&this.currentItem.hide(),this._createPlaceholder(),i.containment&&this._setContainment(),i.cursor&&(e("body").css("cursor")&&(this._storedCursor=e("body").css("cursor")),e("body").css("cursor",i.cursor)),i.opacity&&(this.helper.css("opacity")&&(this._storedOpacity=this.helper.css("opacity")),this.helper.css("opacity",i.opacity)),i.zIndex&&(this.helper.css("zIndex")&&(this._storedZIndex=this.helper.css("zIndex")),this.helper.css("zIndex",i.zIndex)),this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML"&&(this.overflowOffset=this.scrollParent.offset()),this._trigger("start",t,this._uiHash()),this._preserveHelperProportions||this._cacheHelperProportions();if(!r)for(var s=this.containers.length-1;s>=0;s--)this.containers[s]._trigger("activate",t,this._uiHash(this));return e.ui.ddmanager&&(e.ui.ddmanager.current=this),e.ui.ddmanager&&!i.dropBehaviour&&e.ui.ddmanager.prepareOffsets(this,t),this.dragging=!0,this.helper.addClass("ui-sortable-helper"),this._mouseDrag(t),!0},_mouseDrag:function(t){this.position=this._generatePosition(t),this.positionAbs=this._convertPositionTo("absolute"),this.lastPositionAbs||(this.lastPositionAbs=this.positionAbs);if(this.options.scroll){var n=this.options,r=!1;this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML"?(this.overflowOffset.top+this.scrollParent[0].offsetHeight-t.pageY=0;i--){var s=this.items[i],o=s.item[0],u=this._intersectsWithPointer(s);if(!u)continue;if(s.instance!==this.currentContainer)continue;if(o!=this.currentItem[0]&&this.placeholder[u==1?"next":"prev"]()[0]!=o&&!e.contains(this.placeholder[0],o)&&(this.options.type=="semi-dynamic"?!e.contains(this.element[0],o):!0)){this.direction=u==1?"down":"up";if(this.options.tolerance!="pointer"&&!this._intersectsWithSides(s))break;this._rearrange(t,s),this._trigger("change",t,this._uiHash());break}}return this._contactContainers(t),e.ui.ddmanager&&e.ui.ddmanager.drag(this,t),this._trigger("sort",t,this._uiHash()),this.lastPositionAbs=this.positionAbs,!1},_mouseStop:function(t,n){if(!t)return;e.ui.ddmanager&&!this.options.dropBehaviour&&e.ui.ddmanager.drop(this,t);if(this.options.revert){var 
r=this,i=this.placeholder.offset();this.reverting=!0,e(this.helper).animate({left:i.left-this.offset.parent.left-this.margins.left+(this.offsetParent[0]==document.body?0:this.offsetParent[0].scrollLeft),top:i.top-this.offset.parent.top-this.margins.top+(this.offsetParent[0]==document.body?0:this.offsetParent[0].scrollTop)},parseInt(this.options.revert,10)||500,function(){r._clear(t)})}else this._clear(t,n);return!1},cancel:function(){if(this.dragging){this._mouseUp({target:null}),this.options.helper=="original"?this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper"):this.currentItem.show();for(var t=this.containers.length-1;t>=0;t--)this.containers[t]._trigger("deactivate",null,this._uiHash(this)),this.containers[t].containerCache.over&&(this.containers[t]._trigger("out",null,this._uiHash(this)),this.containers[t].containerCache.over=0)}return this.placeholder&&(this.placeholder[0].parentNode&&this.placeholder[0].parentNode.removeChild(this.placeholder[0]),this.options.helper!="original"&&this.helper&&this.helper[0].parentNode&&this.helper.remove(),e.extend(this,{helper:null,dragging:!1,reverting:!1,_noFinalSort:null}),this.domPosition.prev?e(this.domPosition.prev).after(this.currentItem):e(this.domPosition.parent).prepend(this.currentItem)),this},serialize:function(t){var n=this._getItemsAsjQuery(t&&t.connected),r=[];return t=t||{},e(n).each(function(){var n=(e(t.item||this).attr(t.attribute||"id")||"").match(t.expression||/(.+)[-=_](.+)/);n&&r.push((t.key||n[1]+"[]")+"="+(t.key&&t.expression?n[1]:n[2]))}),!r.length&&t.key&&r.push(t.key+"="),r.join("&")},toArray:function(t){var n=this._getItemsAsjQuery(t&&t.connected),r=[];return t=t||{},n.each(function(){r.push(e(t.item||this).attr(t.attribute||"id")||"")}),r},_intersectsWith:function(e){var t=this.positionAbs.left,n=t+this.helperProportions.width,r=this.positionAbs.top,i=r+this.helperProportions.height,s=e.left,o=s+e.width,u=e.top,a=u+e.height,f=this.offset.click.top,l=this.offset.click.left,c=r+f>u&&r+fs&&t+le[this.floating?"width":"height"]?c:s0?"down":"up")},_getDragHorizontalDirection:function(){var e=this.positionAbs.left-this.lastPositionAbs.left;return e!=0&&(e>0?"right":"left")},refresh:function(e){return this._refreshItems(e),this.refreshPositions(),this},_connectWith:function(){var e=this.options;return e.connectWith.constructor==String?[e.connectWith]:e.connectWith},_getItemsAsjQuery:function(t){var n=[],r=[],i=this._connectWith();if(i&&t)for(var s=i.length-1;s>=0;s--){var o=e(i[s]);for(var u=o.length-1;u>=0;u--){var a=e.data(o[u],this.widgetName);a&&a!=this&&!a.options.disabled&&r.push([e.isFunction(a.options.items)?a.options.items.call(a.element):e(a.options.items,a.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"),a])}}r.push([e.isFunction(this.options.items)?this.options.items.call(this.element,null,{options:this.options,item:this.currentItem}):e(this.options.items,this.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"),this]);for(var s=r.length-1;s>=0;s--)r[s][0].each(function(){n.push(this)});return e(n)},_removeCurrentsFromItems:function(){var e=this.currentItem.find(":data("+this.widgetName+"-item)");for(var t=0;t=0;s--){var o=e(i[s]);for(var u=o.length-1;u>=0;u--){var a=e.data(o[u],this.widgetName);a&&a!=this&&!a.options.disabled&&(r.push([e.isFunction(a.options.items)?a.options.items.call(a.element[0],t,{item:this.currentItem}):e(a.options.items,a.element),a]),this.containers.push(a))}}for(var s=r.length-1;s>=0;s--){var f=r[s][1],l=r[s][0];for(var 
u=0,c=l.length;u=0;n--){var r=this.items[n];if(r.instance!=this.currentContainer&&this.currentContainer&&r.item[0]!=this.currentItem[0])continue;var i=this.options.toleranceElement?e(this.options.toleranceElement,r.item):r.item;t||(r.width=i.outerWidth(),r.height=i.outerHeight());var s=i.offset();r.left=s.left,r.top=s.top}if(this.options.custom&&this.options.custom.refreshContainers)this.options.custom.refreshContainers.call(this);else for(var n=this.containers.length-1;n>=0;n--){var s=this.containers[n].element.offset();this.containers[n].containerCache.left=s.left,this.containers[n].containerCache.top=s.top,this.containers[n].containerCache.width=this.containers[n].element.outerWidth(),this.containers[n].containerCache.height=this.containers[n].element.outerHeight()}return this},_createPlaceholder:function(t){t=t||this;var n=t.options;if(!n.placeholder||n.placeholder.constructor==String){var r=n.placeholder;n.placeholder={element:function(){var n=e(document.createElement(t.currentItem[0].nodeName)).addClass(r||t.currentItem[0].className+" ui-sortable-placeholder").removeClass("ui-sortable-helper")[0];return r||(n.style.visibility="hidden"),n},update:function(e,i){if(r&&!n.forcePlaceholderSize)return;i.height()||i.height(t.currentItem.innerHeight()-parseInt(t.currentItem.css("paddingTop")||0,10)-parseInt(t.currentItem.css("paddingBottom")||0,10)),i.width()||i.width(t.currentItem.innerWidth()-parseInt(t.currentItem.css("paddingLeft")||0,10)-parseInt(t.currentItem.css("paddingRight")||0,10))}}}t.placeholder=e(n.placeholder.element.call(t.element,t.currentItem)),t.currentItem.after(t.placeholder),n.placeholder.update(t,t.placeholder)},_contactContainers:function(t){var n=null,r=null;for(var i=this.containers.length-1;i>=0;i--){if(e.contains(this.currentItem[0],this.containers[i].element[0]))continue;if(this._intersectsWith(this.containers[i].containerCache)){if(n&&e.contains(this.containers[i].element[0],n.element[0]))continue;n=this.containers[i],r=i}else this.containers[i].containerCache.over&&(this.containers[i]._trigger("out",t,this._uiHash(this)),this.containers[i].containerCache.over=0)}if(!n)return;if(this.containers.length===1)this.containers[r]._trigger("over",t,this._uiHash(this)),this.containers[r].containerCache.over=1;else if(this.currentContainer!=this.containers[r]){var s=1e4,o=null,u=this.positionAbs[this.containers[r].floating?"left":"top"];for(var a=this.items.length-1;a>=0;a--){if(!e.contains(this.containers[r].element[0],this.items[a].item[0]))continue;var f=this.containers[r].floating?this.items[a].item.offset().left:this.items[a].item.offset().top;Math.abs(f-u)0?"down":"up")}if(!o&&!this.options.dropOnEmpty)return;this.currentContainer=this.containers[r],o?this._rearrange(t,o,null,!0):this._rearrange(t,null,this.containers[r].element,!0),this._trigger("change",t,this._uiHash()),this.containers[r]._trigger("change",t,this._uiHash(this)),this.options.placeholder.update(this.currentContainer,this.placeholder),this.containers[r]._trigger("over",t,this._uiHash(this)),this.containers[r].containerCache.over=1}},_createHelper:function(t){var n=this.options,r=e.isFunction(n.helper)?e(n.helper.apply(this.element[0],[t,this.currentItem])):n.helper=="clone"?this.currentItem.clone():this.currentItem;return 
r.parents("body").length||e(n.appendTo!="parent"?n.appendTo:this.currentItem[0].parentNode)[0].appendChild(r[0]),r[0]==this.currentItem[0]&&(this._storedCSS={width:this.currentItem[0].style.width,height:this.currentItem[0].style.height,position:this.currentItem.css("position"),top:this.currentItem.css("top"),left:this.currentItem.css("left")}),(r[0].style.width==""||n.forceHelperSize)&&r.width(this.currentItem.width()),(r[0].style.height==""||n.forceHelperSize)&&r.height(this.currentItem.height()),r},_adjustOffsetFromHelper:function(t){typeof t=="string"&&(t=t.split(" ")),e.isArray(t)&&(t={left:+t[0],top:+t[1]||0}),"left"in t&&(this.offset.click.left=t.left+this.margins.left),"right"in t&&(this.offset.click.left=this.helperProportions.width-t.right+this.margins.left),"top"in t&&(this.offset.click.top=t.top+this.margins.top),"bottom"in t&&(this.offset.click.top=this.helperProportions.height-t.bottom+this.margins.top)},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var t=this.offsetParent.offset();this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&e.contains(this.scrollParent[0],this.offsetParent[0])&&(t.left+=this.scrollParent.scrollLeft(),t.top+=this.scrollParent.scrollTop());if(this.offsetParent[0]==document.body||this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&e.browser.msie)t={top:0,left:0};return{top:t.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:t.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var e=this.currentItem.position();return{top:e.top-(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:e.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}return{top:0,left:0}},_cacheMargins:function(){this.margins={left:parseInt(this.currentItem.css("marginLeft"),10)||0,top:parseInt(this.currentItem.css("marginTop"),10)||0}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var t=this.options;t.containment=="parent"&&(t.containment=this.helper[0].parentNode);if(t.containment=="document"||t.containment=="window")this.containment=[0-this.offset.relative.left-this.offset.parent.left,0-this.offset.relative.top-this.offset.parent.top,e(t.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(e(t.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top];if(!/^(document|window|parent)$/.test(t.containment)){var n=e(t.containment)[0],r=e(t.containment).offset(),i=e(n).css("overflow")!="hidden";this.containment=[r.left+(parseInt(e(n).css("borderLeftWidth"),10)||0)+(parseInt(e(n).css("paddingLeft"),10)||0)-this.margins.left,r.top+(parseInt(e(n).css("borderTopWidth"),10)||0)+(parseInt(e(n).css("paddingTop"),10)||0)-this.margins.top,r.left+(i?Math.max(n.scrollWidth,n.offsetWidth):n.offsetWidth)-(parseInt(e(n).css("borderLeftWidth"),10)||0)-(parseInt(e(n).css("paddingRight"),10)||0)-this.helperProportions.width-this.margins.left,r.top+(i?Math.max(n.scrollHeight,n.offsetHeight):n.offsetHeight)-(parseInt(e(n).css("borderTopWidth"),10)||0)-(parseInt(e(n).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top]}},_convertPositionTo:function(t,n){n||(n=this.position);var 
r=t=="absolute"?1:-1,i=this.options,s=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,o=/(html|body)/i.test(s[0].tagName);return{top:n.top+this.offset.relative.top*r+this.offset.parent.top*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():o?0:s.scrollTop())*r,left:n.left+this.offset.relative.left*r+this.offset.parent.left*r-(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():o?0:s.scrollLeft())*r}},_generatePosition:function(t){var n=this.options,r=this.cssPosition!="absolute"||this.scrollParent[0]!=document&&!!e.contains(this.scrollParent[0],this.offsetParent[0])?this.scrollParent:this.offsetParent,i=/(html|body)/i.test(r[0].tagName);this.cssPosition=="relative"&&(this.scrollParent[0]==document||this.scrollParent[0]==this.offsetParent[0])&&(this.offset.relative=this._getRelativeOffset());var s=t.pageX,o=t.pageY;if(this.originalPosition){this.containment&&(t.pageX-this.offset.click.leftthis.containment[2]&&(s=this.containment[2]+this.offset.click.left),t.pageY-this.offset.click.top>this.containment[3]&&(o=this.containment[3]+this.offset.click.top));if(n.grid){var u=this.originalPageY+Math.round((o-this.originalPageY)/n.grid[1])*n.grid[1];o=this.containment?u-this.offset.click.topthis.containment[3]?u-this.offset.click.topthis.containment[2]?a-this.offset.click.left=0;i--)n||r.push(function(e){return function(t){e._trigger("deactivate",t,this._uiHash(this))}}.call(this,this.containers[i])),this.containers[i].containerCache.over&&(r.push(function(e){return function(t){e._trigger("out",t,this._uiHash(this))}}.call(this,this.containers[i])),this.containers[i].containerCache.over=0);this._storedCursor&&e("body").css("cursor",this._storedCursor),this._storedOpacity&&this.helper.css("opacity",this._storedOpacity),this._storedZIndex&&this.helper.css("zIndex",this._storedZIndex=="auto"?"":this._storedZIndex),this.dragging=!1;if(this.cancelHelperRemoval){if(!n){this._trigger("beforeStop",t,this._uiHash());for(var i=0;i",widgetEventPrefix:"spin",options:{culture:null,icons:{down:"ui-icon-triangle-1-s",up:"ui-icon-triangle-1-n"},incremental:!0,max:null,min:null,numberFormat:null,page:10,step:1,change:null,spin:null,start:null,stop:null},_create:function(){this._setOption("max",this.options.max),this._setOption("min",this.options.min),this._setOption("step",this.options.step),this._value(this.element.val(),!0),this._draw(),this._on(this._events),this._refresh(),this._on(this.window,{beforeunload:function(){this.element.removeAttr("autocomplete")}})},_getCreateOptions:function(){var t={},n=this.element;return e.each(["min","max","step"],function(e,r){var i=n.attr(r);i!==undefined&&i.length&&(t[r]=i)}),t},_events:{keydown:function(e){this._start(e)&&this._keydown(e)&&e.preventDefault()},keyup:"_stop",focus:function(){this.uiSpinner.addClass("ui-state-active"),this.previous=this.element.val()},blur:function(e){if(this.cancelBlur){delete this.cancelBlur;return}this._refresh(),this.uiSpinner.removeClass("ui-state-active"),this.previous!==this.element.val()&&this._trigger("change",e)},mousewheel:function(e,t){if(!t)return;if(!this.spinning&&!this._start(e))return!1;this._spin((t>0?1:-1)*this.options.step,e),clearTimeout(this.mousewheelTimer),this.mousewheelTimer=this._delay(function(){this.spinning&&this._stop(e)},100),e.preventDefault()},"mousedown .ui-spinner-button":function(t){function r(){var 
e=this.element[0]===this.document[0].activeElement;e||(this.element.focus(),this.previous=n,this._delay(function(){this.previous=n}))}var n;n=this.element[0]===this.document[0].activeElement?this.previous:this.element.val(),t.preventDefault(),r.call(this),this.cancelBlur=!0,this._delay(function(){delete this.cancelBlur,r.call(this)});if(this._start(t)===!1)return;this._repeat(null,e(t.currentTarget).hasClass("ui-spinner-up")?1:-1,t)},"mouseup .ui-spinner-button":"_stop","mouseenter .ui-spinner-button":function(t){if(!e(t.currentTarget).hasClass("ui-state-active"))return;if(this._start(t)===!1)return!1;this._repeat(null,e(t.currentTarget).hasClass("ui-spinner-up")?1:-1,t)},"mouseleave .ui-spinner-button":"_stop"},_draw:function(){var e=this.uiSpinner=this.element.addClass("ui-spinner-input").attr("autocomplete","off").wrap(this._uiSpinnerHtml()).parent().append(this._buttonHtml());this._hoverable(e),this.element.attr("role","spinbutton"),this.buttons=e.find(".ui-spinner-button").attr("tabIndex",-1).button().removeClass("ui-corner-all"),this.buttons.height()>Math.ceil(e.height()*.5)&&e.height()>0&&e.height(e.height()),this.options.disabled&&this.disable()},_keydown:function(t){var n=this.options,r=e.ui.keyCode;switch(t.keyCode){case r.UP:return this._repeat(null,1,t),!0;case r.DOWN:return this._repeat(null,-1,t),!0;case r.PAGE_UP:return this._repeat(null,n.page,t),!0;case r.PAGE_DOWN:return this._repeat(null,-n.page,t),!0}return!1},_uiSpinnerHtml:function(){return""},_buttonHtml:function(){return""+""+""+""+""},_start:function(e){return!this.spinning&&this._trigger("start",e)===!1?!1:(this.counter||(this.counter=1),this.spinning=!0,!0)},_repeat:function(e,t,n){e=e||500,clearTimeout(this.timer),this.timer=this._delay(function(){this._repeat(40,t,n)},e),this._spin(t*this.options.step,n)},_spin:function(e,t){var n=this.value()||0;this.counter||(this.counter=1),n=this._adjustValue(n+e*this._increment(this.counter));if(!this.spinning||this._trigger("spin",t,{value:n})!==!1)this._value(n),this.counter++},_increment:function(t){var n=this.options.incremental;return n?e.isFunction(n)?n(t):Math.floor(t*t*t/5e4-t*t/500+17*t/200+1):1},_precision:function(){var e=this._precisionOf(this.options.step);return this.options.min!==null&&(e=Math.max(e,this._precisionOf(this.options.min))),e},_precisionOf:function(e){var t=e.toString(),n=t.indexOf(".");return n===-1?0:t.length-n-1},_adjustValue:function(e){var t,n,r=this.options;return t=r.min!==null?r.min:0,n=e-t,n=Math.round(n/r.step)*r.step,e=t+n,e=parseFloat(e.toFixed(this._precision())),r.max!==null&&e>r.max?r.max:r.min!==null&&e1&&e.href.replace(r,"")===location.href.replace(r,"")}var n=0,r=/#.*$/;e.widget("ui.tabs",{version:"1.9.0",delay:300,options:{active:null,collapsible:!1,event:"click",heightStyle:"content",hide:null,show:null,activate:null,beforeActivate:null,beforeLoad:null,load:null},_create:function(){var t,n=this,r=this.options,i=r.active;this.running=!1,this.element.addClass("ui-tabs ui-widget ui-widget-content ui-corner-all").toggleClass("ui-tabs-collapsible",r.collapsible).delegate(".ui-tabs-nav > li","mousedown"+this.eventNamespace,function(t){e(this).is(".ui-state-disabled")&&t.preventDefault()}).delegate(".ui-tabs-anchor","focus"+this.eventNamespace,function(){e(this).closest("li").is(".ui-state-disabled")&&this.blur()}),this._processTabs();if(i===null){location.hash&&this.anchors.each(function(e,t){if(t.hash===location.hash)return 
i=e,!1}),i===null&&(i=this.tabs.filter(".ui-tabs-active").index());if(i===null||i===-1)i=this.tabs.length?0:!1}i!==!1&&(i=this.tabs.index(this.tabs.eq(i)),i===-1&&(i=r.collapsible?!1:0)),r.active=i,!r.collapsible&&r.active===!1&&this.anchors.length&&(r.active=0),e.isArray(r.disabled)&&(r.disabled=e.unique(r.disabled.concat(e.map(this.tabs.filter(".ui-state-disabled"),function(e){return n.tabs.index(e)}))).sort()),this.options.active!==!1&&this.anchors.length?this.active=this._findActive(this.options.active):this.active=e(),this._refresh(),this.active.length&&this.load(r.active)},_getCreateEventData:function(){return{tab:this.active,panel:this.active.length?this._getPanelForTab(this.active):e()}},_tabKeydown:function(t){var n=e(this.document[0].activeElement).closest("li"),r=this.tabs.index(n),i=!0;if(this._handlePageNav(t))return;switch(t.keyCode){case e.ui.keyCode.RIGHT:case e.ui.keyCode.DOWN:r++;break;case e.ui.keyCode.UP:case e.ui.keyCode.LEFT:i=!1,r--;break;case e.ui.keyCode.END:r=this.anchors.length-1;break;case e.ui.keyCode.HOME:r=0;break;case e.ui.keyCode.SPACE:t.preventDefault(),clearTimeout(this.activating),this._activate(r);return;case e.ui.keyCode.ENTER:t.preventDefault(),clearTimeout(this.activating),this._activate(r===this.options.active?!1:r);return;default:return}t.preventDefault(),clearTimeout(this.activating),r=this._focusNextTab(r,i),t.ctrlKey||(n.attr("aria-selected","false"),this.tabs.eq(r).attr("aria-selected","true"),this.activating=this._delay(function(){this.option("active",r)},this.delay))},_panelKeydown:function(t){if(this._handlePageNav(t))return;t.ctrlKey&&t.keyCode===e.ui.keyCode.UP&&(t.preventDefault(),this.active.focus())},_handlePageNav:function(t){if(t.altKey&&t.keyCode===e.ui.keyCode.PAGE_UP)return this._activate(this._focusNextTab(this.options.active-1,!1)),!0;if(t.altKey&&t.keyCode===e.ui.keyCode.PAGE_DOWN)return this._activate(this._focusNextTab(this.options.active+1,!0)),!0},_findNextTab:function(t,n){function i(){return t>r&&(t=0),t<0&&(t=r),t}var r=this.tabs.length-1;while(e.inArray(i(),this.options.disabled)!==-1)t=n?t+1:t-1;return t},_focusNextTab:function(e,t){return e=this._findNextTab(e,t),this.tabs.eq(e).focus(),e},_setOption:function(e,t){if(e==="active"){this._activate(t);return}if(e==="disabled"){this._setupDisabled(t);return}this._super(e,t),e==="collapsible"&&(this.element.toggleClass("ui-tabs-collapsible",t),!t&&this.options.active===!1&&this._activate(0)),e==="event"&&this._setupEvents(t),e==="heightStyle"&&this._setupHeightStyle(t)},_tabId:function(e){return e.attr("aria-controls")||"ui-tabs-"+i()},_sanitizeSelector:function(e){return e?e.replace(/[!"$%&'()*+,.\/:;<=>?@\[\]\^`{|}~]/g,"\\$&"):""},refresh:function(){var t,n=this.options,r=this.tablist.children(":has(a[href])");n.disabled=e.map(r.filter(".ui-state-disabled"),function(e){return 
r.index(e)}),this._processTabs(),n.active===!1||!this.anchors.length?(n.active=!1,this.active=e()):this.active.length&&!e.contains(this.tablist[0],this.active[0])?this.tabs.length===n.disabled.length?(n.active=!1,this.active=e()):this._activate(this._findNextTab(Math.max(0,n.active-1),!1)):n.active=this.tabs.index(this.active),this._refresh()},_refresh:function(){this._setupDisabled(this.options.disabled),this._setupEvents(this.options.event),this._setupHeightStyle(this.options.heightStyle),this.tabs.not(this.active).attr({"aria-selected":"false",tabIndex:-1}),this.panels.not(this._getPanelForTab(this.active)).hide().attr({"aria-expanded":"false","aria-hidden":"true"}),this.active.length?(this.active.addClass("ui-tabs-active ui-state-active").attr({"aria-selected":"true",tabIndex:0}),this._getPanelForTab(this.active).show().attr({"aria-expanded":"true","aria-hidden":"false"})):this.tabs.eq(0).attr("tabIndex",0)},_processTabs:function(){var t=this;this.tablist=this._getList().addClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all").attr("role","tablist"),this.tabs=this.tablist.find("> li:has(a[href])").addClass("ui-state-default ui-corner-top").attr({role:"tab",tabIndex:-1}),this.anchors=this.tabs.map(function(){return e("a",this)[0]}).addClass("ui-tabs-anchor").attr({role:"presentation",tabIndex:-1}),this.panels=e(),this.anchors.each(function(n,r){var i,o,u,a=e(r).uniqueId().attr("id"),f=e(r).closest("li"),l=f.attr("aria-controls");s(r)?(i=r.hash,o=t.element.find(t._sanitizeSelector(i))):(u=t._tabId(f),i="#"+u,o=t.element.find(i),o.length||(o=t._createPanel(u),o.insertAfter(t.panels[n-1]||t.tablist)),o.attr("aria-live","polite")),o.length&&(t.panels=t.panels.add(o)),l&&f.data("ui-tabs-aria-controls",l),f.attr({"aria-controls":i.substring(1),"aria-labelledby":a}),o.attr("aria-labelledby",a)}),this.panels.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").attr("role","tabpanel")},_getList:function(){return this.element.find("ol,ul").eq(0)},_createPanel:function(t){return e("
              ").attr("id",t).addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").data("ui-tabs-destroy",!0)},_setupDisabled:function(t){e.isArray(t)&&(t.length?t.length===this.anchors.length&&(t=!0):t=!1);for(var n=0,r;r=this.tabs[n];n++)t===!0||e.inArray(n,t)!==-1?e(r).addClass("ui-state-disabled").attr("aria-disabled","true"):e(r).removeClass("ui-state-disabled").removeAttr("aria-disabled");this.options.disabled=t},_setupEvents:function(t){var n={click:function(e){e.preventDefault()}};t&&e.each(t.split(" "),function(e,t){n[t]="_eventHandler"}),this._off(this.anchors.add(this.tabs).add(this.panels)),this._on(this.anchors,n),this._on(this.tabs,{keydown:"_tabKeydown"}),this._on(this.panels,{keydown:"_panelKeydown"}),this._focusable(this.tabs),this._hoverable(this.tabs)},_setupHeightStyle:function(t){var n,r,i=this.element.parent();t==="fill"?(e.support.minHeight||(r=i.css("overflow"),i.css("overflow","hidden")),n=i.height(),this.element.siblings(":visible").each(function(){var t=e(this),r=t.css("position");if(r==="absolute"||r==="fixed")return;n-=t.outerHeight(!0)}),r&&i.css("overflow",r),this.element.children().not(this.panels).each(function(){n-=e(this).outerHeight(!0)}),this.panels.each(function(){e(this).height(Math.max(0,n-e(this).innerHeight()+e(this).height()))}).css("overflow","auto")):t==="auto"&&(n=0,this.panels.each(function(){n=Math.max(n,e(this).height("").height())}).height(n))},_eventHandler:function(t){var n=this.options,r=this.active,i=e(t.currentTarget),s=i.closest("li"),o=s[0]===r[0],u=o&&n.collapsible,a=u?e():this._getPanelForTab(s),f=r.length?this._getPanelForTab(r):e(),l={oldTab:r,oldPanel:f,newTab:u?e():s,newPanel:a};t.preventDefault();if(s.hasClass("ui-state-disabled")||s.hasClass("ui-tabs-loading")||this.running||o&&!n.collapsible||this._trigger("beforeActivate",t,l)===!1)return;n.active=u?!1:this.tabs.index(s),this.active=o?e():s,this.xhr&&this.xhr.abort(),!f.length&&!a.length&&e.error("jQuery UI Tabs: Mismatching fragment identifier."),a.length&&this.load(this.tabs.index(s),t),this._toggle(t,l)},_toggle:function(t,n){function o(){r.running=!1,r._trigger("activate",t,n)}function u(){n.newTab.closest("li").addClass("ui-tabs-active ui-state-active"),i.length&&r.options.show?r._show(i,r.options.show,o):(i.show(),o())}var r=this,i=n.newPanel,s=n.oldPanel;this.running=!0,s.length&&this.options.hide?this._hide(s,this.options.hide,function(){n.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),u()}):(n.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),s.hide(),u()),s.attr({"aria-expanded":"false","aria-hidden":"true"}),n.oldTab.attr("aria-selected","false"),i.length&&s.length?n.oldTab.attr("tabIndex",-1):i.length&&this.tabs.filter(function(){return e(this).attr("tabIndex")===0}).attr("tabIndex",-1),i.attr({"aria-expanded":"true","aria-hidden":"false"}),n.newTab.attr({"aria-selected":"true",tabIndex:0})},_activate:function(t){var n,r=this._findActive(t);if(r[0]===this.active[0])return;r.length||(r=this.active),n=r.find(".ui-tabs-anchor")[0],this._eventHandler({target:n,currentTarget:n,preventDefault:e.noop})},_findActive:function(t){return t===!1?e():this.tabs.eq(t)},_getIndex:function(e){return typeof e=="string"&&(e=this.anchors.index(this.anchors.filter("[href$='"+e+"']"))),e},_destroy:function(){this.xhr&&this.xhr.abort(),this.element.removeClass("ui-tabs ui-widget ui-widget-content ui-corner-all ui-tabs-collapsible"),this.tablist.removeClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header 
ui-corner-all").removeAttr("role"),this.anchors.removeClass("ui-tabs-anchor").removeAttr("role").removeAttr("tabIndex").removeData("href.tabs").removeData("load.tabs").removeUniqueId(),this.tabs.add(this.panels).each(function(){e.data(this,"ui-tabs-destroy")?e(this).remove():e(this).removeClass("ui-state-default ui-state-active ui-state-disabled ui-corner-top ui-corner-bottom ui-widget-content ui-tabs-active ui-tabs-panel").removeAttr("tabIndex").removeAttr("aria-live").removeAttr("aria-busy").removeAttr("aria-selected").removeAttr("aria-labelledby").removeAttr("aria-hidden").removeAttr("aria-expanded").removeAttr("role")}),this.tabs.each(function(){var t=e(this),n=t.data("ui-tabs-aria-controls");n?t.attr("aria-controls",n):t.removeAttr("aria-controls")}),this.options.heightStyle!=="content"&&this.panels.css("height","")},enable:function(n){var r=this.options.disabled;if(r===!1)return;n===t?r=!1:(n=this._getIndex(n),e.isArray(r)?r=e.map(r,function(e){return e!==n?e:null}):r=e.map(this.tabs,function(e,t){return t!==n?t:null})),this._setupDisabled(r)},disable:function(n){var r=this.options.disabled;if(r===!0)return;if(n===t)r=!0;else{n=this._getIndex(n);if(e.inArray(n,r)!==-1)return;e.isArray(r)?r=e.merge([n],r).sort():r=[n]}this._setupDisabled(r)},load:function(t,n){t=this._getIndex(t);var r=this,i=this.tabs.eq(t),o=i.find(".ui-tabs-anchor"),u=this._getPanelForTab(i),a={tab:i,panel:u};if(s(o[0]))return;this.xhr=e.ajax(this._ajaxSettings(o,n,a)),this.xhr&&this.xhr.statusText!=="canceled"&&(i.addClass("ui-tabs-loading"),u.attr("aria-busy","true"),this.xhr.success(function(e){setTimeout(function(){u.html(e),r._trigger("load",n,a)},1)}).complete(function(e,t){setTimeout(function(){t==="abort"&&r.panels.stop(!1,!0),i.removeClass("ui-tabs-loading"),u.removeAttr("aria-busy"),e===r.xhr&&delete r.xhr},1)}))},_ajaxSettings:function(t,n,r){var i=this;return{url:t.attr("href"),beforeSend:function(t,s){return i._trigger("beforeLoad",n,e.extend({jqXHR:t,ajaxSettings:s},r))}}},_getPanelForTab:function(t){var n=e(t).attr("aria-controls");return this.element.find(this._sanitizeSelector("#"+n))}}),e.uiBackCompat!==!1&&(e.ui.tabs.prototype._ui=function(e,t){return{tab:e,panel:t,index:this.anchors.index(e)}},e.widget("ui.tabs",e.ui.tabs,{url:function(e,t){this.anchors.eq(e).attr("href",t)}}),e.widget("ui.tabs",e.ui.tabs,{options:{ajaxOptions:null,cache:!1},_create:function(){this._super();var t=this;this._on({tabsbeforeload:function(n,r){if(e.data(r.tab[0],"cache.tabs")){n.preventDefault();return}r.jqXHR.success(function(){t.options.cache&&e.data(r.tab[0],"cache.tabs",!0)})}})},_ajaxSettings:function(t,n,r){var i=this.options.ajaxOptions;return e.extend({},i,{error:function(e,t,n){try{i.error(e,t,r.tab.closest("li").index(),r.tab[0])}catch(n){}}},this._superApply(arguments))},_setOption:function(e,t){e==="cache"&&t===!1&&this.anchors.removeData("cache.tabs"),this._super(e,t)},_destroy:function(){this.anchors.removeData("cache.tabs"),this._super()},url:function(e,t){this.anchors.eq(e).removeData("cache.tabs"),this._superApply(arguments)}}),e.widget("ui.tabs",e.ui.tabs,{abort:function(){this.xhr&&this.xhr.abort()}}),e.widget("ui.tabs",e.ui.tabs,{options:{spinner:"Loading…"},_create:function(){this._super(),this._on({tabsbeforeload:function(e,t){if(e.target!==this.element[0]||!this.options.spinner)return;var n=t.tab.find("span"),r=n.html();n.html(this.options.spinner),t.jqXHR.complete(function(){n.html(r)})}})}}),e.widget("ui.tabs",e.ui.tabs,{options:{enable:null,disable:null},enable:function(t){var 
n=this.options,r;if(t&&n.disabled===!0||e.isArray(n.disabled)&&e.inArray(t,n.disabled)!==-1)r=!0;this._superApply(arguments),r&&this._trigger("enable",null,this._ui(this.anchors[t],this.panels[t]))},disable:function(t){var n=this.options,r;if(t&&n.disabled===!1||e.isArray(n.disabled)&&e.inArray(t,n.disabled)===-1)r=!0;this._superApply(arguments),r&&this._trigger("disable",null,this._ui(this.anchors[t],this.panels[t]))}}),e.widget("ui.tabs",e.ui.tabs,{options:{add:null,remove:null,tabTemplate:"
            7. #{label}
            8. "},add:function(n,r,i){i===t&&(i=this.anchors.length);var s,o,u=this.options,a=e(u.tabTemplate.replace(/#\{href\}/g,n).replace(/#\{label\}/g,r)),f=n.indexOf("#")?this._tabId(a):n.replace("#","");return a.addClass("ui-state-default ui-corner-top").data("ui-tabs-destroy",!0),a.attr("aria-controls",f),s=i>=this.tabs.length,o=this.element.find("#"+f),o.length||(o=this._createPanel(f),s?i>0?o.insertAfter(this.panels.eq(-1)):o.appendTo(this.element):o.insertBefore(this.panels[i])),o.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").hide(),s?a.appendTo(this.tablist):a.insertBefore(this.tabs[i]),u.disabled=e.map(u.disabled,function(e){return e>=i?++e:e}),this.refresh(),this.tabs.length===1&&u.active===!1&&this.option("active",0),this._trigger("add",null,this._ui(this.anchors[i],this.panels[i])),this},remove:function(t){t=this._getIndex(t);var n=this.options,r=this.tabs.eq(t).remove(),i=this._getPanelForTab(r).remove();return r.hasClass("ui-tabs-active")&&this.anchors.length>2&&this._activate(t+(t+1=t?--e:e}),this.refresh(),this._trigger("remove",null,this._ui(r.find("a")[0],i[0])),this}}),e.widget("ui.tabs",e.ui.tabs,{length:function(){return this.anchors.length}}),e.widget("ui.tabs",e.ui.tabs,{options:{idPrefix:"ui-tabs-"},_tabId:function(t){var n=t.is("li")?t.find("a[href]"):t;return n=n[0],e(n).closest("li").attr("aria-controls")||n.title&&n.title.replace(/\s/g,"_").replace(/[^\w\u00c0-\uFFFF\-]/g,"")||this.options.idPrefix+i()}}),e.widget("ui.tabs",e.ui.tabs,{options:{panelTemplate:"
              "},_createPanel:function(t){return e(this.options.panelTemplate).attr("id",t).addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").data("ui-tabs-destroy",!0)}}),e.widget("ui.tabs",e.ui.tabs,{_create:function(){var e=this.options;e.active===null&&e.selected!==t&&(e.active=e.selected===-1?!1:e.selected),this._super(),e.selected=e.active,e.selected===!1&&(e.selected=-1)},_setOption:function(e,t){if(e!=="selected")return this._super(e,t);var n=this.options;this._super("active",t===-1?!1:t),n.selected=n.active,n.selected===!1&&(n.selected=-1)},_eventHandler:function(e){this._superApply(arguments),this.options.selected=this.options.active,this.options.selected===!1&&(this.options.selected=-1)}}),e.widget("ui.tabs",e.ui.tabs,{options:{show:null,select:null},_create:function(){this._super(),this.options.active!==!1&&this._trigger("show",null,this._ui(this.active.find(".ui-tabs-anchor")[0],this._getPanelForTab(this.active)[0]))},_trigger:function(e,t,n){var r=this._superApply(arguments);return r?(e==="beforeActivate"&&n.newTab.length?r=this._super("select",t,{tab:n.newTab.find(".ui-tabs-anchor")[0],panel:n.newPanel[0],index:n.newTab.closest("li").index()}):e==="activate"&&n.newTab.length&&(r=this._super("show",t,{tab:n.newTab.find(".ui-tabs-anchor")[0],panel:n.newPanel[0],index:n.newTab.closest("li").index()})),r):!1}}),e.widget("ui.tabs",e.ui.tabs,{select:function(e){e=this._getIndex(e);if(e===-1){if(!this.options.collapsible||this.options.selected===-1)return;e=this.options.selected}this.anchors.eq(e).trigger(this.options.event+this.eventNamespace)}}),function(){var t=0;e.widget("ui.tabs",e.ui.tabs,{options:{cookie:null},_create:function(){var e=this.options,t;e.active==null&&e.cookie&&(t=parseInt(this._cookie(),10),t===-1&&(t=!1),e.active=t),this._super()},_cookie:function(n){var r=[this.cookie||(this.cookie=this.options.cookie.name||"ui-tabs-"+ ++t)];return arguments.length&&(r.push(n===!1?-1:n),r.push(this.options.cookie)),e.cookie.apply(null,r)},_refresh:function(){this._super(),this.options.cookie&&this._cookie(this.options.active,this.options.cookie)},_eventHandler:function(e){this._superApply(arguments),this.options.cookie&&this._cookie(this.options.active,this.options.cookie)},_destroy:function(){this._super(),this.options.cookie&&this._cookie(null,this.options.cookie)}})}(),e.widget("ui.tabs",e.ui.tabs,{_trigger:function(t,n,r){var i=e.extend({},r);return t==="load"&&(i.panel=i.panel[0],i.tab=i.tab.find(".ui-tabs-anchor")[0]),this._super(t,n,i)}}),e.widget("ui.tabs",e.ui.tabs,{options:{fx:null},_getFx:function(){var t,n,r=this.options.fx;return r&&(e.isArray(r)?(t=r[0],n=r[1]):t=n=r),r?{show:n,hide:t}:null},_toggle:function(e,t){function o(){n.running=!1,n._trigger("activate",e,t)}function u(){t.newTab.closest("li").addClass("ui-tabs-active ui-state-active"),r.length&&s.show?r.animate(s.show,s.show.duration,function(){o()}):(r.show(),o())}var n=this,r=t.newPanel,i=t.oldPanel,s=this._getFx();if(!s)return this._super(e,t);n.running=!0,i.length&&s.hide?i.animate(s.hide,s.hide.duration,function(){t.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),u()}):(t.oldTab.closest("li").removeClass("ui-tabs-active ui-state-active"),i.hide(),u())}}))})(jQuery);(function(e){function n(t,n){var r=(t.attr("aria-describedby")||"").split(/\s+/);r.push(n),t.data("ui-tooltip-id",n).attr("aria-describedby",e.trim(r.join(" ")))}function r(t){var 
n=t.data("ui-tooltip-id"),r=(t.attr("aria-describedby")||"").split(/\s+/),i=e.inArray(n,r);i!==-1&&r.splice(i,1),t.removeData("ui-tooltip-id"),r=e.trim(r.join(" ")),r?t.attr("aria-describedby",r):t.removeAttr("aria-describedby")}var t=0;e.widget("ui.tooltip",{version:"1.9.0",options:{content:function(){return e(this).attr("title")},hide:!0,items:"[title]",position:{my:"left+15 center",at:"right center",collision:"flipfit flipfit"},show:!0,tooltipClass:null,track:!1,close:null,open:null},_create:function(){this._on({mouseover:"open",focusin:"open"}),this.tooltips={}},_setOption:function(t,n){var r=this;if(t==="disabled"){this[n?"_disable":"_enable"](),this.options[t]=n;return}this._super(t,n),t==="content"&&e.each(this.tooltips,function(e,t){r._updateContent(t)})},_disable:function(){var t=this;e.each(this.tooltips,function(n,r){var i=e.Event("blur");i.target=i.currentTarget=r[0],t.close(i,!0)}),this.element.find(this.options.items).andSelf().each(function(){var t=e(this);t.is("[title]")&&t.data("ui-tooltip-title",t.attr("title")).attr("title","")})},_enable:function(){this.element.find(this.options.items).andSelf().each(function(){var t=e(this);t.data("ui-tooltip-title")&&t.attr("title",t.data("ui-tooltip-title"))})},open:function(t){var n=e(t?t.target:this.element).closest(this.options.items);if(!n.length)return;if(this.options.track&&n.data("ui-tooltip-id")){this._find(n).position(e.extend({of:n},this.options.position)),this._off(this.document,"mousemove");return}n.attr("title")&&n.data("ui-tooltip-title",n.attr("title")),n.data("tooltip-open",!0),this._updateContent(n,t)},_updateContent:function(e,t){var n,r=this.options.content,i=this;if(typeof r=="string")return this._open(t,e,r);n=r.call(e[0],function(n){if(!e.data("tooltip-open"))return;i._delay(function(){this._open(t,e,n)})}),n&&this._open(t,e,n)},_open:function(t,r,i){function u(e){o.of=e,s.position(o)}var s,o;if(!i)return;s=this._find(r);if(s.length){s.find(".ui-tooltip-content").html(i);return}r.is("[title]")&&(t&&t.type==="mouseover"?r.attr("title",""):r.removeAttr("title")),s=this._tooltip(r),n(r,s.attr("id")),s.find(".ui-tooltip-content").html(i),this.options.track&&t&&/^mouse/.test(t.originalEvent.type)?(o=e.extend({},this.options.position),this._on(this.document,{mousemove:u}),u(t)):s.position(e.extend({of:r},this.options.position)),s.hide(),this._show(s,this.options.show),this._trigger("open",t,{tooltip:s}),this._on(r,{mouseleave:"close",focusout:"close",keyup:function(t){if(t.keyCode===e.ui.keyCode.ESCAPE){var n=e.Event(t);n.currentTarget=r[0],this.close(n,!0)}}})},close:function(t,n){var i=this,s=e(t?t.currentTarget:this.element),o=this._find(s);if(this.closing)return;if(!n&&t&&t.type!=="focusout"&&this.document[0].activeElement===s[0])return;s.data("ui-tooltip-title")&&s.attr("title",s.data("ui-tooltip-title")),r(s),o.stop(!0),this._hide(o,this.options.hide,function(){e(this).remove(),delete i.tooltips[this.id]}),s.removeData("tooltip-open"),this._off(s,"mouseleave focusout keyup"),this._off(this.document,"mousemove"),this.closing=!0,this._trigger("close",t,{tooltip:o}),this.closing=!1},_tooltip:function(n){var r="ui-tooltip-"+t++,i=e("
              ").attr({id:r,role:"tooltip"}).addClass("ui-tooltip ui-widget ui-corner-all ui-widget-content "+(this.options.tooltipClass||""));return e("
              ").addClass("ui-tooltip-content").appendTo(i),i.appendTo(this.document[0].body),e.fn.bgiframe&&i.bgiframe(),this.tooltips[r]=n,i},_find:function(t){var n=t.data("ui-tooltip-id");return n?e("#"+n):e()},_destroy:function(){var t=this;e.each(this.tooltips,function(n,r){var i=e.Event("blur");i.target=i.currentTarget=r[0],t.close(i,!0),e("#"+n).remove(),r.data("ui-tooltip-title")&&(r.attr("title",r.data("ui-tooltip-title")),r.removeData("ui-tooltip-title"))})}})})(jQuery); \ No newline at end of file diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js new file mode 100644 index 0000000000..bc3fbc81b2 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js @@ -0,0 +1,2 @@ +/*! jQuery v1.8.2 jquery.com | jquery.org/license */ +(function(a,b){function G(a){var b=F[a]={};return p.each(a.split(s),function(a,c){b[c]=!0}),b}function J(a,c,d){if(d===b&&a.nodeType===1){var e="data-"+c.replace(I,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:+d+""===d?+d:H.test(d)?p.parseJSON(d):d}catch(f){}p.data(a,c,d)}else d=b}return d}function K(a){var b;for(b in a){if(b==="data"&&p.isEmptyObject(a[b]))continue;if(b!=="toJSON")return!1}return!0}function ba(){return!1}function bb(){return!0}function bh(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function bi(a,b){do a=a[b];while(a&&a.nodeType!==1);return a}function bj(a,b,c){b=b||0;if(p.isFunction(b))return p.grep(a,function(a,d){var e=!!b.call(a,d,a);return e===c});if(b.nodeType)return p.grep(a,function(a,d){return a===b===c});if(typeof b=="string"){var d=p.grep(a,function(a){return a.nodeType===1});if(be.test(b))return p.filter(b,d,!c);b=p.filter(b,d)}return p.grep(a,function(a,d){return p.inArray(a,b)>=0===c})}function bk(a){var b=bl.split("|"),c=a.createDocumentFragment();if(c.createElement)while(b.length)c.createElement(b.pop());return c}function bC(a,b){return a.getElementsByTagName(b)[0]||a.appendChild(a.ownerDocument.createElement(b))}function bD(a,b){if(b.nodeType!==1||!p.hasData(a))return;var c,d,e,f=p._data(a),g=p._data(b,f),h=f.events;if(h){delete g.handle,g.events={};for(c in h)for(d=0,e=h[c].length;d").appendTo(e.body),c=b.css("display");b.remove();if(c==="none"||c===""){bI=e.body.appendChild(bI||p.extend(e.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!bJ||!bI.createElement)bJ=(bI.contentWindow||bI.contentDocument).document,bJ.write(""),bJ.close();b=bJ.body.appendChild(bJ.createElement(a)),c=bH(b,"display"),e.body.removeChild(bI)}return bS[a]=c,c}function ci(a,b,c,d){var e;if(p.isArray(b))p.each(b,function(b,e){c||ce.test(a)?d(a,e):ci(a+"["+(typeof e=="object"?b:"")+"]",e,c,d)});else if(!c&&p.type(b)==="object")for(e in b)ci(a+"["+e+"]",b[e],c,d);else d(a,b)}function cz(a){return function(b,c){typeof b!="string"&&(c=b,b="*");var d,e,f,g=b.toLowerCase().split(s),h=0,i=g.length;if(p.isFunction(c))for(;h)[^>]*$|#([\w\-]*)$)/,v=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,w=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,y=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,z=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,A=/^-ms-/,B=/-([\da-z])/gi,C=function(a,b){return(b+"").toUpperCase()},D=function(){e.addEventListener?(e.removeEventListener("DOMContentLoaded",D,!1),p.ready()):e.readyState==="complete"&&(e.detachEvent("onreadystatechange",D),p.ready())},E={};p.fn=p.prototype={constructor:p,init:function(a,c,d){var f,g,h,i;if(!a)return 
this;if(a.nodeType)return this.context=this[0]=a,this.length=1,this;if(typeof a=="string"){a.charAt(0)==="<"&&a.charAt(a.length-1)===">"&&a.length>=3?f=[null,a,null]:f=u.exec(a);if(f&&(f[1]||!c)){if(f[1])return c=c instanceof p?c[0]:c,i=c&&c.nodeType?c.ownerDocument||c:e,a=p.parseHTML(f[1],i,!0),v.test(f[1])&&p.isPlainObject(c)&&this.attr.call(a,c,!0),p.merge(this,a);g=e.getElementById(f[2]);if(g&&g.parentNode){if(g.id!==f[2])return d.find(a);this.length=1,this[0]=g}return this.context=e,this.selector=a,this}return!c||c.jquery?(c||d).find(a):this.constructor(c).find(a)}return p.isFunction(a)?d.ready(a):(a.selector!==b&&(this.selector=a.selector,this.context=a.context),p.makeArray(a,this))},selector:"",jquery:"1.8.2",length:0,size:function(){return this.length},toArray:function(){return k.call(this)},get:function(a){return a==null?this.toArray():a<0?this[this.length+a]:this[a]},pushStack:function(a,b,c){var d=p.merge(this.constructor(),a);return d.prevObject=this,d.context=this.context,b==="find"?d.selector=this.selector+(this.selector?" ":"")+c:b&&(d.selector=this.selector+"."+b+"("+c+")"),d},each:function(a,b){return p.each(this,a,b)},ready:function(a){return p.ready.promise().done(a),this},eq:function(a){return a=+a,a===-1?this.slice(a):this.slice(a,a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(k.apply(this,arguments),"slice",k.call(arguments).join(","))},map:function(a){return this.pushStack(p.map(this,function(b,c){return a.call(b,c,b)}))},end:function(){return this.prevObject||this.constructor(null)},push:j,sort:[].sort,splice:[].splice},p.fn.init.prototype=p.fn,p.extend=p.fn.extend=function(){var a,c,d,e,f,g,h=arguments[0]||{},i=1,j=arguments.length,k=!1;typeof h=="boolean"&&(k=h,h=arguments[1]||{},i=2),typeof h!="object"&&!p.isFunction(h)&&(h={}),j===i&&(h=this,--i);for(;i0)return;d.resolveWith(e,[p]),p.fn.trigger&&p(e).trigger("ready").off("ready")},isFunction:function(a){return p.type(a)==="function"},isArray:Array.isArray||function(a){return p.type(a)==="array"},isWindow:function(a){return a!=null&&a==a.window},isNumeric:function(a){return!isNaN(parseFloat(a))&&isFinite(a)},type:function(a){return a==null?String(a):E[m.call(a)]||"object"},isPlainObject:function(a){if(!a||p.type(a)!=="object"||a.nodeType||p.isWindow(a))return!1;try{if(a.constructor&&!n.call(a,"constructor")&&!n.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}var d;for(d in a);return d===b||n.call(a,d)},isEmptyObject:function(a){var b;for(b in a)return!1;return!0},error:function(a){throw new Error(a)},parseHTML:function(a,b,c){var d;return!a||typeof a!="string"?null:(typeof b=="boolean"&&(c=b,b=0),b=b||e,(d=v.exec(a))?[b.createElement(d[1])]:(d=p.buildFragment([a],b,c?null:[]),p.merge([],(d.cacheable?p.clone(d.fragment):d.fragment).childNodes)))},parseJSON:function(b){if(!b||typeof b!="string")return null;b=p.trim(b);if(a.JSON&&a.JSON.parse)return a.JSON.parse(b);if(w.test(b.replace(y,"@").replace(z,"]").replace(x,"")))return(new Function("return "+b))();p.error("Invalid JSON: "+b)},parseXML:function(c){var d,e;if(!c||typeof c!="string")return null;try{a.DOMParser?(e=new DOMParser,d=e.parseFromString(c,"text/xml")):(d=new ActiveXObject("Microsoft.XMLDOM"),d.async="false",d.loadXML(c))}catch(f){d=b}return(!d||!d.documentElement||d.getElementsByTagName("parsererror").length)&&p.error("Invalid XML: 
"+c),d},noop:function(){},globalEval:function(b){b&&r.test(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(A,"ms-").replace(B,C)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toLowerCase()===b.toLowerCase()},each:function(a,c,d){var e,f=0,g=a.length,h=g===b||p.isFunction(a);if(d){if(h){for(e in a)if(c.apply(a[e],d)===!1)break}else for(;f0&&a[0]&&a[i-1]||i===0||p.isArray(a));if(j)for(;h-1)i.splice(c,1),e&&(c<=g&&g--,c<=h&&h--)}),this},has:function(a){return p.inArray(a,i)>-1},empty:function(){return i=[],this},disable:function(){return i=j=c=b,this},disabled:function(){return!i},lock:function(){return j=b,c||l.disable(),this},locked:function(){return!j},fireWith:function(a,b){return b=b||[],b=[a,b.slice?b.slice():b],i&&(!d||j)&&(e?j.push(b):k(b)),this},fire:function(){return l.fireWith(this,arguments),this},fired:function(){return!!d}};return l},p.extend({Deferred:function(a){var b=[["resolve","done",p.Callbacks("once memory"),"resolved"],["reject","fail",p.Callbacks("once memory"),"rejected"],["notify","progress",p.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return p.Deferred(function(c){p.each(b,function(b,d){var f=d[0],g=a[b];e[d[1]](p.isFunction(g)?function(){var a=g.apply(this,arguments);a&&p.isFunction(a.promise)?a.promise().done(c.resolve).fail(c.reject).progress(c.notify):c[f+"With"](this===e?c:this,[a])}:c[f])}),a=null}).promise()},promise:function(a){return a!=null?p.extend(a,d):d}},e={};return d.pipe=d.then,p.each(b,function(a,f){var g=f[2],h=f[3];d[f[1]]=g.add,h&&g.add(function(){c=h},b[a^1][2].disable,b[2][2].lock),e[f[0]]=g.fire,e[f[0]+"With"]=g.fireWith}),d.promise(e),a&&a.call(e,e),e},when:function(a){var b=0,c=k.call(arguments),d=c.length,e=d!==1||a&&p.isFunction(a.promise)?d:0,f=e===1?a:p.Deferred(),g=function(a,b,c){return function(d){b[a]=this,c[a]=arguments.length>1?k.call(arguments):d,c===h?f.notifyWith(b,c):--e||f.resolveWith(b,c)}},h,i,j;if(d>1){h=new Array(d),i=new Array(d),j=new Array(d);for(;b
              a",c=n.getElementsByTagName("*"),d=n.getElementsByTagName("a")[0],d.style.cssText="top:1px;float:left;opacity:.5";if(!c||!c.length)return{};f=e.createElement("select"),g=f.appendChild(e.createElement("option")),h=n.getElementsByTagName("input")[0],b={leadingWhitespace:n.firstChild.nodeType===3,tbody:!n.getElementsByTagName("tbody").length,htmlSerialize:!!n.getElementsByTagName("link").length,style:/top/.test(d.getAttribute("style")),hrefNormalized:d.getAttribute("href")==="/a",opacity:/^0.5/.test(d.style.opacity),cssFloat:!!d.style.cssFloat,checkOn:h.value==="on",optSelected:g.selected,getSetAttribute:n.className!=="t",enctype:!!e.createElement("form").enctype,html5Clone:e.createElement("nav").cloneNode(!0).outerHTML!=="<:nav>",boxModel:e.compatMode==="CSS1Compat",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},h.checked=!0,b.noCloneChecked=h.cloneNode(!0).checked,f.disabled=!0,b.optDisabled=!g.disabled;try{delete n.test}catch(o){b.deleteExpando=!1}!n.addEventListener&&n.attachEvent&&n.fireEvent&&(n.attachEvent("onclick",m=function(){b.noCloneEvent=!1}),n.cloneNode(!0).fireEvent("onclick"),n.detachEvent("onclick",m)),h=e.createElement("input"),h.value="t",h.setAttribute("type","radio"),b.radioValue=h.value==="t",h.setAttribute("checked","checked"),h.setAttribute("name","t"),n.appendChild(h),i=e.createDocumentFragment(),i.appendChild(n.lastChild),b.checkClone=i.cloneNode(!0).cloneNode(!0).lastChild.checked,b.appendChecked=h.checked,i.removeChild(h),i.appendChild(n);if(n.attachEvent)for(k in{submit:!0,change:!0,focusin:!0})j="on"+k,l=j in n,l||(n.setAttribute(j,"return;"),l=typeof n[j]=="function"),b[k+"Bubbles"]=l;return p(function(){var c,d,f,g,h="padding:0;margin:0;border:0;display:block;overflow:hidden;",i=e.getElementsByTagName("body")[0];if(!i)return;c=e.createElement("div"),c.style.cssText="visibility:hidden;border:0;width:0;height:0;position:static;top:0;margin-top:1px",i.insertBefore(c,i.firstChild),d=e.createElement("div"),c.appendChild(d),d.innerHTML="
              t
              ",f=d.getElementsByTagName("td"),f[0].style.cssText="padding:0;margin:0;border:0;display:none",l=f[0].offsetHeight===0,f[0].style.display="",f[1].style.display="none",b.reliableHiddenOffsets=l&&f[0].offsetHeight===0,d.innerHTML="",d.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",b.boxSizing=d.offsetWidth===4,b.doesNotIncludeMarginInBodyOffset=i.offsetTop!==1,a.getComputedStyle&&(b.pixelPosition=(a.getComputedStyle(d,null)||{}).top!=="1%",b.boxSizingReliable=(a.getComputedStyle(d,null)||{width:"4px"}).width==="4px",g=e.createElement("div"),g.style.cssText=d.style.cssText=h,g.style.marginRight=g.style.width="0",d.style.width="1px",d.appendChild(g),b.reliableMarginRight=!parseFloat((a.getComputedStyle(g,null)||{}).marginRight)),typeof d.style.zoom!="undefined"&&(d.innerHTML="",d.style.cssText=h+"width:1px;padding:1px;display:inline;zoom:1",b.inlineBlockNeedsLayout=d.offsetWidth===3,d.style.display="block",d.style.overflow="visible",d.innerHTML="
              ",d.firstChild.style.width="5px",b.shrinkWrapBlocks=d.offsetWidth!==3,c.style.zoom=1),i.removeChild(c),c=d=f=g=null}),i.removeChild(n),c=d=f=g=h=i=n=null,b}();var H=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,I=/([A-Z])/g;p.extend({cache:{},deletedIds:[],uuid:0,expando:"jQuery"+(p.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(a){return a=a.nodeType?p.cache[a[p.expando]]:a[p.expando],!!a&&!K(a)},data:function(a,c,d,e){if(!p.acceptData(a))return;var f,g,h=p.expando,i=typeof c=="string",j=a.nodeType,k=j?p.cache:a,l=j?a[h]:a[h]&&h;if((!l||!k[l]||!e&&!k[l].data)&&i&&d===b)return;l||(j?a[h]=l=p.deletedIds.pop()||p.guid++:l=h),k[l]||(k[l]={},j||(k[l].toJSON=p.noop));if(typeof c=="object"||typeof c=="function")e?k[l]=p.extend(k[l],c):k[l].data=p.extend(k[l].data,c);return f=k[l],e||(f.data||(f.data={}),f=f.data),d!==b&&(f[p.camelCase(c)]=d),i?(g=f[c],g==null&&(g=f[p.camelCase(c)])):g=f,g},removeData:function(a,b,c){if(!p.acceptData(a))return;var d,e,f,g=a.nodeType,h=g?p.cache:a,i=g?a[p.expando]:p.expando;if(!h[i])return;if(b){d=c?h[i]:h[i].data;if(d){p.isArray(b)||(b in d?b=[b]:(b=p.camelCase(b),b in d?b=[b]:b=b.split(" ")));for(e=0,f=b.length;e1,null,!1))},removeData:function(a){return this.each(function(){p.removeData(this,a)})}}),p.extend({queue:function(a,b,c){var d;if(a)return b=(b||"fx")+"queue",d=p._data(a,b),c&&(!d||p.isArray(c)?d=p._data(a,b,p.makeArray(c)):d.push(c)),d||[]},dequeue:function(a,b){b=b||"fx";var c=p.queue(a,b),d=c.length,e=c.shift(),f=p._queueHooks(a,b),g=function(){p.dequeue(a,b)};e==="inprogress"&&(e=c.shift(),d--),e&&(b==="fx"&&c.unshift("inprogress"),delete f.stop,e.call(a,g,f)),!d&&f&&f.empty.fire()},_queueHooks:function(a,b){var c=b+"queueHooks";return p._data(a,c)||p._data(a,c,{empty:p.Callbacks("once memory").add(function(){p.removeData(a,b+"queue",!0),p.removeData(a,c,!0)})})}}),p.fn.extend({queue:function(a,c){var d=2;return typeof a!="string"&&(c=a,a="fx",d--),arguments.length1)},removeAttr:function(a){return this.each(function(){p.removeAttr(this,a)})},prop:function(a,b){return p.access(this,p.prop,a,b,arguments.length>1)},removeProp:function(a){return a=p.propFix[a]||a,this.each(function(){try{this[a]=b,delete this[a]}catch(c){}})},addClass:function(a){var b,c,d,e,f,g,h;if(p.isFunction(a))return this.each(function(b){p(this).addClass(a.call(this,b,this.className))});if(a&&typeof a=="string"){b=a.split(s);for(c=0,d=this.length;c=0)d=d.replace(" "+c[f]+" "," ");e.className=a?p.trim(d):""}}}return this},toggleClass:function(a,b){var c=typeof a,d=typeof b=="boolean";return p.isFunction(a)?this.each(function(c){p(this).toggleClass(a.call(this,c,this.className,b),b)}):this.each(function(){if(c==="string"){var e,f=0,g=p(this),h=b,i=a.split(s);while(e=i[f++])h=d?h:!g.hasClass(e),g[h?"addClass":"removeClass"](e)}else if(c==="undefined"||c==="boolean")this.className&&p._data(this,"__className__",this.className),this.className=this.className||a===!1?"":p._data(this,"__className__")||""})},hasClass:function(a){var b=" "+a+" ",c=0,d=this.length;for(;c=0)return!0;return!1},val:function(a){var c,d,e,f=this[0];if(!arguments.length){if(f)return c=p.valHooks[f.type]||p.valHooks[f.nodeName.toLowerCase()],c&&"get"in c&&(d=c.get(f,"value"))!==b?d:(d=f.value,typeof d=="string"?d.replace(P,""):d==null?"":d);return}return e=p.isFunction(a),this.each(function(d){var f,g=p(this);if(this.nodeType!==1)return;e?f=a.call(this,d,g.val()):f=a,f==null?f="":typeof 
f=="number"?f+="":p.isArray(f)&&(f=p.map(f,function(a){return a==null?"":a+""})),c=p.valHooks[this.type]||p.valHooks[this.nodeName.toLowerCase()];if(!c||!("set"in c)||c.set(this,f,"value")===b)this.value=f})}}),p.extend({valHooks:{option:{get:function(a){var b=a.attributes.value;return!b||b.specified?a.value:a.text}},select:{get:function(a){var b,c,d,e,f=a.selectedIndex,g=[],h=a.options,i=a.type==="select-one";if(f<0)return null;c=i?f:0,d=i?f+1:h.length;for(;c=0}),c.length||(a.selectedIndex=-1),c}}},attrFn:{},attr:function(a,c,d,e){var f,g,h,i=a.nodeType;if(!a||i===3||i===8||i===2)return;if(e&&p.isFunction(p.fn[c]))return p(a)[c](d);if(typeof a.getAttribute=="undefined")return p.prop(a,c,d);h=i!==1||!p.isXMLDoc(a),h&&(c=c.toLowerCase(),g=p.attrHooks[c]||(T.test(c)?M:L));if(d!==b){if(d===null){p.removeAttr(a,c);return}return g&&"set"in g&&h&&(f=g.set(a,d,c))!==b?f:(a.setAttribute(c,d+""),d)}return g&&"get"in g&&h&&(f=g.get(a,c))!==null?f:(f=a.getAttribute(c),f===null?b:f)},removeAttr:function(a,b){var c,d,e,f,g=0;if(b&&a.nodeType===1){d=b.split(s);for(;g=0}})});var V=/^(?:textarea|input|select)$/i,W=/^([^\.]*|)(?:\.(.+)|)$/,X=/(?:^|\s)hover(\.\S+|)\b/,Y=/^key/,Z=/^(?:mouse|contextmenu)|click/,$=/^(?:focusinfocus|focusoutblur)$/,_=function(a){return p.event.special.hover?a:a.replace(X,"mouseenter$1 mouseleave$1")};p.event={add:function(a,c,d,e,f){var g,h,i,j,k,l,m,n,o,q,r;if(a.nodeType===3||a.nodeType===8||!c||!d||!(g=p._data(a)))return;d.handler&&(o=d,d=o.handler,f=o.selector),d.guid||(d.guid=p.guid++),i=g.events,i||(g.events=i={}),h=g.handle,h||(g.handle=h=function(a){return typeof p!="undefined"&&(!a||p.event.triggered!==a.type)?p.event.dispatch.apply(h.elem,arguments):b},h.elem=a),c=p.trim(_(c)).split(" ");for(j=0;j=0&&(s=s.slice(0,-1),i=!0),s.indexOf(".")>=0&&(t=s.split("."),s=t.shift(),t.sort());if((!f||p.event.customEvent[s])&&!p.event.global[s])return;c=typeof c=="object"?c[p.expando]?c:new p.Event(s,c):new p.Event(s),c.type=s,c.isTrigger=!0,c.exclusive=i,c.namespace=t.join("."),c.namespace_re=c.namespace?new RegExp("(^|\\.)"+t.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,m=s.indexOf(":")<0?"on"+s:"";if(!f){h=p.cache;for(j in h)h[j].events&&h[j].events[s]&&p.event.trigger(c,d,h[j].handle.elem,!0);return}c.result=b,c.target||(c.target=f),d=d!=null?p.makeArray(d):[],d.unshift(c),n=p.event.special[s]||{};if(n.trigger&&n.trigger.apply(f,d)===!1)return;q=[[f,n.bindType||s]];if(!g&&!n.noBubble&&!p.isWindow(f)){r=n.delegateType||s,k=$.test(r+s)?f:f.parentNode;for(l=f;k;k=k.parentNode)q.push([k,r]),l=k;l===(f.ownerDocument||e)&&q.push([l.defaultView||l.parentWindow||a,r])}for(j=0;j=0:p.find(m,this,null,[f]).length),h[m]&&j.push(l);j.length&&u.push({elem:f,matches:j})}o.length>q&&u.push({elem:this,matches:o.slice(q)});for(d=0;d0?this.on(b,null,a,c):this.trigger(b)},Y.test(b)&&(p.event.fixHooks[b]=p.event.keyHooks),Z.test(b)&&(p.event.fixHooks[b]=p.event.mouseHooks)}),function(a,b){function bc(a,b,c,d){c=c||[],b=b||r;var e,f,i,j,k=b.nodeType;if(!a||typeof a!="string")return c;if(k!==1&&k!==9)return[];i=g(b);if(!i&&!d)if(e=P.exec(a))if(j=e[1]){if(k===9){f=b.getElementById(j);if(!f||!f.parentNode)return c;if(f.id===j)return c.push(f),c}else if(b.ownerDocument&&(f=b.ownerDocument.getElementById(j))&&h(b,f)&&f.id===j)return c.push(f),c}else{if(e[2])return w.apply(c,x.call(b.getElementsByTagName(a),0)),c;if((j=e[3])&&_&&b.getElementsByClassName)return w.apply(c,x.call(b.getElementsByClassName(j),0)),c}return bp(a.replace(L,"$1"),b,c,d,i)}function bd(a){return function(b){var 
c=b.nodeName.toLowerCase();return c==="input"&&b.type===a}}function be(a){return function(b){var c=b.nodeName.toLowerCase();return(c==="input"||c==="button")&&b.type===a}}function bf(a){return z(function(b){return b=+b,z(function(c,d){var e,f=a([],c.length,b),g=f.length;while(g--)c[e=f[g]]&&(c[e]=!(d[e]=c[e]))})})}function bg(a,b,c){if(a===b)return c;var d=a.nextSibling;while(d){if(d===b)return-1;d=d.nextSibling}return 1}function bh(a,b){var c,d,f,g,h,i,j,k=C[o][a];if(k)return b?0:k.slice(0);h=a,i=[],j=e.preFilter;while(h){if(!c||(d=M.exec(h)))d&&(h=h.slice(d[0].length)),i.push(f=[]);c=!1;if(d=N.exec(h))f.push(c=new q(d.shift())),h=h.slice(c.length),c.type=d[0].replace(L," ");for(g in e.filter)(d=W[g].exec(h))&&(!j[g]||(d=j[g](d,r,!0)))&&(f.push(c=new q(d.shift())),h=h.slice(c.length),c.type=g,c.matches=d);if(!c)break}return b?h.length:h?bc.error(a):C(a,i).slice(0)}function bi(a,b,d){var e=b.dir,f=d&&b.dir==="parentNode",g=u++;return b.first?function(b,c,d){while(b=b[e])if(f||b.nodeType===1)return a(b,c,d)}:function(b,d,h){if(!h){var i,j=t+" "+g+" ",k=j+c;while(b=b[e])if(f||b.nodeType===1){if((i=b[o])===k)return b.sizset;if(typeof i=="string"&&i.indexOf(j)===0){if(b.sizset)return b}else{b[o]=k;if(a(b,d,h))return b.sizset=!0,b;b.sizset=!1}}}else while(b=b[e])if(f||b.nodeType===1)if(a(b,d,h))return b}}function bj(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function bk(a,b,c,d,e){var f,g=[],h=0,i=a.length,j=b!=null;for(;h-1},h,!0),m=[function(a,c,d){return!g&&(d||c!==l)||((b=c).nodeType?j(a,c,d):k(a,c,d))}];for(;i1&&bj(m),i>1&&a.slice(0,i-1).join("").replace(L,"$1"),c,i0,f=a.length>0,g=function(h,i,j,k,m){var n,o,p,q=[],s=0,u="0",x=h&&[],y=m!=null,z=l,A=h||f&&e.find.TAG("*",m&&i.parentNode||i),B=t+=z==null?1:Math.E;y&&(l=i!==r&&i,c=g.el);for(;(n=A[u])!=null;u++){if(f&&n){for(o=0;p=a[o];o++)if(p(n,i,j)){k.push(n);break}y&&(t=B,c=++g.el)}d&&((n=!p&&n)&&s--,h&&x.push(n))}s+=u;if(d&&u!==s){for(o=0;p=b[o];o++)p(x,q,i,j);if(h){if(s>0)while(u--)!x[u]&&!q[u]&&(q[u]=v.call(k));q=bk(q)}w.apply(k,q),y&&!h&&q.length>0&&s+b.length>1&&bc.uniqueSort(k)}return y&&(t=B,l=z),x};return g.el=0,d?z(g):g}function bo(a,b,c,d){var e=0,f=b.length;for(;e2&&(j=h[0]).type==="ID"&&b.nodeType===9&&!f&&e.relative[h[1].type]){b=e.find.ID(j.matches[0].replace(V,""),b,f)[0];if(!b)return c;a=a.slice(h.shift().length)}for(g=W.POS.test(a)?-1:h.length-1;g>=0;g--){j=h[g];if(e.relative[k=j.type])break;if(l=e.find[k])if(d=l(j.matches[0].replace(V,""),R.test(h[0].type)&&b.parentNode||b,f)){h.splice(g,1),a=d.length&&h.join("");if(!a)return w.apply(c,x.call(d,0)),c;break}}}return i(a,m)(d,b,f,c,R.test(a)),c}function bq(){}var c,d,e,f,g,h,i,j,k,l,m=!0,n="undefined",o=("sizcache"+Math.random()).replace(".",""),q=String,r=a.document,s=r.documentElement,t=0,u=0,v=[].pop,w=[].push,x=[].slice,y=[].indexOf||function(a){var b=0,c=this.length;for(;be.cacheLength&&delete a[b.shift()],a[c]=d},a)},B=A(),C=A(),D=A(),E="[\\x20\\t\\r\\n\\f]",F="(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+",G=F.replace("w","w#"),H="([*^$|!~]?=)",I="\\["+E+"*("+F+")"+E+"*(?:"+H+E+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+G+")|)|)"+E+"*\\]",J=":("+F+")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:"+I+")|[^:]|\\\\.)*|.*))\\)|)",K=":(even|odd|eq|gt|lt|nth|first|last)(?:\\("+E+"*((?:-\\d)?\\d*)"+E+"*\\)|)(?=[^-]|$)",L=new RegExp("^"+E+"+|((?:^|[^\\\\])(?:\\\\.)*)"+E+"+$","g"),M=new RegExp("^"+E+"*,"+E+"*"),N=new RegExp("^"+E+"*([\\x20\\t\\r\\n\\f>+~])"+E+"*"),O=new 
RegExp(J),P=/^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/,Q=/^:not/,R=/[\x20\t\r\n\f]*[+~]/,S=/:not\($/,T=/h\d/i,U=/input|select|textarea|button/i,V=/\\(?!\\)/g,W={ID:new RegExp("^#("+F+")"),CLASS:new RegExp("^\\.("+F+")"),NAME:new RegExp("^\\[name=['\"]?("+F+")['\"]?\\]"),TAG:new RegExp("^("+F.replace("w","w*")+")"),ATTR:new RegExp("^"+I),PSEUDO:new RegExp("^"+J),POS:new RegExp(K,"i"),CHILD:new RegExp("^:(only|nth|first|last)-child(?:\\("+E+"*(even|odd|(([+-]|)(\\d*)n|)"+E+"*(?:([+-]|)"+E+"*(\\d+)|))"+E+"*\\)|)","i"),needsContext:new RegExp("^"+E+"*[>+~]|"+K,"i")},X=function(a){var b=r.createElement("div");try{return a(b)}catch(c){return!1}finally{b=null}},Y=X(function(a){return a.appendChild(r.createComment("")),!a.getElementsByTagName("*").length}),Z=X(function(a){return a.innerHTML="",a.firstChild&&typeof a.firstChild.getAttribute!==n&&a.firstChild.getAttribute("href")==="#"}),$=X(function(a){a.innerHTML="";var b=typeof a.lastChild.getAttribute("multiple");return b!=="boolean"&&b!=="string"}),_=X(function(a){return a.innerHTML="",!a.getElementsByClassName||!a.getElementsByClassName("e").length?!1:(a.lastChild.className="e",a.getElementsByClassName("e").length===2)}),ba=X(function(a){a.id=o+0,a.innerHTML="
              ",s.insertBefore(a,s.firstChild);var b=r.getElementsByName&&r.getElementsByName(o).length===2+r.getElementsByName(o+0).length;return d=!r.getElementById(o),s.removeChild(a),b});try{x.call(s.childNodes,0)[0].nodeType}catch(bb){x=function(a){var b,c=[];for(;b=this[a];a++)c.push(b);return c}}bc.matches=function(a,b){return bc(a,null,null,b)},bc.matchesSelector=function(a,b){return bc(b,null,null,[a]).length>0},f=bc.getText=function(a){var b,c="",d=0,e=a.nodeType;if(e){if(e===1||e===9||e===11){if(typeof a.textContent=="string")return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=f(a)}else if(e===3||e===4)return a.nodeValue}else for(;b=a[d];d++)c+=f(b);return c},g=bc.isXML=function(a){var b=a&&(a.ownerDocument||a).documentElement;return b?b.nodeName!=="HTML":!1},h=bc.contains=s.contains?function(a,b){var c=a.nodeType===9?a.documentElement:a,d=b&&b.parentNode;return a===d||!!(d&&d.nodeType===1&&c.contains&&c.contains(d))}:s.compareDocumentPosition?function(a,b){return b&&!!(a.compareDocumentPosition(b)&16)}:function(a,b){while(b=b.parentNode)if(b===a)return!0;return!1},bc.attr=function(a,b){var c,d=g(a);return d||(b=b.toLowerCase()),(c=e.attrHandle[b])?c(a):d||$?a.getAttribute(b):(c=a.getAttributeNode(b),c?typeof a[b]=="boolean"?a[b]?b:null:c.specified?c.value:null:null)},e=bc.selectors={cacheLength:50,createPseudo:z,match:W,attrHandle:Z?{}:{href:function(a){return a.getAttribute("href",2)},type:function(a){return a.getAttribute("type")}},find:{ID:d?function(a,b,c){if(typeof b.getElementById!==n&&!c){var d=b.getElementById(a);return d&&d.parentNode?[d]:[]}}:function(a,c,d){if(typeof c.getElementById!==n&&!d){var e=c.getElementById(a);return e?e.id===a||typeof e.getAttributeNode!==n&&e.getAttributeNode("id").value===a?[e]:b:[]}},TAG:Y?function(a,b){if(typeof b.getElementsByTagName!==n)return b.getElementsByTagName(a)}:function(a,b){var c=b.getElementsByTagName(a);if(a==="*"){var d,e=[],f=0;for(;d=c[f];f++)d.nodeType===1&&e.push(d);return e}return c},NAME:ba&&function(a,b){if(typeof b.getElementsByName!==n)return b.getElementsByName(name)},CLASS:_&&function(a,b,c){if(typeof b.getElementsByClassName!==n&&!c)return b.getElementsByClassName(a)}},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(V,""),a[3]=(a[4]||a[5]||"").replace(V,""),a[2]==="~="&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return a[1]=a[1].toLowerCase(),a[1]==="nth"?(a[2]||bc.error(a[0]),a[3]=+(a[3]?a[4]+(a[5]||1):2*(a[2]==="even"||a[2]==="odd")),a[4]=+(a[6]+a[7]||a[2]==="odd")):a[2]&&bc.error(a[0]),a},PSEUDO:function(a){var b,c;if(W.CHILD.test(a[0]))return null;if(a[3])a[2]=a[3];else if(b=a[4])O.test(b)&&(c=bh(b,!0))&&(c=b.indexOf(")",b.length-c)-b.length)&&(b=b.slice(0,c),a[0]=a[0].slice(0,c)),a[2]=b;return a.slice(0,3)}},filter:{ID:d?function(a){return a=a.replace(V,""),function(b){return b.getAttribute("id")===a}}:function(a){return a=a.replace(V,""),function(b){var c=typeof b.getAttributeNode!==n&&b.getAttributeNode("id");return c&&c.value===a}},TAG:function(a){return a==="*"?function(){return!0}:(a=a.replace(V,"").toLowerCase(),function(b){return b.nodeName&&b.nodeName.toLowerCase()===a})},CLASS:function(a){var b=B[o][a];return b||(b=B(a,new RegExp("(^|"+E+")"+a+"("+E+"|$)"))),function(a){return b.test(a.className||typeof a.getAttribute!==n&&a.getAttribute("class")||"")}},ATTR:function(a,b,c){return function(d,e){var f=bc.attr(d,a);return 
f==null?b==="!=":b?(f+="",b==="="?f===c:b==="!="?f!==c:b==="^="?c&&f.indexOf(c)===0:b==="*="?c&&f.indexOf(c)>-1:b==="$="?c&&f.substr(f.length-c.length)===c:b==="~="?(" "+f+" ").indexOf(c)>-1:b==="|="?f===c||f.substr(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d){return a==="nth"?function(a){var b,e,f=a.parentNode;if(c===1&&d===0)return!0;if(f){e=0;for(b=f.firstChild;b;b=b.nextSibling)if(b.nodeType===1){e++;if(a===b)break}}return e-=d,e===c||e%c===0&&e/c>=0}:function(b){var c=b;switch(a){case"only":case"first":while(c=c.previousSibling)if(c.nodeType===1)return!1;if(a==="first")return!0;c=b;case"last":while(c=c.nextSibling)if(c.nodeType===1)return!1;return!0}}},PSEUDO:function(a,b){var c,d=e.pseudos[a]||e.setFilters[a.toLowerCase()]||bc.error("unsupported pseudo: "+a);return d[o]?d(b):d.length>1?(c=[a,a,"",b],e.setFilters.hasOwnProperty(a.toLowerCase())?z(function(a,c){var e,f=d(a,b),g=f.length;while(g--)e=y.call(a,f[g]),a[e]=!(c[e]=f[g])}):function(a){return d(a,0,c)}):d}},pseudos:{not:z(function(a){var b=[],c=[],d=i(a.replace(L,"$1"));return d[o]?z(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)if(f=g[h])a[h]=!(b[h]=f)}):function(a,e,f){return b[0]=a,d(b,null,f,c),!c.pop()}}),has:z(function(a){return function(b){return bc(a,b).length>0}}),contains:z(function(a){return function(b){return(b.textContent||b.innerText||f(b)).indexOf(a)>-1}}),enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&!!a.checked||b==="option"&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},parent:function(a){return!e.pseudos.empty(a)},empty:function(a){var b;a=a.firstChild;while(a){if(a.nodeName>"@"||(b=a.nodeType)===3||b===4)return!1;a=a.nextSibling}return!0},header:function(a){return T.test(a.nodeName)},text:function(a){var b,c;return a.nodeName.toLowerCase()==="input"&&(b=a.type)==="text"&&((c=a.getAttribute("type"))==null||c.toLowerCase()===b)},radio:bd("radio"),checkbox:bd("checkbox"),file:bd("file"),password:bd("password"),image:bd("image"),submit:be("submit"),reset:be("reset"),button:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&a.type==="button"||b==="button"},input:function(a){return U.test(a.nodeName)},focus:function(a){var b=a.ownerDocument;return a===b.activeElement&&(!b.hasFocus||b.hasFocus())&&(!!a.type||!!a.href)},active:function(a){return a===a.ownerDocument.activeElement},first:bf(function(a,b,c){return[0]}),last:bf(function(a,b,c){return[b-1]}),eq:bf(function(a,b,c){return[c<0?c+b:c]}),even:bf(function(a,b,c){for(var d=0;d=0;)a.push(d);return a}),gt:bf(function(a,b,c){for(var d=c<0?c+b:c;++d",a.querySelectorAll("[selected]").length||e.push("\\["+E+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),a.querySelectorAll(":checked").length||e.push(":checked")}),X(function(a){a.innerHTML="

              ",a.querySelectorAll("[test^='']").length&&e.push("[*^$]="+E+"*(?:\"\"|'')"),a.innerHTML="",a.querySelectorAll(":enabled").length||e.push(":enabled",":disabled")}),e=new RegExp(e.join("|")),bp=function(a,d,f,g,h){if(!g&&!h&&(!e||!e.test(a))){var i,j,k=!0,l=o,m=d,n=d.nodeType===9&&a;if(d.nodeType===1&&d.nodeName.toLowerCase()!=="object"){i=bh(a),(k=d.getAttribute("id"))?l=k.replace(c,"\\$&"):d.setAttribute("id",l),l="[id='"+l+"'] ",j=i.length;while(j--)i[j]=l+i[j].join("");m=R.test(a)&&d.parentNode||d,n=i.join(",")}if(n)try{return w.apply(f,x.call(m.querySelectorAll(n),0)),f}catch(p){}finally{k||d.removeAttribute("id")}}return b(a,d,f,g,h)},h&&(X(function(b){a=h.call(b,"div");try{h.call(b,"[test!='']:sizzle"),f.push("!=",J)}catch(c){}}),f=new RegExp(f.join("|")),bc.matchesSelector=function(b,c){c=c.replace(d,"='$1']");if(!g(b)&&!f.test(c)&&(!e||!e.test(c)))try{var i=h.call(b,c);if(i||a||b.document&&b.document.nodeType!==11)return i}catch(j){}return bc(c,null,null,[b]).length>0})}(),e.pseudos.nth=e.pseudos.eq,e.filters=bq.prototype=e.pseudos,e.setFilters=new bq,bc.attr=p.attr,p.find=bc,p.expr=bc.selectors,p.expr[":"]=p.expr.pseudos,p.unique=bc.uniqueSort,p.text=bc.getText,p.isXMLDoc=bc.isXML,p.contains=bc.contains}(a);var bc=/Until$/,bd=/^(?:parents|prev(?:Until|All))/,be=/^.[^:#\[\.,]*$/,bf=p.expr.match.needsContext,bg={children:!0,contents:!0,next:!0,prev:!0};p.fn.extend({find:function(a){var b,c,d,e,f,g,h=this;if(typeof a!="string")return p(a).filter(function(){for(b=0,c=h.length;b0)for(e=d;e=0:p.filter(a,this).length>0:this.filter(a).length>0)},closest:function(a,b){var c,d=0,e=this.length,f=[],g=bf.test(a)||typeof a!="string"?p(a,b||this.context):0;for(;d-1:p.find.matchesSelector(c,a)){f.push(c);break}c=c.parentNode}}return f=f.length>1?p.unique(f):f,this.pushStack(f,"closest",a)},index:function(a){return a?typeof a=="string"?p.inArray(this[0],p(a)):p.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.prevAll().length:-1},add:function(a,b){var c=typeof a=="string"?p(a,b):p.makeArray(a&&a.nodeType?[a]:a),d=p.merge(this.get(),c);return this.pushStack(bh(c[0])||bh(d[0])?d:p.unique(d))},addBack:function(a){return this.add(a==null?this.prevObject:this.prevObject.filter(a))}}),p.fn.andSelf=p.fn.addBack,p.each({parent:function(a){var b=a.parentNode;return b&&b.nodeType!==11?b:null},parents:function(a){return p.dir(a,"parentNode")},parentsUntil:function(a,b,c){return p.dir(a,"parentNode",c)},next:function(a){return bi(a,"nextSibling")},prev:function(a){return bi(a,"previousSibling")},nextAll:function(a){return p.dir(a,"nextSibling")},prevAll:function(a){return p.dir(a,"previousSibling")},nextUntil:function(a,b,c){return p.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return p.dir(a,"previousSibling",c)},siblings:function(a){return p.sibling((a.parentNode||{}).firstChild,a)},children:function(a){return p.sibling(a.firstChild)},contents:function(a){return p.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:p.merge([],a.childNodes)}},function(a,b){p.fn[a]=function(c,d){var e=p.map(this,b,c);return bc.test(a)||(d=c),d&&typeof d=="string"&&(e=p.filter(d,e)),e=this.length>1&&!bg[a]?p.unique(e):e,this.length>1&&bd.test(a)&&(e=e.reverse()),this.pushStack(e,a,k.call(arguments).join(","))}}),p.extend({filter:function(a,b,c){return c&&(a=":not("+a+")"),b.length===1?p.find.matchesSelector(b[0],a)?[b[0]]:[]:p.find.matches(a,b)},dir:function(a,c,d){var e=[],f=a[c];while(f&&f.nodeType!==9&&(d===b||f.nodeType!==1||!p(f).is(d)))f.nodeType===1&&e.push(f),f=f[c];return 
e},sibling:function(a,b){var c=[];for(;a;a=a.nextSibling)a.nodeType===1&&a!==b&&c.push(a);return c}});var bl="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",bm=/ jQuery\d+="(?:null|\d+)"/g,bn=/^\s+/,bo=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,bp=/<([\w:]+)/,bq=/]","i"),bv=/^(?:checkbox|radio)$/,bw=/checked\s*(?:[^=]|=\s*.checked.)/i,bx=/\/(java|ecma)script/i,by=/^\s*\s*$/g,bz={option:[1,""],legend:[1,"
              ","
              "],thead:[1,"","
              "],tr:[2,"","
              "],td:[3,"","
              "],col:[2,"","
              "],area:[1,"",""],_default:[0,"",""]},bA=bk(e),bB=bA.appendChild(e.createElement("div"));bz.optgroup=bz.option,bz.tbody=bz.tfoot=bz.colgroup=bz.caption=bz.thead,bz.th=bz.td,p.support.htmlSerialize||(bz._default=[1,"X
              ","
              "]),p.fn.extend({text:function(a){return p.access(this,function(a){return a===b?p.text(this):this.empty().append((this[0]&&this[0].ownerDocument||e).createTextNode(a))},null,a,arguments.length)},wrapAll:function(a){if(p.isFunction(a))return this.each(function(b){p(this).wrapAll(a.call(this,b))});if(this[0]){var b=p(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&a.firstChild.nodeType===1)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){return p.isFunction(a)?this.each(function(b){p(this).wrapInner(a.call(this,b))}):this.each(function(){var b=p(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=p.isFunction(a);return this.each(function(c){p(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){p.nodeName(this,"body")||p(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(a){(this.nodeType===1||this.nodeType===11)&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,!0,function(a){(this.nodeType===1||this.nodeType===11)&&this.insertBefore(a,this.firstChild)})},before:function(){if(!bh(this[0]))return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this)});if(arguments.length){var a=p.clean(arguments);return this.pushStack(p.merge(a,this),"before",this.selector)}},after:function(){if(!bh(this[0]))return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this.nextSibling)});if(arguments.length){var a=p.clean(arguments);return this.pushStack(p.merge(this,a),"after",this.selector)}},remove:function(a,b){var c,d=0;for(;(c=this[d])!=null;d++)if(!a||p.filter(a,[c]).length)!b&&c.nodeType===1&&(p.cleanData(c.getElementsByTagName("*")),p.cleanData([c])),c.parentNode&&c.parentNode.removeChild(c);return this},empty:function(){var a,b=0;for(;(a=this[b])!=null;b++){a.nodeType===1&&p.cleanData(a.getElementsByTagName("*"));while(a.firstChild)a.removeChild(a.firstChild)}return this},clone:function(a,b){return a=a==null?!1:a,b=b==null?a:b,this.map(function(){return p.clone(this,a,b)})},html:function(a){return p.access(this,function(a){var c=this[0]||{},d=0,e=this.length;if(a===b)return c.nodeType===1?c.innerHTML.replace(bm,""):b;if(typeof a=="string"&&!bs.test(a)&&(p.support.htmlSerialize||!bu.test(a))&&(p.support.leadingWhitespace||!bn.test(a))&&!bz[(bp.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(bo,"<$1>");try{for(;d1&&typeof j=="string"&&bw.test(j))return this.each(function(){p(this).domManip(a,c,d)});if(p.isFunction(j))return this.each(function(e){var f=p(this);a[0]=j.call(this,e,c?f.html():b),f.domManip(a,c,d)});if(this[0]){e=p.buildFragment(a,this,k),g=e.fragment,f=g.firstChild,g.childNodes.length===1&&(g=f);if(f){c=c&&p.nodeName(f,"tr");for(h=e.cacheable||l-1;i0?this.clone(!0):this).get(),p(g[e])[b](d),f=f.concat(d);return this.pushStack(f,a,g.selector)}}),p.extend({clone:function(a,b,c){var d,e,f,g;p.support.html5Clone||p.isXMLDoc(a)||!bu.test("<"+a.nodeName+">")?g=a.cloneNode(!0):(bB.innerHTML=a.outerHTML,bB.removeChild(g=bB.firstChild));if((!p.support.noCloneEvent||!p.support.noCloneChecked)&&(a.nodeType===1||a.nodeType===11)&&!p.isXMLDoc(a)){bE(a,g),d=bF(a),e=bF(g);for(f=0;d[f];++f)e[f]&&bE(d[f],e[f])}if(b){bD(a,g);if(c){d=bF(a),e=bF(g);for(f=0;d[f];++f)bD(d[f],e[f])}}return d=e=null,g},clean:function(a,b,c,d){var f,g,h,i,j,k,l,m,n,o,q,r,s=b===e&&bA,t=[];if(!b||typeof 
b.createDocumentFragment=="undefined")b=e;for(f=0;(h=a[f])!=null;f++){typeof h=="number"&&(h+="");if(!h)continue;if(typeof h=="string")if(!br.test(h))h=b.createTextNode(h);else{s=s||bk(b),l=b.createElement("div"),s.appendChild(l),h=h.replace(bo,"<$1>"),i=(bp.exec(h)||["",""])[1].toLowerCase(),j=bz[i]||bz._default,k=j[0],l.innerHTML=j[1]+h+j[2];while(k--)l=l.lastChild;if(!p.support.tbody){m=bq.test(h),n=i==="table"&&!m?l.firstChild&&l.firstChild.childNodes:j[1]===""&&!m?l.childNodes:[];for(g=n.length-1;g>=0;--g)p.nodeName(n[g],"tbody")&&!n[g].childNodes.length&&n[g].parentNode.removeChild(n[g])}!p.support.leadingWhitespace&&bn.test(h)&&l.insertBefore(b.createTextNode(bn.exec(h)[0]),l.firstChild),h=l.childNodes,l.parentNode.removeChild(l)}h.nodeType?t.push(h):p.merge(t,h)}l&&(h=l=s=null);if(!p.support.appendChecked)for(f=0;(h=t[f])!=null;f++)p.nodeName(h,"input")?bG(h):typeof h.getElementsByTagName!="undefined"&&p.grep(h.getElementsByTagName("input"),bG);if(c){q=function(a){if(!a.type||bx.test(a.type))return d?d.push(a.parentNode?a.parentNode.removeChild(a):a):c.appendChild(a)};for(f=0;(h=t[f])!=null;f++)if(!p.nodeName(h,"script")||!q(h))c.appendChild(h),typeof h.getElementsByTagName!="undefined"&&(r=p.grep(p.merge([],h.getElementsByTagName("script")),q),t.splice.apply(t,[f+1,0].concat(r)),f+=r.length)}return t},cleanData:function(a,b){var c,d,e,f,g=0,h=p.expando,i=p.cache,j=p.support.deleteExpando,k=p.event.special;for(;(e=a[g])!=null;g++)if(b||p.acceptData(e)){d=e[h],c=d&&i[d];if(c){if(c.events)for(f in c.events)k[f]?p.event.remove(e,f):p.removeEvent(e,f,c.handle);i[d]&&(delete i[d],j?delete e[h]:e.removeAttribute?e.removeAttribute(h):e[h]=null,p.deletedIds.push(d))}}}}),function(){var a,b;p.uaMatch=function(a){a=a.toLowerCase();var b=/(chrome)[ \/]([\w.]+)/.exec(a)||/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||a.indexOf("compatible")<0&&/(mozilla)(?:.*? 
rv:([\w.]+)|)/.exec(a)||[];return{browser:b[1]||"",version:b[2]||"0"}},a=p.uaMatch(g.userAgent),b={},a.browser&&(b[a.browser]=!0,b.version=a.version),b.chrome?b.webkit=!0:b.webkit&&(b.safari=!0),p.browser=b,p.sub=function(){function a(b,c){return new a.fn.init(b,c)}p.extend(!0,a,this),a.superclass=this,a.fn=a.prototype=this(),a.fn.constructor=a,a.sub=this.sub,a.fn.init=function c(c,d){return d&&d instanceof p&&!(d instanceof a)&&(d=a(d)),p.fn.init.call(this,c,d,b)},a.fn.init.prototype=a.fn;var b=a(e);return a}}();var bH,bI,bJ,bK=/alpha\([^)]*\)/i,bL=/opacity=([^)]*)/,bM=/^(top|right|bottom|left)$/,bN=/^(none|table(?!-c[ea]).+)/,bO=/^margin/,bP=new RegExp("^("+q+")(.*)$","i"),bQ=new RegExp("^("+q+")(?!px)[a-z%]+$","i"),bR=new RegExp("^([-+])=("+q+")","i"),bS={},bT={position:"absolute",visibility:"hidden",display:"block"},bU={letterSpacing:0,fontWeight:400},bV=["Top","Right","Bottom","Left"],bW=["Webkit","O","Moz","ms"],bX=p.fn.toggle;p.fn.extend({css:function(a,c){return p.access(this,function(a,c,d){return d!==b?p.style(a,c,d):p.css(a,c)},a,c,arguments.length>1)},show:function(){return b$(this,!0)},hide:function(){return b$(this)},toggle:function(a,b){var c=typeof a=="boolean";return p.isFunction(a)&&p.isFunction(b)?bX.apply(this,arguments):this.each(function(){(c?a:bZ(this))?p(this).show():p(this).hide()})}}),p.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=bH(a,"opacity");return c===""?"1":c}}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":p.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,c,d,e){if(!a||a.nodeType===3||a.nodeType===8||!a.style)return;var f,g,h,i=p.camelCase(c),j=a.style;c=p.cssProps[i]||(p.cssProps[i]=bY(j,i)),h=p.cssHooks[c]||p.cssHooks[i];if(d===b)return h&&"get"in h&&(f=h.get(a,!1,e))!==b?f:j[c];g=typeof d,g==="string"&&(f=bR.exec(d))&&(d=(f[1]+1)*f[2]+parseFloat(p.css(a,c)),g="number");if(d==null||g==="number"&&isNaN(d))return;g==="number"&&!p.cssNumber[i]&&(d+="px");if(!h||!("set"in h)||(d=h.set(a,d,e))!==b)try{j[c]=d}catch(k){}},css:function(a,c,d,e){var f,g,h,i=p.camelCase(c);return c=p.cssProps[i]||(p.cssProps[i]=bY(a.style,i)),h=p.cssHooks[c]||p.cssHooks[i],h&&"get"in h&&(f=h.get(a,!0,e)),f===b&&(f=bH(a,c)),f==="normal"&&c in bU&&(f=bU[c]),d||e!==b?(g=parseFloat(f),d||p.isNumeric(g)?g||0:f):f},swap:function(a,b,c){var d,e,f={};for(e in b)f[e]=a.style[e],a.style[e]=b[e];d=c.call(a);for(e in b)a.style[e]=f[e];return d}}),a.getComputedStyle?bH=function(b,c){var d,e,f,g,h=a.getComputedStyle(b,null),i=b.style;return h&&(d=h[c],d===""&&!p.contains(b.ownerDocument,b)&&(d=p.style(b,c)),bQ.test(d)&&bO.test(c)&&(e=i.width,f=i.minWidth,g=i.maxWidth,i.minWidth=i.maxWidth=i.width=d,d=h.width,i.width=e,i.minWidth=f,i.maxWidth=g)),d}:e.documentElement.currentStyle&&(bH=function(a,b){var c,d,e=a.currentStyle&&a.currentStyle[b],f=a.style;return e==null&&f&&f[b]&&(e=f[b]),bQ.test(e)&&!bM.test(b)&&(c=f.left,d=a.runtimeStyle&&a.runtimeStyle.left,d&&(a.runtimeStyle.left=a.currentStyle.left),f.left=b==="fontSize"?"1em":e,e=f.pixelLeft+"px",f.left=c,d&&(a.runtimeStyle.left=d)),e===""?"auto":e}),p.each(["height","width"],function(a,b){p.cssHooks[b]={get:function(a,c,d){if(c)return a.offsetWidth===0&&bN.test(bH(a,"display"))?p.swap(a,bT,function(){return cb(a,b,d)}):cb(a,b,d)},set:function(a,c,d){return b_(a,c,d?ca(a,b,d,p.support.boxSizing&&p.css(a,"boxSizing")==="border-box"):0)}}}),p.support.opacity||(p.cssHooks.opacity={get:function(a,b){return 
bL.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=p.isNumeric(b)?"alpha(opacity="+b*100+")":"",f=d&&d.filter||c.filter||"";c.zoom=1;if(b>=1&&p.trim(f.replace(bK,""))===""&&c.removeAttribute){c.removeAttribute("filter");if(d&&!d.filter)return}c.filter=bK.test(f)?f.replace(bK,e):f+" "+e}}),p(function(){p.support.reliableMarginRight||(p.cssHooks.marginRight={get:function(a,b){return p.swap(a,{display:"inline-block"},function(){if(b)return bH(a,"marginRight")})}}),!p.support.pixelPosition&&p.fn.position&&p.each(["top","left"],function(a,b){p.cssHooks[b]={get:function(a,c){if(c){var d=bH(a,b);return bQ.test(d)?p(a).position()[b]+"px":d}}}})}),p.expr&&p.expr.filters&&(p.expr.filters.hidden=function(a){return a.offsetWidth===0&&a.offsetHeight===0||!p.support.reliableHiddenOffsets&&(a.style&&a.style.display||bH(a,"display"))==="none"},p.expr.filters.visible=function(a){return!p.expr.filters.hidden(a)}),p.each({margin:"",padding:"",border:"Width"},function(a,b){p.cssHooks[a+b]={expand:function(c){var d,e=typeof c=="string"?c.split(" "):[c],f={};for(d=0;d<4;d++)f[a+bV[d]+b]=e[d]||e[d-2]||e[0];return f}},bO.test(a)||(p.cssHooks[a+b].set=b_)});var cd=/%20/g,ce=/\[\]$/,cf=/\r?\n/g,cg=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,ch=/^(?:select|textarea)/i;p.fn.extend({serialize:function(){return p.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?p.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ch.test(this.nodeName)||cg.test(this.type))}).map(function(a,b){var c=p(this).val();return c==null?null:p.isArray(c)?p.map(c,function(a,c){return{name:b.name,value:a.replace(cf,"\r\n")}}):{name:b.name,value:c.replace(cf,"\r\n")}}).get()}}),p.param=function(a,c){var d,e=[],f=function(a,b){b=p.isFunction(b)?b():b==null?"":b,e[e.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};c===b&&(c=p.ajaxSettings&&p.ajaxSettings.traditional);if(p.isArray(a)||a.jquery&&!p.isPlainObject(a))p.each(a,function(){f(this.name,this.value)});else for(d in a)ci(d,a[d],c,f);return e.join("&").replace(cd,"+")};var cj,ck,cl=/#.*$/,cm=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,cn=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,co=/^(?:GET|HEAD)$/,cp=/^\/\//,cq=/\?/,cr=/)<[^<]*)*<\/script>/gi,cs=/([?&])_=[^&]*/,ct=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,cu=p.fn.load,cv={},cw={},cx=["*/"]+["*"];try{ck=f.href}catch(cy){ck=e.createElement("a"),ck.href="",ck=ck.href}cj=ct.exec(ck.toLowerCase())||[],p.fn.load=function(a,c,d){if(typeof a!="string"&&cu)return cu.apply(this,arguments);if(!this.length)return this;var e,f,g,h=this,i=a.indexOf(" ");return i>=0&&(e=a.slice(i,a.length),a=a.slice(0,i)),p.isFunction(c)?(d=c,c=b):c&&typeof c=="object"&&(f="POST"),p.ajax({url:a,type:f,dataType:"html",data:c,complete:function(a,b){d&&h.each(d,g||[a.responseText,b,a])}}).done(function(a){g=arguments,h.html(e?p("
              ").append(a.replace(cr,"")).find(e):a)}),this},p.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){p.fn[b]=function(a){return this.on(b,a)}}),p.each(["get","post"],function(a,c){p[c]=function(a,d,e,f){return p.isFunction(d)&&(f=f||e,e=d,d=b),p.ajax({type:c,url:a,data:d,success:e,dataType:f})}}),p.extend({getScript:function(a,c){return p.get(a,b,c,"script")},getJSON:function(a,b,c){return p.get(a,b,c,"json")},ajaxSetup:function(a,b){return b?cB(a,p.ajaxSettings):(b=a,a=p.ajaxSettings),cB(a,b),a},ajaxSettings:{url:ck,isLocal:cn.test(cj[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":cx},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a.String,"text html":!0,"text json":p.parseJSON,"text xml":p.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:cz(cv),ajaxTransport:cz(cw),ajax:function(a,c){function y(a,c,f,i){var k,s,t,u,w,y=c;if(v===2)return;v=2,h&&clearTimeout(h),g=b,e=i||"",x.readyState=a>0?4:0,f&&(u=cC(l,x,f));if(a>=200&&a<300||a===304)l.ifModified&&(w=x.getResponseHeader("Last-Modified"),w&&(p.lastModified[d]=w),w=x.getResponseHeader("Etag"),w&&(p.etag[d]=w)),a===304?(y="notmodified",k=!0):(k=cD(l,u),y=k.state,s=k.data,t=k.error,k=!t);else{t=y;if(!y||a)y="error",a<0&&(a=0)}x.status=a,x.statusText=(c||y)+"",k?o.resolveWith(m,[s,y,x]):o.rejectWith(m,[x,y,t]),x.statusCode(r),r=b,j&&n.trigger("ajax"+(k?"Success":"Error"),[x,l,k?s:t]),q.fireWith(m,[x,y]),j&&(n.trigger("ajaxComplete",[x,l]),--p.active||p.event.trigger("ajaxStop"))}typeof a=="object"&&(c=a,a=b),c=c||{};var d,e,f,g,h,i,j,k,l=p.ajaxSetup({},c),m=l.context||l,n=m!==l&&(m.nodeType||m instanceof p)?p(m):p.event,o=p.Deferred(),q=p.Callbacks("once memory"),r=l.statusCode||{},t={},u={},v=0,w="canceled",x={readyState:0,setRequestHeader:function(a,b){if(!v){var c=a.toLowerCase();a=u[c]=u[c]||a,t[a]=b}return this},getAllResponseHeaders:function(){return v===2?e:null},getResponseHeader:function(a){var c;if(v===2){if(!f){f={};while(c=cm.exec(e))f[c[1].toLowerCase()]=c[2]}c=f[a.toLowerCase()]}return c===b?null:c},overrideMimeType:function(a){return v||(l.mimeType=a),this},abort:function(a){return a=a||w,g&&g.abort(a),y(0,a),this}};o.promise(x),x.success=x.done,x.error=x.fail,x.complete=q.add,x.statusCode=function(a){if(a){var b;if(v<2)for(b in a)r[b]=[r[b],a[b]];else b=a[x.status],x.always(b)}return this},l.url=((a||l.url)+"").replace(cl,"").replace(cp,cj[1]+"//"),l.dataTypes=p.trim(l.dataType||"*").toLowerCase().split(s),l.crossDomain==null&&(i=ct.exec(l.url.toLowerCase())||!1,l.crossDomain=i&&i.join(":")+(i[3]?"":i[1]==="http:"?80:443)!==cj.join(":")+(cj[3]?"":cj[1]==="http:"?80:443)),l.data&&l.processData&&typeof l.data!="string"&&(l.data=p.param(l.data,l.traditional)),cA(cv,l,c,x);if(v===2)return x;j=l.global,l.type=l.type.toUpperCase(),l.hasContent=!co.test(l.type),j&&p.active++===0&&p.event.trigger("ajaxStart");if(!l.hasContent){l.data&&(l.url+=(cq.test(l.url)?"&":"?")+l.data,delete l.data),d=l.url;if(l.cache===!1){var 
z=p.now(),A=l.url.replace(cs,"$1_="+z);l.url=A+(A===l.url?(cq.test(l.url)?"&":"?")+"_="+z:"")}}(l.data&&l.hasContent&&l.contentType!==!1||c.contentType)&&x.setRequestHeader("Content-Type",l.contentType),l.ifModified&&(d=d||l.url,p.lastModified[d]&&x.setRequestHeader("If-Modified-Since",p.lastModified[d]),p.etag[d]&&x.setRequestHeader("If-None-Match",p.etag[d])),x.setRequestHeader("Accept",l.dataTypes[0]&&l.accepts[l.dataTypes[0]]?l.accepts[l.dataTypes[0]]+(l.dataTypes[0]!=="*"?", "+cx+"; q=0.01":""):l.accepts["*"]);for(k in l.headers)x.setRequestHeader(k,l.headers[k]);if(!l.beforeSend||l.beforeSend.call(m,x,l)!==!1&&v!==2){w="abort";for(k in{success:1,error:1,complete:1})x[k](l[k]);g=cA(cw,l,c,x);if(!g)y(-1,"No Transport");else{x.readyState=1,j&&n.trigger("ajaxSend",[x,l]),l.async&&l.timeout>0&&(h=setTimeout(function(){x.abort("timeout")},l.timeout));try{v=1,g.send(t,y)}catch(B){if(v<2)y(-1,B);else throw B}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var cE=[],cF=/\?/,cG=/(=)\?(?=&|$)|\?\?/,cH=p.now();p.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var a=cE.pop()||p.expando+"_"+cH++;return this[a]=!0,a}}),p.ajaxPrefilter("json jsonp",function(c,d,e){var f,g,h,i=c.data,j=c.url,k=c.jsonp!==!1,l=k&&cG.test(j),m=k&&!l&&typeof i=="string"&&!(c.contentType||"").indexOf("application/x-www-form-urlencoded")&&cG.test(i);if(c.dataTypes[0]==="jsonp"||l||m)return f=c.jsonpCallback=p.isFunction(c.jsonpCallback)?c.jsonpCallback():c.jsonpCallback,g=a[f],l?c.url=j.replace(cG,"$1"+f):m?c.data=i.replace(cG,"$1"+f):k&&(c.url+=(cF.test(j)?"&":"?")+c.jsonp+"="+f),c.converters["script json"]=function(){return h||p.error(f+" was not called"),h[0]},c.dataTypes[0]="json",a[f]=function(){h=arguments},e.always(function(){a[f]=g,c[f]&&(c.jsonpCallback=d.jsonpCallback,cE.push(f)),h&&p.isFunction(g)&&g(h[0]),h=g=b}),"script"}),p.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(a){return p.globalEval(a),a}}}),p.ajaxPrefilter("script",function(a){a.cache===b&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),p.ajaxTransport("script",function(a){if(a.crossDomain){var c,d=e.head||e.getElementsByTagName("head")[0]||e.documentElement;return{send:function(f,g){c=e.createElement("script"),c.async="async",a.scriptCharset&&(c.charset=a.scriptCharset),c.src=a.url,c.onload=c.onreadystatechange=function(a,e){if(e||!c.readyState||/loaded|complete/.test(c.readyState))c.onload=c.onreadystatechange=null,d&&c.parentNode&&d.removeChild(c),c=b,e||g(200,"success")},d.insertBefore(c,d.firstChild)},abort:function(){c&&c.onload(0,1)}}}});var cI,cJ=a.ActiveXObject?function(){for(var a in cI)cI[a](0,1)}:!1,cK=0;p.ajaxSettings.xhr=a.ActiveXObject?function(){return!this.isLocal&&cL()||cM()}:cL,function(a){p.extend(p.support,{ajax:!!a,cors:!!a&&"withCredentials"in a})}(p.ajaxSettings.xhr()),p.support.ajax&&p.ajaxTransport(function(c){if(!c.crossDomain||p.support.cors){var d;return{send:function(e,f){var g,h,i=c.xhr();c.username?i.open(c.type,c.url,c.async,c.username,c.password):i.open(c.type,c.url,c.async);if(c.xhrFields)for(h in c.xhrFields)i[h]=c.xhrFields[h];c.mimeType&&i.overrideMimeType&&i.overrideMimeType(c.mimeType),!c.crossDomain&&!e["X-Requested-With"]&&(e["X-Requested-With"]="XMLHttpRequest");try{for(h in e)i.setRequestHeader(h,e[h])}catch(j){}i.send(c.hasContent&&c.data||null),d=function(a,e){var 
h,j,k,l,m;try{if(d&&(e||i.readyState===4)){d=b,g&&(i.onreadystatechange=p.noop,cJ&&delete cI[g]);if(e)i.readyState!==4&&i.abort();else{h=i.status,k=i.getAllResponseHeaders(),l={},m=i.responseXML,m&&m.documentElement&&(l.xml=m);try{l.text=i.responseText}catch(a){}try{j=i.statusText}catch(n){j=""}!h&&c.isLocal&&!c.crossDomain?h=l.text?200:404:h===1223&&(h=204)}}}catch(o){e||f(-1,o)}l&&f(h,j,l,k)},c.async?i.readyState===4?setTimeout(d,0):(g=++cK,cJ&&(cI||(cI={},p(a).unload(cJ)),cI[g]=d),i.onreadystatechange=d):d()},abort:function(){d&&d(0,1)}}}});var cN,cO,cP=/^(?:toggle|show|hide)$/,cQ=new RegExp("^(?:([-+])=|)("+q+")([a-z%]*)$","i"),cR=/queueHooks$/,cS=[cY],cT={"*":[function(a,b){var c,d,e=this.createTween(a,b),f=cQ.exec(b),g=e.cur(),h=+g||0,i=1,j=20;if(f){c=+f[2],d=f[3]||(p.cssNumber[a]?"":"px");if(d!=="px"&&h){h=p.css(e.elem,a,!0)||c||1;do i=i||".5",h=h/i,p.style(e.elem,a,h+d);while(i!==(i=e.cur()/g)&&i!==1&&--j)}e.unit=d,e.start=h,e.end=f[1]?h+(f[1]+1)*c:c}return e}]};p.Animation=p.extend(cW,{tweener:function(a,b){p.isFunction(a)?(b=a,a=["*"]):a=a.split(" ");var c,d=0,e=a.length;for(;d-1,j={},k={},l,m;i?(k=e.position(),l=k.top,m=k.left):(l=parseFloat(g)||0,m=parseFloat(h)||0),p.isFunction(b)&&(b=b.call(a,c,f)),b.top!=null&&(j.top=b.top-f.top+l),b.left!=null&&(j.left=b.left-f.left+m),"using"in b?b.using.call(a,j):e.css(j)}},p.fn.extend({position:function(){if(!this[0])return;var a=this[0],b=this.offsetParent(),c=this.offset(),d=c_.test(b[0].nodeName)?{top:0,left:0}:b.offset();return c.top-=parseFloat(p.css(a,"marginTop"))||0,c.left-=parseFloat(p.css(a,"marginLeft"))||0,d.top+=parseFloat(p.css(b[0],"borderTopWidth"))||0,d.left+=parseFloat(p.css(b[0],"borderLeftWidth"))||0,{top:c.top-d.top,left:c.left-d.left}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||e.body;while(a&&!c_.test(a.nodeName)&&p.css(a,"position")==="static")a=a.offsetParent;return a||e.body})}}),p.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(a,c){var d=/Y/.test(c);p.fn[a]=function(e){return p.access(this,function(a,e,f){var g=da(a);if(f===b)return g?c in g?g[c]:g.document.documentElement[e]:a[e];g?g.scrollTo(d?p(g).scrollLeft():f,d?f:p(g).scrollTop()):a[e]=f},a,e,arguments.length,null)}}),p.each({Height:"height",Width:"width"},function(a,c){p.each({padding:"inner"+a,content:c,"":"outer"+a},function(d,e){p.fn[e]=function(e,f){var g=arguments.length&&(d||typeof e!="boolean"),h=d||(e===!0||f===!0?"margin":"border");return p.access(this,function(c,d,e){var f;return p.isWindow(c)?c.document.documentElement["client"+a]:c.nodeType===9?(f=c.documentElement,Math.max(c.body["scroll"+a],f["scroll"+a],c.body["offset"+a],f["offset"+a],f["client"+a])):e===b?p.css(c,d,e,h):p.style(c,d,e,h)},c,g?e:b,g,null)}})}),a.jQuery=a.$=p,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return p})})(window); \ No newline at end of file diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js new file mode 100644 index 0000000000..4dd48675b7 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js @@ -0,0 +1,5486 @@ +/** + * @preserve jquery.layout 1.3.0 - Release Candidate 30.62 + * $Date: 2012-08-04 08:00:00 (Thu, 23 Aug 2012) $ + * $Rev: 303006 $ + * + * Copyright (c) 2012 + * Fabrizio Balliano (http://www.fabrizioballiano.net) + * Kevin Dalman (http://allpro.net) + * + * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html) + 
* and MIT (http://www.opensource.org/licenses/mit-license.php) licenses. + * + * Changelog: http://layout.jquery-dev.net/changelog.cfm#1.3.0.rc30.62 + * NOTE: This is a short-term release to patch a couple of bugs. + * These bugs are listed as officially fixed in RC30.7, which will be released shortly. + * + * Docs: http://layout.jquery-dev.net/documentation.html + * Tips: http://layout.jquery-dev.net/tips.html + * Help: http://groups.google.com/group/jquery-ui-layout + */ + +/* JavaDoc Info: http://code.google.com/closure/compiler/docs/js-for-compiler.html + * {!Object} non-nullable type (never NULL) + * {?string} nullable type (sometimes NULL) - default for {Object} + * {number=} optional parameter + * {*} ALL types + */ + +// NOTE: For best readability, view with a fixed-width font and tabs equal to 4-chars + +;(function ($) { + +// alias Math methods - used a lot! +var min = Math.min +, max = Math.max +, round = Math.floor + +, isStr = function (v) { return $.type(v) === "string"; } + +, runPluginCallbacks = function (Instance, a_fn) { + if ($.isArray(a_fn)) + for (var i=0, c=a_fn.length; i
              ').appendTo("body"); + var d = { width: $c.width() - $c[0].clientWidth, height: $c.height() - $c[0].clientHeight }; + $c.remove(); + window.scrollbarWidth = d.width; + window.scrollbarHeight = d.height; + return dim.match(/^(width|height)$/) ? d[dim] : d; + } + + + /** + * Returns hash container 'display' and 'visibility' + * + * @see $.swap() - swaps CSS, runs callback, resets CSS + */ +, showInvisibly: function ($E, force) { + if ($E && $E.length && (force || $E.css('display') === "none")) { // only if not *already hidden* + var s = $E[0].style + // save ONLY the 'style' props because that is what we must restore + , CSS = { display: s.display || '', visibility: s.visibility || '' }; + // show element 'invisibly' so can be measured + $E.css({ display: "block", visibility: "hidden" }); + return CSS; + } + return {}; + } + + /** + * Returns data for setting size of an element (container or a pane). + * + * @see _create(), onWindowResize() for container, plus others for pane + * @return JSON Returns a hash of all dimensions: top, bottom, left, right, outerWidth, innerHeight, etc + */ +, getElementDimensions: function ($E) { + var + d = {} // dimensions hash + , x = d.css = {} // CSS hash + , i = {} // TEMP insets + , b, p // TEMP border, padding + , N = $.layout.cssNum + , off = $E.offset() + ; + d.offsetLeft = off.left; + d.offsetTop = off.top; + + $.each("Left,Right,Top,Bottom".split(","), function (idx, e) { // e = edge + b = x["border" + e] = $.layout.borderWidth($E, e); + p = x["padding"+ e] = $.layout.cssNum($E, "padding"+e); + i[e] = b + p; // total offset of content from outer side + d["inset"+ e] = p; // eg: insetLeft = paddingLeft + }); + + d.offsetWidth = $E.innerWidth(); // offsetWidth is used in calc when doing manual resize + d.offsetHeight = $E.innerHeight(); // ditto + d.outerWidth = $E.outerWidth(); + d.outerHeight = $E.outerHeight(); + d.innerWidth = max(0, d.outerWidth - i.Left - i.Right); + d.innerHeight = max(0, d.outerHeight - i.Top - i.Bottom); + + x.width = $E.width(); + x.height = $E.height(); + x.top = N($E,"top",true); + x.bottom = N($E,"bottom",true); + x.left = N($E,"left",true); + x.right = N($E,"right",true); + + //d.visible = $E.is(":visible");// && x.width > 0 && x.height > 0; + + return d; + } + +, getElementCSS: function ($E, list) { + var + CSS = {} + , style = $E[0].style + , props = list.split(",") + , sides = "Top,Bottom,Left,Right".split(",") + , attrs = "Color,Style,Width".split(",") + , p, s, a, i, j, k + ; + for (i=0; i < props.length; i++) { + p = props[i]; + if (p.match(/(border|padding|margin)$/)) + for (j=0; j < 4; j++) { + s = sides[j]; + if (p === "border") + for (k=0; k < 3; k++) { + a = attrs[k]; + CSS[p+s+a] = style[p+s+a]; + } + else + CSS[p+s] = style[p+s]; + } + else + CSS[p] = style[p]; + }; + return CSS + } + + /** + * Return the innerWidth for the current browser/doctype + * + * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles() + * @param {Array.} $E Must pass a jQuery object - first element is processed + * @param {number=} outerWidth (optional) Can pass a width, allowing calculations BEFORE element is resized + * @return {number} Returns the innerWidth of the elem by subtracting padding and borders + */ +, cssWidth: function ($E, outerWidth) { + // a 'calculated' outerHeight can be passed so borders and/or padding are removed if needed + if (outerWidth <= 0) return 0; + + if (!$.layout.browser.boxModel) return outerWidth; + + // strip border and padding from outerWidth to get CSS Width + var b = 
$.layout.borderWidth + , n = $.layout.cssNum + , W = outerWidth + - b($E, "Left") + - b($E, "Right") + - n($E, "paddingLeft") + - n($E, "paddingRight"); + + return max(0,W); + } + + /** + * Return the innerHeight for the current browser/doctype + * + * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles() + * @param {Array.} $E Must pass a jQuery object - first element is processed + * @param {number=} outerHeight (optional) Can pass a width, allowing calculations BEFORE element is resized + * @return {number} Returns the innerHeight of the elem by subtracting padding and borders + */ +, cssHeight: function ($E, outerHeight) { + // a 'calculated' outerHeight can be passed so borders and/or padding are removed if needed + if (outerHeight <= 0) return 0; + + if (!$.layout.browser.boxModel) return outerHeight; + + // strip border and padding from outerHeight to get CSS Height + var b = $.layout.borderWidth + , n = $.layout.cssNum + , H = outerHeight + - b($E, "Top") + - b($E, "Bottom") + - n($E, "paddingTop") + - n($E, "paddingBottom"); + + return max(0,H); + } + + /** + * Returns the 'current CSS numeric value' for a CSS property - 0 if property does not exist + * + * @see Called by many methods + * @param {Array.} $E Must pass a jQuery object - first element is processed + * @param {string} prop The name of the CSS property, eg: top, width, etc. + * @param {boolean=} [allowAuto=false] true = return 'auto' if that is value; false = return 0 + * @return {(string|number)} Usually used to get an integer value for position (top, left) or size (height, width) + */ +, cssNum: function ($E, prop, allowAuto) { + if (!$E.jquery) $E = $($E); + var CSS = $.layout.showInvisibly($E) + , p = $.css($E[0], prop, true) + , v = allowAuto && p=="auto" ? p : (parseInt(p, 10) || 0); + $E.css( CSS ); // RESET + return v; + } + +, borderWidth: function (el, side) { + if (el.jquery) el = el[0]; + var b = "border"+ side.substr(0,1).toUpperCase() + side.substr(1); // left => Left + return $.css(el, b+"Style", true) === "none" ? 0 : (parseInt($.css(el, b+"Width", true), 10) || 0); + } + + /** + * Mouse-tracking utility - FUTURE REFERENCE + * + * init: if (!window.mouse) { + * window.mouse = { x: 0, y: 0 }; + * $(document).mousemove( $.layout.trackMouse ); + * } + * + * @param {Object} evt + * +, trackMouse: function (evt) { + window.mouse = { x: evt.clientX, y: evt.clientY }; + } + */ + + /** + * SUBROUTINE for preventPrematureSlideClose option + * + * @param {Object} evt + * @param {Object=} el + */ +, isMouseOverElem: function (evt, el) { + var + $E = $(el || this) + , d = $E.offset() + , T = d.top + , L = d.left + , R = L + $E.outerWidth() + , B = T + $E.outerHeight() + , x = evt.pageX // evt.clientX ? + , y = evt.pageY // evt.clientY ? 
+ ; + // if X & Y are < 0, probably means is over an open SELECT + return ($.layout.browser.msie && x < 0 && y < 0) || ((x >= L && x <= R) && (y >= T && y <= B)); + } + + /** + * Message/Logging Utility + * + * @example $.layout.msg("My message"); // log text + * @example $.layout.msg("My message", true); // alert text + * @example $.layout.msg({ foo: "bar" }, "Title"); // log hash-data, with custom title + * @example $.layout.msg({ foo: "bar" }, true, "Title", { sort: false }); -OR- + * @example $.layout.msg({ foo: "bar" }, "Title", { sort: false, display: true }); // alert hash-data + * + * @param {(Object|string)} info String message OR Hash/Array + * @param {(Boolean|string|Object)=} [popup=false] True means alert-box - can be skipped + * @param {(Object|string)=} [debugTitle=""] Title for Hash data - can be skipped + * @param {Object=} [debugOpts] Extra options for debug output + */ +, msg: function (info, popup, debugTitle, debugOpts) { + if ($.isPlainObject(info) && window.debugData) { + if (typeof popup === "string") { + debugOpts = debugTitle; + debugTitle = popup; + } + else if (typeof debugTitle === "object") { + debugOpts = debugTitle; + debugTitle = null; + } + var t = debugTitle || "log( )" + , o = $.extend({ sort: false, returnHTML: false, display: false }, debugOpts); + if (popup === true || o.display) + debugData( info, t, o ); + else if (window.console) + console.log(debugData( info, t, o )); + } + else if (popup) + alert(info); + else if (window.console) + console.log(info); + else { + var id = "#layoutLogger" + , $l = $(id); + if (!$l.length) + $l = createLog(); + $l.children("ul").append('
<li>'+ info.replace(/\</g,"&lt;").replace(/\>/g,"&gt;") +'</li>');
+ 	}
+
+ 	function createLog () {
+ 		// NOTE: minimal markup assumed below - the original inline-styled logger markup did not survive in this patch text
+ 		var pos = $.support.fixedPosition ? 'fixed' : 'absolute'
+ 		,	$e = $('<div id="layoutLogger" style="position: '+ pos +';">'
+ 			+	'<div><span title="Remove Console">X</span>Layout console.log</div>'
+ 			+	'<div>'
+ 			+		'<ul></ul>'
+ 			+	'</div></div>
                ' + ).appendTo("body"); + $e.css('left', $(window).width() - $e.outerWidth() - 5) + if ($.ui.draggable) $e.draggable({ handle: ':first-child' }); + return $e; + }; + } + +}; + +// DEFAULT OPTIONS +$.layout.defaults = { +/* + * LAYOUT & LAYOUT-CONTAINER OPTIONS + * - none of these options are applicable to individual panes + */ + name: "" // Not required, but useful for buttons and used for the state-cookie +, containerSelector: "" // ONLY used when specifying a childOptions - to find container-element that is NOT directly-nested +, containerClass: "ui-layout-container" // layout-container element +, scrollToBookmarkOnLoad: true // after creating a layout, scroll to bookmark in URL (.../page.htm#myBookmark) +, resizeWithWindow: true // bind thisLayout.resizeAll() to the window.resize event +, resizeWithWindowDelay: 200 // delay calling resizeAll because makes window resizing very jerky +, resizeWithWindowMaxDelay: 0 // 0 = none - force resize every XX ms while window is being resized +, onresizeall_start: null // CALLBACK when resizeAll() STARTS - NOT pane-specific +, onresizeall_end: null // CALLBACK when resizeAll() ENDS - NOT pane-specific +, onload_start: null // CALLBACK when Layout inits - after options initialized, but before elements +, onload_end: null // CALLBACK when Layout inits - after EVERYTHING has been initialized +, onunload_start: null // CALLBACK when Layout is destroyed OR onWindowUnload +, onunload_end: null // CALLBACK when Layout is destroyed OR onWindowUnload +, initPanes: true // false = DO NOT initialize the panes onLoad - will init later +, showErrorMessages: true // enables fatal error messages to warn developers of common errors +, showDebugMessages: false // display console-and-alert debug msgs - IF this Layout version _has_ debugging code! +// Changing this zIndex value will cause other zIndex values to automatically change +, zIndex: null // the PANE zIndex - resizers and masks will be +1 +// DO NOT CHANGE the zIndex values below unless you clearly understand their relationships +, zIndexes: { // set _default_ z-index values here... + pane_normal: 0 // normal z-index for panes + , content_mask: 1 // applied to overlays used to mask content INSIDE panes during resizing + , resizer_normal: 2 // normal z-index for resizer-bars + , pane_sliding: 100 // applied to *BOTH* the pane and its resizer when a pane is 'slid open' + , pane_animate: 1000 // applied to the pane when being animated - not applied to the resizer + , resizer_drag: 10000 // applied to the CLONED resizer-bar when being 'dragged' + } +, errors: { + pane: "pane" // description of "layout pane element" - used only in error messages + , selector: "selector" // description of "jQuery-selector" - used only in error messages + , addButtonError: "Error Adding Button \n\nInvalid " + , containerMissing: "UI Layout Initialization Error\n\nThe specified layout-container does not exist." + , centerPaneMissing: "UI Layout Initialization Error\n\nThe center-pane element does not exist.\n\nThe center-pane is a required element." + , noContainerHeight: "UI Layout Initialization Warning\n\nThe layout-container \"CONTAINER\" has no height.\n\nTherefore the layout is 0-height and hence 'invisible'!" + , callbackError: "UI Layout Callback Error\n\nThe EVENT callback is not a valid function." 
+ } +/* + * PANE DEFAULT SETTINGS + * - settings under the 'panes' key become the default settings for *all panes* + * - ALL pane-options can also be set specifically for each panes, which will override these 'default values' + */ +, panes: { // default options for 'all panes' - will be overridden by 'per-pane settings' + applyDemoStyles: false // NOTE: renamed from applyDefaultStyles for clarity + , closable: true // pane can open & close + , resizable: true // when open, pane can be resized + , slidable: true // when closed, pane can 'slide open' over other panes - closes on mouse-out + , initClosed: false // true = init pane as 'closed' + , initHidden: false // true = init pane as 'hidden' - no resizer-bar/spacing + // SELECTORS + //, paneSelector: "" // MUST be pane-specific - jQuery selector for pane + , contentSelector: ".ui-layout-content" // INNER div/element to auto-size so only it scrolls, not the entire pane! + , contentIgnoreSelector: ".ui-layout-ignore" // element(s) to 'ignore' when measuring 'content' + , findNestedContent: false // true = $P.find(contentSelector), false = $P.children(contentSelector) + // GENERIC ROOT-CLASSES - for auto-generated classNames + , paneClass: "ui-layout-pane" // Layout Pane + , resizerClass: "ui-layout-resizer" // Resizer Bar + , togglerClass: "ui-layout-toggler" // Toggler Button + , buttonClass: "ui-layout-button" // CUSTOM Buttons - eg: '[ui-layout-button]-toggle/-open/-close/-pin' + // ELEMENT SIZE & SPACING + //, size: 100 // MUST be pane-specific -initial size of pane + , minSize: 0 // when manually resizing a pane + , maxSize: 0 // ditto, 0 = no limit + , spacing_open: 6 // space between pane and adjacent panes - when pane is 'open' + , spacing_closed: 6 // ditto - when pane is 'closed' + , togglerLength_open: 50 // Length = WIDTH of toggler button on north/south sides - HEIGHT on east/west sides + , togglerLength_closed: 50 // 100% OR -1 means 'full height/width of resizer bar' - 0 means 'hidden' + , togglerAlign_open: "center" // top/left, bottom/right, center, OR... + , togglerAlign_closed: "center" // 1 => nn = offset from top/left, -1 => -nn == offset from bottom/right + , togglerContent_open: "" // text or HTML to put INSIDE the toggler + , togglerContent_closed: "" // ditto + // RESIZING OPTIONS + , resizerDblClickToggle: true // + , autoResize: true // IF size is 'auto' or a percentage, then recalc 'pixel size' whenever the layout resizes + , autoReopen: true // IF a pane was auto-closed due to noRoom, reopen it when there is room? 
False = leave it closed + , resizerDragOpacity: 1 // option for ui.draggable + //, resizerCursor: "" // MUST be pane-specific - cursor when over resizer-bar + , maskContents: false // true = add DIV-mask over-or-inside this pane so can 'drag' over IFRAMES + , maskObjects: false // true = add IFRAME-mask over-or-inside this pane to cover objects/applets - content-mask will overlay this mask + , maskZindex: null // will override zIndexes.content_mask if specified - not applicable to iframe-panes + , resizingGrid: false // grid size that the resizers will snap-to during resizing, eg: [20,20] + , livePaneResizing: false // true = LIVE Resizing as resizer is dragged + , liveContentResizing: false // true = re-measure header/footer heights as resizer is dragged + , liveResizingTolerance: 1 // how many px change before pane resizes, to control performance + // SLIDING OPTIONS + , sliderCursor: "pointer" // cursor when resizer-bar will trigger 'sliding' + , slideTrigger_open: "click" // click, dblclick, mouseenter + , slideTrigger_close: "mouseleave"// click, mouseleave + , slideDelay_open: 300 // applies only for mouseenter event - 0 = instant open + , slideDelay_close: 300 // applies only for mouseleave event (300ms is the minimum!) + , hideTogglerOnSlide: false // when pane is slid-open, should the toggler show? + , preventQuickSlideClose: $.layout.browser.webkit // Chrome triggers slideClosed as it is opening + , preventPrematureSlideClose: false // handle incorrect mouseleave trigger, like when over a SELECT-list in IE + // PANE-SPECIFIC TIPS & MESSAGES + , tips: { + Open: "Open" // eg: "Open Pane" + , Close: "Close" + , Resize: "Resize" + , Slide: "Slide Open" + , Pin: "Pin" + , Unpin: "Un-Pin" + , noRoomToOpen: "Not enough room to show this panel." // alert if user tries to open a pane that cannot + , minSizeWarning: "Panel has reached its minimum size" // displays in browser statusbar + , maxSizeWarning: "Panel has reached its maximum size" // ditto + } + // HOT-KEYS & MISC + , showOverflowOnHover: false // will bind allowOverflow() utility to pane.onMouseOver + , enableCursorHotkey: true // enabled 'cursor' hotkeys + //, customHotkey: "" // MUST be pane-specific - EITHER a charCode OR a character + , customHotkeyModifier: "SHIFT" // either 'SHIFT', 'CTRL' or 'CTRL+SHIFT' - NOT 'ALT' + // PANE ANIMATION + // NOTE: fxSss_open, fxSss_close & fxSss_size options (eg: fxName_open) are auto-generated if not passed + , fxName: "slide" // ('none' or blank), slide, drop, scale -- only relevant to 'open' & 'close', NOT 'size' + , fxSpeed: null // slow, normal, fast, 200, nnn - if passed, will OVERRIDE fxSettings.duration + , fxSettings: {} // can be passed, eg: { easing: "easeOutBounce", duration: 1500 } + , fxOpacityFix: true // tries to fix opacity in IE to restore anti-aliasing after animation + , animatePaneSizing: false // true = animate resizing after dragging resizer-bar OR sizePane() is called + /* NOTE: Action-specific FX options are auto-generated from the options above if not specifically set: + fxName_open: "slide" // 'Open' pane animation + fnName_close: "slide" // 'Close' pane animation + fxName_size: "slide" // 'Size' pane animation - when animatePaneSizing = true + fxSpeed_open: null + fxSpeed_close: null + fxSpeed_size: null + fxSettings_open: {} + fxSettings_close: {} + fxSettings_size: {} + */ + // CHILD/NESTED LAYOUTS + , childOptions: null // Layout-options for nested/child layout - even {} is valid as options + , initChildLayout: true // true = child layout will be created as 
soon as _this_ layout completes initialization + , destroyChildLayout: true // true = destroy child-layout if this pane is destroyed + , resizeChildLayout: true // true = trigger child-layout.resizeAll() when this pane is resized + // EVENT TRIGGERING + , triggerEventsOnLoad: false // true = trigger onopen OR onclose callbacks when layout initializes + , triggerEventsDuringLiveResize: true // true = trigger onresize callback REPEATEDLY if livePaneResizing==true + // PANE CALLBACKS + , onshow_start: null // CALLBACK when pane STARTS to Show - BEFORE onopen/onhide_start + , onshow_end: null // CALLBACK when pane ENDS being Shown - AFTER onopen/onhide_end + , onhide_start: null // CALLBACK when pane STARTS to Close - BEFORE onclose_start + , onhide_end: null // CALLBACK when pane ENDS being Closed - AFTER onclose_end + , onopen_start: null // CALLBACK when pane STARTS to Open + , onopen_end: null // CALLBACK when pane ENDS being Opened + , onclose_start: null // CALLBACK when pane STARTS to Close + , onclose_end: null // CALLBACK when pane ENDS being Closed + , onresize_start: null // CALLBACK when pane STARTS being Resized ***FOR ANY REASON*** + , onresize_end: null // CALLBACK when pane ENDS being Resized ***FOR ANY REASON*** + , onsizecontent_start: null // CALLBACK when sizing of content-element STARTS + , onsizecontent_end: null // CALLBACK when sizing of content-element ENDS + , onswap_start: null // CALLBACK when pane STARTS to Swap + , onswap_end: null // CALLBACK when pane ENDS being Swapped + , ondrag_start: null // CALLBACK when pane STARTS being ***MANUALLY*** Resized + , ondrag_end: null // CALLBACK when pane ENDS being ***MANUALLY*** Resized + } +/* + * PANE-SPECIFIC SETTINGS + * - options listed below MUST be specified per-pane - they CANNOT be set under 'panes' + * - all options under the 'panes' key can also be set specifically for any pane + * - most options under the 'panes' key apply only to 'border-panes' - NOT the the center-pane + */ +, north: { + paneSelector: ".ui-layout-north" + , size: "auto" // eg: "auto", "30%", .30, 200 + , resizerCursor: "n-resize" // custom = url(myCursor.cur) + , customHotkey: "" // EITHER a charCode (43) OR a character ("o") + } +, south: { + paneSelector: ".ui-layout-south" + , size: "auto" + , resizerCursor: "s-resize" + , customHotkey: "" + } +, east: { + paneSelector: ".ui-layout-east" + , size: 200 + , resizerCursor: "e-resize" + , customHotkey: "" + } +, west: { + paneSelector: ".ui-layout-west" + , size: 200 + , resizerCursor: "w-resize" + , customHotkey: "" + } +, center: { + paneSelector: ".ui-layout-center" + , minWidth: 0 + , minHeight: 0 + } +}; + +$.layout.optionsMap = { + // layout/global options - NOT pane-options + layout: ("stateManagement,effects,zIndexes,errors," + + "name,zIndex,scrollToBookmarkOnLoad,showErrorMessages," + + "resizeWithWindow,resizeWithWindowDelay,resizeWithWindowMaxDelay," + + "onresizeall,onresizeall_start,onresizeall_end,onload,onunload").split(",") +// borderPanes: [ ALL options that are NOT specified as 'layout' ] + // default.panes options that apply to the center-pane (most options apply _only_ to border-panes) +, center: ("paneClass,contentSelector,contentIgnoreSelector,findNestedContent,applyDemoStyles,triggerEventsOnLoad," + + "showOverflowOnHover,maskContents,maskObjects,liveContentResizing," + + "childOptions,initChildLayout,resizeChildLayout,destroyChildLayout," + + "onresize,onresize_start,onresize_end,onsizecontent,onsizecontent_start,onsizecontent_end").split(",") + // options that MUST be 
specifically set 'per-pane' - CANNOT set in the panes (defaults) key +, noDefault: ("paneSelector,resizerCursor,customHotkey").split(",") +}; + +/** + * Processes options passed in converts flat-format data into subkey (JSON) format + * In flat-format, subkeys are _currently_ separated with 2 underscores, like north__optName + * Plugins may also call this method so they can transform their own data + * + * @param {!Object} hash Data/options passed by user - may be a single level or nested levels + * @return {Object} Returns hash of minWidth & minHeight + */ +$.layout.transformData = function (hash) { + var json = { panes: {}, center: {} } // init return object + , data, branch, optKey, keys, key, val, i, c; + + if (typeof hash !== "object") return json; // no options passed + + // convert all 'flat-keys' to 'sub-key' format + for (optKey in hash) { + branch = json; + data = $.layout.optionsMap.layout; + val = hash[ optKey ]; + keys = optKey.split("__"); // eg: west__size or north__fxSettings__duration + c = keys.length - 1; + // convert underscore-delimited to subkeys + for (i=0; i <= c; i++) { + key = keys[i]; + if (i === c) + branch[key] = val; + else if (!branch[key]) + branch[key] = {}; // create the subkey + // recurse to sub-key for next loop - if not done + branch = branch[key]; + } + } + + return json; +}; + +// INTERNAL CONFIG DATA - DO NOT CHANGE THIS! +$.layout.backwardCompatibility = { + // data used by renameOldOptions() + map: { + // OLD Option Name: NEW Option Name + applyDefaultStyles: "applyDemoStyles" + , resizeNestedLayout: "resizeChildLayout" + , resizeWhileDragging: "livePaneResizing" + , resizeContentWhileDragging: "liveContentResizing" + , triggerEventsWhileDragging: "triggerEventsDuringLiveResize" + , maskIframesOnResize: "maskContents" + , useStateCookie: "stateManagement.enabled" + , "cookie.autoLoad": "stateManagement.autoLoad" + , "cookie.autoSave": "stateManagement.autoSave" + , "cookie.keys": "stateManagement.stateKeys" + , "cookie.name": "stateManagement.cookie.name" + , "cookie.domain": "stateManagement.cookie.domain" + , "cookie.path": "stateManagement.cookie.path" + , "cookie.expires": "stateManagement.cookie.expires" + , "cookie.secure": "stateManagement.cookie.secure" + // OLD Language options + , noRoomToOpenTip: "tips.noRoomToOpen" + , togglerTip_open: "tips.Close" // open = Close + , togglerTip_closed: "tips.Open" // closed = Open + , resizerTip: "tips.Resize" + , sliderTip: "tips.Slide" + } + +/** +* @param {Object} opts +*/ +, renameOptions: function (opts) { + var map = $.layout.backwardCompatibility.map + , oldData, newData, value + ; + for (var itemPath in map) { + oldData = getBranch( itemPath ); + value = oldData.branch[ oldData.key ]; + if (value !== undefined) { + newData = getBranch( map[itemPath], true ); + newData.branch[ newData.key ] = value; + delete oldData.branch[ oldData.key ]; + } + } + + /** + * @param {string} path + * @param {boolean=} [create=false] Create path if does not exist + */ + function getBranch (path, create) { + var a = path.split(".") // split keys into array + , c = a.length - 1 + , D = { branch: opts, key: a[c] } // init branch at top & set key (last item) + , i = 0, k, undef; + for (; i 0) { + if (autoHide && $E.data('autoHidden') && $E.innerHeight() > 0) { + $E.show().data('autoHidden', false); + if (!browser.mozilla) // FireFox refreshes iframes - IE does not + // make hidden, then visible to 'refresh' display after animation + $E.css(_c.hidden).css(_c.visible); + } + } + else if (autoHide && 
!$E.data('autoHidden')) + $E.hide().data('autoHidden', true); + } + + /** + * @param {(string|!Object)} el + * @param {number=} outerHeight + * @param {boolean=} [autoHide=false] + */ +, setOuterHeight = function (el, outerHeight, autoHide) { + var $E = el, h; + if (isStr(el)) $E = $Ps[el]; // west + else if (!el.jquery) $E = $(el); + h = cssH($E, outerHeight); + $E.css({ height: h, visibility: "visible" }); // may have been 'hidden' by sizeContent + if (h > 0 && $E.innerWidth() > 0) { + if (autoHide && $E.data('autoHidden')) { + $E.show().data('autoHidden', false); + if (!browser.mozilla) // FireFox refreshes iframes - IE does not + $E.css(_c.hidden).css(_c.visible); + } + } + else if (autoHide && !$E.data('autoHidden')) + $E.hide().data('autoHidden', true); + } + + /** + * @param {(string|!Object)} el + * @param {number=} outerSize + * @param {boolean=} [autoHide=false] + */ +, setOuterSize = function (el, outerSize, autoHide) { + if (_c[pane].dir=="horz") // pane = north or south + setOuterHeight(el, outerSize, autoHide); + else // pane = east or west + setOuterWidth(el, outerSize, autoHide); + } + + + /** + * Converts any 'size' params to a pixel/integer size, if not already + * If 'auto' or a decimal/percentage is passed as 'size', a pixel-size is calculated + * + /** + * @param {string} pane + * @param {(string|number)=} size + * @param {string=} [dir] + * @return {number} + */ +, _parseSize = function (pane, size, dir) { + if (!dir) dir = _c[pane].dir; + + if (isStr(size) && size.match(/%/)) + size = (size === '100%') ? -1 : parseInt(size, 10) / 100; // convert % to decimal + + if (size === 0) + return 0; + else if (size >= 1) + return parseInt(size, 10); + + var o = options, avail = 0; + if (dir=="horz") // north or south or center.minHeight + avail = sC.innerHeight - ($Ps.north ? o.north.spacing_open : 0) - ($Ps.south ? o.south.spacing_open : 0); + else if (dir=="vert") // east or west or center.minWidth + avail = sC.innerWidth - ($Ps.west ? o.west.spacing_open : 0) - ($Ps.east ? o.east.spacing_open : 0); + + if (size === -1) // -1 == 100% + return avail; + else if (size > 0) // percentage, eg: .25 + return round(avail * size); + else if (pane=="center") + return 0; + else { // size < 0 || size=='auto' || size==Missing || size==Invalid + // auto-size the pane + var dim = (dir === "horz" ? "height" : "width") + , $P = $Ps[pane] + , $C = dim === 'height' ? $Cs[pane] : false + , vis = $.layout.showInvisibly($P) // show pane invisibly if hidden + , szP = $P.css(dim) // SAVE current pane size + , szC = $C ? $C.css(dim) : 0 // SAVE current content size + ; + $P.css(dim, "auto"); + if ($C) $C.css(dim, "auto"); + size = (dim === "height") ? $P.outerHeight() : $P.outerWidth(); // MEASURE + $P.css(dim, szP).css(vis); // RESET size & visibility + if ($C) $C.css(dim, szC); + return size; + } + } + + /** + * Calculates current 'size' (outer-width or outer-height) of a border-pane - optionally with 'pane-spacing' added + * + * @param {(string|!Object)} pane + * @param {boolean=} [inclSpace=false] + * @return {number} Returns EITHER Width for east/west panes OR Height for north/south panes + */ +, getPaneSize = function (pane, inclSpace) { + var + $P = $Ps[pane] + , o = options[pane] + , s = state[pane] + , oSp = (inclSpace ? o.spacing_open : 0) + , cSp = (inclSpace ? 
o.spacing_closed : 0) + ; + if (!$P || s.isHidden) + return 0; + else if (s.isClosed || (s.isSliding && inclSpace)) + return cSp; + else if (_c[pane].dir === "horz") + return $P.outerHeight() + oSp; + else // dir === "vert" + return $P.outerWidth() + oSp; + } + + /** + * Calculate min/max pane dimensions and limits for resizing + * + * @param {string} pane + * @param {boolean=} [slide=false] + */ +, setSizeLimits = function (pane, slide) { + if (!isInitialized()) return; + var + o = options[pane] + , s = state[pane] + , c = _c[pane] + , dir = c.dir + , side = c.side.toLowerCase() + , type = c.sizeType.toLowerCase() + , isSliding = (slide != undefined ? slide : s.isSliding) // only open() passes 'slide' param + , $P = $Ps[pane] + , paneSpacing = o.spacing_open + // measure the pane on the *opposite side* from this pane + , altPane = _c.oppositeEdge[pane] + , altS = state[altPane] + , $altP = $Ps[altPane] + , altPaneSize = (!$altP || altS.isVisible===false || altS.isSliding ? 0 : (dir=="horz" ? $altP.outerHeight() : $altP.outerWidth())) + , altPaneSpacing = ((!$altP || altS.isHidden ? 0 : options[altPane][ altS.isClosed !== false ? "spacing_closed" : "spacing_open" ]) || 0) + // limitSize prevents this pane from 'overlapping' opposite pane + , containerSize = (dir=="horz" ? sC.innerHeight : sC.innerWidth) + , minCenterDims = cssMinDims("center") + , minCenterSize = dir=="horz" ? max(options.center.minHeight, minCenterDims.minHeight) : max(options.center.minWidth, minCenterDims.minWidth) + // if pane is 'sliding', then ignore center and alt-pane sizes - because 'overlays' them + , limitSize = (containerSize - paneSpacing - (isSliding ? 0 : (_parseSize("center", minCenterSize, dir) + altPaneSize + altPaneSpacing))) + , minSize = s.minSize = max( _parseSize(pane, o.minSize), cssMinDims(pane).minSize ) + , maxSize = s.maxSize = min( (o.maxSize ? _parseSize(pane, o.maxSize) : 100000), limitSize ) + , r = s.resizerPosition = {} // used to set resizing limits + , top = sC.insetTop + , left = sC.insetLeft + , W = sC.innerWidth + , H = sC.innerHeight + , rW = o.spacing_open // subtract resizer-width to get top/left position for south/east + ; + switch (pane) { + case "north": r.min = top + minSize; + r.max = top + maxSize; + break; + case "west": r.min = left + minSize; + r.max = left + maxSize; + break; + case "south": r.min = top + H - maxSize - rW; + r.max = top + H - minSize - rW; + break; + case "east": r.min = left + W - maxSize - rW; + r.max = left + W - minSize - rW; + break; + }; + } + + /** + * Returns data for setting the size/position of center pane. 
Also used to set Height for east/west panes + * + * @return JSON Returns a hash of all dimensions: top, bottom, left, right, (outer) width and (outer) height + */ +, calcNewCenterPaneDims = function () { + var d = { + top: getPaneSize("north", true) // true = include 'spacing' value for pane + , bottom: getPaneSize("south", true) + , left: getPaneSize("west", true) + , right: getPaneSize("east", true) + , width: 0 + , height: 0 + }; + + // NOTE: sC = state.container + // calc center-pane outer dimensions + d.width = sC.innerWidth - d.left - d.right; // outerWidth + d.height = sC.innerHeight - d.bottom - d.top; // outerHeight + // add the 'container border/padding' to get final positions relative to the container + d.top += sC.insetTop; + d.bottom += sC.insetBottom; + d.left += sC.insetLeft; + d.right += sC.insetRight; + + return d; + } + + + /** + * @param {!Object} el + * @param {boolean=} [allStates=false] + */ +, getHoverClasses = function (el, allStates) { + var + $El = $(el) + , type = $El.data("layoutRole") + , pane = $El.data("layoutEdge") + , o = options[pane] + , root = o[type +"Class"] + , _pane = "-"+ pane // eg: "-west" + , _open = "-open" + , _closed = "-closed" + , _slide = "-sliding" + , _hover = "-hover " // NOTE the trailing space + , _state = $El.hasClass(root+_closed) ? _closed : _open + , _alt = _state === _closed ? _open : _closed + , classes = (root+_hover) + (root+_pane+_hover) + (root+_state+_hover) + (root+_pane+_state+_hover) + ; + if (allStates) // when 'removing' classes, also remove alternate-state classes + classes += (root+_alt+_hover) + (root+_pane+_alt+_hover); + + if (type=="resizer" && $El.hasClass(root+_slide)) + classes += (root+_slide+_hover) + (root+_pane+_slide+_hover); + + return $.trim(classes); + } +, addHover = function (evt, el) { + var $E = $(el || this); + if (evt && $E.data("layoutRole") === "toggler") + evt.stopPropagation(); // prevent triggering 'slide' on Resizer-bar + $E.addClass( getHoverClasses($E) ); + } +, removeHover = function (evt, el) { + var $E = $(el || this); + $E.removeClass( getHoverClasses($E, true) ); + } + +, onResizerEnter = function (evt) { // ALSO called by toggler.mouseenter + if ($.fn.disableSelection) + $("body").disableSelection(); + } +, onResizerLeave = function (evt, el) { + var + e = el || this // el is only passed when called by the timer + , pane = $(e).data("layoutEdge") + , name = pane +"ResizerLeave" + ; + timer.clear(pane+"_openSlider"); // cancel slideOpen timer, if set + timer.clear(name); // cancel enableSelection timer - may re/set below + // this method calls itself on a timer because it needs to allow + // enough time for dragging to kick-in and set the isResizing flag + // dragging has a 100ms delay set, so this delay must be >100 + if (!el) // 1st call - mouseleave event + timer.set(name, function(){ onResizerLeave(evt, e); }, 200); + // if user is resizing, then dragStop will enableSelection(), so can skip it here + else if (!state[pane].isResizing && $.fn.enableSelection) // 2nd call - by timer + $("body").enableSelection(); + } + +/* + * ########################### + * INITIALIZATION METHODS + * ########################### + */ + + /** + * Initialize the layout - called automatically whenever an instance of layout is created + * + * @see none - triggered onInit + * @return mixed true = fully initialized | false = panes not initialized (yet) | 'cancel' = abort + */ +, _create = function () { + // initialize config/options + initOptions(); + var o = options; + + // TEMP state so isInitialized 
returns true during init process + state.creatingLayout = true; + + // init plugins for this layout, if there are any (eg: stateManagement) + runPluginCallbacks( Instance, $.layout.onCreate ); + + // options & state have been initialized, so now run beforeLoad callback + // onload will CANCEL layout creation if it returns false + if (false === _runCallbacks("onload_start")) + return 'cancel'; + + // initialize the container element + _initContainer(); + + // bind hotkey function - keyDown - if required + initHotkeys(); + + // bind window.onunload + $(window).bind("unload."+ sID, unload); + + // init plugins for this layout, if there are any (eg: customButtons) + runPluginCallbacks( Instance, $.layout.onLoad ); + + // if layout elements are hidden, then layout WILL NOT complete initialization! + // initLayoutElements will set initialized=true and run the onload callback IF successful + if (o.initPanes) _initLayoutElements(); + + delete state.creatingLayout; + + return state.initialized; + } + + /** + * Initialize the layout IF not already + * + * @see All methods in Instance run this test + * @return boolean true = layoutElements have been initialized | false = panes are not initialized (yet) + */ +, isInitialized = function () { + if (state.initialized || state.creatingLayout) return true; // already initialized + else return _initLayoutElements(); // try to init panes NOW + } + + /** + * Initialize the layout - called automatically whenever an instance of layout is created + * + * @see _create() & isInitialized + * @return An object pointer to the instance created + */ +, _initLayoutElements = function (retry) { + // initialize config/options + var o = options; + + // CANNOT init panes inside a hidden container! + if (!$N.is(":visible")) { + // handle Chrome bug where popup window 'has no height' + // if layout is BODY element, try again in 50ms + // SEE: http://layout.jquery-dev.net/samples/test_popup_window.html + if ( !retry && browser.webkit && $N[0].tagName === "BODY" ) + setTimeout(function(){ _initLayoutElements(true); }, 50); + return false; + } + + // a center pane is required, so make sure it exists + if (!getPane("center").length) { + return _log( o.errors.centerPaneMissing ); + } + + // TEMP state so isInitialized returns true during init process + state.creatingLayout = true; + + // update Container dims + $.extend(sC, elDims( $N )); + + // initialize all layout elements + initPanes(); // size & position panes - calls initHandles() - which calls initResizable() + + if (o.scrollToBookmarkOnLoad) { + var l = self.location; + if (l.hash) l.replace( l.hash ); // scrollTo Bookmark + } + + // check to see if this layout 'nested' inside a pane + if (Instance.hasParentLayout) + o.resizeWithWindow = false; + // bind resizeAll() for 'this layout instance' to window.resize event + else if (o.resizeWithWindow) + $(window).bind("resize."+ sID, windowResize); + + delete state.creatingLayout; + state.initialized = true; + + // init plugins for this layout, if there are any + runPluginCallbacks( Instance, $.layout.onReady ); + + // now run the onload callback, if exists + _runCallbacks("onload_end"); + + return true; // elements initialized successfully + } + + /** + * Initialize nested layouts - called when _initLayoutElements completes + * + * NOT CURRENTLY USED + * + * @see _initLayoutElements + * @return An object pointer to the instance created + */ +, _initChildLayouts = function () { + $.each(_c.allPanes, function (idx, pane) { + if (options[pane].initChildLayout) + createChildLayout( 
pane ); + }); + } + + /** + * Initialize nested layouts for a specific pane - can optionally pass layout-options + * + * @see _initChildLayouts + * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west + * @param {Object=} [opts] Layout-options - if passed, will OVERRRIDE options[pane].childOptions + * @return An object pointer to the layout instance created - or null + */ +, createChildLayout = function (evt_or_pane, opts) { + var pane = evtPane.call(this, evt_or_pane) + , $P = $Ps[pane] + , C = children + ; + if ($P) { + var $C = $Cs[pane] + , o = opts || options[pane].childOptions + , d = "layout" + // determine which element is supposed to be the 'child container' + // if pane has a 'containerSelector' OR a 'content-div', use those instead of the pane + , $Cont = o.containerSelector ? $P.find( o.containerSelector ) : ($C || $P) + , containerFound = $Cont.length + // see if a child-layout ALREADY exists on this element + , child = containerFound ? (C[pane] = $Cont.data(d) || null) : null + ; + // if no layout exists, but childOptions are set, try to create the layout now + if (!child && containerFound && o) + child = C[pane] = $Cont.eq(0).layout(o) || null; + if (child) + child.hasParentLayout = true; // set parent-flag in child + } + Instance[pane].child = C[pane]; // ALWAYS set pane-object pointer, even if null + } + +, windowResize = function () { + var delay = Number(options.resizeWithWindowDelay); + if (delay < 10) delay = 100; // MUST have a delay! + // resizing uses a delay-loop because the resize event fires repeatly - except in FF, but delay anyway + timer.clear("winResize"); // if already running + timer.set("winResize", function(){ + timer.clear("winResize"); + timer.clear("winResizeRepeater"); + var dims = elDims( $N ); + // only trigger resizeAll() if container has changed size + if (dims.innerWidth !== sC.innerWidth || dims.innerHeight !== sC.innerHeight) + resizeAll(); + }, delay); + // ALSO set fixed-delay timer, if not already running + if (!timer.data["winResizeRepeater"]) setWindowResizeRepeater(); + } + +, setWindowResizeRepeater = function () { + var delay = Number(options.resizeWithWindowMaxDelay); + if (delay > 0) + timer.set("winResizeRepeater", function(){ setWindowResizeRepeater(); resizeAll(); }, delay); + } + +, unload = function () { + var o = options; + + _runCallbacks("onunload_start"); + + // trigger plugin callabacks for this layout (eg: stateManagement) + runPluginCallbacks( Instance, $.layout.onUnload ); + + _runCallbacks("onunload_end"); + } + + /** + * Validate and initialize container CSS and events + * + * @see _create() + */ +, _initContainer = function () { + var + N = $N[0] + , tag = sC.tagName = N.tagName + , id = sC.id = N.id + , cls = sC.className = N.className + , o = options + , name = o.name + , fullPage= (tag === "BODY") + , props = "overflow,position,margin,padding,border" + , css = "layoutCSS" + , CSS = {} + , hid = "hidden" // used A LOT! + // see if this container is a 'pane' inside an outer-layout + , parent = $N.data("parentLayout") // parent-layout Instance + , pane = $N.data("layoutEdge") // pane-name in parent-layout + , isChild = parent && pane + ; + // sC -> state.container + sC.selector = $N.selector.split(".slice")[0]; + sC.ref = (o.name ? o.name +' layout / ' : '') + tag + (id ? "#"+id : cls ? 
'.['+cls+']' : ''); // used in messages + + $N .data({ + layout: Instance + , layoutContainer: sID // FLAG to indicate this is a layout-container - contains unique internal ID + }) + .addClass(o.containerClass) + ; + var layoutMethods = { + destroy: '' + , initPanes: '' + , resizeAll: 'resizeAll' + , resize: 'resizeAll' + }; + // loop hash and bind all methods - include layoutID namespacing + for (name in layoutMethods) { + $N.bind("layout"+ name.toLowerCase() +"."+ sID, Instance[ layoutMethods[name] || name ]); + } + + // if this container is another layout's 'pane', then set child/parent pointers + if (isChild) { + // update parent flag + Instance.hasParentLayout = true; + // set pointers to THIS child-layout (Instance) in parent-layout + // NOTE: parent.PANE.child is an ALIAS to parent.children.PANE + parent[pane].child = parent.children[pane] = $N.data("layout"); + } + + // SAVE original container CSS for use in destroy() + if (!$N.data(css)) { + // handle props like overflow different for BODY & HTML - has 'system default' values + if (fullPage) { + CSS = $.extend( elCSS($N, props), { + height: $N.css("height") + , overflow: $N.css("overflow") + , overflowX: $N.css("overflowX") + , overflowY: $N.css("overflowY") + }); + // ALSO SAVE CSS + var $H = $("html"); + $H.data(css, { + height: "auto" // FF would return a fixed px-size! + , overflow: $H.css("overflow") + , overflowX: $H.css("overflowX") + , overflowY: $H.css("overflowY") + }); + } + else // handle props normally for non-body elements + CSS = elCSS($N, props+",top,bottom,left,right,width,height,overflow,overflowX,overflowY"); + + $N.data(css, CSS); + } + + try { // format html/body if this is a full page layout + if (fullPage) { + $("html").css({ + height: "100%" + , overflow: hid + , overflowX: hid + , overflowY: hid + }); + $("body").css({ + position: "relative" + , height: "100%" + , overflow: hid + , overflowX: hid + , overflowY: hid + , margin: 0 + , padding: 0 // TODO: test whether body-padding could be handled? + , border: "none" // a body-border creates problems because it cannot be measured! + }); + + // set current layout-container dimensions + $.extend(sC, elDims( $N )); + } + else { // set required CSS for overflow and position + // ENSURE container will not 'scroll' + CSS = { overflow: hid, overflowX: hid, overflowY: hid } + var + p = $N.css("position") + , h = $N.css("height") + ; + // if this is a NESTED layout, then container/outer-pane ALREADY has position and height + if (!isChild) { + if (!p || !p.match(/fixed|absolute|relative/)) + CSS.position = "relative"; // container MUST have a 'position' + /* + if (!h || h=="auto") + CSS.height = "100%"; // container MUST have a 'height' + */ + } + $N.css( CSS ); + + // set current layout-container dimensions + if ( $N.is(":visible") ) { + $.extend(sC, elDims( $N )); + if (sC.innerHeight < 1) + _log( o.errors.noContainerHeight.replace(/CONTAINER/, sC.ref) ); + } + } + } catch (ex) {} + } + + /** + * Bind layout hotkeys - if options enabled + * + * @see _create() and addPane() + * @param {string=} [panes=""] The edge(s) to process + */ +, initHotkeys = function (panes) { + panes = panes ? 
panes.split(",") : _c.borderPanes; + // bind keyDown to capture hotkeys, if option enabled for ANY pane + $.each(panes, function (i, pane) { + var o = options[pane]; + if (o.enableCursorHotkey || o.customHotkey) { + $(document).bind("keydown."+ sID, keyDown); // only need to bind this ONCE + return false; // BREAK - binding was done + } + }); + } + + /** + * Build final OPTIONS data + * + * @see _create() + */ +, initOptions = function () { + var data, d, pane, key, val, i, c, o; + + // reprocess user's layout-options to have correct options sub-key structure + opts = $.layout.transformData( opts ); // panes = default subkey + + // auto-rename old options for backward compatibility + opts = $.layout.backwardCompatibility.renameAllOptions( opts ); + + // if user-options has 'panes' key (pane-defaults), clean it... + if (!$.isEmptyObject(opts.panes)) { + // REMOVE any pane-defaults that MUST be set per-pane + data = $.layout.optionsMap.noDefault; + for (i=0, c=data.length; i 0) { + z.pane_normal = zo; + z.content_mask = max(zo+1, z.content_mask); // MIN = +1 + z.resizer_normal = max(zo+2, z.resizer_normal); // MIN = +2 + } + + // DELETE 'panes' key now that we are done - values were copied to EACH pane + delete options.panes; + + + function createFxOptions ( pane ) { + var o = options[pane] + , d = options.panes; + // ensure fxSettings key to avoid errors + if (!o.fxSettings) o.fxSettings = {}; + if (!d.fxSettings) d.fxSettings = {}; + + $.each(["_open","_close","_size"], function (i,n) { + var + sName = "fxName"+ n + , sSpeed = "fxSpeed"+ n + , sSettings = "fxSettings"+ n + // recalculate fxName according to specificity rules + , fxName = o[sName] = + o[sName] // options.west.fxName_open + || d[sName] // options.panes.fxName_open + || o.fxName // options.west.fxName + || d.fxName // options.panes.fxName + || "none" // MEANS $.layout.defaults.panes.fxName == "" || false || null || 0 + ; + // validate fxName to ensure is valid effect - MUST have effect-config data in options.effects + if (fxName === "none" || !$.effects || !$.effects[fxName] || !options.effects[fxName]) + fxName = o[sName] = "none"; // effect not loaded OR unrecognized fxName + + // set vars for effects subkeys to simplify logic + var fx = options.effects[fxName] || {} // effects.slide + , fx_all = fx.all || null // effects.slide.all + , fx_pane = fx[pane] || null // effects.slide.west + ; + // create fxSpeed[_open|_close|_size] + o[sSpeed] = + o[sSpeed] // options.west.fxSpeed_open + || d[sSpeed] // options.west.fxSpeed_open + || o.fxSpeed // options.west.fxSpeed + || d.fxSpeed // options.panes.fxSpeed + || null // DEFAULT - let fxSetting.duration control speed + ; + // create fxSettings[_open|_close|_size] + o[sSettings] = $.extend( + true + , {} + , fx_all // effects.slide.all + , fx_pane // effects.slide.west + , d.fxSettings // options.panes.fxSettings + , o.fxSettings // options.west.fxSettings + , d[sSettings] // options.panes.fxSettings_open + , o[sSettings] // options.west.fxSettings_open + ); + }); + + // DONE creating action-specific-settings for this pane, + // so DELETE generic options - are no longer meaningful + delete o.fxName; + delete o.fxSpeed; + delete o.fxSettings; + } + } + + /** + * Initialize module objects, styling, size and position for all panes + * + * @see _initElements() + * @param {string} pane The pane to process + */ +, getPane = function (pane) { + var sel = options[pane].paneSelector + if (sel.substr(0,1)==="#") // ID selector + // NOTE: elements selected 'by ID' DO NOT have to be 'children' 
+ return $N.find(sel).eq(0); + else { // class or other selector + var $P = $N.children(sel).eq(0); + // look for the pane nested inside a 'form' element + return $P.length ? $P : $N.children("form:first").children(sel).eq(0); + } + } + +, initPanes = function (evt) { + // stopPropagation if called by trigger("layoutinitpanes") - use evtPane utility + evtPane(evt); + + // NOTE: do north & south FIRST so we can measure their height - do center LAST + $.each(_c.allPanes, function (idx, pane) { + addPane( pane, true ); + }); + + // init the pane-handles NOW in case we have to hide or close the pane below + initHandles(); + + // now that all panes have been initialized and initially-sized, + // make sure there is really enough space available for each pane + $.each(_c.borderPanes, function (i, pane) { + if ($Ps[pane] && state[pane].isVisible) { // pane is OPEN + setSizeLimits(pane); + makePaneFit(pane); // pane may be Closed, Hidden or Resized by makePaneFit() + } + }); + // size center-pane AGAIN in case we 'closed' a border-pane in loop above + sizeMidPanes("center"); + + // Chrome/Webkit sometimes fires callbacks BEFORE it completes resizing! + // Before RC30.3, there was a 10ms delay here, but that caused layout + // to load asynchrously, which is BAD, so try skipping delay for now + + // process pane contents and callbacks, and init/resize child-layout if exists + $.each(_c.allPanes, function (i, pane) { + var o = options[pane]; + if ($Ps[pane]) { + if (state[pane].isVisible) { // pane is OPEN + sizeContent(pane); + // trigger pane.onResize if triggerEventsOnLoad = true + if (o.triggerEventsOnLoad) + _runCallbacks("onresize_end", pane); + else // automatic if onresize called, otherwise call it specifically + // resize child - IF inner-layout already exists (created before this layout) + resizeChildLayout(pane); + } + // init childLayout - even if pane is not visible + if (o.initChildLayout && o.childOptions) + createChildLayout(pane); + } + }); + } + + /** + * Add a pane to the layout - subroutine of initPanes() + * + * @see initPanes() + * @param {string} pane The pane to process + * @param {boolean=} [force=false] Size content after init + */ +, addPane = function (pane, force) { + if (!force && !isInitialized()) return; + var + o = options[pane] + , s = state[pane] + , c = _c[pane] + , fx = s.fx + , dir = c.dir + , spacing = o.spacing_open || 0 + , isCenter = (pane === "center") + , CSS = {} + , $P = $Ps[pane] + , size, minSize, maxSize + ; + // if pane-pointer already exists, remove the old one first + if ($P) + removePane( pane, false, true, false ); + else + $Cs[pane] = false; // init + + $P = $Ps[pane] = getPane(pane); + if (!$P.length) { + $Ps[pane] = false; // logic + return; + } + + // SAVE original Pane CSS + if (!$P.data("layoutCSS")) { + var props = "position,top,left,bottom,right,width,height,overflow,zIndex,display,backgroundColor,padding,margin,border"; + $P.data("layoutCSS", elCSS($P, props)); + } + + // create alias for pane data in Instance - initHandles will add more + Instance[pane] = { name: pane, pane: $Ps[pane], content: $Cs[pane], options: options[pane], state: state[pane], child: children[pane] }; + + // add classes, attributes & events + $P .data({ + parentLayout: Instance // pointer to Layout Instance + , layoutPane: Instance[pane] // NEW pointer to pane-alias-object + , layoutEdge: pane + , layoutRole: "pane" + }) + .css(c.cssReq).css("zIndex", options.zIndexes.pane_normal) + .css(o.applyDemoStyles ? 
c.cssDemo : {}) // demo styles + .addClass( o.paneClass +" "+ o.paneClass+"-"+pane ) // default = "ui-layout-pane ui-layout-pane-west" - may be a dupe of 'paneSelector' + .bind("mouseenter."+ sID, addHover ) + .bind("mouseleave."+ sID, removeHover ) + ; + var paneMethods = { + hide: '' + , show: '' + , toggle: '' + , close: '' + , open: '' + , slideOpen: '' + , slideClose: '' + , slideToggle: '' + , size: 'sizePane' + , sizePane: 'sizePane' + , sizeContent: '' + , sizeHandles: '' + , enableClosable: '' + , disableClosable: '' + , enableSlideable: '' + , disableSlideable: '' + , enableResizable: '' + , disableResizable: '' + , swapPanes: 'swapPanes' + , swap: 'swapPanes' + , move: 'swapPanes' + , removePane: 'removePane' + , remove: 'removePane' + , createChildLayout: '' + , resizeChildLayout: '' + , resizeAll: 'resizeAll' + , resizeLayout: 'resizeAll' + } + , name; + // loop hash and bind all methods - include layoutID namespacing + for (name in paneMethods) { + $P.bind("layoutpane"+ name.toLowerCase() +"."+ sID, Instance[ paneMethods[name] || name ]); + } + + // see if this pane has a 'scrolling-content element' + initContent(pane, false); // false = do NOT sizeContent() - called later + + if (!isCenter) { + // call _parseSize AFTER applying pane classes & styles - but before making visible (if hidden) + // if o.size is auto or not valid, then MEASURE the pane and use that as its 'size' + size = s.size = _parseSize(pane, o.size); + minSize = _parseSize(pane,o.minSize) || 1; + maxSize = _parseSize(pane,o.maxSize) || 100000; + if (size > 0) size = max(min(size, maxSize), minSize); + + // state for border-panes + s.isClosed = false; // true = pane is closed + s.isSliding = false; // true = pane is currently open by 'sliding' over adjacent panes + s.isResizing= false; // true = pane is in process of being resized + s.isHidden = false; // true = pane is hidden - no spacing, resizer or toggler is visible! 
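+ 		// Aside (illustrative sketch, not from the upstream plugin source): the paneMethods loop bound earlier in
+ 		// this function attaches namespaced "layoutpane*" events to the pane element, so pane methods can also be
+ 		// invoked by triggering those events instead of calling the layout Instance directly - assuming the default
+ 		// ".ui-layout-west" paneSelector and an already-initialized layout:
+ 		//     $(".ui-layout-west").trigger("layoutpaneclose");   // drives the bound close handler for the west pane
+ 		//     $(".ui-layout-west").trigger("layoutpaneopen");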
+ + // array for 'pin buttons' whose classNames are auto-updated on pane-open/-close + if (!s.pins) s.pins = []; + } + // states common to ALL panes + s.tagName = $P[0].tagName; + s.edge = pane; // useful if pane is (or about to be) 'swapped' - easy find out where it is (or is going) + s.noRoom = false; // true = pane 'automatically' hidden due to insufficient room - will unhide automatically + s.isVisible = true; // false = pane is invisible - closed OR hidden - simplify logic + + // set css-position to account for container borders & padding + switch (pane) { + case "north": CSS.top = sC.insetTop; + CSS.left = sC.insetLeft; + CSS.right = sC.insetRight; + break; + case "south": CSS.bottom = sC.insetBottom; + CSS.left = sC.insetLeft; + CSS.right = sC.insetRight; + break; + case "west": CSS.left = sC.insetLeft; // top, bottom & height set by sizeMidPanes() + break; + case "east": CSS.right = sC.insetRight; // ditto + break; + case "center": // top, left, width & height set by sizeMidPanes() + } + + if (dir === "horz") // north or south pane + CSS.height = cssH($P, size); + else if (dir === "vert") // east or west pane + CSS.width = cssW($P, size); + //else if (isCenter) {} + + $P.css(CSS); // apply size -- top, bottom & height will be set by sizeMidPanes + if (dir != "horz") sizeMidPanes(pane, true); // true = skipCallback + + // close or hide the pane if specified in settings + if (o.initClosed && o.closable && !o.initHidden) + close(pane, true, true); // true, true = force, noAnimation + else if (o.initHidden || o.initClosed) + hide(pane); // will be completely invisible - no resizer or spacing + else if (!s.noRoom) + // make the pane visible - in case was initially hidden + $P.css("display","block"); + // ELSE setAsOpen() - called later by initHandles() + + // RESET visibility now - pane will appear IF display:block + $P.css("visibility","visible"); + + // check option for auto-handling of pop-ups & drop-downs + if (o.showOverflowOnHover) + $P.hover( allowOverflow, resetOverflow ); + + // if manually adding a pane AFTER layout initialization, then... + if (state.initialized) { + initHandles( pane ); + initHotkeys( pane ); + resizeAll(); // will sizeContent if pane is visible + if (s.isVisible) { // pane is OPEN + if (o.triggerEventsOnLoad) + _runCallbacks("onresize_end", pane); + else // automatic if onresize called, otherwise call it specifically + // resize child - IF inner-layout already exists (created before this layout) + resizeChildLayout(pane); // a previously existing childLayout + } + if (o.initChildLayout && o.childOptions) + createChildLayout(pane); + } + } + + /** + * Initialize module objects, styling, size and position for all resize bars and toggler buttons + * + * @see _create() + * @param {string=} [panes=""] The edge(s) to process + */ +, initHandles = function (panes) { + panes = panes ? panes.split(",") : _c.borderPanes; + + // create toggler DIVs for each pane, and set object pointers for them, eg: $R.north = north toggler DIV + $.each(panes, function (i, pane) { + var $P = $Ps[pane]; + $Rs[pane] = false; // INIT + $Ts[pane] = false; + if (!$P) return; // pane does not exist - skip + + var + o = options[pane] + , s = state[pane] + , c = _c[pane] + , paneId = o.paneSelector.substr(0,1) === "#" ? o.paneSelector.substr(1) : "" + , rClass = o.resizerClass + , tClass = o.togglerClass + , side = c.side.toLowerCase() + , spacing = (s.isVisible ? o.spacing_open : o.spacing_closed) + , _pane = "-"+ pane // used for classNames + , _state = (s.isVisible ? 
"-open" : "-closed") // used for classNames + , I = Instance[pane] + // INIT RESIZER BAR + , $R = I.resizer = $Rs[pane] = $("
                ") + // INIT TOGGLER BUTTON + , $T = I.toggler = (o.closable ? $Ts[pane] = $("
                ") : false) + ; + + //if (s.isVisible && o.resizable) ... handled by initResizable + if (!s.isVisible && o.slidable) + $R.attr("title", o.tips.Slide).css("cursor", o.sliderCursor); + + $R // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "paneLeft-resizer" + .attr("id", paneId ? paneId +"-resizer" : "" ) + .data({ + parentLayout: Instance + , layoutPane: Instance[pane] // NEW pointer to pane-alias-object + , layoutEdge: pane + , layoutRole: "resizer" + }) + .css(_c.resizers.cssReq).css("zIndex", options.zIndexes.resizer_normal) + .css(o.applyDemoStyles ? _c.resizers.cssDemo : {}) // add demo styles + .addClass(rClass +" "+ rClass+_pane) + .hover(addHover, removeHover) // ALWAYS add hover-classes, even if resizing is not enabled - handle with CSS instead + .hover(onResizerEnter, onResizerLeave) // ALWAYS NEED resizer.mouseleave to balance toggler.mouseenter + .appendTo($N) // append DIV to container + ; + + if ($T) { + $T // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "#paneLeft-toggler" + .attr("id", paneId ? paneId +"-toggler" : "" ) + .data({ + parentLayout: Instance + , layoutPane: Instance[pane] // NEW pointer to pane-alias-object + , layoutEdge: pane + , layoutRole: "toggler" + }) + .css(_c.togglers.cssReq) // add base/required styles + .css(o.applyDemoStyles ? _c.togglers.cssDemo : {}) // add demo styles + .addClass(tClass +" "+ tClass+_pane) + .hover(addHover, removeHover) // ALWAYS add hover-classes, even if toggling is not enabled - handle with CSS instead + .bind("mouseenter", onResizerEnter) // NEED toggler.mouseenter because mouseenter MAY NOT fire on resizer + .appendTo($R) // append SPAN to resizer DIV + ; + // ADD INNER-SPANS TO TOGGLER + if (o.togglerContent_open) // ui-layout-open + $(""+ o.togglerContent_open +"") + .data({ + layoutEdge: pane + , layoutRole: "togglerContent" + }) + .data("layoutRole", "togglerContent") + .data("layoutEdge", pane) + .addClass("content content-open") + .css("display","none") + .appendTo( $T ) + //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-open instead! + ; + if (o.togglerContent_closed) // ui-layout-closed + $(""+ o.togglerContent_closed +"") + .data({ + layoutEdge: pane + , layoutRole: "togglerContent" + }) + .addClass("content content-closed") + .css("display","none") + .appendTo( $T ) + //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-closed instead! + ; + // ADD TOGGLER.click/.hover + enableClosable(pane); + } + + // add Draggable events + initResizable(pane); + + // ADD CLASSNAMES & SLIDE-BINDINGS - eg: class="resizer resizer-west resizer-open" + if (s.isVisible) + setAsOpen(pane); // onOpen will be called, but NOT onResize + else { + setAsClosed(pane); // onClose will be called + bindStartSlidingEvent(pane, true); // will enable events IF option is set + } + + }); + + // SET ALL HANDLE DIMENSIONS + sizeHandles(); + } + + + /** + * Initialize scrolling ui-layout-content div - if exists + * + * @see initPane() - or externally after an Ajax injection + * @param {string} [pane] The pane to process + * @param {boolean=} [resize=true] Size content after init + */ +, initContent = function (pane, resize) { + if (!isInitialized()) return; + var + o = options[pane] + , sel = o.contentSelector + , I = Instance[pane] + , $P = $Ps[pane] + , $C + ; + if (sel) $C = I.content = $Cs[pane] = (o.findNestedContent) + ? 
$P.find(sel).eq(0) // match 1-element only + : $P.children(sel).eq(0) + ; + if ($C && $C.length) { + $C.data("layoutRole", "content"); + // SAVE original Pane CSS + if (!$C.data("layoutCSS")) + $C.data("layoutCSS", elCSS($C, "height")); + $C.css( _c.content.cssReq ); + if (o.applyDemoStyles) { + $C.css( _c.content.cssDemo ); // add padding & overflow: auto to content-div + $P.css( _c.content.cssDemoPane ); // REMOVE padding/scrolling from pane + } + state[pane].content = {}; // init content state + if (resize !== false) sizeContent(pane); + // sizeContent() is called AFTER init of all elements + } + else + I.content = $Cs[pane] = false; + } + + + /** + * Add resize-bars to all panes that specify it in options + * -dependancy: $.fn.resizable - will skip if not found + * + * @see _create() + * @param {string=} [panes=""] The edge(s) to process + */ +, initResizable = function (panes) { + var draggingAvailable = $.layout.plugins.draggable + , side // set in start() + ; + panes = panes ? panes.split(",") : _c.borderPanes; + + $.each(panes, function (idx, pane) { + var o = options[pane]; + if (!draggingAvailable || !$Ps[pane] || !o.resizable) { + o.resizable = false; + return true; // skip to next + } + + var s = state[pane] + , z = options.zIndexes + , c = _c[pane] + , side = c.dir=="horz" ? "top" : "left" + , opEdge = _c.oppositeEdge[pane] + , masks = pane +",center,"+ opEdge + (c.dir=="horz" ? ",west,east" : "") + , $P = $Ps[pane] + , $R = $Rs[pane] + , base = o.resizerClass + , lastPos = 0 // used when live-resizing + , r, live // set in start because may change + // 'drag' classes are applied to the ORIGINAL resizer-bar while dragging is in process + , resizerClass = base+"-drag" // resizer-drag + , resizerPaneClass = base+"-"+pane+"-drag" // resizer-north-drag + // 'helper' class is applied to the CLONED resizer-bar while it is being dragged + , helperClass = base+"-dragging" // resizer-dragging + , helperPaneClass = base+"-"+pane+"-dragging" // resizer-north-dragging + , helperLimitClass = base+"-dragging-limit" // resizer-drag + , helperPaneLimitClass = base+"-"+pane+"-dragging-limit" // resizer-north-drag + , helperClassesSet = false // logic var + ; + + if (!s.isClosed) + $R.attr("title", o.tips.Resize) + .css("cursor", o.resizerCursor); // n-resize, s-resize, etc + + $R.draggable({ + containment: $N[0] // limit resizing to layout container + , axis: (c.dir=="horz" ? "y" : "x") // limit resizing to horz or vert axis + , delay: 0 + , distance: 1 + , grid: o.resizingGrid + // basic format for helper - style it using class: .ui-draggable-dragging + , helper: "clone" + , opacity: o.resizerDragOpacity + , addClasses: false // avoid ui-state-disabled class when disabled + //, iframeFix: o.draggableIframeFix // TODO: consider using when bug is fixed + , zIndex: z.resizer_drag + + , start: function (e, ui) { + // REFRESH options & state pointers in case we used swapPanes + o = options[pane]; + s = state[pane]; + // re-read options + live = o.livePaneResizing; + + // ondrag_start callback - will CANCEL hide if returns false + // TODO: dragging CANNOT be cancelled like this, so see if there is a way? 
+ if (false === _runCallbacks("ondrag_start", pane)) return false; + + s.isResizing = true; // prevent pane from closing while resizing + timer.clear(pane+"_closeSlider"); // just in case already triggered + + // SET RESIZER LIMITS - used in drag() + setSizeLimits(pane); // update pane/resizer state + r = s.resizerPosition; + lastPos = ui.position[ side ] + + $R.addClass( resizerClass +" "+ resizerPaneClass ); // add drag classes + helperClassesSet = false; // reset logic var - see drag() + + // DISABLE TEXT SELECTION (probably already done by resizer.mouseOver) + $('body').disableSelection(); + + // MASK PANES CONTAINING IFRAMES, APPLETS OR OTHER TROUBLESOME ELEMENTS + showMasks( masks ); + } + + , drag: function (e, ui) { + if (!helperClassesSet) { // can only add classes after clone has been added to the DOM + //$(".ui-draggable-dragging") + ui.helper + .addClass( helperClass +" "+ helperPaneClass ) // add helper classes + .css({ right: "auto", bottom: "auto" }) // fix dir="rtl" issue + .children().css("visibility","hidden") // hide toggler inside dragged resizer-bar + ; + helperClassesSet = true; + // draggable bug!? RE-SET zIndex to prevent E/W resize-bar showing through N/S pane! + if (s.isSliding) $Ps[pane].css("zIndex", z.pane_sliding); + } + // CONTAIN RESIZER-BAR TO RESIZING LIMITS + var limit = 0; + if (ui.position[side] < r.min) { + ui.position[side] = r.min; + limit = -1; + } + else if (ui.position[side] > r.max) { + ui.position[side] = r.max; + limit = 1; + } + // ADD/REMOVE dragging-limit CLASS + if (limit) { + ui.helper.addClass( helperLimitClass +" "+ helperPaneLimitClass ); // at dragging-limit + window.defaultStatus = (limit>0 && pane.match(/(north|west)/)) || (limit<0 && pane.match(/(south|east)/)) ? o.tips.maxSizeWarning : o.tips.minSizeWarning; + } + else { + ui.helper.removeClass( helperLimitClass +" "+ helperPaneLimitClass ); // not at dragging-limit + window.defaultStatus = ""; + } + // DYNAMICALLY RESIZE PANES IF OPTION ENABLED + // won't trigger unless resizer has actually moved! 
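+ 				// Aside (illustrative sketch, not from the upstream plugin source): the branch below only runs when
+ 				// live-resizing is enabled - e.g. a layout created with the flat option keys handled by
+ 				// $.layout.transformData, assuming a "#container" element:
+ 				//     $("#container").layout({ west__livePaneResizing: true, west__liveResizingTolerance: 5 });
+ 				// would then resize the west pane during the drag, once the bar has moved at least 5px since the last resize.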
+ if (live && Math.abs(ui.position[side] - lastPos) >= o.liveResizingTolerance) { + lastPos = ui.position[side]; + resizePanes(e, ui, pane) + } + } + + , stop: function (e, ui) { + $('body').enableSelection(); // RE-ENABLE TEXT SELECTION + window.defaultStatus = ""; // clear 'resizing limit' message from statusbar + $R.removeClass( resizerClass +" "+ resizerPaneClass ); // remove drag classes from Resizer + s.isResizing = false; + resizePanes(e, ui, pane, true, masks); // true = resizingDone + } + + }); + }); + + /** + * resizePanes + * + * Sub-routine called from stop() - and drag() if livePaneResizing + * + * @param {!Object} evt + * @param {!Object} ui + * @param {string} pane + * @param {boolean=} [resizingDone=false] + */ + var resizePanes = function (evt, ui, pane, resizingDone, masks) { + var dragPos = ui.position + , c = _c[pane] + , o = options[pane] + , s = state[pane] + , resizerPos + ; + switch (pane) { + case "north": resizerPos = dragPos.top; break; + case "west": resizerPos = dragPos.left; break; + case "south": resizerPos = sC.offsetHeight - dragPos.top - o.spacing_open; break; + case "east": resizerPos = sC.offsetWidth - dragPos.left - o.spacing_open; break; + }; + // remove container margin from resizer position to get the pane size + var newSize = resizerPos - sC["inset"+ c.side]; + + // Disable OR Resize Mask(s) created in drag.start + if (!resizingDone) { + // ensure we meet liveResizingTolerance criteria + if (Math.abs(newSize - s.size) < o.liveResizingTolerance) + return; // SKIP resize this time + // resize the pane + manualSizePane(pane, newSize, false, true); // true = noAnimation + sizeMasks(); // resize all visible masks + } + else { // resizingDone + // ondrag_end callback + if (false !== _runCallbacks("ondrag_end", pane)) + manualSizePane(pane, newSize, false, true); // true = noAnimation + hideMasks(); // hide all masks, which include panes with 'content/iframe-masks' + if (s.isSliding && masks) // RE-SHOW only 'object-masks' so objects won't show through sliding pane + showMasks( masks, true ); // true = onlyForObjects + } + }; + } + + /** + * sizeMask + * + * Needed to overlay a DIV over an IFRAME-pane because mask CANNOT be *inside* the pane + * Called when mask created, and during livePaneResizing + */ +, sizeMask = function () { + var $M = $(this) + , pane = $M.data("layoutMask") // eg: "west" + , s = state[pane] + ; + // only masks over an IFRAME-pane need manual resizing + if (s.tagName == "IFRAME" && s.isVisible) // no need to mask closed/hidden panes + $M.css({ + top: s.offsetTop + , left: s.offsetLeft + , width: s.outerWidth + , height: s.outerHeight + }); + /* ALT Method... + var $P = $Ps[pane]; + $M.css( $P.position() ).css({ width: $P[0].offsetWidth, height: $P[0].offsetHeight }); + */ + } +, sizeMasks = function () { + $Ms.each( sizeMask ); // resize all 'visible' masks + } + +, showMasks = function (panes, onlyForObjects) { + var a = panes ? panes.split(",") : $.layout.config.allPanes + , z = options.zIndexes + , o, s; + $.each(a, function(i,p){ + s = state[p]; + o = options[p]; + if (s.isVisible && ( (!onlyForObjects && o.maskContents) || o.maskObjects )) { + getMasks(p).each(function(){ + sizeMask.call(this); + this.style.zIndex = s.isSliding ? 
z.pane_sliding+1 : z.pane_normal+1 + this.style.display = "block"; + }); + } + }); + } + +, hideMasks = function () { + // ensure no pane is resizing - could be a timing issue + var skip; + $.each( $.layout.config.borderPanes, function(i,p){ + if (state[p].isResizing) { + skip = true; + return false; // BREAK + } + }); + if (!skip) + $Ms.hide(); // hide ALL masks + } + +, getMasks = function (pane) { + var $Masks = $([]) + , $M, i = 0, c = $Ms.length + ; + for (; i CSS + if (sC.tagName === "BODY" && ($N = $("html")).data(css)) // RESET CSS + $N.css( $N.data(css) ).removeData(css); + + // trigger plugins for this layout, if there are any + runPluginCallbacks( Instance, $.layout.onDestroy ); + + // trigger state-management and onunload callback + unload(); + + // clear the Instance of everything except for container & options (so could recreate) + // RE-CREATE: myLayout = myLayout.container.layout( myLayout.options ); + for (n in Instance) + if (!n.match(/^(container|options)$/)) delete Instance[ n ]; + // add a 'destroyed' flag to make it easy to check + Instance.destroyed = true; + + // if this is a child layout, CLEAR the child-pointer in the parent + /* for now the pointer REMAINS, but with only container, options and destroyed keys + if (parentPane) { + var layout = parentPane.pane.data("parentLayout"); + parentPane.child = layout.children[ parentPane.name ] = null; + } + */ + + return Instance; // for coding convenience + } + + /** + * Remove a pane from the layout - subroutine of destroy() + * + * @see destroy() + * @param {string|Object} evt_or_pane The pane to process + * @param {boolean=} [remove=false] Remove the DOM element? + * @param {boolean=} [skipResize=false] Skip calling resizeAll()? + * @param {boolean=} [destroyChild=true] Destroy Child-layouts? If not passed, obeys options setting + */ +, removePane = function (evt_or_pane, remove, skipResize, destroyChild) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) + , $P = $Ps[pane] + , $C = $Cs[pane] + , $R = $Rs[pane] + , $T = $Ts[pane] + ; + // NOTE: elements can still exist even after remove() + // so check for missing data(), which is cleared by removed() + if ($P && $.isEmptyObject( $P.data() )) $P = false; + if ($C && $.isEmptyObject( $C.data() )) $C = false; + if ($R && $.isEmptyObject( $R.data() )) $R = false; + if ($T && $.isEmptyObject( $T.data() )) $T = false; + + if ($P) $P.stop(true, true); + + // check for a child layout + var o = options[pane] + , s = state[pane] + , d = "layout" + , css = "layoutCSS" + , child = children[pane] || ($P ? $P.data(d) : 0) || ($C ? $C.data(d) : 0) || null + , destroy = destroyChild !== undefined ? 
destroyChild : o.destroyChildLayout + ; + + // FIRST destroy the child-layout(s) + if (destroy && child && !child.destroyed) { + child.destroy(true); // tell child-layout to destroy ALL its child-layouts too + if (child.destroyed) // destroy was successful + child = null; // clear pointer for logic below + } + + if ($P && remove && !child) + $P.remove(); + else if ($P && $P[0]) { + // create list of ALL pane-classes that need to be removed + var root = o.paneClass // default="ui-layout-pane" + , pRoot = root +"-"+ pane // eg: "ui-layout-pane-west" + , _open = "-open" + , _sliding= "-sliding" + , _closed = "-closed" + , classes = [ root, root+_open, root+_closed, root+_sliding, // generic classes + pRoot, pRoot+_open, pRoot+_closed, pRoot+_sliding ] // pane-specific classes + ; + $.merge(classes, getHoverClasses($P, true)); // ADD hover-classes + // remove all Layout classes from pane-element + $P .removeClass( classes.join(" ") ) // remove ALL pane-classes + .removeData("parentLayout") + .removeData("layoutPane") + .removeData("layoutRole") + .removeData("layoutEdge") + .removeData("autoHidden") // in case set + .unbind("."+ sID) // remove ALL Layout events + // TODO: remove these extra unbind commands when jQuery is fixed + //.unbind("mouseenter"+ sID) + //.unbind("mouseleave"+ sID) + ; + // do NOT reset CSS if this pane/content is STILL the container of a nested layout! + // the nested layout will reset its 'container' CSS when/if it is destroyed + if ($C && $C.data(d)) { + // a content-div may not have a specific width, so give it one to contain the Layout + $C.width( $C.width() ); + child.resizeAll(); // now resize the Layout + } + else if ($C) + $C.css( $C.data(css) ).removeData(css).removeData("layoutRole"); + // remove pane AFTER content in case there was a nested layout + if (!$P.data(d)) + $P.css( $P.data(css) ).removeData(css); + } + + // REMOVE pane resizer and toggler elements + if ($T) $T.remove(); + if ($R) $R.remove(); + + // CLEAR all pointers and state data + Instance[pane] = $Ps[pane] = $Cs[pane] = $Rs[pane] = $Ts[pane] = children[pane] = false; + s = { removed: true }; + + if (!skipResize) + resizeAll(); + } + + +/* + * ########################### + * ACTION METHODS + * ########################### + */ + +, _hidePane = function (pane) { + var $P = $Ps[pane] + , o = options[pane] + , s = $P[0].style + ; + if (o.useOffscreenClose) { + if (!$P.data(_c.offscreenReset)) + $P.data(_c.offscreenReset, { left: s.left, right: s.right }); + $P.css( _c.offscreenCSS ); + } + else + $P.hide().removeData(_c.offscreenReset); + } + +, _showPane = function (pane) { + var $P = $Ps[pane] + , o = options[pane] + , off = _c.offscreenCSS + , old = $P.data(_c.offscreenReset) + , s = $P[0].style + ; + $P .show() // ALWAYS show, just in case + .removeData(_c.offscreenReset); + if (o.useOffscreenClose && old) { + if (s.left == off.left) + s.left = old.left; + if (s.right == off.right) + s.right = old.right; + } + } + + + /** + * Completely 'hides' a pane, including its spacing - as if it does not exist + * The pane is not actually 'removed' from the source, so can use 'show' to un-hide it + * + * @param {string|Object} evt_or_pane The pane being hidden, ie: north, south, east, or west + * @param {boolean=} [noAnimation=false] + */ +, hide = function (evt_or_pane, noAnimation) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) + , o = options[pane] + , s = state[pane] + , $P = $Ps[pane] + , $R = $Rs[pane] + ; + if (!$P || s.isHidden) return; // pane does not exist OR is 
already hidden + + // onhide_start callback - will CANCEL hide if returns false + if (state.initialized && false === _runCallbacks("onhide_start", pane)) return; + + s.isSliding = false; // just in case + + // now hide the elements + if ($R) $R.hide(); // hide resizer-bar + if (!state.initialized || s.isClosed) { + s.isClosed = true; // to trigger open-animation on show() + s.isHidden = true; + s.isVisible = false; + if (!state.initialized) + _hidePane(pane); // no animation when loading page + sizeMidPanes(_c[pane].dir === "horz" ? "" : "center"); + if (state.initialized || o.triggerEventsOnLoad) + _runCallbacks("onhide_end", pane); + } + else { + s.isHiding = true; // used by onclose + close(pane, false, noAnimation); // adjust all panes to fit + } + } + + /** + * Show a hidden pane - show as 'closed' by default unless openPane = true + * + * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west + * @param {boolean=} [openPane=false] + * @param {boolean=} [noAnimation=false] + * @param {boolean=} [noAlert=false] + */ +, show = function (evt_or_pane, openPane, noAnimation, noAlert) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) + , o = options[pane] + , s = state[pane] + , $P = $Ps[pane] + , $R = $Rs[pane] + ; + if (!$P || !s.isHidden) return; // pane does not exist OR is not hidden + + // onshow_start callback - will CANCEL show if returns false + if (false === _runCallbacks("onshow_start", pane)) return; + + s.isSliding = false; // just in case + s.isShowing = true; // used by onopen/onclose + //s.isHidden = false; - will be set by open/close - if not cancelled + + // now show the elements + //if ($R) $R.show(); - will be shown by open/close + if (openPane === false) + close(pane, true); // true = force + else + open(pane, false, noAnimation, noAlert); // adjust all panes to fit + } + + + /** + * Toggles a pane open/closed by calling either open or close + * + * @param {string|Object} evt_or_pane The pane being toggled, ie: north, south, east, or west + * @param {boolean=} [slide=false] + */ +, toggle = function (evt_or_pane, slide) { + if (!isInitialized()) return; + var evt = evtObj(evt_or_pane) + , pane = evtPane.call(this, evt_or_pane) + , s = state[pane] + ; + if (evt) // called from to $R.dblclick OR triggerPaneEvent + evt.stopImmediatePropagation(); + if (s.isHidden) + show(pane); // will call 'open' after unhiding it + else if (s.isClosed) + open(pane, !!slide); + else + close(pane); + } + + + /** + * Utility method used during init or other auto-processes + * + * @param {string} pane The pane being closed + * @param {boolean=} [setHandles=false] + */ +, _closePane = function (pane, setHandles) { + var + $P = $Ps[pane] + , s = state[pane] + ; + _hidePane(pane); + s.isClosed = true; + s.isVisible = false; + // UNUSED: if (setHandles) setAsClosed(pane, true); // true = force + } + + /** + * Close the specified pane (animation optional), and resize all other panes as needed + * + * @param {string|Object} evt_or_pane The pane being closed, ie: north, south, east, or west + * @param {boolean=} [force=false] + * @param {boolean=} [noAnimation=false] + * @param {boolean=} [skipCallback=false] + */ +, close = function (evt_or_pane, force, noAnimation, skipCallback) { + var pane = evtPane.call(this, evt_or_pane); + // if pane has been initialized, but NOT the complete layout, close pane instantly + if (!state.initialized && $Ps[pane]) { + _closePane(pane); // INIT pane as closed + return; + } + if (!isInitialized()) 
return; + + var + $P = $Ps[pane] + , $R = $Rs[pane] + , $T = $Ts[pane] + , o = options[pane] + , s = state[pane] + , c = _c[pane] + , doFX, isShowing, isHiding, wasSliding; + + // QUEUE in case another action/animation is in progress + $N.queue(function( queueNext ){ + + if ( !$P + || (!o.closable && !s.isShowing && !s.isHiding) // invalid request // (!o.resizable && !o.closable) ??? + || (!force && s.isClosed && !s.isShowing) // already closed + ) return queueNext(); + + // onclose_start callback - will CANCEL hide if returns false + // SKIP if just 'showing' a hidden pane as 'closed' + var abort = !s.isShowing && false === _runCallbacks("onclose_start", pane); + + // transfer logic vars to temp vars + isShowing = s.isShowing; + isHiding = s.isHiding; + wasSliding = s.isSliding; + // now clear the logic vars (REQUIRED before aborting) + delete s.isShowing; + delete s.isHiding; + + if (abort) return queueNext(); + + doFX = !noAnimation && !s.isClosed && (o.fxName_close != "none"); + s.isMoving = true; + s.isClosed = true; + s.isVisible = false; + // update isHidden BEFORE sizing panes + if (isHiding) s.isHidden = true; + else if (isShowing) s.isHidden = false; + + if (s.isSliding) // pane is being closed, so UNBIND trigger events + bindStopSlidingEvents(pane, false); // will set isSliding=false + else // resize panes adjacent to this one + sizeMidPanes(_c[pane].dir === "horz" ? "" : "center", false); // false = NOT skipCallback + + // if this pane has a resizer bar, move it NOW - before animation + setAsClosed(pane); + + // CLOSE THE PANE + if (doFX) { // animate the close + // mask panes with objects + var masks = "center"+ (c.dir=="horz" ? ",west,east" : ""); + showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true + lockPaneForFX(pane, true); // need to set left/top so animation will work + $P.hide( o.fxName_close, o.fxSettings_close, o.fxSpeed_close, function () { + lockPaneForFX(pane, false); // undo + if (s.isClosed) close_2(); + queueNext(); + }); + } + else { // hide the pane without animation + _hidePane(pane); + close_2(); + queueNext(); + }; + }); + + // SUBROUTINE + function close_2 () { + s.isMoving = false; + bindStartSlidingEvent(pane, true); // will enable if o.slidable = true + + // if opposite-pane was autoClosed, see if it can be autoOpened now + var altPane = _c.oppositeEdge[pane]; + if (state[ altPane ].noRoom) { + setSizeLimits( altPane ); + makePaneFit( altPane ); + } + + // hide any masks shown while closing + hideMasks(); + + if (!skipCallback && (state.initialized || o.triggerEventsOnLoad)) { + // onclose callback - UNLESS just 'showing' a hidden pane as 'closed' + if (!isShowing) _runCallbacks("onclose_end", pane); + // onhide OR onshow callback + if (isShowing) _runCallbacks("onshow_end", pane); + if (isHiding) _runCallbacks("onhide_end", pane); + } + } + } + + /** + * @param {string} pane The pane just closed, ie: north, south, east, or west + */ +, setAsClosed = function (pane) { + var + $P = $Ps[pane] + , $R = $Rs[pane] + , $T = $Ts[pane] + , o = options[pane] + , s = state[pane] + , side = _c[pane].side.toLowerCase() + , inset = "inset"+ _c[pane].side + , rClass = o.resizerClass + , tClass = o.togglerClass + , _pane = "-"+ pane // used for classNames + , _open = "-open" + , _sliding= "-sliding" + , _closed = "-closed" + ; + $R + .css(side, sC[inset]) // move the resizer + .removeClass( rClass+_open +" "+ rClass+_pane+_open ) + .removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding ) + .addClass( rClass+_closed +" "+ rClass+_pane+_closed 
) + .unbind("dblclick."+ sID) + ; + // DISABLE 'resizing' when closed - do this BEFORE bindStartSlidingEvent? + if (o.resizable && $.layout.plugins.draggable) + $R + .draggable("disable") + .removeClass("ui-state-disabled") // do NOT apply disabled styling - not suitable here + .css("cursor", "default") + .attr("title","") + ; + + // if pane has a toggler button, adjust that too + if ($T) { + $T + .removeClass( tClass+_open +" "+ tClass+_pane+_open ) + .addClass( tClass+_closed +" "+ tClass+_pane+_closed ) + .attr("title", o.tips.Open) // may be blank + ; + // toggler-content - if exists + $T.children(".content-open").hide(); + $T.children(".content-closed").css("display","block"); + } + + // sync any 'pin buttons' + syncPinBtns(pane, false); + + if (state.initialized) { + // resize 'length' and position togglers for adjacent panes + sizeHandles(); + } + } + + /** + * Open the specified pane (animation optional), and resize all other panes as needed + * + * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west + * @param {boolean=} [slide=false] + * @param {boolean=} [noAnimation=false] + * @param {boolean=} [noAlert=false] + */ +, open = function (evt_or_pane, slide, noAnimation, noAlert) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) + , $P = $Ps[pane] + , $R = $Rs[pane] + , $T = $Ts[pane] + , o = options[pane] + , s = state[pane] + , c = _c[pane] + , doFX, isShowing + ; + // QUEUE in case another action/animation is in progress + $N.queue(function( queueNext ){ + + if ( !$P + || (!o.resizable && !o.closable && !s.isShowing) // invalid request + || (s.isVisible && !s.isSliding) // already open + ) return queueNext(); + + // pane can ALSO be unhidden by just calling show(), so handle this scenario + if (s.isHidden && !s.isShowing) { + queueNext(); // call before show() because it needs the queue free + show(pane, true); + return; + } + + if (o.autoResize && s.size != o.size) // resize pane to original size set in options + sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation + else + // make sure there is enough space available to open the pane + setSizeLimits(pane, slide); + + // onopen_start callback - will CANCEL open if returns false + var cbReturn = _runCallbacks("onopen_start", pane); + + if (cbReturn === "abort") + return queueNext(); + + // update pane-state again in case options were changed in onopen_start + if (cbReturn !== "NC") // NC = "No Callback" + setSizeLimits(pane, slide); + + if (s.minSize > s.maxSize) { // INSUFFICIENT ROOM FOR PANE TO OPEN! 
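+				// This branch aborts the open() and, unless suppressed, warns the user.
+				// The warning can be skipped per-call via the noAlert argument, e.g.
+				// (instance variable name is illustrative): myLayout.open("west", false, false, true);
+				// The message text itself comes from the pane's tips.noRoomToOpen option.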
+ syncPinBtns(pane, false); // make sure pin-buttons are reset + if (!noAlert && o.tips.noRoomToOpen) + alert(o.tips.noRoomToOpen); + return queueNext(); // ABORT + } + + if (slide) // START Sliding - will set isSliding=true + bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane + else if (s.isSliding) // PIN PANE (stop sliding) - open pane 'normally' instead + bindStopSlidingEvents(pane, false); // UNBIND trigger events - will set isSliding=false + else if (o.slidable) + bindStartSlidingEvent(pane, false); // UNBIND trigger events + + s.noRoom = false; // will be reset by makePaneFit if 'noRoom' + makePaneFit(pane); + + // transfer logic var to temp var + isShowing = s.isShowing; + // now clear the logic var + delete s.isShowing; + + doFX = !noAnimation && s.isClosed && (o.fxName_open != "none"); + s.isMoving = true; + s.isVisible = true; + s.isClosed = false; + // update isHidden BEFORE sizing panes - WHY??? Old? + if (isShowing) s.isHidden = false; + + if (doFX) { // ANIMATE + // mask panes with objects + var masks = "center"+ (c.dir=="horz" ? ",west,east" : ""); + if (s.isSliding) masks += ","+ _c.oppositeEdge[pane]; + showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true + lockPaneForFX(pane, true); // need to set left/top so animation will work + $P.show( o.fxName_open, o.fxSettings_open, o.fxSpeed_open, function() { + lockPaneForFX(pane, false); // undo + if (s.isVisible) open_2(); // continue + queueNext(); + }); + } + else { // no animation + _showPane(pane);// just show pane and... + open_2(); // continue + queueNext(); + }; + }); + + // SUBROUTINE + function open_2 () { + s.isMoving = false; + + // cure iframe display issues + _fixIframe(pane); + + // NOTE: if isSliding, then other panes are NOT 'resized' + if (!s.isSliding) { // resize all panes adjacent to this one + hideMasks(); // remove any masks shown while opening + sizeMidPanes(_c[pane].dir=="vert" ? "center" : "", false); // false = NOT skipCallback + } + + // set classes, position handles and execute callbacks... 
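+			// setAsOpen() below repositions the resizer-bar, swaps the -closed/-open
+			// classes on the resizer & toggler, re-enables dragging, and fires the
+			// onopen_end / onresize_end callbacks once the pane is visible.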
+ setAsOpen(pane); + }; + + } + + /** + * @param {string} pane The pane just opened, ie: north, south, east, or west + * @param {boolean=} [skipCallback=false] + */ +, setAsOpen = function (pane, skipCallback) { + var + $P = $Ps[pane] + , $R = $Rs[pane] + , $T = $Ts[pane] + , o = options[pane] + , s = state[pane] + , side = _c[pane].side.toLowerCase() + , inset = "inset"+ _c[pane].side + , rClass = o.resizerClass + , tClass = o.togglerClass + , _pane = "-"+ pane // used for classNames + , _open = "-open" + , _closed = "-closed" + , _sliding= "-sliding" + ; + $R + .css(side, sC[inset] + getPaneSize(pane)) // move the resizer + .removeClass( rClass+_closed +" "+ rClass+_pane+_closed ) + .addClass( rClass+_open +" "+ rClass+_pane+_open ) + ; + if (s.isSliding) + $R.addClass( rClass+_sliding +" "+ rClass+_pane+_sliding ) + else // in case 'was sliding' + $R.removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding ) + + if (o.resizerDblClickToggle) + $R.bind("dblclick", toggle ); + removeHover( 0, $R ); // remove hover classes + if (o.resizable && $.layout.plugins.draggable) + $R .draggable("enable") + .css("cursor", o.resizerCursor) + .attr("title", o.tips.Resize); + else if (!s.isSliding) + $R.css("cursor", "default"); // n-resize, s-resize, etc + + // if pane also has a toggler button, adjust that too + if ($T) { + $T .removeClass( tClass+_closed +" "+ tClass+_pane+_closed ) + .addClass( tClass+_open +" "+ tClass+_pane+_open ) + .attr("title", o.tips.Close); // may be blank + removeHover( 0, $T ); // remove hover classes + // toggler-content - if exists + $T.children(".content-closed").hide(); + $T.children(".content-open").css("display","block"); + } + + // sync any 'pin buttons' + syncPinBtns(pane, !s.isSliding); + + // update pane-state dimensions - BEFORE resizing content + $.extend(s, elDims($P)); + + if (state.initialized) { + // resize resizer & toggler sizes for all panes + sizeHandles(); + // resize content every time pane opens - to be sure + sizeContent(pane, true); // true = remeasure headers/footers, even if 'pane.isMoving' + } + + if (!skipCallback && (state.initialized || o.triggerEventsOnLoad) && $P.is(":visible")) { + // onopen callback + _runCallbacks("onopen_end", pane); + // onshow callback - TODO: should this be here? + if (s.isShowing) _runCallbacks("onshow_end", pane); + + // ALSO call onresize because layout-size *may* have changed while pane was closed + if (state.initialized) + _runCallbacks("onresize_end", pane); + } + + // TODO: Somehow sizePane("north") is being called after this point??? + } + + + /** + * slideOpen / slideClose / slideToggle + * + * Pass-though methods for sliding + */ +, slideOpen = function (evt_or_pane) { + if (!isInitialized()) return; + var evt = evtObj(evt_or_pane) + , pane = evtPane.call(this, evt_or_pane) + , s = state[pane] + , delay = options[pane].slideDelay_open + ; + // prevent event from triggering on NEW resizer binding created below + if (evt) evt.stopImmediatePropagation(); + + if (s.isClosed && evt && evt.type === "mouseenter" && delay > 0) + // trigger = mouseenter - use a delay + timer.set(pane+"_openSlider", open_NOW, delay); + else + open_NOW(); // will unbind events if is already open + + /** + * SUBROUTINE for timed open + */ + function open_NOW () { + if (!s.isClosed) // skip if no longer closed! 
+ bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane + else if (!s.isMoving) + open(pane, true); // true = slide - open() will handle binding + }; + } + +, slideClose = function (evt_or_pane) { + if (!isInitialized()) return; + var evt = evtObj(evt_or_pane) + , pane = evtPane.call(this, evt_or_pane) + , o = options[pane] + , s = state[pane] + , delay = s.isMoving ? 1000 : 300 // MINIMUM delay - option may override + ; + if (s.isClosed || s.isResizing) + return; // skip if already closed OR in process of resizing + else if (o.slideTrigger_close === "click") + close_NOW(); // close immediately onClick + else if (o.preventQuickSlideClose && s.isMoving) + return; // handle Chrome quick-close on slide-open + else if (o.preventPrematureSlideClose && evt && $.layout.isMouseOverElem(evt, $Ps[pane])) + return; // handle incorrect mouseleave trigger, like when over a SELECT-list in IE + else if (evt) // trigger = mouseleave - use a delay + // 1 sec delay if 'opening', else .3 sec + timer.set(pane+"_closeSlider", close_NOW, max(o.slideDelay_close, delay)); + else // called programically + close_NOW(); + + /** + * SUBROUTINE for timed close + */ + function close_NOW () { + if (s.isClosed) // skip 'close' if already closed! + bindStopSlidingEvents(pane, false); // UNBIND trigger events - TODO: is this needed here? + else if (!s.isMoving) + close(pane); // close will handle unbinding + }; + } + + /** + * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west + */ +, slideToggle = function (evt_or_pane) { + var pane = evtPane.call(this, evt_or_pane); + toggle(pane, true); + } + + + /** + * Must set left/top on East/South panes so animation will work properly + * + * @param {string} pane The pane to lock, 'east' or 'south' - any other is ignored! + * @param {boolean} doLock true = set left/top, false = remove + */ +, lockPaneForFX = function (pane, doLock) { + var $P = $Ps[pane] + , s = state[pane] + , o = options[pane] + , z = options.zIndexes + ; + if (doLock) { + $P.css({ zIndex: z.pane_animate }); // overlay all elements during animation + if (pane=="south") + $P.css({ top: sC.insetTop + sC.innerHeight - $P.outerHeight() }); + else if (pane=="east") + $P.css({ left: sC.insetLeft + sC.innerWidth - $P.outerWidth() }); + } + else { // animation DONE - RESET CSS + // TODO: see if this can be deleted. It causes a quick-close when sliding in Chrome + $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) }); + if (pane=="south") + $P.css({ top: "auto" }); + // if pane is positioned 'off-screen', then DO NOT screw with it! + else if (pane=="east" && !$P.css("left").match(/\-99999/)) + $P.css({ left: "auto" }); + // fix anti-aliasing in IE - only needed for animations that change opacity + if (browser.msie && o.fxOpacityFix && o.fxName_open != "slide" && $P.css("filter") && $P.css("opacity") == 1) + $P[0].style.removeAttribute('filter'); + } + } + + + /** + * Toggle sliding functionality of a specific pane on/off by adding removing 'slide open' trigger + * + * @see open(), close() + * @param {string} pane The pane to enable/disable, 'north', 'south', etc. + * @param {boolean} enable Enable or Disable sliding? 
+ */ +, bindStartSlidingEvent = function (pane, enable) { + var o = options[pane] + , $P = $Ps[pane] + , $R = $Rs[pane] + , evtName = o.slideTrigger_open.toLowerCase() + ; + if (!$R || (enable && !o.slidable)) return; + + // make sure we have a valid event + if (evtName.match(/mouseover/)) + evtName = o.slideTrigger_open = "mouseenter"; + else if (!evtName.match(/(click|dblclick|mouseenter)/)) + evtName = o.slideTrigger_open = "click"; + + $R + // add or remove event + [enable ? "bind" : "unbind"](evtName +'.'+ sID, slideOpen) + // set the appropriate cursor & title/tip + .css("cursor", enable ? o.sliderCursor : "default") + .attr("title", enable ? o.tips.Slide : "") + ; + } + + /** + * Add or remove 'mouseleave' events to 'slide close' when pane is 'sliding' open or closed + * Also increases zIndex when pane is sliding open + * See bindStartSlidingEvent for code to control 'slide open' + * + * @see slideOpen(), slideClose() + * @param {string} pane The pane to process, 'north', 'south', etc. + * @param {boolean} enable Enable or Disable events? + */ +, bindStopSlidingEvents = function (pane, enable) { + var o = options[pane] + , s = state[pane] + , c = _c[pane] + , z = options.zIndexes + , evtName = o.slideTrigger_close.toLowerCase() + , action = (enable ? "bind" : "unbind") + , $P = $Ps[pane] + , $R = $Rs[pane] + ; + s.isSliding = enable; // logic + timer.clear(pane+"_closeSlider"); // just in case + + // remove 'slideOpen' event from resizer + // ALSO will raise the zIndex of the pane & resizer + if (enable) bindStartSlidingEvent(pane, false); + + // RE/SET zIndex - increases when pane is sliding-open, resets to normal when not + $P.css("zIndex", enable ? z.pane_sliding : z.pane_normal); + $R.css("zIndex", enable ? z.pane_sliding+2 : z.resizer_normal); // NOTE: mask = pane_sliding+1 + + // make sure we have a valid event + if (!evtName.match(/(click|mouseleave)/)) + evtName = o.slideTrigger_close = "mouseleave"; // also catches 'mouseout' + + // add/remove slide triggers + $R[action](evtName, slideClose); // base event on resize + // need extra events for mouseleave + if (evtName === "mouseleave") { + // also close on pane.mouseleave + $P[action]("mouseleave."+ sID, slideClose); + // cancel timer when mouse moves between 'pane' and 'resizer' + $R[action]("mouseenter."+ sID, cancelMouseOut); + $P[action]("mouseenter."+ sID, cancelMouseOut); + } + + if (!enable) + timer.clear(pane+"_closeSlider"); + else if (evtName === "click" && !o.resizable) { + // IF pane is not resizable (which already has a cursor and tip) + // then set the a cursor & title/tip on resizer when sliding + $R.css("cursor", enable ? o.sliderCursor : "default"); + $R.attr("title", enable ? o.tips.Close : ""); // use Toggler-tip, eg: "Close Pane" + } + + // SUBROUTINE for mouseleave timer clearing + function cancelMouseOut (evt) { + timer.clear(pane+"_closeSlider"); + evt.stopPropagation(); + } + } + + + /** + * Hides/closes a pane if there is insufficient room - reverses this when there is room again + * MUST have already called setSizeLimits() before calling this method + * + * @param {string} pane The pane being resized + * @param {boolean=} [isOpening=false] Called from onOpen? + * @param {boolean=} [skipCallback=false] Should the onresize callback be run? 
+ * @param {boolean=} [force=false] + */ +, makePaneFit = function (pane, isOpening, skipCallback, force) { + var + o = options[pane] + , s = state[pane] + , c = _c[pane] + , $P = $Ps[pane] + , $R = $Rs[pane] + , isSidePane = c.dir==="vert" + , hasRoom = false + ; + // special handling for center & east/west panes + if (pane === "center" || (isSidePane && s.noVerticalRoom)) { + // see if there is enough room to display the pane + // ERROR: hasRoom = s.minHeight <= s.maxHeight && (isSidePane || s.minWidth <= s.maxWidth); + hasRoom = (s.maxHeight >= 0); + if (hasRoom && s.noRoom) { // previously hidden due to noRoom, so show now + _showPane(pane); + if ($R) $R.show(); + s.isVisible = true; + s.noRoom = false; + if (isSidePane) s.noVerticalRoom = false; + _fixIframe(pane); + } + else if (!hasRoom && !s.noRoom) { // not currently hidden, so hide now + _hidePane(pane); + if ($R) $R.hide(); + s.isVisible = false; + s.noRoom = true; + } + } + + // see if there is enough room to fit the border-pane + if (pane === "center") { + // ignore center in this block + } + else if (s.minSize <= s.maxSize) { // pane CAN fit + hasRoom = true; + if (s.size > s.maxSize) // pane is too big - shrink it + sizePane(pane, s.maxSize, skipCallback, force, true); // true = noAnimation + else if (s.size < s.minSize) // pane is too small - enlarge it + sizePane(pane, s.minSize, skipCallback, force, true); + // need s.isVisible because new pseudoClose method keeps pane visible, but off-screen + else if ($R && s.isVisible && $P.is(":visible")) { + // make sure resizer-bar is positioned correctly + // handles situation where nested layout was 'hidden' when initialized + var side = c.side.toLowerCase() + , pos = s.size + sC["inset"+ c.side] + ; + if ($.layout.cssNum($R, side) != pos) $R.css( side, pos ); + } + + // if was previously hidden due to noRoom, then RESET because NOW there is room + if (s.noRoom) { + // s.noRoom state will be set by open or show + if (s.wasOpen && o.closable) { + if (o.autoReopen) + open(pane, false, true, true); // true = noAnimation, true = noAlert + else // leave the pane closed, so just update state + s.noRoom = false; + } + else + show(pane, s.wasOpen, true, true); // true = noAnimation, true = noAlert + } + } + else { // !hasRoom - pane CANNOT fit + if (!s.noRoom) { // pane not set as noRoom yet, so hide or close it now... + s.noRoom = true; // update state + s.wasOpen = !s.isClosed && !s.isSliding; + if (s.isClosed){} // SKIP + else if (o.closable) // 'close' if possible + close(pane, true, true); // true = force, true = noAnimation + else // 'hide' pane if cannot just be closed + hide(pane, true); // true = noAnimation + } + } + } + + + /** + * sizePane / manualSizePane + * sizePane is called only by internal methods whenever a pane needs to be resized + * manualSizePane is an exposed flow-through method allowing extra code when pane is 'manually resized' + * + * @param {string|Object} evt_or_pane The pane being resized + * @param {number} size The *desired* new size for this pane - will be validated + * @param {boolean=} [skipCallback=false] Should the onresize callback be run? + * @param {boolean=} [noAnimation=false] + */ +, manualSizePane = function (evt_or_pane, size, skipCallback, noAnimation) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) + , o = options[pane] + , s = state[pane] + // if resizing callbacks have been delayed and resizing is now DONE, force resizing to complete... 
+ , forceResize = o.livePaneResizing && !s.isResizing + ; + // ANY call to manualSizePane disables autoResize - ie, percentage sizing + o.autoResize = false; + // flow-through... + sizePane(pane, size, skipCallback, forceResize, noAnimation); // will animate resize if option enabled + } + + /** + * @param {string|Object} evt_or_pane The pane being resized + * @param {number} size The *desired* new size for this pane - will be validated + * @param {boolean=} [skipCallback=false] Should the onresize callback be run? + * @param {boolean=} [force=false] Force resizing even if does not seem necessary + * @param {boolean=} [noAnimation=false] + */ +, sizePane = function (evt_or_pane, size, skipCallback, force, noAnimation) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) // probably NEVER called from event? + , o = options[pane] + , s = state[pane] + , $P = $Ps[pane] + , $R = $Rs[pane] + , side = _c[pane].side.toLowerCase() + , dimName = _c[pane].sizeType.toLowerCase() + , inset = "inset"+ _c[pane].side + , skipResizeWhileDragging = s.isResizing && !o.triggerEventsDuringLiveResize + , doFX = noAnimation !== true && o.animatePaneSizing + , oldSize, newSize + ; + // QUEUE in case another action/animation is in progress + $N.queue(function( queueNext ){ + // calculate 'current' min/max sizes + setSizeLimits(pane); // update pane-state + oldSize = s.size; + size = _parseSize(pane, size); // handle percentages & auto + size = max(size, _parseSize(pane, o.minSize)); + size = min(size, s.maxSize); + if (size < s.minSize) { // not enough room for pane! + queueNext(); // call before makePaneFit() because it needs the queue free + makePaneFit(pane, false, skipCallback); // will hide or close pane + return; + } + + // IF newSize is same as oldSize, then nothing to do - abort + if (!force && size === oldSize) + return queueNext(); + + // onresize_start callback CANNOT cancel resizing because this would break the layout! + if (!skipCallback && state.initialized && s.isVisible) + _runCallbacks("onresize_start", pane); + + // resize the pane, and make sure its visible + newSize = cssSize(pane, size); + + if (doFX && $P.is(":visible")) { // ANIMATE + var fx = $.layout.effects.size[pane] || $.layout.effects.size.all + , easing = o.fxSettings_size.easing || fx.easing + , z = options.zIndexes + , props = {}; + props[ dimName ] = newSize +'px'; + s.isMoving = true; + // overlay all elements during animation + $P.css({ zIndex: z.pane_animate }) + .show().animate( props, o.fxSpeed_size, easing, function(){ + // reset zIndex after animation + $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) }); + s.isMoving = false; + sizePane_2(); // continue + queueNext(); + }); + } + else { // no animation + $P.css( dimName, newSize ); // resize pane + // if pane is visible, then + if ($P.is(":visible")) + sizePane_2(); // continue + else { + // pane is NOT VISIBLE, so just update state data... + // when pane is *next opened*, it will have the new size + s.size = size; // update state.size + $.extend(s, elDims($P)); // update state dimensions + } + queueNext(); + }; + + }); + + // SUBROUTINE + function sizePane_2 () { + /* Panes are sometimes not sized precisely in some browsers!? + * This code will resize the pane up to 3 times to nudge the pane to the correct size + */ + var actual = dimName==='width' ? 
$P.outerWidth() : $P.outerHeight() + , tries = [{ + pane: pane + , count: 1 + , target: size + , actual: actual + , correct: (size === actual) + , attempt: size + , cssSize: newSize + }] + , lastTry = tries[0] + , thisTry = {} + , msg = 'Inaccurate size after resizing the '+ pane +'-pane.' + ; + while ( !lastTry.correct ) { + thisTry = { pane: pane, count: lastTry.count+1, target: size }; + + if (lastTry.actual > size) + thisTry.attempt = max(0, lastTry.attempt - (lastTry.actual - size)); + else // lastTry.actual < size + thisTry.attempt = max(0, lastTry.attempt + (size - lastTry.actual)); + + thisTry.cssSize = cssSize(pane, thisTry.attempt); + $P.css( dimName, thisTry.cssSize ); + + thisTry.actual = dimName=='width' ? $P.outerWidth() : $P.outerHeight(); + thisTry.correct = (size === thisTry.actual); + + // log attempts and alert the user of this *non-fatal error* (if showDebugMessages) + if ( tries.length === 1) { + _log(msg, false, true); + _log(lastTry, false, true); + } + _log(thisTry, false, true); + // after 4 tries, is as close as its gonna get! + if (tries.length > 3) break; + + tries.push( thisTry ); + lastTry = tries[ tries.length - 1 ]; + } + // END TESTING CODE + + // update pane-state dimensions + s.size = size; + $.extend(s, elDims($P)); + + if (s.isVisible && $P.is(":visible")) { + // reposition the resizer-bar + if ($R) $R.css( side, size + sC[inset] ); + // resize the content-div + sizeContent(pane); + } + + if (!skipCallback && !skipResizeWhileDragging && state.initialized && s.isVisible) + _runCallbacks("onresize_end", pane); + + // resize all the adjacent panes, and adjust their toggler buttons + // when skipCallback passed, it means the controlling method will handle 'other panes' + if (!skipCallback) { + // also no callback if live-resize is in progress and NOT triggerEventsDuringLiveResize + if (!s.isSliding) sizeMidPanes(_c[pane].dir=="horz" ? "" : "center", skipResizeWhileDragging, force); + sizeHandles(); + } + + // if opposite-pane was autoClosed, see if it can be autoOpened now + var altPane = _c.oppositeEdge[pane]; + if (size < oldSize && state[ altPane ].noRoom) { + setSizeLimits( altPane ); + makePaneFit( altPane, false, skipCallback ); + } + + // DEBUG - ALERT user/developer so they know there was a sizing problem + if (tries.length > 1) + _log(msg +'\nSee the Error Console for details.', true, true); + } + } + + /** + * @see initPanes(), sizePane(), resizeAll(), open(), close(), hide() + * @param {Array.|string} panes The pane(s) being resized, comma-delmited string + * @param {boolean=} [skipCallback=false] Should the onresize callback be run? + * @param {boolean=} [force=false] + */ +, sizeMidPanes = function (panes, skipCallback, force) { + panes = (panes ? 
panes : "east,west,center").split(","); + + $.each(panes, function (i, pane) { + if (!$Ps[pane]) return; // NO PANE - skip + var + o = options[pane] + , s = state[pane] + , $P = $Ps[pane] + , $R = $Rs[pane] + , isCenter= (pane=="center") + , hasRoom = true + , CSS = {} + , newCenter = calcNewCenterPaneDims() + ; + // update pane-state dimensions + $.extend(s, elDims($P)); + + if (pane === "center") { + if (!force && s.isVisible && newCenter.width === s.outerWidth && newCenter.height === s.outerHeight) + return true; // SKIP - pane already the correct size + // set state for makePaneFit() logic + $.extend(s, cssMinDims(pane), { + maxWidth: newCenter.width + , maxHeight: newCenter.height + }); + CSS = newCenter; + // convert OUTER width/height to CSS width/height + CSS.width = cssW($P, CSS.width); + // NEW - allow pane to extend 'below' visible area rather than hide it + CSS.height = cssH($P, CSS.height); + hasRoom = CSS.width >= 0 && CSS.height >= 0; // height >= 0 = ALWAYS TRUE NOW + // during layout init, try to shrink east/west panes to make room for center + if (!state.initialized && o.minWidth > s.outerWidth) { + var + reqPx = o.minWidth - s.outerWidth + , minE = options.east.minSize || 0 + , minW = options.west.minSize || 0 + , sizeE = state.east.size + , sizeW = state.west.size + , newE = sizeE + , newW = sizeW + ; + if (reqPx > 0 && state.east.isVisible && sizeE > minE) { + newE = max( sizeE-minE, sizeE-reqPx ); + reqPx -= sizeE-newE; + } + if (reqPx > 0 && state.west.isVisible && sizeW > minW) { + newW = max( sizeW-minW, sizeW-reqPx ); + reqPx -= sizeW-newW; + } + // IF we found enough extra space, then resize the border panes as calculated + if (reqPx === 0) { + if (sizeE && sizeE != minE) + sizePane('east', newE, true, force, true); // true = skipCallback/noAnimation - initPanes will handle when done + if (sizeW && sizeW != minW) + sizePane('west', newW, true, force, true); + // now start over! 
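+					// (east/west were just shrunk above, so recalculate the center pane
+					// with the freed-up space before giving up on this pass)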
+ sizeMidPanes('center', skipCallback, force); + return; // abort this loop + } + } + } + else { // for east and west, set only the height, which is same as center height + // set state.min/maxWidth/Height for makePaneFit() logic + if (s.isVisible && !s.noVerticalRoom) + $.extend(s, elDims($P), cssMinDims(pane)) + if (!force && !s.noVerticalRoom && newCenter.height === s.outerHeight) + return true; // SKIP - pane already the correct size + // east/west have same top, bottom & height as center + CSS.top = newCenter.top; + CSS.bottom = newCenter.bottom; + // NEW - allow pane to extend 'below' visible area rather than hide it + CSS.height = cssH($P, newCenter.height); + s.maxHeight = CSS.height; + hasRoom = (s.maxHeight >= 0); // ALWAYS TRUE NOW + if (!hasRoom) s.noVerticalRoom = true; // makePaneFit() logic + } + + if (hasRoom) { + // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized + if (!skipCallback && state.initialized) + _runCallbacks("onresize_start", pane); + + $P.css(CSS); // apply the CSS to pane + if (pane !== "center") + sizeHandles(pane); // also update resizer length + if (s.noRoom && !s.isClosed && !s.isHidden) + makePaneFit(pane); // will re-open/show auto-closed/hidden pane + if (s.isVisible) { + $.extend(s, elDims($P)); // update pane dimensions + if (state.initialized) sizeContent(pane); // also resize the contents, if exists + } + } + else if (!s.noRoom && s.isVisible) // no room for pane + makePaneFit(pane); // will hide or close pane + + if (!s.isVisible) + return true; // DONE - next pane + + /* + * Extra CSS for IE6 or IE7 in Quirks-mode - add 'width' to NORTH/SOUTH panes + * Normally these panes have only 'left' & 'right' positions so pane auto-sizes + * ALSO required when pane is an IFRAME because will NOT default to 'full width' + * TODO: Can I use width:100% for a north/south iframe? + * TODO: Sounds like a job for $P.outerWidth( sC.innerWidth ) SETTER METHOD + */ + if (pane === "center") { // finished processing midPanes + var fix = browser.isIE6 || !browser.boxModel; + if ($Ps.north && (fix || state.north.tagName=="IFRAME")) + $Ps.north.css("width", cssW($Ps.north, sC.innerWidth)); + if ($Ps.south && (fix || state.south.tagName=="IFRAME")) + $Ps.south.css("width", cssW($Ps.south, sC.innerWidth)); + } + + // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized + if (!skipCallback && state.initialized) + _runCallbacks("onresize_end", pane); + }); + } + + + /** + * @see window.onresize(), callbacks or custom code + */ +, resizeAll = function (evt) { + // stopPropagation if called by trigger("layoutdestroy") - use evtPane utility + evtPane(evt); + + if (!state.initialized) { + _initLayoutElements(); + return; // no need to resize since we just initialized! 
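+	// resizeAll is normally triggered by window.onresize (see doc-comment above), but it
+	// is also the method to call after code changes the container size behind the
+	// layout's back, e.g. (variable names illustrative):
+	//   $("#myContainer").width(800);
+	//   myLayout.resizeAll();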
+ } + var oldW = sC.innerWidth + , oldH = sC.innerHeight + ; + // cannot size layout when 'container' is hidden or collapsed + if (!$N.is(":visible") ) return; + $.extend(state.container, elDims( $N )); // UPDATE container dimensions + if (!sC.outerHeight) return; + + // onresizeall_start will CANCEL resizing if returns false + // state.container has already been set, so user can access this info for calcuations + if (false === _runCallbacks("onresizeall_start")) return false; + + var // see if container is now 'smaller' than before + shrunkH = (sC.innerHeight < oldH) + , shrunkW = (sC.innerWidth < oldW) + , $P, o, s, dir + ; + // NOTE special order for sizing: S-N-E-W + $.each(["south","north","east","west"], function (i, pane) { + if (!$Ps[pane]) return; // no pane - SKIP + s = state[pane]; + o = options[pane]; + dir = _c[pane].dir; + + if (o.autoResize && s.size != o.size) // resize pane to original size set in options + sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation + else { + setSizeLimits(pane); + makePaneFit(pane, false, true, true); // true=skipCallback/forceResize + } + }); + + sizeMidPanes("", true, true); // true=skipCallback, true=forceResize + sizeHandles(); // reposition the toggler elements + + // trigger all individual pane callbacks AFTER layout has finished resizing + o = options; // reuse alias + $.each(_c.allPanes, function (i, pane) { + $P = $Ps[pane]; + if (!$P) return; // SKIP + if (state[pane].isVisible) // undefined for non-existent panes + _runCallbacks("onresize_end", pane); // callback - if exists + }); + + _runCallbacks("onresizeall_end"); + //_triggerLayoutEvent(pane, 'resizeall'); + } + + /** + * Whenever a pane resizes or opens that has a nested layout, trigger resizeAll + * + * @param {string|Object} evt_or_pane The pane just resized or opened + */ +, resizeChildLayout = function (evt_or_pane) { + var pane = evtPane.call(this, evt_or_pane); + if (!options[pane].resizeChildLayout) return; + var $P = $Ps[pane] + , $C = $Cs[pane] + , d = "layout" + , P = Instance[pane] + , L = children[pane] + ; + // user may have manually set EITHER instance pointer, so handle that + if (P.child && !L) { + // have to reverse the pointers! + var el = P.child.container; + L = children[pane] = (el ? el.data(d) : 0) || null; // set pointer _directly_ to layout instance + } + + // if a layout-pointer exists, see if child has been destroyed + if (L && L.destroyed) + L = children[pane] = null; // clear child pointers + // no child layout pointer is set - see if there is a child layout NOW + if (!L) L = children[pane] = $P.data(d) || ($C ? $C.data(d) : 0) || null; // set/update child pointers + + // ALWAYS refresh the pane.child alias + P.child = children[pane]; + + if (L) L.resizeAll(); + } + + + /** + * IF pane has a content-div, then resize all elements inside pane to fit pane-height + * + * @param {string|Object} evt_or_panes The pane(s) being resized + * @param {boolean=} [remeasure=false] Should the content (header/footer) be remeasured? + */ +, sizeContent = function (evt_or_panes, remeasure) { + if (!isInitialized()) return; + + var panes = evtPane.call(this, evt_or_panes); + panes = panes ? 
panes.split(",") : _c.allPanes; + + $.each(panes, function (idx, pane) { + var + $P = $Ps[pane] + , $C = $Cs[pane] + , o = options[pane] + , s = state[pane] + , m = s.content // m = measurements + ; + if (!$P || !$C || !$P.is(":visible")) return true; // NOT VISIBLE - skip + + // if content-element was REMOVED, update OR remove the pointer + if (!$C.length) { + initContent(pane, false); // false = do NOT sizeContent() - already there! + if (!$C) return; // no replacement element found - pointer have been removed + } + + // onsizecontent_start will CANCEL resizing if returns false + if (false === _runCallbacks("onsizecontent_start", pane)) return; + + // skip re-measuring offsets if live-resizing + if ((!s.isMoving && !s.isResizing) || o.liveContentResizing || remeasure || m.top == undefined) { + _measure(); + // if any footers are below pane-bottom, they may not measure correctly, + // so allow pane overflow and re-measure + if (m.hiddenFooters > 0 && $P.css("overflow") === "hidden") { + $P.css("overflow", "visible"); + _measure(); // remeasure while overflowing + $P.css("overflow", "hidden"); + } + } + // NOTE: spaceAbove/Below *includes* the pane paddingTop/Bottom, but not pane.borders + var newH = s.innerHeight - (m.spaceAbove - s.css.paddingTop) - (m.spaceBelow - s.css.paddingBottom); + + if (!$C.is(":visible") || m.height != newH) { + // size the Content element to fit new pane-size - will autoHide if not enough room + setOuterHeight($C, newH, true); // true=autoHide + m.height = newH; // save new height + }; + + if (state.initialized) + _runCallbacks("onsizecontent_end", pane); + + function _below ($E) { + return max(s.css.paddingBottom, (parseInt($E.css("marginBottom"), 10) || 0)); + }; + + function _measure () { + var + ignore = options[pane].contentIgnoreSelector + , $Fs = $C.nextAll().not(ignore || ':lt(0)') // not :lt(0) = ALL + , $Fs_vis = $Fs.filter(':visible') + , $F = $Fs_vis.filter(':last') + ; + m = { + top: $C[0].offsetTop + , height: $C.outerHeight() + , numFooters: $Fs.length + , hiddenFooters: $Fs.length - $Fs_vis.length + , spaceBelow: 0 // correct if no content footer ($E) + } + m.spaceAbove = m.top; // just for state - not used in calc + m.bottom = m.top + m.height; + if ($F.length) + //spaceBelow = (LastFooter.top + LastFooter.height) [footerBottom] - Content.bottom + max(LastFooter.marginBottom, pane.paddingBotom) + m.spaceBelow = ($F[0].offsetTop + $F.outerHeight()) - m.bottom + _below($F); + else // no footer - check marginBottom on Content element itself + m.spaceBelow = _below($C); + }; + }); + } + + + /** + * Called every time a pane is opened, closed, or resized to slide the togglers to 'center' and adjust their length if necessary + * + * @see initHandles(), open(), close(), resizeAll() + * @param {string|Object} evt_or_panes The pane(s) being resized + */ +, sizeHandles = function (evt_or_panes) { + var panes = evtPane.call(this, evt_or_panes) + panes = panes ? panes.split(",") : _c.borderPanes; + + $.each(panes, function (i, pane) { + var + o = options[pane] + , s = state[pane] + , $P = $Ps[pane] + , $R = $Rs[pane] + , $T = $Ts[pane] + , $TC + ; + if (!$P || !$R) return; + + var + dir = _c[pane].dir + , _state = (s.isClosed ? 
"_closed" : "_open") + , spacing = o["spacing"+ _state] + , togAlign = o["togglerAlign"+ _state] + , togLen = o["togglerLength"+ _state] + , paneLen + , left + , offset + , CSS = {} + ; + + if (spacing === 0) { + $R.hide(); + return; + } + else if (!s.noRoom && !s.isHidden) // skip if resizer was hidden for any reason + $R.show(); // in case was previously hidden + + // Resizer Bar is ALWAYS same width/height of pane it is attached to + if (dir === "horz") { // north/south + //paneLen = $P.outerWidth(); // s.outerWidth || + paneLen = sC.innerWidth; // handle offscreen-panes + s.resizerLength = paneLen; + left = $.layout.cssNum($P, "left") + $R.css({ + width: cssW($R, paneLen) // account for borders & padding + , height: cssH($R, spacing) // ditto + , left: left > -9999 ? left : sC.insetLeft // handle offscreen-panes + }); + } + else { // east/west + paneLen = $P.outerHeight(); // s.outerHeight || + s.resizerLength = paneLen; + $R.css({ + height: cssH($R, paneLen) // account for borders & padding + , width: cssW($R, spacing) // ditto + , top: sC.insetTop + getPaneSize("north", true) // TODO: what if no North pane? + //, top: $.layout.cssNum($Ps["center"], "top") + }); + } + + // remove hover classes + removeHover( o, $R ); + + if ($T) { + if (togLen === 0 || (s.isSliding && o.hideTogglerOnSlide)) { + $T.hide(); // always HIDE the toggler when 'sliding' + return; + } + else + $T.show(); // in case was previously hidden + + if (!(togLen > 0) || togLen === "100%" || togLen > paneLen) { + togLen = paneLen; + offset = 0; + } + else { // calculate 'offset' based on options.PANE.togglerAlign_open/closed + if (isStr(togAlign)) { + switch (togAlign) { + case "top": + case "left": offset = 0; + break; + case "bottom": + case "right": offset = paneLen - togLen; + break; + case "middle": + case "center": + default: offset = round((paneLen - togLen) / 2); // 'default' catches typos + } + } + else { // togAlign = number + var x = parseInt(togAlign, 10); // + if (togAlign >= 0) offset = x; + else offset = paneLen - togLen + x; // NOTE: x is negative! 
+ } + } + + if (dir === "horz") { // north/south + var width = cssW($T, togLen); + $T.css({ + width: width // account for borders & padding + , height: cssH($T, spacing) // ditto + , left: offset // TODO: VERIFY that toggler positions correctly for ALL values + , top: 0 + }); + // CENTER the toggler content SPAN + $T.children(".content").each(function(){ + $TC = $(this); + $TC.css("marginLeft", round((width-$TC.outerWidth())/2)); // could be negative + }); + } + else { // east/west + var height = cssH($T, togLen); + $T.css({ + height: height // account for borders & padding + , width: cssW($T, spacing) // ditto + , top: offset // POSITION the toggler + , left: 0 + }); + // CENTER the toggler content SPAN + $T.children(".content").each(function(){ + $TC = $(this); + $TC.css("marginTop", round((height-$TC.outerHeight())/2)); // could be negative + }); + } + + // remove ALL hover classes + removeHover( 0, $T ); + } + + // DONE measuring and sizing this resizer/toggler, so can be 'hidden' now + if (!state.initialized && (o.initHidden || s.noRoom)) { + $R.hide(); + if ($T) $T.hide(); + } + }); + } + + + /** + * @param {string|Object} evt_or_pane + */ +, enableClosable = function (evt_or_pane) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) + , $T = $Ts[pane] + , o = options[pane] + ; + if (!$T) return; + o.closable = true; + $T .bind("click."+ sID, function(evt){ evt.stopPropagation(); toggle(pane); }) + .css("visibility", "visible") + .css("cursor", "pointer") + .attr("title", state[pane].isClosed ? o.tips.Open : o.tips.Close) // may be blank + .show(); + } + /** + * @param {string|Object} evt_or_pane + * @param {boolean=} [hide=false] + */ +, disableClosable = function (evt_or_pane, hide) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) + , $T = $Ts[pane] + ; + if (!$T) return; + options[pane].closable = false; + // is closable is disable, then pane MUST be open! + if (state[pane].isClosed) open(pane, false, true); + $T .unbind("."+ sID) + .css("visibility", hide ? 
"hidden" : "visible") // instead of hide(), which creates logic issues + .css("cursor", "default") + .attr("title", ""); + } + + + /** + * @param {string|Object} evt_or_pane + */ +, enableSlidable = function (evt_or_pane) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) + , $R = $Rs[pane] + ; + if (!$R || !$R.data('draggable')) return; + options[pane].slidable = true; + if (state[pane].isClosed) + bindStartSlidingEvent(pane, true); + } + /** + * @param {string|Object} evt_or_pane + */ +, disableSlidable = function (evt_or_pane) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) + , $R = $Rs[pane] + ; + if (!$R) return; + options[pane].slidable = false; + if (state[pane].isSliding) + close(pane, false, true); + else { + bindStartSlidingEvent(pane, false); + $R .css("cursor", "default") + .attr("title", ""); + removeHover(null, $R[0]); // in case currently hovered + } + } + + + /** + * @param {string|Object} evt_or_pane + */ +, enableResizable = function (evt_or_pane) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) + , $R = $Rs[pane] + , o = options[pane] + ; + if (!$R || !$R.data('draggable')) return; + o.resizable = true; + $R.draggable("enable"); + if (!state[pane].isClosed) + $R .css("cursor", o.resizerCursor) + .attr("title", o.tips.Resize); + } + /** + * @param {string|Object} evt_or_pane + */ +, disableResizable = function (evt_or_pane) { + if (!isInitialized()) return; + var pane = evtPane.call(this, evt_or_pane) + , $R = $Rs[pane] + ; + if (!$R || !$R.data('draggable')) return; + options[pane].resizable = false; + $R .draggable("disable") + .css("cursor", "default") + .attr("title", ""); + removeHover(null, $R[0]); // in case currently hovered + } + + + /** + * Move a pane from source-side (eg, west) to target-side (eg, east) + * If pane exists on target-side, move that to source-side, ie, 'swap' the panes + * + * @param {string|Object} evt_or_pane1 The pane/edge being swapped + * @param {string} pane2 ditto + */ +, swapPanes = function (evt_or_pane1, pane2) { + if (!isInitialized()) return; + var pane1 = evtPane.call(this, evt_or_pane1); + // change state.edge NOW so callbacks can know where pane is headed... + state[pane1].edge = pane2; + state[pane2].edge = pane1; + // run these even if NOT state.initialized + if (false === _runCallbacks("onswap_start", pane1) + || false === _runCallbacks("onswap_start", pane2) + ) { + state[pane1].edge = pane1; // reset + state[pane2].edge = pane2; + return; + } + + var + oPane1 = copy( pane1 ) + , oPane2 = copy( pane2 ) + , sizes = {} + ; + sizes[pane1] = oPane1 ? oPane1.state.size : 0; + sizes[pane2] = oPane2 ? 
oPane2.state.size : 0; + + // clear pointers & state + $Ps[pane1] = false; + $Ps[pane2] = false; + state[pane1] = {}; + state[pane2] = {}; + + // ALWAYS remove the resizer & toggler elements + if ($Ts[pane1]) $Ts[pane1].remove(); + if ($Ts[pane2]) $Ts[pane2].remove(); + if ($Rs[pane1]) $Rs[pane1].remove(); + if ($Rs[pane2]) $Rs[pane2].remove(); + $Rs[pane1] = $Rs[pane2] = $Ts[pane1] = $Ts[pane2] = false; + + // transfer element pointers and data to NEW Layout keys + move( oPane1, pane2 ); + move( oPane2, pane1 ); + + // cleanup objects + oPane1 = oPane2 = sizes = null; + + // make panes 'visible' again + if ($Ps[pane1]) $Ps[pane1].css(_c.visible); + if ($Ps[pane2]) $Ps[pane2].css(_c.visible); + + // fix any size discrepancies caused by swap + resizeAll(); + + // run these even if NOT state.initialized + _runCallbacks("onswap_end", pane1); + _runCallbacks("onswap_end", pane2); + + return; + + function copy (n) { // n = pane + var + $P = $Ps[n] + , $C = $Cs[n] + ; + return !$P ? false : { + pane: n + , P: $P ? $P[0] : false + , C: $C ? $C[0] : false + , state: $.extend(true, {}, state[n]) + , options: $.extend(true, {}, options[n]) + } + }; + + function move (oPane, pane) { + if (!oPane) return; + var + P = oPane.P + , C = oPane.C + , oldPane = oPane.pane + , c = _c[pane] + , side = c.side.toLowerCase() + , inset = "inset"+ c.side + // save pane-options that should be retained + , s = $.extend(true, {}, state[pane]) + , o = options[pane] + // RETAIN side-specific FX Settings - more below + , fx = { resizerCursor: o.resizerCursor } + , re, size, pos + ; + $.each("fxName,fxSpeed,fxSettings".split(","), function (i, k) { + fx[k +"_open"] = o[k +"_open"]; + fx[k +"_close"] = o[k +"_close"]; + fx[k +"_size"] = o[k +"_size"]; + }); + + // update object pointers and attributes + $Ps[pane] = $(P) + .data({ + layoutPane: Instance[pane] // NEW pointer to pane-alias-object + , layoutEdge: pane + }) + .css(_c.hidden) + .css(c.cssReq) + ; + $Cs[pane] = C ? $(C) : false; + + // set options and state + options[pane] = $.extend(true, {}, oPane.options, fx); + state[pane] = $.extend(true, {}, oPane.state); + + // change classNames on the pane, eg: ui-layout-pane-east ==> ui-layout-pane-west + re = new RegExp(o.paneClass +"-"+ oldPane, "g"); + P.className = P.className.replace(re, o.paneClass +"-"+ pane); + + // ALWAYS regenerate the resizer & toggler elements + initHandles(pane); // create the required resizer & toggler + + // if moving to different orientation, then keep 'target' pane size + if (c.dir != _c[oldPane].dir) { + size = sizes[pane] || 0; + setSizeLimits(pane); // update pane-state + size = max(size, state[pane].minSize); + // use manualSizePane to disable autoResize - not useful after panes are swapped + manualSizePane(pane, size, true, true); // true/true = skipCallback/noAnimation + } + else // move the resizer here + $Rs[pane].css(side, sC[inset] + (state[pane].isVisible ? 
getPaneSize(pane) : 0)); + + + // ADD CLASSNAMES & SLIDE-BINDINGS + if (oPane.state.isVisible && !s.isVisible) + setAsOpen(pane, true); // true = skipCallback + else { + setAsClosed(pane); + bindStartSlidingEvent(pane, true); // will enable events IF option is set + } + + // DESTROY the object + oPane = null; + }; + } + + + /** + * INTERNAL method to sync pin-buttons when pane is opened or closed + * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes + * + * @see open(), setAsOpen(), setAsClosed() + * @param {string} pane These are the params returned to callbacks by layout() + * @param {boolean} doPin True means set the pin 'down', False means 'up' + */ +, syncPinBtns = function (pane, doPin) { + if ($.layout.plugins.buttons) + $.each(state[pane].pins, function (i, selector) { + $.layout.buttons.setPinState(Instance, $(selector), pane, doPin); + }); + } + +; // END var DECLARATIONS + + /** + * Capture keys when enableCursorHotkey - toggle pane if hotkey pressed + * + * @see document.keydown() + */ + function keyDown (evt) { + if (!evt) return true; + var code = evt.keyCode; + if (code < 33) return true; // ignore special keys: ENTER, TAB, etc + + var + PANE = { + 38: "north" // Up Cursor - $.ui.keyCode.UP + , 40: "south" // Down Cursor - $.ui.keyCode.DOWN + , 37: "west" // Left Cursor - $.ui.keyCode.LEFT + , 39: "east" // Right Cursor - $.ui.keyCode.RIGHT + } + , ALT = evt.altKey // no worky! + , SHIFT = evt.shiftKey + , CTRL = evt.ctrlKey + , CURSOR = (CTRL && code >= 37 && code <= 40) + , o, k, m, pane + ; + + if (CURSOR && options[PANE[code]].enableCursorHotkey) // valid cursor-hotkey + pane = PANE[code]; + else if (CTRL || SHIFT) // check to see if this matches a custom-hotkey + $.each(_c.borderPanes, function (i, p) { // loop each pane to check its hotkey + o = options[p]; + k = o.customHotkey; + m = o.customHotkeyModifier; // if missing or invalid, treated as "CTRL+SHIFT" + if ((SHIFT && m=="SHIFT") || (CTRL && m=="CTRL") || (CTRL && SHIFT)) { // Modifier matches + if (k && code === (isNaN(k) || k <= 9 ? k.toUpperCase().charCodeAt(0) : k)) { // Key matches + pane = p; + return false; // BREAK + } + } + }); + + // validate pane + if (!pane || !$Ps[pane] || !options[pane].closable || state[pane].isHidden) + return true; + + toggle(pane); + + evt.stopPropagation(); + evt.returnValue = false; // CANCEL key + return false; + }; + + +/* + * ###################################### + * UTILITY METHODS + * called externally or by initButtons + * ###################################### + */ + + /** + * Change/reset a pane overflow setting & zIndex to allow popups/drop-downs to work + * + * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event + */ + function allowOverflow (el) { + if (!isInitialized()) return; + if (this && this.tagName) el = this; // BOUND to element + var $P; + if (isStr(el)) + $P = $Ps[el]; + else if ($(el).data("layoutRole")) + $P = $(el); + else + $(el).parents().each(function(){ + if ($(this).data("layoutRole")) { + $P = $(this); + return false; // BREAK + } + }); + if (!$P || !$P.length) return; // INVALID + + var + pane = $P.data("layoutEdge") + , s = state[pane] + ; + + // if pane is already raised, then reset it before doing it again! 
+ // this would happen if allowOverflow is attached to BOTH the pane and an element + if (s.cssSaved) + resetOverflow(pane); // reset previous CSS before continuing + + // if pane is raised by sliding or resizing, or it's closed, then abort + if (s.isSliding || s.isResizing || s.isClosed) { + s.cssSaved = false; + return; + } + + var + newCSS = { zIndex: (options.zIndexes.resizer_normal + 1) } + , curCSS = {} + , of = $P.css("overflow") + , ofX = $P.css("overflowX") + , ofY = $P.css("overflowY") + ; + // determine which, if any, overflow settings need to be changed + if (of != "visible") { + curCSS.overflow = of; + newCSS.overflow = "visible"; + } + if (ofX && !ofX.match(/(visible|auto)/)) { + curCSS.overflowX = ofX; + newCSS.overflowX = "visible"; + } + if (ofY && !ofY.match(/(visible|auto)/)) { + curCSS.overflowY = ofY; + newCSS.overflowY = "visible"; + } + + // save the current overflow settings - even if blank! + s.cssSaved = curCSS; + + // apply new CSS to raise zIndex and, if necessary, make overflow 'visible' + $P.css( newCSS ); + + // make sure the zIndex of all other panes is normal + $.each(_c.allPanes, function(i, p) { + if (p != pane) resetOverflow(p); + }); + + }; + /** + * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event + */ + function resetOverflow (el) { + if (!isInitialized()) return; + if (this && this.tagName) el = this; // BOUND to element + var $P; + if (isStr(el)) + $P = $Ps[el]; + else if ($(el).data("layoutRole")) + $P = $(el); + else + $(el).parents().each(function(){ + if ($(this).data("layoutRole")) { + $P = $(this); + return false; // BREAK + } + }); + if (!$P || !$P.length) return; // INVALID + + var + pane = $P.data("layoutEdge") + , s = state[pane] + , CSS = s.cssSaved || {} + ; + // reset the zIndex + if (!s.isSliding && !s.isResizing) + $P.css("zIndex", options.zIndexes.pane_normal); + + // reset Overflow - if necessary + $P.css( CSS ); + + // clear var + s.cssSaved = false; + }; + +/* + * ##################### + * CREATE/RETURN LAYOUT + * ##################### + */ + + // validate that container exists + var $N = $(this).eq(0); // FIRST matching Container element + if (!$N.length) { + return _log( options.errors.containerMissing ); + }; + + // Users retrieve Instance of a layout with: $N.layout() OR $N.data("layout") + // return the Instance-pointer if layout has already been initialized + if ($N.data("layoutContainer") && $N.data("layout")) + return $N.data("layout"); // cached pointer + + // init global vars + var + $Ps = {} // Panes x5 - set in initPanes() + , $Cs = {} // Content x5 - set in initPanes() + , $Rs = {} // Resizers x4 - set in initHandles() + , $Ts = {} // Togglers x4 - set in initHandles() + , $Ms = $([]) // Masks - up to 2 masks per pane (IFRAME + DIV) + // aliases for code brevity + , sC = state.container // alias for easy access to 'container dimensions' + , sID = state.id // alias for unique layout ID/namespace - eg: "layout435" + ; + + // create Instance object to expose data & option Properties, and primary action Methods + var Instance = { + // layout data + options: options // property - options hash + , state: state // property - dimensions hash + // object pointers + , container: $N // property - object pointers for layout container + , panes: $Ps // property - object pointers for ALL Panes: panes.north, panes.center + , contents: $Cs // property - object pointers for ALL Content: contents.north, contents.center + , resizers: $Rs // property - object pointers for ALL Resizers, eg: 
resizers.north + , togglers: $Ts // property - object pointers for ALL Togglers, eg: togglers.north + // border-pane open/close + , hide: hide // method - ditto + , show: show // method - ditto + , toggle: toggle // method - pass a 'pane' ("north", "west", etc) + , open: open // method - ditto + , close: close // method - ditto + , slideOpen: slideOpen // method - ditto + , slideClose: slideClose // method - ditto + , slideToggle: slideToggle // method - ditto + // pane actions + , setSizeLimits: setSizeLimits // method - pass a 'pane' - update state min/max data + , _sizePane: sizePane // method -intended for user by plugins only! + , sizePane: manualSizePane // method - pass a 'pane' AND an 'outer-size' in pixels or percent, or 'auto' + , sizeContent: sizeContent // method - pass a 'pane' + , swapPanes: swapPanes // method - pass TWO 'panes' - will swap them + , showMasks: showMasks // method - pass a 'pane' OR list of panes - default = all panes with mask option set + , hideMasks: hideMasks // method - ditto' + // pane element methods + , initContent: initContent // method - ditto + , addPane: addPane // method - pass a 'pane' + , removePane: removePane // method - pass a 'pane' to remove from layout, add 'true' to delete the pane-elem + , createChildLayout: createChildLayout// method - pass a 'pane' and (optional) layout-options (OVERRIDES options[pane].childOptions + // special pane option setting + , enableClosable: enableClosable // method - pass a 'pane' + , disableClosable: disableClosable // method - ditto + , enableSlidable: enableSlidable // method - ditto + , disableSlidable: disableSlidable // method - ditto + , enableResizable: enableResizable // method - ditto + , disableResizable: disableResizable// method - ditto + // utility methods for panes + , allowOverflow: allowOverflow // utility - pass calling element (this) + , resetOverflow: resetOverflow // utility - ditto + // layout control + , destroy: destroy // method - no parameters + , initPanes: isInitialized // method - no parameters + , resizeAll: resizeAll // method - no parameters + // callback triggering + , runCallbacks: _runCallbacks // method - pass evtName & pane (if a pane-event), eg: trigger("onopen", "west") + // alias collections of options, state and children - created in addPane and extended elsewhere + , hasParentLayout: false // set by initContainer() + , children: children // pointers to child-layouts, eg: Instance.children["west"] + , north: false // alias group: { name: pane, pane: $Ps[pane], options: options[pane], state: state[pane], child: children[pane] } + , south: false // ditto + , west: false // ditto + , east: false // ditto + , center: false // ditto + }; + + // create the border layout NOW + if (_create() === 'cancel') // onload_start callback returned false to CANCEL layout creation + return null; + else // true OR false -- if layout-elements did NOT init (hidden or do not exist), can auto-init later + return Instance; // return the Instance object + +} + + +/* OLD versions of jQuery only set $.support.boxModel after page is loaded + * so if this is IE, use support.boxModel to test for quirks-mode (ONLY IE changes boxModel). 
+ */ +$(function(){ + var b = $.layout.browser; + if (b.msie) b.boxModel = $.support.boxModel; +}); + + +/** + * jquery.layout.state 1.0 + * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $ + * + * Copyright (c) 2010 + * Kevin Dalman (http://allpro.net) + * + * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html) + * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses. + * + * @dependancies: UI Layout 1.3.0.rc30.1 or higher + * @dependancies: $.ui.cookie (above) + * + * @support: http://groups.google.com/group/jquery-ui-layout + */ +/* + * State-management options stored in options.stateManagement, which includes a .cookie hash + * Default options saves ALL KEYS for ALL PANES, ie: pane.size, pane.isClosed, pane.isHidden + * + * // STATE/COOKIE OPTIONS + * @example $(el).layout({ + stateManagement: { + enabled: true + , stateKeys: "east.size,west.size,east.isClosed,west.isClosed" + , cookie: { name: "appLayout", path: "/" } + } + }) + * @example $(el).layout({ stateManagement__enabled: true }) // enable auto-state-management using cookies + * @example $(el).layout({ stateManagement__cookie: { name: "appLayout", path: "/" } }) + * @example $(el).layout({ stateManagement__cookie__name: "appLayout", stateManagement__cookie__path: "/" }) + * + * // STATE/COOKIE METHODS + * @example myLayout.saveCookie( "west.isClosed,north.size,south.isHidden", {expires: 7} ); + * @example myLayout.loadCookie(); + * @example myLayout.deleteCookie(); + * @example var JSON = myLayout.readState(); // CURRENT Layout State + * @example var JSON = myLayout.readCookie(); // SAVED Layout State (from cookie) + * @example var JSON = myLayout.state.stateData; // LAST LOADED Layout State (cookie saved in layout.state hash) + * + * CUSTOM STATE-MANAGEMENT (eg, saved in a database) + * @example var JSON = myLayout.readState( "west.isClosed,north.size,south.isHidden" ); + * @example myLayout.loadState( JSON ); + */ + +/** + * UI COOKIE UTILITY + * + * A $.cookie OR $.ui.cookie namespace *should be standard*, but until then... + * This creates $.ui.cookie so Layout does not need the cookie.jquery.js plugin + * NOTE: This utility is REQUIRED by the layout.state plugin + * + * Cookie methods in Layout are created as part of State Management + */ +if (!$.ui) $.ui = {}; +$.ui.cookie = { + + // cookieEnabled is not in DOM specs, but DOES works in all browsers,including IE6 + acceptsCookies: !!navigator.cookieEnabled + +, read: function (name) { + var + c = document.cookie + , cs = c ? c.split(';') : [] + , pair // loop var + ; + for (var i=0, n=cs.length; i < n; i++) { + pair = $.trim(cs[i]).split('='); // name=value pair + if (pair[0] == name) // found the layout cookie + return decodeURIComponent(pair[1]); + + } + return null; + } + +, write: function (name, val, cookieOpts) { + var + params = '' + , date = '' + , clear = false + , o = cookieOpts || {} + , x = o.expires + ; + if (x && x.toUTCString) + date = x; + else if (x === null || typeof x === 'number') { + date = new Date(); + if (x > 0) + date.setDate(date.getDate() + x); + else { + date.setFullYear(1970); + clear = true; + } + } + if (date) params += ';expires='+ date.toUTCString(); + if (o.path) params += ';path='+ o.path; + if (o.domain) params += ';domain='+ o.domain; + if (o.secure) params += ';secure'; + document.cookie = name +'='+ (clear ? 
"" : encodeURIComponent( val )) + params; // write or clear cookie + } + +, clear: function (name) { + $.ui.cookie.write(name, '', {expires: -1}); + } + +}; +// if cookie.jquery.js is not loaded, create an alias to replicate it +// this may be useful to other plugins or code dependent on that plugin +if (!$.cookie) $.cookie = function (k, v, o) { + var C = $.ui.cookie; + if (v === null) + C.clear(k); + else if (v === undefined) + return C.read(k); + else + C.write(k, v, o); +}; + + +// tell Layout that the state plugin is available +$.layout.plugins.stateManagement = true; + +// Add State-Management options to layout.defaults +$.layout.config.optionRootKeys.push("stateManagement"); +$.layout.defaults.stateManagement = { + enabled: false // true = enable state-management, even if not using cookies +, autoSave: true // Save a state-cookie when page exits? +, autoLoad: true // Load the state-cookie when Layout inits? + // List state-data to save - must be pane-specific +, stateKeys: "north.size,south.size,east.size,west.size,"+ + "north.isClosed,south.isClosed,east.isClosed,west.isClosed,"+ + "north.isHidden,south.isHidden,east.isHidden,west.isHidden" +, cookie: { + name: "" // If not specified, will use Layout.name, else just "Layout" + , domain: "" // blank = current domain + , path: "" // blank = current page, '/' = entire website + , expires: "" // 'days' to keep cookie - leave blank for 'session cookie' + , secure: false + } +}; +// Set stateManagement as a layout-option, NOT a pane-option +$.layout.optionsMap.layout.push("stateManagement"); + +/* + * State Management methods + */ +$.layout.state = { + + /** + * Get the current layout state and save it to a cookie + * + * myLayout.saveCookie( keys, cookieOpts ) + * + * @param {Object} inst + * @param {(string|Array)=} keys + * @param {Object=} cookieOpts + */ + saveCookie: function (inst, keys, cookieOpts) { + var o = inst.options + , oS = o.stateManagement + , oC = $.extend(true, {}, oS.cookie, cookieOpts || null) + , data = inst.state.stateData = inst.readState( keys || oS.stateKeys ) // read current panes-state + ; + $.ui.cookie.write( oC.name || o.name || "Layout", $.layout.state.encodeJSON(data), oC ); + return $.extend(true, {}, data); // return COPY of state.stateData data + } + + /** + * Remove the state cookie + * + * @param {Object} inst + */ +, deleteCookie: function (inst) { + var o = inst.options; + $.ui.cookie.clear( o.stateManagement.cookie.name || o.name || "Layout" ); + } + + /** + * Read & return data from the cookie - as JSON + * + * @param {Object} inst + */ +, readCookie: function (inst) { + var o = inst.options; + var c = $.ui.cookie.read( o.stateManagement.cookie.name || o.name || "Layout" ); + // convert cookie string back to a hash and return it + return c ? 
$.layout.state.decodeJSON(c) : {}; + } + + /** + * Get data from the cookie and USE IT to loadState + * + * @param {Object} inst + */ +, loadCookie: function (inst) { + var c = $.layout.state.readCookie(inst); // READ the cookie + if (c) { + inst.state.stateData = $.extend(true, {}, c); // SET state.stateData + inst.loadState(c); // LOAD the retrieved state + } + return c; + } + + /** + * Update layout options from the cookie, if one exists + * + * @param {Object} inst + * @param {Object=} stateData + * @param {boolean=} animate + */ +, loadState: function (inst, stateData, animate) { + stateData = $.layout.transformData( stateData ); // panes = default subkey + if ($.isEmptyObject( stateData )) return; + $.extend(true, inst.options, stateData); // update layout options + // if layout has already been initialized, then UPDATE layout state + if (inst.state.initialized) { + var pane, vis, o, s, h, c + , noAnimate = (animate===false) + ; + $.each($.layout.config.borderPanes, function (idx, pane) { + state = inst.state[pane]; + o = stateData[ pane ]; + if (typeof o != 'object') return; // no key, continue + s = o.size; + c = o.initClosed; + h = o.initHidden; + vis = state.isVisible; + // resize BEFORE opening + if (!vis) + inst.sizePane(pane, s, false, false); + if (h === true) inst.hide(pane, noAnimate); + else if (c === false) inst.open (pane, false, noAnimate); + else if (c === true) inst.close(pane, false, noAnimate); + else if (h === false) inst.show (pane, false, noAnimate); + // resize AFTER any other actions + if (vis) + inst.sizePane(pane, s, false, noAnimate); // animate resize if option passed + }); + }; + } + + /** + * Get the *current layout state* and return it as a hash + * + * @param {Object=} inst + * @param {(string|Array)=} keys + */ +, readState: function (inst, keys) { + var + data = {} + , alt = { isClosed: 'initClosed', isHidden: 'initHidden' } + , state = inst.state + , panes = $.layout.config.allPanes + , pair, pane, key, val + ; + if (!keys) keys = inst.options.stateManagement.stateKeys; // if called by user + if ($.isArray(keys)) keys = keys.join(","); + // convert keys to an array and change delimiters from '__' to '.' + keys = keys.replace(/__/g, ".").split(','); + // loop keys and create a data hash + for (var i=0, n=keys.length; i < n; i++) { + pair = keys[i].split("."); + pane = pair[0]; + key = pair[1]; + if ($.inArray(pane, panes) < 0) continue; // bad pane! + val = state[ pane ][ key ]; + if (val == undefined) continue; + if (key=="isClosed" && state[pane]["isSliding"]) + val = true; // if sliding, then *really* isClosed + ( data[pane] || (data[pane]={}) )[ alt[key] ? alt[key] : key ] = val; + } + return data; + } + + /** + * Stringify a JSON hash so can save in a cookie or db-field + */ +, encodeJSON: function (JSON) { + return parse(JSON); + function parse (h) { + var D=[], i=0, k, v, t; // k = key, v = value + for (k in h) { + v = h[k]; + t = typeof v; + if (t == 'string') // STRING - add quotes + v = '"'+ v +'"'; + else if (t == 'object') // SUB-KEY - recurse into it + v = parse(v); + D[i++] = '"'+ k +'":'+ v; + } + return '{'+ D.join(',') +'}'; + }; + } + + /** + * Convert stringified JSON back to a hash object + * @see $.parseJSON(), adding in jQuery 1.4.1 + */ +, decodeJSON: function (str) { + try { return $.parseJSON ? 
$.parseJSON(str) : window["eval"]("("+ str +")") || {}; } + catch (e) { return {}; } + } + + +, _create: function (inst) { + var _ = $.layout.state; + // ADD State-Management plugin methods to inst + $.extend( inst, { + // readCookie - update options from cookie - returns hash of cookie data + readCookie: function () { return _.readCookie(inst); } + // deleteCookie + , deleteCookie: function () { _.deleteCookie(inst); } + // saveCookie - optionally pass keys-list and cookie-options (hash) + , saveCookie: function (keys, cookieOpts) { return _.saveCookie(inst, keys, cookieOpts); } + // loadCookie - readCookie and use to loadState() - returns hash of cookie data + , loadCookie: function () { return _.loadCookie(inst); } + // loadState - pass a hash of state to use to update options + , loadState: function (stateData, animate) { _.loadState(inst, stateData, animate); } + // readState - returns hash of current layout-state + , readState: function (keys) { return _.readState(inst, keys); } + // add JSON utility methods too... + , encodeJSON: _.encodeJSON + , decodeJSON: _.decodeJSON + }); + + // init state.stateData key, even if plugin is initially disabled + inst.state.stateData = {}; + + // read and load cookie-data per options + var oS = inst.options.stateManagement; + if (oS.enabled) { + if (oS.autoLoad) // update the options from the cookie + inst.loadCookie(); + else // don't modify options - just store cookie data in state.stateData + inst.state.stateData = inst.readCookie(); + } + } + +, _unload: function (inst) { + var oS = inst.options.stateManagement; + if (oS.enabled) { + if (oS.autoSave) // save a state-cookie automatically + inst.saveCookie(); + else // don't save a cookie, but do store state-data in state.stateData key + inst.state.stateData = inst.readState(); + } + } + +}; + +// add state initialization method to Layout's onCreate array of functions +$.layout.onCreate.push( $.layout.state._create ); +$.layout.onUnload.push( $.layout.state._unload ); + + + + +/** + * jquery.layout.buttons 1.0 + * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $ + * + * Copyright (c) 2010 + * Kevin Dalman (http://allpro.net) + * + * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html) + * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses. 
+ * + * @dependancies: UI Layout 1.3.0.rc30.1 or higher + * + * @support: http://groups.google.com/group/jquery-ui-layout + * + * Docs: [ to come ] + * Tips: [ to come ] + */ + +// tell Layout that the state plugin is available +$.layout.plugins.buttons = true; + +// Add buttons options to layout.defaults +$.layout.defaults.autoBindCustomButtons = false; +// Specify autoBindCustomButtons as a layout-option, NOT a pane-option +$.layout.optionsMap.layout.push("autoBindCustomButtons"); + +/* + * Button methods + */ +$.layout.buttons = { + + /** + * Searches for .ui-layout-button-xxx elements and auto-binds them as layout-buttons + * + * @see _create() + * + * @param {Object} inst Layout Instance object + */ + init: function (inst) { + var pre = "ui-layout-button-" + , layout = inst.options.name || "" + , name; + $.each("toggle,open,close,pin,toggle-slide,open-slide".split(","), function (i, action) { + $.each($.layout.config.borderPanes, function (ii, pane) { + $("."+pre+action+"-"+pane).each(function(){ + // if button was previously 'bound', data.layoutName was set, but is blank if layout has no 'name' + name = $(this).data("layoutName") || $(this).attr("layoutName"); + if (name == undefined || name === layout) + inst.bindButton(this, action, pane); + }); + }); + }); + } + + /** + * Helper function to validate params received by addButton utilities + * + * Two classes are added to the element, based on the buttonClass... + * The type of button is appended to create the 2nd className: + * - ui-layout-button-pin // action btnClass + * - ui-layout-button-pin-west // action btnClass + pane + * - ui-layout-button-toggle + * - ui-layout-button-open + * - ui-layout-button-close + * + * @param {Object} inst Layout Instance object + * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" + * @param {string} pane Name of the pane the button is for: 'north', 'south', etc. + * + * @return {Array.} If both params valid, the element matching 'selector' in a jQuery wrapper - otherwise returns null + */ +, get: function (inst, selector, pane, action) { + var $E = $(selector) + , o = inst.options + , err = o.errors.addButtonError + ; + if (!$E.length) { // element not found + $.layout.msg(err +" "+ o.errors.selector +": "+ selector, true); + } + else if ($.inArray(pane, $.layout.config.borderPanes) < 0) { // invalid 'pane' sepecified + $.layout.msg(err +" "+ o.errors.pane +": "+ pane, true); + $E = $(""); // NO BUTTON + } + else { // VALID + var btn = o[pane].buttonClass +"-"+ action; + $E .addClass( btn +" "+ btn +"-"+ pane ) + .data("layoutName", o.name); // add layout identifier - even if blank! + } + return $E; + } + + + /** + * NEW syntax for binding layout-buttons - will eventually replace addToggle, addOpen, etc. 
+ * + * @param {Object} inst Layout Instance object + * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" + * @param {string} action + * @param {string} pane + */ +, bind: function (inst, selector, action, pane) { + var _ = $.layout.buttons; + switch (action.toLowerCase()) { + case "toggle": _.addToggle (inst, selector, pane); break; + case "open": _.addOpen (inst, selector, pane); break; + case "close": _.addClose (inst, selector, pane); break; + case "pin": _.addPin (inst, selector, pane); break; + case "toggle-slide": _.addToggle (inst, selector, pane, true); break; + case "open-slide": _.addOpen (inst, selector, pane, true); break; + } + return inst; + } + + /** + * Add a custom Toggler button for a pane + * + * @param {Object} inst Layout Instance object + * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" + * @param {string} pane Name of the pane the button is for: 'north', 'south', etc. + * @param {boolean=} slide true = slide-open, false = pin-open + */ +, addToggle: function (inst, selector, pane, slide) { + $.layout.buttons.get(inst, selector, pane, "toggle") + .click(function(evt){ + inst.toggle(pane, !!slide); + evt.stopPropagation(); + }); + return inst; + } + + /** + * Add a custom Open button for a pane + * + * @param {Object} inst Layout Instance object + * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" + * @param {string} pane Name of the pane the button is for: 'north', 'south', etc. + * @param {boolean=} slide true = slide-open, false = pin-open + */ +, addOpen: function (inst, selector, pane, slide) { + $.layout.buttons.get(inst, selector, pane, "open") + .attr("title", inst.options[pane].tips.Open) + .click(function (evt) { + inst.open(pane, !!slide); + evt.stopPropagation(); + }); + return inst; + } + + /** + * Add a custom Close button for a pane + * + * @param {Object} inst Layout Instance object + * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" + * @param {string} pane Name of the pane the button is for: 'north', 'south', etc. + */ +, addClose: function (inst, selector, pane) { + $.layout.buttons.get(inst, selector, pane, "close") + .attr("title", inst.options[pane].tips.Close) + .click(function (evt) { + inst.close(pane); + evt.stopPropagation(); + }); + return inst; + } + + /** + * Add a custom Pin button for a pane + * + * Four classes are added to the element, based on the paneClass for the associated pane... + * Assuming the default paneClass and the pin is 'up', these classes are added for a west-pane pin: + * - ui-layout-pane-pin + * - ui-layout-pane-west-pin + * - ui-layout-pane-pin-up + * - ui-layout-pane-west-pin-up + * + * @param {Object} inst Layout Instance object + * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button" + * @param {string} pane Name of the pane the pin is for: 'north', 'south', etc. 
+ */ +, addPin: function (inst, selector, pane) { + var _ = $.layout.buttons + , $E = _.get(inst, selector, pane, "pin"); + if ($E.length) { + var s = inst.state[pane]; + $E.click(function (evt) { + _.setPinState(inst, $(this), pane, (s.isSliding || s.isClosed)); + if (s.isSliding || s.isClosed) inst.open( pane ); // change from sliding to open + else inst.close( pane ); // slide-closed + evt.stopPropagation(); + }); + // add up/down pin attributes and classes + _.setPinState(inst, $E, pane, (!s.isClosed && !s.isSliding)); + // add this pin to the pane data so we can 'sync it' automatically + // PANE.pins key is an array so we can store multiple pins for each pane + s.pins.push( selector ); // just save the selector string + } + return inst; + } + + /** + * Change the class of the pin button to make it look 'up' or 'down' + * + * @see addPin(), syncPins() + * + * @param {Object} inst Layout Instance object + * @param {Array.} $Pin The pin-span element in a jQuery wrapper + * @param {string} pane These are the params returned to callbacks by layout() + * @param {boolean} doPin true = set the pin 'down', false = set it 'up' + */ +, setPinState: function (inst, $Pin, pane, doPin) { + var updown = $Pin.attr("pin"); + if (updown && doPin === (updown=="down")) return; // already in correct state + var + o = inst.options[pane] + , pin = o.buttonClass +"-pin" + , side = pin +"-"+ pane + , UP = pin +"-up "+ side +"-up" + , DN = pin +"-down "+side +"-down" + ; + $Pin + .attr("pin", doPin ? "down" : "up") // logic + .attr("title", doPin ? o.tips.Unpin : o.tips.Pin) + .removeClass( doPin ? UP : DN ) + .addClass( doPin ? DN : UP ) + ; + } + + /** + * INTERNAL function to sync 'pin buttons' when pane is opened or closed + * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes + * + * @see open(), close() + * + * @param {Object} inst Layout Instance object + * @param {string} pane These are the params returned to callbacks by layout() + * @param {boolean} doPin True means set the pin 'down', False means 'up' + */ +, syncPinBtns: function (inst, pane, doPin) { + // REAL METHOD IS _INSIDE_ LAYOUT - THIS IS HERE JUST FOR REFERENCE + $.each(inst.state[pane].pins, function (i, selector) { + $.layout.buttons.setPinState(inst, $(selector), pane, doPin); + }); + } + + +, _load: function (inst) { + var _ = $.layout.buttons; + // ADD Button methods to Layout Instance + // Note: sel = jQuery Selector string + $.extend( inst, { + bindButton: function (sel, action, pane) { return _.bind(inst, sel, action, pane); } + // DEPRECATED METHODS + , addToggleBtn: function (sel, pane, slide) { return _.addToggle(inst, sel, pane, slide); } + , addOpenBtn: function (sel, pane, slide) { return _.addOpen(inst, sel, pane, slide); } + , addCloseBtn: function (sel, pane) { return _.addClose(inst, sel, pane); } + , addPinBtn: function (sel, pane) { return _.addPin(inst, sel, pane); } + }); + + // init state array to hold pin-buttons + for (var i=0; i<4; i++) { + var pane = $.layout.config.borderPanes[i]; + inst.state[pane].pins = []; + } + + // auto-init buttons onLoad if option is enabled + if ( inst.options.autoBindCustomButtons ) + _.init(inst); + } + +, _unload: function (inst) { + // TODO: unbind all buttons??? 
+ } + +}; + +// add initialization method to Layout's onLoad array of functions +$.layout.onLoad.push( $.layout.buttons._load ); +//$.layout.onUnload.push( $.layout.buttons._unload ); + + + +/** + * jquery.layout.browserZoom 1.0 + * $Date: 2011-12-29 08:00:00 (Thu, 29 Dec 2011) $ + * + * Copyright (c) 2012 + * Kevin Dalman (http://allpro.net) + * + * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html) + * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses. + * + * @dependancies: UI Layout 1.3.0.rc30.1 or higher + * + * @support: http://groups.google.com/group/jquery-ui-layout + * + * @todo: Extend logic to handle other problematic zooming in browsers + * @todo: Add hotkey/mousewheel bindings to _instantly_ respond to these zoom event + */ + +// tell Layout that the plugin is available +$.layout.plugins.browserZoom = true; + +$.layout.defaults.browserZoomCheckInterval = 1000; +$.layout.optionsMap.layout.push("browserZoomCheckInterval"); + +/* + * browserZoom methods + */ +$.layout.browserZoom = { + + _init: function (inst) { + // abort if browser does not need this check + if ($.layout.browserZoom.ratio() !== false) + $.layout.browserZoom._setTimer(inst); + } + +, _setTimer: function (inst) { + // abort if layout destroyed or browser does not need this check + if (inst.destroyed) return; + var o = inst.options + , s = inst.state + // don't need check if inst has parentLayout, but check occassionally in case parent destroyed! + // MINIMUM 100ms interval, for performance + , ms = inst.hasParentLayout ? 5000 : Math.max( o.browserZoomCheckInterval, 100 ) + ; + // set the timer + setTimeout(function(){ + if (inst.destroyed || !o.resizeWithWindow) return; + var d = $.layout.browserZoom.ratio(); + if (d !== s.browserZoom) { + s.browserZoom = d; + inst.resizeAll(); + } + // set a NEW timeout + $.layout.browserZoom._setTimer(inst); + } + , ms ); + } + +, ratio: function () { + var w = window + , s = screen + , d = document + , dE = d.documentElement || d.body + , b = $.layout.browser + , v = b.version + , r, sW, cW + ; + // we can ignore all browsers that fire window.resize event onZoom + if ((b.msie && v > 8) + || !b.msie + ) return false; // don't need to track zoom + + if (s.deviceXDPI) + return calc(s.deviceXDPI, s.systemXDPI); + // everything below is just for future reference! 
+ if (b.webkit && (r = d.body.getBoundingClientRect)) + return calc((r.left - r.right), d.body.offsetWidth); + if (b.webkit && (sW = w.outerWidth)) + return calc(sW, w.innerWidth); + if ((sW = s.width) && (cW = dE.clientWidth)) + return calc(sW, cW); + return false; // no match, so cannot - or don't need to - track zoom + + function calc (x,y) { return (parseInt(x,10) / parseInt(y,10) * 100).toFixed(); } + } + +}; +// add initialization method to Layout's onLoad array of functions +$.layout.onReady.push( $.layout.browserZoom._init ); + + + +})( jQuery ); \ No newline at end of file diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js new file mode 100644 index 0000000000..4688d633fe --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js @@ -0,0 +1,4 @@ +/* Modernizr 2.5.3 (Custom Build) | MIT & BSD + * Build: http://www.modernizr.com/download/#-inlinesvg + */ +;window.Modernizr=function(a,b,c){function u(a){i.cssText=a}function v(a,b){return u(prefixes.join(a+";")+(b||""))}function w(a,b){return typeof a===b}function x(a,b){return!!~(""+a).indexOf(b)}function y(a,b,d){for(var e in a){var f=b[a[e]];if(f!==c)return d===!1?a[e]:w(f,"function")?f.bind(d||b):f}return!1}var d="2.5.3",e={},f=b.documentElement,g="modernizr",h=b.createElement(g),i=h.style,j,k={}.toString,l={svg:"http://www.w3.org/2000/svg"},m={},n={},o={},p=[],q=p.slice,r,s={}.hasOwnProperty,t;!w(s,"undefined")&&!w(s.call,"undefined")?t=function(a,b){return s.call(a,b)}:t=function(a,b){return b in a&&w(a.constructor.prototype[b],"undefined")},Function.prototype.bind||(Function.prototype.bind=function(b){var c=this;if(typeof c!="function")throw new TypeError;var d=q.call(arguments,1),e=function(){if(this instanceof e){var a=function(){};a.prototype=c.prototype;var f=new a,g=c.apply(f,d.concat(q.call(arguments)));return Object(g)===g?g:f}return c.apply(b,d.concat(q.call(arguments)))};return e}),m.inlinesvg=function(){var a=b.createElement("div");return a.innerHTML="",(a.firstChild&&a.firstChild.namespaceURI)==l.svg};for(var z in m)t(m,z)&&(r=z.toLowerCase(),e[r]=m[z](),p.push((e[r]?"":"no-")+r));return u(""),h=j=null,e._version=d,e}(this,this.document); \ No newline at end of file diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png new file mode 100644 index 0000000000..9b32288e04 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png new file mode 100644 index 0000000000..fd0ad06e81 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png new file mode 100644 index 0000000000..ad312793ea Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png new file mode 100644 index 0000000000..67ffca79de Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png differ diff --git 
a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png new file mode 100644 index 0000000000..6e9f2f743f Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png new file mode 100644 index 0000000000..7502942eb6 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png new file mode 100644 index 0000000000..c777bfce8d Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png new file mode 100644 index 0000000000..7502942eb6 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif new file mode 100644 index 0000000000..848dd5963a Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif new file mode 100644 index 0000000000..34a04249ee Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif new file mode 100644 index 0000000000..2ed33b0aa4 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png new file mode 100644 index 0000000000..6ea17ac320 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png new file mode 100644 index 0000000000..529aa93188 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif new file mode 100644 index 0000000000..00c3378a2a Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js new file mode 100644 index 0000000000..d30dbad858 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js @@ -0,0 +1,10 @@ +// ┌────────────────────────────────────────────────────────────────────┐ \\ +// │ Raphaël 2.1.0 - JavaScript Vector Library │ \\ +// ├────────────────────────────────────────────────────────────────────┤ \\ 
+// │ Copyright © 2008-2012 Dmitry Baranovskiy (http://raphaeljs.com) │ \\ +// │ Copyright © 2008-2012 Sencha Labs (http://sencha.com) │ \\ +// ├────────────────────────────────────────────────────────────────────┤ \\ +// │ Licensed under the MIT (http://raphaeljs.com/license.html) license.│ \\ +// └────────────────────────────────────────────────────────────────────┘ \\ + +(function(a){var b="0.3.4",c="hasOwnProperty",d=/[\.\/]/,e="*",f=function(){},g=function(a,b){return a-b},h,i,j={n:{}},k=function(a,b){var c=j,d=i,e=Array.prototype.slice.call(arguments,2),f=k.listeners(a),l=0,m=!1,n,o=[],p={},q=[],r=h,s=[];h=a,i=0;for(var t=0,u=f.length;tf*b.top){e=b.percents[y],p=b.percents[y-1]||0,t=t/b.top*(e-p),o=b.percents[y+1],j=b.anim[e];break}f&&d.attr(b.anim[b.percents[y]])}if(!!j){if(!k){for(var A in j)if(j[g](A))if(U[g](A)||d.paper.customAttributes[g](A)){u[A]=d.attr(A),u[A]==null&&(u[A]=T[A]),v[A]=j[A];switch(U[A]){case C:w[A]=(v[A]-u[A])/t;break;case"colour":u[A]=a.getRGB(u[A]);var B=a.getRGB(v[A]);w[A]={r:(B.r-u[A].r)/t,g:(B.g-u[A].g)/t,b:(B.b-u[A].b)/t};break;case"path":var D=bR(u[A],v[A]),E=D[1];u[A]=D[0],w[A]=[];for(y=0,z=u[A].length;yd)return d;while(cf?c=e:d=e,e=(d-c)/2+c}return e}function n(a,b){var c=o(a,b);return((l*c+k)*c+j)*c}function m(a){return((i*a+h)*a+g)*a}var g=3*b,h=3*(d-b)-g,i=1-g-h,j=3*c,k=3*(e-c)-j,l=1-j-k;return n(a,1/(200*f))}function cq(){return this.x+q+this.y+q+this.width+" × "+this.height}function cp(){return this.x+q+this.y}function cb(a,b,c,d,e,f){a!=null?(this.a=+a,this.b=+b,this.c=+c,this.d=+d,this.e=+e,this.f=+f):(this.a=1,this.b=0,this.c=0,this.d=1,this.e=0,this.f=0)}function bH(b,c,d){b=a._path2curve(b),c=a._path2curve(c);var e,f,g,h,i,j,k,l,m,n,o=d?0:[];for(var p=0,q=b.length;p=0&&y<=1&&A>=0&&A<=1&&(d?n++:n.push({x:x.x,y:x.y,t1:y,t2:A}))}}return n}function bF(a,b){return bG(a,b,1)}function bE(a,b){return bG(a,b)}function bD(a,b,c,d,e,f,g,h){if(!(x(a,c)x(e,g)||x(b,d)x(f,h))){var i=(a*d-b*c)*(e-g)-(a-c)*(e*h-f*g),j=(a*d-b*c)*(f-h)-(b-d)*(e*h-f*g),k=(a-c)*(f-h)-(b-d)*(e-g);if(!k)return;var l=i/k,m=j/k,n=+l.toFixed(2),o=+m.toFixed(2);if(n<+y(a,c).toFixed(2)||n>+x(a,c).toFixed(2)||n<+y(e,g).toFixed(2)||n>+x(e,g).toFixed(2)||o<+y(b,d).toFixed(2)||o>+x(b,d).toFixed(2)||o<+y(f,h).toFixed(2)||o>+x(f,h).toFixed(2))return;return{x:l,y:m}}}function bC(a,b,c,d,e,f,g,h,i){if(!(i<0||bB(a,b,c,d,e,f,g,h)n)k/=2,l+=(m1?1:i<0?0:i;var j=i/2,k=12,l=[-0.1252,.1252,-0.3678,.3678,-0.5873,.5873,-0.7699,.7699,-0.9041,.9041,-0.9816,.9816],m=[.2491,.2491,.2335,.2335,.2032,.2032,.1601,.1601,.1069,.1069,.0472,.0472],n=0;for(var o=0;od;d+=2){var f=[{x:+a[d-2],y:+a[d-1]},{x:+a[d],y:+a[d+1]},{x:+a[d+2],y:+a[d+3]},{x:+a[d+4],y:+a[d+5]}];b?d?e-4==d?f[3]={x:+a[0],y:+a[1]}:e-2==d&&(f[2]={x:+a[0],y:+a[1]},f[3]={x:+a[2],y:+a[3]}):f[0]={x:+a[e-2],y:+a[e-1]}:e-4==d?f[3]=f[2]:d||(f[0]={x:+a[d],y:+a[d+1]}),c.push(["C",(-f[0].x+6*f[1].x+f[2].x)/6,(-f[0].y+6*f[1].y+f[2].y)/6,(f[1].x+6*f[2].x-f[3].x)/6,(f[1].y+6*f[2].y-f[3].y)/6,f[2].x,f[2].y])}return c}function bx(){return this.hex}function bv(a,b,c){function d(){var e=Array.prototype.slice.call(arguments,0),f=e.join("␀"),h=d.cache=d.cache||{},i=d.count=d.count||[];if(h[g](f)){bu(i,f);return c?c(h[f]):h[f]}i.length>=1e3&&delete h[i.shift()],i.push(f),h[f]=a[m](b,e);return c?c(h[f]):h[f]}return d}function bu(a,b){for(var c=0,d=a.length;c',bl=bk.firstChild,bl.style.behavior="url(#default#VML)";if(!bl||typeof bl.adj!="object")return 
a.type=p;bk=null}a.svg=!(a.vml=a.type=="VML"),a._Paper=j,a.fn=k=j.prototype=a.prototype,a._id=0,a._oid=0,a.is=function(a,b){b=v.call(b);if(b=="finite")return!M[g](+a);if(b=="array")return a instanceof Array;return b=="null"&&a===null||b==typeof a&&a!==null||b=="object"&&a===Object(a)||b=="array"&&Array.isArray&&Array.isArray(a)||H.call(a).slice(8,-1).toLowerCase()==b},a.angle=function(b,c,d,e,f,g){if(f==null){var h=b-d,i=c-e;if(!h&&!i)return 0;return(180+w.atan2(-i,-h)*180/B+360)%360}return a.angle(b,c,f,g)-a.angle(d,e,f,g)},a.rad=function(a){return a%360*B/180},a.deg=function(a){return a*180/B%360},a.snapTo=function(b,c,d){d=a.is(d,"finite")?d:10;if(a.is(b,E)){var e=b.length;while(e--)if(z(b[e]-c)<=d)return b[e]}else{b=+b;var f=c%b;if(fb-d)return c-f+b}return c};var bn=a.createUUID=function(a,b){return function(){return"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(a,b).toUpperCase()}}(/[xy]/g,function(a){var b=w.random()*16|0,c=a=="x"?b:b&3|8;return c.toString(16)});a.setWindow=function(b){eve("raphael.setWindow",a,h.win,b),h.win=b,h.doc=h.win.document,a._engine.initWin&&a._engine.initWin(h.win)};var bo=function(b){if(a.vml){var c=/^\s+|\s+$/g,d;try{var e=new ActiveXObject("htmlfile");e.write(""),e.close(),d=e.body}catch(f){d=createPopup().document.body}var g=d.createTextRange();bo=bv(function(a){try{d.style.color=r(a).replace(c,p);var b=g.queryCommandValue("ForeColor");b=(b&255)<<16|b&65280|(b&16711680)>>>16;return"#"+("000000"+b.toString(16)).slice(-6)}catch(e){return"none"}})}else{var i=h.doc.createElement("i");i.title="Raphaël Colour Picker",i.style.display="none",h.doc.body.appendChild(i),bo=bv(function(a){i.style.color=a;return h.doc.defaultView.getComputedStyle(i,p).getPropertyValue("color")})}return bo(b)},bp=function(){return"hsb("+[this.h,this.s,this.b]+")"},bq=function(){return"hsl("+[this.h,this.s,this.l]+")"},br=function(){return this.hex},bs=function(b,c,d){c==null&&a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b&&(d=b.b,c=b.g,b=b.r);if(c==null&&a.is(b,D)){var e=a.getRGB(b);b=e.r,c=e.g,d=e.b}if(b>1||c>1||d>1)b/=255,c/=255,d/=255;return[b,c,d]},bt=function(b,c,d,e){b*=255,c*=255,d*=255;var f={r:b,g:c,b:d,hex:a.rgb(b,c,d),toString:br};a.is(e,"finite")&&(f.opacity=e);return f};a.color=function(b){var c;a.is(b,"object")&&"h"in b&&"s"in b&&"b"in b?(c=a.hsb2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):a.is(b,"object")&&"h"in b&&"s"in b&&"l"in b?(c=a.hsl2rgb(b),b.r=c.r,b.g=c.g,b.b=c.b,b.hex=c.hex):(a.is(b,"string")&&(b=a.getRGB(b)),a.is(b,"object")&&"r"in b&&"g"in b&&"b"in b?(c=a.rgb2hsl(b),b.h=c.h,b.s=c.s,b.l=c.l,c=a.rgb2hsb(b),b.v=c.b):(b={hex:"none"},b.r=b.g=b.b=b.h=b.s=b.v=b.l=-1)),b.toString=br;return b},a.hsb2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"b"in a&&(c=a.b,b=a.s,a=a.h,d=a.o),a*=360;var e,f,g,h,i;a=a%360/60,i=c*b,h=i*(1-z(a%2-1)),e=f=g=c-i,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.hsl2rgb=function(a,b,c,d){this.is(a,"object")&&"h"in a&&"s"in a&&"l"in a&&(c=a.l,b=a.s,a=a.h);if(a>1||b>1||c>1)a/=360,b/=100,c/=100;a*=360;var e,f,g,h,i;a=a%360/60,i=2*b*(c<.5?c:1-c),h=i*(1-z(a%2-1)),e=f=g=c-i/2,a=~~a,e+=[i,h,0,0,h,i][a],f+=[h,i,i,h,0,0][a],g+=[0,0,h,i,i,h][a];return bt(e,f,g,d)},a.rgb2hsb=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var d,e,f,g;f=x(a,b,c),g=f-y(a,b,c),d=g==0?null:f==a?(b-c)/g:f==b?(c-a)/g+2:(a-b)/g+4,d=(d+360)%6*60/360,e=g==0?0:g/f;return{h:d,s:e,b:f,toString:bp}},a.rgb2hsl=function(a,b,c){c=bs(a,b,c),a=c[0],b=c[1],c=c[2];var 
d,e,f,g,h,i;g=x(a,b,c),h=y(a,b,c),i=g-h,d=i==0?null:g==a?(b-c)/i:g==b?(c-a)/i+2:(a-b)/i+4,d=(d+360)%6*60/360,f=(g+h)/2,e=i==0?0:f<.5?i/(2*f):i/(2-2*f);return{h:d,s:e,l:f,toString:bq}},a._path2string=function(){return this.join(",").replace(Y,"$1")};var bw=a._preload=function(a,b){var c=h.doc.createElement("img");c.style.cssText="position:absolute;left:-9999em;top:-9999em",c.onload=function(){b.call(this),this.onload=null,h.doc.body.removeChild(this)},c.onerror=function(){h.doc.body.removeChild(this)},h.doc.body.appendChild(c),c.src=a};a.getRGB=bv(function(b){if(!b||!!((b=r(b)).indexOf("-")+1))return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx};if(b=="none")return{r:-1,g:-1,b:-1,hex:"none",toString:bx};!X[g](b.toLowerCase().substring(0,2))&&b.charAt()!="#"&&(b=bo(b));var c,d,e,f,h,i,j,k=b.match(L);if(k){k[2]&&(f=R(k[2].substring(5),16),e=R(k[2].substring(3,5),16),d=R(k[2].substring(1,3),16)),k[3]&&(f=R((i=k[3].charAt(3))+i,16),e=R((i=k[3].charAt(2))+i,16),d=R((i=k[3].charAt(1))+i,16)),k[4]&&(j=k[4][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),k[1].toLowerCase().slice(0,4)=="rgba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100));if(k[5]){j=k[5][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsba"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsb2rgb(d,e,f,h)}if(k[6]){j=k[6][s](W),d=Q(j[0]),j[0].slice(-1)=="%"&&(d*=2.55),e=Q(j[1]),j[1].slice(-1)=="%"&&(e*=2.55),f=Q(j[2]),j[2].slice(-1)=="%"&&(f*=2.55),(j[0].slice(-3)=="deg"||j[0].slice(-1)=="°")&&(d/=360),k[1].toLowerCase().slice(0,4)=="hsla"&&(h=Q(j[3])),j[3]&&j[3].slice(-1)=="%"&&(h/=100);return a.hsl2rgb(d,e,f,h)}k={r:d,g:e,b:f,toString:bx},k.hex="#"+(16777216|f|e<<8|d<<16).toString(16).slice(1),a.is(h,"finite")&&(k.opacity=h);return k}return{r:-1,g:-1,b:-1,hex:"none",error:1,toString:bx}},a),a.hsb=bv(function(b,c,d){return a.hsb2rgb(b,c,d).hex}),a.hsl=bv(function(b,c,d){return a.hsl2rgb(b,c,d).hex}),a.rgb=bv(function(a,b,c){return"#"+(16777216|c|b<<8|a<<16).toString(16).slice(1)}),a.getColor=function(a){var b=this.getColor.start=this.getColor.start||{h:0,s:1,b:a||.75},c=this.hsb2rgb(b.h,b.s,b.b);b.h+=.075,b.h>1&&(b.h=0,b.s-=.2,b.s<=0&&(this.getColor.start={h:0,s:1,b:b.b}));return c.hex},a.getColor.reset=function(){delete this.start},a.parsePathString=function(b){if(!b)return null;var c=bz(b);if(c.arr)return bJ(c.arr);var d={a:7,c:6,h:1,l:2,m:2,r:4,q:4,s:4,t:2,v:1,z:0},e=[];a.is(b,E)&&a.is(b[0],E)&&(e=bJ(b)),e.length||r(b).replace(Z,function(a,b,c){var f=[],g=b.toLowerCase();c.replace(_,function(a,b){b&&f.push(+b)}),g=="m"&&f.length>2&&(e.push([b][n](f.splice(0,2))),g="l",b=b=="m"?"l":"L");if(g=="r")e.push([b][n](f));else while(f.length>=d[g]){e.push([b][n](f.splice(0,d[g])));if(!d[g])break}}),e.toString=a._path2string,c.arr=bJ(e);return e},a.parseTransformString=bv(function(b){if(!b)return null;var c={r:3,s:4,t:2,m:6},d=[];a.is(b,E)&&a.is(b[0],E)&&(d=bJ(b)),d.length||r(b).replace($,function(a,b,c){var e=[],f=v.call(b);c.replace(_,function(a,b){b&&e.push(+b)}),d.push([b][n](e))}),d.toString=a._path2string;return d});var bz=function(a){var b=bz.ps=bz.ps||{};b[a]?b[a].sleep=100:b[a]={sleep:100},setTimeout(function(){for(var c in b)b[g](c)&&c!=a&&(b[c].sleep--,!b[c].sleep&&delete b[c])});return b[a]};a.findDotsAtSegment=function(a,b,c,d,e,f,g,h,i){var 
j=1-i,k=A(j,3),l=A(j,2),m=i*i,n=m*i,o=k*a+l*3*i*c+j*3*i*i*e+n*g,p=k*b+l*3*i*d+j*3*i*i*f+n*h,q=a+2*i*(c-a)+m*(e-2*c+a),r=b+2*i*(d-b)+m*(f-2*d+b),s=c+2*i*(e-c)+m*(g-2*e+c),t=d+2*i*(f-d)+m*(h-2*f+d),u=j*a+i*c,v=j*b+i*d,x=j*e+i*g,y=j*f+i*h,z=90-w.atan2(q-s,r-t)*180/B;(q>s||r=a.x&&b<=a.x2&&c>=a.y&&c<=a.y2},a.isBBoxIntersect=function(b,c){var d=a.isPointInsideBBox;return d(c,b.x,b.y)||d(c,b.x2,b.y)||d(c,b.x,b.y2)||d(c,b.x2,b.y2)||d(b,c.x,c.y)||d(b,c.x2,c.y)||d(b,c.x,c.y2)||d(b,c.x2,c.y2)||(b.xc.x||c.xb.x)&&(b.yc.y||c.yb.y)},a.pathIntersection=function(a,b){return bH(a,b)},a.pathIntersectionNumber=function(a,b){return bH(a,b,1)},a.isPointInsidePath=function(b,c,d){var e=a.pathBBox(b);return a.isPointInsideBBox(e,c,d)&&bH(b,[["M",c,d],["H",e.x2+10]],1)%2==1},a._removedFactory=function(a){return function(){eve("raphael.log",null,"Raphaël: you are calling to method “"+a+"” of removed object",a)}};var bI=a.pathBBox=function(a){var b=bz(a);if(b.bbox)return b.bbox;if(!a)return{x:0,y:0,width:0,height:0,x2:0,y2:0};a=bR(a);var c=0,d=0,e=[],f=[],g;for(var h=0,i=a.length;h1&&(v=w.sqrt(v),c=v*c,d=v*d);var x=c*c,y=d*d,A=(f==g?-1:1)*w.sqrt(z((x*y-x*u*u-y*t*t)/(x*u*u+y*t*t))),C=A*c*u/d+(a+h)/2,D=A*-d*t/c+(b+i)/2,E=w.asin(((b-D)/d).toFixed(9)),F=w.asin(((i-D)/d).toFixed(9));E=aF&&(E=E-B*2),!g&&F>E&&(F=F-B*2)}else E=j[0],F=j[1],C=j[2],D=j[3];var G=F-E;if(z(G)>k){var H=F,I=h,J=i;F=E+k*(g&&F>E?1:-1),h=C+c*w.cos(F),i=D+d*w.sin(F),m=bO(h,i,c,d,e,0,g,I,J,[F,H,C,D])}G=F-E;var K=w.cos(E),L=w.sin(E),M=w.cos(F),N=w.sin(F),O=w.tan(G/4),P=4/3*c*O,Q=4/3*d*O,R=[a,b],S=[a+P*L,b-Q*K],T=[h+P*N,i-Q*M],U=[h,i];S[0]=2*R[0]-S[0],S[1]=2*R[1]-S[1];if(j)return[S,T,U][n](m);m=[S,T,U][n](m).join()[s](",");var V=[];for(var W=0,X=m.length;W"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y)),i=f-2*d+b-(h-2*f+d),j=2*(d-b)-2*(f-d),k=b-d,l=(-j+w.sqrt(j*j-4*i*k))/2/i,n=(-j-w.sqrt(j*j-4*i*k))/2/i,z(l)>"1e12"&&(l=.5),z(n)>"1e12"&&(n=.5),l>0&&l<1&&(q=bP(a,b,c,d,e,f,g,h,l),p.push(q.x),o.push(q.y)),n>0&&n<1&&(q=bP(a,b,c,d,e,f,g,h,n),p.push(q.x),o.push(q.y));return{min:{x:y[m](0,p),y:y[m](0,o)},max:{x:x[m](0,p),y:x[m](0,o)}}}),bR=a._path2curve=bv(function(a,b){var c=!b&&bz(a);if(!b&&c.curve)return bJ(c.curve);var d=bL(a),e=b&&bL(b),f={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},g={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},h=function(a,b){var c,d;if(!a)return["C",b.x,b.y,b.x,b.y,b.x,b.y];!(a[0]in{T:1,Q:1})&&(b.qx=b.qy=null);switch(a[0]){case"M":b.X=a[1],b.Y=a[2];break;case"A":a=["C"][n](bO[m](0,[b.x,b.y][n](a.slice(1))));break;case"S":c=b.x+(b.x-(b.bx||b.x)),d=b.y+(b.y-(b.by||b.y)),a=["C",c,d][n](a.slice(1));break;case"T":b.qx=b.x+(b.x-(b.qx||b.x)),b.qy=b.y+(b.y-(b.qy||b.y)),a=["C"][n](bN(b.x,b.y,b.qx,b.qy,a[1],a[2]));break;case"Q":b.qx=a[1],b.qy=a[2],a=["C"][n](bN(b.x,b.y,a[1],a[2],a[3],a[4]));break;case"L":a=["C"][n](bM(b.x,b.y,a[1],a[2]));break;case"H":a=["C"][n](bM(b.x,b.y,a[1],b.y));break;case"V":a=["C"][n](bM(b.x,b.y,b.x,a[1]));break;case"Z":a=["C"][n](bM(b.x,b.y,b.X,b.Y))}return a},i=function(a,b){if(a[b].length>7){a[b].shift();var c=a[b];while(c.length)a.splice(b++,0,["C"][n](c.splice(0,6)));a.splice(b,1),l=x(d.length,e&&e.length||0)}},j=function(a,b,c,f,g){a&&b&&a[g][0]=="M"&&b[g][0]!="M"&&(b.splice(g,0,["M",f.x,f.y]),c.bx=0,c.by=0,c.x=a[g][1],c.y=a[g][2],l=x(d.length,e&&e.length||0))};for(var 
k=0,l=x(d.length,e&&e.length||0);ke){if(c&&!l.start){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n),k+=["C"+m.start.x,m.start.y,m.m.x,m.m.y,m.x,m.y];if(f)return k;l.start=k,k=["M"+m.x,m.y+"C"+m.n.x,m.n.y,m.end.x,m.end.y,i[5],i[6]].join(),n+=j,g=+i[5],h=+i[6];continue}if(!b&&!c){m=cs(g,h,i[1],i[2],i[3],i[4],i[5],i[6],e-n);return{x:m.x,y:m.y,alpha:m.alpha}}}n+=j,g=+i[5],h=+i[6]}k+=i.shift()+i}l.end=k,m=b?n:c?l:a.findDotsAtSegment(g,h,i[0],i[1],i[2],i[3],i[4],i[5],1),m.alpha&&(m={x:m.x,y:m.y,alpha:m.alpha});return m}},cu=ct(1),cv=ct(),cw=ct(0,1);a.getTotalLength=cu,a.getPointAtLength=cv,a.getSubpath=function(a,b,c){if(this.getTotalLength(a)-c<1e-6)return cw(a,b).end;var d=cw(a,c,1);return b?cw(d,b).end:d},cl.getTotalLength=function(){if(this.type=="path"){if(this.node.getTotalLength)return this.node.getTotalLength();return cu(this.attrs.path)}},cl.getPointAtLength=function(a){if(this.type=="path")return cv(this.attrs.path,a)},cl.getSubpath=function(b,c){if(this.type=="path")return a.getSubpath(this.attrs.path,b,c)};var cx=a.easing_formulas={linear:function(a){return a},"<":function(a){return A(a,1.7)},">":function(a){return A(a,.48)},"<>":function(a){var b=.48-a/1.04,c=w.sqrt(.1734+b*b),d=c-b,e=A(z(d),1/3)*(d<0?-1:1),f=-c-b,g=A(z(f),1/3)*(f<0?-1:1),h=e+g+.5;return(1-h)*3*h*h+h*h*h},backIn:function(a){var b=1.70158;return a*a*((b+1)*a-b)},backOut:function(a){a=a-1;var b=1.70158;return a*a*((b+1)*a+b)+1},elastic:function(a){if(a==!!a)return a;return A(2,-10*a)*w.sin((a-.075)*2*B/.3)+1},bounce:function(a){var b=7.5625,c=2.75,d;a<1/c?d=b*a*a:a<2/c?(a-=1.5/c,d=b*a*a+.75):a<2.5/c?(a-=2.25/c,d=b*a*a+.9375):(a-=2.625/c,d=b*a*a+.984375);return d}};cx.easeIn=cx["ease-in"]=cx["<"],cx.easeOut=cx["ease-out"]=cx[">"],cx.easeInOut=cx["ease-in-out"]=cx["<>"],cx["back-in"]=cx.backIn,cx["back-out"]=cx.backOut;var cy=[],cz=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(a){setTimeout(a,16)},cA=function(){var b=+(new Date),c=0;for(;c1&&!d.next){for(s in k)k[g](s)&&(r[s]=d.totalOrigin[s]);d.el.attr(r),cE(d.anim,d.el,d.anim.percents[0],null,d.totalOrigin,d.repeat-1)}d.next&&!d.stop&&cE(d.anim,d.el,d.next,null,d.totalOrigin,d.repeat)}}a.svg&&m&&m.paper&&m.paper.safari(),cy.length&&cz(cA)},cB=function(a){return a>255?255:a<0?0:a};cl.animateWith=function(b,c,d,e,f,g){var h=this;if(h.removed){g&&g.call(h);return h}var i=d instanceof cD?d:a.animation(d,e,f,g),j,k;cE(i,h,i.percents[0],null,h.attr());for(var l=0,m=cy.length;l.5)*2-1;i(m-.5,2)+i(n-.5,2)>.25&&(n=f.sqrt(.25-i(m-.5,2))*e+.5)&&n!=.5&&(n=n.toFixed(5)-1e-5*e)}return l}),e=e.split(/\s*\-\s*/);if(j=="linear"){var t=e.shift();t=-d(t);if(isNaN(t))return null;var u=[0,0,f.cos(a.rad(t)),f.sin(a.rad(t))],v=1/(g(h(u[2]),h(u[3]))||1);u[2]*=v,u[3]*=v,u[2]<0&&(u[0]=-u[2],u[2]=0),u[3]<0&&(u[1]=-u[3],u[3]=0)}var w=a._parseDots(e);if(!w)return null;k=k.replace(/[\(\)\s,\xb0#]/g,"_"),b.gradient&&k!=b.gradient.id&&(p.defs.removeChild(b.gradient),delete b.gradient);if(!b.gradient){s=q(j+"Gradient",{id:k}),b.gradient=s,q(s,j=="radial"?{fx:m,fy:n}:{x1:u[0],y1:u[1],x2:u[2],y2:u[3],gradientTransform:b.matrix.invert()}),p.defs.appendChild(s);for(var x=0,y=w.length;x1?G.opacity/100:G.opacity});case"stroke":G=a.getRGB(p),i.setAttribute(o,G.hex),o=="stroke"&&G[b]("opacity")&&q(i,{"stroke-opacity":G.opacity>1?G.opacity/100:G.opacity}),o=="stroke"&&d._.arrows&&("startString"in d._.arrows&&t(d,d._.arrows.startString),"endString"in 
d._.arrows&&t(d,d._.arrows.endString,1));break;case"gradient":(d.type=="circle"||d.type=="ellipse"||c(p).charAt()!="r")&&r(d,p);break;case"opacity":k.gradient&&!k[b]("stroke-opacity")&&q(i,{"stroke-opacity":p>1?p/100:p});case"fill-opacity":if(k.gradient){H=a._g.doc.getElementById(i.getAttribute("fill").replace(/^url\(#|\)$/g,l)),H&&(I=H.getElementsByTagName("stop"),q(I[I.length-1],{"stop-opacity":p}));break};default:o=="font-size"&&(p=e(p,10)+"px");var J=o.replace(/(\-.)/g,function(a){return a.substring(1).toUpperCase()});i.style[J]=p,d._.dirty=1,i.setAttribute(o,p)}}y(d,f),i.style.visibility=m},x=1.2,y=function(d,f){if(d.type=="text"&&!!(f[b]("text")||f[b]("font")||f[b]("font-size")||f[b]("x")||f[b]("y"))){var g=d.attrs,h=d.node,i=h.firstChild?e(a._g.doc.defaultView.getComputedStyle(h.firstChild,l).getPropertyValue("font-size"),10):10;if(f[b]("text")){g.text=f.text;while(h.firstChild)h.removeChild(h.firstChild);var j=c(f.text).split("\n"),k=[],m;for(var n=0,o=j.length;n"));var $=X.getBoundingClientRect();t.W=m.w=($.right-$.left)/Y,t.H=m.h=($.bottom-$.top)/Y,t.X=m.x,t.Y=m.y+t.H/2,("x"in i||"y"in i)&&(t.path.v=a.format("m{0},{1}l{2},{1}",f(m.x*u),f(m.y*u),f(m.x*u)+1));var _=["x","y","text","font","font-family","font-weight","font-style","font-size"];for(var ba=0,bb=_.length;ba.25&&(c=e.sqrt(.25-i(b-.5,2))*((c>.5)*2-1)+.5),m=b+n+c);return o}),f=f.split(/\s*\-\s*/);if(l=="linear"){var p=f.shift();p=-d(p);if(isNaN(p))return null}var q=a._parseDots(f);if(!q)return null;b=b.shape||b.node;if(q.length){b.removeChild(g),g.on=!0,g.method="none",g.color=q[0].color,g.color2=q[q.length-1].color;var r=[];for(var s=0,t=q.length;s')}}catch(c){F=function(a){return b.createElement("<"+a+' xmlns="urn:schemas-microsoft.com:vml" class="rvml">')}}},a._engine.initWin(a._g.win),a._engine.create=function(){var b=a._getContainer.apply(0,arguments),c=b.container,d=b.height,e,f=b.width,g=b.x,h=b.y;if(!c)throw new Error("VML container not found.");var i=new a._Paper,j=i.canvas=a._g.doc.createElement("div"),k=j.style;g=g||0,h=h||0,f=f||512,d=d||342,i.width=f,i.height=d,f==+f&&(f+="px"),d==+d&&(d+="px"),i.coordsize=u*1e3+n+u*1e3,i.coordorigin="0 0",i.span=a._g.doc.createElement("span"),i.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;",j.appendChild(i.span),k.cssText=a.format("top:0;left:0;width:{0};height:{1};display:inline-block;position:relative;clip:rect(0 {0} {1} 0);overflow:hidden",f,d),c==1?(a._g.doc.body.appendChild(j),k.left=g+"px",k.top=h+"px",k.position="absolute"):c.firstChild?c.insertBefore(j,c.firstChild):c.appendChild(j),i.renderfix=function(){};return i},a.prototype.clear=function(){a.eve("raphael.clear",this),this.canvas.innerHTML=o,this.span=a._g.doc.createElement("span"),this.span.style.cssText="position:absolute;left:-9999em;top:-9999em;padding:0;margin:0;line-height:1;display:inline;",this.canvas.appendChild(this.span),this.bottom=this.top=null},a.prototype.remove=function(){a.eve("raphael.remove",this),this.canvas.parentNode.removeChild(this.canvas);for(var b in this)this[b]=typeof this[b]=="function"?a._removedFactory(b):null;return!0};var G=a.st;for(var H in E)E[b](H)&&!G[b](H)&&(G[H]=function(a){return function(){var b=arguments;return this.forEach(function(c){c[a].apply(c,b)})}}(H))}(window.Raphael) \ No newline at end of file diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css new file mode 100755 index 0000000000..7d64b9c5c5 --- /dev/null +++ 
b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css @@ -0,0 +1,30 @@ +body { + font-size: 10pt; + font-family: Arial, sans-serif; +} + +a { + color:#315479; +} + +.letters { + width:100%; + text-align:center; + margin:0.6em; + padding:0.1em; + border-bottom:1px solid gray; +} + +.entry { + border-bottom: 1px solid lightgray; + padding: 5px 0 8px; +} + +.name { + /* background-color:#E5E5E5; */ +} + +.occurrences { + margin-left: 1em; + margin-top: 5px; +} \ No newline at end of file diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png new file mode 100644 index 0000000000..4625f9df74 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd new file mode 100644 index 0000000000..3764f82ccb Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js new file mode 100644 index 0000000000..4417f5b438 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js @@ -0,0 +1,71 @@ +// © 2010 EPFL/LAMP +// code by Gilles Dubochet + +function Scheduler() { + var scheduler = this; + var resolution = 0; + this.timeout = undefined; + this.queues = new Array(0); // an array of work pacakges indexed by index in the labels table. + this.labels = new Array(0); // an indexed array of labels indexed by priority. This should be short. + this.label = function(name, priority) { + this.name = name; + this.priority = priority; + } + this.work = function(fn, self, args) { + this.fn = fn; + this.self = self; + this.args = args; + } + this.addLabel = function(name, priority) { + var idx = 0; + while (idx < scheduler.queues.length && scheduler.labels[idx].priority <= priority) { idx = idx + 1; } + scheduler.labels.splice(idx, 0, new scheduler.label(name, priority)); + scheduler.queues.splice(idx, 0, new Array(0)); + } + this.clearLabel = function(name) { + var idx = 0; + while (idx < scheduler.queues.length && scheduler.labels[idx].name != name) { idx = idx + 1; } + if (idx < scheduler.queues.length && scheduler.labels[i].name == name) { + scheduler.labels.splice(idx, 1); + scheduler.queues.splice(idx, 1); + } + } + this.nextWork = function() { + var fn = undefined; + var idx = 0; + while (idx < scheduler.queues.length && scheduler.queues[idx].length == 0) { idx = idx + 1; } + if (idx < scheduler.queues.length && scheduler.queues[idx].length > 0) { + var fn = scheduler.queues[idx].shift(); + } + return fn; + } + this.add = function(labelName, fn, self, args) { + var doWork = function() { + scheduler.timeout = setTimeout(function() { + var work = scheduler.nextWork(); + if (work != undefined) { + if (work.args == undefined) { work.args = new Array(0); } + work.fn.apply(work.self, work.args); + doWork(); + } + else { + scheduler.timeout = undefined; + } + }, resolution); + } + var idx = 0; + while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; } + if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) { + scheduler.queues[idx].push(new scheduler.work(fn, self, args)); + if (scheduler.timeout == undefined) doWork(); + } + else throw("queue for add is non existant"); + } + 
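+ // Example (illustrative sketch; the label names below are hypothetical, not from this file):
+ // labels are registered with a priority (a lower number is served first), and work packages
+ // queued under a label then run asynchronously, one per setTimeout tick:
+ //   var s = new Scheduler();
+ //   s.addLabel("init", 1);
+ //   s.addLabel("background", 5);
+ //   s.add("init", function() { console.log("runs first"); });
+ //   s.add("background", function(msg) { console.log(msg); }, null, ["runs later"]);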
this.clear = function(labelName) { + var idx = 0; + while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; } + if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) { + scheduler.queues[idx] = new Array(); + } + } +}; diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png new file mode 100644 index 0000000000..bc29efb3e6 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png new file mode 100644 index 0000000000..8313f4975b Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png new file mode 100644 index 0000000000..04eda2f307 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png new file mode 100644 index 0000000000..c89765239e Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png new file mode 100644 index 0000000000..bf984ef0ba Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png new file mode 100644 index 0000000000..a790bb1169 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif new file mode 100644 index 0000000000..b6ac4415e4 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif new file mode 100644 index 0000000000..9aae5ba0aa Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css new file mode 100644 index 0000000000..b066027f04 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css @@ -0,0 +1,848 @@ +/* Reset */ + +html, body, div, span, object, iframe, +h1, h2, h3, h4, h5, h6, p, blockquote, pre, +a, abbr, acronym, address, code, pre, +del, dfn, em, img, q, dl, dt, dd, ol, ul, li, +fieldset, form, label, legend, input, +table, caption, tbody, tfoot, thead, tr, th, td { + margin: 0; + padding: 0; + border: 0; + font-weight: inherit; + font-style: inherit; + font-size: 100%; + font-family: inherit; + vertical-align: baseline; +} + +table { border-collapse: separate; 
border-spacing: 0; } +caption, th, td { text-align: left; font-weight: normal; } +table, td, th { vertical-align: middle; } + +blockquote:before, blockquote:after, q:before, q:after { content: ""; } +blockquote, q { quotes: none; } + +a img { border: none; } + +input { border-width: 0px; } + +/* Page */ + +body { + font-family: Arial, sans-serif; + font-size: 10pt; +} + +#footer { + font-size: 9pt; + text-align: center; + color: #858484; + bottom: 0; + width: 100%; + height: 20px; +} + +a[href] { + text-decoration: underline; + color: #315479; +} + +a[href]:hover { + text-decoration: none; +} + +#types ol li > p { + margin-top: 5px; +} + +#types ol li:last-child { + margin-bottom: 5px; +} + +/* +#definition { + padding: 6px 0 6px 6px; + min-height: 59px; + color: white; +} +*/ + +#definition { + display: block-inline; + padding: 5px 0px; + height: 61px; +} + +#definition > img { + float: left; + padding-right: 6px; + padding-left: 5px; +} + +#definition > a > img { + float: left; + padding-right: 6px; + padding-left: 5px; +} + +#definition p + h1 { + margin-top: 3px; +} + +#definition > h1 { +/* padding: 12px 0 12px 6px;*/ + color: white; + text-shadow: 3px black; + text-shadow: black 0px 2px 0px; + font-size: 24pt; + display: inline-block; + overflow: hidden; + margin-top: 10px; +} + +#definition h1 > a { + color: #ffffff; + font-size: 24pt; + text-shadow: black 0px 2px 0px; +/* text-shadow: black 0px 0px 0px;*/ +text-decoration: none; +} + +#definition #owner { + color: #ffffff; + margin-top: 4px; + font-size: 10pt; + overflow: hidden; +} + +#definition #owner > a { + color: #ffffff; +} + +#definition #owner > a:hover { + text-decoration: none; +} + +#signature { + background-image:url('signaturebg2.gif'); + background-color: #d7d7d7; + min-height: 18px; + background-repeat:repeat-x; + font-size: 11.5pt; +/* margin-bottom: 10px;*/ + padding: 8px; +} + +#signature > span.modifier_kind { + display: inline; + float: left; + text-align: left; + width: auto; + position: static; + text-shadow: 2px white; + text-shadow: white 0px 1px 0px; +} + +#signature > span.symbol { + text-align: left; + display: inline; + padding-left: 0.7em; + text-shadow: 2px white; + text-shadow: white 0px 1px 0px; +} + +/* Linear super types and known subclasses */ +.hiddenContent { + display: none; +} + +.toggleContainer .toggle { + cursor: pointer; + padding-left: 15px; + background: url("arrow-right.png") no-repeat 0 3px transparent; +} + +.toggleContainer .toggle.open { + background: url("arrow-down.png") no-repeat 0 3px transparent; +} + +.toggleContainer .hiddenContent { + margin-top: 5px; +} + +.value #definition { + background-color: #2C475C; /* blue */ + background-image:url('defbg-blue.gif'); + background-repeat:repeat-x; +} + +.type #definition { + background-color: #316555; /* green */ + background-image:url('defbg-green.gif'); + background-repeat:repeat-x; +} + +#template { + margin-bottom: 50px; +} + +h3 { + color: white; + padding: 5px 10px; + font-size: 12pt; + font-weight: bold; + text-shadow: black 1px 1px 0px; +} + +dl.attributes > dt { + display: block; + float: left; + font-style: italic; +} + +dl.attributes > dt.implicit { + font-weight: bold; + color: darkgreen; +} + +dl.attributes > dd { + display: block; + padding-left: 10em; + margin-bottom: 5px; +} + +#template .values > h3 { + background: #2C475C url("valuemembersbg.gif") repeat-x bottom left; /* grayish blue */ + height: 18px; +} + +#values ol li:last-child { + margin-bottom: 5px; +} + +#template .types > h3 { + background: #316555 
url("typebg.gif") repeat-x bottom left; /* green */ + height: 18px; +} + +#constructors > h3 { + background: #4f504f url("constructorsbg.gif") repeat-x bottom left; /* gray */ + height: 18px; +} + +#inheritedMembers > div.parent > h3 { + background: #dadada url("constructorsbg.gif") repeat-x bottom left; /* gray */ + height: 17px; + font-style: italic; + font-size: 12pt; +} + +#inheritedMembers > div.parent > h3 * { + color: white; +} + +#inheritedMembers > div.conversion > h3 { + background: #dadada url("conversionbg.gif") repeat-x bottom left; /* gray */ + height: 17px; + font-style: italic; + font-size: 12pt; +} + +#inheritedMembers > div.conversion > h3 * { + color: white; +} + +#groupedMembers > div.group > h3 { + background: #dadada url("typebg.gif") repeat-x bottom left; /* green */ + height: 17px; + font-size: 12pt; +} + +#groupedMembers > div.group > h3 * { + color: white; +} + + +/* Member cells */ + +div.members > ol { + background-color: white; + list-style: none +} + +div.members > ol > li { + display: block; + border-bottom: 1px solid gray; + padding: 5px 0 6px; + margin: 0 10px; + position: relative; +} + +div.members > ol > li:last-child { + border: 0; + padding: 5px 0 5px; +} + +/* Member signatures */ + +#tooltip { + background: #EFD5B5; + border: 1px solid gray; + color: black; + display: none; + padding: 5px; + position: absolute; +} + +.signature { + font-family: monospace; + font-size: 10pt; + line-height: 18px; + clear: both; + display: block; + text-shadow: 2px white; + text-shadow: white 0px 1px 0px; +} + +.signature .modifier_kind { + position: absolute; + text-align: right; + width: 14em; +} + +.signature > a > .symbol > .name { + text-decoration: underline; +} + +.signature > a:hover > .symbol > .name { + text-decoration: none; +} + +.signature > a { + text-decoration: none; +} + +.signature > .symbol { + display: block; + padding-left: 14.7em; +} + +.signature .name { + display: inline-block; + font-weight: bold; +} + +.signature .symbol > .implicit { + display: inline-block; + font-weight: bold; + text-decoration: underline; + color: darkgreen; +} + +.signature .symbol .shadowed { + color: darkseagreen; +} + +.signature .symbol .params > .implicit { + font-style: italic; +} + +.signature .symbol .deprecated { + text-decoration: line-through; +} + +.signature .symbol .params .default { + font-style: italic; +} + +#template .signature.closed { + background: url("arrow-right.png") no-repeat 0 5px transparent; + cursor: pointer; +} + +#template .signature.opened { + background: url("arrow-down.png") no-repeat 0 5px transparent; + cursor: pointer; +} + +#template .values .signature .name { + color: darkblue; +} + +#template .types .signature .name { + color: darkgreen; +} + +.full-signature-usecase h4 span { + font-size: 10pt; +} + +.full-signature-usecase > #signature { + padding-top: 0px; +} + +#template .full-signature-usecase > .signature.closed { + background: none; +} + +#template .full-signature-usecase > .signature.opened { + background: none; +} + +.full-signature-block { + padding: 5px 0 0; + border-top: 1px solid #EBEBEB; + margin-top: 5px; + margin-bottom: 5px; +} + + +/* Comments text formating */ + +.cmt {} + +.cmt p { + margin: 0.7em 0; +} + +.cmt p:first-child { + margin-top: 0; +} + +.cmt p:last-child { + margin-bottom: 0; +} + +.cmt h3, +.cmt h4, +.cmt h5, +.cmt h6 { + margin-bottom: 0.7em; + margin-top: 1.4em; + display: block; + text-align: left; + font-weight: bold; +} + +.cmt h3 { + font-size: 14pt; +} + +.cmt h4 { + font-size: 13pt; +} + 
+.cmt h5 { + font-size: 12pt; +} + +.cmt h6 { + font-size: 11pt; +} + +.cmt pre { + padding: 5px; + border: 1px solid #ddd; + background-color: #eee; + margin: 5px 0; + display: block; + font-family: monospace; +} + +.cmt pre span.ano { + color: blue; +} + +.cmt pre span.cmt { + color: green; +} + +.cmt pre span.kw { + font-weight: bold; +} + +.cmt pre span.lit { + color: #c71585; +} + +.cmt pre span.num { + color: #1e90ff; /* dodgerblue */ +} + +.cmt pre span.std { + color: #008080; /* teal */ +} + +.cmt ul { + display: block; + list-style: circle; + padding-left: 20px; +} + +.cmt ol { + display: block; + padding-left:20px; +} + +.cmt ol.decimal { + list-style: decimal; +} + +.cmt ol.lowerAlpha { + list-style: lower-alpha; +} + +.cmt ol.upperAlpha { + list-style: upper-alpha; +} + +.cmt ol.lowerRoman { + list-style: lower-roman; +} + +.cmt ol.upperRoman { + list-style: upper-roman; +} + +.cmt li { + display: list-item; +} + +.cmt code { + font-family: monospace; +} + +.cmt a { + font-style: bold; +} + +.cmt em, .cmt i { + font-style: italic; +} + +.cmt strong, .cmt b { + font-weight: bold; +} + +/* Comments structured layout */ + +.group > div.comment { + padding-top: 5px; + padding-bottom: 5px; + padding-right: 5px; + padding-left: 5px; + border: 1px solid #ddd; + background-color: #eeeee; + margin-top:5px; + margin-bottom:5px; + margin-right:5px; + margin-left:5px; + display: block; +} + +p.comment { + display: block; + margin-left: 14.7em; + margin-top: 5px; +} + +.shortcomment { + display: block; + margin: 5px 10px; +} + +div.fullcommenttop { + padding: 10px 10px; + background-image:url('fullcommenttopbg.gif'); + background-repeat:repeat-x; +} + +div.fullcomment { + margin: 5px 10px; +} + +#template div.fullcommenttop, +#template div.fullcomment { + display:none; + margin: 5px 0 0 14.7em; +} + +#template .shortcomment { + margin: 5px 0 0 14.7em; + padding: 0; +} + +div.fullcomment .block { + padding: 5px 0 0; + border-top: 1px solid #EBEBEB; + margin-top: 5px; + overflow: hidden; +} + +div.fullcommenttop .block { + padding: 5px 0 0; + border-top: 1px solid #EBEBEB; + margin-top: 5px; + margin-bottom: 5px +} + +div.fullcomment div.block ol li p, +div.fullcomment div.block ol li { + display:inline +} + +div.fullcomment .block > h5 { + font-style: italic; + font-weight: normal; + display: inline-block; +} + +div.fullcomment .comment { + margin: 5px 0 10px; +} + +div.fullcommenttop .comment:last-child, +div.fullcomment .comment:last-child { + margin-bottom: 0; +} + +div.fullcommenttop dl.paramcmts { + margin-bottom: 0.8em; + padding-bottom: 0.8em; +} + +div.fullcommenttop dl.paramcmts > dt, +div.fullcomment dl.paramcmts > dt { + display: block; + float: left; + font-weight: bold; + min-width: 70px; +} + +div.fullcommenttop dl.paramcmts > dd, +div.fullcomment dl.paramcmts > dd { + display: block; + padding-left: 10px; + margin-bottom: 5px; + margin-left: 70px; +} + +/* Members filter tool */ + +#textfilter { + position: relative; + display: block; + height: 20px; + margin-bottom: 5px; +} + +#textfilter > .pre { + display: block; + position: absolute; + top: 0; + left: 0; + height: 23px; + width: 21px; + background: url("filter_box_left.png"); +} + +#textfilter > .input { + display: block; + position: absolute; + top: 0; + right: 20px; + left: 20px; +} + +#textfilter > .input > input { + height: 20px; + padding: 1px; + font-weight: bold; + color: #000000; + background: #ffffff url("filterboxbarbg.png") repeat-x top left; + width: 100%; +} + +#textfilter > .post { + display: block; + 
position: absolute; + top: 0; + right: 0; + height: 23px; + width: 21px; + background: url("filter_box_right.png"); +} + +#mbrsel { + padding: 5px 10px; + background-color: #ededee; /* light gray */ + background-image:url('filterboxbg.gif'); + background-repeat:repeat-x; + font-size: 9.5pt; + display: block; + margin-top: 1em; +/* margin-bottom: 1em; */ +} + +#mbrsel > div { + margin-bottom: 5px; +} + +#mbrsel > div:last-child { + margin-bottom: 0; +} + +#mbrsel > div > span.filtertype { + padding: 4px; + margin-right: 5px; + float: left; + display: inline-block; + color: #000000; + font-weight: bold; + text-shadow: white 0px 1px 0px; + width: 4.5em; +} + +#mbrsel > div > ol { + display: inline-block; +} + +#mbrsel > div > a { + position:relative; + top: -8px; + font-size: 11px; + text-shadow: #ffffff 0 1px 0; +} + +#mbrsel > div > ol#linearization { + display: table; + margin-left: 70px; +} + +#mbrsel > div > ol#linearization > li.in { + text-decoration: none; + float: left; + padding-right: 10px; + margin-right: 5px; + background: url(selected-right.png) no-repeat; + background-position: right 0px; +} + +#mbrsel > div > ol#linearization > li.in > span{ + color: #404040; + float: left; + padding: 1px 0 1px 10px; + background: url(selected.png) no-repeat; + background-position: 0px 0px; + text-shadow: #ffffff 0 1px 0; +} + +#mbrsel > div > ol#implicits { + display: table; + margin-left: 70px; +} + +#mbrsel > div > ol#implicits > li.in { + text-decoration: none; + float: left; + padding-right: 10px; + margin-right: 5px; + background: url(selected-right-implicits.png) no-repeat; + background-position: right 0px; +} + +#mbrsel > div > ol#implicits > li.in > span{ + color: #404040; + float: left; + padding: 1px 0 1px 10px; + background: url(selected-implicits.png) no-repeat; + background-position: 0px 0px; + text-shadow: #ffffff 0 1px 0; +} + +#mbrsel > div > ol > li { +/* padding: 3px 10px;*/ + line-height: 16pt; + display: inline-block; + cursor: pointer; +} + +#mbrsel > div > ol > li.in { + text-decoration: none; + float: left; + padding-right: 10px; + margin-right: 5px; + background: url(selected-right.png) no-repeat; + background-position: right 0px; +} + +#mbrsel > div > ol > li.in > span{ + color: #404040; + float: left; + padding: 1px 0 1px 10px; + background: url(selected.png) no-repeat; + background-position: 0px 0px; + text-shadow: #ffffff 0 1px 0; +} + +#mbrsel > div > ol > li.out { + text-decoration: none; + float: left; + padding-right: 10px; + margin-right: 5px; +} + +#mbrsel > div > ol > li.out > span{ + color: #747474; +/* background-color: #999; */ + float: left; + padding: 1px 0 1px 10px; +/* background: url(unselected.png) no-repeat;*/ + background-position: 0px -1px; + text-shadow: #ffffff 0 1px 0; +} +/* +#mbrsel .hideall { + color: #4C4C4C; + line-height: 16px; + font-weight: bold; +} + +#mbrsel .hideall span { + color: #4C4C4C; + font-weight: bold; +} + +#mbrsel .showall { + color: #4C4C4C; + line-height: 16px; + font-weight: bold; +} + +#mbrsel .showall span { + color: #4C4C4C; + font-weight: bold; +}*/ + +.badge { + display: inline-block; + padding: 2px 4px; + font-size: 11.844px; + font-weight: bold; + line-height: 14px; + color: #ffffff; + text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); + white-space: nowrap; + vertical-align: baseline; + background-color: #999999; + padding-right: 9px; + padding-left: 9px; + -webkit-border-radius: 9px; + -moz-border-radius: 9px; + border-radius: 9px; +} + +.badge-red { + background-color: #b94a48; +} diff --git 
a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js new file mode 100644 index 0000000000..6d1caf6d50 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js @@ -0,0 +1,466 @@ +// © 2009–2010 EPFL/LAMP +// code by Gilles Dubochet with contributions by Pedro Furlanetto + +$(document).ready(function(){ + + // Escapes special characters and returns a valid jQuery selector + function escapeJquery(str){ + return str.replace(/([;&,\.\+\*\~':"\!\^#$%@\[\]\(\)=>\|])/g, '\\$1'); + } + + // highlight and jump to selected member + if (window.location.hash) { + var temp = window.location.hash.replace('#', ''); + var elem = '#'+escapeJquery(temp); + + window.scrollTo(0, 0); + $(elem).parent().effect("highlight", {color: "#FFCC85"}, 3000); + $('html,body').animate({scrollTop:$(elem).parent().offset().top}, 1000); + } + + var isHiddenClass = function (name) { + return name == 'scala.Any' || + name == 'scala.AnyRef'; + }; + + var isHidden = function (elem) { + return $(elem).attr("data-hidden") == 'true'; + }; + + $("#linearization li:gt(0)").filter(function(){ + return isHiddenClass($(this).attr("name")); + }).removeClass("in").addClass("out"); + + $("#implicits li").filter(function(){ + return isHidden(this); + }).removeClass("in").addClass("out"); + + // Pre-filter members + filter(); + + // Member filter box + var input = $("#textfilter input"); + input.bind("keyup", function(event) { + + switch ( event.keyCode ) { + + case 27: // escape key + input.val(""); + filter(true); + break; + + case 38: // up + input.val(""); + filter(false); + window.scrollTo(0, $("body").offset().top); + input.focus(); + break; + + case 33: //page up + input.val(""); + filter(false); + break; + + case 34: //page down + input.val(""); + filter(false); + break; + + default: + window.scrollTo(0, $("#mbrsel").offset().top); + filter(true); + break; + + } + }); + input.focus(function(event) { + input.select(); + }); + $("#textfilter > .post").click(function() { + $("#textfilter input").attr("value", ""); + filter(); + }); + $(document).keydown(function(event) { + + if (event.keyCode == 9) { // tab + $("#index-input", window.parent.document).focus(); + input.attr("value", ""); + return false; + } + }); + + $("#linearization li").click(function(){ + if ($(this).hasClass("in")) { + $(this).removeClass("in"); + $(this).addClass("out"); + } + else if ($(this).hasClass("out")) { + $(this).removeClass("out"); + $(this).addClass("in"); + }; + filter(); + }); + + $("#implicits li").click(function(){ + if ($(this).hasClass("in")) { + $(this).removeClass("in"); + $(this).addClass("out"); + } + else if ($(this).hasClass("out")) { + $(this).removeClass("out"); + $(this).addClass("in"); + }; + filter(); + }); + + $("#mbrsel > div[id=ancestors] > ol > li.hideall").click(function() { + $("#linearization li.in").removeClass("in").addClass("out"); + $("#linearization li:first").removeClass("out").addClass("in"); + $("#implicits li.in").removeClass("in").addClass("out"); + + if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.showall").hasClass("in")) { + $(this).removeClass("out").addClass("in"); + $("#mbrsel > div[id=ancestors] > ol > li.showall").removeClass("in").addClass("out"); + } + + filter(); + }) + $("#mbrsel > div[id=ancestors] > ol > li.showall").click(function() { + var filteredLinearization = + $("#linearization li.out").filter(function() { + return ! 
isHiddenClass($(this).attr("name")); + }); + filteredLinearization.removeClass("out").addClass("in"); + + var filteredImplicits = + $("#implicits li.out").filter(function() { + return ! isHidden(this); + }); + filteredImplicits.removeClass("out").addClass("in"); + + if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.hideall").hasClass("in")) { + $(this).removeClass("out").addClass("in"); + $("#mbrsel > div[id=ancestors] > ol > li.hideall").removeClass("in").addClass("out"); + } + + filter(); + }); + $("#visbl > ol > li.public").click(function() { + if ($(this).hasClass("out")) { + $(this).removeClass("out").addClass("in"); + $("#visbl > ol > li.all").removeClass("in").addClass("out"); + filter(); + }; + }) + $("#visbl > ol > li.all").click(function() { + if ($(this).hasClass("out")) { + $(this).removeClass("out").addClass("in"); + $("#visbl > ol > li.public").removeClass("in").addClass("out"); + filter(); + }; + }); + $("#order > ol > li.alpha").click(function() { + if ($(this).hasClass("out")) { + orderAlpha(); + }; + }) + $("#order > ol > li.inherit").click(function() { + if ($(this).hasClass("out")) { + orderInherit(); + }; + }); + $("#order > ol > li.group").click(function() { + if ($(this).hasClass("out")) { + orderGroup(); + }; + }); + $("#groupedMembers").hide(); + + initInherit(); + + // Create tooltips + $(".extype").add(".defval").tooltip({ + tip: "#tooltip", + position:"top center", + predelay: 500, + onBeforeShow: function(ev) { + $(this.getTip()).text(this.getTrigger().attr("name")); + } + }); + + /* Add toggle arrows */ + //var docAllSigs = $("#template li").has(".fullcomment").find(".signature"); + // trying to speed things up a little bit + var docAllSigs = $("#template li[fullComment=yes] .signature"); + + function commentToggleFct(signature){ + var parent = signature.parent(); + var shortComment = $(".shortcomment", parent); + var fullComment = $(".fullcomment", parent); + var vis = $(":visible", fullComment); + signature.toggleClass("closed").toggleClass("opened"); + if (vis.length > 0) { + shortComment.slideDown(100); + fullComment.slideUp(100); + } + else { + shortComment.slideUp(100); + fullComment.slideDown(100); + } + }; + docAllSigs.addClass("closed"); + docAllSigs.click(function() { + commentToggleFct($(this)); + }); + + /* Linear super types and known subclasses */ + function toggleShowContentFct(e){ + e.toggleClass("open"); + var content = $(".hiddenContent", e.parent().get(0)); + if (content.is(':visible')) { + content.slideUp(100); + } + else { + content.slideDown(100); + } + }; + + $(".toggle:not(.diagram-link)").click(function() { + toggleShowContentFct($(this)); + }); + + // Set parent window title + windowTitle(); + + if ($("#order > ol > li.group").length == 1) { orderGroup(); }; +}); + +function orderAlpha() { + $("#order > ol > li.alpha").removeClass("out").addClass("in"); + $("#order > ol > li.inherit").removeClass("in").addClass("out"); + $("#order > ol > li.group").removeClass("in").addClass("out"); + $("#template > div.parent").hide(); + $("#template > div.conversion").hide(); + $("#mbrsel > div[id=ancestors]").show(); + filter(); +}; + +function orderInherit() { + $("#order > ol > li.inherit").removeClass("out").addClass("in"); + $("#order > ol > li.alpha").removeClass("in").addClass("out"); + $("#order > ol > li.group").removeClass("in").addClass("out"); + $("#template > div.parent").show(); + $("#template > div.conversion").show(); + $("#mbrsel > div[id=ancestors]").hide(); + filter(); +}; + +function orderGroup() { + 
$("#order > ol > li.group").removeClass("out").addClass("in"); + $("#order > ol > li.alpha").removeClass("in").addClass("out"); + $("#order > ol > li.inherit").removeClass("in").addClass("out"); + $("#template > div.parent").hide(); + $("#template > div.conversion").hide(); + $("#mbrsel > div[id=ancestors]").show(); + filter(); +}; + +/** Prepares the DOM for inheritance-based display. To do so it will: + * - hide all statically-generated parents headings; + * - copy all members from the value and type members lists (flat members) to corresponding lists nested below the + * parent headings (inheritance-grouped members); + * - initialises a control variable used by the filter method to control whether filtering happens on flat members + * or on inheritance-grouped members. */ +function initInherit() { + // inheritParents is a map from fully-qualified names to the DOM node of parent headings. + var inheritParents = new Object(); + var groupParents = new Object(); + $("#inheritedMembers > div.parent").each(function(){ + inheritParents[$(this).attr("name")] = $(this); + }); + $("#inheritedMembers > div.conversion").each(function(){ + inheritParents[$(this).attr("name")] = $(this); + }); + $("#groupedMembers > div.group").each(function(){ + groupParents[$(this).attr("name")] = $(this); + }); + + $("#types > ol > li").each(function(){ + var mbr = $(this); + this.mbrText = mbr.find("> .fullcomment .cmt").text(); + var qualName = mbr.attr("name"); + var owner = qualName.slice(0, qualName.indexOf("#")); + var name = qualName.slice(qualName.indexOf("#") + 1); + var inheritParent = inheritParents[owner]; + if (inheritParent != undefined) { + var types = $("> .types > ol", inheritParent); + if (types.length == 0) { + inheritParent.append("

<div class='types members'><h3>Type Members</h3><ol></ol></div>"); + types = $("> .types > ol", inheritParent); + } + var clone = mbr.clone(); + clone[0].mbrText = this.mbrText; + types.append(clone); + } + var group = mbr.attr("group") + var groupParent = groupParents[group]; + if (groupParent != undefined) { + var types = $("> .types > ol", groupParent); + if (types.length == 0) { + groupParent.append("<div class='types members'><ol></ol></div>"); + types = $("> .types > ol", groupParent); + } + var clone = mbr.clone(); + clone[0].mbrText = this.mbrText; + types.append(clone); + } + }); + + $("#values > ol > li").each(function(){ + var mbr = $(this); + this.mbrText = mbr.find("> .fullcomment .cmt").text(); + var qualName = mbr.attr("name"); + var owner = qualName.slice(0, qualName.indexOf("#")); + var name = qualName.slice(qualName.indexOf("#") + 1); + var inheritParent = inheritParents[owner]; + if (inheritParent != undefined) { + var values = $("> .values > ol", inheritParent); + if (values.length == 0) { + inheritParent.append("

<div class='values members'><h3>Value Members</h3><ol></ol></div>"); + values = $("> .values > ol", inheritParent); + } + var clone = mbr.clone(); + clone[0].mbrText = this.mbrText; + values.append(clone); + } + var group = mbr.attr("group") + var groupParent = groupParents[group]; + if (groupParent != undefined) { + var values = $("> .values > ol", groupParent); + if (values.length == 0) { + groupParent.append("<div class='values members'><ol></ol></div>
                        "); + values = $("> .values > ol", groupParent); + } + var clone = mbr.clone(); + clone[0].mbrText = this.mbrText; + values.append(clone); + } + }); + $("#inheritedMembers > div.parent").each(function() { + if ($("> div.members", this).length == 0) { $(this).remove(); }; + }); + $("#inheritedMembers > div.conversion").each(function() { + if ($("> div.members", this).length == 0) { $(this).remove(); }; + }); + $("#groupedMembers > div.group").each(function() { + if ($("> div.members", this).length == 0) { $(this).remove(); }; + }); +}; + +/* filter used to take boolean scrollToMember */ +function filter() { + var query = $.trim($("#textfilter input").val()).toLowerCase(); + query = query.replace(/[-[\]{}()*+?.,\\^$|#]/g, "\\$&").replace(/\s+/g, "|"); + var queryRegExp = new RegExp(query, "i"); + var privateMembersHidden = $("#visbl > ol > li.public").hasClass("in"); + var orderingAlphabetic = $("#order > ol > li.alpha").hasClass("in"); + var orderingInheritance = $("#order > ol > li.inherit").hasClass("in"); + var orderingGroups = $("#order > ol > li.group").hasClass("in"); + var hiddenSuperclassElementsLinearization = orderingInheritance ? $("#linearization > li:gt(0)") : $("#linearization > li.out"); + var hiddenSuperclassesLinearization = hiddenSuperclassElementsLinearization.map(function() { + return $(this).attr("name"); + }).get(); + var hiddenSuperclassElementsImplicits = orderingInheritance ? $("#implicits > li") : $("#implicits > li.out"); + var hiddenSuperclassesImplicits = hiddenSuperclassElementsImplicits.map(function() { + return $(this).attr("name"); + }).get(); + + var hideInheritedMembers; + + if (orderingAlphabetic) { + $("#allMembers").show(); + $("#inheritedMembers").hide(); + $("#groupedMembers").hide(); + hideInheritedMembers = true; + $("#allMembers > .members").each(filterFunc); + } else if (orderingGroups) { + $("#groupedMembers").show(); + $("#inheritedMembers").hide(); + $("#allMembers").hide(); + hideInheritedMembers = true; + $("#groupedMembers > .group > .members").each(filterFunc); + $("#groupedMembers > div.group").each(function() { + $(this).show(); + if ($("> div.members", this).not(":hidden").length == 0) { + $(this).hide(); + } else { + $(this).show(); + } + }); + } else if (orderingInheritance) { + $("#inheritedMembers").show(); + $("#groupedMembers").hide(); + $("#allMembers").hide(); + hideInheritedMembers = false; + $("#inheritedMembers > .parent > .members").each(filterFunc); + $("#inheritedMembers > .conversion > .members").each(filterFunc); + } + + + function filterFunc() { + var membersVisible = false; + var members = $(this); + members.find("> ol > li").each(function() { + var mbr = $(this); + if (privateMembersHidden && mbr.attr("visbl") == "prt") { + mbr.hide(); + return; + } + var name = mbr.attr("name"); + // Owner filtering must not happen in "inherited from" member lists + if (hideInheritedMembers) { + var ownerIndex = name.indexOf("#"); + if (ownerIndex < 0) { + ownerIndex = name.lastIndexOf("."); + } + var owner = name.slice(0, ownerIndex); + for (var i = 0; i < hiddenSuperclassesLinearization.length; i++) { + if (hiddenSuperclassesLinearization[i] == owner) { + mbr.hide(); + return; + } + }; + for (var i = 0; i < hiddenSuperclassesImplicits.length; i++) { + if (hiddenSuperclassesImplicits[i] == owner) { + mbr.hide(); + return; + } + }; + } + if (query && !(queryRegExp.test(name) || queryRegExp.test(this.mbrText))) { + mbr.hide(); + return; + } + mbr.show(); + membersVisible = true; + }); + + if (membersVisible) + 
members.show(); + else + members.hide(); + }; + + return false; +}; + +function windowTitle() +{ + try { + parent.document.title=document.title; + } + catch(e) { + // Chrome doesn't allow settings the parent's title when + // used on the local file system. + } +}; diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js new file mode 100644 index 0000000000..0af34eca4c --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js @@ -0,0 +1,14 @@ +/* + * tools.tooltip 1.1.3 - Tooltips done right. + * + * Copyright (c) 2009 Tero Piirainen + * http://flowplayer.org/tools/tooltip.html + * + * Dual licensed under MIT and GPL 2+ licenses + * http://www.opensource.org/licenses + * + * Launch : November 2008 + * Date: ${date} + * Revision: ${revision} + */ +(function(c){var d=[];c.tools=c.tools||{};c.tools.tooltip={version:"1.1.3",conf:{effect:"toggle",fadeOutSpeed:"fast",tip:null,predelay:0,delay:30,opacity:1,lazy:undefined,position:["top","center"],offset:[0,0],cancelDefault:true,relative:false,oneInstance:true,events:{def:"mouseover,mouseout",input:"focus,blur",widget:"focus mouseover,blur mouseout",tooltip:"mouseover,mouseout"},api:false},addEffect:function(e,g,f){b[e]=[g,f]}};var b={toggle:[function(e){var f=this.getConf(),g=this.getTip(),h=f.opacity;if(h<1){g.css({opacity:h})}g.show();e.call()},function(e){this.getTip().hide();e.call()}],fade:[function(e){this.getTip().fadeIn(this.getConf().fadeInSpeed,e)},function(e){this.getTip().fadeOut(this.getConf().fadeOutSpeed,e)}]};function a(f,g){var p=this,k=c(this);f.data("tooltip",p);var l=f.next();if(g.tip){l=c(g.tip);if(l.length>1){l=f.nextAll(g.tip).eq(0);if(!l.length){l=f.parent().nextAll(g.tip).eq(0)}}}function o(u){var t=g.relative?f.position().top:f.offset().top,s=g.relative?f.position().left:f.offset().left,v=g.position[0];t-=l.outerHeight()-g.offset[0];s+=f.outerWidth()+g.offset[1];var q=l.outerHeight()+f.outerHeight();if(v=="center"){t+=q/2}if(v=="bottom"){t+=q}v=g.position[1];var r=l.outerWidth()+f.outerWidth();if(v=="center"){s-=r/2}if(v=="left"){s-=r}return{top:t,left:s}}var i=f.is(":input"),e=i&&f.is(":checkbox, :radio, select, :button"),h=f.attr("type"),n=g.events[h]||g.events[i?(e?"widget":"input"):"def"];n=n.split(/,\s*/);if(n.length!=2){throw"Tooltip: bad events configuration for "+h}f.bind(n[0],function(r){if(g.oneInstance){c.each(d,function(){this.hide()})}var q=l.data("trigger");if(q&&q[0]!=this){l.hide().stop(true,true)}r.target=this;p.show(r);n=g.events.tooltip.split(/,\s*/);l.bind(n[0],function(){p.show(r)});if(n[1]){l.bind(n[1],function(){p.hide(r)})}});f.bind(n[1],function(q){p.hide(q)});if(!c.browser.msie&&!i&&!g.predelay){f.mousemove(function(){if(!p.isShown()){f.triggerHandler("mouseover")}})}if(g.opacity<1){l.css("opacity",g.opacity)}var m=0,j=f.attr("title");if(j&&g.cancelDefault){f.removeAttr("title");f.data("title",j)}c.extend(p,{show:function(r){if(r){f=c(r.target)}clearTimeout(l.data("timer"));if(l.is(":animated")||l.is(":visible")){return p}function q(){l.data("trigger",f);var t=o(r);if(g.tip&&j){l.html(f.data("title"))}r=r||c.Event();r.type="onBeforeShow";k.trigger(r,[t]);if(r.isDefaultPrevented()){return p}t=o(r);l.css({position:"absolute",top:t.top,left:t.left});var s=b[g.effect];if(!s){throw'Nonexistent effect "'+g.effect+'"'}s[0].call(p,function(){r.type="onShow";k.trigger(r)})}if(g.predelay){clearTimeout(m);m=setTimeout(q,g.predelay)}else{q()}return 
p},hide:function(r){clearTimeout(l.data("timer"));clearTimeout(m);if(!l.is(":visible")){return}function q(){r=r||c.Event();r.type="onBeforeHide";k.trigger(r);if(r.isDefaultPrevented()){return}b[g.effect][1].call(p,function(){r.type="onHide";k.trigger(r)})}if(g.delay&&r){l.data("timer",setTimeout(q,g.delay))}else{q()}return p},isShown:function(){return l.is(":visible, :animated")},getConf:function(){return g},getTip:function(){return l},getTrigger:function(){return f},bind:function(q,r){k.bind(q,r);return p},onHide:function(q){return this.bind("onHide",q)},onBeforeShow:function(q){return this.bind("onBeforeShow",q)},onShow:function(q){return this.bind("onShow",q)},onBeforeHide:function(q){return this.bind("onBeforeHide",q)},unbind:function(q){k.unbind(q);return p}});c.each(g,function(q,r){if(c.isFunction(r)){p.bind(q,r)}})}c.prototype.tooltip=function(e){var f=this.eq(typeof e=="number"?e:0).data("tooltip");if(f){return f}var g=c.extend(true,{},c.tools.tooltip.conf);if(c.isFunction(e)){e={onBeforeShow:e}}else{if(typeof e=="string"){e={tip:e}}}e=c.extend(true,g,e);if(typeof e.position=="string"){e.position=e.position.split(/,?\s/)}if(e.lazy!==false&&(e.lazy===true||this.length>20)){this.one("mouseover",function(h){f=new a(c(this),e);f.show(h);d.push(f)})}else{this.each(function(){f=new a(c(this),e);d.push(f)})}return e.api?f:this}})(jQuery); \ No newline at end of file diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png new file mode 100644 index 0000000000..fb961a2eda Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png new file mode 100644 index 0000000000..625d9251cb Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png new file mode 100644 index 0000000000..88983254ce Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png new file mode 100644 index 0000000000..d0cd7fd512 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png new file mode 100644 index 0000000000..6c6e1fe2f5 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png new file mode 100644 index 0000000000..04c8794e92 Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png differ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png new file mode 100644 index 0000000000..d8152529fd Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png differ diff --git 
a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai new file mode 100644 index 0000000000..3b5c47c9e3 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai @@ -0,0 +1,6376 @@ +%PDF-1.5 % +1 0 obj <>/OCGs[15 0 R 27 0 R 37 0 R 65 0 R 78 0 R 90 0 R 116 0 R 129 0 R 141 0 R 167 0 R 180 0 R 192 0 R 218 0 R 237 0 R 255 0 R 287 0 R 306 0 R 324 0 R 356 0 R 375 0 R 393 0 R 425 0 R 444 0 R 462 0 R 480 0 R 515 0 R 534 0 R 552 0 R 570 0 R 605 0 R 624 0 R 642 0 R 660 0 R 695 0 R 699 0 R 718 0 R 735 0 R 753 0 R 785 0 R 789 0 R 808 0 R 825 0 R 843 0 R 878 0 R 882 0 R 901 0 R 918 0 R 936 0 R 971 0 R 975 0 R 994 0 R 1011 0 R 1029 0 R 1056 0 R 1057 0 R 1058 0 R 1059 0 R 1060 0 R 1138 0 R 1139 0 R 1140 0 R 1141 0 R 1142 0 R 1143 0 R 1223 0 R 1224 0 R 1225 0 R 1226 0 R 1227 0 R 1228 0 R 1308 0 R 1309 0 R 1310 0 R 1311 0 R 1312 0 R 1313 0 R]>>/Pages 2 0 R/Type/Catalog>> endobj 1054 0 obj <>stream + + + + + application/pdf + + + Print + + + + + Adobe Illustrator CS3 + 2009-11-23T17:10:12+01:00 + 2011-04-04T19:44:30+02:00 + 2011-04-04T19:44:30+02:00 + + + + 256 + 208 + JPEG + /9j/4AAQSkZJRgABAgEASABIAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAASAAAAAEA AQBIAAAAAQAB/+4ADkFkb2JlAGTAAAAAAf/bAIQABgQEBAUEBgUFBgkGBQYJCwgGBggLDAoKCwoK DBAMDAwMDAwQDA4PEA8ODBMTFBQTExwbGxscHx8fHx8fHx8fHwEHBwcNDA0YEBAYGhURFRofHx8f Hx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8f/8AAEQgA0AEAAwER AAIRAQMRAf/EAaIAAAAHAQEBAQEAAAAAAAAAAAQFAwIGAQAHCAkKCwEAAgIDAQEBAQEAAAAAAAAA AQACAwQFBgcICQoLEAACAQMDAgQCBgcDBAIGAnMBAgMRBAAFIRIxQVEGE2EicYEUMpGhBxWxQiPB UtHhMxZi8CRygvElQzRTkqKyY3PCNUQnk6OzNhdUZHTD0uIIJoMJChgZhJRFRqS0VtNVKBry4/PE 1OT0ZXWFlaW1xdXl9WZ2hpamtsbW5vY3R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo+Ck5SVlpeYmZ qbnJ2en5KjpKWmp6ipqqusra6voRAAICAQIDBQUEBQYECAMDbQEAAhEDBCESMUEFURNhIgZxgZEy obHwFMHR4SNCFVJicvEzJDRDghaSUyWiY7LCB3PSNeJEgxdUkwgJChgZJjZFGidkdFU38qOzwygp 0+PzhJSktMTU5PRldYWVpbXF1eX1RlZmdoaWprbG1ub2R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo +DlJWWl5iZmpucnZ6fkqOkpaanqKmqq6ytrq+v/aAAwDAQACEQMRAD8A9U4q7FXlX54yWv13ynbe YJ5YPIt1eTJ5h9NiqPIsYezSUp+84F0cmm21eoXMnT8jX1dGrJ0vkgvyquNDj/MTVdO8mS8/KaaZ FPqUNu8k1lHqzShR6Mko5fFCrbj7VP8AJFJ5yTAGX1X9jHH9W3J7FmG3uxV2KuxV2KuxV2KuxV2K pD5s89eU/KUNvN5h1FLFbpylupWSV3KirFY4ld+K7ValBUeIycMcpcgxlIDmivLfmfQfMumDU9Dv FvbIu0ZkUMhV06q6OFdDuDRgNiD0IxnjlE0UxkDuE0yCXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXY q7FXYq7FXYq7FVO5tre6t5ba5iSe3mUxzQyKHR0YUZWVqggjqDhBpVPT9N07TbRLLTrWGys4q+nb W8axRLyJZuKIFUVJJOJJO5QBSIwJdirsVdirsVdirsVdirsVeb+cdC1a0/MF/OMdvNdaYnl24030 7FPXuxdCYyxqkPCSvqep8LcSoIPOgpXIxyHDw9eJrkDd+SL/AC10zWH1nzL5r1LTH0VfMMlobTTJ uHrrFbQEetNwJ4vK0p5IQGUjfHNIUI3dLAbk97Pcx2x2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2Ku xV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2Kux V2KuxV2KuxVxIAqemKvMfPn59+V/LN1Pptqjanq0JKPDER6aOOP25Og/aBA+IEfZoa5lYdLKe/Rq nlAeb3X/ADkl5xub1JrWwgtbVaVteQk5UNTV2Su422zOj2cKcc6ksw8s/wDOSGmXc4h1/T207lQL NC3qxiin7RPEjk1ANqDqWynL2dIDbdsjqQeb2K2ura6t47i2lWaCVQ8UqEMrKdwQR1BzXEU5KpgV 2KuxV2KuxV2KuxV2KsA88/nX5P8AKcv1WSQ6hqFFb6palXYK4JBZq8V/ZPxEVBqK5kYtNKbXPKA8 5H/OUOsOV4eXoOIPxk3D1YdqAJ8P45mjs2+rQdV5Mm8rf85GaDfzCDXrR9Kd2/dzKfVgApQBn2at e/ED7t6svZ847jdlDUg83rdtcwXMEdxbyLLBKoeORTUMrCoI+jMAinJVMCuxV2KuxV2KuxV2KsOn /vpP9Y/rzYDk45WYVdiqf+Xv95ZP9f8AgMxc/NthyTXKGbsVdirsVePf85C/mZd+W9Ii0TSJxHqm qKyyypQyQwinJga1RmrQbd6g7ZlabDxGy05Z0HgfkPyNr3m3VPqWlxepIBzuLiQkRRKT9qR6Hqe3 U5tpZI443JxBEyNB7VZ/84ykW/8ApOvhbgjpFbckU/NpFLfcMx/5VrlH7W38p5sM89/lHr/lOH63 
240 + 92 + 39 + + + C=0 M=50 Y=98 K=0 + RGB + PROCESS + 246 + 146 + 33 + + + C=0 M=35 Y=87 K=0 + RGB + PROCESS + 250 + 175 + 59 + + + C=5 M=0 Y=93 K=0 + RGB + PROCESS + 249 + 236 + 35 + + + C=19 M=0 Y=98 K=0 + RGB + PROCESS + 216 + 223 + 39 + + + C=50 M=0 Y=99 K=0 + RGB + PROCESS + 139 + 197 + 64 + + + C=74 M=0 Y=99 K=0 + RGB + PROCESS + 61 + 180 + 74 + + + C=86 M=12 Y=100 K=9 + RGB + PROCESS + 0 + 146 + 69 + + + C=88 M=28 Y=95 K=32 + RGB + PROCESS + 0 + 104 + 55 + + + C=76 M=0 Y=75 K=0 + RGB + PROCESS + 34 + 180 + 115 + + + C=78 M=9 Y=46 K=0 + RGB + PROCESS + 3 + 168 + 156 + + + C=70 M=15 Y=0 K=0 + RGB + PROCESS + 37 + 169 + 224 + + + C=87 M=52 Y=0 K=0 + RGB + PROCESS + 16 + 114 + 185 + + + C=99 M=96 Y=4 K=0 + RGB + PROCESS + 46 + 55 + 143 + + + C=100 M=100 Y=26 K=25 + RGB + PROCESS + 38 + 34 + 97 + + + C=74 M=98 Y=1 K=0 + RGB + PROCESS + 103 + 48 + 144 + + + C=49 M=99 Y=1 K=0 + RGB + PROCESS + 146 + 41 + 141 + + + C=34 M=100 Y=37 K=11 + RGB + PROCESS + 157 + 30 + 96 + + + C=12 M=100 Y=49 K=1 + RGB + PROCESS + 211 + 28 + 92 + + + C=0 M=96 Y=20 K=0 + RGB + PROCESS + 236 + 37 + 122 + + + C=23 M=27 Y=40 K=0 + RGB + PROCESS + 198 + 178 + 152 + + + C=40 M=43 Y=52 K=7 + RGB + PROCESS + 152 + 133 + 118 + + + C=50 M=53 Y=61 K=23 + RGB + PROCESS + 117 + 101 + 88 + + + C=57 M=60 Y=64 K=42 + RGB + PROCESS + 85 + 72 + 65 + + + C=23 M=38 Y=63 K=1 + RGB + PROCESS + 197 + 156 + 110 + + + C=32 M=49 Y=74 K=10 + RGB + PROCESS + 165 + 124 + 82 + + + C=36 M=57 Y=84 K=23 + RGB + PROCESS + 139 + 99 + 57 + + + C=39 M=64 Y=93 K=36 + RGB + PROCESS + 117 + 77 + 36 + + + C=41 M=70 Y=96 K=49 + RGB + PROCESS + 97 + 57 + 23 + + + C=47 M=73 Y=83 K=68 + RGB + PROCESS + 65 + 35 + 18 + + + + + + Print Color Group + 1 + + + + C=2 M=28 Y=72 K=0 + RGB + PROCESS + 246 + 187 + 96 + + + C=5 M=70 Y=90 K=0 + RGB + PROCESS + 231 + 110 + 52 + + + C=4 M=92 Y=77 K=0 + RGB + PROCESS + 229 + 59 + 65 + + + C=29 M=2 Y=92 K=0 + RGB + PROCESS + 191 + 210 + 65 + + + C=62 M=4 Y=93 K=0 + RGB + PROCESS + 109 + 182 + 78 + + + C=30 M=2 Y=7 K=0 + RGB + PROCESS + 174 + 218 + 230 + + + C=60 M=8 Y=5 K=0 + RGB + PROCESS + 85 + 185 + 223 + + + C=78 M=4 Y=11 K=0 + RGB + PROCESS + 0 + 178 + 215 + + + + + + Grayscale + 1 + + + + K=100 + GRAY + PROCESS + 255 + + + K=90 + GRAY + PROCESS + 229 + + + K=80 + GRAY + PROCESS + 203 + + + K=70 + GRAY + PROCESS + 178 + + + K=60 + GRAY + PROCESS + 152 + + + K=50 + GRAY + PROCESS + 127 + + + K=40 + GRAY + PROCESS + 101 + + + K=30 + GRAY + PROCESS + 76 + + + K=20 + GRAY + PROCESS + 50 + + + K=10 + GRAY + PROCESS + 25 + + + K=5 + GRAY + PROCESS + 12 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + endstream endobj 2 0 obj <> endobj 5 0 obj <>/Resources<>/ExtGState<>/Font<>/ProcSet[/PDF/Text]/Properties<>/XObject<>>>/Thumb 1327 0 R/TrimBox[0.0 0.0 841.89 595.275]/Type/Page>> endobj 1315 0 obj <>stream +HWɎ7W(}10f 9e ƌ"Tٲ *`06h>:՛G|?JJzx|q5,`ǏKfSlь~X!Ư9Po0=SZL)T[Q)}b۔]ƯgG|9ⓐIMvO'|xu}]^}{N}'$\0yz)\^?Lr ë$L)W.>D byY,גˊq_dɏ?^yNy%$G3/E,ߋ6o9ٶ咢Β˾cy|WX:x}p:'?-P5=qм9]݋;1hNhլCYm} +zM[ױΥš  *8e&ieg#j779vL^0  @6g6%ǁBӥnk0XĞ ESEjc3a7 %495[݇s~rFzB:Aa$)s% .%<7Ɛ)08عB,:(d%O/IϸPxǢ4C')\@mBjQc4\.#d(7(;uQK9/Av7bdA{OCEH|T0ŒpFHFܤ@Lc$L 1K.ȅĸHʬ03IǸH X8Fd(DXeD$ iqH +]eaECa(pK(n=GJY1H*E:"lx/tHs,~|eG-T@iBQA-_*A*rҵ띁̀'E_q^b3UDxG'5#Qru84A`׺-n5gHR)?V4AHR6 'p% L)kdk +T>admәr)WHR{o+#@~"}bXv ]N`hy9cN9Sn; f%D)F@c2ubq,;TblXzGi&6̼CL| R^R\sd]PCa1ȿSƑr4T>d٥B62NN3^b*i7/ |]ix_" cUrE=Qc=nQ0>[e7<ޝ/U-I -RB(7x6KJ +q\F=wl.ϔDA~lzKE!בRc Rr@H `]^H8RneH,^Mؔ cX)G8dLf`ׂ9;ֈt;<ܙ#E6~uHl1(z>_-uS!}QLU*U{Cg;Hh3mn/R=r8)3(XW~S}h 7e6:ywsmL-g18Zܑ6qf7BM 
;rSCt:\Aax墌tl1.@Y|UT4׻<5BRdD#uU@jw!(5`$WtƝy~utKW>޵ A>ʧU&o"28 &| endstream endobj 1327 0 obj <>stream +8;Z]!0oj-o&4Mq#BXZ(r-qRjZg"d;e'fX2?:$%:pb1G;i=ZP,(M5$Ct>Pr+OM%Sb3 +$Z$YM!!)e4pih!i-MB:^dR_nsi+[p*`^+#SD`bS0lt>(HO8LDNEmrh`0uV.i4u*`_ + endstream endobj 1328 0 obj [/Indexed/DeviceRGB 255 1329 0 R] endobj 1329 0 obj <>stream +8;X]O>EqN@%''O_@%e@?J;%+8(9e>X=MR6S?i^YgA3=].HDXF.R$lIL@"pJ+EP(%0 +b]6ajmNZn*!='OQZeQ^Y*,=]?C.B+\Ulg9dhD*"iC[;*=3`oP1[!S^)?1)IZ4dup` +E1r!/,*0[*9.aFIR2&b-C#soRZ7Dl%MLY\.?d>Mn +6%Q2oYfNRF$$+ON<+]RUJmC0InDZ4OTs0S!saG>GGKUlQ*Q?45:CI&4J'_2j$XKrcYp0n+Xl_nU*O( +l[$6Nn+Z_Nq0]s7hs]`XX1nZ8&94a\~> endstream endobj 1319 0 obj <>/ExtGState<>/XObject<>>>/Subtype/Form>>stream +/CS0 cs 0.173 0.522 0.345 scn +/GS0 gs +q 1 0 0 1 183 308.2725 cm +0 0 m +6.607 0.011 12.01 -5.392 11.999 -11.999 c +12.01 -18.606 6.607 -24.009 0 -23.998 c +-6.607 -24.009 -12.01 -18.606 -11.999 -11.999 c +-12.01 -5.392 -6.607 0.011 0 0 c +f +Q +q +0 g +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q + endstream endobj 1320 0 obj <>/ExtGState<>/XObject<>>>/Subtype/Form>>stream +/CS0 cs 0.173 0.522 0.345 scn +/GS0 gs +q 1 0 0 1 186.627 218.2744 cm +0 0 m +0.544 0.003 0.995 -0.445 0.994 -1.003 c +0.997 -1.549 0.546 -1.999 0 -1.997 c +-0.55 -2 -1.007 -1.543 -1.003 -1.003 c +-1.005 -0.451 -0.549 0.003 0 0 c +f +Q +q +0 g +1 w 4 M 0 j 0 J []0 d +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q + endstream endobj 1321 0 obj <>/ExtGState<>/XObject<>>>/Subtype/Form>>stream +/CS0 cs 0.243 0.533 0.643 scn +/GS0 gs +q 1 0 0 1 334.002 303.2773 cm +0 0 m +6.607 0.011 12.01 -5.392 11.999 -11.999 c +12.008 -18.614 6.61 -24.008 0 -23.998 c +-6.61 -24.008 -12.008 -18.614 -11.999 -11.999 c +-12.01 -5.392 -6.607 0.011 0 0 c +f +Q +q +0 g +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q + endstream endobj 1322 0 obj <>/ExtGState<>/XObject<>>>/Subtype/Form>>stream +/CS0 cs 0.212 0.624 0.78 scn +/GS0 gs +q 1 0 0 1 327.999 212.2715 cm +0 0 m +0.55 0.003 1.007 -0.454 1.003 -0.994 c +1.008 -1.537 0.543 -2.002 0 -1.997 c +-0.543 -2.002 -1.008 -1.537 -1.003 -0.994 c +-1.007 -0.454 -0.55 0.003 0 0 c +f +Q +q +0 g +1 w 4 M 0 j 0 J []0 d +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q + endstream endobj 1323 0 obj <>/ExtGState<>/XObject<>>>/Subtype/Form>>stream +/CS0 cs 0.196 0.322 0.616 scn +/GS0 gs +q 1 0 0 1 323.6699 445.7744 cm +0 0 m +6.607 0.011 12.01 -5.392 11.999 -11.999 c +12.01 -18.606 6.607 -24.009 0 -23.998 c +-6.615 -24.007 -12.009 -18.609 -11.999 -11.999 c +-12.009 -5.389 -6.615 0.009 0 0 c +f +Q +q +0 g +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q + endstream endobj 1324 0 obj <>/ExtGState<>/XObject<>>>/Subtype/Form>>stream +/CS0 cs 0.196 0.322 0.616 scn +/GS0 gs +q 1 0 0 1 315.165 487.2754 cm +0 0 m +0.548 0.003 1.005 -0.451 1.003 -1.003 c +1.007 -1.542 0.55 -2 0 -1.997 c +-0.546 -2 -0.997 -1.549 -0.994 -1.003 c +-0.995 -0.445 -0.544 0.003 0 0 c +f +Q +q +0 g +1 w 4 M 0 j 0 J []0 d +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q + endstream endobj 1325 0 obj <>/ExtGState<>/XObject<>>>/Subtype/Form>>stream +/CS0 cs 0.196 0.322 0.616 scn +/GS0 gs +q 1 0 0 1 184.8359 446.2783 cm +0 0 m +6.607 0.011 12.01 -5.392 11.999 -11.999 c +12.008 -18.617 6.606 -24.018 0 -24.007 c +-6.606 -24.018 -12.008 -18.617 -11.999 -11.999 c +-12.01 -5.392 -6.607 0.011 0 0 c +f +Q +q +0 g +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q + endstream endobj 1326 0 obj <>/ExtGState<>/XObject<>>>/Subtype/Form>>stream +/CS0 cs 0.196 0.322 0.616 scn +/GS0 gs +q 1 0 0 1 189.876 496.0234 cm +0 0 m +0.55 0.003 1.007 -0.455 1.003 -0.994 c +1.005 -1.546 0.548 -2 0 -1.997 c +-0.548 -2 -1.005 -1.546 -1.003 -0.994 c +-1.007 
-0.455 -0.55 0.003 0 0 c +f +Q +q +0 g +1 w 4 M 0 j 0 J []0 d +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q + endstream endobj 1344 0 obj <> endobj 1345 0 obj <>/XObject<>>>/Subtype/Form>>stream +q +189.876 496.023 m +189.876 501.023 l +193.188 501.023 195.879 498.341 195.879 495.029 c +195.879 491.708 193.188 489.026 189.876 489.026 c +186.564 489.026 183.873 491.708 183.873 495.029 c +183.873 498.341 186.564 501.023 189.876 501.023 c +189.876 496.023 l +189.326 496.026 188.869 495.569 188.873 495.029 c +188.871 494.478 189.328 494.023 189.876 494.026 c +190.424 494.023 190.881 494.478 190.879 495.029 c +190.883 495.569 190.426 496.026 189.876 496.023 c +W n +q +1 w 4 M 0 j 0 J []0 d +/GS0 gs +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q +Q + endstream endobj 1346 0 obj <> endobj 1347 0 obj <>/ExtGState<>>>/Subtype/Form>>stream +/CS0 cs 0.2 0.325 0.624 scn +/GS0 gs +q 1 0 0 1 189.876 496.0234 cm +0 0 m +0 5 l +3.312 5 6.003 2.318 6.003 -0.994 c +6.003 -4.315 3.312 -6.997 0 -6.997 c +-3.312 -6.997 -6.003 -4.315 -6.003 -0.994 c +-6.003 2.318 -3.312 5 0 5 c +0 0 l +-0.55 0.003 -1.007 -0.455 -1.003 -0.994 c +-1.005 -1.546 -0.548 -2 0 -1.997 c +0.548 -2 1.005 -1.546 1.003 -0.994 c +1.007 -0.455 0.55 0.003 0 0 c +f +Q +q 1 0 0 1 189.876 496.9482 cm +0 0 m +-0.013 -0.041 -0.073 -0.074 -0.083 -0.116 c +-0.111 -0.248 -0.02 -0.426 0 -0.56 c +0 -0.925 l +-0.55 -0.922 -1.007 -1.379 -1.003 -1.919 c +-1.005 -2.471 -0.548 -2.925 0 -2.922 c +0.548 -2.925 1.005 -2.471 1.003 -1.919 c +1.007 -1.379 0.55 -0.922 0 -0.925 c +0 -0.56 l +0.034 -0.557 0.079 -0.553 0.113 -0.55 c +0.142 -0.55 0.184 -0.537 0.21 -0.549 c +1.046 -1.473 l +1.442 -2.154 1.79 -2.107 1.805 -2.105 c +2.057 -2.065 3.182 -0.618 1.901 0.191 c +1.598 0.383 1.274 0.41 1.132 0.395 c +0 0 l +0 4.075 l +3.312 4.075 6.003 1.393 6.003 -1.919 c +6.003 -5.24 3.312 -7.922 0 -7.922 c +-3.312 -7.922 -6.003 -5.24 -6.003 -1.919 c +-6.003 1.393 -3.312 4.075 0 4.075 c +0 0 l +f +Q +0.196 0.318 0.612 scn +q 1 0 0 1 189.876 497.0903 cm +0 0 m +-0.03 -0.092 -0.164 -0.17 -0.185 -0.265 c +-0.222 -0.433 -0.125 -0.678 -0.188 -0.838 c +-0.188 -0.839 -0.237 -0.941 -0.403 -1.05 c +-1.156 -1.54 -1.044 -2.156 -0.992 -2.333 c +-0.807 -2.959 -0.146 -3.264 0.451 -2.999 c +0.651 -2.909 0.79 -2.772 0.872 -2.69 c +1.143 -2.422 1.548 -2.621 1.836 -2.412 c +2.433 -1.979 2.576 -1.57 2.629 -1.416 c +2.85 -0.785 2.461 0.134 1.628 0.371 c +0.853 0.591 0.002 0.007 0 0 c +0 3.933 l +3.312 3.933 6.003 1.251 6.003 -2.061 c +6.003 -5.382 3.312 -8.064 0 -8.064 c +-3.312 -8.064 -6.003 -5.382 -6.003 -2.061 c +-6.003 1.251 -3.312 3.933 0 3.933 c +0 0 l +f +Q +0.192 0.31 0.596 scn +q 1 0 0 1 189.876 497.231 cm +0 0 m +-0.294 -0.832 -1.296 -1.347 -1.079 -2.407 c +-0.939 -3.088 -0.171 -3.557 0.648 -3.165 c +2.592 -2.234 2.592 -2.234 2.763 -1.674 c +3.159 -0.375 2.125 0.263 1.731 0.384 c +0.831 0.661 0.003 0.008 0 0 c +0 3.792 l +3.312 3.792 6.003 1.11 6.003 -2.202 c +6.003 -5.522 3.312 -8.205 0 -8.205 c +-3.312 -8.205 -6.003 -5.522 -6.003 -2.202 c +-6.003 1.11 -3.312 3.792 0 3.792 c +0 0 l +f +Q +0.188 0.302 0.58 scn +q 1 0 0 1 189.876 497.3701 cm +0 0 m +-0.353 -0.867 -1.383 -1.429 -1.146 -2.56 c +-1.024 -3.139 -0.35 -3.806 0.712 -3.399 c +2.444 -2.735 2.625 -2.666 2.946 -1.778 c +2.952 -1.763 3.406 -0.235 2.053 0.316 c +0.838 0.812 0.004 0.01 0 0 c +0 3.653 l +3.312 3.653 6.003 0.971 6.003 -2.341 c +6.003 -5.662 3.312 -8.344 0 -8.344 c +-3.312 -8.344 -6.003 -5.662 -6.003 -2.341 c +-6.003 0.971 -3.312 3.653 0 3.653 c +0 0 l +f +Q +0.18 0.294 0.569 scn +q 1 0 0 1 189.876 497.5073 cm +0 0 m +-0.193 -0.417 
-0.585 -0.692 -0.795 -1.098 c +-1.093 -1.708 l +-1.262 -2.107 -1.291 -2.435 -1.188 -2.804 c +-1.126 -3.032 -0.727 -4.136 0.984 -3.565 c +4.73 -2.315 2.784 0.034 2.453 0.247 c +1.442 0.896 0.101 0.218 0 0 c +0 3.516 l +3.312 3.516 6.003 0.834 6.003 -2.478 c +6.003 -5.799 3.312 -8.481 0 -8.481 c +-3.312 -8.481 -6.003 -5.799 -6.003 -2.478 c +-6.003 0.834 -3.312 3.516 0 3.516 c +0 0 l +f +Q +0.176 0.286 0.553 scn +q 1 0 0 1 189.876 497.6602 cm +0 0 m +-0.013 -0.025 -0.053 -0.04 -0.076 -0.058 c +-0.365 -0.276 -0.692 -0.523 -1.173 -1.803 c +-1.244 -1.989 -1.457 -2.557 -1.185 -3.151 c +-0.782 -4.034 0.179 -4.205 1.672 -3.658 c +3.872 -2.853 3.987 -0.377 2.341 0.401 c +1.366 0.863 0.123 0.247 0 0 c +0 3.363 l +3.312 3.363 6.003 0.681 6.003 -2.631 c +6.003 -5.952 3.312 -8.634 0 -8.634 c +-3.312 -8.634 -6.003 -5.952 -6.003 -2.631 c +-6.003 0.681 -3.312 3.363 0 3.363 c +0 0 l +f +Q +0.173 0.278 0.541 scn +q 1 0 0 1 189.876 497.8516 cm +0 0 m +-0.034 -0.067 -0.142 -0.105 -0.203 -0.15 c +-0.741 -0.551 -1.014 -1.287 -1.254 -1.937 c +-1.386 -2.294 -1.492 -2.833 -1.246 -3.37 c +-0.614 -4.746 1.248 -4.148 1.804 -3.932 c +4.133 -3.027 4.261 -0.305 2.51 0.419 c +1.108 0.999 0.006 0.012 0 0 c +0 3.172 l +3.312 3.172 6.003 0.49 6.003 -2.822 c +6.003 -6.143 3.312 -8.825 0 -8.825 c +-3.312 -8.825 -6.003 -6.143 -6.003 -2.822 c +-6.003 0.49 -3.312 3.172 0 3.172 c +0 0 l +f +Q +0.169 0.275 0.525 scn +q 1 0 0 1 189.876 498.0396 cm +0 0 m +-0.037 -0.07 -0.152 -0.104 -0.217 -0.148 c +-0.223 -0.151 -0.766 -0.542 -1.153 -1.542 c +-1.498 -2.429 -1.549 -2.937 -1.35 -3.481 c +-1.145 -4.045 -0.491 -4.904 1.578 -4.323 c +4.082 -3.621 4.629 -0.761 2.993 0.316 c +1.701 1.166 0.079 0.148 0 0 c +0 2.984 l +3.312 2.984 6.003 0.302 6.003 -3.01 c +6.003 -6.331 3.312 -9.013 0 -9.013 c +-3.312 -9.013 -6.003 -6.331 -6.003 -3.01 c +-6.003 0.302 -3.312 2.984 0 2.984 c +0 0 l +f +Q +0.165 0.267 0.51 scn +q 1 0 0 1 189.876 498.2236 cm +0 0 m +-0.175 -0.317 -0.542 -0.437 -0.748 -0.722 c +-1.027 -1.109 -1.128 -1.336 -1.241 -1.614 c +-1.322 -1.817 -1.715 -2.863 -1.448 -3.592 c +-0.849 -5.223 1.105 -4.776 1.689 -4.601 c +4.425 -3.778 5.003 -0.758 3.22 0.385 c +1.946 1.2 0.234 0.423 0 0 c +0 2.8 l +3.312 2.8 6.003 0.118 6.003 -3.194 c +6.003 -6.515 3.312 -9.197 0 -9.197 c +-3.312 -9.197 -6.003 -6.515 -6.003 -3.194 c +-6.003 0.118 -3.312 2.8 0 2.8 c +0 0 l +f +Q +0.161 0.259 0.498 scn +q 1 0 0 1 189.876 498.4546 cm +0 0 m +-0.06 -0.132 -0.265 -0.21 -0.386 -0.291 c +-0.759 -0.542 -1.229 -1.473 -1.327 -1.735 c +-1.444 -2.049 -1.803 -3.137 -1.475 -3.94 c +-0.715 -5.801 1.956 -4.866 1.983 -4.856 c +5.297 -3.576 5.172 -0.368 3.116 0.573 c +1.411 1.354 0.007 0.017 0 0 c +0 2.569 l +3.312 2.569 6.003 -0.113 6.003 -3.425 c +6.003 -6.746 3.312 -9.428 0 -9.428 c +-3.312 -9.428 -6.003 -6.746 -6.003 -3.425 c +-6.003 -0.113 -3.312 2.569 0 2.569 c +0 0 l +f +Q +0.153 0.251 0.482 scn +q 1 0 0 1 189.876 498.7373 cm +0 0 m +-0.04 -0.083 -0.167 -0.135 -0.239 -0.193 c +-0.737 -0.595 -1.131 -1.172 -1.412 -1.908 c +-1.719 -2.716 -1.736 -3.696 -1.576 -4.141 c +-0.861 -6.127 1.881 -5.307 1.908 -5.298 c +5.872 -3.968 5.348 -0.494 3.424 0.518 c +1.628 1.463 0.058 0.121 0 0 c +0 2.286 l +3.312 2.286 6.003 -0.396 6.003 -3.708 c +6.003 -7.029 3.312 -9.711 0 -9.711 c +-3.312 -9.711 -6.003 -7.029 -6.003 -3.708 c +-6.003 -0.396 -3.312 2.286 0 2.286 c +0 0 l +f +Q +0.149 0.243 0.467 scn +q 1 0 0 1 189.876 499.0234 cm +0 0 m +-0.045 -0.106 -0.21 -0.167 -0.302 -0.236 c +-0.488 -0.374 -1.13 -0.939 -1.627 -2.442 c +-1.764 -2.855 -1.88 -3.934 -1.545 -4.673 c +-1.028 -5.816 0.793 
-6.212 2.513 -5.554 c +6.321 -4.099 5.738 -0.283 3.153 0.723 c +1.353 1.423 0.007 0.017 0 0 c +0 2 l +3.312 2 6.003 -0.682 6.003 -3.994 c +6.003 -7.315 3.312 -9.997 0 -9.997 c +-3.312 -9.997 -6.003 -7.315 -6.003 -3.994 c +-6.003 -0.682 -3.312 2 0 2 c +0 0 l +f +Q +0.145 0.235 0.455 scn +q 1 0 0 1 189.876 499.4067 cm +0 0 m +-0.163 -0.362 -0.542 -0.515 -0.779 -0.805 c +-0.948 -1.011 -1.049 -1.26 -1.205 -1.475 c +-1.361 -1.69 -1.461 -1.951 -1.723 -2.734 c +-2.048 -3.705 -1.823 -4.543 -1.66 -4.957 c +-1.17 -6.199 0.623 -6.718 2.422 -6.139 c +7.03 -4.656 5.827 -0.75 3.286 0.539 c +1.422 1.485 0.008 0.018 0 0 c +0 1.617 l +3.312 1.617 6.003 -1.065 6.003 -4.377 c +6.003 -7.698 3.312 -10.38 0 -10.38 c +-3.312 -10.38 -6.003 -7.698 -6.003 -4.377 c +-6.003 -1.065 -3.312 1.617 0 1.617 c +0 0 l +f +Q +0.141 0.227 0.439 scn +q 1 0 0 1 189.876 499.8311 cm +0 0 m +-0.128 -0.296 -0.442 -0.404 -0.638 -0.631 c +-0.788 -0.804 -0.893 -1.009 -1.031 -1.191 c +-1.148 -1.346 -1.62 -2.354 -1.623 -2.361 c +-2.171 -3.896 -2.053 -4.61 -1.842 -5.154 c +-0.963 -7.425 1.653 -7.025 2.586 -6.68 c +3.893 -6.196 6.611 -5.189 5.553 -2.521 c +5.843 -3.224 6.003 -3.994 6.003 -4.802 c +6.003 -8.123 3.312 -10.805 0 -10.805 c +-3.312 -10.805 -6.003 -8.123 -6.003 -4.802 c +-6.003 -1.49 -3.312 1.192 0 1.192 c +0 0 l +f +Q +0.137 0.22 0.427 scn +q 1 0 0 1 189.876 500.2959 cm +0 0 m +-0.037 -0.078 -0.154 -0.129 -0.22 -0.184 c +-1.238 -1.037 -1.832 -2.884 -1.837 -2.903 c +-2.426 -4.762 -2.011 -5.635 -1.875 -5.921 c +-0.599 -8.601 3.356 -7.148 3.396 -7.133 c +4.442 -6.725 6.193 -6.042 5.899 -4.15 c +5.967 -4.512 6.003 -4.885 6.003 -5.267 c +6.003 -8.587 3.312 -11.27 0 -11.27 c +-3.312 -11.27 -6.003 -8.587 -6.003 -5.267 c +-6.003 -1.955 -3.312 0.728 0 0.728 c +0 0 l +f +Q +0.133 0.216 0.412 scn +q 1 0 0 1 189.876 500.7388 cm +0 0 m +-0.038 -0.067 -0.155 -0.091 -0.221 -0.129 c +-1.151 -0.674 -1.646 -2.172 -2.007 -3.267 c +-2.012 -3.284 -2.546 -5.066 -2.073 -6.279 c +-1.012 -9 2.932 -7.99 3.099 -7.945 c +4.318 -7.622 5.989 -7.18 6.001 -5.577 c +6.002 -5.621 6.003 -5.665 6.003 -5.709 c +6.003 -9.03 3.312 -11.712 0 -11.712 c +-3.312 -11.712 -6.003 -9.03 -6.003 -5.709 c +-6.003 -2.397 -3.312 0.285 0 0.285 c +0 0 l +f +Q +0.125 0.208 0.396 scn +q 1 0 0 1 189.876 501.0112 cm +0 0 m +-0.043 -0.052 -0.154 -0.029 -0.221 -0.042 c +-0.696 -0.132 -1.348 -0.689 -1.732 -1.731 c +-2.576 -4.014 -2.459 -5.548 -2.314 -6.26 c +-1.78 -8.88 1.72 -8.614 1.755 -8.611 c +4.215 -8.371 5.7 -8.227 5.951 -6.778 c +5.561 -9.721 3.043 -11.985 0 -11.985 c +-3.312 -11.985 -6.003 -9.303 -6.003 -5.982 c +-6.003 -2.67 -3.312 0.012 0 0.012 c +0 0 l +f +Q +0.122 0.2 0.384 scn +q 1 0 0 1 188.9707 500.9468 cm +0 0 m +-1.737 -0.589 -1.75 -4.504 -1.75 -4.544 c +-1.745 -7.052 -0.74 -7.832 0.016 -8.2 c +1.799 -9.068 6.088 -9.359 6.659 -7.635 c +5.92 -10.116 3.622 -11.92 0.905 -11.92 c +-2.407 -11.92 -5.098 -9.238 -5.098 -5.917 c +-5.098 -2.856 -2.799 -0.333 0.165 0.031 c +0.115 0.022 0.049 0.013 0 0 c +f +Q +0.118 0.192 0.369 scn +q 1 0 0 1 187.6411 500.5234 cm +0 0 m +-1.064 -0.939 -0.813 -4.868 -0.54 -5.601 c +0.43 -8.206 2.406 -8.584 3.21 -8.625 c +4.273 -8.681 5.3 -9.068 6.38 -8.967 c +6.693 -8.938 7.267 -8.802 7.587 -8.217 c +6.594 -10.165 4.569 -11.497 2.235 -11.497 c +-1.077 -11.497 -3.768 -8.815 -3.768 -5.494 c +-3.768 -2.81 -2 -0.54 0.432 0.225 c +0.372 0.201 0.292 0.168 0.231 0.144 c +0.162 0.102 0.062 0.054 0 0 c +f +Q +0.204 0.333 0.639 scn +q 1 0 0 1 191.4565 495.208 cm +0 0 m +-0.097 0.069 -0.097 0.069 -0.519 0.587 c +-0.662 0.762 -0.835 0.91 -0.974 1.089 c +-1.125 
1.285 -1.232 1.593 y +-1.227 1.612 -0.03 2.438 0.591 1.363 c +1.026 0.61 0.244 -0.13 0.233 -0.131 c +0.153 -0.143 0.065 -0.046 0 0 c +f +Q +0.141 0.227 0.439 scn +q 1 0 0 1 192.4463 500.4146 cm +0 0 m +-1.295 0.463 -2.255 -0.325 -2.57 -0.583 c +-2.57 0.609 l +-1.402 0.609 -0.312 0.275 0.611 -0.302 c +0.521 -0.251 0.401 -0.185 0.312 -0.135 c +0.218 -0.094 0.096 -0.034 0 0 c +f +Q +0.208 0.337 0.655 scn +q 1 0 0 1 191.4961 495.46 cm +0 0 m +-0.335 0.354 l +-0.472 0.524 -0.626 0.679 -0.757 0.854 c +-0.976 1.148 -1.021 1.268 -1.02 1.273 c +-1.015 1.287 -0.029 1.7 0.33 0.953 c +0.59 0.409 0.174 -0.12 0.167 -0.121 c +0.106 -0.131 0.048 -0.04 0 0 c +f +Q +0.137 0.22 0.427 scn +q 1 0 0 1 191.6431 500.7461 cm +0 0 m +-0.651 0.121 -1.163 -0.01 -1.767 -0.45 c +-1.767 0.277 l +-1.038 0.277 -0.339 0.147 0.307 -0.09 c +0.224 -0.065 0.112 -0.032 0.029 -0.006 c +0.02 -0.004 0.009 -0.001 0 0 c +f +Q +0.216 0.345 0.667 scn +q 1 0 0 1 191.5 495.7261 cm +0 0 m +-0.004 0.004 -0.533 0.573 -0.71 0.862 c +-0.568 0.875 -0.482 0.883 -0.264 0.809 c +-0.18 0.781 -0.083 0.699 -0.025 0.631 c +0.033 0.563 0.091 0.45 0.104 0.362 c +0.135 0.141 0.099 0.019 0.074 -0.062 c +0.052 -0.043 0.021 -0.021 0 0 c +f +Q +0.133 0.216 0.412 scn +q 1 0 0 1 190.7813 500.9458 cm +0 0 m +-0.314 -0.005 -0.487 -0.009 -0.905 -0.207 c +-0.905 0.078 l +-0.519 0.078 -0.142 0.041 0.225 -0.028 c +0.157 -0.02 0.067 -0.003 0 0 c +f +Q +0.125 0.208 0.396 scn +q 1 0 0 1 189.876 501.0112 cm +0 0 m +0 0.012 l +0.072 0.012 0.144 0.011 0.215 0.008 c +0.15 0.006 0.046 -0.044 0 0 c +f +Q + endstream endobj 1348 0 obj <> endobj 1318 0 obj <> endobj 1317 0 obj [/ICCBased 1349 0 R] endobj 1349 0 obj <>stream +HyTSwoɞc [5laQIBHADED2mtFOE.c}08׎8GNg9w߽'0 ֠Jb  + 2y.-;!KZ ^i"L0- @8(r;q7Ly&Qq4j|9 +V)gB0iW8#8wթ8_٥ʨQQj@&A)/g>'Kt;\ ӥ$պFZUn(4T%)뫔0C&Zi8bxEB;Pӓ̹A om?W= +x-[0}y)7ta>jT7@tܛ`q2ʀ&6ZLĄ?_yxg)˔zçLU*uSkSeO4?׸c. R ߁-25 S>ӣVd`rn~Y&+`;A4 A9=-tl`;~p Gp| [`L`< "A YA+Cb(R,*T2B- +ꇆnQt}MA0alSx k&^>0|>_',G!"F$H:R!zFQd?r 9\A&G rQ hE]a4zBgE#H *B=0HIpp0MxJ$D1D, VĭKĻYdE"EI2EBGt4MzNr!YK ?%_&#(0J:EAiQ(()ӔWT6U@P+!~mD eԴ!hӦh/']B/ҏӿ?a0nhF!X8܌kc&5S6lIa2cKMA!E#ƒdV(kel }}Cq9 +N')].uJr + wG xR^[oƜchg`>b$*~ :Eb~,m,-ݖ,Y¬*6X[ݱF=3뭷Y~dó ti zf6~`{v.Ng#{}}jc1X6fm;'_9 r:8q:˜O:ϸ8uJqnv=MmR 4 +n3ܣkGݯz=[==<=GTB(/S,]6*-W:#7*e^YDY}UjAyT`#D="b{ų+ʯ:!kJ4Gmt}uC%K7YVfFY .=b?SƕƩȺy چ k5%4m7lqlioZlG+Zz͹mzy]?uuw|"űNwW&e֥ﺱ*|j5kyݭǯg^ykEklD_p߶7Dmo꿻1ml{Mś nLl<9O[$h՛BdҞ@iءG&vVǥ8nRĩ7u\ЭD-u`ֲK³8%yhYѹJº;.! 
+zpg_XQKFAǿ=ȼ:ɹ8ʷ6˶5̵5͵6ζ7ϸ9к<Ѿ?DINU\dlvۀ܊ݖޢ)߯6DScs 2F[p(@Xr4Pm8Ww)Km endstream endobj 1342 0 obj <> endobj 1343 0 obj <>/XObject<>>>/Subtype/Form>>stream +q +184.836 446.278 m +184.836 462.278 l +200.298 462.278 212.835 449.741 212.835 434.279 c +212.835 418.809 200.298 406.271 184.836 406.271 c +169.374 406.271 156.837 418.809 156.837 434.279 c +156.837 449.741 169.374 462.278 184.836 462.278 c +184.836 446.278 l +178.229 446.289 172.826 440.887 172.837 434.279 c +172.828 427.661 178.229 422.261 184.836 422.271 c +191.442 422.261 196.844 427.661 196.835 434.279 c +196.846 440.887 191.443 446.289 184.836 446.278 c +W n +q +/GS0 gs +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q +Q + endstream endobj 1350 0 obj <> endobj 1351 0 obj <>/ExtGState<>>>/Subtype/Form>>stream +/CS0 cs 0.208 0.337 0.655 scn +/GS0 gs +q 1 0 0 1 184.8359 446.2783 cm +0 0 m +0 16 l +15.462 16 27.999 3.463 27.999 -11.999 c +27.999 -27.47 15.462 -40.007 0 -40.007 c +-15.462 -40.007 -27.999 -27.47 -27.999 -11.999 c +-27.999 3.463 -15.462 16 0 16 c +0 0 l +-6.607 0.011 -12.01 -5.392 -11.999 -11.999 c +-12.008 -18.617 -6.606 -24.018 0 -24.007 c +6.606 -24.018 12.008 -18.617 11.999 -11.999 c +12.01 -5.392 6.607 0.011 0 0 c +f +Q +q 1 0 0 1 184.8359 451.4419 cm +0 0 m +0 -0.468 l +0 -5.164 l +-6.607 -5.153 -12.01 -10.555 -11.999 -17.163 c +-12.008 -23.781 -6.606 -29.181 0 -29.17 c +6.606 -29.181 12.008 -23.781 11.999 -17.163 c +12.01 -10.555 6.607 -5.153 0 -5.164 c +0 -0.468 l +0.316 -0.694 0.738 -0.997 1.055 -1.223 c +3.817 -3.661 7.459 -4.869 10 -7.617 c +12.018 -9.8 13.458 -12.461 14.279 -15.528 c +15.076 -18.507 16.901 -19.346 16.917 -19.348 c +18.874 -19.542 24.735 -10.485 17.857 -2.241 c +10.879 6.124 0.769 1.958 0 0 c +0 10.836 l +15.462 10.836 27.999 -1.701 27.999 -17.163 c +27.999 -32.633 15.462 -45.17 0 -45.17 c +-15.462 -45.17 -27.999 -32.633 -27.999 -17.163 c +-27.999 -1.701 -15.462 10.836 0 10.836 c +0 0 l +f +Q +0.204 0.333 0.639 scn +q 1 0 0 1 184.8359 453.2891 cm +0 0 m +-0.296 -0.712 -1.487 -1.168 -1.735 -1.898 c +-1.987 -2.638 -2.003 -3.873 -1.53 -4.494 c +-1.227 -4.893 -0.45 -4.945 0 -5.167 c +0 -7.011 l +-6.607 -7 -12.01 -12.402 -11.999 -19.01 c +-12.008 -25.628 -6.606 -31.028 0 -31.018 c +6.606 -31.028 12.008 -25.628 11.999 -19.01 c +12.01 -12.402 6.607 -7 0 -7.011 c +0 -5.167 l +0.338 -5.201 0.788 -5.245 1.126 -5.278 c +2.249 -5.476 12.144 -7.557 13.761 -19.538 c +13.765 -19.565 14.171 -22.516 14.171 -22.516 c +14.636 -23.09 15.724 -23.507 16.459 -23.43 c +20.584 -22.993 26.416 -9.568 15.896 -1.312 c +7.943 4.929 0.035 0.084 0 0 c +0 8.989 l +15.462 8.989 27.999 -3.548 27.999 -19.01 c +27.999 -34.48 15.462 -47.018 0 -47.018 c +-15.462 -47.018 -27.999 -34.48 -27.999 -19.01 c +-27.999 -3.548 -15.462 8.989 0 8.989 c +0 0 l +f +Q +0.2 0.325 0.624 scn +q 1 0 0 1 184.8359 454.4082 cm +0 0 m +-0.627 -1.109 -1.866 -1.525 -2.708 -2.391 c +-4.764 -4.503 -4.447 -6.209 -4.44 -6.223 c +-4.355 -6.386 -4.355 -6.386 0 -7.408 c +0 -8.13 l +-6.607 -8.119 -12.01 -13.521 -11.999 -20.129 c +-12.008 -26.747 -6.606 -32.147 0 -32.137 c +6.606 -32.147 12.008 -26.747 11.999 -20.129 c +12.01 -13.521 6.607 -8.119 0 -8.13 c +0 -7.408 l +0.312 -7.428 0.727 -7.455 1.039 -7.475 c +5.587 -8.118 13.156 -12.018 12.674 -22.551 c +12.559 -25.065 12.662 -26.483 12.98 -26.764 c +14.309 -27.938 23.357 -23.699 22.629 -14.042 c +21.269 4.004 1.142 2.019 0 0 c +0 7.87 l +15.462 7.87 27.999 -4.667 27.999 -20.129 c +27.999 -35.6 15.462 -48.137 0 -48.137 c +-15.462 -48.137 -27.999 -35.6 -27.999 -20.129 c +-27.999 -4.667 -15.462 7.87 0 7.87 c +0 0 l +f +Q 
+0.196 0.318 0.612 scn +q 1 0 0 1 184.8359 455.3335 cm +0 0 m +-0.223 -0.377 -0.896 -0.494 -1.279 -0.706 c +-3.984 -2.198 -4.352 -2.882 -7.218 -8.204 c +-10.977 -15.407 l +-12.034 -17.649 -12.409 -19.973 -12.123 -22.512 c +-11.368 -29.209 -4.441 -35.048 3.701 -32.84 c +16.505 -28.457 l +19.639 -26.39 21.523 -23.894 22.614 -20.364 c +24.61 -13.907 21.812 -4.74 13.674 -0.575 c +6.26 3.219 0.029 0.049 0 0 c +0 6.945 l +15.462 6.945 27.999 -5.592 27.999 -21.054 c +27.999 -36.525 15.462 -49.062 0 -49.062 c +-15.462 -49.062 -27.999 -36.525 -27.999 -21.054 c +-27.999 -5.592 -15.462 6.945 0 6.945 c +0 0 l +f +Q +0.192 0.31 0.596 scn +q 1 0 0 1 184.8359 456.1333 cm +0 0 m +-0.174 -0.267 -0.682 -0.3 -0.974 -0.428 c +-3.27 -1.438 -6.363 -4.313 -7.593 -6.58 c +-13.39 -17.263 -12.999 -20.654 -12.686 -23.38 c +-12.044 -28.948 -6.307 -36.34 3.975 -34.525 c +32.478 -29.493 24.483 -7.887 15.417 -1.844 c +7.621 3.352 0.038 0.059 0 0 c +0 6.145 l +15.462 6.145 27.999 -6.392 27.999 -21.854 c +27.999 -37.325 15.462 -49.862 0 -49.862 c +-15.462 -49.862 -27.999 -37.325 -27.999 -21.854 c +-27.999 -6.392 -15.462 6.145 0 6.145 c +0 0 l +f +Q +0.188 0.302 0.58 scn +q 1 0 0 1 184.8359 456.834 cm +0 0 m +-0.26 -0.393 -1.01 -0.429 -1.443 -0.612 c +-4.281 -1.817 -7.531 -4.969 -9.346 -8.278 c +-13.498 -15.848 -13.757 -21.086 -13.243 -24.147 c +-12.335 -29.562 -7.257 -38.122 6.017 -35.862 c +29.657 -31.837 27.572 -10.232 15.691 -2.188 c +7.725 3.206 0.039 0.058 0 0 c +0 5.444 l +15.462 5.444 27.999 -7.093 27.999 -22.555 c +27.999 -38.025 15.462 -50.563 0 -50.563 c +-15.462 -50.563 -27.999 -38.025 -27.999 -22.555 c +-27.999 -7.093 -15.462 5.444 0 5.444 c +0 0 l +f +Q +0.18 0.294 0.569 scn +q 1 0 0 1 184.8359 457.5 cm +0 0 m +-0.27 -0.397 -1.042 -0.411 -1.488 -0.586 c +-3.111 -1.225 -7.25 -3.37 -10.633 -9.471 c +-11.685 -11.368 -15.021 -18.085 -13.796 -24.879 c +-12.453 -32.328 -5.461 -39.37 6.714 -37.227 c +28.951 -33.313 28.976 -11.259 15.609 -2.301 c +7.856 2.895 0.038 0.056 0 0 c +0 4.778 l +15.462 4.778 27.999 -7.759 27.999 -23.221 c +27.999 -38.691 15.462 -51.229 0 -51.229 c +-15.462 -51.229 -27.999 -38.691 -27.999 -23.221 c +-27.999 -7.759 -15.462 4.778 0 4.778 c +0 0 l +f +Q +0.176 0.286 0.553 scn +q 1 0 0 1 184.8359 458.1108 cm +0 0 m +-0.285 -0.403 -1.085 -0.384 -1.55 -0.549 c +-2.14 -0.758 -7.426 -2.783 -11.14 -9.4 c +-12.536 -11.888 -15.643 -18.441 -14.343 -25.555 c +-13.275 -31.4 -7.567 -40.72 7.05 -38.576 c +28.069 -35.492 30.907 -13.131 16.17 -2.838 c +7.979 2.883 0.04 0.057 0 0 c +0 4.167 l +15.462 4.167 27.999 -8.37 27.999 -23.832 c +27.999 -39.302 15.462 -51.839 0 -51.839 c +-15.462 -51.839 -27.999 -39.302 -27.999 -23.832 c +-27.999 -8.37 -15.462 4.167 0 4.167 c +0 0 l +f +Q +0.173 0.278 0.541 scn +q 1 0 0 1 184.8359 458.6836 cm +0 0 m +-0.294 -0.407 -1.113 -0.365 -1.59 -0.521 c +-3.037 -0.996 -8.057 -3.068 -11.887 -9.807 c +-12.95 -11.676 -16.305 -18.381 -14.886 -26.192 c +-13.691 -32.767 -6.813 -41.832 7.241 -39.858 c +28.692 -36.845 31.476 -13.851 16.374 -3.144 c +8.08 2.736 0.041 0.056 0 0 c +0 3.595 l +15.462 3.595 27.999 -8.942 27.999 -24.404 c +27.999 -39.875 15.462 -52.412 0 -52.412 c +-15.462 -52.412 -27.999 -39.875 -27.999 -24.404 c +-27.999 -8.942 -15.462 3.595 0 3.595 c +0 0 l +f +Q +0.169 0.275 0.525 scn +q 1 0 0 1 184.8359 459.2207 cm +0 0 m +-0.327 -0.44 -1.224 -0.37 -1.749 -0.528 c +-5.52 -1.667 -9.766 -5.26 -12.073 -9.267 c +-15.394 -15.036 -16.522 -20.933 -15.426 -26.792 c +-13.856 -35.181 -5.227 -43.019 7.675 -41.021 c +29.387 -37.659 31.678 -13.959 16.092 -3.122 c +8.188 2.374 0.041 
0.052 0 0 c +0 3.058 l +15.462 3.058 27.999 -9.479 27.999 -24.941 c +27.999 -40.412 15.462 -52.949 0 -52.949 c +-15.462 -52.949 -27.999 -40.412 -27.999 -24.941 c +-27.999 -9.479 -15.462 3.058 0 3.058 c +0 0 l +f +Q +0.165 0.267 0.51 scn +q 1 0 0 1 184.8359 459.7354 cm +0 0 m +-0.315 -0.413 -1.169 -0.321 -1.671 -0.458 c +-5.628 -1.543 -10.186 -5.222 -12.509 -9.206 c +-13.794 -11.411 -17.706 -18.119 -15.958 -27.37 c +-14.312 -36.089 -5.369 -44.235 7.962 -42.157 c +29.829 -38.748 32.261 -15.07 16.713 -3.752 c +8.241 2.415 0.041 0.054 0 0 c +0 2.543 l +15.462 2.543 27.999 -9.994 27.999 -25.456 c +27.999 -40.927 15.462 -53.464 0 -53.464 c +-15.462 -53.464 -27.999 -40.927 -27.999 -25.456 c +-27.999 -9.994 -15.462 2.543 0 2.543 c +0 0 l +f +Q +0.161 0.259 0.498 scn +q 1 0 0 1 184.8359 460.208 cm +0 0 m +-0.326 -0.417 -1.197 -0.297 -1.71 -0.424 c +-5.005 -1.241 -10.022 -4.174 -13.317 -9.752 c +-16.642 -15.38 -17.707 -21.488 -16.484 -27.905 c +-14.771 -36.893 -5.522 -45.319 8.241 -43.229 c +29.819 -39.954 32.248 -15.425 16.845 -4.05 c +8.507 2.107 0.042 0.053 0 0 c +0 2.07 l +15.462 2.07 27.999 -10.467 27.999 -25.929 c +27.999 -41.399 15.462 -53.937 0 -53.937 c +-15.462 -53.937 -27.999 -41.399 -27.999 -25.929 c +-27.999 -10.467 -15.462 2.07 0 2.07 c +0 0 l +f +Q +0.153 0.251 0.482 scn +q 1 0 0 1 184.8359 460.6479 cm +0 0 m +-0.165 -0.201 -0.596 -0.119 -0.852 -0.169 c +-6.63 -1.321 -11.086 -5.48 -13.33 -8.99 c +-17.823 -16.018 -17.959 -22.68 -17.283 -27.032 c +-15.528 -38.313 -5.353 -45.642 6.913 -44.456 c +29.058 -42.316 33.217 -18.568 18.588 -5.674 c +9.722 2.142 0.051 0.062 0 0 c +0 1.63 l +15.462 1.63 27.999 -10.907 27.999 -26.369 c +27.999 -41.839 15.462 -54.376 0 -54.376 c +-15.462 -54.376 -27.999 -41.839 -27.999 -26.369 c +-27.999 -10.907 -15.462 1.63 0 1.63 c +0 0 l +f +Q +0.149 0.243 0.467 scn +q 1 0 0 1 184.8359 461.0591 cm +0 0 m +-0.345 -0.419 -1.243 -0.245 -1.775 -0.35 c +-5.333 -1.052 -10.598 -4.013 -13.752 -8.857 c +-18.474 -16.108 -18.606 -22.979 -17.885 -27.466 c +-16.272 -37.507 -7.1 -46.929 7.31 -45.507 c +29.58 -43.31 33.524 -19.12 18.666 -5.999 c +9.679 1.938 0.05 0.061 0 0 c +0 1.219 l +15.462 1.219 27.999 -11.318 27.999 -26.78 c +27.999 -42.25 15.462 -54.788 0 -54.788 c +-15.462 -54.788 -27.999 -42.25 -27.999 -26.78 c +-27.999 -11.318 -15.462 1.219 0 1.219 c +0 0 l +f +Q +0.145 0.235 0.455 scn +q 1 0 0 1 184.8359 461.4141 cm +0 0 m +-0.359 -0.424 -1.279 -0.213 -1.827 -0.305 c +-2.571 -0.429 -9.239 -1.713 -14.035 -8.521 c +-19.337 -16.049 -19.04 -23.602 -18.666 -26.5 c +-16.79 -41.041 -4.557 -47.127 6.015 -46.629 c +29.242 -45.535 34.043 -19.97 18.705 -6.311 c +9.693 1.714 0.05 0.059 0 0 c +0 0.864 l +15.462 0.864 27.999 -11.673 27.999 -27.135 c +27.999 -42.605 15.462 -55.143 0 -55.143 c +-15.462 -55.143 -27.999 -42.605 -27.999 -27.135 c +-27.999 -11.673 -15.462 0.864 0 0.864 c +0 0 l +f +Q +0.141 0.227 0.439 scn +q 1 0 0 1 184.8359 461.7397 cm +0 0 m +-0.366 -0.422 -1.29 -0.183 -1.842 -0.262 c +-5.616 -0.798 -11.203 -3.577 -14.553 -8.414 c +-20.526 -17.037 -19.484 -25.015 -19.142 -27.636 c +-17.325 -41.551 -4.721 -48.305 6.215 -47.597 c +22.827 -46.52 31.839 -32.415 25.896 -16.796 c +27.251 -20.083 27.999 -23.685 27.999 -27.46 c +27.999 -42.931 15.462 -55.468 0 -55.468 c +-15.462 -55.468 -27.999 -42.931 -27.999 -27.46 c +-27.999 -11.999 -15.462 0.539 0 0.539 c +0 0 l +f +Q +0.137 0.22 0.427 scn +q 1 0 0 1 184.8359 461.9951 cm +0 0 m +-0.38 -0.425 -1.322 -0.147 -1.889 -0.211 c +-3.74 -0.417 -10.183 -1.633 -15.334 -8.604 c +-20.12 -15.08 -20.496 -23.225 -19.964 -27.016 c 
+-18.071 -40.504 -7.311 -49.146 6.811 -48.521 c +13.567 -48.222 30.459 -42.962 27.513 -22.495 c +27.832 -24.187 27.999 -25.932 27.999 -27.716 c +27.999 -43.187 15.462 -55.724 0 -55.724 c +-15.462 -55.724 -27.999 -43.187 -27.999 -27.716 c +-27.999 -12.254 -15.462 0.283 0 0.283 c +0 0 l +f +Q +0.133 0.216 0.412 scn +q 1 0 0 1 184.8359 462.186 cm +0 0 m +-0.389 -0.421 -1.333 -0.109 -1.905 -0.156 c +-5.862 -0.48 -11.762 -2.986 -15.367 -7.721 c +-21.456 -15.72 -21.121 -23.999 -20.694 -27.186 c +-18.877 -40.772 -7.134 -50.361 6.621 -49.493 c +16.365 -48.877 27.809 -42.692 27.992 -27.284 c +27.997 -27.491 27.999 -27.699 27.999 -27.907 c +27.999 -43.377 15.462 -55.915 0 -55.915 c +-15.462 -55.915 -27.999 -43.377 -27.999 -27.907 c +-27.999 -12.445 -15.462 0.092 0 0.092 c +0 0 l +f +Q +0.125 0.208 0.396 scn +q 1 0 0 1 184.8359 462.2749 cm +0 0 m +-0.403 -0.423 -1.362 -0.067 -1.945 -0.096 c +-5.653 -0.278 -11.171 -1.795 -16.407 -7.987 c +-19.42 -11.549 -22.258 -18.906 -21.583 -25.522 c +-19.025 -50.599 4.157 -50.427 5.143 -50.408 c +17.394 -50.165 25.848 -43.174 27.755 -31.708 c +25.94 -45.423 14.204 -56.003 0 -56.003 c +-15.462 -56.003 -27.999 -43.466 -27.999 -27.996 c +-27.999 -12.534 -15.462 0.003 0 0.003 c +0 0 l +f +Q +0.122 0.2 0.384 scn +q 1 0 0 1 180.605 461.958 cm +0 0 m +-22.531 -4.551 -23.529 -35.032 -6.329 -46.266 c +6.848 -54.872 25.64 -52.177 31.068 -35.689 c +27.624 -47.255 16.911 -55.687 4.231 -55.687 c +-11.231 -55.687 -23.768 -43.149 -23.768 -27.679 c +-23.768 -13.386 -13.055 -1.592 0.778 0.109 c +0.544 0.077 0.232 0.04 0 0 c +f +Q +0.118 0.192 0.369 scn +q 1 0 0 1 172.812 459.498 cm +0 0 m +-16.566 -9.064 -17.348 -40.201 9.316 -48.722 c +16.64 -51.062 30.628 -50.199 36.986 -37.919 c +32.357 -47.005 22.916 -53.227 12.024 -53.227 c +-3.438 -53.227 -15.975 -40.689 -15.975 -25.219 c +-15.975 -12.683 -7.734 -2.069 3.625 1.499 c +3.1 1.309 2.399 1.057 1.873 0.867 c +1.31 0.61 0.543 0.297 0 0 c +f +Q +0.216 0.345 0.667 scn +q 1 0 0 1 200.7622 436.103 cm +0 0 m +-1.706 2.422 -2.871 5.192 -4.806 7.466 c +-5.581 8.375 -6.334 9.141 -7.046 9.74 c +-7.103 9.788 -12.699 14.577 -12.706 14.929 c +-12.708 15.035 -10.925 16.753 -10.74 16.825 c +-10.058 17.086 -7.544 17.231 -6.875 17.166 c +-5.111 16.992 -2.438 16.241 0.275 13.649 c +3.79 10.293 4.269 6.382 4.332 5.263 c +4.608 0.362 1.816 -1.552 1.125 -1.426 c +0.589 -1.328 0.314 -0.445 0 0 c +f +Q +0.22 0.353 0.682 scn +q 1 0 0 1 200.8965 438.5967 cm +0 0 m +-1.97 2.883 -3.056 4.472 -4.87 6.595 c +-5.072 6.832 -5.375 7.116 -5.591 7.34 c +-5.844 7.601 -6.16 7.969 -6.419 8.224 c +-6.913 8.711 -7.551 9.382 -8.074 9.839 c +-9.724 11.281 -9.908 11.547 -9.911 11.595 c +-9.914 11.655 -8.389 13.369 -8.295 13.411 c +-7.711 13.674 -6.801 13.346 -6.164 13.276 c +-2.962 12.927 -1.156 11.212 -0.476 10.566 c +2.531 7.709 2.783 5.143 2.904 3.909 c +2.938 3.565 2.929 0.875 2.709 0.41 c +2.675 0.337 0.707 -0.875 0.645 -0.861 c +0.33 -0.793 0.182 -0.267 0 0 c +f +Q +0.224 0.361 0.694 scn +q 1 0 0 1 199.9814 442.126 cm +0 0 m +-0.737 0.235 -1.076 1.45 -1.576 2.04 c +-3.148 3.895 -3.148 3.895 -3.897 4.678 c +-4.212 5.008 -4.84 5.354 -4.922 5.803 c +-4.014 7.981 l +-3.953 8.007 -1.427 7.15 0.33 5.083 c +1.631 3.552 2.397 0.755 2.281 0.574 c +1.906 -0.01 0.699 -0.197 0.037 0.011 c +0.026 0.014 0.011 -0.003 0 0 c +f +Q +0.141 0.227 0.439 scn +q 1 0 0 1 196.8853 459.5508 cm +0 0 m +-5.275 2.417 -9.403 2.407 -12.049 2.189 c +-12.049 2.728 l +-6.604 2.728 -1.522 1.173 2.777 -1.517 c +2.232 -1.205 1.506 -0.789 0.961 -0.477 c +0.673 -0.334 0.292 -0.134 0 0 c +f +Q +0.137 0.22 
0.427 scn +q 1 0 0 1 193.0991 461.0352 cm +0 0 m +-3.078 0.794 -4.478 1.111 -8.263 0.96 c +-8.263 1.243 l +-4.866 1.243 -1.61 0.638 1.402 -0.47 c +0.981 -0.329 0.425 -0.126 0 0 c +f +Q +0.133 0.216 0.412 scn +q 1 0 0 1 189.0669 461.958 cm +0 0 m +-2.557 0.263 -2.657 0.273 -4.231 0.228 c +-4.231 0.32 l +-2.431 0.32 -0.671 0.15 1.035 -0.174 c +0.724 -0.122 0.312 -0.042 0 0 c +f +Q +0.125 0.208 0.396 scn +q 1 0 0 1 184.8359 462.2749 cm +0 0 m +0.335 0.003 0.669 -0.002 1.001 -0.014 c +0.701 -0.01 0.211 -0.214 0 0 c +f +Q + endstream endobj 1352 0 obj <> endobj 1340 0 obj <> endobj 1341 0 obj <>/XObject<>>>/Subtype/Form>>stream +q +315.165 487.275 m +315.165 492.275 l +318.477 492.275 321.168 489.593 321.168 486.272 c +321.168 482.96 318.477 480.278 315.165 480.278 c +311.853 480.278 309.171 482.96 309.171 486.272 c +309.171 489.593 311.853 492.275 315.165 492.275 c +315.165 487.275 l +314.621 487.278 314.17 486.83 314.171 486.272 c +314.168 485.727 314.619 485.276 315.165 485.278 c +315.715 485.275 316.172 485.733 316.168 486.272 c +316.17 486.824 315.713 487.279 315.165 487.275 c +W n +q +1 w 4 M 0 j 0 J []0 d +/GS0 gs +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q +Q + endstream endobj 1353 0 obj <> endobj 1354 0 obj <>/ExtGState<>>>/Subtype/Form>>stream +/CS0 cs 0.2 0.325 0.624 scn +/GS0 gs +q 1 0 0 1 315.165 487.2754 cm +0 0 m +0 5 l +3.312 5 6.003 2.318 6.003 -1.003 c +6.003 -4.315 3.312 -6.997 0 -6.997 c +-3.312 -6.997 -5.994 -4.315 -5.994 -1.003 c +-5.994 2.318 -3.312 5 0 5 c +0 0 l +-0.544 0.003 -0.995 -0.445 -0.994 -1.003 c +-0.997 -1.549 -0.546 -2 0 -1.997 c +0.55 -2 1.007 -1.542 1.003 -1.003 c +1.005 -0.451 0.548 0.003 0 0 c +f +Q +q 1 0 0 1 315.165 488.1997 cm +0 0 m +-0.013 -0.041 -0.073 -0.074 -0.082 -0.115 c +-0.11 -0.248 -0.02 -0.425 0 -0.559 c +0 -0.924 l +-0.544 -0.921 -0.995 -1.37 -0.994 -1.927 c +-0.997 -2.473 -0.546 -2.924 0 -2.921 c +0.55 -2.924 1.007 -2.467 1.003 -1.927 c +1.005 -1.375 0.548 -0.921 0 -0.924 c +0 -0.559 l +0.034 -0.556 0.079 -0.552 0.113 -0.549 c +0.142 -0.549 0.183 -0.536 0.209 -0.548 c +1.045 -1.475 l +1.44 -2.16 1.79 -2.114 1.805 -2.112 c +2.058 -2.072 3.187 -0.623 1.901 0.191 c +1.597 0.384 1.274 0.411 1.13 0.396 c +0 0 l +0 4.076 l +3.312 4.076 6.003 1.394 6.003 -1.927 c +6.003 -5.239 3.312 -7.921 0 -7.921 c +-3.312 -7.921 -5.994 -5.239 -5.994 -1.927 c +-5.994 1.394 -3.312 4.076 0 4.076 c +0 0 l +f +Q +0.196 0.318 0.612 scn +q 1 0 0 1 315.165 488.3418 cm +0 0 m +-0.03 -0.092 -0.163 -0.17 -0.184 -0.265 c +-0.221 -0.432 -0.125 -0.677 -0.186 -0.837 c +-0.186 -0.838 -0.235 -0.941 -0.399 -1.048 c +-1.15 -1.539 -1.036 -2.16 -0.983 -2.339 c +-0.8 -2.96 -0.143 -3.262 0.452 -2.998 c +0.652 -2.908 0.791 -2.771 0.873 -2.69 c +1.144 -2.423 1.548 -2.625 1.836 -2.417 c +2.431 -1.985 2.564 -1.604 2.628 -1.42 c +2.85 -0.787 2.46 0.134 1.627 0.371 c +0.853 0.592 0.002 0.008 0 0 c +0 3.934 l +3.312 3.934 6.003 1.251 6.003 -2.069 c +6.003 -5.381 3.312 -8.063 0 -8.063 c +-3.312 -8.063 -5.994 -5.381 -5.994 -2.069 c +-5.994 1.251 -3.312 3.934 0 3.934 c +0 0 l +f +Q +0.192 0.31 0.596 scn +q 1 0 0 1 315.165 488.4824 cm +0 0 m +-0.294 -0.832 -1.287 -1.354 -1.07 -2.414 c +-0.931 -3.09 -0.167 -3.555 0.649 -3.164 c +1.049 -2.972 1.516 -2.957 1.889 -2.695 c +2.243 -2.445 2.625 -2.13 2.762 -1.679 c +3.159 -0.375 2.125 0.264 1.73 0.385 c +0.831 0.662 0.003 0.008 0 0 c +0 3.793 l +3.312 3.793 6.003 1.111 6.003 -2.21 c +6.003 -5.522 3.312 -8.204 0 -8.204 c +-3.312 -8.204 -5.994 -5.522 -5.994 -2.21 c +-5.994 1.111 -3.312 3.793 0 3.793 c +0 0 l +f +Q +0.188 0.302 0.58 scn +q 1 0 0 1 315.165 
488.6216 cm +0 0 m +-0.352 -0.867 -1.375 -1.438 -1.138 -2.566 c +-1.017 -3.142 -0.345 -3.804 0.713 -3.398 c +2.483 -2.719 2.628 -2.663 2.945 -1.783 c +2.951 -1.768 3.406 -0.235 2.053 0.317 c +0.863 0.802 0.004 0.01 0 0 c +0 3.654 l +3.312 3.654 6.003 0.972 6.003 -2.349 c +6.003 -5.661 3.312 -8.343 0 -8.343 c +-3.312 -8.343 -5.994 -5.661 -5.994 -2.349 c +-5.994 0.972 -3.312 3.654 0 3.654 c +0 0 l +f +Q +0.18 0.294 0.569 scn +q 1 0 0 1 315.165 488.7588 cm +0 0 m +-0.192 -0.416 -0.582 -0.691 -0.789 -1.097 c +-0.793 -1.105 -1.082 -1.703 -1.083 -1.706 c +-1.253 -2.111 -1.282 -2.441 -1.181 -2.81 c +-1.118 -3.036 -0.72 -4.135 0.985 -3.564 c +5.022 -2.213 2.486 0.225 2.452 0.247 c +1.442 0.897 0.101 0.219 0 0 c +0 3.517 l +3.312 3.517 6.003 0.834 6.003 -2.486 c +6.003 -5.798 3.312 -8.48 0 -8.48 c +-3.312 -8.48 -5.994 -5.798 -5.994 -2.486 c +-5.994 0.834 -3.312 3.517 0 3.517 c +0 0 l +f +Q +0.176 0.286 0.553 scn +q 1 0 0 1 315.165 488.9116 cm +0 0 m +-0.013 -0.025 -0.053 -0.04 -0.076 -0.057 c +-0.432 -0.327 -0.719 -0.611 -1.164 -1.801 c +-1.234 -1.99 -1.448 -2.564 -1.178 -3.156 c +-0.778 -4.031 0.18 -4.2 1.671 -3.658 c +3.876 -2.856 3.991 -0.38 2.341 0.402 c +1.366 0.864 0.123 0.248 0 0 c +0 3.364 l +3.312 3.364 6.003 0.682 6.003 -2.639 c +6.003 -5.951 3.312 -8.633 0 -8.633 c +-3.312 -8.633 -5.994 -5.951 -5.994 -2.639 c +-5.994 0.682 -3.312 3.364 0 3.364 c +0 0 l +f +Q +0.173 0.278 0.541 scn +q 1 0 0 1 315.165 489.1035 cm +0 0 m +-0.034 -0.068 -0.142 -0.105 -0.202 -0.15 c +-0.734 -0.546 -0.993 -1.253 -1.244 -1.936 c +-1.353 -2.232 -1.496 -2.812 -1.238 -3.374 c +-0.612 -4.739 1.248 -4.146 1.803 -3.932 c +4.138 -3.031 4.265 -0.308 2.51 0.419 c +1.108 1 0.006 0.012 0 0 c +0 3.172 l +3.312 3.172 6.003 0.49 6.003 -2.831 c +6.003 -6.143 3.312 -8.825 0 -8.825 c +-3.312 -8.825 -5.994 -6.143 -5.994 -2.831 c +-5.994 0.49 -3.312 3.172 0 3.172 c +0 0 l +f +Q +0.169 0.275 0.525 scn +q 1 0 0 1 315.165 489.291 cm +0 0 m +-0.037 -0.069 -0.152 -0.103 -0.217 -0.147 c +-0.48 -0.327 -0.918 -0.951 -1.084 -1.383 c +-1.402 -2.209 -1.592 -2.802 -1.342 -3.486 c +-1.138 -4.046 -0.487 -4.899 1.578 -4.322 c +4.081 -3.623 4.628 -0.763 2.992 0.316 c +1.701 1.167 0.079 0.149 0 0 c +0 2.984 l +3.312 2.984 6.003 0.302 6.003 -3.019 c +6.003 -6.331 3.312 -9.013 0 -9.013 c +-3.312 -9.013 -5.994 -6.331 -5.994 -3.019 c +-5.994 0.302 -3.312 2.984 0 2.984 c +0 0 l +f +Q +0.165 0.267 0.51 scn +q 1 0 0 1 315.165 489.4751 cm +0 0 m +-0.175 -0.316 -0.541 -0.436 -0.745 -0.721 c +-1.04 -1.133 -1.134 -1.367 -1.233 -1.614 c +-1.283 -1.739 -1.712 -2.854 -1.439 -3.598 c +-0.844 -5.219 1.105 -4.774 1.689 -4.6 c +4.424 -3.78 5.002 -0.76 3.22 0.385 c +1.946 1.202 0.234 0.424 0 0 c +0 2.8 l +3.312 2.8 6.003 0.118 6.003 -3.203 c +6.003 -6.515 3.312 -9.197 0 -9.197 c +-3.312 -9.197 -5.994 -6.515 -5.994 -3.203 c +-5.994 0.118 -3.312 2.8 0 2.8 c +0 0 l +f +Q +0.161 0.259 0.498 scn +q 1 0 0 1 315.165 489.7065 cm +0 0 m +-0.06 -0.132 -0.265 -0.21 -0.385 -0.291 c +-0.751 -0.537 -1.207 -1.436 -1.319 -1.735 c +-1.402 -1.96 -1.802 -3.124 -1.467 -3.945 c +-0.712 -5.795 1.956 -4.866 1.982 -4.855 c +5.299 -3.58 5.174 -0.371 3.116 0.573 c +1.411 1.355 0.007 0.017 0 0 c +0 2.569 l +3.312 2.569 6.003 -0.113 6.003 -3.434 c +6.003 -6.746 3.312 -9.428 0 -9.428 c +-3.312 -9.428 -5.994 -6.746 -5.994 -3.434 c +-5.994 -0.113 -3.312 2.569 0 2.569 c +0 0 l +f +Q +0.153 0.251 0.482 scn +q 1 0 0 1 315.165 489.9888 cm +0 0 m +-0.04 -0.083 -0.167 -0.135 -0.239 -0.193 c +-0.739 -0.597 -1.12 -1.159 -1.404 -1.909 c +-1.678 -2.633 -1.751 -3.637 -1.568 -4.146 c +-0.856 -6.124 1.88 
-5.306 1.908 -5.297 c +5.872 -3.969 5.347 -0.495 3.422 0.519 c +1.628 1.464 0.058 0.122 0 0 c +0 2.287 l +3.312 2.287 6.003 -0.396 6.003 -3.716 c +6.003 -7.028 3.312 -9.71 0 -9.71 c +-3.312 -9.71 -5.994 -7.028 -5.994 -3.716 c +-5.994 -0.396 -3.312 2.287 0 2.287 c +0 0 l +f +Q +0.149 0.243 0.467 scn +q 1 0 0 1 315.165 490.2749 cm +0 0 m +-0.045 -0.106 -0.209 -0.167 -0.302 -0.235 c +-0.485 -0.372 -1.122 -0.935 -1.618 -2.443 c +-1.723 -2.761 -1.897 -3.881 -1.538 -4.677 c +-1.024 -5.812 0.792 -6.206 2.512 -5.554 c +6.336 -4.105 5.75 -0.288 3.153 0.723 c +1.353 1.423 0.007 0.017 0 0 c +0 2 l +3.312 2 6.003 -0.682 6.003 -4.002 c +6.003 -7.314 3.312 -9.997 0 -9.997 c +-3.312 -9.997 -5.994 -7.314 -5.994 -4.002 c +-5.994 -0.682 -3.312 2 0 2 c +0 0 l +f +Q +0.145 0.235 0.455 scn +q 1 0 0 1 315.165 490.6582 cm +0 0 m +-0.163 -0.361 -0.541 -0.515 -0.777 -0.805 c +-0.945 -1.011 -1.046 -1.259 -1.201 -1.474 c +-1.269 -1.568 -1.409 -1.763 -1.714 -2.734 c +-2.048 -3.798 -1.784 -4.665 -1.597 -5.087 c +-1.005 -6.421 1.188 -6.695 2.68 -6.041 c +8.251 -3.594 4.333 0.165 2.965 0.677 c +1.252 1.319 0.007 0.016 0 0 c +0 1.617 l +3.312 1.617 6.003 -1.065 6.003 -4.386 c +6.003 -7.698 3.312 -10.38 0 -10.38 c +-3.312 -10.38 -5.994 -7.698 -5.994 -4.386 c +-5.994 -1.065 -3.312 1.617 0 1.617 c +0 0 l +f +Q +0.141 0.227 0.439 scn +q 1 0 0 1 315.165 491.083 cm +0 0 m +-0.128 -0.296 -0.441 -0.404 -0.637 -0.631 c +-0.787 -0.804 -0.891 -1.009 -1.028 -1.191 c +-1.149 -1.351 -1.614 -2.354 -1.616 -2.362 c +-2.165 -3.906 -2.034 -4.643 -1.834 -5.161 c +-0.959 -7.42 1.653 -7.023 2.585 -6.679 c +3.892 -6.198 6.61 -5.196 5.552 -2.522 c +5.843 -3.227 6.003 -4 6.003 -4.811 c +6.003 -8.123 3.312 -10.805 0 -10.805 c +-3.312 -10.805 -5.994 -8.123 -5.994 -4.811 c +-5.994 -1.49 -3.312 1.192 0 1.192 c +0 0 l +f +Q +0.137 0.22 0.427 scn +q 1 0 0 1 315.165 491.5479 cm +0 0 m +-0.037 -0.078 -0.154 -0.129 -0.22 -0.185 c +-1.232 -1.033 -1.806 -2.828 -1.83 -2.904 c +-2.22 -4.142 -2.232 -5.159 -1.867 -5.927 c +-0.58 -8.633 3.354 -7.149 3.394 -7.134 c +4.44 -6.729 6.193 -6.052 5.898 -4.154 c +5.967 -4.518 6.003 -4.892 6.003 -5.275 c +6.003 -8.587 3.312 -11.27 0 -11.27 c +-3.312 -11.27 -5.994 -8.587 -5.994 -5.275 c +-5.994 -1.955 -3.312 0.728 0 0.728 c +0 0 l +f +Q +0.133 0.216 0.412 scn +q 1 0 0 1 315.165 491.9907 cm +0 0 m +-0.038 -0.067 -0.155 -0.091 -0.221 -0.13 c +-1.146 -0.672 -1.618 -2.109 -1.997 -3.263 c +-2.003 -3.281 -2.538 -5.073 -2.065 -6.285 c +-1.01 -8.991 2.93 -7.989 3.097 -7.945 c +4.317 -7.624 5.989 -7.184 6.001 -5.584 c +6.002 -5.628 6.003 -5.673 6.003 -5.718 c +6.003 -9.03 3.312 -11.712 0 -11.712 c +-3.312 -11.712 -5.994 -9.03 -5.994 -5.718 c +-5.994 -2.397 -3.312 0.285 0 0.285 c +0 0 l +f +Q +0.125 0.208 0.396 scn +q 1 0 0 1 315.165 492.2632 cm +0 0 m +-0.043 -0.052 -0.154 -0.029 -0.221 -0.042 c +-0.695 -0.132 -1.346 -0.69 -1.729 -1.732 c +-2.601 -4.102 -2.422 -5.693 -2.305 -6.268 c +-1.773 -8.88 1.72 -8.614 1.755 -8.61 c +4.215 -8.37 5.7 -8.226 5.951 -6.783 c +5.562 -9.72 3.043 -11.985 0 -11.985 c +-3.312 -11.985 -5.994 -9.303 -5.994 -5.991 c +-5.994 -2.67 -3.312 0.012 0 0.012 c +0 0 l +f +Q +0.122 0.2 0.384 scn +q 1 0 0 1 314.2603 492.1987 cm +0 0 m +-1.727 -0.587 -1.739 -4.385 -1.738 -4.546 c +-1.734 -6.483 -1.193 -7.61 0.017 -8.2 c +1.798 -9.069 6.085 -9.361 6.66 -7.637 c +5.921 -10.115 3.622 -11.92 0.905 -11.92 c +-2.407 -11.92 -5.089 -9.238 -5.089 -5.926 c +-5.089 -2.857 -2.798 -0.333 0.165 0.032 c +0.115 0.022 0.048 0.013 0 0 c +f +Q +0.118 0.192 0.369 scn +q 1 0 0 1 312.9341 491.7764 cm +0 0 m +-1.086 -0.961 -0.817 -4.853 
-0.535 -5.61 c +0.431 -8.208 2.403 -8.585 3.207 -8.626 c +4.27 -8.681 5.298 -9.068 6.378 -8.967 c +6.691 -8.938 7.264 -8.802 7.584 -8.218 c +6.592 -10.165 4.566 -11.498 2.231 -11.498 c +-1.081 -11.498 -3.763 -8.816 -3.763 -5.504 c +-3.763 -2.812 -2 -0.54 0.432 0.225 c +0.372 0.2 0.292 0.168 0.231 0.144 c +0.161 0.102 0.062 0.054 0 0 c +f +Q +0.204 0.333 0.639 scn +q 1 0 0 1 316.7451 486.4531 cm +0 0 m +-0.091 0.065 -0.091 0.065 -0.52 0.593 c +-0.662 0.769 -0.836 0.916 -0.974 1.096 c +-1.233 1.432 -1.232 1.599 -1.232 1.6 c +-1.226 1.62 -0.028 2.446 0.591 1.368 c +1.026 0.611 0.245 -0.132 0.233 -0.134 c +0.153 -0.145 0.065 -0.047 0 0 c +f +Q +0.141 0.227 0.439 scn +q 1 0 0 1 317.7354 491.6665 cm +0 0 m +-1.294 0.462 -2.254 -0.325 -2.57 -0.583 c +-2.57 0.609 l +-1.403 0.609 -0.313 0.276 0.609 -0.301 c +0.52 -0.251 0.4 -0.185 0.31 -0.134 c +0.217 -0.094 0.095 -0.034 0 0 c +f +Q +0.208 0.337 0.655 scn +q 1 0 0 1 316.7852 486.708 cm +0 0 m +-0.336 0.357 l +-0.473 0.528 -0.628 0.683 -0.758 0.858 c +-0.977 1.152 -1.021 1.271 -1.02 1.277 c +-1.015 1.292 -0.028 1.706 0.328 0.955 c +0.588 0.409 0.173 -0.121 0.167 -0.122 c +0.106 -0.133 0.047 -0.04 0 0 c +f +Q +0.137 0.22 0.427 scn +q 1 0 0 1 316.9321 491.998 cm +0 0 m +-0.649 0.12 -1.161 -0.01 -1.767 -0.45 c +-1.767 0.277 l +-1.039 0.277 -0.34 0.147 0.306 -0.09 c +0.223 -0.065 0.111 -0.031 0.028 -0.006 c +0.02 -0.004 0.008 -0.001 0 0 c +f +Q +0.216 0.345 0.667 scn +q 1 0 0 1 316.7891 486.9756 cm +0 0 m +-0.004 0.004 -0.536 0.578 -0.712 0.865 c +-0.569 0.878 -0.483 0.886 -0.265 0.812 c +-0.18 0.784 -0.084 0.701 -0.026 0.633 c +0.032 0.564 0.089 0.451 0.102 0.362 c +0.133 0.142 0.096 0.015 0.073 -0.061 c +0.051 -0.042 0.021 -0.02 0 0 c +f +Q +0.133 0.216 0.412 scn +q 1 0 0 1 316.0703 492.1978 cm +0 0 m +-0.314 -0.005 -0.486 -0.009 -0.905 -0.207 c +-0.905 0.078 l +-0.519 0.078 -0.142 0.041 0.224 -0.028 c +0.157 -0.02 0.067 -0.003 0 0 c +f +Q +0.125 0.208 0.396 scn +q 1 0 0 1 315.165 492.2632 cm +0 0 m +0 0.012 l +0.072 0.012 0.144 0.011 0.215 0.008 c +0.15 0.006 0.046 -0.044 0 0 c +f +Q + endstream endobj 1355 0 obj <> endobj 1338 0 obj <> endobj 1339 0 obj <>/XObject<>>>/Subtype/Form>>stream +q +323.67 445.774 m +323.67 461.774 l +339.132 461.774 351.669 449.237 351.669 433.775 c +351.669 418.313 339.132 405.776 323.67 405.776 c +308.199 405.776 295.671 418.313 295.671 433.775 c +295.671 449.237 308.199 461.774 323.67 461.774 c +323.67 445.774 l +317.055 445.784 311.661 440.386 311.671 433.775 c +311.661 427.165 317.055 421.767 323.67 421.776 c +330.277 421.766 335.68 427.168 335.669 433.775 c +335.68 440.383 330.277 445.785 323.67 445.774 c +W n +q +/GS0 gs +0 Tc 0 Tw 0 Ts 100 Tz 0 Tr /Fm0 Do +Q +Q + endstream endobj 1356 0 obj <> endobj 1357 0 obj <>/ExtGState<>>>/Subtype/Form>>stream +/CS0 cs 0.208 0.337 0.655 scn +/GS0 gs +q 1 0 0 1 323.6699 445.7744 cm +0 0 m +0 16 l +15.462 16 27.999 3.463 27.999 -11.999 c +27.999 -27.461 15.462 -39.998 0 -39.998 c +-15.471 -39.998 -27.999 -27.461 -27.999 -11.999 c +-27.999 3.463 -15.471 16 0 16 c +0 0 l +-6.615 0.009 -12.009 -5.389 -11.999 -11.999 c +-12.009 -18.609 -6.615 -24.007 0 -23.998 c +6.607 -24.009 12.01 -18.606 11.999 -11.999 c +12.01 -5.392 6.607 0.011 0 0 c +f +Q +q 1 0 0 1 323.6699 450.936 cm +0 0 m +0 -0.46 l +0 -5.162 l +-6.615 -5.152 -12.009 -10.55 -11.999 -17.161 c +-12.009 -23.771 -6.615 -29.169 0 -29.16 c +6.607 -29.17 12.01 -23.768 11.999 -17.161 c +12.01 -10.553 6.607 -5.151 0 -5.162 c +0 -0.46 l +0.316 -0.687 0.738 -0.99 1.054 -1.216 c +3.814 -3.66 7.459 -4.866 10 -7.615 c +12.018 -9.799 
[Omitted: raw PDF/PostScript content-stream data of an embedded Adobe Illustrator graphic (%%Title: type_tags.ai, %%Creator: Adobe Illustrator(R) 13.0, AI8_CreatorVersion 15.0.0). The span contains only binary vector-drawing operators, object/stream markers, and hex thumbnail bytes; no textual content is recoverable.]
%FF7D042D050B59FD06FF340B2E0B0B7DFD21FFA82E052E0C2F2EAFFFFFFF %5A0D352F352F2F0684FD18FF4B44204B2044204B2044204B2076FD13FFAF %58052D052D2E8484A884832E342D0B58FD23FF7DFD042E35A8FFFFFFAFAF %35352F352E59AFFD18FF76204B444B444B444B444B444476FD07FFA8FD0C %FF842E042D052D050B050B052D050B2EFD24FFAF282E062E0684FD05FF59 %0C352E2E59FD19FF4B44204B2044204B2044204B2076FFFFFF7D7DFF52A8 %7D52FF7D52A852FF527DFFFFFFA858050B052D0B2D052E050B52FD26FFA8 %05FD042EAFFD04FF592F2E2E53FD1AFF76204B204B444B204B444B204476 %FFFFFF52A87D52A8FF52A8277DA87DA85252FD04FFA87D2D0B0405040505 %2E7DFD28FF7D052E062E06597D84592F060C2EA9FD1AFF4B442044204B20 %44204B20442076FFFFFFA8275252527D52A85227FF52A85227FD07FF847D %597D59AFFD2BFFA82E2E062E062E062E062E59FD1CFF76204B444B444B44 %4B444B444476FFFFFF7DA8FF7DA8FF7DFFFFA8FFA8FFFFA8FD3AFF592E05 %2E062E065984FD1DFF4B442044204B2044204B20442076FD4FFFA8A8A8FD %20FF76204B444B204B444B204B444476FD72FF4B44204B2044204B204420 %4B2076FD72FF76204B444B204B444B204B444476FD72FF5244204B444B20 %4B444B204B2076FDFCFFFDFCFFFDFCFFFDFCFFFDFCFFFD2FFFA82E0BA8FD %7BFFA82D58830BA8FD2EFFAFFD4CFF2DFF830B59FD2CFFAF353584FD4BFF %2E2E5905A8FD2CFF067E840D84FD4AFFA87D59A8FD2DFF2FA8AF0C84FD7B %FF0C59840C84FD7BFFAF3559A8FD7EFFAFFD42FFFF %%EndData endstream endobj 1388 0 obj <>stream +%AI12_CompressedDataxu?dF-QqژUYQڊ@u#…;~< lJ#H6̈ e/~r鷏?^ B^͛O>E?Տ/J}U}/O~zQ>y/kӻw?я_.o{Օo>֧|˟~Q,Ouӧw^zq.~Ne.i׿zpR_6R_:Xc^l^׹}z㻏|Ç7O?.~u}>yOo^CZҟWuOoާW«-_޿~XwۇëO߼OoXtxR?o>ԏU?[o_m]Ǐk~__oTG_iǶ_?MePwx:Յ2^dmS9^2]uN8:m>?o=׾z }./X/cy1nlӛyZ*?z(~x:.|cu{w~I[>ɮY.i(Ete݊$وlmo/k/_Oj^l_jݷ~{?F67?O`x/޽{uVzCաk{.?}st\㧧<^~xQWӗ7/_~姏_ob{Ƀnu|/klK6k.mǴc6^:untd}>>k|~~j/=]~׾z|||Y减ȷRo?ye]ûW}S{=4^]|}Ƿ>C݋wO_=~uyuwo>~ݧ"˷޿nGۇ/?/x?|VYGo +W8~Rۧzckٞ~|<|θ]>y~Ƿ事u4ljm׿-$vZݧOIHϭ- |M}W2@qۇQ_wO>!rJC.F]|;5??gً~a/E+~7{T=I_#>|/r__R;>]ӻzzS?\|~},]ӗPO 1}{IN?Y3ٿ|+U66F0/kcleWMm}UHG}Ah xΗƩE +޼zW7^}zY1nn~?.뇋S9J9.ʱ]>|SWg~y-c}$#| S>Ưb?-/_?=O}.y/uwxg;; ߩvR_PsC=kD3;sR𶍧䗂oC~doT޿Q>o$ w<6?^_ElpLfO~].ߥZ_O?ݣɖ\>}~U#4؟24R pp= }J)c\zk/G*>n8ek726U}\x? SMyZ" jnnyf:D|vrZeZb׼zYnuX::뺭zWzޮwz [mm֚u۱]mקf~=ƽ~}ޗ}k~W~wq/{(xL\Q؎ҚWq}m}qUwUEGW\ҕu}'Ս?nq{y\(;Z_ZoJJڣUp={{[skޕݵw]rtr9E7;a)7gUw]ou x;3^{{ɻ޻=u?{St}=.Pz5Ӿ/Sw񾔞}iQ9דvs yr(ӫ֯g=}[ۓvtˁm{1L?A.}}Jݺ[;ܻx;?q%=xLi<ڈ1sbX~aƢD|a#OutIVNuzfKFWcj騃Aաr21j^w[Vڹ{=o+et\oӯo/oo+:|J~r?]WVumu /w*)p{{w{{{s{]~Hnvn +^3L7c=w׷73]OϷO9:^POw Lrԯ/ԯ6/X:`_~ C=."+S{=ֺz\vWz]cd纳ǺˇJ㻺oj'\ծkyN/޺}v]{^\j_NG2uV\׾>joߗʜ e +SeU%{+k3s9c0ױt[Gu=:J_חpQ{uڇta=q(yoQ{ћ?刴ެ}9_՗kr"Ee( K8՗iSV<\5i˘dLdLsdLr#?"';Nwr›G$&="C#kϜ;\(gC9Jt&ڕ?C_Lnk!M:7O/>잺|&W!|Շ IIgQOW#b]S?2nq{\㱝?NCwBhQ͒$^`?>WA#QH$q98BcXcuWhnNΙr48glXg(:Nou.ڝth#Nڃ>UTώFݩ "g9ffL.9F8ךzޑ3{C\($I(ROEr2ӑtI9v$g'9?JQ{9[%=m]kvyKN_$ء8liJRou8>]顬u9oǨ| +aý,wXr򧽋6-ҭZ[<45C%8r\kqe@Kv,ݱǒQKzh@b鏄%'ς,Lr)E)Zwrӽx\Ic^,OV" o:{cR= O׵U[!H9V:!v +XN$S$RGPK$Ow^{V;}k+})}g䵾~GYj߿w\+սcyT X&e2E]KZb5ͯwWn9Q;_=n4~~cst?=J&Ơ?ucϏPΟy~O?}~DBʩĹSK\l4;KqaNmƘ^=ŅY;fu$ +U4;ɹH.&'fZ$3fT3?kG,n-l2pg]E|mk1g+S<?)ek*jRVyzZ>ʼVt-n}H߫(5y}1M)o޽{xk}vwɟ={}(q}|[[bgS7W_EDL%SDУ+*UIqݜP9aK|q-G4Ae6'=JW?B혴iSz6jg|^St[@m Q.tgk^<6KS +feF?$ՎT9XfϞƳCmЍOüI:7`yb`.vXe]]kO!Uz G?:~[&v4"qBi"Iɞf|~A=d#Ͻ9g&湉mv;9I%)D?z)M56pffqN~ڼh6'E) tLb*u$,AԷbMʋYd!K }š=mS:/m/8k(}0:u[.ӴR)Y4ygiCnUi0.nl)bM6QMw4ʢE~Qk3ֶw[/3O*jiOVc=Vkfd55귐 > z~V@ ү3.Eu=0KN+O}]S }f}\tMEp]C ]ߦ~I^S9RöatȌ2i}Kq9A!0ױoE[UXz~kv%uUcϾ.C ).1F$ڞC˺u\, 4HWC͉;ɲ> rzt.IWH'4I{Zz-rXA:yɸ4i{H[j۰ғNsIdԓN<4N+t=:tuYn#]mHAv[ +-C#]mѓ.[qk[VLh_t]Nߏtu٤wtKOl? 
ݲtJO:Ytږvv/Q'WcNУN9 A4G,+SCfkڣNM u^Ɔ:zP';r?KC}QWҡSp u)5@ңNVj ufAekޣN uzL uʢC8괽7I{?&&ttE˺]:it˦uu=PWۣE.)@:i I{ݏt +rڶtQr.({iQrq," 7ۦ#{kރqJW-SQOTY: WUW5;eGMq^wj{楃\]xm7@q\;eS!WܾfA.ǽQz;ɲ裔v|L9YVä,?UoRNӴueōJ 4WxPNr 7R:2 =r6v)de('{GeW@=$,lӁ'acy I{Z瀜 e:rb[v9:ifbhZ3VFh 7WYW}EJ\}e]*휸y*rZ]*ϟ'%JN\u}⪋R*휸j,qe9qvN\哝's;-%,q>q.qջGk]yxe]:}Zqqw̉Tĵ%2slIW]W=nR,WaMN\Wi.qg,ʉs*uU9qĵ.WiU89y6'W]WiU8&%q.qg,ˉs:WYWq]N\}ʲmy[U9quUU&%GWٓ9q͉kUFRJ\ǹO\u%:8S*휸ʀ>O\ǩO\ HW]WLK\C}⪰JS*%eyʲ+R*ĵ.WiĵWYWyO\'zUg.Ks>qE9qvJ\uu:풷- p vI\C.E:7I\#PIt$q@ݦi +m..q$o5l)6[K&lTH6'B:itMA:9G;tF ݦy5om5o;IHIK:lA].xtu&%P'ð;$q@ݡP7<\.s.s32k̵ 32tk)]*UU)s-y*rZ]*Uəkچ<\K3W}M\}겔J;g<\eGUS:;'I~5:*y.h9tZ.? ݮk6ZȦ5mtUs +䨙횵tf-5ktf3PI7h:" qжmA2aAR׽它{v䮃a sWߺK_oNL&/m2y_/~㋿/'&M?e}Gab[c3Y blqslEދ6벧5ל$C}Cp=%-ׯG˴)_-n%}`V^^,TR`_nlu,_Md_ҮU/)xL_ieU4{}Ob ELwNҔaӂmsۊ*mwʰ5Nj-Զe4% +֞d~b߭^\L|r`[v&2B#>NÂY -`[on2`[bLKM,ؖ,-el˂ml#g#vhXp[N^iZ],hc m*m9ȤGi6gU ݮX-+.lvilBK" ˧maYm Ѷeh[Nj-6oZmsB$ѶsѶ,S:ۨcm[p[q" 91pٻh[jmzEh[-h.=vmדBmYڰp[6Qȣ2(۫;m<Ѷ:kDm˲(V#vDܦ}X*Ѷ`hp[I(HMmiױGE_MFYQQ<=FqbG:%0*6u+rާQfQ9쏹TT6ʲak,}cRѾ7 +KT0-LeciebLk0W0{9}kS=`*I4c{p}waQå@w^A¦@`CkToR=z pؑT@F\:}R,p@c6NdMFUTjkkQ}u =,Q]%EշYj北ƥ)kPTu0η1O1t0*n^zRh(ZDJL=Dwm *m?Q ܁1wa`@8+ǩ#,ӫJ8#LƸĘ6-_^o*T[;&fgś{㛶ohXoxMz](M} +6e I~t"&_pJMwp (Dķ]=&kDՑh{PѶ|eN&j@e#&7cFnpn$ ږI>nuQ=lA7xS{&d4tӶJ7QG;udZJMi:oҖL֙z6oߙoޖXSm۲&园sd5GM]aƼ^7McMFȱD.sz/t#o:QM,kNDi9:_XpHNۆ .3Qi{ )ƽ#Tp©;LSr[2dN8i +T,kG8Ѻ7M ͩqbBv ɲbBNn)xn퀜HΞ +9k} +^wʩȽTP)P.0'驘}65Ή;Y@VVkN:ut- ҩʻDv)ցN|otbA'2%Ή[pNd}XsN)D58'apN*Dm1@7$^qͤF:- Љ= KN4TtlT6tje[)'@:}5 +:m{Eӷ ؑNJ:i#ŰaNG޴u9NUm:Љ}cNr,:=&, 2ˑtڶ*EؑN[ -HT~F:9nsV'SAӂ ,+krB:>['[V=n}*lUk ttUkn9r[Nګ+A:0>TS.HvHC:1aOWՠeJ:n%@gZF{[:0#a#N.: t2bsUtC#>Nft.-]{5fzkWm*x3Nk+-aVDHWCKXgLSi.KO:Oǖ6 ]}ʪm)N^cɢ6sKYUn9Ww6U;yd KX9wSشlYƌ4ҧ2'+(,GY'W2tU t^nUCY/rI'n!sŲtǜ^{^ǜ\PۗؗԂtÜ0'W0'E s9iǜl׮V.r2?n]{ ^}rzWӭQnSN=rź2(WG`F9).rrymp9x% RNTkPn#Rʉ' ǜޓUNx^;s[`n>ɜ!yN8W:[8snN9望HA&*,ι49NuppNT眸*sҞ*3| 6sҶӷn/ͱƒ)R*}.A9ُ$vgQ H10o <+r6O9 +Y,_nv0'`ٷ9i[0W\ @9_98o+ u^š97ä9iknSx"5Y%گ^F:QeX2WSŧKv[]tj[6@'˔:5Χv)W>Z.@'߸6:@'r| +ky Љ+2D,Uҍ3zn2;҉7Љ|-q{K~9GA'7DGloCtb{s\)@'2'ݨmc_eK9լs"n%>)0'W dZZpI{XUڲ Kqt޺U-q1t}*;r>ZiRYDNF6uTt}*s_ As2[ )8`[ުm#tpN9muŜ,}>'?+x\YacNښw5#TԉYE礁:9&זn2svn` +fҮA; :xF +{Z޺ɴ[:eem9cťsS'Gf +ɼyl";-o:=>;6 NKC8/BxVanI=u5=Oƺ] 3Nj|V05f֩>Vm%:ٽaY'55tfq:yk˂ u]ުr>Ozu;r[*9>'?8m=H%(脯[iJd[:^'FS]ϛϚ r)VQ Y]pT@\WE@'(<tu!SzM]pl?s.|A Ȟr%$Nu%& rL q2aVޅ B eװA2A^ `/t̽6$tȇtG_A}MI 88$y yl!@!dօrY˺B*B*SYAdA(١:%;=Z&ҕ5T*v/bBHT\*v.DŎmPs#+ى<ٹkv.D΅ٹ\Tօ\<B"s#$<7B20B~!@B2B* n>H`6B `H!aB`X!gNe(BY)ycĵ=.<'Ȗ-攫S"ϖu Dڇ NiJ*,5GfEeZLx[C;a9?*lsmeZʶL]V"?߷ƧRD&^M{ &gZ%lS7}JdRվaȤm +g*6Ym'FǨgGEҥgJ29_"b? 
cL!iKHRUpDCV"vsruÚimhVJ D^ZQڝevŀ ։θ۝gON-։y DvcDuZ'ڝu(]79'{s>%j4s,;"mcs;d's"hw;΁21Z':ilwJBr$ +u:yH|-\;I9Y>T;eKlo։Lt18g]Z'.{sY۽:ѹ?։LPՊu7ԟVsD(o`~eiocsl nQN.tu$0I`ԥQwNG98 05$XIf+'RWNt,u$XIԕL]9 sԝ;'$aqM1N2M1N׼8''bdbR7N /jdb@RWN''bdR7N''`4mKQ0qF1N2F1N( u$S@QQ(I`ԍfE8 iFQ3NnD8 I@ԕ)'nC8 qr?d-n8N I& 3!$oMI‰ onMI xC7od@t඘oٶoM@IB⺉mq-nd-Ku6m趸m鶸mݖAݖ5n`Ihvk6t[6n&7M2ݰM t6tn& $ mpx|C6o& $mmްM2M |[\7|[\7q-nm̷mmM2M [6Ʉ[6p&nq$nYnq->p&p&N8dl lM`Ib 5r&r&9\ k(jPMI P$c̡9\ :\HkpM\5qa:L :Ln:7M20M 4ɜ4s&p$С80M24t&$Ka:L :L@١I 4t&t&ada@:THjIj4t&t&M@iAi0MI C5t&t&a$IhM2M  C4I3tx&$IgLL  C3ɤC3th&$ҡ@:eE2 ҹe+-[ ҹes$HI9Ls9494 2ɜ2sX&$a:, :,@f\3I2sX&p28 3$0I9Ls.\2ɘs$0I`s.9wLsd̹b3$(IG9WLr\1(IP\ 1/B21 ̙c) `J9 L9 (a0L20L 0ɜ0s&p$sΡ80L20L 0ɜs&p$sΥml[9L 0ɔ0r&P$sιbsŤÜ+&`ނ97L:̹as$0I97Lsn00I`Das$s &q9 9  C0ɠC0t&$: :~ /IsΡ: :@^K2Kz /ɠ/t%$I]K2.t%$a: :@]K2Kz$8].ɘ s%`$c̥mln k":@]A]KvIv C/t%t%N:KvIv sada98]9v C/s% s%`ac$c1\L.ɐC.r%@8[pK2\.qᖀ8ܒ8ZPK2PK`j C-ɌC-q%0$38ܒ8g% $#a8̒8Y0K2,q% $#ġ80K20K`f ,ɐ +r%@$CȥmlnX!X0KfIf ,r%r%@ȡdȕV"a@9̒L9(Y0K20Kf ,ɘ,s%p$qa9̒sada8KbI&b C,t% ux%dԹX+ux% uh%ԡdԡ:PVQVJ@^IF^ +ux% uh%:J@VIFV * ԹUҡέ@ИsC[%:JuCPs$XZ sc[%:Junts#$XVIέun66?ٌ@Y%AҒ [%A:Jtntsҡ@:L:JtX%$a: :@UJ2JZ C+Isa9s΅UAV.PRIf]H%.hRI]8%.NI&^H%μpJ^8%z(%P/JI^(%/ȇRJ SSq;%SI"]%NJt!$҅P IFI"J C)t(%t(%IQIQ0J FI&F sadС@: ;%醈B +:wJ2\) d#)%\) йRs$΍%:WJ:йRs$HJIG:At\)HRPJ NI&N )t%,G|SJ J 3$sΡ8B)I 9Ns.ĹpJs8B)I AJIf$ЅR AR8Ns.\8%=0JJ%99> 'ɘC(s%`$ca99WJs\)(JIPΕ(FIG97JrnJM&B/( ̙QQΕ&)ɔ)r8%`$a̡998RPJ0JF C)ɜC)s(%p$sΡ9̹PJs8pJpJr8)i )JS.ĹPJs(%`$c[0S1SC)s(%s(%`̡d̡90R9SpJ\yQ8Bs.]% t!'qЅO@B.]% t!8(qЅQ@J C)t(%t(%AQAQ0JFIF (t%t(%dҹSpJNIJ C)t(%t(%С$a:@RAJ C)sd̡90R1Jc.\8%s8)qЅS@J.](% t8B)q҅RHJANI]8%:\(%s8B)q΅R8R 1JI\8%pJst a.\%9JB(1ȅO'I B3.8G\%0JPJ`J C)ɌC)q(%88Q0J20J`F (ɌC)q88SpJPJ`J C)Ɍsġ88QPJ2PJ`J C)ɐCr(%@$C.\(%pJ)qȅS A.\(%PJB)qȅR $APȅS $Q.\(%PJB)q̅R $a.\(%ιPJ)q΅S \%s!8(1΅PHB.|']$ u!8B(qԅPPP QFIBJ C)u(%u(%QQQQ0J@FIFF (u%u(%ΝPSQSpJudԹRʆ:WJ:ԹRs$X7 CFI΍`+%\) ֹRsc :WJutC)u(%Ν:PN +-_9ڗ٧RPJ2PJt8B)IC)qЅS $.](%PJB)qЅR $.](%PJpJs8)8׌&8QbkBIb]J v'q5$ %λ&8Pׄg^3Jz(IC)zPJ2B)|80J2(}a0J20J"s$<7J8ϕ4s )d֜e)ɹ)%ʺR+%]*kJIdDΕ\s$JvnDΕdJI,+%]uعR;WJJ ;*v8%dSBNI١PC)fRkv(%(%Dy(%9C)!)!)I ה_SJR ה8C)q6$!9%8SN6J.QVJdYPJ/S qJ/9%R?5 {.ӛDId08#ipHk]sN\X9AѮyS2UpJF:攌 NTǵI%^UL*8TRǶ)8攔A2N`T8%'+qJNDn d쾊˴)li8%LHA*d~֤9斥I%Tw +D5ksJj[0 Nڏ&;{TRlncR\.kJg!6ӄuT*!H=HL*/(_GL >RImkhϱ9%2xXEFO["R2ODRT"R^)M;%zfY29pJd ~uJcxklzo5i JnSR^^ۗJۄdRƣc60iJtS";@|xe_G}߷D^ жRUҡԵ@k%RJ:W(u$PbIRK.K],,u$j^IԽx%dSXXPKx%x%d)pc5jx%NJQ+qW8 $4i%KNC,18M+I4 iX0 $4i%KLC,q"8KKJK@iڂ%%RPWQXJK@%%NRHWQWJJt(5$HbIŒ&H], Xҁ@^IԼG+ WrQ̒L9(ZpK2pK%0$3\KpK@n s$#.G\%pK-qą[C.I \b-I|-q[2ČsK-q[x $-[%NpKB.q…\ $.#\%8ܒDKp!8KB.q…\R}G;}B.q…\ $1ƥm(K2Kq%0$3\K2Kq%0$38ܒ8 \K2Kr C.ɔC.qʹ[pK2pKn -9vIz C/r%r%P!d!:@`@_K~I&]%cĴ%c%t8/q҅`P.c]% v8/qڅ_h_ a~I]%Kї%u!8B0qօ`X.g]% v!8B0q؅`h` _Z- ؅` aI K۰p$a_K2L C0ɴC0q@;L;h_K2K &$!; ;~ /ɴ/v%v&v(&d@;$hdidLI 1v8&vᘌ11&ch.]H&v!8B21څchd$؅c d@;wLI wW3!8pLI C2vH&vH&ɤ v2kC2vH&$%]IG;L;&AyKK&A;Lv*tsֹcpL2pL` 1ɬ1uH&$!;$ ;$ s$:gsˤck&BnsϤC{&:Mu.ts$PIE:Luz:<P 3ɤ3tx&$!8L2L ҇u@1( $.^*H?9mf8'ޚױ[W+b('~UNd4#Zg76"*Ș[9Ei~8Q7ZA:b?596n8crn2'R-Q湘4U>T9U='}bԶ?嬛U1NՎ61 DJ3aHmZo ImXw-6D^# DfR $8LR1I ԍ@'B8 qu$3ԅ` 'PN:qHM8 pqԅ 'QN:pu$@IRN.H8@Iԍ@q2d@RuM(I( 5&&`e(l)lE]6qdQ\ k (D]5dд&& bdbPLI&( E5&&PL@&B4 iuӤ#&P7Mntu$Z:TO7Mn?49'I t4n&n&L؆i hC4m&m&6DIF hC4m&m& hɆh\4lndi0MK&5ccpL I& d1l8&l8& dɆdِLmd@6ȆcȆPL@If lK۰`ن`0L`If& ada6؆bنbl0m&m& ada@7 aa\1n(&n& a $ a@7 L7 a0LL C0ɀ0q!8L8`L2L \ ``L`I 0r&c'8pKnIn -rd8[[K`rIr C.qdʹ[pKnIZC-rwﶻI=̠2; m٠@Z!C` wEoEF}C[2 vW:C Ԓ +9 W"C9P@a9̒ +9(Y0K*0Kj C-C-0KfIf Y0K*0Kf ,,r%PPa@9̒OaTa@9(YR)Y0KfIb C,r%r%b C,C,r%PR!@9ĒJ9(X0K*0Kؖ0K(Q.D)I)yQnfB)9䔼('$)%rᔼ '$!'$!'9I% 
9I%@NN rrJrrJrrJ*夃$夔$夔('$)EM(yQNBIRNFIRJRNFIRNFIR΍d$㤔881aTa8 QR!Q0JFIF C)rRJ +0J`F (q2J^Q $'8%8%8%qJqJqJ^Q$$$'$BINBɋpJpJpJ^PPk]Cxrv$F LV@8\KRKpI@.Ie. C&q$q$0D%%q$00 ơ8T8TW$~!N" $p$p$aTa8$8$/I%qv-`& q2Iq2Iq2I^IIIBLDTDT8$8$ɸ}Lʸ.u$@FLu$@$r]&I\HD %Tu$\HD +" I\HR!_.u$r]" \@$2Ir[$@$oDrH\CR)]Z +.3INCR!.!T!9$ABN !qrH +?@ + C!C!qeQF8 A0H*0H r] + RH*QA +.0H`\ARe.u$q= ץgbyr{΃; OaTa8 I p2H +H |C |A"!77@H*H |C |C o$ a7 G7# w w7ܑ7;pG*䎀7CCpeQBő +890G*0Gp#uuDp#p#saTa89R9PG`Haq#q#0sRsa@9̑908G*ds#`qbq!9đ908G*Gs#`saNs8#99#/Im$$$綜X44䌼8'g$9'g$9'grNGrNHrNȋsRFsk* 50 c$)V$)'c$)'c$)'cE9#PeʡTʍeIϑr#@cBca9 +9 2PF +d81ɸ4F*ri4F*ti@4F +Ru)F*Rx)T!@F@^ +#y)FH^ +#`/HHddN;" +ռ'W+pE*Wt_dUȫH^%dd,\\Sw:"uNGI:":"9BCBy I1:"9F'S5F'S$dLLJ֝TWN;"ٺ)R)|iT)0E`_"~i@?T藪H_"0M'+5Evݱ*X;E땝BI[f'b4׵&b:XODOOOzGD07EomOtGDkarQ!Xp"p pOz9AD*(A/'OĆ"" K"\a0,Glg"> +E|(EH6q"v6A٬ۆ4Eo$U{,bW滰E/~㶈ņc"lwXUMFt["oUq]Ğ6H*QboDw|{gS|{ͬу/bܲsdU|6|/E|0 Flˆ Xg%sFE[lؘ˱j/b糔'tvQafc;"ok8%XZ#+ +yTJIJE^ -H%$H%H*[$I*[$I*[ER"EE^4.4.4.ttp*[S"S"S"/I"(#gTSwF)H)0E(#(#T,E,ERF@)HE)(8#(T$4RA4Hs)H) Eh#X#kX#pkk$H@55hX#QY#QY#/II5򢨴b!81g2gD e¡Tp8#gg!@8I#pH#i!T!8NH|3Tn8# g qa7̑ +790G +G|C|;פ qI|Co#NH|o#oh uT¡@܇G*䏀:P?RQH@IE }u($u($ԡa: XARYA0H`Ie C!u($u($b PARQA0H@IE u$u($֡|!u#u#bCu#!::X@H*H`X?RY?G`He?G*H`INɋu[ua$dTI"Im κH^DDDB$D$D,d$d$dX'$Y'$Y':i$:$X'$Y'$Yײr]NI=$@::J:PTJ +pJ@N ))u8%I*uH%uH%BHSRISpJ NI%@SpJ*JR C*NIJ C)((JJ9(Q0J*d9 RR!RxJIJ C)0J*0JF (k2J\Q嚌J&ʵ Ż.Br5y%P+)kJ\V嚴JZ kJ\VR!פ&5 rQэ嚼&)5%`I,kK\CkRK*0ƚOWJYș^\YR1d&5%sMf kw|%sMj8d&,tMf k2K*Hd@&I-tMj kg+0K +K bHWRYW"֡: +;`WJ*;`XRaXKbIb C,J*J^ ++vx%.v%v%$a'NrIn[XvI.yNrINrIn[SI;%I;%I;%/.I.I.yNjHNzINzɋvKvڅ`]&ɺ²u-|uK^_K`~IeXթC0u&u&B~ //u%ᗀ:: &!|.K`]%]%viw)K*.xiK*.zK*K^%/JK@_%bvIe_%/vIvIdd[Ovɫ'$zmؓcj1.m8&٣cR{DVIvh:dVII1y I1ɡ;)&9t'5t'$ԻaNkNHI2ɑ;I&;I&9r>ffcwLrNIecwLrNkNIvhedKOɫ'$[zL'ͤDKeR 2iTeL``Z&L`j& 2;zi&[fbdދgG>3b5Ekw&['!l62R43 U,s|PL GZ&ϑmJ+9y,Hb:Dd쌑LjyH& }ϭao]$Q$+P҄eb/e2 ;~sz>5:sY&nYZ& sY&V2o2~9fsI&[͔CeCdbۨJa=PEoTR4@gRAgHM)I%) $h$hH*$qITɋLLL^4e4e8f©4p*$q*ˤTIT 8E28E2H&ɤdLL*LL) 0202L`)Ie) ,eJL*JL@) (2(L , +R,@ 2 2X&ˤ4-4-ˤp4-hJ&hJ&))R$ +RI&pbT2X&`ˤbDE2Ld E2H&Mˤ0.51.51.=¸L8&"\&p& C4Ia8,'ˤa8, +8,eङToL,B7I& !T!7$dRdܐLI |2oX&og|2pL*߰L eRe߰LI|2pX&pX&n#eR eL I!I!\Z&ALDL +R2R2R2)KDKDKˤ .-1.-1.-¸LqH&B\J&q)q)q)|0.-¸LĸLĸL +3R4R4)KDC4s&9,0eL*L 22sX&pͤD2tX&uH&!TI2uH&ɤ!:I&tX&aTa@:,HeRQeL@Ie,PeL*0D@ 22uk:,:,PeL +222)K$XXIa]J&b]J&b]J&u)uX&B]Z&uiuiuiԥeKDKd{f!u)u)|.-ºLĺLĺL +3R4R4)KDC4u&:,XeL*L` 22uX&ͤD2uX&uH&N C2C2uH&:I&/I2Iօddb,dźL^eeeB,D,D,d$d$dX'C$Y'$Y':Y&ɺ܇X':Y&:Y&ɺ]d 2uaTatH&"]J&t)t)t)ҥd"ҥe"ԥeRPPPIA]Z&L:$ H'L*L ]33t&ѤI4II4Yeee,]X&9Y&/2I2II3yqN eeR9'$9'$AɋsLsL*L C22tX&ˤa@:,J:,HeL*L  2tX&ˤҭ}dT2sX&pˤp.-q.-q.-BLtH&ɤ!:$ +:$@eL*L 22LI% C2t-{/ҥeRHHPIA]z&B]&B]&u)u&$QeL@IE,HeL*L@ C3C3Ձ:,:$PdL*L` C2dL@IE 2uX&uX&aTa:,PeRQ'a@:Y&t"aTaI% 2uX&uiuiuiԥeKDKɤ.%DKɤ.%DKˤ.-.-.-B:,]J&]J&44,.%.%ХeR@@@'Ϥ`.=a.EaѤP.EQ!$d9, eR!eHɃrX&rX&PʡTʡrX&PˤP!@9$J9$('!T!@9$(dR)eLI, eL*L 22rX&PˤbCa9,9,022sX&`ˤ`.-a.-a.-Ls)s)̥d"̥d"ΥdR8ɖNNX&]Z&tititiХeKDKɤ.%.%.%ҥeRHHHIa]z&b]&b]&v)v&$a'a;, +;,`eL*L C3C3LI C2vH&vH&N C2C2vH& ;I&/2ImpYe,,ƔE,,,d$d$dh'C$i'$i'E;Y&I܇h vLva$ZXuX&aTaR2R2)C2R2R2)KDKDKˤ.-.-.-Lu)u)!t)t)|nY&uK3f"-Ϥny&MĻ%-DKhKe22{iL*2}i>4ʾLL_Z&H&֓dm=I&$l%ؓdjI2.mH&٣^=ڰLC+$;L^ڰL?+$ddН,eeCwѽɑ;Y&;"9r'$GdFd]C#wL^CwLrNI2cwX&a0veRLԡE2dR[zH&Lh!d"-D[ILDLDe.$$%K2dW#[&tO/[dE).n#s˪XMLj'_I׻_DId3'Fud_G}bU>R,R?`6"dN>. 
w=Ja܁}b/'mb'FK.=O./l_=|$|$|婼+OJSIJSyWJWJWʋ2U6l=+LA` +` +` +TSSS0(0Xb\7e,KWI㫼UUIVy]% FX)WIKXIcb+(RE(YE(oVF)ͺ&/QF)(A7 +ce@_)+(+gT@_}YRVRV^ e%+e%+e^)+^9+pV^蕳蕳蕳ziIT;xRy"ޅPV*;`RaZ9vF)(b]F6Ja]F6XGq"ZII[yNJrs%9ʖJrmrm%9C\ys%Q])C]I$$P")bER +HPER,RPN,)vY#E[e;y!T^u+{{ء; +;`W*WD;hRiWJ +W*W +C_J +C_w+w+ +^^x+{⹰rvI)2)B^ILJA^ILuRL`yQ%ĒbyaO˖KRR%’ԓKB%RŒCcIᱼȒz)^K)أ^RDRNDr)zY.E[eB=DQT!=DR0Y&K& d{,{,žDRY"K" d{,{,`a|bba=L=La!T!=DR] ^VL)ˊ)^VLbJ^VLb%Sb˒Cfya%'%F˖.žd6d`/lE=,ogY^-zZd,IiySONˢSS*(((RGGa){TN{ a){rZ^KS'%'%N zrZzZRKR CjzH-zH-N iiz8-PR!@=I-zH-@!|=I-/IjIԒ̓b'%'<9-<9-<9-/IkI,ʬ>ʬ>ʬTQ"QfEJUV_BS=/p/^GS'%'%'=/`!TIz0PRFz/z/PT@=S'!=OS #R蕟 R__/~i?3`4\uV +ΊJa0! Ί(VWa0 `*0/",nʃ~l 9ZR`^-@)0zR=ް`VWʃ`VW )>X ,RG((L(RTK0l[VV3,c|o I +(<(D|a^|a+&[2a^?0 ?00`00EJa@01,LyC´`* ӂiô`˧Mu?n;oo_ǿ>-,G|Mb%l???x?ۏCx~Cҗۯٟ~~84%Xޗ Oȉ3ا\/qG?L?/Yu;-1׈]w>.Q}Z Q`l2{ҷr]֕MzeYVyRk.}E?4}_syG~?'y<8-'ܳNxew <)'I(מ'9jWq>rnsߑ'ٯH\$I*@lݹ<ɧXKO)[.'i_8I'i='7|nEI@-*^Ϸsl=zZng9l/j ,v,;m?yy>msmؕhɟ2]6qHx=-biv`"/}ܑ-O\:s ԞsOUY^}߭Ƕ +v1w0ٞ9iRA[vCoMO.E/aIskYy\3olf9āqͺ%lukxX։Y79Ys56{lQ-Ekf3&ݫrl,߹f67c>rͺ 3Y>E{r.e +k.=fy*N܃;4%^q9K[}h,vc4GtCFec~DZYyv\ +Sj{hQ\>uN6 髱١\~~ޤ2+g:/Fڟ>\hg='70 +!qbS53)QprqsѾ}ꍋ\ vWE{Gh-/X|]]h)_4r,wGFgJo3;Ȟet;f_{uDF$XĨSfM_6I YݦoY6`\䲬b^/c9>G2u1VGƹ]1O6pU]o}$c.uKjy>.?+%OgfesF]#/bYafu%rݼ=cVtݬ=s/7g ?lޞfY{fze~qت X41[4ϋfv->\6+ O𡥮ek +Kƒ>te_ZHby >ͺll!=yɗM^׭u6eEYiڿ~i?zz|t~)=_?Ͽ~cy_짣?M/U:vKԷQ_psٞ ^;p~x>i_,cgx}yN7W^7Gc=X8Lߟf9%R|{t6d`_ +!3?7G^GavztDІbv=_p/o#0+ÍϱmNDZ eAܵnCu_ڡ?Mh?2|C^ܯf"uc|u +Ypӵth^W{ՕԶ=얣]H^WyX+l>+4|;^~}^)1밙?4/UGpYBȆ;tVc8sjh{O" E?߹(l*v- H{ ﻻqU콷X4yv9,h+=`D#fMiaḠ ZZw5"lçOs'iMnom6lA9fj^<ǥc[E0JD]qځxpMFГYvWޚ;t..Cv`[mzm};G#|#rnt[6Bl Bsk}؁M\ZClnܗݸ}:s's6Gl<߼MKFˋzú|NS,h3d/{vc`#> O' h>Y|mxIL녝zM[nt?y7kݷ-rClKv0,x.?jd{OcI?F!$nz5m]ֿ힔Y_m-> ۟So.zzV ;#g)rɧF|${?Y^,-{a1Q;/BmB~{ =[DlͰ`?~oj~]NKW3bOev"u_ƕOq[PracYx1Cݸ=xfnfeQ#0?ױ:fDqCJ-;ĝ[[^jiX#swzI,퓝^[k| *л׳כmAom麆AQh3;/g|Ձomy}_N5xkS]Npւ8þjDyzɶqr}-?vbgԯl?8gϖRpVxOERb_EDs# Mܗk7ӵ["hOa`{^-skbk+v~Pңm^ۥMoHnkܓ /3eh{׷ڏlz&Y_s:cJ5Wr:uYΑ s]un5s +؏̂!]|l lK!m=u'J+鰕79U~U+׉Yo@P3%>, QQ8g+}vmi2z#<['O}7ާ.D숯-r-pc3ig{3]嗚ǙMgl9bk@wrHpgSJ趴Aegtl+Us(>Dnʼeq ^ |L: Z|-nq<5Z K3#}!yMݮCwUr\>=V/[Ղ>_tM,3}lf:]fWfi=-X(~wzhghpP}q\\?!/8hXܹ w6[!˚-$x̜>mcj|k1]PϵN=s1;!>i.qUawX]wx(1Nw[fL n)N4  +yiΨߪK YW~2d?]ݲ=;jfs6W?z_>gӼG-Y5zqXqy3(__f.툊)?]Ӎ۹m`쌥e,'~%>sR{Sx.5]lq4[㲽?<Ղ63nøf_rY`Zm1$ FaR=4^O[3(O(tV>s>l8,h37b(޷ݵШ篟zm+dΰXّ߰O˃jU|ٛ@so~MOx#\KIhqͻVy!(.fV?Kzwy^DźOG .g4}HA~ӌz y"S~L=te$.C./{ZWHlv >ְN?ݺdyžeϣ/pc\̟4z[<-ө%|S/SaڌUC^Rgط9o~l P\>on6Y6'-V=ٛx? R9aۚ$xZ~u^5+,1;xb 8Ϸٌ뷍ta~ޢ6l>i{  /qͪT߽0Gyb3o61kWQW,ȷ۬=hkNXm۞*׏lkmse\+`7[ t"Yl,[֋}i<gyv0c]cC|# ;߽^Sӑ"M#έhbXRwscwXՋ/X +bw?ĩz/懵lj.gW~Wi~>8lRϫ%W<+G :o\/.Vv3~i-N߬gfI~xڞcSæ}v#..d>o=uFߞyVkVേ¶f)oqtYpDo}'PZ[,yv4Anh~|y6vbCqMJ3҂=%_<+f݅{eGy`zMב5?'hq賦V|ju)Ofce6Jvm{}?& ّ'hJgMv73ӵhMeDwiX)vOD84W_/^ir|w|xwjp~MP!/_KӍmkWZuu;gOݰSϣץf_o\1>SԷεV?vP #e?\yx}jO5zsgݷEf>g=j<`+eK嵬*"|fCѵd{n63x][hhyˁ{w]кAw`/h/a_87}_yo'{Y2$󉨍+szFf% <3JN*_ְg`Y{\ڣԛ&m8̈ay%1fkĊфYx'#~]&vdOKm=!㎽T`+Zoѷh[#E^kx{2qvkCK4=lg^gx[O yc"}?Po>6>bw=J绯o{}ֽ]ēScZ/O˗x߱/ztm=et~cO|1{~ݏo|(n7m|Oo}vc巾{ܢvL[&x[?>OC]x.?^9iiC3v(;鑯=\&*= Vr>ϧL_6W`lӟYZg$:R֓5||y\Q)BE=o2mU<-: {".z8fBcԇ=hOR%vmoEq6{շgV#O{v5lLy˓^u9yo{ҟA$+}K0|-}bmM}rƃLP]3$;u>ԥSM\ֆe}o՝n1;|n.o14CG+2G =Aols~Ѥky-H*~9Kz{eNs6N߻b}17WY{": EcgI`;vE"t6޿-ߌvy 1 fI ; OxLSۖZw48WNqYS^ϡ@=w~}٣ܑ +A5mqh-vq +3_S5A_lc8}" ?u<}ʖ-jqca7NYc zV˶yq/{> dApJ.W1egѶ+S:,9YuEGO]-/ځ$xyPύ%5_[Qc^;znmh65ںqGB佶pK9:DfLBj7& lWq^'\#c EGMhrx8K<A-ű?ɢ+$Cdh4VQfb^3x[n6?'lqrWtA?QwX\(B/41x1u%6M)W'}WثFpHZ¼]F=FgxKRMtWVVl!崙mM=|+~ĀK|/o8;r㎏U0Ŭ܂͑2 ? 
+_z:ߊ+#k[k>޶wR2Z3s +qb` n.GprGoCҊ/u1~~)U<"v &lͶ3Q*'-Qi8ZHXmkDx剰y~xOrDޟ"f?ǝA^2'uO]#zEG> ㈱`˻5]A3oc/m%:Tn%}I?+,!/떓?V[,+V nf9cȚj3L4lGHq4B{eo4&۲6}Zq`={[gÍ#ލC +r]cc[=mbWNu˺ %`6xإڔlt$sOFC)օYW{JtWj9ܭym~Zk9K8}cc<VuO Wz Qu[#]#c+,|y17H3'J|h +D sy ;G"@ >75{w xkV}>]/H~s32~3h O/;oS79}{ISxof Raw}G>ړ1aҍXchz5R Y#hߵ4{5bO7&Vr +%|'S'#_} CV{M3|Ne - +Gpf+x{8׫~zTzCtV{=bi#1j5CUTJWOEn0$X{T "Ͱ4}B%Ƕ1_ ƃ(7lnې+hӾn ݮ;UAv@_j/L4 >)y +mi;#V} s [Dgp :7+lTZ! n7b`>>1+wƫmW<l!4)xMNk@*-뺂LhuCfah엂T}l{zq<+ +o3L+ZæN>IzⶓaDs) [ItFV wF&.v{0d-1Fh\f dp_1е@LL CCdFQѵ^t57o `7붖 %c5v{F-=ܨ=Ƨq #أSe"ނM3D`:u^!^Zj R 3d.il{i&ڏ`ij +SnҝY5|>lêЭ`spe[WW 6ǮW5clz&ĝ|ה5toʶE 1OCncDN]N~}5v7B1}fR@tuLcd҆ PG,BbOon}zx).{6-Ju7c4e_jOp> ̾) XRoxC/϶ָȜ1`{K%eFN}#PLh.SxFW/̓|*˶l}?>;k)Wӗ21-Pۮ\gcxXqLyiOd2qoJhgtnXtgyWԋ +`pqq^GvcW5%8myiyL^g}ps_ C9{,أ +l]fʶk, Aot5j u +KX_Gg%/[?6;esΝZwM_;Hk|ׂ^&r M1Z9`f&{d^xYA_^PO,ܮ;#&Zryrm _t_Wg3]^f"3b9,]0Hz$5M;oٵ/+ @} qmJC\+xklz_fubl2b =A_#>KCñ T >,ɁQ!_; 92cgԐCbY07uwJ}]*zdf~j!A"zq%/A_շ؊Go)A4O.nqwla_K)wlkW6-XZ1_@ Ȋ _Gύ+awI0 kv0shS[8aǜgZ`[<5̶>4Չ ζ7;taXt֖:gdh9}sa38w7/&|=>qP^QWh\^#‚Q6ÃS=9muڄ/d[du\Դ-[-sY,Eϥrgl{$iG}uLjm;Ӷw#I+>PLtq Θ-nt)_QrdOVG {؁v"SC.qi]^fжʳK ?A%2i*G_yNI{c>©xX)om z6k{Z":i>j4-|\!+wwtݛS\$wu# UX#jElh6chs=zMt /AFjpT¬ǽ)#?g7Ɔ|-a7e">Sp㘺4sMziݜ \C>Rq&]k&?{s ߔ +S㕷ic1-VݸA 6ބhO{պ)&5!Y[QSx;A/_A_SVWƉ]G` Qu\F&-5gcJ30a]mI#(m&&rҺL~K昱Ɓ_E)+6Vxb.ctB#͓BynKoi7P(:uQ +*~L<Mc/lg=0N8 6F7EϠD[7[`t]ާ2*W![1%vTEЋƃ} j9L۩ #^'\۰J||.]=}c`Nwۑ#"7')vpdhƲ?#rFO˂Gqw&/8^n8c902v`sEpe$jK~I/{x}V7abc(̔s쎠C/ޝ)3;)_X1ޛ]juJV|±MK|莖ה'XЖnoHwz_W}Q|=bdFA; Ob3rA9K[>5RolN(l8q5ersNU:1;8\~)`oX^:/I-csj{9^94XP,I󊎿-tuY'ɗ`˳Њq`]Jl]msbK/ӰZB2д{ KE{1oN܆kv m2}$TcBv\##ȏj0IW;s8k;X) ;ZQ)t4VjeVV1;[P;{s~Q=kBRl[좹9;"2kf"y~1|'P[lȼYQi &;;sHv0x:TDX;8ki`ueט(QpvUFJ 8Tu9I-*[#rrG `y5NVy3Wwըe)ɔXUsMܝ_.R[+3U +!}c-4Ca逊/]TQ΃Z x1f.Og&6J]fڂ v%N-u+`NClQaʴB&W9HIGUy5ɳlB/N!xWs'Δd@G_KgR{b&ҏe&ֱ:|$/ ^l;t\_]8Քb $DjO˘.dcTzLGYu(N$UJX c-5bpYsߧMz:NVCe}_[|XGJMv; ynR}'O\#[pck% ́⏑Wq闍T)_XkZ`J 4O%'fW1%n^{mcn +bWFÄaMc4 (o?XW!15C1.'{XI$6uZ +5dD*.ł3?Š 'S[ѝ*6^sDV ض'ըϽ~3^Ŵ}ZL Z}e~7+zbkNdzLZ8s]!BZSLBw.3fKFW ]ýuuͱ3S>/<4lzqk2V Js5]c8I]]EK,ݠqJx6Թz +x5w[~U#X~39'˙PVOޛ\Ŵv 'sU\{cVhu'$ޑJ*[(xzQX5)BPzNB1# "v0or +N?;؎|Åssgү +ƸRw7A빦MW=VC[_>d^_`+|68ymK;f%Pr=q~yUg0~t,pkE^'c]ykӞYy*g3MKrGG4v`UygX +ŒO.S[xfdt_j-U1v$ۑ\[ЈjQjk|"q3p'n0aVɏ;۹E -~bTh+ Vy/;i{:x}vex߶cQ ܹTGIxs!,5ipǬ[Xw3D\- =[۱ŀ;uJUbJZ]^Bj!RUw^*U ɲ(1`U; kvзD\)*;컍Ac;;9r%k}iWڢiW`%[Vi^P#w0[-+y֗ڹWuNoAmKxՌea-8aKȒf]bM]՟|m.bmke)-[Zf4{Yu/c,+S:oYmmJ@n+%Y^ʷG5lm+3 >Qfr͂'1[NV"okFΚE2)G>uz1='cpjnJf:&,kH߯[>2wu!7"8cSpf`W}{_)#/#S+ˆG.l}L[.^LL1Ͱ烔3>>Kٷ^n؝ S&,Ƙ1ǿf2c{ܞU Rw+]uf޲? P53Y%!lt UN2ΐ*3̙i[yI/9*=v2UsJa 4|Nb*0hO!4LE^L,ZZ1~h1W _ \;\#w>;?VsRWzD _eߑRƼzR +e%Fy2M%# gWEřf Wrƥ7?;.gfI_;YqU^y_r"P`R_lf8ؐ c9󻱑5|%gΩi*$Xk;}rV+'RcYBUa+ȾV!#ZJkli4z)ܳ +N4\}T*K^ ׶,9jPxgIaIѤ-D8}Tʪ 8YHi~>#rwe)t4W9έE{9jU]Q^-5a3̂>I3vPf&~$Ýmm%@&2uik9W8 ; j*|gv+HUVbɭ1m(C}Xe\UJxΕrU(WKӶ. 
mav#]+FX#[YZZmmyE{8Tpig;;frX.`J۞A,u0jClm_k`"5V#>GUhK2lZS(G̶-Q *F%Դ7- HBt5`|ovӨVU',TxG{dĮE˳蠱Op@6b.+fpWtVd,ih =ǠD*Z%x֪+<=e')Yif"TӪ 9x3͋A g9g;Ay&ӧ8dcZ+Teۦ4kUJFF)H"SԽ5LXc[[/UhEh#FWl{F +uֵS${ezTو ߴmD$/ ε|rz+P] hox>+߅ֈmp!lZ "Vwݲv [\˭]U˨uX,=l)#B[սUye@VuWps5RxU2cQ]VhRYvͶ Ju|UzU4ffK0z~EYŊOrN{6勬" Uٲ.!ojpNOlFN$9lubγ\+tQ48>s#R8{$DiqMQL!'lE7/+ܰWlj0AqRMlAq^A ӵt_A_vWۼkhTL*n$TҖ(Yg"ߨY+۵-=n%i>w)VGpF->p?z =ޭiQkwkW~2{h,[_$UFdٲǂlfxvSg9φRZJ'sMХlѹ5N~L2{rk['2`#{r;MOo#xWwR_Fײcs̻deۋE )A +\Brʶ}0:+Tv˄ouZ}*ZT4nV.gR.'Nʮ]\4EܡH- tȞݒfRNhL IX> CܣSh%rȇלHD6b=bDDڦީw \0ɈX$|6k`"ℨACv)bHz_lKLڢ u'dDKNZ*h}=o'`nS"PLkʕ3"+t2,CUH<Z2 )ڈ؂"OeZՙuL~IiERbhޣ.X;HͪS6$H|Ye({ +WqK2fXN%bР $)+nB|f2t o -D(;mKJ,$ +(h!jU= +\)JEL:,J"5 3X 颍@ERbF/Bj :UM6^("Wkom+TM68ЬFQnE[WF+e")GIS;M@TgHwT&f`#DYс؝mfJ\ǥݩDhBЌI<%699ܓ2+Ϻ9j6%1m5JMp!)sn_Pv\%:#dq2q0 ;n̙}DW1m[ՖBĠxH0\'b[ي1N;9Wfw3[RPƋjI3#P4 -AtŎ@nQtY$mRPZIBe +l~ + p(ʜŨȣ눚!y4Z=jk'b9뀴Wk[-7WriߘQ롘+5rAiH":<Ż&mD+S9%0)k݊lGP% +Ǡ-:*NSIEK04N1(oiK#bY8V);%r/yT֩׊BYMNj_"*uؔXcb#ostd0$4Fop8jgm5<mN؅J 2"-ː\O:m^v7abRY)}e{qZ rz4#]zNBB'LNKբc_m2i}?]SÐA[>S+!t)YxYTNrZnRVuYm5< nGgmeŮL\&t2'Dl\v>ד%49ZQ0thPRD) ĸ| +-ޒׅ.RQ_S~FP1z%1N㢝l`3{eJH}25iJa0#' &A`D&mFi3"nCB4xfD.FmPr@wtG# +FϴC\ c`YX;l_ӽ'm ]ssB +てr$ +A4#X 6$ٞĚV;!N5 k!*UβYAxر- 9a𘂷t kP!);.EoeC0yۼfKbŒ<*]gNK(TW`\`)Ah(o+;fIdad@7Ğ5#2 :dd +eq)Hm&m%tՄ8t0%#Ye)23Έ&9#~I\SH.=\ee fWܑקdf +K1tNd+X\'D xڶ5hzߜ8w2]yZjze mjЯN:q:ٍ|LC9SP&@ҷtM[2ꨓ{Fd':d䴤8 CpFqzY>gD*lNʌiF  d1ZZJ(U'&mIbP{D? 'hh0u@C%h6.M;#v 7LŮS2}:rW4᪞.k׽8OUMKiK^V$Wʐbs;$Sp=-EZ@3+V?FU9߳ a @bv2ƌ$kRi!HV"ʤ#kjNUr#c6IJ#f-.MmҊUӀ q\3!@Ǯ`5 p,/D-zdFE δ$T%_jIU/)Te@\;bW(hjFmT D0U21DN(< ejӷRT^ӊC!eQK{}=eCrMZQM ^05,IiL<# 7 uF=L*nr{y}wy驐?'ɣ/j͋篾fO~]㛓_}=;B>' ?'û^|v{{G/7W?Oڟ{y⚟v9>7Wgw7һ<>woo/.wM Fع/$~|իoϾ Hmwnw߼~}~f8|hֶ_r]{|]ƠQn^|}QoϚ~G}&umK9{Guս {ۺ&ս-2Xٛׯ/Ϯ?zsow/~{3z^ }%_h;3ۯ=P:qSŸϋ:2/vn76A'7ع{_n.*9?`{6gp6՚©gyn\}= WzOonno0,on>;V|) ~vs&mMpg'_\_:8@$+m[o-Nv=bKA=_cbՉĝܗmlkp]Ww9|h~ݘf6C̳˻o_^mC;xqqoU->5?X0&{|C' }8 &]\}ً6so?@YL ڹyCxsۛo~⾢.˹={qq鶒1Tm}:Ƽkm,76}dkGfQfݘ3=XT&0rhŦ&`k1[$ö%t5G:cՁV֦ܩ:Vk鷗_]_٫-MyDxok.v۳닫/..ﯻuS3۝ nmj8D>stream +d CVӡs+F^qfSSO7/;닳mE,>ops7&3X Mă_ʽ+}ӜO+Q_[Zmۺ+V۶>ա +ѶԻ@۽!0uS390fc s ICs$ 4Unُ:|cޑa%a0=f*da6&a69S6fg{pl;ʻum='?ͧ:l+3G}(6?rz'_a.Eyh᳛Ϸ8|yOLB6\_qk갢~c9m@ZRl[%>bQ5uVwr[X$#G9ZԆ8r#G{ G>r_m;rM]uC-QB;򳣄vg ?}_#IV_l˳9cy?͇QIͻlփ4ϧ'[C?a~c5 ;VP+T~E565c{e`A)~Tsff/.ܘk23?FǶǫA>]ȏ;ǣ Bހ>tt!oc]'Z߼xNuj-ǭpquuݧ^\\Nŧ]\]}z{ӛ۳o5~nGwW*¶&zT~Dks]9._ĺ-#~Wm Pf5Ory%y!ȦeZvU`mջO~Ï}ȥP65ے-DZ&rX ˻]Kn-iZon1X;C;7vOmm"Ss[:ܤ@pȽ]xwNC-NAԜzŽǚSo:\k'7}7&-: (__l26BQa;.NN o,̷/wP>pz7{Ufy//={qݱ/uU f?fGlY8@5D{ώQ5;fG Ż{ >ԚbZܖS7StӶvᎌt_us3[|q<1{s՛[:;GN}c om!8{Of[2t厎~o厾:{}ۋ~wjG`K+2{-׷gwgWXU!ok6-o^;NxcEtԮow6543|EY)m1n󍅀<ԐTm1cݕbզ&to{ζP#٧D|Vl 띜g7,sۙÃ/ +(=q>rib^3Ius}qInkIwGiv?omwZ}30Lf +|Gmԧa6*Ѱy_$L lcZ8.rv.Y5¹ܢQ;ۗ[hQߝrߝǻsk[xws:ܮ +}:Wq}# W0+=3yDSԃ+mEuPh%h0R}55=/1|}pow2ʦ& p #6qݛel~0zcqLk*x7ǽbnK;*鶸e1mҁ`h&]zm +8U6|7hm+@Yݞ](7I~쫍݄l<|-l0 +]..{ZNɶu>϶{~z5~Th[mGm!â7̌+q5/զV2CJw9nn^|s{1Vx~golL{x'w_@)?\ͅ +o.W7W옾8ů?ɛo?~➖Ǧ?ueȅ\3pk['1T}>!{׾ ["FTUCy9^ʿ5<-5%QH]2:5lȿԖ,%"m!2qh!ڶ_B +mJڶ}Ŷrm:דg'_n }0O3-@9En=mkڗi잺ULخiN8\SެeeR ĔXi;^04־WtYڱHKhum{~YNYDO63[qtYZFOsBGоc2}ǪilZY&J[~iH +%!8ʎڿdH+;Y!l,;n#C>`Ƿcq(k!S9(o +I*6PzdFBT[BlcӸ@~GmIy:hܡ ;\cG + <Ȭ{ٓ7ɗ;+>mGGp?̵֖D%8K}FTVڹGG]IF4xN/NSJm.*wXeJCm?\nMm=DOQj.R۲.pGrrat1j]4yоsێeJ^>X[:۠qr-qv WǦb "uӻW9J̓x6B"I3*M +Nc;ıhC؎Tr+KiJlmWa@ĈkfbkS*82ԶZnGX`Q&{Y?ɯ~}"TzH|t>8͡Gsq9|T>(?ŪF0jņ1 i"OR~S1v9/A5atK;Fm^ƣOz4jCcl3(۱Qx9AeJcN\ͣK^ecj[e"b!$mZ;[M5r=ysnKɲ$?7Fn'Bư x!t V/Bfɚ%U.Q$UQ14tԄJnڶ)~eߍ\ukLe֝OUZyHB>]Tyn;RƗDkԲJtiP̉""oȦd: .{Pc!_t@rktWݺ!Q;h[_W0V(|Tw Tn.Xʨr4bhB b¥+I 0X}Yzm.&%&1KDy4xЛu`M3!y"hDzֶY;Z'ZcڝQJưBE;ł:]t"Vc](Vu4D65i!$]>!1,"~(:Hw?E +4eYEAl8v&4@S鎘S 9"[fgn MV 7BڑP 2طt$Q>cSL&]4jAV\l +YeCH:X?1H@Rcƨqja +c'-7mNQhgTh 
eE^^8ښ0@< ^߶sbt b_QPuF\b82iDdWMl_QMjΉ,֚mSh'=}~麶{fa\kY +tMyh"դa\)DIw>hel-+8G`S +,Ȓ7Xf!䃵S:Zq%x5*9"ih+&ٟc̃0!\,?QgFV݁o] rG5h* + Y' I7QcB0"@‰i[.6"8elrOڲhG1 +Gedԫ+h&̠UWk̯ +QoN]b$'oI*Fi0. HU,F'n; )CT EےaUzUs-[ilE݈cy펬cz2AFa\C).L,Iж C[AMwzff57Q&)bY\Q1( +.1\1vBqeVm(ĜJIY)N 7nS<|e@fsf9AC3b)v.;]@G:^Rz%l +pI 7S. _W0)"b#>Nk]l5/q=1C>v*1WG|2 J+ݖj3"0[;MF g׫]l\GՖMHV}j'FӪ-0}ѳL"q?ڽ1DŽ;0>{4&8&H@4"yX${K, ڡݬ&GHCPXJ.C4̏|9 j-l{-˶須AX^1;fuCR#ǵߪpbZ&]RX(WDOd -ĦBl;B% "nu JT LJc's"{*|^E2h@%rd.؞"<`˦6Fcgƍl[Y6h3Pq<+vU(ݤ֖3Ns Qʼnriz_̢HDyw:-mf*0FLr=˫Pe}4/1ӂ7=:,42!6}KMG . 7n6@d?W㕑Id1W%A6B#b"!Mëe*w`}joۿ*j$R.[rGVDZx#7CB%;;5MsVCGo`}3)v +Doam]_kh-Kt"57%;{qAēFFKU%PNZzO}P[Kl[ +:2HE6K.&O Kd)%m_洍4FG$ܖ1xxRF^Io:b8BF*y6sK#}\H yRjϭK- ?1YЩ/j ACU&LBI,Mlaj.L[R"#K& MMtCP[n¤vmU'o-%새]d! ybhhL`NKws%2ⲱ@3brsd74Y\깒T&$`鹞U~r"U!9uve.]CK 19fsRȪ|DÉ*+`eq[( +&ߟY7_]|v{뿜}{ߞnynpyU7I5&1gOr{ɗi?5;̓pKL~I6w!zU!$ZT< +,Ϙu"7#M~co,Ɖsc2iUDfoNs=mb&&%usFdG Y\f;Щ)M `X1ep'wאYyq91*wB #xR@gEDn-h[3h1Ze,$ }n*hT&h@,)JU} AİV\,xvΝpU[3~w;UD`/*Gj|~hJhY .e8gu#2%G8y\2MxwKT=(8uSCF7y{̘%us=Y D#W+] %% , O[%:8Eg##I pԝ@ +sr4~n ~3`x7MZzͧDvbxn#rEDlcD u`Jʌ ;c0BJb'+F- tBklEեFS;w ~İċj}|g) +vĊ 6v{セ,rH{*&q쿀7rF(˚Bt16#3[Bk̈.8 b"X>t<}ɱnxғi2$(ø,YNsUqFDkGlQXqXi?u46R1Uad9I:#64uՌHv |Ac7M숚 Nlv$)2aD 13+R8 HŒf vF +.f]mq]*CUv0"ktnjn9*bB{Cʈ88a]ҳZO7瀸X׶Ov891keٮRQEgzg!&Al~ӶQp8Uޡ0eC6C$4UICCuΑvUeU׶ipG- +' 5y>lj|" 䳌ɹTEoWs'Ck-ƵHK虯nsSF C`Qd$Q$\Q%tLlۏ Ca5$'@= V%9$]~!pXͧA5 `:,)D6?2IAW fr'a'5U,k`* c6SF!,zdEBue9鈜{'"a!шC^J#"KN8'c&)XdSEwǠƯA*GPѼ#ˉ0="݅3k +'Elsb:9tbbN!xcn16v[mZYŪ^:s6ŮvKa+v__UrҤ,|My\T.N,4Ē2^ٖ%+ǾZ\Cj!Wl +!u* -ІĨwV#& 8fxDJS:#"48\Zn&[pyRo5uY=*8xJpL_7. ٔDlӶC*4 jKn 2нfH&׌!85QԵ yI RCcas)jj/H-T+0s0b(#,g-§^9`1 d2* {tch^Q!Dd1u͍_z0hQ +݋X"nnInyW"IF88ij8zIZ &&JVؤ +mV.&ֳd4&3Ŵ!9#0FdycI%`Z`G#N$wfׄ#Wyx zlC|fOx:%*bOQOb =[M{ +}VcLjGLԟգ4lԲ֌OЅm/d(P̌l"n;XvQ$;9C7d?~I(cENLPn&F WQm+֤S?Q=š2%某ە 褩M4 +?>]HlTc\tΤ),Z! &ANqRA(e~m5/s:PL_J~̡{45xǛ2TIgCSPM{jF ++)6EU9`mqC$2Y/"hnnabZd6f7P#D.j=za}zido#*1$+| s){p{7?Uay'̆5V/n6%J6ZȒ/Q\;`٪Jp̧SOȗzUcߋb W %^%*#|1dCwٳxF]gť}z^|I]\GDj"qL* EW@޲zW,c%uVT#ZמaHHF"˯mPI1י),<״0 Ii} ʓ'S lAe)fZր"FM%K}+O2!g+l\¤ދ`XaXY0-w"bp#LyoLlQC\Z-Yjj( +Zj)nd;۶Za1ha2PmLB99IkhA3dƊ WkZ7*ۡD눣6bٞ -qGp˜m4#J m+B2\D0F',طf&h8t +CO t6*I!QrA%8DOƖ6Un29i & +6,B zɉENE |u- +v rƽ[v~PRZUpJ< 5(*|YU/$pu!KF!vH(6 7l VG|qFFn&% (!`s =ƄF]0-h6՘XQEd!#Yy F5YijՊo%;ZjFRC++)GZ3OZ^ӈv,< +uGLo))lL^ELhq$8![6dӋɮ!EF&j"1~|L&[v"kQ~ա0a/i7A@<ž˴<|TA;{Xj.f}"9;YEFv0C&D +b[U&[]= hgDyҁ bc4Ě8]v3YDj=`?8˱,] {]6ĉf=ٍ.\=R@(8YN1̲%zr( "`rHP-UTB#JSP3yꁏ-f[;qX4{'z;g_mՔZ6žDn48LlW4BDm8H:™Z T+Kd`eYIε\FuK[D$.eBX]j6y_S$8@.!'y 8&ar̺ IO ~o񈜑TgaS&)dK.-CߢzKpIo\LlcL76@k~f:mcȳ]CFR RV\ %~.ۨ4a?!칧=v4nXh/\V=&ɵO &I.q}᭽Iy֛l\)\Av +jNy Nbp~ttS?X}=Z|xpS)nC<Y)w|C =Nxtq'Fq' +C{b1x'_~v!>䝘GO}t>{%M<|_x>i'+AA‚ ;Р{y ,!B$0@`&& ! Ńз1CG)A X`HewHAP׃Bف#C9V Iq%%E5IIe"{IxR vJa^Q*=)Ń`J{JPVp]\ X.v K1@KMg^[.vK.EKR/ŹZ `*a +kTd +z(S38S@jHSnw@M1!N@b7y:r8s)0a5)Nܩ=Xx +iu=䩈{@Or=Ń%>~)8T8! 02¹rPi*Ap **]EEXTK`TF5{ZpThڎ\r^&TR.,Sz*5p[ހj +poF~8ˉh'_Hc1c `\\4"#zkhðUw]G1zaa1!M>{01ﱆ C>=b#هT&A6غ[bQg#Z"ۃ>]c/1|=Xbhb@4m^XlƠn7cۥKc{,q]`ۃhV#cӬھj(k6K@kpCZwijm'n=uzlm=.V[`޺6y5ĵb rm0]X[ t7jkKkV]yY^ۃ8; 6NںX@_~58Fƃ`5` l{ +Vaౄ6yb5 6z8یKPl<؁ŶK`l{8=X m%@vCwl-A`5L@F_Beۃ%X6lq`IK`mfloG; d;0` 4nC>Y[Kq{F_ۃ%q%9X>no=[n%=X!Y Cn9XB}=9؁#ɱv;Òwvzh~8ւ@'GMZB)z^Tn}/,m4˭%d=XaX /m,!@0&1ycȼk (s{5K8>Հ6%AzXskK``mn|=uě%yd ] ]:Pɲك%ܹ=Xxn,A swb}nB琀vm? 
,8ݺX BM[}5z.` nY.v`z`t`=8}%%Ct7_ ڥ9i֝cΤIJ^O+M?B4)u?*(:Qʯ (:tb=2ѧR9 moAkA$k Am,{9"9hmFEGil_Hu8ubpVŐf`^r_EHJD-鲮l2#pOW`$8 5!gouOJ # t kD vpG@&(nvW)O=Ibq(YQ<*r<`THD18X +[aN+$ٓ/SHpy*Cu4I)t(>7xrRjkٶ$or&S**>8=.~AV҉Aw\\D}L`AHFtV V%JMyc) _|Yˡd+ @<ꤔ*RīX#7|6(_ + h4W +uK/AJX$7#[۴W5V%W4-:mv|߯:BɜT }dkhKZmrRs5,^8%{ 蒨#-kΉp,89ߤ1FD׆і ׮eWrz҃E}|rP-tPՆIce1 +ߜ*$'7%'(g"Vh2tXjYy%DxJmYUp@h߶Vśܠ*1cy ⫲/4\sV%!׎<,д|EIsX"^Kɴ@ ae1DAe,ҽ$.sljUpFKzDP$Ov&_V!R1d Eh˰ +HQ5 _!v(zi0S4 +Qc1#Ù!-Vi)(Kć0¯<9WAa* sBZ_jbW;lt,?d[^xHͺ_hˍ&| M_y@:QdS 4UqfsRsBͰFjt|8!K*vkQq]uUÖ2P;%'褬"ȭ."l`3c9>STre1;Z/`4b\  +8G P|2{'P%t pQ 9фL4AD Q!_aKQk<0QOlOsLΘ8>mo:D 7S$V"^??f\mZM3 $Ue$T.)A3{ g{/DW5:$ձ'ީvUH֑UE2RN:XQ1F*9++h>/'މ?T|rCNbF-zMǯ=v<%UhJ'yO<+|iUQ4a 8p s$M'!4 n|\WPb25:&iܡ0Fh.tck즶N *:4^BG,qIIcX8nЏ8-| +u>i4" [ ÒC?j#iqe0k6>*bC^ؒpoѬPtR@ìBԍYx)LE~R"}Qcbhm[?lQN_1Hc[O1DLPN/yʐz*RB|^gm}E%jtP.tMekYy +K8E!%L-\"`4Zҡ*DI,Ӳ@(nR7 +z: +S&L{DݑAIg\^C㰐zqw=aTTְ| 7̷I\|ZB6;fQm R4-6I}n +bc:3P +A%M-kX勏x_{;N<4h_/|}w˝Wj! + ۻĝ󛛫y볯.͋YX\r8LT%x"=#IOẖ8֢i]wWAisƤ^(`# (rѝCA +0npZ .tJ18Zl;jĹfH] +4~LI4VZMf"/$HKR=:p DVω^K&hxUc"ɕ$XI Xʤۨ`F/aD@%A<b +L# k^ ~d 0Y_*ýψ#Ō&&  +' p^ +! 0nmв&6S$قdP=Aqm>$zXrlJUVȲD ;B +I-Aj1;'A+Q(Au%SeY'y]s9)RX&M"2u4I'M*dr%e<$FH7RduB}Q(`:NL-"-\=VlWhV74ۦU:Gd'E JG#+)}^ Q̦ QFo'K!(1%;IIJXfM$578: +(E'17x_%^!D +/5UPqnA Kf۸ȭ~tƠúLı|Q[m ;ܲl*(C6 hCDN%##V3BH^246ۘ"oe JE풢 ofcGI7\Nۉqݞ(ȢR&i{TPH;H=Dȱ)Sq̟|3ri9L|Uք4:p`62 ;!qj<" Vl܅n> h /s<7Ezˇ66~E)GɈ? HHM!OOQJq>q`t%BW$SC2ݨql=XQJ$9œ,it0@ %7` #0ҭ$`rp)&~Df3H!Y˥SX<&B`!>!H[Uuz_C,/7# P$,qݼ,lE d- +8O^P0-;#&P|jdT$~,EN!,$^6%nķ/A&dp//ʾ +Qom{!@Aѵ;oPE|[#3xKVKVPumO@5+tLMd#ưD-<+HReX]zzEcE eI ]3lq" TQH>mDXH J !j)UE4rg"J%kؾ[Y2Ij{H~YU^ך,Q!+DR#Nhcy̹j1Xt$ϐV;HGCUՏ1CUM@!ۋbBNUS霊(]LyCb,"PEbDfh@hD8mIynOmGK]dRE8 2 +D )Q%#Фo%3˗lRu/]4eAof\]3ʚH:K|':5dM-tCRv^I]QdUo/:1hxMGxES˂u9`ۓ@FPԮF`̺0!U Hfю\ʩgkgLK(XO(eLD)KQNuW/|ԏʶb,QE.g 68U'VͥdBV{8 +[C1:E.T,W>z TylLjRcQ5H]\,M@dN(e얻O1j]*@0R 6w!LAidV)( `j/;e$g1 +$ k+f"R8]A9a8{O%H6tT +<($dq7C$ABeӥ?9TaB ++,ԓ,UiXqY4O^ϰN< UXID-SVųHM*5Q^Yՠ*.Ljƛqbԯ-̙ԠqY4IxF\E0t+psR-糱mW> +0#5F}峐nNZ26JgO U,?JU! I6@b7 IDEN9^pHaH.!.'̩qX]°`4щtk^ +ZA>kB$NtCB Wgh)W {׋o}Yđ( N25ZdÒHےl/Q[bQ71%dk|scc 'EUs ȼA̡ј F'72iE"[Y>1j4H\|꒘'{N7SHD%}Ó؏hhSD.gI?M3$t4}#h ;OX%>/6Sī0 bN1ǢÑYqJo cL ZC"]F0 #A}֨5'CDN #.@y]F{*)ɴ$;|撌\=8~Ĕ5<8G.R$m-bEGcCPjCX'Ay]Ft@Y-1"Sp\ݨ2(+ + +I(@mr,q>\FxIR77Rx98NBGBk!wQU3&a7]8 2 suY|. ҺWjC PBіXTQVYL'wa}kE|[ڹ+A%a}L,ݤD #3H _3CAY43%`e/vSh<_|UNٴmyD+2;nScDw"<0ObCbcuEo?sd^!]dE,^0J6dz>%<Q&R[XLa0Q*|l>*GS*N`JDb_(սd NU0>$hQ=*ĥNQG˪HG)ghBK١ <`eC"/S @@G`"8f@DAZykPb LI +rmf6T4K>~f2\ȃJma>FUN܀<|o0rĆp&ePyeie. USAă\r|P,ʨ4 DAhJBv̮hKtepڸͳBv5 =^=HʸN>u6EScB+:ϺĻM"J],"v>Ocnu[:'[dI +Z"Vr{\,|$mv*9GZkh9պF63mL! +N?Mէ63 rTmU2mĥaʽQtD)#jtjP)JDeD^)Ѯ2̾U5Ȫ@W|X19] EG]p`4%[؉n0<}# /DR(,yT6.p{m'ҹ愔K{&I e{eJz~.crpZ*7 B/YR { @ p EhH!Y4j.@"Vn%*9jfW#7ANKƟF$ );AO @<+Kp.D)5 Y"2jA`2nTƪ#NEpLŮ%w-`T:?q=L6,zkmM6z[cMcF[@om}c|m͞6z} 5wykS c|#M5pڼykmMcؼYF[4ɅhwЧ,  U}<1zM񌳶4ŏe]JxzTJ!qb[V;$1h[cJuEl{ϡy +Qj݃#@@&OLQȻ|Ed!v]DW7fx..,#tBSGɒ x-yg1!f٥xwl?[9 PvS8At`FjV7zԓ#IГe e#r%̫esT> +݀FUpY+>. 
+SP(00JXhJVX2y^UqfA5q(/h< fK4?W¬iYh?Uؤ p;+ruUglҝG&OdkY1 C&&PoY fS"M.2Q;ibަƇͼtԨWXH2G9AUE;`Θz=Dmp̶Uܜeж4E}:٨R稱cyFK~ w@_ӹj,*mF/UN"#mUB#iYB@屚稸dU[styEj^i;ٯvhyԋ=ml +3-WFY6챚fYuzQY?4V EePOu[k7֪L8;!n'Dh%P5ݼD, +(v\ +jt9>9(ZInŕDS^, N_++{)KhǜD9F" `F6&f%k6@DKvK|heC2:x^\'zRȌDE-DKpGϢ/j>JDhɸ,hn$l_E;1%I2>\(.doNW/ɚe3ATǪapɅ(=ݮs/$0- +?qaXmuH 7`{Ǿ}=h6Ymf[NbBֻm:{5-Y]ht&>mu\# 0b12^Kr}&9?QBt d]G,#ErmcKL#o0'~0Qd݃YRƞ@(vG EDo.F3w4@}4K=JFK6xf lUf'"mMFK ZHj %é؁a}Tfl9 r^fnU7;e0CZZYHhpuS1 -@%lrS {q|"cCI)FHE2Ez(fuʈaIx@P]DYdS>ԬaD# ŊO"z/̱200b* vQ'Ey ?AP⩛Q0?r6'.Fxcp,tcSQBcGa[|Peuґh Y.p\%b[XL;ƒ[r.iM=zd5͎ +%OqZ.d^M`5!cha԰1(kAD#b +ZP5ȹz9mk|qs#HM!I5HFnɛYڑjo4 lU-G%6F*O a Du+* +>Da4dA<:A4Eб"ø7OfլXmֹV)qQSևײ88GrVAn,rbB~riBD fBzU&dQDiYLf:la +ϫq%uЫM8$c +a%8Z,r$)D V5s#EQ{Tӓ"704OUx=qj'|ѭj^PR(O-EHۿOG3<|T (j`) +P?**\%'R9S2]#SX `3_تЎtX>O,W by6/J؅h<"6^d:yc5ݭi(5>k-\1}ܢ.)0kȯ*v#KU[~}o5 P EV)߇ `x'[FQCt)J>&ĤV?kz-\| +.ƈ'iO +>\>,@1}( DB/T*bCL!ȇvZT߆ +FpGfj(&|@߅TM)fŲuǂS ]#dd,"'Ɨ&Nrp2'_MKZdj'uZ25$c8Ub6uDw7HƧ$a`U%\/ra^SĺJܗ^Pjiv496vHZeQǖ3h!%L$9QŖ"W=|i- +uR"I=$q2dd$ƻnWHz >LL BoJ} ҔՔ K@nUԡЇ;54-qB́G*'+WU7\݂nAἺߠ\BhL3E[d[|5}+n.e\j Щʹ ,J=z)ΥGdqV99sW)e>ҫd N"]&A{@5qFomHO**o[^bx0 !RE˘Ywheu#s& D,)_H? Yk.F +>!&gRRͮhv{!TëG{N)me]\TӅjGhܶS"S2]},$Kxk/,FQ譀"\1ݨb{7$;!*)CػQAGm$Mͥ\diO9Z[ftNJGm>G6:' +6 +hē\ @s[]hnk<T4Rکf ʲQqx3eJb|X]f4K i~9Xh6$B>dsBn_UtAޤU*C:OSFpu%J~c1QQz=FEolYQ`\;ZS;43P'*JOށŘ3w&%z0D)U٣^|.*ԢmkTx#n{荁K>_Hɕ},VzoT=zB~($Ue#^# .ԸnORp]Ӹ}!3< $2KR xX2{¾P)Et +6r)(-&.l`*%r ~!OpK 95<@CjOiR7 Q TlJLQҚhL{|[qʡJ +CX!Ϯ8ӫn>OK^Ư?{RvihI?YIyM4=O D ?Ka]-p 8O6^,뫓>-*i}~/掜}߸ZXO/l]m*IN 2beT4QX9;o'jl`#r:*ƅ@Ch>S:DjBWOa3 +& +\tS84 t)@Y*4J-G#K͌#eȧ(=[ڧŵA4 RBɤNYI~;w|Vǭ +34WJ&/[ZUU&#d09 +qtz Q$ *@MN~HnzWUw ^[!^˞m*ń#j85}Ul,F'ֽ"$Ag;2K3J ͖J/EֶX=9* We~!F6ħ6BdTrndlI0ivԓg_I!{TKX+m6ɼ$.ɰ c͠&J CCX|GdW&:`2I&%3 OAv 1ɔ^|2Eϙ&X +#Rҵ -'$I>ƞ$xMu핣y{ +Dc$Ny:W9xkm#%ЙÊm!c-R!<; N"g X]ĵ(d7#ME ;QU 1w-nh/\|t9* E?g\]R\`e\ͬ:6u*g+Xφ!P0yo Uk9]e܅YJYL{rE囈& R&X@];@~.6F "g]1:P:&<:6"E/<31Bsf ?8uмyf=9^|<#L\,p*(`/>|s$vk`Qӓ-=ʿh-n$q4;Xҁ"HV`>dNGhovpn0{1XEOӰD,O!0n;$SK%TmT +Bl6mz剋 ;͏]FL58PKfv%oc{ :e].b>''S6%I8Bʃa-Y#vژE4{˪,1Ѯav+IW8U`[M(LH?خ)NJK_^ ӆNDc +XF<`:7)m * +PwJ'2X*A*k`q?cVd8.ńi-8'ZP2+ͫvSS "L.f:+-'cPjj!_mQP[ 0)b8}!bRe sf'VdZEBs\dA] HfCJ0xqɝv5U:*EP5`FP}B.4 RQm + M{z9cr`zh.$8aMɞzW/jbk΍[ِ19z|JsTVcs)Zޱ&'t8d}~:U2Be6duEPDep R.$j" rã7j9uz *QT85]uG7AGr+!>3g4@8xisj$e[~8E>^"Pp@| *q>9bkI9T& 1#U[i}dQ g6`e3VLU ʱ~'" h TB&ς +Q3S* Y of爆5hp՗;0Ȯq~eA7v"R;ЦIou_^k$AwaYrXdmݣ`s2fQ2+qAAޔE};r +D/-yl1àtW#@wل듫L3%O +F^O#JP`4͜cz#dOp3$] +AXr˦,쳬9Q'qv5al}\IAuBeRқtAzx T:xWuJ Nr0*t;" +#Q:c4; |pw,*9kl}*gKϝ7T2 . 
+wS7;އ/Xהt;Ft U.t]= EFw۝bO8:Vĵ{WWҸyA~|k]'樏Kp)EJL)'d\JAbs(]KAMS _^R>nϿBČ -lH'ngae2& + XL > +@]4X){j5y->Z,R'J%+^uэDVHBp۴80t'2H^L:b/JW[-qM( +a#&]HЍ +v@B +#Gg26S(r +va +\.g73'c} 3jupK!+#6 gMB PANԈ"'/ Vik]:;c V4]B|_}'OpnWA[Xqc<7{:Y+`CsF4j}БyRY"$p }PzNU((᚝#tu@-d`M +D5tBր/tc +nJ%~xH @UT;eT`"u %f`F8@#;&R0nz +nT@돽|ap;swp +T:%Vvm~R g@|kbS&~p[Ëjj<{#VI!?f]ƭ1521LشO9^$P R\›ik2 +zŁC_fz A\*p#DLGUxqрT/h?}r4 #@:|=cc~$>3]7A{[ss1Ya?98TbǸk$L 5E#+f8#'ۛziɃcޙjA;NG$9Z'%vרF+1Jl4哲݃n^8 >z5w:yW,>g5:1/-.b9ˈ"HÊEI34Pyϛ&MO1/C i$E[<\e҃4a# .˶rL[A`oy^'@v1P6exq~?uX/Vgnj[%MnW K?6HdBa㷺teG4 B+f?{~]GHWšH娤́BnѿOYZ-1 ՁI&OX؄ZjzUw S6}O:aL"\)dʲҺ\Dܶ^?o(z`-N65:Yb8?\1]a%H4F2!:fݛwJ*צAGV 10jNHleuM):;Dfс8˸*#y t4=s|FY +O7G(@8ec:.C:v7Gs|?8\~4Y*qjN4鼪 fP !SYn xniZ/Rp\kf{=vh:Z|aa#GKq{ux#h>?FQ_7lhيF1vsyߏK:<vDc '3y11I1 aDj)7/p3B?/fa?{ @Hhe&¯ہ8tNJ2?qs*iEPrH߉BjR#*FFy)2Z o YkS +~jՅA:dfе|C + ::2BC/N-Jh8Ÿ.KPU"3nsІŭ>SL'ϝB}j x2߶:oy1"bݰƕ91b9_͝:tI YWhBΏKm͏J@UְGS691>AGc36[;rS^ü]ɥwLSt$ BWBygs6  / 7Dxn9Vɠy?y̜z/€i4kPGH.~iGC PBe4ԁܥJ r񫽗ӑ|P#1M*"RHAJ(b+@QHS5OCܜ сBP[IQySu`S?$9!IQCa/8+C<^D%o,G Gf_v ?jǚ>yv[p7ҿN-c/@Em±u%R8F1P7,*eH9QEѫrAw-cɆedP)F: 'S +}j0TW ؘ⨍"SA +UWKSpKuӍ' v r:#)0{bb@Uf8P;:^,?{^WaJ 0# sPa:O(Fq1#ݤH jO+ZjdujKDr r^NR9VO?{& + @a~'2bc &wiO†΋/mn4}5jN= x]"D'} +W{ڍ^qwب*Ve*@^28y̦.ZU%P^(KiJָ6UA6NSwȳ&lD4mW|%9wJ2.>Q\19:es/@gI%$7$Yje\bYZMƪ-qP/6|KK:,L4_ʔO&,4 m!Q\D9/H+.*$!XkA +[5LL=&tΫnL+S݀G|NOՓ9NC>'՝O.CT~c*O 4۪`9  ᴆ$.p~C$^mYw=lG7Nf)BxQ$}_ +s ]𱄶/Q(vrb"դI%(%E Hd=O0BBqF~c]} (KshӒl?թlNаT뼙UujoXޤaU"IT.WrRE"=B]>\(4KBM=OvgG*<ct-x I1UFAQPs';ܳ dڿ]Z&{e +MmQՇD@ =pK $cpdxުqÐbI ^CހraKھ7/d/4e9/K M $ JѶeF`HiVLiVk& uv=sևГU:}Gd@HX @Ӭ 6)蓃 +,*O:.c-˪k(+_̛ͯ(q` 6O^]Ne g^uQvO raQJb}+~p +̃\ ^AV; CM~KQGҍ̈́g'R⻋jTfEueW=,GeSIIJNCk?q|&,[uղN(,QoTP2m!QAz(hC0Q+pyH^RJ&ix$". kha*NHssl'7$zRcQZJwUUx,a :']sFE50-$ ˂j6l<إ7liX= +U!m+G9!f̼j$Aq9RwLMwF>&"hQgn 5YנHlR@iN'e\reav1#:X.U;XoTkb :%H$ྦྷ o![{_W +K} ?2#^LNM":9A5+bʺ'd!֪rOA1C#)*7,d>:&k]1)&HGBD; +;tC/Yo[C}Nf-~g{^GDXjQœˠ.'kH$ -i_`KG,x:p0e(MH eeIwkCe8J*<*]IB%4ufd +Q6 Œ_,Y$ˬcp$G( MxkOgK`m70 0N#WWݫ + AW̘}%cüaǥ))+Q$$`޹OEv(R0qL d+RэGRzyJqQP^EԻ#e`LKNJ$"NK?s +@ +LhBBw"w +K8;=<& LU,t/xʎ+j MPewGYmP,IzZYG62S^+s<1]bRSw;m LCtAnMc% Ēr_/JB&uATrvUG;)Ba_ +y_瘠O>7za0EdIR1BsNXaܜҾ{rn\_ +ۣ>*p ]?D*^;E*d@&̓QCKâCjcUFp g\YXu&ȺdBގG80 ̀ov.Dqv4HQ:"9h +Ua\n&OuY>"wLCr_Ol>W^rրlL֑-4?.DҼH@ >SX 9yZFR%4[ H9Ro&P0-\MrT24i ݠaQ0^Y2X/BC!G \:IV42"$Am\pa/f`ڧ |O"䚳.uHK8AXGDbbJAmR04φ/`)^7L;u|]sw-H%-Q7"<#ss\"| %m5>JL$e,4@='I&_H";@EСR% K0,t96(С섇0N'K,r\Tlb%VN[Mk蒦I$뱝pE+RSDfs*TI~QX,sc))4\src'ͳŧ +TqI$P@I+XNfi䩆s P0+n҂>:FjH-ut3HWj )`ЯqO&$AT.N03Bk5]ŴH:ۣVN/3/8zv>gȚm.QUg( 16[b0V4pwA.tJ@^T7m 9 A]x<ݻP>ȓm7žn. .G2(-VVLO\n-ɇ{\'H>#) +Wq3?K`*JRS!GRҊ-{f1p Dy!i|FS +#>t#GVRLP2>f[[.k%"_ΐ#8w'kXQJGjf8U+Jeb$bLˎ%_@)r.{Ծ;bń{ pMA9P6 pG=!d2ς+%8bCt*,:jǚ-m!ފi f'ؤb=M>s{sqMŬ`Gq4 Aʎa ޹z>L$;&or/刁H+dgza&#&T`9ƒ1)Y:ԐvJUِRt>o֠ EMȎo F" Hd/:$FsCX9fIM̛7Yf14>2{CBAYӴ+d$#pOVm{׳uB,X]BܥYiS % Jg@ W:EE2m߻x*R,aˮX-O~+3fnUUa;1ȉ*W9<SR*2cA "P@fipyT^2[&5s}wT#[g6BR!٤ڔpslo,oC04w'{|O\aPx%O"*q8@kȃgnol7`!d$;igpj[DF@d{ '̭S͜VD2Ë.&G[,q!CIi;;pa>_O >Alphz%ԣ߬D<%JPH,gHY V >EY< +39 N7W +vFpRxnj #y֥XF"";^ҫ)<ƧûI3r,)IУLCd" ;A dЛ9*^jqq]ijyyۨ"~(%0ֈ0I!ǘ3W3IA"XkZE->GP`YK@0+m5-\3d*n+?VJ\Js+4!yC E#"#D1[B3-Qމo?'(\꿵qc%j2[P.&}^j]ij86܌PŶH͏C<ޫd]УFz]ZC lz'xx0t[ BcAKl 4sD+ଡ଼qVnXeUF% 5(u! xb¹ : gRrJŘaQ HD8Ksx<1 +kgեHz.aWWcƃ f_HW̮:O*BPF@2_&[z=3'@vEgQH?$&[xw{"glQOM㔍Ivq)o)ڙ+a,Ni[pA7c'%$m< zeh"lܣx:b/B<4%N8W f]B!n) #K&iUڃ5ŭ;nYyQ6e-]&WS|v݁SfT﹛wE*r]x^#1X_d|.Y6 xj0>lY! 
TC&)B;m9XсH?QZ hgPI^Mt`0׫r;2nj?l5(&LRAȜvY !pPҦ6= fi _/̪ВB 4eOf P ȰddK+ .r6o aDrrJ(I1g{VPj@ӱx_ْtf< 9TA@؞DDE>`ERo/c9Бl03*V j!:Ϻ.31M˚wT: 5Se`Qn Wڎc9GφjE81Ka3Ot6@!s[S!S(QѳL6p8vsHqDzYp AxH g(VsYLÙ<*S n-!ZY/K[rHx>pPסؖ"V,ː)XH9}3 19qCs8>u#>D3ҥY]ϟ]Q9H(kڏہ@L mb\Z6–ApC=[=)|'x Rh 0s F6`$M1< PHZ<@Xn: ϔ$xFU@E'w D3 ҥmYkh\z6*%v0ƥẘ(z~gbjm5Si͎`0d^72pyOXbE;=7@3rdڥhƦŴ(hm=fO809@iYkS mzrD5' 1a^];x_??'G''~s~pruq'?ʏ8ϫ>.ϯ__=ξ| 곳ۯ>''(xO>9ާO}r..ngn/n7WgWg/N>ƻ胓:t6~5-s}lw: ,Rjֱ.u7~4kejN{w=x< 7;ߙyr?mq}r-ɯ/ᬊBmDn2.0_巌4ΡoIG!|tLwƯ |v +ow 5\X3-R-Iƫ{OȪl/?xG|uWͰy+D.>67盋GO/_bmm=:G?}۳//}r\O~_|znzD\~~qE <'*G)?N体AN'G>0zOG['2//-& +_f>Q2rY;2.ٷo_&x`ηGd;Nի}sqgkO<ӓw?O/^~u{sv{}˗aCs܅_߾΍og'䓋o.^D~gп6/ }/o+ERZB>{v ~ؗz'/_Nc7\㫯#n^|?g_>vio~o.n^9Lx󯾾}uܟ_}mxqOOr >/^F?6 ;yϰo< o}OУ~OɮJ +sy\ (OLRt%Q7DA9 䦜ZG_WyUvcrmHZ]W),A$Y}G_WA}]GWUytU]C{*ˣ<*ʣ誼cZWyU}G_WY}G_WyU}}Ǵʣ<*ʣ]]GWUytU]Ǵ;ӟΏݏTȕ&tbm')eAm\~<OwO W\'4m['[O{<޹CXx-cğpWbJ Ϸm_ת͡%/|.z?5nf6['56V8xn=mȹzjǬ<@y0쫋O7Kb\`;첯a1Oodzء_<|bءFT]:߇8X+ؾ=x̿7BKGo ͍;=U'FϾѶONLMs*A^#y|۷yo3omm7^{oyc :f׵ԄsKEL%9V~f*h94IϢ:dK-hho.^ #N]4&3nZet}o..Dmv5-˃YťyՓ?EOZ"7\-mqR`ӱ?uIp>y:`i\S=87t쫋37לʛr-R8]S|389j֓̚l/~o~=?dkcUj]z{s S_ۄ^W:o_zgn*Za1;̎k񹭕yՌS; +X7c[rq Vmeަgϗo~MŶW瓃)Y[xzO3n-[zksn< wXh~7N6d.meiǁ9kS~׃~Vkovܽl(.,Krs{"lXE\u;٩Z䔶8Mmrqe泗)M8iabr?z\ wW:%{jE,Qlר壍u$Ixmڶߓ9aa't?Mjq26X;`&h~2A:|OsZ{^D aq=6nvL!ǒ3qh恣nWmm04u/i&W[;XUQO +ˀ{Xx7hr}0E'Q9vy|烧EԲ|e~x`hkmi%ofAkVo4T9ǡU*n1>fe4=?L&H=|MnNv/|/}XJjK=j,v^SrJZ>e.Ҵ.Gu a$΄#>y3,NL;l]+Ð37U-Γiഭ:_q=OX Ƶ0M ~FyOkKci7+G=б܆&+౭Ͳ :eR:)EVVm<2vގ_tIܶ[ˬj5 ۫gIr zZ'_f4.ϻc g/޼!9fCnݾξxGzۿ vߟ{ut[^ߧC~Jmׯ_"W8зsy[ӑ<,y\ćP [l.G[$mkof>.ΎyQPnNɎO<$e]ʧz4ŶWw,b=ā3Ju{<{*8~Pc{`9O_B$'omkϷ|mz\´|~L51~zv{wVـbؓۓ={~{>Bu~qqӳ^}uq/ z“o^}ftngn|j=99r%Bc[?ޜ]zyvsqun?;y5~2폽<{yqrkGBFOnTAG~v۝R}׷/_ߞ|~jsū~_\;xQϷO^`oﯮ +]g__yhunmUz :$cmws0il{wF}8\Χ. fg3O>~}{=& 6s ūl72_?o_okg:͛=޶_g7Y*N뫋wob?w﹬: +? ssNΜ߳mp6f; +-ZV,CRGdM_r߼X$=c]OP@y:C1{zOi4 U=C2=p7_rgoEa5y<^׿lD6y$ʵKF<rE=m3l3K׳aWNm!ﰚ&x[o  cD,t>?@uMoXoH?+iR`gdm r体bݹ_+ôNDv^٬K"9ZOWaΩ`5-a8LZ_׬.o[5ݼ{jVo+0P|ړ8`|_4wпV-Ƙ-k0R4YL$o= Ǒۜpw"d 'Bi,-6U 5ρ[3?=g)OQ}WI +Sm,˔v;dD \)ء=ĐUM%šDS”AdӀA=۟hx9!HKxE"nDDOM5Y?*zOe}_˯<.wa0.s b Lw { kh9aVNC0nhꦪOs>dj6l⢓@I1n'g݋kHlx?МDS:PlxO_4f7(tGm! E+Ӄ\zopd uѩ.%mR%>'!t8 YV,/|v;D6m)a6r6SU~Bl4YCIE P㴱d.l2ϋRKn(,w%|Kn~^Z_Gm{@C3{-9Z?z--zؑb ۝ |/Br ,Wό(M.)n6_@2'n|J޾$eoVüsWEڰO3ӁqD]_}?EëAtdvzw,ltmV o Oa+{p/t{6=, *[ҢWo,n\R2,F>nB/%dz8%'x!/i͘/"hi f&@_:cKoF,}t3#^5ajʤ/du@yvVDFo8j~<΄QfG; A[0𾄗BO^ 1? ouĀ+3E@uyY==|nn sl}wx:qbsUA[ĆH;cb9._@sӂJW\k˨aY#ԭOˮMǼd}zvSg|Ѫ! uz!6݉E|9;ܻMޟpfۊ}-w,mdkm6_y7>\r`E2j=zMrlaٱX>N)9<Zr.=6B䪾Q'KW,:}$ o6Q= ߄bh AL1/AIJ+=G슫.ĩ +dA1@Dvċ;R t +f n$p#D"{t*HBuV"xAR豇G5ޢHnBXEJ*W)Ok;UG9:Hg\,Vݍ 29 ȤQ"Q,f:FeI!;_$r6O`Kɑ&Wcy*P5V]-r-_nw2Te䞂Z#=yH=#I_Ur|uV륊,W彼~mm Ӌna';yO:ScɓXK! 
+#RhדXuV{ۣpzg7(#BtX!ST\knRP~FE[U+ޣ\1.ϊSز?2TW*TʋRm/Ar?J'R)Ie){ԨNep *&_R37JʱOU.ٕr3ܔk҄"[PV!oj-գ*,*\Wh>ԢU):nNgBz\Lw*d6vԛYYeR*[WJ֮*jd*jdUf AUƖC;=4nFmS4jR;}_ԱN3u +kQ=ꅫWrSƴ4.˧ xoi@*jjJ덧Ylcʵ|5%; iÃIܗ-bHl:'Oz9ef)tޤZD(miC-JGڟt;sF{2BV̪1e5% Yy\_sϹ m1ܪkH#ZZ}mHA^ +QW[h:.ÊrMx–` +`KELHpD.YYF9|oƖr.p̫ +M1I|B>7>w'y~1Gh~TMW+}ȥCٌlM-, /0ۻp7^752G|NI2 >oآtC[Vݱ1RaSk |:A =?hըؓ}hg7觉C UX0W87]Q1Ok}b[}^+G 3mѰxn\UuFp]F?O;}072f>\lnݴ[fu<k5Fa&f=رVR A2411G+GU7 y LΠ.)gN'rb!(`x7 m1K5 '"UegRrfAķɴrtxFuWJ'1t|6X?:BA\2:X n G8\YqɵONzeA,Śl&'$c[WIv]s@j 2b^ĒtPDƲ5Ͻ嶛Ok(xz?}9,[i=*ܧځ\B냙Ntr9]:Q3P\Z4j1[24$zrC\71ݜTwَIs@<c+S@Jgf=0i~>ܪb"+b@`I9'aJ4yHj b vK]v)[gphlRòٔ$Z)R'4XLpt{k#ҘݥTb Q +Q&lZ nMs)X'ՠْ ~A'^$9[> o?o1T4`N#V ;l26?`T1ب@!@|lGC7D؛%`,N +&9`)HO $]CJ|(!V$ç04)ưs$KA*Ac,DVDpAr&|M AbTWspYsc9)!L|zDJ%y1$l{f.ϒc$V)rc|DT a3M|lavoSRj^u֬h%CJDyCxLJ'HJjFD 9}H$X!1!$D+.U[$>j@)2 쑣0FvG֍ M!Sb&2ך.ⷔ}JUQ0'VKb |*Xpͬ^dc.50]Ao}~{@zACN  L,jtb"&vcA)E +` G<Ŝ9(w3fz|4 yd۽xf%y.2qw2@NM;K+TkPm֕u +- +a.F#^bjn9dȱr#&[B`%1M 5+GC1ztb}?ےЦ_Ԅe80Jm1-<i3|rNԔ~~J,=+_Fm4oݎwisg5gRfY1|guC!;+SgnWK p5`Lk`rGaRpcI:M([ +5{ í²`6.:o =w^ڽqԡVf$/Wxltq+rѶX6?a@NOa@E/@|.ߤ6| T +^F<؛q.bjWϐLv<^bR6SHˬ&5% b.d(_u'N$ #:< +G{uTQ@% omi- +:=Q=ۚ tu>=yC?o[᎟D׿M(*6n6RH{d0˸_r }yRj(iٲ?4ߤ+$ދ/s#e|YE*maq">=^٩6Va:AjD)e[1R50_sK:4/o5EOҤS &9k󽹩D.,tiic֋a=^kꥐ!^"Wn޴2_;[I?ny\,tyi2n"LƁ} D?bq8%șh&o(0&1̵`sT^5 (̊URy(^^<_mE 2\\40<"+c|? M$,?`![5 n9^ D?= aɪ*) . +v/LjKQh0$=aA__=?dǷu}6m2xksn}͠ R.%8.pK|zD☠5FUq1>'PťP5;knR\s>iIWvW1-%Zwc䒏d5q +&$t@n/WpO~TdԈdU z31d/YcsևŬ?xP~|"5-MJQ z< +SAhVWd(m"<-L"| Cb5z'ז%:m(dyWǵqE%5e +Nc!z/!O*4ᇎ8:kx0ZuQbWq\B%dJmK 1-fGݳ6lFe" ܷ\%1pb^|$|Mnmw{e~IKLHBNrH]lG@4=#-Y{ͧPEm:WGےr|qe#H] + + m,PYI$f5^ɔ&H^['TH/N9mNB9VB:$ ?Ҥ%km 0)mCpțmpNsT^Um\3@R73| u}/؁e5}E~΅ +/Ҫ_mDW>P;FKu"ϦnGEPHhB c&-14gGx$Gz 6IUݢbHpPy43\ic_F~9H0m VvݒX2H=@w܆!tbr6uHb4g|Q@#X Zt3 +RHa=gHoՒHcmk- K=,Y*XI.iؚ5r(ƮųRB<Ǔ=Dj =Ý34MW6q+?G5e?CSjp >mӕ%5Bo'D/x́Jgtx~PyԦLM餸O9s)$%'JB}rޭwta(Vd{礔&:1#O{/x:p+\KǔM+O(ࡁ۠KeSi)NjNmJtOd.4c ˭"6'ԙ$}?穾VqS*SQR;.C-3u7NɟO;.KٮJacO 1pĀUO)#(|J.r:X2R9,{,6Xg~lD\-| +:8$Oo#%nrJ%<.sH RD~{9VD +UT錉Q>22T*HB1䖅T&W&Z6A/HXXc "8B_ $4DJ4RYhMQc{GO|&BvR n=s t)NDsE%-b`̽(%-Hԩ04!vxZ3BGͬF9P&~ 57!z&\lE>@?B@w .x$qO@̫9>T33+>cK9Q&LŔ@% +7ݝ:NcJ*67aKƉȐMש7KÛn`S-8'HH-}EtPu2T /_԰xefpo_A^BP}wb$Ӫb)ƖhoȎÌ':fA!WAbB1 lUy ˿*c=G܏}2ѴiURmɥNP/a#xi +]]uLo ,N)b\vh$e x#$q@SV%{}ǂ{-dw KǪL6_h`$ky_=.H8K4Wn!ye(5RB:Q +%ybO7`e> !Yo1,@2TP{17_@9$$a^Ñf B!0G (5g52njRuֆW'=IcՔb.a-^< }nN|]-gֳp>PZcAyxpjCq]HHcm҅C>1HPât%} CEIJݷVX5'}YgE(&ܽ$"4IZ"q@φY$r%8KAy)n2>,ɯlLx5&Ӷ##@=fWoDHgLMb$0=|Kr)(L.]Z=gA-wY"}yb0o.AGyS=gq2~C'AG/ "%2̡Ϡcf ~C'ADz>ȡϠce ~C'A'-Vx-:vTn9tO A,>̡Ϡ#9t"f%5gm v%H/NZ}Z<.Mc5:sE:^#%l3o +ĀÒ6>"*sN8"$"r%%98$ɚ(}[GAcA >W +$&OB$8c2Mhۆ%JvO72!z嚾rClV4`#X*Qo9Īr{'v#ooyY}{@,=:{8x0 x{h;VIAK0 a)q^]n7y`Sӄ`M3J!ԩ_</,Ɏ`Ο<>UN3OC`_=;B){ endstream endobj 1390 0 obj <>stream +)~HiIK_QZm#Ss'K-}vKϗ52m`F;%{x|/eJ'{F ^]qT_ZMeS^d!i&=XE-Ob@WȞ+;wy:ʯH{W^$=Iጨ+6=.h qONǕf7#2.Al?D^̯R$~:uRXww8_2% "2ߋϤ$"3Gd7~y<"8Wy"2qi90E,ל,,aaYX>7.XzfItt7[U^ۇfOyu1RGE2_AT1V/ö=Wz&Qsޅ&}'bƙ:|M>)pY'vB{p0j +s#`X憎ǽẅv>[ڱpwp\_Lh[f}YDP3B33ec $>F'^;D8RH \O'Dh0N8 |ޙQFij{:>",C:Ut`1e>J]"8{"/}6`{G J"$f^X6-cW ֨5<ǰ`N/GFU"1 ],Ա|(!TbFX>2x;72Ra72R!i@s\bs}4CFZ#5exrRطS'z +;/_O}ܒM1q T8DD2$'?H㏏Ib!S0OK|Z Ur#<^ERzAPNIE dLf7mK\`d%n D9ʣNn*{tG=V3(q*x)fUFx-TJDW5x-neñN\LIlkGFX-$mlGΜ-ܽ~rXX‘[ߓcaT>ʍױ:Ic_LS:Az߬$WvN %JroEẘ#/!;!8>꾽*6"u|k H&:#>'}$…J-@'M60fZ˦x?̇t|1Azʦ←_dR#MǷOЕ_˦˥cɦ˥xl:Xf~5ϸaN6_.md=wi~Mgogi鸠^U6ml:$'l_Ql:eV;l:/dl;#_ʦ{_MG V6ݫLd 4 [W7 Wz.Rɭ!1b8ղ}.].Ϸ [tիߢM%IHҰgNq$UH.ޘU @*,%FN"eD&bl(l\8]=(l.{T+tq;=WMߔ.$̝P&D:I$'Ӆ>d(ʌhJREHQ6xܑo/ +I<:fpϕ=\O<׎(\0V* +u/OFgD:)'e3 qH/XDq RG0difO/M3PB=]P^| +zy~Li(Lډn̈́J[!uK#+ɮ7V )7Su.;LRbP nbmuX[V@+ "̘K5>R*vZ 3OPzBgW2S7oI<ݾU<( oK^ +U 
k$So-$OUPΪ%$2ë,ÆDI++jbm*܉e>~ FUJD*}O2pK +wb^a+If?}X;f*9{mȤ:dm%r3Ə`~a7{8OwD@8_[b/rd 当Η }!!u&1IJ:8z -&%=Sc^Ә|IncU4RV$&up5WJb:Ak'h ÐV׸IV1迼[r)jo_qm ^M 3UEV8 |lIór\ + %]pSDNOLK:lZ#&pic e]n]Grj-R + ̭9JssJH] +QFt 7AT&ˊR'J"6&uu U'* ٳ/cWZF:_TDpMj*Vq8CI<6 A*fNJv2B"|JK;:,D@Ee$Lh=UfA˺]d7-$~Yfߢ ?8|W^]q"|}hguU@X_8WvכLZĎuqCvD=>t`9o~R[›%=ճ,x&[9V޷9`t2KŎ_Z.1SՌUf*F)!yϕH*૒/y%_'HfN]c*3o{UEGͮ*Qe.Aw.?z:oQ, {J)2 Pj{H$N(59N@ gJ$ֿd=^@Hgw>|_e +t)_IyT^퓢|=E'E^UdNQ>2gD@( +^>(p}oT|̨KS^ Wn7XOPc wC {oWPYvU?֝]oXu^~xU'XO5##~t/ru]?qz|:q((''GuPxϻOuD^[Oܒ7|o<$.K9I| _of"}]?6owHCϣ$1{cC{J#coMT]c}I{B]?~oU.GZ>}2<GuC̪~B[OxGA'NLuxQO^8u~dS'~Ӄ~Y]?qÐ^?'KiɭdIɊ]O"m7YI'Ɇs&7pDk']?q XRyCGF+O|pZ7*rt%U8+~𞓶;ѻ7OƦ(T4hXŹA]`BZ nAi16ņBPd{tϫ NciձS̒Z-Bz|EPǟR勧iweirL=yeʚΈ3tbM=<]Um^pBٌj؏Vn|@0k#{Wij]tn:]ǡ7Qk kb"myjvuĂDz1 f,Ir>L_tKL˚>h66MW~O{1/,6U2_|$'yYQ]4 UŮDJd4jyIBٌlM!3I'"7Q{"'jQg_:M#g⌤9:k|tMb|" +hJU5/3?UlJf>dۇFM 835V Sۃ?QEwl9'\ښ`ҮtC,F^IIlvOV ;qW'}_B(@u"nMYT:m5W{GZuxg>2|4px`vAA/ND7 T Jkʈ+ u$ ꍯf 8&=(ƇF)J9ш8V,ebfkkB6Ҥ6K꧐& +={v_blQxƣ`# 'LׅmW|d!-4DW=G*(rgZt߄:kuuݬM;mQӌ c$g >5oaCK~F yixr8ֱi@~G.9 >Ŷ]daK0_r.165 %F|Wݧt'>.UML c)l1pA'D;=F[sh(BN,=_,|`4-6>˗񼩫Ga`Y./cz,= "Uwz]+oucJ?ukz3bvDN>B-@Ko \3wLZ`75N ղP`u[Am{ Glє 5N!aQ>QymM4b3n((l^}2zDaڧ:f|azkc{L"Hf钪 ]Sb9yӈHÇqX6J,.o{1(@cQC|~.Dg*4Owr@d^09Sw.0CZaqj|Ћ<23BgW#k G0 .A] d@@\a\PsܭUI \I/̺ %wo\5dsE\5l\z%MTtg{ b tLvcaloN{AƔ 2ԊVZRVC!i:TOnym#[t}6{] &mu"GKd]% +W~WAէ L_Пp@?Jݳ}=Nd]Wo\Ejp + _NTIhm*qO~7@rj$ReBùqH;{mwwxeD#-R^1Rt0i`G!A n|a2 +$M N,OxW}NP~2 "L+@J"0>3/Ї#c3G ֭dژQmlw[>2?D)s +*Ѩ Nл;c\Dqxb~D9̯HQn-{kNg~̯`F^׺1kr`bxc96̯\5\2W%P(+z2;2U̎L]Nmkc?1Qp߰)_sY%I߽ }5FUIeI0`ĊR=H`+'y|#(d$B\9';FC1@ޔyu؆3oҮ.j FޑX N6%1"մ֒Px:2jjux AOL"a1'B Fr^d69 \v$7YTv*[l +ão>2ؐp?AD|ȢG *:-PiGi13=s-Ƿ8$yϰH.}SEHs>mCBM'|pP&Uv!37gB -=~t @mسNN h(td(J& TEc|~OV&X=#y'7`RM$W +w 0Lb7M`kRğm- +-H~{,8c +gflk*ƭn8:KB64؆SJUf._4w8Y@`q:~Xa+{"vtf3hyup`6 uG_˖m&3S]Ϯ>f^؝") C`W + &c ՗y7|RaCzkܩZF0g|Nz14P2eQ0ȵ2 Op]5rRzBp3;w<~'?Zy\{~yZ͓*N} v}|xéo~;^+Gpu.|zpm wK5n\7jOowy O*c;'^ډkϟMA@r?gNhx +/^*|ߟݿt!o} _K?\L+{?p6~f;7|xr^_tÝ­8pҲW>`,@lKߺBۯݹ%N!'2OOy04tWXݯ? nf;g߿{`;/ pm^p\(W>v Lwi~~cg}r+ԥKUv蒙' BO;! +=k دt! +؛wCOs!ߛ{M̂M oμ[߽w7wݽy[>/;[wn^ۜ9+W{7omՑ?+%W13bc.|_}jf0TNyI, nx-^% +.f!7ɗ_p/տ_˕>q;_\§={9&_ ԿW׃ӺovЭd NׄO}Aڙ^=[}WUWO/>o;_~^nI|ް0"{'S>S~没K___;ɸ|lelfއc_YIkJ[|e?׻S,krfm_û~Q?VW!_P =G?]{A~}יV+c +HaƘ&]Y=s{W&'{S{ԋ?C~>Tf™oᵯt7+:9Nv'4]]GOO|anϗzև ~8 ? etsšH+ +^_Mc~q׾eWϿ/W7=Ë׿?ޅ.w'ᇡHߍ_O/}䪃o40?hg}vQ>ktCW}{u)g.\/ɗ&0p'N_T_ +yo|G_ї.*i,JŃGT>-7?W0aXq 9FɃgsΝ~Z8ؘ7=D}W/+K'['}RWNw>ǕgqM៯a5^]wt? _x_>>pgĽC7Qd + ٻ)ܩ;і\>wx8S`z*gKmC|헏}ezk/Pg;7^}ލE?usyZ-rv;첸\gyeܭ3[l|aƻ6L }`u+|u_y'm׼O[Yƻ-.uVe ;տ%˯'zlp*ם~-GSqٶ+q| la;bbz!笙rH$|`}ƌ{w1'sKm!@·~\;i!ѽ'x->y!uy&*Z>!u-x _IvSlOFaEpfn͇펈 #_rV_uO}ωO~.?Xp?+*O&X.PTa0#)PTa(x0wUx_]=$d=q~/.\g@>>db],d]$pēBO_K BY'/%م2cO_x(ev +?~@Y:_^dE7~/Y%>Y_/v[GʣhVK᥷^毒~}pզk0 2oURF-p #Ss!MNŸO=~_דwOm^{?qeߗ7W}W׿ǪyS>Zxܣcĝ~Ƴh|ބr_~u}\]h%Ni?|K둫|X9jޜNqߞXC|Í>\*Jͮ|\ĉWB,~{76"|KE_. c9=T)pϜ  s܍ ^wܾ 1q'Y~$:(sii>CҀ(N^$r_ < U|yر()D_{cϧܼi/W=7OCx<^ǔZ]aT/yy u>mx/?]W*l`b`0!yݜm!'[sH6g[BPn|wݛW~341O=141O?1{GzlNl`,}"#$H6{(6?d~ꝏ$0G +zS5}D E؜:QG?ٛeNPrS(dH : >VQ CU^pkA;./Vcur-QS=eS i{/3 ,G蘄}(b95B B^;5Jrk(nf";Rg~eO{[MGjI-V#Û;hLxQs(G|  \RqMՒ=bkL[Ӱ"/EƜJS6Fq/ #_3?c- NR#D0b@V}(ZluǸCU'+)Te$;jEvȓfu۸0UhbF]K+=4sY${ǒU +4ǎ' ѱ~1 CtlIR@T9oǐyx*zOᚨJߔ*Mt@ ė@po7!dIqdBu X!KȊ@J6;]k_<-<<bFl^vg)VҨKfQec $O[Rӹ˭-GaaBr|ɘ(6"0r0Ɉ$@<GKJ#*=Vyuq.󋝗[7wXsO嫦2O]#D@ohk ]k1yV"~:piѩ m[,(2Xl/KÙ\-Ў>W/j0ٺD"1Tf֟Hdk`:*7…cbc΋U0AXݦ@yMN"Q4W +h@jUU jkD)bHH^y01,_ٌT" $suNeZ!rB1F 꺋fyv3'uʍ“jCû3qY5x. P9^әYl9"pCqHP2ȂX%cYP1TZPMmZq_BŽs6bdh>hx"I~Z 5AL-WG*Ngt5c,h SK4V):}qc\N7fs8 . 
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
new file mode 100644
index 0000000000..ef2615bacc
Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif
new file mode 100644
index 0000000000..2fcc77b2e8
Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png
new file mode 100644
index 0000000000..d5ac639405
Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
new file mode 100644
index 0000000000..2a949311d7
Binary files /dev/null and b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif differ
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt
new file mode 100644
index 0000000000..17d1caeb66
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt
@@ -0,0 +1 @@
+jquery=1.4.2
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
new file mode 100644
index 0000000000..574d6b04f8
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
@@ -0,0 +1,112 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package model
+
+import base.comment._
+
+import reporters.Reporter
+import scala.collection._
+import scala.reflect.internal.util.{NoPosition, Position}
+import scala.language.postfixOps
+
+/** The comment parser transforms raw comment strings into `Comment` objects.
+ * Call `parse` to run the parser. Note that the parser is stateless and + * should only be built once for a given Scaladoc run. + * + * @author Manohar Jonnalagedda + * @author Gilles Dubochet */ +trait CommentFactory extends base.CommentFactoryBase { + thisFactory: ModelFactory with CommentFactory with MemberLookup => + + val global: Global + import global.{ reporter, definitions, Symbol } + + protected val commentCache = mutable.HashMap.empty[(Symbol, TemplateImpl), Comment] + + def addCommentBody(sym: Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): Symbol = { + commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None) + sym + } + + def comment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl): Option[Comment] = { + val key = (sym, inTpl) + if (commentCache isDefinedAt key) + Some(commentCache(key)) + else { + val c = defineComment(sym, currentTpl, inTpl) + if (c isDefined) commentCache += (sym, inTpl) -> c.get + c + } + } + + /** A comment is usualy created by the parser, however for some special + * cases we have to give some `inTpl` comments (parent class for example) + * to the comment of the symbol. + * This function manages some of those cases : Param accessor and Primary constructor */ + def defineComment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl):Option[Comment] = { + + //param accessor case + // We just need the @param argument, we put it into the body + if( sym.isParamAccessor && + inTpl.comment.isDefined && + inTpl.comment.get.valueParams.isDefinedAt(sym.encodedName)) { + val comContent = Some(inTpl.comment.get.valueParams(sym.encodedName)) + Some(createComment(body0 = comContent)) + } + + // Primary constructor case + // We need some content of the class definition : @constructor for the body, + // @param and @deprecated, we can add some more if necessary + else if (sym.isPrimaryConstructor && inTpl.comment.isDefined ) { + val tplComment = inTpl.comment.get + // If there is nothing to put into the comment there is no need to create it + if(tplComment.constructor.isDefined || + tplComment.throws != Map.empty || + tplComment.valueParams != Map.empty || + tplComment.typeParams != Map.empty || + tplComment.deprecated.isDefined + ) + Some(createComment( body0 = tplComment.constructor, + throws0 = tplComment.throws, + valueParams0 = tplComment.valueParams, + typeParams0 = tplComment.typeParams, + deprecated0 = tplComment.deprecated + )) + else None + } + + //other comment cases + // parse function will make the comment + else { + val rawComment = global.expandedDocComment(sym, inTpl.sym).trim + if (rawComment != "") { + val tplOpt = if (currentTpl.isDefined) currentTpl else Some(inTpl) + val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym), tplOpt) + Some(c) + } + else None + } + + } + + protected def parse(comment: String, src: String, pos: Position, inTplOpt: Option[DocTemplateImpl] = None): Comment = { + assert(!inTplOpt.isDefined || inTplOpt.get != null) + parseAtSymbol(comment, src, pos, inTplOpt map (_.sym)) + } + + /** Parses a string containing wiki syntax into a `Comment` object. + * Note that the string is assumed to be clean: + * - Removed Scaladoc start and end markers. + * - Removed start-of-line star and one whitespace afterwards (if present). + * - Removed all end-of-line whitespace. + * - Only `endOfLine` is used to mark line endings. 
*/ + def parseWiki(string: String, pos: Position, inTplOpt: Option[DocTemplateImpl]): Body = { + assert(!inTplOpt.isDefined || inTplOpt.get != null) + parseWikiAtSymbol(string,pos, inTplOpt map (_.sym)) + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala new file mode 100644 index 0000000000..924f203a59 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala @@ -0,0 +1,601 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Manohar Jonnalagedda + * @author Gilles Dubochet + */ + +package scala.tools.nsc +package doc +package model + +import scala.collection._ +import base.comment._ +import diagram._ + +/** An entity in a Scaladoc universe. Entities are declarations in the program and correspond to symbols in the + * compiler. Entities model the following Scala concepts: + * - classes and traits; + * - objects and package; + * - constructors; + * - methods; + * - values, lazy values, and variables; + * - abstract type members and type aliases; + * - type and value parameters; + * - annotations. */ +trait Entity { + /** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName` + * instead. */ + def name : String + + /** The qualified name of the entity. This is this entity's name preceded by the qualified name of the template + * of which this entity is a member. The qualified name is unique to this entity. */ + def qualifiedName: String + + /** The template of which this entity is a member. */ + def inTemplate: TemplateEntity + + /** The list of entities such that each is a member of the entity that follows it; the first entity is always this + * entity, the last the root package entity. */ + def toRoot: List[Entity] + + /** The qualified name of this entity. */ + override def toString = qualifiedName + + /** The Scaladoc universe of which this entity is a member. */ + def universe: Universe + + /** The annotations attached to this entity, if any. */ + def annotations: List[Annotation] + + /** The kind of the entity */ + def kind: String + + /** Whether or not the template was defined in a package object */ + def inPackageObject: Boolean + + /** Indicates whether this entity lives in the types namespace (classes, traits, abstract/alias types) */ + def isType: Boolean +} + +object Entity { + private def isDeprecated(x: Entity) = x match { + case x: MemberEntity => x.deprecation.isDefined + case _ => false + } + /** Ordering deprecated things last. */ + implicit lazy val EntityOrdering: Ordering[Entity] = + Ordering[(Boolean, String)] on (x => (isDeprecated(x), x.name)) +} + +/** A template, which is either a class, trait, object or package. Depending on whether documentation is available + * or not, the template will be modeled as a [scala.tools.nsc.doc.model.NoDocTemplate] or a + * [scala.tools.nsc.doc.model.DocTemplateEntity]. */ +trait TemplateEntity extends Entity { + + /** Whether this template is a package (including the root package). */ + def isPackage: Boolean + + /** Whether this template is the root package. */ + def isRootPackage: Boolean + + /** Whether this template is a trait. */ + def isTrait: Boolean + + /** Whether this template is a class. */ + def isClass: Boolean + + /** Whether this template is an object. */ + def isObject: Boolean + + /** Whether documentation is available for this template. */ + def isDocTemplate: Boolean + + /** Whether this template is a case class. 
*/ + def isCaseClass: Boolean + + /** The self-type of this template, if it differs from the template type. */ + def selfType : Option[TypeEntity] +} + + +/** An entity that is a member of a template. All entities, including templates, are member of another entity + * except for parameters and annotations. Note that all members of a template are modelled, including those that are + * inherited and not declared locally. */ +trait MemberEntity extends Entity { + + /** The comment attached to this member, if any. */ + def comment: Option[Comment] + + /** The group this member is from */ + def group: String + + /** The template of which this entity is a member. */ + def inTemplate: DocTemplateEntity + + /** The list of entities such that each is a member of the entity that follows it; the first entity is always this + * member, the last the root package entity. */ + def toRoot: List[MemberEntity] + + /** The templates in which this member has been declared. The first element of the list is the template that contains + * the currently active declaration of this member, subsequent elements are declarations that have been overriden. If + * the first element is equal to `inTemplate`, the member is declared locally, if not, it has been inherited. All + * elements of this list are in the linearization of `inTemplate`. */ + def inDefinitionTemplates: List[TemplateEntity] + + /** The qualified name of the member in its currently active declaration template. */ + def definitionName: String + + /** The visibility of this member. Note that members with restricted visibility may not be modeled in some + * universes. */ + def visibility: Visibility + + /** The flags that have been set for this entity. The following flags are supported: `implicit`, `sealed`, `abstract`, + * and `final`. */ + def flags: List[Paragraph] + + /** Some deprecation message if this member is deprecated, or none otherwise. */ + def deprecation: Option[Body] + + /** Some migration warning if this member has a migration annotation, or none otherwise. */ + def migration: Option[Body] + + /** For members representing values: the type of the value returned by this member; for members + * representing types: the type itself. */ + def resultType: TypeEntity + + /** Whether this member is a method. */ + def isDef: Boolean + + /** Whether this member is a value (this excludes lazy values). */ + def isVal: Boolean + + /** Whether this member is a lazy value. */ + def isLazyVal: Boolean + + /** Whether this member is a variable. */ + def isVar: Boolean + + /** Whether this member is a constructor. */ + def isConstructor: Boolean + + /** Whether this member is an alias type. */ + def isAliasType: Boolean + + /** Whether this member is an abstract type. */ + def isAbstractType: Boolean + + /** Whether this member is abstract. */ + def isAbstract: Boolean + + /** If this symbol is a use case, the useCaseOf will contain the member it was derived from, containing the full + * signature and the complete parameter descriptions. 
*/ + def useCaseOf: Option[MemberEntity] + + /** If this member originates from an implicit conversion, we set the implicit information to the correct origin */ + def byConversion: Option[ImplicitConversion] + + /** The identity of this member, used for linking */ + def signature: String + + /** Compatibility signature, will be removed from future versions */ + def signatureCompat: String + + /** Indicates whether the member is inherited by implicit conversion */ + def isImplicitlyInherited: Boolean + + /** Indicates whether there is another member with the same name in the template that will take precendence */ + def isShadowedImplicit: Boolean + + /** Indicates whether there are other implicitly inherited members that have similar signatures (and thus they all + * become ambiguous) */ + def isAmbiguousImplicit: Boolean + + /** Indicates whether the implicitly inherited member is shadowed or ambiguous in its template */ + def isShadowedOrAmbiguousImplicit: Boolean +} + +object MemberEntity { + // Oh contravariance, contravariance, wherefore art thou contravariance? + // Note: the above works for both the commonly misunderstood meaning of the line and the real one. + implicit lazy val MemberEntityOrdering: Ordering[MemberEntity] = Entity.EntityOrdering on (x => x) +} + +/** An entity that is parameterized by types */ +trait HigherKinded { + + /** The type parameters of this entity. */ + def typeParams: List[TypeParam] +} + + +/** A template (class, trait, object or package) which is referenced in the universe, but for which no further + * documentation is available. Only templates for which a source file is given are documented by Scaladoc. */ +trait NoDocTemplate extends TemplateEntity { + def kind = + if (isClass) "class" + else if (isTrait) "trait" + else if (isObject) "object" + else "" +} + +/** An inherited template that was not documented in its original owner - example: + * in classpath: trait T { class C } -- T (and implicitly C) are not documented + * in the source: trait U extends T -- C appears in U as a MemberTemplateImpl + * -- that is, U has a member for it but C doesn't get its own page */ +trait MemberTemplateEntity extends TemplateEntity with MemberEntity with HigherKinded { + + /** The value parameters of this case class, or an empty list if this class is not a case class. As case class value + * parameters cannot be curried, the outer list has exactly one element. */ + def valueParams: List[List[ValueParam]] + + /** The direct super-type of this template + e.g: {{{class A extends B[C[Int]] with D[E]}}} will have two direct parents: class B and D + NOTE: we are dropping the refinement here! */ + def parentTypes: List[(TemplateEntity, TypeEntity)] +} + +/** A template (class, trait, object or package) for which documentation is available. Only templates for which + * a source file is given are documented by Scaladoc. */ +trait DocTemplateEntity extends MemberTemplateEntity { + + /** The list of templates such that each is a member of the template that follows it; the first template is always + * this template, the last the root package entity. */ + def toRoot: List[DocTemplateEntity] + + /** The source file in which the current template is defined and the line where the definition starts, if they exist. + * A source file exists for all templates, except for those that are generated synthetically by Scaladoc. */ + def inSource: Option[(io.AbstractFile, Int)] + + /** An HTTP address at which the source of this template is available, if it is available. 
An address is available + * only if the `docsourceurl` setting has been set. */ + def sourceUrl: Option[java.net.URL] + + /** All class, trait and object templates which are part of this template's linearization, in lineratization order. + * This template's linearization contains all of its direct and indirect super-classes and super-traits. */ + def linearizationTemplates: List[TemplateEntity] + + /** All instantiated types which are part of this template's linearization, in lineratization order. + * This template's linearization contains all of its direct and indirect super-types. */ + def linearizationTypes: List[TypeEntity] + + /** All class, trait and object templates for which this template is a direct or indirect super-class or super-trait. + * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */ + def allSubClasses: List[DocTemplateEntity] + + /** All class, trait and object templates for which this template is a *direct* super-class or super-trait. + * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */ + def directSubClasses: List[DocTemplateEntity] + + /** All members of this template. If this template is a package, only templates for which documentation is available + * in the universe (`DocTemplateEntity`) are listed. */ + def members: List[MemberEntity] + + /** All templates that are members of this template. If this template is a package, only templates for which + * documentation is available in the universe (`DocTemplateEntity`) are listed. */ + def templates: List[TemplateEntity with MemberEntity] + + /** All methods that are members of this template. */ + def methods: List[Def] + + /** All values, lazy values and variables that are members of this template. */ + def values: List[Val] + + /** All abstract types that are members of this template. */ + def abstractTypes: List[AbstractType] + + /** All type aliases that are members of this template. */ + def aliasTypes: List[AliasType] + + /** The primary constructor of this class, if it has been defined. */ + def primaryConstructor: Option[Constructor] + + /** All constructors of this class, including the primary constructor. */ + def constructors: List[Constructor] + + /** The companion of this template, or none. If a class and an object are defined as a pair of the same name, the + * other entity of the pair is the companion. 
*/ + def companion: Option[DocTemplateEntity] + + /** The implicit conversions this template (class or trait, objects and packages are not affected) */ + def conversions: List[ImplicitConversion] + + /** The shadowing information for the implicitly added members */ + def implicitsShadowing: Map[MemberEntity, ImplicitMemberShadowing] + + /** Classes that can be implcitly converted to this class */ + def incomingImplicitlyConvertedClasses: List[(DocTemplateEntity, ImplicitConversion)] + + /** Classes to which this class can be implicitly converted to + NOTE: Some classes might not be included in the scaladoc run so they will be NoDocTemplateEntities */ + def outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversion)] + + /** If this template takes place in inheritance and implicit conversion relations, it will be shown in this diagram */ + def inheritanceDiagram: Option[Diagram] + + /** If this template contains other templates, such as classes and traits, they will be shown in this diagram */ + def contentDiagram: Option[Diagram] + + /** Returns the group description taken either from this template or its linearizationTypes */ + def groupDescription(group: String): Option[Body] + + /** Returns the group description taken either from this template or its linearizationTypes */ + def groupPriority(group: String): Int + + /** Returns the group description taken either from this template or its linearizationTypes */ + def groupName(group: String): String +} + +/** A trait template. */ +trait Trait extends MemberTemplateEntity { + def kind = "trait" +} + +/** A class template. */ +trait Class extends MemberTemplateEntity { + override def kind = "class" +} + +/** An object template. */ +trait Object extends MemberTemplateEntity { + def kind = "object" +} + +/** A package template. A package is in the universe if it is declared as a package object, or if it + * contains at least one template. */ +trait Package extends DocTemplateEntity { + + /** The package of which this package is a member. */ + def inTemplate: Package + + /** The package such that each is a member of the package that follows it; the first package is always this + * package, the last the root package. */ + def toRoot: List[Package] + + /** All packages that are member of this package. */ + def packages: List[Package] + + override def kind = "package" +} + + +/** The root package, which contains directly or indirectly all members in the universe. A universe + * contains exactly one root package. */ +trait RootPackage extends Package + + +/** A non-template member (method, value, lazy value, variable, constructor, alias type, and abstract type). */ +trait NonTemplateMemberEntity extends MemberEntity { + /** Whether this member is a use case. A use case is a member which does not exist in the documented code. + * It corresponds to a real member, and provides a simplified, yet compatible signature for that member. */ + def isUseCase: Boolean +} + + +/** A method (`def`) of a template. */ +trait Def extends NonTemplateMemberEntity with HigherKinded { + + /** The value parameters of this method. Each parameter block of a curried method is an element of the list. + * Each parameter block is a list of value parameters. */ + def valueParams : List[List[ValueParam]] + + def kind = "method" +} + + +/** A constructor of a class. */ +trait Constructor extends NonTemplateMemberEntity { + + /** Whether this is the primary constructor of a class. 
The primary constructor is defined syntactically as part of
+    * the declaration of the class. */
+  def isPrimary: Boolean
+
+  /** The value parameters of this constructor. As constructors cannot be curried, the outer list has exactly one
+    * element. */
+  def valueParams: List[List[ValueParam]]
+
+  def kind = "constructor"
+}
+
+
+/** A value (`val`), lazy value (`lazy val`) or variable (`var`) of a template. */
+trait Val extends NonTemplateMemberEntity {
+  def kind = "[lazy] value/variable"
+}
+
+
+/** An abstract type member of a template. */
+trait AbstractType extends MemberTemplateEntity with HigherKinded {
+
+  /** The lower bound for this abstract type, if it has been defined. */
+  def lo: Option[TypeEntity]
+
+  /** The upper bound for this abstract type, if it has been defined. */
+  def hi: Option[TypeEntity]
+
+  def kind = "abstract type"
+}
+
+
+/** A type alias of a template. */
+trait AliasType extends MemberTemplateEntity with HigherKinded {
+
+  /** The type aliased by this type alias. */
+  def alias: TypeEntity
+
+  def kind = "type alias"
+}
+
+
+/** A parameter to an entity. */
+trait ParameterEntity {
+
+  def name: String
+}
+
+
+/** A type parameter to a class, trait, or method. */
+trait TypeParam extends ParameterEntity with HigherKinded {
+
+  /** The variance of this type parameter. Valid values are "+", "-", and the empty string. */
+  def variance: String
+
+  /** The lower bound for this type parameter, if it has been defined. */
+  def lo: Option[TypeEntity]
+
+  /** The upper bound for this type parameter, if it has been defined. */
+  def hi: Option[TypeEntity]
+}
+
+
+/** A value parameter to a constructor or method. */
+trait ValueParam extends ParameterEntity {
+
+  /** The type of this value parameter. */
+  def resultType: TypeEntity
+
+  /** The default value of this value parameter, if it has been defined. */
+  def defaultValue: Option[TreeEntity]
+
+  /** Whether this value parameter is implicit. */
+  def isImplicit: Boolean
+}
+
+
+/** An annotation to an entity. */
+trait Annotation extends Entity {
+
+  /** The class of this annotation. */
+  def annotationClass: TemplateEntity
+
+  /** The arguments passed to the constructor of the annotation class.
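To show how `ValueParam` and `TypeParam` plug into `Def.valueParams`, here is a minimal sketch (not part of the patched sources; it assumes `HigherKinded.typeParams` and `TypeEntity.name` as declared elsewhere in this file, and the rendering format is invented):
{{{
// Hypothetical helper: renders a rough `def` signature from the model.
def renderSignature(d: Def): String = {
  val tps =
    if (d.typeParams.isEmpty) ""
    else d.typeParams.map(tp => tp.variance + tp.name).mkString("[", ", ", "]")
  val vpss = d.valueParams.map { ps =>
    ps.map { p =>
      (if (p.isImplicit) "implicit " else "") + p.name + ": " + p.resultType.name
    }.mkString("(", ", ", ")")
  }.mkString                        // concatenates the curried parameter blocks
  "def " + d.name + tps + vpss
}
}}}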
*/
+  def arguments: List[ValueArgument]
+
+  def kind = "annotation"
+}
+
+/** A trait that signals that the member results from an implicit conversion. */
+trait ImplicitConversion {
+
+  /** The source of the implicit conversion. */
+  def source: DocTemplateEntity
+
+  /** The result type after the conversion. */
+  def targetType: TypeEntity
+
+  /** The components of the implicit conversion type parents. */
+  def targetTypeComponents: List[(TemplateEntity, TypeEntity)]
+
+  /** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise). */
+  def convertorMethod: Either[MemberEntity, String]
+
+  /** A short name of the conversion. */
+  def conversionShortName: String
+
+  /** A qualified name uniquely identifying the conversion (currently: the conversion method's qualified name). */
+  def conversionQualifiedName: String
+
+  /** The entity that performed the conversion. */
+  def convertorOwner: TemplateEntity
+
+  /** The constraints that the transformation puts on the type parameters. */
+  def constraints: List[Constraint]
+
+  /** The members inherited by this implicit conversion. */
+  def members: List[MemberEntity]
+
+  /** Is this a hidden implicit conversion (as specified in the settings)? */
+  def isHiddenConversion: Boolean
+}
+
+/** Shadowing captures the information that a member is shadowed by some other members.
+  * There are two cases of implicitly added member shadowing:
+  * 1) shadowing from an original class member (the class already has that member):
+  *    in this case it won't be possible to call the member directly; the type checker will fail attempting to adapt
+  *    the call arguments (or, if they fit, it will call the original class' method)
+  * 2) shadowing from other possible implicit conversions:
+  *    this will result in an ambiguous implicit conversion error
+  */
+trait ImplicitMemberShadowing {
+  /** The members that shadow the current entry. Use `.inTemplate` to get to the template name. */
+  def shadowingMembers: List[MemberEntity]
+
+  /** The members that make this implicit conversion ambiguous.
+      Note: for ambiguatingMembers you have the following invariant:
+      assert(ambiguatingMembers.forall(_.byConversion.isDefined)) */
+  def ambiguatingMembers: List[MemberEntity]
+
+  def isShadowed: Boolean = !shadowingMembers.isEmpty
+  def isAmbiguous: Boolean = !ambiguatingMembers.isEmpty
+}
+
+/** A trait that encapsulates a constraint necessary for implicit conversion. */
+trait Constraint
+
+/** A constraint involving a type parameter which must be in scope. */
+trait ImplicitInScopeConstraint extends Constraint {
+  /** The type of the implicit value required. */
+  def implicitType: TypeEntity
+
+  /** toString for debugging */
+  override def toString = "an implicit _: " + implicitType.name + " must be in scope"
+}
+
+trait TypeClassConstraint extends ImplicitInScopeConstraint with TypeParamConstraint {
+  /** Type class name */
+  def typeClassEntity: TemplateEntity
+
+  /** toString for debugging */
+  override def toString = typeParamName + " is a class of type " + typeClassEntity.qualifiedName + " (" +
+    typeParamName + ": " + typeClassEntity.name + ")"
+}
+
+trait KnownTypeClassConstraint extends TypeClassConstraint {
+  /** Type explanation, takes the type parameter name and generates the explanation */
+  def typeExplanation: (String) => String
+
+  /** toString for debugging */
+  override def toString = typeExplanation(typeParamName) + " (" + typeParamName + ": " + typeClassEntity.name + ")"
+}
+
+/** A constraint involving a type parameter */
+trait
TypeParamConstraint extends Constraint { + /** The type parameter involved */ + def typeParamName: String +} + +trait EqualTypeParamConstraint extends TypeParamConstraint { + /** The rhs */ + def rhs: TypeEntity + /** toString for debugging */ + override def toString = typeParamName + " is " + rhs.name + " (" + typeParamName + " =:= " + rhs.name + ")" +} + +trait BoundedTypeParamConstraint extends TypeParamConstraint { + /** The lower bound */ + def lowerBound: TypeEntity + + /** The upper bound */ + def upperBound: TypeEntity + + /** toString for debugging */ + override def toString = typeParamName + " is a superclass of " + lowerBound.name + " and a subclass of " + + upperBound.name + " (" + typeParamName + " >: " + lowerBound.name + " <: " + upperBound.name + ")" +} + +trait LowerBoundedTypeParamConstraint extends TypeParamConstraint { + /** The lower bound */ + def lowerBound: TypeEntity + + /** toString for debugging */ + override def toString = typeParamName + " is a superclass of " + lowerBound.name + " (" + typeParamName + " >: " + + lowerBound.name + ")" +} + +trait UpperBoundedTypeParamConstraint extends TypeParamConstraint { + /** The lower bound */ + def upperBound: TypeEntity + + /** toString for debugging */ + override def toString = typeParamName + " is a subclass of " + upperBound.name + " (" + typeParamName + " <: " + + upperBound.name + ")" +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala new file mode 100755 index 0000000000..1272906df5 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala @@ -0,0 +1,58 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Pedro Furlanetto + */ + +package scala.tools.nsc +package doc +package model + +import scala.collection._ + +object IndexModelFactory { + + def makeIndex(universe: Universe): Index = new Index { + + lazy val firstLetterIndex: Map[Char, SymbolMap] = { + + object result extends mutable.HashMap[Char,SymbolMap] { + + /* symbol name ordering */ + implicit def orderingMap = math.Ordering.String + + def addMember(d: MemberEntity) = { + val firstLetter = { + val ch = d.name.head.toLower + if(ch.isLetterOrDigit) ch else '_' + } + val letter = this.get(firstLetter).getOrElse { + immutable.SortedMap[String, SortedSet[MemberEntity]]() + } + val members = letter.get(d.name).getOrElse { + SortedSet.empty[MemberEntity](Ordering.by { _.toString }) + } + d + this(firstLetter) = letter + (d.name -> members) + } + } + + //@scala.annotation.tailrec // TODO + def gather(owner: DocTemplateEntity): Unit = + for(m <- owner.members if m.inDefinitionTemplates.isEmpty || m.inDefinitionTemplates.head == owner) + m match { + case tpl: DocTemplateEntity => + result.addMember(tpl) + gather(tpl) + case non: MemberEntity if !non.isConstructor => + result.addMember(non) + case x @ _ => + } + + gather(universe.rootPackage) + + result.toMap + + } + + } + +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala new file mode 100644 index 0000000000..23259a4ae8 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala @@ -0,0 +1,63 @@ +package scala.tools.nsc +package doc +package model + +import base._ + +/** This trait extracts all required information for documentation from compilation units */ +trait MemberLookup extends base.MemberLookupBase { + thisFactory: ModelFactory => + + import global._ + import 
definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass } + + override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] = + findTemplateMaybe(sym) match { + case Some(tpl) => Some(LinkToTpl(tpl)) + case None => + findTemplateMaybe(site) flatMap { inTpl => + inTpl.members find (_.asInstanceOf[EntityImpl].sym == sym) map (LinkToMember(_, inTpl)) + } + } + + override def chooseLink(links: List[LinkTo]): LinkTo = { + val mbrs = links.collect { + case lm@LinkToMember(mbr: MemberEntity, _) => (mbr, lm) + } + if (mbrs.isEmpty) + links.head + else + mbrs.min(Ordering[MemberEntity].on[(MemberEntity, LinkTo)](_._1))._2 + } + + override def toString(link: LinkTo) = link match { + case LinkToTpl(tpl: EntityImpl) => tpl.sym.toString + case LinkToMember(mbr: EntityImpl, inTpl: EntityImpl) => + mbr.sym.signatureString + " in " + inTpl.sym.toString + case _ => link.toString + } + + override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = { + val sym1 = + if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass + else if (sym.isPackage) + /* Get package object which has associatedFile ne null */ + sym.info.member(newTermName("package")) + else sym + Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src => + val path = src.path + settings.extUrlMapping get path map { url => + LinkToExternal(name, url + "#" + name) + } + } orElse { + // Deprecated option. + settings.extUrlPackageMapping find { + case (pkg, _) => name startsWith pkg + } map { + case (_, url) => LinkToExternal(name, url + "#" + name) + } + } + } + + override def warnNoLink = !settings.docNoLinkWarnings.value +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala new file mode 100644 index 0000000000..1df725636a --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala @@ -0,0 +1,1045 @@ +/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */ + +package scala.tools.nsc +package doc +package model + +import base._ +import base.comment._ +import diagram._ + +import scala.collection._ +import scala.util.matching.Regex + +import symtab.Flags + +import io._ + +import model.{ RootPackage => RootPackageEntity } + +/** This trait extracts all required information for documentation from compilation units */ +class ModelFactory(val global: Global, val settings: doc.Settings) { + thisFactory: ModelFactory + with ModelFactoryImplicitSupport + with ModelFactoryTypeSupport + with DiagramFactory + with CommentFactory + with TreeFactory + with MemberLookup => + + import global._ + import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass } + import rootMirror.{ RootPackage, RootClass, EmptyPackage } + + // Defaults for member grouping, that may be overridden by the template + val defaultGroup = "Ungrouped" + val defaultGroupName = "Ungrouped" + val defaultGroupDesc = None + val defaultGroupPriority = 1000 + + def templatesCount = docTemplatesCache.count(_._2.isDocTemplate) - droppedPackages.size + + private var _modelFinished = false + def modelFinished: Boolean = _modelFinished + private var universe: Universe = null + + def makeModel: Option[Universe] = { + val universe = new Universe { thisUniverse => + thisFactory.universe = thisUniverse + val settings = thisFactory.settings + val rootPackage = modelCreation.createRootPackage + } + _modelFinished = true + // complete the links between model entities, 
everthing that couldn't have been done before + universe.rootPackage.completeModel() + + Some(universe) filter (_.rootPackage != null) + } + + // state: + var ids = 0 + private val droppedPackages = mutable.Set[PackageImpl]() + protected val docTemplatesCache = new mutable.LinkedHashMap[Symbol, DocTemplateImpl] + protected val noDocTemplatesCache = new mutable.LinkedHashMap[Symbol, NoDocTemplateImpl] + def packageDropped(tpl: DocTemplateImpl) = tpl match { + case p: PackageImpl => droppedPackages(p) + case _ => false + } + + def optimize(str: String): String = + if (str.length < 16) str.intern else str + + /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */ + + abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity { + val name = optimize(sym.nameString) + val universe = thisFactory.universe + + // Debugging: + // assert(id != 36, sym + " " + sym.getClass) + //println("Creating entity #" + id + " [" + kind + " " + qualifiedName + "] for sym " + sym.kindString + " " + sym.ownerChain.reverse.map(_.name).mkString(".")) + + def inTemplate: TemplateImpl = inTpl + def toRoot: List[EntityImpl] = this :: inTpl.toRoot + def qualifiedName = name + def annotations = sym.annotations.map(makeAnnotation) + def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject + def isType = sym.name.isTypeName + } + + trait TemplateImpl extends EntityImpl with TemplateEntity { + override def qualifiedName: String = + if (inTemplate == null || inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name) + def isPackage = sym.isPackage + def isTrait = sym.isTrait + def isClass = sym.isClass && !sym.isTrait + def isObject = sym.isModule && !sym.isPackage + def isCaseClass = sym.isCaseClass + def isRootPackage = false + def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this)) + } + + abstract class MemberImpl(sym: Symbol, inTpl: DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity { + lazy val comment = { + // If the current tpl is a DocTemplate, we consider itself as the root for resolving link targets (instead of the + // package the class is in) -- so people can refer to methods directly [[foo]], instead of using [[MyClass.foo]] + // in the doc comment of MyClass + val thisTpl = this match { + case d: DocTemplateImpl => Some(d) + case _ => None + } + if (inTpl != null) thisFactory.comment(sym, thisTpl, inTpl) else None + } + def group = comment flatMap (_.group) getOrElse defaultGroup + override def inTemplate = inTpl + override def toRoot: List[MemberImpl] = this :: inTpl.toRoot + def inDefinitionTemplates = + if (inTpl == null) + List(makeRootPackage) + else + makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) }) + def visibility = { + if (sym.isPrivateLocal) PrivateInInstance() + else if (sym.isProtectedLocal) ProtectedInInstance() + else { + val qual = + if (sym.hasAccessBoundary) + Some(makeTemplate(sym.privateWithin)) + else None + if (sym.isPrivate) PrivateInTemplate(inTpl) + else if (sym.isProtected) ProtectedInTemplate(qual getOrElse inTpl) + else qual match { + case Some(q) => PrivateInTemplate(q) + case None => Public() + } + } + } + def flags = { + val fgs = mutable.ListBuffer.empty[Paragraph] + if (sym.isImplicit) fgs += Paragraph(Text("implicit")) + if (sym.isSealed) fgs += Paragraph(Text("sealed")) + if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract")) + /* Resetting 
the DEFERRED flag is a little trick here for refined types: (example from scala.collections) + * {{{ + * implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] { + * def isParallel = ... + * }}} + * the type the method returns is TraversableOps, which has all-abstract symbols. But in reality, it couldn't have + * any abstract terms, otherwise it would fail compilation. So we reset the DEFERRED flag. */ + if (!sym.isTrait && (sym hasFlag Flags.DEFERRED) && (!isImplicitlyInherited)) fgs += Paragraph(Text("abstract")) + if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final")) + fgs.toList + } + def deprecation = + if (sym.isDeprecated) + Some((sym.deprecationMessage, sym.deprecationVersion) match { + case (Some(msg), Some(ver)) => parseWiki("''(Since version " + ver + ")'' " + msg, NoPosition, Some(inTpl)) + case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl)) + case (None, Some(ver)) => parseWiki("''(Since version " + ver + ")''", NoPosition, Some(inTpl)) + case (None, None) => Body(Nil) + }) + else + comment flatMap { _.deprecated } + def migration = + if(sym.hasMigrationAnnotation) + Some((sym.migrationMessage, sym.migrationVersion) match { + case (Some(msg), Some(ver)) => parseWiki("''(Changed in version " + ver + ")'' " + msg, NoPosition, Some(inTpl)) + case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl)) + case (None, Some(ver)) => parseWiki("''(Changed in version " + ver + ")''", NoPosition, Some(inTpl)) + case (None, None) => Body(Nil) + }) + else + None + + def resultType = { + def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone + case PolyType(_, res) => resultTpe(res) + case MethodType(_, res) => resultTpe(res) + case NullaryMethodType(res) => resultTpe(res) + case _ => tpe + } + val tpe = byConversion.fold(sym.tpe) (_.toType memberInfo sym) + makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym) + } + def isDef = false + def isVal = false + def isLazyVal = false + def isVar = false + def isConstructor = false + def isAliasType = false + def isAbstractType = false + def isAbstract = + // for the explanation of conversion == null see comment on flags + ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (!isImplicitlyInherited)) || + sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic + + def signature = externalSignature(sym) + lazy val signatureCompat = { + + def defParams(mbr: Any): String = mbr match { + case d: MemberEntity with Def => + val paramLists: List[String] = + if (d.valueParams.isEmpty) Nil + else d.valueParams map (ps => ps map (_.resultType.name) mkString ("(",",",")")) + paramLists.mkString + case _ => "" + } + + def tParams(mbr: Any): String = mbr match { + case hk: HigherKinded if !hk.typeParams.isEmpty => + def boundsToString(hi: Option[TypeEntity], lo: Option[TypeEntity]): String = { + def bound0(bnd: Option[TypeEntity], pre: String): String = bnd match { + case None => "" + case Some(tpe) => pre ++ tpe.toString + } + bound0(hi, "<:") ++ bound0(lo, ">:") + } + "[" + hk.typeParams.map(tp => tp.variance + tp.name + tParams(tp) + boundsToString(tp.hi, tp.lo)).mkString(", ") + "]" + case _ => "" + } + + (name + tParams(this) + defParams(this) +":"+ resultType.name).replaceAll("\\s","") // no spaces allowed, they break links + } + // these only apply for NonTemplateMemberEntities + def useCaseOf: Option[MemberEntity] = None + def byConversion: 
Option[ImplicitConversionImpl] = None + def isImplicitlyInherited = false + def isShadowedImplicit = false + def isAmbiguousImplicit = false + def isShadowedOrAmbiguousImplicit = false + } + + /** A template that is not documented at all. The class is instantiated during lookups, to indicate that the class + * exists, but should not be documented (either it's not included in the source or it's not visible) + */ + class NoDocTemplateImpl(sym: Symbol, inTpl: TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with NoDocTemplate { + assert(modelFinished, this) + assert(!(noDocTemplatesCache isDefinedAt sym), (sym, noDocTemplatesCache(sym))) + noDocTemplatesCache += (sym -> this) + def isDocTemplate = false + } + + /** An inherited template that was not documented in its original owner - example: + * in classpath: trait T { class C } -- T (and implicitly C) are not documented + * in the source: trait U extends T -- C appears in U as a MemberTemplateImpl -- that is, U has a member for it + * but C doesn't get its own page + */ + abstract class MemberTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with MemberTemplateEntity { + // no templates cache for this class, each owner gets its own instance + def isDocTemplate = false + lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name) + def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */ + + def parentTypes = + if (sym.isPackage || sym == AnyClass) List() else { + val tps = (this match { + case a: AliasType => sym.tpe.dealias.parents + case a: AbstractType => sym.info.bounds match { + case TypeBounds(lo, RefinedType(parents, decls)) => parents + case TypeBounds(lo, hi) => hi :: Nil + case _ => Nil + } + case _ => sym.tpe.parents + }) map { _.asSeenFrom(sym.thisType, sym) } + makeParentTypes(RefinedType(tps, EmptyScope), Some(this), inTpl) + } + } + + /** The instantiation of `TemplateImpl` triggers the creation of the following entities: + * All ancestors of the template and all non-package members. + */ + abstract class DocTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberTemplateImpl(sym, inTpl) with DocTemplateEntity { + assert(!modelFinished, (sym, inTpl)) + assert(!(docTemplatesCache isDefinedAt sym), sym) + docTemplatesCache += (sym -> this) + + if (settings.verbose.value) + inform("Creating doc template for " + sym) + + override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot + + protected def inSourceFromSymbol(symbol: Symbol) = + if (symbol.sourceFile != null && ! 
symbol.isSynthetic) + Some((symbol.sourceFile, symbol.pos.line)) + else + None + + def inSource = inSourceFromSymbol(sym) + + def sourceUrl = { + def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/") + val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/" + + if (!settings.docsourceurl.isDefault) + inSource map { case (file, _) => + val filePath = fixPath(file.path).replaceFirst("^" + assumedSourceRoot, "").stripSuffix(".scala") + val tplOwner = this.inTemplate.qualifiedName + val tplName = this.name + val patches = new Regex("""€\{(FILE_PATH|TPL_OWNER|TPL_NAME)\}""") + def substitute(name: String): String = name match { + case "FILE_PATH" => filePath + case "TPL_OWNER" => tplOwner + case "TPL_NAME" => tplName + } + val patchedString = patches.replaceAllIn(settings.docsourceurl.value, m => java.util.regex.Matcher.quoteReplacement(substitute(m.group(1))) ) + new java.net.URL(patchedString) + } + else None + } + + protected def linearizationFromSymbol(symbol: Symbol): List[(TemplateEntity, TypeEntity)] = { + symbol.ancestors map { ancestor => + val typeEntity = makeType(symbol.info.baseType(ancestor), this) + val tmplEntity = makeTemplate(ancestor) match { + case tmpl: DocTemplateImpl => tmpl registerSubClass this ; tmpl + case tmpl => tmpl + } + (tmplEntity, typeEntity) + } + } + + lazy val linearization = linearizationFromSymbol(sym) + def linearizationTemplates = linearization map { _._1 } + def linearizationTypes = linearization map { _._2 } + + /* Subclass cache */ + private lazy val subClassesCache = ( + if (sym == AnyRefClass) null + else mutable.ListBuffer[DocTemplateEntity]() + ) + def registerSubClass(sc: DocTemplateEntity): Unit = { + if (subClassesCache != null) + subClassesCache += sc + } + def allSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList + def directSubClasses = allSubClasses.filter(_.parentTypes.map(_._1).contains(this)) + + /* Implcitly convertible class cache */ + private var implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null + def registerImplicitlyConvertibleClass(dtpl: DocTemplateImpl, conv: ImplicitConversionImpl): Unit = { + if (implicitlyConvertibleClassesCache == null) + implicitlyConvertibleClassesCache = mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)]() + implicitlyConvertibleClassesCache += ((dtpl, conv)) + } + + def incomingImplicitlyConvertedClasses: List[(DocTemplateImpl, ImplicitConversionImpl)] = + if (implicitlyConvertibleClassesCache == null) + List() + else + implicitlyConvertibleClassesCache.toList + + // the implicit conversions are generated eagerly, but the members generated by implicit conversions are added + // lazily, on completeModel + val conversions: List[ImplicitConversionImpl] = + if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil + + // members as given by the compiler + lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this)).toList + + // the inherited templates (classes, traits or objects) + val memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this)) + // the direct members (methods, values, vars, types and directly contained templates) + val memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_)) + // the members generated by the symbols in memberSymsEager + val ownMembers = (memberSymsEager.flatMap(makeMember(_, None, this))) + + // all the members that are documentented PLUS the members inherited 
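The `€{...}` substitution performed by `sourceUrl` above maps the `-doc-source-url` setting to a concrete link per file. A minimal, self-contained sketch of the same mechanism (the URL pattern, path and template names below are invented for illustration; the real code additionally quotes replacements with `Matcher.quoteReplacement`):
{{{
import scala.util.matching.Regex

// Same pattern as used by sourceUrl above; the inputs are made up.
val patches       = new Regex("""€\{(FILE_PATH|TPL_OWNER|TPL_NAME)\}""")
val docSourceUrl  = "http://example.org/src€{FILE_PATH}.scala"

val substituted = patches.replaceAllIn(docSourceUrl, m => m.group(1) match {
  case "FILE_PATH" => "/library/scala/Option"
  case "TPL_OWNER" => "scala"
  case "TPL_NAME"  => "Option"
})
// substituted == "http://example.org/src/library/scala/Option.scala"
}}}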
by implicit conversions + var members: List[MemberImpl] = ownMembers + + def templates = members collect { case c: TemplateEntity with MemberEntity => c } + def methods = members collect { case d: Def => d } + def values = members collect { case v: Val => v } + def abstractTypes = members collect { case t: AbstractType => t } + def aliasTypes = members collect { case t: AliasType => t } + + /** + * This is the final point in the core model creation: no DocTemplates are created after the model has finished, but + * inherited templates and implicit members are added to the members at this point. + */ + def completeModel(): Unit = { + // DFS completion + // since alias types and abstract types have no own members, there's no reason for them to call completeModel + if (!sym.isAliasType && !sym.isAbstractType) + for (member <- members) + member match { + case d: DocTemplateImpl => d.completeModel() + case _ => + } + + members :::= memberSymsLazy.map(modelCreation.createLazyTemplateMember(_, this)) + + // compute linearization to register subclasses + linearization + outgoingImplicitlyConvertedClasses + + // the members generated by the symbols in memberSymsEager PLUS the members from the usecases + val allMembers = ownMembers ::: ownMembers.flatMap(_.useCaseOf.map(_.asInstanceOf[MemberImpl])).distinct + implicitsShadowing = makeShadowingTable(allMembers, conversions, this) + // finally, add the members generated by implicit conversions + members :::= conversions.flatMap(_.memberImpls) + } + + var implicitsShadowing = Map[MemberEntity, ImplicitMemberShadowing]() + + lazy val outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversionImpl)] = + conversions flatMap (conv => + if (!implicitExcluded(conv.conversionQualifiedName)) + conv.targetTypeComponents map { + case (template, tpe) => + template match { + case d: DocTemplateImpl if (d != this) => d.registerImplicitlyConvertibleClass(this, conv) + case _ => // nothing + } + (template, tpe, conv) + } + else List() + ) + + override def isDocTemplate = true + private[this] lazy val companionSymbol = + if (sym.isAliasType || sym.isAbstractType) { + inTpl.sym.info.member(sym.name.toTermName) match { + case NoSymbol => NoSymbol + case s => + s.info match { + case ot: OverloadedType => + NoSymbol + case _ => + // that's to navigate from val Foo: FooExtractor to FooExtractor :) + s.info.resultType.typeSymbol + } + } + } + else + sym.companionSymbol + + def companion = + companionSymbol match { + case NoSymbol => None + case comSym if !isEmptyJavaObject(comSym) && (comSym.isClass || comSym.isModule) => + makeTemplate(comSym) match { + case d: DocTemplateImpl => Some(d) + case _ => None + } + case _ => None + } + + def constructors: List[MemberImpl with Constructor] = if (isClass) members collect { case d: Constructor => d } else Nil + def primaryConstructor: Option[MemberImpl with Constructor] = if (isClass) constructors find { _.isPrimary } else None + override def valueParams = + // we don't want params on a class (non case class) signature + if (isCaseClass) primaryConstructor match { + case Some(const) => const.sym.paramss map (_ map (makeValueParam(_, this))) + case None => List() + } + else List.empty + + // These are generated on-demand, make sure you don't call them more than once + def inheritanceDiagram = makeInheritanceDiagram(this) + def contentDiagram = makeContentDiagram(this) + + def groupSearch[T](extractor: Comment => Option[T]): Option[T] = { + val comments = comment +: linearizationTemplates.collect { case 
dtpl: DocTemplateImpl => dtpl.comment } + comments.flatten.map(extractor).flatten.headOption orElse { + Option(inTpl) flatMap (_.groupSearch(extractor)) + } + } + + def groupDescription(group: String): Option[Body] = groupSearch(_.groupDesc.get(group)) orElse { if (group == defaultGroup) defaultGroupDesc else None } + def groupPriority(group: String): Int = groupSearch(_.groupPrio.get(group)) getOrElse { if (group == defaultGroup) defaultGroupPriority else 0 } + def groupName(group: String): String = groupSearch(_.groupNames.get(group)) getOrElse { if (group == defaultGroup) defaultGroupName else group } + } + + abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package { + override def inTemplate = inTpl + override def toRoot: List[PackageImpl] = this :: inTpl.toRoot + override lazy val (inSource, linearization) = { + val representive = sym.info.members.find { + s => s.isPackageObject + } getOrElse sym + (inSourceFromSymbol(representive), linearizationFromSymbol(representive)) + } + def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p } + } + + abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity + + abstract class NonTemplateMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl], + override val useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl) + extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity { + override lazy val comment = { + val inRealTpl = + conversion.fold(Option(inTpl)) { conv => + /* Variable precendence order for implicitly added members: Take the variable defifinitions from ... + * 1. the target of the implicit conversion + * 2. the definition template (owner) + * 3. the current template + */ + findTemplateMaybe(conv.toType.typeSymbol) filterNot (_ == makeRootPackage) orElse ( + findTemplateMaybe(sym.owner) filterNot (_ == makeRootPackage) orElse Option(inTpl) + ) + } + inRealTpl flatMap (thisFactory.comment(sym, None, _)) + } + + override def inDefinitionTemplates = useCaseOf.fold(super.inDefinitionTemplates)(_.inDefinitionTemplates) + + override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name) + lazy val definitionName = { + val qualifiedName = conversion.fold(inDefinitionTemplates.head.qualifiedName)(_.conversionQualifiedName) + optimize(qualifiedName + "#" + name) + } + def isUseCase = useCaseOf.isDefined + override def byConversion: Option[ImplicitConversionImpl] = conversion + override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined } + override def isShadowedImplicit = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isShadowed).getOrElse(false) + override def isAmbiguousImplicit = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isAmbiguous).getOrElse(false) + override def isShadowedOrAmbiguousImplicit = isShadowedImplicit || isAmbiguousImplicit + } + + abstract class NonTemplateParamMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl], + useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl) + extends NonTemplateMemberImpl(sym, conversion, useCaseOf, inTpl) { + def valueParams = { + val info = conversion.fold(sym.info)(_.toType memberInfo sym) + info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) => + if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl) + }} + } + } + + abstract class ParameterImpl(val sym: Symbol, val inTpl: TemplateImpl) extends 
ParameterEntity { + val name = optimize(sym.nameString) + } + + private trait AliasImpl { + def sym: Symbol + def inTpl: TemplateImpl + def alias = makeTypeInTemplateContext(sym.tpe.dealias, inTpl, sym) + } + + private trait TypeBoundsImpl { + def sym: Symbol + def inTpl: TemplateImpl + def lo = sym.info.bounds match { + case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass => + Some(makeTypeInTemplateContext(appliedType(lo, sym.info.typeParams map {_.tpe}), inTpl, sym)) + case _ => None + } + def hi = sym.info.bounds match { + case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass => + Some(makeTypeInTemplateContext(appliedType(hi, sym.info.typeParams map {_.tpe}), inTpl, sym)) + case _ => None + } + } + + trait HigherKindedImpl extends HigherKinded { + def sym: Symbol + def inTpl: TemplateImpl + def typeParams = + sym.typeParams map (makeTypeParam(_, inTpl)) + } + /* ============== MAKER METHODS ============== */ + + /** This method makes it easier to work with the different kinds of symbols created by scalac by stripping down the + * package object abstraction and placing members directly in the package. + * + * Here's the explanation of what we do. The code: + * + * package foo { + * object `package` { + * class Bar + * } + * } + * + * will yield this Symbol structure: + * +---------+ (2) + * | | + * +---------------+ +---------- v ------- | ---+ +--------+ (2) + * | package foo#1 <---(1)---- module class foo#2 | | | | + * +---------------+ | +------------------ | -+ | +------------------- v ---+ | + * | | package object foo#3 <-----(1)---- module class package#4 | | + * | +----------------------+ | | +---------------------+ | | + * +--------------------------+ | | class package$Bar#5 | | | + * | +----------------- | -+ | | + * +------------------- | ---+ | + * | | + * +--------+ + * (1) sourceModule + * (2) you get out of owners with .owner + * + * and normalizeTemplate(Bar.owner) will get us the package, instead of the module class of the package object. + */ + def normalizeTemplate(aSym: Symbol): Symbol = aSym match { + case null | rootMirror.EmptyPackage | NoSymbol => + normalizeTemplate(RootPackage) + case ObjectClass => + normalizeTemplate(AnyRefClass) + case _ if aSym.isPackageObject => + normalizeTemplate(aSym.owner) + case _ if aSym.isModuleClass => + normalizeTemplate(aSym.sourceModule) + case _ => + aSym + } + + /** + * These are all model construction methods. Please do not use them directly, they are calling each other recursively + * starting from makeModel. On the other hand, makeTemplate, makeAnnotation, makeMember, makeType should only be used + * after the model was created (modelFinished=true) otherwise assertions will start failing. + */ + object modelCreation { + + def createRootPackage: PackageImpl = docTemplatesCache.get(RootPackage) match { + case Some(root: PackageImpl) => root + case _ => modelCreation.createTemplate(RootPackage, null) match { + case Some(root: PackageImpl) => root + case _ => sys.error("Scaladoc: Unable to create root package!") + } + } + + /** + * Create a template, either a package, class, trait or object + */ + def createTemplate(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = { + // don't call this after the model finished! 
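To make the package-object flattening performed by `normalizeTemplate` above concrete, here is a tiny hypothetical piece of user code and what happens to it (names are made up for the example):
{{{
// Hypothetical user code being documented:
package object foo {
  class Bar
}
// The compiler models this as package `foo`, its module class, the package
// object `foo.package` and that object's module class. normalizeTemplate
// collapses the package object and the module classes back onto the `foo`
// package symbol, so `Bar` is documented as a member of package `foo`
// rather than of the synthetic `foo.package` object.
}}}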
+ assert(!modelFinished, (aSym, inTpl)) + + def createRootPackageComment: Option[Comment] = + if(settings.docRootContent.isDefault) None + else { + import Streamable._ + Path(settings.docRootContent.value) match { + case f : File => { + val rootComment = closing(f.inputStream())(is => parse(slurp(is), "", NoPosition, Option(inTpl))) + Some(rootComment) + } + case _ => None + } + } + + def createDocTemplate(bSym: Symbol, inTpl: DocTemplateImpl): DocTemplateImpl = { + assert(!modelFinished, (bSym, inTpl)) // only created BEFORE the model is finished + if (bSym.isAliasType && bSym != AnyRefClass) + new DocTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { override def isAliasType = true } + else if (bSym.isAbstractType) + new DocTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType { override def isAbstractType = true } + else if (bSym.isModule) + new DocTemplateImpl(bSym, inTpl) with Object {} + else if (bSym.isTrait) + new DocTemplateImpl(bSym, inTpl) with Trait {} + else if (bSym.isClass || bSym == AnyRefClass) + new DocTemplateImpl(bSym, inTpl) with Class {} + else + sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a documentable template.") + } + + val bSym = normalizeTemplate(aSym) + if (docTemplatesCache isDefinedAt bSym) + return Some(docTemplatesCache(bSym)) + + /* Three cases of templates: + * (1) root package -- special cased for bootstrapping + * (2) package + * (3) class/object/trait + */ + if (bSym == RootPackage) // (1) + Some(new RootPackageImpl(bSym) { + override lazy val comment = createRootPackageComment + override val name = "root" + override def inTemplate = this + override def toRoot = this :: Nil + override def qualifiedName = "_root_" + override def isRootPackage = true + override lazy val memberSyms = + (bSym.info.members ++ EmptyPackage.info.members).toList filter { s => + s != EmptyPackage && s != RootPackage + } + }) + else if (bSym.isPackage) // (2) + if (settings.skipPackage(makeQualifiedName(bSym))) + None + else + inTpl match { + case inPkg: PackageImpl => + val pack = new PackageImpl(bSym, inPkg) {} + // Used to check package pruning works: + //println(pack.qualifiedName) + if (pack.templates.filter(_.isDocTemplate).isEmpty && pack.memberSymsLazy.isEmpty) { + droppedPackages += pack + None + } else + Some(pack) + case _ => + sys.error("'" + bSym + "' must be in a package") + } + else { + // no class inheritance at this point + assert(inOriginalOwner(bSym, inTpl), bSym + " in " + inTpl) + Some(createDocTemplate(bSym, inTpl)) + } + } + + /** + * After the model is completed, no more DocTemplateEntities are created. 
+ * Therefore any symbol that still appears is: + * - MemberTemplateEntity (created here) + * - NoDocTemplateEntity (created in makeTemplate) + */ + def createLazyTemplateMember(aSym: Symbol, inTpl: DocTemplateImpl): MemberImpl = { + + // Code is duplicate because the anonymous classes are created statically + def createNoDocMemberTemplate(bSym: Symbol, inTpl: DocTemplateImpl): MemberTemplateImpl = { + assert(modelFinished) // only created AFTER the model is finished + if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule)) + new MemberTemplateImpl(bSym, inTpl) with Object {} + else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait)) + new MemberTemplateImpl(bSym, inTpl) with Trait {} + else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass)) + new MemberTemplateImpl(bSym, inTpl) with Class {} + else + sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a member template.") + } + + assert(modelFinished) + val bSym = normalizeTemplate(aSym) + + if (docTemplatesCache isDefinedAt bSym) + docTemplatesCache(bSym) + else + docTemplatesCache.get(bSym.owner) match { + case Some(inTpl) => + val mbrs = inTpl.members.collect({ case mbr: MemberImpl if mbr.sym == bSym => mbr }) + assert(mbrs.length == 1) + mbrs.head + case _ => + // move the class completely to the new location + createNoDocMemberTemplate(bSym, inTpl) + } + } + } + + def makeRootPackage: PackageImpl = docTemplatesCache(RootPackage).asInstanceOf[PackageImpl] + + // TODO: Should be able to override the type + def makeMember(aSym: Symbol, conversion: Option[ImplicitConversionImpl], inTpl: DocTemplateImpl): List[MemberImpl] = { + + def makeMember0(bSym: Symbol, useCaseOf: Option[MemberImpl]): Option[MemberImpl] = { + if (bSym.isGetter && bSym.isLazy) + Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val { + override lazy val comment = // The analyser does not duplicate the lazy val's DocDef when it introduces its accessor. + thisFactory.comment(bSym.accessed, None, inTpl.asInstanceOf[DocTemplateImpl]) // This hack should be removed after analyser is fixed. + override def isLazyVal = true + }) + else if (bSym.isGetter && bSym.accessed.isMutable) + Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val { + override def isVar = true + }) + else if (bSym.isMethod && !bSym.hasAccessorFlag && !bSym.isConstructor && !bSym.isModule) { + val cSym = { // This unsightly hack closes issue #4086. 
+ if (bSym == definitions.Object_synchronized) { + val cSymInfo = (bSym.info: @unchecked) match { + case PolyType(ts, MethodType(List(bp), mt)) => + val cp = bp.cloneSymbol.setPos(bp.pos).setInfo(definitions.byNameType(bp.info)) + PolyType(ts, MethodType(List(cp), mt)) + } + bSym.cloneSymbol.setPos(bSym.pos).setInfo(cSymInfo) + } + else bSym + } + Some(new NonTemplateParamMemberImpl(cSym, conversion, useCaseOf, inTpl) with HigherKindedImpl with Def { + override def isDef = true + }) + } + else if (bSym.isConstructor) + if (conversion.isDefined) + None // don't list constructors inherted by implicit conversion + else + Some(new NonTemplateParamMemberImpl(bSym, conversion, useCaseOf, inTpl) with Constructor { + override def isConstructor = true + def isPrimary = sym.isPrimaryConstructor + }) + else if (bSym.isGetter) // Scala field accessor or Java field + Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val { + override def isVal = true + }) + else if (bSym.isAbstractType && !typeShouldDocument(bSym, inTpl)) + Some(new MemberTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType { + override def isAbstractType = true + }) + else if (bSym.isAliasType && !typeShouldDocument(bSym, inTpl)) + Some(new MemberTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { + override def isAliasType = true + }) + else if (!modelFinished && (bSym.isPackage || templateShouldDocument(bSym, inTpl))) + modelCreation.createTemplate(bSym, inTpl) + else + None + } + + if (!localShouldDocument(aSym) || aSym.isModuleClass || aSym.isPackageObject || aSym.isMixinConstructor) + Nil + else { + val allSyms = useCases(aSym, inTpl.sym) map { case (bSym, bComment, bPos) => + docComments.put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898 + bSym + } + + val member = makeMember0(aSym, None) + if (allSyms.isEmpty) + member.toList + else + // Use cases replace the original definitions - SI-5054 + allSyms flatMap { makeMember0(_, member) } + } + } + + def findMember(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = { + normalizeTemplate(aSym.owner) + inTpl.members.find(_.sym == aSym) + } + + def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = { + assert(modelFinished) + docTemplatesCache.get(normalizeTemplate(aSym)).filterNot(packageDropped(_)) + } + + def makeTemplate(aSym: Symbol): TemplateImpl = makeTemplate(aSym, None) + + def makeTemplate(aSym: Symbol, inTpl: Option[TemplateImpl]): TemplateImpl = { + assert(modelFinished) + + def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = + noDocTemplatesCache getOrElse (aSym, new NoDocTemplateImpl(aSym, inTpl)) + + findTemplateMaybe(aSym) getOrElse { + val bSym = normalizeTemplate(aSym) + makeNoDocTemplate(bSym, inTpl getOrElse makeTemplate(bSym.owner)) + } + } + + /** */ + def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = { + val aSym = annot.symbol + new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation { + lazy val annotationClass = + makeTemplate(annot.symbol) + val arguments = { // lazy + def annotArgs = annot.args match { + case Nil => annot.assocs collect { case (_, LiteralAnnotArg(const)) => Literal(const) } + case xs => xs + } + def noParams = annotArgs map (_ => None) + + val params: List[Option[ValueParam]] = annotationClass match { + case aClass: DocTemplateEntity with Class => + (aClass.primaryConstructor map { _.valueParams.head }) match { + case Some(vps) => vps map { 
Some(_) } + case _ => noParams + } + case _ => noParams + } + assert(params.length == annotArgs.length, (params, annotArgs)) + + params zip annotArgs flatMap { case (param, arg) => + makeTree(arg) map { tree => + new ValueArgument { + def parameter = param + def value = tree + } + } + } + } + } + } + + /** */ + def makeTypeParam(aSym: Symbol, inTpl: TemplateImpl): TypeParam = + new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with TypeParam { + def variance: String = { + if (sym hasFlag Flags.COVARIANT) "+" + else if (sym hasFlag Flags.CONTRAVARIANT) "-" + else "" + } + } + + /** */ + def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl): ValueParam = { + makeValueParam(aSym, inTpl, aSym.nameString) + } + + + /** */ + def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl, newName: String): ValueParam = + new ParameterImpl(aSym, inTpl) with ValueParam { + override val name = newName + def defaultValue = + if (aSym.hasDefault) { + // units.filter should return only one element + (currentRun.units filter (_.source.file == aSym.sourceFile)).toList match { + case List(unit) => + // SI-4922 `sym == aSym` is insufficent if `aSym` is a clone of symbol + // of the parameter in the tree, as can happen with type parametric methods. + def isCorrespondingParam(sym: Symbol) = ( + sym != null && + sym != NoSymbol && + sym.owner == aSym.owner && + sym.name == aSym.name && + sym.isParamWithDefault + ) + (unit.body find (t => isCorrespondingParam(t.symbol))) match { + case Some(ValDef(_,_,_,rhs)) => makeTree(rhs) + case _ => None + } + case _ => None + } + } + else None + def resultType = + makeTypeInTemplateContext(aSym.tpe, inTpl, aSym) + def isImplicit = aSym.isImplicit + } + + /** */ + def makeTypeInTemplateContext(aType: Type, inTpl: TemplateImpl, dclSym: Symbol): TypeEntity = { + def ownerTpl(sym: Symbol): Symbol = + if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner) + val tpe = + if (thisFactory.settings.useStupidTypes.value) aType else { + def ownerTpl(sym: Symbol): Symbol = + if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner) + val fixedSym = if (inTpl.sym.isModule) inTpl.sym.moduleClass else inTpl.sym + aType.asSeenFrom(fixedSym.thisType, ownerTpl(dclSym)) + } + makeType(tpe, inTpl) + } + + /** Get the types of the parents of the current class, ignoring the refinements */ + def makeParentTypes(aType: Type, tpl: Option[MemberTemplateImpl], inTpl: TemplateImpl): List[(TemplateEntity, TypeEntity)] = aType match { + case RefinedType(parents, defs) => + val ignoreParents = Set[Symbol](AnyClass, AnyRefClass, ObjectClass) + val filtParents = + // we don't want to expose too many links to AnyRef, that will just be redundant information + tpl match { + case Some(tpl) if (!tpl.sym.isModule && parents.length < 2) || (tpl.sym == AnyValClass) || (tpl.sym == AnyRefClass) || (tpl.sym == AnyClass) => parents + case _ => parents.filterNot((p: Type) => ignoreParents(p.typeSymbol)) + } + + /** Returns: + * - a DocTemplate if the type's symbol is documented + * - a NoDocTemplateMember if the type's symbol is not documented in its parent but in another template + * - a NoDocTemplate if the type's symbol is not documented at all */ + def makeTemplateOrMemberTemplate(parent: Type): TemplateImpl = { + def noDocTemplate = makeTemplate(parent.typeSymbol) + findTemplateMaybe(parent.typeSymbol) match { + case Some(tpl) => tpl + case None => parent match { + case TypeRef(pre, sym, args) => + findTemplateMaybe(pre.typeSymbol) match { + 
case Some(tpl) => findMember(parent.typeSymbol, tpl).collect({case t: TemplateImpl => t}).getOrElse(noDocTemplate) + case None => noDocTemplate + } + case _ => noDocTemplate + } + } + } + + filtParents.map(parent => { + val templateEntity = makeTemplateOrMemberTemplate(parent) + val typeEntity = makeType(parent, inTpl) + (templateEntity, typeEntity) + }) + case _ => + List((makeTemplate(aType.typeSymbol), makeType(aType, inTpl))) + } + + def makeQualifiedName(sym: Symbol, relativeTo: Option[Symbol] = None): String = { + val stop = relativeTo map (_.ownerChain.toSet) getOrElse Set[Symbol]() + var sym1 = sym + val path = new StringBuilder() + // var path = List[Symbol]() + + while ((sym1 != NoSymbol) && (path.isEmpty || !stop(sym1))) { + val sym1Norm = normalizeTemplate(sym1) + if (!sym1.sourceModule.isPackageObject && sym1Norm != RootPackage) { + if (path.length != 0) + path.insert(0, ".") + path.insert(0, sym1Norm.nameString) + // path::= sym1Norm + } + sym1 = sym1.owner + } + + optimize(path.toString) + //path.mkString(".") + } + + def inOriginalOwner(aSym: Symbol, inTpl: TemplateImpl): Boolean = + normalizeTemplate(aSym.owner) == normalizeTemplate(inTpl.sym) + + def templateShouldDocument(aSym: Symbol, inTpl: DocTemplateImpl): Boolean = + (aSym.isTrait || aSym.isClass || aSym.isModule || typeShouldDocument(aSym, inTpl)) && + localShouldDocument(aSym) && + !isEmptyJavaObject(aSym) && + // either it's inside the original owner or we can document it later: + (!inOriginalOwner(aSym, inTpl) || (aSym.isPackageClass || (aSym.sourceFile != null))) + + def membersShouldDocument(sym: Symbol, inTpl: TemplateImpl) = { + // pruning modules that shouldn't be documented + // Why Symbol.isInitialized? Well, because we need to avoid exploring all the space available to scaladoc + // from the classpath -- scaladoc is a hog, it will explore everything starting from the root package unless we + // somehow prune the tree. And isInitialized is a good heuristic for prunning -- if the package was not explored + // during typer and refchecks, it's not necessary for the current application and there's no need to explore it. + (!sym.isModule || sym.moduleClass.isInitialized) && + // documenting only public and protected members + localShouldDocument(sym) && + // Only this class's constructors are part of its members, inherited constructors are not. + (!sym.isConstructor || sym.owner == inTpl.sym) && + // If the @bridge annotation overrides a normal member, show it + !isPureBridge(sym) + } + + def isEmptyJavaObject(aSym: Symbol): Boolean = + aSym.isModule && aSym.isJavaDefined && + aSym.info.members.exists(s => localShouldDocument(s) && (!s.isConstructor || s.owner == aSym)) + + def localShouldDocument(aSym: Symbol): Boolean = + !aSym.isPrivate && (aSym.isProtected || aSym.privateWithin == NoSymbol) && !aSym.isSynthetic + + /** Filter '@bridge' methods only if *they don't override non-bridge methods*. 
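As a rough illustration of the visibility filter implemented by `localShouldDocument` above (the class, package and member names are hypothetical):
{{{
package foo

class Widget {
  def render = "ok"                 // public: documented
  protected def layout() = ()       // protected: documented
  private def cache() = ()          // private: dropped
  private[foo] def internal() = ()  // qualified private: dropped (privateWithin != NoSymbol)
}
// Synthetic members (e.g. compiler-generated accessors) are dropped as well.
}}}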
See SI-5373 for details */ + def isPureBridge(sym: Symbol) = sym.isBridge && sym.allOverriddenSymbols.forall(_.isBridge) + + // the classes that are excluded from the index should also be excluded from the diagrams + def classExcluded(clazz: TemplateEntity): Boolean = settings.hardcoded.isExcluded(clazz.qualifiedName) + + // the implicit conversions that are excluded from the pages should not appear in the diagram + def implicitExcluded(convertorMethod: String): Boolean = settings.hiddenImplicits(convertorMethod) + + // whether or not to create a page for an {abstract,alias} type + def typeShouldDocument(bSym: Symbol, inTpl: DocTemplateImpl) = + (settings.docExpandAllTypes.value && (bSym.sourceFile != null)) || + (bSym.isAliasType || bSym.isAbstractType) && + { val rawComment = global.expandedDocComment(bSym, inTpl.sym) + rawComment.contains("@template") || rawComment.contains("@documentable") } +} + diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala new file mode 100644 index 0000000000..868c2fc3a4 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -0,0 +1,579 @@ +/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL + * + * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them. + * + * @author Vlad Ureche + * @author Adriaan Moors + */ + +package scala.tools.nsc +package doc +package model + +import scala.collection._ +import symtab.Flags + +/** + * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them. + * + * Let's take this as an example: + * {{{ + * object Test { + * class A + * + * class B { + * def foo = 1 + * } + * + * class C extends B { + * def bar = 2 + * class implicit + * } + * + * D def conv(a: A) = new C + * } + * }}} + * + * Overview: + * - scaladoc-ing the above classes, `A` will get two more methods: foo and bar, over its default methods + * - the nested classes (specifically `D` above), abstract types, type aliases and constructor members are not added to + * `A` (see makeMember0 in ModelFactory, last 3 cases) + * - the members added by implicit conversion are always listed under the implicit conversion, not under the class they + * actually come from (`foo` will be listed as coming from the implicit conversion to `C` instead of `B`) - see + * `definitionName` in MemberImpl + * + * Internals: + * TODO: Give an overview here + */ +trait ModelFactoryImplicitSupport { + thisFactory: ModelFactory with ModelFactoryTypeSupport with CommentFactory with TreeFactory => + + import global._ + import global.analyzer._ + import global.definitions._ + import settings.hardcoded + + // debugging: + val DEBUG: Boolean = settings.docImplicitsDebug.value + val ERROR: Boolean = true // currently we show all errors + @inline final def debug(msg: => String) = if (DEBUG) settings.printMsg(msg) + @inline final def error(msg: => String) = if (ERROR) settings.printMsg(msg) + + /** This is a flag that indicates whether to eliminate implicits that cannot be satisfied within the current scope. + * For example, if an implicit conversion requires that there is a Numeric[T] in scope: + * {{{ + * class A[T] + * class B extends A[Int] + * class C extends A[String] + * implicit def enrichA[T: Numeric](a: A[T]): D + * }}} + * For B, no constraints are generated as Numeric[Int] is already in the default scope. 
On the other hand, for the + * conversion from C to D, depending on -implicits-show-all, the conversion can: + * - not be generated at all, since there's no Numeric[String] in scope (if ran without -implicits-show-all) + * - generated with a *weird* constraint, Numeric[String] as the user might add it by hand (if flag is enabled) + */ + class ImplicitNotFound(tpe: Type) extends Exception("No implicit of type " + tpe + " found in scope.") + + /* ============== MAKER METHODS ============== */ + + /** + * Make the implicit conversion objects + * + * A word about the scope of the implicit conversions: currently we look at a very basic context composed of the + * default Scala imports (Predef._ for example) and the companion object of the current class, if one exists. In the + * future we might want to extend this to more complex scopes. + */ + def makeImplicitConversions(sym: Symbol, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] = + // Nothing and Null are somewhat special -- they can be transformed by any implicit conversion available in scope. + // But we don't want that, so we'll simply refuse to find implicit conversions on for Nothing and Null + if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil + else { + val context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit) + + val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams) + var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl)) + // also keep empty conversions, so they appear in diagrams + // conversions = conversions.filter(!_.members.isEmpty) + + // Filter out specialized conversions from array + if (sym == ArrayClass) + conversions = conversions.filterNot((conv: ImplicitConversionImpl) => + hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName)) + + // Filter out non-sensical conversions from value types + if (isPrimitiveValueType(sym.tpe_*)) + conversions = conversions.filter((ic: ImplicitConversionImpl) => + hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName)) + + // Put the visible conversions in front + val (ownConversions, commonConversions) = + conversions.partition(!_.isHiddenConversion) + + ownConversions ::: commonConversions + } + + /** makeImplicitConversion performs the heavier lifting to get the implicit listing: + * - for each possible conversion function (also called view) + * * figures out the final result of the view (to what is our class transformed?) + * * figures out the necessary constraints on the type parameters (such as T <: Int) and the context (such as Numeric[T]) + * * lists all inherited members + * + * What? in details: + * - say we start from a class A[T1, T2, T3, T4] + * - we have an implicit function (view) in scope: + * def enrichA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): EnrichedA + * - A is converted to EnrichedA ONLY if a couple of constraints are satisfied: + * * T1 must be equal to Int + * * T2 must be equal to Foo[Bar[X]] + * * T3 must be upper bounded by Long + * * there must be evidence of Numeric[T4] and a TypeTag[T4] within scope + * - the final type is EnrichedA and A therefore inherits a couple of members from enrichA + * + * How? 
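To make the constraint behaviour described above concrete, here is a small, self-contained variant of the example from the comment (class `D`, the conversion body and the enclosing object are filled in for illustration and are not part of the patched sources):
{{{
import scala.math.Numeric

object Example {
  class A[T]
  class B extends A[Int]
  class C extends A[String]
  class D

  implicit def enrichA[T: Numeric](a: A[T]): D = new D
}
// When scaladoc-ing B, a Numeric[Int] instance exists in the default scope,
// so the members of D are listed under B with no extra constraint.
// When scaladoc-ing C, there is no Numeric[String]: the conversion is either
// omitted, or (with -implicits-show-all) listed with a Numeric[String]
// constraint that the user would have to satisfy by hand.
}}}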
+ * some notes: + * - Scala's type inference will want to solve all type parameters down to actual types, but we only want constraints + * to maintain generality + * - therefore, allViewsFrom wraps type parameters into "untouchable" type variables that only gather constraints, + * but are never solved down to a type + * - these must be reverted back to the type parameters and the constraints must be extracted and simplified (this is + * done by the uniteConstraints and boundedTParamsConstraints. Be sure to check them out + * - we also need to transform implicit parameters in the view's signature into constraints, such that Numeric[T4] + * appears as a constraint + */ + def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] = + if (result.tree == EmptyTree) Nil + else { + // `result` will contain the type of the view (= implicit conversion method) + // the search introduces untouchable type variables, but we want to get back to type parameters + val viewFullType = result.tree.tpe + // set the previously implicit parameters to being explicit + + val (viewSimplifiedType, viewImplicitTypes) = removeImplicitParameters(viewFullType) + + // TODO: Isolate this corner case :) - Predef.<%< and put it in the testsuite + if (viewSimplifiedType.params.length != 1) { + // This is known to be caused by the `<%<` object in Predef: + // {{{ + // sealed abstract class <%<[-From, +To] extends (From => To) with Serializable + // object <%< { + // implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x} + // } + // }}} + // so we just won't generate an implicit conversion for implicit methods that only take implicit parameters + return Nil + } + + // type the view application so we get the exact type of the result (not the formal type) + val viewTree = result.tree.setType(viewSimplifiedType) + val appliedTree = new ApplyImplicitView(viewTree, List(Ident("") setType viewTree.tpe.paramTypes.head)) + val appliedTreeTyped: Tree = { + val newContext = context.makeImplicit(context.ambiguousErrors) + newContext.macrosEnabled = false + val newTyper = global.analyzer.newTyper(newContext) + newTyper.silent(_.typed(appliedTree, EXPRmode, WildcardType), reportAmbiguousErrors = false) match { + + case global.analyzer.SilentResultValue(t: Tree) => t + case global.analyzer.SilentTypeError(err) => + global.reporter.warning(sym.pos, err.toString) + return Nil + } + } + + // now we have the final type: + val toType = wildcardToNothing(typeVarToOriginOrWildcard(appliedTreeTyped.tpe.finalResultType)) + + try { + // Transform bound constraints into scaladoc constraints + val implParamConstraints = makeImplicitConstraints(viewImplicitTypes, sym, context, inTpl) + val boundsConstraints = makeBoundedConstraints(sym.typeParams, constrs, inTpl) + // TODO: no substitution constraints appear in the library and compiler scaladoc. Maybe they can be removed? 
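As a concrete illustration of the constraints assembled just above (all names invented for the example), a view whose parameter type fixes one of the class's type parameters and bounds another should yield an equality constraint and an upper-bound constraint on the documented conversion:

{{{
object demo {
  class A[T1, T2]
  class EnrichedA { def extra: Int = 1 }

  // Documenting A should list `extra` under the conversion to EnrichedA,
  // guarded by the constraints "T1 is Int" and "T2 is a subtype of AnyVal".
  implicit def enrichA[T2 <: AnyVal](a: A[Int, T2]): EnrichedA = new EnrichedA
}
}}}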
+ val substConstraints = makeSubstitutionConstraints(result.subst, inTpl) + val constraints = implParamConstraints ::: boundsConstraints ::: substConstraints + + List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl)) + } catch { + case i: ImplicitNotFound => + //println(" Eliminating: " + toType) + Nil + } + } + + def makeImplicitConstraints(types: List[Type], sym: Symbol, context: Context, inTpl: DocTemplateImpl): List[Constraint] = + types.flatMap((tpe:Type) => { + // TODO: Before creating constraints, map typeVarToOriginOrWildcard on the implicitTypes + val implType = typeVarToOriginOrWildcard(tpe) + val qualifiedName = makeQualifiedName(implType.typeSymbol) + + var available: Option[Boolean] = None + + // see: https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/gm_fr0RKzC4 + // + // println(implType + " => " + implType.isTrivial) + // var tpes: List[Type] = List(implType) + // while (!tpes.isEmpty) { + // val tpe = tpes.head + // tpes = tpes.tail + // tpe match { + // case TypeRef(pre, sym, args) => + // tpes = pre :: args ::: tpes + // println(tpe + " => " + tpe.isTrivial) + // case _ => + // println(tpe + " (of type" + tpe.getClass + ") => " + tpe.isTrivial) + // } + // } + // println("\n") + + // look for type variables in the type. If there are none, we can decide if the implicit is there or not + if (implType.isTrivial) { + try { + context.flushBuffer() /* any errors here should not prevent future findings */ + // TODO: Not sure this is the right thing to do -- seems similar to what scalac should be doing + val context2 = context.make(context.unit, context.tree, sym.owner, context.scope, context.imports) + val search = inferImplicit(EmptyTree, tpe, false, false, context2, false) + context.flushBuffer() /* any errors here should not prevent future findings */ + + available = Some(search.tree != EmptyTree) + } catch { + case _: TypeError => + } + } + + available match { + case Some(true) => + Nil + case Some(false) if (!settings.docImplicitsShowAll.value) => + // if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String]) + throw new ImplicitNotFound(implType) + case _ => + val typeParamNames = sym.typeParams.map(_.name) + + // TODO: This is maybe the worst hack I ever did - it's as dirty as hell, but it seems to work, so until I + // learn more about symbols, it'll have to do. 
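The shape the pattern match below recognizes is an implicit of the form `TC[T]` where `T` is one of the template's own type parameters; those become type-class constraints (worded more helpfully when `TC` appears in `hardcoded.knownTypeClasses`), while anything else falls back to a plain "implicit value in scope" constraint. Reusing the `enrichA` example from the `ImplicitNotFound` comment above, plus an extra class `D` invented here, the three outcomes of the availability check are:

{{{
object demo {
  class A[T]
  class EnrichedA[T]
  implicit def enrichA[T: Numeric](a: A[T]): EnrichedA[T] = new EnrichedA[T]

  class B extends A[Int]     // Numeric[Int] resolves     -> constraint silently satisfied, nothing shown
  class C extends A[String]  // Numeric[String] not found -> ImplicitNotFound, conversion dropped
                             //                              (unless -implicits-show-all is set)
  class D[T] extends A[T]    // Numeric[T] is undecidable -> kept as a type-class constraint on T
}
}}}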
+ implType match { + case TypeRef(pre, sym, List(TypeRef(NoPrefix, targ, Nil))) if (typeParamNames contains targ.name) => + hardcoded.knownTypeClasses.get(qualifiedName) match { + case Some(explanation) => + List(new KnownTypeClassConstraint { + val typeParamName = targ.nameString + lazy val typeExplanation = explanation + lazy val typeClassEntity = makeTemplate(sym) + lazy val implicitType: TypeEntity = makeType(implType, inTpl) + }) + case None => + List(new TypeClassConstraint { + val typeParamName = targ.nameString + lazy val typeClassEntity = makeTemplate(sym) + lazy val implicitType: TypeEntity = makeType(implType, inTpl) + }) + } + case _ => + List(new ImplicitInScopeConstraint{ + lazy val implicitType: TypeEntity = makeType(implType, inTpl) + }) + } + } + }) + + def makeSubstitutionConstraints(subst: TreeTypeSubstituter, inTpl: DocTemplateImpl): List[Constraint] = + (subst.from zip subst.to) map { + case (from, to) => + new EqualTypeParamConstraint { + error("Scaladoc implicits: Unexpected type substitution constraint from: " + from + " to: " + to) + val typeParamName = from.toString + val rhs = makeType(to, inTpl) + } + } + + def makeBoundedConstraints(tparams: List[Symbol], constrs: List[TypeConstraint], inTpl: DocTemplateImpl): List[Constraint] = + (tparams zip constrs) flatMap { + case (tparam, constr) => { + uniteConstraints(constr) match { + case (loBounds, upBounds) => (loBounds filter (_ != NothingClass.tpe), upBounds filter (_ != AnyClass.tpe)) match { + case (Nil, Nil) => + Nil + case (List(lo), List(up)) if (lo == up) => + List(new EqualTypeParamConstraint { + val typeParamName = tparam.nameString + lazy val rhs = makeType(lo, inTpl) + }) + case (List(lo), List(up)) => + List(new BoundedTypeParamConstraint { + val typeParamName = tparam.nameString + lazy val lowerBound = makeType(lo, inTpl) + lazy val upperBound = makeType(up, inTpl) + }) + case (List(lo), Nil) => + List(new LowerBoundedTypeParamConstraint { + val typeParamName = tparam.nameString + lazy val lowerBound = makeType(lo, inTpl) + }) + case (Nil, List(up)) => + List(new UpperBoundedTypeParamConstraint { + val typeParamName = tparam.nameString + lazy val upperBound = makeType(up, inTpl) + }) + case other => + // this is likely an error on the lub/glb side + error("Scaladoc implicits: Error computing lub/glb for: " + (tparam, constr) + ":\n" + other) + Nil + } + } + } + } + + /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */ + + class ImplicitConversionImpl( + val sym: Symbol, + val convSym: Symbol, + val toType: Type, + val constrs: List[Constraint], + inTpl: DocTemplateImpl) + extends ImplicitConversion { + + def source: DocTemplateEntity = inTpl + + def targetType: TypeEntity = makeType(toType, inTpl) + + def convertorOwner: TemplateEntity = + if (convSym != NoSymbol) + makeTemplate(convSym.owner) + else { + error("Scaladoc implicits: " + toString + " = NoSymbol!") + makeRootPackage + } + + def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl) + + def convertorMethod: Either[MemberEntity, String] = { + var convertor: MemberEntity = null + + convertorOwner match { + case doc: DocTemplateImpl => + val convertors = members.collect { case m: MemberImpl if m.sym == convSym => m } + if (convertors.length == 1) + convertor = convertors.head + case _ => + } + if (convertor ne null) + Left(convertor) + else + Right(convSym.nameString) + } + + def conversionShortName = convSym.nameString + + def conversionQualifiedName = makeQualifiedName(convSym) + 
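A consumer of the model, such as the HTML page generator, can resolve this `Either` without caring which case it got; a minimal sketch (the method name is invented):

{{{
def convertorLabel(conv: ImplicitConversion): String = conv.convertorMethod match {
  case Left(member) => member.qualifiedName // the conversion method has its own documented entity, link to it
  case Right(name)  => name                 // no entity available, fall back to the bare method name
}
}}}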
+ lazy val constraints: List[Constraint] = constrs + + lazy val memberImpls: List[MemberImpl] = { + // Obtain the members inherited by the implicit conversion + val memberSyms = toType.members.filter(implicitShouldDocument(_)).toList + + // Debugging part :) + debug(sym.nameString + "\n" + "=" * sym.nameString.length()) + debug(" * conversion " + convSym + " from " + sym.tpe + " to " + toType) + + debug(" -> full type: " + toType) + if (constraints.length != 0) { + debug(" -> constraints: ") + constraints foreach { constr => debug(" - " + constr) } + } + debug(" -> members:") + memberSyms foreach (sym => debug(" - "+ sym.decodedName +" : " + sym.info)) + debug("") + + memberSyms.flatMap({ aSym => + // we can't just pick up nodes from the original template, although that would be very convenient: + // they need the byConversion field to be attached to themselves and the types to be transformed by + // asSeenFrom + + // at the same time, the member itself is in the inTpl, not in the new template -- but should pick up + // variables from the old template. Ugly huh? We'll always create the member inTpl, but it will change + // the template when expanding variables in the comment :) + makeMember(aSym, Some(this), inTpl) + }) + } + + lazy val members: List[MemberEntity] = memberImpls + + def isHiddenConversion = settings.hiddenImplicits(conversionQualifiedName) + + override def toString = "Implcit conversion from " + sym.tpe + " to " + toType + " done by " + convSym + } + + /* ========================= HELPER METHODS ========================== */ + /** + * Computes the shadowing table for all the members in the implicit conversions + * @param members All template's members, including usecases and full signature members + * @param convs All the conversions the template takes part in + * @param inTpl the usual :) + */ + def makeShadowingTable(members: List[MemberImpl], + convs: List[ImplicitConversionImpl], + inTpl: DocTemplateImpl): Map[MemberEntity, ImplicitMemberShadowing] = { + assert(modelFinished) + + val shadowingTable = mutable.Map[MemberEntity, ImplicitMemberShadowing]() + val membersByName: Map[Name, List[MemberImpl]] = members.groupBy(_.sym.name) + val convsByMember = (Map.empty[MemberImpl, ImplicitConversionImpl] /: convs) { + case (map, conv) => map ++ conv.memberImpls.map (_ -> conv) + } + + for (conv <- convs) { + val otherConvMembers: Map[Name, List[MemberImpl]] = convs filterNot (_ == conv) flatMap (_.memberImpls) groupBy (_.sym.name) + + for (member <- conv.memberImpls) { + val sym1 = member.sym + val tpe1 = conv.toType.memberInfo(sym1) + + // check if it's shadowed by a member in the original class. + val shadowed = membersByName.get(sym1.name).toList.flatten filter { other => + !settings.docImplicitsSoundShadowing.value || !isDistinguishableFrom(tpe1, inTpl.sym.info.memberInfo(other.sym)) + } + + // check if it's shadowed by another conversion. 
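Both situations show up in practice. In the sketch below (names invented), `describe` added through either conversion is shadowed by `Target`'s own `describe`, and the two conversions also make it ambiguous with each other, which is exactly what these two checks detect:

{{{
object demo {
  class Target { def describe: String = "original" }           // shadows the converted members
  class RichA(t: Target) { def describe: String = "via RichA" }
  class RichB(t: Target) { def describe: String = "via RichB" } // ambiguous with RichA.describe
  implicit def toRichA(t: Target): RichA = new RichA(t)
  implicit def toRichB(t: Target): RichB = new RichB(t)
}
}}}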
+ val ambiguous = otherConvMembers.get(sym1.name).toList.flatten filter { other => + val tpe2 = convsByMember(other).toType.memberInfo(other.sym) + !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1) + } + + // we finally have the shadowing info + if (!shadowed.isEmpty || !ambiguous.isEmpty) { + val shadowing = new ImplicitMemberShadowing { + def shadowingMembers: List[MemberEntity] = shadowed + def ambiguatingMembers: List[MemberEntity] = ambiguous + } + + shadowingTable += (member -> shadowing) + } + } + } + + shadowingTable.toMap + } + + + /** + * uniteConstraints takes a TypeConstraint instance and simplifies the constraints inside + * + * Normally TypeConstraint contains multiple lower and upper bounds, and we want to reduce this to a lower and an + * upper bound. Here are a couple of catches we need to be aware of: + * - before finding a view (implicit method in scope that maps class A[T1,T2,.. Tn] to something else) the type + * parameters are transformed into "untouchable" type variables so that type inference does not attempt to + * fully solve them down to a type but rather constrains them on both sides just enough for the view to be + * applicable -- now, we want to transform those type variables back to the original type parameters + * - some of the bounds fail type inference and therefore refer to Nothing => when performing unification (lub, glb) + * they start looking ugly => we (unsoundly) transform Nothing to WildcardType so we fool the unification algorithms + * into thinking there's nothing there + * - we don't want the wildcard types surviving the unification so we replace them back to Nothings + */ + def uniteConstraints(constr: TypeConstraint): (List[Type], List[Type]) = + try { + (List(wildcardToNothing(lub(constr.loBounds map typeVarToOriginOrWildcard))), + List(wildcardToNothing(glb(constr.hiBounds map typeVarToOriginOrWildcard)))) + } catch { + // does this actually ever happen? 
(probably when type vars occur in the bounds) + case x: Throwable => (constr.loBounds.distinct, constr.hiBounds.distinct) + } + + /** + * Make implicits explicit - Not used curently + */ + // object implicitToExplicit extends TypeMap { + // def apply(tp: Type): Type = mapOver(tp) match { + // case MethodType(params, resultType) => + // MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType) + // case other => + // other + // } + // } + + /** + * removeImplicitParameters transforms implicit parameters from the view result type into constraints and + * returns the simplified type of the view + * + * for the example view: + * implicit def enrichMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T] + * the implicit view result type is: + * (a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T] + * and the simplified type will be: + * MyClass[T] => EnrichedMyClass[T] + */ + def removeImplicitParameters(viewType: Type): (Type, List[Type]) = { + + val params = viewType.paramss.flatten + val (normalParams, implParams) = params.partition(!_.isImplicit) + val simplifiedType = MethodType(normalParams, viewType.finalResultType) + val implicitTypes = implParams.map(_.tpe) + + (simplifiedType, implicitTypes) + } + + /** + * typeVarsToOriginOrWildcard transforms the "untouchable" type variables into either their origins (the original + * type parameters) or into wildcard types if nothing matches + */ + object typeVarToOriginOrWildcard extends TypeMap { + def apply(tp: Type): Type = mapOver(tp) match { + case tv: TypeVar => + if (tv.constr.inst.typeSymbol == NothingClass) + WildcardType + else + tv.origin //appliedType(tv.origin.typeConstructor, tv.typeArgs map this) + case other => + if (other.typeSymbol == NothingClass) + WildcardType + else + other + } + } + + /** + * wildcardToNothing transforms wildcard types back to Nothing + */ + object wildcardToNothing extends TypeMap { + def apply(tp: Type): Type = mapOver(tp) match { + case WildcardType => + NothingClass.tpe + case other => + other + } + } + + /** implicitShouldDocument decides whether a member inherited by implicit conversion should be documented */ + def implicitShouldDocument(aSym: Symbol): Boolean = { + // We shouldn't document: + // - constructors + // - common methods (in Any, AnyRef, Object) as they are automatically removed + // - private and protected members (not accessible following an implicit conversion) + // - members starting with _ (usually reserved for internal stuff) + localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != AnyValClass) && + (aSym.owner != AnyClass) && (aSym.owner != ObjectClass) && + (!aSym.isProtected) && (!aSym.isPrivate) && (!aSym.name.startsWith("_")) && + (aSym.isMethod || aSym.isGetter || aSym.isSetter) && + (aSym.nameString != "getClass") + } + + /* To put it very bluntly: checks if you can call implicitly added method with t1 when t2 is already there in the + * class. 
We suppose the name of the two members coincides + * + * The trick here is that the resultType does not matter - the condition for removal it that paramss have the same + * structure (A => B => C may not override (A, B) => C) and that all the types involved are + * of the implcit conversion's member are subtypes of the parent members' parameters */ + def isDistinguishableFrom(t1: Type, t2: Type): Boolean = { + // Vlad: I tried using matches but it's not exactly what we need: + // (p: AnyRef)AnyRef matches ((t: String)AnyRef returns false -- but we want that to be true + // !(t1 matches t2) + if (t1.paramss.map(_.length) == t2.paramss.map(_.length)) { + for ((t1p, t2p) <- t1.paramss.flatten zip t2.paramss.flatten) + if (!isSubType(t1 memberInfo t1p, t2 memberInfo t2p)) + return true // if on the corresponding parameter you give a type that is in t1 but not in t2 + // def foo(a: Either[Int, Double]): Int = 3 + // def foo(b: Left[T1]): Int = 6 + // a.foo(Right(4.5d)) prints out 3 :) + false + } else true // the member structure is different foo(3, 5) vs foo(3)(5) + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala new file mode 100644 index 0000000000..99e9059d79 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala @@ -0,0 +1,315 @@ +/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */ + +package scala.tools.nsc +package doc +package model + +import base._ +import diagram._ + +import scala.collection._ + +/** This trait extracts all required information for documentation from compilation units */ +trait ModelFactoryTypeSupport { + thisFactory: ModelFactory + with ModelFactoryImplicitSupport + with ModelFactoryTypeSupport + with DiagramFactory + with CommentFactory + with TreeFactory + with MemberLookup => + + import global._ + import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass } + + protected val typeCache = new mutable.LinkedHashMap[Type, TypeEntity] + + /** */ + def makeType(aType: Type, inTpl: TemplateImpl): TypeEntity = { + def createTypeEntity = new TypeEntity { + private var nameBuffer = new StringBuilder + private var refBuffer = new immutable.TreeMap[Int, (LinkTo, Int)] + private def appendTypes0(types: List[Type], sep: String): Unit = types match { + case Nil => + case tp :: Nil => + appendType0(tp) + case tp :: tps => + appendType0(tp) + nameBuffer append sep + appendTypes0(tps, sep) + } + + private def appendType0(tpe: Type): Unit = tpe match { + /* Type refs */ + case tp: TypeRef if definitions.isFunctionType(tp) => + val args = tp.normalize.typeArgs + nameBuffer append '(' + appendTypes0(args.init, ", ") + nameBuffer append ") ⇒ " + appendType0(args.last) + case tp: TypeRef if definitions.isScalaRepeatedParamType(tp) => + appendType0(tp.args.head) + nameBuffer append '*' + case tp: TypeRef if definitions.isByNameParamType(tp) => + nameBuffer append "⇒ " + appendType0(tp.args.head) + case tp: TypeRef if definitions.isTupleType(tp) => + val args = tp.normalize.typeArgs + nameBuffer append '(' + appendTypes0(args, ", ") + nameBuffer append ')' + case TypeRef(pre, aSym, targs) => + val preSym = pre.widen.typeSymbol + + // SI-3314/SI-4888: Classes, Traits and Types can be inherited from a template to another: + // class Enum { abstract class Value } + // class Day extends Enum { object Mon extends Value /*...*/ } + // ===> in such cases we have two options: + // (0) if there's no inheritance taking 
place (Enum#Value) we can link to the template directly + // (1) if we generate the doc template for Day, we can link to the correct member + // (2) If the symbol comes from an external library for which we know the documentation URL, point to it. + // (3) if we don't generate the doc template, we should at least indicate the correct prefix in the tooltip + val bSym = normalizeTemplate(aSym) + val owner = + if ((preSym != NoSymbol) && /* it needs a prefix */ + (preSym != bSym.owner) && /* prefix is different from owner */ + (aSym == bSym)) /* normalization doesn't play tricks on us */ + preSym + else + bSym.owner + + val link = + findTemplateMaybe(bSym) match { + case Some(bTpl) if owner == bSym.owner => + // (0) the owner's class is linked AND has a template - lovely + bTpl match { + case dtpl: DocTemplateEntity => new LinkToTpl(dtpl) + case _ => new Tooltip(bTpl.qualifiedName) + } + case _ => + val oTpl = findTemplateMaybe(owner) + (oTpl, oTpl flatMap (findMember(bSym, _))) match { + case (Some(oTpl), Some(bMbr)) => + // (1) the owner's class + LinkToMember(bMbr, oTpl) + case _ => + val name = makeQualifiedName(bSym) + if (!bSym.owner.isPackage) + Tooltip(name) + else + findExternalLink(bSym, name).getOrElse ( + // (3) if we couldn't find neither the owner nor external URL to link to, show a tooltip with the qualified name + Tooltip(name) + ) + } + } + + // SI-4360 Showing prefixes when necessary + // We check whether there's any directly accessible type with the same name in the current template OR if the + // type is inherited from one template to another. There may be multiple symbols with the same name in scope, + // but we won't show the prefix if our symbol is among them, only if *it's not* -- that's equal to showing + // the prefix only for ambiguous references, not for overloaded ones. + def needsPrefix: Boolean = { + if ((owner != bSym.owner || preSym.isRefinementClass) && (normalizeTemplate(owner) != inTpl.sym)) + return true + // don't get tricked into prefixng method type params and existentials: + // I tried several tricks BUT adding the method for which I'm creating the type => that simply won't scale, + // as ValueParams are independent of their parent member, and I really don't want to add this information to + // all terms, as we're already over the allowed memory footprint + if (aSym.isTypeParameterOrSkolem || aSym.isExistentiallyBound /* existential or existential skolem */) + return false + + for (tpl <- inTpl.sym.ownerChain) { + tpl.info.member(bSym.name) match { + case NoSymbol => + // No syms with that name, look further inside the owner chain + case sym => + // Symbol found -- either the correct symbol, another one OR an overloaded alternative + if (sym == bSym) + return false + else sym.info match { + case OverloadedType(owner, alternatives) => + return alternatives.contains(bSym) + case _ => + return true + } + } + } + // if it's not found in the owner chain, we can safely leave out the prefix + false + } + + val prefix = + if (!settings.docNoPrefixes.value && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) { + if (!owner.isRefinementClass) { + val qName = makeQualifiedName(owner, Some(inTpl.sym)) + if (qName != "") qName + "." 
else "" + } + else { + nameBuffer append "(" + appendType0(pre) + nameBuffer append ")#" + "" // we already appended the prefix + } + } else "" + + //DEBUGGING: + //if (makeQualifiedName(bSym) == "pack1.A") println("needsPrefix(" + bSym + ", " + owner + ", " + inTpl.qualifiedName + ") => " + needsPrefix + " and prefix=" + prefix) + + val name = prefix + bSym.nameString + val pos0 = nameBuffer.length + refBuffer += pos0 -> ((link, name.length)) + nameBuffer append name + + if (!targs.isEmpty) { + nameBuffer append '[' + appendTypes0(targs, ", ") + nameBuffer append ']' + } + /* Refined types */ + case RefinedType(parents, defs) => + val ignoreParents = Set[Symbol](AnyClass, ObjectClass) + val filtParents = parents filterNot (x => ignoreParents(x.typeSymbol)) match { + case Nil => parents + case ps => ps + } + appendTypes0(filtParents, " with ") + // XXX Still todo: properly printing refinements. + // Since I didn't know how to go about displaying a multi-line type, I went with + // printing single method refinements (which should be the most common) and printing + // the number of members if there are more. + defs.toList match { + case Nil => () + case x :: Nil => nameBuffer append (" { " + x.defString + " }") + case xs => nameBuffer append (" { ... /* %d definitions in type refinement */ }" format xs.size) + } + /* Eval-by-name types */ + case NullaryMethodType(result) => + nameBuffer append '⇒' + appendType0(result) + + /* Polymorphic types */ + case PolyType(tparams, result) => assert(tparams.nonEmpty) + def typeParamsToString(tps: List[Symbol]): String = if (tps.isEmpty) "" else + tps.map{tparam => + tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams) + }.mkString("[", ", ", "]") + nameBuffer append typeParamsToString(tparams) + appendType0(result) + + case et@ExistentialType(quantified, underlying) => + + def appendInfoStringReduced(sym: Symbol, tp: Type): Unit = { + if (sym.isType && !sym.isAliasType && !sym.isClass) { + tp match { + case PolyType(tparams, _) => + nameBuffer append "[" + appendTypes0(tparams.map(_.tpe), ", ") + nameBuffer append "]" + case _ => + } + tp.resultType match { + case rt @ TypeBounds(_, _) => + appendType0(rt) + case rt => + nameBuffer append " <: " + appendType0(rt) + } + } else { + // fallback to the Symbol infoString + nameBuffer append sym.infoString(tp) + } + } + + def appendClauses = { + nameBuffer append " forSome {" + var first = true + for (sym <- quantified) { + if (!first) { nameBuffer append ", " } else first = false + if (sym.isSingletonExistential) { + nameBuffer append "val " + nameBuffer append tpnme.dropSingletonName(sym.name) + nameBuffer append ": " + appendType0(dropSingletonType(sym.info.bounds.hi)) + } else { + if (sym.flagString != "") nameBuffer append (sym.flagString + " ") + if (sym.keyString != "") nameBuffer append (sym.keyString + " ") + nameBuffer append sym.varianceString + nameBuffer append sym.nameString + appendInfoStringReduced(sym, sym.info) + } + } + nameBuffer append "}" + } + + underlying match { + case TypeRef(pre, sym, args) if et.isRepresentableWithWildcards => + appendType0(typeRef(pre, sym, Nil)) + nameBuffer append "[" + var first = true + val qset = quantified.toSet + for (arg <- args) { + if (!first) { nameBuffer append ", " } else first = false + arg match { + case TypeRef(_, sym, _) if (qset contains sym) => + nameBuffer append "_" + appendInfoStringReduced(sym, sym.info) + case arg => + appendType0(arg) + } + } + nameBuffer append "]" + case MethodType(_, _) | NullaryMethodType(_) | 
PolyType(_, _) => + nameBuffer append "(" + appendType0(underlying) + nameBuffer append ")" + appendClauses + case _ => + appendType0(underlying) + appendClauses + } + + case tb@TypeBounds(lo, hi) => + if (tb.lo != TypeBounds.empty.lo) { + nameBuffer append " >: " + appendType0(lo) + } + if (tb.hi != TypeBounds.empty.hi) { + nameBuffer append " <: " + appendType0(hi) + } + // case tpen: ThisType | SingleType | SuperType => + // if (tpen.isInstanceOf[ThisType] && tpen.asInstanceOf[ThisType].sym.isEffectiveRoot) { + // appendType0 typeRef(NoPrefix, sym, Nil) + // } else { + // val underlying = + // val pre = underlying.typeSymbol.skipPackageObject + // if (pre.isOmittablePrefix) pre.fullName + ".type" + // else prefixString + "type" + case tpen@ThisType(sym) => + appendType0(typeRef(NoPrefix, sym, Nil)) + nameBuffer append ".this" + if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type" + case tpen@SuperType(thistpe, supertpe) => + nameBuffer append "super[" + appendType0(supertpe) + nameBuffer append "]" + case tpen@SingleType(pre, sym) => + appendType0(typeRef(pre, sym, Nil)) + if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type" + case tpen => + nameBuffer append tpen.toString + } + appendType0(aType) + val refEntity = refBuffer + val name = optimize(nameBuffer.toString) + nameBuffer = null + } + + // SI-4360: Entity caching depends on both the type AND the template it's in, as the prefixes might change for the + // same type based on the template the type is shown in. + if (settings.docNoPrefixes.value) + typeCache.getOrElseUpdate(aType, createTypeEntity) + else createTypeEntity + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala new file mode 100644 index 0000000000..5b4ec4a40b --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala @@ -0,0 +1,27 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Chris James + */ + +package scala.tools.nsc +package doc +package model + +import scala.collection._ + + +/** A fragment of code. */ +abstract class TreeEntity { + + /** The human-readable representation of this abstract syntax tree. */ + def expression: String + + /** Maps which parts of this syntax tree's name reference entities. The map is indexed by the position of the first + * character that reference some entity, and contains the entity and the position of the last referenced + * character. The referenced character ranges do not to overlap or nest. The map is sorted by position. */ + def refEntity: SortedMap[Int, (Entity, Int)] + + /** The human-readable representation of this abstract syntax tree. 
*/ + override def toString = expression + +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala new file mode 100755 index 0000000000..b972649194 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala @@ -0,0 +1,96 @@ +package scala.tools.nsc +package doc +package model + +import scala.collection._ +import scala.reflect.internal.util.{RangePosition, OffsetPosition, SourceFile} + +/** The goal of this trait is , using makeTree, + * to browse a tree to + * 1- have the String of the complete tree (tree.expression) + * 2- fill references to create hyperLinks later in html.pageTemplate + * + * It is applied in ModelFactory => makeTree + * + */ + +trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory => + + val global: Global + import global._ + + def makeTree(rhs: Tree): Option[TreeEntity] = { + + val expr = new StringBuilder + var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end) + + rhs.pos match { + case pos: RangePosition => { + val source: SourceFile = pos.source + val firstIndex = pos.startOrPoint + val lastIndex = pos.endOrPoint + + assert(firstIndex < lastIndex, "Invalid position indices for tree " + rhs + " (" + firstIndex + ", " + lastIndex + ")") + expr.appendAll(source.content, firstIndex, lastIndex - firstIndex) + + val traverser = new Traverser { + + /** Finds the Entity on which we will later create a link on, + * stores it in tree.refs with its position + */ + def makeLink(rhs: Tree){ + val start = pos.startOrPoint - firstIndex + val end = pos.endOrPoint - firstIndex + if(start != end) { + var asym = rhs.symbol + if (asym.isClass) makeTemplate(asym) match{ + case docTmpl: DocTemplateImpl => + refs += ((start, (docTmpl,end))) + case _ => + } + else if (asym.isTerm && asym.owner.isClass){ + if (asym.isSetter) asym = asym.getter(asym.owner) + makeTemplate(asym.owner) match { + case docTmpl: DocTemplateImpl => + val mbrs: Option[MemberImpl] = findMember(asym, docTmpl) + mbrs foreach { mbr => refs += ((start, (mbr,end))) } + case _ => + } + } + } + } + /** + * Goes through the tree and makes links when a Select occurs, + * The case of New(_) is ignored because the object we want to create a link on + * will be reached with recursivity and we don't want a link on the "new" string + * If a link is not created, its case is probably not defined in here + */ + override def traverse(tree: Tree) = tree match { + case Select(qualifier, name) => + qualifier match { + case New(_) => + case _ => makeLink(tree) + } + traverse(qualifier) + case Ident(_) => makeLink(tree) + case _ => + super.traverse(tree) + } + } + + traverser.traverse(rhs) + + Some(new TreeEntity { + val expression = expr.toString + val refEntity = refs + }) + } + case pos: OffsetPosition => + Some(new TreeEntity { + val expression = rhs.toString + val refEntity = new immutable.TreeMap[Int, (Entity, Int)] + }) + case _ => None + } + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala new file mode 100644 index 0000000000..cf5c1fb3fb --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala @@ -0,0 +1,27 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Manohar Jonnalagedda + */ + +package scala.tools.nsc +package doc +package model + +import scala.collection._ + +/** A type. 
Note that types and templates contain the same information only for the simplest types. For example, a type + * defines how a template's type parameters are instantiated (as in `List[Cow]`), what the template's prefix is + * (as in `johnsFarm.Cow`), and supports compound or structural types. */ +abstract class TypeEntity { + + /** The human-readable representation of this type. */ + def name: String + + /** Maps which parts of this type's name reference entities. The map is indexed by the position of the first + * character that reference some entity, and contains the entity and the position of the last referenced + * character. The referenced character ranges do not to overlap or nest. The map is sorted by position. */ + def refEntity: SortedMap[Int, (base.LinkTo, Int)] + + /** The human-readable representation of this type. */ + override def toString = name +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala new file mode 100644 index 0000000000..f712869a4b --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala @@ -0,0 +1,20 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Gilles Dubochet + */ + +package scala.tools.nsc +package doc +package model + + +/** A value that is passed as an argument to a value parameter. */ +trait ValueArgument { + + /** The parameter as argument to which this value is passed, if it is known. */ + def parameter: Option[ValueParam] + + /** The expression that calculates the value. */ + def value: TreeEntity + +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala new file mode 100644 index 0000000000..22580805aa --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala @@ -0,0 +1,39 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Gilles Dubochet + */ + +package scala.tools.nsc +package doc +package model + +/** An type that represents visibility of members. */ +sealed trait Visibility { + def isProtected: Boolean = false + def isPublic: Boolean = false +} + +/** The visibility of `private[this]` members. */ +case class PrivateInInstance() extends Visibility + +/** The visibility of `protected[this]` members. */ +case class ProtectedInInstance() extends Visibility { + override def isProtected = true +} + +/** The visibility of `private[owner]` members. An unqualified private members + * is encoded with `owner` equal to the members's `inTemplate`. */ +case class PrivateInTemplate(owner: TemplateEntity) extends Visibility + +/** The visibility of `protected[owner]` members. An unqualified protected + * members is encoded with `owner` equal to the members's `inTemplate`. + * Note that whilst the member is visible in any template owned by `owner`, + * it is only visible in subclasses of the member's `inTemplate`. */ +case class ProtectedInTemplate(owner: TemplateEntity) extends Visibility { + override def isProtected = true +} + +/** The visibility of public members. 
*/ +case class Public() extends Visibility { + override def isPublic = true +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala new file mode 100644 index 0000000000..150b293b81 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala @@ -0,0 +1,137 @@ +package scala.tools.nsc.doc +package model +package diagram + +import model._ + +/** + * The diagram base classes + * + * @author Damien Obrist + * @author Vlad Ureche + */ +abstract class Diagram { + def nodes: List[Node] + def edges: List[(Node, List[Node])] + def isContentDiagram = false // Implemented by ContentDiagram + def isInheritanceDiagram = false // Implemented by InheritanceDiagram + def depthInfo: DepthInfo +} + +case class ContentDiagram(nodes:List[/*Class*/Node], edges:List[(Node, List[Node])]) extends Diagram { + override def isContentDiagram = true + lazy val depthInfo = new ContentDiagramDepth(this) +} + +/** A class diagram */ +case class InheritanceDiagram(thisNode: ThisNode, + superClasses: List[/*Class*/Node], + subClasses: List[/*Class*/Node], + incomingImplicits: List[ImplicitNode], + outgoingImplicits: List[ImplicitNode]) extends Diagram { + def nodes = thisNode :: superClasses ::: subClasses ::: incomingImplicits ::: outgoingImplicits + def edges = (thisNode -> (superClasses ::: outgoingImplicits)) :: + (subClasses ::: incomingImplicits).map(_ -> List(thisNode)) + + override def isInheritanceDiagram = true + lazy val depthInfo = new DepthInfo { + def maxDepth = 3 + } +} + +trait DepthInfo { + /** Gives the maximum depth */ + def maxDepth: Int +} + +abstract class Node { + def name = tpe.name + def tpe: TypeEntity + def tpl: Option[TemplateEntity] + /** shortcut to get a DocTemplateEntity */ + def doctpl: Option[DocTemplateEntity] = tpl match { + case Some(tpl) => tpl match { + case d: DocTemplateEntity => Some(d) + case _ => None + } + case _ => None + } + /* shortcuts to find the node type without matching */ + def isThisNode = false + def isNormalNode = false + def isClassNode = if (tpl.isDefined) (tpl.get.isClass || tpl.get.qualifiedName == "scala.AnyRef") else false + def isTraitNode = if (tpl.isDefined) tpl.get.isTrait else false + def isObjectNode= if (tpl.isDefined) tpl.get.isObject else false + def isTypeNode = if (doctpl.isDefined) doctpl.get.isAbstractType || doctpl.get.isAliasType else false + def isOtherNode = !(isClassNode || isTraitNode || isObjectNode || isTypeNode) + def isImplicitNode = false + def isOutsideNode = false + def tooltip: Option[String] +} + +// different matchers, allowing you to use the pattern matcher against any node +// NOTE: A ThisNode or ImplicitNode can at the same time be ClassNode/TraitNode/OtherNode, not exactly according to +// case class specification -- thus a complete match would be: +// node match { +// case ThisNode(tpe, _) => /* case for this node, you can still use .isClass, .isTrait and .isOther */ +// case ImplicitNode(tpe, _) => /* case for an implicit node, you can still use .isClass, .isTrait and .isOther */ +// case _ => node match { +// case ClassNode(tpe, _) => /* case for a non-this, non-implicit Class node */ +// case TraitNode(tpe, _) => /* case for a non-this, non-implicit Trait node */ +// case OtherNode(tpe, _) => /* case for a non-this, non-implicit Other node */ +// } +// } +object Node { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = Some((n.tpe, n.tpl)) } +object ClassNode { def unapply(n: Node): Option[(TypeEntity, 
Option[TemplateEntity])] = if (n.isClassNode) Some((n.tpe, n.tpl)) else None } +object TraitNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTraitNode) Some((n.tpe, n.tpl)) else None } +object TypeNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTypeNode) Some((n.tpe, n.tpl)) else None } +object ObjectNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isObjectNode) Some((n.tpe, n.tpl)) else None } +object OutsideNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOutsideNode) Some((n.tpe, n.tpl)) else None } +object OtherNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOtherNode) Some((n.tpe, n.tpl)) else None } + + + +/** The node for the current class */ +case class ThisNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isThisNode = true } + +/** The usual node */ +case class NormalNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isNormalNode = true } + +/** A class or trait the thisnode can be converted to by an implicit conversion + * TODO: I think it makes more sense to use the tpe links to templates instead of the TemplateEntity for implicit nodes + * since some implicit conversions convert the class to complex types that cannot be represented as a single tmeplate + */ +case class ImplicitNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isImplicitNode = true } + +/** An outside node is shown in packages when a class from a different package makes it to the package diagram due to + * its relation to a class in the template (see @contentDiagram hideInheritedNodes annotation) */ +case class OutsideNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isOutsideNode = true } + + +// Computing and offering node depth information +class ContentDiagramDepth(pack: ContentDiagram) extends DepthInfo { + private[this] var _maxDepth = 0 + private[this] var _nodeDepth = Map[Node, Int]() + private[this] var seedNodes = Set[Node]() + private[this] val invertedEdges: Map[Node, List[Node]] = + pack.edges.flatMap({case (node: Node, outgoing: List[Node]) => outgoing.map((_, node))}).groupBy(_._1).map({case (k, values) => (k, values.map(_._2))}).withDefaultValue(Nil) + private[this] val directEdges: Map[Node, List[Node]] = pack.edges.toMap.withDefaultValue(Nil) + + // seed base nodes, to minimize noise - they can't all have parents, else there would only be cycles + seedNodes ++= pack.nodes.filter(directEdges(_).isEmpty) + + while (!seedNodes.isEmpty) { + var newSeedNodes = Set[Node]() + for (node <- seedNodes) { + val depth = 1 + (-1 :: directEdges(node).map(_nodeDepth.getOrElse(_, -1))).max + if (depth != _nodeDepth.getOrElse(node, -1)) { + _nodeDepth += (node -> depth) + newSeedNodes ++= invertedEdges(node) + if (depth > _maxDepth) _maxDepth = depth + } + } + seedNodes = newSeedNodes + } + + val maxDepth = _maxDepth +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala new file mode 100644 index 0000000000..6395446d3b --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala @@ -0,0 +1,257 @@ +package scala.tools.nsc.doc +package 
model +package diagram + +import model._ +import java.util.regex.{Pattern, Matcher} +import scala.util.matching.Regex + +/** + * This trait takes care of parsing @{inheritance, content}Diagram annotations + * + * @author Damien Obrist + * @author Vlad Ureche + */ +trait DiagramDirectiveParser { + this: ModelFactory with DiagramFactory with CommentFactory with TreeFactory => + + import this.global.definitions.AnyRefClass + + ///// DIAGRAM FILTERS ////////////////////////////////////////////////////////////////////////////////////////////// + + /** + * The DiagramFilter trait directs the diagram engine about the way the diagram should be displayed + * + * Vlad: There's an explanation I owe to people using diagrams and not finding a way to hide a specific class from + * all diagrams at once. So why did I choose to allow you to only control the diagrams at class level? So, the + * reason is you would break the separate scaladoc compilation: + * If you have an "@diagram hideMyClass" annotation in class A and you run scaladoc on it along with its subclass B + * A will not appear in B's diagram. But if you scaladoc only on B, A's comment will not be parsed and the + * instructions to hide class A from all diagrams will not be available. Thus I prefer to force you to control the + * diagrams of each class locally. The problem does not appear with scalac, as scalac stores all its necessary + * information (like scala signatures) serialized in the .class file. But we couldn't store doc comments in the class + * file, could we? (Turns out we could, but that's another story) + * + * Any flaming for this decision should go to scala-internals@googlegroups.com + */ + trait DiagramFilter { + /** A flag to hide the diagram completely */ + def hideDiagram: Boolean + /** Hide incoming implicit conversions (for type hierarchy diagrams) */ + def hideIncomingImplicits: Boolean + /** Hide outgoing implicit conversions (for type hierarchy diagrams) */ + def hideOutgoingImplicits: Boolean + /** Hide superclasses (for type hierarchy diagrams) */ + def hideSuperclasses: Boolean + /** Hide subclasses (for type hierarchy diagrams) */ + def hideSubclasses: Boolean + /** Show related classes from other objects/traits/packages (for content diagrams) */ + def hideInheritedNodes: Boolean + /** Hide a node from the diagram */ + def hideNode(clazz: Node): Boolean + /** Hide an edge from the diagram */ + def hideEdge(clazz1: Node, clazz2: Node): Boolean + } + + /** Main entry point into this trait: generate the filter for inheritance diagrams */ + def makeInheritanceDiagramFilter(template: DocTemplateImpl): DiagramFilter = { + + val defaultFilter = + if (template.isClass || template.isTrait || template.sym == AnyRefClass) + FullDiagram + else + NoDiagramAtAll + + if (template.comment.isDefined) + makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, isInheritanceDiagram = true) + else + defaultFilter + } + + /** Main entry point into this trait: generate the filter for content diagrams */ + def makeContentDiagramFilter(template: DocTemplateImpl): DiagramFilter = { + val defaultFilter = if (template.isPackage || template.isObject) FullDiagram else NoDiagramAtAll + if (template.comment.isDefined) + makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, isInheritanceDiagram = false) + else + defaultFilter + } + + protected var tFilter = 0l + protected var tModel = 0l + + /** Show the entire diagram, no filtering */ + case object FullDiagram extends DiagramFilter { + val 
hideDiagram: Boolean = false + val hideIncomingImplicits: Boolean = false + val hideOutgoingImplicits: Boolean = false + val hideSuperclasses: Boolean = false + val hideSubclasses: Boolean = false + val hideInheritedNodes: Boolean = false + def hideNode(clazz: Node): Boolean = false + def hideEdge(clazz1: Node, clazz2: Node): Boolean = false + } + + /** Hide the diagram completely, no need for special filtering */ + case object NoDiagramAtAll extends DiagramFilter { + val hideDiagram: Boolean = true + val hideIncomingImplicits: Boolean = true + val hideOutgoingImplicits: Boolean = true + val hideSuperclasses: Boolean = true + val hideSubclasses: Boolean = true + val hideInheritedNodes: Boolean = true + def hideNode(clazz: Node): Boolean = true + def hideEdge(clazz1: Node, clazz2: Node): Boolean = true + } + + /** The AnnotationDiagramFilter trait directs the diagram engine according to an annotation + * TODO: Should document the annotation, for now see parseDiagramAnnotation in ModelFactory.scala */ + case class AnnotationDiagramFilter(hideDiagram: Boolean, + hideIncomingImplicits: Boolean, + hideOutgoingImplicits: Boolean, + hideSuperclasses: Boolean, + hideSubclasses: Boolean, + hideInheritedNodes: Boolean, + hideNodesFilter: List[Pattern], + hideEdgesFilter: List[(Pattern, Pattern)]) extends DiagramFilter { + + private[this] def getName(n: Node): String = + if (n.tpl.isDefined) + n.tpl.get.qualifiedName + else + n.name + + def hideNode(clazz: Node): Boolean = { + val qualifiedName = getName(clazz) + for (hideFilter <- hideNodesFilter) + if (hideFilter.matcher(qualifiedName).matches) { + // println(hideFilter + ".matcher(" + qualifiedName + ").matches = " + hideFilter.matcher(qualifiedName).matches) + return true + } + false + } + + def hideEdge(clazz1: Node, clazz2: Node): Boolean = { + val clazz1Name = getName(clazz1) + val clazz2Name = getName(clazz2) + for ((clazz1Filter, clazz2Filter) <- hideEdgesFilter) { + if (clazz1Filter.matcher(clazz1Name).matches && + clazz2Filter.matcher(clazz2Name).matches) { + // println(clazz1Filter + ".matcher(" + clazz1Name + ").matches = " + clazz1Filter.matcher(clazz1Name).matches) + // println(clazz2Filter + ".matcher(" + clazz2Name + ").matches = " + clazz2Filter.matcher(clazz2Name).matches) + return true + } + } + false + } + } + + // TODO: This could certainly be improved -- right now the only regex is *, but there's no way to match a single identifier + private val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\"" + private val NodeSpecPattern = Pattern.compile(NodeSpecRegex) + private val EdgeSpecRegex = "\\(" + NodeSpecRegex + "\\s*\\->\\s*" + NodeSpecRegex + "\\)" + // And the composed regexes: + private val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$") + private val HideEdgesRegex = new Regex("^hideEdges(\\s*" + EdgeSpecRegex + ")+$") + + private def makeDiagramFilter(template: DocTemplateImpl, + directives: List[String], + defaultFilter: DiagramFilter, + isInheritanceDiagram: Boolean): DiagramFilter = directives match { + + // if there are no specific diagram directives, return the default filter (either FullDiagram or NoDiagramAtAll) + case Nil => + defaultFilter + + // compute the exact filters. 
By including the annotation, the diagram is autmatically added + case _ => + tFilter -= System.currentTimeMillis + var hideDiagram0: Boolean = false + var hideIncomingImplicits0: Boolean = false + var hideOutgoingImplicits0: Boolean = false + var hideSuperclasses0: Boolean = false + var hideSubclasses0: Boolean = false + var hideInheritedNodes0: Boolean = false + var hideNodesFilter0: List[Pattern] = Nil + var hideEdgesFilter0: List[(Pattern, Pattern)] = Nil + + def warning(message: String) = { + // we need the position from the package object (well, ideally its comment, but yeah ...) + val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym + assert((sym != global.NoSymbol) || (sym == global.rootMirror.RootPackage)) + global.reporter.warning(sym.pos, message) + } + + def preparePattern(className: String) = + "^" + className.stripPrefix("\"").stripSuffix("\"").replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*") + "$" + + // separate entries: + val entries = directives.foldRight("")(_ + " " + _).split(",").map(_.trim) + for (entry <- entries) + entry match { + case "hideDiagram" => + hideDiagram0 = true + case "hideIncomingImplicits" if isInheritanceDiagram => + hideIncomingImplicits0 = true + case "hideOutgoingImplicits" if isInheritanceDiagram => + hideOutgoingImplicits0 = true + case "hideSuperclasses" if isInheritanceDiagram => + hideSuperclasses0 = true + case "hideSubclasses" if isInheritanceDiagram => + hideSubclasses0 = true + case "hideInheritedNodes" if !isInheritanceDiagram => + hideInheritedNodes0 = true + case HideNodesRegex(last) => + val matcher = NodeSpecPattern.matcher(entry) + while (matcher.find()) { + val classPattern = Pattern.compile(preparePattern(matcher.group())) + hideNodesFilter0 ::= classPattern + } + case HideEdgesRegex(last) => + val matcher = NodeSpecPattern.matcher(entry) + while (matcher.find()) { + val class1Pattern = Pattern.compile(preparePattern(matcher.group())) + assert(matcher.find()) // it's got to be there, just matched it! 
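For reference, a directive that exercises both the node and the edge filters parsed here could look like this in a class comment, assuming the `@inheritanceDiagram` tag spelled out in the trait comment above (class name and patterns invented):

{{{
/** A diagram-annotated class.
 *
 *  @inheritanceDiagram hideSubclasses, hideNodes "*Impl" "scala.Any", hideEdges ("*Node" -> "*Entity")
 */
class MyEntity
}}}

Each comma-separated entry must match one of the cases below; unmatched entries trigger the warning at the end of the method.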
+ val class2Pattern = Pattern.compile(preparePattern(matcher.group())) + hideEdgesFilter0 ::= ((class1Pattern, class2Pattern)) + } + case "" => + // don't need to do anything about it + case _ => + warning("Could not understand diagram annotation in " + template.kind + " " + template.qualifiedName + + ": unmatched entry \"" + entry + "\".\n" + + " This could be because:\n" + + " - you forgot to separate entries by commas\n" + + " - you used a tag that is not allowed in the current context (like @contentDiagram hideSuperclasses)\n"+ + " - you did not use one of the allowed tags (see docs.scala-lang.org for scaladoc annotations)") + } + val result = + if (hideDiagram0) + NoDiagramAtAll + else if ((hideNodesFilter0.isEmpty) && + (hideEdgesFilter0.isEmpty) && + (hideIncomingImplicits0 == false) && + (hideOutgoingImplicits0 == false) && + (hideSuperclasses0 == false) && + (hideSubclasses0 == false) && + (hideInheritedNodes0 == false) && + (hideDiagram0 == false)) + FullDiagram + else + AnnotationDiagramFilter( + hideDiagram = hideDiagram0, + hideIncomingImplicits = hideIncomingImplicits0, + hideOutgoingImplicits = hideOutgoingImplicits0, + hideSuperclasses = hideSuperclasses0, + hideSubclasses = hideSubclasses0, + hideInheritedNodes = hideInheritedNodes0, + hideNodesFilter = hideNodesFilter0, + hideEdgesFilter = hideEdgesFilter0) + + if (settings.docDiagramsDebug.value && result != NoDiagramAtAll && result != FullDiagram) + settings.printMsg(template.kind + " " + template.qualifiedName + " filter: " + result) + tFilter += System.currentTimeMillis + + result + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala new file mode 100644 index 0000000000..ebac25bbe4 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala @@ -0,0 +1,254 @@ +package scala.tools.nsc.doc +package model +package diagram + +import model._ + +// statistics +import html.page.diagram.DiagramStats + +import scala.collection.immutable.SortedMap + +/** + * This trait takes care of generating the diagram for classes and packages + * + * @author Damien Obrist + * @author Vlad Ureche + */ +trait DiagramFactory extends DiagramDirectiveParser { + this: ModelFactory with ModelFactoryTypeSupport with DiagramFactory with CommentFactory with TreeFactory => + + import this.global.definitions._ + import this.global._ + + // the following can used for hardcoding different relations into the diagram, for bootstrapping purposes + def aggregationNode(text: String) = + NormalNode(new TypeEntity { val name = text; val refEntity = SortedMap[Int, (base.LinkTo, Int)]() }, None)() + + /** Create the inheritance diagram for this template */ + def makeInheritanceDiagram(tpl: DocTemplateImpl): Option[Diagram] = { + + tFilter = 0 + tModel = -System.currentTimeMillis + + // the diagram filter + val diagramFilter = makeInheritanceDiagramFilter(tpl) + + def implicitTooltip(from: DocTemplateEntity, to: TemplateEntity, conv: ImplicitConversion) = + Some(from.qualifiedName + " can be implicitly converted to " + conv.targetType + " by the implicit method " + + conv.conversionShortName + " in " + conv.convertorOwner.kind + " " + conv.convertorOwner.qualifiedName) + + val result = + if (diagramFilter == NoDiagramAtAll) + None + else { + // the main node + val thisNode = ThisNode(tpl.resultType, Some(tpl))(Some(tpl.qualifiedName + " (this " + tpl.kind + ")")) + + // superclasses + val superclasses: List[Node] = + 
tpl.parentTypes.collect { + case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))() + }.reverse + + // incoming implcit conversions + lazy val incomingImplicitNodes = tpl.incomingImplicitlyConvertedClasses.map { + case (incomingTpl, conv) => + ImplicitNode(makeType(incomingTpl.sym.tpe, tpl), Some(incomingTpl))(implicitTooltip(from=incomingTpl, to=tpl, conv=conv)) + } + + // subclasses + var subclasses: List[Node] = + tpl.directSubClasses.collect { + case d: TemplateImpl if !classExcluded(d) => NormalNode(makeType(d.sym.tpe, tpl), Some(d))() + }.sortBy(_.tpl.get.name)(implicitly[Ordering[String]].reverse) + + // outgoing implicit coversions + lazy val outgoingImplicitNodes = tpl.outgoingImplicitlyConvertedClasses.map { + case (outgoingTpl, outgoingType, conv) => + ImplicitNode(outgoingType, Some(outgoingTpl))(implicitTooltip(from=tpl, to=tpl, conv=conv)) + } + + // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams. + // Currently, it's possible to leave nodes and edges out, but there's no way to create new nodes and edges + // The implementation would need to add the annotations and the logic to select nodes (or create new ones) + // and add edges to the diagram -- I bet it wouldn't take too long for someone to do it (one or two days + // at most) and it would be a great add to the diagrams. + if (tpl.sym == AnyRefClass) + subclasses = List(aggregationNode("All user-defined classes and traits")) + + val filteredSuperclasses = if (diagramFilter.hideSuperclasses) Nil else superclasses + val filteredIncomingImplicits = if (diagramFilter.hideIncomingImplicits) Nil else incomingImplicitNodes + val filteredSubclasses = if (diagramFilter.hideSubclasses) Nil else subclasses + val filteredImplicitOutgoingNodes = if (diagramFilter.hideOutgoingImplicits) Nil else outgoingImplicitNodes + + // final diagram filter + filterDiagram(InheritanceDiagram(thisNode, filteredSuperclasses.reverse, filteredSubclasses.reverse, filteredIncomingImplicits, filteredImplicitOutgoingNodes), diagramFilter) + } + + tModel += System.currentTimeMillis + DiagramStats.addFilterTime(tFilter) + DiagramStats.addModelTime(tModel-tFilter) + + result + } + + /** Create the content diagram for this template */ + def makeContentDiagram(pack: DocTemplateImpl): Option[Diagram] = { + + tFilter = 0 + tModel = -System.currentTimeMillis + + // the diagram filter + val diagramFilter = makeContentDiagramFilter(pack) + + val result = + if (diagramFilter == NoDiagramAtAll) + None + else { + var mapNodes = Map[TemplateEntity, Node]() + var nodesShown = Set[TemplateEntity]() + var edgesAll = List[(TemplateEntity, List[TemplateEntity])]() + + // classes is the entire set of classes and traits in the package, they are the superset of nodes in the diagram + // we collect classes, traits and objects without a companion, which are usually used as values(e.g. scala.None) + val nodesAll = pack.members collect { + case d: TemplateEntity if ((!diagramFilter.hideInheritedNodes) || (d.inTemplate == pack)) => d + } + + // for each node, add its subclasses + for (node <- nodesAll if !classExcluded(node)) { + node match { + case dnode: MemberTemplateImpl => + var superClasses = dnode.parentTypes.map(_._1).filter(nodesAll.contains(_)) + + // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to add nodes to diagrams. 
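Until then the annotation only filters nodes out of a diagram that is already computed; for example (package name invented), a package author can keep classes pulled in from other packages out of this package's content diagram:

{{{
/** Core data types of the project.
 *
 *  @contentDiagram hideInheritedNodes
 */
package object core
}}}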
+ if (pack.sym == ScalaPackage) + if (dnode.sym == NullClass) + superClasses = List(makeTemplate(AnyRefClass)) + else if (dnode.sym == NothingClass) + superClasses = (List(NullClass) ::: ScalaValueClasses).map(makeTemplate(_)) + + if (!superClasses.isEmpty) { + nodesShown += dnode + nodesShown ++= superClasses + } + edgesAll ::= dnode -> superClasses + case _ => + } + + mapNodes += node -> ( + if (node.inTemplate == pack && (node.isDocTemplate || node.isAbstractType || node.isAliasType)) + NormalNode(node.resultType, Some(node))() + else + OutsideNode(node.resultType, Some(node))() + ) + } + + if (nodesShown.isEmpty) + None + else { + val nodes = nodesAll.filter(nodesShown.contains(_)).flatMap(mapNodes.get(_)) + val edges = edgesAll.map(pair => (mapNodes(pair._1), pair._2.map(mapNodes(_)))).filterNot(pair => pair._2.isEmpty) + val diagram = + // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams. + if (pack.sym == ScalaPackage) { + // Tried it, but it doesn't look good: + // var anyRefSubtypes: List[Node] = List(mapNodes(makeTemplate(AnyRefClass))) + // var dirty = true + // do { + // val length = anyRefSubtypes.length + // anyRefSubtypes :::= edges.collect { case p: (Node, List[Node]) if p._2.exists(anyRefSubtypes.contains(_)) => p._1 } + // anyRefSubtypes = anyRefSubtypes.distinct + // dirty = (anyRefSubtypes.length != length) + // } while (dirty) + // println(anyRefSubtypes) + val anyRefSubtypes = Nil + val allAnyRefTypes = aggregationNode("All AnyRef subtypes") + val nullTemplate = makeTemplate(NullClass) + if (nullTemplate.isDocTemplate) + ContentDiagram(allAnyRefTypes::nodes, (mapNodes(nullTemplate), allAnyRefTypes::anyRefSubtypes)::edges.filterNot(_._1.tpl == Some(nullTemplate))) + else + ContentDiagram(nodes, edges) + } else + ContentDiagram(nodes, edges) + + filterDiagram(diagram, diagramFilter) + } + } + + tModel += System.currentTimeMillis + DiagramStats.addFilterTime(tFilter) + DiagramStats.addModelTime(tModel-tFilter) + + result + } + + /** Diagram filtering logic */ + private def filterDiagram(diagram: Diagram, diagramFilter: DiagramFilter): Option[Diagram] = { + tFilter -= System.currentTimeMillis + + val result = + if (diagramFilter == FullDiagram) + Some(diagram) + else if (diagramFilter == NoDiagramAtAll) + None + else { + // Final diagram, with the filtered nodes and edges + diagram match { + case InheritanceDiagram(thisNode, _, _, _, _) if diagramFilter.hideNode(thisNode) => + None + + case InheritanceDiagram(thisNode, superClasses, subClasses, incomingImplicits, outgoingImplicits) => + + def hideIncoming(node: Node): Boolean = + diagramFilter.hideNode(node) || diagramFilter.hideEdge(node, thisNode) + + def hideOutgoing(node: Node): Boolean = + diagramFilter.hideNode(node) || diagramFilter.hideEdge(thisNode, node) + + // println(thisNode) + // println(superClasses.map(cl => "super: " + cl + " " + hideOutgoing(cl)).mkString("\n")) + // println(subClasses.map(cl => "sub: " + cl + " " + hideIncoming(cl)).mkString("\n")) + Some(InheritanceDiagram(thisNode, + superClasses.filterNot(hideOutgoing(_)), + subClasses.filterNot(hideIncoming(_)), + incomingImplicits.filterNot(hideIncoming(_)), + outgoingImplicits.filterNot(hideOutgoing(_)))) + + case ContentDiagram(nodes0, edges0) => + // Filter out all edges that: + // (1) are sources of hidden classes + // (2) are manually hidden by the user + // (3) are destinations of hidden classes + val edges: List[(Node, List[Node])] = + diagram.edges.flatMap({ + case (source, dests) if 
!diagramFilter.hideNode(source) => + val dests2 = dests.collect({ case dest if (!(diagramFilter.hideEdge(source, dest) || diagramFilter.hideNode(dest))) => dest }) + if (dests2 != Nil) + List((source, dests2)) + else + Nil + case _ => Nil + }) + + // Only show the the non-isolated nodes + // TODO: Decide if we really want to hide package members, I'm not sure that's a good idea (!!!) + // TODO: Does .distinct cause any stability issues? + val sourceNodes = edges.map(_._1) + val sinkNodes = edges.map(_._2).flatten + val nodes = (sourceNodes ::: sinkNodes).distinct + Some(ContentDiagram(nodes, edges)) + } + } + + tFilter += System.currentTimeMillis + + // eliminate all empty diagrams + if (result.isDefined && result.get.edges.forall(_._2.isEmpty)) + None + else + result + } + +} diff --git a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala new file mode 100644 index 0000000000..3db9f18484 --- /dev/null +++ b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala @@ -0,0 +1,203 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Vlad Ureche + */ + +package scala.tools.partest + +import scala.tools.nsc._ +import scala.tools.nsc.util.CommandLineParser +import scala.tools.nsc.doc.{Settings, DocFactory, Universe} +import scala.tools.nsc.doc.model._ +import scala.tools.nsc.doc.model.diagram._ +import scala.tools.nsc.doc.base.comment._ +import scala.tools.nsc.reporters.ConsoleReporter + +/** A class for testing scaladoc model generation + * - you need to specify the code in the `code` method + * - you need to override the testModel method to test the model + * - you may specify extra parameters to send to scaladoc in `scaladocSettings` + * {{{ + import scala.tools.nsc.doc.model._ + import scala.tools.partest.ScaladocModelTest + + object Test extends ScaladocModelTest { + + override def code = """ ... """ // or override def resourceFile = ".scala" (from test/scaladoc/resources) + def scaladocSettings = " ... 
" + def testModel(rootPackage: Package) = { + // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s)) + import access._ + + // just need to check the member exists, access methods will throw an error if there's a problem + rootPackage._package("scala")._package("test")._class("C")._method("foo") + } + } + * }}} + */ +abstract class ScaladocModelTest extends DirectTest { + + /** Override this to give scaladoc command line parameters */ + def scaladocSettings: String + + /** Override this to test the model */ + def testModel(root: Package): Unit + + /** Override to feed a file in resources to scaladoc*/ + def resourceFile: String = null + + /** Override to feed code into scaladoc */ + override def code = + if (resourceFile ne null) + io.File(resourcePath + "/" + resourceFile).slurp() + else + sys.error("Scaladoc Model Test: You need to give a file or some code to feed to scaladoc!") + + def resourcePath = io.Directory(sys.props("partest.cwd") + "/../resources") + + // Implementation follows: + override def extraSettings: String = "-usejavacp" + + override def show(): Unit = { + // redirect err to out, for logging + val prevErr = System.err + System.setErr(System.out) + + try { + // 1 - compile with scaladoc and get the model out + val universe = model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}) + // 2 - check the model generated + testModel(universe.rootPackage) + println("Done.") + } catch { + case e: Exception => + println(e) + e.printStackTrace + } + // set err back to the real err handler + System.setErr(prevErr) + } + + private[this] var settings: Settings = null + + // create a new scaladoc compiler + private[this] def newDocFactory: DocFactory = { + settings = new Settings(_ => ()) + settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! 
+ val args = extraSettings + " " + scaladocSettings + new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think + val docFact = new DocFactory(new ConsoleReporter(settings), settings) + docFact + } + + // compile with scaladoc and output the result + def model: Option[Universe] = newDocFactory.makeUniverse(Right(code)) + + // so we don't get the newSettings warning + override def isDebug = false + + + // finally, enable easy navigation inside the entities + object access { + + implicit class TemplateAccess(tpl: DocTemplateEntity) { + def _class(name: String): DocTemplateEntity = getTheFirst(_classes(name), tpl.qualifiedName + ".class(" + name + ")") + def _classes(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: DocTemplateEntity with Class => c}) + + def _classMbr(name: String): MemberTemplateEntity = getTheFirst(_classesMbr(name), tpl.qualifiedName + ".classMember(" + name + ")") + def _classesMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: MemberTemplateEntity if c.isClass => c}) + + def _trait(name: String): DocTemplateEntity = getTheFirst(_traits(name), tpl.qualifiedName + ".trait(" + name + ")") + def _traits(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: DocTemplateEntity with Trait => t}) + + def _traitMbr(name: String): MemberTemplateEntity = getTheFirst(_traitsMbr(name), tpl.qualifiedName + ".traitMember(" + name + ")") + def _traitsMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: MemberTemplateEntity if t.isTrait => t}) + + def _object(name: String): DocTemplateEntity = getTheFirst(_objects(name), tpl.qualifiedName + ".object(" + name + ")") + def _objects(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: DocTemplateEntity with Object => o}) + + def _objectMbr(name: String): MemberTemplateEntity = getTheFirst(_objectsMbr(name), tpl.qualifiedName + ".objectMember(" + name + ")") + def _objectsMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: MemberTemplateEntity if o.isObject => o}) + + def _method(name: String): Def = getTheFirst(_methods(name), tpl.qualifiedName + ".method(" + name + ")") + def _methods(name: String): List[Def] = tpl.methods.filter(_.name == name) + + def _value(name: String): Val = getTheFirst(_values(name), tpl.qualifiedName + ".value(" + name + ")") + def _values(name: String): List[Val] = tpl.values.filter(_.name == name) + + def _conversion(name: String): ImplicitConversion = getTheFirst(_conversions(name), tpl.qualifiedName + ".conversion(" + name + ")") + def _conversions(name: String): List[ImplicitConversion] = tpl.conversions.filter(_.conversionQualifiedName == name) + + def _absType(name: String): MemberEntity = getTheFirst(_absTypes(name), tpl.qualifiedName + ".abstractType(" + name + ")") + def _absTypes(name: String): List[MemberEntity] = tpl.members.filter(mbr => mbr.name == name && mbr.isAbstractType) + + def _absTypeTpl(name: String): DocTemplateEntity = getTheFirst(_absTypeTpls(name), tpl.qualifiedName + ".abstractType(" + name + ")") + def _absTypeTpls(name: String): List[DocTemplateEntity] = tpl.members.collect({ case dtpl: DocTemplateEntity with AbstractType if dtpl.name == name => dtpl }) + + def _aliasType(name: String): MemberEntity = getTheFirst(_aliasTypes(name), tpl.qualifiedName + ".aliasType(" + 
name + ")") + def _aliasTypes(name: String): List[MemberEntity] = tpl.members.filter(mbr => mbr.name == name && mbr.isAliasType) + + def _aliasTypeTpl(name: String): DocTemplateEntity = getTheFirst(_aliasTypeTpls(name), tpl.qualifiedName + ".aliasType(" + name + ")") + def _aliasTypeTpls(name: String): List[DocTemplateEntity] = tpl.members.collect({ case dtpl: DocTemplateEntity with AliasType if dtpl.name == name => dtpl }) + } + + trait WithMembers { + def members: List[MemberEntity] + def _member(name: String): MemberEntity = getTheFirst(_members(name), this.toString + ".member(" + name + ")") + def _members(name: String): List[MemberEntity] = members.filter(_.name == name) + } + implicit class PackageAccess(pack: Package) extends TemplateAccess(pack) { + def _package(name: String): Package = getTheFirst(_packages(name), pack.qualifiedName + ".package(" + name + ")") + def _packages(name: String): List[Package] = pack.packages.filter(_.name == name) + } + implicit class DocTemplateEntityMembers(val underlying: DocTemplateEntity) extends WithMembers { + def members = underlying.members + } + implicit class ImplicitConversionMembers(val underlying: ImplicitConversion) extends WithMembers { + def members = underlying.members + } + + def getTheFirst[T](list: List[T], expl: String): T = list.length match { + case 1 => list.head + case 0 => sys.error("Error getting " + expl + ": No such element.") + case _ => sys.error("Error getting " + expl + ": " + list.length + " elements with this name. " + + "All elements in list: [" + list.map({ + case ent: Entity => ent.kind + " " + ent.qualifiedName + case other => other.toString + }).mkString(", ") + "]") + } + + def extractCommentText(c: Any) = { + def extractText(body: Any): String = body match { + case s: String => s + case s: Seq[_] => s.toList.map(extractText(_)).mkString + case p: Product => p.productIterator.toList.map(extractText(_)).mkString + case _ => "" + } + c match { + case c: Comment => + extractText(c.body) + case b: Body => + extractText(b) + } + } + + def countLinks(c: Comment, p: EntityLink => Boolean) = { + def countLinks(body: Any): Int = body match { + case el: EntityLink if p(el) => 1 + case s: Seq[_] => s.toList.map(countLinks(_)).sum + case p: Product => p.productIterator.toList.map(countLinks(_)).sum + case _ => 0 + } + countLinks(c.body) + } + + def testDiagram(doc: DocTemplateEntity, diag: Option[Diagram], nodes: Int, edges: Int) = { + assert(diag.isDefined, doc.qualifiedName + " diagram missing") + assert(diag.get.nodes.length == nodes, + doc.qualifiedName + "'s diagram: node count " + diag.get.nodes.length + " == " + nodes) + assert(diag.get.edges.map(_._2.length).sum == edges, + doc.qualifiedName + "'s diagram: edge count " + diag.get.edges.length + " == " + edges) + } + } +} diff --git a/test/files/run/t5527.check b/test/files/run/t5527.check deleted file mode 100644 index 1518168c51..0000000000 --- a/test/files/run/t5527.check +++ /dev/null @@ -1,99 +0,0 @@ -[[syntax trees at end of parser]] // newSource1 -package { - object UselessComments extends scala.AnyRef { - def () = { - super.(); - () - }; - var z = 0; - def test1 = { - object Maybe extends scala.AnyRef { - def () = { - super.(); - () - }; - /** Some comment inside */ - def nothing() = () - }; - () - }; - def test2 = { - var x = 4; - if (true) - { - x = 5; - val y = 6; - () - } - else - () - }; - def test3 = { - if (true) - z = 3 - else - (); - val t = 4; - 0.to(4).foreach(((i) => println(i))) - }; - val test4 = 'a' match { - case ('0'| '1'| '2'| '3'| '4'| 
'5'| '6'| '7'| '8'| '9') => true - case _ => false - } - }; - /** comments that we should keep */ - object UsefulComments extends scala.AnyRef { - def () = { - super.(); - () - }; - /** class A */ - class A extends scala.AnyRef { - def () = { - super.(); - () - }; - /** f */ - def f(i: Int) = i; - /** v */ - val v = 1; - /** u */ - var u = 2 - }; - /** trait B */ - abstract trait B extends scala.AnyRef { - def $init$() = { - () - }; - /** T */ - type T >: _root_.scala.Nothing <: _root_.scala.Any; - /** f */ - def f(i: Int): scala.Unit; - /** v */ - val v = 1; - /** u */ - var u = 2 - }; - /** object C */ - object C extends scala.AnyRef { - def () = { - super.(); - () - }; - /** f */ - def f(i: Int) = i; - /** v */ - val v = 1; - /** u */ - var u = 2 - }; - /** class D */ - @new deprecated("use ... instead", "2.10.0") class D extends scala.AnyRef { - def () = { - super.(); - () - } - } - } -} - diff --git a/test/files/run/t5527.scala b/test/files/run/t5527.scala deleted file mode 100644 index 2449ff60c3..0000000000 --- a/test/files/run/t5527.scala +++ /dev/null @@ -1,107 +0,0 @@ -import scala.tools.partest._ -import java.io._ -import scala.tools.nsc._ -import scala.tools.nsc.util.CommandLineParser -import scala.tools.nsc.doc.{Settings, DocFactory} -import scala.tools.nsc.reporters.ConsoleReporter - -object Test extends DirectTest { - - override def extraSettings: String = "-usejavacp -Xprint:parser -Yrangepos -Ystop-after:parser -d " + testOutput.path - - override def code = """ - // SI-5527 - object UselessComments { - - var z = 0 - - def test1 = { - /** Some comment here */ - object Maybe { - /** Some comment inside */ - def nothing() = () - } - } - - def test2 = { - var x = 4 - if (true) { - /** Testing 123 */ - x = 5 - val y = 6 - } - } - - def test3 = { - if (true) - z = 3 - - /** Calculate this result. */ - val t = 4 - for (i <- 0 to 4) - println(i) - } - - val test4 = ('a') match { - /** Another digit is a giveaway. */ - case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => - true - case _ => - false - } - } - - /** comments that we should keep */ - object UsefulComments { - /** class A */ - class A { - /** f */ - def f(i: Int) = i - /** v */ - val v = 1 - /** u */ - var u = 2 - } - /** trait B */ - trait B { - /** T */ - type T - /** f */ - def f(i: Int) - /** v */ - val v = 1 - /** u */ - var u = 2 - } - /** object C */ - object C { - /** f */ - def f(i: Int) = i - /** v */ - val v = 1 - /** u */ - var u = 2 - } - /** class D */ - @deprecated("use ... 
instead", "2.10.0") - class D - } - """.trim - - override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - compile() - System.setErr(prevErr) - } - - override def newCompiler(args: String*): Global = { - // we want the Scaladoc compiler here, because it keeps DocDef nodes in the tree - val settings = new Settings(_ => ()) - val command = new ScalaDoc.Command((CommandLineParser tokenize extraSettings) ++ args.toList, settings) - new DocFactory(new ConsoleReporter(settings), settings).compiler - } - - override def isDebug = false // so we don't get the newSettings warning -} diff --git a/test/scaladoc/run/t5527.check b/test/scaladoc/run/t5527.check new file mode 100644 index 0000000000..1518168c51 --- /dev/null +++ b/test/scaladoc/run/t5527.check @@ -0,0 +1,99 @@ +[[syntax trees at end of parser]] // newSource1 +package { + object UselessComments extends scala.AnyRef { + def () = { + super.(); + () + }; + var z = 0; + def test1 = { + object Maybe extends scala.AnyRef { + def () = { + super.(); + () + }; + /** Some comment inside */ + def nothing() = () + }; + () + }; + def test2 = { + var x = 4; + if (true) + { + x = 5; + val y = 6; + () + } + else + () + }; + def test3 = { + if (true) + z = 3 + else + (); + val t = 4; + 0.to(4).foreach(((i) => println(i))) + }; + val test4 = 'a' match { + case ('0'| '1'| '2'| '3'| '4'| '5'| '6'| '7'| '8'| '9') => true + case _ => false + } + }; + /** comments that we should keep */ + object UsefulComments extends scala.AnyRef { + def () = { + super.(); + () + }; + /** class A */ + class A extends scala.AnyRef { + def () = { + super.(); + () + }; + /** f */ + def f(i: Int) = i; + /** v */ + val v = 1; + /** u */ + var u = 2 + }; + /** trait B */ + abstract trait B extends scala.AnyRef { + def $init$() = { + () + }; + /** T */ + type T >: _root_.scala.Nothing <: _root_.scala.Any; + /** f */ + def f(i: Int): scala.Unit; + /** v */ + val v = 1; + /** u */ + var u = 2 + }; + /** object C */ + object C extends scala.AnyRef { + def () = { + super.(); + () + }; + /** f */ + def f(i: Int) = i; + /** v */ + val v = 1; + /** u */ + var u = 2 + }; + /** class D */ + @new deprecated("use ... instead", "2.10.0") class D extends scala.AnyRef { + def () = { + super.(); + () + } + } + } +} + diff --git a/test/scaladoc/run/t5527.scala b/test/scaladoc/run/t5527.scala new file mode 100644 index 0000000000..2449ff60c3 --- /dev/null +++ b/test/scaladoc/run/t5527.scala @@ -0,0 +1,107 @@ +import scala.tools.partest._ +import java.io._ +import scala.tools.nsc._ +import scala.tools.nsc.util.CommandLineParser +import scala.tools.nsc.doc.{Settings, DocFactory} +import scala.tools.nsc.reporters.ConsoleReporter + +object Test extends DirectTest { + + override def extraSettings: String = "-usejavacp -Xprint:parser -Yrangepos -Ystop-after:parser -d " + testOutput.path + + override def code = """ + // SI-5527 + object UselessComments { + + var z = 0 + + def test1 = { + /** Some comment here */ + object Maybe { + /** Some comment inside */ + def nothing() = () + } + } + + def test2 = { + var x = 4 + if (true) { + /** Testing 123 */ + x = 5 + val y = 6 + } + } + + def test3 = { + if (true) + z = 3 + + /** Calculate this result. */ + val t = 4 + for (i <- 0 to 4) + println(i) + } + + val test4 = ('a') match { + /** Another digit is a giveaway. 
*/ + case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => + true + case _ => + false + } + } + + /** comments that we should keep */ + object UsefulComments { + /** class A */ + class A { + /** f */ + def f(i: Int) = i + /** v */ + val v = 1 + /** u */ + var u = 2 + } + /** trait B */ + trait B { + /** T */ + type T + /** f */ + def f(i: Int) + /** v */ + val v = 1 + /** u */ + var u = 2 + } + /** object C */ + object C { + /** f */ + def f(i: Int) = i + /** v */ + val v = 1 + /** u */ + var u = 2 + } + /** class D */ + @deprecated("use ... instead", "2.10.0") + class D + } + """.trim + + override def show(): Unit = { + // redirect err to out, for logging + val prevErr = System.err + System.setErr(System.out) + compile() + System.setErr(prevErr) + } + + override def newCompiler(args: String*): Global = { + // we want the Scaladoc compiler here, because it keeps DocDef nodes in the tree + val settings = new Settings(_ => ()) + val command = new ScalaDoc.Command((CommandLineParser tokenize extraSettings) ++ args.toList, settings) + new DocFactory(new ConsoleReporter(settings), settings).compiler + } + + override def isDebug = false // so we don't get the newSettings warning +} diff --git a/test/scaladoc/scalacheck/IndexScriptTest.scala b/test/scaladoc/scalacheck/IndexScriptTest.scala index 5aef38e00a..37f6947aaa 100644 --- a/test/scaladoc/scalacheck/IndexScriptTest.scala +++ b/test/scaladoc/scalacheck/IndexScriptTest.scala @@ -35,7 +35,7 @@ object Test extends Properties("IndexScript") { } property("allPackages") = { - createIndexScript("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match { + createIndexScript("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match { case Some(index) => index.allPackages.map(_.toString) == List( "scala", diff --git a/test/scaladoc/scalacheck/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala index bf385898fc..dc4ab126d4 100644 --- a/test/scaladoc/scalacheck/IndexTest.scala +++ b/test/scaladoc/scalacheck/IndexTest.scala @@ -56,7 +56,7 @@ object Test extends Properties("Index") { } property("path") = { - createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match { + createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match { case Some(index) => index.path == List("index.html") case None => false @@ -64,7 +64,7 @@ object Test extends Properties("Index") { } property("title") = { - createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match { + createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match { case Some(index) => index.title == "" @@ -72,7 +72,7 @@ object Test extends Properties("Index") { } } property("browser contants a script element") = { - createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match { + createIndex("src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala") match { case Some(index) => (index.browser \ "script").size == 1 -- cgit v1.2.3 From e83defaa29bf8d7ed742a611c301ee8b04e971b8 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 6 Mar 2013 08:05:12 -0800 Subject: Moved interactive sources into separate directory. As with the preceding commit, this has build-internal effects only. 
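The interactive sources relocated by this commit include the presentation compiler's client-facing entry points (CompilerControl, Global, Response), whose contents appear in the hunks below. As a rough orientation, the following is a minimal, illustrative sketch (not part of this patch) of how a client typically drives that API; the file name, source text and settings shown are placeholders, and the snippet assumes the blocking Response.get of this era.

import scala.tools.nsc.Settings
import scala.tools.nsc.interactive.{Global, Response}
import scala.tools.nsc.reporters.ConsoleReporter
import scala.reflect.internal.util.BatchSourceFile

object PresentationCompilerSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical client setup; a real IDE supplies its own settings and reporter.
    val settings = new Settings(s => Console.err.println(s))
    settings.usejavacp.value = true
    val compiler = new Global(settings, new ConsoleReporter(settings))

    val source = new BatchSourceFile("Sketch.scala", "object O { def answer = 42 }")

    // Load the source; this schedules a background compile of all loaded units.
    val reloaded = new Response[Unit]
    compiler.askReload(List(source), reloaded)
    reloaded.get

    // Block until the unit is fully typechecked, then inspect the attributed tree.
    val typed = new Response[compiler.Tree]
    compiler.askLoadedTyped(source, typed)
    typed.get match {
      case Left(tree) => println(tree)
      case Right(ex)  => ex.printStackTrace()
    }

    compiler.askShutdown()
  }
}

A real client would issue these requests from its own worker thread and poll the responses with a timeout instead of blocking indefinitely, but the control flow is the same.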
--- build.xml | 34 +- .../tools/nsc/interactive/CompilerControl.scala | 470 -------- .../scala/tools/nsc/interactive/ContextTrees.scala | 149 --- .../scala/tools/nsc/interactive/Global.scala | 1139 -------------------- .../nsc/interactive/InteractiveReporter.scala | 47 - .../scala/tools/nsc/interactive/Picklers.scala | 189 ---- .../interactive/PresentationCompilerThread.scala | 51 - .../scala/tools/nsc/interactive/REPL.scala | 218 ---- .../tools/nsc/interactive/RangePositions.scala | 14 - .../scala/tools/nsc/interactive/Response.scala | 105 -- .../nsc/interactive/RichCompilationUnits.scala | 58 - .../tools/nsc/interactive/ScratchPadMaker.scala | 200 ---- .../nsc/interactive/tests/InteractiveTest.scala | 123 --- .../tests/InteractiveTestSettings.scala | 69 -- .../scala/tools/nsc/interactive/tests/Tester.scala | 208 ---- .../nsc/interactive/tests/core/AskCommand.scala | 109 -- .../nsc/interactive/tests/core/CoreTestDefs.scala | 100 -- .../tests/core/PresentationCompilerInstance.scala | 34 - .../PresentationCompilerRequestsWorkingMode.scala | 62 -- .../tests/core/PresentationCompilerTestDef.scala | 18 - .../nsc/interactive/tests/core/Reporter.scala | 15 - .../interactive/tests/core/SourcesCollector.scala | 20 - .../nsc/interactive/tests/core/TestMarker.scala | 27 - .../nsc/interactive/tests/core/TestResources.scala | 12 - .../nsc/interactive/tests/core/TestSettings.scala | 19 - .../tools/nsc/interactive/CompilerControl.scala | 470 ++++++++ .../scala/tools/nsc/interactive/ContextTrees.scala | 149 +++ .../scala/tools/nsc/interactive/Global.scala | 1139 ++++++++++++++++++++ .../nsc/interactive/InteractiveReporter.scala | 47 + .../scala/tools/nsc/interactive/Picklers.scala | 189 ++++ .../interactive/PresentationCompilerThread.scala | 51 + .../scala/tools/nsc/interactive/REPL.scala | 218 ++++ .../tools/nsc/interactive/RangePositions.scala | 14 + .../scala/tools/nsc/interactive/Response.scala | 105 ++ .../nsc/interactive/RichCompilationUnits.scala | 58 + .../tools/nsc/interactive/ScratchPadMaker.scala | 200 ++++ .../nsc/interactive/tests/InteractiveTest.scala | 123 +++ .../tests/InteractiveTestSettings.scala | 69 ++ .../scala/tools/nsc/interactive/tests/Tester.scala | 208 ++++ .../nsc/interactive/tests/core/AskCommand.scala | 109 ++ .../nsc/interactive/tests/core/CoreTestDefs.scala | 100 ++ .../tests/core/PresentationCompilerInstance.scala | 34 + .../PresentationCompilerRequestsWorkingMode.scala | 62 ++ .../tests/core/PresentationCompilerTestDef.scala | 18 + .../nsc/interactive/tests/core/Reporter.scala | 15 + .../interactive/tests/core/SourcesCollector.scala | 20 + .../nsc/interactive/tests/core/TestMarker.scala | 27 + .../nsc/interactive/tests/core/TestResources.scala | 12 + .../nsc/interactive/tests/core/TestSettings.scala | 19 + 49 files changed, 3489 insertions(+), 3457 deletions(-) delete mode 100644 src/compiler/scala/tools/nsc/interactive/CompilerControl.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/ContextTrees.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/Global.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/Picklers.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/REPL.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/RangePositions.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/Response.scala delete 
mode 100644 src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/Tester.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala delete mode 100644 src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/CompilerControl.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/ContextTrees.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/Global.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/Picklers.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/REPL.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/RangePositions.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/Response.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/Tester.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala create mode 100644 src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala 
(limited to 'src') diff --git a/build.xml b/build.xml index 9a685ee9cf..f4191b1907 100644 --- a/build.xml +++ b/build.xml @@ -1316,7 +1316,38 @@ QUICK BUILD (QUICK) - + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -1456,6 +1487,7 @@ PACKED QUICK BUILD (PACK) + diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala deleted file mode 100644 index f84fa161c0..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala +++ /dev/null @@ -1,470 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive - -import scala.util.control.ControlThrowable -import scala.tools.nsc.io.AbstractFile -import scala.tools.nsc.util.FailedInterrupt -import scala.tools.nsc.util.EmptyAction -import scala.tools.nsc.util.WorkScheduler -import scala.reflect.internal.util.{SourceFile, Position} -import scala.tools.nsc.util.InterruptReq - -/** Interface of interactive compiler to a client such as an IDE - * The model the presentation compiler consists of the following parts: - * - * unitOfFile: The map from sourcefiles to loaded units. A sourcefile/unit is loaded if it occurs in that map. - * - * manipulated by: removeUnitOf, reloadSources. - * - * A call to reloadSources will add the given sources to the loaded units, and - * start a new background compiler pass to compile all loaded units (with the indicated sources first). - * Each background compiler pass has its own typer run. - * The background compiler thread can be interrupted each time an AST node is - * completely typechecked in the following ways: - - * 1. by a new call to reloadSources. This starts a new background compiler pass with a new typer run. - * 2. by a call to askTypeTree. This starts a new typer run if the forceReload parameter = true - * 3. by a call to askTypeAt, askTypeCompletion, askScopeCompletion, askToDoFirst, askLinkPos, askLastType. - * 4. by raising an exception in the scheduler. - * 5. by passing a high-priority action wrapped in ask { ... }. - * - * Actions under 1-3 can themselves be interrupted if they involve typechecking - * AST nodes. High-priority actions under 5 cannot; they always run to completion. - * So these high-priority actions should to be short. - * - * Normally, an interrupted action continues after the interrupting action is finished. - * However, if the interrupting action created a new typer run, the interrupted - * action is aborted. If there's an outstanding response, it will be set to - * a Right value with a FreshRunReq exception. - */ -trait CompilerControl { self: Global => - - import syntaxAnalyzer.UnitParser - - type Response[T] = scala.tools.nsc.interactive.Response[T] - - /** The scheduler by which client and compiler communicate - * Must be initialized before starting compilerRunner - */ - @volatile protected[interactive] var scheduler = new WorkScheduler - - /** Return the compilation unit attached to a source file, or None - * if source is not loaded. - */ - def getUnitOf(s: SourceFile): Option[RichCompilationUnit] = getUnit(s) - - /** Run operation `op` on a compilation unit associated with given `source`. - * If source has a loaded compilation unit, this one is passed to `op`. - * Otherwise a new compilation unit is created, but not added to the set of loaded units. 
- */ - def onUnitOf[T](source: SourceFile)(op: RichCompilationUnit => T): T = - op(unitOfFile.getOrElse(source.file, new RichCompilationUnit(source))) - - /** The compilation unit corresponding to a source file - * if it does not yet exist create a new one atomically - * Note: We want to get roid of this operation as it messes compiler invariants. - */ - @deprecated("use getUnitOf(s) or onUnitOf(s) instead", "2.10.0") - def unitOf(s: SourceFile): RichCompilationUnit = getOrCreateUnitOf(s) - - /** The compilation unit corresponding to a position */ - @deprecated("use getUnitOf(pos.source) or onUnitOf(pos.source) instead", "2.10.0") - def unitOf(pos: Position): RichCompilationUnit = getOrCreateUnitOf(pos.source) - - /** Removes the CompilationUnit corresponding to the given SourceFile - * from consideration for recompilation. - */ - def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved += s.file; unitOfFile get s.file } - - /** Returns the top level classes and objects that were deleted - * in the editor since last time recentlyDeleted() was called. - */ - def recentlyDeleted(): List[Symbol] = deletedTopLevelSyms.synchronized { - val result = deletedTopLevelSyms - deletedTopLevelSyms.clear() - result.toList - } - - /** Locate smallest tree that encloses position - * @pre Position must be loaded - */ - def locateTree(pos: Position): Tree = onUnitOf(pos.source) { unit => new Locator(pos) locateIn unit.body } - - /** Locates smallest context that encloses position as an optional value. - */ - def locateContext(pos: Position): Option[Context] = - for (unit <- getUnit(pos.source); cx <- locateContext(unit.contexts, pos)) yield cx - - /** Returns the smallest context that contains given `pos`, throws FatalError if none exists. - */ - def doLocateContext(pos: Position): Context = locateContext(pos) getOrElse { - throw new FatalError("no context found for "+pos) - } - - private def postWorkItem(item: WorkItem) = - if (item.onCompilerThread) item() else scheduler.postWorkItem(item) - - /** Makes sure a set of compilation units is loaded and parsed. - * Returns () to syncvar `response` on completion. - * Afterwards a new background compiler run is started with - * the given sources at the head of the list of to-be-compiled sources. - */ - def askReload(sources: List[SourceFile], response: Response[Unit]) = { - val superseeded = scheduler.dequeueAll { - case ri: ReloadItem if ri.sources == sources => Some(ri) - case _ => None - } - superseeded.foreach(_.response.set()) - postWorkItem(new ReloadItem(sources, response)) - } - - /** Removes source files and toplevel symbols, and issues a new typer run. - * Returns () to syncvar `response` on completion. - */ - def askFilesDeleted(sources: List[SourceFile], response: Response[Unit]) = { - postWorkItem(new FilesDeletedItem(sources, response)) - } - - /** Sets sync var `response` to the smallest fully attributed tree that encloses position `pos`. - * Note: Unlike for most other ask... operations, the source file belonging to `pos` needs not be loaded. - */ - def askTypeAt(pos: Position, response: Response[Tree]) = - postWorkItem(new AskTypeAtItem(pos, response)) - - /** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`. - * @pre `source` needs to be loaded. - * @note Deprecated because of race conditions in the typechecker when the background compiler - * is interrupted while typing the same `source`. 
- * @see SI-6578 - */ - @deprecated("Use `askLoadedTyped` instead to avoid race conditions in the typechecker", "2.10.1") - def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) = - postWorkItem(new AskTypeItem(source, forceReload, response)) - - /** Sets sync var `response` to the position of the definition of the given link in - * the given sourcefile. - * - * @param sym The symbol referenced by the link (might come from a classfile) - * @param source The source file that's supposed to contain the definition - * @param response A response that will be set to the following: - * If `source` contains a definition that is referenced by the given link - * the position of that definition, otherwise NoPosition. - * Note: This operation does not automatically load `source`. If `source` - * is unloaded, it stays that way. - */ - def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) = - postWorkItem(new AskLinkPosItem(sym, source, response)) - - /** Sets sync var `response` to doc comment information for a given symbol. - * - * @param sym The symbol whose doc comment should be retrieved (might come from a classfile) - * @param source The source file that's supposed to contain the definition - * @param site The symbol where 'sym' is observed - * @param fragments All symbols that can contribute to the generated documentation - * together with their source files. - * @param response A response that will be set to the following: - * If `source` contains a definition of a given symbol that has a doc comment, - * the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition). - * Note: This operation does not automatically load sources that are not yet loaded. - */ - def askDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]): Unit = - postWorkItem(new AskDocCommentItem(sym, source, site, fragments, response)) - - @deprecated("Use method that accepts fragments", "2.10.2") - def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]): Unit = - askDocComment(sym, source, site, (sym,source)::Nil, response) - - /** Sets sync var `response` to list of members that are visible - * as members of the tree enclosing `pos`, possibly reachable by an implicit. - * @pre source is loaded - */ - def askTypeCompletion(pos: Position, response: Response[List[Member]]) = - postWorkItem(new AskTypeCompletionItem(pos, response)) - - /** Sets sync var `response` to list of members that are visible - * as members of the scope enclosing `pos`. - * @pre source is loaded - */ - def askScopeCompletion(pos: Position, response: Response[List[Member]]) = - postWorkItem(new AskScopeCompletionItem(pos, response)) - - /** Asks to do unit corresponding to given source file on present and subsequent type checking passes. - * If the file is in the 'crashedFiles' ignore list it is removed and typechecked normally. - */ - def askToDoFirst(source: SourceFile) = - postWorkItem(new AskToDoFirstItem(source)) - - /** If source is not yet loaded, loads it, and starts a new run, otherwise - * continues with current pass. - * Waits until source is fully type checked and returns body in response. - * @param source The source file that needs to be fully typed. - * @param response The response, which is set to the fully attributed tree of `source`. 
- * If the unit corresponding to `source` has been removed in the meantime - * the a NoSuchUnitError is raised in the response. - */ - def askLoadedTyped(source: SourceFile, response: Response[Tree]) = - postWorkItem(new AskLoadedTypedItem(source, response)) - - /** If source if not yet loaded, get an outline view with askParseEntered. - * If source is loaded, wait for it to be typechecked. - * In both cases, set response to parsed (and possibly typechecked) tree. - * @param keepSrcLoaded If set to `true`, source file will be kept as a loaded unit afterwards. - */ - def askStructure(keepSrcLoaded: Boolean)(source: SourceFile, response: Response[Tree]) = { - getUnit(source) match { - case Some(_) => askLoadedTyped(source, response) - case None => askParsedEntered(source, keepSrcLoaded, response) - } - } - - /** Set sync var `response` to the parse tree of `source` with all top-level symbols entered. - * @param source The source file to be analyzed - * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards. - * If keepLoaded is `false` the operation is run at low priority, only after - * everything is brought up to date in a regular type checker run. - * @param response The response. - */ - def askParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) = - postWorkItem(new AskParsedEnteredItem(source, keepLoaded, response)) - - /** Set sync var `response` to a pair consisting of - * - the fully qualified name of the first top-level object definition in the file. - * or "" if there are no object definitions. - * - the text of the instrumented program which, when run, - * prints its output and all defined values in a comment column. - * - * @param source The source file to be analyzed - * @param response The response. - */ - @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") - def askInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) = - postWorkItem(new AskInstrumentedItem(source, line, response)) - - /** Cancels current compiler run and start a fresh one where everything will be re-typechecked - * (but not re-loaded). - */ - def askReset() = scheduler raise (new FreshRunReq) - - /** Tells the compile server to shutdown, and not to restart again */ - def askShutdown() = scheduler raise ShutdownReq - - @deprecated("use parseTree(source) instead", "2.10.0") // deleted 2nd parameter, as this has to run on 2.8 also. - def askParse(source: SourceFile, response: Response[Tree]) = respond(response) { - parseTree(source) - } - - /** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported. - * - * This method is thread-safe and as such can safely run outside of the presentation - * compiler thread. 
- */ - def parseTree(source: SourceFile): Tree = { - newUnitParser(new CompilationUnit(source)).parse() - } - - /** Asks for a computation to be done quickly on the presentation compiler thread */ - def ask[A](op: () => A): A = if (self.onCompilerThread) op() else scheduler doQuickly op - - /** Asks for a computation to be done on presentation compiler thread, returning - * a response with the result or an exception - */ - def askForResponse[A](op: () => A): Response[A] = { - val r = new Response[A] - if (self.onCompilerThread) { - try { r set op() } - catch { case exc: Throwable => r raise exc } - r - } else { - val ir = scheduler askDoQuickly op - ir onComplete { - case Left(result) => r set result - case Right(exc) => r raise exc - } - r - } - } - - def onCompilerThread = Thread.currentThread == compileRunner - - /** Info given for every member found by completion - */ - abstract class Member { - val sym: Symbol - val tpe: Type - val accessible: Boolean - def implicitlyAdded = false - } - - case class TypeMember( - sym: Symbol, - tpe: Type, - accessible: Boolean, - inherited: Boolean, - viaView: Symbol) extends Member { - override def implicitlyAdded = viaView != NoSymbol - } - - case class ScopeMember( - sym: Symbol, - tpe: Type, - accessible: Boolean, - viaImport: Tree) extends Member - - // items that get sent to scheduler - - abstract class WorkItem extends (() => Unit) { - val onCompilerThread = self.onCompilerThread - - /** Raise a MissingReponse, if the work item carries a response. */ - def raiseMissing(): Unit - } - - case class ReloadItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem { - def apply() = reload(sources, response) - override def toString = "reload "+sources - - def raiseMissing() = - response raise new MissingResponse - } - - case class FilesDeletedItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem { - def apply() = filesDeleted(sources, response) - override def toString = "files deleted "+sources - - def raiseMissing() = - response raise new MissingResponse - } - - case class AskTypeAtItem(pos: Position, response: Response[Tree]) extends WorkItem { - def apply() = self.getTypedTreeAt(pos, response) - override def toString = "typeat "+pos.source+" "+pos.show - - def raiseMissing() = - response raise new MissingResponse - } - - case class AskTypeItem(source: SourceFile, forceReload: Boolean, response: Response[Tree]) extends WorkItem { - def apply() = self.getTypedTree(source, forceReload, response) - override def toString = "typecheck" - - def raiseMissing() = - response raise new MissingResponse - } - - case class AskTypeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem { - def apply() = self.getTypeCompletion(pos, response) - override def toString = "type completion "+pos.source+" "+pos.show - - def raiseMissing() = - response raise new MissingResponse - } - - case class AskScopeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem { - def apply() = self.getScopeCompletion(pos, response) - override def toString = "scope completion "+pos.source+" "+pos.show - - def raiseMissing() = - response raise new MissingResponse - } - - class AskToDoFirstItem(val source: SourceFile) extends WorkItem { - def apply() = { - moveToFront(List(source)) - enableIgnoredFile(source.file) - } - override def toString = "dofirst "+source - - def raiseMissing() = () - } - - case class AskLinkPosItem(sym: Symbol, source: SourceFile, response: Response[Position]) extends WorkItem { - 
def apply() = self.getLinkPos(sym, source, response) - override def toString = "linkpos "+sym+" in "+source - - def raiseMissing() = - response raise new MissingResponse - } - - case class AskDocCommentItem(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem { - def apply() = self.getDocComment(sym, source, site, fragments, response) - override def toString = "doc comment "+sym+" in "+source+" with fragments:"+fragments.mkString("(", ",", ")") - - def raiseMissing() = - response raise new MissingResponse - } - - case class AskLoadedTypedItem(source: SourceFile, response: Response[Tree]) extends WorkItem { - def apply() = self.waitLoadedTyped(source, response, this.onCompilerThread) - override def toString = "wait loaded & typed "+source - - def raiseMissing() = - response raise new MissingResponse - } - - case class AskParsedEnteredItem(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) extends WorkItem { - def apply() = self.getParsedEntered(source, keepLoaded, response, this.onCompilerThread) - override def toString = "getParsedEntered "+source+", keepLoaded = "+keepLoaded - - def raiseMissing() = - response raise new MissingResponse - } - - @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") - case class AskInstrumentedItem(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) extends WorkItem { - def apply() = self.getInstrumented(source, line, response) - override def toString = "getInstrumented "+source - - def raiseMissing() = - response raise new MissingResponse - } - - /** A do-nothing work scheduler that responds immediately with MissingResponse. - * - * Used during compiler shutdown. - */ - class NoWorkScheduler extends WorkScheduler { - - override def postWorkItem(action: Action) = synchronized { - action match { - case w: WorkItem => w.raiseMissing() - case e: EmptyAction => // do nothing - case _ => println("don't know what to do with this " + action.getClass) - } - } - - override def doQuickly[A](op: () => A): A = { - throw new FailedInterrupt(new Exception("Posted a work item to a compiler that's shutting down")) - } - - override def askDoQuickly[A](op: () => A): InterruptReq { type R = A } = { - val ir = new InterruptReq { - type R = A - val todo = () => throw new MissingResponse - } - ir.execute() - ir - } - - } - -} - - // ---------------- Interpreted exceptions ------------------- - -/** Signals a request for a fresh background compiler run. - * Note: The object has to stay top-level so that the PresentationCompilerThread may access it. - */ -class FreshRunReq extends ControlThrowable - -/** Signals a request for a shutdown of the presentation compiler. - * Note: The object has to stay top-level so that the PresentationCompilerThread may access it. 
- */ -object ShutdownReq extends ControlThrowable - -class NoSuchUnitError(file: AbstractFile) extends Exception("no unit found for file "+file) - -class MissingResponse extends Exception("response missing") diff --git a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala deleted file mode 100644 index 93ef4c4d6c..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala +++ /dev/null @@ -1,149 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive - -import scala.collection.mutable.ArrayBuffer - -trait ContextTrees { self: Global => - - type Context = analyzer.Context - lazy val NoContext = analyzer.NoContext - type Contexts = ArrayBuffer[ContextTree] - - /** A context tree contains contexts that are indexed by positions. - * It satisfies the following properties: - * 1. All context come from compiling the same unit. - * 2. Child contexts have parent contexts in their outer chain. - * 3. The `pos` field of a context is the same as `context.tree.pos`, unless that - * position is transparent. In that case, `pos` equals the position of - * one of the solid descendants of `context.tree`. - * 4. Children of a context have non-overlapping increasing positions. - * 5. No context in the tree has a transparent position. - */ - class ContextTree(val pos: Position, val context: Context, val children: ArrayBuffer[ContextTree]) { - def this(pos: Position, context: Context) = this(pos, context, new ArrayBuffer[ContextTree]) - override def toString = "ContextTree("+pos+", "+children+")" - } - - /** Optionally returns the smallest context that contains given `pos`, or None if none exists. - */ - def locateContext(contexts: Contexts, pos: Position): Option[Context] = synchronized { - def locateNearestContextTree(contexts: Contexts, pos: Position, recent: Array[ContextTree]): Option[ContextTree] = { - locateContextTree(contexts, pos) match { - case Some(x) => - recent(0) = x - locateNearestContextTree(x.children, pos, recent) - case None => recent(0) match { - case null => None - case x => Some(x) - } - } - } - locateNearestContextTree(contexts, pos, new Array[ContextTree](1)) map (_.context) - } - - def locateContextTree(contexts: Contexts, pos: Position): Option[ContextTree] = { - if (contexts.isEmpty) None - else { - val hi = contexts.length - 1 - if ((contexts(hi).pos properlyPrecedes pos) || (pos properlyPrecedes contexts(0).pos)) None - else { - def loop(lo: Int, hi: Int): Option[ContextTree] = { - val mid = (lo + hi) / 2 - val midpos = contexts(mid).pos - if ((pos precedes midpos) && (mid < hi)) - loop(lo, mid) - else if ((midpos precedes pos) && (lo < mid)) - loop(mid, hi) - else if (midpos includes pos) - Some(contexts(mid)) - else if (contexts(mid+1).pos includes pos) - Some(contexts(mid+1)) - else None - } - loop(0, hi) - } - } - } - - /** Insert a context at correct position into a buffer of context trees. - * If the `context` has a transparent position, add it multiple times - * at the positions of all its solid descendant trees. 
- */ - def addContext(contexts: Contexts, context: Context): Unit = { - val cpos = context.tree.pos - if (cpos.isTransparent) - for (t <- context.tree.children flatMap solidDescendants) - addContext(contexts, context, t.pos) - else - addContext(contexts, context, cpos) - } - - /** Insert a context with non-transparent position `cpos` - * at correct position into a buffer of context trees. - */ - def addContext(contexts: Contexts, context: Context, cpos: Position): Unit = synchronized { - try { - if (!cpos.isRange) {} - else if (contexts.isEmpty) contexts += new ContextTree(cpos, context) - else { - val hi = contexts.length - 1 - if (contexts(hi).pos precedes cpos) - contexts += new ContextTree(cpos, context) - else if (contexts(hi).pos properlyIncludes cpos) // fast path w/o search - addContext(contexts(hi).children, context, cpos) - else if (cpos precedes contexts(0).pos) - new ContextTree(cpos, context) +=: contexts - else { - def insertAt(idx: Int): Boolean = { - val oldpos = contexts(idx).pos - if (oldpos sameRange cpos) { - contexts(idx) = new ContextTree(cpos, context, contexts(idx).children) - true - } else if (oldpos includes cpos) { - addContext(contexts(idx).children, context, cpos) - true - } else if (cpos includes oldpos) { - val start = contexts.indexWhere(cpos includes _.pos) - val last = contexts.lastIndexWhere(cpos includes _.pos) - contexts(start) = new ContextTree(cpos, context, contexts.slice(start, last + 1)) - contexts.remove(start + 1, last - start) - true - } else false - } - def loop(lo: Int, hi: Int) { - if (hi - lo > 1) { - val mid = (lo + hi) / 2 - val midpos = contexts(mid).pos - if (cpos precedes midpos) - loop(lo, mid) - else if (midpos precedes cpos) - loop(mid, hi) - else - addContext(contexts(mid).children, context, cpos) - } else if (!insertAt(lo) && !insertAt(hi)) { - val lopos = contexts(lo).pos - val hipos = contexts(hi).pos - if ((lopos precedes cpos) && (cpos precedes hipos)) - contexts.insert(hi, new ContextTree(cpos, context)) - else - inform("internal error? 
skewed positions: "+lopos+" !< "+cpos+" !< "+hipos) - } - } - loop(0, hi) - } - } - } catch { - case ex: Throwable => - println(ex) - ex.printStackTrace() - println("failure inserting "+cpos+" into "+contexts+"/"+contexts(contexts.length - 1).pos+"/"+ - (contexts(contexts.length - 1).pos includes cpos)) - throw ex - } - } -} - diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala deleted file mode 100644 index 33b10d1a9a..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ /dev/null @@ -1,1139 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive - -import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter } -import scala.collection.mutable -import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet} -import scala.util.control.ControlThrowable -import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer } -import scala.tools.nsc.util.MultiHashMap -import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition } -import scala.tools.nsc.reporters._ -import scala.tools.nsc.symtab._ -import scala.tools.nsc.typechecker.DivergentImplicit -import symtab.Flags.{ACCESSOR, PARAMACCESSOR} -import scala.annotation.elidable -import scala.language.implicitConversions - -/** The main class of the presentation compiler in an interactive environment such as an IDE - */ -class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends { - /* Is the compiler initializing? Early def, so that the field is true during the - * execution of the super constructor. - */ - private var initializing = true - override val useOffsetPositions = false -} with scala.tools.nsc.Global(settings, _reporter) - with CompilerControl - with ContextTrees - with RichCompilationUnits - with ScratchPadMaker - with Picklers { - - import definitions._ - - val debugIDE: Boolean = settings.YpresentationDebug.value - val verboseIDE: Boolean = settings.YpresentationVerbose.value - - private def replayName = settings.YpresentationReplay.value - private def logName = settings.YpresentationLog.value - private def afterTypeDelay = settings.YpresentationDelay.value - private final val SleepTime = 10 - - val log = - if (replayName != "") new Replayer(new FileReader(replayName)) - else if (logName != "") new Logger(new FileWriter(logName)) - else NullLogger - - import log.logreplay - debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath) - debugLog("classpath: "+classPath) - - private var curTime = System.nanoTime - private def timeStep = { - val last = curTime - curTime = System.nanoTime - ", delay = " + (curTime - last) / 1000000 + "ms" - } - - /** Print msg only when debugIDE is true. */ - @inline final def debugLog(msg: => String) = - if (debugIDE) println("[%s] %s".format(projectName, msg)) - - /** Inform with msg only when verboseIDE is true. */ - @inline final def informIDE(msg: => String) = - if (verboseIDE) println("[%s][%s]".format(projectName, msg)) - - override def forInteractive = true - - /** A map of all loaded files to the rich compilation units that correspond to them. 
- */ - val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with - SynchronizedMap[AbstractFile, RichCompilationUnit] { - override def put(key: AbstractFile, value: RichCompilationUnit) = { - val r = super.put(key, value) - if (r.isEmpty) debugLog("added unit for "+key) - r - } - override def remove(key: AbstractFile) = { - val r = super.remove(key) - if (r.nonEmpty) debugLog("removed unit for "+key) - r - } - } - - /** A set containing all those files that need to be removed - * Units are removed by getUnit, typically once a unit is finished compiled. - */ - protected val toBeRemoved: mutable.Set[AbstractFile] = - new HashSet[AbstractFile] with SynchronizedSet[AbstractFile] - - /** A set containing all those files that need to be removed after a full background compiler run - */ - protected val toBeRemovedAfterRun: mutable.Set[AbstractFile] = - new HashSet[AbstractFile] with SynchronizedSet[AbstractFile] - - class ResponseMap extends MultiHashMap[SourceFile, Response[Tree]] { - override def += (binding: (SourceFile, Set[Response[Tree]])) = { - assert(interruptsEnabled, "delayed operation within an ask") - super.+=(binding) - } - } - - /** A map that associates with each abstract file the set of responses that are waiting - * (via waitLoadedTyped) for the unit associated with the abstract file to be loaded and completely typechecked. - */ - protected val waitLoadedTypeResponses = new ResponseMap - - /** A map that associates with each abstract file the set of responses that ware waiting - * (via build) for the unit associated with the abstract file to be parsed and entered - */ - protected var getParsedEnteredResponses = new ResponseMap - - private def cleanResponses(rmap: ResponseMap): Unit = { - for ((source, rs) <- rmap.toList) { - for (r <- rs) { - if (getUnit(source).isEmpty) - r raise new NoSuchUnitError(source.file) - if (r.isComplete) - rmap(source) -= r - } - if (rmap(source).isEmpty) - rmap -= source - } - } - - private def cleanAllResponses() { - cleanResponses(waitLoadedTypeResponses) - cleanResponses(getParsedEnteredResponses) - } - - private def checkNoOutstanding(rmap: ResponseMap): Unit = - for ((_, rs) <- rmap.toList; r <- rs) { - debugLog("ERROR: missing response, request will be discarded") - r raise new MissingResponse - } - - def checkNoResponsesOutstanding() { - checkNoOutstanding(waitLoadedTypeResponses) - checkNoOutstanding(getParsedEnteredResponses) - } - - /** The compilation unit corresponding to a source file - * if it does not yet exist create a new one atomically - * Note: We want to remove this. - */ - protected[interactive] def getOrCreateUnitOf(source: SourceFile): RichCompilationUnit = - unitOfFile.getOrElse(source.file, { println("precondition violated: "+source+" is not loaded"); new Exception().printStackTrace(); new RichCompilationUnit(source) }) - - /** Work through toBeRemoved list to remove any units. - * Then return optionally unit associated with given source. - */ - protected[interactive] def getUnit(s: SourceFile): Option[RichCompilationUnit] = { - toBeRemoved.synchronized { - for (f <- toBeRemoved) { - informIDE("removed: "+s) - unitOfFile -= f - allSources = allSources filter (_.file != f) - } - toBeRemoved.clear() - } - unitOfFile get s.file - } - - /** A list giving all files to be typechecked in the order they should be checked. - */ - protected var allSources: List[SourceFile] = List() - - private var lastException: Option[Throwable] = None - - /** A list of files that crashed the compiler. 
They will be ignored during background - * compilation until they are removed from this list. - */ - private var ignoredFiles: Set[AbstractFile] = Set() - - /** Flush the buffer of sources that are ignored during background compilation. */ - def clearIgnoredFiles() { - ignoredFiles = Set() - } - - /** Remove a crashed file from the ignore buffer. Background compilation will take it into account - * and errors will be reported against it. */ - def enableIgnoredFile(file: AbstractFile) { - ignoredFiles -= file - debugLog("Removed crashed file %s. Still in the ignored buffer: %s".format(file, ignoredFiles)) - } - - /** The currently active typer run */ - private var currentTyperRun: TyperRun = _ - newTyperRun() - - /** Is a background compiler run needed? - * Note: outOfDate is true as long as there is a background compile scheduled or going on. - */ - private var outOfDate = false - - def isOutOfDate: Boolean = outOfDate - - def demandNewCompilerRun() = { - if (outOfDate) throw new FreshRunReq // cancel background compile - else outOfDate = true // proceed normally and enable new background compile - } - - protected[interactive] var minRunId = 1 - - private[interactive] var interruptsEnabled = true - - private val NoResponse: Response[_] = new Response[Any] - - /** The response that is currently pending, i.e. the compiler - * is working on providing an asnwer for it. - */ - private var pendingResponse: Response[_] = NoResponse - - // ----------- Overriding hooks in nsc.Global ----------------------- - - /** Called from parser, which signals hereby that a method definition has been parsed. - */ - override def signalParseProgress(pos: Position) { - // We only want to be interruptible when running on the PC thread. - if(onCompilerThread) { - checkForMoreWork(pos) - } - } - - /** Called from typechecker, which signals hereby that a node has been completely typechecked. - * If the node includes unit.targetPos, abandons run and returns newly attributed tree. - * Otherwise, if there's some higher priority work to be done, also abandons run with a FreshRunReq. - * @param context The context that typechecked the node - * @param old The original node - * @param result The transformed node - */ - override def signalDone(context: Context, old: Tree, result: Tree) { - if (interruptsEnabled && analyzer.lockedCount == 0) { - if (context.unit.exists && - result.pos.isOpaqueRange && - (result.pos includes context.unit.targetPos)) { - var located = new TypedLocator(context.unit.targetPos) locateIn result - if (located == EmptyTree) { - println("something's wrong: no "+context.unit+" in "+result+result.pos) - located = result - } - throw new TyperResult(located) - } - try { - checkForMoreWork(old.pos) - } catch { - case ex: ValidateException => // Ignore, this will have been reported elsewhere - debugLog("validate exception caught: "+ex) - case ex: Throwable => - log.flush() - throw ex - } - } - } - - /** Called from typechecker every time a context is created. 
- * Registers the context in a context tree - */ - override def registerContext(c: Context) = c.unit match { - case u: RichCompilationUnit => addContext(u.contexts, c) - case _ => - } - - /** The top level classes and objects currently seen in the presentation compiler - */ - private val currentTopLevelSyms = new mutable.LinkedHashSet[Symbol] - - /** The top level classes and objects no longer seen in the presentation compiler - */ - val deletedTopLevelSyms = new mutable.LinkedHashSet[Symbol] with mutable.SynchronizedSet[Symbol] - - /** Called from typechecker every time a top-level class or object is entered. - */ - override def registerTopLevelSym(sym: Symbol) { currentTopLevelSyms += sym } - - /** Symbol loaders in the IDE parse all source files loaded from a package for - * top-level idents. Therefore, we can detect top-level symbols that have a name - * different from their source file - */ - override lazy val loaders = new BrowsingLoaders { - val global: Global.this.type = Global.this - } - - // ----------------- Polling --------------------------------------- - - case class WorkEvent(atNode: Int, atMillis: Long) - - private var moreWorkAtNode: Int = -1 - private var nodesSeen = 0 - private var lastWasReload = false - - /** The number of pollForWorks after which the presentation compiler yields. - * Yielding improves responsiveness on systems with few cores because it - * gives the UI thread a chance to get new tasks and interrupt the presentation - * compiler with them. - */ - private final val yieldPeriod = 10 - - /** Called from runner thread and signalDone: - * Poll for interrupts and execute them immediately. - * Then, poll for exceptions and execute them. - * Then, poll for work reload/typedTreeAt/doFirst commands during background checking. 
- * @param pos The position of the tree if polling while typechecking, NoPosition otherwise - * - */ - private[interactive] def pollForWork(pos: Position) { - if (!interruptsEnabled) return - if (pos == NoPosition || nodesSeen % yieldPeriod == 0) - Thread.`yield`() - - def nodeWithWork(): Option[WorkEvent] = - if (scheduler.moreWork || pendingResponse.isCancelled) Some(new WorkEvent(nodesSeen, System.currentTimeMillis)) - else None - - nodesSeen += 1 - logreplay("atnode", nodeWithWork()) match { - case Some(WorkEvent(id, _)) => - debugLog("some work at node "+id+" current = "+nodesSeen) -// assert(id >= nodesSeen) - moreWorkAtNode = id - case None => - } - - if (nodesSeen >= moreWorkAtNode) { - - logreplay("asked", scheduler.pollInterrupt()) match { - case Some(ir) => - try { - interruptsEnabled = false - debugLog("ask started"+timeStep) - ir.execute() - } finally { - debugLog("ask finished"+timeStep) - interruptsEnabled = true - } - pollForWork(pos) - case _ => - } - - if (logreplay("cancelled", pendingResponse.isCancelled)) { - throw CancelException - } - - logreplay("exception thrown", scheduler.pollThrowable()) match { - case Some(ex: FreshRunReq) => - newTyperRun() - minRunId = currentRunId - demandNewCompilerRun() - - case Some(ShutdownReq) => - scheduler.synchronized { // lock the work queue so no more items are posted while we clean it up - val units = scheduler.dequeueAll { - case item: WorkItem => Some(item.raiseMissing()) - case _ => Some(()) - } - - // don't forget to service interrupt requests - scheduler.dequeueAllInterrupts(_.execute()) - - debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size)) - debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)" - .format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size)) - checkNoResponsesOutstanding() - - log.flush() - scheduler = new NoWorkScheduler - throw ShutdownReq - } - - case Some(ex: Throwable) => log.flush(); throw ex - case _ => - } - - lastWasReload = false - - logreplay("workitem", scheduler.nextWorkItem()) match { - case Some(action) => - try { - debugLog("picked up work item at "+pos+": "+action+timeStep) - action() - debugLog("done with work item: "+action) - } finally { - debugLog("quitting work item: "+action+timeStep) - } - case None => - } - } - } - - protected def checkForMoreWork(pos: Position) { - val typerRun = currentTyperRun - pollForWork(pos) - if (typerRun != currentTyperRun) demandNewCompilerRun() - } - - // ----------------- The Background Runner Thread ----------------------- - - private var threadId = 0 - - /** The current presentation compiler runner */ - @volatile private[interactive] var compileRunner: Thread = newRunnerThread() - - /** Check that the currenyly executing thread is the presentation compiler thread. - * - * Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase) - */ - @elidable(elidable.WARNING) - override def assertCorrectThread() { - assert(initializing || onCompilerThread, - "Race condition detected: You are running a presentation compiler method outside the PC thread.[phase: %s]".format(globalPhase) + - " Please file a ticket with the current stack trace at https://www.assembla.com/spaces/scala-ide/support/tickets") - } - - /** Create a new presentation compiler runner. 
- */ - private def newRunnerThread(): Thread = { - threadId += 1 - compileRunner = new PresentationCompilerThread(this, projectName) - compileRunner.setDaemon(true) - compileRunner.start() - compileRunner - } - - private def ensureUpToDate(unit: RichCompilationUnit) = - if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units. - - /** Compile all loaded source files in the order given by `allSources`. - */ - private[interactive] final def backgroundCompile() { - informIDE("Starting new presentation compiler type checking pass") - reporter.reset() - - // remove any files in first that are no longer maintained by presentation compiler (i.e. closed) - allSources = allSources filter (s => unitOfFile contains (s.file)) - - // ensure all loaded units are parsed - for (s <- allSources; unit <- getUnit(s)) { - // checkForMoreWork(NoPosition) // disabled, as any work done here would be in an inconsistent state - ensureUpToDate(unit) - parseAndEnter(unit) - serviceParsedEntered() - } - - // sleep window - if (afterTypeDelay > 0 && lastWasReload) { - val limit = System.currentTimeMillis() + afterTypeDelay - while (System.currentTimeMillis() < limit) { - Thread.sleep(SleepTime) - checkForMoreWork(NoPosition) - } - } - - // ensure all loaded units are typechecked - for (s <- allSources; if !ignoredFiles(s.file); unit <- getUnit(s)) { - try { - if (!unit.isUpToDate) - if (unit.problems.isEmpty || !settings.YpresentationStrict.value) - typeCheck(unit) - else debugLog("%s has syntax errors. Skipped typechecking".format(unit)) - else debugLog("already up to date: "+unit) - for (r <- waitLoadedTypeResponses(unit.source)) - r set unit.body - serviceParsedEntered() - } catch { - case ex: FreshRunReq => throw ex // propagate a new run request - case ShutdownReq => throw ShutdownReq // propagate a shutdown request - case ex: ControlThrowable => throw ex - case ex: Throwable => - println("[%s]: exception during background compile: ".format(unit.source) + ex) - ex.printStackTrace() - for (r <- waitLoadedTypeResponses(unit.source)) { - r.raise(ex) - } - serviceParsedEntered() - - lastException = Some(ex) - ignoredFiles += unit.source.file - println("[%s] marking unit as crashed (crashedFiles: %s)".format(unit, ignoredFiles)) - - reporter.error(unit.body.pos, "Presentation compiler crashed while type checking this file: %s".format(ex.toString())) - } - } - - // move units removable after this run to the "to-be-removed" buffer - toBeRemoved ++= toBeRemovedAfterRun - - // clean out stale waiting responses - cleanAllResponses() - - // wind down - if (waitLoadedTypeResponses.nonEmpty || getParsedEnteredResponses.nonEmpty) { - // need another cycle to treat those - newTyperRun() - backgroundCompile() - } else { - outOfDate = false - informIDE("Everything is now up to date") - } - } - - /** Service all pending getParsedEntered requests - */ - private def serviceParsedEntered() { - var atOldRun = true - for ((source, rs) <- getParsedEnteredResponses; r <- rs) { - if (atOldRun) { newTyperRun(); atOldRun = false } - getParsedEnteredNow(source, r) - } - getParsedEnteredResponses.clear() - } - - /** Reset unit to unloaded state */ - private def reset(unit: RichCompilationUnit): Unit = { - unit.depends.clear() - unit.defined.clear() - unit.synthetics.clear() - unit.toCheck.clear() - unit.checkedFeatures = Set() - unit.targetPos = NoPosition - unit.contexts.clear() - unit.problems.clear() - unit.body = EmptyTree - unit.status = NotLoaded - } - - /** Parse unit and create a name 
index, unless this has already been done before */ - private def parseAndEnter(unit: RichCompilationUnit): Unit = - if (unit.status == NotLoaded) { - debugLog("parsing: "+unit) - currentTyperRun.compileLate(unit) - if (debugIDE && !reporter.hasErrors) validatePositions(unit.body) - if (!unit.isJava) syncTopLevelSyms(unit) - unit.status = JustParsed - } - - /** Make sure unit is typechecked - */ - private def typeCheck(unit: RichCompilationUnit) { - debugLog("type checking: "+unit) - parseAndEnter(unit) - unit.status = PartiallyChecked - currentTyperRun.typeCheck(unit) - unit.lastBody = unit.body - unit.status = currentRunId - } - - /** Update deleted and current top-level symbols sets */ - def syncTopLevelSyms(unit: RichCompilationUnit) { - val deleted = currentTopLevelSyms filter { sym => - /** We sync after namer phase and it resets all the top-level symbols - * that survive the new parsing - * round to NoPeriod. - */ - sym.sourceFile == unit.source.file && - sym.validTo != NoPeriod && - runId(sym.validTo) < currentRunId - } - for (d <- deleted) { - d.owner.info.decls unlink d - deletedTopLevelSyms += d - currentTopLevelSyms -= d - } - } - - /** Move list of files to front of allSources */ - def moveToFront(fs: List[SourceFile]) { - allSources = fs ::: (allSources diff fs) - } - - // ----------------- Implementations of client commands ----------------------- - - def respond[T](result: Response[T])(op: => T): Unit = - respondGradually(result)(Stream(op)) - - def respondGradually[T](response: Response[T])(op: => Stream[T]): Unit = { - val prevResponse = pendingResponse - try { - pendingResponse = response - if (!response.isCancelled) { - var results = op - while (!response.isCancelled && results.nonEmpty) { - val result = results.head - results = results.tail - if (results.isEmpty) { - response set result - debugLog("responded"+timeStep) - } else response setProvisionally result - } - } - } catch { - case CancelException => - debugLog("cancelled") - case ex: FreshRunReq => - if (debugIDE) { - println("FreshRunReq thrown during response") - ex.printStackTrace() - } - response raise ex - throw ex - - case ex @ ShutdownReq => - if (debugIDE) { - println("ShutdownReq thrown during response") - ex.printStackTrace() - } - response raise ex - throw ex - - case ex: Throwable => - if (debugIDE) { - println("exception thrown during response: "+ex) - ex.printStackTrace() - } - response raise ex - } finally { - pendingResponse = prevResponse - } - } - - private def reloadSource(source: SourceFile) { - val unit = new RichCompilationUnit(source) - unitOfFile(source.file) = unit - toBeRemoved -= source.file - toBeRemovedAfterRun -= source.file - reset(unit) - //parseAndEnter(unit) - } - - /** Make sure a set of compilation units is loaded and parsed */ - private def reloadSources(sources: List[SourceFile]) { - newTyperRun() - minRunId = currentRunId - sources foreach reloadSource - moveToFront(sources) - } - - /** Make sure a set of compilation units is loaded and parsed */ - private[interactive] def reload(sources: List[SourceFile], response: Response[Unit]) { - informIDE("reload: " + sources) - lastWasReload = true - respond(response)(reloadSources(sources)) - demandNewCompilerRun() - } - - private[interactive] def filesDeleted(sources: List[SourceFile], response: Response[Unit]) { - informIDE("files deleted: " + sources) - val deletedFiles = sources.map(_.file).toSet - val deletedSyms = currentTopLevelSyms filter {sym => deletedFiles contains sym.sourceFile} - for (d <- deletedSyms) { - 
d.owner.info.decls unlink d - deletedTopLevelSyms += d - currentTopLevelSyms -= d - } - sources foreach (removeUnitOf(_)) - minRunId = currentRunId - respond(response)(()) - demandNewCompilerRun() - } - - /** Arrange for unit to be removed after run, to give a chance to typecheck the unit fully. - * If we do just removeUnit, some problems with default parameters can ensue. - * Calls to this method could probably be replaced by removeUnit once default parameters are handled more robustly. - */ - private def afterRunRemoveUnitsOf(sources: List[SourceFile]) { - toBeRemovedAfterRun ++= sources map (_.file) - } - - /** A fully attributed tree located at position `pos` */ - private def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match { - case None => - reloadSources(List(pos.source)) - try typedTreeAt(pos) - finally afterRunRemoveUnitsOf(List(pos.source)) - case Some(unit) => - informIDE("typedTreeAt " + pos) - parseAndEnter(unit) - val tree = locateTree(pos) - debugLog("at pos "+pos+" was found: "+tree.getClass+" "+tree.pos.show) - tree match { - case Import(expr, _) => - debugLog("import found"+expr.tpe+(if (expr.tpe == null) "" else " "+expr.tpe.members)) - case _ => - } - if (stabilizedType(tree) ne null) { - debugLog("already attributed: "+tree.symbol+" "+tree.tpe) - tree - } else { - unit.targetPos = pos - try { - debugLog("starting targeted type check") - typeCheck(unit) -// println("tree not found at "+pos) - EmptyTree - } catch { - case ex: TyperResult => new Locator(pos) locateIn ex.tree - } finally { - unit.targetPos = NoPosition - } - } - } - - /** A fully attributed tree corresponding to the entire compilation unit */ - private[interactive] def typedTree(source: SourceFile, forceReload: Boolean): Tree = { - informIDE("typedTree " + source + " forceReload: " + forceReload) - val unit = getOrCreateUnitOf(source) - if (forceReload) reset(unit) - parseAndEnter(unit) - if (unit.status <= PartiallyChecked) typeCheck(unit) - unit.body - } - - /** Set sync var `response` to a fully attributed tree located at position `pos` */ - private[interactive] def getTypedTreeAt(pos: Position, response: Response[Tree]) { - respond(response)(typedTreeAt(pos)) - } - - /** Set sync var `response` to a fully attributed tree corresponding to the - * entire compilation unit */ - private[interactive] def getTypedTree(source: SourceFile, forceReload: Boolean, response: Response[Tree]) { - respond(response)(typedTree(source, forceReload)) - } - - private def withTempUnits[T](sources: List[SourceFile])(f: (SourceFile => RichCompilationUnit) => T): T = { - val unitOfSrc: SourceFile => RichCompilationUnit = src => unitOfFile(src.file) - sources filterNot (getUnit(_).isDefined) match { - case Nil => - f(unitOfSrc) - case unknown => - reloadSources(unknown) - try { - f(unitOfSrc) - } finally - afterRunRemoveUnitsOf(unknown) - } - } - - private def withTempUnit[T](source: SourceFile)(f: RichCompilationUnit => T): T = - withTempUnits(List(source)){ srcToUnit => - f(srcToUnit(source)) - } - - /** Find a 'mirror' of symbol `sym` in unit `unit`. Pre: `unit is loaded. 
*/ - private def findMirrorSymbol(sym: Symbol, unit: RichCompilationUnit): Symbol = { - val originalTypeParams = sym.owner.typeParams - ensureUpToDate(unit) - parseAndEnter(unit) - val pre = adaptToNewRunMap(ThisType(sym.owner)) - val rawsym = pre.typeSymbol.info.decl(sym.name) - val newsym = rawsym filter { alt => - sym.isType || { - try { - val tp1 = pre.memberType(alt) onTypeError NoType - val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams) - matchesType(tp1, tp2, alwaysMatchSimple = false) || { - debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed") - val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams) - matchesType(tp1, tp3, alwaysMatchSimple = false) || { - debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed") - false - } - } - } - catch { - case ex: ControlThrowable => throw ex - case ex: Throwable => - debugLog("error in findMirrorSymbol: " + ex) - ex.printStackTrace() - false - } - } - } - if (newsym == NoSymbol) { - if (rawsym.exists && !rawsym.isOverloaded) rawsym - else { - debugLog("mirror not found " + sym + " " + unit.source + " " + pre) - NoSymbol - } - } else if (newsym.isOverloaded) { - settings.uniqid.value = true - debugLog("mirror ambiguous " + sym + " " + unit.source + " " + pre + " " + newsym.alternatives) - NoSymbol - } else { - debugLog("mirror found for " + newsym + ": " + newsym.pos) - newsym - } - } - - /** Implements CompilerControl.askLinkPos */ - private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) { - informIDE("getLinkPos "+sym+" "+source) - respond(response) { - if (sym.owner.isClass) { - withTempUnit(source){ u => - findMirrorSymbol(sym, u).pos - } - } else { - debugLog("link not in class "+sym+" "+source+" "+sym.owner) - NoPosition - } - } - } - - private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) { - unit.body foreachPartial { - case DocDef(comment, defn) if defn.symbol == sym => - fillDocComment(defn.symbol, comment) - EmptyTree - case _: ValOrDefDef => - EmptyTree - } - } - - /** Implements CompilerControl.askDocComment */ - private[interactive] def getDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], - response: Response[(String, String, Position)]) { - informIDE(s"getDocComment $sym at $source site $site") - respond(response) { - withTempUnits(fragments.toList.unzip._2){ units => - for((sym, src) <- fragments) { - val mirror = findMirrorSymbol(sym, units(src)) - if (mirror ne NoSymbol) forceDocComment(mirror, units(src)) - } - val mirror = findMirrorSymbol(sym, units(source)) - if (mirror eq NoSymbol) - ("", "", NoPosition) - else { - (expandedDocComment(mirror, site), rawDocComment(mirror), docCommentPos(mirror)) - } - } - } - } - - def stabilizedType(tree: Tree): Type = tree match { - case Ident(_) if tree.symbol.isStable => - singleType(NoPrefix, tree.symbol) - case Select(qual, _) if qual.tpe != null && tree.symbol.isStable => - singleType(qual.tpe, tree.symbol) - case Import(expr, selectors) => - tree.symbol.info match { - case analyzer.ImportType(expr) => expr match { - case s@Select(qual, name) => singleType(qual.tpe, s.symbol) - case i : Ident => i.tpe - case _ => tree.tpe - } - case _ => tree.tpe - } - - case _ => tree.tpe - } - - import analyzer.{SearchResult, ImplicitSearch} - - private[interactive] def getScopeCompletion(pos: Position, response: Response[List[Member]]) { - informIDE("getScopeCompletion" + pos) - respond(response) { 
scopeMembers(pos) } - } - - private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] { - override def default(key: Name) = Set() - - private def matching(sym: Symbol, symtpe: Type, ms: Set[M]): Option[M] = ms.find { m => - (m.sym.name == sym.name) && (m.sym.isType || (m.tpe matches symtpe)) - } - - private def keepSecond(m: M, sym: Symbol, implicitlyAdded: Boolean): Boolean = - m.sym.hasFlag(ACCESSOR | PARAMACCESSOR) && - !sym.hasFlag(ACCESSOR | PARAMACCESSOR) && - (!implicitlyAdded || m.implicitlyAdded) - - def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) { - if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) { - add(sym.accessed, pre, implicitlyAdded)(toMember) - } else if (!sym.name.decodedName.containsName("$") && !sym.isSynthetic && sym.hasRawInfo) { - val symtpe = pre.memberType(sym) onTypeError ErrorType - matching(sym, symtpe, this(sym.name)) match { - case Some(m) => - if (keepSecond(m, sym, implicitlyAdded)) { - //print(" -+ "+sym.name) - this(sym.name) = this(sym.name) - m + toMember(sym, symtpe) - } - case None => - //print(" + "+sym.name) - this(sym.name) = this(sym.name) + toMember(sym, symtpe) - } - } - } - - def addNonShadowed(other: Members[M]) = { - for ((name, ms) <- other) - if (ms.nonEmpty && this(name).isEmpty) this(name) = ms - } - - def allMembers: List[M] = values.toList.flatten - } - - /** Return all members visible without prefix in context enclosing `pos`. */ - private def scopeMembers(pos: Position): List[ScopeMember] = { - typedTreeAt(pos) // to make sure context is entered - val context = doLocateContext(pos) - val locals = new Members[ScopeMember] - val enclosing = new Members[ScopeMember] - def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) = - locals.add(sym, pre, implicitlyAdded = false) { (s, st) => - new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport) - } - def localsToEnclosing() = { - enclosing.addNonShadowed(locals) - locals.clear() - } - //print("add scope members") - var cx = context - while (cx != NoContext) { - for (sym <- cx.scope) - addScopeMember(sym, NoPrefix, EmptyTree) - localsToEnclosing() - if (cx == cx.enclClass) { - val pre = cx.prefix - for (sym <- pre.members) - addScopeMember(sym, pre, EmptyTree) - localsToEnclosing() - } - cx = cx.outer - } - //print("\nadd imported members") - for (imp <- context.imports) { - val pre = imp.qual.tpe - for (sym <- imp.allImportedSymbols) - addScopeMember(sym, pre, imp.qual) - localsToEnclosing() - } - // println() - val result = enclosing.allMembers -// if (debugIDE) for (m <- result) println(m) - result - } - - private[interactive] def getTypeCompletion(pos: Position, response: Response[List[Member]]) { - informIDE("getTypeCompletion " + pos) - respondGradually(response) { typeMembers(pos) } - //if (debugIDE) typeMembers(pos) - } - - private def typeMembers(pos: Position): Stream[List[TypeMember]] = { - var tree = typedTreeAt(pos) - - // if tree consists of just x. or x.fo where fo is not yet a full member name - // ignore the selection and look in just x. - tree match { - case Select(qual, name) if tree.tpe == ErrorType => tree = qual - case _ => - } - - val context = doLocateContext(pos) - - if (tree.tpe == null) - // TODO: guard with try/catch to deal with ill-typed qualifiers. 
- tree = analyzer.newTyper(context).typedQualifier(tree) - - debugLog("typeMembers at "+tree+" "+tree.tpe) - - val superAccess = tree.isInstanceOf[Super] - val members = new Members[TypeMember] - - def addTypeMember(sym: Symbol, pre: Type, inherited: Boolean, viaView: Symbol) = { - val implicitlyAdded = viaView != NoSymbol - members.add(sym, pre, implicitlyAdded) { (s, st) => - new TypeMember(s, st, - context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded), - inherited, - viaView) - } - } - - /** Create a function application of a given view function to `tree` and typechecked it. - */ - def viewApply(view: SearchResult): Tree = { - assert(view.tree != EmptyTree) - analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false)) - .typed(Apply(view.tree, List(tree)) setPos tree.pos) - .onTypeError(EmptyTree) - } - - val pre = stabilizedType(tree) - - val ownerTpe = tree.tpe match { - case analyzer.ImportType(expr) => expr.tpe - case null => pre - case MethodType(List(), rtpe) => rtpe - case _ => tree.tpe - } - - //print("add members") - for (sym <- ownerTpe.members) - addTypeMember(sym, pre, sym.owner != ownerTpe.typeSymbol, NoSymbol) - members.allMembers #:: { - //print("\nadd enrichment") - val applicableViews: List[SearchResult] = - if (ownerTpe.isErroneous) List() - else new ImplicitSearch( - tree, functionType(List(ownerTpe), AnyClass.tpe), isView = true, - context0 = context.makeImplicit(reportAmbiguousErrors = false)).allImplicits - for (view <- applicableViews) { - val vtree = viewApply(view) - val vpre = stabilizedType(vtree) - for (sym <- vtree.tpe.members) { - addTypeMember(sym, vpre, inherited = false, view.tree.symbol) - } - } - //println() - Stream(members.allMembers) - } - } - - /** Implements CompilerControl.askLoadedTyped */ - private[interactive] def waitLoadedTyped(source: SourceFile, response: Response[Tree], onSameThread: Boolean = true) { - getUnit(source) match { - case Some(unit) => - if (unit.isUpToDate) { - debugLog("already typed") - response set unit.body - } else if (ignoredFiles(source.file)) { - response.raise(lastException.getOrElse(CancelException)) - } else if (onSameThread) { - getTypedTree(source, forceReload = false, response) - } else { - debugLog("wait for later") - outOfDate = true - waitLoadedTypeResponses(source) += response - } - case None => - debugLog("load unit and type") - try reloadSources(List(source)) - finally waitLoadedTyped(source, response, onSameThread) - } - } - - /** Implements CompilerControl.askParsedEntered */ - private[interactive] def getParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree], onSameThread: Boolean = true) { - getUnit(source) match { - case Some(unit) => - getParsedEnteredNow(source, response) - case None => - try { - if (keepLoaded || outOfDate && onSameThread) - reloadSources(List(source)) - } finally { - if (keepLoaded || !outOfDate || onSameThread) - getParsedEnteredNow(source, response) - else - getParsedEnteredResponses(source) += response - } - } - } - - /** Parses and enters given source file, stroring parse tree in response */ - private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) { - respond(response) { - onUnitOf(source) { unit => - parseAndEnter(unit) - unit.body - } - } - } - - @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") - def getInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) { - try { - interruptsEnabled = false - 
respond(response) { - instrument(source, line) - } - } finally { - interruptsEnabled = true - } - } - - // ---------------- Helper classes --------------------------- - - /** The typer run */ - class TyperRun extends Run { - // units is always empty - - /** canRedefine is used to detect double declarations of classes and objects - * in multiple source files. - * Since the IDE rechecks units several times in the same run, these tests - * are disabled by always returning true here. - */ - override def canRedefine(sym: Symbol) = true - - def typeCheck(unit: CompilationUnit): Unit = { - applyPhase(typerPhase, unit) - } - - /** Apply a phase to a compilation unit - * @return true iff typechecked correctly - */ - private def applyPhase(phase: Phase, unit: CompilationUnit) { - enteringPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit } - } - } - - def newTyperRun() { - currentTyperRun = new TyperRun - } - - class TyperResult(val tree: Tree) extends ControlThrowable - - assert(globalPhase.id == 0) - - implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x) - - // OnTypeError should still catch TypeError because of cyclic references, - // but DivergentImplicit shouldn't leak anymore here - class OnTypeError[T](op: => T) { - def onTypeError(alt: => T) = try { - op - } catch { - case ex: TypeError => - debugLog("type error caught: "+ex) - alt - case ex: DivergentImplicit => - debugLog("divergent implicit caught: "+ex) - alt - } - } - - /** The compiler has been initialized. Constructors are evaluated in textual order, - * so this is set to true only after all super constructors and the primary constructor - * have been executed. - */ - initializing = false -} - -object CancelException extends Exception - diff --git a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala deleted file mode 100644 index 013b152e96..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive - -import scala.collection.mutable.ArrayBuffer -import scala.reflect.internal.util.Position -import reporters.Reporter - -case class Problem(pos: Position, msg: String, severityLevel: Int) - -abstract class InteractiveReporter extends Reporter { - - def compiler: Global - - val otherProblems = new ArrayBuffer[Problem] - - override def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = try { - severity.count += 1 - val problems = - if (compiler eq null) { - otherProblems - } else if (pos.isDefined) { - compiler.getUnit(pos.source) match { - case Some(unit) => - compiler.debugLog(pos.source.file.name + ":" + pos.line + ": " + msg) - unit.problems - case None => - compiler.debugLog(pos.source.file.name + "[not loaded] :" + pos.line + ": " + msg) - otherProblems - } - } else { - compiler.debugLog("[no position] :" + msg) - otherProblems - } - problems += Problem(pos, msg, severity.id) - } catch { - case ex: UnsupportedOperationException => - } - - override def reset() { - super.reset() - otherProblems.clear() - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala deleted file mode 100644 index b184afd0f5..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala +++ /dev/null @@ 
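Most client-visible answers in the Global.scala file deleted above flow through respond/respondGradually. The following is a hedged, simplified restatement of that pattern, not the original method (which also tracks the pending response and rethrows control exceptions such as FreshRunReq and ShutdownReq); it assumes only the Response[T] API from the Response.scala file deleted further below:

    // Stream provisional answers into a Response; the last element becomes the
    // final result, and any failure is delivered to the waiting client.
    def respondGraduallySketch[T](response: Response[T])(op: => Stream[T]): Unit =
      try {
        var results = op
        while (!response.isCancelled && results.nonEmpty) {
          val r = results.head
          results = results.tail
          if (results.isEmpty) response.set(r)   // final answer, wakes up the client
          else response.setProvisionally(r)      // partial answer, more to come
        }
      } catch {
        case ex: Throwable => response.raise(ex)
      }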
-1,189 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive - -import util.InterruptReq -import scala.reflect.internal.util.{ SourceFile, BatchSourceFile } -import io.{ AbstractFile, PlainFile, Pickler, CondPickler } -import util.EmptyAction -import scala.reflect.internal.util.{ RangePosition, OffsetPosition, TransparentPosition } -import io.Pickler._ -import scala.collection.mutable -import mutable.ListBuffer - -trait Picklers { self: Global => - - lazy val freshRunReq = - unitPickler - .wrapped { _ => new FreshRunReq } { x => () } - .labelled ("FreshRunReq") - .cond (_.isInstanceOf[FreshRunReq]) - - lazy val shutdownReq = singletonPickler(ShutdownReq) - - def defaultThrowable[T <: Throwable]: CondPickler[T] = javaInstancePickler[T] cond { _ => true } - - implicit lazy val throwable: Pickler[Throwable] = - freshRunReq | shutdownReq | defaultThrowable - - implicit def abstractFile: Pickler[AbstractFile] = - pkl[String] - .wrapped[AbstractFile] { new PlainFile(_) } { _.path } - .asClass (classOf[PlainFile]) - - private val sourceFilesSeen = new mutable.HashMap[AbstractFile, Array[Char]] { - override def default(key: AbstractFile) = Array() - } - - type Diff = (Int /*start*/, Int /*end*/, String /*replacement*/) - - def delta(f: AbstractFile, cs: Array[Char]): Diff = { - val bs = sourceFilesSeen(f) - var start = 0 - while (start < bs.length && start < cs.length && bs(start) == cs(start)) start += 1 - var end = bs.length - var end2 = cs.length - while (end > start && end2 > start && bs(end - 1) == cs(end2 - 1)) { end -= 1; end2 -= 1 } - sourceFilesSeen(f) = cs - (start, end, cs.slice(start, end2).mkString("")) - } - - def patch(f: AbstractFile, d: Diff): Array[Char] = { - val (start, end, replacement) = d - val patched = sourceFilesSeen(f).patch(start, replacement, end - start) - sourceFilesSeen(f) = patched - patched - } - - implicit lazy val sourceFile: Pickler[SourceFile] = - (pkl[AbstractFile] ~ pkl[Diff]).wrapped[SourceFile] { - case f ~ d => new BatchSourceFile(f, patch(f, d)) - } { - f => f.file ~ delta(f.file, f.content) - }.asClass (classOf[BatchSourceFile]) - - lazy val offsetPosition: CondPickler[OffsetPosition] = - (pkl[SourceFile] ~ pkl[Int]) - .wrapped { case x ~ y => new OffsetPosition(x, y) } { p => p.source ~ p.point } - .asClass (classOf[OffsetPosition]) - - lazy val rangePosition: CondPickler[RangePosition] = - (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int]) - .wrapped { case source ~ start ~ point ~ end => new RangePosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end } - .asClass (classOf[RangePosition]) - - lazy val transparentPosition: CondPickler[TransparentPosition] = - (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int]) - .wrapped { case source ~ start ~ point ~ end => new TransparentPosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end } - .asClass (classOf[TransparentPosition]) - - lazy val noPosition = singletonPickler(NoPosition) - - implicit lazy val position: Pickler[Position] = transparentPosition | rangePosition | offsetPosition | noPosition - - implicit lazy val namePickler: Pickler[Name] = - pkl[String] .wrapped[Name] { - str => if ((str.length > 1) && (str endsWith "!")) newTypeName(str.init) else newTermName(str) - } { - name => if (name.isTypeName) name.toString+"!" 
else name.toString - } - - implicit lazy val symPickler: Pickler[Symbol] = { - def ownerNames(sym: Symbol, buf: ListBuffer[Name]): ListBuffer[Name] = { - if (!sym.isRoot) { - ownerNames(sym.owner, buf) - buf += (if (sym.isModuleClass) sym.sourceModule else sym).name - if (!sym.isType && !sym.isStable) { - val sym1 = sym.owner.info.decl(sym.name) - if (sym1.isOverloaded) { - val index = sym1.alternatives.indexOf(sym) - assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives) - buf += newTermName(index.toString) - } - } - } - buf - } - def makeSymbol(root: Symbol, names: List[Name]): Symbol = names match { - case List() => - root - case name :: rest => - val sym = root.info.decl(name) - if (sym.isOverloaded) makeSymbol(sym.alternatives(rest.head.toString.toInt), rest.tail) - else makeSymbol(sym, rest) - } - pkl[List[Name]] .wrapped { makeSymbol(rootMirror.RootClass, _) } { ownerNames(_, new ListBuffer).toList } - } - - implicit def workEvent: Pickler[WorkEvent] = { - (pkl[Int] ~ pkl[Long]) - .wrapped { case id ~ ms => WorkEvent(id, ms) } { w => w.atNode ~ w.atMillis } - } - - implicit def interruptReq: Pickler[InterruptReq] = { - val emptyIR: InterruptReq = new InterruptReq { type R = Unit; val todo = () => () } - pkl[Unit] .wrapped { _ => emptyIR } { _ => () } - } - - implicit def reloadItem: CondPickler[ReloadItem] = - pkl[List[SourceFile]] - .wrapped { ReloadItem(_, new Response) } { _.sources } - .asClass (classOf[ReloadItem]) - - implicit def askTypeAtItem: CondPickler[AskTypeAtItem] = - pkl[Position] - .wrapped { new AskTypeAtItem(_, new Response) } { _.pos } - .asClass (classOf[AskTypeAtItem]) - - implicit def askTypeItem: CondPickler[AskTypeItem] = - (pkl[SourceFile] ~ pkl[Boolean]) - .wrapped { case source ~ forceReload => new AskTypeItem(source, forceReload, new Response) } { w => w.source ~ w.forceReload } - .asClass (classOf[AskTypeItem]) - - implicit def askTypeCompletionItem: CondPickler[AskTypeCompletionItem] = - pkl[Position] - .wrapped { new AskTypeCompletionItem(_, new Response) } { _.pos } - .asClass (classOf[AskTypeCompletionItem]) - - implicit def askScopeCompletionItem: CondPickler[AskScopeCompletionItem] = - pkl[Position] - .wrapped { new AskScopeCompletionItem(_, new Response) } { _.pos } - .asClass (classOf[AskScopeCompletionItem]) - - implicit def askToDoFirstItem: CondPickler[AskToDoFirstItem] = - pkl[SourceFile] - .wrapped { new AskToDoFirstItem(_) } { _.source } - .asClass (classOf[AskToDoFirstItem]) - - implicit def askLinkPosItem: CondPickler[AskLinkPosItem] = - (pkl[Symbol] ~ pkl[SourceFile]) - .wrapped { case sym ~ source => new AskLinkPosItem(sym, source, new Response) } { item => item.sym ~ item.source } - .asClass (classOf[AskLinkPosItem]) - - implicit def askDocCommentItem: CondPickler[AskDocCommentItem] = - (pkl[Symbol] ~ pkl[SourceFile] ~ pkl[Symbol] ~ pkl[List[(Symbol,SourceFile)]]) - .wrapped { case sym ~ source ~ site ~ fragments => new AskDocCommentItem(sym, source, site, fragments, new Response) } { item => item.sym ~ item.source ~ item.site ~ item.fragments } - .asClass (classOf[AskDocCommentItem]) - - implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] = - pkl[SourceFile] - .wrapped { source => new AskLoadedTypedItem(source, new Response) } { _.source } - .asClass (classOf[AskLoadedTypedItem]) - - implicit def askParsedEnteredItem: CondPickler[AskParsedEnteredItem] = - (pkl[SourceFile] ~ pkl[Boolean]) - .wrapped { case source ~ keepLoaded => new AskParsedEnteredItem(source, keepLoaded, new Response) } { w => w.source 
~ w.keepLoaded } - .asClass (classOf[AskParsedEnteredItem]) - - implicit def emptyAction: CondPickler[EmptyAction] = - pkl[Unit] - .wrapped { _ => new EmptyAction } { _ => () } - .asClass (classOf[EmptyAction]) - - implicit def action: Pickler[() => Unit] = - reloadItem | askTypeAtItem | askTypeItem | askTypeCompletionItem | askScopeCompletionItem | - askToDoFirstItem | askLinkPosItem | askDocCommentItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction -} diff --git a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala deleted file mode 100644 index a2d8e5d49a..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - * @author Iulian Dragos - */ -package scala.tools.nsc.interactive - -/** A presentation compiler thread. This is a lightweight class, delegating most - * of its functionality to the compiler instance. - * - */ -final class PresentationCompilerThread(var compiler: Global, name: String = "") - extends Thread("Scala Presentation Compiler [" + name + "]") { - - /** The presentation compiler loop. - */ - override def run() { - compiler.debugLog("starting new runner thread") - while (compiler ne null) try { - compiler.checkNoResponsesOutstanding() - compiler.log.logreplay("wait for more work", { compiler.scheduler.waitForMoreWork(); true }) - compiler.pollForWork(compiler.NoPosition) - while (compiler.isOutOfDate) { - try { - compiler.backgroundCompile() - } catch { - case ex: FreshRunReq => - compiler.debugLog("fresh run req caught, starting new pass") - } - compiler.log.flush() - } - } catch { - case ex @ ShutdownReq => - compiler.debugLog("exiting presentation compiler") - compiler.log.close() - - // make sure we don't keep around stale instances - compiler = null - case ex: Throwable => - compiler.log.flush() - - ex match { - case ex: FreshRunReq => - compiler.debugLog("fresh run req caught outside presentation compiler loop; ignored") // This shouldn't be reported - case _ : Global#ValidateException => // This will have been reported elsewhere - compiler.debugLog("validate exception caught outside presentation compiler loop; ignored") - case _ => ex.printStackTrace(); compiler.informIDE("Fatal Error: "+ex) - } - } - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala deleted file mode 100644 index 04c06b9357..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/REPL.scala +++ /dev/null @@ -1,218 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive - -import scala.reflect.internal.util._ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.io._ -import scala.tools.nsc.scratchpad.SourceInserter -import java.io.FileWriter - -/** Interface of interactive compiler to a client such as an IDE - */ -object REPL { - - val versionMsg = "Scala compiler " + - Properties.versionString + " -- " + - Properties.copyrightString - - val prompt = "> " - - var reporter: ConsoleReporter = _ - - private def replError(msg: String) { - reporter.error(/*new Position */FakePos("scalac"), - msg + "\n scalac -help gives more information") - } - - def process(args: Array[String]) { - val settings = new 
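The PresentationCompilerThread deleted above is essentially a daemon loop: wait for work, run the background pass, treat FreshRunReq as a request to start a new pass, and exit cleanly on ShutdownReq. A minimal standalone sketch of that shape (made-up names; a blocking queue stands in for the compiler's work scheduler):

    import java.util.concurrent.LinkedBlockingQueue
    import scala.util.control.ControlThrowable

    class FreshPassReq extends ControlThrowable   // "redo the current pass"
    object StopReq extends ControlThrowable       // "shut the worker down"

    class Worker(queue: LinkedBlockingQueue[() => Unit]) extends Thread("worker") {
      setDaemon(true)
      override def run(): Unit =
        try {
          while (true) {
            val task = queue.take()               // block until there is more work
            try task()
            catch { case _: FreshPassReq => () }  // restart: fall through and poll again
          }
        } catch {
          case StopReq => ()                      // orderly shutdown, the thread exits
        }
    }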
Settings(replError) - reporter = new ConsoleReporter(settings) - val command = new CompilerCommand(args.toList, settings) - if (command.settings.version.value) - reporter.echo(versionMsg) - else { - try { - object compiler extends Global(command.settings, reporter) { -// printTypings = true - } - if (reporter.hasErrors) { - reporter.flush() - return - } - if (command.shouldStopWithInfo) { - reporter.echo(command.getInfoMessage(compiler)) - } else { - run(compiler) - } - } catch { - case ex @ FatalError(msg) => - if (true || command.settings.debug.value) // !!! - ex.printStackTrace() - reporter.error(null, "fatal error: " + msg) - } - } - } - - def main(args: Array[String]) { - process(args) - sys.exit(if (reporter.hasErrors) 1 else 0) - } - - def loop(action: (String) => Unit) { - Console.print(prompt) - try { - val line = Console.readLine() - if (line.length() > 0) { - action(line) - } - loop(action) - } - catch { - case _: java.io.EOFException => //nop - } - } - - /** Commands: - * - * reload file1 ... fileN - * typeat file off1 off2? - * complete file off1 off2? - */ - def run(comp: Global) { - val reloadResult = new Response[Unit] - val typeatResult = new Response[comp.Tree] - val completeResult = new Response[List[comp.Member]] - val typedResult = new Response[comp.Tree] - val structureResult = new Response[comp.Tree] - @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") - val instrumentedResult = new Response[(String, Array[Char])] - - def makePos(file: String, off1: String, off2: String) = { - val source = toSourceFile(file) - comp.rangePos(source, off1.toInt, off1.toInt, off2.toInt) - } - - def doTypeAt(pos: Position) { - comp.askTypeAt(pos, typeatResult) - show(typeatResult) - } - - def doComplete(pos: Position) { - comp.askTypeCompletion(pos, completeResult) - show(completeResult) - } - - def doStructure(file: String) { - comp.askParsedEntered(toSourceFile(file), keepLoaded = false, structureResult) - show(structureResult) - } - - /** Write instrumented source file to disk. - * @param iFullName The full name of the first top-level object in source - * @param iContents An Array[Char] containing the instrumented source - * @return The name of the instrumented source file - */ - @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") - def writeInstrumented(iFullName: String, suffix: String, iContents: Array[Char]): String = { - val iSimpleName = iFullName drop ((iFullName lastIndexOf '.') + 1) - val iSourceName = iSimpleName + suffix - val ifile = new FileWriter(iSourceName) - ifile.write(iContents) - ifile.close() - iSourceName - } - - /** The method for implementing worksheet functionality. - * @param arguments a file name, followed by optional command line arguments that are passed - * to the compiler that processes the instrumented source. - * @param line A line number that controls uop to which line results should be produced - * If line = -1, results are produced for all expressions in the worksheet. - * @return The generated file content containing original source in the left column - * and outputs in the right column, or None if the presentation compiler - * does not respond to askInstrumented. 
- */ - @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") - def instrument(arguments: List[String], line: Int): Option[(String, String)] = { - val source = toSourceFile(arguments.head) - // strip right hand side comment column and any trailing spaces from all lines - val strippedContents = SourceInserter.stripRight(source.content) - val strippedSource = new BatchSourceFile(source.file, strippedContents) - println("stripped source = "+strippedSource+":"+strippedContents.mkString) - comp.askReload(List(strippedSource), reloadResult) - comp.askInstrumented(strippedSource, line, instrumentedResult) - using(instrumentedResult) { - case (iFullName, iContents) => - println(s"instrumented source $iFullName = ${iContents.mkString}") - val iSourceName = writeInstrumented(iFullName, "$instrumented.scala", iContents) - val sSourceName = writeInstrumented(iFullName, "$stripped.scala", strippedContents) - (iSourceName, sSourceName) -/* - * val vdirOpt = compileInstrumented(iSourceName, arguments.tail) - runInstrumented(vdirOpt, iFullName, strippedSource.content) - */ - } - } - - loop { line => - (line split " ").toList match { - case "reload" :: args => - comp.askReload(args map toSourceFile, reloadResult) - show(reloadResult) - case "reloadAndAskType" :: file :: millis :: Nil => - comp.askReload(List(toSourceFile(file)), reloadResult) - Thread.sleep(millis.toInt) - println("ask type now") - comp.askLoadedTyped(toSourceFile(file), typedResult) - typedResult.get - case List("typeat", file, off1, off2) => - doTypeAt(makePos(file, off1, off2)) - case List("typeat", file, off1) => - doTypeAt(makePos(file, off1, off1)) - case List("complete", file, off1, off2) => - doComplete(makePos(file, off1, off2)) - case List("complete", file, off1) => - doComplete(makePos(file, off1, off1)) - case "instrument" :: arguments => - println(instrument(arguments, -1)) - case "instrumentTo" :: line :: arguments => - println(instrument(arguments, line.toInt)) - case List("quit") => - comp.askShutdown() - sys.exit(1) - case List("structure", file) => - doStructure(file) - case _ => - print("""Available commands: - | reload ... 
- | reloadAndAskType - | typed - | typeat - | typeat - | complete - | compile - | instrument * - | instrumentTo * - | structure - | quit - |""".stripMargin) - } - } - } - - def toSourceFile(name: String) = new BatchSourceFile(new PlainFile(new java.io.File(name))) - - def using[T, U](svar: Response[T])(op: T => U): Option[U] = { - val res = svar.get match { - case Left(result) => Some(op(result)) - case Right(exc) => exc.printStackTrace; println("ERROR: "+exc); None - } - svar.clear() - res - } - - def show[T](svar: Response[T]) = using(svar)(res => println("==> "+res)) -} diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala deleted file mode 100644 index c57e1da184..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ - -package scala.tools.nsc -package interactive - -@deprecated("Use scala.reflect.internal.Positions", "2.11.0") -trait RangePositions extends scala.reflect.internal.Positions with ast.Trees with ast.Positions { - self: scala.tools.nsc.Global => - - override def useOffsetPositions = false -} diff --git a/src/compiler/scala/tools/nsc/interactive/Response.scala b/src/compiler/scala/tools/nsc/interactive/Response.scala deleted file mode 100644 index f36f769ec9..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/Response.scala +++ /dev/null @@ -1,105 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive - -/** Typical interaction, given a predicate , a function , - * and an exception handler : - * - * val TIMEOUT = 100 // (milliseconds) or something like that - * val r = new Response() - * while (!r.isComplete && !r.isCancelled) { - * if () r.cancel() - * else r.get(TIMEOUT) match { - * case Some(Left(data)) => (data) - * case Some(Right(exc)) => (exc) - * case None => - * } - * } - */ -class Response[T] { - - private var data: Option[Either[T, Throwable]] = None - private var complete = false - private var cancelled = false - - /** Set provisional data, more to come - */ - def setProvisionally(x: T) = synchronized { - data = Some(Left(x)) - } - - /** Set final data, and mark response as complete. - */ - def set(x: T) = synchronized { - data = Some(Left(x)) - complete = true - notifyAll() - } - - /** Store raised exception in data, and mark response as complete. - */ - def raise(exc: Throwable) = synchronized { - data = Some(Right(exc)) - complete = true - notifyAll() - } - - /** Get final data, wait as long as necessary. - * When interrupted will return with Right(InterruptedException) - */ - def get: Either[T, Throwable] = synchronized { - while (!complete) { - try { - wait() - } catch { - case exc: InterruptedException => raise(exc) - } - } - data.get - } - - /** Optionally get data within `timeout` milliseconds. - * When interrupted will return with Some(Right(InterruptedException)) - * When timeout ends, will return last stored provisional result, - * or else None if no provisional result was stored. 
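The REPL object deleted above drives the interactive compiler from the console by splitting each input line on spaces and pattern matching on the resulting command list. A tiny standalone sketch of that dispatch loop (made-up helper and commands, not the original code):

    import scala.io.StdIn

    // Read a line, hand the tokenized command to `handle`, loop while it returns true.
    def commandLoop(handle: List[String] => Boolean): Unit = {
      print("> ")
      Option(StdIn.readLine()) match {
        case Some(line) if line.nonEmpty =>
          if (handle(line.split(" ").toList)) commandLoop(handle)
        case Some(_) => commandLoop(handle)   // empty line: prompt again
        case None    => ()                    // EOF: stop
      }
    }

    // usage sketch:
    // commandLoop { case "quit" :: Nil => false; case cmd => println(s"got $cmd"); true }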
- */ - def get(timeout: Long): Option[Either[T, Throwable]] = synchronized { - val start = System.currentTimeMillis - var current = start - while (!complete && start + timeout > current) { - try { - wait(timeout - (current - start)) - } catch { - case exc: InterruptedException => raise(exc) - } - current = System.currentTimeMillis - } - data - } - - /** Final data set was stored - */ - def isComplete = synchronized { complete } - - /** Cancel action computing this response (Only the - * party that calls get on a response may cancel). - */ - def cancel() = synchronized { cancelled = true } - - /** A cancel request for this response has been issued - */ - def isCancelled = synchronized { cancelled } - - def clear() = synchronized { - data = None - complete = false - cancelled = false - } -} - - - - diff --git a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala deleted file mode 100644 index b83c2cd095..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive - -import scala.reflect.internal.util.{SourceFile, Position, NoPosition} -import scala.collection.mutable.ArrayBuffer - -trait RichCompilationUnits { self: Global => - - /** The status value of a unit that has not yet been loaded */ - final val NotLoaded = -2 - - /** The status value of a unit that has not yet been typechecked */ - final val JustParsed = -1 - - /** The status value of a unit that has been partially typechecked */ - final val PartiallyChecked = 0 - - class RichCompilationUnit(source: SourceFile) extends CompilationUnit(source) { - - /** The runid of the latest compiler run that typechecked this unit, - * or else @see NotLoaded, JustParsed - */ - var status: Int = NotLoaded - - /** Unit has been parsed */ - def isParsed: Boolean = status >= JustParsed - - /** Unit has been typechecked, but maybe not in latest runs */ - def isTypeChecked: Boolean = status > JustParsed - - /** Unit has been typechecked and is up to date */ - def isUpToDate: Boolean = status >= minRunId - - /** the current edit point offset */ - var editPoint: Int = -1 - - /** The problems reported for this unit */ - val problems = new ArrayBuffer[Problem] - - /** The position of a targeted type check - * If this is different from NoPosition, the type checking - * will stop once a tree that contains this position range - * is fully attributed. 
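The header comment of the Response.scala file deleted above describes the typical client interaction, but its angle-bracketed placeholders did not survive transcription. Restated as a hedged, self-contained helper that uses only the Response[T] API shown there (await, isDone, useData and handleError are made-up names):

    def await[T](r: Response[T])(isDone: () => Boolean)
                (useData: T => Unit, handleError: Throwable => Unit): Unit = {
      val TIMEOUT = 100L // milliseconds, or something like that
      while (!r.isComplete && !r.isCancelled) {
        if (isDone()) r.cancel()
        else r.get(TIMEOUT) match {
          case Some(Left(data)) => useData(data)       // provisional or final data
          case Some(Right(exc)) => handleError(exc)    // the computation failed
          case None             =>                     // timed out, poll again
        }
      }
    }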
- */ - var _targetPos: Position = NoPosition - override def targetPos: Position = _targetPos - def targetPos_=(p: Position) { _targetPos = p } - - var contexts: Contexts = new Contexts - - /** The last fully type-checked body of this unit */ - var lastBody: Tree = EmptyTree - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala deleted file mode 100644 index 7af9174704..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala +++ /dev/null @@ -1,200 +0,0 @@ -package scala.tools.nsc -package interactive - -import scala.reflect.internal.util.{SourceFile, BatchSourceFile, RangePosition} -import scala.collection.mutable.ArrayBuffer -import scala.reflect.internal.Chars.{isLineBreakChar, isWhitespace} -import ast.parser.Tokens._ - -@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") -trait ScratchPadMaker { self: Global => - - import definitions._ - - private case class Patch(offset: Int, text: String) - - private class Patcher(contents: Array[Char], lex: LexicalStructure, endOffset: Int) extends Traverser { - var objectName: String = "" - - private val patches = new ArrayBuffer[Patch] - private val toPrint = new ArrayBuffer[String] - private var skipped = 0 - private var resNum: Int = -1 - - private def nextRes(): String = { - resNum += 1 - "res$"+resNum - } - - private def nameType(name: String, tpe: Type): String = { - // if name ends in symbol character, add a space to separate it from the following ':' - val pad = if (Character.isLetter(name.last) || Character.isDigit(name.last)) "" else " " - name+pad+": "+tpe - } - - private def nameType(sym: Symbol): String = nameType(sym.name.decoded, sym.tpe) - - private def literal(str: String) = "\"\"\""+str+"\"\"\"" - - private val prologue = ";import scala.runtime.WorksheetSupport._; def main(args: Array[String])=$execute{" - - private val epilogue = "}" - - private def applyPendingPatches(offset: Int) = { - if (skipped == 0) patches += Patch(offset, prologue) - for (msg <- toPrint) patches += Patch(offset, ";System.out.println("+msg+")") - toPrint.clear() - } - - /** The position where to insert an instrumentation statement in front of giuven statement. - * This is at the latest `stat.pos.start`. But in order not to mess with column numbers - * in position we try to insert it at the end of the previous token instead. - * Furthermore, `(' tokens have to be skipped because they do not show up - * in statement range positions. 
- */ - private def instrumentPos(start: Int): Int = { - val (prevToken, prevStart, prevEnd) = lex.locate(start - 1) - if (prevStart >= start) start - else if (prevToken == LPAREN) instrumentPos(prevStart) - else prevEnd - } - - private def addSkip(stat: Tree): Unit = { - val ipos = instrumentPos(stat.pos.start) - if (stat.pos.start > skipped) applyPendingPatches(ipos) - if (stat.pos.start >= endOffset) - patches += Patch(ipos, ";$stop()") - var end = stat.pos.end - if (end > skipped) { - while (end < contents.length && !isLineBreakChar(contents(end))) end += 1 - patches += Patch(ipos, ";$skip("+(end-skipped)+"); ") - skipped = end - } - } - - private def addSandbox(expr: Tree) = {} -// patches += (Patch(expr.pos.start, "sandbox("), Patch(expr.pos.end, ")")) - - private def resultString(prefix: String, expr: String) = - literal(prefix + " = ") + " + $show(" + expr + ")" - - private def traverseStat(stat: Tree) = - if (stat.pos.isInstanceOf[RangePosition]) { - stat match { - case ValDef(_, _, _, rhs) => - addSkip(stat) - if (stat.symbol.isLazy) - toPrint += literal(nameType(stat.symbol) + " = ") - else if (!stat.symbol.isSynthetic) { - addSandbox(rhs) - toPrint += resultString(nameType(stat.symbol), stat.symbol.name.toString) - } - case DefDef(_, _, _, _, _, _) => - addSkip(stat) - toPrint += literal(nameType(stat.symbol)) - case Annotated(_, arg) => - traverse(arg) - case DocDef(_, defn) => - traverse(defn) - case _ => - if (stat.isTerm) { - addSkip(stat) - if (stat.tpe.typeSymbol == UnitClass) { - addSandbox(stat) - } else { - val resName = nextRes() - val dispResName = resName filter ('$' != _) - val offset = instrumentPos(stat.pos.start) - patches += Patch(offset, "val " + resName + " = ") - addSandbox(stat) - toPrint += resultString(nameType(dispResName, stat.tpe), resName) - } - } - } - } - - override def traverse(tree: Tree): Unit = tree match { - case PackageDef(_, _) => - super.traverse(tree) - case ModuleDef(_, name, Template(_, _, body)) => - val topLevel = objectName.isEmpty - if (topLevel) { - objectName = tree.symbol.fullName - body foreach traverseStat - if (skipped != 0) { // don't issue prologue and epilogue if there are no instrumented statements - applyPendingPatches(skipped) - patches += Patch(skipped, epilogue) - } - } - case _ => - } - - /** The patched text. 
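To make the prologue, `$skip` and `$show` fragments above concrete: a one-statement worksheet object containing `val x = 1 + 2` ends up with patched text of roughly the following shape (a heavily simplified sketch, not part of the patch; the real skip counts and offsets are computed from the source):

    object Main {;import scala.runtime.WorksheetSupport._; def main(args: Array[String])=$execute{;$skip(16);
    val x = 1 + 2;System.out.println("""x: Int = """ + $show(x))}}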
- * @require traverse is run first - */ - def result: Array[Char] = { - val reslen = contents.length + (patches map (_.text.length)).sum - val res = Array.ofDim[Char](reslen) - var lastOffset = 0 - var from = 0 - var to = 0 - for (Patch(offset, text) <- patches) { - val delta = offset - lastOffset - assert(delta >= 0) - Array.copy(contents, from, res, to, delta) - from += delta - to += delta - lastOffset = offset - text.copyToArray(res, to) - to += text.length - } - assert(contents.length - from == reslen - to) - Array.copy(contents, from, res, to, contents.length - from) - res - } - } - - class LexicalStructure(source: SourceFile) { - val token = new ArrayBuffer[Int] - val startOffset = new ArrayBuffer[Int] - val endOffset = new ArrayBuffer[Int] - private val scanner = new syntaxAnalyzer.UnitScanner(new CompilationUnit(source)) - scanner.init() - while (scanner.token != EOF) { - startOffset += scanner.offset - token += scanner.token - scanner.nextToken() - endOffset += scanner.lastOffset - } - - /** @return token that starts before or at offset, its startOffset, its endOffset - */ - def locate(offset: Int): (Int, Int, Int) = { - var lo = 0 - var hi = token.length - 1 - while (lo < hi) { - val mid = (lo + hi + 1) / 2 - if (startOffset(mid) <= offset) lo = mid - else hi = mid - 1 - } - (token(lo), startOffset(lo), endOffset(lo)) - } - } - - /** Compute an instrumented version of a sourcefile. - * @param source The given sourcefile. - * @param line The line up to which results should be printed, -1 = whole document. - * @return A pair consisting of - * - the fully qualified name of the first top-level object definition in the file. - * or "" if there are no object definitions. - * - the text of the instrumented program which, when run, - * prints its output and all defined values in a comment column. - */ - protected def instrument(source: SourceFile, line: Int): (String, Array[Char]) = { - val tree = typedTree(source, forceReload = true) - val endOffset = if (line < 0) source.length else source.lineToOffset(line + 1) - val patcher = new Patcher(source.content, new LexicalStructure(source), endOffset) - patcher.traverse(tree) - (patcher.objectName, patcher.result) - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala deleted file mode 100644 index a4a2de9b51..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala +++ /dev/null @@ -1,123 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive -package tests - -import core._ -import scala.collection.mutable.ListBuffer - -/** A base class for writing interactive compiler tests. - * - * This class tries to cover common functionality needed when testing the presentation - * compiler: instantiation source files, reloading, creating positions, instantiating - * the presentation compiler, random stress testing. - * - * By default, this class loads all scala and java classes found under `src/`, going - * recursively into subfolders. Loaded classes are found in `sourceFiles`. trait `TestResources` - * The presentation compiler is available through `compiler`. - * - * It is easy to test member completion, type and hyperlinking at a given position. Source - * files are searched for `TextMarkers`. 
By default, the completion marker is `/*!*/`, the - * typedAt marker is `/*?*/` and the hyperlinking marker is `/*#*/`. Place these markers in - * your source files, and the test framework will automatically pick them up and test the - * corresponding actions. Sources are reloaded by `askReload(sourceFiles)` (blocking - * call). All ask operations are placed on the work queue without waiting for each one to - * complete before asking the next. After all asks, it waits for each response in turn and - * prints the result. The default timeout is 1 second per operation. - * - * To define a custom operation you have to: - * - * (1) Define a new marker by extending `TestMarker` - * (2) Provide an implementation for the operation you want to check by extending `PresentationCompilerTestDef` - * (3) Add the class defined in (1) to the set of executed test actions by calling `++` on `InteractiveTest`. - * - * Then you can simply use the new defined `marker` in your test sources and the testing - * framework will automatically pick it up. - * - * @see Check existing tests under test/files/presentation - * - * @author Iulian Dragos - * @author Mirco Dotta - */ -abstract class InteractiveTest - extends AskParse - with AskShutdown - with AskReload - with AskLoadedTyped - with PresentationCompilerInstance - with CoreTestDefs - with InteractiveTestSettings { self => - - protected val runRandomTests = false - - /** Should askAllSources wait for each ask to finish before issuing the next? */ - override protected val synchronousRequests = true - - /** The core set of test actions that are executed during each test run are - * `CompletionAction`, `TypeAction` and `HyperlinkAction`. - * Override this member if you need to change the default set of executed test actions. - */ - protected lazy val testActions: ListBuffer[PresentationCompilerTestDef] = { - ListBuffer(new CompletionAction(compiler), new TypeAction(compiler), new HyperlinkAction(compiler)) - } - - /** Add new presentation compiler actions to test. Presentation compiler's test - * need to extends trait `PresentationCompilerTestDef`. - */ - protected def ++(tests: PresentationCompilerTestDef*) { - testActions ++= tests - } - - /** Test's entry point */ - def main(args: Array[String]) { - try execute() - finally shutdown() - } - - protected def execute(): Unit = { - loadSources() - runDefaultTests() - } - - /** Load all sources before executing the test. */ - protected def loadSources() { - // ask the presentation compiler to track all sources. We do - // not wait for the file to be entirely typed because we do want - // to exercise the presentation compiler on scoped type requests. - askReload(sourceFiles) - // make sure all sources are parsed before running the test. This - // is because test may depend on the sources having been parsed at - // least once - askParse(sourceFiles) - } - - /** Run all defined `PresentationCompilerTestDef` */ - protected def runDefaultTests() { - //TODO: integrate random tests!, i.e.: if (runRandomTests) randomTests(20, sourceFiles) - testActions.foreach(_.runTest()) - } - - /** Perform n random tests with random changes. */ - /**** - private def randomTests(n: Int, files: Array[SourceFile]) { - val tester = new Tester(n, files, settings) { - override val compiler = self.compiler - override val reporter = new reporters.StoreReporter - } - tester.run() - } - ****/ - - /** shutdown the presentation compiler. */ - protected def shutdown() { - askShutdown() - - // this is actually needed to force exit on test completion. 
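An illustrative sketch of steps (1)-(3) above; the names `MyMarker`, `MyTypeAction` and `MyTest` are invented, and the action may need to live where the framework's `private[tests]` members are visible:

    // (1) a new marker, written in test sources as /*my*/
    object MyMarker extends TestMarker("/*my*/")

    object MyTest extends InteractiveTest {
      // (2) the operation to perform at every occurrence of the marker
      class MyTypeAction(override val compiler: Global)
          extends PresentationCompilerTestDef with AskTypeAt {
        override def runTest() {
          askAllSources(MyMarker) { pos =>
            askTypeAt(pos)
          } { (pos, tree) =>
            reporter.println("[response] type at " + format(pos) + ": " + tree)
          }
        }
      }

      ++(new MyTypeAction(compiler))  // (3) register the new action
    }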
- // Note: May be a bug on either the testing framework or (less likely) - // the presentation compiler - sys.exit(0) - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala deleted file mode 100644 index ad5c61b2b0..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala +++ /dev/null @@ -1,69 +0,0 @@ -package scala.tools.nsc -package interactive -package tests - -import java.io.File.pathSeparatorChar -import java.io.File.separatorChar -import scala.tools.nsc.interactive.tests.core.PresentationCompilerInstance -import scala.tools.nsc.io.{File,Path} -import core.Reporter -import core.TestSettings - -trait InteractiveTestSettings extends TestSettings with PresentationCompilerInstance { - /** Character delimiter for comments in .opts file */ - private final val CommentStartDelimiter = "#" - - private final val TestOptionsFileExtension = "flags" - - /** Prepare the settings object. Load the .opts file and adjust all paths from the - * Unix-like syntax to the platform specific syntax. This is necessary so that a - * single .opts file can be used on all platforms. - * - * @note Bootclasspath is treated specially. If there is a -bootclasspath option in - * the file, the 'usejavacp' setting is set to false. This ensures that the - * bootclasspath takes precedence over the scala-library used to run the current - * test. - */ - override protected def prepareSettings(settings: Settings) { - def adjustPaths(paths: settings.PathSetting*) { - for (p <- paths if argsString.contains(p.name)) p.value = p.value.map { - case '/' => separatorChar - case ':' => pathSeparatorChar - case c => c - } - } - - // need this so that the classpath comes from what partest - // instead of scala.home - settings.usejavacp.value = !argsString.contains("-bootclasspath") - - // pass any options coming from outside - settings.processArgumentString(argsString) match { - case (false, rest) => - println("error processing arguments (unprocessed: %s)".format(rest)) - case _ => () - } - - // Make the --sourcepath path provided in the .flags file (if any) relative to the test's base directory - if(settings.sourcepath.isSetByUser) - settings.sourcepath.value = (baseDir / Path(settings.sourcepath.value)).path - - adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath) - } - - /** If there's a file ending in .opts, read it and parse it for cmd line arguments. 
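The Unix-to-platform path rewrite performed by `adjustPaths` above boils down to a per-character mapping; a standalone, illustrative version:

    def adjustPath(p: String): String = p.map {
      case '/' => java.io.File.separatorChar
      case ':' => java.io.File.pathSeparatorChar
      case c   => c
    }
    // e.g. on Windows "lib/foo.jar:lib/bar.jar" becomes "lib\foo.jar;lib\bar.jar"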
*/ - protected val argsString = { - val optsFile = outDir / "%s.%s".format(System.getProperty("partest.testname"), TestOptionsFileExtension) - val str = try File(optsFile).slurp() catch { - case e: java.io.IOException => "" - } - str.lines.filter(!_.startsWith(CommentStartDelimiter)).mkString(" ") - } - - override protected def printClassPath(implicit reporter: Reporter) { - reporter.println("\toutDir: %s".format(outDir.path)) - reporter.println("\tbaseDir: %s".format(baseDir.path)) - reporter.println("\targsString: %s".format(argsString)) - super.printClassPath(reporter) - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala b/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala deleted file mode 100644 index 9382d5890f..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala +++ /dev/null @@ -1,208 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive -package tests - -import scala.reflect.internal.util._ -import reporters._ -import io.AbstractFile -import scala.collection.mutable.ArrayBuffer - -class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { - - val reporter = new StoreReporter - val compiler = new Global(settings, reporter) - - def askAndListen[T, U](msg: String, arg: T, op: (T, Response[U]) => Unit) { - if (settings.verbose.value) print(msg+" "+arg+": ") - val TIMEOUT = 10 // ms - val limit = System.currentTimeMillis() + randomDelayMillis - val res = new Response[U] - op(arg, res) - while (!res.isComplete && !res.isCancelled) { - if (System.currentTimeMillis() > limit) { - print("c"); res.cancel() - } else res.get(TIMEOUT) match { - case Some(Left(t)) => - /**/ - if (settings.verbose.value) println(t) - case Some(Right(ex)) => - ex.printStackTrace() - println(ex) - case None => - } - } - } - - def askReload(sfs: SourceFile*) = askAndListen("reload", sfs.toList, compiler.askReload) - def askTypeAt(pos: Position) = askAndListen("type at", pos, compiler.askTypeAt) - def askTypeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askTypeCompletion) - def askScopeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askScopeCompletion) - - val rand = new java.util.Random() - - private def randomInverse(n: Int) = n / (rand.nextInt(n) + 1) - - private def randomDecreasing(n: Int) = { - var r = rand.nextInt((1 to n).sum) - var limit = n - var result = 0 - while (r > limit) { - result += 1 - r -= limit - limit -= 1 - } - result - } - - def randomSourceFileIdx() = rand.nextInt(inputs.length) - - def randomBatchesPerSourceFile(): Int = randomDecreasing(100) - - def randomChangesPerBatch(): Int = randomInverse(50) - - def randomPositionIn(sf: SourceFile) = rand.nextInt(sf.content.length) - - def randomNumChars() = randomInverse(100) - - def randomDelayMillis = randomInverse(10000) - - class Change(sfidx: Int, start: Int, nchars: Int, toLeft: Boolean) { - - private var pos = start - private var deleted: List[Char] = List() - - override def toString = - "In "+inputs(sfidx)+" at "+start+" take "+nchars+" to "+ - (if (toLeft) "left" else "right") - - def deleteOne() { - val sf = inputs(sfidx) - deleted = sf.content(pos) :: deleted - val sf1 = new BatchSourceFile(sf.file, sf.content.take(pos) ++ sf.content.drop(pos + 1)) - inputs(sfidx) = sf1 - askReload(sf1) - } - - def deleteAll() { - print("/"+nchars) - for (i <- 0 until nchars) { - if (toLeft) { - if (pos > 0 && pos <= 
inputs(sfidx).length) { - pos -= 1 - deleteOne() - } - } else { - if (pos < inputs(sfidx).length) { - deleteOne() - } - } - } - } - - def insertAll() { - for (chr <- if (toLeft) deleted else deleted.reverse) { - val sf = inputs(sfidx) - val (pre, post) = sf./**/content splitAt pos - pos += 1 - val sf1 = new BatchSourceFile(sf.file, pre ++ (chr +: post)) - inputs(sfidx) = sf1 - askReload(sf1) - } - } - } - - val testComment = "/**/" - - def testFileChanges(sfidx: Int) = { - lazy val testPositions: Seq[Int] = { - val sf = inputs(sfidx) - val buf = new ArrayBuffer[Int] - var pos = sf.content.indexOfSlice(testComment) - while (pos > 0) { - buf += pos - pos = sf.content.indexOfSlice(testComment, pos + 1) - } - buf - } - def otherTest() { - if (testPositions.nonEmpty) { - val pos = new OffsetPosition(inputs(sfidx), rand.nextInt(testPositions.length)) - rand.nextInt(3) match { - case 0 => askTypeAt(pos) - case 1 => askTypeCompletion(pos) - case 2 => askScopeCompletion(pos) - } - } - } - for (i <- 0 until randomBatchesPerSourceFile()) { - val changes = Vector.fill(/**/randomChangesPerBatch()) { - /**/ - new Change(sfidx, randomPositionIn(inputs(sfidx)), randomNumChars(), rand.nextBoolean()) - } - doTest(sfidx, changes, testPositions, otherTest) match { - case Some(errortrace) => - println(errortrace) - minimize(errortrace) - case None => - } - } - } - - def doTest(sfidx: Int, changes: Seq[Change], testPositions: Seq[Int], otherTest: () => Unit): Option[ErrorTrace] = { - print("new round with "+changes.length+" changes:") - changes foreach (_.deleteAll()) - otherTest() - def errorCount() = compiler.ask(() => reporter.ERROR.count) -// println("\nhalf test round: "+errorCount()) - changes.view.reverse foreach (_.insertAll()) - otherTest() - println("done test round: "+errorCount()) - if (errorCount() != 0) - Some(ErrorTrace(sfidx, changes, reporter.infos, inputs(sfidx).content)) - else - None - } - - case class ErrorTrace( - sfidx: Int, changes: Seq[Change], infos: scala.collection.Set[reporter.Info], content: Array[Char]) { - override def toString = - "Sourcefile: "+inputs(sfidx)+ - "\nChanges:\n "+changes.mkString("\n ")+ - "\nErrors:\n "+infos.mkString("\n ")+ - "\nContents:\n"+content.mkString - } - - def minimize(etrace: ErrorTrace) {} - - /**/ - def run() { - askReload(inputs: _*) - for (i <- 0 until ntests) - testFileChanges(randomSourceFileIdx()) - } -} - -/* A program to do presentation compiler stress tests. - * Usage: - * - * scala scala.tools.nsc.interactive.test.Tester - * - * where is the number os tests to be run and is the set of files to test. - * This will do random deletions and re-insertions in any of the files. - * At places where an empty comment /**/ appears it will in addition randomly - * do ask-types, type-completions, or scope-completions. 
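In the usage above, the first command-line argument is the number of test rounds and the remaining arguments are the source files to mutate; a hypothetical invocation (file names invented for illustration) would be `scala scala.tools.nsc.interactive.tests.Tester 50 src/A.scala src/B.scala`.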
- */ -object Tester { - def main(args: Array[String]) { - val settings = new Settings() - val (_, filenames) = settings.processArguments(args.toList.tail, processAll = true) - println("filenames = "+filenames) - val files = filenames.toArray map (str => new BatchSourceFile(AbstractFile.getFile(str)): SourceFile) - new Tester(args(0).toInt, files, settings).run() - sys.exit(0) - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala deleted file mode 100644 index 8d446cbbf8..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala +++ /dev/null @@ -1,109 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - */ -package scala.tools.nsc -package interactive -package tests.core - -import scala.tools.nsc.interactive.Response -import scala.reflect.internal.util.Position -import scala.reflect.internal.util.SourceFile - -/** - * A trait for defining commands that can be queried to the - * presentation compiler. - * */ -trait AskCommand { - - /** presentation compiler's instance. */ - protected val compiler: Global - - /** - * Presentation compiler's `askXXX` actions work by doing side-effects - * on a `Response` instance passed as an argument during the `askXXX` - * call. - * The defined method `ask` is meant to encapsulate this behavior. - * */ - protected def ask[T](op: Response[T] => Unit): Response[T] = { - val r = new Response[T] - op(r) - r - } -} - -/** Ask the presentation compiler to shut-down. */ -trait AskShutdown extends AskCommand { - def askShutdown() = compiler.askShutdown() -} - -/** Ask the presentation compiler to parse a sequence of `sources` */ -trait AskParse extends AskCommand { - import compiler.Tree - - /** `sources` need to be entirely parsed before running the test - * (else commands such as `AskCompletionAt` may fail simply because - * the source's AST is not yet loaded). - */ - def askParse(sources: Seq[SourceFile]) { - val responses = sources map (askParse(_)) - responses.foreach(_.get) // force source files parsing - } - - private def askParse(src: SourceFile, keepLoaded: Boolean = true): Response[Tree] = { - ask { - compiler.askParsedEntered(src, keepLoaded, _) - } - } -} - -/** Ask the presentation compiler to reload a sequence of `sources` */ -trait AskReload extends AskCommand { - - /** Reload the given source files and wait for them to be reloaded. */ - protected def askReload(sources: Seq[SourceFile])(implicit reporter: Reporter): Response[Unit] = { - val sortedSources = (sources map (_.file.name)).sorted - reporter.println("reload: " + sortedSources.mkString(", ")) - - ask { - compiler.askReload(sources.toList, _) - } - } -} - -/** Ask the presentation compiler for completion at a given position. */ -trait AskCompletionAt extends AskCommand { - import compiler.Member - - private[tests] def askCompletionAt(pos: Position)(implicit reporter: Reporter): Response[List[Member]] = { - reporter.println("\naskTypeCompletion at " + pos.source.file.name + ((pos.line, pos.column))) - - ask { - compiler.askTypeCompletion(pos, _) - } - } -} - -/** Ask the presentation compiler for type info at a given position. 
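The `ask` helper above simply packages a side-effecting `askXXX` call into a fresh `Response`; inside a trait that mixes in `AskCommand`, a typical use would be the following sketch, with `source` assumed to be a loaded source file:

    val reloaded: Response[Unit] = ask { r => compiler.askReload(List(source), r) }
    reloaded.get  // block until the presentation compiler has processed the reload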
*/ -trait AskTypeAt extends AskCommand { - import compiler.Tree - - private[tests] def askTypeAt(pos: Position)(implicit reporter: Reporter): Response[Tree] = { - reporter.println("\naskType at " + pos.source.file.name + ((pos.line, pos.column))) - - ask { - compiler.askTypeAt(pos, _) - } - } -} - -trait AskLoadedTyped extends AskCommand { - import compiler.Tree - - protected def askLoadedTyped(source: SourceFile)(implicit reporter: Reporter): Response[Tree] = { - ask { - compiler.askLoadedTyped(source, _) - } - } - -} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala deleted file mode 100644 index 9085eb56e6..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala +++ /dev/null @@ -1,100 +0,0 @@ -package scala.tools.nsc -package interactive -package tests.core - -import scala.reflect.internal.util.Position - -/** Set of core test definitions that are executed for each test run. */ -private[tests] trait CoreTestDefs - extends PresentationCompilerRequestsWorkingMode { - - import scala.tools.nsc.interactive.Global - - /** Ask the presentation compiler for completion at all locations - * (in all sources) where the defined `marker` is found. */ - class CompletionAction(override val compiler: Global) - extends PresentationCompilerTestDef - with AskCompletionAt { - - def memberPrinter(member: compiler.Member): String = - "[accessible: %5s] ".format(member.accessible) + "`" + (member.sym.toString() + member.tpe.toString()).trim() + "`" - - override def runTest() { - askAllSources(CompletionMarker) { pos => - askCompletionAt(pos) - } { (pos, members) => - withResponseDelimiter { - reporter.println("[response] aksTypeCompletion at " + format(pos)) - // we skip getClass because it changed signature between 1.5 and 1.6, so there is no - // universal check file that we can provide for this to work - reporter.println("retrieved %d members".format(members.size)) - compiler ask { () => - val filtered = members.filterNot(member => member.sym.name.toString == "getClass" || member.sym.isConstructor) - reporter.println(filtered.map(memberPrinter).sortBy(_.toString()).mkString("\n")) - } - } - } - } - } - - /** Ask the presentation compiler for type info at all locations - * (in all sources) where the defined `marker` is found. */ - class TypeAction(override val compiler: Global) - extends PresentationCompilerTestDef - with AskTypeAt { - - override def runTest() { - askAllSources(TypeMarker) { pos => - askTypeAt(pos) - } { (pos, tree) => - withResponseDelimiter { - reporter.println("[response] askTypeAt at " + format(pos)) - compiler.ask(() => reporter.println(tree)) - } - } - } - } - - /** Ask the presentation compiler for hyperlink at all locations - * (in all sources) where the defined `marker` is found. 
*/ - class HyperlinkAction(override val compiler: Global) - extends PresentationCompilerTestDef - with AskTypeAt - with AskCompletionAt { - - override def runTest() { - askAllSources(HyperlinkMarker) { pos => - askTypeAt(pos)(NullReporter) - } { (pos, tree) => - if(tree.symbol == compiler.NoSymbol) { - reporter.println("\nNo symbol is associated with tree: "+tree) - } - else { - reporter.println("\naskHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + pos.source.file.name) - val r = new Response[Position] - // `tree.symbol.sourceFile` was discovered to be null when testing using virtpatmat on the akka presentation test, where a position had shifted to point to `Int` - // askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile! - val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null - val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null - - sourceFiles.find(_.path == treePath) match { - case Some(source) => - compiler.askLinkPos(tree.symbol, source, r) - r.get match { - case Left(pos) => - val resolvedPos = if (tree.symbol.pos.isDefined) tree.symbol.pos else pos - withResponseDelimiter { - reporter.println("[response] found askHyperlinkPos for `" + tree.symbol.name + "` at " + format(resolvedPos) + " " + tree.symbol.sourceFile.name) - } - case Right(ex) => - ex.printStackTrace() - } - case None => - reporter.println("[error] could not locate sourcefile `" + treeName + "`." + - "Hint: Does the looked up definition come form a binary?") - } - } - } - } - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala deleted file mode 100644 index 5cda0e53fb..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala +++ /dev/null @@ -1,34 +0,0 @@ -package scala.tools.nsc -package interactive -package tests.core - -import reporters.{Reporter => CompilerReporter} - -/** Trait encapsulating the creation of a presentation compiler's instance.*/ -private[tests] trait PresentationCompilerInstance extends TestSettings { - protected val settings = new Settings - protected val withDocComments = false - - protected val compilerReporter: CompilerReporter = new InteractiveReporter { - override def compiler = PresentationCompilerInstance.this.compiler - } - - protected lazy val compiler: Global = { - prepareSettings(settings) - new Global(settings, compilerReporter) { - override def forScaladoc = withDocComments - } - } - - /** - * Called before instantiating the presentation compiler's instance. - * You should provide an implementation of this method if you need - * to customize the `settings` used to instantiate the presentation compiler. 
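A hypothetical override of the hook documented above, using only settings that appear elsewhere in this patch:

    override protected def prepareSettings(settings: Settings) {
      settings.usejavacp.value = true   // compile against the classpath of the running JVM
      settings.verbose.value   = true   // extra logging while debugging a test
    }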
- * */ - protected def prepareSettings(settings: Settings) {} - - protected def printClassPath(implicit reporter: Reporter) { - reporter.println("\tbootClassPath: %s".format(settings.bootclasspath.value)) - reporter.println("\tverbose: %b".format(settings.verbose.value)) - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala deleted file mode 100644 index b5ae5f2d75..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala +++ /dev/null @@ -1,62 +0,0 @@ -package scala.tools.nsc -package interactive -package tests.core - -import scala.reflect.internal.util.Position -import scala.reflect.internal.util.SourceFile - -trait PresentationCompilerRequestsWorkingMode extends TestResources { - - protected def synchronousRequests: Boolean - - protected def askAllSources[T] = if (synchronousRequests) askAllSourcesSync[T] _ else askAllSourcesAsync[T] _ - - /** Perform an operation on all sources at all positions that match the given - * `marker`. For instance, askAllSources(TypeMarker)(askTypeAt)(println) would - * ask the type at all positions marked with `TypeMarker.marker` and println the result. - */ - private def askAllSourcesAsync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) { - val positions = allPositionsOf(str = marker.marker) - val responses = for (pos <- positions) yield askAt(pos) - - for ((pos, r) <- positions zip responses) withResponse(pos, r)(f) - } - - /** Synchronous version of askAllSources. Each position is treated in turn, waiting for the - * response before going to the next one. - */ - private def askAllSourcesSync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) { - val positions = allPositionsOf(str = marker.marker) - for (pos <- positions) withResponse(pos, askAt(pos))(f) - } - - /** All positions of the given string in all source files. */ - private def allPositionsOf(srcs: Seq[SourceFile] = sourceFiles, str: String): Seq[Position] = - for (s <- srcs; p <- positionsOf(s, str)) yield p - - /** Return all positions of the given str in the given source file. */ - private def positionsOf(source: SourceFile, str: String): Seq[Position] = { - val buf = new scala.collection.mutable.ListBuffer[Position] - var pos = source.content.indexOfSlice(str) - while (pos >= 0) { - buf += source.position(pos - 1) // we need the position before the first character of this marker - pos = source.content.indexOfSlice(str, pos + 1) - } - buf.toList - } - - private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) { - /** Return the filename:line:col version of this position. 
*/ - def showPos(pos: Position): String = - "%s:%d:%d".format(pos.source.file.name, pos.line, pos.column) - - response.get(TIMEOUT) match { - case Some(Left(t)) => - f(pos, t) - case None => - println("TIMEOUT: " + showPos(pos)) - case Some(r) => - println("ERROR: " + r) - } - } -} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala deleted file mode 100644 index 4d5b4e1129..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala +++ /dev/null @@ -1,18 +0,0 @@ -package scala.tools.nsc.interactive.tests.core - -import scala.reflect.internal.util.Position - -trait PresentationCompilerTestDef { - - private[tests] def runTest(): Unit - - protected def withResponseDelimiter(block: => Unit)(implicit reporter: Reporter) { - def printDelimiter() = reporter.println("=" * 80) - printDelimiter() - block - printDelimiter() - } - - protected def format(pos: Position): String = - (if(pos.isDefined) "(%d,%d)".format(pos.line, pos.column) else "") -} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala deleted file mode 100644 index 631504cda5..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala +++ /dev/null @@ -1,15 +0,0 @@ -package scala.tools.nsc.interactive.tests.core - -private[tests] trait Reporter { - def println(msg: Any): Unit -} - -/** Reporter that simply prints all messages in the standard output.*/ -private[tests] object ConsoleReporter extends Reporter { - def println(msg: Any) { Console.println(msg) } -} - -/** Reporter that swallows all passed message. */ -private[tests] object NullReporter extends Reporter { - def println(msg: Any) {} -} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala deleted file mode 100644 index 676feeba8a..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala +++ /dev/null @@ -1,20 +0,0 @@ -package scala.tools.nsc.interactive.tests.core - -import scala.reflect.internal.util.{SourceFile,BatchSourceFile} -import scala.tools.nsc.io.{AbstractFile,Path} - -private[tests] object SourcesCollector { - type SourceFilter = Path => Boolean - - /** - * All files below `base` directory that pass the `filter`. - * With the default `filter` only .scala and .java files are collected. 
- * */ - def apply(base: Path, filter: SourceFilter): Array[SourceFile] = { - assert(base.isDirectory) - base.walk.filter(filter).map(source).toList.toArray.sortBy(_.file.name) - } - - private def source(file: Path): SourceFile = source(AbstractFile.getFile(file.toFile)) - private def source(file: AbstractFile): SourceFile = new BatchSourceFile(file) -} diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala deleted file mode 100644 index a5c228a549..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala +++ /dev/null @@ -1,27 +0,0 @@ -package scala.tools.nsc.interactive.tests.core - -case class DuplicateTestMarker(msg: String) extends Exception(msg) - -object TestMarker { - import scala.collection.mutable.Map - private val markers: Map[String, TestMarker] = Map.empty - - private def checkForDuplicate(marker: TestMarker) { - markers.get(marker.marker) match { - case None => markers(marker.marker) = marker - case Some(otherMarker) => - val msg = "Marker `%s` is already used by %s. Please choose a different marker for %s".format(marker.marker, marker, otherMarker) - throw new DuplicateTestMarker(msg) - } - } -} - -abstract case class TestMarker(marker: String) { - TestMarker.checkForDuplicate(this) -} - -object CompletionMarker extends TestMarker("/*!*/") - -object TypeMarker extends TestMarker("/*?*/") - -object HyperlinkMarker extends TestMarker("/*#*/") diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala deleted file mode 100644 index 887c3cf29b..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala +++ /dev/null @@ -1,12 +0,0 @@ -package scala.tools.nsc.interactive.tests.core - -import scala.tools.nsc.io.Path -import scala.reflect.internal.util.SourceFile - -/** Resources used by the test. */ -private[tests] trait TestResources extends TestSettings { - /** collected source files that are to be used by the test runner */ - protected lazy val sourceFiles: Array[SourceFile] = SourcesCollector(baseDir / sourceDir, isScalaOrJavaSource) - - private def isScalaOrJavaSource(file: Path): Boolean = file.extension == "scala" | file.extension == "java" -} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala deleted file mode 100644 index 681204172b..0000000000 --- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala +++ /dev/null @@ -1,19 +0,0 @@ -package scala.tools.nsc.interactive.tests.core - -import scala.tools.nsc.io.Path - -/** Common settings for the test. */ -private[tests] trait TestSettings { - protected final val TIMEOUT = 10000 // timeout in milliseconds - - /** The root directory for this test suite, usually the test kind ("test/files/presentation"). */ - protected val outDir = Path(Option(System.getProperty("partest.cwd")).getOrElse(".")) - - /** The base directory for this test, usually a subdirectory of "test/files/presentation/" */ - protected val baseDir = Option(System.getProperty("partest.testname")).map(outDir / _).getOrElse(Path(".")) - - /** Where source files are placed. 
*/ - protected val sourceDir = "src" - - protected implicit val reporter: Reporter = ConsoleReporter -} diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala new file mode 100644 index 0000000000..f84fa161c0 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala @@ -0,0 +1,470 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc +package interactive + +import scala.util.control.ControlThrowable +import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.util.FailedInterrupt +import scala.tools.nsc.util.EmptyAction +import scala.tools.nsc.util.WorkScheduler +import scala.reflect.internal.util.{SourceFile, Position} +import scala.tools.nsc.util.InterruptReq + +/** Interface of interactive compiler to a client such as an IDE + * The model the presentation compiler consists of the following parts: + * + * unitOfFile: The map from sourcefiles to loaded units. A sourcefile/unit is loaded if it occurs in that map. + * + * manipulated by: removeUnitOf, reloadSources. + * + * A call to reloadSources will add the given sources to the loaded units, and + * start a new background compiler pass to compile all loaded units (with the indicated sources first). + * Each background compiler pass has its own typer run. + * The background compiler thread can be interrupted each time an AST node is + * completely typechecked in the following ways: + + * 1. by a new call to reloadSources. This starts a new background compiler pass with a new typer run. + * 2. by a call to askTypeTree. This starts a new typer run if the forceReload parameter = true + * 3. by a call to askTypeAt, askTypeCompletion, askScopeCompletion, askToDoFirst, askLinkPos, askLastType. + * 4. by raising an exception in the scheduler. + * 5. by passing a high-priority action wrapped in ask { ... }. + * + * Actions under 1-3 can themselves be interrupted if they involve typechecking + * AST nodes. High-priority actions under 5 cannot; they always run to completion. + * So these high-priority actions should to be short. + * + * Normally, an interrupted action continues after the interrupting action is finished. + * However, if the interrupting action created a new typer run, the interrupted + * action is aborted. If there's an outstanding response, it will be set to + * a Right value with a FreshRunReq exception. + */ +trait CompilerControl { self: Global => + + import syntaxAnalyzer.UnitParser + + type Response[T] = scala.tools.nsc.interactive.Response[T] + + /** The scheduler by which client and compiler communicate + * Must be initialized before starting compilerRunner + */ + @volatile protected[interactive] var scheduler = new WorkScheduler + + /** Return the compilation unit attached to a source file, or None + * if source is not loaded. + */ + def getUnitOf(s: SourceFile): Option[RichCompilationUnit] = getUnit(s) + + /** Run operation `op` on a compilation unit associated with given `source`. + * If source has a loaded compilation unit, this one is passed to `op`. + * Otherwise a new compilation unit is created, but not added to the set of loaded units. 
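`onUnitOf`, documented just above, runs an operation against whichever compilation unit backs a source, loaded or not. A small sketch, assuming `compiler` is an interactive Global and `RichCompilationUnit` still exposes `problems` as in the file shown earlier:

    val reportedProblems = compiler.onUnitOf(source) { unit => unit.problems.toList }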
+ */ + def onUnitOf[T](source: SourceFile)(op: RichCompilationUnit => T): T = + op(unitOfFile.getOrElse(source.file, new RichCompilationUnit(source))) + + /** The compilation unit corresponding to a source file + * if it does not yet exist create a new one atomically + * Note: We want to get roid of this operation as it messes compiler invariants. + */ + @deprecated("use getUnitOf(s) or onUnitOf(s) instead", "2.10.0") + def unitOf(s: SourceFile): RichCompilationUnit = getOrCreateUnitOf(s) + + /** The compilation unit corresponding to a position */ + @deprecated("use getUnitOf(pos.source) or onUnitOf(pos.source) instead", "2.10.0") + def unitOf(pos: Position): RichCompilationUnit = getOrCreateUnitOf(pos.source) + + /** Removes the CompilationUnit corresponding to the given SourceFile + * from consideration for recompilation. + */ + def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved += s.file; unitOfFile get s.file } + + /** Returns the top level classes and objects that were deleted + * in the editor since last time recentlyDeleted() was called. + */ + def recentlyDeleted(): List[Symbol] = deletedTopLevelSyms.synchronized { + val result = deletedTopLevelSyms + deletedTopLevelSyms.clear() + result.toList + } + + /** Locate smallest tree that encloses position + * @pre Position must be loaded + */ + def locateTree(pos: Position): Tree = onUnitOf(pos.source) { unit => new Locator(pos) locateIn unit.body } + + /** Locates smallest context that encloses position as an optional value. + */ + def locateContext(pos: Position): Option[Context] = + for (unit <- getUnit(pos.source); cx <- locateContext(unit.contexts, pos)) yield cx + + /** Returns the smallest context that contains given `pos`, throws FatalError if none exists. + */ + def doLocateContext(pos: Position): Context = locateContext(pos) getOrElse { + throw new FatalError("no context found for "+pos) + } + + private def postWorkItem(item: WorkItem) = + if (item.onCompilerThread) item() else scheduler.postWorkItem(item) + + /** Makes sure a set of compilation units is loaded and parsed. + * Returns () to syncvar `response` on completion. + * Afterwards a new background compiler run is started with + * the given sources at the head of the list of to-be-compiled sources. + */ + def askReload(sources: List[SourceFile], response: Response[Unit]) = { + val superseeded = scheduler.dequeueAll { + case ri: ReloadItem if ri.sources == sources => Some(ri) + case _ => None + } + superseeded.foreach(_.response.set()) + postWorkItem(new ReloadItem(sources, response)) + } + + /** Removes source files and toplevel symbols, and issues a new typer run. + * Returns () to syncvar `response` on completion. + */ + def askFilesDeleted(sources: List[SourceFile], response: Response[Unit]) = { + postWorkItem(new FilesDeletedItem(sources, response)) + } + + /** Sets sync var `response` to the smallest fully attributed tree that encloses position `pos`. + * Note: Unlike for most other ask... operations, the source file belonging to `pos` needs not be loaded. + */ + def askTypeAt(pos: Position, response: Response[Tree]) = + postWorkItem(new AskTypeAtItem(pos, response)) + + /** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`. + * @pre `source` needs to be loaded. + * @note Deprecated because of race conditions in the typechecker when the background compiler + * is interrupted while typing the same `source`. 
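`recentlyDeleted`, defined above, lets a client poll for top-level symbols removed in the editor since the last call; an illustrative use, with `compiler` assumed to be an interactive Global:

    val deleted = compiler.recentlyDeleted()
    deleted.foreach(sym => println("deleted top-level symbol: " + sym.fullName))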
+ * @see SI-6578 + */ + @deprecated("Use `askLoadedTyped` instead to avoid race conditions in the typechecker", "2.10.1") + def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) = + postWorkItem(new AskTypeItem(source, forceReload, response)) + + /** Sets sync var `response` to the position of the definition of the given link in + * the given sourcefile. + * + * @param sym The symbol referenced by the link (might come from a classfile) + * @param source The source file that's supposed to contain the definition + * @param response A response that will be set to the following: + * If `source` contains a definition that is referenced by the given link + * the position of that definition, otherwise NoPosition. + * Note: This operation does not automatically load `source`. If `source` + * is unloaded, it stays that way. + */ + def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) = + postWorkItem(new AskLinkPosItem(sym, source, response)) + + /** Sets sync var `response` to doc comment information for a given symbol. + * + * @param sym The symbol whose doc comment should be retrieved (might come from a classfile) + * @param source The source file that's supposed to contain the definition + * @param site The symbol where 'sym' is observed + * @param fragments All symbols that can contribute to the generated documentation + * together with their source files. + * @param response A response that will be set to the following: + * If `source` contains a definition of a given symbol that has a doc comment, + * the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition). + * Note: This operation does not automatically load sources that are not yet loaded. + */ + def askDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]): Unit = + postWorkItem(new AskDocCommentItem(sym, source, site, fragments, response)) + + @deprecated("Use method that accepts fragments", "2.10.2") + def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]): Unit = + askDocComment(sym, source, site, (sym,source)::Nil, response) + + /** Sets sync var `response` to list of members that are visible + * as members of the tree enclosing `pos`, possibly reachable by an implicit. + * @pre source is loaded + */ + def askTypeCompletion(pos: Position, response: Response[List[Member]]) = + postWorkItem(new AskTypeCompletionItem(pos, response)) + + /** Sets sync var `response` to list of members that are visible + * as members of the scope enclosing `pos`. + * @pre source is loaded + */ + def askScopeCompletion(pos: Position, response: Response[List[Member]]) = + postWorkItem(new AskScopeCompletionItem(pos, response)) + + /** Asks to do unit corresponding to given source file on present and subsequent type checking passes. + * If the file is in the 'crashedFiles' ignore list it is removed and typechecked normally. + */ + def askToDoFirst(source: SourceFile) = + postWorkItem(new AskToDoFirstItem(source)) + + /** If source is not yet loaded, loads it, and starts a new run, otherwise + * continues with current pass. + * Waits until source is fully type checked and returns body in response. + * @param source The source file that needs to be fully typed. + * @param response The response, which is set to the fully attributed tree of `source`. 
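Requesting type-member completion through the API above and listing the accessible members might look like this sketch (`compiler` and `pos` assumed, with `pos` inside a loaded source):

    import scala.tools.nsc.interactive.Response

    val members = new Response[List[compiler.Member]]
    compiler.askTypeCompletion(pos, members)
    for {
      ms <- members.get.left.toOption   // Left(...) carries the member list
      m  <- ms if m.accessible
    } println(m.sym.name + ": " + m.tpe)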
+ * If the unit corresponding to `source` has been removed in the meantime + * the a NoSuchUnitError is raised in the response. + */ + def askLoadedTyped(source: SourceFile, response: Response[Tree]) = + postWorkItem(new AskLoadedTypedItem(source, response)) + + /** If source if not yet loaded, get an outline view with askParseEntered. + * If source is loaded, wait for it to be typechecked. + * In both cases, set response to parsed (and possibly typechecked) tree. + * @param keepSrcLoaded If set to `true`, source file will be kept as a loaded unit afterwards. + */ + def askStructure(keepSrcLoaded: Boolean)(source: SourceFile, response: Response[Tree]) = { + getUnit(source) match { + case Some(_) => askLoadedTyped(source, response) + case None => askParsedEntered(source, keepSrcLoaded, response) + } + } + + /** Set sync var `response` to the parse tree of `source` with all top-level symbols entered. + * @param source The source file to be analyzed + * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards. + * If keepLoaded is `false` the operation is run at low priority, only after + * everything is brought up to date in a regular type checker run. + * @param response The response. + */ + def askParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) = + postWorkItem(new AskParsedEnteredItem(source, keepLoaded, response)) + + /** Set sync var `response` to a pair consisting of + * - the fully qualified name of the first top-level object definition in the file. + * or "" if there are no object definitions. + * - the text of the instrumented program which, when run, + * prints its output and all defined values in a comment column. + * + * @param source The source file to be analyzed + * @param response The response. + */ + @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") + def askInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) = + postWorkItem(new AskInstrumentedItem(source, line, response)) + + /** Cancels current compiler run and start a fresh one where everything will be re-typechecked + * (but not re-loaded). + */ + def askReset() = scheduler raise (new FreshRunReq) + + /** Tells the compile server to shutdown, and not to restart again */ + def askShutdown() = scheduler raise ShutdownReq + + @deprecated("use parseTree(source) instead", "2.10.0") // deleted 2nd parameter, as this has to run on 2.8 also. + def askParse(source: SourceFile, response: Response[Tree]) = respond(response) { + parseTree(source) + } + + /** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported. + * + * This method is thread-safe and as such can safely run outside of the presentation + * compiler thread. 
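Putting the requests above together, the reload-then-wait-for-typecheck round trip described in the introductory comment of this trait can be sketched as follows (`compiler` and `source` assumed):

    import scala.tools.nsc.interactive.Response

    val reloaded = new Response[Unit]
    compiler.askReload(List(source), reloaded)
    reloaded.get                              // wait for the reload to be processed

    val typed = new Response[compiler.Tree]
    compiler.askLoadedTyped(source, typed)
    typed.get match {
      case Left(tree) => println("fully attributed body:\n" + tree)
      case Right(exc) => exc.printStackTrace()
    }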
+ */ + def parseTree(source: SourceFile): Tree = { + newUnitParser(new CompilationUnit(source)).parse() + } + + /** Asks for a computation to be done quickly on the presentation compiler thread */ + def ask[A](op: () => A): A = if (self.onCompilerThread) op() else scheduler doQuickly op + + /** Asks for a computation to be done on presentation compiler thread, returning + * a response with the result or an exception + */ + def askForResponse[A](op: () => A): Response[A] = { + val r = new Response[A] + if (self.onCompilerThread) { + try { r set op() } + catch { case exc: Throwable => r raise exc } + r + } else { + val ir = scheduler askDoQuickly op + ir onComplete { + case Left(result) => r set result + case Right(exc) => r raise exc + } + r + } + } + + def onCompilerThread = Thread.currentThread == compileRunner + + /** Info given for every member found by completion + */ + abstract class Member { + val sym: Symbol + val tpe: Type + val accessible: Boolean + def implicitlyAdded = false + } + + case class TypeMember( + sym: Symbol, + tpe: Type, + accessible: Boolean, + inherited: Boolean, + viaView: Symbol) extends Member { + override def implicitlyAdded = viaView != NoSymbol + } + + case class ScopeMember( + sym: Symbol, + tpe: Type, + accessible: Boolean, + viaImport: Tree) extends Member + + // items that get sent to scheduler + + abstract class WorkItem extends (() => Unit) { + val onCompilerThread = self.onCompilerThread + + /** Raise a MissingReponse, if the work item carries a response. */ + def raiseMissing(): Unit + } + + case class ReloadItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem { + def apply() = reload(sources, response) + override def toString = "reload "+sources + + def raiseMissing() = + response raise new MissingResponse + } + + case class FilesDeletedItem(sources: List[SourceFile], response: Response[Unit]) extends WorkItem { + def apply() = filesDeleted(sources, response) + override def toString = "files deleted "+sources + + def raiseMissing() = + response raise new MissingResponse + } + + case class AskTypeAtItem(pos: Position, response: Response[Tree]) extends WorkItem { + def apply() = self.getTypedTreeAt(pos, response) + override def toString = "typeat "+pos.source+" "+pos.show + + def raiseMissing() = + response raise new MissingResponse + } + + case class AskTypeItem(source: SourceFile, forceReload: Boolean, response: Response[Tree]) extends WorkItem { + def apply() = self.getTypedTree(source, forceReload, response) + override def toString = "typecheck" + + def raiseMissing() = + response raise new MissingResponse + } + + case class AskTypeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem { + def apply() = self.getTypeCompletion(pos, response) + override def toString = "type completion "+pos.source+" "+pos.show + + def raiseMissing() = + response raise new MissingResponse + } + + case class AskScopeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem { + def apply() = self.getScopeCompletion(pos, response) + override def toString = "scope completion "+pos.source+" "+pos.show + + def raiseMissing() = + response raise new MissingResponse + } + + class AskToDoFirstItem(val source: SourceFile) extends WorkItem { + def apply() = { + moveToFront(List(source)) + enableIgnoredFile(source.file) + } + override def toString = "dofirst "+source + + def raiseMissing() = () + } + + case class AskLinkPosItem(sym: Symbol, source: SourceFile, response: Response[Position]) extends WorkItem { + 
def apply() = self.getLinkPos(sym, source, response) + override def toString = "linkpos "+sym+" in "+source + + def raiseMissing() = + response raise new MissingResponse + } + + case class AskDocCommentItem(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem { + def apply() = self.getDocComment(sym, source, site, fragments, response) + override def toString = "doc comment "+sym+" in "+source+" with fragments:"+fragments.mkString("(", ",", ")") + + def raiseMissing() = + response raise new MissingResponse + } + + case class AskLoadedTypedItem(source: SourceFile, response: Response[Tree]) extends WorkItem { + def apply() = self.waitLoadedTyped(source, response, this.onCompilerThread) + override def toString = "wait loaded & typed "+source + + def raiseMissing() = + response raise new MissingResponse + } + + case class AskParsedEnteredItem(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) extends WorkItem { + def apply() = self.getParsedEntered(source, keepLoaded, response, this.onCompilerThread) + override def toString = "getParsedEntered "+source+", keepLoaded = "+keepLoaded + + def raiseMissing() = + response raise new MissingResponse + } + + @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") + case class AskInstrumentedItem(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) extends WorkItem { + def apply() = self.getInstrumented(source, line, response) + override def toString = "getInstrumented "+source + + def raiseMissing() = + response raise new MissingResponse + } + + /** A do-nothing work scheduler that responds immediately with MissingResponse. + * + * Used during compiler shutdown. + */ + class NoWorkScheduler extends WorkScheduler { + + override def postWorkItem(action: Action) = synchronized { + action match { + case w: WorkItem => w.raiseMissing() + case e: EmptyAction => // do nothing + case _ => println("don't know what to do with this " + action.getClass) + } + } + + override def doQuickly[A](op: () => A): A = { + throw new FailedInterrupt(new Exception("Posted a work item to a compiler that's shutting down")) + } + + override def askDoQuickly[A](op: () => A): InterruptReq { type R = A } = { + val ir = new InterruptReq { + type R = A + val todo = () => throw new MissingResponse + } + ir.execute() + ir + } + + } + +} + + // ---------------- Interpreted exceptions ------------------- + +/** Signals a request for a fresh background compiler run. + * Note: The object has to stay top-level so that the PresentationCompilerThread may access it. + */ +class FreshRunReq extends ControlThrowable + +/** Signals a request for a shutdown of the presentation compiler. + * Note: The object has to stay top-level so that the PresentationCompilerThread may access it. 
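`askForResponse`, shown earlier in this file, runs a computation on the presentation compiler thread and hands back a `Response` for the result; an illustrative use, assuming `compiler.unitOfFile` is the map of loaded units mentioned in the trait comment:

    val loaded = compiler.askForResponse(() => compiler.unitOfFile.keys.toList)
    loaded.get match {
      case Left(files) => println("currently loaded: " + files.mkString(", "))
      case Right(exc)  => exc.printStackTrace()
    }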
+ */ +object ShutdownReq extends ControlThrowable + +class NoSuchUnitError(file: AbstractFile) extends Exception("no unit found for file "+file) + +class MissingResponse extends Exception("response missing") diff --git a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala new file mode 100644 index 0000000000..93ef4c4d6c --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala @@ -0,0 +1,149 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc +package interactive + +import scala.collection.mutable.ArrayBuffer + +trait ContextTrees { self: Global => + + type Context = analyzer.Context + lazy val NoContext = analyzer.NoContext + type Contexts = ArrayBuffer[ContextTree] + + /** A context tree contains contexts that are indexed by positions. + * It satisfies the following properties: + * 1. All context come from compiling the same unit. + * 2. Child contexts have parent contexts in their outer chain. + * 3. The `pos` field of a context is the same as `context.tree.pos`, unless that + * position is transparent. In that case, `pos` equals the position of + * one of the solid descendants of `context.tree`. + * 4. Children of a context have non-overlapping increasing positions. + * 5. No context in the tree has a transparent position. + */ + class ContextTree(val pos: Position, val context: Context, val children: ArrayBuffer[ContextTree]) { + def this(pos: Position, context: Context) = this(pos, context, new ArrayBuffer[ContextTree]) + override def toString = "ContextTree("+pos+", "+children+")" + } + + /** Optionally returns the smallest context that contains given `pos`, or None if none exists. + */ + def locateContext(contexts: Contexts, pos: Position): Option[Context] = synchronized { + def locateNearestContextTree(contexts: Contexts, pos: Position, recent: Array[ContextTree]): Option[ContextTree] = { + locateContextTree(contexts, pos) match { + case Some(x) => + recent(0) = x + locateNearestContextTree(x.children, pos, recent) + case None => recent(0) match { + case null => None + case x => Some(x) + } + } + } + locateNearestContextTree(contexts, pos, new Array[ContextTree](1)) map (_.context) + } + + def locateContextTree(contexts: Contexts, pos: Position): Option[ContextTree] = { + if (contexts.isEmpty) None + else { + val hi = contexts.length - 1 + if ((contexts(hi).pos properlyPrecedes pos) || (pos properlyPrecedes contexts(0).pos)) None + else { + def loop(lo: Int, hi: Int): Option[ContextTree] = { + val mid = (lo + hi) / 2 + val midpos = contexts(mid).pos + if ((pos precedes midpos) && (mid < hi)) + loop(lo, mid) + else if ((midpos precedes pos) && (lo < mid)) + loop(mid, hi) + else if (midpos includes pos) + Some(contexts(mid)) + else if (contexts(mid+1).pos includes pos) + Some(contexts(mid+1)) + else None + } + loop(0, hi) + } + } + } + + /** Insert a context at correct position into a buffer of context trees. + * If the `context` has a transparent position, add it multiple times + * at the positions of all its solid descendant trees. 
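Given these invariants, finding the innermost typing context for a cursor position is a cheap walk down the tree. A small sketch of how the lookup might be consulted, written as if inside the interactive Global where these types are in scope (`cursorPos` is illustrative):

    // `unit.contexts` is populated by Global.registerContext during typechecking.
    def contextAt(unit: RichCompilationUnit, pos: Position): Option[Context] =
      locateContext(unit.contexts, pos)

    // e.g. list the imports visible at the cursor:
    // contextAt(unit, cursorPos).toList.flatMap(_.imports).foreach(println)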
+ */ + def addContext(contexts: Contexts, context: Context): Unit = { + val cpos = context.tree.pos + if (cpos.isTransparent) + for (t <- context.tree.children flatMap solidDescendants) + addContext(contexts, context, t.pos) + else + addContext(contexts, context, cpos) + } + + /** Insert a context with non-transparent position `cpos` + * at correct position into a buffer of context trees. + */ + def addContext(contexts: Contexts, context: Context, cpos: Position): Unit = synchronized { + try { + if (!cpos.isRange) {} + else if (contexts.isEmpty) contexts += new ContextTree(cpos, context) + else { + val hi = contexts.length - 1 + if (contexts(hi).pos precedes cpos) + contexts += new ContextTree(cpos, context) + else if (contexts(hi).pos properlyIncludes cpos) // fast path w/o search + addContext(contexts(hi).children, context, cpos) + else if (cpos precedes contexts(0).pos) + new ContextTree(cpos, context) +=: contexts + else { + def insertAt(idx: Int): Boolean = { + val oldpos = contexts(idx).pos + if (oldpos sameRange cpos) { + contexts(idx) = new ContextTree(cpos, context, contexts(idx).children) + true + } else if (oldpos includes cpos) { + addContext(contexts(idx).children, context, cpos) + true + } else if (cpos includes oldpos) { + val start = contexts.indexWhere(cpos includes _.pos) + val last = contexts.lastIndexWhere(cpos includes _.pos) + contexts(start) = new ContextTree(cpos, context, contexts.slice(start, last + 1)) + contexts.remove(start + 1, last - start) + true + } else false + } + def loop(lo: Int, hi: Int) { + if (hi - lo > 1) { + val mid = (lo + hi) / 2 + val midpos = contexts(mid).pos + if (cpos precedes midpos) + loop(lo, mid) + else if (midpos precedes cpos) + loop(mid, hi) + else + addContext(contexts(mid).children, context, cpos) + } else if (!insertAt(lo) && !insertAt(hi)) { + val lopos = contexts(lo).pos + val hipos = contexts(hi).pos + if ((lopos precedes cpos) && (cpos precedes hipos)) + contexts.insert(hi, new ContextTree(cpos, context)) + else + inform("internal error? 
skewed positions: "+lopos+" !< "+cpos+" !< "+hipos) + } + } + loop(0, hi) + } + } + } catch { + case ex: Throwable => + println(ex) + ex.printStackTrace() + println("failure inserting "+cpos+" into "+contexts+"/"+contexts(contexts.length - 1).pos+"/"+ + (contexts(contexts.length - 1).pos includes cpos)) + throw ex + } + } +} + diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala new file mode 100644 index 0000000000..33b10d1a9a --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -0,0 +1,1139 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc +package interactive + +import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter } +import scala.collection.mutable +import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet} +import scala.util.control.ControlThrowable +import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer } +import scala.tools.nsc.util.MultiHashMap +import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition } +import scala.tools.nsc.reporters._ +import scala.tools.nsc.symtab._ +import scala.tools.nsc.typechecker.DivergentImplicit +import symtab.Flags.{ACCESSOR, PARAMACCESSOR} +import scala.annotation.elidable +import scala.language.implicitConversions + +/** The main class of the presentation compiler in an interactive environment such as an IDE + */ +class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends { + /* Is the compiler initializing? Early def, so that the field is true during the + * execution of the super constructor. + */ + private var initializing = true + override val useOffsetPositions = false +} with scala.tools.nsc.Global(settings, _reporter) + with CompilerControl + with ContextTrees + with RichCompilationUnits + with ScratchPadMaker + with Picklers { + + import definitions._ + + val debugIDE: Boolean = settings.YpresentationDebug.value + val verboseIDE: Boolean = settings.YpresentationVerbose.value + + private def replayName = settings.YpresentationReplay.value + private def logName = settings.YpresentationLog.value + private def afterTypeDelay = settings.YpresentationDelay.value + private final val SleepTime = 10 + + val log = + if (replayName != "") new Replayer(new FileReader(replayName)) + else if (logName != "") new Logger(new FileWriter(logName)) + else NullLogger + + import log.logreplay + debugLog("logger: " + log.getClass + " writing to " + (new java.io.File(logName)).getAbsolutePath) + debugLog("classpath: "+classPath) + + private var curTime = System.nanoTime + private def timeStep = { + val last = curTime + curTime = System.nanoTime + ", delay = " + (curTime - last) / 1000000 + "ms" + } + + /** Print msg only when debugIDE is true. */ + @inline final def debugLog(msg: => String) = + if (debugIDE) println("[%s] %s".format(projectName, msg)) + + /** Inform with msg only when verboseIDE is true. */ + @inline final def informIDE(msg: => String) = + if (verboseIDE) println("[%s][%s]".format(projectName, msg)) + + override def forInteractive = true + + /** A map of all loaded files to the rich compilation units that correspond to them. 
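As a rough sketch of how an interactive Global like the one declared above is brought up by an embedding tool (class path handling simplified; the flag and project name are illustrative):

    import scala.tools.nsc.Settings
    import scala.tools.nsc.reporters.StoreReporter
    import scala.tools.nsc.interactive.Global

    val settings = new Settings()
    settings.usejavacp.value = true          // reuse the host JVM class path, for the sketch only
    val reporter = new StoreReporter         // collects diagnostics in memory
    val compiler = new Global(settings, reporter, projectName = "demo")
    // from here on, interact with `compiler` only through the ask* methods of CompilerControl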
+ */ + val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with + SynchronizedMap[AbstractFile, RichCompilationUnit] { + override def put(key: AbstractFile, value: RichCompilationUnit) = { + val r = super.put(key, value) + if (r.isEmpty) debugLog("added unit for "+key) + r + } + override def remove(key: AbstractFile) = { + val r = super.remove(key) + if (r.nonEmpty) debugLog("removed unit for "+key) + r + } + } + + /** A set containing all those files that need to be removed + * Units are removed by getUnit, typically once a unit is finished compiled. + */ + protected val toBeRemoved: mutable.Set[AbstractFile] = + new HashSet[AbstractFile] with SynchronizedSet[AbstractFile] + + /** A set containing all those files that need to be removed after a full background compiler run + */ + protected val toBeRemovedAfterRun: mutable.Set[AbstractFile] = + new HashSet[AbstractFile] with SynchronizedSet[AbstractFile] + + class ResponseMap extends MultiHashMap[SourceFile, Response[Tree]] { + override def += (binding: (SourceFile, Set[Response[Tree]])) = { + assert(interruptsEnabled, "delayed operation within an ask") + super.+=(binding) + } + } + + /** A map that associates with each abstract file the set of responses that are waiting + * (via waitLoadedTyped) for the unit associated with the abstract file to be loaded and completely typechecked. + */ + protected val waitLoadedTypeResponses = new ResponseMap + + /** A map that associates with each abstract file the set of responses that ware waiting + * (via build) for the unit associated with the abstract file to be parsed and entered + */ + protected var getParsedEnteredResponses = new ResponseMap + + private def cleanResponses(rmap: ResponseMap): Unit = { + for ((source, rs) <- rmap.toList) { + for (r <- rs) { + if (getUnit(source).isEmpty) + r raise new NoSuchUnitError(source.file) + if (r.isComplete) + rmap(source) -= r + } + if (rmap(source).isEmpty) + rmap -= source + } + } + + private def cleanAllResponses() { + cleanResponses(waitLoadedTypeResponses) + cleanResponses(getParsedEnteredResponses) + } + + private def checkNoOutstanding(rmap: ResponseMap): Unit = + for ((_, rs) <- rmap.toList; r <- rs) { + debugLog("ERROR: missing response, request will be discarded") + r raise new MissingResponse + } + + def checkNoResponsesOutstanding() { + checkNoOutstanding(waitLoadedTypeResponses) + checkNoOutstanding(getParsedEnteredResponses) + } + + /** The compilation unit corresponding to a source file + * if it does not yet exist create a new one atomically + * Note: We want to remove this. + */ + protected[interactive] def getOrCreateUnitOf(source: SourceFile): RichCompilationUnit = + unitOfFile.getOrElse(source.file, { println("precondition violated: "+source+" is not loaded"); new Exception().printStackTrace(); new RichCompilationUnit(source) }) + + /** Work through toBeRemoved list to remove any units. + * Then return optionally unit associated with given source. + */ + protected[interactive] def getUnit(s: SourceFile): Option[RichCompilationUnit] = { + toBeRemoved.synchronized { + for (f <- toBeRemoved) { + informIDE("removed: "+s) + unitOfFile -= f + allSources = allSources filter (_.file != f) + } + toBeRemoved.clear() + } + unitOfFile get s.file + } + + /** A list giving all files to be typechecked in the order they should be checked. + */ + protected var allSources: List[SourceFile] = List() + + private var lastException: Option[Throwable] = None + + /** A list of files that crashed the compiler. 
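The waitLoadedTypeResponses and getParsedEnteredResponses maps above are what let a client wait for a unit to be fully typechecked without blocking the presentation compiler thread. The corresponding client call looks roughly like this (reusing the illustrative `compiler` and `source` from the earlier sketch; the timeout is arbitrary):

    val typed = new Response[compiler.Tree]
    compiler.askLoadedTyped(source, typed)   // completed once the background pass has typechecked the unit
    typed.get(5000L) match {
      case Some(Left(tree)) => println("typed: " + tree.pos.source.file.name)
      case Some(Right(exc)) => println("typechecking failed: " + exc)
      case None             => println("not ready yet; poll again later")
    }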
They will be ignored during background + * compilation until they are removed from this list. + */ + private var ignoredFiles: Set[AbstractFile] = Set() + + /** Flush the buffer of sources that are ignored during background compilation. */ + def clearIgnoredFiles() { + ignoredFiles = Set() + } + + /** Remove a crashed file from the ignore buffer. Background compilation will take it into account + * and errors will be reported against it. */ + def enableIgnoredFile(file: AbstractFile) { + ignoredFiles -= file + debugLog("Removed crashed file %s. Still in the ignored buffer: %s".format(file, ignoredFiles)) + } + + /** The currently active typer run */ + private var currentTyperRun: TyperRun = _ + newTyperRun() + + /** Is a background compiler run needed? + * Note: outOfDate is true as long as there is a background compile scheduled or going on. + */ + private var outOfDate = false + + def isOutOfDate: Boolean = outOfDate + + def demandNewCompilerRun() = { + if (outOfDate) throw new FreshRunReq // cancel background compile + else outOfDate = true // proceed normally and enable new background compile + } + + protected[interactive] var minRunId = 1 + + private[interactive] var interruptsEnabled = true + + private val NoResponse: Response[_] = new Response[Any] + + /** The response that is currently pending, i.e. the compiler + * is working on providing an asnwer for it. + */ + private var pendingResponse: Response[_] = NoResponse + + // ----------- Overriding hooks in nsc.Global ----------------------- + + /** Called from parser, which signals hereby that a method definition has been parsed. + */ + override def signalParseProgress(pos: Position) { + // We only want to be interruptible when running on the PC thread. + if(onCompilerThread) { + checkForMoreWork(pos) + } + } + + /** Called from typechecker, which signals hereby that a node has been completely typechecked. + * If the node includes unit.targetPos, abandons run and returns newly attributed tree. + * Otherwise, if there's some higher priority work to be done, also abandons run with a FreshRunReq. + * @param context The context that typechecked the node + * @param old The original node + * @param result The transformed node + */ + override def signalDone(context: Context, old: Tree, result: Tree) { + if (interruptsEnabled && analyzer.lockedCount == 0) { + if (context.unit.exists && + result.pos.isOpaqueRange && + (result.pos includes context.unit.targetPos)) { + var located = new TypedLocator(context.unit.targetPos) locateIn result + if (located == EmptyTree) { + println("something's wrong: no "+context.unit+" in "+result+result.pos) + located = result + } + throw new TyperResult(located) + } + try { + checkForMoreWork(old.pos) + } catch { + case ex: ValidateException => // Ignore, this will have been reported elsewhere + debugLog("validate exception caught: "+ex) + case ex: Throwable => + log.flush() + throw ex + } + } + } + + /** Called from typechecker every time a context is created. 
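The targetPos/TyperResult mechanism above is what serves targeted requests such as askTypeAt. On the client side that looks roughly as follows (`cursor` is an illustrative character offset into `source`):

    val at   = compiler.rangePos(source, cursor, cursor, cursor)
    val resp = new Response[compiler.Tree]
    compiler.askTypeAt(at, resp)
    resp.get match {
      case Left(tree) => println("tree at cursor: " + tree.getClass.getSimpleName + ", type " + tree.tpe)
      case Right(exc) => println("no typed tree: " + exc)
    }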
+ * Registers the context in a context tree + */ + override def registerContext(c: Context) = c.unit match { + case u: RichCompilationUnit => addContext(u.contexts, c) + case _ => + } + + /** The top level classes and objects currently seen in the presentation compiler + */ + private val currentTopLevelSyms = new mutable.LinkedHashSet[Symbol] + + /** The top level classes and objects no longer seen in the presentation compiler + */ + val deletedTopLevelSyms = new mutable.LinkedHashSet[Symbol] with mutable.SynchronizedSet[Symbol] + + /** Called from typechecker every time a top-level class or object is entered. + */ + override def registerTopLevelSym(sym: Symbol) { currentTopLevelSyms += sym } + + /** Symbol loaders in the IDE parse all source files loaded from a package for + * top-level idents. Therefore, we can detect top-level symbols that have a name + * different from their source file + */ + override lazy val loaders = new BrowsingLoaders { + val global: Global.this.type = Global.this + } + + // ----------------- Polling --------------------------------------- + + case class WorkEvent(atNode: Int, atMillis: Long) + + private var moreWorkAtNode: Int = -1 + private var nodesSeen = 0 + private var lastWasReload = false + + /** The number of pollForWorks after which the presentation compiler yields. + * Yielding improves responsiveness on systems with few cores because it + * gives the UI thread a chance to get new tasks and interrupt the presentation + * compiler with them. + */ + private final val yieldPeriod = 10 + + /** Called from runner thread and signalDone: + * Poll for interrupts and execute them immediately. + * Then, poll for exceptions and execute them. + * Then, poll for work reload/typedTreeAt/doFirst commands during background checking. 
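The doFirst commands mentioned here are how an IDE bumps the file the user is currently editing to the front of the queue; on the client side this is presumably just (with `compiler` and `source` as before):

    compiler.askToDoFirst(source)            // the work item moves the file to the front and un-ignores it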
+ * @param pos The position of the tree if polling while typechecking, NoPosition otherwise + * + */ + private[interactive] def pollForWork(pos: Position) { + if (!interruptsEnabled) return + if (pos == NoPosition || nodesSeen % yieldPeriod == 0) + Thread.`yield`() + + def nodeWithWork(): Option[WorkEvent] = + if (scheduler.moreWork || pendingResponse.isCancelled) Some(new WorkEvent(nodesSeen, System.currentTimeMillis)) + else None + + nodesSeen += 1 + logreplay("atnode", nodeWithWork()) match { + case Some(WorkEvent(id, _)) => + debugLog("some work at node "+id+" current = "+nodesSeen) +// assert(id >= nodesSeen) + moreWorkAtNode = id + case None => + } + + if (nodesSeen >= moreWorkAtNode) { + + logreplay("asked", scheduler.pollInterrupt()) match { + case Some(ir) => + try { + interruptsEnabled = false + debugLog("ask started"+timeStep) + ir.execute() + } finally { + debugLog("ask finished"+timeStep) + interruptsEnabled = true + } + pollForWork(pos) + case _ => + } + + if (logreplay("cancelled", pendingResponse.isCancelled)) { + throw CancelException + } + + logreplay("exception thrown", scheduler.pollThrowable()) match { + case Some(ex: FreshRunReq) => + newTyperRun() + minRunId = currentRunId + demandNewCompilerRun() + + case Some(ShutdownReq) => + scheduler.synchronized { // lock the work queue so no more items are posted while we clean it up + val units = scheduler.dequeueAll { + case item: WorkItem => Some(item.raiseMissing()) + case _ => Some(()) + } + + // don't forget to service interrupt requests + scheduler.dequeueAllInterrupts(_.execute()) + + debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size)) + debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)" + .format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size)) + checkNoResponsesOutstanding() + + log.flush() + scheduler = new NoWorkScheduler + throw ShutdownReq + } + + case Some(ex: Throwable) => log.flush(); throw ex + case _ => + } + + lastWasReload = false + + logreplay("workitem", scheduler.nextWorkItem()) match { + case Some(action) => + try { + debugLog("picked up work item at "+pos+": "+action+timeStep) + action() + debugLog("done with work item: "+action) + } finally { + debugLog("quitting work item: "+action+timeStep) + } + case None => + } + } + } + + protected def checkForMoreWork(pos: Position) { + val typerRun = currentTyperRun + pollForWork(pos) + if (typerRun != currentTyperRun) demandNewCompilerRun() + } + + // ----------------- The Background Runner Thread ----------------------- + + private var threadId = 0 + + /** The current presentation compiler runner */ + @volatile private[interactive] var compileRunner: Thread = newRunnerThread() + + /** Check that the currenyly executing thread is the presentation compiler thread. + * + * Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase) + */ + @elidable(elidable.WARNING) + override def assertCorrectThread() { + assert(initializing || onCompilerThread, + "Race condition detected: You are running a presentation compiler method outside the PC thread.[phase: %s]".format(globalPhase) + + " Please file a ticket with the current stack trace at https://www.assembla.com/spaces/scala-ide/support/tickets") + } + + /** Create a new presentation compiler runner. 
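The assertion above is also the reason client code should never read symbols or types directly from a UI thread; such reads are wrapped in ask, for example (with `sym` some Symbol previously obtained from a completion or type request):

    val signature: String         = compiler.ask(() => sym.defString)
    val memberNames: List[String] = compiler.ask(() => sym.info.members.toList.map(_.nameString))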
+ */ + private def newRunnerThread(): Thread = { + threadId += 1 + compileRunner = new PresentationCompilerThread(this, projectName) + compileRunner.setDaemon(true) + compileRunner.start() + compileRunner + } + + private def ensureUpToDate(unit: RichCompilationUnit) = + if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units. + + /** Compile all loaded source files in the order given by `allSources`. + */ + private[interactive] final def backgroundCompile() { + informIDE("Starting new presentation compiler type checking pass") + reporter.reset() + + // remove any files in first that are no longer maintained by presentation compiler (i.e. closed) + allSources = allSources filter (s => unitOfFile contains (s.file)) + + // ensure all loaded units are parsed + for (s <- allSources; unit <- getUnit(s)) { + // checkForMoreWork(NoPosition) // disabled, as any work done here would be in an inconsistent state + ensureUpToDate(unit) + parseAndEnter(unit) + serviceParsedEntered() + } + + // sleep window + if (afterTypeDelay > 0 && lastWasReload) { + val limit = System.currentTimeMillis() + afterTypeDelay + while (System.currentTimeMillis() < limit) { + Thread.sleep(SleepTime) + checkForMoreWork(NoPosition) + } + } + + // ensure all loaded units are typechecked + for (s <- allSources; if !ignoredFiles(s.file); unit <- getUnit(s)) { + try { + if (!unit.isUpToDate) + if (unit.problems.isEmpty || !settings.YpresentationStrict.value) + typeCheck(unit) + else debugLog("%s has syntax errors. Skipped typechecking".format(unit)) + else debugLog("already up to date: "+unit) + for (r <- waitLoadedTypeResponses(unit.source)) + r set unit.body + serviceParsedEntered() + } catch { + case ex: FreshRunReq => throw ex // propagate a new run request + case ShutdownReq => throw ShutdownReq // propagate a shutdown request + case ex: ControlThrowable => throw ex + case ex: Throwable => + println("[%s]: exception during background compile: ".format(unit.source) + ex) + ex.printStackTrace() + for (r <- waitLoadedTypeResponses(unit.source)) { + r.raise(ex) + } + serviceParsedEntered() + + lastException = Some(ex) + ignoredFiles += unit.source.file + println("[%s] marking unit as crashed (crashedFiles: %s)".format(unit, ignoredFiles)) + + reporter.error(unit.body.pos, "Presentation compiler crashed while type checking this file: %s".format(ex.toString())) + } + } + + // move units removable after this run to the "to-be-removed" buffer + toBeRemoved ++= toBeRemovedAfterRun + + // clean out stale waiting responses + cleanAllResponses() + + // wind down + if (waitLoadedTypeResponses.nonEmpty || getParsedEnteredResponses.nonEmpty) { + // need another cycle to treat those + newTyperRun() + backgroundCompile() + } else { + outOfDate = false + informIDE("Everything is now up to date") + } + } + + /** Service all pending getParsedEntered requests + */ + private def serviceParsedEntered() { + var atOldRun = true + for ((source, rs) <- getParsedEnteredResponses; r <- rs) { + if (atOldRun) { newTyperRun(); atOldRun = false } + getParsedEnteredNow(source, r) + } + getParsedEnteredResponses.clear() + } + + /** Reset unit to unloaded state */ + private def reset(unit: RichCompilationUnit): Unit = { + unit.depends.clear() + unit.defined.clear() + unit.synthetics.clear() + unit.toCheck.clear() + unit.checkedFeatures = Set() + unit.targetPos = NoPosition + unit.contexts.clear() + unit.problems.clear() + unit.body = EmptyTree + unit.status = NotLoaded + } + + /** Parse unit and create a name 
index, unless this has already been done before */ + private def parseAndEnter(unit: RichCompilationUnit): Unit = + if (unit.status == NotLoaded) { + debugLog("parsing: "+unit) + currentTyperRun.compileLate(unit) + if (debugIDE && !reporter.hasErrors) validatePositions(unit.body) + if (!unit.isJava) syncTopLevelSyms(unit) + unit.status = JustParsed + } + + /** Make sure unit is typechecked + */ + private def typeCheck(unit: RichCompilationUnit) { + debugLog("type checking: "+unit) + parseAndEnter(unit) + unit.status = PartiallyChecked + currentTyperRun.typeCheck(unit) + unit.lastBody = unit.body + unit.status = currentRunId + } + + /** Update deleted and current top-level symbols sets */ + def syncTopLevelSyms(unit: RichCompilationUnit) { + val deleted = currentTopLevelSyms filter { sym => + /** We sync after namer phase and it resets all the top-level symbols + * that survive the new parsing + * round to NoPeriod. + */ + sym.sourceFile == unit.source.file && + sym.validTo != NoPeriod && + runId(sym.validTo) < currentRunId + } + for (d <- deleted) { + d.owner.info.decls unlink d + deletedTopLevelSyms += d + currentTopLevelSyms -= d + } + } + + /** Move list of files to front of allSources */ + def moveToFront(fs: List[SourceFile]) { + allSources = fs ::: (allSources diff fs) + } + + // ----------------- Implementations of client commands ----------------------- + + def respond[T](result: Response[T])(op: => T): Unit = + respondGradually(result)(Stream(op)) + + def respondGradually[T](response: Response[T])(op: => Stream[T]): Unit = { + val prevResponse = pendingResponse + try { + pendingResponse = response + if (!response.isCancelled) { + var results = op + while (!response.isCancelled && results.nonEmpty) { + val result = results.head + results = results.tail + if (results.isEmpty) { + response set result + debugLog("responded"+timeStep) + } else response setProvisionally result + } + } + } catch { + case CancelException => + debugLog("cancelled") + case ex: FreshRunReq => + if (debugIDE) { + println("FreshRunReq thrown during response") + ex.printStackTrace() + } + response raise ex + throw ex + + case ex @ ShutdownReq => + if (debugIDE) { + println("ShutdownReq thrown during response") + ex.printStackTrace() + } + response raise ex + throw ex + + case ex: Throwable => + if (debugIDE) { + println("exception thrown during response: "+ex) + ex.printStackTrace() + } + response raise ex + } finally { + pendingResponse = prevResponse + } + } + + private def reloadSource(source: SourceFile) { + val unit = new RichCompilationUnit(source) + unitOfFile(source.file) = unit + toBeRemoved -= source.file + toBeRemovedAfterRun -= source.file + reset(unit) + //parseAndEnter(unit) + } + + /** Make sure a set of compilation units is loaded and parsed */ + private def reloadSources(sources: List[SourceFile]) { + newTyperRun() + minRunId = currentRunId + sources foreach reloadSource + moveToFront(sources) + } + + /** Make sure a set of compilation units is loaded and parsed */ + private[interactive] def reload(sources: List[SourceFile], response: Response[Unit]) { + informIDE("reload: " + sources) + lastWasReload = true + respond(response)(reloadSources(sources)) + demandNewCompilerRun() + } + + private[interactive] def filesDeleted(sources: List[SourceFile], response: Response[Unit]) { + informIDE("files deleted: " + sources) + val deletedFiles = sources.map(_.file).toSet + val deletedSyms = currentTopLevelSyms filter {sym => deletedFiles contains sym.sourceFile} + for (d <- deletedSyms) { + 
d.owner.info.decls unlink d + deletedTopLevelSyms += d + currentTopLevelSyms -= d + } + sources foreach (removeUnitOf(_)) + minRunId = currentRunId + respond(response)(()) + demandNewCompilerRun() + } + + /** Arrange for unit to be removed after run, to give a chance to typecheck the unit fully. + * If we do just removeUnit, some problems with default parameters can ensue. + * Calls to this method could probably be replaced by removeUnit once default parameters are handled more robustly. + */ + private def afterRunRemoveUnitsOf(sources: List[SourceFile]) { + toBeRemovedAfterRun ++= sources map (_.file) + } + + /** A fully attributed tree located at position `pos` */ + private def typedTreeAt(pos: Position): Tree = getUnit(pos.source) match { + case None => + reloadSources(List(pos.source)) + try typedTreeAt(pos) + finally afterRunRemoveUnitsOf(List(pos.source)) + case Some(unit) => + informIDE("typedTreeAt " + pos) + parseAndEnter(unit) + val tree = locateTree(pos) + debugLog("at pos "+pos+" was found: "+tree.getClass+" "+tree.pos.show) + tree match { + case Import(expr, _) => + debugLog("import found"+expr.tpe+(if (expr.tpe == null) "" else " "+expr.tpe.members)) + case _ => + } + if (stabilizedType(tree) ne null) { + debugLog("already attributed: "+tree.symbol+" "+tree.tpe) + tree + } else { + unit.targetPos = pos + try { + debugLog("starting targeted type check") + typeCheck(unit) +// println("tree not found at "+pos) + EmptyTree + } catch { + case ex: TyperResult => new Locator(pos) locateIn ex.tree + } finally { + unit.targetPos = NoPosition + } + } + } + + /** A fully attributed tree corresponding to the entire compilation unit */ + private[interactive] def typedTree(source: SourceFile, forceReload: Boolean): Tree = { + informIDE("typedTree " + source + " forceReload: " + forceReload) + val unit = getOrCreateUnitOf(source) + if (forceReload) reset(unit) + parseAndEnter(unit) + if (unit.status <= PartiallyChecked) typeCheck(unit) + unit.body + } + + /** Set sync var `response` to a fully attributed tree located at position `pos` */ + private[interactive] def getTypedTreeAt(pos: Position, response: Response[Tree]) { + respond(response)(typedTreeAt(pos)) + } + + /** Set sync var `response` to a fully attributed tree corresponding to the + * entire compilation unit */ + private[interactive] def getTypedTree(source: SourceFile, forceReload: Boolean, response: Response[Tree]) { + respond(response)(typedTree(source, forceReload)) + } + + private def withTempUnits[T](sources: List[SourceFile])(f: (SourceFile => RichCompilationUnit) => T): T = { + val unitOfSrc: SourceFile => RichCompilationUnit = src => unitOfFile(src.file) + sources filterNot (getUnit(_).isDefined) match { + case Nil => + f(unitOfSrc) + case unknown => + reloadSources(unknown) + try { + f(unitOfSrc) + } finally + afterRunRemoveUnitsOf(unknown) + } + } + + private def withTempUnit[T](source: SourceFile)(f: RichCompilationUnit => T): T = + withTempUnits(List(source)){ srcToUnit => + f(srcToUnit(source)) + } + + /** Find a 'mirror' of symbol `sym` in unit `unit`. Pre: `unit is loaded. 
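The mirror-symbol lookup described here is what backs CompilerControl.askLinkPos, i.e. navigating from a symbol seen in one unit to its definition site in another source file. A rough client-side sketch (`classSym` and `otherSource` are illustrative, other values as in the earlier sketches):

    import scala.reflect.internal.util.Position

    val link = new Response[Position]
    compiler.askLinkPos(classSym, otherSource, link)
    link.get match {
      case Left(defPos) => println("defined at " + defPos.show)
      case Right(exc)   => println("no link position: " + exc)
    }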
*/ + private def findMirrorSymbol(sym: Symbol, unit: RichCompilationUnit): Symbol = { + val originalTypeParams = sym.owner.typeParams + ensureUpToDate(unit) + parseAndEnter(unit) + val pre = adaptToNewRunMap(ThisType(sym.owner)) + val rawsym = pre.typeSymbol.info.decl(sym.name) + val newsym = rawsym filter { alt => + sym.isType || { + try { + val tp1 = pre.memberType(alt) onTypeError NoType + val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams) + matchesType(tp1, tp2, alwaysMatchSimple = false) || { + debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed") + val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams) + matchesType(tp1, tp3, alwaysMatchSimple = false) || { + debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed") + false + } + } + } + catch { + case ex: ControlThrowable => throw ex + case ex: Throwable => + debugLog("error in findMirrorSymbol: " + ex) + ex.printStackTrace() + false + } + } + } + if (newsym == NoSymbol) { + if (rawsym.exists && !rawsym.isOverloaded) rawsym + else { + debugLog("mirror not found " + sym + " " + unit.source + " " + pre) + NoSymbol + } + } else if (newsym.isOverloaded) { + settings.uniqid.value = true + debugLog("mirror ambiguous " + sym + " " + unit.source + " " + pre + " " + newsym.alternatives) + NoSymbol + } else { + debugLog("mirror found for " + newsym + ": " + newsym.pos) + newsym + } + } + + /** Implements CompilerControl.askLinkPos */ + private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) { + informIDE("getLinkPos "+sym+" "+source) + respond(response) { + if (sym.owner.isClass) { + withTempUnit(source){ u => + findMirrorSymbol(sym, u).pos + } + } else { + debugLog("link not in class "+sym+" "+source+" "+sym.owner) + NoPosition + } + } + } + + private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) { + unit.body foreachPartial { + case DocDef(comment, defn) if defn.symbol == sym => + fillDocComment(defn.symbol, comment) + EmptyTree + case _: ValOrDefDef => + EmptyTree + } + } + + /** Implements CompilerControl.askDocComment */ + private[interactive] def getDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], + response: Response[(String, String, Position)]) { + informIDE(s"getDocComment $sym at $source site $site") + respond(response) { + withTempUnits(fragments.toList.unzip._2){ units => + for((sym, src) <- fragments) { + val mirror = findMirrorSymbol(sym, units(src)) + if (mirror ne NoSymbol) forceDocComment(mirror, units(src)) + } + val mirror = findMirrorSymbol(sym, units(source)) + if (mirror eq NoSymbol) + ("", "", NoPosition) + else { + (expandedDocComment(mirror, site), rawDocComment(mirror), docCommentPos(mirror)) + } + } + } + } + + def stabilizedType(tree: Tree): Type = tree match { + case Ident(_) if tree.symbol.isStable => + singleType(NoPrefix, tree.symbol) + case Select(qual, _) if qual.tpe != null && tree.symbol.isStable => + singleType(qual.tpe, tree.symbol) + case Import(expr, selectors) => + tree.symbol.info match { + case analyzer.ImportType(expr) => expr match { + case s@Select(qual, name) => singleType(qual.tpe, s.symbol) + case i : Ident => i.tpe + case _ => tree.tpe + } + case _ => tree.tpe + } + + case _ => tree.tpe + } + + import analyzer.{SearchResult, ImplicitSearch} + + private[interactive] def getScopeCompletion(pos: Position, response: Response[List[Member]]) { + informIDE("getScopeCompletion" + pos) + respond(response) { 
scopeMembers(pos) } + } + + private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] { + override def default(key: Name) = Set() + + private def matching(sym: Symbol, symtpe: Type, ms: Set[M]): Option[M] = ms.find { m => + (m.sym.name == sym.name) && (m.sym.isType || (m.tpe matches symtpe)) + } + + private def keepSecond(m: M, sym: Symbol, implicitlyAdded: Boolean): Boolean = + m.sym.hasFlag(ACCESSOR | PARAMACCESSOR) && + !sym.hasFlag(ACCESSOR | PARAMACCESSOR) && + (!implicitlyAdded || m.implicitlyAdded) + + def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) { + if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) { + add(sym.accessed, pre, implicitlyAdded)(toMember) + } else if (!sym.name.decodedName.containsName("$") && !sym.isSynthetic && sym.hasRawInfo) { + val symtpe = pre.memberType(sym) onTypeError ErrorType + matching(sym, symtpe, this(sym.name)) match { + case Some(m) => + if (keepSecond(m, sym, implicitlyAdded)) { + //print(" -+ "+sym.name) + this(sym.name) = this(sym.name) - m + toMember(sym, symtpe) + } + case None => + //print(" + "+sym.name) + this(sym.name) = this(sym.name) + toMember(sym, symtpe) + } + } + } + + def addNonShadowed(other: Members[M]) = { + for ((name, ms) <- other) + if (ms.nonEmpty && this(name).isEmpty) this(name) = ms + } + + def allMembers: List[M] = values.toList.flatten + } + + /** Return all members visible without prefix in context enclosing `pos`. */ + private def scopeMembers(pos: Position): List[ScopeMember] = { + typedTreeAt(pos) // to make sure context is entered + val context = doLocateContext(pos) + val locals = new Members[ScopeMember] + val enclosing = new Members[ScopeMember] + def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) = + locals.add(sym, pre, implicitlyAdded = false) { (s, st) => + new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport) + } + def localsToEnclosing() = { + enclosing.addNonShadowed(locals) + locals.clear() + } + //print("add scope members") + var cx = context + while (cx != NoContext) { + for (sym <- cx.scope) + addScopeMember(sym, NoPrefix, EmptyTree) + localsToEnclosing() + if (cx == cx.enclClass) { + val pre = cx.prefix + for (sym <- pre.members) + addScopeMember(sym, pre, EmptyTree) + localsToEnclosing() + } + cx = cx.outer + } + //print("\nadd imported members") + for (imp <- context.imports) { + val pre = imp.qual.tpe + for (sym <- imp.allImportedSymbols) + addScopeMember(sym, pre, imp.qual) + localsToEnclosing() + } + // println() + val result = enclosing.allMembers +// if (debugIDE) for (m <- result) println(m) + result + } + + private[interactive] def getTypeCompletion(pos: Position, response: Response[List[Member]]) { + informIDE("getTypeCompletion " + pos) + respondGradually(response) { typeMembers(pos) } + //if (debugIDE) typeMembers(pos) + } + + private def typeMembers(pos: Position): Stream[List[TypeMember]] = { + var tree = typedTreeAt(pos) + + // if tree consists of just x. or x.fo where fo is not yet a full member name + // ignore the selection and look in just x. + tree match { + case Select(qual, name) if tree.tpe == ErrorType => tree = qual + case _ => + } + + val context = doLocateContext(pos) + + if (tree.tpe == null) + // TODO: guard with try/catch to deal with ill-typed qualifiers. 
+ tree = analyzer.newTyper(context).typedQualifier(tree) + + debugLog("typeMembers at "+tree+" "+tree.tpe) + + val superAccess = tree.isInstanceOf[Super] + val members = new Members[TypeMember] + + def addTypeMember(sym: Symbol, pre: Type, inherited: Boolean, viaView: Symbol) = { + val implicitlyAdded = viaView != NoSymbol + members.add(sym, pre, implicitlyAdded) { (s, st) => + new TypeMember(s, st, + context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded), + inherited, + viaView) + } + } + + /** Create a function application of a given view function to `tree` and typechecked it. + */ + def viewApply(view: SearchResult): Tree = { + assert(view.tree != EmptyTree) + analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false)) + .typed(Apply(view.tree, List(tree)) setPos tree.pos) + .onTypeError(EmptyTree) + } + + val pre = stabilizedType(tree) + + val ownerTpe = tree.tpe match { + case analyzer.ImportType(expr) => expr.tpe + case null => pre + case MethodType(List(), rtpe) => rtpe + case _ => tree.tpe + } + + //print("add members") + for (sym <- ownerTpe.members) + addTypeMember(sym, pre, sym.owner != ownerTpe.typeSymbol, NoSymbol) + members.allMembers #:: { + //print("\nadd enrichment") + val applicableViews: List[SearchResult] = + if (ownerTpe.isErroneous) List() + else new ImplicitSearch( + tree, functionType(List(ownerTpe), AnyClass.tpe), isView = true, + context0 = context.makeImplicit(reportAmbiguousErrors = false)).allImplicits + for (view <- applicableViews) { + val vtree = viewApply(view) + val vpre = stabilizedType(vtree) + for (sym <- vtree.tpe.members) { + addTypeMember(sym, vpre, inherited = false, view.tree.symbol) + } + } + //println() + Stream(members.allMembers) + } + } + + /** Implements CompilerControl.askLoadedTyped */ + private[interactive] def waitLoadedTyped(source: SourceFile, response: Response[Tree], onSameThread: Boolean = true) { + getUnit(source) match { + case Some(unit) => + if (unit.isUpToDate) { + debugLog("already typed") + response set unit.body + } else if (ignoredFiles(source.file)) { + response.raise(lastException.getOrElse(CancelException)) + } else if (onSameThread) { + getTypedTree(source, forceReload = false, response) + } else { + debugLog("wait for later") + outOfDate = true + waitLoadedTypeResponses(source) += response + } + case None => + debugLog("load unit and type") + try reloadSources(List(source)) + finally waitLoadedTyped(source, response, onSameThread) + } + } + + /** Implements CompilerControl.askParsedEntered */ + private[interactive] def getParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree], onSameThread: Boolean = true) { + getUnit(source) match { + case Some(unit) => + getParsedEnteredNow(source, response) + case None => + try { + if (keepLoaded || outOfDate && onSameThread) + reloadSources(List(source)) + } finally { + if (keepLoaded || !outOfDate || onSameThread) + getParsedEnteredNow(source, response) + else + getParsedEnteredResponses(source) += response + } + } + } + + /** Parses and enters given source file, stroring parse tree in response */ + private def getParsedEnteredNow(source: SourceFile, response: Response[Tree]) { + respond(response) { + onUnitOf(source) { unit => + parseAndEnter(unit) + unit.body + } + } + } + + @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") + def getInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) { + try { + interruptsEnabled = false + 
respond(response) { + instrument(source, line) + } + } finally { + interruptsEnabled = true + } + } + + // ---------------- Helper classes --------------------------- + + /** The typer run */ + class TyperRun extends Run { + // units is always empty + + /** canRedefine is used to detect double declarations of classes and objects + * in multiple source files. + * Since the IDE rechecks units several times in the same run, these tests + * are disabled by always returning true here. + */ + override def canRedefine(sym: Symbol) = true + + def typeCheck(unit: CompilationUnit): Unit = { + applyPhase(typerPhase, unit) + } + + /** Apply a phase to a compilation unit + * @return true iff typechecked correctly + */ + private def applyPhase(phase: Phase, unit: CompilationUnit) { + enteringPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit } + } + } + + def newTyperRun() { + currentTyperRun = new TyperRun + } + + class TyperResult(val tree: Tree) extends ControlThrowable + + assert(globalPhase.id == 0) + + implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x) + + // OnTypeError should still catch TypeError because of cyclic references, + // but DivergentImplicit shouldn't leak anymore here + class OnTypeError[T](op: => T) { + def onTypeError(alt: => T) = try { + op + } catch { + case ex: TypeError => + debugLog("type error caught: "+ex) + alt + case ex: DivergentImplicit => + debugLog("divergent implicit caught: "+ex) + alt + } + } + + /** The compiler has been initialized. Constructors are evaluated in textual order, + * so this is set to true only after all super constructors and the primary constructor + * have been executed. + */ + initializing = false +} + +object CancelException extends Exception + diff --git a/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala new file mode 100644 index 0000000000..013b152e96 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala @@ -0,0 +1,47 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc +package interactive + +import scala.collection.mutable.ArrayBuffer +import scala.reflect.internal.util.Position +import reporters.Reporter + +case class Problem(pos: Position, msg: String, severityLevel: Int) + +abstract class InteractiveReporter extends Reporter { + + def compiler: Global + + val otherProblems = new ArrayBuffer[Problem] + + override def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = try { + severity.count += 1 + val problems = + if (compiler eq null) { + otherProblems + } else if (pos.isDefined) { + compiler.getUnit(pos.source) match { + case Some(unit) => + compiler.debugLog(pos.source.file.name + ":" + pos.line + ": " + msg) + unit.problems + case None => + compiler.debugLog(pos.source.file.name + "[not loaded] :" + pos.line + ": " + msg) + otherProblems + } + } else { + compiler.debugLog("[no position] :" + msg) + otherProblems + } + problems += Problem(pos, msg, severity.id) + } catch { + case ex: UnsupportedOperationException => + } + + override def reset() { + super.reset() + otherProblems.clear() + } +} diff --git a/src/interactive/scala/tools/nsc/interactive/Picklers.scala b/src/interactive/scala/tools/nsc/interactive/Picklers.scala new file mode 100644 index 0000000000..b184afd0f5 --- /dev/null +++ 
b/src/interactive/scala/tools/nsc/interactive/Picklers.scala @@ -0,0 +1,189 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc +package interactive + +import util.InterruptReq +import scala.reflect.internal.util.{ SourceFile, BatchSourceFile } +import io.{ AbstractFile, PlainFile, Pickler, CondPickler } +import util.EmptyAction +import scala.reflect.internal.util.{ RangePosition, OffsetPosition, TransparentPosition } +import io.Pickler._ +import scala.collection.mutable +import mutable.ListBuffer + +trait Picklers { self: Global => + + lazy val freshRunReq = + unitPickler + .wrapped { _ => new FreshRunReq } { x => () } + .labelled ("FreshRunReq") + .cond (_.isInstanceOf[FreshRunReq]) + + lazy val shutdownReq = singletonPickler(ShutdownReq) + + def defaultThrowable[T <: Throwable]: CondPickler[T] = javaInstancePickler[T] cond { _ => true } + + implicit lazy val throwable: Pickler[Throwable] = + freshRunReq | shutdownReq | defaultThrowable + + implicit def abstractFile: Pickler[AbstractFile] = + pkl[String] + .wrapped[AbstractFile] { new PlainFile(_) } { _.path } + .asClass (classOf[PlainFile]) + + private val sourceFilesSeen = new mutable.HashMap[AbstractFile, Array[Char]] { + override def default(key: AbstractFile) = Array() + } + + type Diff = (Int /*start*/, Int /*end*/, String /*replacement*/) + + def delta(f: AbstractFile, cs: Array[Char]): Diff = { + val bs = sourceFilesSeen(f) + var start = 0 + while (start < bs.length && start < cs.length && bs(start) == cs(start)) start += 1 + var end = bs.length + var end2 = cs.length + while (end > start && end2 > start && bs(end - 1) == cs(end2 - 1)) { end -= 1; end2 -= 1 } + sourceFilesSeen(f) = cs + (start, end, cs.slice(start, end2).mkString("")) + } + + def patch(f: AbstractFile, d: Diff): Array[Char] = { + val (start, end, replacement) = d + val patched = sourceFilesSeen(f).patch(start, replacement, end - start) + sourceFilesSeen(f) = patched + patched + } + + implicit lazy val sourceFile: Pickler[SourceFile] = + (pkl[AbstractFile] ~ pkl[Diff]).wrapped[SourceFile] { + case f ~ d => new BatchSourceFile(f, patch(f, d)) + } { + f => f.file ~ delta(f.file, f.content) + }.asClass (classOf[BatchSourceFile]) + + lazy val offsetPosition: CondPickler[OffsetPosition] = + (pkl[SourceFile] ~ pkl[Int]) + .wrapped { case x ~ y => new OffsetPosition(x, y) } { p => p.source ~ p.point } + .asClass (classOf[OffsetPosition]) + + lazy val rangePosition: CondPickler[RangePosition] = + (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int]) + .wrapped { case source ~ start ~ point ~ end => new RangePosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end } + .asClass (classOf[RangePosition]) + + lazy val transparentPosition: CondPickler[TransparentPosition] = + (pkl[SourceFile] ~ pkl[Int] ~ pkl[Int] ~ pkl[Int]) + .wrapped { case source ~ start ~ point ~ end => new TransparentPosition(source, start, point, end) } { p => p.source ~ p.start ~ p.point ~ p.end } + .asClass (classOf[TransparentPosition]) + + lazy val noPosition = singletonPickler(NoPosition) + + implicit lazy val position: Pickler[Position] = transparentPosition | rangePosition | offsetPosition | noPosition + + implicit lazy val namePickler: Pickler[Name] = + pkl[String] .wrapped[Name] { + str => if ((str.length > 1) && (str endsWith "!")) newTypeName(str.init) else newTermName(str) + } { + name => if (name.isTypeName) name.toString+"!" 
else name.toString + } + + implicit lazy val symPickler: Pickler[Symbol] = { + def ownerNames(sym: Symbol, buf: ListBuffer[Name]): ListBuffer[Name] = { + if (!sym.isRoot) { + ownerNames(sym.owner, buf) + buf += (if (sym.isModuleClass) sym.sourceModule else sym).name + if (!sym.isType && !sym.isStable) { + val sym1 = sym.owner.info.decl(sym.name) + if (sym1.isOverloaded) { + val index = sym1.alternatives.indexOf(sym) + assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives) + buf += newTermName(index.toString) + } + } + } + buf + } + def makeSymbol(root: Symbol, names: List[Name]): Symbol = names match { + case List() => + root + case name :: rest => + val sym = root.info.decl(name) + if (sym.isOverloaded) makeSymbol(sym.alternatives(rest.head.toString.toInt), rest.tail) + else makeSymbol(sym, rest) + } + pkl[List[Name]] .wrapped { makeSymbol(rootMirror.RootClass, _) } { ownerNames(_, new ListBuffer).toList } + } + + implicit def workEvent: Pickler[WorkEvent] = { + (pkl[Int] ~ pkl[Long]) + .wrapped { case id ~ ms => WorkEvent(id, ms) } { w => w.atNode ~ w.atMillis } + } + + implicit def interruptReq: Pickler[InterruptReq] = { + val emptyIR: InterruptReq = new InterruptReq { type R = Unit; val todo = () => () } + pkl[Unit] .wrapped { _ => emptyIR } { _ => () } + } + + implicit def reloadItem: CondPickler[ReloadItem] = + pkl[List[SourceFile]] + .wrapped { ReloadItem(_, new Response) } { _.sources } + .asClass (classOf[ReloadItem]) + + implicit def askTypeAtItem: CondPickler[AskTypeAtItem] = + pkl[Position] + .wrapped { new AskTypeAtItem(_, new Response) } { _.pos } + .asClass (classOf[AskTypeAtItem]) + + implicit def askTypeItem: CondPickler[AskTypeItem] = + (pkl[SourceFile] ~ pkl[Boolean]) + .wrapped { case source ~ forceReload => new AskTypeItem(source, forceReload, new Response) } { w => w.source ~ w.forceReload } + .asClass (classOf[AskTypeItem]) + + implicit def askTypeCompletionItem: CondPickler[AskTypeCompletionItem] = + pkl[Position] + .wrapped { new AskTypeCompletionItem(_, new Response) } { _.pos } + .asClass (classOf[AskTypeCompletionItem]) + + implicit def askScopeCompletionItem: CondPickler[AskScopeCompletionItem] = + pkl[Position] + .wrapped { new AskScopeCompletionItem(_, new Response) } { _.pos } + .asClass (classOf[AskScopeCompletionItem]) + + implicit def askToDoFirstItem: CondPickler[AskToDoFirstItem] = + pkl[SourceFile] + .wrapped { new AskToDoFirstItem(_) } { _.source } + .asClass (classOf[AskToDoFirstItem]) + + implicit def askLinkPosItem: CondPickler[AskLinkPosItem] = + (pkl[Symbol] ~ pkl[SourceFile]) + .wrapped { case sym ~ source => new AskLinkPosItem(sym, source, new Response) } { item => item.sym ~ item.source } + .asClass (classOf[AskLinkPosItem]) + + implicit def askDocCommentItem: CondPickler[AskDocCommentItem] = + (pkl[Symbol] ~ pkl[SourceFile] ~ pkl[Symbol] ~ pkl[List[(Symbol,SourceFile)]]) + .wrapped { case sym ~ source ~ site ~ fragments => new AskDocCommentItem(sym, source, site, fragments, new Response) } { item => item.sym ~ item.source ~ item.site ~ item.fragments } + .asClass (classOf[AskDocCommentItem]) + + implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] = + pkl[SourceFile] + .wrapped { source => new AskLoadedTypedItem(source, new Response) } { _.source } + .asClass (classOf[AskLoadedTypedItem]) + + implicit def askParsedEnteredItem: CondPickler[AskParsedEnteredItem] = + (pkl[SourceFile] ~ pkl[Boolean]) + .wrapped { case source ~ keepLoaded => new AskParsedEnteredItem(source, keepLoaded, new Response) } { w => w.source 
~ w.keepLoaded } + .asClass (classOf[AskParsedEnteredItem]) + + implicit def emptyAction: CondPickler[EmptyAction] = + pkl[Unit] + .wrapped { _ => new EmptyAction } { _ => () } + .asClass (classOf[EmptyAction]) + + implicit def action: Pickler[() => Unit] = + reloadItem | askTypeAtItem | askTypeItem | askTypeCompletionItem | askScopeCompletionItem | + askToDoFirstItem | askLinkPosItem | askDocCommentItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction +} diff --git a/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala new file mode 100644 index 0000000000..a2d8e5d49a --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala @@ -0,0 +1,51 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + * @author Iulian Dragos + */ +package scala.tools.nsc.interactive + +/** A presentation compiler thread. This is a lightweight class, delegating most + * of its functionality to the compiler instance. + * + */ +final class PresentationCompilerThread(var compiler: Global, name: String = "") + extends Thread("Scala Presentation Compiler [" + name + "]") { + + /** The presentation compiler loop. + */ + override def run() { + compiler.debugLog("starting new runner thread") + while (compiler ne null) try { + compiler.checkNoResponsesOutstanding() + compiler.log.logreplay("wait for more work", { compiler.scheduler.waitForMoreWork(); true }) + compiler.pollForWork(compiler.NoPosition) + while (compiler.isOutOfDate) { + try { + compiler.backgroundCompile() + } catch { + case ex: FreshRunReq => + compiler.debugLog("fresh run req caught, starting new pass") + } + compiler.log.flush() + } + } catch { + case ex @ ShutdownReq => + compiler.debugLog("exiting presentation compiler") + compiler.log.close() + + // make sure we don't keep around stale instances + compiler = null + case ex: Throwable => + compiler.log.flush() + + ex match { + case ex: FreshRunReq => + compiler.debugLog("fresh run req caught outside presentation compiler loop; ignored") // This shouldn't be reported + case _ : Global#ValidateException => // This will have been reported elsewhere + compiler.debugLog("validate exception caught outside presentation compiler loop; ignored") + case _ => ex.printStackTrace(); compiler.informIDE("Fatal Error: "+ex) + } + } + } +} diff --git a/src/interactive/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala new file mode 100644 index 0000000000..04c06b9357 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala @@ -0,0 +1,218 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc +package interactive + +import scala.reflect.internal.util._ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.io._ +import scala.tools.nsc.scratchpad.SourceInserter +import java.io.FileWriter + +/** Interface of interactive compiler to a client such as an IDE + */ +object REPL { + + val versionMsg = "Scala compiler " + + Properties.versionString + " -- " + + Properties.copyrightString + + val prompt = "> " + + var reporter: ConsoleReporter = _ + + private def replError(msg: String) { + reporter.error(/*new Position */FakePos("scalac"), + msg + "\n scalac -help gives more information") + } + + def process(args: Array[String]) { + val settings = new 
Settings(replError) + reporter = new ConsoleReporter(settings) + val command = new CompilerCommand(args.toList, settings) + if (command.settings.version.value) + reporter.echo(versionMsg) + else { + try { + object compiler extends Global(command.settings, reporter) { +// printTypings = true + } + if (reporter.hasErrors) { + reporter.flush() + return + } + if (command.shouldStopWithInfo) { + reporter.echo(command.getInfoMessage(compiler)) + } else { + run(compiler) + } + } catch { + case ex @ FatalError(msg) => + if (true || command.settings.debug.value) // !!! + ex.printStackTrace() + reporter.error(null, "fatal error: " + msg) + } + } + } + + def main(args: Array[String]) { + process(args) + sys.exit(if (reporter.hasErrors) 1 else 0) + } + + def loop(action: (String) => Unit) { + Console.print(prompt) + try { + val line = Console.readLine() + if (line.length() > 0) { + action(line) + } + loop(action) + } + catch { + case _: java.io.EOFException => //nop + } + } + + /** Commands: + * + * reload file1 ... fileN + * typeat file off1 off2? + * complete file off1 off2? + */ + def run(comp: Global) { + val reloadResult = new Response[Unit] + val typeatResult = new Response[comp.Tree] + val completeResult = new Response[List[comp.Member]] + val typedResult = new Response[comp.Tree] + val structureResult = new Response[comp.Tree] + @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") + val instrumentedResult = new Response[(String, Array[Char])] + + def makePos(file: String, off1: String, off2: String) = { + val source = toSourceFile(file) + comp.rangePos(source, off1.toInt, off1.toInt, off2.toInt) + } + + def doTypeAt(pos: Position) { + comp.askTypeAt(pos, typeatResult) + show(typeatResult) + } + + def doComplete(pos: Position) { + comp.askTypeCompletion(pos, completeResult) + show(completeResult) + } + + def doStructure(file: String) { + comp.askParsedEntered(toSourceFile(file), keepLoaded = false, structureResult) + show(structureResult) + } + + /** Write instrumented source file to disk. + * @param iFullName The full name of the first top-level object in source + * @param iContents An Array[Char] containing the instrumented source + * @return The name of the instrumented source file + */ + @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") + def writeInstrumented(iFullName: String, suffix: String, iContents: Array[Char]): String = { + val iSimpleName = iFullName drop ((iFullName lastIndexOf '.') + 1) + val iSourceName = iSimpleName + suffix + val ifile = new FileWriter(iSourceName) + ifile.write(iContents) + ifile.close() + iSourceName + } + + /** The method for implementing worksheet functionality. + * @param arguments a file name, followed by optional command line arguments that are passed + * to the compiler that processes the instrumented source. + * @param line A line number that controls uop to which line results should be produced + * If line = -1, results are produced for all expressions in the worksheet. + * @return The generated file content containing original source in the left column + * and outputs in the right column, or None if the presentation compiler + * does not respond to askInstrumented. 
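For a quick impression of this front end, one illustrative way to launch it from code, followed by a sample session against the command set documented above (flag, file name and offsets are made up):

    scala.tools.nsc.interactive.REPL.process(Array("-usejavacp"))
    //   > reload Foo.scala
    //   > typeat Foo.scala 123
    //   > complete Foo.scala 123
    //   > structure Foo.scala
    //   > quit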
+ */ + @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") + def instrument(arguments: List[String], line: Int): Option[(String, String)] = { + val source = toSourceFile(arguments.head) + // strip right hand side comment column and any trailing spaces from all lines + val strippedContents = SourceInserter.stripRight(source.content) + val strippedSource = new BatchSourceFile(source.file, strippedContents) + println("stripped source = "+strippedSource+":"+strippedContents.mkString) + comp.askReload(List(strippedSource), reloadResult) + comp.askInstrumented(strippedSource, line, instrumentedResult) + using(instrumentedResult) { + case (iFullName, iContents) => + println(s"instrumented source $iFullName = ${iContents.mkString}") + val iSourceName = writeInstrumented(iFullName, "$instrumented.scala", iContents) + val sSourceName = writeInstrumented(iFullName, "$stripped.scala", strippedContents) + (iSourceName, sSourceName) +/* + * val vdirOpt = compileInstrumented(iSourceName, arguments.tail) + runInstrumented(vdirOpt, iFullName, strippedSource.content) + */ + } + } + + loop { line => + (line split " ").toList match { + case "reload" :: args => + comp.askReload(args map toSourceFile, reloadResult) + show(reloadResult) + case "reloadAndAskType" :: file :: millis :: Nil => + comp.askReload(List(toSourceFile(file)), reloadResult) + Thread.sleep(millis.toInt) + println("ask type now") + comp.askLoadedTyped(toSourceFile(file), typedResult) + typedResult.get + case List("typeat", file, off1, off2) => + doTypeAt(makePos(file, off1, off2)) + case List("typeat", file, off1) => + doTypeAt(makePos(file, off1, off1)) + case List("complete", file, off1, off2) => + doComplete(makePos(file, off1, off2)) + case List("complete", file, off1) => + doComplete(makePos(file, off1, off1)) + case "instrument" :: arguments => + println(instrument(arguments, -1)) + case "instrumentTo" :: line :: arguments => + println(instrument(arguments, line.toInt)) + case List("quit") => + comp.askShutdown() + sys.exit(1) + case List("structure", file) => + doStructure(file) + case _ => + print("""Available commands: + | reload ... 
+ | reloadAndAskType + | typed + | typeat + | typeat + | complete + | compile + | instrument * + | instrumentTo * + | structure + | quit + |""".stripMargin) + } + } + } + + def toSourceFile(name: String) = new BatchSourceFile(new PlainFile(new java.io.File(name))) + + def using[T, U](svar: Response[T])(op: T => U): Option[U] = { + val res = svar.get match { + case Left(result) => Some(op(result)) + case Right(exc) => exc.printStackTrace; println("ERROR: "+exc); None + } + svar.clear() + res + } + + def show[T](svar: Response[T]) = using(svar)(res => println("==> "+res)) +} diff --git a/src/interactive/scala/tools/nsc/interactive/RangePositions.scala b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala new file mode 100644 index 0000000000..c57e1da184 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala @@ -0,0 +1,14 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc +package interactive + +@deprecated("Use scala.reflect.internal.Positions", "2.11.0") +trait RangePositions extends scala.reflect.internal.Positions with ast.Trees with ast.Positions { + self: scala.tools.nsc.Global => + + override def useOffsetPositions = false +} diff --git a/src/interactive/scala/tools/nsc/interactive/Response.scala b/src/interactive/scala/tools/nsc/interactive/Response.scala new file mode 100644 index 0000000000..f36f769ec9 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/Response.scala @@ -0,0 +1,105 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc +package interactive + +/** Typical interaction, given a predicate , a function , + * and an exception handler : + * + * val TIMEOUT = 100 // (milliseconds) or something like that + * val r = new Response() + * while (!r.isComplete && !r.isCancelled) { + * if () r.cancel() + * else r.get(TIMEOUT) match { + * case Some(Left(data)) => (data) + * case Some(Right(exc)) => (exc) + * case None => + * } + * } + */ +class Response[T] { + + private var data: Option[Either[T, Throwable]] = None + private var complete = false + private var cancelled = false + + /** Set provisional data, more to come + */ + def setProvisionally(x: T) = synchronized { + data = Some(Left(x)) + } + + /** Set final data, and mark response as complete. + */ + def set(x: T) = synchronized { + data = Some(Left(x)) + complete = true + notifyAll() + } + + /** Store raised exception in data, and mark response as complete. + */ + def raise(exc: Throwable) = synchronized { + data = Some(Right(exc)) + complete = true + notifyAll() + } + + /** Get final data, wait as long as necessary. + * When interrupted will return with Right(InterruptedException) + */ + def get: Either[T, Throwable] = synchronized { + while (!complete) { + try { + wait() + } catch { + case exc: InterruptedException => raise(exc) + } + } + data.get + } + + /** Optionally get data within `timeout` milliseconds. + * When interrupted will return with Some(Right(InterruptedException)) + * When timeout ends, will return last stored provisional result, + * or else None if no provisional result was stored. 
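+ *
+ * A sketch of the polling protocol described in the class comment above; `userCancelled`,
+ * `display` and `handle` stand in for client-supplied callbacks and are not part of this API:
+ * {{{
+ *   val TIMEOUT = 100 // milliseconds
+ *   val r = new Response[String]
+ *   while (!r.isComplete && !r.isCancelled) {
+ *     if (userCancelled()) r.cancel()           // assumed client-side check
+ *     else r.get(TIMEOUT) match {
+ *       case Some(Left(data)) => display(data)  // provisional or final data
+ *       case Some(Right(exc)) => handle(exc)    // the computation failed
+ *       case None             =>                // timed out, poll again
+ *     }
+ *   }
+ * }}}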
+ */ + def get(timeout: Long): Option[Either[T, Throwable]] = synchronized { + val start = System.currentTimeMillis + var current = start + while (!complete && start + timeout > current) { + try { + wait(timeout - (current - start)) + } catch { + case exc: InterruptedException => raise(exc) + } + current = System.currentTimeMillis + } + data + } + + /** Final data set was stored + */ + def isComplete = synchronized { complete } + + /** Cancel action computing this response (Only the + * party that calls get on a response may cancel). + */ + def cancel() = synchronized { cancelled = true } + + /** A cancel request for this response has been issued + */ + def isCancelled = synchronized { cancelled } + + def clear() = synchronized { + data = None + complete = false + cancelled = false + } +} + + + + diff --git a/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala new file mode 100644 index 0000000000..b83c2cd095 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala @@ -0,0 +1,58 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc +package interactive + +import scala.reflect.internal.util.{SourceFile, Position, NoPosition} +import scala.collection.mutable.ArrayBuffer + +trait RichCompilationUnits { self: Global => + + /** The status value of a unit that has not yet been loaded */ + final val NotLoaded = -2 + + /** The status value of a unit that has not yet been typechecked */ + final val JustParsed = -1 + + /** The status value of a unit that has been partially typechecked */ + final val PartiallyChecked = 0 + + class RichCompilationUnit(source: SourceFile) extends CompilationUnit(source) { + + /** The runid of the latest compiler run that typechecked this unit, + * or else @see NotLoaded, JustParsed + */ + var status: Int = NotLoaded + + /** Unit has been parsed */ + def isParsed: Boolean = status >= JustParsed + + /** Unit has been typechecked, but maybe not in latest runs */ + def isTypeChecked: Boolean = status > JustParsed + + /** Unit has been typechecked and is up to date */ + def isUpToDate: Boolean = status >= minRunId + + /** the current edit point offset */ + var editPoint: Int = -1 + + /** The problems reported for this unit */ + val problems = new ArrayBuffer[Problem] + + /** The position of a targeted type check + * If this is different from NoPosition, the type checking + * will stop once a tree that contains this position range + * is fully attributed. 
+ */ + var _targetPos: Position = NoPosition + override def targetPos: Position = _targetPos + def targetPos_=(p: Position) { _targetPos = p } + + var contexts: Contexts = new Contexts + + /** The last fully type-checked body of this unit */ + var lastBody: Tree = EmptyTree + } +} diff --git a/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala new file mode 100644 index 0000000000..7af9174704 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala @@ -0,0 +1,200 @@ +package scala.tools.nsc +package interactive + +import scala.reflect.internal.util.{SourceFile, BatchSourceFile, RangePosition} +import scala.collection.mutable.ArrayBuffer +import scala.reflect.internal.Chars.{isLineBreakChar, isWhitespace} +import ast.parser.Tokens._ + +@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0") +trait ScratchPadMaker { self: Global => + + import definitions._ + + private case class Patch(offset: Int, text: String) + + private class Patcher(contents: Array[Char], lex: LexicalStructure, endOffset: Int) extends Traverser { + var objectName: String = "" + + private val patches = new ArrayBuffer[Patch] + private val toPrint = new ArrayBuffer[String] + private var skipped = 0 + private var resNum: Int = -1 + + private def nextRes(): String = { + resNum += 1 + "res$"+resNum + } + + private def nameType(name: String, tpe: Type): String = { + // if name ends in symbol character, add a space to separate it from the following ':' + val pad = if (Character.isLetter(name.last) || Character.isDigit(name.last)) "" else " " + name+pad+": "+tpe + } + + private def nameType(sym: Symbol): String = nameType(sym.name.decoded, sym.tpe) + + private def literal(str: String) = "\"\"\""+str+"\"\"\"" + + private val prologue = ";import scala.runtime.WorksheetSupport._; def main(args: Array[String])=$execute{" + + private val epilogue = "}" + + private def applyPendingPatches(offset: Int) = { + if (skipped == 0) patches += Patch(offset, prologue) + for (msg <- toPrint) patches += Patch(offset, ";System.out.println("+msg+")") + toPrint.clear() + } + + /** The position where to insert an instrumentation statement in front of giuven statement. + * This is at the latest `stat.pos.start`. But in order not to mess with column numbers + * in position we try to insert it at the end of the previous token instead. + * Furthermore, `(' tokens have to be skipped because they do not show up + * in statement range positions. 
+ */ + private def instrumentPos(start: Int): Int = { + val (prevToken, prevStart, prevEnd) = lex.locate(start - 1) + if (prevStart >= start) start + else if (prevToken == LPAREN) instrumentPos(prevStart) + else prevEnd + } + + private def addSkip(stat: Tree): Unit = { + val ipos = instrumentPos(stat.pos.start) + if (stat.pos.start > skipped) applyPendingPatches(ipos) + if (stat.pos.start >= endOffset) + patches += Patch(ipos, ";$stop()") + var end = stat.pos.end + if (end > skipped) { + while (end < contents.length && !isLineBreakChar(contents(end))) end += 1 + patches += Patch(ipos, ";$skip("+(end-skipped)+"); ") + skipped = end + } + } + + private def addSandbox(expr: Tree) = {} +// patches += (Patch(expr.pos.start, "sandbox("), Patch(expr.pos.end, ")")) + + private def resultString(prefix: String, expr: String) = + literal(prefix + " = ") + " + $show(" + expr + ")" + + private def traverseStat(stat: Tree) = + if (stat.pos.isInstanceOf[RangePosition]) { + stat match { + case ValDef(_, _, _, rhs) => + addSkip(stat) + if (stat.symbol.isLazy) + toPrint += literal(nameType(stat.symbol) + " = ") + else if (!stat.symbol.isSynthetic) { + addSandbox(rhs) + toPrint += resultString(nameType(stat.symbol), stat.symbol.name.toString) + } + case DefDef(_, _, _, _, _, _) => + addSkip(stat) + toPrint += literal(nameType(stat.symbol)) + case Annotated(_, arg) => + traverse(arg) + case DocDef(_, defn) => + traverse(defn) + case _ => + if (stat.isTerm) { + addSkip(stat) + if (stat.tpe.typeSymbol == UnitClass) { + addSandbox(stat) + } else { + val resName = nextRes() + val dispResName = resName filter ('$' != _) + val offset = instrumentPos(stat.pos.start) + patches += Patch(offset, "val " + resName + " = ") + addSandbox(stat) + toPrint += resultString(nameType(dispResName, stat.tpe), resName) + } + } + } + } + + override def traverse(tree: Tree): Unit = tree match { + case PackageDef(_, _) => + super.traverse(tree) + case ModuleDef(_, name, Template(_, _, body)) => + val topLevel = objectName.isEmpty + if (topLevel) { + objectName = tree.symbol.fullName + body foreach traverseStat + if (skipped != 0) { // don't issue prologue and epilogue if there are no instrumented statements + applyPendingPatches(skipped) + patches += Patch(skipped, epilogue) + } + } + case _ => + } + + /** The patched text. 
+ * @require traverse is run first + */ + def result: Array[Char] = { + val reslen = contents.length + (patches map (_.text.length)).sum + val res = Array.ofDim[Char](reslen) + var lastOffset = 0 + var from = 0 + var to = 0 + for (Patch(offset, text) <- patches) { + val delta = offset - lastOffset + assert(delta >= 0) + Array.copy(contents, from, res, to, delta) + from += delta + to += delta + lastOffset = offset + text.copyToArray(res, to) + to += text.length + } + assert(contents.length - from == reslen - to) + Array.copy(contents, from, res, to, contents.length - from) + res + } + } + + class LexicalStructure(source: SourceFile) { + val token = new ArrayBuffer[Int] + val startOffset = new ArrayBuffer[Int] + val endOffset = new ArrayBuffer[Int] + private val scanner = new syntaxAnalyzer.UnitScanner(new CompilationUnit(source)) + scanner.init() + while (scanner.token != EOF) { + startOffset += scanner.offset + token += scanner.token + scanner.nextToken() + endOffset += scanner.lastOffset + } + + /** @return token that starts before or at offset, its startOffset, its endOffset + */ + def locate(offset: Int): (Int, Int, Int) = { + var lo = 0 + var hi = token.length - 1 + while (lo < hi) { + val mid = (lo + hi + 1) / 2 + if (startOffset(mid) <= offset) lo = mid + else hi = mid - 1 + } + (token(lo), startOffset(lo), endOffset(lo)) + } + } + + /** Compute an instrumented version of a sourcefile. + * @param source The given sourcefile. + * @param line The line up to which results should be printed, -1 = whole document. + * @return A pair consisting of + * - the fully qualified name of the first top-level object definition in the file. + * or "" if there are no object definitions. + * - the text of the instrumented program which, when run, + * prints its output and all defined values in a comment column. + */ + protected def instrument(source: SourceFile, line: Int): (String, Array[Char]) = { + val tree = typedTree(source, forceReload = true) + val endOffset = if (line < 0) source.length else source.lineToOffset(line + 1) + val patcher = new Patcher(source.content, new LexicalStructure(source), endOffset) + patcher.traverse(tree) + (patcher.objectName, patcher.result) + } +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala new file mode 100644 index 0000000000..a4a2de9b51 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala @@ -0,0 +1,123 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc +package interactive +package tests + +import core._ +import scala.collection.mutable.ListBuffer + +/** A base class for writing interactive compiler tests. + * + * This class tries to cover common functionality needed when testing the presentation + * compiler: instantiation source files, reloading, creating positions, instantiating + * the presentation compiler, random stress testing. + * + * By default, this class loads all scala and java classes found under `src/`, going + * recursively into subfolders. Loaded classes are found in `sourceFiles`. trait `TestResources` + * The presentation compiler is available through `compiler`. + * + * It is easy to test member completion, type and hyperlinking at a given position. Source + * files are searched for `TextMarkers`. 
By default, the completion marker is `/*!*/`, the + * typedAt marker is `/*?*/` and the hyperlinking marker is `/*#*/`. Place these markers in + * your source files, and the test framework will automatically pick them up and test the + * corresponding actions. Sources are reloaded by `askReload(sourceFiles)` (blocking + * call). All ask operations are placed on the work queue without waiting for each one to + * complete before asking the next. After all asks, it waits for each response in turn and + * prints the result. The default timeout is 1 second per operation. + * + * To define a custom operation you have to: + * + * (1) Define a new marker by extending `TestMarker` + * (2) Provide an implementation for the operation you want to check by extending `PresentationCompilerTestDef` + * (3) Add the class defined in (1) to the set of executed test actions by calling `++` on `InteractiveTest`. + * + * Then you can simply use the new defined `marker` in your test sources and the testing + * framework will automatically pick it up. + * + * @see Check existing tests under test/files/presentation + * + * @author Iulian Dragos + * @author Mirco Dotta + */ +abstract class InteractiveTest + extends AskParse + with AskShutdown + with AskReload + with AskLoadedTyped + with PresentationCompilerInstance + with CoreTestDefs + with InteractiveTestSettings { self => + + protected val runRandomTests = false + + /** Should askAllSources wait for each ask to finish before issuing the next? */ + override protected val synchronousRequests = true + + /** The core set of test actions that are executed during each test run are + * `CompletionAction`, `TypeAction` and `HyperlinkAction`. + * Override this member if you need to change the default set of executed test actions. + */ + protected lazy val testActions: ListBuffer[PresentationCompilerTestDef] = { + ListBuffer(new CompletionAction(compiler), new TypeAction(compiler), new HyperlinkAction(compiler)) + } + + /** Add new presentation compiler actions to test. Presentation compiler's test + * need to extends trait `PresentationCompilerTestDef`. + */ + protected def ++(tests: PresentationCompilerTestDef*) { + testActions ++= tests + } + + /** Test's entry point */ + def main(args: Array[String]) { + try execute() + finally shutdown() + } + + protected def execute(): Unit = { + loadSources() + runDefaultTests() + } + + /** Load all sources before executing the test. */ + protected def loadSources() { + // ask the presentation compiler to track all sources. We do + // not wait for the file to be entirely typed because we do want + // to exercise the presentation compiler on scoped type requests. + askReload(sourceFiles) + // make sure all sources are parsed before running the test. This + // is because test may depend on the sources having been parsed at + // least once + askParse(sourceFiles) + } + + /** Run all defined `PresentationCompilerTestDef` */ + protected def runDefaultTests() { + //TODO: integrate random tests!, i.e.: if (runRandomTests) randomTests(20, sourceFiles) + testActions.foreach(_.runTest()) + } + + /** Perform n random tests with random changes. */ + /**** + private def randomTests(n: Int, files: Array[SourceFile]) { + val tester = new Tester(n, files, settings) { + override val compiler = self.compiler + override val reporter = new reporters.StoreReporter + } + tester.run() + } + ****/ + + /** shutdown the presentation compiler. */ + protected def shutdown() { + askShutdown() + + // this is actually needed to force exit on test completion. 
+ // Note: May be a bug on either the testing framework or (less likely) + // the presentation compiler + sys.exit(0) + } +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala new file mode 100644 index 0000000000..ad5c61b2b0 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala @@ -0,0 +1,69 @@ +package scala.tools.nsc +package interactive +package tests + +import java.io.File.pathSeparatorChar +import java.io.File.separatorChar +import scala.tools.nsc.interactive.tests.core.PresentationCompilerInstance +import scala.tools.nsc.io.{File,Path} +import core.Reporter +import core.TestSettings + +trait InteractiveTestSettings extends TestSettings with PresentationCompilerInstance { + /** Character delimiter for comments in .opts file */ + private final val CommentStartDelimiter = "#" + + private final val TestOptionsFileExtension = "flags" + + /** Prepare the settings object. Load the .opts file and adjust all paths from the + * Unix-like syntax to the platform specific syntax. This is necessary so that a + * single .opts file can be used on all platforms. + * + * @note Bootclasspath is treated specially. If there is a -bootclasspath option in + * the file, the 'usejavacp' setting is set to false. This ensures that the + * bootclasspath takes precedence over the scala-library used to run the current + * test. + */ + override protected def prepareSettings(settings: Settings) { + def adjustPaths(paths: settings.PathSetting*) { + for (p <- paths if argsString.contains(p.name)) p.value = p.value.map { + case '/' => separatorChar + case ':' => pathSeparatorChar + case c => c + } + } + + // need this so that the classpath comes from what partest + // instead of scala.home + settings.usejavacp.value = !argsString.contains("-bootclasspath") + + // pass any options coming from outside + settings.processArgumentString(argsString) match { + case (false, rest) => + println("error processing arguments (unprocessed: %s)".format(rest)) + case _ => () + } + + // Make the --sourcepath path provided in the .flags file (if any) relative to the test's base directory + if(settings.sourcepath.isSetByUser) + settings.sourcepath.value = (baseDir / Path(settings.sourcepath.value)).path + + adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath) + } + + /** If there's a file ending in .opts, read it and parse it for cmd line arguments. 
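+ *
+ * For instance, a test's options file might contain (paths illustrative only):
+ * {{{
+ *   # run the test against an explicit bootclasspath
+ *   -bootclasspath /opt/scala/lib/scala-library.jar -deprecation
+ * }}}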
*/ + protected val argsString = { + val optsFile = outDir / "%s.%s".format(System.getProperty("partest.testname"), TestOptionsFileExtension) + val str = try File(optsFile).slurp() catch { + case e: java.io.IOException => "" + } + str.lines.filter(!_.startsWith(CommentStartDelimiter)).mkString(" ") + } + + override protected def printClassPath(implicit reporter: Reporter) { + reporter.println("\toutDir: %s".format(outDir.path)) + reporter.println("\tbaseDir: %s".format(baseDir.path)) + reporter.println("\targsString: %s".format(argsString)) + super.printClassPath(reporter) + } +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala new file mode 100644 index 0000000000..9382d5890f --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala @@ -0,0 +1,208 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc +package interactive +package tests + +import scala.reflect.internal.util._ +import reporters._ +import io.AbstractFile +import scala.collection.mutable.ArrayBuffer + +class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) { + + val reporter = new StoreReporter + val compiler = new Global(settings, reporter) + + def askAndListen[T, U](msg: String, arg: T, op: (T, Response[U]) => Unit) { + if (settings.verbose.value) print(msg+" "+arg+": ") + val TIMEOUT = 10 // ms + val limit = System.currentTimeMillis() + randomDelayMillis + val res = new Response[U] + op(arg, res) + while (!res.isComplete && !res.isCancelled) { + if (System.currentTimeMillis() > limit) { + print("c"); res.cancel() + } else res.get(TIMEOUT) match { + case Some(Left(t)) => + /**/ + if (settings.verbose.value) println(t) + case Some(Right(ex)) => + ex.printStackTrace() + println(ex) + case None => + } + } + } + + def askReload(sfs: SourceFile*) = askAndListen("reload", sfs.toList, compiler.askReload) + def askTypeAt(pos: Position) = askAndListen("type at", pos, compiler.askTypeAt) + def askTypeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askTypeCompletion) + def askScopeCompletion(pos: Position) = askAndListen("type at", pos, compiler.askScopeCompletion) + + val rand = new java.util.Random() + + private def randomInverse(n: Int) = n / (rand.nextInt(n) + 1) + + private def randomDecreasing(n: Int) = { + var r = rand.nextInt((1 to n).sum) + var limit = n + var result = 0 + while (r > limit) { + result += 1 + r -= limit + limit -= 1 + } + result + } + + def randomSourceFileIdx() = rand.nextInt(inputs.length) + + def randomBatchesPerSourceFile(): Int = randomDecreasing(100) + + def randomChangesPerBatch(): Int = randomInverse(50) + + def randomPositionIn(sf: SourceFile) = rand.nextInt(sf.content.length) + + def randomNumChars() = randomInverse(100) + + def randomDelayMillis = randomInverse(10000) + + class Change(sfidx: Int, start: Int, nchars: Int, toLeft: Boolean) { + + private var pos = start + private var deleted: List[Char] = List() + + override def toString = + "In "+inputs(sfidx)+" at "+start+" take "+nchars+" to "+ + (if (toLeft) "left" else "right") + + def deleteOne() { + val sf = inputs(sfidx) + deleted = sf.content(pos) :: deleted + val sf1 = new BatchSourceFile(sf.file, sf.content.take(pos) ++ sf.content.drop(pos + 1)) + inputs(sfidx) = sf1 + askReload(sf1) + } + + def deleteAll() { + print("/"+nchars) + for (i <- 0 until nchars) { + if (toLeft) { + if (pos > 0 && pos 
<= inputs(sfidx).length) { + pos -= 1 + deleteOne() + } + } else { + if (pos < inputs(sfidx).length) { + deleteOne() + } + } + } + } + + def insertAll() { + for (chr <- if (toLeft) deleted else deleted.reverse) { + val sf = inputs(sfidx) + val (pre, post) = sf./**/content splitAt pos + pos += 1 + val sf1 = new BatchSourceFile(sf.file, pre ++ (chr +: post)) + inputs(sfidx) = sf1 + askReload(sf1) + } + } + } + + val testComment = "/**/" + + def testFileChanges(sfidx: Int) = { + lazy val testPositions: Seq[Int] = { + val sf = inputs(sfidx) + val buf = new ArrayBuffer[Int] + var pos = sf.content.indexOfSlice(testComment) + while (pos > 0) { + buf += pos + pos = sf.content.indexOfSlice(testComment, pos + 1) + } + buf + } + def otherTest() { + if (testPositions.nonEmpty) { + val pos = new OffsetPosition(inputs(sfidx), rand.nextInt(testPositions.length)) + rand.nextInt(3) match { + case 0 => askTypeAt(pos) + case 1 => askTypeCompletion(pos) + case 2 => askScopeCompletion(pos) + } + } + } + for (i <- 0 until randomBatchesPerSourceFile()) { + val changes = Vector.fill(/**/randomChangesPerBatch()) { + /**/ + new Change(sfidx, randomPositionIn(inputs(sfidx)), randomNumChars(), rand.nextBoolean()) + } + doTest(sfidx, changes, testPositions, otherTest) match { + case Some(errortrace) => + println(errortrace) + minimize(errortrace) + case None => + } + } + } + + def doTest(sfidx: Int, changes: Seq[Change], testPositions: Seq[Int], otherTest: () => Unit): Option[ErrorTrace] = { + print("new round with "+changes.length+" changes:") + changes foreach (_.deleteAll()) + otherTest() + def errorCount() = compiler.ask(() => reporter.ERROR.count) +// println("\nhalf test round: "+errorCount()) + changes.view.reverse foreach (_.insertAll()) + otherTest() + println("done test round: "+errorCount()) + if (errorCount() != 0) + Some(ErrorTrace(sfidx, changes, reporter.infos, inputs(sfidx).content)) + else + None + } + + case class ErrorTrace( + sfidx: Int, changes: Seq[Change], infos: scala.collection.Set[reporter.Info], content: Array[Char]) { + override def toString = + "Sourcefile: "+inputs(sfidx)+ + "\nChanges:\n "+changes.mkString("\n ")+ + "\nErrors:\n "+infos.mkString("\n ")+ + "\nContents:\n"+content.mkString + } + + def minimize(etrace: ErrorTrace) {} + + /**/ + def run() { + askReload(inputs: _*) + for (i <- 0 until ntests) + testFileChanges(randomSourceFileIdx()) + } +} + +/* A program to do presentation compiler stress tests. + * Usage: + * + * scala scala.tools.nsc.interactive.test.Tester + * + * where is the number os tests to be run and is the set of files to test. + * This will do random deletions and re-insertions in any of the files. + * At places where an empty comment /**/ appears it will in addition randomly + * do ask-types, type-completions, or scope-completions. 
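+ *
+ * For instance, a hypothetical invocation over two sources could be
+ *
+ *   scala scala.tools.nsc.interactive.tests.Tester 50 src/A.scala src/B.scala
+ *
+ * which runs 50 random edit/ask rounds over A.scala and B.scala.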
+ */ +object Tester { + def main(args: Array[String]) { + val settings = new Settings() + val (_, filenames) = settings.processArguments(args.toList.tail, processAll = true) + println("filenames = "+filenames) + val files = filenames.toArray map (str => new BatchSourceFile(AbstractFile.getFile(str)): SourceFile) + new Tester(args(0).toInt, files, settings).run() + sys.exit(0) + } +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala new file mode 100644 index 0000000000..8d446cbbf8 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala @@ -0,0 +1,109 @@ +/* NSC -- new Scala compiler + * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc +package interactive +package tests.core + +import scala.tools.nsc.interactive.Response +import scala.reflect.internal.util.Position +import scala.reflect.internal.util.SourceFile + +/** + * A trait for defining commands that can be queried to the + * presentation compiler. + * */ +trait AskCommand { + + /** presentation compiler's instance. */ + protected val compiler: Global + + /** + * Presentation compiler's `askXXX` actions work by doing side-effects + * on a `Response` instance passed as an argument during the `askXXX` + * call. + * The defined method `ask` is meant to encapsulate this behavior. + * */ + protected def ask[T](op: Response[T] => Unit): Response[T] = { + val r = new Response[T] + op(r) + r + } +} + +/** Ask the presentation compiler to shut-down. */ +trait AskShutdown extends AskCommand { + def askShutdown() = compiler.askShutdown() +} + +/** Ask the presentation compiler to parse a sequence of `sources` */ +trait AskParse extends AskCommand { + import compiler.Tree + + /** `sources` need to be entirely parsed before running the test + * (else commands such as `AskCompletionAt` may fail simply because + * the source's AST is not yet loaded). + */ + def askParse(sources: Seq[SourceFile]) { + val responses = sources map (askParse(_)) + responses.foreach(_.get) // force source files parsing + } + + private def askParse(src: SourceFile, keepLoaded: Boolean = true): Response[Tree] = { + ask { + compiler.askParsedEntered(src, keepLoaded, _) + } + } +} + +/** Ask the presentation compiler to reload a sequence of `sources` */ +trait AskReload extends AskCommand { + + /** Reload the given source files and wait for them to be reloaded. */ + protected def askReload(sources: Seq[SourceFile])(implicit reporter: Reporter): Response[Unit] = { + val sortedSources = (sources map (_.file.name)).sorted + reporter.println("reload: " + sortedSources.mkString(", ")) + + ask { + compiler.askReload(sources.toList, _) + } + } +} + +/** Ask the presentation compiler for completion at a given position. */ +trait AskCompletionAt extends AskCommand { + import compiler.Member + + private[tests] def askCompletionAt(pos: Position)(implicit reporter: Reporter): Response[List[Member]] = { + reporter.println("\naskTypeCompletion at " + pos.source.file.name + ((pos.line, pos.column))) + + ask { + compiler.askTypeCompletion(pos, _) + } + } +} + +/** Ask the presentation compiler for type info at a given position. 
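+ *
+ * As with the other ask traits, the request goes through the `ask` helper; a sketch,
+ * assuming a `pos` is already at hand:
+ * {{{
+ *   val response = ask[compiler.Tree] { compiler.askTypeAt(pos, _) }
+ *   response.get // Left(tree) on success, Right(exc) on failure
+ * }}}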
*/ +trait AskTypeAt extends AskCommand { + import compiler.Tree + + private[tests] def askTypeAt(pos: Position)(implicit reporter: Reporter): Response[Tree] = { + reporter.println("\naskType at " + pos.source.file.name + ((pos.line, pos.column))) + + ask { + compiler.askTypeAt(pos, _) + } + } +} + +trait AskLoadedTyped extends AskCommand { + import compiler.Tree + + protected def askLoadedTyped(source: SourceFile)(implicit reporter: Reporter): Response[Tree] = { + ask { + compiler.askLoadedTyped(source, _) + } + } + +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala new file mode 100644 index 0000000000..9085eb56e6 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala @@ -0,0 +1,100 @@ +package scala.tools.nsc +package interactive +package tests.core + +import scala.reflect.internal.util.Position + +/** Set of core test definitions that are executed for each test run. */ +private[tests] trait CoreTestDefs + extends PresentationCompilerRequestsWorkingMode { + + import scala.tools.nsc.interactive.Global + + /** Ask the presentation compiler for completion at all locations + * (in all sources) where the defined `marker` is found. */ + class CompletionAction(override val compiler: Global) + extends PresentationCompilerTestDef + with AskCompletionAt { + + def memberPrinter(member: compiler.Member): String = + "[accessible: %5s] ".format(member.accessible) + "`" + (member.sym.toString() + member.tpe.toString()).trim() + "`" + + override def runTest() { + askAllSources(CompletionMarker) { pos => + askCompletionAt(pos) + } { (pos, members) => + withResponseDelimiter { + reporter.println("[response] aksTypeCompletion at " + format(pos)) + // we skip getClass because it changed signature between 1.5 and 1.6, so there is no + // universal check file that we can provide for this to work + reporter.println("retrieved %d members".format(members.size)) + compiler ask { () => + val filtered = members.filterNot(member => member.sym.name.toString == "getClass" || member.sym.isConstructor) + reporter.println(filtered.map(memberPrinter).sortBy(_.toString()).mkString("\n")) + } + } + } + } + } + + /** Ask the presentation compiler for type info at all locations + * (in all sources) where the defined `marker` is found. */ + class TypeAction(override val compiler: Global) + extends PresentationCompilerTestDef + with AskTypeAt { + + override def runTest() { + askAllSources(TypeMarker) { pos => + askTypeAt(pos) + } { (pos, tree) => + withResponseDelimiter { + reporter.println("[response] askTypeAt at " + format(pos)) + compiler.ask(() => reporter.println(tree)) + } + } + } + } + + /** Ask the presentation compiler for hyperlink at all locations + * (in all sources) where the defined `marker` is found. 
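+ *
+ * A test source might carry the hyperlink marker like this (purely illustrative;
+ * the marker is just a comment, so the snippet remains valid Scala):
+ * {{{
+ *   class Base { def foo = 42 }
+ *   class Derived extends Base { def bar = foo/*#*/ + 1 }
+ * }}}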
*/ + class HyperlinkAction(override val compiler: Global) + extends PresentationCompilerTestDef + with AskTypeAt + with AskCompletionAt { + + override def runTest() { + askAllSources(HyperlinkMarker) { pos => + askTypeAt(pos)(NullReporter) + } { (pos, tree) => + if(tree.symbol == compiler.NoSymbol) { + reporter.println("\nNo symbol is associated with tree: "+tree) + } + else { + reporter.println("\naskHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + pos.source.file.name) + val r = new Response[Position] + // `tree.symbol.sourceFile` was discovered to be null when testing using virtpatmat on the akka presentation test, where a position had shifted to point to `Int` + // askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile! + val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null + val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null + + sourceFiles.find(_.path == treePath) match { + case Some(source) => + compiler.askLinkPos(tree.symbol, source, r) + r.get match { + case Left(pos) => + val resolvedPos = if (tree.symbol.pos.isDefined) tree.symbol.pos else pos + withResponseDelimiter { + reporter.println("[response] found askHyperlinkPos for `" + tree.symbol.name + "` at " + format(resolvedPos) + " " + tree.symbol.sourceFile.name) + } + case Right(ex) => + ex.printStackTrace() + } + case None => + reporter.println("[error] could not locate sourcefile `" + treeName + "`." + + "Hint: Does the looked up definition come form a binary?") + } + } + } + } + } +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala new file mode 100644 index 0000000000..5cda0e53fb --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala @@ -0,0 +1,34 @@ +package scala.tools.nsc +package interactive +package tests.core + +import reporters.{Reporter => CompilerReporter} + +/** Trait encapsulating the creation of a presentation compiler's instance.*/ +private[tests] trait PresentationCompilerInstance extends TestSettings { + protected val settings = new Settings + protected val withDocComments = false + + protected val compilerReporter: CompilerReporter = new InteractiveReporter { + override def compiler = PresentationCompilerInstance.this.compiler + } + + protected lazy val compiler: Global = { + prepareSettings(settings) + new Global(settings, compilerReporter) { + override def forScaladoc = withDocComments + } + } + + /** + * Called before instantiating the presentation compiler's instance. + * You should provide an implementation of this method if you need + * to customize the `settings` used to instantiate the presentation compiler. 
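+ *
+ * A minimal override might, for instance, force the java classpath onto the settings
+ * (illustrative only; adjust to whatever the test needs):
+ * {{{
+ *   override protected def prepareSettings(settings: Settings) {
+ *     settings.usejavacp.value = true
+ *   }
+ * }}}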
+ * */ + protected def prepareSettings(settings: Settings) {} + + protected def printClassPath(implicit reporter: Reporter) { + reporter.println("\tbootClassPath: %s".format(settings.bootclasspath.value)) + reporter.println("\tverbose: %b".format(settings.verbose.value)) + } +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala new file mode 100644 index 0000000000..b5ae5f2d75 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala @@ -0,0 +1,62 @@ +package scala.tools.nsc +package interactive +package tests.core + +import scala.reflect.internal.util.Position +import scala.reflect.internal.util.SourceFile + +trait PresentationCompilerRequestsWorkingMode extends TestResources { + + protected def synchronousRequests: Boolean + + protected def askAllSources[T] = if (synchronousRequests) askAllSourcesSync[T] _ else askAllSourcesAsync[T] _ + + /** Perform an operation on all sources at all positions that match the given + * `marker`. For instance, askAllSources(TypeMarker)(askTypeAt)(println) would + * ask the type at all positions marked with `TypeMarker.marker` and println the result. + */ + private def askAllSourcesAsync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) { + val positions = allPositionsOf(str = marker.marker) + val responses = for (pos <- positions) yield askAt(pos) + + for ((pos, r) <- positions zip responses) withResponse(pos, r)(f) + } + + /** Synchronous version of askAllSources. Each position is treated in turn, waiting for the + * response before going to the next one. + */ + private def askAllSourcesSync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) { + val positions = allPositionsOf(str = marker.marker) + for (pos <- positions) withResponse(pos, askAt(pos))(f) + } + + /** All positions of the given string in all source files. */ + private def allPositionsOf(srcs: Seq[SourceFile] = sourceFiles, str: String): Seq[Position] = + for (s <- srcs; p <- positionsOf(s, str)) yield p + + /** Return all positions of the given str in the given source file. */ + private def positionsOf(source: SourceFile, str: String): Seq[Position] = { + val buf = new scala.collection.mutable.ListBuffer[Position] + var pos = source.content.indexOfSlice(str) + while (pos >= 0) { + buf += source.position(pos - 1) // we need the position before the first character of this marker + pos = source.content.indexOfSlice(str, pos + 1) + } + buf.toList + } + + private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) { + /** Return the filename:line:col version of this position. 
*/ + def showPos(pos: Position): String = + "%s:%d:%d".format(pos.source.file.name, pos.line, pos.column) + + response.get(TIMEOUT) match { + case Some(Left(t)) => + f(pos, t) + case None => + println("TIMEOUT: " + showPos(pos)) + case Some(r) => + println("ERROR: " + r) + } + } +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala new file mode 100644 index 0000000000..4d5b4e1129 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala @@ -0,0 +1,18 @@ +package scala.tools.nsc.interactive.tests.core + +import scala.reflect.internal.util.Position + +trait PresentationCompilerTestDef { + + private[tests] def runTest(): Unit + + protected def withResponseDelimiter(block: => Unit)(implicit reporter: Reporter) { + def printDelimiter() = reporter.println("=" * 80) + printDelimiter() + block + printDelimiter() + } + + protected def format(pos: Position): String = + (if(pos.isDefined) "(%d,%d)".format(pos.line, pos.column) else "") +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala new file mode 100644 index 0000000000..631504cda5 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala @@ -0,0 +1,15 @@ +package scala.tools.nsc.interactive.tests.core + +private[tests] trait Reporter { + def println(msg: Any): Unit +} + +/** Reporter that simply prints all messages in the standard output.*/ +private[tests] object ConsoleReporter extends Reporter { + def println(msg: Any) { Console.println(msg) } +} + +/** Reporter that swallows all passed message. */ +private[tests] object NullReporter extends Reporter { + def println(msg: Any) {} +} \ No newline at end of file diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala new file mode 100644 index 0000000000..676feeba8a --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala @@ -0,0 +1,20 @@ +package scala.tools.nsc.interactive.tests.core + +import scala.reflect.internal.util.{SourceFile,BatchSourceFile} +import scala.tools.nsc.io.{AbstractFile,Path} + +private[tests] object SourcesCollector { + type SourceFilter = Path => Boolean + + /** + * All files below `base` directory that pass the `filter`. + * With the default `filter` only .scala and .java files are collected. 
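+ *
+ * For example (a sketch; the directory is illustrative):
+ * {{{
+ *   val onlyScala: SourceFilter = _.extension == "scala"
+ *   val sources = SourcesCollector(Path("test/files/presentation/mytest/src"), onlyScala)
+ * }}}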
+ * */ + def apply(base: Path, filter: SourceFilter): Array[SourceFile] = { + assert(base.isDirectory) + base.walk.filter(filter).map(source).toList.toArray.sortBy(_.file.name) + } + + private def source(file: Path): SourceFile = source(AbstractFile.getFile(file.toFile)) + private def source(file: AbstractFile): SourceFile = new BatchSourceFile(file) +} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala new file mode 100644 index 0000000000..a5c228a549 --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala @@ -0,0 +1,27 @@ +package scala.tools.nsc.interactive.tests.core + +case class DuplicateTestMarker(msg: String) extends Exception(msg) + +object TestMarker { + import scala.collection.mutable.Map + private val markers: Map[String, TestMarker] = Map.empty + + private def checkForDuplicate(marker: TestMarker) { + markers.get(marker.marker) match { + case None => markers(marker.marker) = marker + case Some(otherMarker) => + val msg = "Marker `%s` is already used by %s. Please choose a different marker for %s".format(marker.marker, marker, otherMarker) + throw new DuplicateTestMarker(msg) + } + } +} + +abstract case class TestMarker(marker: String) { + TestMarker.checkForDuplicate(this) +} + +object CompletionMarker extends TestMarker("/*!*/") + +object TypeMarker extends TestMarker("/*?*/") + +object HyperlinkMarker extends TestMarker("/*#*/") diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala new file mode 100644 index 0000000000..887c3cf29b --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala @@ -0,0 +1,12 @@ +package scala.tools.nsc.interactive.tests.core + +import scala.tools.nsc.io.Path +import scala.reflect.internal.util.SourceFile + +/** Resources used by the test. */ +private[tests] trait TestResources extends TestSettings { + /** collected source files that are to be used by the test runner */ + protected lazy val sourceFiles: Array[SourceFile] = SourcesCollector(baseDir / sourceDir, isScalaOrJavaSource) + + private def isScalaOrJavaSource(file: Path): Boolean = file.extension == "scala" | file.extension == "java" +} \ No newline at end of file diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala new file mode 100644 index 0000000000..681204172b --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala @@ -0,0 +1,19 @@ +package scala.tools.nsc.interactive.tests.core + +import scala.tools.nsc.io.Path + +/** Common settings for the test. */ +private[tests] trait TestSettings { + protected final val TIMEOUT = 10000 // timeout in milliseconds + + /** The root directory for this test suite, usually the test kind ("test/files/presentation"). */ + protected val outDir = Path(Option(System.getProperty("partest.cwd")).getOrElse(".")) + + /** The base directory for this test, usually a subdirectory of "test/files/presentation/" */ + protected val baseDir = Option(System.getProperty("partest.testname")).map(outDir / _).getOrElse(Path(".")) + + /** Where source files are placed. 
*/ + protected val sourceDir = "src" + + protected implicit val reporter: Reporter = ConsoleReporter +} -- cgit v1.2.3 From 1dd88d9291b82f5fe2b46357aa0446cb87027c6e Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 6 Mar 2013 08:16:52 -0800 Subject: Teach partest the magic of abstraction. Some new partest abilities which were necessary to deal with additional standard jars. These new abilities aren't yet put to the test because scaladoc and interactive are still going into the compiler jar. No longer need each jar or classes directory be named directly for partest to find them. --- .../scala/tools/partest/nest/DirectRunner.scala | 26 ++++++---------------- .../scala/tools/partest/nest/FileManager.scala | 14 ++++++++++++ .../tools/partest/nest/ReflectiveRunner.scala | 20 +++++------------ 3 files changed, 26 insertions(+), 34 deletions(-) (limited to 'src') diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala index 3aaf784cad..7e4c3b842c 100644 --- a/src/partest/scala/tools/partest/nest/DirectRunner.scala +++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala @@ -38,27 +38,15 @@ trait DirectRunner { def runTestsForFiles(_kindFiles: List[File], kind: String): immutable.Map[String, TestState] = { System.setProperty("line.separator", "\n") - // @partest maintainer: we cannot create a fresh file manager here - // since the FM must respect --buildpath and --classpath from the command line - // for example, see how it's done in ReflectiveRunner - //val consFM = new ConsoleFileManager - //import consFM.{ latestCompFile, latestLibFile, latestPartestFile } - val latestCompFile = new File(fileManager.LATEST_COMP) - val latestReflectFile = new File(fileManager.LATEST_REFLECT) - val latestLibFile = new File(fileManager.LATEST_LIB) - val latestPartestFile = new File(fileManager.LATEST_PARTEST) - val latestActorsFile = new File(fileManager.LATEST_ACTORS) - val scalacheckURL = PathSettings.scalaCheck.toURL - val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs( - scalacheckURL :: (List(latestCompFile, latestReflectFile, latestLibFile, latestActorsFile, latestPartestFile).map(_.toURI.toURL)) - ) - - val kindFiles = onlyValidTestPaths(_kindFiles) - val pool = Executors.newFixedThreadPool(numThreads) - val manager = new RunnerManager(kind, fileManager, TestRunParams(scalaCheckParentClassLoader)) - val futures = kindFiles map (f => (f, pool submit callable(manager runTest f))) toMap + val allUrls = PathSettings.scalaCheck.toURL :: fileManager.latestUrls + val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(allUrls) + val kindFiles = onlyValidTestPaths(_kindFiles) + val pool = Executors.newFixedThreadPool(numThreads) + val manager = new RunnerManager(kind, fileManager, TestRunParams(scalaCheckParentClassLoader)) + val futures = kindFiles map (f => (f, pool submit callable(manager runTest f))) toMap pool.shutdown() + try if (!pool.awaitTermination(4, TimeUnit.HOURS)) NestUI.warning("Thread pool timeout elapsed before all tests were complete!") catch { case t: InterruptedException => diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala index a4c4e7e6a6..a32c56e973 100644 --- a/src/partest/scala/tools/partest/nest/FileManager.scala +++ b/src/partest/scala/tools/partest/nest/FileManager.scala @@ -64,6 +64,20 @@ trait FileManager extends FileUtil { var LATEST_PARTEST: String var LATEST_ACTORS: String + protected def relativeToLibrary(what: 
String): String = { + if (LATEST_LIB endsWith ".jar") { + (SFile(LATEST_LIB).parent / s"scala-$what.jar").toAbsolute.path + } + else { + (SFile(LATEST_LIB).parent.parent / "classes" / what).toAbsolute.path + } + } + def latestScaladoc = relativeToLibrary("scaladoc") + def latestInteractive = relativeToLibrary("interactive") + def latestPaths = List(LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS, latestScaladoc, latestInteractive) + def latestFiles = latestPaths map (p => new java.io.File(p)) + def latestUrls = latestFiles map (_.toURI.toURL) + var showDiff = false var updateCheck = false var showLog = false diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala index 3446dd0f72..05cae7b238 100644 --- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala +++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala @@ -50,22 +50,15 @@ class ReflectiveRunner { else // auto detection new ConsoleFileManager - import fileManager. - { latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestScalapFile, latestActorsFile } - val files = - Array(latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestScalapFile, latestActorsFile) map (x => io.File(x)) - - val sepUrls = files map (_.toURL) - var sepLoader = new URLClassLoader(sepUrls, null) - // this is a workaround for https://issues.scala-lang.org/browse/SI-5433 - // when that bug is fixed, this paragraph of code can be safely removed + // when that bug is fixed, the addition of PathSettings.srcCodeLib can be removed // we hack into the classloader that will become parent classloader for scalac // this way we ensure that reflective macro lookup will pick correct Code.lift - sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: files) map (_.toURL), null) + val sepUrls = PathSettings.srcCodeLib.toURI.toURL :: fileManager.latestUrls + val sepLoader = new URLClassLoader(sepUrls.toArray, null) if (isPartestDebug) - println("Loading classes from:\n" + sepUrls.mkString("\n")) + println("Loading classes from:\n " + fileManager.latestUrls.mkString("\n ")) // @partest maintainer: it seems to me that commented lines are incorrect // if classPath is not empty, then it has been provided by the --classpath option @@ -76,11 +69,8 @@ class ReflectiveRunner { // case Some(cp) => Nil // case _ => files.toList map (_.path) //} - val paths = files.toList map (_.path) - - val newClasspath = ClassPath.join(paths: _*) - setProp("java.class.path", newClasspath) + setProp("java.class.path", ClassPath.join(fileManager.latestPaths: _*)) // don't let partest find pluginsdir; in ant build, standard plugin has dedicated test suite //setProp("scala.home", latestLibFile.parent.parent.path) -- cgit v1.2.3 From 3d5c675982803e3a17262245a05266b2f5b64bc3 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 6 Mar 2013 09:01:10 -0800 Subject: Moved scaladoc code into src/scaladoc. This leverages the preceding several commits to push scaladoc specific code into src/scaladoc. It also renders some scanner code more comprehensible. 
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 29 +-- .../scala/tools/nsc/ast/parser/Scanners.scala | 155 +++++++------- .../scala/tools/nsc/javac/JavaScanners.scala | 10 - .../scala/tools/nsc/symtab/SymbolLoaders.scala | 12 +- .../tools/nsc/typechecker/SuperAccessors.scala | 12 -- .../scala/tools/nsc/doc/ScaladocAnalyzer.scala | 229 +++++++++++++++++++++ .../scala/tools/nsc/doc/ScaladocGlobal.scala | 99 ++------- .../scala/tools/partest/ScaladocModelTest.scala | 8 +- test/files/presentation/doc.check | 1 - test/files/presentation/doc/doc.scala | 145 ------------- test/files/presentation/doc/src/Class.scala | 1 - test/files/presentation/doc/src/p/Base.scala | 11 - test/files/presentation/doc/src/p/Derived.scala | 9 - test/pending/presentation/doc.check | 1 + test/pending/presentation/doc/doc.scala | 145 +++++++++++++ test/pending/presentation/doc/src/Class.scala | 1 + test/pending/presentation/doc/src/p/Base.scala | 11 + test/pending/presentation/doc/src/p/Derived.scala | 9 + test/scaladoc/run/t5527.check | 9 + 19 files changed, 507 insertions(+), 390 deletions(-) create mode 100644 src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala delete mode 100644 test/files/presentation/doc.check delete mode 100755 test/files/presentation/doc/doc.scala delete mode 100755 test/files/presentation/doc/src/Class.scala delete mode 100755 test/files/presentation/doc/src/p/Base.scala delete mode 100755 test/files/presentation/doc/src/p/Derived.scala create mode 100644 test/pending/presentation/doc.check create mode 100755 test/pending/presentation/doc/doc.scala create mode 100755 test/pending/presentation/doc/src/Class.scala create mode 100755 test/pending/presentation/doc/src/p/Base.scala create mode 100755 test/pending/presentation/doc/src/p/Derived.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 522c45f9fa..9218ad3330 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -650,31 +650,10 @@ self => /* --------- COMMENT AND ATTRIBUTE COLLECTION ----------------------------- */ - /** Join the comment associated with a definition. */ - def joinComment(trees: => List[Tree]): List[Tree] = { - val doc = in.flushDoc - if ((doc ne null) && doc.raw.length > 0) { - val joined = trees map { - t => - DocDef(doc, t) setPos { - if (t.pos.isDefined) { - val pos = doc.pos.withEnd(t.pos.endOrPoint) - // always make the position transparent - pos.makeTransparent - } else { - t.pos - } - } - } - joined.find(_.pos.isOpaqueRange) foreach { - main => - val mains = List(main) - joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) } - } - joined - } - else trees - } + /** A hook for joining the comment associated with a definition. + * Overridden by scaladoc. 
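+ *
+ * The deleted block above shows what the scaladoc override has to do: wrap each tree
+ * in a `DocDef` carrying the pending comment, roughly (a sketch, not the verbatim override):
+ * {{{
+ *   override def joinComment(trees: => List[Tree]): List[Tree] = {
+ *     val doc = in.flushDoc
+ *     if ((doc ne null) && doc.raw.length > 0) trees map (t => DocDef(doc, t) setPos t.pos)
+ *     else trees
+ *   }
+ * }}}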
+ */ + def joinComment(trees: => List[Tree]): List[Tree] = trees /* ---------- TREE CONSTRUCTION ------------------------------------------- */ diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 78041fda08..6ad1c50075 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -83,6 +83,69 @@ trait Scanners extends ScannersCommon { abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon { private def isDigit(c: Char) = java.lang.Character isDigit c + private var openComments = 0 + protected def putCommentChar(): Unit = nextChar() + + @tailrec private def skipLineComment(): Unit = ch match { + case SU | CR | LF => + case _ => nextChar() ; skipLineComment() + } + private def maybeOpen() { + putCommentChar() + if (ch == '*') { + putCommentChar() + openComments += 1 + } + } + private def maybeClose(): Boolean = { + putCommentChar() + (ch == '/') && { + putCommentChar() + openComments -= 1 + openComments == 0 + } + } + @tailrec final def skipNestedComments(): Unit = ch match { + case '/' => maybeOpen() ; skipNestedComments() + case '*' => if (!maybeClose()) skipNestedComments() + case SU => incompleteInputError("unclosed comment") + case _ => putCommentChar() ; skipNestedComments() + } + def skipDocComment(): Unit = skipNestedComments() + def skipBlockComment(): Unit = skipNestedComments() + + private def skipToCommentEnd(isLineComment: Boolean) { + nextChar() + if (isLineComment) skipLineComment() + else { + openComments = 1 + val isDocComment = (ch == '*') && { nextChar(); true } + if (isDocComment) { + // Check for the amazing corner case of /**/ + if (ch == '/') + nextChar() + else + skipDocComment() + } + else skipBlockComment() + } + } + + /** @pre ch == '/' + * Returns true if a comment was skipped. + */ + def skipComment(): Boolean = ch match { + case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; true + case _ => false + } + def flushDoc(): DocComment = null + + /** To prevent doc comments attached to expressions from leaking out of scope + * onto the next documentable entity, they are discarded upon passing a right + * brace, bracket, or parenthesis. + */ + def discardDocBuffer(): Unit = () + def isAtEnd = charOffset >= buf.length def resume(lastCode: Int) = { @@ -130,22 +193,6 @@ trait Scanners extends ScannersCommon { cbuf.clear() } - /** Should doc comments be built? 
*/ - def buildDocs: Boolean = forScaladoc - - /** holder for the documentation comment - */ - var docComment: DocComment = null - - def flushDoc: DocComment = { - val ret = docComment - docComment = null - ret - } - - protected def foundComment(value: String, start: Int, end: Int) = () - protected def foundDocComment(value: String, start: Int, end: Int) = () - private class TokenData0 extends TokenData /** we need one token lookahead and one token history @@ -218,12 +265,15 @@ trait Scanners extends ScannersCommon { case RBRACE => while (!sepRegions.isEmpty && sepRegions.head != RBRACE) sepRegions = sepRegions.tail - if (!sepRegions.isEmpty) sepRegions = sepRegions.tail - docComment = null + if (!sepRegions.isEmpty) + sepRegions = sepRegions.tail + + discardDocBuffer() case RBRACKET | RPAREN => if (!sepRegions.isEmpty && sepRegions.head == lastToken) sepRegions = sepRegions.tail - docComment = null + + discardDocBuffer() case ARROW => if (!sepRegions.isEmpty && sepRegions.head == lastToken) sepRegions = sepRegions.tail @@ -516,62 +566,6 @@ trait Scanners extends ScannersCommon { } } - private def skipComment(): Boolean = { - - if (ch == '/' || ch == '*') { - - val comment = new StringBuilder("/") - def appendToComment() = comment.append(ch) - - if (ch == '/') { - do { - appendToComment() - nextChar() - } while ((ch != CR) && (ch != LF) && (ch != SU)) - } else { - docComment = null - var openComments = 1 - appendToComment() - nextChar() - appendToComment() - var buildingDocComment = false - if (ch == '*' && buildDocs) { - buildingDocComment = true - } - while (openComments > 0) { - do { - do { - if (ch == '/') { - nextChar(); appendToComment() - if (ch == '*') { - nextChar(); appendToComment() - openComments += 1 - } - } - if (ch != '*' && ch != SU) { - nextChar(); appendToComment() - } - } while (ch != '*' && ch != SU) - while (ch == '*') { - nextChar(); appendToComment() - } - } while (ch != '/' && ch != SU) - if (ch == '/') nextChar() - else incompleteInputError("unclosed comment") - openComments -= 1 - } - - if (buildingDocComment) - foundDocComment(comment.toString, offset, charOffset - 2) - } - - foundComment(comment.toString, offset, charOffset - 2) - true - } else { - false - } - } - /** Can token start a statement? 
*/ def inFirstOfStat(token: Int) = token match { case EOF | CATCH | ELSE | EXTENDS | FINALLY | FORSOME | MATCH | WITH | YIELD | @@ -1281,17 +1275,6 @@ trait Scanners extends ScannersCommon { } } } - - override def foundComment(value: String, start: Int, end: Int) { - val pos = new RangePosition(unit.source, start, start, end) - unit.comment(pos, value) - } - - override def foundDocComment(value: String, start: Int, end: Int) { - val docPos = new RangePosition(unit.source, start, start, end) - docComment = new DocComment(value, docPos) - unit.comment(docPos, value) - } } class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) { diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index 3813736535..f9b1e57e66 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -235,16 +235,6 @@ trait JavaScanners extends ast.parser.ScannersCommon { cbuf.setLength(0) } - /** buffer for the documentation comment - */ - var docBuffer: StringBuilder = null - - /** add the given character to the documentation buffer - */ - protected def putDocChar(c: Char) { - if (docBuffer ne null) docBuffer.append(c) - } - private class JavaTokenData0 extends JavaTokenData /** we need one token lookahead diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index ffccc11474..61ac07d18f 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -32,14 +32,10 @@ abstract class SymbolLoaders { protected def signalError(root: Symbol, ex: Throwable) { if (settings.debug.value) ex.printStackTrace() - // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented - // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects - // that are not in their correct place (see bug for details) - if (!settings.isScaladoc) - globalError(ex.getMessage() match { - case null => "i/o error while loading " + root.name - case msg => "error while loading " + root.name + ", " + msg - }) + globalError(ex.getMessage() match { + case null => "i/o error while loading " + root.name + case msg => "error while loading " + root.name + ", " + msg + }) } /** Enter class with given `name` into scope of `root` diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 1d28add6e0..e8925ce2d0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -186,18 +186,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT log("Expanded '%s' to '%s' in %s".format(savedName, s.name, sym)) } } - if (settings.verbose.value && forScaladoc && !sym.isAnonymousClass) { - println("========== scaladoc of "+sym+" =============================") - println(toJavaDoc(expandedDocComment(sym))) - for (member <- sym.info.members) { - println(member+":"+sym.thisType.memberInfo(member)+"\n"+ - toJavaDoc(expandedDocComment(member, sym))) - for ((useCase, comment, pos) <- useCases(member, sym)) { - println("usecase "+useCase+":"+useCase.info) - println(toJavaDoc(comment)) - } - } - } super.transform(tree) } transformClassDef diff --git 
a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala new file mode 100644 index 0000000000..37d95a9d95 --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -0,0 +1,229 @@ +/* NSC -- new Scala compiler + * Copyright 2007-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc +package doc + +import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch } +import scala.reflect.internal.Chars._ +import symtab._ +import reporters.Reporter +import typechecker.Analyzer +import scala.reflect.internal.util.{ BatchSourceFile, RangePosition } + +trait ScaladocAnalyzer extends Analyzer { + val global : Global // generally, a ScaladocGlobal + import global._ + + override def newTyper(context: Context): ScaladocTyper = new Typer(context) with ScaladocTyper + + trait ScaladocTyper extends Typer { + private def unit = context.unit + + override def canAdaptConstantTypeToLiteral = false + + override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = { + val sym = docDef.symbol + + if ((sym ne null) && (sym ne NoSymbol)) { + val comment = docDef.comment + docComments(sym) = comment + comment.defineVariables(sym) + val typer1 = newTyper(context.makeNewScope(docDef, context.owner)) + for (useCase <- comment.useCases) { + typer1.silent(_ => typer1 defineUseCases useCase) match { + case SilentTypeError(err) => + unit.warning(useCase.pos, err.errMsg) + case _ => + } + for (useCaseSym <- useCase.defined) { + if (sym.name != useCaseSym.name) + unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode) + } + } + } + + super.typedDocDef(docDef, mode, pt) + } + + def defineUseCases(useCase: UseCase): List[Symbol] = { + def stringParser(str: String): syntaxAnalyzer.Parser = { + val file = new BatchSourceFile(context.unit.source.file, str) { + override def positionInUltimateSource(pos: Position) = { + pos.withSource(context.unit.source, useCase.pos.start) + } + } + newUnitParser(new CompilationUnit(file)) + } + + val trees = stringParser(useCase.body+";").nonLocalDefOrDcl + val enclClass = context.enclClass.owner + + def defineAlias(name: Name) = ( + if (context.scope.lookup(name) == NoSymbol) { + lookupVariable(name.toString.substring(1), enclClass) foreach { repl => + silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt => + val alias = enclClass.newAliasType(name.toTypeName, useCase.pos) + val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias) + val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe))) + alias setInfo newInfo + context.scope.enter(alias) + } + } + } + ) + + for (tree <- trees; t <- tree) + t match { + case Ident(name) if name startsWith '$' => defineAlias(name) + case _ => + } + + useCase.aliases = context.scope.toList + namer.enterSyms(trees) + typedStats(trees, NoSymbol) + useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) + + if (settings.debug.value) + useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) + + useCase.defined + } + } +} + +abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends SyntaxAnalyzer { + import global._ + + class ScaladocJavaUnitParser(unit: CompilationUnit) extends { + override val in = new ScaladocJavaUnitScanner(unit) + } with JavaUnitParser(unit) { } + + class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) { + /** buffer for the 
documentation comment + */ + var docBuffer: StringBuilder = null + + /** add the given character to the documentation buffer + */ + protected def putDocChar(c: Char) { + if (docBuffer ne null) docBuffer.append(c) + } + + override protected def skipComment(): Boolean = { + if (in.ch == '/') { + do { + in.next + } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU)) + true + } else if (in.ch == '*') { + docBuffer = null + in.next + val scalaDoc = ("/**", "*/") + if (in.ch == '*') + docBuffer = new StringBuilder(scalaDoc._1) + do { + do { + if (in.ch != '*' && in.ch != SU) { + in.next; putDocChar(in.ch) + } + } while (in.ch != '*' && in.ch != SU) + while (in.ch == '*') { + in.next; putDocChar(in.ch) + } + } while (in.ch != '/' && in.ch != SU) + if (in.ch == '/') in.next + else incompleteInputError("unclosed comment") + true + } else { + false + } + } + } + + class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) { + + private var docBuffer: StringBuilder = null // buffer for comments + private var docPos: Position = NoPosition // last doc comment position + private var inDocComment = false + + override def discardDocBuffer() = { + val doc = flushDoc + if (doc ne null) + unit.warning(docPos, "discarding unmoored doc comment") + } + + override def flushDoc(): DocComment = { + if (docBuffer eq null) null + else try DocComment(docBuffer.toString, docPos) finally docBuffer = null + } + + override protected def putCommentChar() { + if (inDocComment) + docBuffer append ch + + nextChar() + } + override def skipDocComment(): Unit = { + inDocComment = true + docBuffer = new StringBuilder("/**") + super.skipDocComment() + } + override def skipBlockComment(): Unit = { + inDocComment = false + docBuffer = new StringBuilder("/*") + super.skipBlockComment() + } + override def skipComment(): Boolean = { + super.skipComment() && { + if (docBuffer ne null) { + if (inDocComment) + foundDocComment(docBuffer.toString, offset, charOffset - 2) + else + try foundComment(docBuffer.toString, offset, charOffset - 2) finally docBuffer = null + } + true + } + } + def foundComment(value: String, start: Int, end: Int) { + val pos = new RangePosition(unit.source, start, start, end) + unit.comment(pos, value) + } + def foundDocComment(value: String, start: Int, end: Int) { + docPos = new RangePosition(unit.source, start, start, end) + unit.comment(docPos, value) + } + } + class ScaladocUnitParser(unit: CompilationUnit, patches: List[BracePatch]) extends UnitParser(unit, patches) { + override def newScanner() = new ScaladocUnitScanner(unit, patches) + override def withPatches(patches: List[BracePatch]) = new ScaladocUnitParser(unit, patches) + + override def joinComment(trees: => List[Tree]): List[Tree] = { + val doc = in.flushDoc + if ((doc ne null) && doc.raw.length > 0) { + log(s"joinComment(doc=$doc)") + val joined = trees map { + t => + DocDef(doc, t) setPos { + if (t.pos.isDefined) { + val pos = doc.pos.withEnd(t.pos.endOrPoint) + // always make the position transparent + pos.makeTransparent + } else { + t.pos + } + } + } + joined.find(_.pos.isOpaqueRange) foreach { + main => + val mains = List(main) + joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) } + } + joined + } + else trees + } + } +} diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala index 021e59a879..20f24dc753 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala +++ 
b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala @@ -6,93 +6,36 @@ package scala.tools.nsc package doc -import scala.util.control.ControlThrowable +import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch } +import scala.reflect.internal.Chars._ +import symtab._ import reporters.Reporter import typechecker.Analyzer -import scala.reflect.internal.util.BatchSourceFile +import scala.reflect.internal.util.{ BatchSourceFile, RangePosition } -trait ScaladocAnalyzer extends Analyzer { - val global : Global // generally, a ScaladocGlobal - import global._ +trait ScaladocGlobalTrait extends Global { + outer => - override def newTyper(context: Context): ScaladocTyper = new ScaladocTyper(context) - - class ScaladocTyper(context0: Context) extends Typer(context0) { - private def unit = context.unit - - override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = { - val sym = docDef.symbol - - if ((sym ne null) && (sym ne NoSymbol)) { - val comment = docDef.comment - fillDocComment(sym, comment) - val typer1 = newTyper(context.makeNewScope(docDef, context.owner)) - for (useCase <- comment.useCases) { - typer1.silent(_ => typer1 defineUseCases useCase) match { - case SilentTypeError(err) => - unit.warning(useCase.pos, err.errMsg) - case _ => - } - for (useCaseSym <- useCase.defined) { - if (sym.name != useCaseSym.name) - unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode) - } - } - } - - super.typedDocDef(docDef, mode, pt) - } - - def defineUseCases(useCase: UseCase): List[Symbol] = { - def stringParser(str: String): syntaxAnalyzer.Parser = { - val file = new BatchSourceFile(context.unit.source.file, str) { - override def positionInUltimateSource(pos: Position) = { - pos.withSource(context.unit.source, useCase.pos.start) - } - } - val unit = new CompilationUnit(file) - new syntaxAnalyzer.UnitParser(unit) - } - - val trees = stringParser(useCase.body+";").nonLocalDefOrDcl - val enclClass = context.enclClass.owner - - def defineAlias(name: Name) = ( - if (context.scope.lookup(name) == NoSymbol) { - lookupVariable(name.toString.substring(1), enclClass) foreach { repl => - silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt => - val alias = enclClass.newAliasType(name.toTypeName, useCase.pos) - val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias) - val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe))) - alias setInfo newInfo - context.scope.enter(alias) - } - } - } - ) - - for (tree <- trees; t <- tree) - t match { - case Ident(name) if name startsWith '$' => defineAlias(name) - case _ => - } - - useCase.aliases = context.scope.toList - namer.enterSyms(trees) - typedStats(trees, NoSymbol) - useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) - - if (settings.debug.value) - useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) + override val useOffsetPositions = false + override def newUnitParser(unit: CompilationUnit) = new syntaxAnalyzer.ScaladocUnitParser(unit, Nil) - useCase.defined + override lazy val syntaxAnalyzer = new ScaladocSyntaxAnalyzer[outer.type](outer) { + val runsAfter = List[String]() + val runsRightAfter = None + } + override lazy val loaders = new SymbolLoaders { + val global: outer.type = outer + + // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented + // therefore, it will rummage through the classpath triggering errors 
whenever it encounters package objects + // that are not in their correct place (see bug for details) + override protected def signalError(root: Symbol, ex: Throwable) { + log(s"Suppressing error involving $root: $ex") } } } -class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends { - override val useOffsetPositions = false -} with Global(settings, reporter) { +class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends Global(settings, reporter) with ScaladocGlobalTrait { override protected def computeInternalPhases() { phasesSet += syntaxAnalyzer phasesSet += analyzer.namerFactory diff --git a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala index 3db9f18484..f0a9caac15 100644 --- a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala +++ b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala @@ -5,9 +5,10 @@ package scala.tools.partest +import scala.tools.nsc import scala.tools.nsc._ import scala.tools.nsc.util.CommandLineParser -import scala.tools.nsc.doc.{Settings, DocFactory, Universe} +import scala.tools.nsc.doc.{ DocFactory, Universe } import scala.tools.nsc.doc.model._ import scala.tools.nsc.doc.model.diagram._ import scala.tools.nsc.doc.base.comment._ @@ -78,11 +79,11 @@ abstract class ScaladocModelTest extends DirectTest { System.setErr(prevErr) } - private[this] var settings: Settings = null + private[this] var settings: doc.Settings = null // create a new scaladoc compiler private[this] def newDocFactory: DocFactory = { - settings = new Settings(_ => ()) + settings = new doc.Settings(_ => ()) settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! val args = extraSettings + " " + scaladocSettings new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think @@ -96,7 +97,6 @@ abstract class ScaladocModelTest extends DirectTest { // so we don't get the newSettings warning override def isDebug = false - // finally, enable easy navigation inside the entities object access { diff --git a/test/files/presentation/doc.check b/test/files/presentation/doc.check deleted file mode 100644 index 5a3ff13151..0000000000 --- a/test/files/presentation/doc.check +++ /dev/null @@ -1 +0,0 @@ -reload: Base.scala, Class.scala, Derived.scala diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala deleted file mode 100755 index d198f4c324..0000000000 --- a/test/files/presentation/doc/doc.scala +++ /dev/null @@ -1,145 +0,0 @@ -import scala.tools.nsc.doc -import scala.tools.nsc.doc.base._ -import scala.tools.nsc.doc.base.comment._ -import scala.tools.nsc.interactive._ -import scala.tools.nsc.interactive.tests._ -import scala.tools.nsc.util._ - -object Test extends InteractiveTest { - val tags = Seq( - "@example `\"abb\".permutations = Iterator(abb, bab, bba)`", - "@version 1.0, 09/07/2012", - "@since 2.10", - "@todo this is unsafe!", - "@note Don't inherit!", - "@see something else" - ) - - val names = Seq("Class", "Def", "Val", "Var", "AbstracType", "TypeAlias", "Trait", "InnerClass") - val bareText = - """abstract class %s { - | def %s = "" - | val %s = "" - | var %s: String = _ - | type %s - | type %s = String - | class %s - |} - |trait %s""".stripMargin.format(names: _*) - - def docComment(nTags: Int) = "/**\n%s*/".format(tags.take(nTags).mkString("\n")) - - def text(name: String, nTags: Int) = { - val nameIndex = bareText.indexOf(name) - val (pre, post) = bareText.splitAt(nameIndex) - val 
crIndex = pre.lastIndexOf("\n") - val (prepre, prepost) = pre.splitAt(crIndex) - prepre + docComment(nTags) + prepost + post - } - - - - override lazy val compiler = { - prepareSettings(settings) - new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase { - outer => - val global: this.type = this - - override lazy val analyzer = new { - val global: outer.type = outer - } with doc.ScaladocAnalyzer - - def chooseLink(links: List[LinkTo]): LinkTo = links.head - def internalLink(sym: Symbol, site: Symbol) = None - def toString(link: LinkTo) = link.toString - def warnNoLink = false - def findExternalLink(sym: Symbol, name: String) = None - - override def forScaladoc = true - - def getComment(sym: Symbol, source: SourceFile, fragments: List[(Symbol,SourceFile)]): Option[Comment] = { - val docResponse = new Response[(String, String, Position)] - askDocComment(sym, source, sym.owner, fragments, docResponse) - docResponse.get.left.toOption flatMap { - case (expanded, raw, pos) => - if (expanded.isEmpty) - None - else - Some(ask { () => parseAtSymbol(expanded, raw, pos, Some(sym.owner)) }) - } - } - } - } - - override def runDefaultTests() { - import compiler._ - def findSource(name: String) = sourceFiles.find(_.file.name == name).get - - val className = names.head - for (name <- names; - i <- 1 to tags.length) { - val newText = text(name, i) - val source = findSource("Class.scala") - val batch = new BatchSourceFile(source.file, newText.toCharArray) - val reloadResponse = new Response[Unit] - compiler.askReload(List(batch), reloadResponse) - reloadResponse.get.left.toOption match { - case None => - println("Couldn't reload") - case Some(_) => - val parseResponse = new Response[Tree] - askParsedEntered(batch, true, parseResponse) - parseResponse.get.left.toOption match { - case None => - println("Couldn't parse") - case Some(_) => - val sym = compiler.ask { () => - val toplevel = definitions.EmptyPackage.info.decl(newTypeName(name)) - if (toplevel eq NoSymbol) { - val clazz = definitions.EmptyPackage.info.decl(newTypeName(className)) - - val term = clazz.info.decl(newTermName(name)) - if (term eq NoSymbol) clazz.info.decl(newTypeName(name)) else - if (term.isAccessor) term.accessed else term - } else toplevel - } - - getComment(sym, batch, (sym,batch)::Nil) match { - case None => println(s"Got no doc comment for $name") - case Some(comment) => - import comment._ - def cnt(bodies: Iterable[Body]) = bodies.size - val actual = cnt(example) + cnt(version) + cnt(since) + cnt(todo) + cnt(note) + cnt(see) - if (actual != i) - println(s"Got docComment with $actual tags instead of $i, file text:\n$newText") - } - } - } - } - - // Check inter-classes documentation one-time retrieved ok. 
- val baseSource = findSource("Base.scala") - val derivedSource = findSource("Derived.scala") - def existsText(where: Any, text: String): Boolean = where match { - case `text` => true - case s: Seq[_] => s exists (existsText(_, text)) - case p: Product => p.productIterator exists (existsText(_, text)) - } - val (derived, base) = compiler.ask { () => - val derived = definitions.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived")) - (derived, derived.ancestors(0)) - } - val cmt1 = getComment(derived, derivedSource, (base, baseSource)::(derived, derivedSource)::Nil) - if (!existsText(cmt1, "Derived comment.")) - println("Unexpected Derived class comment:"+cmt1) - - val (fooDerived, fooBase) = compiler.ask { () => - val decl = derived.tpe.decl(newTermName("foo")) - (decl, decl.allOverriddenSymbols(0)) - } - - val cmt2 = getComment(fooDerived, derivedSource, (fooBase, baseSource)::(fooDerived, derivedSource)::Nil) - if (!existsText(cmt2, "Base method has documentation.")) - println("Unexpected foo method comment:"+cmt2) - } -} diff --git a/test/files/presentation/doc/src/Class.scala b/test/files/presentation/doc/src/Class.scala deleted file mode 100755 index a974bd6f5c..0000000000 --- a/test/files/presentation/doc/src/Class.scala +++ /dev/null @@ -1 +0,0 @@ -object Class \ No newline at end of file diff --git a/test/files/presentation/doc/src/p/Base.scala b/test/files/presentation/doc/src/p/Base.scala deleted file mode 100755 index 9031de3e3e..0000000000 --- a/test/files/presentation/doc/src/p/Base.scala +++ /dev/null @@ -1,11 +0,0 @@ -package p - -/** - * @define BaseComment $BaseVar comment. - */ -trait Base { - /** - * Base method has documentation. - */ - def foo: String -} diff --git a/test/files/presentation/doc/src/p/Derived.scala b/test/files/presentation/doc/src/p/Derived.scala deleted file mode 100755 index 1a9c9a26d1..0000000000 --- a/test/files/presentation/doc/src/p/Derived.scala +++ /dev/null @@ -1,9 +0,0 @@ -package p - -/** - * $BaseComment - * @define BaseVar Derived - */ -class Derived extends Base { - def foo = "" -} diff --git a/test/pending/presentation/doc.check b/test/pending/presentation/doc.check new file mode 100644 index 0000000000..5a3ff13151 --- /dev/null +++ b/test/pending/presentation/doc.check @@ -0,0 +1 @@ +reload: Base.scala, Class.scala, Derived.scala diff --git a/test/pending/presentation/doc/doc.scala b/test/pending/presentation/doc/doc.scala new file mode 100755 index 0000000000..d198f4c324 --- /dev/null +++ b/test/pending/presentation/doc/doc.scala @@ -0,0 +1,145 @@ +import scala.tools.nsc.doc +import scala.tools.nsc.doc.base._ +import scala.tools.nsc.doc.base.comment._ +import scala.tools.nsc.interactive._ +import scala.tools.nsc.interactive.tests._ +import scala.tools.nsc.util._ + +object Test extends InteractiveTest { + val tags = Seq( + "@example `\"abb\".permutations = Iterator(abb, bab, bba)`", + "@version 1.0, 09/07/2012", + "@since 2.10", + "@todo this is unsafe!", + "@note Don't inherit!", + "@see something else" + ) + + val names = Seq("Class", "Def", "Val", "Var", "AbstracType", "TypeAlias", "Trait", "InnerClass") + val bareText = + """abstract class %s { + | def %s = "" + | val %s = "" + | var %s: String = _ + | type %s + | type %s = String + | class %s + |} + |trait %s""".stripMargin.format(names: _*) + + def docComment(nTags: Int) = "/**\n%s*/".format(tags.take(nTags).mkString("\n")) + + def text(name: String, nTags: Int) = { + val nameIndex = bareText.indexOf(name) + val (pre, post) = bareText.splitAt(nameIndex) + val 
crIndex = pre.lastIndexOf("\n") + val (prepre, prepost) = pre.splitAt(crIndex) + prepre + docComment(nTags) + prepost + post + } + + + + override lazy val compiler = { + prepareSettings(settings) + new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase { + outer => + val global: this.type = this + + override lazy val analyzer = new { + val global: outer.type = outer + } with doc.ScaladocAnalyzer + + def chooseLink(links: List[LinkTo]): LinkTo = links.head + def internalLink(sym: Symbol, site: Symbol) = None + def toString(link: LinkTo) = link.toString + def warnNoLink = false + def findExternalLink(sym: Symbol, name: String) = None + + override def forScaladoc = true + + def getComment(sym: Symbol, source: SourceFile, fragments: List[(Symbol,SourceFile)]): Option[Comment] = { + val docResponse = new Response[(String, String, Position)] + askDocComment(sym, source, sym.owner, fragments, docResponse) + docResponse.get.left.toOption flatMap { + case (expanded, raw, pos) => + if (expanded.isEmpty) + None + else + Some(ask { () => parseAtSymbol(expanded, raw, pos, Some(sym.owner)) }) + } + } + } + } + + override def runDefaultTests() { + import compiler._ + def findSource(name: String) = sourceFiles.find(_.file.name == name).get + + val className = names.head + for (name <- names; + i <- 1 to tags.length) { + val newText = text(name, i) + val source = findSource("Class.scala") + val batch = new BatchSourceFile(source.file, newText.toCharArray) + val reloadResponse = new Response[Unit] + compiler.askReload(List(batch), reloadResponse) + reloadResponse.get.left.toOption match { + case None => + println("Couldn't reload") + case Some(_) => + val parseResponse = new Response[Tree] + askParsedEntered(batch, true, parseResponse) + parseResponse.get.left.toOption match { + case None => + println("Couldn't parse") + case Some(_) => + val sym = compiler.ask { () => + val toplevel = definitions.EmptyPackage.info.decl(newTypeName(name)) + if (toplevel eq NoSymbol) { + val clazz = definitions.EmptyPackage.info.decl(newTypeName(className)) + + val term = clazz.info.decl(newTermName(name)) + if (term eq NoSymbol) clazz.info.decl(newTypeName(name)) else + if (term.isAccessor) term.accessed else term + } else toplevel + } + + getComment(sym, batch, (sym,batch)::Nil) match { + case None => println(s"Got no doc comment for $name") + case Some(comment) => + import comment._ + def cnt(bodies: Iterable[Body]) = bodies.size + val actual = cnt(example) + cnt(version) + cnt(since) + cnt(todo) + cnt(note) + cnt(see) + if (actual != i) + println(s"Got docComment with $actual tags instead of $i, file text:\n$newText") + } + } + } + } + + // Check inter-classes documentation one-time retrieved ok. 
+ val baseSource = findSource("Base.scala") + val derivedSource = findSource("Derived.scala") + def existsText(where: Any, text: String): Boolean = where match { + case `text` => true + case s: Seq[_] => s exists (existsText(_, text)) + case p: Product => p.productIterator exists (existsText(_, text)) + } + val (derived, base) = compiler.ask { () => + val derived = definitions.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived")) + (derived, derived.ancestors(0)) + } + val cmt1 = getComment(derived, derivedSource, (base, baseSource)::(derived, derivedSource)::Nil) + if (!existsText(cmt1, "Derived comment.")) + println("Unexpected Derived class comment:"+cmt1) + + val (fooDerived, fooBase) = compiler.ask { () => + val decl = derived.tpe.decl(newTermName("foo")) + (decl, decl.allOverriddenSymbols(0)) + } + + val cmt2 = getComment(fooDerived, derivedSource, (fooBase, baseSource)::(fooDerived, derivedSource)::Nil) + if (!existsText(cmt2, "Base method has documentation.")) + println("Unexpected foo method comment:"+cmt2) + } +} diff --git a/test/pending/presentation/doc/src/Class.scala b/test/pending/presentation/doc/src/Class.scala new file mode 100755 index 0000000000..a974bd6f5c --- /dev/null +++ b/test/pending/presentation/doc/src/Class.scala @@ -0,0 +1 @@ +object Class \ No newline at end of file diff --git a/test/pending/presentation/doc/src/p/Base.scala b/test/pending/presentation/doc/src/p/Base.scala new file mode 100755 index 0000000000..9031de3e3e --- /dev/null +++ b/test/pending/presentation/doc/src/p/Base.scala @@ -0,0 +1,11 @@ +package p + +/** + * @define BaseComment $BaseVar comment. + */ +trait Base { + /** + * Base method has documentation. + */ + def foo: String +} diff --git a/test/pending/presentation/doc/src/p/Derived.scala b/test/pending/presentation/doc/src/p/Derived.scala new file mode 100755 index 0000000000..1a9c9a26d1 --- /dev/null +++ b/test/pending/presentation/doc/src/p/Derived.scala @@ -0,0 +1,9 @@ +package p + +/** + * $BaseComment + * @define BaseVar Derived + */ +class Derived extends Base { + def foo = "" +} diff --git a/test/scaladoc/run/t5527.check b/test/scaladoc/run/t5527.check index 1518168c51..ab2aeb2d67 100644 --- a/test/scaladoc/run/t5527.check +++ b/test/scaladoc/run/t5527.check @@ -1,3 +1,12 @@ +newSource1:17: warning: discarding unmoored doc comment + /** Testing 123 */ + ^ +newSource1:27: warning: discarding unmoored doc comment + /** Calculate this result. */ + ^ +newSource1:34: warning: discarding unmoored doc comment + /** Another digit is a giveaway. */ + ^ [[syntax trees at end of parser]] // newSource1 package { object UselessComments extends scala.AnyRef { -- cgit v1.2.3 From e01c7eff032150f8460a76700542c214847ba115 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 6 Mar 2013 09:20:18 -0800 Subject: Moved interactive code into src/interactive. As with scaladoc, pushes presentation compiler specific code into its separate source area. 
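The mechanism shared by the scaladoc split above and the interactive split below is the same: instead of testing a flag such as forScaladoc or forInteractive inside the type checker, each tool ships its own Global that overrides lazy val analyzer (and, where needed, the parser/scanner factories) with a refinement of Analyzer that overrides only the hooks it cares about — newTyper, newNamer, typedDocDef, canAdaptConstantTypeToLiteral, and so on. A minimal sketch of that wiring follows; the names MyToolGlobal and MyToolAnalyzer are hypothetical and used only for illustration, they are not part of these patches.

    import scala.tools.nsc.{ Global, Settings }
    import scala.tools.nsc.reporters.Reporter
    import scala.tools.nsc.typechecker.Analyzer

    // Hypothetical sketch: tool-specific typer behaviour is mixed into a
    // dedicated Global through the analyzer cake, rather than queried via a
    // boolean on SymbolTable.
    class MyToolGlobal(settings: Settings, reporter: Reporter)
        extends Global(settings, reporter) {

      trait MyToolAnalyzer extends Analyzer {
        override def newTyper(context: Context): Typer =
          new Typer(context) {
            // the same switch the scaladoc and interactive typers flip
            override def canAdaptConstantTypeToLiteral = false
          }
      }

      // same early-initializer pattern used by the interactive Global and
      // by ScaladocGlobalTrait in the diffs around this commit
      override lazy val analyzer = new {
        val global: MyToolGlobal.this.type = MyToolGlobal.this
      } with MyToolAnalyzer
    }

ScaladocGlobal wires in ScaladocAnalyzer this way via ScaladocGlobalTrait, and the interactive Global in the diff below does the equivalent with InteractiveAnalyzer.
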
--- .../scala/tools/nsc/interactive/Global.scala | 85 +++++++++++++++++++++- .../scala/tools/nsc/interactive/Main.scala | 34 +++++++++ 2 files changed, 116 insertions(+), 3 deletions(-) create mode 100644 src/interactive/scala/tools/nsc/interactive/Main.scala (limited to 'src') diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 33b10d1a9a..6abbd1b3ba 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -14,11 +14,69 @@ import scala.tools.nsc.util.MultiHashMap import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition } import scala.tools.nsc.reporters._ import scala.tools.nsc.symtab._ -import scala.tools.nsc.typechecker.DivergentImplicit +import scala.tools.nsc.typechecker.{ Analyzer, DivergentImplicit } import symtab.Flags.{ACCESSOR, PARAMACCESSOR} -import scala.annotation.elidable +import scala.annotation.{ elidable, tailrec } import scala.language.implicitConversions +trait InteractiveAnalyzer extends Analyzer { + val global : Global + import global._ + + override def newTyper(context: Context): InteractiveTyper = new Typer(context) with InteractiveTyper + override def newNamer(context: Context): InteractiveNamer = new Namer(context) with InteractiveNamer + override protected def newPatternMatching = false + + trait InteractiveTyper extends Typer { + override def canAdaptConstantTypeToLiteral = false + override def canTranslateEmptyListToNil = false + override def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree match { + case Select(_, _) => treeCopy.Select(tree, qual, name) + case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name) + } + } + + trait InteractiveNamer extends Namer { + override def saveDefaultGetter(meth: Symbol, default: Symbol) { + // save the default getters as attachments in the method symbol. if compiling the + // same local block several times (which can happen in interactive mode) we might + // otherwise not find the default symbol, because the second time it the method + // symbol will be re-entered in the scope but the default parameter will not. + meth.attachments.get[DefaultsOfLocalMethodAttachment] match { + case Some(att) => att.defaultGetters += default + case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default)) + } + } + // this logic is needed in case typer was interrupted half + // way through and then comes back to do the tree again. In + // that case the definitions that were already attributed as + // well as any default parameters of such methods need to be + // re-entered in the current scope. 
+ override def enterExistingSym(sym: Symbol): Context = { + if (sym != null && sym.owner.isTerm) { + enterIfNotThere(sym) + if (sym.isLazy) + sym.lazyAccessor andAlso enterIfNotThere + + for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment]) + defAtt.defaultGetters foreach enterIfNotThere + } + super.enterExistingSym(sym) + } + override def enterIfNotThere(sym: Symbol) { + val scope = context.scope + @tailrec def search(e: ScopeEntry) { + if ((e eq null) || (e.owner ne scope)) + scope enter sym + else if (e.sym ne sym) // otherwise, aborts since we found sym + search(e.tail) + } + search(scope lookupEntry sym.name) + } + } +} + + /** The main class of the presentation compiler in an interactive environment such as an IDE */ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends { @@ -68,8 +126,25 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") @inline final def informIDE(msg: => String) = if (verboseIDE) println("[%s][%s]".format(projectName, msg)) + // don't keep the original owner in presentation compiler runs + // (the map will grow indefinitely, and the only use case is the + // backend). + override protected def saveOriginalOwner(sym: Symbol) { } + override def forInteractive = true + override def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap = + new InteractiveAsSeenFromMap(pre, clazz) + + class InteractiveAsSeenFromMap(pre: Type, clazz: Symbol) extends AsSeenFromMap(pre, clazz) { + /** The method formerly known as 'instParamsRelaxed' goes here if it's still necessary, + * which it is currently supposed it is not. + * + * If it is, change AsSeenFromMap method correspondingTypeArgument to call an overridable + * method rather than aborting in the failure case. + */ + } + /** A map of all loaded files to the rich compilation units that correspond to them. */ val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with @@ -127,6 +202,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } } + override lazy val analyzer = new { + val global: Global.this.type = Global.this + } with InteractiveAnalyzer + private def cleanAllResponses() { cleanResponses(waitLoadedTypeResponses) cleanResponses(getParsedEnteredResponses) @@ -281,7 +360,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") * top-level idents. Therefore, we can detect top-level symbols that have a name * different from their source file */ - override lazy val loaders = new BrowsingLoaders { + override lazy val loaders: SymbolLoaders { val global: Global.this.type } = new BrowsingLoaders { val global: Global.this.type = Global.this } diff --git a/src/interactive/scala/tools/nsc/interactive/Main.scala b/src/interactive/scala/tools/nsc/interactive/Main.scala new file mode 100644 index 0000000000..3b4a36f62d --- /dev/null +++ b/src/interactive/scala/tools/nsc/interactive/Main.scala @@ -0,0 +1,34 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools +package nsc +package interactive + +/** The main class for NSC, a compiler for the programming + * language Scala. 
+ */ +object Main extends nsc.MainClass { + override def processSettingsHook(): Boolean = { + if (this.settings.Yidedebug.value) { + this.settings.Xprintpos.value = true + this.settings.Yrangepos.value = true + val compiler = new interactive.Global(this.settings, this.reporter) + import compiler.{ reporter => _, _ } + + val sfs = command.files map getSourceFile + val reloaded = new interactive.Response[Unit] + askReload(sfs, reloaded) + + reloaded.get.right.toOption match { + case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0 + case None => reporter.reset() // Causes other compiler errors to be ignored + } + askShutdown + false + } + else true + } +} -- cgit v1.2.3 From 2352814d4be064d67794899cf5494d3324a131ec Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 6 Mar 2013 09:35:04 -0800 Subject: Eliminated all forInteractive/forScaladoc uses. This is the commit which brings it all together. The booleans forInteractive and forScaladoc are now deprecated and are not inspected for any purpose. All behavioral changes formerly accomplished via tests of those flags are embodied in the globals built specifically for those tasks. --- src/compiler/scala/tools/nsc/Global.scala | 2 - .../tools/nsc/typechecker/ContextErrors.scala | 18 +-- .../scala/tools/nsc/typechecker/Namers.scala | 45 +----- .../scala/tools/nsc/typechecker/Typers.scala | 24 ++-- .../scala/tools/nsc/interactive/Global.scala | 14 +- .../tests/core/PresentationCompilerInstance.scala | 11 +- src/reflect/scala/reflect/internal/Required.scala | 7 +- src/reflect/scala/reflect/internal/Symbols.scala | 25 ++-- .../scala/reflect/runtime/JavaUniverse.scala | 2 - .../scala/tools/nsc/doc/ScaladocAnalyzer.scala | 5 + test/files/neg/macro-basic-mamdmi.check | 3 +- test/files/neg/t5753.check | 3 +- test/files/presentation/doc.check | 1 + test/files/presentation/doc/doc.scala | 151 +++++++++++++++++++++ test/files/presentation/doc/src/Class.scala | 1 + test/files/presentation/doc/src/p/Base.scala | 11 ++ test/files/presentation/doc/src/p/Derived.scala | 9 ++ test/pending/presentation/doc.check | 1 - test/pending/presentation/doc/doc.scala | 145 -------------------- test/pending/presentation/doc/src/Class.scala | 1 - test/pending/presentation/doc/src/p/Base.scala | 11 -- test/pending/presentation/doc/src/p/Derived.scala | 9 -- 22 files changed, 237 insertions(+), 262 deletions(-) create mode 100644 test/files/presentation/doc.check create mode 100755 test/files/presentation/doc/doc.scala create mode 100755 test/files/presentation/doc/src/Class.scala create mode 100755 test/files/presentation/doc/src/p/Base.scala create mode 100755 test/files/presentation/doc/src/p/Derived.scala delete mode 100644 test/pending/presentation/doc.check delete mode 100755 test/pending/presentation/doc/doc.scala delete mode 100755 test/pending/presentation/doc/src/Class.scala delete mode 100755 test/pending/presentation/doc/src/p/Base.scala delete mode 100755 test/pending/presentation/doc/src/p/Derived.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index bc18b06e2a..c0f611daa7 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1696,8 +1696,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } }) } - def forInteractive = false - def forScaladoc = false def createJavadoc = false } diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala 
b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 580f024b40..0af75a2aad 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -683,7 +683,7 @@ trait ContextErrors { // same reason as for MacroBodyTypecheckException case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable - private def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = { + protected def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = { def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg macroLogLite("macro expansion has failed: %s".format(msgForLog)) if (msg != null) context.error(pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions @@ -772,15 +772,15 @@ trait ContextErrors { )) } - def MacroImplementationNotFoundError(expandee: Tree) = { - val message = - "macro implementation not found: " + expandee.symbol.name + " " + - "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" + - (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether." - else "") - macroExpansionError(expandee, message) - } + def MacroImplementationNotFoundError(expandee: Tree) = + macroExpansionError(expandee, macroImplementationNotFoundMessage(expandee.symbol.name)) } + + /** This file will be the death of me. */ + protected def macroImplementationNotFoundMessage(name: Name): String = ( + s"""|macro implementation not found: $name + |(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)""".stripMargin + ) } trait InferencerContextErrors { diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 777e96da82..007c7c6a83 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -50,19 +50,8 @@ trait Namers extends MethodSynthesis { def newNamerFor(context: Context, tree: Tree): Namer = newNamer(context.makeNewScope(tree, tree.symbol)) abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { thisNamer => - - def saveDefaultGetter(meth: Symbol, default: Symbol) { - if (forInteractive) { - // save the default getters as attachments in the method symbol. if compiling the - // same local block several times (which can happen in interactive mode) we might - // otherwise not find the default symbol, because the second time it the method - // symbol will be re-entered in the scope but the default parameter will not. 
- meth.attachments.get[DefaultsOfLocalMethodAttachment] match { - case Some(att) => att.defaultGetters += default - case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default)) - } - } - } + // overridden by the presentation compiler + def saveDefaultGetter(meth: Symbol, default: Symbol) { } import NamerErrorGen._ val typer = newTyper(context) @@ -606,17 +595,6 @@ trait Namers extends MethodSynthesis { } } - def enterIfNotThere(sym: Symbol) { - val scope = context.scope - @tailrec def search(e: ScopeEntry) { - if ((e eq null) || (e.owner ne scope)) - scope enter sym - else if (e.sym ne sym) // otherwise, aborts since we found sym - search(e.tail) - } - search(scope lookupEntry sym.name) - } - def enterValDef(tree: ValDef) { if (noEnterGetterSetter(tree)) assignAndEnterFinishedSymbol(tree) @@ -709,22 +687,9 @@ trait Namers extends MethodSynthesis { validateCompanionDefs(tree) } - // this logic is needed in case typer was interrupted half - // way through and then comes back to do the tree again. In - // that case the definitions that were already attributed as - // well as any default parameters of such methods need to be - // re-entered in the current scope. - protected def enterExistingSym(sym: Symbol): Context = { - if (forInteractive && sym != null && sym.owner.isTerm) { - enterIfNotThere(sym) - if (sym.isLazy) - sym.lazyAccessor andAlso enterIfNotThere - - for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment]) - defAtt.defaultGetters foreach enterIfNotThere - } - this.context - } + // Hooks which are overridden in the presentation compiler + def enterExistingSym(sym: Symbol): Context = this.context + def enterIfNotThere(sym: Symbol) { } def enterSyntheticSym(tree: Tree): Symbol = { enterSym(tree) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1a3c20c4b9..eaf57cd39c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -96,16 +96,16 @@ trait Typers extends Adaptations with Tags { // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope) // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933) - protected def newPatternMatching = !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id) + protected def newPatternMatching = true // presently overridden in the presentation compiler abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors { import context0.unit import typeDebug.{ ptTree, ptBlock, ptLine } import TyperErrorGen._ - /** (Will be) overridden to false in scaladoc and/or interactive. */ - def canAdaptConstantTypeToLiteral = !forScaladoc && !forInteractive - def canTranslateEmptyListToNil = !forInteractive + /** Overridden to false in scaladoc and/or interactive. 
*/ + def canAdaptConstantTypeToLiteral = true + def canTranslateEmptyListToNil = true def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = @@ -1041,7 +1041,7 @@ trait Typers extends Adaptations with Tags { tree.tpe match { case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1) adaptAnnotations(tree, this, mode, pt) - case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0) + case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && canAdaptConstantTypeToLiteral => // (0) val sym = tree.symbol if (sym != null && sym.isDeprecated) { val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("") @@ -2436,11 +2436,9 @@ trait Typers extends Adaptations with Tags { if (pat1.tpe.paramSectionCount > 0) pat1 setType pat1.tpe.finalResultType - if (forInteractive) { - for (bind @ Bind(name, _) <- cdef.pat) - if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol) - namer.enterIfNotThere(bind.symbol) - } + for (bind @ Bind(name, _) <- cdef.pat) + if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol) + namer.enterIfNotThere(bind.symbol) val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree else typed(cdef.guard, BooleanClass.tpe) @@ -4691,11 +4689,7 @@ trait Typers extends Adaptations with Tags { if (!reallyExists(sym)) { def handleMissing: Tree = { - def errorTree = tree match { - case _ if !forInteractive => tree - case Select(_, _) => treeCopy.Select(tree, qual, name) - case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name) - } + def errorTree = missingSelectErrorTree(tree, qual, name) def asTypeSelection = ( if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) { atPos(tree.pos)(gen.convertToSelectFromType(qual, name)) match { diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 6abbd1b3ba..099a882f10 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -14,11 +14,20 @@ import scala.tools.nsc.util.MultiHashMap import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition } import scala.tools.nsc.reporters._ import scala.tools.nsc.symtab._ +import scala.tools.nsc.doc.ScaladocAnalyzer import scala.tools.nsc.typechecker.{ Analyzer, DivergentImplicit } import symtab.Flags.{ACCESSOR, PARAMACCESSOR} import scala.annotation.{ elidable, tailrec } import scala.language.implicitConversions +trait InteractiveScaladocAnalyzer extends InteractiveAnalyzer with ScaladocAnalyzer { + val global : Global + import global._ + override def newTyper(context: Context) = new Typer(context) with InteractiveTyper with ScaladocTyper { + override def canAdaptConstantTypeToLiteral = false + } +} + trait InteractiveAnalyzer extends Analyzer { val global : Global import global._ @@ -127,9 +136,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") if (verboseIDE) println("[%s][%s]".format(projectName, msg)) // don't keep the original owner in presentation compiler runs - // (the map will grow indefinitely, and the only use case is the - // backend). 
+ // (the map will grow indefinitely, and the only use case is the backend) override protected def saveOriginalOwner(sym: Symbol) { } + override protected def originalEnclosingMethod(sym: Symbol) = + abort("originalOwner is not kept in presentation compiler runs.") override def forInteractive = true diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala index 5cda0e53fb..9a2abd5139 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala @@ -13,11 +13,16 @@ private[tests] trait PresentationCompilerInstance extends TestSettings { override def compiler = PresentationCompilerInstance.this.compiler } + private class ScaladocEnabledGlobal extends Global(settings, compilerReporter) { + override lazy val analyzer = new { + val global: ScaladocEnabledGlobal.this.type = ScaladocEnabledGlobal.this + } with InteractiveScaladocAnalyzer + } + protected lazy val compiler: Global = { prepareSettings(settings) - new Global(settings, compilerReporter) { - override def forScaladoc = withDocComments - } + if (withDocComments) new ScaladocEnabledGlobal + else new Global(settings, compilerReporter) } /** diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala index 842491d56d..93383f5376 100644 --- a/src/reflect/scala/reflect/internal/Required.scala +++ b/src/reflect/scala/reflect/internal/Required.scala @@ -4,12 +4,9 @@ package internal import settings.MutableSettings trait Required { self: SymbolTable => - def picklerPhase: Phase - def settings: MutableSettings - def forInteractive: Boolean - - def forScaladoc: Boolean + @deprecated("Interactive is implemented with a custom Global; this flag is ignored", "2.11.0") def forInteractive = false + @deprecated("Scaladoc is implemented with a custom Global; this flag is ignored", "2.11.0") def forScaladoc = false } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index f7a87d2700..6837f37445 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -74,12 +74,15 @@ trait Symbols extends api.Symbols { self: SymbolTable => // e.g. after flatten all classes are owned by package classes, there are lots and // lots of these to be declared (or more realistically, discovered.) protected def saveOriginalOwner(sym: Symbol) { - // don't keep the original owner in presentation compiler runs - // (the map will grow indefinitely, and the only use case is the - // backend). - if (!forInteractive) { - if (originalOwner contains sym) () - else originalOwner(sym) = sym.rawowner + if (originalOwner contains sym) () + else originalOwner(sym) = sym.rawowner + } + protected def originalEnclosingMethod(sym: Symbol): Symbol = { + if (sym.isMethod || sym == NoSymbol) sym + else { + val owner = originalOwner.getOrElse(sym, sym.rawowner) + if (sym.isLocalDummy) owner.enclClass.primaryConstructor + else originalEnclosingMethod(owner) } } @@ -1920,15 +1923,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * originalOwner map is not populated for memory considerations (the symbol * may hang on to lazy types and in turn to whole (outdated) compilation units. 
*/ - def originalEnclosingMethod: Symbol = { - assert(!forInteractive, "originalOwner is not kept in presentation compiler runs.") - if (isMethod) this - else { - val owner = originalOwner.getOrElse(this, rawowner) - if (isLocalDummy) owner.enclClass.primaryConstructor - else owner.originalEnclosingMethod - } - } + def originalEnclosingMethod: Symbol = Symbols.this.originalEnclosingMethod(this) /** The method or class which logically encloses the current symbol. * If the symbol is defined in the initialization part of a template diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index 5467d70cea..a130013398 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -11,8 +11,6 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S def inform(msg: String): Unit = log(msg) def picklerPhase = internal.SomePhase - def forInteractive = false - def forScaladoc = false lazy val settings = new Settings private val isLogging = sys.props contains "scala.debug.reflect" diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala index 37d95a9d95..5ad50445a8 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -24,6 +24,11 @@ trait ScaladocAnalyzer extends Analyzer { override def canAdaptConstantTypeToLiteral = false + override protected def macroImplementationNotFoundMessage(name: Name): String = ( + super.macroImplementationNotFoundMessage(name) + + "\nWhen generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether." 
+ ) + override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = { val sym = docDef.symbol diff --git a/test/files/neg/macro-basic-mamdmi.check b/test/files/neg/macro-basic-mamdmi.check index c7b58d70d2..621d318ceb 100644 --- a/test/files/neg/macro-basic-mamdmi.check +++ b/test/files/neg/macro-basic-mamdmi.check @@ -1,4 +1,5 @@ -Impls_Macros_Test_1.scala:36: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them) +Impls_Macros_Test_1.scala:36: error: macro implementation not found: foo +(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them) println(foo(2) + Macros.bar(2) * new Macros().quux(4)) ^ one error found diff --git a/test/files/neg/t5753.check b/test/files/neg/t5753.check index 76602de17d..379416c179 100644 --- a/test/files/neg/t5753.check +++ b/test/files/neg/t5753.check @@ -1,4 +1,5 @@ -Test_2.scala:9: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them) +Test_2.scala:9: error: macro implementation not found: foo +(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them) println(foo(42)) ^ one error found diff --git a/test/files/presentation/doc.check b/test/files/presentation/doc.check new file mode 100644 index 0000000000..5a3ff13151 --- /dev/null +++ b/test/files/presentation/doc.check @@ -0,0 +1 @@ +reload: Base.scala, Class.scala, Derived.scala diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala new file mode 100755 index 0000000000..7a2eb9a588 --- /dev/null +++ b/test/files/presentation/doc/doc.scala @@ -0,0 +1,151 @@ +import scala.tools.nsc.doc +import scala.tools.nsc.doc.base._ +import scala.tools.nsc.doc.base.comment._ +import scala.tools.nsc.interactive._ +import scala.tools.nsc.interactive.tests._ +import scala.tools.nsc.util._ + +object Test extends InteractiveTest { + val tags = Seq( + "@example `\"abb\".permutations = Iterator(abb, bab, bba)`", + "@version 1.0, 09/07/2012", + "@since 2.10", + "@todo this is unsafe!", + "@note Don't inherit!", + "@see something else" + ) + + val names = Seq("Class", "Def", "Val", "Var", "AbstracType", "TypeAlias", "Trait", "InnerClass") + val bareText = + """abstract class %s { + | def %s = "" + | val %s = "" + | var %s: String = _ + | type %s + | type %s = String + | class %s + |} + |trait %s""".stripMargin.format(names: _*) + + def docComment(nTags: Int) = "/**\n%s*/".format(tags.take(nTags).mkString("\n")) + + def text(name: String, nTags: Int) = { + val nameIndex = bareText.indexOf(name) + val (pre, post) = bareText.splitAt(nameIndex) + val crIndex = pre.lastIndexOf("\n") + val (prepre, prepost) = pre.splitAt(crIndex) + prepre + docComment(nTags) + prepost + post + } + + override lazy val compiler = { + prepareSettings(settings) + new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase with doc.ScaladocGlobalTrait { + outer => + + val global: this.type = this + + override lazy val analyzer = new { + val global: outer.type = outer + } with doc.ScaladocAnalyzer with InteractiveAnalyzer { + override def newTyper(context: Context): InteractiveTyper with ScaladocTyper = + new Typer(context) with InteractiveTyper with ScaladocTyper + } + + override lazy val loaders = new 
scala.tools.nsc.symtab.SymbolLoaders { + val global: outer.type = outer + } + + def chooseLink(links: List[LinkTo]): LinkTo = links.head + def internalLink(sym: Symbol, site: Symbol) = None + def toString(link: LinkTo) = link.toString + def warnNoLink = false + def findExternalLink(sym: Symbol, name: String) = None + + override def forScaladoc = true + + def getComment(sym: Symbol, source: SourceFile, fragments: List[(Symbol,SourceFile)]): Option[Comment] = { + val docResponse = new Response[(String, String, Position)] + askDocComment(sym, source, sym.owner, fragments, docResponse) + docResponse.get.left.toOption flatMap { + case (expanded, raw, pos) => + if (expanded.isEmpty) + None + else + Some(ask { () => parseAtSymbol(expanded, raw, pos, Some(sym.owner)) }) + } + } + } + } + + override def runDefaultTests() { + import compiler._ + def findSource(name: String) = sourceFiles.find(_.file.name == name).get + + val className = names.head + for (name <- names; + i <- 1 to tags.length) { + val newText = text(name, i) + val source = findSource("Class.scala") + val batch = new BatchSourceFile(source.file, newText.toCharArray) + val reloadResponse = new Response[Unit] + compiler.askReload(List(batch), reloadResponse) + reloadResponse.get.left.toOption match { + case None => + println("Couldn't reload") + case Some(_) => + val parseResponse = new Response[Tree] + askParsedEntered(batch, true, parseResponse) + parseResponse.get.left.toOption match { + case None => + println("Couldn't parse") + case Some(_) => + val sym = compiler.ask { () => + val toplevel = definitions.EmptyPackage.info.decl(newTypeName(name)) + if (toplevel eq NoSymbol) { + val clazz = definitions.EmptyPackage.info.decl(newTypeName(className)) + + val term = clazz.info.decl(newTermName(name)) + if (term eq NoSymbol) clazz.info.decl(newTypeName(name)) else + if (term.isAccessor) term.accessed else term + } else toplevel + } + + getComment(sym, batch, (sym,batch)::Nil) match { + case None => println(s"Got no doc comment for $name") + case Some(comment) => + import comment._ + def cnt(bodies: Iterable[Body]) = bodies.size + val actual = cnt(example) + cnt(version) + cnt(since) + cnt(todo) + cnt(note) + cnt(see) + if (actual != i) + println(s"Got docComment with $actual tags instead of $i, file text:\n$newText") + } + } + } + } + + // Check inter-classes documentation one-time retrieved ok. 
+ val baseSource = findSource("Base.scala") + val derivedSource = findSource("Derived.scala") + def existsText(where: Any, text: String): Boolean = where match { + case `text` => true + case s: Seq[_] => s exists (existsText(_, text)) + case p: Product => p.productIterator exists (existsText(_, text)) + } + val (derived, base) = compiler.ask { () => + val derived = definitions.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived")) + (derived, derived.ancestors(0)) + } + val cmt1 = getComment(derived, derivedSource, (base, baseSource)::(derived, derivedSource)::Nil) + if (!existsText(cmt1, "Derived comment.")) + println("Unexpected Derived class comment:"+cmt1) + + val (fooDerived, fooBase) = compiler.ask { () => + val decl = derived.tpe.decl(newTermName("foo")) + (decl, decl.allOverriddenSymbols(0)) + } + + val cmt2 = getComment(fooDerived, derivedSource, (fooBase, baseSource)::(fooDerived, derivedSource)::Nil) + if (!existsText(cmt2, "Base method has documentation.")) + println("Unexpected foo method comment:"+cmt2) + } +} diff --git a/test/files/presentation/doc/src/Class.scala b/test/files/presentation/doc/src/Class.scala new file mode 100755 index 0000000000..a974bd6f5c --- /dev/null +++ b/test/files/presentation/doc/src/Class.scala @@ -0,0 +1 @@ +object Class \ No newline at end of file diff --git a/test/files/presentation/doc/src/p/Base.scala b/test/files/presentation/doc/src/p/Base.scala new file mode 100755 index 0000000000..9031de3e3e --- /dev/null +++ b/test/files/presentation/doc/src/p/Base.scala @@ -0,0 +1,11 @@ +package p + +/** + * @define BaseComment $BaseVar comment. + */ +trait Base { + /** + * Base method has documentation. + */ + def foo: String +} diff --git a/test/files/presentation/doc/src/p/Derived.scala b/test/files/presentation/doc/src/p/Derived.scala new file mode 100755 index 0000000000..1a9c9a26d1 --- /dev/null +++ b/test/files/presentation/doc/src/p/Derived.scala @@ -0,0 +1,9 @@ +package p + +/** + * $BaseComment + * @define BaseVar Derived + */ +class Derived extends Base { + def foo = "" +} diff --git a/test/pending/presentation/doc.check b/test/pending/presentation/doc.check deleted file mode 100644 index 5a3ff13151..0000000000 --- a/test/pending/presentation/doc.check +++ /dev/null @@ -1 +0,0 @@ -reload: Base.scala, Class.scala, Derived.scala diff --git a/test/pending/presentation/doc/doc.scala b/test/pending/presentation/doc/doc.scala deleted file mode 100755 index d198f4c324..0000000000 --- a/test/pending/presentation/doc/doc.scala +++ /dev/null @@ -1,145 +0,0 @@ -import scala.tools.nsc.doc -import scala.tools.nsc.doc.base._ -import scala.tools.nsc.doc.base.comment._ -import scala.tools.nsc.interactive._ -import scala.tools.nsc.interactive.tests._ -import scala.tools.nsc.util._ - -object Test extends InteractiveTest { - val tags = Seq( - "@example `\"abb\".permutations = Iterator(abb, bab, bba)`", - "@version 1.0, 09/07/2012", - "@since 2.10", - "@todo this is unsafe!", - "@note Don't inherit!", - "@see something else" - ) - - val names = Seq("Class", "Def", "Val", "Var", "AbstracType", "TypeAlias", "Trait", "InnerClass") - val bareText = - """abstract class %s { - | def %s = "" - | val %s = "" - | var %s: String = _ - | type %s - | type %s = String - | class %s - |} - |trait %s""".stripMargin.format(names: _*) - - def docComment(nTags: Int) = "/**\n%s*/".format(tags.take(nTags).mkString("\n")) - - def text(name: String, nTags: Int) = { - val nameIndex = bareText.indexOf(name) - val (pre, post) = bareText.splitAt(nameIndex) - val 
crIndex = pre.lastIndexOf("\n") - val (prepre, prepost) = pre.splitAt(crIndex) - prepre + docComment(nTags) + prepost + post - } - - - - override lazy val compiler = { - prepareSettings(settings) - new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase { - outer => - val global: this.type = this - - override lazy val analyzer = new { - val global: outer.type = outer - } with doc.ScaladocAnalyzer - - def chooseLink(links: List[LinkTo]): LinkTo = links.head - def internalLink(sym: Symbol, site: Symbol) = None - def toString(link: LinkTo) = link.toString - def warnNoLink = false - def findExternalLink(sym: Symbol, name: String) = None - - override def forScaladoc = true - - def getComment(sym: Symbol, source: SourceFile, fragments: List[(Symbol,SourceFile)]): Option[Comment] = { - val docResponse = new Response[(String, String, Position)] - askDocComment(sym, source, sym.owner, fragments, docResponse) - docResponse.get.left.toOption flatMap { - case (expanded, raw, pos) => - if (expanded.isEmpty) - None - else - Some(ask { () => parseAtSymbol(expanded, raw, pos, Some(sym.owner)) }) - } - } - } - } - - override def runDefaultTests() { - import compiler._ - def findSource(name: String) = sourceFiles.find(_.file.name == name).get - - val className = names.head - for (name <- names; - i <- 1 to tags.length) { - val newText = text(name, i) - val source = findSource("Class.scala") - val batch = new BatchSourceFile(source.file, newText.toCharArray) - val reloadResponse = new Response[Unit] - compiler.askReload(List(batch), reloadResponse) - reloadResponse.get.left.toOption match { - case None => - println("Couldn't reload") - case Some(_) => - val parseResponse = new Response[Tree] - askParsedEntered(batch, true, parseResponse) - parseResponse.get.left.toOption match { - case None => - println("Couldn't parse") - case Some(_) => - val sym = compiler.ask { () => - val toplevel = definitions.EmptyPackage.info.decl(newTypeName(name)) - if (toplevel eq NoSymbol) { - val clazz = definitions.EmptyPackage.info.decl(newTypeName(className)) - - val term = clazz.info.decl(newTermName(name)) - if (term eq NoSymbol) clazz.info.decl(newTypeName(name)) else - if (term.isAccessor) term.accessed else term - } else toplevel - } - - getComment(sym, batch, (sym,batch)::Nil) match { - case None => println(s"Got no doc comment for $name") - case Some(comment) => - import comment._ - def cnt(bodies: Iterable[Body]) = bodies.size - val actual = cnt(example) + cnt(version) + cnt(since) + cnt(todo) + cnt(note) + cnt(see) - if (actual != i) - println(s"Got docComment with $actual tags instead of $i, file text:\n$newText") - } - } - } - } - - // Check inter-classes documentation one-time retrieved ok. 
- val baseSource = findSource("Base.scala") - val derivedSource = findSource("Derived.scala") - def existsText(where: Any, text: String): Boolean = where match { - case `text` => true - case s: Seq[_] => s exists (existsText(_, text)) - case p: Product => p.productIterator exists (existsText(_, text)) - } - val (derived, base) = compiler.ask { () => - val derived = definitions.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived")) - (derived, derived.ancestors(0)) - } - val cmt1 = getComment(derived, derivedSource, (base, baseSource)::(derived, derivedSource)::Nil) - if (!existsText(cmt1, "Derived comment.")) - println("Unexpected Derived class comment:"+cmt1) - - val (fooDerived, fooBase) = compiler.ask { () => - val decl = derived.tpe.decl(newTermName("foo")) - (decl, decl.allOverriddenSymbols(0)) - } - - val cmt2 = getComment(fooDerived, derivedSource, (fooBase, baseSource)::(fooDerived, derivedSource)::Nil) - if (!existsText(cmt2, "Base method has documentation.")) - println("Unexpected foo method comment:"+cmt2) - } -} diff --git a/test/pending/presentation/doc/src/Class.scala b/test/pending/presentation/doc/src/Class.scala deleted file mode 100755 index a974bd6f5c..0000000000 --- a/test/pending/presentation/doc/src/Class.scala +++ /dev/null @@ -1 +0,0 @@ -object Class \ No newline at end of file diff --git a/test/pending/presentation/doc/src/p/Base.scala b/test/pending/presentation/doc/src/p/Base.scala deleted file mode 100755 index 9031de3e3e..0000000000 --- a/test/pending/presentation/doc/src/p/Base.scala +++ /dev/null @@ -1,11 +0,0 @@ -package p - -/** - * @define BaseComment $BaseVar comment. - */ -trait Base { - /** - * Base method has documentation. - */ - def foo: String -} diff --git a/test/pending/presentation/doc/src/p/Derived.scala b/test/pending/presentation/doc/src/p/Derived.scala deleted file mode 100755 index 1a9c9a26d1..0000000000 --- a/test/pending/presentation/doc/src/p/Derived.scala +++ /dev/null @@ -1,9 +0,0 @@ -package p - -/** - * $BaseComment - * @define BaseVar Derived - */ -class Derived extends Base { - def foo = "" -} -- cgit v1.2.3 From 523eb3427e8ee1af8b06dd7452d170acdce294e6 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 6 Mar 2013 16:29:37 -0800 Subject: Deprecated custom ant task 'Same'. 
--- src/compiler/scala/tools/ant/Same.scala | 2 +- src/compiler/scala/tools/ant/antlib.xml | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala index a1f0cda662..6362d28580 100644 --- a/src/compiler/scala/tools/ant/Same.scala +++ b/src/compiler/scala/tools/ant/Same.scala @@ -32,7 +32,7 @@ import org.apache.tools.ant.types.Mapper * * @author Gilles Dubochet * @version 1.0 */ -class Same extends ScalaMatchingTask { +@deprecated("Use diff", "2.11.0") class Same extends ScalaMatchingTask { /*============================================================================*\ ** Ant user-properties ** \*============================================================================*/ diff --git a/src/compiler/scala/tools/ant/antlib.xml b/src/compiler/scala/tools/ant/antlib.xml index 78159e6d10..7885534689 100644 --- a/src/compiler/scala/tools/ant/antlib.xml +++ b/src/compiler/scala/tools/ant/antlib.xml @@ -11,8 +11,6 @@ classname="scala.tools.ant.Scaladoc"/> - -- cgit v1.2.3 From fdf25337ff0436c7991ea8ae3dc0ac79bf16d3b7 Mon Sep 17 00:00:00 2001 From: srinivasreddy Date: Sun, 10 Mar 2013 12:15:26 +0530 Subject: a typo corrected --- src/compiler/scala/tools/nsc/io/Lexer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala index b50b01aa27..7c6dbe2e60 100644 --- a/src/compiler/scala/tools/nsc/io/Lexer.scala +++ b/src/compiler/scala/tools/nsc/io/Lexer.scala @@ -48,7 +48,7 @@ object Lexer { /** The '`(`' token */ val LParen = new Delim('(') - /** The '`(`' token */ + /** The '`)`' token */ val RParen = new Delim(')') /** The '`{`' token */ -- cgit v1.2.3 From 1291da3c23614a932cc24c36ab4ef8c57e14df7d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 10 Mar 2013 22:17:50 +0100 Subject: IntellIiJ module definitions for scaladoc, interactive and continuations-*. 
--- src/intellij/continuations-library.iml.SAMPLE | 23 +++++++++++++++++++++++ src/intellij/continuations-plugin.iml.SAMPLE | 25 +++++++++++++++++++++++++ src/intellij/interactive.iml.SAMPLE | 25 +++++++++++++++++++++++++ src/intellij/scala-lang.ipr.SAMPLE | 4 ++++ src/intellij/scaladoc.iml.SAMPLE | 24 ++++++++++++++++++++++++ 5 files changed, 101 insertions(+) create mode 100644 src/intellij/continuations-library.iml.SAMPLE create mode 100644 src/intellij/continuations-plugin.iml.SAMPLE create mode 100644 src/intellij/interactive.iml.SAMPLE create mode 100644 src/intellij/scaladoc.iml.SAMPLE (limited to 'src') diff --git a/src/intellij/continuations-library.iml.SAMPLE b/src/intellij/continuations-library.iml.SAMPLE new file mode 100644 index 0000000000..364cc3dcdb --- /dev/null +++ b/src/intellij/continuations-library.iml.SAMPLE @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + diff --git a/src/intellij/continuations-plugin.iml.SAMPLE b/src/intellij/continuations-plugin.iml.SAMPLE new file mode 100644 index 0000000000..27213374b3 --- /dev/null +++ b/src/intellij/continuations-plugin.iml.SAMPLE @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/src/intellij/interactive.iml.SAMPLE b/src/intellij/interactive.iml.SAMPLE new file mode 100644 index 0000000000..c6c8ebb606 --- /dev/null +++ b/src/intellij/interactive.iml.SAMPLE @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE index a8cb5eacc1..e470e019c9 100644 --- a/src/intellij/scala-lang.ipr.SAMPLE +++ b/src/intellij/scala-lang.ipr.SAMPLE @@ -198,12 +198,16 @@ + + + + diff --git a/src/intellij/scaladoc.iml.SAMPLE b/src/intellij/scaladoc.iml.SAMPLE new file mode 100644 index 0000000000..6cc609919c --- /dev/null +++ b/src/intellij/scaladoc.iml.SAMPLE @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + -- cgit v1.2.3 From f691997c38d27e69f7d3e3aa2a39f63f1aa6c3d9 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 10 Mar 2013 22:22:10 +0100 Subject: Add eclipse projects for interactive, scaladoc. 
--- src/eclipse/interactive/.classpath | 10 ++++++++++ src/eclipse/interactive/.project | 35 +++++++++++++++++++++++++++++++++++ src/eclipse/scaladoc/.classpath | 9 +++++++++ src/eclipse/scaladoc/.project | 35 +++++++++++++++++++++++++++++++++++ 4 files changed, 89 insertions(+) create mode 100644 src/eclipse/interactive/.classpath create mode 100644 src/eclipse/interactive/.project create mode 100644 src/eclipse/scaladoc/.classpath create mode 100644 src/eclipse/scaladoc/.project (limited to 'src') diff --git a/src/eclipse/interactive/.classpath b/src/eclipse/interactive/.classpath new file mode 100644 index 0000000000..870cc67aec --- /dev/null +++ b/src/eclipse/interactive/.classpath @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/src/eclipse/interactive/.project b/src/eclipse/interactive/.project new file mode 100644 index 0000000000..1d30e0c001 --- /dev/null +++ b/src/eclipse/interactive/.project @@ -0,0 +1,35 @@ + + + interactive + + + + + + org.scala-ide.sdt.core.scalabuilder + + + + + + org.scala-ide.sdt.core.scalanature + org.eclipse.jdt.core.javanature + + + + build-quick-interactive + 2 + SCALA_BASEDIR/build/quick/classes/interactive + + + interactive + 2 + SCALA_BASEDIR/src/interactive + + + lib + 2 + SCALA_BASEDIR/lib + + + diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath new file mode 100644 index 0000000000..422b8fbb07 --- /dev/null +++ b/src/eclipse/scaladoc/.classpath @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/src/eclipse/scaladoc/.project b/src/eclipse/scaladoc/.project new file mode 100644 index 0000000000..bf7649039f --- /dev/null +++ b/src/eclipse/scaladoc/.project @@ -0,0 +1,35 @@ + + + scaladoc + + + + + + org.scala-ide.sdt.core.scalabuilder + + + + + + org.scala-ide.sdt.core.scalanature + org.eclipse.jdt.core.javanature + + + + build-quick-scaladoc + 2 + SCALA_BASEDIR/build/quick/classes/scaladoc + + + lib + 2 + SCALA_BASEDIR/lib + + + scaladoc + 2 + SCALA_BASEDIR/src/scaladoc + + + -- cgit v1.2.3 From 2fa2db784075dfb58cf507c45a948819ade8a6d4 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 10 Mar 2013 10:00:54 -0700 Subject: SI-7228, bug in weak subtyping. Another in the category of bugs which involve narrowing, widening, mediuming, dealiasing, weakening, normalizing, denormalizing, supernormalizing, subnormalizing, and double-bounded supersubnormalizing. This is probably not the ideal fix, but it is an improvement. 
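
The new pos/t7228.scala test below reduces the report to shapes like the following, where an implicit class over Double needs to apply to an Int-typed value declared through a type alias or a constant (the object and member names here are only illustrative):

    object Example {
      implicit class D(d: Double) { def double = d * 2 }
      type T = Int
      val x: T = 1
      def f = x.double   // needs Int to weakly conform to Double after dealiasing/widening
    }

The changes below make the numeric-widening and weak-subtyping checks look through aliases and constant types (dealiasWiden) so that this adapts as expected.
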
--- .../scala/tools/nsc/typechecker/Implicits.scala | 23 +++---- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 4 +- .../scala/reflect/internal/tpe/TypeComparers.scala | 6 +- test/files/pos/t7228.scala | 75 ++++++++++++++++++++++ 5 files changed, 91 insertions(+), 19 deletions(-) create mode 100644 test/files/pos/t7228.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 2331f82a58..29d4c8423b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -268,7 +268,7 @@ trait Implicits { */ object Function1 { val Sym = FunctionClass(1) - def unapply(tp: Type) = tp match { + def unapply(tp: Type) = tp baseType Sym match { case TypeRef(_, Sym, arg1 :: arg2 :: _) => Some((arg1, arg2)) case _ => None } @@ -431,10 +431,8 @@ trait Implicits { val start = if (Statistics.canEnable) Statistics.startTimer(matchesPtNanos) else null val result = normSubType(tp, pt) || isView && { pt match { - case TypeRef(_, Function1.Sym, arg1 :: arg2 :: Nil) => - matchesPtView(tp, arg1, arg2, undet) - case _ => - false + case Function1(arg1, arg2) => matchesPtView(tp, arg1, arg2, undet) + case _ => false } } if (Statistics.canEnable) Statistics.stopTimer(matchesPtNanos, start) @@ -576,20 +574,19 @@ trait Implicits { def fail(reason: String): SearchResult = failure(itree, reason) try { - val itree1 = - if (isView) { - val arg1 :: arg2 :: _ = pt.typeArgs + val itree1 = pt match { + case Function1(arg1, arg2) if isView => typed1( atPos(itree.pos)(Apply(itree, List(Ident("") setType approximate(arg1)))), EXPRmode, approximate(arg2) ) - } - else - typed1(itree, EXPRmode, wildPt) - - if (context.hasErrors) + case _ => typed1(itree, EXPRmode, wildPt) + } + if (context.hasErrors) { + log("implicit adapt failed: " + context.errBuffer.head.errMsg) return fail(context.errBuffer.head.errMsg) + } if (Statistics.canEnable) Statistics.incCounter(typedImplicits) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index eaf57cd39c..a110d6d15d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1133,7 +1133,7 @@ trait Typers extends Adaptations with Tags { return typedPos(tree.pos, mode, pt) { Block(List(tree), Literal(Constant())) } - } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) { + } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe.dealiasWiden, pt)) { if (settings.warnNumericWiden.value) context.unit.warning(tree.pos, "implicit numeric widening") return typedPos(tree.pos, mode, pt) { diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index a6c5367425..b59732e595 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1217,7 +1217,7 @@ trait Types protected def rewrap(newtp: Type): Type = NotNullType(newtp) override def isNotNull: Boolean = true override def notNull = this - override def deconst: Type = underlying //todo: needed? + override def deconst: Type = underlying.deconst //todo: needed? 
override def safeToString: String = underlying.toString + " with NotNull" override def kind = "NotNullType" } @@ -1989,7 +1989,7 @@ trait Types assert(underlying.typeSymbol != UnitClass) override def isTrivial: Boolean = true override def isNotNull = value.value != null - override def deconst: Type = underlying + override def deconst: Type = underlying.deconst override def safeToString: String = underlying.toString + "(" + value.escapedStringValue + ")" override def kind = "ConstantType" diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index 82321f61c2..2d499cf299 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -583,7 +583,7 @@ trait TypeComparers { def isWeakSubType(tp1: Type, tp2: Type) = - tp1.deconst.normalize match { + tp1.widen.normalize match { case TypeRef(_, sym1, _) if isNumericValueClass(sym1) => tp2.deconst.normalize match { case TypeRef(_, sym2, _) if isNumericValueClass(sym2) => @@ -609,8 +609,8 @@ trait TypeComparers { * (Even if the calls are to typeSymbolDirect.) */ def isNumericSubType(tp1: Type, tp2: Type): Boolean = ( - isNumericValueType(tp1) - && isNumericValueType(tp2) + isNumericValueType(tp1.dealiasWiden) + && isNumericValueType(tp2.dealias) && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol) ) diff --git a/test/files/pos/t7228.scala b/test/files/pos/t7228.scala new file mode 100644 index 0000000000..5d936f6529 --- /dev/null +++ b/test/files/pos/t7228.scala @@ -0,0 +1,75 @@ +object AdaptWithWeaklyConformantType { + implicit class D(d: Double) { def double = d*2 } + + val x1: Int = 1 + var x2: Int = 2 + val x3 = 3 + var x4 = 4 + final val x5 = 5 + final var x6 = 6 + + def f1 = x1.double + def f2 = x2.double + def f3 = x3.double + def f4 = x4.double + def f5 = x5.double + def f6 = x6.double +} + +object AdaptAliasWithWeaklyConformantType { + implicit class D(d: Double) { def double = d*2 } + type T = Int + + val x1: T = 1 + var x2: T = 2 + val x3 = (3: T) + var x4 = (4: T) + final val x5 = (5: T) + final var x6 = (6: T) + + def f1 = x1.double + def f2 = x2.double + def f3 = x3.double + def f4 = x4.double + def f5 = x5.double + def f6 = x6.double +} + +object AdaptToAliasWithWeaklyConformantType { + type U = Double + implicit class D(d: U) { def double = d*2 } + + val x1: Int = 1 + var x2: Int = 2 + val x3 = (3: Int) + var x4 = (4: Int) + final val x5 = (5: Int) + final var x6 = (6: Int) + + def f1 = x1.double + def f2 = x2.double + def f3 = x3.double + def f4 = x4.double + def f5 = x5.double + def f6 = x6.double +} + +object AdaptAliasToAliasWithWeaklyConformantType { + type U = Double + type T = Int + implicit class D(d: U) { def double = d*2 } + + val x1: T = 1 + var x2: T = 2 + val x3 = (3: T) + var x4 = (4: T) + final val x5 = (5: T) + final var x6 = (6: T) + + def f1 = x1.double + def f2 = x2.double + def f3 = x3.double + def f4 = x4.double + def f5 = x5.double + def f6 = x6.double +} -- cgit v1.2.3 From cb02c96bed1454e1c0702c529366f3c40d6bffd9 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 10 Mar 2013 10:00:54 -0700 Subject: Simplified the widening logic. Should speak for itself. Whenever someone changed @switch from an error to a warning, it broke all the tests which depended on the error. I added -Xfatal-warnings to a couple which needed it. 
And one of those tests was then failing, as it must now since we couldn't get away with what was being attempted, so I moved it to pending. --- .../scala/tools/nsc/typechecker/Namers.scala | 30 ++++++++++------------ test/files/pos/no-widen-locals.scala | 19 -------------- test/files/pos/switch-small.flags | 1 + test/pending/pos/no-widen-locals.flags | 1 + test/pending/pos/no-widen-locals.scala | 19 ++++++++++++++ 5 files changed, 34 insertions(+), 36 deletions(-) delete mode 100644 test/files/pos/no-widen-locals.scala create mode 100644 test/files/pos/switch-small.flags create mode 100644 test/pending/pos/no-widen-locals.flags create mode 100644 test/pending/pos/no-widen-locals.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 007c7c6a83..d5da4967be 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -805,23 +805,19 @@ trait Namers extends MethodSynthesis { case _ => false } - - val tpe1 = dropIllegalStarTypes(tpe.deconst) - val tpe2 = tpe1.widen - - // This infers Foo.type instead of "object Foo" - // See Infer#adjustTypeArgs for the polymorphic case. - if (tpe.typeSymbolDirect.isModuleClass) tpe1 - else if (sym.isVariable || sym.isMethod && !sym.hasAccessorFlag) - if (tpe2 <:< pt) tpe2 else tpe1 - else if (isHidden(tpe)) tpe2 - // In an attempt to make pattern matches involving method local vals - // compilable into switches, for a time I had a more generous condition: - // `if (sym.isFinal || sym.isLocal) tpe else tpe1` - // This led to issues with expressions like classOf[List[_]] which apparently - // depend on being deconst-ed here, so this is again the original: - else if (!sym.isFinal) tpe1 - else tpe + val shouldWiden = ( + !tpe.typeSymbolDirect.isModuleClass // Infer Foo.type instead of "object Foo" + && (tpe.widen <:< pt) // Don't widen our way out of conforming to pt + && ( sym.isVariable + || sym.isMethod && !sym.hasAccessorFlag + || isHidden(tpe) + ) + ) + dropIllegalStarTypes( + if (shouldWiden) tpe.widen + else if (sym.isFinal) tpe // "final val" allowed to retain constant type + else tpe.deconst + ) } /** Computes the type of the body in a ValDef or DefDef, and * assigns the type to the tpt's node. Returns the type. diff --git a/test/files/pos/no-widen-locals.scala b/test/files/pos/no-widen-locals.scala deleted file mode 100644 index 013e63f0a2..0000000000 --- a/test/files/pos/no-widen-locals.scala +++ /dev/null @@ -1,19 +0,0 @@ -// Worked from r23262 until that was reverted somewhere -// around r25016. 
-import annotation.switch - -object Test { - def f(x: Int) = { - val X1 = 5 - val X2 = 10 - val X3 = 15 - val X4 = 20 - - (x: @switch) match { - case X1 => 1 - case X2 => 2 - case X3 => 3 - case X4 => 4 - } - } -} diff --git a/test/files/pos/switch-small.flags b/test/files/pos/switch-small.flags new file mode 100644 index 0000000000..85d8eb2ba2 --- /dev/null +++ b/test/files/pos/switch-small.flags @@ -0,0 +1 @@ +-Xfatal-warnings diff --git a/test/pending/pos/no-widen-locals.flags b/test/pending/pos/no-widen-locals.flags new file mode 100644 index 0000000000..85d8eb2ba2 --- /dev/null +++ b/test/pending/pos/no-widen-locals.flags @@ -0,0 +1 @@ +-Xfatal-warnings diff --git a/test/pending/pos/no-widen-locals.scala b/test/pending/pos/no-widen-locals.scala new file mode 100644 index 0000000000..013e63f0a2 --- /dev/null +++ b/test/pending/pos/no-widen-locals.scala @@ -0,0 +1,19 @@ +// Worked from r23262 until that was reverted somewhere +// around r25016. +import annotation.switch + +object Test { + def f(x: Int) = { + val X1 = 5 + val X2 = 10 + val X3 = 15 + val X4 = 20 + + (x: @switch) match { + case X1 => 1 + case X2 => 2 + case X3 => 3 + case X4 => 4 + } + } +} -- cgit v1.2.3 From 9c5ea96b1c0fa45037a96e530b6ae71687a292d1 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 10 Mar 2013 10:11:28 -0700 Subject: Moved some numeric subtyping logic closer to center. Fixed bug in numeric widening related to continuations, which enabled simplifying isNumericSubType. --- .../scala/tools/nsc/typechecker/Typers.scala | 9 ++++--- .../scala/reflect/internal/Definitions.scala | 2 +- .../scala/reflect/internal/tpe/TypeComparers.scala | 31 +++++++++++++--------- 3 files changed, 25 insertions(+), 17 deletions(-) (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a110d6d15d..c19d6b7a56 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1124,16 +1124,19 @@ trait Typers extends Adaptations with Tags { else { if (mode.inExprModeButNot(FUNmode)) { pt.dealias match { - case TypeRef(_, sym, _) => + // The <: Any requirement inhibits attempts to adapt continuation types + // to non-continuation types. + case TypeRef(_, sym, _) if tree.tpe <:< AnyClass.tpe => // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially // infinite expansion if pt is constant type () - if (sym == UnitClass && tree.tpe <:< AnyClass.tpe) { // (12) + if (sym == UnitClass) { // (12) if (settings.warnValueDiscard.value) context.unit.warning(tree.pos, "discarded non-Unit value") return typedPos(tree.pos, mode, pt) { Block(List(tree), Literal(Constant())) } - } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe.dealiasWiden, pt)) { + } + else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) { if (settings.warnNumericWiden.value) context.unit.warning(tree.pos, "implicit numeric widening") return typedPos(tree.pos, mode, pt) { diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index fe5a5c81e2..bfba81c654 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1131,7 +1131,7 @@ trait Definitions extends api.StandardDefinitions { /** Is type's symbol a numeric value class? 
*/ def isNumericValueType(tp: Type): Boolean = tp match { case TypeRef(_, sym, _) => isNumericValueClass(sym) - case _ => false + case _ => false } // todo: reconcile with javaSignature!!! diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index 2d499cf299..a03ab1610e 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -5,6 +5,7 @@ package tpe import scala.collection.{ mutable } import Flags._ import util.Statistics +import scala.annotation.tailrec trait TypeComparers { self: SymbolTable => @@ -583,9 +584,9 @@ trait TypeComparers { def isWeakSubType(tp1: Type, tp2: Type) = - tp1.widen.normalize match { + tp1.dealiasWiden match { case TypeRef(_, sym1, _) if isNumericValueClass(sym1) => - tp2.deconst.normalize match { + tp2.deconst.dealias match { case TypeRef(_, sym2, _) if isNumericValueClass(sym2) => isNumericSubClass(sym1, sym2) case tv2 @ TypeVar(_, _) => @@ -594,7 +595,7 @@ trait TypeComparers { isSubType(tp1, tp2) } case tv1 @ TypeVar(_, _) => - tp2.deconst.normalize match { + tp2.deconst.dealias match { case TypeRef(_, sym2, _) if isNumericValueClass(sym2) => tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true) case _ => @@ -604,14 +605,18 @@ trait TypeComparers { isSubType(tp1, tp2) } - /** The isNumericValueType tests appear redundant, but without them - * test/continuations-neg/function3.scala goes into an infinite loop. - * (Even if the calls are to typeSymbolDirect.) - */ - def isNumericSubType(tp1: Type, tp2: Type): Boolean = ( - isNumericValueType(tp1.dealiasWiden) - && isNumericValueType(tp2.dealias) - && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol) - ) - + def isNumericSubType(tp1: Type, tp2: Type) = ( + isNumericSubClass(primitiveBaseClass(tp1.dealiasWiden), primitiveBaseClass(tp2.dealias)) + ) + + /** If the given type has a primitive class among its base classes, + * the symbol of that class. Otherwise, NoSymbol. + */ + private def primitiveBaseClass(tp: Type): Symbol = { + @tailrec def loop(bases: List[Symbol]): Symbol = bases match { + case Nil => NoSymbol + case x :: xs => if (isPrimitiveValueClass(x)) x else loop(xs) + } + loop(tp.baseClasses) + } } -- cgit v1.2.3 From 38a1515e8e321a93530a7c963ac3c10bdab0456e Mon Sep 17 00:00:00 2001 From: Eugene Vigdorchik Date: Mon, 11 Mar 2013 15:43:29 +0400 Subject: SI-5513: add inplace set-theoretic operations for mutable bitsets. 
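
The new operators mirror the existing non-destructive |, &, ^ and &~ but update the receiver in place with a single pass over the underlying words. A minimal usage sketch (variable names are illustrative; the updated run/bitsets test below exercises the same calls):

    import scala.collection.mutable.BitSet

    val acc = BitSet(5, 6)
    acc |= BitSet(7)      // acc is now BitSet(5, 6, 7)
    acc &= BitSet(5, 7)   // acc is now BitSet(5, 7)
    acc ^= BitSet(5)      // acc is now BitSet(7)
    acc &~= BitSet(7)     // acc is now BitSet()
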
--- src/library/scala/collection/mutable/BitSet.scala | 51 ++++++++++++++++++++++- test/files/run/bitsets.check | 5 +++ test/files/run/bitsets.scala | 22 ++++++++++ 3 files changed, 77 insertions(+), 1 deletion(-) (limited to 'src') diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala index 2a535a799c..397f8099eb 100644 --- a/src/library/scala/collection/mutable/BitSet.scala +++ b/src/library/scala/collection/mutable/BitSet.scala @@ -58,6 +58,11 @@ class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int] if (idx < nwords) elems(idx) else 0L private def updateWord(idx: Int, w: Long) { + ensureCapacity(idx) + elems(idx) = w + } + + private def ensureCapacity(idx: Int) { if (idx >= nwords) { var newlen = nwords while (idx >= newlen) newlen = newlen * 2 @@ -65,7 +70,6 @@ class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int] Array.copy(elems, 0, elems1, 0, nwords) elems = elems1 } - elems(idx) = w } protected def fromBitMaskNoCopy(words: Array[Long]): BitSet = new BitSet(words) @@ -92,6 +96,51 @@ class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int] def += (elem: Int): this.type = { add(elem); this } def -= (elem: Int): this.type = { remove(elem); this } + /** Updates this bitset to the union with another bitset by performing a bitwise "or". + * + * @param other the bitset to form the union with. + * @return the bitset itself. + */ + def |= (other: BitSet): this.type = { + ensureCapacity(other.nwords) + for (i <- 0 until other.nwords) + elems(i) = elems(i) | other.word(i) + this + } + /** Updates this bitset to the intersection with another bitset by performing a bitwise "and". + * + * @param other the bitset to form the intersection with. + * @return the bitset itself. + */ + def &= (other: BitSet): this.type = { + ensureCapacity(other.nwords) + for (i <- 0 until other.nwords) + elems(i) = elems(i) & other.word(i) + this + } + /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor". + * + * @param other the bitset to form the symmetric difference with. + * @return the bitset itself. + */ + def ^= (other: BitSet): this.type = { + ensureCapacity(other.nwords) + for (i <- 0 until other.nwords) + elems(i) = elems(i) ^ other.word(i) + this + } + /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not". + * + * @param other the bitset to form the difference with. + * @return the bitset itself. 
+ */ + def &~= (other: BitSet): this.type = { + ensureCapacity(other.nwords) + for (i <- 0 until other.nwords) + elems(i) = elems(i) & ~other.word(i) + this + } + override def clear() { elems = new Array[Long](elems.length) } diff --git a/test/files/run/bitsets.check b/test/files/run/bitsets.check index 3f01d2a400..41c2ccdcb8 100644 --- a/test/files/run/bitsets.check +++ b/test/files/run/bitsets.check @@ -37,6 +37,11 @@ m2_r1 = true m2_r2 = true m2_r3 = true +b1:BitSet(5, 6, 7) +b2:BitSet(5) +b3:BitSet(5, 7) +b4:BitSet(7) +b0:BitSet(5, 6, 7) is0 = BitSet() is1 = BitSet() is2 = BitSet(2) diff --git a/test/files/run/bitsets.scala b/test/files/run/bitsets.scala index bdeb1fd811..0ea43fcb95 100644 --- a/test/files/run/bitsets.scala +++ b/test/files/run/bitsets.scala @@ -81,6 +81,27 @@ object TestMutable2 { println } +object TestMutable3 { + import scala.collection.mutable.BitSet + + val b0 = BitSet(5, 6) + val b1 = BitSet(7) + val b2 = BitSet(1, 5) + val b3 = BitSet(6, 7) + val b4 = BitSet(6, 7) + + b1 |= b0 + println(s"b1:$b1") + b2 &= b0 + println(s"b2:$b2") + b3 ^= b0 + println(s"b3:$b3") + b4 &~= b0 + println(s"b4:$b4") + b0 ^= b0 |= b1 + println(s"b0:$b0") +} + object TestImmutable { import scala.collection.immutable.BitSet @@ -155,6 +176,7 @@ object TestImmutable2 { object Test extends App { TestMutable TestMutable2 + TestMutable3 TestImmutable TestImmutable2 } -- cgit v1.2.3 From 34faa0d073a8613deebffe7605fd8a5e9a93afbc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 11 Mar 2013 18:35:28 +0100 Subject: SI-6601 Close access loophole for value class constructors ExtensionMethods marks private constructors of value classes as notPRIVATE before pickling. When the pickler reads the flags of this symbol, the anti-shift mechanism folds this into the regular PRIVATE flag, so the class is pickled as though it was public all along. A seprately compiled client can then call this constructor. To remedy this, we must: - pickle `rawFlags`, rather than `flags`. This is symmetric with unpickling, which sets `rawFlags` with the value it reads. - Add `notPRIVATE` to the flagset `PickledFlags`. We cannot make this change in a minor version, as the pickler and unpickler must agree on `PickledFlags`. I believe that this won't change the size of pickled flags for the majority of symbols (ie, those without the notPRIVATE flag) due to the variable length encoding in `writeLongNat`. This also improves the situation for SI-6608. Reflection and scalap (and, by extension, IntelliJ), no longer will see as public methods that have had their access widened in SuperAccessors (which is done selectively to support inlining under separate compilation.) 
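
The loophole is the one exercised by the new neg/t6601 test added below: under separate compilation, client code could previously call the private constructor of a value class, because the unpickled constructor no longer appeared private.

    // PrivateConstructor_1.scala, compiled first
    class PrivateConstructor private(val s: String) extends AnyVal

    // AccessPrivateConstructor_2.scala, compiled against the resulting pickle
    class AccessPrivateConstructor {
      new PrivateConstructor("")  // now rejected: constructor cannot be accessed here
    }

Both files appear verbatim in test/files/neg/t6601 in this commit.
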
--- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2 ++ src/reflect/scala/reflect/internal/Flags.scala | 6 +++++- test/files/neg/t6601.check | 4 ++++ test/files/neg/t6601/AccessPrivateConstructor_2.scala | 3 +++ test/files/neg/t6601/PrivateConstructor_1.scala | 1 + test/files/run/t6608.check | 1 + test/files/run/t6608.scala | 16 ++++++++++++++++ 8 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 test/files/neg/t6601.check create mode 100644 test/files/neg/t6601/AccessPrivateConstructor_2.scala create mode 100644 test/files/neg/t6601/PrivateConstructor_1.scala create mode 100644 test/files/run/t6608.check create mode 100644 test/files/run/t6608.scala (limited to 'src') diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 140be0e17b..9b33ae8ba1 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -528,7 +528,7 @@ abstract class Pickler extends SubComponent { private def writeSymInfo(sym: Symbol) { writeRef(sym.name) writeRef(localizedOwner(sym)) - writeLongNat((rawToPickledFlags(sym.flags & PickledFlags))) + writeLongNat((rawToPickledFlags(sym.rawflags & PickledFlags))) if (sym.hasAccessBoundary) writeRef(sym.privateWithin) writeRef(sym.info) } diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index b7221a78ec..b32fc6b977 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1553,6 +1553,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans val bridges = addVarargBridges(currentOwner) checkAllOverrides(currentOwner) checkAnyValSubclass(currentOwner) + if (currentOwner.isDerivedValueClass) + currentOwner.primaryConstructor makeNotPrivate NoSymbol // SI-6601, must be done *after* pickler! 
if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc") diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index 1987f34474..fe46a0471e 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -307,7 +307,11 @@ class Flags extends ModifierFlags { assert((OverloadedFlagsMask & FlagsNotPickled) == 0, flagsToString(OverloadedFlagsMask & FlagsNotPickled)) /** These flags are pickled */ - final val PickledFlags = InitialFlags & ~FlagsNotPickled + final val PickledFlags = ( + (InitialFlags & ~FlagsNotPickled) + | notPRIVATE // for value class constructors (SI-6601), and private members referenced + // in @inline-marked methods publicized in SuperAccessors (see SI-6608, e6b4204604) + ) /** If we have a top-level class or module * and someone asks us for a flag not in TopLevelPickledFlags, diff --git a/test/files/neg/t6601.check b/test/files/neg/t6601.check new file mode 100644 index 0000000000..1410e1b11a --- /dev/null +++ b/test/files/neg/t6601.check @@ -0,0 +1,4 @@ +AccessPrivateConstructor_2.scala:2: error: constructor PrivateConstructor in class PrivateConstructor cannot be accessed in class AccessPrivateConstructor + new PrivateConstructor("") // Scalac should forbid accessing to the private constructor! + ^ +one error found diff --git a/test/files/neg/t6601/AccessPrivateConstructor_2.scala b/test/files/neg/t6601/AccessPrivateConstructor_2.scala new file mode 100644 index 0000000000..816bc10d79 --- /dev/null +++ b/test/files/neg/t6601/AccessPrivateConstructor_2.scala @@ -0,0 +1,3 @@ +class AccessPrivateConstructor { + new PrivateConstructor("") // Scalac should forbid accessing to the private constructor! +} diff --git a/test/files/neg/t6601/PrivateConstructor_1.scala b/test/files/neg/t6601/PrivateConstructor_1.scala new file mode 100644 index 0000000000..f09d7ad068 --- /dev/null +++ b/test/files/neg/t6601/PrivateConstructor_1.scala @@ -0,0 +1 @@ +class PrivateConstructor private(val s: String) extends AnyVal diff --git a/test/files/run/t6608.check b/test/files/run/t6608.check new file mode 100644 index 0000000000..15628b322e --- /dev/null +++ b/test/files/run/t6608.check @@ -0,0 +1 @@ +(C$$yyy,true) diff --git a/test/files/run/t6608.scala b/test/files/run/t6608.scala new file mode 100644 index 0000000000..2f956bfb35 --- /dev/null +++ b/test/files/run/t6608.scala @@ -0,0 +1,16 @@ +import reflect.runtime.universe + +class C { + private val yyy: Any = 1 + @inline def foo = yyy +} + +object Test extends App { + import universe._ + val access = typeOf[C].declarations + .toList + .filter(_.name.toString.endsWith("yyy")) + .map(x => (x.name, x.isPrivate)) + println(access.head) +} + -- cgit v1.2.3 From 48cc8b47fcadaa187026ca0422178c9094e4b412 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 10 Mar 2013 17:17:58 -0700 Subject: Modularized the repl. Following in the footsteps of scaladoc and interactive. The interpreter sources move into src/repl, and are given a separate build target. As with the others, at present they are still packaged into scala-compiler.jar. 
A summary of changes: - repl requires use of ReplGlobal (this was already implied) - macro code's repl-specific classloader hack pulled into overridable method and overridden in ReplGlobal - removed -Ygen-javap option to eliminate backend's dependency on javap - removed -Yrepl-debug option (can still be enabled with -Dscala.repl.debug) - pushed javap code into src/repl so javax.tools dependency can bee weakened to the repl only - removed some "show pickled" related code which hasn't worked right in a while and isn't the right way to do it anymore anyway. Will return to fix showPickled and provide it with some tests. --- build.xml | 39 +- src/compiler/scala/tools/nsc/Global.scala | 2 +- src/compiler/scala/tools/nsc/Interpreter.scala | 12 - src/compiler/scala/tools/nsc/InterpreterLoop.scala | 12 - .../scala/tools/nsc/MainGenericRunner.scala | 106 -- .../tools/nsc/backend/jvm/BytecodeWriters.scala | 30 - .../scala/tools/nsc/backend/jvm/GenASM.scala | 59 +- .../nsc/interpreter/AbstractFileClassLoader.scala | 108 -- .../nsc/interpreter/AbstractOrMissingHandler.scala | 41 - .../scala/tools/nsc/interpreter/ByteCode.scala | 42 - .../scala/tools/nsc/interpreter/CommandLine.scala | 13 - .../scala/tools/nsc/interpreter/Completion.scala | 49 - .../tools/nsc/interpreter/CompletionAware.scala | 53 - .../tools/nsc/interpreter/CompletionOutput.scala | 85 -- .../nsc/interpreter/ConsoleReaderHelper.scala | 63 -- .../scala/tools/nsc/interpreter/Delimited.scala | 41 - .../scala/tools/nsc/interpreter/ExprTyper.scala | 99 -- .../scala/tools/nsc/interpreter/Formatting.scala | 35 - .../scala/tools/nsc/interpreter/ILoop.scala | 749 ------------- .../scala/tools/nsc/interpreter/IMain.scala | 1121 ------------------- .../scala/tools/nsc/interpreter/ISettings.scala | 54 - .../scala/tools/nsc/interpreter/Imports.scala | 181 ---- .../tools/nsc/interpreter/InteractiveReader.scala | 49 - .../tools/nsc/interpreter/JLineCompletion.scala | 352 ------ .../scala/tools/nsc/interpreter/JLineReader.scala | 68 -- .../scala/tools/nsc/interpreter/Logger.scala | 14 - .../scala/tools/nsc/interpreter/LoopCommands.scala | 86 -- .../tools/nsc/interpreter/MemberHandlers.scala | 219 ---- .../scala/tools/nsc/interpreter/NamedParam.scala | 45 - .../scala/tools/nsc/interpreter/Naming.scala | 105 -- .../scala/tools/nsc/interpreter/Parsed.scala | 60 -- .../scala/tools/nsc/interpreter/Pasted.scala | 101 -- .../scala/tools/nsc/interpreter/Phased.scala | 143 --- .../scala/tools/nsc/interpreter/Power.scala | 326 ------ .../scala/tools/nsc/interpreter/ReplConfig.scala | 49 - .../scala/tools/nsc/interpreter/ReplDir.scala | 48 - .../scala/tools/nsc/interpreter/ReplGlobal.scala | 56 - .../scala/tools/nsc/interpreter/ReplProps.scala | 27 - .../scala/tools/nsc/interpreter/ReplReporter.scala | 34 - .../scala/tools/nsc/interpreter/ReplStrings.scala | 32 - .../scala/tools/nsc/interpreter/ReplVals.scala | 82 -- .../scala/tools/nsc/interpreter/Results.scala | 22 - .../scala/tools/nsc/interpreter/RichClass.scala | 36 - .../scala/tools/nsc/interpreter/SimpleReader.scala | 40 - .../scala/tools/nsc/interpreter/TypeStrings.scala | 242 ----- .../scala/tools/nsc/interpreter/package.scala | 157 --- .../interpreter/session/FileBackedHistory.scala | 84 -- .../tools/nsc/interpreter/session/History.scala | 22 - .../nsc/interpreter/session/JLineHistory.scala | 49 - .../nsc/interpreter/session/SimpleHistory.scala | 58 - .../tools/nsc/interpreter/session/package.scala | 23 - .../scala/tools/nsc/settings/ScalaSettings.scala | 1 - .../scala/tools/nsc/typechecker/Macros.scala 
| 23 +- .../scala/tools/nsc/typechecker/TypeStrings.scala | 244 +++++ .../tools/nsc/util/AbstractFileClassLoader.scala | 107 ++ .../scala/tools/nsc/util/ShowPickled.scala | 4 +- src/compiler/scala/tools/reflect/StdTags.scala | 4 +- .../scala/tools/reflect/ToolBoxFactory.scala | 2 +- src/compiler/scala/tools/util/Javap.scala | 694 +----------- src/repl/scala/tools/nsc/Interpreter.scala | 12 + src/repl/scala/tools/nsc/InterpreterLoop.scala | 12 + src/repl/scala/tools/nsc/MainGenericRunner.scala | 105 ++ .../nsc/interpreter/AbstractFileClassLoader.scala | 7 + .../nsc/interpreter/AbstractOrMissingHandler.scala | 41 + .../scala/tools/nsc/interpreter/ByteCode.scala | 32 + .../scala/tools/nsc/interpreter/CommandLine.scala | 13 + .../scala/tools/nsc/interpreter/Completion.scala | 49 + .../tools/nsc/interpreter/CompletionAware.scala | 53 + .../tools/nsc/interpreter/CompletionOutput.scala | 85 ++ .../nsc/interpreter/ConsoleReaderHelper.scala | 63 ++ .../scala/tools/nsc/interpreter/Delimited.scala | 41 + .../scala/tools/nsc/interpreter/ExprTyper.scala | 99 ++ .../scala/tools/nsc/interpreter/Formatting.scala | 35 + src/repl/scala/tools/nsc/interpreter/ILoop.scala | 748 +++++++++++++ src/repl/scala/tools/nsc/interpreter/IMain.scala | 1122 ++++++++++++++++++++ .../scala/tools/nsc/interpreter/ISettings.scala | 54 + src/repl/scala/tools/nsc/interpreter/Imports.scala | 181 ++++ .../tools/nsc/interpreter/InteractiveReader.scala | 49 + .../tools/nsc/interpreter/JLineCompletion.scala | 352 ++++++ .../scala/tools/nsc/interpreter/JLineReader.scala | 68 ++ .../scala/tools/nsc/interpreter/JavapClass.scala | 693 ++++++++++++ src/repl/scala/tools/nsc/interpreter/Logger.scala | 14 + .../scala/tools/nsc/interpreter/LoopCommands.scala | 86 ++ .../tools/nsc/interpreter/MemberHandlers.scala | 219 ++++ .../scala/tools/nsc/interpreter/NamedParam.scala | 46 + src/repl/scala/tools/nsc/interpreter/Naming.scala | 105 ++ src/repl/scala/tools/nsc/interpreter/Parsed.scala | 60 ++ src/repl/scala/tools/nsc/interpreter/Pasted.scala | 101 ++ src/repl/scala/tools/nsc/interpreter/Phased.scala | 143 +++ src/repl/scala/tools/nsc/interpreter/Power.scala | 326 ++++++ .../scala/tools/nsc/interpreter/ReplConfig.scala | 49 + src/repl/scala/tools/nsc/interpreter/ReplDir.scala | 48 + .../scala/tools/nsc/interpreter/ReplGlobal.scala | 64 ++ .../scala/tools/nsc/interpreter/ReplProps.scala | 27 + .../scala/tools/nsc/interpreter/ReplReporter.scala | 34 + .../scala/tools/nsc/interpreter/ReplStrings.scala | 32 + .../scala/tools/nsc/interpreter/ReplVals.scala | 82 ++ src/repl/scala/tools/nsc/interpreter/Results.scala | 22 + .../scala/tools/nsc/interpreter/RichClass.scala | 36 + .../scala/tools/nsc/interpreter/SimpleReader.scala | 40 + .../scala/tools/nsc/interpreter/StdReplTags.scala | 15 + src/repl/scala/tools/nsc/interpreter/package.scala | 157 +++ .../interpreter/session/FileBackedHistory.scala | 84 ++ .../tools/nsc/interpreter/session/History.scala | 22 + .../nsc/interpreter/session/JLineHistory.scala | 49 + .../nsc/interpreter/session/SimpleHistory.scala | 58 + .../tools/nsc/interpreter/session/package.scala | 23 + 107 files changed, 6285 insertions(+), 6273 deletions(-) delete mode 100644 src/compiler/scala/tools/nsc/Interpreter.scala delete mode 100644 src/compiler/scala/tools/nsc/InterpreterLoop.scala delete mode 100644 src/compiler/scala/tools/nsc/MainGenericRunner.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala delete mode 100644 
src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ByteCode.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/CommandLine.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/Completion.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/Delimited.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/Formatting.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ILoop.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/IMain.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ISettings.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/Imports.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/JLineReader.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/Logger.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/NamedParam.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/Naming.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/Parsed.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/Pasted.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/Phased.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/Power.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ReplDir.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ReplProps.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/ReplVals.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/Results.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/RichClass.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/package.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/session/History.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala delete mode 100644 src/compiler/scala/tools/nsc/interpreter/session/package.scala create mode 100644 src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala create mode 100644 src/compiler/scala/tools/nsc/util/AbstractFileClassLoader.scala create mode 100644 
src/repl/scala/tools/nsc/Interpreter.scala create mode 100644 src/repl/scala/tools/nsc/InterpreterLoop.scala create mode 100644 src/repl/scala/tools/nsc/MainGenericRunner.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ByteCode.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/CommandLine.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Completion.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/CompletionAware.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Delimited.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ExprTyper.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Formatting.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ILoop.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/IMain.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ISettings.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Imports.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/JLineReader.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/JavapClass.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Logger.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/LoopCommands.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/NamedParam.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Naming.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Parsed.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Pasted.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Phased.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Power.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ReplConfig.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ReplDir.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ReplProps.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ReplReporter.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ReplStrings.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/ReplVals.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Results.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/RichClass.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/SimpleReader.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/StdReplTags.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/package.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/session/History.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/session/package.scala (limited to 'src') diff --git 
a/build.xml b/build.xml index c1deb7ce6f..7e4948c938 100644 --- a/build.xml +++ b/build.xml @@ -1095,6 +1095,32 @@ QUICK BUILD (QUICK) + + + + + + + + + + + + + + + + + + + + + - + @@ -1229,6 +1255,7 @@ QUICK BUILD (QUICK) + @@ -1247,6 +1274,7 @@ QUICK BUILD (QUICK) + @@ -1358,11 +1386,12 @@ QUICK BUILD (QUICK) + + + - - @@ -1488,6 +1517,7 @@ PACKED QUICK BUILD (PACK) + @@ -1998,10 +2028,11 @@ SBT Compiler Interface jvmargs="${scalacfork.jvmargs}"> - + + diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index c0f611daa7..7ee3ee551f 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -999,7 +999,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) object typeDeconstruct extends { val global: Global.this.type = Global.this - } with interpreter.StructuredTypeStrings + } with typechecker.StructuredTypeStrings /** There are common error conditions where when the exception hits * here, currentRun.currentUnit is null. This robs us of the knowledge diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/compiler/scala/tools/nsc/Interpreter.scala deleted file mode 100644 index 434f19f21b..0000000000 --- a/src/compiler/scala/tools/nsc/Interpreter.scala +++ /dev/null @@ -1,12 +0,0 @@ -package scala.tools.nsc - -import interpreter._ -import java.io._ - -/** A compatibility stub. - */ -@deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0") -class Interpreter(settings: Settings, out: PrintWriter) extends IMain(settings, out) { - def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true)) - def this() = this(new Settings()) -} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/InterpreterLoop.scala b/src/compiler/scala/tools/nsc/InterpreterLoop.scala deleted file mode 100644 index a0be3f4fdb..0000000000 --- a/src/compiler/scala/tools/nsc/InterpreterLoop.scala +++ /dev/null @@ -1,12 +0,0 @@ -package scala.tools.nsc - -import interpreter._ -import java.io._ - -/** A compatibility stub. 
- */ -@deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0") -class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) extends ILoop(in0, out) { - def this(in0: BufferedReader, out: PrintWriter) = this(Some(in0), out) - def this() = this(None, new PrintWriter(scala.Console.out)) -} diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/compiler/scala/tools/nsc/MainGenericRunner.scala deleted file mode 100644 index adb03ca374..0000000000 --- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala +++ /dev/null @@ -1,106 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2006-2013 LAMP/EPFL - * @author Lex Spoon - */ - -package scala.tools.nsc - -import io.{ File } -import util.{ ClassPath, ScalaClassLoader } -import Properties.{ versionString, copyrightString } -import interpreter.{ ILoop } -import GenericRunnerCommand._ - -object JarRunner extends CommonRunner { - def runJar(settings: GenericRunnerSettings, jarPath: String, arguments: Seq[String]): Either[Throwable, Boolean] = { - val jar = new io.Jar(jarPath) - val mainClass = jar.mainClass getOrElse sys.error("Cannot find main class for jar: " + jarPath) - val jarURLs = ClassPath expandManifestPath jarPath - val urls = if (jarURLs.isEmpty) File(jarPath).toURL +: settings.classpathURLs else jarURLs - - if (settings.Ylogcp.value) { - Console.err.println("Running jar with these URLs as the classpath:") - urls foreach println - } - - runAndCatch(urls, mainClass, arguments) - } -} - -/** An object that runs Scala code. It has three possible - * sources for the code to run: pre-compiled code, a script file, - * or interactive entry. - */ -class MainGenericRunner { - def errorFn(ex: Throwable): Boolean = { - ex.printStackTrace() - false - } - def errorFn(str: String): Boolean = { - Console.err println str - false - } - - def process(args: Array[String]): Boolean = { - val command = new GenericRunnerCommand(args.toList, (x: String) => errorFn(x)) - import command.{ settings, howToRun, thingToRun } - def sampleCompiler = new Global(settings) // def so its not created unless needed - - if (!command.ok) return errorFn("\n" + command.shortUsageMsg) - else if (settings.version.value) return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString)) - else if (command.shouldStopWithInfo) return errorFn(command getInfoMessage sampleCompiler) - - def isE = !settings.execute.isDefault - def dashe = settings.execute.value - - def isI = !settings.loadfiles.isDefault - def dashi = settings.loadfiles.value - - // Deadlocks on startup under -i unless we disable async. - if (isI) - settings.Yreplsync.value = true - - def combinedCode = { - val files = if (isI) dashi map (file => File(file).slurp()) else Nil - val str = if (isE) List(dashe) else Nil - - files ++ str mkString "\n\n" - } - - def runTarget(): Either[Throwable, Boolean] = howToRun match { - case AsObject => - ObjectRunner.runAndCatch(settings.classpathURLs, thingToRun, command.arguments) - case AsScript => - ScriptRunner.runScriptAndCatch(settings, thingToRun, command.arguments) - case AsJar => - JarRunner.runJar(settings, thingToRun, command.arguments) - case Error => - Right(false) - case _ => - // We start the repl when no arguments are given. - Right(new ILoop process settings) - } - - /** If -e and -i were both given, we want to execute the -e code after the - * -i files have been included, so they are read into strings and prepended to - * the code given in -e. 
The -i option is documented to only make sense - * interactively so this is a pretty reasonable assumption. - * - * This all needs a rewrite though. - */ - if (isE) { - ScriptRunner.runCommand(settings, combinedCode, thingToRun +: command.arguments) - } - else runTarget() match { - case Left(ex) => errorFn(ex) - case Right(b) => b - } - } -} - -object MainGenericRunner extends MainGenericRunner { - def main(args: Array[String]) { - if (!process(args)) - sys.exit(1) - } -} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala index 941ccd9a2d..c1cd3204e0 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala @@ -9,7 +9,6 @@ package backend.jvm import java.io.{ DataOutputStream, FileOutputStream, OutputStream, File => JFile } import scala.tools.nsc.io._ import scala.tools.nsc.util.ScalaClassLoader -import scala.tools.util.{ Javap, JavapClass } import java.util.jar.Attributes.Name import scala.language.postfixOps @@ -59,35 +58,6 @@ trait BytecodeWriters { override def close() = writer.close() } - /** To be mixed-in with the BytecodeWriter that generates - * the class file to be disassembled. - */ - trait JavapBytecodeWriter extends BytecodeWriter { - val baseDir = Directory(settings.Ygenjavap.value).createDirectory() - val cl = ScalaClassLoader.appLoader - - def emitJavap(classFile: AbstractFile, javapFile: File) { - val pw = javapFile.printWriter() - try { - val javap = new JavapClass(cl, pw) { - override def findBytes(path: String): Array[Byte] = classFile.toByteArray - } - javap(Seq("-verbose", "-protected", classFile.name)) foreach (_.show()) - } finally pw.close() - } - abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) { - super.writeClass(label, jclassName, jclassBytes, sym) - - val classFile = getFile(sym, jclassName, ".class") - val segments = jclassName.split("[./]") - val javapFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "javap" toFile; - javapFile.parent.createDirectory() - - if (Javap.isAvailable(cl)) emitJavap(classFile, javapFile) - else warning("No javap on classpath, skipping javap output.") - } - } - trait ClassBytecodeWriter extends BytecodeWriter { def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) { val outfile = getFile(sym, jclassName, ".class") diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 388efb4625..4a3d1805d9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -72,19 +72,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { new DirectToJarfileWriter(f.file) case _ => - import scala.tools.util.Javap - if (settings.Ygenjavap.isDefault) { - if(settings.Ydumpclasses.isDefault) - new ClassBytecodeWriter { } - else - new ClassBytecodeWriter with DumpBytecodeWriter { } - } - else if (Javap.isAvailable()) new ClassBytecodeWriter with JavapBytecodeWriter { } - else { - warning("No javap on classpath, skipping javap output.") + if (settings.Ydumpclasses.isDefault) new ClassBytecodeWriter { } - } - + else + new ClassBytecodeWriter with DumpBytecodeWriter { } // TODO A ScalapBytecodeWriter could take asm.util.Textifier as starting point. // Three areas where javap ouput is less than ideal (e.g. 
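Earlier in this patch, MainGenericRunner builds the code to run from the -i file contents followed by the -e expression, so the -e snippet can use definitions loaded via -i. A minimal standalone sketch of that combination (the helper name is hypothetical, not part of the patch):

    def combinedCode(loadedFiles: List[String], executed: List[String]): String =
      (loadedFiles ++ executed).mkString("\n\n")

    // combinedCode(List("def twice(i: Int) = 2 * i"), List("println(twice(21))"))
    // joins the -i file contents and the -e code with a blank line in between.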
when comparing versions of the same classfile) are: // (a) unreadable pickle; @@ -2519,7 +2510,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { if (nextBlock != whereto) jcode goTo labels(whereto) // SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH. - // If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range" + // If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range" else if (newNormal.isJumpOnly(b) && m.exh.exists(eh => eh.covers(b))) { debugwarn("Had a jump only block that wasn't collapsed") emit(asm.Opcodes.NOP) @@ -3084,7 +3075,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { assert(nonICode.hasNext, "empty block") nonICode.next.isInstanceOf[JUMP] } - + /** * Returns the list of instructions in a block that follow all ICode only instructions, * where an ICode only instruction is one that won't make it to the JVM @@ -3101,7 +3092,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { * Returns the target of a block that is "jump only" which is defined * as being a block that consists only of 0 or more instructions that * won't make it to the JVM followed by a JUMP. - * + * * @param b The basic block to examine * @return Some(target) if b is a "jump only" block or None if it's not */ @@ -3150,12 +3141,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { def rephraseGotos(detour: mutable.Map[BasicBlock, BasicBlock]) { def lookup(b: BasicBlock) = detour.getOrElse(b, b) - + m.code.startBlock = lookup(m.code.startBlock) - + for(eh <- m.exh) eh.setStartBlock(lookup(eh.startBlock)) - + for (b <- m.blocks) { def replaceLastInstruction(i: Instruction) = { if (b.lastInstruction != i) { @@ -3164,18 +3155,18 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { b.replaceInstruction(idxLast, i) } } - + b.lastInstruction match { case JUMP(whereto) => replaceLastInstruction(JUMP(lookup(whereto))) case CJUMP(succ, fail, cond, kind) => replaceLastInstruction(CJUMP(lookup(succ), lookup(fail), cond, kind)) - case CZJUMP(succ, fail, cond, kind) => + case CZJUMP(succ, fail, cond, kind) => replaceLastInstruction(CZJUMP(lookup(succ), lookup(fail), cond, kind)) case SWITCH(tags, labels) => val newLabels = (labels map lookup) replaceLastInstruction(SWITCH(tags, newLabels)) - case _ => () + case _ => () } } } @@ -3203,7 +3194,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // blocks for (key <- detour.keySet) { // we use the Robert Floyd's classic Tortoise and Hare algorithm - @tailrec + @tailrec def findDestination(tortoise: BasicBlock, hare: BasicBlock): BasicBlock = { if (tortoise == hare) // cycle detected, map key to key @@ -3227,7 +3218,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { } detour } - + val detour = computeDetour rephraseGotos(detour) @@ -3235,33 +3226,33 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { val (remappings, cycles) = detour partition {case (source, target) => source != target} for ((source, target) <- remappings) { debuglog(s"Will elide jump only block $source because it can be jumped around to get to $target.") - if (m.startBlock == source) debugwarn("startBlock should have been re-wired by now") + if (m.startBlock == source) 
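The computeDetour/findDestination code above chases chains of jump-only blocks and uses Floyd's tortoise-and-hare technique to bail out on cycles. A self-contained sketch of the same idea over a plain successor map (the generic type B stands in for BasicBlock; this is an illustration, not the compiler's code):

    import scala.annotation.tailrec

    // Follow `next` from `start`; return the end of the chain, or `start`
    // itself when the chain loops. The tortoise advances one step per
    // iteration and the hare two, so they can only meet inside a cycle.
    def follow[B](next: Map[B, B], start: B): B = {
      @tailrec def go(tortoise: B, hare: B): B =
        next.get(hare) match {
          case None => hare                        // chain ends here
          case Some(h1) =>
            next.get(h1) match {
              case None => h1
              case Some(h2) =>
                val t = next(tortoise)             // safe: the tortoise trails the hare
                if (t == h2) start                 // met inside a cycle: keep the key
                else go(t, h2)
            }
        }
      go(start, start)
    }

    // follow(Map("a" -> "b", "b" -> "c"), "a") == "c"
    // follow(Map("a" -> "b", "b" -> "a"), "a") == "a"   (cycle: key maps to itself)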
debugwarn("startBlock should have been re-wired by now") } val sources = remappings.keySet val targets = remappings.values.toSet val intersection = sources intersect targets - + if (intersection.nonEmpty) debugwarn(s"contradiction: we seem to have some source and target overlap in blocks ${intersection.mkString}. Map was ${detour.mkString}") - + for ((source, _) <- cycles) { debuglog(s"Block $source is in a do-nothing infinite loop. Did the user write 'while(true){}'?") } } } - + /** * Removes all blocks that are unreachable in a method using a standard reachability analysis. */ def elimUnreachableBlocks(m: IMethod) { - assert(m.hasCode, "code-less method") - + assert(m.hasCode, "code-less method") + // assume nothing is reachable until we prove it can be reached val reachable = mutable.Set[BasicBlock]() - + // the set of blocks that we know are reachable but have // yet to be marked reachable, initially only the start block val worklist = mutable.Set(m.startBlock) - + while (worklist.nonEmpty) { val block = worklist.head worklist remove block @@ -3271,7 +3262,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { // think are unreachable worklist ++= (block.successors filterNot reachable) } - + // exception handlers need to be told not to cover unreachable blocks // and exception handlers that no longer cover any blocks need to be // removed entirely @@ -3282,9 +3273,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM { unusedExceptionHandlers += exh } } - + // remove the unusued exception handler references - if (settings.debug.value) + if (settings.debug.value) for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks") m.exh = m.exh filterNot unusedExceptionHandlers diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala deleted file mode 100644 index e909cd945d..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala +++ /dev/null @@ -1,108 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - */ - -package scala.tools.nsc -package interpreter - -import scala.tools.nsc.io.AbstractFile -import util.ScalaClassLoader -import java.net.{ URL, URLConnection, URLStreamHandler } -import scala.collection.{ mutable, immutable } - -/** - * A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}. 
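elimUnreachableBlocks above is a textbook worklist reachability pass: assume nothing is reachable, seed the worklist with the start block, and keep adding unvisited successors. A generic sketch of the same computation (the successors function is a stand-in for BasicBlock.successors):

    import scala.collection.mutable

    def reachableFrom[B](start: B, successors: B => List[B]): Set[B] = {
      val reachable = mutable.Set.empty[B]
      val worklist  = mutable.Queue(start)
      while (worklist.nonEmpty) {
        val block = worklist.dequeue()
        if (reachable.add(block))                  // add returns false if already visited
          worklist ++= successors(block).filterNot(reachable)
      }
      reachable.toSet
    }

    // With edges 1 -> 2, 2 -> 3 and 4 -> 1, reachableFrom(1, ...) == Set(1, 2, 3);
    // block 4 (and any exception handler covering only block 4) would be eliminated.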
- * - * @author Lex Spoon - */ -class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) - extends ClassLoader(parent) - with ScalaClassLoader -{ - protected def classNameToPath(name: String): String = - if (name endsWith ".class") name - else name.replace('.', '/') + ".class" - - protected def findAbstractFile(name: String): AbstractFile = { - var file: AbstractFile = root - val pathParts = name split '/' - - for (dirPart <- pathParts.init) { - file = file.lookupName(dirPart, directory = true) - if (file == null) - return null - } - - file.lookupName(pathParts.last, directory = false) match { - case null => null - case file => file - } - } - - protected def dirNameToPath(name: String): String = - name.replace('.', '/') - - protected def findAbstractDir(name: String): AbstractFile = { - var file: AbstractFile = root - val pathParts = dirNameToPath(name) split '/' - - for (dirPart <- pathParts) { - file = file.lookupName(dirPart, directory = true) - if (file == null) - return null - } - - file - } - - // parent delegation in JCL uses getResource; so either add parent.getResAsStream - // or implement findResource, which we do here as a study in scarlet (my complexion - // after looking at CLs and URLs) - override def findResource(name: String): URL = findAbstractFile(name) match { - case null => null - case file => new URL(null, "repldir:" + file.path, new URLStreamHandler { - override def openConnection(url: URL): URLConnection = new URLConnection(url) { - override def connect() { } - override def getInputStream = file.input - } - }) - } - - // this inverts delegation order: super.getResAsStr calls parent.getRes if we fail - override def getResourceAsStream(name: String) = findAbstractFile(name) match { - case null => super.getResourceAsStream(name) - case file => file.input - } - // ScalaClassLoader.classBytes uses getResAsStream, so we'll try again before delegating - override def classBytes(name: String): Array[Byte] = findAbstractFile(classNameToPath(name)) match { - case null => super.classBytes(name) - case file => file.toByteArray - } - override def findClass(name: String): JClass = { - val bytes = classBytes(name) - if (bytes.length == 0) - throw new ClassNotFoundException(name) - else - defineClass(name, bytes, 0, bytes.length) - } - - private val packages = mutable.Map[String, Package]() - - override def definePackage(name: String, specTitle: String, specVersion: String, specVendor: String, implTitle: String, implVersion: String, implVendor: String, sealBase: URL): Package = { - throw new UnsupportedOperationException() - } - - override def getPackage(name: String): Package = { - findAbstractDir(name) match { - case null => super.getPackage(name) - case file => packages.getOrElseUpdate(name, { - val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader]) - ctor.setAccessible(true) - ctor.newInstance(name, null, null, null, null, null, null, null, this) - }) - } - } - - override def getPackages(): Array[Package] = - root.iterator.filter(_.isDirectory).map(dir => getPackage(dir.name)).toArray -} diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala deleted file mode 100644 index e66e4eff29..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* NSC -- 
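The deleted AbstractFileClassLoader resolves a class name like a.b.C to the file a/b/C.class by walking one directory level per path segment. A toy sketch of that lookup (Dir and Leaf are hypothetical stand-ins for AbstractFile, used only to illustrate the traversal):

    sealed trait Node
    final case class Dir(entries: Map[String, Node]) extends Node
    final case class Leaf(bytes: Array[Byte])        extends Node

    def classNameToPath(name: String): String =
      if (name endsWith ".class") name
      else name.replace('.', '/') + ".class"

    // Walk a directory entry per segment, then look up the final file name.
    def find(root: Dir, className: String): Option[Leaf] = {
      def walk(node: Node, parts: List[String]): Option[Leaf] = (node, parts) match {
        case (leaf: Leaf, Nil)       => Some(leaf)
        case (Dir(es), part :: rest) => es.get(part).flatMap(walk(_, rest))
        case _                       => None
      }
      walk(root, classNameToPath(className).split('/').toList)
    }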
new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -class AbstractOrMissingHandler[T](onError: String => Unit, value: T) extends PartialFunction[Throwable, T] { - def isDefinedAt(t: Throwable) = t match { - case _: AbstractMethodError => true - case _: NoSuchMethodError => true - case _: MissingRequirementError => true - case _: NoClassDefFoundError => true - case _ => false - } - def apply(t: Throwable) = t match { - case x @ (_: AbstractMethodError | _: NoSuchMethodError | _: NoClassDefFoundError) => - onError(""" - |Failed to initialize compiler: %s. - |This is most often remedied by a full clean and recompile. - |Otherwise, your classpath may continue bytecode compiled by - |different and incompatible versions of scala. - |""".stripMargin.format(x.getClass.getName split '.' last) - ) - x.printStackTrace() - value - case x: MissingRequirementError => - onError(""" - |Failed to initialize compiler: %s not found. - |** Note that as of 2.8 scala does not assume use of the java classpath. - |** For the old behavior pass -usejavacp to scala, or if using a Settings - |** object programatically, settings.usejavacp.value = true.""".stripMargin.format(x.req) - ) - value - } -} - -object AbstractOrMissingHandler { - def apply[T]() = new AbstractOrMissingHandler[T](Console println _, null.asInstanceOf[T]) -} diff --git a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala deleted file mode 100644 index 48890a21c6..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import java.lang.reflect -import util.ScalaClassLoader -import ScalaClassLoader.appLoader -import scala.reflect.NameTransformer._ - -object ByteCode { - /** Until I figure out why I can't get scalap onto the classpath such - * that the compiler will bootstrap, we have to use reflection. - */ - private lazy val DECODER: Option[AnyRef] = - for (clazz <- appLoader.tryToLoadClass[AnyRef]("scala.tools.scalap.Decode$")) yield - clazz.getField(MODULE_INSTANCE_NAME).get(null) - - private def decoderMethod(name: String, args: JClass*): Option[reflect.Method] = { - for (decoder <- DECODER ; m <- Option(decoder.getClass.getMethod(name, args: _*))) yield m - } - - private lazy val aliasMap = { - for (module <- DECODER ; method <- decoderMethod("typeAliases", classOf[String])) yield - method.invoke(module, _: String).asInstanceOf[Option[Map[String, String]]] - } - - /** Scala sig bytes. - */ - def scalaSigBytesForPath(path: String) = - for { - module <- DECODER - method <- decoderMethod("scalaSigAnnotationBytes", classOf[String]) - names <- method.invoke(module, path).asInstanceOf[Option[Array[Byte]]] - } - yield names - - def aliasesForPackage(pkg: String) = aliasMap flatMap (_(pkg)) -} diff --git a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala b/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala deleted file mode 100644 index 0ab92ab769..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala +++ /dev/null @@ -1,13 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Lex Spoon - */ - -package scala.tools.nsc -package interpreter - -/** A command line for the interpreter. 
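AbstractOrMissingHandler above is a PartialFunction[Throwable, T], which Scala accepts directly as a catch handler; ILoop later does exactly that with `try loop() catch AbstractOrMissingHandler()`. A tiny standalone illustration of the pattern (names hypothetical):

    val onLinkageProblem: PartialFunction[Throwable, Int] = {
      case e: NoSuchMethodError =>
        Console.err.println("recovering from: " + e)
        -1
    }

    def risky(): Int = throw new NoSuchMethodError("demo")

    val result = try risky() catch onLinkageProblem   // result == -1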
- */ -class CommandLine(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) { - override def cmdName = "scala" -} diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala deleted file mode 100644 index 84a5cb49ae..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import Completion._ - -/** An implementation-agnostic completion interface which makes no - * reference to the jline classes. - */ -trait Completion { - type ExecResult - def resetVerbosity(): Unit - def completer(): ScalaCompleter -} -object NoCompletion extends Completion { - type ExecResult = Nothing - def resetVerbosity() = () - def completer() = NullCompleter -} - -object Completion { - case class Candidates(cursor: Int, candidates: List[String]) { } - val NoCandidates = Candidates(-1, Nil) - - object NullCompleter extends ScalaCompleter { - def complete(buffer: String, cursor: Int): Candidates = NoCandidates - } - trait ScalaCompleter { - def complete(buffer: String, cursor: Int): Candidates - } - - def looksLikeInvocation(code: String) = ( - (code != null) - && (code startsWith ".") - && !(code == ".") - && !(code startsWith "./") - && !(code startsWith "..") - ) - object Forwarder { - def apply(forwardTo: () => Option[CompletionAware]): CompletionAware = new CompletionAware { - def completions(verbosity: Int) = forwardTo() map (_ completions verbosity) getOrElse Nil - override def follow(s: String) = forwardTo() flatMap (_ follow s) - } - } -} diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala deleted file mode 100644 index 3dd5d93390..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -/** An interface for objects which are aware of tab completion and - * will supply their own candidates and resolve their own paths. - */ -trait CompletionAware { - /** The complete list of unqualified Strings to which this - * object will complete. - */ - def completions(verbosity: Int): List[String] - - /** The next completor in the chain. - */ - def follow(id: String): Option[CompletionAware] = None - - /** A list of useful information regarding a specific uniquely - * identified completion. This is specifically written for the - * following situation, but should be useful elsewhere too: - * - * x.y.z.methodName - * - * If "methodName" is among z's completions, and verbosity > 0 - * indicating tab has been pressed twice consecutively, then we - * call alternativesFor and show a list of overloaded method - * signatures. - */ - def alternativesFor(id: String): List[String] = Nil - - /** Given string 'buf', return a list of all the strings - * to which it can complete. This may involve delegating - * to other CompletionAware objects. 
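Completion.looksLikeInvocation above decides when a line beginning with "." should be treated as a method call on the previous REPL result (the loop later prepends the most recent result variable). Restated standalone, with a few example inputs:

    def looksLikeInvocation(code: String): Boolean =
      (code != null) && (code startsWith ".") &&
        (code != ".") && !(code startsWith "./") && !(code startsWith "..")

    assert(looksLikeInvocation(".toUpperCase"))   // becomes resN.toUpperCase
    assert(!looksLikeInvocation("./run.sh"))      // a relative path, not an invocation
    assert(!looksLikeInvocation(".."))            // likewise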
- */ - def completionsFor(parsed: Parsed): List[String] = { - import parsed.{ buffer, verbosity } - val comps = completions(verbosity) filter (_ startsWith buffer) - val exact = comps contains buffer - - val results = - if (parsed.isEmpty) comps - else if (parsed.isUnqualified && !parsed.isLastDelimiter) - if (verbosity > 0 && exact) alternativesFor(buffer) - else comps - else follow(parsed.bufferHead) map (_ completionsFor parsed.bufferTail) getOrElse Nil - - results.sorted - } -} diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala deleted file mode 100644 index d24ad60974..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -/** This has a lot of duplication with other methods in Symbols and Types, - * but repl completion utility is very sensitive to precise output. Best - * thing would be to abstract an interface for how such things are printed, - * as is also in progress with error messages. - */ -trait CompletionOutput { - val global: Global - - import global._ - import definitions.{ isTupleType, isFunctionType, isRepeatedParamType } - - /** Reducing fully qualified noise for some common packages. - */ - val typeTransforms = List( - "java.lang." -> "", - "scala.collection.immutable." -> "immutable.", - "scala.collection.mutable." -> "mutable.", - "scala.collection.generic." -> "generic." - ) - - def quietString(tp: String): String = - typeTransforms.foldLeft(tp) { - case (str, (prefix, replacement)) => - if (str startsWith prefix) replacement + (str stripPrefix prefix) - else str - } - - class MethodSymbolOutput(method: Symbol) { - val pkg = method.ownerChain find (_.isPackageClass) map (_.fullName) getOrElse "" - - def relativize(str: String): String = quietString(str stripPrefix (pkg + ".")) - def relativize(tp: Type): String = relativize(tp.dealiasWiden.toString) - - def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]") - def parenList(params: List[Any]) = params.mkString("(", ", ", ")") - - def methodTypeToString(mt: MethodType) = - (mt.paramss map paramsString mkString "") + ": " + relativize(mt.finalResultType) - - def typeToString(tp: Type): String = relativize( - tp match { - case x if isFunctionType(x) => functionString(x) - case x if isTupleType(x) => tupleString(x) - case x if isRepeatedParamType(x) => typeToString(x.typeArgs.head) + "*" - case mt @ MethodType(_, _) => methodTypeToString(mt) - case x => x.toString - } - ) - - def tupleString(tp: Type) = parenList(tp.dealiasWiden.typeArgs map relativize) - def functionString(tp: Type) = tp.dealiasWiden.typeArgs match { - case List(t, r) => t + " => " + r - case xs => parenList(xs.init) + " => " + xs.last - } - - def tparamsString(tparams: List[Symbol]) = braceList(tparams map (_.defString)) - def paramsString(params: List[Symbol]) = { - def paramNameString(sym: Symbol) = if (sym.isSynthetic) "" else sym.nameString + ": " - def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.dealiasWiden) - - val isImplicit = params.nonEmpty && params.head.isImplicit - val strs = (params map paramString) match { - case x :: xs if isImplicit => ("implicit " + x) :: xs - case xs => xs - } - parenList(strs) - } - - def methodString() = - method.keyString + " " + 
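CompletionOutput.quietString above trims well-known package prefixes so completion candidates stay readable. Concretely, with the same transform table:

    val typeTransforms = List(
      "java.lang."                  -> "",
      "scala.collection.immutable." -> "immutable.",
      "scala.collection.mutable."   -> "mutable."
    )

    def quietString(tp: String): String =
      typeTransforms.foldLeft(tp) { case (str, (prefix, replacement)) =>
        if (str startsWith prefix) replacement + (str stripPrefix prefix) else str
      }

    // quietString("java.lang.String")                == "String"
    // quietString("scala.collection.mutable.Buffer") == "mutable.Buffer"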
method.nameString + (method.info.dealiasWiden match { - case NullaryMethodType(resType) => ": " + typeToString(resType) - case PolyType(tparams, resType) => tparamsString(tparams) + typeToString(resType) - case mt @ MethodType(_, _) => methodTypeToString(mt) - case x => x.toString - }) - } -} diff --git a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala deleted file mode 100644 index 48af261937..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import scala.tools.jline.console.{ ConsoleReader, CursorBuffer } - -trait ConsoleReaderHelper extends ConsoleReader { - def terminal = getTerminal() - def width = terminal.getWidth() - def height = terminal.getHeight() - - def readOneKey(prompt: String): Int - def eraseLine(): Unit - - private val marginSize = 3 - private def morePrompt = "--More--" - private def emulateMore(): Int = { - val key = readOneKey(morePrompt) - try key match { - case '\r' | '\n' => 1 - case 'q' => -1 - case _ => height - 1 - } - finally { - eraseLine() - // TODO: still not quite managing to erase --More-- and get - // back to a scala prompt without another keypress. - if (key == 'q') { - putString(getPrompt()) - redrawLine() - flush() - } - } - } - - override def printColumns(items: JCollection[_ <: CharSequence]): Unit = - printColumns(items: List[String]) - - def printColumns(items: List[String]): Unit = { - if (items forall (_ == "")) - return - - val longest = items map (_.length) max - var linesLeft = if (isPaginationEnabled()) height - 1 else Int.MaxValue - val columnSize = longest + marginSize - val padded = items map ("%-" + columnSize + "s" format _) - val groupSize = 1 max (width / columnSize) // make sure it doesn't divide to 0 - - padded grouped groupSize foreach { xs => - println(xs.mkString) - linesLeft -= 1 - if (linesLeft <= 0) { - linesLeft = emulateMore() - if (linesLeft < 0) - return - } - } - } -} diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala deleted file mode 100644 index e88a044931..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import scala.tools.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList } - -class JLineDelimiter extends ArgumentDelimiter { - def toJLine(args: List[String], cursor: Int) = args match { - case Nil => new ArgumentList(new Array[String](0), 0, 0, cursor) - case xs => new ArgumentList(xs.toArray, xs.size - 1, xs.last.length, cursor) - } - - def delimit(buffer: CharSequence, cursor: Int) = { - val p = Parsed(buffer.toString, cursor) - toJLine(p.args, cursor) - } - def isDelimiter(buffer: CharSequence, cursor: Int) = Parsed(buffer.toString, cursor).isDelimiter -} - -trait Delimited { - self: Parsed => - - def delimited: Char => Boolean - def escapeChars: List[Char] = List('\\') - - /** Break String into args based on delimiting function. 
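ConsoleReaderHelper.printColumns above lays completion candidates out in fixed-width columns and paginates with --More--. The core layout arithmetic, separated out as a sketch (assumes a non-empty item list, as the original guards for):

    def columnRows(items: List[String], width: Int, margin: Int = 3): List[String] = {
      val columnSize = items.map(_.length).max + margin    // widest entry plus margin
      val perRow     = 1 max (width / columnSize)          // never divide down to zero columns
      items.map(s => s"%-${columnSize}s".format(s))
           .grouped(perRow)
           .map(_.mkString)
           .toList
    }

    // columnRows(List("foo", "bar", "bazzz"), width = 20).foreach(println)
    // prints two padded entries per row, then the remainder on its own row.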
- */ - protected def toArgs(s: String): List[String] = - if (s == "") Nil - else (s indexWhere isDelimiterChar) match { - case -1 => List(s) - case idx => (s take idx) :: toArgs(s drop (idx + 1)) - } - - def isDelimiterChar(ch: Char) = delimited(ch) - def isEscapeChar(ch: Char): Boolean = escapeChars contains ch -} diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala deleted file mode 100644 index 9edd54b939..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala +++ /dev/null @@ -1,99 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import scala.tools.nsc.ast.parser.Tokens.EOF - -trait ExprTyper { - val repl: IMain - - import repl._ - import global.{ reporter => _, Import => _, _ } - import definitions._ - import syntaxAnalyzer.UnitParser - import naming.freshInternalVarName - - object codeParser { - val global: repl.global.type = repl.global - def applyRule[T](code: String, rule: UnitParser => T): T = { - reporter.reset() - val scanner = newUnitParser(code) - val result = rule(scanner) - - if (!reporter.hasErrors) - scanner.accept(EOF) - - result - } - def stmts(code: String) = applyRule(code, _.templateStats()) - } - - /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */ - def parse(line: String): Option[List[Tree]] = debugging(s"""parse("$line")""") { - var isIncomplete = false - reporter.withIncompleteHandler((_, _) => isIncomplete = true) { - val trees = codeParser.stmts(line) - if (reporter.hasErrors) Some(Nil) - else if (isIncomplete) None - else Some(trees) - } - } - - def symbolOfLine(code: String): Symbol = { - def asExpr(): Symbol = { - val name = freshInternalVarName() - // Typing it with a lazy val would give us the right type, but runs - // into compiler bugs with things like existentials, so we compile it - // behind a def and strip the NullaryMethodType which wraps the expr. - val line = "def " + name + " = " + code - - interpretSynthetic(line) match { - case IR.Success => - val sym0 = symbolOfTerm(name) - // drop NullaryMethodType - sym0.cloneSymbol setInfo exitingTyper(sym0.info.finalResultType) - case _ => NoSymbol - } - } - def asDefn(): Symbol = { - val old = repl.definedSymbolList.toSet - - interpretSynthetic(code) match { - case IR.Success => - repl.definedSymbolList filterNot old match { - case Nil => NoSymbol - case sym :: Nil => sym - case syms => NoSymbol.newOverloaded(NoPrefix, syms) - } - case _ => NoSymbol - } - } - def asError(): Symbol = { - interpretSynthetic(code) - NoSymbol - } - beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError() - } - - private var typeOfExpressionDepth = 0 - def typeOfExpression(expr: String, silent: Boolean = true): Type = { - if (typeOfExpressionDepth > 2) { - repldbg("Terminating typeOfExpression recursion for expression: " + expr) - return NoType - } - typeOfExpressionDepth += 1 - // Don't presently have a good way to suppress undesirable success output - // while letting errors through, so it is first trying it silently: if there - // is an error, and errors are desired, then it re-evaluates non-silently - // to induce the error message. 
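The Delimited trait above splits the completion buffer into arguments at delimiter characters. The same recursion as a standalone function, with a concrete delimiter:

    def toArgs(s: String, isDelimiter: Char => Boolean): List[String] =
      if (s == "") Nil
      else s.indexWhere(isDelimiter) match {
        case -1  => List(s)
        case idx => s.take(idx) :: toArgs(s.drop(idx + 1), isDelimiter)
      }

    // toArgs(":load src/Foo.scala", _ == ' ') == List(":load", "src/Foo.scala")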
- try beSilentDuring(symbolOfLine(expr).tpe) match { - case NoType if !silent => symbolOfLine(expr).tpe // generate error - case tpe => tpe - } - finally typeOfExpressionDepth -= 1 - } -} diff --git a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala b/src/compiler/scala/tools/nsc/interpreter/Formatting.scala deleted file mode 100644 index 43e653edfd..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import util.stringFromWriter - -trait Formatting { - def prompt: String - - def spaces(code: String): String = { - /** Heuristic to avoid indenting and thereby corrupting """-strings and XML literals. */ - val tokens = List("\"\"\"", "") - val noIndent = (code contains "\n") && (tokens exists code.contains) - - if (noIndent) "" - else prompt drop 1 map (_ => ' ') - } - /** Indent some code by the width of the scala> prompt. - * This way, compiler error messages read better. - */ - def indentCode(code: String) = { - val indent = spaces(code) - stringFromWriter(str => - for (line <- code.lines) { - str print indent - str print (line + "\n") - str.flush() - } - ) - } -} diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala deleted file mode 100644 index 2ea255319d..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ /dev/null @@ -1,749 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Alexander Spoon - */ - -package scala.tools.nsc -package interpreter - -import Predef.{ println => _, _ } -import java.io.{ BufferedReader, FileReader } -import session._ -import scala.annotation.tailrec -import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName } -import scala.tools.util.{ Javap } -import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream } -import io.{ File, Directory } -import util.ScalaClassLoader -import ScalaClassLoader._ -import scala.tools.util._ -import scala.language.{implicitConversions, existentials} -import scala.reflect.classTag -import scala.tools.reflect.StdRuntimeTags._ -import scala.concurrent.{ ExecutionContext, Await, Future, future } -import ExecutionContext.Implicits._ - -/** The Scala interactive shell. It provides a read-eval-print loop - * around the Interpreter class. - * After instantiation, clients should call the main() method. - * - * If no in0 is specified, then input will come from the console, and - * the class will attempt to provide input editing feature such as - * input history. - * - * @author Moez A. Abdel-Gawad - * @author Lex Spoon - * @version 1.2 - */ -class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) - extends AnyRef - with LoopCommands -{ - def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out) - def this() = this(None, new JPrintWriter(Console.out, true)) - - @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp - @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i - - var in: InteractiveReader = _ // the input stream from which commands come - var settings: Settings = _ - var intp: IMain = _ - - private var globalFuture: Future[Boolean] = _ - - /** Print a welcome message */ - def printWelcome() { - echo(s""" - |Welcome to Scala $versionString ($javaVmName, Java $javaVersion). 
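Formatting.indentCode above indents multi-line input by the width of the prompt so compiler error positions line up with what the user typed. Stripped of the triple-quote/XML heuristic, the effect is roughly:

    // One space for every prompt character after the first, as in `spaces` above.
    def indent(code: String, prompt: String = "scala> "): String = {
      val pad = " " * (prompt.length - 1)
      code.split("\n").map(pad + _).mkString("\n")
    }

    // indent("val x =\n  1 + 1") prefixes both lines with six spaces.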
- |Type in expressions to have them evaluated. - |Type :help for more information.""".trim.stripMargin - ) - replinfo("[info] started at " + new java.util.Date) - } - - protected def asyncMessage(msg: String) { - if (isReplInfo || isReplPower) - echoAndRefresh(msg) - } - - override def echoCommandMessage(msg: String) { - intp.reporter printUntruncatedMessage msg - } - - lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals]) - def history = in.history - - // classpath entries added via :cp - var addedClasspath: String = "" - - /** A reverse list of commands to replay if the user requests a :replay */ - var replayCommandStack: List[String] = Nil - - /** A list of commands to replay if the user requests a :replay */ - def replayCommands = replayCommandStack.reverse - - /** Record a command for replay should the user request a :replay */ - def addReplay(cmd: String) = replayCommandStack ::= cmd - - def savingReplayStack[T](body: => T): T = { - val saved = replayCommandStack - try body - finally replayCommandStack = saved - } - def savingReader[T](body: => T): T = { - val saved = in - try body - finally in = saved - } - - /** Close the interpreter and set the var to null. */ - def closeInterpreter() { - if (intp ne null) { - intp.close() - intp = null - } - } - - class ILoopInterpreter extends IMain(settings, out) { - outer => - - override lazy val formatting = new Formatting { - def prompt = ILoop.this.prompt - } - override protected def parentClassLoader = - settings.explicitParentLoader.getOrElse( classOf[ILoop].getClassLoader ) - } - - /** Create a new interpreter. */ - def createInterpreter() { - if (addedClasspath != "") - settings.classpath append addedClasspath - - intp = new ILoopInterpreter - } - - /** print a friendly help message */ - def helpCommand(line: String): Result = { - if (line == "") helpSummary() - else uniqueCommand(line) match { - case Some(lc) => echo("\n" + lc.help) - case _ => ambiguousError(line) - } - } - private def helpSummary() = { - val usageWidth = commands map (_.usageMsg.length) max - val formatStr = "%-" + usageWidth + "s %s" - - echo("All commands can be abbreviated, e.g. :he instead of :help.") - - commands foreach { cmd => - echo(formatStr.format(cmd.usageMsg, cmd.help)) - } - } - private def ambiguousError(cmd: String): Result = { - matchingCommands(cmd) match { - case Nil => echo(cmd + ": no such command. Type :help for help.") - case xs => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?") - } - Result(keepRunning = true, None) - } - private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd) - private def uniqueCommand(cmd: String): Option[LoopCommand] = { - // this lets us add commands willy-nilly and only requires enough command to disambiguate - matchingCommands(cmd) match { - case List(x) => Some(x) - // exact match OK even if otherwise appears ambiguous - case xs => xs find (_.name == cmd) - } - } - - /** Show the history */ - lazy val historyCommand = new LoopCommand("history", "show the history (optional num is commands to show)") { - override def usage = "[num]" - def defaultLines = 20 - - def apply(line: String): Result = { - if (history eq NoHistory) - return "No history available." 
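uniqueCommand above lets any unambiguous prefix select a :command, and an exact name still wins when several commands share the prefix (so :h? works alongside :help and :history). A standalone restatement:

    final case class Cmd(name: String)

    def unique(commands: List[Cmd], typed: String): Option[Cmd] =
      commands.filter(_.name startsWith typed) match {
        case List(x) => Some(x)                    // unambiguous prefix
        case xs      => xs.find(_.name == typed)   // exact match breaks ties
      }

    // unique(List(Cmd("help"), Cmd("history"), Cmd("h?")), "he") == Some(Cmd("help"))
    // unique(List(Cmd("help"), Cmd("history"), Cmd("h?")), "h")  == None (ambiguous)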
- - val xs = words(line) - val current = history.index - val count = try xs.head.toInt catch { case _: Exception => defaultLines } - val lines = history.asStrings takeRight count - val offset = current - lines.size + 1 - - for ((line, index) <- lines.zipWithIndex) - echo("%3d %s".format(index + offset, line)) - } - } - - // When you know you are most likely breaking into the middle - // of a line being typed. This softens the blow. - protected def echoAndRefresh(msg: String) = { - echo("\n" + msg) - in.redrawLine() - } - protected def echo(msg: String) = { - out println msg - out.flush() - } - - /** Search the history */ - def searchHistory(_cmdline: String) { - val cmdline = _cmdline.toLowerCase - val offset = history.index - history.size + 1 - - for ((line, index) <- history.asStrings.zipWithIndex ; if line.toLowerCase contains cmdline) - echo("%d %s".format(index + offset, line)) - } - - private val currentPrompt = Properties.shellPromptString - - /** Prompt to print when awaiting input */ - def prompt = currentPrompt - - import LoopCommand.{ cmd, nullary } - - /** Standard commands **/ - lazy val standardCommands = List( - cmd("cp", "", "add a jar or directory to the classpath", addClasspath), - cmd("help", "[command]", "print this summary or command-specific help", helpCommand), - historyCommand, - cmd("h?", "", "search the history", searchHistory), - cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand), - cmd("implicits", "[-v]", "show the implicits in scope", intp.implicitsCommand), - cmd("javap", "", "disassemble a file or class name", javapCommand), - cmd("load", "", "load and interpret a Scala file", loadCommand), - nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand), - nullary("power", "enable power user mode", powerCmd), - nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)), - nullary("replay", "reset execution and replay all previous commands", replay), - nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand), - shCommand, - nullary("silent", "disable/enable automatic printing of results", verbosity), - cmd("type", "[-v] ", "display the type of an expression without evaluating it", typeCommand), - nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand) - ) - - /** Power user commands */ - lazy val powerCommands: List[LoopCommand] = List( - cmd("phase", "", "set the implicit phase for power commands", phaseCommand) - ) - - private def importsCommand(line: String): Result = { - val tokens = words(line) - val handlers = intp.languageWildcardHandlers ++ intp.importHandlers - - handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach { - case (handler, idx) => - val (types, terms) = handler.importedSymbols partition (_.name.isTypeName) - val imps = handler.implicitSymbols - val found = tokens filter (handler importsSymbolNamed _) - val typeMsg = if (types.isEmpty) "" else types.size + " types" - val termMsg = if (terms.isEmpty) "" else terms.size + " terms" - val implicitMsg = if (imps.isEmpty) "" else imps.size + " are implicit" - val foundMsg = if (found.isEmpty) "" else found.mkString(" // imports: ", ", ", "") - val statsMsg = List(typeMsg, termMsg, implicitMsg) filterNot (_ == "") mkString ("(", ", ", ")") - - intp.reporter.printMessage("%2d) %-30s %s%s".format( - idx + 1, - handler.importString, - statsMsg, - foundMsg - )) - } - } - - private def 
findToolsJar() = { - val jdkPath = Directory(jdkHome) - val jar = jdkPath / "lib" / "tools.jar" toFile - - if (jar isFile) - Some(jar) - else if (jdkPath.isDirectory) - jdkPath.deepFiles find (_.name == "tools.jar") - else None - } - private def addToolsJarToLoader() = { - val cl = findToolsJar() match { - case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader) - case _ => intp.classLoader - } - if (Javap.isAvailable(cl)) { - repldbg(":javap available.") - cl - } - else { - repldbg(":javap unavailable: no tools.jar at " + jdkHome) - intp.classLoader - } - } - - protected def newJavap() = - JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp), Some(intp)) - - private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap()) - - // Still todo: modules. - private def typeCommand(line0: String): Result = { - line0.trim match { - case "" => ":type [-v] " - case s if s startsWith "-v " => intp.typeCommandInternal(s stripPrefix "-v " trim, verbose = true) - case s => intp.typeCommandInternal(s, verbose = false) - } - } - - private def warningsCommand(): Result = { - if (intp.lastWarnings.isEmpty) - "Can't find any cached warnings." - else - intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) } - } - - private def javapCommand(line: String): Result = { - if (javap == null) - ":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome) - else if (line == "") - ":javap [-lcsvp] [path1 path2 ...]" - else - javap(words(line)) foreach { res => - if (res.isError) return "Failed: " + res.value - else res.show() - } - } - - private def pathToPhaseWrapper = intp.originalPath("$r") + ".phased.atCurrent" - - private def phaseCommand(name: String): Result = { - val phased: Phased = power.phased - import phased.NoPhaseName - - if (name == "clear") { - phased.set(NoPhaseName) - intp.clearExecutionWrapper() - "Cleared active phase." - } - else if (name == "") phased.get match { - case NoPhaseName => "Usage: :phase (e.g. typer, erasure.next, erasure+3)" - case ph => "Active phase is '%s'. (To clear, :phase clear)".format(phased.get) - } - else { - val what = phased.parse(name) - if (what.isEmpty || !phased.set(what)) - "'" + name + "' does not appear to represent a valid phase." - else { - intp.setExecutionWrapper(pathToPhaseWrapper) - val activeMessage = - if (what.toString.length == name.length) "" + what - else "%s (%s)".format(what, name) - - "Active phase is now: " + activeMessage - } - } - } - - /** Available commands */ - def commands: List[LoopCommand] = standardCommands ++ ( - if (isReplPower) powerCommands else Nil - ) - - val replayQuestionMessage = - """|That entry seems to have slain the compiler. Shall I replay - |your session? I can re-run each line except the last one. 
- |[y/n] - """.trim.stripMargin - - private val crashRecovery: PartialFunction[Throwable, Boolean] = { - case ex: Throwable => - echo(intp.global.throwableAsString(ex)) - - ex match { - case _: NoSuchMethodError | _: NoClassDefFoundError => - echo("\nUnrecoverable error.") - throw ex - case _ => - def fn(): Boolean = - try in.readYesOrNo(replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() }) - catch { case _: RuntimeException => false } - - if (fn()) replay() - else echo("\nAbandoning crashed session.") - } - true - } - - // return false if repl should exit - def processLine(line: String): Boolean = { - import scala.concurrent.duration._ - Await.ready(globalFuture, 60.seconds) - - (line ne null) && (command(line) match { - case Result(false, _) => false - case Result(_, Some(line)) => addReplay(line) ; true - case _ => true - }) - } - - private def readOneLine() = { - out.flush() - in readLine prompt - } - - /** The main read-eval-print loop for the repl. It calls - * command() for each line of input, and stops when - * command() returns false. - */ - @tailrec final def loop() { - if ( try processLine(readOneLine()) catch crashRecovery ) - loop() - } - - /** interpret all lines from a specified file */ - def interpretAllFrom(file: File) { - savingReader { - savingReplayStack { - file applyReader { reader => - in = SimpleReader(reader, out, interactive = false) - echo("Loading " + file + "...") - loop() - } - } - } - } - - /** create a new interpreter and replay the given commands */ - def replay() { - reset() - if (replayCommandStack.isEmpty) - echo("Nothing to replay.") - else for (cmd <- replayCommands) { - echo("Replaying: " + cmd) // flush because maybe cmd will have its own output - command(cmd) - echo("") - } - } - def resetCommand() { - echo("Resetting interpreter state.") - if (replayCommandStack.nonEmpty) { - echo("Forgetting this session history:\n") - replayCommands foreach echo - echo("") - replayCommandStack = Nil - } - if (intp.namedDefinedTerms.nonEmpty) - echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", ")) - if (intp.definedTypes.nonEmpty) - echo("Forgetting defined types: " + intp.definedTypes.mkString(", ")) - - reset() - } - def reset() { - intp.reset() - unleashAndSetPhase() - } - - /** fork a shell and run a command */ - lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])") { - override def usage = "" - def apply(line: String): Result = line match { - case "" => showUsage() - case _ => - val toRun = classOf[ProcessResult].getName + "(" + string2codeQuoted(line) + ")" - intp interpret toRun - () - } - } - - def withFile(filename: String)(action: File => Unit) { - val f = File(filename) - - if (f.exists) action(f) - else echo("That file does not exist") - } - - def loadCommand(arg: String) = { - var shouldReplay: Option[String] = None - withFile(arg)(f => { - interpretAllFrom(f) - shouldReplay = Some(":load " + arg) - }) - Result(keepRunning = true, shouldReplay) - } - - def addClasspath(arg: String): Unit = { - val f = File(arg).normalize - if (f.exists) { - addedClasspath = ClassPath.join(addedClasspath, f.path) - val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath) - echo("Added '%s'. Your new classpath is:\n\"%s\"".format(f.path, totalClasspath)) - replay() - } - else echo("The path '" + f + "' doesn't seem to exist.") - } - - def powerCmd(): Result = { - if (isReplPower) "Already in power mode." 
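The main loop above is a tail-recursive read/process cycle whose crash handling is itself a PartialFunction value handed to catch. Reduced to its shape (names hypothetical):

    import scala.annotation.tailrec

    def driver(readLine: () => String,
               process: String => Boolean,
               recover: PartialFunction[Throwable, Boolean]): Unit = {
      @tailrec def loop(): Unit =
        if (try process(readLine()) catch recover) loop()
      loop()
    }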
- else enablePowerMode(isDuringInit = false) - } - def enablePowerMode(isDuringInit: Boolean) = { - replProps.power setValue true - unleashAndSetPhase() - asyncEcho(isDuringInit, power.banner) - } - private def unleashAndSetPhase() { - if (isReplPower) { - power.unleash() - // Set the phase to "typer" - intp beSilentDuring phaseCommand("typer") - } - } - - def asyncEcho(async: Boolean, msg: => String) { - if (async) asyncMessage(msg) - else echo(msg) - } - - def verbosity() = { - val old = intp.printResults - intp.printResults = !old - echo("Switched " + (if (old) "off" else "on") + " result printing.") - } - - /** Run one command submitted by the user. Two values are returned: - * (1) whether to keep running, (2) the line to record for replay, - * if any. */ - def command(line: String): Result = { - if (line startsWith ":") { - val cmd = line.tail takeWhile (x => !x.isWhitespace) - uniqueCommand(cmd) match { - case Some(lc) => lc(line.tail stripPrefix cmd dropWhile (_.isWhitespace)) - case _ => ambiguousError(cmd) - } - } - else if (intp.global == null) Result(keepRunning = false, None) // Notice failure to create compiler - else Result(keepRunning = true, interpretStartingWith(line)) - } - - private def readWhile(cond: String => Boolean) = { - Iterator continually in.readLine("") takeWhile (x => x != null && cond(x)) - } - - def pasteCommand(): Result = { - echo("// Entering paste mode (ctrl-D to finish)\n") - val code = readWhile(_ => true) mkString "\n" - echo("\n// Exiting paste mode, now interpreting.\n") - intp interpret code - () - } - - private object paste extends Pasted { - val ContinueString = " | " - val PromptString = "scala> " - - def interpret(line: String): Unit = { - echo(line.trim) - intp interpret line - echo("") - } - - def transcript(start: String) = { - echo("\n// Detected repl transcript paste: ctrl-D to finish.\n") - apply(Iterator(start) ++ readWhile(_.trim != PromptString.trim)) - } - } - import paste.{ ContinueString, PromptString } - - /** Interpret expressions starting with the first line. - * Read lines until a complete compilation unit is available - * or until a syntax error has been seen. If a full unit is - * read, go ahead and interpret it. Return the full string - * to be recorded for replay, if any. - */ - def interpretStartingWith(code: String): Option[String] = { - // signal completion non-completion input has been received - in.completion.resetVerbosity() - - def reallyInterpret = { - val reallyResult = intp.interpret(code) - (reallyResult, reallyResult match { - case IR.Error => None - case IR.Success => Some(code) - case IR.Incomplete => - if (in.interactive && code.endsWith("\n\n")) { - echo("You typed two blank lines. Starting a new command.") - None - } - else in.readLine(ContinueString) match { - case null => - // we know compilation is going to fail since we're at EOF and the - // parser thinks the input is still incomplete, but since this is - // a file being read non-interactively we want to fail. So we send - // it straight to the compiler for the nice error message. - intp.compileString(code) - None - - case line => interpretStartingWith(code + "\n" + line) - } - }) - } - - /** Here we place ourselves between the user and the interpreter and examine - * the input they are ostensibly submitting. We intervene in several cases: - * - * 1) If the line starts with "scala> " it is assumed to be an interpreter paste. - * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation - * on the previous result. 
- * 3) If the Completion object's execute returns Some(_), we inject that value - * and avoid the interpreter, as it's likely not valid scala code. - */ - if (code == "") None - else if (!paste.running && code.trim.startsWith(PromptString)) { - paste.transcript(code) - None - } - else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") { - interpretStartingWith(intp.mostRecentVar + code) - } - else if (code.trim startsWith "//") { - // line comment, do nothing - None - } - else - reallyInterpret._2 - } - - // runs :load `file` on any files passed via -i - def loadFiles(settings: Settings) = settings match { - case settings: GenericRunnerSettings => - for (filename <- settings.loadfiles.value) { - val cmd = ":load " + filename - command(cmd) - addReplay(cmd) - echo("") - } - case _ => - } - - /** Tries to create a JLineReader, falling back to SimpleReader: - * unless settings or properties are such that it should start - * with SimpleReader. - */ - def chooseReader(settings: Settings): InteractiveReader = { - if (settings.Xnojline.value || Properties.isEmacsShell) - SimpleReader() - else try new JLineReader( - if (settings.noCompletion.value) NoCompletion - else new JLineCompletion(intp) - ) - catch { - case ex @ (_: Exception | _: NoClassDefFoundError) => - echo("Failed to created JLineReader: " + ex + "\nFalling back to SimpleReader.") - SimpleReader() - } - } - - private def loopPostInit() { - in match { - case x: JLineReader => x.consoleReader.postInit - case _ => - } - // Bind intp somewhere out of the regular namespace where - // we can get at it in generated code. - intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain])) - // Auto-run code via some setting. - ( replProps.replAutorunCode.option - flatMap (f => io.File(f).safeSlurp()) - foreach (intp quietRun _) - ) - // classloader and power mode setup - intp.setContextClassLoader() - if (isReplPower) { - replProps.power setValue true - unleashAndSetPhase() - asyncMessage(power.banner) - } - } - def process(settings: Settings): Boolean = savingContextLoader { - this.settings = settings - createInterpreter() - - // sets in to some kind of reader depending on environmental cues - in = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, interactive = true)) - globalFuture = future { - intp.initializeSynchronous() - loopPostInit() - loadFiles(settings) - !intp.reporter.hasErrors - } - printWelcome() - - try loop() - catch AbstractOrMissingHandler() - finally closeInterpreter() - - true - } - - @deprecated("Use `process` instead", "2.9.0") - def main(settings: Settings): Unit = process(settings) //used by sbt -} - -object ILoop { - implicit def loopToInterpreter(repl: ILoop): IMain = repl.intp - - // Designed primarily for use by test code: take a String with a - // bunch of code, and prints out a transcript of what it would look - // like if you'd just typed it into the repl. 
- def runForTranscript(code: String, settings: Settings): String = { - import java.io.{ BufferedReader, StringReader, OutputStreamWriter } - - stringFromStream { ostream => - Console.withOut(ostream) { - val output = new JPrintWriter(new OutputStreamWriter(ostream), true) { - override def write(str: String) = { - // completely skip continuation lines - if (str forall (ch => ch.isWhitespace || ch == '|')) () - // print a newline on empty scala prompts - else if ((str contains '\n') && (str.trim == "scala> ")) super.write("\n") - else super.write(str) - } - } - val input = new BufferedReader(new StringReader(code)) { - override def readLine(): String = { - val s = super.readLine() - // helping out by printing the line being interpreted. - if (s != null) - output.println(s) - s - } - } - val repl = new ILoop(input, output) - if (settings.classpath.isDefault) - settings.classpath.value = sys.props("java.class.path") - - repl process settings - } - } - } - - /** Creates an interpreter loop with default settings and feeds - * the given code to it as input. - */ - def run(code: String, sets: Settings = new Settings): String = { - import java.io.{ BufferedReader, StringReader, OutputStreamWriter } - - stringFromStream { ostream => - Console.withOut(ostream) { - val input = new BufferedReader(new StringReader(code)) - val output = new JPrintWriter(new OutputStreamWriter(ostream), true) - val repl = new ILoop(input, output) - - if (sets.classpath.isDefault) - sets.classpath.value = sys.props("java.class.path") - - repl process sets - } - } - } - def run(lines: List[String]): String = run(lines map (_ + "\n") mkString) -} diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala deleted file mode 100644 index c54b01dbb0..0000000000 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ /dev/null @@ -1,1121 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky - */ - -package scala.tools.nsc -package interpreter - -import Predef.{ println => _, _ } -import util.stringFromWriter -import scala.reflect.internal.util._ -import java.net.URL -import scala.sys.BooleanProp -import scala.tools.nsc.io.AbstractFile -import reporters._ -import scala.tools.util.PathResolver -import scala.tools.nsc.util.ScalaClassLoader -import ScalaClassLoader.URLClassLoader -import scala.tools.nsc.util.Exceptional.unwrap -import scala.collection.{ mutable, immutable } -import IMain._ -import java.util.concurrent.Future -import scala.reflect.runtime.{ universe => ru } -import scala.reflect.{ ClassTag, classTag } -import scala.tools.reflect.StdRuntimeTags._ - -/** An interpreter for Scala code. - * - * The main public entry points are compile(), interpret(), and bind(). - * The compile() method loads a complete Scala file. The interpret() method - * executes one line of Scala code at the request of the user. The bind() - * method binds an object to a variable that can then be used by later - * interpreted code. - * - * The overall approach is based on compiling the requested code and then - * using a Java classloader and Java reflection to run the code - * and access its results. - * - * In more detail, a single compiler instance is used - * to accumulate all successfully compiled or interpreted Scala code. To - * "interpret" a line of code, the compiler generates a fresh object that - * includes the line of code and which has public member(s) to export - * all variables defined by that code. 
To extract the result of an - * interpreted line to show the user, a second "result object" is created - * which imports the variables exported by the above object and then - * exports members called "$eval" and "$print". To accomodate user expressions - * that read from variables or methods defined in previous statements, "import" - * statements are used. - * - * This interpreter shares the strengths and weaknesses of using the - * full compiler-to-Java. The main strength is that interpreted code - * behaves exactly as does compiled code, including running at full speed. - * The main weakness is that redefining classes and methods is not handled - * properly, because rebinding at the Java level is technically difficult. - * - * @author Moez A. Abdel-Gawad - * @author Lex Spoon - */ -class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports { - imain => - - object replOutput extends ReplOutput(settings.Yreploutdir) { } - - @deprecated("Use replOutput.dir instead", "2.11.0") - def virtualDirectory = replOutput.dir - // Used in a test case. - def showDirectory() = replOutput.show(out) - - private[nsc] var printResults = true // whether to print result lines - private[nsc] var totalSilence = false // whether to print anything - private var _initializeComplete = false // compiler is initialized - private var _isInitialized: Future[Boolean] = null // set up initialization future - private var bindExceptions = true // whether to bind the lastException variable - private var _executionWrapper = "" // code to be wrapped around all lines - - /** We're going to go to some trouble to initialize the compiler asynchronously. - * It's critical that nothing call into it until it's been initialized or we will - * run into unrecoverable issues, but the perceived repl startup time goes - * through the roof if we wait for it. So we initialize it with a future and - * use a lazy val to ensure that any attempt to use the compiler object waits - * on the future. - */ - private var _classLoader: AbstractFileClassLoader = null // active classloader - private val _compiler: Global = newCompiler(settings, reporter) // our private compiler - - def compilerClasspath: Seq[URL] = ( - if (isInitializeComplete) global.classPath.asURLs - else new PathResolver(settings).result.asURLs // the compiler's classpath - ) - def settings = initialSettings - // Run the code body with the given boolean settings flipped to true. - def withoutWarnings[T](body: => T): T = beQuietDuring { - val saved = settings.nowarn.value - if (!saved) - settings.nowarn.value = true - - try body - finally if (!saved) settings.nowarn.value = false - } - - /** construct an interpreter that reports to Console */ - def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true)) - def this() = this(new Settings()) - - lazy val formatting: Formatting = new Formatting { - val prompt = Properties.shellPromptString - } - lazy val reporter: ReplReporter = new ReplReporter(this) - - import formatting._ - import reporter.{ printMessage, withoutTruncating } - - // This exists mostly because using the reporter too early leads to deadlock. - private def echo(msg: String) { Console println msg } - private def _initSources = List(new BatchSourceFile("", "class $repl_$init { }")) - private def _initialize() = { - try { - // todo. 
if this crashes, REPL will hang - new _compiler.Run() compileSources _initSources - _initializeComplete = true - true - } - catch AbstractOrMissingHandler() - } - private def tquoted(s: String) = "\"\"\"" + s + "\"\"\"" - private val logScope = scala.sys.props contains "scala.repl.scope" - private def scopelog(msg: String) = if (logScope) Console.err.println(msg) - - // argument is a thunk to execute after init is done - def initialize(postInitSignal: => Unit) { - synchronized { - if (_isInitialized == null) { - _isInitialized = io.spawn { - try _initialize() - finally postInitSignal - } - } - } - } - def initializeSynchronous(): Unit = { - if (!isInitializeComplete) { - _initialize() - assert(global != null, global) - } - } - def isInitializeComplete = _initializeComplete - - /** the public, go through the future compiler */ - lazy val global: Global = { - if (isInitializeComplete) _compiler - else { - // If init hasn't been called yet you're on your own. - if (_isInitialized == null) { - repldbg("Warning: compiler accessed before init set up. Assuming no postInit code.") - initialize(()) - } - // blocks until it is ; false means catastrophic failure - if (_isInitialized.get()) _compiler - else null - } - } - - import global._ - import definitions.{ ObjectClass, termMember, dropNullaryMethod} - - lazy val runtimeMirror = ru.runtimeMirror(classLoader) - - private def noFatal(body: => Symbol): Symbol = try body catch { case _: FatalError => NoSymbol } - - def getClassIfDefined(path: String) = ( - noFatal(runtimeMirror staticClass path) - orElse noFatal(rootMirror staticClass path) - ) - def getModuleIfDefined(path: String) = ( - noFatal(runtimeMirror staticModule path) - orElse noFatal(rootMirror staticModule path) - ) - - implicit class ReplTypeOps(tp: Type) { - def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp) - } - - // TODO: If we try to make naming a lazy val, we run into big time - // scalac unhappiness with what look like cycles. It has not been easy to - // reduce, but name resolution clearly takes different paths. - object naming extends { - val global: imain.global.type = imain.global - } with Naming { - // make sure we don't overwrite their unwisely named res3 etc. 
- def freshUserTermName(): TermName = { - val name = newTermName(freshUserVarName()) - if (replScope containsName name) freshUserTermName() - else name - } - def isInternalTermName(name: Name) = isInternalVarName("" + name) - } - import naming._ - - object deconstruct extends { - val global: imain.global.type = imain.global - } with StructuredTypeStrings - - lazy val memberHandlers = new { - val intp: imain.type = imain - } with MemberHandlers - import memberHandlers._ - - /** Temporarily be quiet */ - def beQuietDuring[T](body: => T): T = { - val saved = printResults - printResults = false - try body - finally printResults = saved - } - def beSilentDuring[T](operation: => T): T = { - val saved = totalSilence - totalSilence = true - try operation - finally totalSilence = saved - } - - def quietRun[T](code: String) = beQuietDuring(interpret(code)) - - /** takes AnyRef because it may be binding a Throwable or an Exceptional */ - private def withLastExceptionLock[T](body: => T, alt: => T): T = { - assert(bindExceptions, "withLastExceptionLock called incorrectly.") - bindExceptions = false - - try beQuietDuring(body) - catch logAndDiscard("withLastExceptionLock", alt) - finally bindExceptions = true - } - - def executionWrapper = _executionWrapper - def setExecutionWrapper(code: String) = _executionWrapper = code - def clearExecutionWrapper() = _executionWrapper = "" - - /** interpreter settings */ - lazy val isettings = new ISettings(this) - - /** Instantiate a compiler. Overridable. */ - protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = { - settings.outputDirs setSingleOutput replOutput.dir - settings.exposeEmptyPackage.value = true - new Global(settings, reporter) with ReplGlobal { override def toString: String = "" } - } - - /** Parent classloader. Overridable. */ - protected def parentClassLoader: ClassLoader = - settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() ) - - /* A single class loader is used for all commands interpreted by this Interpreter. - It would also be possible to create a new class loader for each command - to interpret. The advantages of the current approach are: - - - Expressions are only evaluated one time. This is especially - significant for I/O, e.g. "val x = Console.readLine" - - The main disadvantage is: - - - Objects, classes, and methods cannot be rebound. Instead, definitions - shadow the old ones, and old code objects refer to the old - definitions. - */ - def resetClassLoader() = { - repldbg("Setting new classloader: was " + _classLoader) - _classLoader = null - ensureClassLoader() - } - final def ensureClassLoader() { - if (_classLoader == null) - _classLoader = makeClassLoader() - } - def classLoader: AbstractFileClassLoader = { - ensureClassLoader() - _classLoader - } - - def backticked(s: String): String = ( - (s split '.').toList map { - case "_" => "_" - case s if nme.keywords(newTermName(s)) => s"`$s`" - case s => s - } mkString "." 
- ) - - abstract class PhaseDependentOps { - def shift[T](op: => T): T - - def path(name: => Name): String = shift(path(symbolOfName(name))) - def path(sym: Symbol): String = backticked(shift(sym.fullName)) - def sig(sym: Symbol): String = shift(sym.defString) - } - object typerOp extends PhaseDependentOps { - def shift[T](op: => T): T = exitingTyper(op) - } - object flatOp extends PhaseDependentOps { - def shift[T](op: => T): T = exitingFlatten(op) - } - - def originalPath(name: String): String = originalPath(name: TermName) - def originalPath(name: Name): String = typerOp path name - def originalPath(sym: Symbol): String = typerOp path sym - def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName - def translatePath(path: String) = { - val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path) - sym match { - case NoSymbol => None - case _ => Some(flatPath(sym)) - } - } - def translateEnclosingClass(n: String) = { - def enclosingClass(s: Symbol): Symbol = - if (s == NoSymbol || s.isClass) s else enclosingClass(s.owner) - enclosingClass(symbolOfTerm(n)) match { - case NoSymbol => None - case c => Some(flatPath(c)) - } - } - - private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(replOutput.dir, parent) { - /** Overridden here to try translating a simple name to the generated - * class name if the original attempt fails. This method is used by - * getResourceAsStream as well as findClass. - */ - override protected def findAbstractFile(name: String): AbstractFile = - super.findAbstractFile(name) match { - case null => translatePath(name) map (super.findAbstractFile(_)) orNull - case file => file - } - } - private def makeClassLoader(): AbstractFileClassLoader = - new TranslatingClassLoader(parentClassLoader match { - case null => ScalaClassLoader fromURLs compilerClasspath - case p => new URLClassLoader(compilerClasspath, p) - }) - - // Set the current Java "context" class loader to this interpreter's class loader - def setContextClassLoader() = classLoader.setAsContext() - - def allDefinedNames: List[Name] = exitingTyper(replScope.toList.map(_.name).sorted) - def unqualifiedIds: List[String] = allDefinedNames map (_.decode) sorted - - /** Most recent tree handled which wasn't wholly synthetic. */ - private def mostRecentlyHandledTree: Option[Tree] = { - prevRequests.reverse foreach { req => - req.handlers.reverse foreach { - case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => return Some(x.member) - case _ => () - } - } - None - } - - private def updateReplScope(sym: Symbol, isDefined: Boolean) { - def log(what: String) { - val mark = if (sym.isType) "t " else "v " - val name = exitingTyper(sym.nameString) - val info = cleanTypeAfterTyper(sym) - val defn = sym defStringSeenAs info - - scopelog(f"[$mark$what%6s] $name%-25s $defn%s") - } - if (ObjectClass isSubClass sym.owner) return - // unlink previous - replScope lookupAll sym.name foreach { sym => - log("unlink") - replScope unlink sym - } - val what = if (isDefined) "define" else "import" - log(what) - replScope enter sym - } - - def recordRequest(req: Request) { - if (req == null) - return - - prevRequests += req - - // warning about serially defining companions. It'd be easy - // enough to just redefine them together but that may not always - // be what people want so I'm waiting until I can do it better. 
- exitingTyper { - req.defines filterNot (s => req.defines contains s.companionSymbol) foreach { newSym => - val oldSym = replScope lookup newSym.name.companionName - if (Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }) { - replwarn(s"warning: previously defined $oldSym is not a companion to $newSym.") - replwarn("Companions must be defined together; you may wish to use :paste mode for this.") - } - } - } - exitingTyper { - req.imports foreach (sym => updateReplScope(sym, isDefined = false)) - req.defines foreach (sym => updateReplScope(sym, isDefined = true)) - } - } - - private[nsc] def replwarn(msg: => String) { - if (!settings.nowarnings.value) - printMessage(msg) - } - - def compileSourcesKeepingRun(sources: SourceFile*) = { - val run = new Run() - reporter.reset() - run compileSources sources.toList - (!reporter.hasErrors, run) - } - - /** Compile an nsc SourceFile. Returns true if there are - * no compilation errors, or false otherwise. - */ - def compileSources(sources: SourceFile*): Boolean = - compileSourcesKeepingRun(sources: _*)._1 - - /** Compile a string. Returns true if there are no - * compilation errors, or false otherwise. - */ - def compileString(code: String): Boolean = - compileSources(new BatchSourceFile("